diff --git a/examples/enterprise_rulesets/README.md b/examples/enterprise_rulesets/README.md
new file mode 100644
index 0000000000..7aaa73e550
--- /dev/null
+++ b/examples/enterprise_rulesets/README.md
@@ -0,0 +1,54 @@
+# GitHub Enterprise Ruleset Examples
+
+This directory demonstrates how to configure GitHub Enterprise rulesets using the Terraform GitHub provider.
+
+## Overview
+
+Enterprise rulesets allow you to enforce policies across all organizations in your GitHub Enterprise. The examples showcase all four target types:
+
+- **Branch Target** (`branch_rulesets.tf`) - Branch protection rules with PR requirements, status checks, and commit patterns
+- **Tag Target** (`tag_ruleset.tf`) - Tag protection rules with naming patterns and immutability controls
+- **Push Target** (`push_rulesets.tf`) - File restrictions, size limits, and content policies (beta feature)
+- **Repository Target** (`repository_ruleset.tf`) - Repository management rules for creation, deletion, and naming conventions
+
+## Requirements
+
+- GitHub Enterprise Cloud account
+- Personal access token with enterprise admin permissions
+- Terraform >= 1.0 (required by the `~> 6.0` provider constraint in `main.tf`)
+
+## Usage
+
+1. Set your environment variables:
+
+```bash
+export TF_VAR_github_token="your_github_token"
+export TF_VAR_enterprise_slug="your-enterprise-slug"
+```
+
+2. Customize the examples by replacing `"your-enterprise"` with your actual enterprise slug
+
+3. Apply the configuration:
+
+```bash
+terraform init
+terraform plan
+terraform apply
+```
+
+## Target Types
+
+Each target type supports different rules:
+
+- **Branch/Tag**: creation, deletion, update, signatures, linear history, PR requirements, status checks
+- **Push**: file restrictions, size limits, file extensions, commit patterns
+- **Repository**: creation, deletion, transfer, naming patterns, visibility controls
+
+See the individual `.tf` files for detailed examples and available rules. 
+ +## Important Notes + +- All enterprise rulesets require organization and repository targeting via `conditions` +- The `push` target is currently in beta and subject to change +- Branch and tag targets require `ref_name` conditions +- Repository and push targets do not use `ref_name` conditions diff --git a/examples/enterprise_rulesets/branch_rulesets.tf b/examples/enterprise_rulesets/branch_rulesets.tf new file mode 100644 index 0000000000..93af479900 --- /dev/null +++ b/examples/enterprise_rulesets/branch_rulesets.tf @@ -0,0 +1,174 @@ +# Example: Branch target ruleset with comprehensive branch protection rules +# This ruleset applies to branches across the enterprise + +resource "github_enterprise_ruleset" "branch_protection" { + enterprise_slug = "your-enterprise" + name = "branch-protection-ruleset" + target = "branch" + enforcement = "active" + + # Optional: Allow certain users/teams to bypass the ruleset + bypass_actors { + actor_id = 1 + actor_type = "OrganizationAdmin" + bypass_mode = "always" + } + + bypass_actors { + actor_type = "DeployKey" + bypass_mode = "always" + } + + # Conditions define which organizations, repositories, and refs this ruleset applies to + conditions { + # Target all organizations in the enterprise + organization_name { + include = ["~ALL"] + exclude = [] + } + + # Target all repositories + repository_name { + include = ["~ALL"] + exclude = ["test-*"] # Exclude test repositories + } + + # Target all branches (required for branch target) + ref_name { + include = ["~DEFAULT_BRANCH", "main", "master", "release/*"] + exclude = ["experimental/*"] + } + } + + # Rules that apply to matching branches + rules { + # Prevent branch creation without bypass permission + creation = true + + # Prevent branch updates without bypass permission + update = false + + # Prevent branch deletion without bypass permission + deletion = true + + # Require linear history (no merge commits) + required_linear_history = true + + # Require signed commits + 
required_signatures = true + + # Prevent force pushes + non_fast_forward = true + + # Pull request requirements + pull_request { + dismiss_stale_reviews_on_push = true + require_code_owner_review = true + require_last_push_approval = true + required_approving_review_count = 2 + required_review_thread_resolution = true + allowed_merge_methods = ["squash", "merge"] + } + + # Status check requirements + required_status_checks { + strict_required_status_checks_policy = true + do_not_enforce_on_create = false + + required_check { + context = "ci/build" + integration_id = 0 + } + + required_check { + context = "ci/test" + integration_id = 0 + } + } + + # Commit message pattern requirements + commit_message_pattern { + name = "Conventional Commits" + operator = "regex" + pattern = "^(feat|fix|docs|style|refactor|test|chore)(\\(.+\\))?: .{1,50}" + negate = false + } + + # Commit author email pattern + commit_author_email_pattern { + name = "Corporate Email Only" + operator = "regex" + pattern = "@your-company\\.com$" + negate = false + } + + # Committer email pattern + committer_email_pattern { + name = "Corporate Email Only" + operator = "regex" + pattern = "@your-company\\.com$" + negate = false + } + + # Branch name pattern (only for branch target) + branch_name_pattern { + name = "Valid Branch Names" + operator = "regex" + pattern = "^(main|master|develop|feature/|bugfix/|hotfix/|release/)" + negate = false + } + + # Code scanning requirements + required_code_scanning { + required_code_scanning_tool { + tool = "CodeQL" + alerts_threshold = "errors" + security_alerts_threshold = "high_or_higher" + } + } + + # Copilot code review (if enabled) + copilot_code_review { + review_on_push = true + review_draft_pull_requests = false + } + } +} + +resource "github_enterprise_ruleset" "branch_by_property" { + enterprise_slug = "your-enterprise" + name = "production-repos-branch-protection" + target = "branch" + enforcement = "active" + + conditions { + organization_name { + 
include = ["~ALL"] + exclude = [] + } + + # Target repositories based on custom properties + repository_property { + include { + name = "environment" + property_values = ["production", "staging"] + source = "custom" + } + + exclude { + name = "lifecycle" + property_values = ["deprecated", "archived"] + } + } + + ref_name { + include = ["~DEFAULT_BRANCH", "refs/heads/release/*"] + exclude = [] + } + } + + rules { + deletion = true + non_fast_forward = true + } +} \ No newline at end of file diff --git a/examples/enterprise_rulesets/main.tf b/examples/enterprise_rulesets/main.tf new file mode 100644 index 0000000000..1cfc34c26c --- /dev/null +++ b/examples/enterprise_rulesets/main.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + github = { + source = "integrations/github" + version = "~> 6.0" + } + } +} diff --git a/examples/enterprise_rulesets/push_rulesets.tf b/examples/enterprise_rulesets/push_rulesets.tf new file mode 100644 index 0000000000..a3e7391e91 --- /dev/null +++ b/examples/enterprise_rulesets/push_rulesets.tf @@ -0,0 +1,154 @@ +# Example: Push target ruleset for file and content restrictions +# This ruleset applies to all pushes across the enterprise + +resource "github_enterprise_ruleset" "push_restrictions" { + enterprise_slug = "your-enterprise" + name = "push-restrictions-ruleset" + target = "push" + enforcement = "active" + + # Allow deploy keys and organization admins to bypass + bypass_actors { + actor_type = "DeployKey" + bypass_mode = "always" + } + + bypass_actors { + actor_id = 1 + actor_type = "OrganizationAdmin" + bypass_mode = "always" + } + + # Conditions define which organizations and repositories this ruleset applies to + # Note: ref_name is NOT used for push target + conditions { + # Target all organizations + organization_name { + include = ["~ALL"] + exclude = [] + } + + # Target all repositories + repository_name { + include = ["~ALL"] + exclude = ["sandbox-*"] + } + } + + # Rules that apply to all pushes + rules { + # 
Restrict specific file paths from being pushed + file_path_restriction { + restricted_file_paths = [ + "secrets.txt", + "*.key", + "*.pem", + ".env", + "credentials/*" + ] + } + + # Limit maximum file size to prevent large files + max_file_size { + max_file_size = 100 # Max 100 MB + } + + # Limit maximum file path length + max_file_path_length { + max_file_path_length = 255 + } + + # Restrict specific file extensions + file_extension_restriction { + restricted_file_extensions = [ + "*.exe", + "*.dll", + "*.so", + "*.dylib", + "*.zip", + "*.tar.gz" + ] + } + + # Commit message pattern + commit_message_pattern { + name = "Valid Commit Message" + operator = "regex" + pattern = "^(feat|fix|docs|style|refactor|test|chore)(\\(.+\\))?: .+" + negate = false + } + + # Commit author email pattern + commit_author_email_pattern { + name = "Corporate Email" + operator = "ends_with" + pattern = "@your-company.com" + negate = false + } + + # Committer email pattern + committer_email_pattern { + name = "Corporate Email" + operator = "ends_with" + pattern = "@your-company.com" + negate = false + } + } +} + +# Example: Security-focused push ruleset +resource "github_enterprise_ruleset" "security_push_restrictions" { + enterprise_slug = "your-enterprise" + name = "security-push-restrictions" + target = "push" + enforcement = "active" + + conditions { + organization_name { + include = ["~ALL"] + exclude = [] + } + + repository_name { + include = ["*-prod", "*-production"] + exclude = [] + } + } + + rules { + # Block common secret file patterns + file_path_restriction { + restricted_file_paths = [ + "*.pem", + "*.key", + "*.cert", + "*.p12", + "*.pfx", + ".env", + ".env.*", + "secrets.yml", + "credentials.json" + ] + } + + # Strict file size limits for production + max_file_size { + max_file_size = 50 # Max 50 MB + } + + # Block executable and archive files + file_extension_restriction { + restricted_file_extensions = [ + "*.exe", + "*.dll", + "*.so", + "*.dylib", + "*.bin", + "*.dmg" 
+ ] + } + + # Require signed commits + required_signatures = true + } +} diff --git a/examples/enterprise_rulesets/repository_ruleset.tf b/examples/enterprise_rulesets/repository_ruleset.tf new file mode 100644 index 0000000000..1d0ae4e8fc --- /dev/null +++ b/examples/enterprise_rulesets/repository_ruleset.tf @@ -0,0 +1,130 @@ + +# Example: Repository target ruleset for repository management +# This ruleset controls repository creation, deletion, and naming +resource "github_enterprise_ruleset" "repository_management" { + enterprise_slug = "your-enterprise" + name = "repository-management-ruleset" + target = "repository" + enforcement = "active" + + # Allow organization admins to bypass repository rules + bypass_actors { + actor_id = 1 + actor_type = "OrganizationAdmin" + bypass_mode = "always" + } + + # Conditions define which organizations and repositories this ruleset applies to + # Note: ref_name is NOT used for repository target + conditions { + # Target all organizations + organization_name { + include = ["~ALL"] + } + + # Target all repositories + repository_name { + include = ["~ALL"] + } + } + + # Repository-specific rules (only valid for repository target) + rules { + # Prevent repository creation without bypass permission + repository_creation = true + + # Prevent repository deletion without bypass permission + repository_deletion = true + + # Prevent repository transfer without bypass permission + repository_transfer = true + + # Enforce repository naming conventions + repository_name { + pattern = "^[a-z][a-z0-9-]*$" # lowercase letters, numbers, and hyphens only + negate = false + } + + # Control repository visibility changes + repository_visibility { + internal = true # Allow internal visibility + private = true # Allow private visibility + # Note: public visibility is implicitly allowed if not restricted + } + } +} + +# Example: Stricter repository ruleset for production organizations +resource "github_enterprise_ruleset" 
"production_repository_rules" { + enterprise_slug = "your-enterprise" + name = "production-repository-rules" + target = "repository" + enforcement = "active" + + bypass_actors { + actor_id = 1 + actor_type = "OrganizationAdmin" + bypass_mode = "always" + } + + conditions { + # Only apply to production organizations + organization_name { + include = ["*-production", "*-prod"] + } + + repository_name { + include = ["~ALL"] + } + } + + rules { + # Block repository creation, deletion, and transfer + repository_creation = true + repository_deletion = true + repository_transfer = true + + # Strict naming: must start with org prefix and follow kebab-case + repository_name { + pattern = "^prod-[a-z][a-z0-9-]*$" + negate = false + } + + # Only allow private repositories in production + repository_visibility { + internal = false + private = true + } + } +} + +# Example: Repository ruleset with organization ID targeting +resource "github_enterprise_ruleset" "org_id_repository_rules" { + enterprise_slug = "your-enterprise" + name = "org-id-repository-rules" + target = "repository" + enforcement = "evaluate" # Test mode - doesn't block, just reports + + conditions { + # Use organization_id instead of organization_name + # This is useful when you know the specific org IDs + organization_id = [123456, 789012] + + # Use repository_id for specific repositories + repository_id = [111111, 222222] + } + + rules { + repository_creation = true + + repository_name { + pattern = "^[a-z0-9-]+$" + negate = false + } + + repository_visibility { + internal = true + private = true + } + } +} diff --git a/examples/enterprise_rulesets/tag_ruleset.tf b/examples/enterprise_rulesets/tag_ruleset.tf new file mode 100644 index 0000000000..8c1238e58d --- /dev/null +++ b/examples/enterprise_rulesets/tag_ruleset.tf @@ -0,0 +1,121 @@ +# Example: Tag target ruleset for protecting tags +# This ruleset applies to tags across the enterprise + +resource "github_enterprise_ruleset" "tag_protection" { + 
enterprise_slug = "your-enterprise" + name = "tag-protection-ruleset" + target = "tag" + enforcement = "active" + + # Allow organization admins to bypass tag rules + bypass_actors { + actor_id = 1 + actor_type = "OrganizationAdmin" + bypass_mode = "always" + } + + # Conditions define which organizations, repositories, and refs this ruleset applies to + conditions { + # Target all organizations + organization_name { + include = ["~ALL"] + exclude = [] + } + + # Target all repositories + repository_name { + include = ["~ALL"] + exclude = [] + } + + # Target specific tag patterns (required for tag target) + ref_name { + include = ["v*", "release/*"] + exclude = ["*-beta", "*-alpha"] + } + } + + # Rules that apply to matching tags + rules { + # Prevent tag creation without bypass permission + creation = true + + # Prevent tag updates (tags should be immutable) + update = true + + # Prevent tag deletion without bypass permission + deletion = true + + # Require signed commits for tags + required_signatures = true + + # Tag name pattern (only for tag target) + tag_name_pattern { + name = "Semantic Version Tags" + operator = "regex" + pattern = "^v[0-9]+\\.[0-9]+\\.[0-9]+(-[a-zA-Z0-9.]+)?$" + negate = false + } + + # Commit message pattern for tagged commits + commit_message_pattern { + name = "Release Commit Message" + operator = "starts_with" + pattern = "Release:" + negate = false + } + + # Require specific commit author email pattern + commit_author_email_pattern { + name = "Release Manager Email" + operator = "contains" + pattern = "release@your-company.com" + negate = false + } + } +} + +# Example: Less restrictive tag ruleset for development tags +resource "github_enterprise_ruleset" "dev_tag_protection" { + enterprise_slug = "your-enterprise" + name = "dev-tag-protection-ruleset" + target = "tag" + enforcement = "active" + + conditions { + organization_name { + include = ["~ALL"] + exclude = [] + } + + repository_name { + include = ["~ALL"] + exclude = [] + } + + # 
Only apply to development/snapshot tags + ref_name { + include = ["*-SNAPSHOT", "*-dev"] + exclude = [] + } + } + + rules { + # Allow tag creation + creation = false + + # Allow tag updates for development tags + update = false + + # Prevent tag deletion + deletion = true + + # Tag name pattern for development tags + tag_name_pattern { + name = "Development Tag Pattern" + operator = "regex" + pattern = "^v[0-9]+\\.[0-9]+\\.[0-9]+-[a-zA-Z0-9.]+(SNAPSHOT|dev)$" + negate = false + } + } +} diff --git a/github/data_source_github_enterprise_ruleset.go b/github/data_source_github_enterprise_ruleset.go new file mode 100644 index 0000000000..4db805951a --- /dev/null +++ b/github/data_source_github_enterprise_ruleset.go @@ -0,0 +1,417 @@ +package github + +import ( + "context" + "errors" + "net/http" + "strconv" + + "github.com/google/go-github/v82/github" + "github.com/hashicorp/terraform-plugin-log/tflog" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +func dataSourceGithubEnterpriseRuleset() *schema.Resource { + return &schema.Resource{ + Description: "Use this data source to retrieve information about a GitHub enterprise ruleset.", + ReadContext: dataSourceGithubEnterpriseRulesetRead, + + Schema: map[string]*schema.Schema{ + "enterprise_slug": { + Type: schema.TypeString, + Required: true, + Description: "The slug of the enterprise.", + }, + "ruleset_id": { + Type: schema.TypeInt, + Required: true, + Description: "The ID of the ruleset to retrieve.", + }, + "name": { + Type: schema.TypeString, + Computed: true, + Description: "The name of the ruleset.", + }, + "target": { + Type: schema.TypeString, + Computed: true, + Description: "The target of the ruleset (branch, tag, or push).", + }, + "enforcement": { + Type: schema.TypeString, + Computed: true, + Description: "The enforcement level of the ruleset (disabled, active, or evaluate).", + }, + "bypass_actors": { + Type: schema.TypeList, + 
Computed: true, + Description: "The actors that can bypass the rules in this ruleset.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "actor_id": { + Type: schema.TypeInt, + Computed: true, + Description: "The ID of the actor that can bypass a ruleset.", + }, + "actor_type": { + Type: schema.TypeString, + Computed: true, + Description: "The type of actor that can bypass a ruleset.", + }, + "bypass_mode": { + Type: schema.TypeString, + Computed: true, + Description: "When the specified actor can bypass the ruleset.", + }, + }, + }, + }, + "node_id": { + Type: schema.TypeString, + Computed: true, + Description: "GraphQL global node id for use with v4 API.", + }, + "conditions": { + Type: schema.TypeList, + Computed: true, + Description: "Parameters for an enterprise ruleset condition.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "organization_name": { + Type: schema.TypeList, + Computed: true, + Description: "Conditions for organization names that the ruleset targets.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "include": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{Type: schema.TypeString}, + Description: "Array of organization name patterns to include.", + }, + "exclude": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{Type: schema.TypeString}, + Description: "Array of organization name patterns to exclude.", + }, + }, + }, + }, + "organization_id": { + Type: schema.TypeList, + Computed: true, + Description: "Conditions for organization IDs that the ruleset targets.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "organization_ids": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{Type: schema.TypeInt}, + Description: "Array of organization IDs to target.", + }, + }, + }, + }, + "repository_name": { + Type: schema.TypeList, + Computed: true, + Description: "Conditions for repository names that the ruleset targets.", + 
Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "include": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{Type: schema.TypeString}, + Description: "Array of repository name patterns to include.", + }, + "exclude": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{Type: schema.TypeString}, + Description: "Array of repository name patterns to exclude.", + }, + "protected": { + Type: schema.TypeBool, + Computed: true, + Description: "Whether to target only protected repositories.", + }, + }, + }, + }, + "repository_id": { + Type: schema.TypeList, + Computed: true, + Description: "Conditions for repository IDs that the ruleset targets.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "repository_ids": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{Type: schema.TypeInt}, + Description: "Array of repository IDs to target.", + }, + }, + }, + }, + "ref_name": { + Type: schema.TypeList, + Computed: true, + Description: "Conditions for ref names (branches or tags) that the ruleset targets.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "include": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{Type: schema.TypeString}, + Description: "Array of ref name patterns to include.", + }, + "exclude": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{Type: schema.TypeString}, + Description: "Array of ref name patterns to exclude.", + }, + }, + }, + }, + }, + }, + }, + "rules": { + Type: schema.TypeList, + Computed: true, + Description: "Rules for the ruleset.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "creation": { + Type: schema.TypeBool, + Computed: true, + Description: "Only allow users with bypass permission to create matching refs.", + }, + "update": { + Type: schema.TypeBool, + Computed: true, + Description: "Only allow users with bypass permission to update matching refs.", + }, + "deletion": { + Type: 
schema.TypeBool, + Computed: true, + Description: "Only allow users with bypass permissions to delete matching refs.", + }, + "required_linear_history": { + Type: schema.TypeBool, + Computed: true, + Description: "Prevent merge commits from being pushed to matching branches.", + }, + "required_signatures": { + Type: schema.TypeBool, + Computed: true, + Description: "Commits pushed to matching branches must have verified signatures.", + }, + "non_fast_forward": { + Type: schema.TypeBool, + Computed: true, + Description: "Prevent users with push access from force pushing to branches.", + }, + "pull_request": { + Type: schema.TypeList, + Computed: true, + Description: "Require all commits be made to a non-target branch and submitted via a pull request before they can be merged.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "dismiss_stale_reviews_on_push": { + Type: schema.TypeBool, + Computed: true, + Description: "New, reviewable commits pushed will dismiss previous pull request review approvals.", + }, + "require_code_owner_review": { + Type: schema.TypeBool, + Computed: true, + Description: "Require an approving review in pull requests that modify files that have a designated code owner.", + }, + "require_last_push_approval": { + Type: schema.TypeBool, + Computed: true, + Description: "Whether the most recent reviewable push must be approved by someone other than the person who pushed it.", + }, + "required_approving_review_count": { + Type: schema.TypeInt, + Computed: true, + Description: "The number of approving reviews that are required before a pull request can be merged.", + }, + "required_review_thread_resolution": { + Type: schema.TypeBool, + Computed: true, + Description: "All conversations on code must be resolved before a pull request can be merged.", + }, + }, + }, + }, + "required_status_checks": { + Type: schema.TypeList, + Computed: true, + Description: "Choose which status checks must pass before branches can be merged into a 
branch that matches this rule.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "strict_required_status_checks_policy": { + Type: schema.TypeBool, + Computed: true, + Description: "Whether pull requests targeting a matching branch must be tested with the latest code.", + }, + "do_not_enforce_on_create": { + Type: schema.TypeBool, + Computed: true, + Description: "Allow repositories and branches to be created if a check would otherwise prohibit it.", + }, + }, + }, + }, + "required_workflows": { + Type: schema.TypeList, + Computed: true, + Description: "Choose which Actions workflows must pass before branches can be merged into a branch that matches this rule.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "do_not_enforce_on_create": { + Type: schema.TypeBool, + Computed: true, + Description: "Allow repositories and branches to be created if a check would otherwise prohibit it.", + }, + }, + }, + }, + // Repository target rules (only populated when target = "repository") + "repository_creation": { + Type: schema.TypeBool, + Computed: true, + Description: "Only allow users with bypass permission to create repositories. Only valid for `repository` target.", + }, + "repository_deletion": { + Type: schema.TypeBool, + Computed: true, + Description: "Only allow users with bypass permission to delete repositories. Only valid for `repository` target.", + }, + "repository_transfer": { + Type: schema.TypeBool, + Computed: true, + Description: "Only allow users with bypass permission to transfer repositories. Only valid for `repository` target.", + }, + "repository_name": { + Type: schema.TypeList, + Computed: true, + Description: "Restrict repository names to match specified patterns. 
Only valid for `repository` target.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "negate": { + Type: schema.TypeBool, + Computed: true, + Description: "If true, the rule will fail if the pattern matches.", + }, + "pattern": { + Type: schema.TypeString, + Computed: true, + Description: "The pattern to match repository names against.", + }, + }, + }, + }, + "repository_visibility": { + Type: schema.TypeList, + Computed: true, + Description: "Restrict repository visibility changes. Only valid for `repository` target.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "internal": { + Type: schema.TypeBool, + Computed: true, + Description: "Allow internal visibility for repositories.", + }, + "private": { + Type: schema.TypeBool, + Computed: true, + Description: "Allow private visibility for repositories.", + }, + }, + }, + }, + }, + }, + }, + "etag": { + Type: schema.TypeString, + Computed: true, + Description: "The ETag of the ruleset for conditional updates.", + }, + }, + } +} + +func dataSourceGithubEnterpriseRulesetRead(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*Owner).v3client + enterpriseSlug := d.Get("enterprise_slug").(string) + rulesetID := int64(d.Get("ruleset_id").(int)) + + tflog.Trace(ctx, "Reading enterprise ruleset", map[string]any{ + "enterprise_slug": enterpriseSlug, + "ruleset_id": rulesetID, + }) + + ruleset, resp, err := client.Enterprise.GetRepositoryRuleset(ctx, enterpriseSlug, rulesetID) + if err != nil { + var ghErr *github.ErrorResponse + if errors.As(err, &ghErr) { + if ghErr.Response.StatusCode == http.StatusNotFound { + tflog.Error(ctx, "Enterprise ruleset not found", map[string]any{ + "enterprise_slug": enterpriseSlug, + "ruleset_id": rulesetID, + }) + return diag.Errorf("enterprise ruleset %d not found in enterprise %s", rulesetID, enterpriseSlug) + } + } + tflog.Error(ctx, "Failed to read enterprise ruleset", map[string]any{ + "enterprise_slug": 
enterpriseSlug, + "ruleset_id": rulesetID, + "error": err.Error(), + }) + return diag.FromErr(err) + } + + // Set the ID to the ruleset ID + d.SetId(strconv.FormatInt(ruleset.GetID(), 10)) + + // Set all computed attributes + if err := d.Set("name", ruleset.Name); err != nil { + return diag.FromErr(err) + } + if err := d.Set("target", ruleset.GetTarget()); err != nil { + return diag.FromErr(err) + } + if err := d.Set("enforcement", ruleset.Enforcement); err != nil { + return diag.FromErr(err) + } + if err := d.Set("bypass_actors", flattenBypassActors(ruleset.BypassActors)); err != nil { + return diag.FromErr(err) + } + if err := d.Set("conditions", flattenConditions(ctx, ruleset.GetConditions(), RulesetLevelEnterprise)); err != nil { + return diag.FromErr(err) + } + if err := d.Set("rules", flattenRules(ctx, ruleset.Rules, RulesetLevelEnterprise)); err != nil { + return diag.FromErr(err) + } + if err := d.Set("node_id", ruleset.GetNodeID()); err != nil { + return diag.FromErr(err) + } + if err := d.Set("etag", resp.Header.Get("ETag")); err != nil { + return diag.FromErr(err) + } + + tflog.Trace(ctx, "Successfully read enterprise ruleset", map[string]any{ + "enterprise_slug": enterpriseSlug, + "ruleset_id": rulesetID, + "name": ruleset.Name, + }) + + return nil +} diff --git a/github/data_source_github_enterprise_ruleset_test.go b/github/data_source_github_enterprise_ruleset_test.go new file mode 100644 index 0000000000..69e2f7407d --- /dev/null +++ b/github/data_source_github_enterprise_ruleset_test.go @@ -0,0 +1,77 @@ +package github + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/knownvalue" + "github.com/hashicorp/terraform-plugin-testing/statecheck" + "github.com/hashicorp/terraform-plugin-testing/tfjsonpath" +) + +func TestAccGithubEnterpriseRulesetDataSource(t *testing.T) { + t.Run("queries an 
enterprise ruleset", func(t *testing.T) { + randomID := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum) + testRulesetName := fmt.Sprintf("%senterprise-ruleset-%s", testResourcePrefix, randomID) + + config := fmt.Sprintf(` + resource "github_enterprise_ruleset" "test" { + enterprise_slug = "%s" + name = "%s" + target = "branch" + enforcement = "active" + + conditions { + organization_name { + include = ["~ALL"] + exclude = [] + } + + repository_name { + include = ["~ALL"] + exclude = [] + } + + ref_name { + include = ["refs/heads/main"] + exclude = [] + } + } + + rules { + creation = false + deletion = false + } + } + + data "github_enterprise_ruleset" "test" { + enterprise_slug = github_enterprise_ruleset.test.enterprise_slug + ruleset_id = github_enterprise_ruleset.test.ruleset_id + + depends_on = [github_enterprise_ruleset.test] + } + `, testAccConf.enterpriseSlug, testRulesetName) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { + skipUnlessEnterprise(t) + }, + ProviderFactories: providerFactories, + Steps: []resource.TestStep{ + { + Config: config, + ConfigStateChecks: []statecheck.StateCheck{ + statecheck.ExpectKnownValue("data.github_enterprise_ruleset.test", tfjsonpath.New("name"), knownvalue.NotNull()), + statecheck.ExpectKnownValue("data.github_enterprise_ruleset.test", tfjsonpath.New("name"), knownvalue.StringExact(testRulesetName)), + statecheck.ExpectKnownValue("data.github_enterprise_ruleset.test", tfjsonpath.New("target"), knownvalue.StringExact("branch")), + statecheck.ExpectKnownValue("data.github_enterprise_ruleset.test", tfjsonpath.New("enforcement"), knownvalue.StringExact("active")), + statecheck.ExpectKnownValue("data.github_enterprise_ruleset.test", tfjsonpath.New("node_id"), knownvalue.NotNull()), + statecheck.ExpectKnownValue("data.github_enterprise_ruleset.test", tfjsonpath.New("etag"), knownvalue.NotNull()), + }, + }, + }, + }) + }) +} diff --git a/github/provider.go b/github/provider.go index 2d18019542..ff348026e8 
100644 --- a/github/provider.go +++ b/github/provider.go @@ -213,6 +213,7 @@ func Provider() *schema.Provider { "github_user_invitation_accepter": resourceGithubUserInvitationAccepter(), "github_user_ssh_key": resourceGithubUserSshKey(), "github_enterprise_organization": resourceGithubEnterpriseOrganization(), + "github_enterprise_ruleset": resourceGithubEnterpriseRuleset(), "github_enterprise_actions_runner_group": resourceGithubActionsEnterpriseRunnerGroup(), "github_enterprise_actions_workflow_permissions": resourceGithubEnterpriseActionsWorkflowPermissions(), "github_actions_organization_workflow_permissions": resourceGithubActionsOrganizationWorkflowPermissions(), @@ -294,6 +295,7 @@ func Provider() *schema.Provider { "github_user_external_identity": dataSourceGithubUserExternalIdentity(), "github_users": dataSourceGithubUsers(), "github_enterprise": dataSourceGithubEnterprise(), + "github_enterprise_ruleset": dataSourceGithubEnterpriseRuleset(), "github_repository_environment_deployment_policies": dataSourceGithubRepositoryEnvironmentDeploymentPolicies(), }, } diff --git a/github/resource_github_enterprise_ruleset.go b/github/resource_github_enterprise_ruleset.go new file mode 100644 index 0000000000..5e6ede2d46 --- /dev/null +++ b/github/resource_github_enterprise_ruleset.go @@ -0,0 +1,1035 @@ +package github + +import ( + "context" + "errors" + "fmt" + "net/http" + "strconv" + + "github.com/google/go-github/v82/github" + "github.com/hashicorp/terraform-plugin-log/tflog" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" +) + +func resourceGithubEnterpriseRuleset() *schema.Resource { + return &schema.Resource{ + Description: "Manages GitHub enterprise rulesets", + + CreateContext: resourceGithubEnterpriseRulesetCreate, + ReadContext: resourceGithubEnterpriseRulesetRead, + UpdateContext: resourceGithubEnterpriseRulesetUpdate, + 
DeleteContext: resourceGithubEnterpriseRulesetDelete, + + Importer: &schema.ResourceImporter{ + StateContext: resourceGithubEnterpriseRulesetImport, + }, + + CustomizeDiff: resourceGithubEnterpriseRulesetCustomizeDiff, + + Schema: map[string]*schema.Schema{ + "enterprise_slug": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "The slug of the enterprise.", + }, + "name": { + Type: schema.TypeString, + Required: true, + ValidateDiagFunc: validation.ToDiagFunc(validation.StringLenBetween(1, 100)), + Description: "The name of the ruleset.", + }, + "target": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateDiagFunc: validation.ToDiagFunc(validation.StringInSlice(supportedEnterpriseRulesetTargetTypes, false)), + Description: "Possible values are `branch`, `tag`, `push` and `repository`. Note: The `repository` target is in preview and is subject to change.", + }, + "enforcement": { + Type: schema.TypeString, + Required: true, + ValidateDiagFunc: validation.ToDiagFunc(validation.StringInSlice([]string{"disabled", "active", "evaluate"}, false)), + Description: "Possible values for Enforcement are `disabled`, `active`, `evaluate`. Note: `evaluate` is currently only supported for owners of type `organization`.", + }, + "bypass_actors": { + Type: schema.TypeList, + Optional: true, + DiffSuppressFunc: bypassActorsDiffSuppressFunc, + Description: "The actors that can bypass the rules in this ruleset.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "actor_id": { + Type: schema.TypeInt, + Optional: true, + Default: nil, + Description: "The ID of the actor that can bypass a ruleset. When `actor_type` is `OrganizationAdmin`, this should be set to `1`. 
Some resources such as DeployKey do not have an ID and this should be omitted.", + }, + "actor_type": { + Type: schema.TypeString, + Required: true, + ValidateDiagFunc: validation.ToDiagFunc(validation.StringInSlice([]string{"Integration", "OrganizationAdmin", "RepositoryRole", "Team", "DeployKey", "EnterpriseOwner"}, false)), + Description: "The type of actor that can bypass a ruleset. See https://docs.github.com/en/rest/enterprise-admin/rules for more information", + }, + "bypass_mode": { + Type: schema.TypeString, + Required: true, + ValidateDiagFunc: validation.ToDiagFunc(validation.StringInSlice([]string{"always", "pull_request", "exempt"}, false)), + Description: "When the specified actor can bypass the ruleset. pull_request means that an actor can only bypass rules on pull requests. Can be one of: `always`, `pull_request`, `exempt`.", + }, + }, + }, + }, + "node_id": { + Type: schema.TypeString, + Computed: true, + Description: "GraphQL global node id for use with v4 API.", + }, + "ruleset_id": { + Type: schema.TypeInt, + Computed: true, + Description: "GitHub ID for the ruleset.", + }, + "conditions": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Description: "Parameters for an enterprise ruleset condition. Enterprise rulesets must include organization targeting (organization_name or organization_id) and repository targeting (repository_name or repository_id). For branch and tag targets, ref_name is also required.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "organization_name": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + ConflictsWith: []string{"conditions.0.organization_id"}, + Description: "Conditions for organization names that the ruleset targets. Conflicts with `organization_id`.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "include": { + Type: schema.TypeList, + Required: true, + Description: "Array of organization names or patterns to include. 
One of these patterns must match for the condition to pass. Also accepts `~ALL` to include all organizations.", + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + "exclude": { + Type: schema.TypeList, + Optional: true, + Description: "Array of organization names or patterns to exclude. The condition will not pass if any of these patterns match.", + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + }, + }, + }, + "organization_id": { + Type: schema.TypeList, + Optional: true, + ConflictsWith: []string{"conditions.0.organization_name"}, + Description: "Organization IDs that the ruleset applies to. One of these IDs must match for the condition to pass. Conflicts with `organization_name`.", + Elem: &schema.Schema{ + Type: schema.TypeInt, + }, + }, + "ref_name": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Description: "Conditions for ref names (branches or tags) that the ruleset targets.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "include": { + Type: schema.TypeList, + Required: true, + Description: "Array of ref names or patterns to include. One of these patterns must match for the condition to pass. Also accepts `~DEFAULT_BRANCH` to include the default branch or `~ALL` to include all branches.", + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + "exclude": { + Type: schema.TypeList, + Optional: true, + Description: "Array of ref names or patterns to exclude. The condition will not pass if any of these patterns match.", + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + }, + }, + }, + "repository_name": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Description: "Conditions for repository names that the ruleset targets. 
Exactly one of `repository_name`, `repository_id`, or `repository_property` must be set.", + ExactlyOneOf: []string{"conditions.0.repository_name", "conditions.0.repository_id", "conditions.0.repository_property"}, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "include": { + Type: schema.TypeList, + Required: true, + Description: "Array of repository names or patterns to include. One of these patterns must match for the condition to pass. Also accepts `~ALL` to include all repositories.", + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + "exclude": { + Type: schema.TypeList, + Optional: true, + Description: "Array of repository names or patterns to exclude. The condition will not pass if any of these patterns match.", + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + "protected": { + Type: schema.TypeBool, + Optional: true, + Default: false, + Description: "Whether renaming of target repositories is prevented.", + }, + }, + }, + }, + "repository_id": { + Type: schema.TypeList, + Optional: true, + ExactlyOneOf: []string{"conditions.0.repository_name", "conditions.0.repository_id", "conditions.0.repository_property"}, + Description: "The repository IDs that the ruleset applies to. One of these IDs must match for the condition to pass. Exactly one of `repository_name`, `repository_id`, or `repository_property` must be set.", + Elem: &schema.Schema{ + Type: schema.TypeInt, + }, + }, + "repository_property": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Description: "Conditions based on repository properties. 
Exactly one of `repository_name`, `repository_id`, or `repository_property` must be set.", + ExactlyOneOf: []string{"conditions.0.repository_name", "conditions.0.repository_id", "conditions.0.repository_property"}, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "include": { + Type: schema.TypeList, + Optional: true, + Description: "Array of repository property conditions to include.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + Description: "The name of the repository property to target.", + }, + "property_values": { + Type: schema.TypeList, + Required: true, + Description: "The values to match for the repository property.", + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + "source": { + Type: schema.TypeString, + Optional: true, + Description: "The source of the repository property.", + }, + }, + }, + }, + "exclude": { + Type: schema.TypeList, + Optional: true, + Description: "Array of repository property conditions to exclude.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + Description: "The name of the repository property to target.", + }, + "property_values": { + Type: schema.TypeList, + Required: true, + Description: "The values to match for the repository property.", + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + "source": { + Type: schema.TypeString, + Optional: true, + Description: "The source of the repository property.", + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + "rules": { + Type: schema.TypeList, + Required: true, + MaxItems: 1, + Description: "Rules within the ruleset.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "creation": { + Type: schema.TypeBool, + Optional: true, + Description: "Only allow users with bypass permission to create matching refs.", + }, + "update": { + Type: schema.TypeBool, + Optional: true, + 
Description: "Only allow users with bypass permission to update matching refs.", + }, + "deletion": { + Type: schema.TypeBool, + Optional: true, + Description: "Only allow users with bypass permissions to delete matching refs.", + }, + "required_linear_history": { + Type: schema.TypeBool, + Optional: true, + Description: "Prevent merge commits from being pushed to matching branches.", + }, + "required_signatures": { + Type: schema.TypeBool, + Optional: true, + Description: "Commits pushed to matching branches must have verified signatures.", + }, + "pull_request": { + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + Description: "Require all commits be made to a non-target branch and submitted via a pull request before they can be merged.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "allowed_merge_methods": { + Type: schema.TypeList, + Optional: true, + MinItems: 1, + Description: "Array of allowed merge methods. Allowed values include `merge`, `squash`, and `rebase`. At least one option must be enabled.", + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateDiagFunc: validation.ToDiagFunc(validation.StringInSlice([]string{"merge", "squash", "rebase"}, false)), + }, + }, + "dismiss_stale_reviews_on_push": { + Type: schema.TypeBool, + Optional: true, + Default: false, + Description: "New, reviewable commits pushed will dismiss previous pull request review approvals. Defaults to `false`.", + }, + "require_code_owner_review": { + Type: schema.TypeBool, + Optional: true, + Default: false, + Description: "Require an approving review in pull requests that modify files that have a designated code owner. Defaults to `false`.", + }, + "require_last_push_approval": { + Type: schema.TypeBool, + Optional: true, + Default: false, + Description: "Whether the most recent reviewable push must be approved by someone other than the person who pushed it. 
Defaults to `false`.", + }, + "required_approving_review_count": { + Type: schema.TypeInt, + Optional: true, + Default: 0, + Description: "The number of approving reviews that are required before a pull request can be merged. Defaults to `0`.", + }, + "required_review_thread_resolution": { + Type: schema.TypeBool, + Optional: true, + Default: false, + Description: "All conversations on code must be resolved before a pull request can be merged. Defaults to `false`.", + }, + }, + }, + }, + "copilot_code_review": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Description: "Automatically request Copilot code review for new pull requests if the author has access to Copilot code review and their premium requests quota has not reached the limit.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "review_on_push": { + Type: schema.TypeBool, + Optional: true, + Default: false, + Description: "Copilot automatically reviews each new push to the pull request. Defaults to `false`.", + }, + "review_draft_pull_requests": { + Type: schema.TypeBool, + Optional: true, + Default: false, + Description: "Copilot automatically reviews draft pull requests before they are marked as ready for review. Defaults to `false`.", + }, + }, + }, + }, + "required_status_checks": { + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + Description: "Choose which status checks must pass before branches can be merged into a branch that matches this rule. When enabled, commits must first be pushed to another branch, then merged or pushed directly to a branch that matches this rule after status checks have passed.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "required_check": { + Type: schema.TypeSet, + MinItems: 1, + Required: true, + Description: "Status checks that are required. 
Several can be defined.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "context": { + Type: schema.TypeString, + Required: true, + Description: "The status check context name that must be present on the commit.", + }, + "integration_id": { + Type: schema.TypeInt, + Optional: true, + Default: 0, + Description: "The optional integration ID that this status check must originate from.", + }, + }, + }, + }, + "strict_required_status_checks_policy": { + Type: schema.TypeBool, + Optional: true, + Description: "Whether pull requests targeting a matching branch must be tested with the latest code. This setting will not take effect unless at least one status check is enabled. Defaults to `false`.", + }, + "do_not_enforce_on_create": { + Type: schema.TypeBool, + Optional: true, + Description: "Allow repositories and branches to be created if a check would otherwise prohibit it.", + Default: false, + }, + }, + }, + }, + "non_fast_forward": { + Type: schema.TypeBool, + Optional: true, + Description: "Prevent users with push access from force pushing to branches.", + }, + "commit_message_pattern": { + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + Description: "Parameters to be used for the commit_message_pattern rule.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Optional: true, + Description: "How this rule will appear to users.", + }, + "negate": { + Type: schema.TypeBool, + Optional: true, + Description: "If true, the rule will fail if the pattern matches.", + }, + "operator": { + Type: schema.TypeString, + Required: true, + Description: "The operator to use for matching. 
Can be one of: `starts_with`, `ends_with`, `contains`, `regex`.", + }, + "pattern": { + Type: schema.TypeString, + Required: true, + Description: "The pattern to match with.", + }, + }, + }, + }, + "commit_author_email_pattern": { + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + Description: "Parameters to be used for the commit_author_email_pattern rule.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Optional: true, + Description: "How this rule will appear to users.", + }, + "negate": { + Type: schema.TypeBool, + Optional: true, + Description: "If true, the rule will fail if the pattern matches.", + }, + "operator": { + Type: schema.TypeString, + Required: true, + Description: "The operator to use for matching. Can be one of: `starts_with`, `ends_with`, `contains`, `regex`.", + }, + "pattern": { + Type: schema.TypeString, + Required: true, + Description: "The pattern to match with.", + }, + }, + }, + }, + "committer_email_pattern": { + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + Description: "Parameters to be used for the committer_email_pattern rule.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Optional: true, + Description: "How this rule will appear to users.", + }, + "negate": { + Type: schema.TypeBool, + Optional: true, + Description: "If true, the rule will fail if the pattern matches.", + }, + "operator": { + Type: schema.TypeString, + Required: true, + Description: "The operator to use for matching. Can be one of: `starts_with`, `ends_with`, `contains`, `regex`.", + }, + "pattern": { + Type: schema.TypeString, + Required: true, + Description: "The pattern to match with.", + }, + }, + }, + }, + "branch_name_pattern": { + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + ConflictsWith: []string{"rules.0.tag_name_pattern"}, + Description: "Parameters to be used for the branch_name_pattern rule. 
This rule only applies to repositories within an enterprise, it cannot be applied to repositories owned by individuals or regular organizations. Conflicts with `tag_name_pattern` as it only applies to rulesets with target `branch`.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Optional: true, + Description: "How this rule will appear to users.", + }, + "negate": { + Type: schema.TypeBool, + Optional: true, + Description: "If true, the rule will fail if the pattern matches.", + }, + "operator": { + Type: schema.TypeString, + Required: true, + Description: "The operator to use for matching. Can be one of: `starts_with`, `ends_with`, `contains`, `regex`.", + }, + "pattern": { + Type: schema.TypeString, + Required: true, + Description: "The pattern to match with.", + }, + }, + }, + }, + "tag_name_pattern": { + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + ConflictsWith: []string{"rules.0.branch_name_pattern"}, + Description: "Parameters to be used for the tag_name_pattern rule. This rule only applies to repositories within an enterprise, it cannot be applied to repositories owned by individuals or regular organizations. Conflicts with `branch_name_pattern` as it only applies to rulesets with target `tag`.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Optional: true, + Description: "How this rule will appear to users.", + }, + "negate": { + Type: schema.TypeBool, + Optional: true, + Description: "If true, the rule will fail if the pattern matches.", + }, + "operator": { + Type: schema.TypeString, + Required: true, + Description: "The operator to use for matching. 
Can be one of: `starts_with`, `ends_with`, `contains`, `regex`.", + }, + "pattern": { + Type: schema.TypeString, + Required: true, + Description: "The pattern to match with.", + }, + }, + }, + }, + "required_workflows": { + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + Description: "Choose which Actions workflows must pass before branches can be merged into a branch that matches this rule.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "do_not_enforce_on_create": { + Type: schema.TypeBool, + Optional: true, + Description: "Allow repositories and branches to be created if a check would otherwise prohibit it.", + }, + "required_workflow": { + Type: schema.TypeSet, + MinItems: 1, + Required: true, + Description: "Actions workflows that are required. Several can be defined.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "repository_id": { + Type: schema.TypeInt, + Required: true, + Description: "The repository in which the workflow is defined.", + }, + "path": { + Type: schema.TypeString, + Required: true, + Description: "The path to the workflow YAML definition file.", + }, + "ref": { + Type: schema.TypeString, + Optional: true, + Default: "master", + Description: "The ref (branch or tag) of the workflow file to use.", + }, + }, + }, + }, + }, + }, + }, + "required_code_scanning": { + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + Description: "Choose which tools must provide code scanning results before the reference is updated. 
When configured, code scanning must be enabled and have results for both the commit and the reference being updated.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "required_code_scanning_tool": { + Type: schema.TypeSet, + MinItems: 1, + Required: true, + Description: "Tools that must provide code scanning results for this rule to pass.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "alerts_threshold": { + Type: schema.TypeString, + Required: true, + Description: "The severity level at which code scanning results that raise alerts block a reference update. Can be one of: `none`, `errors`, `errors_and_warnings`, `all`.", + }, + "security_alerts_threshold": { + Type: schema.TypeString, + Required: true, + Description: "The severity level at which code scanning results that raise security alerts block a reference update. Can be one of: `none`, `critical`, `high_or_higher`, `medium_or_higher`, `all`.", + }, + "tool": { + Type: schema.TypeString, + Required: true, + Description: "The name of a code scanning tool.", + }, + }, + }, + }, + }, + }, + }, + "file_path_restriction": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Description: "Prevent commits that include changes in specified file paths from being pushed to the commit graph.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "restricted_file_paths": { + Type: schema.TypeList, + MinItems: 1, + Required: true, + Description: "The file paths that are restricted from being pushed to the commit graph.", + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + }, + }, + }, + "max_file_size": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Description: "Prevent pushes based on file size.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "max_file_size": { + Type: schema.TypeInt, + Required: true, + Description: "The maximum allowed size of a file in megabytes (MB). 
Valid range is 1-100 MB.", + ValidateDiagFunc: validation.ToDiagFunc(validation.IntBetween(1, 100)), + }, + }, + }, + }, + "max_file_path_length": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Description: "Prevent pushes based on file path length.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "max_file_path_length": { + Type: schema.TypeInt, + Required: true, + Description: "The maximum allowed length of a file path.", + }, + }, + }, + }, + "file_extension_restriction": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Description: "Prevent pushes based on file extensions.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "restricted_file_extensions": { + Type: schema.TypeSet, + MinItems: 1, + Required: true, + Description: "The file extensions that are restricted from being pushed to the commit graph.", + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + }, + }, + }, + // Repository target rules (only valid when target = "repository") + "repository_creation": { + Type: schema.TypeBool, + Optional: true, + Description: "Only allow users with bypass permission to create repositories. Only valid for `repository` target.", + }, + "repository_deletion": { + Type: schema.TypeBool, + Optional: true, + Description: "Only allow users with bypass permission to delete repositories. Only valid for `repository` target.", + }, + "repository_transfer": { + Type: schema.TypeBool, + Optional: true, + Description: "Only allow users with bypass permission to transfer repositories. Only valid for `repository` target.", + }, + "repository_name": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Description: "Restrict repository names to match specified patterns. 
Only valid for `repository` target.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "negate": { + Type: schema.TypeBool, + Optional: true, + Default: false, + Description: "If true, the rule will fail if the pattern matches.", + }, + "pattern": { + Type: schema.TypeString, + Required: true, + Description: "The pattern to match repository names against.", + }, + }, + }, + }, + "repository_visibility": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Description: "Restrict repository visibility changes. Only valid for `repository` target.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "internal": { + Type: schema.TypeBool, + Optional: true, + Default: false, + Description: "Allow internal visibility for repositories.", + }, + "private": { + Type: schema.TypeBool, + Optional: true, + Default: false, + Description: "Allow private visibility for repositories.", + }, + }, + }, + }, + }, + }, + }, + "etag": { + Type: schema.TypeString, + Computed: true, + }, + }, + } +} + +func resourceGithubEnterpriseRulesetCreate(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*Owner).v3client + enterpriseSlug := d.Get("enterprise_slug").(string) + name := d.Get("name").(string) + + tflog.Debug(ctx, "Creating enterprise ruleset", map[string]any{ + "enterprise_slug": enterpriseSlug, + "name": name, + }) + + rulesetReq := resourceGithubEnterpriseRulesetObject(d) + + ruleset, resp, err := client.Enterprise.CreateRepositoryRuleset(ctx, enterpriseSlug, rulesetReq) + if err != nil { + tflog.Error(ctx, "Failed to create enterprise ruleset", map[string]any{ + "enterprise_slug": enterpriseSlug, + "name": name, + "error": err.Error(), + }) + return diag.FromErr(err) + } + + d.SetId(strconv.FormatInt(ruleset.GetID(), 10)) + if err := d.Set("ruleset_id", ruleset.ID); err != nil { + return diag.FromErr(err) + } + if err := d.Set("node_id", ruleset.GetNodeID()); err != nil { + return diag.FromErr(err) + } 
+ if err := d.Set("etag", resp.Header.Get("ETag")); err != nil { + return diag.FromErr(err) + } + + tflog.Info(ctx, "Created enterprise ruleset", map[string]any{ + "enterprise_slug": enterpriseSlug, + "name": name, + "ruleset_id": ruleset.GetID(), + }) + + return nil +} + +func resourceGithubEnterpriseRulesetRead(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*Owner).v3client + enterpriseSlug := d.Get("enterprise_slug").(string) + + rulesetID := int64(d.Get("ruleset_id").(int)) + + tflog.Trace(ctx, "Reading enterprise ruleset", map[string]any{ + "enterprise_slug": enterpriseSlug, + "ruleset_id": rulesetID, + }) + + ruleset, resp, err := client.Enterprise.GetRepositoryRuleset(ctx, enterpriseSlug, rulesetID) + if err != nil { + var ghErr *github.ErrorResponse + if errors.As(err, &ghErr) { + if ghErr.Response.StatusCode == http.StatusNotModified { + tflog.Debug(ctx, "API responded with StatusNotModified, not refreshing state", map[string]any{ + "enterprise_slug": enterpriseSlug, + "ruleset_id": rulesetID, + }) + return nil + } + if ghErr.Response.StatusCode == http.StatusNotFound { + tflog.Info(ctx, "Removing ruleset from state because it no longer exists in GitHub", map[string]any{ + "enterprise_slug": enterpriseSlug, + "ruleset_id": rulesetID, + }) + d.SetId("") + return nil + } + } + tflog.Error(ctx, "Failed to read enterprise ruleset", map[string]any{ + "enterprise_slug": enterpriseSlug, + "ruleset_id": rulesetID, + "error": err.Error(), + }) + return diag.FromErr(err) + } + + if err := d.Set("name", ruleset.Name); err != nil { + return diag.FromErr(err) + } + if err := d.Set("target", ruleset.GetTarget()); err != nil { + return diag.FromErr(err) + } + if err := d.Set("enforcement", ruleset.Enforcement); err != nil { + return diag.FromErr(err) + } + if err := d.Set("bypass_actors", flattenBypassActors(ruleset.BypassActors)); err != nil { + return diag.FromErr(err) + } + if err := d.Set("conditions", 
flattenConditions(ctx, ruleset.GetConditions(), RulesetLevelEnterprise)); err != nil { + return diag.FromErr(err) + } + if err := d.Set("rules", flattenRules(ctx, ruleset.Rules, RulesetLevelEnterprise)); err != nil { + return diag.FromErr(err) + } + if err := d.Set("node_id", ruleset.GetNodeID()); err != nil { + return diag.FromErr(err) + } + if err := d.Set("etag", resp.Header.Get("ETag")); err != nil { + return diag.FromErr(err) + } + + tflog.Trace(ctx, "Successfully read enterprise ruleset", map[string]any{ + "enterprise_slug": enterpriseSlug, + "ruleset_id": rulesetID, + "name": ruleset.Name, + }) + + return nil +} + +func resourceGithubEnterpriseRulesetUpdate(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*Owner).v3client + enterpriseSlug := d.Get("enterprise_slug").(string) + name := d.Get("name").(string) + rulesetID := int64(d.Get("ruleset_id").(int)) + + tflog.Debug(ctx, "Updating enterprise ruleset", map[string]any{ + "enterprise_slug": enterpriseSlug, + "ruleset_id": rulesetID, + "name": name, + }) + + rulesetReq := resourceGithubEnterpriseRulesetObject(d) + + _, resp, err := client.Enterprise.UpdateRepositoryRuleset(ctx, enterpriseSlug, rulesetID, rulesetReq) + if err != nil { + tflog.Error(ctx, "Failed to update enterprise ruleset", map[string]any{ + "enterprise_slug": enterpriseSlug, + "ruleset_id": rulesetID, + "error": err.Error(), + }) + return diag.FromErr(err) + } + + if err := d.Set("etag", resp.Header.Get("ETag")); err != nil { + return diag.FromErr(err) + } + + tflog.Info(ctx, "Updated enterprise ruleset", map[string]any{ + "enterprise_slug": enterpriseSlug, + "ruleset_id": rulesetID, + "name": name, + }) + + return nil +} + +func resourceGithubEnterpriseRulesetDelete(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*Owner).v3client + enterpriseSlug := d.Get("enterprise_slug").(string) + rulesetID := int64(d.Get("ruleset_id").(int)) + + tflog.Debug(ctx, 
"Deleting enterprise ruleset", map[string]any{ + "enterprise_slug": enterpriseSlug, + "ruleset_id": rulesetID, + }) + + _, err := client.Enterprise.DeleteRepositoryRuleset(ctx, enterpriseSlug, rulesetID) + if err != nil { + var ghErr *github.ErrorResponse + if errors.As(err, &ghErr) && ghErr.Response.StatusCode == http.StatusNotFound { + tflog.Info(ctx, "Enterprise ruleset already deleted", map[string]any{ + "enterprise_slug": enterpriseSlug, + "ruleset_id": rulesetID, + }) + return nil + } + tflog.Error(ctx, "Failed to delete enterprise ruleset", map[string]any{ + "enterprise_slug": enterpriseSlug, + "ruleset_id": rulesetID, + "error": err.Error(), + }) + return diag.FromErr(err) + } + + tflog.Info(ctx, "Deleted enterprise ruleset", map[string]any{ + "enterprise_slug": enterpriseSlug, + "ruleset_id": rulesetID, + }) + + return nil +} + +func resourceGithubEnterpriseRulesetImport(ctx context.Context, d *schema.ResourceData, _ any) ([]*schema.ResourceData, error) { + enterpriseSlug, rulesetIDStr, err := parseID2(d.Id()) + if err != nil { + return nil, fmt.Errorf("error importing enterprise ruleset (expected format: :): %w", err) + } + + rulesetID, err := strconv.ParseInt(rulesetIDStr, 10, 64) + if err != nil { + return nil, fmt.Errorf("error importing enterprise ruleset (expected format: :): %w", unconvertibleIdErr(rulesetIDStr, err)) + } + if rulesetID == 0 { + return nil, fmt.Errorf("error importing enterprise ruleset (expected format: :): ruleset_id must be present") + } + + tflog.Debug(ctx, "Importing enterprise ruleset", map[string]any{ + "enterprise_slug": enterpriseSlug, + "ruleset_id": rulesetID, + }) + + d.SetId(rulesetIDStr) + if err := d.Set("enterprise_slug", enterpriseSlug); err != nil { + return nil, err + } + if err := d.Set("ruleset_id", rulesetID); err != nil { + return nil, err + } + + tflog.Info(ctx, "Imported enterprise ruleset", map[string]any{ + "enterprise_slug": enterpriseSlug, + "ruleset_id": rulesetID, + }) + + return 
[]*schema.ResourceData{d}, nil +} + +var supportedEnterpriseRulesetTargetTypes = []string{ + string(github.RulesetTargetBranch), + string(github.RulesetTargetTag), + string(github.RulesetTargetPush), + string(github.RulesetTargetRepository), +} + +// resourceGithubEnterpriseRulesetObject creates a GitHub RepositoryRuleset object for enterprise-level rulesets +func resourceGithubEnterpriseRulesetObject(d *schema.ResourceData) github.RepositoryRuleset { + return github.RepositoryRuleset{ + Name: d.Get("name").(string), + Target: github.Ptr(github.RulesetTarget(d.Get("target").(string))), + Source: d.Get("enterprise_slug").(string), + SourceType: github.Ptr(github.RulesetSourceType("Enterprise")), + Enforcement: github.RulesetEnforcement(d.Get("enforcement").(string)), + BypassActors: expandBypassActors(d.Get("bypass_actors").([]any)), + Conditions: expandConditions(d.Get("conditions").([]any), RulesetLevelEnterprise), + Rules: expandRules(d.Get("rules").([]any), RulesetLevelEnterprise), + } +} diff --git a/github/resource_github_enterprise_ruleset_test.go b/github/resource_github_enterprise_ruleset_test.go new file mode 100644 index 0000000000..ae85c63672 --- /dev/null +++ b/github/resource_github_enterprise_ruleset_test.go @@ -0,0 +1,699 @@ +package github + +import ( + "fmt" + "regexp" + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/knownvalue" + "github.com/hashicorp/terraform-plugin-testing/statecheck" + "github.com/hashicorp/terraform-plugin-testing/terraform" + "github.com/hashicorp/terraform-plugin-testing/tfjsonpath" +) + +// Shared HCL templates for enterprise ruleset tests. +// Use fmt.Sprintf to fill in enterprise_slug and other parameters. 
+ +var enterpriseRulesetBasicHCL = ` +resource "github_enterprise_ruleset" "test" { + enterprise_slug = "%s" + name = "%s" + target = "branch" + enforcement = "active" + + conditions { + organization_name { + include = ["~ALL"] + exclude = [] + } + + repository_name { + include = ["~ALL"] + exclude = [] + } + + ref_name { + include = ["~ALL"] + exclude = [] + } + } + + rules { + creation = true + } +} +` + +var enterpriseRulesetBypassHCL = ` +resource "github_enterprise_ruleset" "test" { + enterprise_slug = "%s" + name = "%s" + target = "branch" + enforcement = "active" + + bypass_actors { + actor_type = "DeployKey" + bypass_mode = "always" + } + + bypass_actors { + actor_type = "OrganizationAdmin" + bypass_mode = "always" + } + + conditions { + organization_name { + include = ["~ALL"] + exclude = [] + } + + repository_name { + include = ["~ALL"] + exclude = [] + } + + ref_name { + include = ["~ALL"] + exclude = [] + } + } + + rules { + creation = true + } +} +` + +var enterpriseRulesetBypassModeHCL = ` +resource "github_enterprise_ruleset" "test" { + enterprise_slug = "%s" + name = "%s" + target = "branch" + enforcement = "active" + + bypass_actors { + actor_type = "OrganizationAdmin" + bypass_mode = "%s" + } + + conditions { + organization_name { + include = ["~ALL"] + exclude = [] + } + + repository_name { + include = ["~ALL"] + exclude = [] + } + + ref_name { + include = ["~ALL"] + exclude = [] + } + } + + rules { + creation = true + } +} +` + +func TestAccGithubEnterpriseRuleset(t *testing.T) { + + t.Run("basic", func(t *testing.T) { + randomID := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum) + rulesetName := fmt.Sprintf("%s-enterprise-basic-%s", testResourcePrefix, randomID) + + config := fmt.Sprintf(enterpriseRulesetBasicHCL, testAccConf.enterpriseSlug, rulesetName) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { skipUnlessEnterprise(t) }, + ProviderFactories: providerFactories, + Steps: []resource.TestStep{ + { + Config: config, + 
ConfigStateChecks: []statecheck.StateCheck{ + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", tfjsonpath.New("ruleset_id"), knownvalue.NotNull()), + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", tfjsonpath.New("node_id"), knownvalue.NotNull()), + }, + }, + }, + }) + }) + + t.Run("branch_rules", func(t *testing.T) { + randomID := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum) + rulesetName := fmt.Sprintf("%s-enterprise-branch-%s", testResourcePrefix, randomID) + + config := fmt.Sprintf(` +resource "github_enterprise_ruleset" "test" { + enterprise_slug = "%s" + name = "%s" + target = "branch" + enforcement = "active" + + bypass_actors { + actor_type = "DeployKey" + bypass_mode = "always" + } + + bypass_actors { + actor_type = "OrganizationAdmin" + bypass_mode = "always" + } + + conditions { + organization_name { + include = ["~ALL"] + exclude = [] + } + + repository_name { + include = ["~ALL"] + exclude = [] + } + + ref_name { + include = ["~ALL"] + exclude = [] + } + } + + rules { + creation = true + update = true + deletion = true + required_linear_history = true + required_signatures = false + + pull_request { + required_approving_review_count = 2 + required_review_thread_resolution = true + require_code_owner_review = true + dismiss_stale_reviews_on_push = true + require_last_push_approval = true + } + + copilot_code_review { + review_on_push = true + review_draft_pull_requests = false + } + + required_code_scanning { + required_code_scanning_tool { + alerts_threshold = "errors" + security_alerts_threshold = "high_or_higher" + tool = "CodeQL" + } + } + + branch_name_pattern { + name = "test" + negate = false + operator = "starts_with" + pattern = "test" + } + + non_fast_forward = true + } +} +`, testAccConf.enterpriseSlug, rulesetName) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { skipUnlessEnterprise(t) }, + ProviderFactories: providerFactories, + Steps: []resource.TestStep{ + { + Config: config, + 
ConfigStateChecks: []statecheck.StateCheck{ + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", tfjsonpath.New("ruleset_id"), knownvalue.NotNull()), + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", tfjsonpath.New("node_id"), knownvalue.NotNull()), + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", tfjsonpath.New("bypass_actors"), knownvalue.ListSizeExact(2)), + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", tfjsonpath.New("bypass_actors").AtSliceIndex(0).AtMapKey("actor_type"), knownvalue.StringExact("DeployKey")), + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", tfjsonpath.New("bypass_actors").AtSliceIndex(0).AtMapKey("bypass_mode"), knownvalue.StringExact("always")), + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", tfjsonpath.New("bypass_actors").AtSliceIndex(1).AtMapKey("actor_type"), knownvalue.StringExact("OrganizationAdmin")), + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", tfjsonpath.New("bypass_actors").AtSliceIndex(1).AtMapKey("bypass_mode"), knownvalue.StringExact("always")), + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", tfjsonpath.New("rules").AtSliceIndex(0).AtMapKey("required_code_scanning").AtSliceIndex(0).AtMapKey("required_code_scanning_tool").AtSliceIndex(0).AtMapKey("alerts_threshold"), knownvalue.StringExact("errors")), + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", tfjsonpath.New("rules").AtSliceIndex(0).AtMapKey("required_code_scanning").AtSliceIndex(0).AtMapKey("required_code_scanning_tool").AtSliceIndex(0).AtMapKey("security_alerts_threshold"), knownvalue.StringExact("high_or_higher")), + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", tfjsonpath.New("rules").AtSliceIndex(0).AtMapKey("required_code_scanning").AtSliceIndex(0).AtMapKey("required_code_scanning_tool").AtSliceIndex(0).AtMapKey("tool"), knownvalue.StringExact("CodeQL")), + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", 
tfjsonpath.New("rules").AtSliceIndex(0).AtMapKey("copilot_code_review").AtSliceIndex(0).AtMapKey("review_on_push"), knownvalue.Bool(true)), + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", tfjsonpath.New("rules").AtSliceIndex(0).AtMapKey("copilot_code_review").AtSliceIndex(0).AtMapKey("review_draft_pull_requests"), knownvalue.Bool(false)), + }, + }, + }, + }) + }) + + t.Run("required_workflows", func(t *testing.T) { + randomID := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum) + rulesetName := fmt.Sprintf("%s-enterprise-wf-ruleset-%s", testResourcePrefix, randomID) + workflowFilePath := ".github/workflows/echo.yaml" + + config := fmt.Sprintf(` +resource "github_enterprise_ruleset" "test" { + enterprise_slug = "%s" + name = "%s" + target = "branch" + enforcement = "active" + + conditions { + organization_name { + include = ["~ALL"] + exclude = [] + } + + repository_name { + include = ["~ALL"] + exclude = [] + } + + ref_name { + include = ["~ALL"] + exclude = [] + } + } + + rules { + required_workflows { + do_not_enforce_on_create = true + required_workflow { + path = "%s" + repository_id = 1234567 + ref = "main" + } + } + } +} +`, testAccConf.enterpriseSlug, rulesetName, workflowFilePath) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { skipUnlessEnterprise(t) }, + ProviderFactories: providerFactories, + Steps: []resource.TestStep{ + { + Config: config, + ConfigStateChecks: []statecheck.StateCheck{ + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", tfjsonpath.New("ruleset_id"), knownvalue.NotNull()), + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", tfjsonpath.New("rules").AtSliceIndex(0).AtMapKey("required_workflows").AtSliceIndex(0).AtMapKey("do_not_enforce_on_create"), knownvalue.Bool(true)), + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", 
tfjsonpath.New("rules").AtSliceIndex(0).AtMapKey("required_workflows").AtSliceIndex(0).AtMapKey("required_workflow").AtSliceIndex(0).AtMapKey("path"), knownvalue.StringExact(workflowFilePath)), + }, + }, + }, + }) + }) + + t.Run("tag", func(t *testing.T) { + randomID := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum) + rulesetName := fmt.Sprintf("%s-enterprise-tag-%s", testResourcePrefix, randomID) + + config := fmt.Sprintf(` +resource "github_enterprise_ruleset" "test" { + enterprise_slug = "%s" + name = "%s" + target = "tag" + enforcement = "active" + + conditions { + organization_name { + include = ["~ALL"] + exclude = [] + } + + repository_name { + include = ["~ALL"] + exclude = [] + } + + ref_name { + include = ["~ALL"] + exclude = [] + } + } + + rules { + creation = false + deletion = false + required_linear_history = false + } +} +`, testAccConf.enterpriseSlug, rulesetName) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { skipUnlessEnterprise(t) }, + ProviderFactories: providerFactories, + Steps: []resource.TestStep{ + { + Config: config, + ConfigStateChecks: []statecheck.StateCheck{ + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", tfjsonpath.New("ruleset_id"), knownvalue.NotNull()), + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", tfjsonpath.New("node_id"), knownvalue.NotNull()), + }, + }, + }, + }) + }) + + t.Run("push", func(t *testing.T) { + randomID := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum) + rulesetName := fmt.Sprintf("%s-enterprise-push-%s", testResourcePrefix, randomID) + + config := fmt.Sprintf(` +resource "github_enterprise_ruleset" "test" { + enterprise_slug = "%s" + name = "%s" + target = "push" + enforcement = "active" + + bypass_actors { + actor_type = "DeployKey" + bypass_mode = "always" + } + + bypass_actors { + actor_type = "OrganizationAdmin" + bypass_mode = "always" + } + + conditions { + organization_name { + include = ["~ALL"] + exclude = [] + } + + repository_name { 
+ include = ["~ALL"] + exclude = [] + } + } + + rules { + file_path_restriction { + restricted_file_paths = ["test.txt"] + } + + max_file_size { + max_file_size = 99 + } + + file_extension_restriction { + restricted_file_extensions = ["*.zip"] + } + } +} +`, testAccConf.enterpriseSlug, rulesetName) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { skipUnlessEnterprise(t) }, + ProviderFactories: providerFactories, + Steps: []resource.TestStep{ + { + Config: config, + ConfigStateChecks: []statecheck.StateCheck{ + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", tfjsonpath.New("ruleset_id"), knownvalue.NotNull()), + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", tfjsonpath.New("bypass_actors"), knownvalue.ListSizeExact(2)), + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", tfjsonpath.New("bypass_actors").AtSliceIndex(0).AtMapKey("actor_type"), knownvalue.StringExact("DeployKey")), + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", tfjsonpath.New("bypass_actors").AtSliceIndex(1).AtMapKey("actor_type"), knownvalue.StringExact("OrganizationAdmin")), + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", tfjsonpath.New("rules").AtSliceIndex(0).AtMapKey("file_path_restriction").AtSliceIndex(0).AtMapKey("restricted_file_paths").AtSliceIndex(0), knownvalue.StringExact("test.txt")), + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", tfjsonpath.New("rules").AtSliceIndex(0).AtMapKey("max_file_size").AtSliceIndex(0).AtMapKey("max_file_size"), knownvalue.Int64Exact(99)), + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", tfjsonpath.New("rules").AtSliceIndex(0).AtMapKey("file_extension_restriction").AtSliceIndex(0).AtMapKey("restricted_file_extensions").AtSliceIndex(0), knownvalue.StringExact("*.zip")), + }, + }, + }, + }) + }) + + t.Run("update_name", func(t *testing.T) { + randomID := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum) + name := 
fmt.Sprintf("test-enterprise-ruleset-%s", randomID) + nameUpdated := fmt.Sprintf("test-enterprise-ruleset-updated-%s", randomID) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { skipUnlessEnterprise(t) }, + ProviderFactories: providerFactories, + Steps: []resource.TestStep{ + { + Config: fmt.Sprintf(enterpriseRulesetBasicHCL, testAccConf.enterpriseSlug, name), + ConfigStateChecks: []statecheck.StateCheck{ + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", tfjsonpath.New("name"), knownvalue.StringExact(name)), + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", tfjsonpath.New("ruleset_id"), knownvalue.NotNull()), + }, + }, + { + Config: fmt.Sprintf(enterpriseRulesetBasicHCL, testAccConf.enterpriseSlug, nameUpdated), + ConfigStateChecks: []statecheck.StateCheck{ + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", tfjsonpath.New("name"), knownvalue.StringExact(nameUpdated)), + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", tfjsonpath.New("ruleset_id"), knownvalue.NotNull()), + }, + }, + }, + }) + }) + + t.Run("update_bypass_actors", func(t *testing.T) { + randomID := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum) + rulesetName := fmt.Sprintf("%s-enterprise-bypass-%s", testResourcePrefix, randomID) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { skipUnlessEnterprise(t) }, + ProviderFactories: providerFactories, + Steps: []resource.TestStep{ + { + Config: fmt.Sprintf(enterpriseRulesetBypassHCL, testAccConf.enterpriseSlug, rulesetName), + ConfigStateChecks: []statecheck.StateCheck{ + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", tfjsonpath.New("bypass_actors"), knownvalue.ListSizeExact(2)), + }, + }, + { + Config: fmt.Sprintf(enterpriseRulesetBasicHCL, testAccConf.enterpriseSlug, rulesetName), + ConfigStateChecks: []statecheck.StateCheck{ + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", tfjsonpath.New("bypass_actors"), knownvalue.ListSizeExact(0)), + }, 
+ }, + }, + }) + }) + + t.Run("update_bypass_mode", func(t *testing.T) { + randomID := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum) + rulesetName := fmt.Sprintf("test-enterprise-bypass-mode-%s", randomID) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { skipUnlessEnterprise(t) }, + ProviderFactories: providerFactories, + Steps: []resource.TestStep{ + { + Config: fmt.Sprintf(enterpriseRulesetBypassModeHCL, testAccConf.enterpriseSlug, rulesetName, "always"), + ConfigStateChecks: []statecheck.StateCheck{ + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", tfjsonpath.New("bypass_actors").AtSliceIndex(0).AtMapKey("bypass_mode"), knownvalue.StringExact("always")), + }, + }, + { + Config: fmt.Sprintf(enterpriseRulesetBypassModeHCL, testAccConf.enterpriseSlug, rulesetName, "exempt"), + ConfigStateChecks: []statecheck.StateCheck{ + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", tfjsonpath.New("bypass_actors").AtSliceIndex(0).AtMapKey("bypass_mode"), knownvalue.StringExact("exempt")), + }, + }, + }, + }) + }) + + t.Run("repository_targeting", func(t *testing.T) { + randomID := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum) + rulesetName := fmt.Sprintf("%s-enterprise-repo-targeting-%s", testResourcePrefix, randomID) + + config := fmt.Sprintf(` +resource "github_enterprise_ruleset" "test" { + enterprise_slug = "%s" + name = "%s" + target = "branch" + enforcement = "active" + + conditions { + organization_name { + include = ["~ALL"] + exclude = [] + } + + repository_name { + include = ["prod-*", "production-*"] + exclude = ["prod-test*"] + protected = true + } + + ref_name { + include = ["refs/heads/main"] + exclude = [] + } + } + + rules { + creation = false + deletion = false + } +} +`, testAccConf.enterpriseSlug, rulesetName) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { skipUnlessEnterprise(t) }, + ProviderFactories: providerFactories, + Steps: []resource.TestStep{ + { + Config: config, + 
ConfigStateChecks: []statecheck.StateCheck{ + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", tfjsonpath.New("ruleset_id"), knownvalue.NotNull()), + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", tfjsonpath.New("conditions").AtSliceIndex(0).AtMapKey("repository_name").AtSliceIndex(0).AtMapKey("include"), knownvalue.ListSizeExact(2)), + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", tfjsonpath.New("conditions").AtSliceIndex(0).AtMapKey("repository_name").AtSliceIndex(0).AtMapKey("include").AtSliceIndex(0), knownvalue.StringExact("prod-*")), + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", tfjsonpath.New("conditions").AtSliceIndex(0).AtMapKey("repository_name").AtSliceIndex(0).AtMapKey("include").AtSliceIndex(1), knownvalue.StringExact("production-*")), + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", tfjsonpath.New("conditions").AtSliceIndex(0).AtMapKey("repository_name").AtSliceIndex(0).AtMapKey("exclude").AtSliceIndex(0), knownvalue.StringExact("prod-test*")), + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", tfjsonpath.New("conditions").AtSliceIndex(0).AtMapKey("repository_name").AtSliceIndex(0).AtMapKey("protected"), knownvalue.Bool(true)), + }, + }, + }, + }) + }) + + t.Run("organizationID", func(t *testing.T) { + randomID := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum) + rulesetName := fmt.Sprintf("%s-enterprise-org-id-%s", testResourcePrefix, randomID) + + config := fmt.Sprintf(` +resource "github_enterprise_ruleset" "test" { + enterprise_slug = "%s" + name = "%s" + target = "branch" + enforcement = "active" + + conditions { + organization_id = [2284107] + + repository_name { + include = ["~ALL"] + exclude = [] + } + + ref_name { + include = ["~ALL"] + exclude = [] + } + } + + rules { + creation = true + } +} +`, testAccConf.enterpriseSlug, rulesetName) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { skipUnlessEnterprise(t) }, + ProviderFactories: 
providerFactories, + Steps: []resource.TestStep{ + { + Config: config, + ConfigStateChecks: []statecheck.StateCheck{ + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", tfjsonpath.New("ruleset_id"), knownvalue.NotNull()), + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", tfjsonpath.New("conditions").AtSliceIndex(0).AtMapKey("organization_id"), knownvalue.ListSizeExact(1)), + statecheck.ExpectKnownValue("github_enterprise_ruleset.test", tfjsonpath.New("conditions").AtSliceIndex(0).AtMapKey("organization_id").AtSliceIndex(0), knownvalue.Int64Exact(2284107)), + }, + }, + }, + }) + }) + + t.Run("import", func(t *testing.T) { + randomID := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum) + rulesetName := fmt.Sprintf("%s-enterprise-import-%s", testResourcePrefix, randomID) + + config := fmt.Sprintf(enterpriseRulesetBasicHCL, testAccConf.enterpriseSlug, rulesetName) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { skipUnlessEnterprise(t) }, + ProviderFactories: providerFactories, + Steps: []resource.TestStep{ + { + Config: config, + }, + { + ResourceName: "github_enterprise_ruleset.test", + ImportState: true, + ImportStateVerify: true, + ImportStateIdFunc: importEnterpriseRulesetByResourcePath("github_enterprise_ruleset.test"), + ImportStateVerifyIgnore: []string{"etag"}, + }, + }, + }) + }) + + t.Run("conflictingRepositoryConditions", func(t *testing.T) { + config := fmt.Sprintf(` +resource "github_enterprise_ruleset" "test" { + enterprise_slug = "%s" + name = "%s-conflict-test" + target = "branch" + enforcement = "active" + + conditions { + organization_name { + include = ["~ALL"] + exclude = [] + } + + repository_name { + include = ["~ALL"] + exclude = [] + } + + repository_property { + include { + name = "language" + property_values = ["Go"] + } + } + + ref_name { + include = ["~ALL"] + exclude = [] + } + } + + rules { + creation = true + } +} +`, testAccConf.enterpriseSlug, testResourcePrefix) + + resource.Test(t, 
resource.TestCase{ + PreCheck: func() { skipUnlessEnterprise(t) }, + ProviderFactories: providerFactories, + Steps: []resource.TestStep{ + { + Config: config, + ExpectError: regexp.MustCompile(`"conditions.0.repository_name": only one of`), + }, + }, + }) + }) +} + +func importEnterpriseRulesetByResourcePath(rulesetLogicalName string) resource.ImportStateIdFunc { + return func(s *terraform.State) (string, error) { + ruleset := s.RootModule().Resources[rulesetLogicalName] + if ruleset == nil { + return "", fmt.Errorf("Cannot find %s in terraform state", rulesetLogicalName) + } + + rulesetID := ruleset.Primary.ID + if rulesetID == "" { + return "", fmt.Errorf("ruleset %s does not have an id in terraform state", rulesetLogicalName) + } + + enterpriseSlug := ruleset.Primary.Attributes["enterprise_slug"] + if enterpriseSlug == "" { + return "", fmt.Errorf("ruleset %s does not have enterprise_slug in terraform state", rulesetLogicalName) + } + + return fmt.Sprintf("%s:%s", enterpriseSlug, rulesetID), nil + } +} diff --git a/github/resource_github_organization_ruleset.go b/github/resource_github_organization_ruleset.go index 07fa00dfc9..b8aaffc656 100644 --- a/github/resource_github_organization_ruleset.go +++ b/github/resource_github_organization_ruleset.go @@ -714,7 +714,7 @@ func resourceGithubOrganizationRulesetCreate(ctx context.Context, d *schema.Reso if err := d.Set("etag", resp.Header.Get("ETag")); err != nil { return diag.FromErr(err) } - if err := d.Set("rules", flattenRules(ctx, ruleset.Rules, true)); err != nil { + if err := d.Set("rules", flattenRules(ctx, ruleset.Rules, RulesetLevelOrganization)); err != nil { return diag.FromErr(err) } @@ -793,10 +793,10 @@ func resourceGithubOrganizationRulesetRead(ctx context.Context, d *schema.Resour if err := d.Set("bypass_actors", flattenBypassActors(ruleset.BypassActors)); err != nil { return diag.FromErr(err) } - if err := d.Set("conditions", flattenConditions(ctx, ruleset.GetConditions(), true)); err != nil { + if 
err := d.Set("conditions", flattenConditions(ctx, ruleset.GetConditions(), RulesetLevelOrganization)); err != nil { return diag.FromErr(err) } - if err := d.Set("rules", flattenRules(ctx, ruleset.Rules, true)); err != nil { + if err := d.Set("rules", flattenRules(ctx, ruleset.Rules, RulesetLevelOrganization)); err != nil { return diag.FromErr(err) } if err := d.Set("node_id", ruleset.GetNodeID()); err != nil { @@ -952,7 +952,7 @@ func resourceGithubOrganizationRulesetImport(ctx context.Context, d *schema.Reso } func resourceGithubOrganizationRulesetDiff(ctx context.Context, d *schema.ResourceDiff, _ any) error { - err := validateRulesetConditions(ctx, d, true) + err := validateRulesetConditions(ctx, d) if err != nil { return err } diff --git a/github/resource_github_organization_ruleset_test.go b/github/resource_github_organization_ruleset_test.go index 6f8dd502c5..7d5b77fac7 100644 --- a/github/resource_github_organization_ruleset_test.go +++ b/github/resource_github_organization_ruleset_test.go @@ -1000,7 +1000,7 @@ func TestOrganizationPushRulesetSupport(t *testing.T) { input := []any{rulesMap} // Test expand functionality (organization rulesets use org=true) - expandedRules := expandRules(input, true) + expandedRules := expandRules(input, RulesetLevelOrganization) if expandedRules == nil { t.Fatalf("expected expanded rules to not be nil") @@ -1047,7 +1047,7 @@ func TestOrganizationPushRulesetSupport(t *testing.T) { } // Test flatten functionality (organization rulesets use org=true) - flattenedResult := flattenRules(t.Context(), expandedRules, true) + flattenedResult := flattenRules(t.Context(), expandedRules, RulesetLevelOrganization) if len(flattenedResult) != 1 { t.Fatalf("Expected 1 flattened result, got %d", len(flattenedResult)) diff --git a/github/resource_github_repository_ruleset.go b/github/resource_github_repository_ruleset.go index 4e9b3ec119..f50759a039 100644 --- a/github/resource_github_repository_ruleset.go +++ 
b/github/resource_github_repository_ruleset.go @@ -717,7 +717,7 @@ func resourceGithubRepositoryRulesetCreate(ctx context.Context, d *schema.Resour if err := d.Set("etag", resp.Header.Get("ETag")); err != nil { return diag.FromErr(err) } - if err := d.Set("rules", flattenRules(ctx, ruleset.Rules, false)); err != nil { + if err := d.Set("rules", flattenRules(ctx, ruleset.Rules, RulesetLevelRepository)); err != nil { return diag.FromErr(err) } @@ -776,10 +776,10 @@ func resourceGithubRepositoryRulesetRead(ctx context.Context, d *schema.Resource if err := d.Set("bypass_actors", flattenBypassActors(ruleset.BypassActors)); err != nil { return diag.FromErr(err) } - if err := d.Set("conditions", flattenConditions(ctx, ruleset.GetConditions(), false)); err != nil { + if err := d.Set("conditions", flattenConditions(ctx, ruleset.GetConditions(), RulesetLevelRepository)); err != nil { return diag.FromErr(err) } - if err := d.Set("rules", flattenRules(ctx, ruleset.GetRules(), false)); err != nil { + if err := d.Set("rules", flattenRules(ctx, ruleset.GetRules(), RulesetLevelRepository)); err != nil { return diag.FromErr(err) } if err := d.Set("node_id", ruleset.GetNodeID()); err != nil { @@ -884,7 +884,7 @@ func resourceGithubRepositoryRulesetImport(ctx context.Context, d *schema.Resour } func resourceGithubRepositoryRulesetDiff(ctx context.Context, d *schema.ResourceDiff, meta any) error { - err := validateRulesetConditions(ctx, d, false) + err := validateRulesetConditions(ctx, d) if err != nil { return err } diff --git a/github/util_rules.go b/github/util_rules.go index 339394d5ee..9f4f2b9f3a 100644 --- a/github/util_rules.go +++ b/github/util_rules.go @@ -10,6 +10,17 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) +// RulesetLevel represents the scope at which a ruleset is defined. +// This is used to determine which conditions and rules are applicable, +// as some are only valid at certain levels. 
+type RulesetLevel int + +const ( + RulesetLevelRepository RulesetLevel = iota + RulesetLevelOrganization + RulesetLevelEnterprise +) + func toPullRequestMergeMethods(input any) []github.PullRequestMergeMethod { value, ok := input.([]any) if !ok || len(value) == 0 { @@ -92,13 +103,14 @@ func flattenRequiredReviewers(reviewers []*github.RulesetRequiredReviewer) []map } func resourceGithubRulesetObject(d *schema.ResourceData, org string) github.RepositoryRuleset { - isOrgLevel := len(org) > 0 - + var level RulesetLevel var source, sourceType string - if isOrgLevel { + if len(org) > 0 { + level = RulesetLevelOrganization source = org sourceType = "Organization" } else { + level = RulesetLevelRepository source = d.Get("repository").(string) sourceType = "Repository" } @@ -114,8 +126,8 @@ func resourceGithubRulesetObject(d *schema.ResourceData, org string) github.Repo SourceType: &sourceTypeEnum, Enforcement: enforcement, BypassActors: expandBypassActors(d.Get("bypass_actors").([]any)), - Conditions: expandConditions(d.Get("conditions").([]any), isOrgLevel), - Rules: expandRules(d.Get("rules").([]any), isOrgLevel), + Conditions: expandConditions(d.Get("conditions").([]any), level), + Rules: expandRules(d.Get("rules").([]any), level), } } @@ -182,14 +194,13 @@ func flattenBypassActors(bypassActors []*github.BypassActor) []any { return actorsSlice } -func expandConditions(input []any, org bool) *github.RepositoryRulesetConditions { +func expandConditions(input []any, level RulesetLevel) *github.RepositoryRulesetConditions { if len(input) == 0 || input[0] == nil { return nil } rulesetConditions := &github.RepositoryRulesetConditions{} inputConditions := input[0].(map[string]any) - // ref_name is available for both repo and org rulesets if v, ok := inputConditions["ref_name"].([]any); ok && v != nil && len(v) != 0 { inputRefName := v[0].(map[string]any) include := make([]string, 0) @@ -213,9 +224,48 @@ func expandConditions(input []any, org bool) 
*github.RepositoryRulesetConditions } } - // org-only fields - if org { - // repository_name and repository_id + // Enterprise-only: organization targeting + if level == RulesetLevelEnterprise { + if v, ok := inputConditions["organization_name"].([]any); ok && v != nil && len(v) != 0 { + inputOrgName := v[0].(map[string]any) + include := make([]string, 0) + exclude := make([]string, 0) + + for _, v := range inputOrgName["include"].([]any) { + if v != nil { + include = append(include, v.(string)) + } + } + + for _, v := range inputOrgName["exclude"].([]any) { + if v != nil { + exclude = append(exclude, v.(string)) + } + } + + rulesetConditions.OrganizationName = &github.RepositoryRulesetOrganizationNamesConditionParameters{ + Include: include, + Exclude: exclude, + } + } + + if v, ok := inputConditions["organization_id"].([]any); ok && v != nil && len(v) != 0 { + organizationIDs := make([]int64, 0) + + for _, v := range v { + if v != nil { + organizationIDs = append(organizationIDs, toInt64(v)) + } + } + + rulesetConditions.OrganizationID = &github.RepositoryRulesetOrganizationIDsConditionParameters{ + OrganizationIDs: organizationIDs, + } + } + } + + // Org and enterprise: repository targeting + if level == RulesetLevelOrganization || level == RulesetLevelEnterprise { if v, ok := inputConditions["repository_name"].([]any); ok && v != nil && len(v) != 0 { inputRepositoryName := v[0].(map[string]any) include := make([]string, 0) @@ -250,13 +300,21 @@ func expandConditions(input []any, org bool) *github.RepositoryRulesetConditions } rulesetConditions.RepositoryID = &github.RepositoryRulesetRepositoryIDsConditionParameters{RepositoryIDs: repositoryIDs} + } else if v, ok := inputConditions["repository_property"].([]any); ok && v != nil && len(v) != 0 { + inputRepoProp := v[0].(map[string]any) + include := expandRepositoryPropertyTargets(inputRepoProp["include"].([]any)) + exclude := expandRepositoryPropertyTargets(inputRepoProp["exclude"].([]any)) + 
rulesetConditions.RepositoryProperty = &github.RepositoryRulesetRepositoryPropertyConditionParameters{ + Include: include, + Exclude: exclude, + } } } return rulesetConditions } -func flattenConditions(ctx context.Context, conditions *github.RepositoryRulesetConditions, org bool) []any { +func flattenConditions(ctx context.Context, conditions *github.RepositoryRulesetConditions, level RulesetLevel) []any { if conditions == nil || reflect.DeepEqual(conditions, &github.RepositoryRulesetConditions{}) { tflog.Debug(ctx, "Conditions are empty, returning empty list") return []any{} @@ -274,8 +332,24 @@ func flattenConditions(ctx context.Context, conditions *github.RepositoryRuleset conditionsMap["ref_name"] = refNameSlice } - // org-only fields - if org { + // Enterprise-only: organization targeting + if level == RulesetLevelEnterprise { + if conditions.OrganizationName != nil { + organizationNameSlice := make([]map[string]any, 0) + organizationNameSlice = append(organizationNameSlice, map[string]any{ + "include": conditions.OrganizationName.Include, + "exclude": conditions.OrganizationName.Exclude, + }) + conditionsMap["organization_name"] = organizationNameSlice + } + + if conditions.OrganizationID != nil { + conditionsMap["organization_id"] = conditions.OrganizationID.OrganizationIDs + } + } + + // Org and enterprise: repository targeting + if level == RulesetLevelOrganization || level == RulesetLevelEnterprise { repositoryNameSlice := make([]map[string]any, 0) if conditions.RepositoryName != nil { @@ -296,12 +370,54 @@ func flattenConditions(ctx context.Context, conditions *github.RepositoryRuleset if conditions.RepositoryID != nil { conditionsMap["repository_id"] = conditions.RepositoryID.RepositoryIDs } + + if conditions.RepositoryProperty != nil { + conditionsMap["repository_property"] = []map[string]any{{ + "include": flattenRepositoryPropertyTargets(conditions.RepositoryProperty.Include), + "exclude": 
flattenRepositoryPropertyTargets(conditions.RepositoryProperty.Exclude), + }} + } } return []any{conditionsMap} } -func expandRules(input []any, org bool) *github.RepositoryRulesetRules { +func expandRepositoryPropertyTargets(input []any) []*github.RepositoryRulesetRepositoryPropertyTargetParameters { + targets := make([]*github.RepositoryRulesetRepositoryPropertyTargetParameters, 0, len(input)) + for _, item := range input { + targetMap := item.(map[string]any) + propertyValues := make([]string, 0) + for _, pv := range targetMap["property_values"].([]any) { + propertyValues = append(propertyValues, pv.(string)) + } + target := &github.RepositoryRulesetRepositoryPropertyTargetParameters{ + Name: targetMap["name"].(string), + PropertyValues: propertyValues, + } + if source, ok := targetMap["source"].(string); ok && source != "" { + target.Source = github.Ptr(source) + } + targets = append(targets, target) + } + return targets +} + +func flattenRepositoryPropertyTargets(targets []*github.RepositoryRulesetRepositoryPropertyTargetParameters) []map[string]any { + result := make([]map[string]any, 0, len(targets)) + for _, t := range targets { + m := map[string]any{ + "name": t.Name, + "property_values": t.PropertyValues, + } + if t.Source != nil { + m["source"] = *t.Source + } + result = append(result, m) + } + return result +} + +func expandRules(input []any, level RulesetLevel) *github.RepositoryRulesetRules { if len(input) == 0 || input[0] == nil { return &github.RepositoryRulesetRules{} } @@ -458,8 +574,8 @@ func expandRules(input []any, org bool) *github.RepositoryRulesetRules { } } - // Required workflows rule (org-only) - if org { + // Required workflows rule (org and enterprise only) + if level == RulesetLevelOrganization || level == RulesetLevelEnterprise { if v, ok := rulesMap["required_workflows"].([]any); ok && len(v) != 0 { requiredWorkflowsMap := v[0].(map[string]any) requiredWorkflows := make([]*github.RuleWorkflow, 0) @@ -573,10 +689,41 @@ func 
expandRules(input []any, org bool) *github.RepositoryRulesetRules { rulesetRules.CopilotCodeReview = params } + // Repository target rules + if v, ok := rulesMap["repository_creation"].(bool); ok && v { + rulesetRules.RepositoryCreate = &github.EmptyRuleParameters{} + } + + if v, ok := rulesMap["repository_deletion"].(bool); ok && v { + rulesetRules.RepositoryDelete = &github.EmptyRuleParameters{} + } + + if v, ok := rulesMap["repository_transfer"].(bool); ok && v { + rulesetRules.RepositoryTransfer = &github.EmptyRuleParameters{} + } + + if v, ok := rulesMap["repository_name"].([]any); ok && len(v) != 0 { + repoNameMap := v[0].(map[string]any) + params := &github.SimplePatternRuleParameters{ + Negate: repoNameMap["negate"].(bool), + Pattern: repoNameMap["pattern"].(string), + } + rulesetRules.RepositoryName = params + } + + if v, ok := rulesMap["repository_visibility"].([]any); ok && len(v) != 0 { + visibilityMap := v[0].(map[string]any) + params := &github.RepositoryVisibilityRuleParameters{ + Internal: visibilityMap["internal"].(bool), + Private: visibilityMap["private"].(bool), + } + rulesetRules.RepositoryVisibility = params + } + return rulesetRules } -func flattenRules(ctx context.Context, rules *github.RepositoryRulesetRules, org bool) []any { +func flattenRules(ctx context.Context, rules *github.RepositoryRulesetRules, level RulesetLevel) []any { if rules == nil { return []any{} } @@ -593,12 +740,12 @@ func flattenRules(ctx context.Context, rules *github.RepositoryRulesetRules, org // Update rule with parameters if rules.Update != nil { rulesMap["update"] = true - if !org { + if level == RulesetLevelRepository { rulesMap["update_allows_fetch_and_merge"] = rules.Update.UpdateAllowsFetchAndMerge } } else { rulesMap["update"] = false - if !org { + if level == RulesetLevelRepository { rulesMap["update_allows_fetch_and_merge"] = false } } // Required deployments rule @@ -700,8 +847,8 @@ func flattenRules(ctx context.Context, rules 
*github.RepositoryRulesetRules, org } } - // Required workflows rule (org-only) - if org && rules.Workflows != nil { + // Required workflows rule (org and enterprise only) + if (level == RulesetLevelOrganization || level == RulesetLevelEnterprise) && rules.Workflows != nil { requiredWorkflowsSlice := make([]map[string]any, 0) requiredWorkflows := make([]map[string]any, 0) @@ -800,6 +947,29 @@ func flattenRules(ctx context.Context, rules *github.RepositoryRulesetRules, org rulesMap["copilot_code_review"] = copilotCodeReviewSlice } + // Repository target rules + rulesMap["repository_creation"] = rules.RepositoryCreate != nil + rulesMap["repository_deletion"] = rules.RepositoryDelete != nil + rulesMap["repository_transfer"] = rules.RepositoryTransfer != nil + + if rules.RepositoryName != nil { + repoNameSlice := make([]map[string]any, 0) + repoNameSlice = append(repoNameSlice, map[string]any{ + "negate": rules.RepositoryName.Negate, + "pattern": rules.RepositoryName.Pattern, + }) + rulesMap["repository_name"] = repoNameSlice + } + + if rules.RepositoryVisibility != nil { + visibilitySlice := make([]map[string]any, 0) + visibilitySlice = append(visibilitySlice, map[string]any{ + "internal": rules.RepositoryVisibility.Internal, + "private": rules.RepositoryVisibility.Private, + }) + rulesMap["repository_visibility"] = visibilitySlice + } + return []any{rulesMap} } diff --git a/github/util_rules_test.go b/github/util_rules_test.go index 3283b4fa6e..72ee871640 100644 --- a/github/util_rules_test.go +++ b/github/util_rules_test.go @@ -1,6 +1,7 @@ package github import ( + "context" "testing" "github.com/google/go-github/v82/github" @@ -18,7 +19,7 @@ func TestExpandRulesBasicRules(t *testing.T) { } input := []any{rulesMap} - result := expandRules(input, false) + result := expandRules(input, RulesetLevelRepository) if result == nil { t.Fatal("Expected result to not be nil") @@ -57,7 +58,7 @@ func TestFlattenRulesBasicRules(t *testing.T) { NonFastForward: 
&github.EmptyRuleParameters{}, } - result := flattenRules(t.Context(), rules, false) + result := flattenRules(t.Context(), rules, RulesetLevelRepository) if len(result) != 1 { t.Fatalf("Expected 1 element in result, got %d", len(result)) @@ -100,7 +101,7 @@ func TestExpandRulesMaxFilePathLength(t *testing.T) { } input := []any{rulesMap} - result := expandRules(input, false) + result := expandRules(input, RulesetLevelRepository) if result == nil { t.Fatal("Expected result to not be nil") @@ -126,7 +127,7 @@ func TestFlattenRulesMaxFilePathLength(t *testing.T) { }, } - result := flattenRules(t.Context(), rules, false) + result := flattenRules(t.Context(), rules, RulesetLevelRepository) if len(result) != 1 { t.Fatalf("Expected 1 element in result, got %d", len(result)) @@ -160,14 +161,14 @@ func TestRoundTripMaxFilePathLength(t *testing.T) { input := []any{rulesMap} // Expand to GitHub API format - expandedRules := expandRules(input, false) + expandedRules := expandRules(input, RulesetLevelRepository) if expandedRules == nil { t.Fatal("Expected expandedRules to not be nil") } // Flatten back to terraform format - flattenedResult := flattenRules(t.Context(), expandedRules, false) + flattenedResult := flattenRules(t.Context(), expandedRules, RulesetLevelRepository) if len(flattenedResult) != 1 { t.Fatalf("Expected 1 flattened result, got %d", len(flattenedResult)) @@ -198,7 +199,7 @@ func TestExpandRulesMaxFileSize(t *testing.T) { } input := []any{rulesMap} - result := expandRules(input, false) + result := expandRules(input, RulesetLevelRepository) if result == nil { t.Fatal("Expected result to not be nil") @@ -224,7 +225,7 @@ func TestFlattenRulesMaxFileSize(t *testing.T) { }, } - result := flattenRules(t.Context(), rules, false) + result := flattenRules(t.Context(), rules, RulesetLevelRepository) if len(result) != 1 { t.Fatalf("Expected 1 element in result, got %d", len(result)) @@ -255,7 +256,7 @@ func TestExpandRulesFileExtensionRestriction(t *testing.T) { } input 
:= []any{rulesMap} - result := expandRules(input, false) + result := expandRules(input, RulesetLevelRepository) if result == nil { t.Fatal("Expected result to not be nil") @@ -292,7 +293,7 @@ func TestFlattenRulesFileExtensionRestriction(t *testing.T) { }, } - result := flattenRules(t.Context(), rules, false) + result := flattenRules(t.Context(), rules, RulesetLevelRepository) if len(result) != 1 { t.Fatalf("Expected 1 element in result, got %d", len(result)) @@ -345,7 +346,7 @@ func TestCompletePushRulesetSupport(t *testing.T) { input := []any{rulesMap} // Expand to GitHub API format - expandedRules := expandRules(input, false) + expandedRules := expandRules(input, RulesetLevelRepository) if expandedRules == nil { t.Fatal("Expected expandedRules to not be nil") @@ -372,7 +373,7 @@ func TestCompletePushRulesetSupport(t *testing.T) { } // Flatten back to terraform format - flattenedResult := flattenRules(t.Context(), expandedRules, false) + flattenedResult := flattenRules(t.Context(), expandedRules, RulesetLevelRepository) if len(flattenedResult) != 1 { t.Fatalf("Expected 1 flattened result, got %d", len(flattenedResult)) @@ -433,7 +434,7 @@ func TestCopilotCodeReviewRoundTrip(t *testing.T) { input := []any{rulesMap} // Expand to GitHub API format - expandedRules := expandRules(input, false) + expandedRules := expandRules(input, RulesetLevelRepository) if expandedRules == nil { t.Fatal("Expected expandedRules to not be nil") @@ -452,7 +453,7 @@ func TestCopilotCodeReviewRoundTrip(t *testing.T) { } // Flatten back to terraform format - flattenedResult := flattenRules(t.Context(), expandedRules, false) + flattenedResult := flattenRules(t.Context(), expandedRules, RulesetLevelRepository) if len(flattenedResult) != 1 { t.Fatalf("Expected 1 flattened result, got %d", len(flattenedResult)) @@ -485,7 +486,7 @@ func TestFlattenConditions_PushRuleset_WithRepositoryNameOnly(t *testing.T) { }, } - result := flattenConditions(t.Context(), conditions, true) // org=true for 
organization rulesets + result := flattenConditions(t.Context(), conditions, RulesetLevelOrganization) if len(result) != 1 { t.Fatalf("Expected 1 conditions block, got %d", len(result)) @@ -531,7 +532,7 @@ func TestFlattenConditions_BranchRuleset_WithRefNameAndRepositoryName(t *testing }, } - result := flattenConditions(t.Context(), conditions, true) // org=true for organization rulesets + result := flattenConditions(t.Context(), conditions, RulesetLevelOrganization) if len(result) != 1 { t.Fatalf("Expected 1 conditions block, got %d", len(result)) @@ -599,7 +600,7 @@ func TestFlattenConditions_PushRuleset_WithRepositoryIdOnly(t *testing.T) { }, } - result := flattenConditions(t.Context(), conditions, true) // org=true for organization rulesets + result := flattenConditions(t.Context(), conditions, RulesetLevelOrganization) if len(result) != 1 { t.Fatalf("Expected 1 conditions block, got %d", len(result)) @@ -814,3 +815,296 @@ func TestRoundTripRequiredReviewers(t *testing.T) { t.Errorf("Expected reviewer type to be Team after round trip, got %v", reviewerBlock[0]["type"]) } } + +// Tests for new condition types: organization_id + +func TestExpandConditionsOrganizationID(t *testing.T) { + // Test expanding organization_id condition + conditionsMap := map[string]any{ + "ref_name": []any{ + map[string]any{ + "include": []any{"main", "develop"}, + "exclude": []any{"feature/*"}, + }, + }, + "organization_id": []any{123, 456, 789}, + } + + input := []any{conditionsMap} + result := expandConditions(input, RulesetLevelEnterprise) + + if result == nil { + t.Fatal("Expected result to not be nil") + } + + if result.OrganizationID == nil { + t.Fatal("Expected OrganizationID to be set") + } + + expectedIDs := []int64{123, 456, 789} + if len(result.OrganizationID.OrganizationIDs) != len(expectedIDs) { + t.Fatalf("Expected %d organization IDs, got %d", len(expectedIDs), len(result.OrganizationID.OrganizationIDs)) + } + + for i, expectedID := range expectedIDs { + if 
result.OrganizationID.OrganizationIDs[i] != expectedID { + t.Errorf("Expected organization ID %d at index %d, got %d", expectedID, i, result.OrganizationID.OrganizationIDs[i]) + } + } +} + +func TestFlattenConditionsOrganizationID(t *testing.T) { + // Test flattening organization_id condition + conditions := &github.RepositoryRulesetConditions{ + RefName: &github.RepositoryRulesetRefConditionParameters{ + Include: []string{"main"}, + Exclude: []string{}, + }, + OrganizationID: &github.RepositoryRulesetOrganizationIDsConditionParameters{ + OrganizationIDs: []int64{123, 456}, + }, + } + + result := flattenConditions(context.Background(), conditions, RulesetLevelEnterprise) + + if len(result) != 1 { + t.Fatalf("Expected 1 element in result, got %d", len(result)) + } + + conditionsMap := result[0].(map[string]any) + orgIDs := conditionsMap["organization_id"].([]int64) + + if len(orgIDs) != 2 { + t.Fatalf("Expected 2 organization IDs, got %d", len(orgIDs)) + } + + if orgIDs[0] != 123 || orgIDs[1] != 456 { + t.Errorf("Expected organization IDs [123, 456], got %v", orgIDs) + } +} + +func TestRoundTripConditionsWithAllProperties(t *testing.T) { + // Test that organization_id condition survives expand -> flatten round trip + conditionsMap := map[string]any{ + "ref_name": []any{ + map[string]any{ + "include": []any{"main", "develop"}, + "exclude": []any{"feature/*"}, + }, + }, + "organization_id": []any{123, 456}, + } + + input := []any{conditionsMap} + + // Expand to GitHub API format + expandedConditions := expandConditions(input, RulesetLevelEnterprise) + + if expandedConditions == nil { + t.Fatal("Expected expandedConditions to not be nil") + } + + // Flatten back to terraform format + flattenedResult := flattenConditions(context.Background(), expandedConditions, RulesetLevelEnterprise) + + if len(flattenedResult) != 1 { + t.Fatalf("Expected 1 flattened result, got %d", len(flattenedResult)) + } + + flattenedConditionsMap := flattenedResult[0].(map[string]any) + + // 
Verify organization_id survived + orgIDs := flattenedConditionsMap["organization_id"].([]int64) + if len(orgIDs) != 2 || orgIDs[0] != 123 || orgIDs[1] != 456 { + t.Errorf("Expected organization_id [123, 456] after round trip, got %v", orgIDs) + } +} + +func TestExpandConditionsRepositoryProperty(t *testing.T) { + conditionsMap := map[string]any{ + "ref_name": []any{ + map[string]any{ + "include": []any{"main"}, + "exclude": []any{}, + }, + }, + "organization_id": []any{123}, + "repository_property": []any{ + map[string]any{ + "include": []any{ + map[string]any{ + "name": "environment", + "property_values": []any{"production", "staging"}, + "source": "custom", + }, + }, + "exclude": []any{ + map[string]any{ + "name": "team", + "property_values": []any{"experimental"}, + "source": "", + }, + }, + }, + }, + } + + input := []any{conditionsMap} + result := expandConditions(input, RulesetLevelEnterprise) + + if result == nil { + t.Fatal("Expected result to not be nil") + } + + if result.RepositoryProperty == nil { + t.Fatal("Expected RepositoryProperty to be set") + } + + if len(result.RepositoryProperty.Include) != 1 { + t.Fatalf("Expected 1 include target, got %d", len(result.RepositoryProperty.Include)) + } + + inc := result.RepositoryProperty.Include[0] + if inc.Name != "environment" { + t.Errorf("Expected include name to be 'environment', got %q", inc.Name) + } + if len(inc.PropertyValues) != 2 || inc.PropertyValues[0] != "production" || inc.PropertyValues[1] != "staging" { + t.Errorf("Expected include property_values [production, staging], got %v", inc.PropertyValues) + } + if inc.Source == nil || *inc.Source != "custom" { + t.Errorf("Expected include source to be 'custom', got %v", inc.Source) + } + + if len(result.RepositoryProperty.Exclude) != 1 { + t.Fatalf("Expected 1 exclude target, got %d", len(result.RepositoryProperty.Exclude)) + } + + exc := result.RepositoryProperty.Exclude[0] + if exc.Name != "team" { + t.Errorf("Expected exclude name to be 'team', got 
%q", exc.Name) + } + if exc.Source != nil { + t.Errorf("Expected exclude source to be nil for empty string, got %v", exc.Source) + } +} + +func TestFlattenConditionsRepositoryProperty(t *testing.T) { + conditions := &github.RepositoryRulesetConditions{ + RefName: &github.RepositoryRulesetRefConditionParameters{ + Include: []string{"main"}, + Exclude: []string{}, + }, + OrganizationID: &github.RepositoryRulesetOrganizationIDsConditionParameters{ + OrganizationIDs: []int64{123}, + }, + RepositoryProperty: &github.RepositoryRulesetRepositoryPropertyConditionParameters{ + Include: []*github.RepositoryRulesetRepositoryPropertyTargetParameters{ + { + Name: "environment", + PropertyValues: []string{"production"}, + Source: github.Ptr("custom"), + }, + }, + Exclude: []*github.RepositoryRulesetRepositoryPropertyTargetParameters{ + { + Name: "team", + PropertyValues: []string{"experimental"}, + }, + }, + }, + } + + result := flattenConditions(context.Background(), conditions, RulesetLevelEnterprise) + + if len(result) != 1 { + t.Fatalf("Expected 1 element in result, got %d", len(result)) + } + + conditionsMap := result[0].(map[string]any) + repoProp, ok := conditionsMap["repository_property"].([]map[string]any) + if !ok { + t.Fatalf("Expected repository_property to be []map[string]any, got %T", conditionsMap["repository_property"]) + } + if len(repoProp) != 1 { + t.Fatalf("Expected 1 repository_property block, got %d", len(repoProp)) + } + + includes := repoProp[0]["include"].([]map[string]any) + if len(includes) != 1 { + t.Fatalf("Expected 1 include, got %d", len(includes)) + } + if includes[0]["name"] != "environment" { + t.Errorf("Expected include name to be 'environment', got %v", includes[0]["name"]) + } + if includes[0]["source"] != "custom" { + t.Errorf("Expected include source to be 'custom', got %v", includes[0]["source"]) + } + + excludes := repoProp[0]["exclude"].([]map[string]any) + if len(excludes) != 1 { + t.Fatalf("Expected 1 exclude, got %d", len(excludes)) + 
} + if excludes[0]["name"] != "team" { + t.Errorf("Expected exclude name to be 'team', got %v", excludes[0]["name"]) + } +} + +func TestRoundTripConditionsRepositoryProperty(t *testing.T) { + conditionsMap := map[string]any{ + "ref_name": []any{ + map[string]any{ + "include": []any{"main"}, + "exclude": []any{}, + }, + }, + "organization_id": []any{123}, + "repository_property": []any{ + map[string]any{ + "include": []any{ + map[string]any{ + "name": "environment", + "property_values": []any{"production", "staging"}, + "source": "custom", + }, + }, + "exclude": []any{ + map[string]any{ + "name": "team", + "property_values": []any{"experimental"}, + "source": "", + }, + }, + }, + }, + } + + input := []any{conditionsMap} + expanded := expandConditions(input, RulesetLevelEnterprise) + if expanded == nil { + t.Fatal("Expected expanded conditions to not be nil") + } + + flattened := flattenConditions(context.Background(), expanded, RulesetLevelEnterprise) + if len(flattened) != 1 { + t.Fatalf("Expected 1 flattened result, got %d", len(flattened)) + } + + flatMap := flattened[0].(map[string]any) + repoProp, ok := flatMap["repository_property"].([]map[string]any) + if !ok { + t.Fatalf("Expected repository_property after round trip, got %T", flatMap["repository_property"]) + } + + includes := repoProp[0]["include"].([]map[string]any) + if len(includes) != 1 { + t.Fatalf("Expected 1 include after round trip, got %d", len(includes)) + } + if includes[0]["name"] != "environment" { + t.Errorf("Expected include name 'environment' after round trip, got %v", includes[0]["name"]) + } + + propVals := includes[0]["property_values"].([]string) + if len(propVals) != 2 || propVals[0] != "production" || propVals[1] != "staging" { + t.Errorf("Expected property_values [production, staging] after round trip, got %v", propVals) + } +} diff --git a/github/util_ruleset_enterprise_validation.go b/github/util_ruleset_enterprise_validation.go new file mode 100644 index 0000000000..5ad73fdafc --- 
/dev/null +++ b/github/util_ruleset_enterprise_validation.go @@ -0,0 +1,120 @@ +package github + +import ( + "context" + "fmt" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +// Repository target rules (enterprise only) +var repositoryTargetRules = []string{ + "repository_creation", + "repository_deletion", + "repository_transfer", + "repository_name", + "repository_visibility", +} + +// resourceGithubEnterpriseRulesetCustomizeDiff validates enterprise ruleset configuration +func resourceGithubEnterpriseRulesetCustomizeDiff(_ context.Context, d *schema.ResourceDiff, _ interface{}) error { + target := d.Get("target").(string) + + // Validate conditions + if err := validateEnterpriseConditions(d, target); err != nil { + return err + } + + // Validate rules + if err := validateEnterpriseRules(d, target); err != nil { + return err + } + + return nil +} + +// validateEnterpriseConditions validates conditions based on target type +func validateEnterpriseConditions(d *schema.ResourceDiff, target string) error { + conditions := d.Get("conditions").([]interface{}) + if len(conditions) == 0 { + return nil + } + + conditionsMap := conditions[0].(map[string]interface{}) + refName := conditionsMap["ref_name"].([]interface{}) + hasRefName := len(refName) > 0 + + switch target { + case "branch", "tag": + if !hasRefName { + return fmt.Errorf("'ref_name' condition is required when target is '%s'", target) + } + case "push", "repository": + if hasRefName { + return fmt.Errorf("'ref_name' condition must not be set when target is '%s'", target) + } + } + + return nil +} + +// validateEnterpriseRules validates rules based on target type +func validateEnterpriseRules(d *schema.ResourceDiff, target string) error { + rules := d.Get("rules").([]interface{}) + if len(rules) == 0 { + return nil + } + + rulesMap := rules[0].(map[string]interface{}) + + // Repository rules only valid for repository target + if target != "repository" { + for _, rule := range 
repositoryTargetRules { + if isRuleSet(rulesMap, rule) { + return fmt.Errorf("rule '%s' is only valid for target 'repository', not '%s'", rule, target) + } + } + } + + // Push rules only valid for push target + pushRules := []string{"file_path_restriction", "max_file_size", "max_file_path_length", "file_extension_restriction"} + if target != "push" { + for _, rule := range pushRules { + if isRuleSet(rulesMap, rule) { + return fmt.Errorf("rule '%s' is only valid for target 'push', not '%s'", rule, target) + } + } + } + + // Branch/tag rules not valid for push or repository targets + if target == "push" || target == "repository" { + branchTagRules := []string{ + "creation", "deletion", "update", "required_linear_history", "required_signatures", + "pull_request", "required_status_checks", "non_fast_forward", "commit_message_pattern", + "commit_author_email_pattern", "committer_email_pattern", "branch_name_pattern", + "tag_name_pattern", "required_workflows", "required_code_scanning", "copilot_code_review", + } + for _, rule := range branchTagRules { + if isRuleSet(rulesMap, rule) { + return fmt.Errorf("rule '%s' is only valid for target 'branch' or 'tag', not '%s'", rule, target) + } + } + } + + return nil +} + +// isRuleSet checks if a rule is set in the rules map +func isRuleSet(rules map[string]interface{}, ruleName string) bool { + if val, ok := rules[ruleName]; ok { + switch v := val.(type) { + case bool: + return v + case []interface{}: + return len(v) > 0 + default: + return val != nil + } + } + return false +} diff --git a/github/util_ruleset_validation.go b/github/util_ruleset_validation.go index 2ee0254fc1..ad2e299a41 100644 --- a/github/util_ruleset_validation.go +++ b/github/util_ruleset_validation.go @@ -129,7 +129,7 @@ func validateRules(ctx context.Context, d *schema.ResourceDiff, allowedRules []g return nil } -func validateRulesetConditions(ctx context.Context, d *schema.ResourceDiff, isOrg bool) error { +func validateRulesetConditions(ctx 
context.Context, d *schema.ResourceDiff) error { target := github.RulesetTarget(d.Get("target").(string)) tflog.Debug(ctx, "Validating conditions field based on target", map[string]any{"target": target}) conditionsRaw := d.Get("conditions").([]any) @@ -143,7 +143,7 @@ func validateRulesetConditions(ctx context.Context, d *schema.ResourceDiff, isOr switch target { case github.RulesetTargetBranch, github.RulesetTargetTag: - return validateConditionsFieldForBranchAndTagTargets(ctx, target, conditions, isOrg) + return validateConditionsFieldForBranchAndTagTargets(ctx, target, conditions) case github.RulesetTargetPush: return validateConditionsFieldForPushTarget(ctx, conditions) } @@ -163,21 +163,14 @@ func validateRulesetRules(ctx context.Context, d *schema.ResourceDiff) error { return validateRulesForTarget(ctx, d) } -func validateConditionsFieldForBranchAndTagTargets(ctx context.Context, target github.RulesetTarget, conditions map[string]any, isOrg bool) error { - tflog.Debug(ctx, fmt.Sprintf("Validating conditions field for %s target", target), map[string]any{"target": target, "conditions": conditions, "isOrg": isOrg}) +func validateConditionsFieldForBranchAndTagTargets(ctx context.Context, target github.RulesetTarget, conditions map[string]any) error { + tflog.Debug(ctx, fmt.Sprintf("Validating conditions field for %s target", target), map[string]any{"target": target, "conditions": conditions}) if conditions["ref_name"] == nil || len(conditions["ref_name"].([]any)) == 0 { tflog.Debug(ctx, fmt.Sprintf("Missing ref_name for %s target", target), map[string]any{"target": target}) return fmt.Errorf("ref_name must be set for %s target", target) } - // Repository rulesets don't have repository_name or repository_id, only org rulesets do. 
- if isOrg { - if (conditions["repository_name"] == nil || len(conditions["repository_name"].([]any)) == 0) && (conditions["repository_id"] == nil || len(conditions["repository_id"].([]any)) == 0) { - tflog.Debug(ctx, fmt.Sprintf("Missing repository_name or repository_id for %s target", target), map[string]any{"target": target}) - return fmt.Errorf("either repository_name or repository_id must be set for %s target", target) - } - } tflog.Debug(ctx, fmt.Sprintf("Conditions validation passed for %s target", target)) return nil } diff --git a/github/util_ruleset_validation_test.go b/github/util_ruleset_validation_test.go index c3125fa6a5..911c002dca 100644 --- a/github/util_ruleset_validation_test.go +++ b/github/util_ruleset_validation_test.go @@ -114,7 +114,7 @@ func Test_validateRepositoryRulesetConditionsFieldForBranchAndTagTargets(t *test for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - err := validateConditionsFieldForBranchAndTagTargets(t.Context(), tt.target, tt.conditions, false) + err := validateConditionsFieldForBranchAndTagTargets(t.Context(), tt.target, tt.conditions) if tt.expectError { if err == nil { t.Errorf("expected error but got nil") @@ -130,7 +130,7 @@ func Test_validateRepositoryRulesetConditionsFieldForBranchAndTagTargets(t *test } } -func Test_validateConditionsFieldForBranchAndTagTargets(t *testing.T) { +func Test_validateConditionsFieldForBranchAndTagTargets_OrgLevel(t *testing.T) { tests := []struct { name string target github.RulesetTarget @@ -165,42 +165,11 @@ func Test_validateConditionsFieldForBranchAndTagTargets(t *testing.T) { expectError: true, errorMsg: "ref_name must be set for branch target", }, - { - name: "invalid branch target without repository_name or repository_id", - target: github.RulesetTargetBranch, - conditions: map[string]any{ - "ref_name": []any{map[string]any{"include": []any{"~DEFAULT_BRANCH"}, "exclude": []any{}}}, - }, - expectError: true, - errorMsg: "either repository_name or repository_id must 
be set for branch target", - }, - { - name: "invalid tag target with nil repository_name and repository_id", - target: github.RulesetTargetTag, - conditions: map[string]any{ - "ref_name": []any{map[string]any{"include": []any{"v*"}, "exclude": []any{}}}, - "repository_name": nil, - "repository_id": nil, - }, - expectError: true, - errorMsg: "either repository_name or repository_id must be set for tag target", - }, - { - name: "invalid branch target with empty repository_name and repository_id slices", - target: github.RulesetTargetBranch, - conditions: map[string]any{ - "ref_name": []any{map[string]any{"include": []any{"~DEFAULT_BRANCH"}, "exclude": []any{}}}, - "repository_name": []any{}, - "repository_id": []any{}, - }, - expectError: true, - errorMsg: "either repository_name or repository_id must be set for branch target", - }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - err := validateConditionsFieldForBranchAndTagTargets(t.Context(), tt.target, tt.conditions, true) + err := validateConditionsFieldForBranchAndTagTargets(t.Context(), tt.target, tt.conditions) if tt.expectError { if err == nil { t.Errorf("expected error but got nil") diff --git a/website/docs/d/enterprise_ruleset.html.markdown b/website/docs/d/enterprise_ruleset.html.markdown new file mode 100644 index 0000000000..d58ea30a6d --- /dev/null +++ b/website/docs/d/enterprise_ruleset.html.markdown @@ -0,0 +1,289 @@ +--- +layout: "github" +page_title: "github_enterprise_ruleset Data Source - terraform-provider-github" +description: |- + Use this data source to retrieve information about a GitHub enterprise ruleset. +--- + +# github_enterprise_ruleset (Data Source) + +Use this data source to retrieve information about a GitHub enterprise ruleset. + +## Example Usage + +```hcl +data "github_enterprise_ruleset" "example" { + enterprise_slug = "my-enterprise" + ruleset_id = 12345 +} +``` + +## Argument Reference + +- `enterprise_slug` - (Required) (String) The slug of the enterprise. 
+ +- `ruleset_id` - (Required) (Number) The ID of the ruleset to retrieve. + +## Attributes Reference + +- `name` - (String) The name of the ruleset. + +- `target` - (String) The target of the ruleset. Possible values are `branch`, `tag`, `push`, and `repository`. + +- `enforcement` - (String) The enforcement level of the ruleset. Possible values are `disabled`, `active`, and `evaluate`. + +- `node_id` - (String) GraphQL global node id for use with v4 API. + +- `bypass_actors` - (List) The actors that can bypass the rules in this ruleset. (see [below for nested schema](#bypass_actors)) + +- `conditions` - (List) Parameters for an enterprise ruleset condition. (see [below for nested schema](#conditions)) + +- `rules` - (List) Rules within the ruleset. (see [below for nested schema](#rules)) + +### bypass_actors + +- `actor_id` - (Number) The ID of the actor that can bypass a ruleset. + +- `actor_type` - (String) The type of actor that can bypass a ruleset. + +- `bypass_mode` - (String) When the specified actor can bypass the ruleset. + +### conditions + +- `organization_name` - (List) Conditions for organization names that the ruleset targets. (see [below for nested schema](#conditionsorganization_name)) + +- `organization_id` - (List) Conditions for organization IDs that the ruleset targets. (see [below for nested schema](#conditionsorganization_id)) + +- `repository_name` - (List) Conditions for repository names that the ruleset targets. (see [below for nested schema](#conditionsrepository_name)) + +- `repository_id` - (List) Conditions for repository IDs that the ruleset targets. (see [below for nested schema](#conditionsrepository_id)) + +- `repository_property` - (List) Conditions for repository properties that the ruleset targets. (see [below for nested schema](#conditionsrepository_property)) + +- `ref_name` - (List) Conditions for ref names that the ruleset targets. 
(see [below for nested schema](#conditionsref_name)) + +### conditions.organization_name + +- `include` - (List of String) Array of organization name patterns to include. + +- `exclude` - (List of String) Array of organization name patterns to exclude. + +### conditions.organization_id + +- `organization_ids` - (List of Number) Array of organization IDs to target. + +### conditions.repository_name + +- `include` - (List of String) Array of repository name patterns to include. + +- `exclude` - (List of String) Array of repository name patterns to exclude. + +- `protected` - (Boolean) Whether to target only protected repositories. + +### conditions.repository_id + +- `repository_ids` - (List of Number) Array of repository IDs to target. + +### conditions.repository_property + +- `include` - (List of String) The repository properties to include. All properties must match for the condition to pass. + +- `exclude` - (List of String) The repository properties to exclude. + +### conditions.ref_name + +- `include` - (List of String) Array of ref names or patterns to include. + +- `exclude` - (List of String) Array of ref names or patterns to exclude. + +### rules + +- `creation` - (Boolean) Only allow users with bypass permission to create matching refs. + +- `update` - (Boolean) Only allow users with bypass permission to update matching refs. + +- `deletion` - (Boolean) Only allow users with bypass permissions to delete matching refs. + +- `required_linear_history` - (Boolean) Prevent merge commits from being pushed to matching branches. + +- `required_signatures` - (Boolean) Commits pushed to matching branches must have verified signatures. + +- `non_fast_forward` - (Boolean) Prevent users with push access from force pushing to branches. + +- `pull_request` - (List) Require all commits be made to a non-target branch and submitted via a pull request. 
(see [below for nested schema](#rulespull_request)) + +- `copilot_code_review` - (List) Automatically request Copilot code review for new pull requests. (see [below for nested schema](#rulescopilot_code_review)) + +- `required_status_checks` - (List) Status checks that are required. (see [below for nested schema](#rulesrequired_status_checks)) + +- `required_workflows` - (List) Actions workflows that are required. (see [below for nested schema](#rulesrequired_workflows)) + +- `required_code_scanning` - (List) Code scanning tools that are required. (see [below for nested schema](#rulesrequired_code_scanning)) + +- `branch_name_pattern` - (List) Parameters for the branch_name_pattern rule. (see [below for nested schema](#rulesbranch_name_pattern)) + +- `tag_name_pattern` - (List) Parameters for the tag_name_pattern rule. (see [below for nested schema](#rulestag_name_pattern)) + +- `commit_author_email_pattern` - (List) Parameters for the commit_author_email_pattern rule. (see [below for nested schema](#rulescommit_author_email_pattern)) + +- `commit_message_pattern` - (List) Parameters for the commit_message_pattern rule. (see [below for nested schema](#rulescommit_message_pattern)) + +- `committer_email_pattern` - (List) Parameters for the committer_email_pattern rule. (see [below for nested schema](#rulescommitter_email_pattern)) + +- `file_path_restriction` - (List) File path restrictions for push rulesets. (see [below for nested schema](#rulesfile_path_restriction)) + +- `max_file_size` - (List) Maximum file size restrictions for push rulesets. (see [below for nested schema](#rulesmax_file_size)) + +- `max_file_path_length` - (List) Maximum file path length restrictions for push rulesets. (see [below for nested schema](#rulesmax_file_path_length)) + +- `file_extension_restriction` - (List) File extension restrictions for push rulesets. 
(see [below for nested schema](#rulesfile_extension_restriction)) + +- `repository_creation` - (Boolean) Only allow users with bypass permission to create repositories. Only valid for `repository` target. + +- `repository_deletion` - (Boolean) Only allow users with bypass permission to delete repositories. Only valid for `repository` target. + +- `repository_transfer` - (Boolean) Only allow users with bypass permission to transfer repositories. Only valid for `repository` target. + +- `repository_name` - (List) Restrict repository names to match specified patterns. Only valid for `repository` target. (see [below for nested schema](#rulesrepository_name)) + +- `repository_visibility` - (List) Restrict repository visibility changes. Only valid for `repository` target. (see [below for nested schema](#rulesrepository_visibility)) + +### rules.pull_request + +- `dismiss_stale_reviews_on_push` - (Boolean) New, reviewable commits pushed will dismiss previous pull request review approvals. + +- `require_code_owner_review` - (Boolean) Require an approving review in pull requests that modify files that have a designated code owner. + +- `require_last_push_approval` - (Boolean) Whether the most recent reviewable push must be approved by someone other than the person who pushed it. + +- `required_approving_review_count` - (Number) The number of approving reviews that are required before a pull request can be merged. + +- `required_review_thread_resolution` - (Boolean) All conversations on code must be resolved before a pull request can be merged. + +- `allowed_merge_methods` - (List of String) The merge methods allowed for pull requests. Possible values are `merge`, `squash`, and `rebase`. + +### rules.copilot_code_review + +- `review_on_push` - (Boolean) Copilot automatically reviews each new push to the pull request. + +- `review_draft_pull_requests` - (Boolean) Copilot automatically reviews draft pull requests before they are marked as ready for review. 
+ +### rules.required_status_checks + +- `required_check` - (List) Status checks that are required. (see [below for nested schema](#rulesrequired_status_checksrequired_check)) + +- `strict_required_status_checks_policy` - (Boolean) Whether pull requests targeting a matching branch must be tested with the latest code. + +- `do_not_enforce_on_create` - (Boolean) Allow repositories and branches to be created if a check would otherwise prohibit it. + +### rules.required_status_checks.required_check + +- `context` - (String) The status check context name that must be present on the commit. + +- `integration_id` - (Number) The optional integration ID that this status check must originate from. + +- `do_not_enforce_on_create` - (Boolean) Allow repositories and branches to be created if a check would otherwise prohibit it. + +### rules.required_workflows + +- `required_workflow` - (List) Actions workflows that are required. (see [below for nested schema](#rulesrequired_workflowsrequired_workflow)) + +- `do_not_enforce_on_create` - (Boolean) Allow repositories and branches to be created if a check would otherwise prohibit it. + +### rules.required_workflows.required_workflow + +- `repository_id` - (Number) The ID of the repository. + +- `path` - (String) The path to the YAML definition file of the workflow. + +- `ref` - (String) The ref from which to fetch the workflow. + +### rules.required_code_scanning + +- `required_code_scanning_tool` - (List) Code scanning tools that are required. (see [below for nested schema](#rulesrequired_code_scanningrequired_code_scanning_tool)) + +### rules.required_code_scanning.required_code_scanning_tool + +- `alerts_threshold` - (String) The severity level at which code scanning results that raise alerts block a reference update. + +- `security_alerts_threshold` - (String) The severity level at which code scanning results that raise security alerts block a reference update. + +- `tool` - (String) The name of a code scanning tool. 
+ +### rules.branch_name_pattern + +- `operator` - (String) The operator to use for matching. + +- `pattern` - (String) The pattern to match with. + +- `name` - (String) How this rule will appear to users. + +- `negate` - (Boolean) If true, the rule will fail if the pattern matches. + +### rules.tag_name_pattern + +- `operator` - (String) The operator to use for matching. + +- `pattern` - (String) The pattern to match with. + +- `name` - (String) How this rule will appear to users. + +- `negate` - (Boolean) If true, the rule will fail if the pattern matches. + +### rules.commit_author_email_pattern + +- `operator` - (String) The operator to use for matching. + +- `pattern` - (String) The pattern to match with. + +- `name` - (String) How this rule will appear to users. + +- `negate` - (Boolean) If true, the rule will fail if the pattern matches. + +### rules.commit_message_pattern + +- `operator` - (String) The operator to use for matching. + +- `pattern` - (String) The pattern to match with. + +- `name` - (String) How this rule will appear to users. + +- `negate` - (Boolean) If true, the rule will fail if the pattern matches. + +### rules.committer_email_pattern + +- `operator` - (String) The operator to use for matching. + +- `pattern` - (String) The pattern to match with. + +- `name` - (String) How this rule will appear to users. + +- `negate` - (Boolean) If true, the rule will fail if the pattern matches. + +### rules.file_path_restriction + +- `restricted_file_paths` - (List of String) The file paths that are restricted from being pushed to the commit graph. + +### rules.max_file_size + +- `max_file_size` - (Number) The maximum allowed size, in megabytes (MB), of a file. + +### rules.max_file_path_length + +- `max_file_path_length` - (Number) The maximum number of characters allowed in file paths. 
+ +### rules.file_extension_restriction + +- `restricted_file_extensions` - (List of String) The file extensions that are restricted from being pushed to the commit graph. + +### rules.repository_name + +- `pattern` - (String) The pattern to match repository names against. + +- `negate` - (Boolean) If true, the rule will fail if the pattern matches. + +### rules.repository_visibility + +- `internal` - (Boolean) Allow internal visibility for repositories. + +- `private` - (Boolean) Allow private visibility for repositories. diff --git a/website/docs/r/enterprise_ruleset.html.markdown b/website/docs/r/enterprise_ruleset.html.markdown new file mode 100644 index 0000000000..64d364004d --- /dev/null +++ b/website/docs/r/enterprise_ruleset.html.markdown @@ -0,0 +1,606 @@ +--- +layout: "github" +page_title: "github_enterprise_ruleset Resource - terraform-provider-github" +description: |- + Creates a GitHub enterprise ruleset. +--- + +# github_enterprise_ruleset (Resource) + +Creates a GitHub enterprise ruleset. + +This resource allows you to create and manage rulesets on the enterprise level. When applied, a new ruleset will be created. When destroyed, that ruleset will be removed. + +Enterprise rulesets allow you to manage rulesets across multiple organizations within your enterprise, providing centralized control over repository rules and policies. 
+ +## Example Usage + +### Basic Branch Ruleset + +```hcl +resource "github_enterprise_ruleset" "example" { + enterprise_slug = "my-enterprise" + name = "example-branch-ruleset" + target = "branch" + enforcement = "active" + + conditions { + organization_name { + include = ["~ALL"] + exclude = [] + } + + repository_name { + include = ["~ALL"] + exclude = [] + } + + ref_name { + include = ["~DEFAULT_BRANCH"] + exclude = [] + } + } + + bypass_actors { + actor_id = 1 + actor_type = "OrganizationAdmin" + bypass_mode = "always" + } + + rules { + creation = true + update = true + deletion = true + required_linear_history = true + required_signatures = true + + pull_request { + required_approving_review_count = 2 + require_code_owner_review = true + require_last_push_approval = true + required_review_thread_resolution = true + } + + required_status_checks { + strict_required_status_checks_policy = true + + required_check { + context = "ci/test" + } + + required_check { + context = "ci/deploy" + integration_id = 12345 + } + } + } +} +``` + +### Push Ruleset with File Restrictions + +```hcl +resource "github_enterprise_ruleset" "push_restrictions" { + enterprise_slug = "my-enterprise" + name = "push-restrictions" + target = "push" + enforcement = "active" + + conditions { + organization_id { + organization_ids = [123456, 789012] + } + + repository_name { + include = ["~ALL"] + exclude = ["legacy-*"] + } + } + + rules { + file_path_restriction { + restricted_file_paths = [".github/workflows/*", "*.env", "secrets/*"] + } + + max_file_size { + max_file_size = 100 + } + + max_file_path_length { + max_file_path_length = 255 + } + + file_extension_restriction { + restricted_file_extensions = ["*.exe", "*.dll", "*.so"] + } + } +} +``` + +### Tag Ruleset with Pattern Matching + +```hcl +resource "github_enterprise_ruleset" "tag_ruleset" { + enterprise_slug = "my-enterprise" + name = "tag-naming-convention" + target = "tag" + enforcement = "active" + + conditions { + 
organization_name { + include = ["production-*"] + exclude = [] + } + + repository_property { + include = ["repository_tier:production"] + exclude = [] + } + + ref_name { + include = ["~ALL"] + exclude = [] + } + } + + rules { + creation = false + deletion = true + + tag_name_pattern { + name = "Semantic versioning" + operator = "regex" + pattern = "^v[0-9]+\\.[0-9]+\\.[0-9]+$" + negate = false + } + } +} +``` + +### Enterprise Ruleset with Code Scanning Requirements + +```hcl +resource "github_enterprise_ruleset" "security_requirements" { + enterprise_slug = "my-enterprise" + name = "security-requirements" + target = "branch" + enforcement = "active" + + conditions { + organization_name { + include = ["~ALL"] + exclude = [] + } + + repository_name { + include = ["~ALL"] + exclude = [] + protected = true + } + + ref_name { + include = ["main", "master"] + exclude = [] + } + } + + rules { + required_code_scanning { + required_code_scanning_tool { + tool = "CodeQL" + alerts_threshold = "errors" + security_alerts_threshold = "high_or_higher" + } + + required_code_scanning_tool { + tool = "Semgrep" + alerts_threshold = "all" + security_alerts_threshold = "medium_or_higher" + } + } + + required_workflows { + required_workflow { + repository_id = 1234567 + path = ".github/workflows/security-scan.yml" + ref = "main" + } + } + } +} +``` + +### Enterprise Ruleset with Commit Pattern Enforcement + +```hcl +resource "github_enterprise_ruleset" "commit_patterns" { + enterprise_slug = "my-enterprise" + name = "commit-conventions" + target = "branch" + enforcement = "active" + + conditions { + organization_id { + organization_ids = [123456] + } + + repository_name { + include = ["~ALL"] + exclude = [] + } + + ref_name { + include = ["main", "develop"] + exclude = [] + } + } + + bypass_actors { + actor_id = 2 + actor_type = "RepositoryRole" + bypass_mode = "pull_request" + } + + rules { + commit_message_pattern { + name = "Conventional Commits" + operator = "regex" + pattern = 
"^(feat|fix|docs|style|refactor|test|chore)(\\(.+\\))?: .+" + negate = false + } + + commit_author_email_pattern { + name = "Corporate email required" + operator = "ends_with" + pattern = "@example.com" + negate = false + } + + committer_email_pattern { + name = "Corporate email required" + operator = "ends_with" + pattern = "@example.com" + negate = false + } + } +} +``` + +### Repository Target Ruleset + +```hcl +resource "github_enterprise_ruleset" "repository_management" { + enterprise_slug = "my-enterprise" + name = "repository-management" + target = "repository" + enforcement = "active" + + bypass_actors { + actor_id = 1 + actor_type = "OrganizationAdmin" + bypass_mode = "always" + } + + conditions { + organization_name { + include = ["~ALL"] + exclude = [] + } + + repository_name { + include = ["~ALL"] + exclude = [] + } + } + + rules { + repository_creation = true + repository_deletion = true + repository_transfer = true + + repository_name { + pattern = "^[a-z][a-z0-9-]*$" + negate = false + } + + repository_visibility { + internal = true + private = true + } + } +} +``` + +## Argument Reference + +- `enterprise_slug` - (Required) (String) The slug of the enterprise. + +- `name` - (Required) (String) The name of the ruleset. + +- `target` - (Required) (String) Possible values are `branch`, `tag`, `push`, and `repository`. Note: The `push` and `repository` targets are in beta and are subject to change. + +- `enforcement` - (Required) (String) Possible values for Enforcement are `disabled`, `active`, `evaluate`. Note: `evaluate` is currently only supported for owners of type `organization`. + +- `rules` - (Required) (Block List, Min: 1, Max: 1) Rules within the ruleset. (see [below for nested schema](#rules)) + +- `bypass_actors` - (Optional) (Block List) The actors that can bypass the rules in this ruleset. (see [below for nested schema](#bypass_actors)) + +- `conditions` - (Optional) (Block List, Max: 1) Parameters for an enterprise ruleset condition. 
Enterprise rulesets must include organization targeting (organization_name or organization_id) and repository targeting (repository_name, repository_id, or repository_property). For branch and tag targets, ref_name is also required. (see [below for nested schema](#conditions))
+
+### Rules
+
+The `rules` block supports the following:
+
+- `creation` - (Optional) (Boolean) Only allow users with bypass permission to create matching refs.
+
+- `update` - (Optional) (Boolean) Only allow users with bypass permission to update matching refs.
+
+- `deletion` - (Optional) (Boolean) Only allow users with bypass permission to delete matching refs.
+
+- `required_linear_history` - (Optional) (Boolean) Prevent merge commits from being pushed to matching branches.
+
+- `required_signatures` - (Optional) (Boolean) Commits pushed to matching branches must have verified signatures.
+
+- `non_fast_forward` - (Optional) (Boolean) Prevent users with push access from force pushing to branches.
+
+- `pull_request` - (Optional) (Block List, Max: 1) Require all commits be made to a non-target branch and submitted via a pull request before they can be merged. (see [below for nested schema](#rulespull_request))
+
+- `copilot_code_review` - (Optional) (Block List, Max: 1) Automatically request Copilot code review for new pull requests if the author has access to Copilot code review and their premium requests quota has not reached the limit. (see [below for nested schema](#rulescopilot_code_review))
+
+- `required_status_checks` - (Optional) (Block List, Max: 1) Choose which status checks must pass before branches can be merged into a branch that matches this rule. (see [below for nested schema](#rulesrequired_status_checks))
+
+- `required_workflows` - (Optional) (Block List, Max: 1) Define which Actions workflows must pass before changes can be merged into a branch matching the rule.
(see [below for nested schema](#rulesrequired_workflows)) + +- `required_code_scanning` - (Optional) (Block List, Max: 1) Define which tools must provide code scanning results before the reference is updated. (see [below for nested schema](#rulesrequired_code_scanning)) + +- `branch_name_pattern` - (Optional) (Block List, Max: 1) Parameters to be used for the branch_name_pattern rule. Conflicts with `tag_name_pattern` as it only applies to rulesets with target `branch`. (see [below for nested schema](#rulesbranch_name_pattern)) + +- `tag_name_pattern` - (Optional) (Block List, Max: 1) Parameters to be used for the tag_name_pattern rule. Conflicts with `branch_name_pattern` as it only applies to rulesets with target `tag`. (see [below for nested schema](#rulestag_name_pattern)) + +- `commit_author_email_pattern` - (Optional) (Block List, Max: 1) Parameters to be used for the commit_author_email_pattern rule. (see [below for nested schema](#rulescommit_author_email_pattern)) + +- `commit_message_pattern` - (Optional) (Block List, Max: 1) Parameters to be used for the commit_message_pattern rule. (see [below for nested schema](#rulescommit_message_pattern)) + +- `committer_email_pattern` - (Optional) (Block List, Max: 1) Parameters to be used for the committer_email_pattern rule. (see [below for nested schema](#rulescommitter_email_pattern)) + +- `file_path_restriction` - (Optional) (Block List, Max: 1) Prevent commits that include changes to specified file paths from being pushed to the commit graph. This rule only applies to rulesets with target `push`. (see [below for nested schema](#rulesfile_path_restriction)) + +- `max_file_size` - (Optional) (Block List, Max: 1) Prevent commits that include files with a specified file size from being pushed to the commit graph. This rule only applies to rulesets with target `push`. 
(see [below for nested schema](#rulesmax_file_size)) + +- `max_file_path_length` - (Optional) (Block List, Max: 1) Prevent commits that include file paths that exceed a specified character limit from being pushed to the commit graph. This rule only applies to rulesets with target `push`. (see [below for nested schema](#rulesmax_file_path_length)) + +- `file_extension_restriction` - (Optional) (Block List, Max: 1) Prevent commits that include files with specified file extensions from being pushed to the commit graph. This rule only applies to rulesets with target `push`. (see [below for nested schema](#rulesfile_extension_restriction)) + +- `repository_creation` - (Optional) (Boolean) Only allow users with bypass permission to create repositories. Only valid for `repository` target. + +- `repository_deletion` - (Optional) (Boolean) Only allow users with bypass permission to delete repositories. Only valid for `repository` target. + +- `repository_transfer` - (Optional) (Boolean) Only allow users with bypass permission to transfer repositories. Only valid for `repository` target. + +- `repository_name` - (Optional) (Block List, Max: 1) Restrict repository names to match specified patterns. Only valid for `repository` target. (see [below for nested schema](#rulesrepository_name)) + +- `repository_visibility` - (Optional) (Block List, Max: 1) Restrict repository visibility changes. Only valid for `repository` target. (see [below for nested schema](#rulesrepository_visibility)) + +#### rules.pull_request + +- `dismiss_stale_reviews_on_push` - (Optional) (Boolean) New, reviewable commits pushed will dismiss previous pull request review approvals. Defaults to `false`. + +- `require_code_owner_review` - (Optional) (Boolean) Require an approving review in pull requests that modify files that have a designated code owner. Defaults to `false`. 
+ +- `require_last_push_approval` - (Optional) (Boolean) Whether the most recent reviewable push must be approved by someone other than the person who pushed it. Defaults to `false`. + +- `required_approving_review_count` - (Optional) (Number) The number of approving reviews that are required before a pull request can be merged. Defaults to `0`. + +- `required_review_thread_resolution` - (Optional) (Boolean) All conversations on code must be resolved before a pull request can be merged. Defaults to `false`. + +- `allowed_merge_methods` - (Optional) (List of String, Min: 1) The merge methods allowed for pull requests. Possible values are `merge`, `squash`, and `rebase`. + +#### rules.copilot_code_review + +- `review_on_push` - (Optional) (Boolean) Copilot automatically reviews each new push to the pull request. Defaults to `false`. + +- `review_draft_pull_requests` - (Optional) (Boolean) Copilot automatically reviews draft pull requests before they are marked as ready for review. Defaults to `false`. + +#### rules.required_status_checks + +- `required_check` - (Required) (Block Set, Min: 1) Status checks that are required. Several can be defined. (see [below for nested schema](#rulesrequired_status_checksrequired_check)) + +- `strict_required_status_checks_policy` - (Optional) (Boolean) Whether pull requests targeting a matching branch must be tested with the latest code. This setting will not take effect unless at least one status check is enabled. Defaults to `false`. + +- `do_not_enforce_on_create` - (Optional) (Boolean) Allow repositories and branches to be created if a check would otherwise prohibit it. Defaults to `false`. + +#### rules.required_status_checks.required_check + +- `context` - (Required) (String) The status check context name that must be present on the commit. + +- `integration_id` - (Optional) (Number) The optional integration ID that this status check must originate from. 
+ +- `do_not_enforce_on_create` - (Optional) (Boolean) Allow repositories and branches to be created if a check would otherwise prohibit it. Defaults to `false`. + +#### rules.required_workflows + +- `do_not_enforce_on_create` - (Optional) (Boolean) Allow repositories and branches to be created if a check would otherwise prohibit it. Defaults to `false`. + +- `required_workflow` - (Required) (Block Set, Min: 1) Actions workflows that are required. Multiple can be defined. (see [below for nested schema](#rulesrequired_workflowsrequired_workflow)) + +#### rules.required_workflows.required_workflow + +- `repository_id` - (Required) (Number) The ID of the repository. Names, full names and repository URLs are not supported. + +- `path` - (Required) (String) The path to the YAML definition file of the workflow. + +- `ref` - (Optional) (String) The optional ref from which to fetch the workflow. Defaults to `master`. + +#### rules.required_code_scanning + +- `required_code_scanning_tool` - (Required) (Block Set, Min: 1) Code scanning tools that are required. Multiple can be defined. (see [below for nested schema](#rulesrequired_code_scanningrequired_code_scanning_tool)) + +#### rules.required_code_scanning.required_code_scanning_tool + +- `alerts_threshold` - (Required) (String) The severity level at which code scanning results that raise alerts block a reference update. Can be one of: `none`, `errors`, `errors_and_warnings`, `all`. + +- `security_alerts_threshold` - (Required) (String) The severity level at which code scanning results that raise security alerts block a reference update. Can be one of: `none`, `critical`, `high_or_higher`, `medium_or_higher`, `all`. + +- `tool` - (Required) (String) The name of a code scanning tool. + +#### rules.branch_name_pattern + +- `operator` - (Required) (String) The operator to use for matching. Can be one of: `starts_with`, `ends_with`, `contains`, `regex`. + +- `pattern` - (Required) (String) The pattern to match with. 
+ +- `name` - (Optional) (String) How this rule will appear to users. + +- `negate` - (Optional) (Boolean) If true, the rule will fail if the pattern matches. + +#### rules.tag_name_pattern + +- `operator` - (Required) (String) The operator to use for matching. Can be one of: `starts_with`, `ends_with`, `contains`, `regex`. + +- `pattern` - (Required) (String) The pattern to match with. + +- `name` - (Optional) (String) How this rule will appear to users. + +- `negate` - (Optional) (Boolean) If true, the rule will fail if the pattern matches. + +#### rules.commit_author_email_pattern + +- `operator` - (Required) (String) The operator to use for matching. Can be one of: `starts_with`, `ends_with`, `contains`, `regex`. + +- `pattern` - (Required) (String) The pattern to match with. + +- `name` - (Optional) (String) How this rule will appear to users. + +- `negate` - (Optional) (Boolean) If true, the rule will fail if the pattern matches. + +#### rules.commit_message_pattern + +- `operator` - (Required) (String) The operator to use for matching. Can be one of: `starts_with`, `ends_with`, `contains`, `regex`. + +- `pattern` - (Required) (String) The pattern to match with. + +- `name` - (Optional) (String) How this rule will appear to users. + +- `negate` - (Optional) (Boolean) If true, the rule will fail if the pattern matches. + +#### rules.committer_email_pattern + +- `operator` - (Required) (String) The operator to use for matching. Can be one of: `starts_with`, `ends_with`, `contains`, `regex`. + +- `pattern` - (Required) (String) The pattern to match with. + +- `name` - (Optional) (String) How this rule will appear to users. + +- `negate` - (Optional) (Boolean) If true, the rule will fail if the pattern matches. + +#### rules.file_path_restriction + +- `restricted_file_paths` - (Required) (List of String, Min: 1) The file paths that are restricted from being pushed to the commit graph. 
+ +#### rules.max_file_size + +- `max_file_size` - (Required) (Number) The maximum allowed size, in megabytes (MB), of a file. Valid range is 1-100 MB. + +#### rules.max_file_path_length + +- `max_file_path_length` - (Required) (Number) The maximum number of characters allowed in file paths. + +#### rules.file_extension_restriction + +- `restricted_file_extensions` - (Required) (List of String, Min: 1) The file extensions that are restricted from being pushed to the commit graph. + +#### rules.repository_name + +- `pattern` - (Required) (String) The pattern to match repository names against. + +- `negate` - (Optional) (Boolean) If true, the rule will fail if the pattern matches. Defaults to `false`. + +#### rules.repository_visibility + +- `internal` - (Optional) (Boolean) Allow internal visibility for repositories. Defaults to `false`. + +- `private` - (Optional) (Boolean) Allow private visibility for repositories. Defaults to `false`. + +### bypass_actors + +- `actor_id` - (Optional) (Number) The ID of the actor that can bypass a ruleset. When `actor_type` is `OrganizationAdmin`, this should be set to `1`. Some resources such as DeployKey do not have an ID and this should be omitted. + +- `actor_type` - (Required) (String) The type of actor that can bypass a ruleset. Can be one of: `Integration`, `OrganizationAdmin`, `RepositoryRole`, `Team`, `DeployKey`. + +- `bypass_mode` - (Required) (String) When the specified actor can bypass the ruleset. pull_request means that an actor can only bypass rules on pull requests. Can be one of: `always`, `pull_request`, `exempt`. + +~>Note: at the time of writing this, the following actor types correspond to the following actor IDs: + +- `OrganizationAdmin` -> `1` +- `RepositoryRole` (This is the actor type, the following are the base repository roles and their associated IDs.) 
+ - `maintain` -> `2` + - `write` -> `4` + - `admin` -> `5` + +### conditions + +Enterprise rulesets require targeting both organizations and repositories. At least one organization targeting condition (`organization_name` or `organization_id`) and one repository targeting condition (`repository_name`, `repository_id`, or `repository_property`) must be specified. For `branch` and `tag` targets, `ref_name` is also required. + +- `organization_name` - (Optional) (Block List, Max: 1) Conditions for organization names that the ruleset targets. Conflicts with `organization_id`. (see [below for nested schema](#conditionsorganization_name)) + +- `organization_id` - (Optional) (Block List, Max: 1) Conditions for organization IDs that the ruleset targets. Conflicts with `organization_name`. (see [below for nested schema](#conditionsorganization_id)) + +- `repository_name` - (Optional) (Block List, Max: 1) Conditions for repository names that the ruleset targets. (see [below for nested schema](#conditionsrepository_name)) + +- `repository_id` - (Optional) (Block List, Max: 1) Conditions for repository IDs that the ruleset targets. (see [below for nested schema](#conditionsrepository_id)) + +- `repository_property` - (Optional) (Block List, Max: 1) Conditions for repository properties that the ruleset targets. (see [below for nested schema](#conditionsrepository_property)) + +- `ref_name` - (Optional) (Block List, Max: 1) Conditions for ref names that the ruleset targets. Required for `branch` and `tag` targets. (see [below for nested schema](#conditionsref_name)) + +#### conditions.organization_name + +- `include` - (Required) (List of String) Array of organization name patterns to include. One of these patterns must match for the condition to pass. Also accepts `~ALL` to include all organizations. + +- `exclude` - (Required) (List of String) Array of organization name patterns to exclude. The condition will not pass if any of these patterns match. 
+
+#### conditions.organization_id
+
+- `organization_ids` - (Required) (List of Number) Array of organization IDs to target. One of these IDs must match for the condition to pass.
+
+#### conditions.repository_name
+
+- `include` - (Required) (List of String) Array of repository name patterns to include. One of these patterns must match for the condition to pass. Also accepts `~ALL` to include all repositories.
+
+- `exclude` - (Required) (List of String) Array of repository name patterns to exclude. The condition will not pass if any of these patterns match.
+
+- `protected` - (Optional) (Boolean) Whether renaming of target repositories is prevented. Defaults to `false`.
+
+#### conditions.repository_id
+
+- `repository_ids` - (Required) (List of Number) Array of repository IDs to target. One of these IDs must match for the condition to pass.
+
+#### conditions.repository_property
+
+- `include` - (Required) (List of String) The repository properties to include. All properties must match for the condition to pass. Repository properties are in the format `property_name:property_value`.
+
+- `exclude` - (Required) (List of String) The repository properties to exclude. Repository properties are in the format `property_name:property_value`.
+
+#### conditions.ref_name
+
+- `include` - (Required) (List of String) Array of ref names or patterns to include. One of these patterns must match for the condition to pass. Also accepts `~DEFAULT_BRANCH` to include the default branch or `~ALL` to include all branches.
+
+- `exclude` - (Required) (List of String) Array of ref names or patterns to exclude. The condition will not pass if any of these patterns match.
+
+## Attributes Reference
+
+The following additional attributes are exported:
+
+- `etag` - (String) The etag of the ruleset.
+
+- `node_id` - (String) GraphQL global node id for use with v4 API.
+
+- `ruleset_id` - (Number) GitHub ID for the ruleset.
+ +## Import + +GitHub Enterprise Rulesets can be imported using the enterprise slug and ruleset ID in the format `{enterprise_slug}:{ruleset_id}`, e.g. + +```sh +terraform import github_enterprise_ruleset.example my-enterprise:12345 +```