diff --git a/github/data_source_github_enterprise_ruleset.go b/github/data_source_github_enterprise_ruleset.go new file mode 100644 index 0000000000..caa607d74f --- /dev/null +++ b/github/data_source_github_enterprise_ruleset.go @@ -0,0 +1,369 @@ +package github + +import ( + "context" + "errors" + "fmt" + "net/http" + "strconv" + + "github.com/google/go-github/v81/github" + "github.com/hashicorp/terraform-plugin-log/tflog" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +func dataSourceGithubEnterpriseRuleset() *schema.Resource { + return &schema.Resource{ + ReadContext: dataSourceGithubEnterpriseRulesetRead, + + Schema: map[string]*schema.Schema{ + "enterprise_slug": { + Type: schema.TypeString, + Required: true, + Description: "The slug of the enterprise.", + }, + "ruleset_id": { + Type: schema.TypeInt, + Required: true, + Description: "The ID of the ruleset to retrieve.", + }, + "name": { + Type: schema.TypeString, + Computed: true, + Description: "The name of the ruleset.", + }, + "target": { + Type: schema.TypeString, + Computed: true, + Description: "The target of the ruleset (branch, tag, or push).", + }, + "enforcement": { + Type: schema.TypeString, + Computed: true, + Description: "The enforcement level of the ruleset (disabled, active, or evaluate).", + }, + "bypass_actors": { + Type: schema.TypeList, + Computed: true, + Description: "The actors that can bypass the rules in this ruleset.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "actor_id": { + Type: schema.TypeInt, + Computed: true, + Description: "The ID of the actor that can bypass a ruleset.", + }, + "actor_type": { + Type: schema.TypeString, + Computed: true, + Description: "The type of actor that can bypass a ruleset.", + }, + "bypass_mode": { + Type: schema.TypeString, + Computed: true, + Description: "When the specified actor can bypass the ruleset.", + }, + }, + }, + }, + "node_id": { + Type: 
schema.TypeString, + Computed: true, + Description: "GraphQL global node id for use with v4 API.", + }, + "conditions": { + Type: schema.TypeList, + Computed: true, + Description: "Parameters for an enterprise ruleset condition.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "organization_name": { + Type: schema.TypeList, + Computed: true, + Description: "Conditions for organization names that the ruleset targets.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "include": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{Type: schema.TypeString}, + Description: "Array of organization name patterns to include.", + }, + "exclude": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{Type: schema.TypeString}, + Description: "Array of organization name patterns to exclude.", + }, + }, + }, + }, + "organization_id": { + Type: schema.TypeList, + Computed: true, + Description: "Conditions for organization IDs that the ruleset targets.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "organization_ids": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{Type: schema.TypeInt}, + Description: "Array of organization IDs to target.", + }, + }, + }, + }, + "repository_name": { + Type: schema.TypeList, + Computed: true, + Description: "Conditions for repository names that the ruleset targets.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "include": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{Type: schema.TypeString}, + Description: "Array of repository name patterns to include.", + }, + "exclude": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{Type: schema.TypeString}, + Description: "Array of repository name patterns to exclude.", + }, + "protected": { + Type: schema.TypeBool, + Computed: true, + Description: "Whether to target only protected repositories.", + }, + }, + }, + }, + "repository_id": { + Type: 
schema.TypeList, + Computed: true, + Description: "Conditions for repository IDs that the ruleset targets.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "repository_ids": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{Type: schema.TypeInt}, + Description: "Array of repository IDs to target.", + }, + }, + }, + }, + "repository_property": { + Type: schema.TypeList, + Computed: true, + Description: "Conditions for repository properties that the ruleset targets.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "include": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{Type: schema.TypeString}, + Description: "Array of repository property patterns to include.", + }, + "exclude": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{Type: schema.TypeString}, + Description: "Array of repository property patterns to exclude.", + }, + }, + }, + }, + "ref_name": { + Type: schema.TypeList, + Computed: true, + Description: "Conditions for ref names (branches or tags) that the ruleset targets.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "include": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{Type: schema.TypeString}, + Description: "Array of ref name patterns to include.", + }, + "exclude": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{Type: schema.TypeString}, + Description: "Array of ref name patterns to exclude.", + }, + }, + }, + }, + }, + }, + }, + "rules": { + Type: schema.TypeList, + Computed: true, + Description: "Rules for the ruleset.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "creation": { + Type: schema.TypeBool, + Computed: true, + Description: "Only allow users with bypass permission to create matching refs.", + }, + "update": { + Type: schema.TypeBool, + Computed: true, + Description: "Only allow users with bypass permission to update matching refs.", + }, + "deletion": { + Type: 
schema.TypeBool, + Computed: true, + Description: "Only allow users with bypass permissions to delete matching refs.", + }, + "required_linear_history": { + Type: schema.TypeBool, + Computed: true, + Description: "Prevent merge commits from being pushed to matching branches.", + }, + "required_signatures": { + Type: schema.TypeBool, + Computed: true, + Description: "Commits pushed to matching branches must have verified signatures.", + }, + "non_fast_forward": { + Type: schema.TypeBool, + Computed: true, + Description: "Prevent users with push access from force pushing to branches.", + }, + "pull_request": { + Type: schema.TypeList, + Computed: true, + Description: "Require all commits be made to a non-target branch and submitted via a pull request before they can be merged.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "dismiss_stale_reviews_on_push": { + Type: schema.TypeBool, + Computed: true, + Description: "New, reviewable commits pushed will dismiss previous pull request review approvals.", + }, + "require_code_owner_review": { + Type: schema.TypeBool, + Computed: true, + Description: "Require an approving review in pull requests that modify files that have a designated code owner.", + }, + "require_last_push_approval": { + Type: schema.TypeBool, + Computed: true, + Description: "Whether the most recent reviewable push must be approved by someone other than the person who pushed it.", + }, + "required_approving_review_count": { + Type: schema.TypeInt, + Computed: true, + Description: "The number of approving reviews that are required before a pull request can be merged.", + }, + "required_review_thread_resolution": { + Type: schema.TypeBool, + Computed: true, + Description: "All conversations on code must be resolved before a pull request can be merged.", + }, + }, + }, + }, + "required_status_checks": { + Type: schema.TypeList, + Computed: true, + Description: "Choose which status checks must pass before branches can be merged into a 
branch that matches this rule.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "strict_required_status_checks_policy": { + Type: schema.TypeBool, + Computed: true, + Description: "Whether pull requests targeting a matching branch must be tested with the latest code.", + }, + "do_not_enforce_on_create": { + Type: schema.TypeBool, + Computed: true, + Description: "Allow repositories and branches to be created if a check would otherwise prohibit it.", + }, + }, + }, + }, + "required_workflows": { + Type: schema.TypeList, + Computed: true, + Description: "Choose which Actions workflows must pass before branches can be merged into a branch that matches this rule.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "do_not_enforce_on_create": { + Type: schema.TypeBool, + Computed: true, + Description: "Allow repositories and branches to be created if a check would otherwise prohibit it.", + }, + }, + }, + }, + }, + }, + }, + "etag": { + Type: schema.TypeString, + Computed: true, + Description: "The ETag of the ruleset for conditional updates.", + }, + }, + } +} + +func dataSourceGithubEnterpriseRulesetRead(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*Owner).v3client + enterpriseSlug := d.Get("enterprise_slug").(string) + rulesetID := int64(d.Get("ruleset_id").(int)) + + tflog.Trace(ctx, fmt.Sprintf("Reading enterprise ruleset: %s/%d", enterpriseSlug, rulesetID), map[string]any{ + "enterprise_slug": enterpriseSlug, + "ruleset_id": rulesetID, + }) + + ruleset, resp, err := client.Enterprise.GetRepositoryRuleset(ctx, enterpriseSlug, rulesetID) + if err != nil { + var ghErr *github.ErrorResponse + if errors.As(err, &ghErr) { + if ghErr.Response.StatusCode == http.StatusNotFound { + tflog.Error(ctx, fmt.Sprintf("Enterprise ruleset not found: %s/%d", enterpriseSlug, rulesetID), map[string]any{ + "enterprise_slug": enterpriseSlug, + "ruleset_id": rulesetID, + }) + return diag.Errorf("enterprise 
ruleset %d not found in enterprise %s", rulesetID, enterpriseSlug) + } + } + tflog.Error(ctx, fmt.Sprintf("Failed to read enterprise ruleset: %s/%d", enterpriseSlug, rulesetID), map[string]any{ + "enterprise_slug": enterpriseSlug, + "ruleset_id": rulesetID, + "error": err.Error(), + }) + return diag.FromErr(err) + } + + // Set the ID to the ruleset ID + d.SetId(strconv.FormatInt(ruleset.GetID(), 10)) + + // Set all computed attributes + _ = d.Set("ruleset_id", ruleset.ID) + _ = d.Set("name", ruleset.Name) + _ = d.Set("target", ruleset.GetTarget()) + _ = d.Set("enforcement", ruleset.Enforcement) + _ = d.Set("bypass_actors", flattenBypassActors(ruleset.BypassActors)) + _ = d.Set("conditions", flattenConditions(ruleset.GetConditions(), true)) + _ = d.Set("rules", flattenRules(ruleset.Rules, true)) + _ = d.Set("node_id", ruleset.GetNodeID()) + _ = d.Set("etag", resp.Header.Get("ETag")) + + tflog.Trace(ctx, fmt.Sprintf("Successfully read enterprise ruleset: %s/%d", enterpriseSlug, rulesetID), map[string]any{ + "enterprise_slug": enterpriseSlug, + "ruleset_id": rulesetID, + "name": ruleset.Name, + }) + + return nil +} diff --git a/github/data_source_github_enterprise_ruleset_test.go b/github/data_source_github_enterprise_ruleset_test.go new file mode 100644 index 0000000000..73cbdbfb96 --- /dev/null +++ b/github/data_source_github_enterprise_ruleset_test.go @@ -0,0 +1,95 @@ +package github + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccGithubEnterpriseRulesetDataSource(t *testing.T) { + t.Run("queries an enterprise ruleset", func(t *testing.T) { + randomID := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum) + testRulesetName := fmt.Sprintf("%senterprise-ruleset-%s", testResourcePrefix, randomID) + + config := fmt.Sprintf(` + resource "github_enterprise_ruleset" "test" { + enterprise_slug = "%s" + name = "%s" + target = "branch" + 
enforcement = "active" + + conditions { + organization_name { + include = ["~ALL"] + exclude = [] + } + + repository_name { + include = ["~ALL"] + exclude = [] + } + + ref_name { + include = ["refs/heads/main"] + exclude = [] + } + } + + rules { + creation = false + deletion = false + } + } + `, testAccConf.enterpriseSlug, testRulesetName) + + config2 := config + ` + data "github_enterprise_ruleset" "test" { + enterprise_slug = github_enterprise_ruleset.test.enterprise_slug + ruleset_id = github_enterprise_ruleset.test.ruleset_id + } + ` + + check := resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet( + "data.github_enterprise_ruleset.test", "name", + ), + resource.TestCheckResourceAttr( + "data.github_enterprise_ruleset.test", "name", + testRulesetName, + ), + resource.TestCheckResourceAttr( + "data.github_enterprise_ruleset.test", "target", + "branch", + ), + resource.TestCheckResourceAttr( + "data.github_enterprise_ruleset.test", "enforcement", + "active", + ), + resource.TestCheckResourceAttrSet( + "data.github_enterprise_ruleset.test", "node_id", + ), + resource.TestCheckResourceAttrSet( + "data.github_enterprise_ruleset.test", "etag", + ), + ) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { + skipUnlessEnterprise(t) + }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: config, + Check: resource.ComposeTestCheckFunc(), + }, + { + Config: config2, + Check: check, + }, + }, + }) + }) +} diff --git a/github/provider.go b/github/provider.go index 4f857d27c0..70a8bd4474 100644 --- a/github/provider.go +++ b/github/provider.go @@ -210,6 +210,7 @@ func Provider() *schema.Provider { "github_user_invitation_accepter": resourceGithubUserInvitationAccepter(), "github_user_ssh_key": resourceGithubUserSshKey(), "github_enterprise_organization": resourceGithubEnterpriseOrganization(), + "github_enterprise_ruleset": resourceGithubEnterpriseRuleset(), "github_enterprise_actions_runner_group": 
resourceGithubActionsEnterpriseRunnerGroup(), "github_enterprise_actions_workflow_permissions": resourceGithubEnterpriseActionsWorkflowPermissions(), "github_actions_organization_workflow_permissions": resourceGithubActionsOrganizationWorkflowPermissions(), @@ -289,6 +290,7 @@ func Provider() *schema.Provider { "github_user_external_identity": dataSourceGithubUserExternalIdentity(), "github_users": dataSourceGithubUsers(), "github_enterprise": dataSourceGithubEnterprise(), + "github_enterprise_ruleset": dataSourceGithubEnterpriseRuleset(), "github_repository_environment_deployment_policies": dataSourceGithubRepositoryEnvironmentDeploymentPolicies(), }, } diff --git a/github/resource_github_enterprise_ruleset.go b/github/resource_github_enterprise_ruleset.go new file mode 100644 index 0000000000..ffad0ad488 --- /dev/null +++ b/github/resource_github_enterprise_ruleset.go @@ -0,0 +1,860 @@ +package github + +import ( + "context" + "errors" + "fmt" + "net/http" + "strconv" + + "github.com/google/go-github/v81/github" + "github.com/hashicorp/terraform-plugin-log/tflog" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" +) + +func resourceGithubEnterpriseRuleset() *schema.Resource { + return &schema.Resource{ + CreateContext: resourceGithubEnterpriseRulesetCreate, + ReadContext: resourceGithubEnterpriseRulesetRead, + UpdateContext: resourceGithubEnterpriseRulesetUpdate, + DeleteContext: resourceGithubEnterpriseRulesetDelete, + + SchemaVersion: 1, + + Schema: map[string]*schema.Schema{ + "enterprise_slug": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "The slug of the enterprise.", + }, + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringLenBetween(1, 100), + Description: "The name of the ruleset.", + }, + "target": { + Type: schema.TypeString, + Required: true, + 
ValidateFunc: validation.StringInSlice([]string{"branch", "tag", "push"}, false), + Description: "Possible values are `branch`, `tag` and `push`. Note: The `push` target is in beta and is subject to change.", + }, + "enforcement": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringInSlice([]string{"disabled", "active", "evaluate"}, false), + Description: "Possible values for Enforcement are `disabled`, `active`, `evaluate`. Note: `evaluate` is currently only supported for owners of type `organization`.", + }, + "bypass_actors": { + Type: schema.TypeList, + Optional: true, + DiffSuppressFunc: bypassActorsDiffSuppressFunc, + Description: "The actors that can bypass the rules in this ruleset.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "actor_id": { + Type: schema.TypeInt, + Optional: true, + Default: nil, + Description: "The ID of the actor that can bypass a ruleset. When `actor_type` is `OrganizationAdmin`, this should be set to `1`. Some resources such as DeployKey do not have an ID and this should be omitted.", + }, + "actor_type": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringInSlice([]string{"Integration", "OrganizationAdmin", "RepositoryRole", "Team", "DeployKey"}, false), + Description: "The type of actor that can bypass a ruleset. See https://docs.github.com/en/rest/orgs/rules for more information", + }, + "bypass_mode": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringInSlice([]string{"always", "pull_request", "exempt"}, false), + Description: "When the specified actor can bypass the ruleset. pull_request means that an actor can only bypass rules on pull requests. 
Can be one of: `always`, `pull_request`, `exempt`.", + }, + }, + }, + }, + "node_id": { + Type: schema.TypeString, + Computed: true, + Description: "GraphQL global node id for use with v4 API.", + }, + "ruleset_id": { + Type: schema.TypeInt, + Computed: true, + Description: "GitHub ID for the ruleset.", + }, + "conditions": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Description: "Parameters for an enterprise ruleset condition. Enterprise rulesets must include organization targeting (organization_name or organization_id) and repository targeting (repository_name or repository_property). For branch and tag targets, ref_name is also required.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "organization_name": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Description: "Conditions for organization names that the ruleset targets. Conflicts with `organization_id`.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "include": { + Type: schema.TypeList, + Required: true, + Description: "Array of organization names or patterns to include. One of these patterns must match for the condition to pass. Also accepts `~ALL` to include all organizations.", + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + "exclude": { + Type: schema.TypeList, + Required: true, + Description: "Array of organization names or patterns to exclude. The condition will not pass if any of these patterns match.", + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + }, + }, + }, + "ref_name": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Description: "Conditions for ref names (branches or tags) that the ruleset targets.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "include": { + Type: schema.TypeList, + Required: true, + Description: "Array of ref names or patterns to include. One of these patterns must match for the condition to pass. 
Also accepts `~DEFAULT_BRANCH` to include the default branch or `~ALL` to include all branches.", + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + "exclude": { + Type: schema.TypeList, + Required: true, + Description: "Array of ref names or patterns to exclude. The condition will not pass if any of these patterns match.", + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + }, + }, + }, + "repository_name": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Description: "Conditions for repository names that the ruleset targets. Conflicts with `repository_id`.", + ExactlyOneOf: []string{"conditions.0.repository_id"}, + AtLeastOneOf: []string{"conditions.0.repository_id"}, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "include": { + Type: schema.TypeList, + Required: true, + Description: "Array of repository names or patterns to include. One of these patterns must match for the condition to pass. Also accepts `~ALL` to include all repositories.", + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + "exclude": { + Type: schema.TypeList, + Required: true, + Description: "Array of repository names or patterns to exclude. The condition will not pass if any of these patterns match.", + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + "protected": { + Type: schema.TypeBool, + Optional: true, + Default: false, + Description: "Whether renaming of target repositories is prevented.", + }, + }, + }, + }, + "repository_id": { + Type: schema.TypeList, + Optional: true, + Description: "The repository IDs that the ruleset applies to. 
One of these IDs must match for the condition to pass.", + Elem: &schema.Schema{ + Type: schema.TypeInt, + }, + }, + }, + }, + }, + "rules": { + Type: schema.TypeList, + Required: true, + MaxItems: 1, + Description: "Rules within the ruleset.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "creation": { + Type: schema.TypeBool, + Optional: true, + Description: "Only allow users with bypass permission to create matching refs.", + }, + "update": { + Type: schema.TypeBool, + Optional: true, + Description: "Only allow users with bypass permission to update matching refs.", + }, + "deletion": { + Type: schema.TypeBool, + Optional: true, + Description: "Only allow users with bypass permissions to delete matching refs.", + }, + "required_linear_history": { + Type: schema.TypeBool, + Optional: true, + Description: "Prevent merge commits from being pushed to matching branches.", + }, + "required_signatures": { + Type: schema.TypeBool, + Optional: true, + Description: "Commits pushed to matching branches must have verified signatures.", + }, + "pull_request": { + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + Description: "Require all commits be made to a non-target branch and submitted via a pull request before they can be merged.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "allowed_merge_methods": { + Type: schema.TypeList, + Optional: true, + MinItems: 1, + Description: "Array of allowed merge methods. Allowed values include `merge`, `squash`, and `rebase`. At least one option must be enabled.", + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateDiagFunc: toDiagFunc(validation.StringInSlice([]string{"merge", "squash", "rebase"}, false), "allowed_merge_methods"), + }, + }, + "dismiss_stale_reviews_on_push": { + Type: schema.TypeBool, + Optional: true, + Default: false, + Description: "New, reviewable commits pushed will dismiss previous pull request review approvals. 
Defaults to `false`.", + }, + "require_code_owner_review": { + Type: schema.TypeBool, + Optional: true, + Default: false, + Description: "Require an approving review in pull requests that modify files that have a designated code owner. Defaults to `false`.", + }, + "require_last_push_approval": { + Type: schema.TypeBool, + Optional: true, + Default: false, + Description: "Whether the most recent reviewable push must be approved by someone other than the person who pushed it. Defaults to `false`.", + }, + "required_approving_review_count": { + Type: schema.TypeInt, + Optional: true, + Default: 0, + Description: "The number of approving reviews that are required before a pull request can be merged. Defaults to `0`.", + }, + "required_review_thread_resolution": { + Type: schema.TypeBool, + Optional: true, + Default: false, + Description: "All conversations on code must be resolved before a pull request can be merged. Defaults to `false`.", + }, + }, + }, + }, + "copilot_code_review": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Description: "Automatically request Copilot code review for new pull requests if the author has access to Copilot code review and their premium requests quota has not reached the limit.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "review_on_push": { + Type: schema.TypeBool, + Optional: true, + Default: false, + Description: "Copilot automatically reviews each new push to the pull request. Defaults to `false`.", + }, + "review_draft_pull_requests": { + Type: schema.TypeBool, + Optional: true, + Default: false, + Description: "Copilot automatically reviews draft pull requests before they are marked as ready for review. Defaults to `false`.", + }, + }, + }, + }, + "required_status_checks": { + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + Description: "Choose which status checks must pass before branches can be merged into a branch that matches this rule. 
When enabled, commits must first be pushed to another branch, then merged or pushed directly to a branch that matches this rule after status checks have passed.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "required_check": { + Type: schema.TypeSet, + MinItems: 1, + Required: true, + Description: "Status checks that are required. Several can be defined.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "context": { + Type: schema.TypeString, + Required: true, + Description: "The status check context name that must be present on the commit.", + }, + "integration_id": { + Type: schema.TypeInt, + Optional: true, + Default: 0, + Description: "The optional integration ID that this status check must originate from.", + }, + }, + }, + }, + "strict_required_status_checks_policy": { + Type: schema.TypeBool, + Optional: true, + Description: "Whether pull requests targeting a matching branch must be tested with the latest code. This setting will not take effect unless at least one status check is enabled. 
Defaults to `false`.", + }, + "do_not_enforce_on_create": { + Type: schema.TypeBool, + Optional: true, + Description: "Allow repositories and branches to be created if a check would otherwise prohibit it.", + Default: false, + }, + }, + }, + }, + "non_fast_forward": { + Type: schema.TypeBool, + Optional: true, + Description: "Prevent users with push access from force pushing to branches.", + }, + "commit_message_pattern": { + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + Description: "Parameters to be used for the commit_message_pattern rule.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Optional: true, + Description: "How this rule will appear to users.", + }, + "negate": { + Type: schema.TypeBool, + Optional: true, + Description: "If true, the rule will fail if the pattern matches.", + }, + "operator": { + Type: schema.TypeString, + Required: true, + Description: "The operator to use for matching. Can be one of: `starts_with`, `ends_with`, `contains`, `regex`.", + }, + "pattern": { + Type: schema.TypeString, + Required: true, + Description: "The pattern to match with.", + }, + }, + }, + }, + "commit_author_email_pattern": { + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + Description: "Parameters to be used for the commit_author_email_pattern rule.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Optional: true, + Description: "How this rule will appear to users.", + }, + "negate": { + Type: schema.TypeBool, + Optional: true, + Description: "If true, the rule will fail if the pattern matches.", + }, + "operator": { + Type: schema.TypeString, + Required: true, + Description: "The operator to use for matching. 
Can be one of: `starts_with`, `ends_with`, `contains`, `regex`.", + }, + "pattern": { + Type: schema.TypeString, + Required: true, + Description: "The pattern to match with.", + }, + }, + }, + }, + "committer_email_pattern": { + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + Description: "Parameters to be used for the committer_email_pattern rule.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Optional: true, + Description: "How this rule will appear to users.", + }, + "negate": { + Type: schema.TypeBool, + Optional: true, + Description: "If true, the rule will fail if the pattern matches.", + }, + "operator": { + Type: schema.TypeString, + Required: true, + Description: "The operator to use for matching. Can be one of: `starts_with`, `ends_with`, `contains`, `regex`.", + }, + "pattern": { + Type: schema.TypeString, + Required: true, + Description: "The pattern to match with.", + }, + }, + }, + }, + "branch_name_pattern": { + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + ConflictsWith: []string{"rules.0.tag_name_pattern"}, + Description: "Parameters to be used for the branch_name_pattern rule. This rule only applies to repositories within an enterprise, it cannot be applied to repositories owned by individuals or regular organizations. Conflicts with `tag_name_pattern` as it only applies to rulesets with target `branch`.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Optional: true, + Description: "How this rule will appear to users.", + }, + "negate": { + Type: schema.TypeBool, + Optional: true, + Description: "If true, the rule will fail if the pattern matches.", + }, + "operator": { + Type: schema.TypeString, + Required: true, + Description: "The operator to use for matching. 
Can be one of: `starts_with`, `ends_with`, `contains`, `regex`.", + }, + "pattern": { + Type: schema.TypeString, + Required: true, + Description: "The pattern to match with.", + }, + }, + }, + }, + "tag_name_pattern": { + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + ConflictsWith: []string{"rules.0.branch_name_pattern"}, + Description: "Parameters to be used for the tag_name_pattern rule. This rule only applies to repositories within an enterprise, it cannot be applied to repositories owned by individuals or regular organizations. Conflicts with `branch_name_pattern` as it only applies to rulesets with target `tag`.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Optional: true, + Description: "How this rule will appear to users.", + }, + "negate": { + Type: schema.TypeBool, + Optional: true, + Description: "If true, the rule will fail if the pattern matches.", + }, + "operator": { + Type: schema.TypeString, + Required: true, + Description: "The operator to use for matching. Can be one of: `starts_with`, `ends_with`, `contains`, `regex`.", + }, + "pattern": { + Type: schema.TypeString, + Required: true, + Description: "The pattern to match with.", + }, + }, + }, + }, + "required_workflows": { + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + Description: "Choose which Actions workflows must pass before branches can be merged into a branch that matches this rule.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "do_not_enforce_on_create": { + Type: schema.TypeBool, + Optional: true, + Description: "Allow repositories and branches to be created if a check would otherwise prohibit it.", + }, + "required_workflow": { + Type: schema.TypeSet, + MinItems: 1, + Required: true, + Description: "Actions workflows that are required. 
Several can be defined.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "repository_id": { + Type: schema.TypeInt, + Required: true, + Description: "The repository in which the workflow is defined.", + }, + "path": { + Type: schema.TypeString, + Required: true, + Description: "The path to the workflow YAML definition file.", + }, + "ref": { + Type: schema.TypeString, + Optional: true, + Default: "master", + Description: "The ref (branch or tag) of the workflow file to use.", + }, + }, + }, + }, + }, + }, + }, + "required_code_scanning": { + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + Description: "Choose which tools must provide code scanning results before the reference is updated. When configured, code scanning must be enabled and have results for both the commit and the reference being updated.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "required_code_scanning_tool": { + Type: schema.TypeSet, + MinItems: 1, + Required: true, + Description: "Tools that must provide code scanning results for this rule to pass.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "alerts_threshold": { + Type: schema.TypeString, + Required: true, + Description: "The severity level at which code scanning results that raise alerts block a reference update. Can be one of: `none`, `errors`, `errors_and_warnings`, `all`.", + }, + "security_alerts_threshold": { + Type: schema.TypeString, + Required: true, + Description: "The severity level at which code scanning results that raise security alerts block a reference update. 
Can be one of: `none`, `critical`, `high_or_higher`, `medium_or_higher`, `all`.", + }, + "tool": { + Type: schema.TypeString, + Required: true, + Description: "The name of a code scanning tool.", + }, + }, + }, + }, + }, + }, + }, + "file_path_restriction": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Description: "Prevent commits that include changes in specified file paths from being pushed to the commit graph.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "restricted_file_paths": { + Type: schema.TypeList, + MinItems: 1, + Required: true, + Description: "The file paths that are restricted from being pushed to the commit graph.", + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + }, + }, + }, + "max_file_size": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Description: "Prevent pushes based on file size.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "max_file_size": { + Type: schema.TypeInt, + Required: true, + Description: "The maximum allowed size of a file in megabytes (MB). 
Valid range is 1-100 MB.", + ValidateDiagFunc: toDiagFunc(validation.IntBetween(1, 100), "max_file_size"), + }, + }, + }, + }, + "max_file_path_length": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Description: "Prevent pushes based on file path length.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "max_file_path_length": { + Type: schema.TypeInt, + Required: true, + Description: "The maximum allowed length of a file path.", + }, + }, + }, + }, + "file_extension_restriction": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Description: "Prevent pushes based on file extensions.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "restricted_file_extensions": { + Type: schema.TypeSet, + MinItems: 1, + Required: true, + Description: "The file extensions that are restricted from being pushed to the commit graph.", + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + }, + }, + }, + }, + }, + }, + "etag": { + Type: schema.TypeString, + Computed: true, + }, + }, + } +} + +// resourceGithubEnterpriseRulesetObject creates a GitHub RepositoryRuleset object for enterprise-level rulesets +func resourceGithubEnterpriseRulesetObject(d *schema.ResourceData) github.RepositoryRuleset { + enterpriseSlug := d.Get("enterprise_slug").(string) + target := github.RulesetTarget(d.Get("target").(string)) + enforcement := github.RulesetEnforcement(d.Get("enforcement").(string)) + sourceTypeEnum := github.RulesetSourceType("Enterprise") + + return github.RepositoryRuleset{ + Name: d.Get("name").(string), + Target: &target, + Source: enterpriseSlug, + SourceType: &sourceTypeEnum, + Enforcement: enforcement, + BypassActors: expandBypassActors(d.Get("bypass_actors").([]any)), + Conditions: expandConditions(d.Get("conditions").([]any), true), + Rules: expandRules(d.Get("rules").([]any), true), + } +} + +func resourceGithubEnterpriseRulesetCreate(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + 
client := meta.(*Owner).v3client + enterpriseSlug := d.Get("enterprise_slug").(string) + name := d.Get("name").(string) + + tflog.Debug(ctx, fmt.Sprintf("Creating enterprise ruleset: %s/%s", enterpriseSlug, name), map[string]any{ + "enterprise_slug": enterpriseSlug, + "name": name, + }) + + rulesetReq := resourceGithubEnterpriseRulesetObject(d) + + ruleset, resp, err := client.Enterprise.CreateRepositoryRuleset(ctx, enterpriseSlug, rulesetReq) + if err != nil { + tflog.Error(ctx, fmt.Sprintf("Failed to create enterprise ruleset: %s/%s", enterpriseSlug, name), map[string]any{ + "enterprise_slug": enterpriseSlug, + "name": name, + "error": err.Error(), + }) + return diag.FromErr(err) + } + + d.SetId(strconv.FormatInt(*ruleset.ID, 10)) + _ = d.Set("ruleset_id", ruleset.ID) + _ = d.Set("node_id", ruleset.GetNodeID()) + _ = d.Set("etag", resp.Header.Get("ETag")) + + tflog.Info(ctx, fmt.Sprintf("Created enterprise ruleset: %s/%s (ID: %d)", enterpriseSlug, name, *ruleset.ID), map[string]any{ + "enterprise_slug": enterpriseSlug, + "name": name, + "ruleset_id": *ruleset.ID, + }) + + return nil +} + +func resourceGithubEnterpriseRulesetRead(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*Owner).v3client + enterpriseSlug := d.Get("enterprise_slug").(string) + + tflog.Trace(ctx, fmt.Sprintf("Reading enterprise ruleset: %s", d.Id()), map[string]any{ + "enterprise_slug": enterpriseSlug, + "ruleset_id": d.Id(), + }) + + rulesetID, err := strconv.ParseInt(d.Id(), 10, 64) + if err != nil { + tflog.Error(ctx, fmt.Sprintf("Could not convert ruleset ID '%s' to int64", d.Id()), map[string]any{ + "enterprise_slug": enterpriseSlug, + "ruleset_id": d.Id(), + "error": err.Error(), + }) + return diag.FromErr(unconvertibleIdErr(d.Id(), err)) + } + + if !d.IsNewResource() { + ctx = context.WithValue(ctx, ctxEtag, d.Get("etag").(string)) + } + + ruleset, resp, err := client.Enterprise.GetRepositoryRuleset(ctx, enterpriseSlug, rulesetID) + if err != 
nil { + var ghErr *github.ErrorResponse + if errors.As(err, &ghErr) { + if ghErr.Response.StatusCode == http.StatusNotModified { + tflog.Debug(ctx, "API responded with StatusNotModified, not refreshing state", map[string]any{ + "enterprise_slug": enterpriseSlug, + "ruleset_id": rulesetID, + }) + return nil + } + if ghErr.Response.StatusCode == http.StatusNotFound { + tflog.Info(ctx, fmt.Sprintf("Removing ruleset %s/%d from state because it no longer exists in GitHub", enterpriseSlug, rulesetID), map[string]any{ + "enterprise_slug": enterpriseSlug, + "ruleset_id": rulesetID, + }) + d.SetId("") + return nil + } + } + tflog.Error(ctx, fmt.Sprintf("Failed to read enterprise ruleset: %s/%d", enterpriseSlug, rulesetID), map[string]any{ + "enterprise_slug": enterpriseSlug, + "ruleset_id": rulesetID, + "error": err.Error(), + }) + return diag.FromErr(err) + } + + _ = d.Set("ruleset_id", ruleset.ID) + _ = d.Set("name", ruleset.Name) + _ = d.Set("target", ruleset.GetTarget()) + _ = d.Set("enforcement", ruleset.Enforcement) + _ = d.Set("bypass_actors", flattenBypassActors(ruleset.BypassActors)) + _ = d.Set("conditions", flattenConditions(ruleset.GetConditions(), true)) + _ = d.Set("rules", flattenRules(ruleset.Rules, true)) + _ = d.Set("node_id", ruleset.GetNodeID()) + _ = d.Set("etag", resp.Header.Get("ETag")) + + tflog.Trace(ctx, fmt.Sprintf("Successfully read enterprise ruleset: %s/%d", enterpriseSlug, rulesetID), map[string]any{ + "enterprise_slug": enterpriseSlug, + "ruleset_id": rulesetID, + "name": ruleset.Name, + }) + + return nil +} + +func resourceGithubEnterpriseRulesetUpdate(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*Owner).v3client + enterpriseSlug := d.Get("enterprise_slug").(string) + name := d.Get("name").(string) + + rulesetID, err := strconv.ParseInt(d.Id(), 10, 64) + if err != nil { + tflog.Error(ctx, fmt.Sprintf("Could not convert ruleset ID '%s' to int64", d.Id()), map[string]any{ + "enterprise_slug": 
enterpriseSlug, + "ruleset_id": d.Id(), + "error": err.Error(), + }) + return diag.FromErr(unconvertibleIdErr(d.Id(), err)) + } + + tflog.Debug(ctx, fmt.Sprintf("Updating enterprise ruleset: %s/%d", enterpriseSlug, rulesetID), map[string]any{ + "enterprise_slug": enterpriseSlug, + "ruleset_id": rulesetID, + "name": name, + }) + + rulesetReq := resourceGithubEnterpriseRulesetObject(d) + + ruleset, resp, err := client.Enterprise.UpdateRepositoryRuleset(ctx, enterpriseSlug, rulesetID, rulesetReq) + if err != nil { + tflog.Error(ctx, fmt.Sprintf("Failed to update enterprise ruleset: %s/%d", enterpriseSlug, rulesetID), map[string]any{ + "enterprise_slug": enterpriseSlug, + "ruleset_id": rulesetID, + "error": err.Error(), + }) + return diag.FromErr(err) + } + + d.SetId(strconv.FormatInt(*ruleset.ID, 10)) + _ = d.Set("ruleset_id", ruleset.ID) + _ = d.Set("node_id", ruleset.GetNodeID()) + _ = d.Set("etag", resp.Header.Get("ETag")) + + tflog.Info(ctx, fmt.Sprintf("Updated enterprise ruleset: %s/%d", enterpriseSlug, rulesetID), map[string]any{ + "enterprise_slug": enterpriseSlug, + "ruleset_id": rulesetID, + "name": name, + }) + + return nil +} + +func resourceGithubEnterpriseRulesetDelete(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*Owner).v3client + enterpriseSlug := d.Get("enterprise_slug").(string) + + rulesetID, err := strconv.ParseInt(d.Id(), 10, 64) + if err != nil { + tflog.Error(ctx, fmt.Sprintf("Could not convert ruleset ID '%s' to int64", d.Id()), map[string]any{ + "enterprise_slug": enterpriseSlug, + "ruleset_id": d.Id(), + "error": err.Error(), + }) + return diag.FromErr(unconvertibleIdErr(d.Id(), err)) + } + + tflog.Debug(ctx, fmt.Sprintf("Deleting enterprise ruleset: %s/%d", enterpriseSlug, rulesetID), map[string]any{ + "enterprise_slug": enterpriseSlug, + "ruleset_id": rulesetID, + }) + + _, err = client.Enterprise.DeleteRepositoryRuleset(ctx, enterpriseSlug, rulesetID) + if err != nil { + tflog.Error(ctx, 
fmt.Sprintf("Failed to delete enterprise ruleset: %s/%d", enterpriseSlug, rulesetID), map[string]any{ + "enterprise_slug": enterpriseSlug, + "ruleset_id": rulesetID, + "error": err.Error(), + }) + return diag.FromErr(err) + } + + tflog.Info(ctx, fmt.Sprintf("Deleted enterprise ruleset: %s/%d", enterpriseSlug, rulesetID), map[string]any{ + "enterprise_slug": enterpriseSlug, + "ruleset_id": rulesetID, + }) + + return nil +} + diff --git a/github/resource_github_enterprise_ruleset_test.go b/github/resource_github_enterprise_ruleset_test.go new file mode 100644 index 0000000000..ba234d72e9 --- /dev/null +++ b/github/resource_github_enterprise_ruleset_test.go @@ -0,0 +1,750 @@ +package github + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccGithubEnterpriseRuleset_basic(t *testing.T) { + randomID := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum) + rulesetName := fmt.Sprintf("%s-enterprise-basic-%s", testResourcePrefix, randomID) + + rulesetHCL := ` + resource "github_enterprise_ruleset" "test" { + enterprise_slug = "%s" + name = "%s" + target = "branch" + enforcement = "active" + + conditions { + organization_name { + include = ["~ALL"] + exclude = [] + } + + repository_name { + include = ["~ALL"] + exclude = [] + } + + ref_name { + include = ["~ALL"] + exclude = [] + } + } + + rules { + creation = true + } + } + ` + config := fmt.Sprintf(rulesetHCL, testAccConf.enterpriseSlug, rulesetName) + + check := resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("github_enterprise_ruleset.test", "enterprise_slug", testAccConf.enterpriseSlug), + resource.TestCheckResourceAttr("github_enterprise_ruleset.test", "name", rulesetName), + resource.TestCheckResourceAttr("github_enterprise_ruleset.test", "target", "branch"), + resource.TestCheckResourceAttr("github_enterprise_ruleset.test", "enforcement", "active"), + ) + + 
resource.Test(t, resource.TestCase{ + PreCheck: func() { skipUnlessEnterprise(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: config, + Check: check, + }, + }, + }) +} + +func TestAccGithubEnterpriseRuleset_branch_rules(t *testing.T) { + randomID := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum) + rulesetName := fmt.Sprintf("%s-enterprise-branch-%s", testResourcePrefix, randomID) + + config := fmt.Sprintf(` +resource "github_enterprise_ruleset" "test" { + enterprise_slug = "%s" + name = "%s" + target = "branch" + enforcement = "active" + + bypass_actors { + actor_type = "DeployKey" + bypass_mode = "always" + } + + bypass_actors { + actor_id = 1 + actor_type = "OrganizationAdmin" + bypass_mode = "always" + } + + conditions { + organization_name { + include = ["~ALL"] + exclude = [] + } + + repository_name { + include = ["~ALL"] + exclude = [] + } + + ref_name { + include = ["~ALL"] + exclude = [] + } + } + + rules { + creation = true + update = true + deletion = true + required_linear_history = true + required_signatures = false + + pull_request { + required_approving_review_count = 2 + required_review_thread_resolution = true + require_code_owner_review = true + dismiss_stale_reviews_on_push = true + require_last_push_approval = true + } + + copilot_code_review { + review_on_push = true + review_draft_pull_requests = false + } + + required_code_scanning { + required_code_scanning_tool { + alerts_threshold = "errors" + security_alerts_threshold = "high_or_higher" + tool = "CodeQL" + } + } + + branch_name_pattern { + name = "test" + negate = false + operator = "starts_with" + pattern = "test" + } + + non_fast_forward = true + } +} +`, testAccConf.enterpriseSlug, rulesetName) + + check := resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("github_enterprise_ruleset.test", "name", rulesetName), + resource.TestCheckResourceAttr("github_enterprise_ruleset.test", "target", "branch"), + 
resource.TestCheckResourceAttr("github_enterprise_ruleset.test", "enforcement", "active"), + resource.TestCheckResourceAttr("github_enterprise_ruleset.test", "bypass_actors.#", "2"), + resource.TestCheckResourceAttr("github_enterprise_ruleset.test", "bypass_actors.0.actor_type", "DeployKey"), + resource.TestCheckResourceAttr("github_enterprise_ruleset.test", "bypass_actors.0.bypass_mode", "always"), + resource.TestCheckResourceAttr("github_enterprise_ruleset.test", "bypass_actors.1.actor_id", "1"), + resource.TestCheckResourceAttr("github_enterprise_ruleset.test", "bypass_actors.1.actor_type", "OrganizationAdmin"), + resource.TestCheckResourceAttr("github_enterprise_ruleset.test", "bypass_actors.1.bypass_mode", "always"), + resource.TestCheckResourceAttr("github_enterprise_ruleset.test", "rules.0.required_code_scanning.0.required_code_scanning_tool.0.alerts_threshold", "errors"), + resource.TestCheckResourceAttr("github_enterprise_ruleset.test", "rules.0.required_code_scanning.0.required_code_scanning_tool.0.security_alerts_threshold", "high_or_higher"), + resource.TestCheckResourceAttr("github_enterprise_ruleset.test", "rules.0.required_code_scanning.0.required_code_scanning_tool.0.tool", "CodeQL"), + resource.TestCheckResourceAttr("github_enterprise_ruleset.test", "rules.0.copilot_code_review.0.review_on_push", "true"), + resource.TestCheckResourceAttr("github_enterprise_ruleset.test", "rules.0.copilot_code_review.0.review_draft_pull_requests", "false"), + ) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { skipUnlessEnterprise(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: config, + Check: check, + }, + }, + }) +} + +func TestAccGithubEnterpriseRuleset_required_workflows(t *testing.T) { + randomID := acctest.RandStringFromCharSet(5, acctest.CharSetAlphaNum) + repoName := fmt.Sprintf("%srepo-enterprise-wf-%s", testResourcePrefix, randomID) + rulesetName := fmt.Sprintf("%s-enterprise-wf-ruleset-%s", testResourcePrefix, 
randomID) + workflowFilePath := ".github/workflows/echo.yaml" + + config := fmt.Sprintf(` +resource "github_repository" "test" { + name = "%s" + visibility = "private" + auto_init = true +} + +resource "github_repository_file" "workflow_file" { + repository = github_repository.test.name + branch = "main" + file = "%s" + content = <Note: at the time of writing this, the following actor types correspond to the following actor IDs: + +- `OrganizationAdmin` -> `1` +- `RepositoryRole` (This is the actor type, the following are the base repository roles and their associated IDs.) + - `maintain` -> `2` + - `write` -> `4` + - `admin` -> `5` + +### conditions + +Enterprise rulesets require targeting both organizations and repositories. At least one organization targeting condition (`organization_name` or `organization_id`) and one repository targeting condition (`repository_name`, `repository_id`, or `repository_property`) must be specified. For `branch` and `tag` targets, `ref_name` is also required. + +- `organization_name` - (Optional) (Block List, Max: 1) Conditions for organization names that the ruleset targets. Conflicts with `organization_id`. (see [below for nested schema](#conditionsorganization_name)) + +- `organization_id` - (Optional) (Block List, Max: 1) Conditions for organization IDs that the ruleset targets. Conflicts with `organization_name`. (see [below for nested schema](#conditionsorganization_id)) + +- `repository_name` - (Optional) (Block List, Max: 1) Conditions for repository names that the ruleset targets. (see [below for nested schema](#conditionsrepository_name)) + +- `repository_id` - (Optional) (Block List, Max: 1) Conditions for repository IDs that the ruleset targets. (see [below for nested schema](#conditionsrepository_id)) + +- `repository_property` - (Optional) (Block List, Max: 1) Conditions for repository properties that the ruleset targets. 
(see [below for nested schema](#conditionsrepository_property)) + +- `ref_name` - (Optional) (Block List, Max: 1) Conditions for ref names that the ruleset targets. Required for `branch` and `tag` targets. (see [below for nested schema](#conditionsref_name)) + +#### conditions.organization_name + +- `include` - (Required) (List of String) Array of organization name patterns to include. One of these patterns must match for the condition to pass. Also accepts `~ALL` to include all organizations. + +- `exclude` - (Required) (List of String) Array of organization name patterns to exclude. The condition will not pass if any of these patterns match. + +#### conditions.organization_id + +- `organization_ids` - (Required) (List of Number) Array of organization IDs to target. One of these IDs must match for the condition to pass. + +#### conditions.repository_name + +- `include` - (Required) (List of String) Array of repository name patterns to include. One of these patterns must match for the condition to pass. Also accepts `~ALL` to include all repositories. + +- `exclude` - (Required) (List of String) Array of repository name patterns to exclude. The condition will not pass if any of these patterns match. + +- `protected` - (Optional) (Boolean) Whether to target only protected repositories. Defaults to `false`. + +#### conditions.repository_id + +- `repository_ids` - (Required) (List of Number) Array of repository IDs to target. One of these IDs must match for the condition to pass. + +#### conditions.repository_property + +- `include` - (Required) (List of String) The repository properties to include. All properties must match for the condition to pass. Repository properties are in the format `property_name:property_value`. + +- `exclude` - (Required) (List of String) The repository properties to exclude. Repository properties are in the format `property_name:property_value`. 
+ +#### conditions.ref_name + +- `include` - (Required) (List of String) Array of ref names or patterns to include. One of these patterns must match for the condition to pass. Also accepts `~DEFAULT_BRANCH` to include the default branch or `~ALL` to include all branches. + +- `exclude` - (Required) (List of String) Array of ref names or patterns to exclude. The condition will not pass if any of these patterns match. + +## Attributes Reference + +The following additional attributes are exported: + +- `etag` - (String) The etag of the ruleset. + +- `node_id` - (String) GraphQL global node id for use with v4 API. + +- `ruleset_id` - (Number) GitHub ID for the ruleset. + +## Import + +GitHub Enterprise Rulesets can be imported using the enterprise slug and ruleset ID in the format `{enterprise_slug}/{ruleset_id}`, e.g.: + +```sh +terraform import github_enterprise_ruleset.example my-enterprise/12345 +```