Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,11 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

## [Unreleased]

### Added
- **Integration Model Access Control** - `portkey_integration` now supports `allow_all_models` attribute:
- Defaults to `true` (all models available, matching API behavior)
- Set to `false` to restrict access to only models explicitly enabled via `portkey_integration_model_access` resources

## [0.2.14] - 2026-02-27

### Added
Expand Down
19 changes: 19 additions & 0 deletions docs/resources/integration.md
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,24 @@ resource "portkey_integration" "openai_dev" {
}
```

### Integration with restricted model access

```terraform
resource "portkey_integration" "openai_restricted" {
name = "OpenAI Restricted"
ai_provider_id = "openai"
key = var.openai_api_key
allow_all_models = false
}

# Only enable specific models
resource "portkey_integration_model_access" "gpt4" {
integration_id = portkey_integration.openai_restricted.slug
model_slug = "gpt-4"
enabled = true
}
```

<!-- schema generated by tfplugindocs -->
## Schema

Expand All @@ -52,6 +70,7 @@ resource "portkey_integration" "openai_dev" {

### Optional

- `allow_all_models` (Boolean) Whether all models are enabled by default for this integration. When true (the default), all models for the provider are available. Set to false to restrict access to only models explicitly enabled via `portkey_integration_model_access` resources. Defaults to `true`.
- `configurations` (String, Sensitive) Provider-specific configurations as JSON. For AWS Bedrock with IAM Role, use: jsonencode({aws_role_arn = "arn:aws:iam::...", aws_region = "us-east-1"}). For Azure OpenAI: jsonencode({azure_auth_mode = "default", azure_resource_name = "...", azure_deployment_config = [{azure_deployment_name = "...", azure_api_version = "...", azure_model_slug = "gpt-4", is_default = true}]}). This is write-only and will not be returned by the API.
- `description` (String) Optional description of the integration.
- `key` (String, Sensitive) API key for the provider. This is write-only and will not be returned by the API.
Expand Down
81 changes: 80 additions & 1 deletion internal/provider/integration_resource.go
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ import (
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/booldefault"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
"github.com/hashicorp/terraform-plugin-framework/types"
Expand Down Expand Up @@ -44,6 +45,7 @@ type integrationResourceModel struct {
Configurations types.String `tfsdk:"configurations"`
Description types.String `tfsdk:"description"`
WorkspaceID types.String `tfsdk:"workspace_id"`
AllowAllModels types.Bool `tfsdk:"allow_all_models"`
Type types.String `tfsdk:"type"`
Status types.String `tfsdk:"status"`
CreatedAt types.String `tfsdk:"created_at"`
Expand Down Expand Up @@ -119,6 +121,12 @@ func (r *integrationResource) Schema(_ context.Context, _ resource.SchemaRequest
stringplanmodifier.RequiresReplace(),
},
},
"allow_all_models": schema.BoolAttribute{
Description: "Whether all models are enabled by default for this integration. When true (the default), all models for the provider are available. Set to false to restrict access to only models explicitly enabled via portkey_integration_model_access resources.",
Optional: true,
Computed: true,
Default: booldefault.StaticBool(true),
},
"type": schema.StringAttribute{
Description: "Type of integration: 'organisation' for org-level integrations or 'workspace' for workspace-scoped integrations.",
Computed: true,
Expand Down Expand Up @@ -263,7 +271,9 @@ func (r *integrationResource) Create(ctx context.Context, req resource.CreateReq
return
}

// Map response body to schema
// Map response body to schema and set partial state BEFORE the models call.
// This ensures that if UpdateIntegrationModels fails, Terraform still tracks
// the created integration and can reconcile on the next plan/apply.
plan.ID = types.StringValue(integration.ID)
plan.Slug = types.StringValue(integration.Slug)
plan.Status = types.StringValue(integration.Status)
Expand All @@ -289,6 +299,36 @@ func (r *integrationResource) Create(ctx context.Context, req resource.CreateReq
}
}

// Only call UpdateIntegrationModels when allow_all_models is false,
// since the API already defaults to true.
if !plan.AllowAllModels.ValueBool() {
allowAll := plan.AllowAllModels.ValueBool()
modelsReq := client.BulkUpdateModelsRequest{
AllowAllModels: &allowAll,
Models: []client.IntegrationModel{},
}
err = r.client.UpdateIntegrationModels(ctx, createResp.Slug, modelsReq)
if err != nil {
resp.Diagnostics.AddError(
"Error setting allow_all_models",
"Could not update allow_all_models for integration: "+err.Error(),
)
// State is set below so Terraform tracks the integration even on failure
}
}

// Read allow_all_models from the models endpoint to get the actual API value
modelsResp, err := r.client.GetIntegrationModels(ctx, integration.Slug)
if err != nil {
// If we can't read models, use the plan value so state is still saved
resp.Diagnostics.AddWarning(
"Error reading integration models after creation",
"Could not read integration models, using plan value: "+err.Error(),
)
} else {
plan.AllowAllModels = types.BoolValue(modelsResp.AllowAllModels)
}

// Set state to fully populated data
diags = resp.State.Set(ctx, plan)
resp.Diagnostics.Append(diags...)
Expand Down Expand Up @@ -352,6 +392,17 @@ func (r *integrationResource) Read(ctx context.Context, req resource.ReadRequest
}
}

// Read allow_all_models from the models endpoint
modelsResp, err := r.client.GetIntegrationModels(ctx, state.Slug.ValueString())
if err != nil {
resp.Diagnostics.AddError(
"Error reading integration models",
"Could not read integration models: "+err.Error(),
)
return
}
state.AllowAllModels = types.BoolValue(modelsResp.AllowAllModels)

// Set refreshed state
diags = resp.State.Set(ctx, &state)
resp.Diagnostics.Append(diags...)
Expand Down Expand Up @@ -464,6 +515,23 @@ func (r *integrationResource) Update(ctx context.Context, req resource.UpdateReq
return
}

// Update allow_all_models if it changed
if !plan.AllowAllModels.Equal(state.AllowAllModels) {
allowAll := plan.AllowAllModels.ValueBool()
modelsReq := client.BulkUpdateModelsRequest{
AllowAllModels: &allowAll,
Models: []client.IntegrationModel{},
}
err = r.client.UpdateIntegrationModels(ctx, state.Slug.ValueString(), modelsReq)
if err != nil {
resp.Diagnostics.AddError(
"Error updating allow_all_models",
"Could not update allow_all_models for integration: "+err.Error(),
)
return
}
}

// Update resource state with updated items and timestamp
plan.ID = types.StringValue(integration.ID)
plan.Slug = types.StringValue(integration.Slug)
Expand All @@ -481,6 +549,17 @@ func (r *integrationResource) Update(ctx context.Context, req resource.UpdateReq
// Preserve workspace_id from state (workspace_id is immutable, RequiresReplace)
plan.WorkspaceID = state.WorkspaceID

// Read allow_all_models from the models endpoint
modelsResp, err := r.client.GetIntegrationModels(ctx, integration.Slug)
if err != nil {
resp.Diagnostics.AddError(
"Error reading integration models after update",
"Could not read integration models: "+err.Error(),
)
return
}
plan.AllowAllModels = types.BoolValue(modelsResp.AllowAllModels)

diags = resp.State.Set(ctx, plan)
resp.Diagnostics.Append(diags...)
if resp.Diagnostics.HasError() {
Expand Down
66 changes: 66 additions & 0 deletions internal/provider/integration_resource_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@ func TestAccIntegrationResource_basic(t *testing.T) {
resource.TestCheckResourceAttr("portkey_integration.test", "ai_provider_id", "openai"),
resource.TestCheckResourceAttr("portkey_integration.test", "description", "Initial description"),
resource.TestCheckResourceAttr("portkey_integration.test", "status", "active"),
resource.TestCheckResourceAttr("portkey_integration.test", "allow_all_models", "true"),
resource.TestCheckResourceAttrSet("portkey_integration.test", "created_at"),
),
},
Expand Down Expand Up @@ -607,6 +608,71 @@ resource "portkey_integration" "test" {
`, name, key)
}

// TestAccIntegrationResource_allowAllModelsFalse verifies that an integration
// created with allow_all_models = false reports that value back in state,
// i.e. the restricted-models flag round-trips through the API on create.
func TestAccIntegrationResource_allowAllModelsFalse(t *testing.T) {
	name := acctest.RandomWithPrefix("tf-acc-no-models")

	// Assertions for the single create step: resource exists and the
	// restricted flag is persisted as "false".
	checks := resource.ComposeAggregateTestCheckFunc(
		resource.TestCheckResourceAttrSet("portkey_integration.test", "id"),
		resource.TestCheckResourceAttr("portkey_integration.test", "name", name),
		resource.TestCheckResourceAttr("portkey_integration.test", "allow_all_models", "false"),
	)

	resource.Test(t, resource.TestCase{
		PreCheck:                 func() { testAccPreCheck(t) },
		ProtoV6ProviderFactories: testAccProtoV6ProviderFactories,
		Steps: []resource.TestStep{
			// Create with allow_all_models = false.
			{
				Config: testAccIntegrationResourceConfigAllowAllModels(name, false),
				Check:  checks,
			},
		},
	})
}

// TestAccIntegrationResource_allowAllModelsUpdate toggles allow_all_models
// across three applies (true -> false -> true) and verifies each transition
// is written through to the API and reflected back in state.
func TestAccIntegrationResource_allowAllModelsUpdate(t *testing.T) {
	name := acctest.RandomWithPrefix("tf-acc-models-update")

	// Build one TestStep per desired flag value; each step re-applies the
	// same config with only allow_all_models changed and asserts the result.
	var steps []resource.TestStep
	for _, allowAll := range []bool{true, false, true} {
		steps = append(steps, resource.TestStep{
			Config: testAccIntegrationResourceConfigAllowAllModels(name, allowAll),
			Check: resource.ComposeAggregateTestCheckFunc(
				resource.TestCheckResourceAttr("portkey_integration.test", "allow_all_models", fmt.Sprintf("%t", allowAll)),
			),
		})
	}

	resource.Test(t, resource.TestCase{
		PreCheck:                 func() { testAccPreCheck(t) },
		ProtoV6ProviderFactories: testAccProtoV6ProviderFactories,
		Steps:                    steps,
	})
}

// testAccIntegrationResourceConfigAllowAllModels renders a minimal
// portkey_integration configuration with the given resource name and
// allow_all_models value. The provider key is a fake test credential.
func testAccIntegrationResourceConfigAllowAllModels(name string, allowAll bool) string {
	// Each verb is consumed exactly once, in order: %q quotes the name,
	// %t renders the boolean flag.
	return fmt.Sprintf(`
provider "portkey" {}

resource "portkey_integration" "test" {
name = %q
ai_provider_id = "openai"
key = "sk-test-fake-key-12345"
allow_all_models = %t
}
`, name, allowAll)
}

func TestAccIntegrationResource_workspaceScoped(t *testing.T) {
rName := acctest.RandomWithPrefix("tf-acc-test")
workspaceID := testAccGetEnvOrSkip(t, "TEST_WORKSPACE_ID")
Expand Down