Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 10 additions & 10 deletions .github/workflows/test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -102,7 +102,7 @@ jobs:
- name: Get dependencies
run: go mod download
- name: TF acceptance tests
timeout-minutes: 10
timeout-minutes: 15
env:
TF_ACC: "true"
VERCEL_API_TOKEN: ${{ secrets.VERCEL_API_TOKEN }}
Expand All @@ -116,7 +116,7 @@ jobs:
VERCEL_TERRAFORM_TESTING_CERTIFICATE: ${{ secrets.VERCEL_TERRAFORM_TESTING_CERTIFICATE }}
VERCEL_TERRAFORM_TESTING_CERTIFICATE_KEY: ${{ secrets.VERCEL_TERRAFORM_TESTING_CERTIFICATE_KEY }}
run: |
go test ./...
go test -v ./...

summary:
name: Summary
Expand All @@ -130,11 +130,11 @@ jobs:
steps:
- name: Success
run: |-
for status in ${{ join(needs.*.result, ' ') }}
do
if [ "$status" != "success" ] && [ "$status" != "skipped" ]
then
echo "Some checks failed"
exit 1
fi
done
for status in ${{ join(needs.*.result, ' ') }}
do
if [ "$status" != "success" ] && [ "$status" != "skipped" ]
then
echo "Some checks failed"
exit 1
fi
done
165 changes: 165 additions & 0 deletions client/drain.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,165 @@
package client

import (
	"context"
	"fmt"
	"net/http"

	"github.com/hashicorp/terraform-plugin-log/tflog"
)

// Drain represents a drain as returned by the Vercel API. Drains collect
// data (logs, traces, analytics, speed insights) from Vercel projects and
// deliver it to a configured endpoint.
type Drain struct {
	ID         string            `json:"id"`
	OwnerID    string            `json:"ownerId"`
	Name       string            `json:"name"`
	Projects   string            `json:"projects"` // "some" or "all"
	ProjectIds []string          `json:"projectIds"`
	Schemas    map[string]any    `json:"schemas"`
	Delivery   DeliveryConfig    `json:"delivery"`
	Sampling   []SamplingConfig  `json:"sampling,omitempty"`
	TeamID     string            `json:"teamId"`
	Status     string            `json:"status"`
	Filter     *string           `json:"filter,omitempty"`
	Transforms []TransformConfig `json:"transforms,omitempty"`
}

// OTLPDeliveryEndpoint is the endpoint shape used when a drain delivers
// data via OTLP (it carries a traces URL rather than a single string URL).
type OTLPDeliveryEndpoint struct {
	Traces string `json:"traces"`
}

// DeliveryConfig describes how a drain delivers its collected data
// (delivery type, target endpoint, encoding, and optional compression,
// custom headers, and signing secret).
type DeliveryConfig struct {
	Type        string            `json:"type"`
	Endpoint    any               `json:"endpoint"` // Can be string or object for different delivery types
	Encoding    string            `json:"encoding"`
	Compression *string           `json:"compression,omitempty"`
	Headers     map[string]string `json:"headers"`
	Secret      *string           `json:"secret,omitempty"`
}

// SamplingConfig defines a sampling rule applied to data flowing through a
// drain, optionally scoped to an environment and/or request path prefix.
type SamplingConfig struct {
	Type        string  `json:"type"`
	Rate        float64 `json:"rate"` // Must be between 0 and 1
	Env         *string `json:"env,omitempty"`
	RequestPath *string `json:"requestPath,omitempty"`
}

// TransformConfig references a transform applied to drain data by its ID.
type TransformConfig struct {
	ID string `json:"id"`
}

// SchemaConfig selects the schema version to use for a given data type
// when creating or updating a drain.
type SchemaConfig struct {
	Version string `json:"version"`
}

// CreateDrainRequest is the request payload for creating a drain.
// TeamID is used only to build the request URL and is not serialized.
type CreateDrainRequest struct {
	TeamID     string                  `json:"-"`
	Name       string                  `json:"name"`
	Projects   string                  `json:"projects"` // "some" or "all"
	ProjectIds []string                `json:"projectIds,omitempty"`
	Filter     *string                 `json:"filter,omitempty"`
	Schemas    map[string]SchemaConfig `json:"schemas"`
	Delivery   DeliveryConfig          `json:"delivery"`
	Sampling   []SamplingConfig        `json:"sampling,omitempty"`
	Transforms []TransformConfig       `json:"transforms,omitempty"`
}

// UpdateDrainRequest is the request payload for updating a drain. All
// serialized fields are optional/omitempty so only set fields are patched.
// TeamID is used only to build the request URL and is not serialized.
type UpdateDrainRequest struct {
	TeamID     string                  `json:"-"`
	Name       *string                 `json:"name,omitempty"`
	Projects   *string                 `json:"projects,omitempty"`
	ProjectIds []string                `json:"projectIds,omitempty"`
	Filter     *string                 `json:"filter,omitempty"`
	Schemas    map[string]SchemaConfig `json:"schemas,omitempty"`
	Delivery   *DeliveryConfig         `json:"delivery,omitempty"`
	Sampling   []SamplingConfig        `json:"sampling,omitempty"`
	Transforms []TransformConfig       `json:"transforms,omitempty"`
	Status     *string                 `json:"status,omitempty"` // "enabled" or "disabled"
}

// ListDrainsResponse is the response payload returned when listing drains.
type ListDrainsResponse struct {
	Drains []Drain `json:"drains"`
}

// CreateDrain creates a new drain via the Vercel API. The teamId query
// parameter is appended only when a team ID resolves to a non-empty value
// (either from the request or the client's default team).
func (c *Client) CreateDrain(ctx context.Context, request CreateDrainRequest) (d Drain, err error) {
	url := fmt.Sprintf("%s/v1/drains", c.baseURL)
	// Resolve the team ID once instead of calling c.TeamID twice.
	if teamID := c.TeamID(request.TeamID); teamID != "" {
		url = fmt.Sprintf("%s?teamId=%s", url, teamID)
	}
	payload := string(mustMarshal(request))
	tflog.Info(ctx, "creating drain", map[string]any{
		"url":     url,
		"payload": payload,
	})
	err = c.doRequest(clientRequest{
		ctx:    ctx,
		method: http.MethodPost,
		url:    url,
		body:   payload,
	}, &d)
	return d, err
}

// GetDrain retrieves an existing drain by its ID. The teamId query
// parameter is appended only when a team ID resolves to a non-empty value.
func (c *Client) GetDrain(ctx context.Context, id, teamID string) (d Drain, err error) {
	url := fmt.Sprintf("%s/v1/drains/%s", c.baseURL, id)
	// Resolve the team ID once instead of calling c.TeamID twice.
	if team := c.TeamID(teamID); team != "" {
		url = fmt.Sprintf("%s?teamId=%s", url, team)
	}
	tflog.Info(ctx, "reading drain", map[string]any{
		"url": url,
	})
	err = c.doRequest(clientRequest{
		ctx:    ctx,
		method: http.MethodGet,
		url:    url,
	}, &d)
	return d, err
}

// UpdateDrain patches an existing drain identified by id. Only the fields
// set on the request are serialized (omitempty), so unset fields are left
// unchanged server-side. The teamId query parameter is appended only when a
// team ID resolves to a non-empty value.
func (c *Client) UpdateDrain(ctx context.Context, id string, request UpdateDrainRequest) (d Drain, err error) {
	url := fmt.Sprintf("%s/v1/drains/%s", c.baseURL, id)
	// Resolve the team ID once instead of calling c.TeamID twice.
	if teamID := c.TeamID(request.TeamID); teamID != "" {
		url = fmt.Sprintf("%s?teamId=%s", url, teamID)
	}
	payload := string(mustMarshal(request))
	tflog.Info(ctx, "updating drain", map[string]any{
		"url":     url,
		"payload": payload,
	})
	err = c.doRequest(clientRequest{
		ctx:    ctx,
		method: http.MethodPatch,
		url:    url,
		body:   payload,
	}, &d)
	return d, err
}

// DeleteDrain removes an existing drain by its ID. The teamId query
// parameter is appended only when a team ID resolves to a non-empty value.
func (c *Client) DeleteDrain(ctx context.Context, id, teamID string) error {
	url := fmt.Sprintf("%s/v1/drains/%s", c.baseURL, id)
	// Resolve the team ID once instead of calling c.TeamID twice.
	if team := c.TeamID(teamID); team != "" {
		url = fmt.Sprintf("%s?teamId=%s", url, team)
	}
	tflog.Info(ctx, "deleting drain", map[string]any{
		"url": url,
	})
	return c.doRequest(clientRequest{
		ctx:    ctx,
		method: http.MethodDelete,
		url:    url,
	}, nil)
}

// ListDrains lists all drains visible to the client. The teamId query
// parameter is appended only when a team ID resolves to a non-empty value.
func (c *Client) ListDrains(ctx context.Context, teamID string) (response ListDrainsResponse, err error) {
	url := fmt.Sprintf("%s/v1/drains", c.baseURL)
	// Resolve the team ID once instead of calling c.TeamID twice.
	if team := c.TeamID(teamID); team != "" {
		url = fmt.Sprintf("%s?teamId=%s", url, team)
	}
	tflog.Info(ctx, "listing drains", map[string]any{
		"url": url,
	})
	err = c.doRequest(clientRequest{
		ctx:    ctx,
		method: http.MethodGet,
		url:    url,
	}, &response)
	return response, err
}
97 changes: 97 additions & 0 deletions docs/data-sources/drain.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,97 @@
---
# generated by https://github.com/hashicorp/terraform-plugin-docs
page_title: "vercel_drain Data Source - terraform-provider-vercel"
subcategory: ""
description: |-
Provides information about an existing Drain.
Drains collect various types of data including logs, traces, analytics, and speed insights from your Vercel projects.
This is a more generic version of log drains that supports multiple data types and delivery methods.
Teams on Pro and Enterprise plans can create configurable drains from the Vercel dashboard.
---

# vercel_drain (Data Source)

Provides information about an existing Drain.

Drains collect various types of data including logs, traces, analytics, and speed insights from your Vercel projects.
This is a more generic version of log drains that supports multiple data types and delivery methods.

Teams on Pro and Enterprise plans can create configurable drains from the Vercel dashboard.

## Example Usage

```terraform
data "vercel_drain" "example" {
id = "drn_xxxxxxxxxxxxxxxxxxxxxxxx"
}
```

<!-- schema generated by tfplugindocs -->
## Schema

### Required

- `id` (String) The ID of the Drain.

### Optional

- `team_id` (String) The ID of the team the Drain should exist under. Required when configuring a team resource if a default team has not been set in the provider.

### Read-Only

- `delivery` (Attributes) Configuration for how data should be delivered. (see [below for nested schema](#nestedatt--delivery))
- `filter` (String) A filter expression applied to incoming data.
- `name` (String) The name of the Drain.
- `project_ids` (Set of String) A list of project IDs that the drain should be associated with. Only valid when `projects` is set to `some`.
- `projects` (String) Whether to include all projects or a specific set. Valid values are `all` or `some`.
- `sampling` (Attributes Set) Sampling configuration for the drain. (see [below for nested schema](#nestedatt--sampling))
- `schemas` (Map of Object) A map of schema configurations. Keys can be `log`, `trace`, `analytics`, or `speed_insights`. (see [below for nested schema](#nestedatt--schemas))
- `status` (String) The status of the drain.
- `transforms` (Attributes Set) Transform configurations for the drain. (see [below for nested schema](#nestedatt--transforms))

<a id="nestedatt--delivery"></a>
### Nested Schema for `delivery`

Read-Only:

- `compression` (String) The compression method. Valid values are `gzip` or `none`. Only applicable for HTTP delivery.
- `encoding` (String) The encoding format. Valid values are `json`, `ndjson` (for HTTP) or `proto` (for OTLP).
- `endpoint` (Attributes) Endpoint configuration. Contains `url` for HTTP or `traces` for OTLP. (see [below for nested schema](#nestedatt--delivery--endpoint))
- `headers` (Map of String) Custom headers to include in HTTP requests.
- `type` (String) The delivery type. Valid values are `http` or `otlphttp`.

<a id="nestedatt--delivery--endpoint"></a>
### Nested Schema for `delivery.endpoint`

Read-Only:

- `traces` (String) The traces endpoint URL for OTLP delivery type.
- `url` (String) The endpoint URL for HTTP delivery type.



<a id="nestedatt--sampling"></a>
### Nested Schema for `sampling`

Read-Only:

- `environment` (String) The environment to apply sampling to. Valid values are `production` or `preview`.
- `rate` (Number) The sampling rate from 0 to 1 (e.g., 0.1 for 10%).
- `request_path` (String) Request path prefix to apply the sampling rule to.
- `type` (String) The sampling type. Only `head_sampling` is supported.


<a id="nestedatt--schemas"></a>
### Nested Schema for `schemas`

Read-Only:

- `version` (String) The schema version to use for this data type.


<a id="nestedatt--transforms"></a>
### Nested Schema for `transforms`

Read-Only:

- `id` (String) The transform ID.
Loading
Loading