
Commit 87874c5

Add Vercel Drain resource
1 parent ef1e07e commit 87874c5

File tree

10 files changed: +2239 additions, −0 deletions

client/drain.go

Lines changed: 165 additions & 0 deletions
```go
package client

import (
	"context"
	"fmt"

	"github.com/hashicorp/terraform-plugin-log/tflog"
)

type Drain struct {
	ID         string            `json:"id"`
	OwnerID    string            `json:"ownerId"`
	Name       string            `json:"name"`
	Projects   string            `json:"projects"` // "some" or "all"
	ProjectIds []string          `json:"projectIds"`
	Schemas    map[string]any    `json:"schemas"`
	Delivery   DeliveryConfig    `json:"delivery"`
	Sampling   []SamplingConfig  `json:"sampling,omitempty"`
	TeamID     string            `json:"teamId"`
	Status     string            `json:"status"`
	Filter     *string           `json:"filter,omitempty"`
	Transforms []TransformConfig `json:"transforms,omitempty"`
}

type OTLPDeliveryEndpoint struct {
	Traces string `json:"traces"`
}

type DeliveryConfig struct {
	Type        string            `json:"type"`
	Endpoint    any               `json:"endpoint"` // Can be string or object for different delivery types
	Encoding    string            `json:"encoding"`
	Compression *string           `json:"compression,omitempty"`
	Headers     map[string]string `json:"headers"`
	Secret      *string           `json:"secret,omitempty"`
}

type SamplingConfig struct {
	Type        string  `json:"type"`
	Rate        float64 `json:"rate"` // Must be between 0 and 1
	Env         *string `json:"env,omitempty"`
	RequestPath *string `json:"requestPath,omitempty"`
}

type TransformConfig struct {
	ID string `json:"id"`
}

type SchemaConfig struct {
	Version string `json:"version"`
}

type CreateDrainRequest struct {
	TeamID     string                  `json:"-"`
	Name       string                  `json:"name"`
	Projects   string                  `json:"projects"` // "some" or "all"
	ProjectIds []string                `json:"projectIds,omitempty"`
	Filter     *string                 `json:"filter,omitempty"`
	Schemas    map[string]SchemaConfig `json:"schemas"`
	Delivery   DeliveryConfig          `json:"delivery"`
	Sampling   []SamplingConfig        `json:"sampling,omitempty"`
	Transforms []TransformConfig       `json:"transforms,omitempty"`
}

type UpdateDrainRequest struct {
	TeamID     string                  `json:"-"`
	Name       *string                 `json:"name,omitempty"`
	Projects   *string                 `json:"projects,omitempty"`
	ProjectIds []string                `json:"projectIds,omitempty"`
	Filter     *string                 `json:"filter,omitempty"`
	Schemas    map[string]SchemaConfig `json:"schemas,omitempty"`
	Delivery   *DeliveryConfig         `json:"delivery,omitempty"`
	Sampling   []SamplingConfig        `json:"sampling,omitempty"`
	Transforms []TransformConfig       `json:"transforms,omitempty"`
	Status     *string                 `json:"status,omitempty"` // "enabled" or "disabled"
}

type ListDrainsResponse struct {
	Drains []Drain `json:"drains"`
}

func (c *Client) CreateDrain(ctx context.Context, request CreateDrainRequest) (d Drain, err error) {
	url := fmt.Sprintf("%s/v1/drains", c.baseURL)
	if c.TeamID(request.TeamID) != "" {
		url = fmt.Sprintf("%s?teamId=%s", url, c.TeamID(request.TeamID))
	}
	payload := string(mustMarshal(request))
	tflog.Info(ctx, "creating drain", map[string]any{
		"url":     url,
		"payload": payload,
	})
	err = c.doRequest(clientRequest{
		ctx:    ctx,
		method: "POST",
		url:    url,
		body:   payload,
	}, &d)
	return d, err
}

func (c *Client) GetDrain(ctx context.Context, id, teamID string) (d Drain, err error) {
	url := fmt.Sprintf("%s/v1/drains/%s", c.baseURL, id)
	if c.TeamID(teamID) != "" {
		url = fmt.Sprintf("%s?teamId=%s", url, c.TeamID(teamID))
	}
	tflog.Info(ctx, "reading drain", map[string]any{
		"url": url,
	})
	err = c.doRequest(clientRequest{
		ctx:    ctx,
		method: "GET",
		url:    url,
	}, &d)
	return d, err
}

func (c *Client) UpdateDrain(ctx context.Context, id string, request UpdateDrainRequest) (d Drain, err error) {
	url := fmt.Sprintf("%s/v1/drains/%s", c.baseURL, id)
	if c.TeamID(request.TeamID) != "" {
		url = fmt.Sprintf("%s?teamId=%s", url, c.TeamID(request.TeamID))
	}
	payload := string(mustMarshal(request))
	tflog.Info(ctx, "updating drain", map[string]any{
		"url":     url,
		"payload": payload,
	})
	err = c.doRequest(clientRequest{
		ctx:    ctx,
		method: "PATCH",
		url:    url,
		body:   payload,
	}, &d)
	return d, err
}

func (c *Client) DeleteDrain(ctx context.Context, id, teamID string) error {
	url := fmt.Sprintf("%s/v1/drains/%s", c.baseURL, id)
	if c.TeamID(teamID) != "" {
		url = fmt.Sprintf("%s?teamId=%s", url, c.TeamID(teamID))
	}
	tflog.Info(ctx, "deleting drain", map[string]any{
		"url": url,
	})
	return c.doRequest(clientRequest{
		ctx:    ctx,
		method: "DELETE",
		url:    url,
	}, nil)
}

func (c *Client) ListDrains(ctx context.Context, teamID string) (response ListDrainsResponse, err error) {
	url := fmt.Sprintf("%s/v1/drains", c.baseURL)
	if c.TeamID(teamID) != "" {
		url = fmt.Sprintf("%s?teamId=%s", url, c.TeamID(teamID))
	}
	tflog.Info(ctx, "listing drains", map[string]any{
		"url": url,
	})
	err = c.doRequest(clientRequest{
		ctx:    ctx,
		method: "GET",
		url:    url,
	}, &response)
	return response, err
}
```
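
For context on how this new client surface is meant to be consumed, here is a minimal, hypothetical sketch of a caller. The import path, the package name, the `*client.Client` value, and every field value are placeholders for illustration; they are not part of this commit.

```go
package example

import (
	"context"

	// NOTE: assumed import path; adjust to the provider module's actual path.
	"github.com/vercel/terraform-provider-vercel/client"
)

// createExampleDrain sketches a call to the new CreateDrain method with an
// HTTP delivery configuration. All identifiers and values are placeholders.
func createExampleDrain(ctx context.Context, c *client.Client) (client.Drain, error) {
	secret := "example-signing-secret"
	return c.CreateDrain(ctx, client.CreateDrainRequest{
		TeamID:   "team_xxxxxxxxxxxxxxxxxxxxxxxx",
		Name:     "my-drain",
		Projects: "all",
		Schemas: map[string]client.SchemaConfig{
			"log": {Version: "v1"},
		},
		Delivery: client.DeliveryConfig{
			Type:     "http",
			Endpoint: "https://example.com/ingest", // a plain URL string for the "http" delivery type
			Encoding: "json",
			Headers:  map[string]string{"x-source": "vercel"},
			Secret:   &secret,
		},
	})
}
```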

docs/data-sources/drain.md

Lines changed: 97 additions & 0 deletions
---
# generated by https://github.com/hashicorp/terraform-plugin-docs
page_title: "vercel_drain Data Source - terraform-provider-vercel"
subcategory: ""
description: |-
  Provides information about an existing Drain.
  Drains collect various types of data including logs, traces, analytics, and speed insights from your Vercel projects.
  This is a more generic version of log drains that supports multiple data types and delivery methods.
  Teams on Pro and Enterprise plans can create configurable drains from the Vercel dashboard.
---

# vercel_drain (Data Source)

Provides information about an existing Drain.

Drains collect various types of data including logs, traces, analytics, and speed insights from your Vercel projects.
This is a more generic version of log drains that supports multiple data types and delivery methods.

Teams on Pro and Enterprise plans can create configurable drains from the Vercel dashboard.

## Example Usage

```terraform
data "vercel_drain" "example" {
  id = "drn_xxxxxxxxxxxxxxxxxxxxxxxx"
}
```

<!-- schema generated by tfplugindocs -->
## Schema

### Required

- `id` (String) The ID of the Drain.

### Optional

- `team_id` (String) The ID of the team the Drain should exist under. Required when configuring a team resource if a default team has not been set in the provider.

### Read-Only

- `delivery` (Attributes) Configuration for how data should be delivered. (see [below for nested schema](#nestedatt--delivery))
- `filter` (String) A filter expression applied to incoming data.
- `name` (String) The name of the Drain.
- `project_ids` (Set of String) A list of project IDs that the drain should be associated with. Only valid when `projects` is set to `some`.
- `projects` (String) Whether to include all projects or a specific set. Valid values are `all` or `some`.
- `sampling` (Attributes Set) Sampling configuration for the drain. (see [below for nested schema](#nestedatt--sampling))
- `schemas` (Map of Object) A map of schema configurations. Keys can be `log`, `trace`, `analytics`, or `speed_insights`. (see [below for nested schema](#nestedatt--schemas))
- `status` (String) The status of the drain.
- `transforms` (Attributes Set) Transform configurations for the drain. (see [below for nested schema](#nestedatt--transforms))

<a id="nestedatt--delivery"></a>
### Nested Schema for `delivery`

Read-Only:

- `compression` (String) The compression method. Valid values are `gzip` or `none`. Only applicable for HTTP delivery.
- `encoding` (String) The encoding format. Valid values are `json`, `ndjson` (for HTTP) or `proto` (for OTLP).
- `endpoint` (Attributes) Endpoint configuration. Contains `url` for HTTP or `traces` for OTLP. (see [below for nested schema](#nestedatt--delivery--endpoint))
- `headers` (Map of String) Custom headers to include in HTTP requests.
- `type` (String) The delivery type. Valid values are `http` or `otlphttp`.

<a id="nestedatt--delivery--endpoint"></a>
### Nested Schema for `delivery.endpoint`

Read-Only:

- `traces` (String) The traces endpoint URL for OTLP delivery type.
- `url` (String) The endpoint URL for HTTP delivery type.

<a id="nestedatt--sampling"></a>
### Nested Schema for `sampling`

Read-Only:

- `environment` (String) The environment to apply sampling to. Valid values are `production` or `preview`.
- `rate` (Number) The sampling rate from 0 to 1 (e.g., 0.1 for 10%).
- `request_path` (String) Request path prefix to apply the sampling rule to.
- `type` (String) The sampling type. Only `head_sampling` is supported.

<a id="nestedatt--schemas"></a>
### Nested Schema for `schemas`

Read-Only:

- `version` (String)

<a id="nestedatt--transforms"></a>
### Nested Schema for `transforms`

Read-Only:

- `id` (String) The transform ID.