package gcp

import (
	"context"
	"strings"

	"github.com/turbot/go-kit/types"
	"github.com/turbot/steampipe-plugin-sdk/v5/grpc/proto"
	"github.com/turbot/steampipe-plugin-sdk/v5/plugin"
	"github.com/turbot/steampipe-plugin-sdk/v5/plugin/transform"

	"google.golang.org/api/bigquery/v2"
)

//// TABLE DEFINITION
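// Illustrative usage examples for this table (the dataset name below is a
// hypothetical placeholder, not a value taken from this file):
//
//	select name, dataset_id, location from gcp_bigquery_dataset;
//	select access, labels from gcp_bigquery_dataset where dataset_id = 'my_dataset';
//
// The second form goes through the Get config below, which hydrates a single
// dataset by its dataset_id key column.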
func tableGcpBigQueryDataset(ctx context.Context) *plugin.Table {
	return &plugin.Table{
		Name:        "gcp_bigquery_dataset",
		Description: "GCP BigQuery Dataset",
		Get: &plugin.GetConfig{
			KeyColumns: plugin.SingleColumn("dataset_id"),
			Hydrate:    getBigQueryDataset,
		},
		List: &plugin.ListConfig{
			Hydrate: listBigQueryDatasets,
		},
		Columns: []*plugin.Column{
			{
				Name:        "name",
				Description: "A descriptive name for the dataset, if one exists.",
				Type:        proto.ColumnType_STRING,
				Transform:   transform.FromField("FriendlyName"),
			},
			{
				Name:        "dataset_id",
				Description: "The ID of the dataset resource.",
				Type:        proto.ColumnType_STRING,
				Transform:   transform.FromField("DatasetReference.DatasetId"),
			},
			{
				Name:        "id",
				Description: "The fully-qualified, unique, opaque ID of the dataset.",
				Type:        proto.ColumnType_STRING,
			},
			{
				Name:        "kind",
				Description: "The type of the resource. This property always returns the value 'bigquery#dataset'.",
				Type:        proto.ColumnType_STRING,
			},
			{
				Name:        "creation_time",
				Description: "The time when this dataset was created.",
				Type:        proto.ColumnType_TIMESTAMP,
				Hydrate:     getBigQueryDataset,
				Transform:   transform.FromField("CreationTime").Transform(transform.UnixMsToTimestamp),
			},
			{
				Name:        "description",
				Description: "A user-friendly description of the dataset.",
				Type:        proto.ColumnType_STRING,
				Hydrate:     getBigQueryDataset,
			},
			{
				Name:        "etag",
				Description: "A hash of the resource.",
				Type:        proto.ColumnType_STRING,
				Hydrate:     getBigQueryDataset,
			},
			{
				Name:        "default_partition_expiration_ms",
				Description: "The default partition expiration for all partitioned tables in the dataset, in milliseconds.",
				Type:        proto.ColumnType_INT,
				Hydrate:     getBigQueryDataset,
			},
			{
				Name:        "default_table_expiration_ms",
				Description: "The default lifetime of all tables in the dataset, in milliseconds.",
				Type:        proto.ColumnType_INT,
				Hydrate:     getBigQueryDataset,
			},
			{
				Name:        "kms_key_name",
				Description: "Describes the Cloud KMS encryption key that will be used to protect the destination BigQuery table.",
				Type:        proto.ColumnType_STRING,
				Hydrate:     getBigQueryDataset,
				Transform:   transform.FromField("DefaultEncryptionConfiguration.KmsKeyName"),
			},
			{
				Name:        "last_modified_time",
				Description: "The date when this dataset or any of its tables was last modified.",
				Type:        proto.ColumnType_TIMESTAMP,
				Hydrate:     getBigQueryDataset,
				Transform:   transform.FromField("LastModifiedTime").Transform(transform.UnixMsToTimestamp),
			},
			{
				Name:        "self_link",
				Description: "A URL that can be used to access the resource again.",
				Type:        proto.ColumnType_STRING,
				Hydrate:     getBigQueryDataset,
			},
			{
				Name:        "access",
				Description: "An array of objects that define dataset access for one or more entities.",
				Type:        proto.ColumnType_JSON,
				Hydrate:     getBigQueryDataset,
			},
			{
				Name:        "labels",
				Description: "A set of labels associated with this dataset.",
				Type:        proto.ColumnType_JSON,
			},

			// Standard steampipe columns
			{
				Name:        "title",
				Description: ColumnDescriptionTitle,
				Type:        proto.ColumnType_STRING,
				Transform:   transform.From(bigQueryDatasetTitle),
			},
			{
				Name:        "tags",
				Description: ColumnDescriptionTags,
				Type:        proto.ColumnType_JSON,
				Transform:   transform.FromField("Labels"),
			},
			{
				Name:        "akas",
				Description: ColumnDescriptionAkas,
				Type:        proto.ColumnType_JSON,
				Transform:   transform.From(bigQueryDatasetAka),
			},

			// Standard GCP columns
			{
				Name:        "location",
				Description: ColumnDescriptionLocation,
				Type:        proto.ColumnType_STRING,
			},
			{
				Name:        "project",
				Description: ColumnDescriptionProject,
				Type:        proto.ColumnType_STRING,
				Transform:   transform.FromField("DatasetReference.ProjectId"),
			},
		},
	}
}

//// LIST FUNCTION
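// A note on the pagination mechanics below (an observation about the
// google-api-go-client Pages helper, not a comment from the original file):
// resp.Pages invokes the callback once per result page until NextPageToken is
// empty, so clearing the token inside the callback is what lets the function
// stop fetching further pages once the row limit has been reached.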
func listBigQueryDatasets(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) {
	plugin.Logger(ctx).Trace("listBigQueryDatasets")

	// Create service connection
	service, err := BigQueryService(ctx, d)
	if err != nil {
		return nil, err
	}

	// The API documentation does not specify a maximum page size, so default
	// to 1000 results per page. If the query has a smaller limit, request
	// only that many rows.
	pageSize := types.Int64(1000)
	limit := d.QueryContext.Limit
	if limit != nil && *limit < *pageSize {
		pageSize = limit
	}

	// Get project details
	getProjectCached := plugin.HydrateFunc(getProject).WithCache()
	projectId, err := getProjectCached(ctx, d, h)
	if err != nil {
		return nil, err
	}
	project := projectId.(string)

	resp := service.Datasets.List(project).MaxResults(*pageSize)
	if err := resp.Pages(ctx, func(page *bigquery.DatasetList) error {
		for _, dataset := range page.Datasets {
			d.StreamListItem(ctx, dataset)

			// Stop streaming if the context has been cancelled or the query
			// limit (if any) has been reached; RowsRemaining returns 0 in
			// either case. Clearing the page token ends pagination.
			if d.RowsRemaining(ctx) == 0 {
				page.NextPageToken = ""
				return nil
			}
		}
		return nil
	}); err != nil {
		return nil, err
	}

	return nil, nil
}

//// HYDRATE FUNCTIONS
func getBigQueryDataset(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) {
	// Create service connection
	service, err := BigQueryService(ctx, d)
	if err != nil {
		return nil, err
	}

	// Get project details
	getProjectCached := plugin.HydrateFunc(getProject).WithCache()
	projectId, err := getProjectCached(ctx, d, h)
	if err != nil {
		return nil, err
	}
	project := projectId.(string)

	var id string
	if h.Item != nil {
		// When called as a column hydrate function, the list item carries a
		// fully-qualified ID of the form "projectId:datasetId"; keep only the
		// dataset part.
		data := datasetID(h.Item)
		id = strings.Split(data, ":")[1]
	} else {
		id = d.EqualsQuals["dataset_id"].GetStringValue()
	}

	// Return nil if the id is empty
	if id == "" {
		return nil, nil
	}

	resp, err := service.Datasets.Get(project, id).Do()
	if err != nil {
		return nil, err
	}
	return resp, nil
}
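// A note on caching (an observation, not a comment from the original file):
// plugin.HydrateFunc(getProject).WithCache(), used in both functions above,
// wraps the project lookup in the SDK's hydrate cache, so the project ID is
// resolved once per connection rather than once per row.
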
//// TRANSFORM FUNCTIONS
func bigQueryDatasetAka(ctx context.Context, h *transform.TransformData) (interface{}, error) {
	data := datasetID(h.HydrateItem)
	parts := strings.Split(data, ":")
	projectID := parts[0]
	id := parts[1]
	akas := []string{"gcp://bigquery.googleapis.com/projects/" + projectID + "/datasets/" + id}
	return akas, nil
}
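// For example, given the hypothetical fully-qualified ID
// "my-project:my_dataset", bigQueryDatasetAka above would produce:
//
//	gcp://bigquery.googleapis.com/projects/my-project/datasets/my_dataset
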
func bigQueryDatasetTitle(ctx context.Context, h *transform.TransformData) (interface{}, error) {
	data := datasetID(h.HydrateItem)
	name := datasetName(h.HydrateItem)

	// Prefer the friendly name; fall back to the dataset part of the ID.
	if len(name) > 0 {
		return name, nil
	}
	return strings.Split(data, ":")[1], nil
}
// datasetID returns the fully-qualified dataset ID ("projectId:datasetId")
// from either a list item or a get item.
func datasetID(item interface{}) string {
	switch item := item.(type) {
	case *bigquery.DatasetListDatasets:
		return item.Id
	case *bigquery.Dataset:
		return item.Id
	}
	return ""
}

// datasetName returns the friendly name from either a list item or a get item.
func datasetName(item interface{}) string {
	switch item := item.(type) {
	case *bigquery.DatasetListDatasets:
		return item.FriendlyName
	case *bigquery.Dataset:
		return item.FriendlyName
	}
	return ""
}