| author | Zach Koopmans <zkoopmans@google.com> | 2020-03-30 13:04:44 -0700 |
| --- | --- | --- |
| committer | gVisor bot <gvisor-bot@google.com> | 2020-03-30 13:05:45 -0700 |
| commit | e36eccc4b18676e2cb441380d0e4e46f038f638e (patch) | |
| tree | 98851303da36213210318fe583b7d8b4091479be | |
| parent | 3fac85da951f9f56d0232718ea7584250cf11f31 (diff) | |
BigQuery schema for benchmark-tools dashboard.
PiperOrigin-RevId: 303805784
-rw-r--r-- | WORKSPACE | 43
-rw-r--r-- | tools/bigquery/BUILD | 10
-rw-r--r-- | tools/bigquery/bigquery.go | 121
-rw-r--r-- | tools/nogo.json | 4
4 files changed, 177 insertions, 1 deletion
@@ -386,6 +386,49 @@ go_repository(
     version = "v1.0.0",
 )
 
+go_repository(
+    name = "com_google_cloud_go_bigquery",
+    importpath = "cloud.google.com/go/bigquery",
+    sum = "h1:K2NyuHRuv15ku6eUpe0DQk5ZykPMnSOnvuVf6IHcjaE=",
+    version = "v1.5.0",
+)
+
+go_repository(
+    name = "org_golang_google_api",
+    importpath = "google.golang.org/api",
+    sum = "h1:jz2KixHX7EcCPiQrySzPdnYT7DbINAypCqKZ1Z7GM40=",
+    version = "v0.20.0",
+)
+
+# BigQuery Dependencies for Benchmarks
+go_repository(
+    name = "com_google_cloud_go",
+    importpath = "cloud.google.com/go",
+    sum = "h1:eoz/lYxKSL4CNAiaUJ0ZfD1J3bfMYbU5B3rwM1C1EIU=",
+    version = "v0.55.0",
+)
+
+go_repository(
+    name = "com_github_googleapis_gax_go_v2",
+    importpath = "github.com/googleapis/gax-go/v2",
+    sum = "h1:sjZBwGj9Jlw33ImPtvFviGYvseOtDM7hkSKB7+Tv3SM=",
+    version = "v2.0.5",
+)
+
+go_repository(
+    name = "io_opencensus_go",
+    importpath = "go.opencensus.io",
+    sum = "h1:8sGtKOrtQqkN1bp2AtX+misvLIlOmsEsNd+9NIcPEm8=",
+    version = "v0.22.3",
+)
+
+go_repository(
+    name = "com_github_golang_groupcache",
+    importpath = "github.com/golang/groupcache",
+    sum = "h1:1r7pUrabqp18hOBcwBwiTsbnFeTZHV9eER/QT5JVZxY=",
+    version = "v0.0.0-20200121045136-8c9f03a8e57e",
+)
+
 # System Call test dependencies.
 http_archive(
     name = "com_google_absl",
diff --git a/tools/bigquery/BUILD b/tools/bigquery/BUILD
new file mode 100644
index 000000000..5748fb390
--- /dev/null
+++ b/tools/bigquery/BUILD
@@ -0,0 +1,10 @@
+load("//tools:defs.bzl", "go_library")
+
+package(licenses = ["notice"])
+
+go_library(
+    name = "bigquery",
+    testonly = 1,
+    srcs = ["bigquery.go"],
+    deps = ["@com_google_cloud_go_bigquery//:go_default_library"],
+)
diff --git a/tools/bigquery/bigquery.go b/tools/bigquery/bigquery.go
new file mode 100644
index 000000000..56f0dc5c9
--- /dev/null
+++ b/tools/bigquery/bigquery.go
@@ -0,0 +1,121 @@
+// Copyright 2020 The gVisor Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package bigquery defines a BigQuery schema for benchmarks.
+//
+// This package contains a schema for BigQuery and methods for publishing
+// benchmark data into tables.
+package bigquery
+
+import (
+    "context"
+    "fmt"
+    "strings"
+    "time"
+
+    bq "cloud.google.com/go/bigquery"
+)
+
+// Benchmark is the top level structure of recorded benchmark data. BigQuery
+// will infer the schema from this.
+type Benchmark struct {
+    Name      string    `bq:"name"`
+    Timestamp time.Time `bq:"timestamp"`
+    Official  bool      `bq:"official"`
+    Metric    []*Metric `bq:"metric"`
+    Metadata  *Metadata `bq:"metadata"`
+}
+
+// Metric holds the actual metric data and unit information for this benchmark.
+type Metric struct {
+    Name   string  `bq:"name"`
+    Unit   string  `bq:"unit"`
+    Sample float64 `bq:"sample"`
+}
+
+// Metadata about this benchmark.
+type Metadata struct {
+    CL          string `bq:"changelist"`
+    IterationID string `bq:"iteration_id"`
+    PendingCL   string `bq:"pending_cl"`
+    Workflow    string `bq:"workflow"`
+    Platform    string `bq:"platform"`
+    Gofer       string `bq:"gofer"`
+}
+
+// InitBigQuery initializes a BigQuery dataset/table in the project. If the
+// dataset/table already exists, it is not duplicated.
+func InitBigQuery(ctx context.Context, projectID, datasetID, tableID string) error {
+    client, err := bq.NewClient(ctx, projectID)
+    if err != nil {
+        return fmt.Errorf("failed to initialize client on project %s: %v", projectID, err)
+    }
+    defer client.Close()
+
+    dataset := client.Dataset(datasetID)
+    if err := dataset.Create(ctx, nil); err != nil && !checkDuplicateError(err) {
+        return fmt.Errorf("failed to create dataset: %s: %v", datasetID, err)
+    }
+
+    table := dataset.Table(tableID)
+    schema, err := bq.InferSchema(Benchmark{})
+    if err != nil {
+        return fmt.Errorf("failed to infer schema: %v", err)
+    }
+
+    if err := table.Create(ctx, &bq.TableMetadata{Schema: schema}); err != nil && !checkDuplicateError(err) {
+        return fmt.Errorf("failed to create table: %s: %v", tableID, err)
+    }
+    return nil
+}
+
+// AddMetric adds a metric to an existing Benchmark.
+func (bm *Benchmark) AddMetric(metricName, unit string, sample float64) {
+    m := &Metric{
+        Name:   metricName,
+        Unit:   unit,
+        Sample: sample,
+    }
+    bm.Metric = append(bm.Metric, m)
+}
+
+// NewBenchmark initializes a new benchmark.
+func NewBenchmark(name string, official bool) *Benchmark {
+    return &Benchmark{
+        Name:      name,
+        Timestamp: time.Now().UTC(),
+        Official:  official,
+        Metric:    make([]*Metric, 0),
+    }
+}
+
+// SendBenchmarks sends the slice of benchmarks to the BigQuery dataset/table.
+func SendBenchmarks(ctx context.Context, benchmarks []*Benchmark, projectID, datasetID, tableID string) error {
+    client, err := bq.NewClient(ctx, projectID)
+    if err != nil {
+        return fmt.Errorf("failed to initialize client on project %s: %v", projectID, err)
+    }
+    defer client.Close()
+
+    uploader := client.Dataset(datasetID).Table(tableID).Uploader()
+    if err = uploader.Put(ctx, benchmarks); err != nil {
+        return fmt.Errorf("failed to upload benchmarks to project %s, table %s.%s: %v", projectID, datasetID, tableID, err)
+    }
+
+    return nil
+}
+
+// checkDuplicateError returns true if err is BigQuery's "409: Already Exists"
+// error, which is returned for duplicate tables and datasets.
+func checkDuplicateError(err error) bool {
+    return strings.Contains(err.Error(), "googleapi: Error 409: Already Exists")
+}
diff --git a/tools/nogo.json b/tools/nogo.json
index 2b4c6d3b6..83cb76b93 100644
--- a/tools/nogo.json
+++ b/tools/nogo.json
@@ -35,7 +35,9 @@
             "/com_github_vishvananda_netlink/route_linux.go": "allowed: false positive",
             "/external/bazel_gazelle/cmd/gazelle/.*": "allowed: false positive",
             "/org_golang_x_tools/go/packages/golist.go": "allowed: runtime internals",
-            "/pkg/sentry/platform/kvm/kvm_test.go": "allowed: intentional"
+            "/pkg/sentry/platform/kvm/kvm_test.go": "allowed: intentional",
+            "/tools/bigquery/bigquery.go": "allowed: false positive",
+            "/external/io_opencensus_go/tag/map_codec.go": "allowed: false positive"
         }
    },
    "printf": {
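For reference, a minimal sketch of how a benchmark runner could publish results with the new `tools/bigquery` package. It only calls the functions added in this change (`InitBigQuery`, `NewBenchmark`, `AddMetric`, `SendBenchmarks`); the import path, project/dataset/table names, and metric values are placeholders and not part of this commit.

```go
package main

import (
    "context"
    "log"

    // Assumed import path for the new package; adjust to the actual module layout.
    "gvisor.dev/gvisor/tools/bigquery"
)

func main() {
    ctx := context.Background()

    // Placeholder identifiers for the dashboard's BigQuery destination.
    const (
        project = "my-gcp-project"
        dataset = "benchmark_dataset"
        table   = "benchmark_table"
    )

    // Create the dataset and table if they do not already exist.
    if err := bigquery.InitBigQuery(ctx, project, dataset, table); err != nil {
        log.Fatalf("InitBigQuery: %v", err)
    }

    // Record a single benchmark run with two sample metrics.
    b := bigquery.NewBenchmark("iperf_upload", true /* official */)
    b.AddMetric("bandwidth", "bytes_per_second", 1.32e9)
    b.AddMetric("requests_per_second", "QPS", 2500)

    // Upload the run; the table schema is inferred from the Benchmark struct.
    if err := bigquery.SendBenchmarks(ctx, []*bigquery.Benchmark{b}, project, dataset, table); err != nil {
        log.Fatalf("SendBenchmarks: %v", err)
    }
}
```

Because `InitBigQuery` derives the table layout with `bq.InferSchema(Benchmark{})`, extending the schema for future runs amounts to adding a field to `Benchmark` or `Metadata`.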