Implement the telemetry upload command in cobra

Shreyas Goenka 2025-03-05 13:43:46 +01:00
parent 217d4e7be2
commit bd35ac956e
8 changed files with 79 additions and 41 deletions


@@ -1,4 +1,7 @@
-Attempt 0 failed due to a server side error. Retrying status code: 501
-Attempt 1 failed due to a server side error. Retrying status code: 501
-Attempt 2 failed due to a server side error. Retrying status code: 501
+Warn: Attempt 0 failed due to a server side error. Retrying status code: 501
+Warn: Attempt 1 failed due to a server side error. Retrying status code: 501
+Warn: Attempt 2 failed due to a server side error. Retrying status code: 501
 error: upload did not succeed after three attempts


@@ -1,4 +1,7 @@
-Attempt 0 was a partial success. Number of logs uploaded: 1 out of 2
-Attempt 1 was a partial success. Number of logs uploaded: 1 out of 2
-Attempt 2 was a partial success. Number of logs uploaded: 1 out of 2
+Warn: Attempt 0 was a partial success. Number of logs uploaded: 1 out of 2
+Warn: Attempt 1 was a partial success. Number of logs uploaded: 1 out of 2
+Warn: Attempt 2 was a partial success. Number of logs uploaded: 1 out of 2
 error: upload did not succeed after three attempts


@@ -14,6 +14,7 @@ import (
 	"github.com/databricks/cli/cmd/root"
 	"github.com/databricks/cli/cmd/selftest"
 	"github.com/databricks/cli/cmd/sync"
+	"github.com/databricks/cli/cmd/telemetry"
 	"github.com/databricks/cli/cmd/version"
 	"github.com/databricks/cli/cmd/workspace"
 	"github.com/spf13/cobra"
@@ -76,6 +77,7 @@ func New(ctx context.Context) *cobra.Command {
 	cli.AddCommand(sync.New())
 	cli.AddCommand(version.New())
 	cli.AddCommand(selftest.New())
+	cli.AddCommand(telemetry.New())
 
 	return cli
 }


@@ -22,6 +22,7 @@ import (
 	"github.com/databricks/cli/libs/log"
 	"github.com/databricks/cli/libs/telemetry"
 	"github.com/databricks/cli/libs/telemetry/protos"
+	"github.com/databricks/databricks-sdk-go/logger"
 	"github.com/spf13/cobra"
 )
@@ -233,8 +234,15 @@ func uploadTelemetry(ctx context.Context, cmdStr string, startTime time.Time, ex
 		env = append(env, fmt.Sprintf("%s=%s", k, v))
 	}
 
+	// Default to warn log level. If debug is enabled in the parent process, we set
+	// the log level to debug for the telemetry worker as well.
+	logLevel := "warn"
+	if log.GetLogger(ctx).Enabled(ctx, logger.LevelDebug) {
+		logLevel = "debug"
+	}
+
 	d := daemon.Daemon{
-		Args:        []string{"telemetry", "upload"},
+		Args:        []string{"telemetry", "upload", fmt.Sprintf("--log-level=%s", logLevel)},
 		Env:         env,
 		PidFilePath: os.Getenv(telemetry.PidFileEnvVar),
 		LogFile:     os.Getenv(telemetry.UploadLogsFileEnvVar),
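
With this change, the detached telemetry worker inherits the parent's effective log level: the daemon ends up launching what is, roughly, an invocation of "databricks telemetry upload --log-level=warn", or "--log-level=debug" when the parent itself runs with debug logging enabled.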


@@ -0,0 +1,16 @@
+package telemetry
+
+import (
+	"github.com/spf13/cobra"
+)
+
+func New() *cobra.Command {
+	cmd := &cobra.Command{
+		Use:    "telemetry",
+		Short:  "commands that are used to upload telemetry",
+		Hidden: true,
+	}
+
+	cmd.AddCommand(newTelemetryUpload())
+	return cmd
+}

cmd/telemetry/upload.go (new file)

@@ -0,0 +1,37 @@
+package telemetry
+
+import (
+	"encoding/json"
+	"fmt"
+	"os"
+
+	"github.com/databricks/cli/libs/telemetry"
+	"github.com/spf13/cobra"
+)
+
+func newTelemetryUpload() *cobra.Command {
+	return &cobra.Command{
+		Use: "upload",
+		Run: func(cmd *cobra.Command, args []string) {
+			ctx := cmd.Context()
+
+			// We exit the process explicitly at the end of Run to avoid the possibility
+			// of the worker being spawned recursively. This could otherwise happen if,
+			// say, telemetry was logged as part of this command.
+			defer os.Exit(0)
+
+			resp, err := telemetry.Upload(ctx)
+			if err != nil {
+				fmt.Fprintf(os.Stderr, "error: %s\n", err)
+				os.Exit(1)
+			}
+
+			fmt.Printf("Telemetry logs uploaded successfully\n")
+			fmt.Println("Response:")
+			b, err := json.Marshal(resp)
+			if err == nil {
+				fmt.Println(string(b))
+			}
+		},
+	}
+}
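
For reference, a successful run of this hidden command would print output along the following lines; the JSON body here is illustrative, since the exact fields come from whatever telemetry.Upload returns in its ResponseBody:

Telemetry logs uploaded successfully
Response:
{"num_proto_success": 2}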


@@ -10,6 +10,7 @@ import (
 	"os"
 	"time"
 
+	"github.com/databricks/cli/libs/log"
 	"github.com/databricks/cli/libs/telemetry/protos"
 	"github.com/databricks/databricks-sdk-go/apierr"
 	"github.com/databricks/databricks-sdk-go/client"
@@ -91,7 +92,7 @@ func Upload(ctx context.Context) (*ResponseBody, error) {
 		// Partial success. Retry.
 		if err == nil && resp.NumProtoSuccess < int64(len(logs)) {
-			fmt.Fprintf(os.Stderr, "Attempt %d was a partial success. Number of logs uploaded: %d out of %d\n", i, resp.NumProtoSuccess, len(logs))
+			log.Warnf(ctx, "Attempt %d was a partial success. Number of logs uploaded: %d out of %d\n", i, resp.NumProtoSuccess, len(logs))
 			time.Sleep(2 * time.Second)
 			continue
 		}
@@ -101,7 +102,7 @@ func Upload(ctx context.Context) (*ResponseBody, error) {
 		// ref: https://github.com/databricks/databricks-sdk-go/blob/cdb28002afacb8b762348534a4c4040a9f19c24b/apierr/errors.go#L91
 		var apiErr *apierr.APIError
 		if errors.As(err, &apiErr) && apiErr.StatusCode >= 500 && apiErr.StatusCode != 503 {
-			fmt.Fprintf(os.Stderr, "Attempt %d failed due to a server side error. Retrying status code: %d\n", i, apiErr.StatusCode)
+			log.Warnf(ctx, "Attempt %d failed due to a server side error. Retrying status code: %d\n", i, apiErr.StatusCode)
 			time.Sleep(2 * time.Second)
 			continue
 		}
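
For context, both hunks sit inside a bounded retry loop in Upload. A minimal, self-contained sketch of that pattern follows, assuming three attempts and a 2-second backoff as shown above; the uploadOnce callback, the response type, and serverError are stand-ins for the real API call, ResponseBody, and apierr.APIError:

package telemetry

import (
	"context"
	"errors"
	"fmt"
	"time"
)

// response mirrors only the field the retry loop inspects; the real type is
// ResponseBody in libs/telemetry.
type response struct {
	NumProtoSuccess int64
}

// serverError is a stand-in for apierr.APIError from databricks-sdk-go.
type serverError struct{ StatusCode int }

func (e *serverError) Error() string {
	return fmt.Sprintf("server error, status %d", e.StatusCode)
}

// uploadWithRetries retries a hypothetical uploadOnce callback up to three times,
// mirroring the partial-success and retriable-5xx handling in the hunks above.
func uploadWithRetries(ctx context.Context, logs []string, uploadOnce func(context.Context) (*response, error)) (*response, error) {
	for i := 0; i < 3; i++ {
		resp, err := uploadOnce(ctx)

		// Partial success: only some of the logs were accepted. Retry.
		// (The real code emits these messages via log.Warnf.)
		if err == nil && resp.NumProtoSuccess < int64(len(logs)) {
			fmt.Printf("Attempt %d was a partial success. Number of logs uploaded: %d out of %d\n", i, resp.NumProtoSuccess, len(logs))
			time.Sleep(2 * time.Second)
			continue
		}

		// Retriable server-side error: any 5xx status except 503. Retry.
		var sErr *serverError
		if errors.As(err, &sErr) && sErr.StatusCode >= 500 && sErr.StatusCode != 503 {
			fmt.Printf("Attempt %d failed due to a server side error. Retrying status code: %d\n", i, sErr.StatusCode)
			time.Sleep(2 * time.Second)
			continue
		}

		// Anything else is either a success or a non-retriable failure.
		if err != nil {
			return nil, err
		}
		return resp, nil
	}

	return nil, errors.New("upload did not succeed after three attempts")
}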

main.go

@@ -2,46 +2,14 @@ package main
 
 import (
 	"context"
-	"encoding/json"
-	"fmt"
 	"os"
 
 	"github.com/databricks/cli/cmd"
 	"github.com/databricks/cli/cmd/root"
-	"github.com/databricks/cli/libs/telemetry"
-	"github.com/databricks/databricks-sdk-go/logger"
 )
 
 func main() {
 	ctx := context.Background()
 
-	// A new process is spawned for uploading telemetry data. We handle this separately
-	// from the rest of the CLI commands.
-	// This is done because [root.Execute] spawns a new process to run the
-	// "telemetry upload" command if there are logs to be uploaded. Having this outside
-	// of [root.Execute] ensures that the telemetry upload process is not spawned
-	// infinitely in a recursive manner.
-	if len(os.Args) == 3 && os.Args[1] == "telemetry" && os.Args[2] == "upload" {
-		// Suppress info logs (and lower) from the SDK. This ensures that INFO / DEBUG logs
-		// from the SDK are not printed to stdout / stderr.
-		logger.DefaultLogger = &logger.SimpleLogger{
-			Level: logger.LevelWarn,
-		}
-
-		resp, err := telemetry.Upload(ctx)
-		if err != nil {
-			fmt.Fprintf(os.Stderr, "error: %s\n", err)
-			os.Exit(1)
-		}
-
-		fmt.Printf("Telemetry logs uploaded successfully\n")
-		fmt.Println("Response:")
-		b, err := json.Marshal(resp)
-		if err == nil {
-			fmt.Println(string(b))
-		}
-		os.Exit(0)
-	}
-
 	err := root.Execute(ctx, cmd.New(ctx))
 	if err != nil {
 		os.Exit(1)