databricks-cli/cmd/root/root.go

package root

import (
	"context"
	"errors"
	"fmt"
	"log/slog"
	"os"
	"strings"
"log/slog"
"github.com/databricks/cli/internal/build"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/log"
2022-05-13 13:30:22 +00:00
"github.com/spf13/cobra"
)

func New(ctx context.Context) *cobra.Command {
	cmd := &cobra.Command{
		Use:     "databricks",
		Short:   "Databricks CLI",
		Version: build.GetInfo().Version,

		// Cobra prints the usage string to stderr if a command returns an error.
		// This usage string should only be displayed if an invalid combination of flags
		// is specified and not when runtime errors occur (e.g. resource not found).
		// The usage string is included in [flagErrorFunc] for flag errors only.
		SilenceUsage: true,

		// Silence error printing by cobra. Errors are printed through cmdio.
		SilenceErrors: true,
	}

	// Pass the context along through the command during initialization.
	// It will be overwritten when the command is executed.
	cmd.SetContext(ctx)

	// Initialize flags
	logFlags := initLogFlags(cmd)
	progressLoggerFlag := initProgressLoggerFlag(cmd, logFlags)
	outputFlag := initOutputFlag(cmd)
	initProfileFlag(cmd)
	initEnvironmentFlag(cmd)
	initTargetFlag(cmd)
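
	// The init*Flag helpers above are defined elsewhere in this package; each
	// one registers persistent flags on the root command (e.g. --profile or
	// --target, judging by their names), and the returned handles are used by
	// the PersistentPreRunE hook below.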

	cmd.PersistentPreRunE = func(cmd *cobra.Command, args []string) error {
		ctx := cmd.Context()

		// Configure default logger.
		ctx, err := logFlags.initializeContext(ctx)
		if err != nil {
			return err
		}

		logger := log.GetLogger(ctx)
		logger.Info("start",
			slog.String("version", build.GetInfo().Version),
			slog.String("args", strings.Join(os.Args, ", ")))

		// Configure progress logger
		ctx, err = progressLoggerFlag.initializeContext(ctx)
		if err != nil {
			return err
		}

		// Set the context so that initializeIO can access the current context.
		cmd.SetContext(ctx)

		// Configure command IO
		err = outputFlag.initializeIO(cmd)
		if err != nil {
			return err
		}

		// Get the (possibly updated) context back from the command.
		ctx = cmd.Context()

		// Configure our user agent with the command that's about to be executed.
		ctx = withCommandInUserAgent(ctx, cmd)
		ctx = withUpstreamInUserAgent(ctx)
		cmd.SetContext(ctx)
		return nil
	}
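
	// The pre-run hook above threads an enriched context through the command:
	// each step derives a new context (logger, progress, IO, user agent) and
	// stores it back with cmd.SetContext, so downstream handlers can retrieve
	// it via cmd.Context(). An illustrative (hypothetical) handler:
	//
	//	cmd.RunE = func(cmd *cobra.Command, args []string) error {
	//		ctx := cmd.Context() // carries the configured logger and IO
	//		log.GetLogger(ctx).Info("running")
	//		return nil
	//	}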

	cmd.SetFlagErrorFunc(flagErrorFunc)
	cmd.SetVersionTemplate("Databricks CLI v{{.Version}}\n")
	return cmd
}
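
// The version template set in New makes `databricks --version` print a single
// line such as "Databricks CLI v0.230.0" (version number illustrative).
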
// Wrap flag errors to include the usage string.
func flagErrorFunc(c *cobra.Command, err error) error {
	return fmt.Errorf("%w\n\n%s", err, c.UsageString())
}
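
// With the %w verb the original flag error remains available to
// errors.Is/errors.As, and the usage string follows after a blank line. For
// illustration, an invalid flag yields an error message shaped like:
//
//	unknown flag: --frobnicate
//
//	Usage:
//	  databricks [command] ...
//
// (flag name and usage text illustrative)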

// Execute adds all child commands to the root command and sets flags appropriately.
// This is called by main.main(). It only needs to happen once for the root command.
func Execute(ctx context.Context, cmd *cobra.Command) error {
	// TODO: deferred panic recovery

	// Run the command
	cmd, err := cmd.ExecuteContextC(ctx)
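
	// ExecuteContextC returns the command that was ultimately executed, so
	// cmd is rebound to the leaf command and cmd.Context() below reflects
	// that command's (possibly enriched) context.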

	if err != nil && !errors.Is(err, ErrAlreadyPrinted) {
		// If cmdio logger initialization succeeded, this logs through the
		// initialized cmdio logger; otherwise it falls back to the default
		// cmdio logger.
		cmdio.LogError(cmd.Context(), err)
	}

	// Log exit status and error.
	// We only log if logger initialization succeeded and the logger is
	// stored in the command context.
	if logger, ok := log.FromContext(cmd.Context()); ok {
		if err == nil {
			logger.Info("completed execution",
				slog.String("exit_code", "0"))
		} else {
			logger.Error("failed execution",
				slog.String("exit_code", "1"),
				slog.String("error", err.Error()))
		}
	}

	return err
}
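
// An illustrative (hypothetical) caller, mirroring what main.main() does:
//
//	func main() {
//		ctx := context.Background()
//		if err := root.Execute(ctx, root.New(ctx)); err != nil {
//			os.Exit(1)
//		}
//	}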