mirror of https://github.com/databricks/cli.git
Ensure Go code is formatted (#37)
This commit is contained in:
parent
80a4c47d62
commit
5a55cad7c3
|
@ -48,3 +48,24 @@ jobs:
|
|||
|
||||
- name: Run tests
|
||||
run: make test
|
||||
|
||||
fmt:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
- uses: actions/setup-go@v1
|
||||
with:
|
||||
go-version: 1.18.x
|
||||
|
||||
- name: Run gofmt
|
||||
run: |
|
||||
# -l: list files that were reformatted
|
||||
# -w: write back formatted files to disk
|
||||
gofmt -l -w ./
|
||||
|
||||
- name: Fail on differences
|
||||
run: |
|
||||
# Exit with status code 1 if there are differences (i.e. unformatted files)
|
||||
git diff --exit-code
|
||||
|
|
|
@ -0,0 +1,6 @@
|
|||
{
|
||||
"[go]": {
|
||||
"editor.insertSpaces": false,
|
||||
"editor.formatOnSave": true
|
||||
}
|
||||
}
|
|
@ -42,9 +42,9 @@ const DBFSWheelLocation = "dbfs:/FileStore/wheels/simple"
|
|||
// or do we bypass the environment variable into terraform deployer. And make a decision.
|
||||
//
|
||||
// Whatever this method gets refactored to is intended to be used for two purposes:
|
||||
// - uploading project's wheel archives: one per project or one per project/developer, depending on isolation
|
||||
// - synchronising enterprise artifactories, jfrogs, azdo feeds, so that we fix the gap of private code artifact
|
||||
// repository integration.
|
||||
// - uploading project's wheel archives: one per project or one per project/developer, depending on isolation
|
||||
// - synchronising enterprise artifactories, jfrogs, azdo feeds, so that we fix the gap of private code artifact
|
||||
// repository integration.
|
||||
func UploadWheelToDBFSWithPEP503(ctx context.Context, dir string) (string, error) {
|
||||
wheel, err := BuildWheel(ctx, dir)
|
||||
if err != nil {
|
||||
|
|
|
@ -10,13 +10,14 @@ Solve the following adoption blockers:
|
|||
- users won't have to copy-paste these into their configs:
|
||||
|
||||
```hcl
|
||||
terraform {
|
||||
required_providers {
|
||||
databricks = {
|
||||
source = "databrickslabs/databricks"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
terraform {
|
||||
required_providers {
|
||||
databricks = {
|
||||
source = "databrickslabs/databricks"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
provider "databricks" {
|
||||
}
|
||||
|
@ -25,12 +26,12 @@ provider "databricks" {
|
|||
Terraform Plugin SDK v2 is using similar techniques for testing providers. One may find
|
||||
details in github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource/plugin.go. In short:
|
||||
|
||||
- init provider instance
|
||||
- start terraform plugin GRPC server
|
||||
- "reattach" providers and specify the `tfexec.Reattach` options, which essentially
|
||||
forward GRPC address to terraform subprocess.
|
||||
- this can be done by either adding a source dependency on Databricks provider
|
||||
or adding a special launch mode to it.
|
||||
- init provider instance
|
||||
- start terraform plugin GRPC server
|
||||
- "reattach" providers and specify the `tfexec.Reattach` options, which essentially
|
||||
forward GRPC address to terraform subprocess.
|
||||
- this can be done by either adding a source dependency on Databricks provider
|
||||
or adding a special launch mode to it.
|
||||
|
||||
For now
|
||||
---
|
||||
|
|
Loading…
Reference in New Issue