2023-02-15 16:14:59 +00:00
|
|
|
package notebook
|
|
|
|
|
|
|
|
import (
|
|
|
|
"bufio"
|
|
|
|
"bytes"
|
|
|
|
"io"
|
2024-05-30 07:41:50 +00:00
|
|
|
"io/fs"
|
2023-02-15 16:14:59 +00:00
|
|
|
"os"
|
|
|
|
"path/filepath"
|
|
|
|
"strings"
|
|
|
|
|
|
|
|
"github.com/databricks/databricks-sdk-go/service/workspace"
|
|
|
|
)
|
|
|
|
|
2024-07-10 06:37:47 +00:00
|
|
|
// FileInfoWithWorkspaceObjectInfo is an interface implemented by [fs.FileInfo] values that
// contain a file's underlying [workspace.ObjectInfo].
//
// This may be the case when working with a [filer.Filer] backed by the workspace API.
// For these files we do not need to read a file's header to know if it is a notebook;
// we can use the [workspace.ObjectInfo] value directly.
type FileInfoWithWorkspaceObjectInfo interface {
	// WorkspaceObjectInfo returns the workspace metadata associated with the file.
	WorkspaceObjectInfo() workspace.ObjectInfo
}
|
|
|
|
|
2023-02-15 16:14:59 +00:00
|
|
|
// Maximum length in bytes of the notebook header.
// Reading this many bytes is enough to capture the longest notebook marker
// line used below (e.g. "-- Databricks notebook source").
const headerLength = 32
|
|
|
|
|
2024-07-10 06:37:47 +00:00
|
|
|
// file wraps an fs.File and implements a few helper methods such that
// they don't need to be inlined in the [DetectWithFS] function below.
type file struct {
	// f is the underlying handle returned by [fs.FS.Open].
	f fs.File
}
|
|
|
|
|
|
|
|
func openFile(fsys fs.FS, name string) (*file, error) {
|
2024-05-30 07:41:50 +00:00
|
|
|
f, err := fsys.Open(name)
|
2023-02-15 16:14:59 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2024-07-10 06:37:47 +00:00
|
|
|
return &file{f: f}, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
// close closes the underlying [fs.File].
func (f file) close() error {
	return f.f.Close()
}
|
2023-02-15 16:14:59 +00:00
|
|
|
|
2024-07-10 06:37:47 +00:00
|
|
|
func (f file) readHeader() (string, error) {
|
2023-02-15 16:14:59 +00:00
|
|
|
// Scan header line with some padding.
|
|
|
|
var buf = make([]byte, headerLength)
|
2024-07-10 06:37:47 +00:00
|
|
|
n, err := f.f.Read([]byte(buf))
|
2023-02-15 16:14:59 +00:00
|
|
|
if err != nil && err != io.EOF {
|
2024-07-10 06:37:47 +00:00
|
|
|
return "", err
|
2023-02-15 16:14:59 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// Trim buffer to actual read bytes.
|
2024-07-10 06:37:47 +00:00
|
|
|
buf = buf[:n]
|
|
|
|
|
|
|
|
// Read the first line from the buffer.
|
|
|
|
scanner := bufio.NewScanner(bytes.NewReader(buf))
|
|
|
|
scanner.Scan()
|
|
|
|
return scanner.Text(), nil
|
|
|
|
}
|
|
|
|
|
|
|
|
// getObjectInfo returns the [workspace.ObjectInfo] for the file if it is
|
|
|
|
// part of the [fs.FileInfo] value returned by the [fs.Stat] call.
|
|
|
|
func (f file) getObjectInfo() (oi workspace.ObjectInfo, ok bool, err error) {
|
|
|
|
stat, err := f.f.Stat()
|
|
|
|
if err != nil {
|
|
|
|
return workspace.ObjectInfo{}, false, err
|
|
|
|
}
|
|
|
|
|
|
|
|
// Use object info if available.
|
|
|
|
if i, ok := stat.(FileInfoWithWorkspaceObjectInfo); ok {
|
|
|
|
return i.WorkspaceObjectInfo(), true, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
return workspace.ObjectInfo{}, false, nil
|
2023-02-15 16:14:59 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// Detect returns whether the file at path is a Databricks notebook.
|
|
|
|
// If it is, it returns the notebook language.
|
2024-05-30 07:41:50 +00:00
|
|
|
func DetectWithFS(fsys fs.FS, name string) (notebook bool, language workspace.Language, err error) {
|
2023-02-15 16:14:59 +00:00
|
|
|
header := ""
|
|
|
|
|
2024-07-10 06:37:47 +00:00
|
|
|
f, err := openFile(fsys, name)
|
|
|
|
if err != nil {
|
|
|
|
return false, "", err
|
|
|
|
}
|
|
|
|
|
|
|
|
defer f.close()
|
|
|
|
|
|
|
|
// Use object info if available.
|
|
|
|
oi, ok, err := f.getObjectInfo()
|
|
|
|
if err != nil {
|
|
|
|
return false, "", err
|
|
|
|
}
|
|
|
|
if ok {
|
|
|
|
return oi.ObjectType == workspace.ObjectTypeNotebook, oi.Language, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
// Read the first line of the file.
|
|
|
|
fileHeader, err := f.readHeader()
|
2023-03-21 17:13:16 +00:00
|
|
|
if err != nil {
|
|
|
|
return false, "", err
|
|
|
|
}
|
|
|
|
|
2023-02-15 16:14:59 +00:00
|
|
|
// Determine which header to expect based on filename extension.
|
2024-05-30 07:41:50 +00:00
|
|
|
ext := strings.ToLower(filepath.Ext(name))
|
2023-02-15 16:14:59 +00:00
|
|
|
switch ext {
|
Add support for non-Python ipynb notebooks to DABs (#1827)
## Changes
### Background
The workspace import APIs recently added support for importing Jupyter
notebooks written in R, Scala, or SQL, that is non-Python notebooks.
This now works for the `/import-file` API which we leverage in the CLI.
Note: We do not need any changes in `databricks sync`. It works out of
the box because any state mapping of local names to remote names that we
store is only scoped to the notebook extension (i.e., `.ipynb` in this
case) and is agnostic of the notebook's specific language.
### Problem this PR addresses
The extension-aware filer previously did not function because it checks
that a `.ipynb` notebook is written in Python. This PR relaxes that
constraint and adds integration tests for both the normal workspace
filer and extensions aware filer writing and reading non-Python `.ipynb`
notebooks.
This implies that after this PR DABs in the workspace / CLI from DBR
will work for non-Python notebooks as well. non-Python notebooks for
DABs deployment from local machines already works after the platform
side changes to the API landed, this PR just adds integration tests for
that bit of functionality.
Note: Any platform side changes we needed for the import API have
already been rolled out to production.
### Before
DABs deploy would work fine for non-Python notebooks. But DABs
deployments from DBR would not.
### After
DABs deploys both from local machines and DBR will work fine.
## Testing
For creating the `.ipynb` notebook fixtures used in the integration
tests I created them directly from the VSCode UI. This ensures high
fidelity with how users will create their non-Python notebooks locally.
For Python notebooks this is supported out of the box by VSCode but for
R and Scala notebooks this requires installing the Jupyter kernel for R
and Scala on my local machine and using that from VSCode.
For SQL, I ended up directly modifying the `language_info` field in the
Jupyter metadata to create the test fixture.
### Discussion: Issues with configuring language at the cell level
The language metadata for a Jupyter notebook is standardized at the
notebook level (in the `language_info` field). Unfortunately, it's not
standardized at the cell level. Thus, for example, if a user changes the
language for their cell in VSCode (which is supported by the standard
Jupyter VSCode integration), it'll cause a runtime error when the user
actually attempts to run the notebook. This is because the cell-level
metadata is encoded in a format specific to VSCode:
```
cells: []{
"vscode": {
"languageId": "sql"
}
}
```
Supporting cell level languages is thus out of scope for this PR and can
be revisited along with the workspace files team if there's strong
customer interest.
2024-11-13 21:39:51 +00:00
|
|
|
case ExtensionPython:
|
2023-02-15 16:14:59 +00:00
|
|
|
header = `# Databricks notebook source`
|
|
|
|
language = workspace.LanguagePython
|
Add support for non-Python ipynb notebooks to DABs (#1827)
## Changes
### Background
The workspace import APIs recently added support for importing Jupyter
notebooks written in R, Scala, or SQL, that is non-Python notebooks.
This now works for the `/import-file` API which we leverage in the CLI.
Note: We do not need any changes in `databricks sync`. It works out of
the box because any state mapping of local names to remote names that we
store is only scoped to the notebook extension (i.e., `.ipynb` in this
case) and is agnostic of the notebook's specific language.
### Problem this PR addresses
The extension-aware filer previously did not function because it checks
that a `.ipynb` notebook is written in Python. This PR relaxes that
constraint and adds integration tests for both the normal workspace
filer and extensions aware filer writing and reading non-Python `.ipynb`
notebooks.
This implies that after this PR DABs in the workspace / CLI from DBR
will work for non-Python notebooks as well. non-Python notebooks for
DABs deployment from local machines already works after the platform
side changes to the API landed, this PR just adds integration tests for
that bit of functionality.
Note: Any platform side changes we needed for the import API have
already been rolled out to production.
### Before
DABs deploy would work fine for non-Python notebooks. But DABs
deployments from DBR would not.
### After
DABs deploys both from local machines and DBR will work fine.
## Testing
For creating the `.ipynb` notebook fixtures used in the integration
tests I created them directly from the VSCode UI. This ensures high
fidelity with how users will create their non-Python notebooks locally.
For Python notebooks this is supported out of the box by VSCode but for
R and Scala notebooks this requires installing the Jupyter kernel for R
and Scala on my local machine and using that from VSCode.
For SQL, I ended up directly modifying the `language_info` field in the
Jupyter metadata to create the test fixture.
### Discussion: Issues with configuring language at the cell level
The language metadata for a Jupyter notebook is standardized at the
notebook level (in the `language_info` field). Unfortunately, it's not
standardized at the cell level. Thus, for example, if a user changes the
language for their cell in VSCode (which is supported by the standard
Jupyter VSCode integration), it'll cause a runtime error when the user
actually attempts to run the notebook. This is because the cell-level
metadata is encoded in a format specific to VSCode:
```
cells: []{
"vscode": {
"languageId": "sql"
}
}
```
Supporting cell level languages is thus out of scope for this PR and can
be revisited along with the workspace files team if there's strong
customer interest.
2024-11-13 21:39:51 +00:00
|
|
|
case ExtensionR:
|
2023-02-15 16:14:59 +00:00
|
|
|
header = `# Databricks notebook source`
|
|
|
|
language = workspace.LanguageR
|
Add support for non-Python ipynb notebooks to DABs (#1827)
## Changes
### Background
The workspace import APIs recently added support for importing Jupyter
notebooks written in R, Scala, or SQL, that is non-Python notebooks.
This now works for the `/import-file` API which we leverage in the CLI.
Note: We do not need any changes in `databricks sync`. It works out of
the box because any state mapping of local names to remote names that we
store is only scoped to the notebook extension (i.e., `.ipynb` in this
case) and is agnostic of the notebook's specific language.
### Problem this PR addresses
The extension-aware filer previously did not function because it checks
that a `.ipynb` notebook is written in Python. This PR relaxes that
constraint and adds integration tests for both the normal workspace
filer and extensions aware filer writing and reading non-Python `.ipynb`
notebooks.
This implies that after this PR DABs in the workspace / CLI from DBR
will work for non-Python notebooks as well. non-Python notebooks for
DABs deployment from local machines already works after the platform
side changes to the API landed, this PR just adds integration tests for
that bit of functionality.
Note: Any platform side changes we needed for the import API have
already been rolled out to production.
### Before
DABs deploy would work fine for non-Python notebooks. But DABs
deployments from DBR would not.
### After
DABs deploys both from local machines and DBR will work fine.
## Testing
For creating the `.ipynb` notebook fixtures used in the integration
tests I created them directly from the VSCode UI. This ensures high
fidelity with how users will create their non-Python notebooks locally.
For Python notebooks this is supported out of the box by VSCode but for
R and Scala notebooks this requires installing the Jupyter kernel for R
and Scala on my local machine and using that from VSCode.
For SQL, I ended up directly modifying the `language_info` field in the
Jupyter metadata to create the test fixture.
### Discussion: Issues with configuring language at the cell level
The language metadata for a Jupyter notebook is standardized at the
notebook level (in the `language_info` field). Unfortunately, it's not
standardized at the cell level. Thus, for example, if a user changes the
language for their cell in VSCode (which is supported by the standard
Jupyter VSCode integration), it'll cause a runtime error when the user
actually attempts to run the notebook. This is because the cell-level
metadata is encoded in a format specific to VSCode:
```
cells: []{
"vscode": {
"languageId": "sql"
}
}
```
Supporting cell level languages is thus out of scope for this PR and can
be revisited along with the workspace files team if there's strong
customer interest.
2024-11-13 21:39:51 +00:00
|
|
|
case ExtensionScala:
|
2023-02-15 16:14:59 +00:00
|
|
|
header = "// Databricks notebook source"
|
|
|
|
language = workspace.LanguageScala
|
Add support for non-Python ipynb notebooks to DABs (#1827)
## Changes
### Background
The workspace import APIs recently added support for importing Jupyter
notebooks written in R, Scala, or SQL, that is non-Python notebooks.
This now works for the `/import-file` API which we leverage in the CLI.
Note: We do not need any changes in `databricks sync`. It works out of
the box because any state mapping of local names to remote names that we
store is only scoped to the notebook extension (i.e., `.ipynb` in this
case) and is agnostic of the notebook's specific language.
### Problem this PR addresses
The extension-aware filer previously did not function because it checks
that a `.ipynb` notebook is written in Python. This PR relaxes that
constraint and adds integration tests for both the normal workspace
filer and extensions aware filer writing and reading non-Python `.ipynb`
notebooks.
This implies that after this PR DABs in the workspace / CLI from DBR
will work for non-Python notebooks as well. non-Python notebooks for
DABs deployment from local machines already works after the platform
side changes to the API landed, this PR just adds integration tests for
that bit of functionality.
Note: Any platform side changes we needed for the import API have
already been rolled out to production.
### Before
DABs deploy would work fine for non-Python notebooks. But DABs
deployments from DBR would not.
### After
DABs deploys both from local machines and DBR will work fine.
## Testing
For creating the `.ipynb` notebook fixtures used in the integration
tests I created them directly from the VSCode UI. This ensures high
fidelity with how users will create their non-Python notebooks locally.
For Python notebooks this is supported out of the box by VSCode but for
R and Scala notebooks this requires installing the Jupyter kernel for R
and Scala on my local machine and using that from VSCode.
For SQL, I ended up directly modifying the `language_info` field in the
Jupyter metadata to create the test fixture.
### Discussion: Issues with configuring language at the cell level
The language metadata for a Jupyter notebook is standardized at the
notebook level (in the `language_info` field). Unfortunately, it's not
standardized at the cell level. Thus, for example, if a user changes the
language for their cell in VSCode (which is supported by the standard
Jupyter VSCode integration), it'll cause a runtime error when the user
actually attempts to run the notebook. This is because the cell-level
metadata is encoded in a format specific to VSCode:
```
cells: []{
"vscode": {
"languageId": "sql"
}
}
```
Supporting cell level languages is thus out of scope for this PR and can
be revisited along with the workspace files team if there's strong
customer interest.
2024-11-13 21:39:51 +00:00
|
|
|
case ExtensionSql:
|
2023-02-15 16:14:59 +00:00
|
|
|
header = "-- Databricks notebook source"
|
|
|
|
language = workspace.LanguageSql
|
Add support for non-Python ipynb notebooks to DABs (#1827)
## Changes
### Background
The workspace import APIs recently added support for importing Jupyter
notebooks written in R, Scala, or SQL, that is non-Python notebooks.
This now works for the `/import-file` API which we leverage in the CLI.
Note: We do not need any changes in `databricks sync`. It works out of
the box because any state mapping of local names to remote names that we
store is only scoped to the notebook extension (i.e., `.ipynb` in this
case) and is agnostic of the notebook's specific language.
### Problem this PR addresses
The extension-aware filer previously did not function because it checks
that a `.ipynb` notebook is written in Python. This PR relaxes that
constraint and adds integration tests for both the normal workspace
filer and extensions aware filer writing and reading non-Python `.ipynb`
notebooks.
This implies that after this PR DABs in the workspace / CLI from DBR
will work for non-Python notebooks as well. non-Python notebooks for
DABs deployment from local machines already works after the platform
side changes to the API landed, this PR just adds integration tests for
that bit of functionality.
Note: Any platform side changes we needed for the import API have
already been rolled out to production.
### Before
DABs deploy would work fine for non-Python notebooks. But DABs
deployments from DBR would not.
### After
DABs deploys both from local machines and DBR will work fine.
## Testing
For creating the `.ipynb` notebook fixtures used in the integration
tests I created them directly from the VSCode UI. This ensures high
fidelity with how users will create their non-Python notebooks locally.
For Python notebooks this is supported out of the box by VSCode but for
R and Scala notebooks this requires installing the Jupyter kernel for R
and Scala on my local machine and using that from VSCode.
For SQL, I ended up directly modifying the `language_info` field in the
Jupyter metadata to create the test fixture.
### Discussion: Issues with configuring language at the cell level
The language metadata for a Jupyter notebook is standardized at the
notebook level (in the `language_info` field). Unfortunately, it's not
standardized at the cell level. Thus, for example, if a user changes the
language for their cell in VSCode (which is supported by the standard
Jupyter VSCode integration), it'll cause a runtime error when the user
actually attempts to run the notebook. This is because the cell-level
metadata is encoded in a format specific to VSCode:
```
cells: []{
"vscode": {
"languageId": "sql"
}
}
```
Supporting cell level languages is thus out of scope for this PR and can
be revisited along with the workspace files team if there's strong
customer interest.
2024-11-13 21:39:51 +00:00
|
|
|
case ExtensionJupyter:
|
2024-05-30 07:41:50 +00:00
|
|
|
return DetectJupyterWithFS(fsys, name)
|
2023-02-15 16:14:59 +00:00
|
|
|
default:
|
|
|
|
return false, "", nil
|
|
|
|
}
|
|
|
|
|
2023-03-21 17:13:16 +00:00
|
|
|
if fileHeader != header {
|
2023-02-15 16:14:59 +00:00
|
|
|
return false, "", nil
|
|
|
|
}
|
|
|
|
|
|
|
|
return true, language, nil
|
|
|
|
}
|
2024-05-30 07:41:50 +00:00
|
|
|
|
|
|
|
// Detect calls DetectWithFS with the local filesystem.
|
|
|
|
// The name argument may be a local relative path or a local absolute path.
|
|
|
|
func Detect(name string) (notebook bool, language workspace.Language, err error) {
|
|
|
|
d := filepath.ToSlash(filepath.Dir(name))
|
|
|
|
b := filepath.Base(name)
|
|
|
|
return DetectWithFS(os.DirFS(d), b)
|
|
|
|
}
|