Skip to content

Commit

Permalink
[Feature] Allow to skip calling Read after Create/Update operations
Browse files Browse the repository at this point in the history
This PR adds the ability for a resource to specify that it may not need to call
`Read` after `Create` and `Update` operations, so we can avoid performing additional
API calls. A resource may implement a `CanSkipReadAfterCreateAndUpdate` function
that decides whether the Read operation should be skipped.

It was found that the import API returns `object_id` in its response,
so we don't need to call get-status to fill in all attributes.
This should help when importing a large number of notebooks, e.g., when applying exported resources.
  • Loading branch information
alexott committed Oct 30, 2024
1 parent 92357dc commit 2ddb757
Show file tree
Hide file tree
Showing 6 changed files with 115 additions and 112 deletions.
29 changes: 18 additions & 11 deletions common/resource.go
Original file line number Diff line number Diff line change
Expand Up @@ -16,17 +16,18 @@ import (

// Resource aims to simplify things like error & deleted entities handling
type Resource struct {
	Create             func(ctx context.Context, d *schema.ResourceData, c *DatabricksClient) error
	Read               func(ctx context.Context, d *schema.ResourceData, c *DatabricksClient) error
	Update             func(ctx context.Context, d *schema.ResourceData, c *DatabricksClient) error
	Delete             func(ctx context.Context, d *schema.ResourceData, c *DatabricksClient) error
	CustomizeDiff      func(ctx context.Context, d *schema.ResourceDiff) error
	StateUpgraders     []schema.StateUpgrader
	Schema             map[string]*schema.Schema
	SchemaVersion      int
	Timeouts           *schema.ResourceTimeout
	DeprecationMessage string
	Importer           *schema.ResourceImporter
	// CanSkipReadAfterCreateAndUpdate, when non-nil, is consulted after a
	// successful Create or Update; returning true skips the follow-up Read
	// call, avoiding an extra API round trip for resources whose create/update
	// responses already populate all attributes.
	CanSkipReadAfterCreateAndUpdate func(d *schema.ResourceData) bool
}

func nicerError(ctx context.Context, err error, action string) error {
Expand Down Expand Up @@ -94,6 +95,9 @@ func (r Resource) ToResource() *schema.Resource {
err = nicerError(ctx, err, "update")
return diag.FromErr(err)
}
if r.CanSkipReadAfterCreateAndUpdate != nil && r.CanSkipReadAfterCreateAndUpdate(d) {
return nil
}
if err := recoverable(r.Read)(ctx, d, c); err != nil {
err = nicerError(ctx, err, "read")
return diag.FromErr(err)
Expand Down Expand Up @@ -162,6 +166,9 @@ func (r Resource) ToResource() *schema.Resource {
err = nicerError(ctx, err, "create")
return diag.FromErr(err)
}
if r.CanSkipReadAfterCreateAndUpdate != nil && r.CanSkipReadAfterCreateAndUpdate(d) {
return nil
}
if err = recoverable(r.Read)(ctx, d, c); err != nil {
err = nicerError(ctx, err, "read")
return diag.FromErr(err)
Expand Down
2 changes: 2 additions & 0 deletions common/resource_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,8 @@ func TestImportingCallsRead(t *testing.T) {
assert.Equal(t, 1, d.Get("foo"))
}

// TODO: add tests covering resources both with and without CanSkipReadAfterCreateAndUpdate

func TestHTTP404TriggersResourceRemovalForReadAndDelete(t *testing.T) {
nope := func(ctx context.Context,
d *schema.ResourceData,
Expand Down
7 changes: 6 additions & 1 deletion exporter/importables.go
Original file line number Diff line number Diff line change
Expand Up @@ -1614,7 +1614,12 @@ var resourcesMap map[string]importable = map[string]importable{
ic.emitWorkspaceObjectParentDirectory(r)
return r.Data.Set("source", fileName)
},
ShouldOmitField: shouldOmitMd5Field,
ShouldOmitField: func(ic *importContext, pathString string, as *schema.Schema, d *schema.ResourceData) bool {
if pathString == "language" {
return d.Get("language") == ""
}
return shouldOmitMd5Field(ic, pathString, as, d)
},
Depends: []reference{
{Path: "source", File: true},
{Path: "path", Resource: "databricks_directory", MatchType: MatchLongestPrefix,
Expand Down
1 change: 1 addition & 0 deletions workspace/resource_directory.go
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,7 @@ func ResourceDirectory() common.Resource {
path := d.Get("path").(string)
err = client.Workspace.MkdirsByPath(ctx, path)
if err != nil {
// TODO: ignore RESOURCE_ALREADY_EXISTS error? make it configurable?
// TODO: handle RESOURCE_ALREADY_EXISTS
return err
}
Expand Down
64 changes: 42 additions & 22 deletions workspace/resource_notebook.go
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,10 @@ type ObjectStatus struct {
Size int64 `json:"size,omitempty"`
}

// ImportResponse is the response body of the workspace import API
// (POST /workspace/import); ObjectID identifies the newly imported object.
type ImportResponse struct {
	ObjectID int64 `json:"object_id,omitempty"`
}

// ExportPath contains the base64 content of the notebook
type ExportPath struct {
Content string `json:"content,omitempty"`
Expand Down Expand Up @@ -98,12 +102,14 @@ type NotebooksAPI struct {
var mtx = &sync.Mutex{}

// Create creates a notebook given the content and path
func (a NotebooksAPI) Create(r ImportPath) error {
func (a NotebooksAPI) Create(r ImportPath) (ImportResponse, error) {
if r.Format == "DBC" {
mtx.Lock()
defer mtx.Unlock()
}
return a.client.Post(a.context, "/workspace/import", r, nil)
var responce ImportResponse
err := a.client.Post(a.context, "/workspace/import", r, &responce)
return responce, err
}

// Read returns the notebook metadata and not the contents
Expand Down Expand Up @@ -203,26 +209,24 @@ func (a NotebooksAPI) Delete(path string, recursive bool) error {
}, nil)
}

// setComputedProperties fills in the computed attributes "url" and
// "workspace_path", both of which are derived from the resource ID
// rather than returned by the API.
func setComputedProperties(d *schema.ResourceData, c *common.DatabricksClient) {
	id := d.Id()
	d.Set("workspace_path", "/Workspace"+id)
	d.Set("url", c.FormatURL("#workspace", id))
}

// ResourceNotebook manages notebooks
func ResourceNotebook() common.Resource {
s := FileContentSchema(map[string]*schema.Schema{
"language": {
Type: schema.TypeString,
Optional: true,
Computed: true, // we need it because it will be filled by the provider or backend
ValidateFunc: validation.StringInSlice([]string{
Scala,
Python,
R,
SQL,
}, false),
DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool {
source := d.Get("source").(string)
if source == "" {
return false
}
ext := strings.ToLower(filepath.Ext(source))
return old == extMap[ext].Language
},
},
"format": {
Type: schema.TypeString,
Expand Down Expand Up @@ -258,6 +262,9 @@ func ResourceNotebook() common.Resource {
return common.Resource{
Schema: s,
SchemaVersion: 1,
CanSkipReadAfterCreateAndUpdate: func(d *schema.ResourceData) bool {
return d.Get("format").(string) == "SOURCE"
},
Create: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error {
content, err := ReadContent(d)
if err != nil {
Expand All @@ -281,8 +288,9 @@ func ResourceNotebook() common.Resource {
createNotebook.Overwrite = extMap[ext].Overwrite
// by default it's SOURCE, but for DBC we have to change it
d.Set("format", createNotebook.Format)
d.Set("language", createNotebook.Language)
}
err = notebooksAPI.Create(createNotebook)
resp, err := notebooksAPI.Create(createNotebook)
if err != nil {
if isParentDoesntExistError(err) {
parent := filepath.ToSlash(filepath.Dir(path))
Expand All @@ -291,13 +299,18 @@ func ResourceNotebook() common.Resource {
if err != nil {
return err
}
err = notebooksAPI.Create(createNotebook)
resp, err = notebooksAPI.Create(createNotebook)
}
if err != nil {
return err
}
}
if d.Get("object_type").(string) == "" {
d.Set("object_type", Notebook)
}
d.Set("object_id", resp.ObjectID)
d.SetId(path)
setComputedProperties(d, c)
return nil
},
Read: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error {
Expand All @@ -311,8 +324,7 @@ func ResourceNotebook() common.Resource {
if err != nil {
return err
}
d.Set("url", c.FormatURL("#workspace", d.Id()))
d.Set("workspace_path", "/Workspace"+objectStatus.Path)
setComputedProperties(d, c)
return common.StructToData(objectStatus, s, d)
},
Update: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error {
Expand All @@ -322,25 +334,33 @@ func ResourceNotebook() common.Resource {
return err
}
format := d.Get("format").(string)
var resp ImportResponse
if format == "DBC" {
// Overwrite cannot be used for source format when importing a folder
err = notebooksAPI.Delete(d.Id(), true)
if err != nil {
return err
}
return notebooksAPI.Create(ImportPath{
resp, err = notebooksAPI.Create(ImportPath{
Content: base64.StdEncoding.EncodeToString(content),
Format: format,
Path: d.Id(),
})
} else {
resp, err = notebooksAPI.Create(ImportPath{
Content: base64.StdEncoding.EncodeToString(content),
Language: d.Get("language").(string),
Format: format,
Overwrite: true,
Path: d.Id(),
})
}
return notebooksAPI.Create(ImportPath{
Content: base64.StdEncoding.EncodeToString(content),
Language: d.Get("language").(string),
Format: format,
Overwrite: true,
Path: d.Id(),
})
if err != nil {
return err
}
d.Set("object_id", resp.ObjectID)
setComputedProperties(d, c)
return nil
},
Delete: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error {
objType := d.Get("object_type")
Expand Down
Loading

0 comments on commit 2ddb757

Please sign in to comment.