@@ -18,16 +18,16 @@ package controllers
 
 import (
 	"context"
-	"encoding/json"
 	"fmt"
-	"strings"
-
 	databricksv1alpha1 "github.com/microsoft/azure-databricks-operator/api/v1alpha1"
+	"github.com/mitchellh/hashstructure"
 	dbmodels "github.com/xinsnake/databricks-sdk-golang/azure/models"
 	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
 	"k8s.io/apimachinery/pkg/types"
 	"reflect"
 	"sigs.k8s.io/controller-runtime/pkg/client"
+	"strconv"
+	"strings"
 )
 
 func (r *DjobReconciler) submit(instance *databricksv1alpha1.Djob) error {
@@ -71,6 +71,13 @@ func (r *DjobReconciler) submit(instance *databricksv1alpha1.Djob) error {
 		}
 		instance.ObjectMeta.SetOwnerReferences(references)
 	}
+
+	if hash, err := hashstructure.Hash(instance.Spec, nil); err == nil {
+		instance.ObjectMeta.SetAnnotations(map[string]string{instance.GetName(): strconv.FormatUint(hash, 10)})
+	} else {
+		r.Log.Info(fmt.Sprintf("Failed to hash the Spec for job %s", instance.GetName()))
+	}
+
 	jobSettings := databricksv1alpha1.ToDatabricksJobSettings(instance.Spec)
 	job, err := r.createJob(jobSettings)
 
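For reference, a minimal standalone sketch of what the added annotation logic computes: hashstructure.Hash folds the whole Spec into a uint64, and strconv.FormatUint turns it into the annotation value keyed by the job name. The jobSpec type and the sample values below are hypothetical stand-ins for databricksv1alpha1.DjobSpec, not part of this change.

package main

import (
	"fmt"
	"strconv"

	"github.com/mitchellh/hashstructure"
)

// jobSpec is a hypothetical stand-in for databricksv1alpha1.DjobSpec.
type jobSpec struct {
	Name           string
	TimeoutSeconds int32
	MaxRetries     int32
}

func main() {
	spec := jobSpec{Name: "sample-djob", TimeoutSeconds: 600, MaxRetries: 3}

	// Hash the whole spec; any field change produces a different value.
	hash, err := hashstructure.Hash(spec, nil)
	if err != nil {
		panic(err)
	}

	// This string is what submit stores as the annotation value.
	fmt.Println(strconv.FormatUint(hash, 10))
}

Comparing a single stored hash is cheaper than the field-by-field json.Marshal/reflect.DeepEqual comparison the removed code performed, and it avoids the extra Jobs().Get call on every check.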
@@ -124,86 +131,36 @@ func (r *DjobReconciler) refresh(instance *databricksv1alpha1.Djob) error {
 }
 
 /*
-IsDJobUpdated is a method to check if the cluster has the latest version of a certain Djob
+IsDJobUpdated checks if the cluster has the latest version of a certain Djob
 */
 func (r *DjobReconciler) IsDJobUpdated(instance *databricksv1alpha1.Djob) bool {
-	// func (r *DjobReconciler) checkIdentity(oldDJob, newDJob *[]byte) bool {
-	jobID := instance.Status.JobStatus.JobID
-	jobExisting, err := r.APIClient.Jobs().Get(jobID)
-	if err != nil {
-		if strings.Contains(err.Error(), "does not exist") {
-			return true
-		}
-		return true
-	}
-	newDJob := []interface{}{}
-	ExistingDJob := []interface{}{}
 
-	if instance.Spec.NotebookTask != nil {
-		v, _ := json.Marshal(instance.Spec.NotebookTask)
-		newDJob = append(newDJob, v)
-	}
-	if instance.Spec.SparkJarTask != nil {
-		v, _ := json.Marshal(instance.Spec.SparkJarTask)
-		newDJob = append(newDJob, v)
+	currentAnnotation := instance.ObjectMeta.GetAnnotations()[instance.GetName()]
+	var updatedHash uint64
+	if returnUpdatedHash, err := hashstructure.Hash(instance.Spec, nil); err != nil {
+		r.Log.Info(fmt.Sprintf("Failed to hash the Spec for job %s", instance.GetName()))
+	} else {
+		updatedHash = returnUpdatedHash
 	}
 
-	if instance.Spec.SparkPythonTask != nil {
-		v, _ := json.Marshal(instance.Spec.SparkPythonTask)
-		newDJob = append(newDJob, v)
-	}
+	// jobID := instance.Status.JobStatus.JobID
+	// jobExisting, err := r.APIClient.Jobs().Get(jobID)
+	// if err != nil {
+	// 	if strings.Contains(err.Error(), "does not exist") {
+	// 		return true
+	// 	}
+	// 	return true
+	// }
 
-	if instance.Spec.SparkSubmitTask != nil {
-		v, _ := json.Marshal(instance.Spec.SparkSubmitTask)
-		newDJob = append(newDJob, v)
-	}
+	// hash2, err2 := hashstructure.Hash(jobExisting.Settings, nil)
+	// if err2 != nil {
+	// 	panic(err2)
+	// }
+	// r.Log.Info(fmt.Sprintf("Hashs %v", hash2))
 
-	if instance.Spec.NewCluster != nil {
-		v, _ := json.Marshal(instance.Spec.NewCluster)
-		newDJob = append(newDJob, v)
-	}
+	// r.Log.Info(fmt.Sprintf("2 object old %v and new %v", jobExisting.Settings, instance.Spec))
 
-	if instance.Spec.Schedule != nil {
-		v, _ := json.Marshal(instance.Spec.Schedule)
-		newDJob = append(newDJob, v)
-	}
-	newDJob = append(newDJob, instance.Spec.TimeoutSeconds)
-	newDJob = append(newDJob, instance.Spec.MaxRetries)
-
-	////////////////////////////////////////////////
-
-	if jobExisting.Settings.NotebookTask != nil {
-		v, _ := json.Marshal(jobExisting.Settings.NotebookTask)
-		ExistingDJob = append(ExistingDJob, v)
-	}
-	if jobExisting.Settings.SparkJarTask != nil {
-		v, _ := json.Marshal(jobExisting.Settings.SparkJarTask)
-		ExistingDJob = append(ExistingDJob, v)
-	}
-
-	if jobExisting.Settings.SparkPythonTask != nil {
-		v, _ := json.Marshal(jobExisting.Settings.SparkPythonTask)
-		ExistingDJob = append(ExistingDJob, v)
-	}
-
-	if jobExisting.Settings.SparkSubmitTask != nil {
-		v, _ := json.Marshal(jobExisting.Settings.SparkSubmitTask)
-		ExistingDJob = append(ExistingDJob, v)
-	}
-
-	if jobExisting.Settings.NewCluster != nil {
-		v, _ := json.Marshal(jobExisting.Settings.NewCluster)
-		ExistingDJob = append(ExistingDJob, v)
-	}
-
-	if jobExisting.Settings.Schedule != nil {
-		v, _ := json.Marshal(jobExisting.Settings.Schedule)
-		ExistingDJob = append(ExistingDJob, v)
-	}
-
-	ExistingDJob = append(ExistingDJob, jobExisting.Settings.TimeoutSeconds)
-	ExistingDJob = append(ExistingDJob, jobExisting.Settings.MaxRetries)
-	return reflect.DeepEqual(ExistingDJob, newDJob)
+	return currentAnnotation == strconv.FormatUint(updatedHash, 10)
 }
 
 func (r *DjobReconciler) delete(instance *databricksv1alpha1.Djob) error {
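To make the new comparison concrete, here is a small self-contained sketch of the check IsDJobUpdated now performs: hash the current Spec and compare it against the value stored under the job-name annotation. specUpToDate and the inline spec struct are hypothetical; the real method reads instance.ObjectMeta and instance.Spec.

package main

import (
	"fmt"
	"strconv"

	"github.com/mitchellh/hashstructure"
)

// specUpToDate mirrors IsDJobUpdated: the annotation stored under the job's
// name must equal the hash of the spec as it stands now.
func specUpToDate(annotations map[string]string, name string, spec interface{}) bool {
	var hash uint64
	if h, err := hashstructure.Hash(spec, nil); err == nil {
		hash = h
	}
	return annotations[name] == strconv.FormatUint(hash, 10)
}

func main() {
	type spec struct{ MaxRetries int32 }

	submitted := spec{MaxRetries: 3}
	annotations := map[string]string{}
	if h, err := hashstructure.Hash(submitted, nil); err == nil {
		annotations["sample-djob"] = strconv.FormatUint(h, 10) // what submit recorded
	}

	fmt.Println(specUpToDate(annotations, "sample-djob", submitted))           // true: unchanged
	fmt.Println(specUpToDate(annotations, "sample-djob", spec{MaxRetries: 5})) // false: spec drifted
}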
@@ -243,6 +200,19 @@ func (r *DjobReconciler) createJob(jobSettings dbmodels.JobSettings) (job dbmode
 	return job, err
 }
 
+// UpdateHash stores the hash of the current Spec as an annotation on the Djob
+func (r *DjobReconciler) UpdateHash(instance *databricksv1alpha1.Djob) error {
+	hash, err := hashstructure.Hash(instance.Spec, nil)
+	if err != nil {
+		return err
+	}
+
+	delete(instance.GetAnnotations(), instance.GetName())
+	instance.ObjectMeta.SetAnnotations(map[string]string{instance.GetName(): strconv.FormatUint(hash, 10)})
+
+	return r.Update(context.Background(), instance)
+}
+
 func (r *DjobReconciler) reset(instance *databricksv1alpha1.Djob) error {
 	r.Log.Info(fmt.Sprintf("Reset job %s", instance.GetName()))
 	jobSettings := databricksv1alpha1.ToDatabricksJobSettings(instance.Spec)
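One plausible way a caller could wire the two helpers together (this reconcile-side glue is not part of this commit; ensureJobCurrent is a hypothetical method name, while reset, IsDJobUpdated and UpdateHash are the methods defined in this file, and the file's existing imports are assumed):

// Hypothetical glue for the reconcile loop; only a sketch of intended usage.
func (r *DjobReconciler) ensureJobCurrent(instance *databricksv1alpha1.Djob) error {
	if r.IsDJobUpdated(instance) {
		return nil // stored hash still matches the Spec, nothing to do
	}
	// The Spec drifted from the recorded hash: recreate the job, then persist the new hash.
	if err := r.reset(instance); err != nil {
		return err
	}
	return r.UpdateHash(instance)
}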