fix(control-plane): Fixed a bug in servings config generation
isala404 committed Mar 15, 2022
1 parent 9154586 commit d802f0a
Showing 5 changed files with 35 additions and 17 deletions.
control-plane/config/samples/lazykoala_v1alpha1_inspector.yaml: 12 additions & 8 deletions
@@ -4,8 +4,9 @@ metadata:
   name: service-1
   namespace: default
 spec:
-  deploymentRef: service-1-05588114
-  serviceRef: service-1-05588114
+  deploymentRef: service-1-62bf5632
+  serviceRef: service-1-62bf5632
+  modelName: service-1-62bf5632
   namespace: default
 ---
 apiVersion: lazykoala.isala.me/v1alpha1
@@ -14,8 +15,9 @@ metadata:
   name: service-2
   namespace: default
 spec:
-  deploymentRef: service-2-05588114
-  serviceRef: service-2-05588114
+  deploymentRef: service-2-62bf5632
+  serviceRef: service-2-62bf5632
+  modelName: service-1-62bf5632
   namespace: default
 ---
 apiVersion: lazykoala.isala.me/v1alpha1
@@ -24,8 +26,9 @@ metadata:
   name: service-3
   namespace: default
 spec:
-  deploymentRef: service-3-05588114
-  serviceRef: service-3-05588114
+  deploymentRef: service-3-62bf5632
+  serviceRef: service-3-62bf5632
+  modelName: service-1-62bf5632
   namespace: default
 ---
 apiVersion: lazykoala.isala.me/v1alpha1
@@ -34,8 +37,9 @@ metadata:
   name: service-4
   namespace: default
 spec:
-  deploymentRef: service-4-05588114
-  serviceRef: service-4-05588114
+  deploymentRef: service-4-62bf5632
+  serviceRef: service-4-62bf5632
+  modelName: service-1-62bf5632
   namespace: default
 ---
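Each Inspector sample now references the 62bf5632 revision and sets a modelName next to its deployment and service refs. The sketch below is a hypothetical reading of the spec fields these samples imply; the field names are taken from the YAML above and from the inspector.Spec usage in the controller, but the actual type definition is not part of this commit.

// Hypothetical sketch of the Inspector spec fields implied by the samples above.
type InspectorSpec struct {
	DeploymentRef string `json:"deploymentRef"`
	ServiceRef    string `json:"serviceRef"`
	ModelName     string `json:"modelName"` // now set in these samples
	Namespace     string `json:"namespace"`
}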

control-plane/controllers/inspector_controller.go: 3 additions & 2 deletions
@@ -20,11 +20,11 @@ import (
 	"bytes"
 	"context"
 	"fmt"
-	"gopkg.in/yaml.v3"
 	appsv1 "k8s.io/api/apps/v1"
 	"text/template"
 	"time"
 
+	"gopkg.in/yaml.v3"
 	v1 "k8s.io/api/core/v1"
 	"k8s.io/apimachinery/pkg/runtime"
 	"k8s.io/apimachinery/pkg/types"
@@ -301,10 +301,11 @@ func (r *InspectorReconciler) configureSherlock(ctx context.Context, inspector *
 			ModelName: inspector.Spec.ModelName,
 			Namespace: inspector.Spec.Namespace,
 		}
+		modelsList[inspector.Spec.ModelName] = true
 	} else {
 		if _, ok := sherlockServiceList[inspector.Spec.DeploymentRef]; ok {
 			delete(sherlockServiceList, inspector.Spec.DeploymentRef)
-			delete(modelsList, inspector.Spec.DeploymentRef)
+			delete(modelsList, inspector.Spec.ModelName)
 		}
 	}
 
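The core of the fix appears to be in configureSherlock: when an Inspector is active, its model is now recorded in modelsList under its ModelName, and when an Inspector is removed the entry is deleted by ModelName rather than by DeploymentRef, so the generated serving config no longer keeps stale models (or drops live ones) whenever the two names differ. The imports (text/template, yaml.v3) suggest the map is rendered into a config file; the sketch below only illustrates that idea, assuming a TensorFlow Serving model_config_list and hypothetical names such as modelsList and servingConfigTmpl, neither of which is the controller's actual template.

// Hypothetical sketch: rendering a set of model names into a TensorFlow Serving
// model_config_list with text/template. Not the controller's real template.
package main

import (
	"os"
	"text/template"
)

const servingConfigTmpl = `model_config_list {
{{- range $name, $enabled := . }}
  config {
    name: "{{ $name }}"
    base_path: "/models/{{ $name }}"
    model_platform: "tensorflow"
  }
{{- end }}
}
`

func main() {
	// Keyed by ModelName, matching the fixed delete(modelsList, inspector.Spec.ModelName).
	modelsList := map[string]bool{"service-1-62bf5632": true}

	tmpl := template.Must(template.New("serving").Parse(servingConfigTmpl))
	if err := tmpl.Execute(os.Stdout, modelsList); err != nil {
		panic(err)
	}
}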
gazer/deployment.yaml: 13 additions & 0 deletions
@@ -191,6 +191,19 @@ spec:
       requests:
         storage: 8Gi
 ---
+apiVersion: v1
+kind: Service
+metadata:
+  name: prometheus
+  namespace: lazy-koala
+spec:
+  selector:
+    app: prometheus
+  ports:
+    - protocol: TCP
+      port: 9090
+      targetPort: 9090
+---
 apiVersion: rbac.authorization.k8s.io/v1
 kind: ClusterRole
 metadata:
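The new Service exposes Prometheus inside the cluster as prometheus.lazy-koala.svc.cluster.local on port 9090, which is the host the PROMETHEUS_END_POINT variable added to the sherlock deployment below refers to. A minimal sketch of querying it through the Prometheus HTTP API follows; the query and the explicit :9090 port are assumptions, not taken from this commit.

// Hypothetical sketch: hitting the in-cluster Prometheus HTTP API exposed by the
// Service above. Only reachable from inside the cluster (or via a port-forward).
package main

import (
	"fmt"
	"io"
	"net/http"
	"net/url"
)

func main() {
	base := "http://prometheus.lazy-koala.svc.cluster.local:9090"
	q := url.Values{"query": {"up"}}

	resp, err := http.Get(base + "/api/v1/query?" + q.Encode())
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Println(string(body))
}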
sherlock/deployment.yaml: 6 additions & 6 deletions
@@ -17,15 +17,18 @@ spec:
       serviceAccountName: gke-workload-identity
       containers:
         - name: sherlock
+          imagePullPolicy: Always
           image: ghcr.io/mrsupiri/lazy-koala/sherlock:latest
           ports:
             - containerPort: 9898
               name: metrics
           env:
-            - name: END_POINT
-              value: "http://localhost:8501/v1/models/sherlock:predict"
+            - name: TENSORFLOW_END_POINT
+              value: "http://localhost:8501/v1/models"
             - name: POOL_DURATION
-              value: "1"
+              value: "60"
+            - name: PROMETHEUS_END_POINT
+              value: "http://prometheus.lazy-koala.svc.cluster.local"
           volumeMounts:
             - name: sherlock-config
               mountPath: /app/config
@@ -55,9 +58,6 @@ spec:
          ports:
            - containerPort: 8501
              name: http
-          env:
-            - name: MODEL_NAME
-              value: "sherlock"
          volumeMounts:
            - name: sherlock-config
              mountPath: /config
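The sherlock container no longer receives a single hard-coded END_POINT for the "sherlock" model; it now gets the TensorFlow Serving base URL, the in-cluster Prometheus endpoint, and a longer poll interval (POOL_DURATION 1 to 60), presumably so it can score more than one served model. Sherlock itself is written in Rust; the Go sketch below only illustrates how a per-model :predict URL might be derived from the new base variable, using a model name from the samples above.

// Hypothetical sketch: deriving a per-model predict URL from TENSORFLOW_END_POINT,
// replacing the old fixed ".../v1/models/sherlock:predict" endpoint.
package main

import (
	"fmt"
	"os"
)

func predictURL(modelName string) string {
	base := os.Getenv("TENSORFLOW_END_POINT") // e.g. "http://localhost:8501/v1/models"
	return fmt.Sprintf("%s/%s:predict", base, modelName)
}

func main() {
	fmt.Println(predictURL("service-1-62bf5632"))
}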
sherlock/src/inference.rs: 1 addition & 1 deletion
@@ -63,7 +63,7 @@ async fn query_model(service: &str, input: [[[f64; 1]; 9]; 10]) -> Result<f64, B
 }
 
 async fn calculate_anomaly_score(service: &str, args: &InferenceData) -> Result<(), Box<dyn std::error::Error>> {
-    println!("Querying {} model", service);
+    println!("Calculate anomaly score for {} using {}", service, &args.model_name);
     let input = build_telemetry_matrix(&service).await?;
     let score = query_model(&args.model_name, input).await?;
     ANOMLAY_GAUGE.with_label_values(&[service, &args.namespace]).set(score);
