Skip to content

Commit 32ef0e4

Browse files
cdxkerdensumesh
authored and committed
ops: cleanup collapse query logs, add sha tag to clustering script
1 parent 0bf5ffe commit 32ef0e4

File tree

2 files changed

+3
-3
lines changed

2 files changed

+3
-3
lines changed

.github/workflows/push-clustering-script.yml

+1
Original file line number | Diff line number | Diff line change
@@ -41,6 +41,7 @@ jobs:
4141
trieve/clickhouse-clustering
4242
tags: |
4343
type=raw,latest
44+
type=sha
4445
4546
- name: Build and push Docker image
4647
uses: docker/build-push-action@v5

docker/collapse-query-script/collapse_queries.py

+2-3
Original file line number | Diff line number | Diff line change
@@ -87,7 +87,7 @@ def set_dataset_last_collapsed(
8787
last_collapsed: datetime.datetime,
8888
):
8989
delete_dataset_last_collapsed(client, dataset_id)
90-
print("setting last collapsed for", dataset_id, last_collapsed)
90+
9191
client.insert(
9292
"last_collapsed_dataset",
9393
[
@@ -116,7 +116,6 @@ def collapse_queries(rows):
116116
elif row[1].startswith(cur_row[1]):
117117
# Check if the current row's timestamp is within 10 seconds of the previous row
118118
time_difference = (row[3] - cur_row[3]).total_seconds()
119-
print(time_difference)
120119
if time_difference <= 10:
121120
rows_to_be_deleted.append(cur_row)
122121
cur_row = row
@@ -156,7 +155,7 @@ def main():
156155

157156
last_collapsed = get_dataset_last_collapsed(client, dataset_id)
158157

159-
print("last collapsed", last_collapsed, "dataset_id", dataset_id)
158+
print("Collapsing dataset ", dataset_id, "from ", last_collapsed)
160159

161160
num_deleted = 0
162161

0 commit comments

Comments
 (0)