Skip to content

Commit a8e5111

Browse files
committed
fix: introduce constants.py
1 parent f89d24e commit a8e5111

File tree

8 files changed

+217
-175
lines changed

8 files changed

+217
-175
lines changed

src/common/constants.py

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,22 @@
1+
"""Shared constants used across modules to avoid duplicated string literals.
2+
3+
Placing these constants in a single module reduces duplicated literal occurrences
4+
and makes it easier to update canonical module paths in one place.
5+
"""
6+
7+
# Working-with-data module base
8+
WORKING_DATA_MODULE = 'dsl.c10_working_with_data'
9+
10+
# Common working-data suffixes used multiple times in the codebase
11+
E1004 = '.e1004_named_tuples'
12+
E1006 = '.e1006_cleaning'
13+
E1007 = '.e1007_manipulation'
14+
E1008 = '.e1008_rescaling'
15+
E1009 = '.e1009_dimensionality_reduction'
16+
17+
# Fully qualified working-data module constants
18+
WORKING_E1004 = WORKING_DATA_MODULE + E1004
19+
WORKING_E1006 = WORKING_DATA_MODULE + E1006
20+
WORKING_E1007 = WORKING_DATA_MODULE + E1007
21+
WORKING_E1008 = WORKING_DATA_MODULE + E1008
22+
WORKING_E1009 = WORKING_DATA_MODULE + E1009

src/data-scratch-amqp/data_scratch_amqp/strategies_library/dynamic_strategy.py

Lines changed: 162 additions & 142 deletions
Large diffs are not rendered by default.

src/data-scratch-library/dsl/c10_working_with_data/e01_working_with_data.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -238,8 +238,8 @@ def main2(path_to_stocks):
238238
ys1 = [x + random_normal() / 2 for x in xs]
239239
ys2 = [-x + random_normal() / 2 for x in xs]
240240

241-
logging.info("Correlation(xs, ys1) = %", correlation(xs, ys1))
242-
logging.info("Correlation(xs, ys2) = %", correlation(xs, ys2))
241+
logging.info("Correlation(xs, ys1) = %s", correlation(xs, ys1))
242+
logging.info("Correlation(xs, ys2) = %s", correlation(xs, ys2))
243243

244244
logging.info("Analyzing stocks...")
245245
_data = read_stocks_txt(path_to_stocks)

src/data-scratch-mqtt/data_scratch_mqtt/strategies_library/dynamic_strategy.py

Lines changed: 20 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -56,7 +56,7 @@ def get_all_library_functions():
5656
MATRICES_MODULE: ['shape', 'get_row', 'get_column', 'make_matrix', 'is_diagonal', 'make_identity_matrix', 'matrix_add', 'matrix_multiply', 'make_random_matrix'],
5757
CENTRAL_TENDENCY_MODULE: ['mean', 'median', 'quantile', 'mode'],
5858
DISPERSION_MODULE: ['data_range', 'de_mean', 'variance', 'standard_deviation', 'interquartile_range'],
59-
CORRELATION_MODULE: ['covariance', 'correlation', 'correlation_matrix'],
59+
CORRELATION_MODULE: ['covariance', 'correlation'],
6060
UNIFORM_MODULE: ['uniform_pd', 'uniform_cd'],
6161
NORMAL_MODULE: ['normal_pd', 'normal_cd', 'inverse_normal_cd'],
6262
BINOM_MODULE: ['bernoulli_trial', 'binomial', 'binom_pd', 'binom_cd', 'binom_pp'],
@@ -70,21 +70,25 @@ def get_all_library_functions():
7070

7171
# Simple helper modules for other grouped functions
7272
ML_MODULE = 'dsl.c11_machine_learning.machine_learning'
73-
WORKING_DATA_MODULE = 'dsl.c10_working_with_data'
74-
75-
# Common working-data suffixes used multiple times — extract to constants
76-
E1004 = '.e1004_named_tuples'
77-
E1006 = '.e1006_cleaning'
78-
E1007 = '.e1007_manipulation'
79-
E1008 = '.e1008_rescaling'
80-
E1009 = '.e1009_dimensionality_reduction'
81-
82-
# Fully qualified working-data module constants to avoid repeated concatenations
83-
WORKING_E1004 = WORKING_DATA_MODULE + E1004
84-
WORKING_E1006 = WORKING_DATA_MODULE + E1006
85-
WORKING_E1007 = WORKING_DATA_MODULE + E1007
86-
WORKING_E1008 = WORKING_DATA_MODULE + E1008
87-
WORKING_E1009 = WORKING_DATA_MODULE + E1009
73+
try:
74+
# Prefer shared constants module to reduce duplicated literals across files
75+
from common.constants import (
76+
WORKING_DATA_MODULE,
77+
WORKING_E1004,
78+
WORKING_E1006,
79+
WORKING_E1007,
80+
WORKING_E1008,
81+
WORKING_E1009,
82+
)
83+
except Exception:
84+
# Fallback to local definitions if the shared module isn't available in the
85+
# runtime environment (keeps this file robust for standalone use).
86+
WORKING_DATA_MODULE = 'dsl.c10_working_with_data'
87+
WORKING_E1004 = WORKING_DATA_MODULE + '.e1004_named_tuples'
88+
WORKING_E1006 = WORKING_DATA_MODULE + '.e1006_cleaning'
89+
WORKING_E1007 = WORKING_DATA_MODULE + '.e1007_manipulation'
90+
WORKING_E1008 = WORKING_DATA_MODULE + '.e1008_rescaling'
91+
WORKING_E1009 = WORKING_DATA_MODULE + '.e1009_dimensionality_reduction'
8892

8993
for module_const, fnames in grouped.items():
9094
for fn in fnames:

src/data-scratch-node-library/ts-data-scratch/linear_algebra.ts

Lines changed: 3 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -11,8 +11,7 @@ export function vector_add(v: Array<number>, w: Array<number>) {
1111

1212
const result: number[] = [];
1313

14-
for (let i = 0; i < v.length; i += 1) {
15-
const v_i = v[i];
14+
for (const [i, v_i] of v.entries()) {
1615
const w_i = w[i];
1716
result.push(v_i + w_i);
1817
}
@@ -23,8 +22,7 @@ export function vector_add(v: Array<number>, w: Array<number>) {
2322
export function vector_subtract(v: Array<number>, w: Array<number>) {
2423
//"""subtracts two vectors componentwise"""
2524
const result: number[] = [];
26-
for (let i = 0; i < v.length; i += 1) {
27-
const v_i = v[i];
25+
for (const [i, v_i] of v.entries()) {
2826
const w_i = w[i];
2927
result.push(v_i - w_i);
3028
}
@@ -69,8 +67,7 @@ export function vector_mean(vectors: Array<Array<number>>) {
6967
export function dot(v: Array<number>, w: Array<number>): number {
7068
//"""v_1 * w_1 + ... + v_n * w_n"""
7169
let result = 0;
72-
for (let i = 0; i < v.length; i += 1) {
73-
const v_i = v[i];
70+
for (const [i, v_i] of v.entries()) {
7471
const w_i = w[i];
7572
result += v_i * w_i;
7673
}

src/data-scratch-node-library/ts-data-scratch/stats.ts

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -10,8 +10,8 @@ export function bucketize(point: number, bucket_size: number): number {
1010

1111
export function Counter(array: Array<number>) {
1212
let count = new Map<string, number>();
13-
for (let i = 0; i < array.length; i += 1) {
14-
const val = array[i].toString();
13+
for (const v of array) {
14+
const val = v.toString();
1515
const prev = count.get(val) || 0;
1616
count.set(val, prev + 1);
1717
}
@@ -98,8 +98,7 @@ export function de_mean(x: Array<number>) {
9898
// translate x by subtracting its mean (so the result has mean 0)
9999
const x_bar = mean(x);
100100
const result: number[] = [];
101-
for (let i = 0; i < x.length; i += 1) {
102-
const x_i = x[i];
101+
for (const x_i of x) {
103102
result.push(x_i - x_bar);
104103
}
105104
return result

src/rest-scratch-flask/app/routes/clustering.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -225,7 +225,7 @@ def analyze_clustering():
225225

226226
# Validate data types
227227
if not isinstance(points, list):
228-
return jsonify({'error': 'data must be a list of points'}), 400
228+
return jsonify({'error': ERR_DATA_MUST_BE_LIST}), 400
229229

230230
if not isinstance(assignments, list):
231231
return jsonify({'error': 'assignments must be a list'}), 400
@@ -318,7 +318,7 @@ def find_optimal_k():
318318
for k in range(1, max_k + 1):
319319
k_errors = []
320320

321-
for run in range(runs_per_k):
321+
for _ in range(runs_per_k):
322322
assignments, means = k_means(points, k)
323323
error = squared_clustering_errors(points, assignments, means)
324324
k_errors.append(error)

src/rest-scratch-flask/app/routes/naive_bayes.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -98,7 +98,7 @@ def train_classifier():
9898
'message': 'Classifier trained successfully. Use /classify endpoint with the same training data to classify messages.'
9999
})
100100

101-
except Exception as e:
101+
except Exception:
102102
logger.exception('Training failed')
103103
return jsonify({'error': 'Training failed due to an internal error'}), 500
104104

@@ -165,7 +165,7 @@ def classify_message():
165165
'smoothing_parameter': smoothing
166166
})
167167

168-
except Exception as e:
168+
except Exception:
169169
# Log full exception server-side but return a generic error to client
170170
logger.exception('Classification failed')
171171
return jsonify({'error': 'Classification failed due to an internal error'}), 500
@@ -240,7 +240,7 @@ def batch_classify():
240240
'smoothing_parameter': smoothing
241241
})
242242

243-
except Exception as e:
243+
except Exception:
244244
logger.exception('Batch classification failed')
245245
return jsonify({'error': 'Batch classification failed due to an internal error'}), 500
246246

0 commit comments

Comments (0)