
Commit 2b653fd
cherry-pick dot fixes from en branch (#28)
frankfliu authored Jul 5, 2023
1 parent d062e55 commit 2b653fd
Showing 3 changed files with 3 additions and 3 deletions.
2 changes: 1 addition & 1 deletion chapter_linear-networks/linear-regression-scratch.ipynb
@@ -70,7 +70,7 @@
 "// Generate y = X w + b + noise\n",
 "public DataPoints syntheticData(NDManager manager, NDArray w, float b, int numExamples) {\n",
 "    NDArray X = manager.randomNormal(new Shape(numExamples, w.size()));\n",
-"    NDArray y = X.dot(w).add(b);\n",
+"    NDArray y = X.matMul(w).add(b);\n",
 "    // Add noise\n",
 "    y = y.add(manager.randomNormal(0, 0.01f, y.getShape(), DataType.FLOAT32));\n",
 "    return new DataPoints(X, y);\n",
2 changes: 1 addition & 1 deletion utils/DataPoints.java
@@ -24,7 +24,7 @@ public NDArray getY() {
     // Generate y = X w + b + noise
     public static DataPoints syntheticData(NDManager manager, NDArray w, float b, int numExamples) {
         NDArray X = manager.randomNormal(new Shape(numExamples, w.size()));
-        NDArray y = X.dot(w).add(b);
+        NDArray y = X.matMul(w).add(b);
         // Add noise
         y = y.add(manager.randomNormal(0, 0.01f, y.getShape(), DataType.FLOAT32));
         return new DataPoints(X, y);
2 changes: 1 addition & 1 deletion utils/Training.java
@@ -16,7 +16,7 @@
 class Training {

     public static NDArray linreg(NDArray X, NDArray w, NDArray b) {
-        return X.dot(w).add(b);
+        return X.matMul(w).add(b);
     }

     public static NDArray squaredLoss(NDArray yHat, NDArray y) {
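All three hunks make the same one-line substitution: the matrix-vector product in y = X w + b is now computed with NDArray.matMul instead of NDArray.dot. A minimal standalone sketch of the updated call, for illustration only (the class name, weight values, and bias below are hypothetical and not part of this commit):

import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.Shape;

public class MatMulExample {
    public static void main(String[] args) {
        try (NDManager manager = NDManager.newBaseManager()) {
            // Hypothetical true parameters for y = X w + b
            NDArray w = manager.create(new float[] {2f, -3.4f});
            NDArray X = manager.randomNormal(new Shape(5, 2));
            // matMul computes the (5, 2) x (2,) matrix-vector product -> shape (5,)
            NDArray y = X.matMul(w).add(4.2f);
            System.out.println(y.getShape());
        }
    }
}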
