2 changes: 1 addition & 1 deletion Autoencoder/main.swift
@@ -94,7 +94,7 @@ struct Autoencoder: Layer {
activation: tanh)

@differentiable
-func call(_ input: Input) -> Output {
+func callAsFunction(_ input: Input) -> Output {
let encoder = input.sequenced(through: encoder1, encoder2, encoder3, encoder4)
return encoder.sequenced(through: decoder1, decoder2, decoder3, decoder4)
}
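
The `call(_:)` requirement on `Layer` was renamed to `callAsFunction(_:)` to adopt Swift's callable-value sugar (SE-0253), so a layer instance is still applied with ordinary call syntax. Below is a minimal sketch of the pattern, assuming a Swift for TensorFlow toolchain from this era; the `TinyModel` type and its shapes are illustrative and not part of this change:

```swift
import TensorFlow

// Illustrative only: callAsFunction(_:) is what lets a Layer value be
// applied with ordinary call syntax, e.g. `model(input)`.
struct TinyModel: Layer {
    var dense = Dense<Float>(inputSize: 4, outputSize: 2, activation: relu)

    @differentiable
    func callAsFunction(_ input: Tensor<Float>) -> Tensor<Float> {
        return dense(input)  // dense is itself invoked through callAsFunction
    }
}

let model = TinyModel()
let output = model(Tensor<Float>(zeros: [1, 4]))  // forwards to callAsFunction(_:)
```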
23 changes: 23 additions & 0 deletions CIFAR/Data.swift
@@ -29,9 +29,32 @@ func downloadCIFAR10IfNotPresent(to directory: String = ".") {
}
}

extension Tensor where Scalar : _TensorFlowDataTypeCompatible {
public var _tfeTensorHandle: _AnyTensorHandle {
TFETensorHandle(_owning: handle._cTensorHandle)
}
}

struct Example: TensorGroup {
var label: Tensor<Int32>
var data: Tensor<Float>

init(label: Tensor<Int32>, data: Tensor<Float>) {
self.label = label
self.data = data
}

public init<C: RandomAccessCollection>(
_handles: C
) where C.Element: _AnyTensorHandle {
precondition(_handles.count == 2)
let labelIndex = _handles.startIndex
let dataIndex = _handles.index(labelIndex, offsetBy: 1)
label = Tensor<Int32>(handle: TensorHandle<Int32>(handle: _handles[labelIndex]))
data = Tensor<Float>(handle: TensorHandle<Float>(handle: _handles[dataIndex]))
}

public var _tensorHandles: [_AnyTensorHandle] { [label._tfeTensorHandle, data._tfeTensorHandle] }
}
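
The `TensorGroup` conformance added here (the handle-based initializer plus the `_tensorHandles` property) is what allows `Example` to serve as the element type of a `Dataset`. A hypothetical usage sketch follows, assuming the Swift for TensorFlow `Dataset` API of this era; the placeholder tensors stand in for real CIFAR-10 data:

```swift
// Hypothetical sketch: slice 100 placeholder examples into a Dataset and
// iterate it in batches. Shapes and values are illustrative only.
let labels = Tensor<Int32>(zeros: [100])
let images = Tensor<Float>(zeros: [100, 32, 32, 3])
let dataset = Dataset(elements: Example(label: labels, data: images))
for batch in dataset.batched(32) {
    print(batch.label.shape, batch.data.shape)  // roughly [32] and [32, 32, 32, 3]
}
```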

// Each CIFAR data file is provided as a Python pickle of NumPy arrays
4 changes: 2 additions & 2 deletions CIFAR/Models.swift
@@ -29,7 +29,7 @@ struct PyTorchModel: Layer {
var dense3 = Dense<Float>(inputSize: 84, outputSize: 10, activation: identity)

@differentiable
-func call(_ input: Input) -> Output {
+func callAsFunction(_ input: Input) -> Output {
let convolved = input.sequenced(through: conv1, pool1, conv2, pool2)
return convolved.sequenced(through: flatten, dense1, dense2, dense3)
}
@@ -54,7 +54,7 @@ struct KerasModel: Layer {
var dense2 = Dense<Float>(inputSize: 512, outputSize: 10, activation: identity)

@differentiable
-func call(_ input: Input) -> Output {
+func callAsFunction(_ input: Input) -> Output {
let conv1 = input.sequenced(through: conv1a, conv1b, pool1, dropout1)
let conv2 = conv1.sequenced(through: conv2a, conv2b, pool2, dropout2)
return conv2.sequenced(through: flatten, dense1, dropout3, dense2)
6 changes: 3 additions & 3 deletions CIFAR/ResNet.swift
@@ -37,7 +37,7 @@ struct Conv2DBatchNorm: Layer {
}

@differentiable
-func call(_ input: Input) -> Output {
+func callAsFunction(_ input: Input) -> Output {
return input.sequenced(through: conv, norm)
}
}
@@ -68,7 +68,7 @@ struct BasicBlock: Layer {
}

@differentiable
-func call(_ input: Input) -> Output {
+func callAsFunction(_ input: Input) -> Output {
let blocksReduced = blocks.differentiableReduce(input) { last, layer in
relu(layer(last))
}
@@ -97,7 +97,7 @@ struct ResNet: Layer {
var classifier = Dense<Float>(inputSize: 64, outputSize: 10, activation: softmax)

@differentiable
-func call(_ input: Input) -> Output {
+func callAsFunction(_ input: Input) -> Output {
let tmp = relu(inputLayer(input))
let convolved = tmp.sequenced(through: basicBlock1, basicBlock2, basicBlock3)
return convolved.sequenced(through: averagePool, flatten, classifier)
6 changes: 3 additions & 3 deletions CIFAR/WideResNet.swift
@@ -41,7 +41,7 @@ struct BatchNormConv2DBlock: Layer {
}

@differentiable
-func call(_ input: Input) -> Output {
+func callAsFunction(_ input: Input) -> Output {
let firstLayer = conv1(relu(norm1(input)))
return conv2(relu(norm2(firstLayer)))
}
@@ -87,7 +87,7 @@ struct WideResNetBasicBlock: Layer {
}

@differentiable
-func call(_ input: Input) -> Output {
+func callAsFunction(_ input: Input) -> Output {
let blocksReduced = blocks.differentiableReduce(input) { last, layer in
relu(layer(last))
}
@@ -126,7 +126,7 @@ struct WideResNet: Layer {
}

@differentiable
-func call(_ input: Input) -> Output {
+func callAsFunction(_ input: Input) -> Output {
let inputLayer = input.sequenced(through: l1, l2, l3, l4)
let finalNorm = relu(norm(inputLayer))
return finalNorm.sequenced(through: avgPool, flatten, classifier)
2 changes: 1 addition & 1 deletion Catch/README.md
@@ -23,5 +23,5 @@ installed. Make sure you've added the correct version of `swift` to your path.
To train the model, run:

```
-swift -O catch.swift
+swift -O main.swift
```
4 changes: 2 additions & 2 deletions Catch/main.swift
@@ -53,7 +53,7 @@ struct Model: Layer {
generator: &rng)

@differentiable
-func call(_ input: Input) -> Output {
+func callAsFunction(_ input: Input) -> Output {
return input.sequenced(through: layer1, layer2)
}
}
@@ -83,7 +83,7 @@ extension CatchAgent {
let (ŷ, backprop) = model.appliedForBackpropagation(to: x)
let maxIndex = ŷ.argmax().scalarized()

-let 𝛁loss = -log(Tensor(ŷ.max())).broadcast(like: ŷ) * previousReward
+let 𝛁loss = -log(Tensor<Float>(ŷ.max())).broadcasted(like: ŷ) * previousReward
let (𝛁model, _) = backprop(𝛁loss)
optimizer.update(&model.allDifferentiableVariables, along: 𝛁model)

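Two small API updates land on the gradient line above: the scalar produced by `ŷ.max()` is now wrapped explicitly as `Tensor<Float>`, and `broadcast(like:)` becomes `broadcasted(like:)`. A minimal sketch of what `broadcasted(like:)` does, with purely illustrative values:

```swift
// Illustrative only: broadcasted(like:) expands a tensor to the shape of another.
let logits = Tensor<Float>([0.1, 0.7, 0.2])
let scalarLoss = Tensor<Float>(2.0)                  // shape []
let expanded = scalarLoss.broadcasted(like: logits)  // shape [3], every element 2.0
print(expanded)
```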
2 changes: 1 addition & 1 deletion Gym/CartPole/main.swift
@@ -48,7 +48,7 @@ struct Net: Layer {
}

@differentiable
-func call(_ input: Input) -> Output {
+func callAsFunction(_ input: Input) -> Output {
return input.sequenced(through: l1, l2)
}
}
2 changes: 1 addition & 1 deletion MNIST/main.swift
@@ -64,7 +64,7 @@ struct Classifier: Layer {
var layer1b = Dense<Float>(inputSize: 128, outputSize: 10, activation: softmax)

@differentiable
-func call(_ input: Input) -> Output {
+func callAsFunction(_ input: Input) -> Output {
let convolved = input.sequenced(through: conv1a, conv1b, pool1)
return convolved.sequenced(through: dropout1a, flatten, layer1a, dropout1b, layer1b)
}
6 changes: 3 additions & 3 deletions MiniGo/Models/GoModel.swift
@@ -61,7 +61,7 @@ struct ConvBN: Layer {
}

@differentiable
-func call(_ input: Tensor<Float>) -> Tensor<Float> {
+func callAsFunction(_ input: Tensor<Float>) -> Tensor<Float> {
return norm(conv(input))
}
}
@@ -90,7 +90,7 @@ struct ResidualIdentityBlock: Layer {
}

@differentiable
-func call(_ input: Tensor<Float>) -> Tensor<Float> {
+func callAsFunction(_ input: Tensor<Float>) -> Tensor<Float> {
var tmp = relu(layer1(input))
tmp = layer2(tmp)
return relu(tmp + input)
@@ -158,7 +158,7 @@ public struct GoModel: Layer {
}

@differentiable(wrt: (self, input), vjp: _vjpCall)
-public func call(_ input: Tensor<Float>) -> GoModelOutput {
+public func callAsFunction(_ input: Tensor<Float>) -> GoModelOutput {
let batchSize = input.shape[0]
var output = relu(initialConv(input))

23 changes: 23 additions & 0 deletions ResNet/Data.swift
@@ -32,9 +32,32 @@ func downloadCIFAR10IfNotPresent(to directory: String = ".") {
}
}

extension Tensor where Scalar : _TensorFlowDataTypeCompatible {
public var _tfeTensorHandle: _AnyTensorHandle {
TFETensorHandle(_owning: handle._cTensorHandle)
}
}

struct Example: TensorGroup {
var label: Tensor<Int32>
var data: Tensor<Float>

init(label: Tensor<Int32>, data: Tensor<Float>) {
self.label = label
self.data = data
}

public init<C: RandomAccessCollection>(
_handles: C
) where C.Element: _AnyTensorHandle {
precondition(_handles.count == 2)
let labelIndex = _handles.startIndex
let dataIndex = _handles.index(labelIndex, offsetBy: 1)
label = Tensor<Int32>(handle: TensorHandle<Int32>(handle: _handles[labelIndex]))
data = Tensor<Float>(handle: TensorHandle<Float>(handle: _handles[dataIndex]))
}

public var _tensorHandles: [_AnyTensorHandle] { [label._tfeTensorHandle, data._tfeTensorHandle] }
}

// Each CIFAR data file is provided as a Python pickle of NumPy arrays
22 changes: 11 additions & 11 deletions ResNet/ResNet50.swift
@@ -37,7 +37,7 @@ struct ConvBN: Layer {
}

@differentiable
-func call(_ input: Input) -> Output {
+func callAsFunction(_ input: Input) -> Output {
return input.sequenced(through: conv, norm)
}
}
@@ -65,7 +65,7 @@ struct ResidualBasicBlock: Layer {
}

@differentiable
-func call(_ input: Input) -> Output {
+func callAsFunction(_ input: Input) -> Output {
return layer2(relu(layer1(input)))
}
}
@@ -94,7 +94,7 @@ struct ResidualBasicBlockShortcut: Layer {
}

@differentiable
-func call(_ input: Input) -> Output {
+func callAsFunction(_ input: Input) -> Output {
return layer2(relu(layer1(input))) + shortcut(input)
}
}
@@ -127,7 +127,7 @@ struct ResidualConvBlock: Layer {
}

@differentiable
-func call(_ input: Input) -> Output {
+func callAsFunction(_ input: Input) -> Output {
let tmp = relu(layer2(relu(layer1(input))))
return relu(layer3(tmp) + shortcut(input))
}
@@ -150,7 +150,7 @@ struct ResidualIdentityBlock: Layer {
}

@differentiable
-func call(_ input: Input) -> Output {
+func callAsFunction(_ input: Input) -> Output {
let tmp = relu(layer2(relu(layer1(input))))
return relu(layer3(tmp) + input)
}
@@ -175,7 +175,7 @@ struct ResidualIdentityBlockStack: Layer {
}

@differentiable
-func call(_ input: Input) -> Output {
+func callAsFunction(_ input: Input) -> Output {
return input.sequenced(through: block1, block2, block3, block4, block5)
}
}
@@ -218,7 +218,7 @@ struct ResNet18: Layer {
}

@differentiable
-func call(_ input: Input) -> Output {
+func callAsFunction(_ input: Input) -> Output {
let inputLayer = maxPool(relu(l1(input)))
let level2 = inputLayer.sequenced(through: l2a, l2b)
let level3 = level2.sequenced(through: l3a, l3b)
@@ -274,7 +274,7 @@ struct ResNet34: Layer {
}

@differentiable
-func call(_ input: Input) -> Output {
+func callAsFunction(_ input: Input) -> Output {
let inputLayer = maxPool(relu(l1(input)))
let level2 = inputLayer.sequenced(through: l2a, l2b, l2c)
let level3 = level2.sequenced(through: l3a, l3b, l3c, l3d)
@@ -326,7 +326,7 @@ struct ResNet50: Layer {
}

@differentiable
-func call(_ input: Input) -> Output {
+func callAsFunction(_ input: Input) -> Output {
let inputLayer = maxPool(relu(l1(input)))
let level2 = inputLayer.sequenced(through: l2a, l2b, l2c)
let level3 = level2.sequenced(through: l3a, l3b, l3c, l3d)
@@ -383,7 +383,7 @@ struct ResNet101: Layer {
}

@differentiable
-func call(_ input: Input) -> Output {
+func callAsFunction(_ input: Input) -> Output {
let inputLayer = maxPool(relu(l1(input)))
let level2 = inputLayer.sequenced(through: l2a, l2b, l2c)
let level3 = level2.sequenced(through: l3a, l3b, l3c, l3d)
@@ -441,7 +441,7 @@ struct ResNet152: Layer {
}

@differentiable
-func call(_ input: Input) -> Output {
+func callAsFunction(_ input: Input) -> Output {
let inputLayer = maxPool(relu(l1(input)))
let level2 = inputLayer.sequenced(through: l2a, l2b, l2c)
let level3 = level2.sequenced(through: l3a, l3b, l3c, l3d)
12 changes: 6 additions & 6 deletions ResNet/ResNetV2.swift
@@ -38,7 +38,7 @@ struct Conv2DBatchNorm: Layer {
}

@differentiable
-func call(_ input: Input) -> Output {
+func callAsFunction(_ input: Input) -> Output {
return input.sequenced(through: conv, norm)
}
}
@@ -60,7 +60,7 @@ struct BatchNormConv2D: Layer {
}

@differentiable
-func call(_ input: Input) -> Output {
+func callAsFunction(_ input: Input) -> Output {
return conv(relu(norm(input)))
}
}
@@ -88,7 +88,7 @@ struct PreActivatedResidualBasicBlock: Layer {
}

@differentiable
-func call(_ input: Input) -> Output {
+func callAsFunction(_ input: Input) -> Output {
return input.sequenced(through: layer1, layer2)
}
}
@@ -117,7 +117,7 @@ struct PreActivatedResidualBasicBlockShortcut: Layer {
}

@differentiable
-func call(_ input: Input) -> Output {
+func callAsFunction(_ input: Input) -> Output {
return input.sequenced(through: layer1, layer2) + shortcut(input)
}
}
@@ -162,7 +162,7 @@ struct PreActivatedResNet18: Layer {
}

@differentiable
-func call(_ input: Input) -> Output {
+func callAsFunction(_ input: Input) -> Output {
let inputLayer = input.sequenced(through: l1, maxPool)
let level2 = inputLayer.sequenced(through: l2a, l2b)
let level3 = level2.sequenced(through: l3a, l3b)
@@ -221,7 +221,7 @@ struct PreActivatedResNet34: Layer {
}

@differentiable
-func call(_ input: Input) -> Output {
+func callAsFunction(_ input: Input) -> Output {
let inputLayer = input.sequenced(through: l1, maxPool)
let level2 = inputLayer.sequenced(through: l2a, l2b, l2c)
let level3 = level2.sequenced(through: l3a, l3b, l3c, l3d)