Skip to content
This repository was archived by the owner on Apr 23, 2025. It is now read-only.
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
19 changes: 19 additions & 0 deletions ResNet/Data.swift
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,25 @@ func downloadCIFAR10IfNotPresent(to directory: String = ".") {
struct Example: TensorGroup {
var label: Tensor<Int32>
var data: Tensor<Float>

/// Memberwise initializer: pairs a batch of labels with the corresponding image data.
init(label: Tensor<Int32>, data: Tensor<Float>) {
self.label = label
self.data = data
}

/// Rebuilds an `Example` from a flat collection of type-erased tensor handles,
/// as the `TensorGroup` protocol requires. The collection must hold exactly two
/// handles, in declaration order: the label handle first, then the data handle.
public init<C: RandomAccessCollection>(
    _handles: C
) where C.Element: _AnyTensorHandle {
    precondition(_handles.count == 2)
    let first = _handles.startIndex
    let second = _handles.index(after: first)
    label = Tensor<Int32>(handle: TensorHandle<Int32>(handle: _handles[first]))
    data = Tensor<Float>(handle: TensorHandle<Float>(handle: _handles[second]))
}

//public var _tensorHandles: [_AnyTensorHandle] { [label.handle.handle, data.handle.handle] }
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I feel this is a bug, but I'm not entirely sure. How else can the TensorGroup protocol be satisfied?

Copy link
Contributor

@rxwei rxwei Jun 6, 2019

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is a known regression introduced in tensorflow/swift-apis#165 and should be fixed soon. You can look at test cases in that PR to see how they satisfied the requirements.

// NOTE(review): returning an empty array does not actually expose the two
// handles; the intended `[label.handle.handle, data.handle.handle]` (see the
// commented-out line above) fails to compile because `handle` is internal —
// a known swift-apis regression (tensorflow/swift-apis#165). Restore the real
// implementation once the library fix lands.
// error: 'handle' is inaccessible due to 'internal' protection level
public var _tensorHandles: [_AnyTensorHandle] { [] }
}

// Each CIFAR data file is provided as a Python pickle of NumPy arrays
Expand Down
22 changes: 11 additions & 11 deletions ResNet/ResNet50.swift
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ struct ConvBN: Layer {
}

@differentiable
func callAsFunction(_ input: Input) -> Output {
    // Forward pass: convolution followed by batch normalization.
    // (Removed the stale pre-rename `func call` line left by the diff view,
    // which duplicated this declaration and would not compile.)
    return input.sequenced(through: conv, norm)
}
}
Expand Down Expand Up @@ -65,7 +65,7 @@ struct ResidualBasicBlock: Layer {
}

@differentiable
func callAsFunction(_ input: Input) -> Output {
    // Forward pass: conv-bn, ReLU, conv-bn (no shortcut in this variant).
    // (Removed the stale pre-rename `func call` line left by the diff view,
    // which duplicated this declaration and would not compile.)
    return layer2(relu(layer1(input)))
}
}
Expand Down Expand Up @@ -94,7 +94,7 @@ struct ResidualBasicBlockShortcut: Layer {
}

@differentiable
func callAsFunction(_ input: Input) -> Output {
    // Forward pass: main path (conv-bn, ReLU, conv-bn) plus a projection
    // shortcut added to the result.
    // (Removed the stale pre-rename `func call` line left by the diff view,
    // which duplicated this declaration and would not compile.)
    return layer2(relu(layer1(input))) + shortcut(input)
}
}
Expand Down Expand Up @@ -127,7 +127,7 @@ struct ResidualConvBlock: Layer {
}

@differentiable
func callAsFunction(_ input: Input) -> Output {
    // Bottleneck main path: three conv-bn layers with ReLU between them,
    // summed with a projection shortcut, then a final ReLU.
    // (Removed the stale pre-rename `func call` line left by the diff view,
    // which duplicated this declaration and would not compile.)
    let tmp = relu(layer2(relu(layer1(input))))
    return relu(layer3(tmp) + shortcut(input))
}
Expand All @@ -150,7 +150,7 @@ struct ResidualIdentityBlock: Layer {
}

@differentiable
func callAsFunction(_ input: Input) -> Output {
    // Bottleneck main path: three conv-bn layers with ReLU between them,
    // summed with the identity shortcut, then a final ReLU.
    // (Removed the stale pre-rename `func call` line left by the diff view,
    // which duplicated this declaration and would not compile.)
    let tmp = relu(layer2(relu(layer1(input))))
    return relu(layer3(tmp) + input)
}
Expand All @@ -175,7 +175,7 @@ struct ResidualIdentityBlockStack: Layer {
}

@differentiable
func callAsFunction(_ input: Input) -> Output {
    // Forward pass: five identity blocks applied in sequence.
    // (Removed the stale pre-rename `func call` line left by the diff view,
    // which duplicated this declaration and would not compile.)
    return input.sequenced(through: block1, block2, block3, block4, block5)
}
}
Expand Down Expand Up @@ -218,7 +218,7 @@ struct ResNet18: Layer {
}

@differentiable
func call(_ input: Input) -> Output {
func callAsFunction(_ input: Input) -> Output {
let inputLayer = maxPool(relu(l1(input)))
let level2 = inputLayer.sequenced(through: l2a, l2b)
let level3 = level2.sequenced(through: l3a, l3b)
Expand Down Expand Up @@ -274,7 +274,7 @@ struct ResNet34: Layer {
}

@differentiable
func call(_ input: Input) -> Output {
func callAsFunction(_ input: Input) -> Output {
let inputLayer = maxPool(relu(l1(input)))
let level2 = inputLayer.sequenced(through: l2a, l2b, l2c)
let level3 = level2.sequenced(through: l3a, l3b, l3c, l3d)
Expand Down Expand Up @@ -326,7 +326,7 @@ struct ResNet50: Layer {
}

@differentiable
func call(_ input: Input) -> Output {
func callAsFunction(_ input: Input) -> Output {
let inputLayer = maxPool(relu(l1(input)))
let level2 = inputLayer.sequenced(through: l2a, l2b, l2c)
let level3 = level2.sequenced(through: l3a, l3b, l3c, l3d)
Expand Down Expand Up @@ -383,7 +383,7 @@ struct ResNet101: Layer {
}

@differentiable
func call(_ input: Input) -> Output {
func callAsFunction(_ input: Input) -> Output {
let inputLayer = maxPool(relu(l1(input)))
let level2 = inputLayer.sequenced(through: l2a, l2b, l2c)
let level3 = level2.sequenced(through: l3a, l3b, l3c, l3d)
Expand Down Expand Up @@ -441,7 +441,7 @@ struct ResNet152: Layer {
}

@differentiable
func call(_ input: Input) -> Output {
func callAsFunction(_ input: Input) -> Output {
let inputLayer = maxPool(relu(l1(input)))
let level2 = inputLayer.sequenced(through: l2a, l2b, l2c)
let level3 = level2.sequenced(through: l3a, l3b, l3c, l3d)
Expand Down
12 changes: 6 additions & 6 deletions ResNet/ResNetV2.swift
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ struct Conv2DBatchNorm: Layer {
}

@differentiable
func callAsFunction(_ input: Input) -> Output {
    // Forward pass: convolution followed by batch normalization.
    // (Removed the stale pre-rename `func call` line left by the diff view,
    // which duplicated this declaration and would not compile.)
    return input.sequenced(through: conv, norm)
}
}
Expand All @@ -60,7 +60,7 @@ struct BatchNormConv2D: Layer {
}

@differentiable
func callAsFunction(_ input: Input) -> Output {
    // Pre-activation ordering: batch norm, ReLU, then convolution.
    // (Removed the stale pre-rename `func call` line left by the diff view,
    // which duplicated this declaration and would not compile.)
    return conv(relu(norm(input)))
}
}
Expand Down Expand Up @@ -88,7 +88,7 @@ struct PreActivatedResidualBasicBlock: Layer {
}

@differentiable
func callAsFunction(_ input: Input) -> Output {
    // Forward pass: two pre-activated (bn-ReLU-conv) layers in sequence.
    // (Removed the stale pre-rename `func call` line left by the diff view,
    // which duplicated this declaration and would not compile.)
    return input.sequenced(through: layer1, layer2)
}
}
Expand Down Expand Up @@ -117,7 +117,7 @@ struct PreActivatedResidualBasicBlockShortcut: Layer {
}

@differentiable
func callAsFunction(_ input: Input) -> Output {
    // Forward pass: two pre-activated layers plus a projection shortcut
    // added to the result.
    // (Removed the stale pre-rename `func call` line left by the diff view,
    // which duplicated this declaration and would not compile.)
    return input.sequenced(through: layer1, layer2) + shortcut(input)
}
}
Expand Down Expand Up @@ -162,7 +162,7 @@ struct PreActivatedResNet18: Layer {
}

@differentiable
func call(_ input: Input) -> Output {
func callAsFunction(_ input: Input) -> Output {
let inputLayer = input.sequenced(through: l1, maxPool)
let level2 = inputLayer.sequenced(through: l2a, l2b)
let level3 = level2.sequenced(through: l3a, l3b)
Expand Down Expand Up @@ -221,7 +221,7 @@ struct PreActivatedResNet34: Layer {
}

@differentiable
func call(_ input: Input) -> Output {
func callAsFunction(_ input: Input) -> Output {
let inputLayer = input.sequenced(through: l1, maxPool)
let level2 = inputLayer.sequenced(through: l2a, l2b, l2c)
let level3 = level2.sequenced(through: l3a, l3b, l3c, l3d)
Expand Down