= …

for j in 0..<height {
    for i in 0..<width {
        var sum = 0
        // Accumulate every pixel in the (2R+1) × (2R+1) window
        // (assumes input is padded by R pixels on each side).
        for ry in -R...R {
            for rx in -R...R {
                sum += Int(input[j + ry][i + rx])
            }
        }
        // Divide by the full kernel area (2R+1)², not just one side length.
        output[j][i] = UInt8(sum / ((2 * R + 1) * (2 * R + 1)))
    }
}
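The vectorized horizontal pass below leans on three names that are not defined in this excerpt: yresult, a row of vertically pre-averaged values widened to UInt16; L, the kernel length; and weightSIMD, that length broadcast across all eight lanes. A minimal sketch of those definitions, reusing R and width from the scalar version above (the exact setup is an assumption, not the original code):

// Assumed setup for the horizontal SIMD pass (illustrative sketch).
let L = 2 * R + 1                                     // kernel length
let weightSIMD = SIMD8<UInt16>(repeating: UInt16(L))  // lane-wise divisor

// One padded row of vertical averages for the current output row, widened to
// UInt16 so that summing L of them stays in range for moderate radii.
// Sized so the last 8-wide load stays in bounds when width is a multiple of 8.
var yresult = [UInt16](repeating: 0, count: width + L - 1)

With those in place, the horizontal pass produces eight output pixels per loop iteration: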
var x = 0
while x < width {
    var sum = SIMD8<UInt16>.zero
    for k in 0..<L {
        let startIndex = x + k
        // Load 8 neighbouring pre-averaged values at once and accumulate
        // with the wrapping add.
        sum &+= SIMD8<UInt16>(yresult[startIndex..<startIndex+8])
    }
    // Lane-wise division by the kernel length.
    sum /= weightSIMD
    for k in 0..<8 {
        output[y][x + k] = UInt8(sum[k])
    }
    // Advance by the vector width: eight output pixels per iteration.
    x += 8
}
b = SIMD4<Float>(1.0, 2.0, 3.0, 4.0)

a + b   // SIMD4<Float>(2.0, 4.0, 6.0, 8.0)
a - b   // SIMD4<Float>(0.0, 0.0, 0.0, 0.0)
a * b   // SIMD4<Float>(1.0, 4.0, 9.0, 16.0)
a / b   // SIMD4<Float>(1.0, 1.0, 1.0, 1.0)
should eventually all
// be replaced with @_semantics to lower directly to vector IR nodes.
extension SIMD where Scalar: FloatingPoint {
    public static func +(a: Self, b: Self) -> Self {
        var result = Self()
        for i in result.indices {
            result[i] = a[i] + b[i]
        }
        return result
    }
}
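Because the operator is declared on the generic SIMD protocol, this one element-by-element loop covers every vector width whose scalar is FloatingPoint. A small usage sketch (not from the original text):

let lhs = SIMD8<Double>(repeating: 1.5)
let rhs = SIMD8<Double>(0, 1, 2, 3, 4, 5, 6, 7)
print(lhs + rhs)   // SIMD8<Double>(1.5, 2.5, 3.5, 4.5, 5.5, 6.5, 7.5, 8.5)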