Skip to content

Commit d8135f1

Browse files
authored
Add support for individual lines waveform (#3)
Some audio files are better suited to an individual-lines waveform representation. This PR adds support for that.
1 parent 7c12f27 commit d8135f1

6 files changed

Lines changed: 49 additions & 14 deletions

File tree

README.md

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -54,11 +54,12 @@ class MyAudioTests: XCTestCase {
5454
- [x] `AVAudioPCMBuffer` waveform snapshots
5555
- [x] `AVAudioPCMBuffer` overlaid waveform snapshots
5656
- [ ] Spectrogram
57-
- [ ] Spectra
57+
- [x] Spectra
58+
- [x] Different waveform rendering strategies
5859
- [ ] Test against other reference implementations and with known audio files
5960
- [ ] Documentation
6061
- [ ] Mention JUCE
61-
- [ ] Link blog post
62+
- [ ] Link blog post and talk
6263

6364
## Contributing
6465

Sources/AudioSnapshotTesting/AudioSnapshotTesting.swift

Lines changed: 10 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,8 @@ public extension Snapshotting where Format == PlatformImage, Value == (AVAudioPC
2020
/// - Parameters:
2121
/// - width: The width of the resulting image.
2222
/// - height: The height of the resulting image.
23-
static func waveform(width: Int, height: Int) -> Snapshotting {
23+
/// - strategy: The strategy to use when generating the waveform. Defaults to `.joinedLines`.
24+
static func waveform(width: Int, height: Int, strategy: WaveformStrategy = .joinedLines) -> Snapshotting {
2425
Snapshotting<PlatformView, PlatformImage>.image(size: .init(width: width, height: height))
2526
.pullback { buffer1, buffer2 in
2627
let (buckets1, max1) = buffer1.reduce(bucketCount: width)
@@ -33,13 +34,15 @@ public extension Snapshotting where Format == PlatformImage, Value == (AVAudioPC
3334
buckets: data1,
3435
absMax: max1,
3536
height: waveformHeight,
36-
color: .red
37+
color: .red,
38+
strategy: strategy
3739
)
3840
let waveform2 = WaveformView(
3941
buckets: data2,
4042
absMax: max2,
4143
height: waveformHeight,
42-
color: .green
44+
color: .green,
45+
strategy: strategy
4346
)
4447
let waveform = ZStack {
4548
waveform1
@@ -58,7 +61,8 @@ public extension Snapshotting where Format == PlatformImage, Value == AVAudioPCM
5861
/// - Parameters:
5962
/// - width: The width of the resulting image.
6063
/// - height: The height of the resulting image.
61-
static func waveform(width: Int, height: Int) -> Snapshotting {
64+
/// - strategy: The strategy to use when generating the waveform. Defaults to `.joinedLines`.
65+
static func waveform(width: Int, height: Int, strategy: WaveformStrategy = .joinedLines) -> Snapshotting {
6266
Snapshotting<PlatformView, PlatformImage>.image(size: .init(width: width, height: height))
6367
.pullback { buffer in
6468
let verticalPadding: CGFloat = 4
@@ -69,7 +73,8 @@ public extension Snapshotting where Format == PlatformImage, Value == AVAudioPCM
6973
buckets: data,
7074
absMax: max,
7175
height: waveformHeight,
72-
color: .red
76+
color: .red,
77+
strategy: strategy
7378
)
7479
.padding(.vertical, verticalPadding)
7580
.background(Color.black)

Sources/AudioSnapshotTesting/WaveformView.swift

Lines changed: 26 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -8,16 +8,30 @@ struct WaveformView: View {
88
buckets: [Bucket],
99
absMax: Float,
1010
height: CGFloat,
11-
color: Color
11+
color: Color,
12+
strategy: WaveformStrategy = .joinedLines
1213
) {
1314
self.color = color
1415
let halfHeight = height / 2
15-
path = Path { path in
16-
path.move(to: CGPoint(x: 0, y: halfHeight))
17-
for (index, bucket) in buckets.enumerated() {
18-
let sampleHeight = absMax > 0 ? (CGFloat(bucket.max) / CGFloat(absMax)) * halfHeight : 0
19-
let point = CGPoint(x: CGFloat(index), y: halfHeight - sampleHeight)
20-
path.addLine(to: point)
16+
switch strategy {
17+
case .joinedLines:
18+
path = Path { path in
19+
path.move(to: CGPoint(x: 0, y: halfHeight))
20+
for (index, bucket) in buckets.enumerated() {
21+
let sampleHeight = absMax > 0 ? (CGFloat(bucket.max) / CGFloat(absMax)) * halfHeight : 0
22+
let point = CGPoint(x: CGFloat(index), y: halfHeight - sampleHeight)
23+
path.addLine(to: point)
24+
}
25+
}
26+
case .individualLines:
27+
path = Path { path in
28+
for (index, bucket) in buckets.enumerated() {
29+
let sampleHeight = absMax > 0 ? (CGFloat(bucket.max) / CGFloat(absMax)) * halfHeight : 0
30+
let pointMax = CGPoint(x: CGFloat(index), y: halfHeight - sampleHeight)
31+
let pointMin = CGPoint(x: CGFloat(index), y: halfHeight + sampleHeight)
32+
path.move(to: pointMax)
33+
path.addLine(to: pointMin)
34+
}
2135
}
2236
}
2337
}
@@ -28,3 +42,8 @@ struct WaveformView: View {
2842
}
2943
}
3044
}
45+
46+
public enum WaveformStrategy {
47+
case joinedLines
48+
case individualLines
49+
}

Tests/AudioSnapshotTestingTests/AudioSnapshotTestingTests.swift

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,16 @@ func fileWaveform(wave: String) async throws {
1717
)
1818
}
1919

20+
@Test(.snapshots(record: false, diffTool: .ksdiff))
21+
@MainActor
22+
func fileWaveformMetronome() async throws {
23+
assertSnapshot(
24+
of: try AVAudioPCMBuffer.read(wave: "metronome"),
25+
as: .waveform(width: 800, height: 400, strategy: .individualLines),
26+
named: "metronome"
27+
)
28+
}
29+
2030
@Test(.snapshots(record: false, diffTool: .ksdiff))
2131
@MainActor
2232
func fileWaveformOverlay() async throws {
431 KB
Binary file not shown.
11.3 KB
Loading

0 commit comments

Comments
 (0)