Commit
Merge branch 'AudioKit:main' into main
jcavar authored Nov 17, 2024
2 parents b743f5c + 73098f1 commit 7fd9b78
Showing 6 changed files with 87 additions and 16 deletions.
8 changes: 7 additions & 1 deletion Sources/AudioKit/Audio Files/AVAudioFile+Utilities.swift
@@ -33,9 +33,15 @@ public extension AVAudioFile {
         guard let buffer = AVAudioPCMBuffer(pcmFormat: processingFormat,
                                             frameCapacity: AVAudioFrameCount(length)) else { return nil }
 
+        guard let tmpBuffer = AVAudioPCMBuffer(pcmFormat: processingFormat,
+                                               frameCapacity: AVAudioFrameCount(length)) else { return nil }
+
         do {
             framePosition = 0
-            try read(into: buffer)
+            while framePosition < length {
+                try read(into: tmpBuffer)
+                buffer.append(tmpBuffer)
+            }
             Log("Created buffer with format", processingFormat)
 
         } catch let error as NSError {
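With this change, toAVAudioPCMBuffer() accumulates the file chunk by chunk instead of relying on a single read filling the whole buffer. A minimal usage sketch (the helper name and URL handling are illustrative, not part of the commit):

import AVFoundation
import AudioKit

// Hypothetical helper; the URL is whatever readable audio file the caller wants to load.
func loadWholeFile(at url: URL) throws -> AVAudioPCMBuffer? {
    let file = try AVAudioFile(forReading: url)
    // After the read loop above, frameLength should cover the entire file,
    // even when a single read(into:) returns fewer frames than file.length.
    return file.toAVAudioPCMBuffer()
}
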
19 changes: 7 additions & 12 deletions Sources/AudioKit/Audio Files/AVAudioPCMBuffer+Utilities.swift
@@ -65,19 +65,14 @@ public extension AVAudioPCMBuffer {
         precondition(frameLength + frameCount <= frameCapacity,
                      "Insufficient space in buffer")
 
-        let dst1 = floatChannelData![0]
-        let src1 = buffer.floatChannelData![0]
+        for channel in 0..<Int(format.channelCount) {
+            let dst = floatChannelData![channel]
+            let src = buffer.floatChannelData![channel]
 
-        memcpy(dst1.advanced(by: stride * Int(frameLength)),
-               src1.advanced(by: stride * Int(startingFrame)),
-               Int(frameCount) * stride * MemoryLayout<Float>.size)
-
-        let dst2 = floatChannelData![1]
-        let src2 = buffer.floatChannelData![1]
-
-        memcpy(dst2.advanced(by: stride * Int(frameLength)),
-               src2.advanced(by: stride * Int(startingFrame)),
-               Int(frameCount) * stride * MemoryLayout<Float>.size)
+            memcpy(dst.advanced(by: stride * Int(frameLength)),
+                   src.advanced(by: stride * Int(startingFrame)),
+                   Int(frameCount) * stride * MemoryLayout<Float>.size)
+        }
 
         frameLength += frameCount
     }
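The rewritten copy loop no longer hard-codes two channels, so mono (or multichannel) buffers work as well. A rough sketch, assuming AudioKit's public append(_:) wrapper that forwards to the method shown above; the format and frame counts are illustrative:

import AVFoundation
import AudioKit

// Mono on purpose: the old implementation indexed floatChannelData![1] unconditionally.
let format = AVAudioFormat(standardFormatWithSampleRate: 48_000, channels: 1)!
let destination = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: 1_024)!
let source = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: 512)!
source.frameLength = 512

// Copies source's frames onto the end of destination and advances frameLength.
destination.append(source)
assert(destination.frameLength == 512)
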
@@ -18,6 +18,7 @@ extension FormatConverter {
     ///
     /// This is no longer used in this class as it's not possible to convert sample rate or other
     /// required options. It will use the next function instead
+    @available(visionOS, unavailable, message: "This method is not supported on visionOS")
     func convertCompressed(presetName: String, completionHandler: FormatConverterCallback? = nil) {
         guard let inputURL = inputURL else {
             completionHandler?(Self.createError(message: "Input file can't be nil."))
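Because the preset-based overload is now marked unavailable on visionOS, an in-module caller targeting that platform would typically compile the call out. A minimal, hypothetical sketch of such a call site (converter is an assumed FormatConverter instance; the preset is illustrative):

#if !os(visionOS)
// Hypothetical in-module call site; AVAssetExportPresetAppleM4A is a standard Apple preset.
converter.convertCompressed(presetName: AVAssetExportPresetAppleM4A) { error in
    if let error {
        Log("Conversion failed:", error)
    }
}
#endif
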
@@ -49,6 +50,44 @@
         }
     }
 
+    /// Example of the most simplistic AVFoundation conversion.
+    /// With this approach you can't really specify any settings other than the limited presets.
+    /// No sample rate conversion in this. This isn't used in the public methods but is here
+    /// for example.
+    ///
+    /// see `AVAssetExportSession`:
+    /// *Prior to initializing an instance of AVAssetExportSession, you can invoke
+    /// +allExportPresets to obtain the complete list of presets available. Use
+    /// +exportPresetsCompatibleWithAsset: to obtain a list of presets that are compatible
+    /// with a specific AVAsset.*
+    ///
+    /// This is no longer used in this class as it's not possible to convert sample rate or other
+    /// required options. It will use the next function instead
+    #if swift(>=6.0) // Swift 6.0 corresponds to Xcode 16+
+    @available(macOS 15, iOS 18, tvOS 18, visionOS 2.0, *)
+    func convertCompressed(presetName: String) async throws {
+        guard let inputURL = inputURL else {
+            throw Self.createError(message: "Input file can't be nil.")
+        }
+        guard let outputURL = outputURL else {
+            throw Self.createError(message: "Output file can't be nil.")
+        }
+
+        let asset = AVURLAsset(url: inputURL)
+        guard let session = AVAssetExportSession(asset: asset,
+                                                 presetName: presetName) else {
+            throw Self.createError(message: "session can't be nil.")
+        }
+
+        let list = await session.compatibleFileTypes
+        guard let outputFileType: AVFileType = list.first else {
+            throw Self.createError(message: "Unable to determine a compatible file type from \(inputURL.path)")
+        }
+
+        try await session.export(to: outputURL, as: outputFileType)
+    }
+    #endif
+
     /// Convert to compressed first creating a tmp file to PCM to allow more flexible conversion
     /// options to work.
     func convertCompressed(completionHandler: FormatConverterCallback? = nil) {
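The new async overload leans on the throwing export(to:as:) API that AVAssetExportSession gained in iOS 18 / macOS 15. A standalone sketch of the same pattern outside FormatConverter, with a placeholder function name, placeholder URLs, and a preset chosen purely for illustration:

import AVFoundation

@available(macOS 15, iOS 18, tvOS 18, visionOS 2.0, *)
func exportToM4A(from input: URL, to output: URL) async throws {
    let asset = AVURLAsset(url: input)

    // AVAssetExportPresetAppleM4A is one of Apple's built-in presets.
    guard let session = AVAssetExportSession(asset: asset,
                                             presetName: AVAssetExportPresetAppleM4A) else {
        throw NSError(domain: "ExportSketch", code: -1)
    }

    // The throwing export(to:as:) replaces the older exportAsynchronously + status polling.
    try await session.export(to: output, as: .m4a)
}
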
20 changes: 17 additions & 3 deletions Sources/AudioKit/Internals/Settings/Settings.swift
@@ -52,10 +52,24 @@ public class Settings: NSObject {
     }
 
     /// Default audio format
-    public static let defaultAudioFormat = AVAudioFormat(standardFormatWithSampleRate: 44_100,
-                                                         channels: 2) ?? AVAudioFormat()
+    public static let defaultAudioFormat: AVAudioFormat = {
+        if #available(iOS 18.0, *) {
+            if !ProcessInfo.processInfo.isMacCatalystApp && !ProcessInfo.processInfo.isiOSAppOnMac {
+                /// Default AVAudioFormat for iOS 18 and newer (not on Mac)
+                return AVAudioFormat(standardFormatWithSampleRate: 48_000, channels: 2) ?? AVAudioFormat()
+            }
+        }
+
+        if #available(macOS 15.0, *) {
+            /// Default AVAudioFormat for macOS 15 and newer
+            return AVAudioFormat(standardFormatWithSampleRate: 48_000, channels: 2) ?? AVAudioFormat()
+        }
+
+        /// Fallback default
+        return AVAudioFormat(standardFormatWithSampleRate: 44_100, channels: 2) ?? AVAudioFormat()
+    }()
 
-    /// The sample rate in Hertz, default is 44100 kHz. Set a new audioFormat if you want to change this value.
+    /// The sample rate in Hertz. Set a new audioFormat if you want to change this value.
     /// See audioFormat. This is the format that is used for node connections.
     public static var sampleRate: Double {
         get {
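In practice the new default only matters until an app pins its own format. A short sketch, assuming AudioKit's public Settings.audioFormat and Settings.sampleRate accessors that the doc comment above refers to:

import AudioKit
import AVFoundation

// On iOS 18 devices or macOS 15 the default now resolves to 48 kHz; older systems keep 44.1 kHz.
print(Settings.defaultAudioFormat.sampleRate)

// Apps that need a specific rate can still pin it by assigning a new audioFormat.
if let fixed = AVAudioFormat(standardFormatWithSampleRate: 44_100, channels: 2) {
    Settings.audioFormat = fixed
}
print(Settings.sampleRate) // 44100.0
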
17 changes: 17 additions & 0 deletions Tests/AudioKitTests/Extension Tests/AVAudioFileTests.swift
@@ -0,0 +1,17 @@
+import XCTest
+import AVFoundation
+
+final class AVAudioFileTests: XCTestCase {
+
+    func testReadFile() throws {
+
+        let sampleURL = Bundle.module.url(forResource: "TestResources/0001_1-16", withExtension: "wav")!
+
+        let wavFile = try AVAudioFile(forReading: sampleURL)
+
+        let pcmBuffer = wavFile.toAVAudioPCMBuffer()!
+
+        XCTAssertEqual(Int(wavFile.length), Int(pcmBuffer.frameLength))
+    }
+
+}
Binary file added Tests/AudioKitTests/TestResources/0001_1-16.wav
