//
//  File.swift
//  Swipey
//
//  Created by Samuel Goodwin on 07/03/2022.
//

import Foundation
import AVFoundation
import Combine
import Accelerate
import MediaToolbox // MTAudioProcessingTap

class Audio: ObservableObject {
    private var player = AVPlayer()
    private var timeObservation: Any?
    private var statusObservation: Any?

    let timePublisher = PassthroughSubject<TimeInterval, Never>()
    let statusPublisher = PassthroughSubject<AVPlayer.TimeControlStatus, Never>()

    init() {
        let session = AVAudioSession.sharedInstance()
        do {
            try session.setCategory(.playback, mode: .default, policy: .longFormAudio)
            try session.setActive(true)
        } catch {
            print(error)
        }
    }

    func play(url: URL) {
        let item = AVPlayerItem(url: url)

        let inputParams = AVMutableAudioMixInputParameters(track: item.asset.tracks.first)

        var callbacks = MTAudioProcessingTapCallbacks(version: kMTAudioProcessingTapCallbacksVersion_0, clientInfo: nil) { tap, clientInfo, tapStorage in
            print("Initializing")
        } finalize: { tap in
            print("Finalizing")
        } prepare: { tap, maxFrames, processingFormat in
            print("Preparing")
        } unprepare: { tap in
            print("Unprepare")
        } process: { tap, numberOfFrames, flags, bufferList, numberOfFramesOut, flagsOut in
            // Pull the source audio for this slice of the stream into `bufferList`.
            let err = MTAudioProcessingTapGetSourceAudio(tap, numberOfFrames, bufferList, flagsOut, nil, numberOfFramesOut)
            if err != OSStatus.zero {
                print("Failed to get source audio: \(err)")
            }

            let n = vDSP_Length(numberOfFrames)

            guard let buffer = UnsafeMutableAudioBufferListPointer(bufferList).first else {
                fatalError("Failed to pick out mData from audio buffer")
            }
            // Note: this assumes 16-bit integer samples; the tap's actual sample
            // layout is described by the `processingFormat` passed to `prepare`.
            let bufferData = UnsafeBufferPointer<Int16>(buffer)
            let signal: [Float] = bufferData.map(Float.init)

            let log2n = vDSP_Length(log2(Float(n)))

            guard let fftSetUp = vDSP.FFT(log2n: log2n, radix: .radix2, ofType: DSPSplitComplex.self) else {
                fatalError("Failed to setup FFT")
            }

            let halfN = Int(n / 2)

            var forwardInputReal = [Float](repeating: 0, count: halfN)
            var forwardInputImag = [Float](repeating: 0, count: halfN)
            var forwardOutputReal = [Float](repeating: 0, count: halfN)
            var forwardOutputImag = [Float](repeating: 0, count: halfN)

            forwardInputReal.withUnsafeMutableBufferPointer { forwardInputRealPtr in
                forwardInputImag.withUnsafeMutableBufferPointer { forwardInputImagPtr in
                    forwardOutputReal.withUnsafeMutableBufferPointer { forwardOutputRealPtr in
                        forwardOutputImag.withUnsafeMutableBufferPointer { forwardOutputImagPtr in
                            // Create a `DSPSplitComplex` to contain the signal.
                            var forwardInput = DSPSplitComplex(realp: forwardInputRealPtr.baseAddress!, imagp: forwardInputImagPtr.baseAddress!)

                            // Convert the real values in `signal` to complex numbers.
                            signal.withUnsafeBytes {
                                vDSP.convert(interleavedComplexVector: [DSPComplex]($0.bindMemory(to: DSPComplex.self)), toSplitComplexVector: &forwardInput)
                            }

                            // Create a `DSPSplitComplex` to receive the FFT result.
                            var forwardOutput = DSPSplitComplex(realp: forwardOutputRealPtr.baseAddress!, imagp: forwardOutputImagPtr.baseAddress!)

                            // Perform the forward FFT.
                            fftSetUp.forward(input: forwardInput, output: &forwardOutput)
                        }
                    }
                }
            }

            let autospectrum = [Float](unsafeUninitializedCapacity: halfN) { autospectrumBuffer, initializedCount in
                // The `vDSP_zaspec` function accumulates its output. Clear the
                // uninitialized `autospectrumBuffer` before computing the spectrum.
                vDSP.clear(&autospectrumBuffer)

                forwardOutputReal.withUnsafeMutableBufferPointer { forwardOutputRealPtr in
                    forwardOutputImag.withUnsafeMutableBufferPointer { forwardOutputImagPtr in
                        var frequencyDomain = DSPSplitComplex(realp: forwardOutputRealPtr.baseAddress!, imagp: forwardOutputImagPtr.baseAddress!)
                        vDSP_zaspec(&frequencyDomain, autospectrumBuffer.baseAddress!, vDSP_Length(halfN))
                    }
                }
                initializedCount = halfN
            }

            let componentFrequencies = autospectrum.enumerated().filter {
                $0.element > 1
            }.map {
                $0.offset
            }
            print(componentFrequencies.count)
        }

        var tap: Unmanaged<MTAudioProcessingTap>?
        let status = MTAudioProcessingTapCreate(kCFAllocatorDefault, &callbacks, kMTAudioProcessingTapCreationFlag_PostEffects, &tap)
        print(status)
        inputParams.audioTapProcessor = tap?.takeRetainedValue()

        let mix = AVMutableAudioMix()
        mix.inputParameters = [inputParams]
        item.audioMix = mix

        player.replaceCurrentItem(with: item)
        player.play()

        timeObservation = player.addPeriodicTimeObserver(forInterval: CMTime(seconds: 0.5, preferredTimescale: 1000), queue: nil) { [weak self] time in
            self?.timePublisher.send(time.seconds)
        }
        statusObservation = player.observe(\.timeControlStatus, changeHandler: { [weak self] player, status in
            self?.statusPublisher.send(player.timeControlStatus)
        })
    }

    func stop() {
        player.replaceCurrentItem(with: nil)
        if let observation = timeObservation {
            player.removeTimeObserver(observation)
            timeObservation = nil
        }
    }
}
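
// MARK: - Usage sketch (not part of the original file)
//
// A minimal example of how this class might be driven from SwiftUI. Only the
// `Audio` API above comes from the file itself; `PlayerView` and the audio URL
// below are hypothetical placeholders.

import SwiftUI

struct PlayerView: View {
    @StateObject private var audio = Audio()
    @State private var currentTime: TimeInterval = 0
    @State private var status: AVPlayer.TimeControlStatus = .paused

    var body: some View {
        VStack(spacing: 12) {
            Text("Elapsed: \(currentTime, specifier: "%.1f") s")
            Text(status == .playing ? "Playing" : "Paused")
            Button("Play") {
                // Placeholder URL; substitute a real audio asset.
                if let url = URL(string: "https://example.com/audio.mp3") {
                    audio.play(url: url)
                }
            }
            Button("Stop") { audio.stop() }
        }
        // Deliver publisher output on the main queue before updating view state;
        // the KVO change handler in particular may fire off the main thread.
        .onReceive(audio.timePublisher.receive(on: DispatchQueue.main)) { currentTime = $0 }
        .onReceive(audio.statusPublisher.receive(on: DispatchQueue.main)) { status = $0 }
    }
}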