I've used the below function to add effects to an audio file using AVAudioMixerNode and AVAudioEngine. The issue is that I have to listen to the entire audio before the new audio is saved. The function lets me save the file with a .caf extension, but I also need it as .m4a. I also tried changing kAudioFormatLinearPCM and the .caf extension of the new audio file, but the saved audio file in the Documents directory does not play.
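For reference, the .m4a attempt only changed the output settings and the file extension, roughly like this (a sketch, not the exact code I ran; kAudioFormatMPEG4AAC and kAudioFileM4AType are my assumption for an AAC payload in an .m4a container, everything else mirrors the .caf settings inside the function below):
do {
    // Sketch of the .m4a variant; format ID and file type are assumptions
    let m4aUrl = URL(fileURLWithPath: dirPaths.appending("/effectedSound\(Int(Date().timeIntervalSince1970)).m4a"))
    var m4aSettings: [String : Any] = [:]
    m4aSettings[AVFormatIDKey] = kAudioFormatMPEG4AAC       // assumed AAC instead of kAudioFormatLinearPCM
    m4aSettings[AVAudioFileTypeKey] = kAudioFileM4AType     // assumed M4A container instead of kAudioFileCAFType
    m4aSettings[AVSampleRateKey] = audioFile.fileFormat.sampleRate
    m4aSettings[AVNumberOfChannelsKey] = 2
    newAudio = try AVAudioFile(forWriting: m4aUrl, settings: m4aSettings)
} catch {
    print("Unable to open output audio file: \(error).")
}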
The parameters used in the function are:
var audioEngine = AVAudioEngine()
var audioPlayerNode = AVAudioPlayerNode()
var reverb = AVAudioUnitReverb()
var audioFile = AVAudioFile()
var format = AVAudioFormat()
var audioMixer = AVAudioMixerNode()
var player = AVAudioPlayer()
var newAudio = AVAudioFile()
The below function adds the effects to the audio file:
private func playAudio(pitch: Float, rate: Float, reverb: Float, echo: Float) {
    do {
        let url = URL(fileURLWithPath: Bundle.main.path(forResource: "audio_File_Name", ofType: "mp3")!)
        audioFile = try AVAudioFile(forReading: url)
    } catch {
        print("error")
    }
    // Initialize variables
    audioEngine.attach(audioPlayerNode)
    // Setting the pitch
    let pitchEffect = AVAudioUnitTimePitch()
    pitchEffect.pitch = pitch
    audioEngine.attach(pitchEffect)
    // Setting the playback rate
    let playbackRateEffect = AVAudioUnitVarispeed()
    playbackRateEffect.rate = rate
    audioEngine.attach(playbackRateEffect)
    // Setting the reverb effect
    let reverbEffect = AVAudioUnitReverb()
    reverbEffect.loadFactoryPreset(AVAudioUnitReverbPreset.cathedral)
    reverbEffect.wetDryMix = reverb
    audioEngine.attach(reverbEffect)
    // Setting the echo effect on a specific interval
    let echoEffect = AVAudioUnitDelay()
    echoEffect.delayTime = TimeInterval(echo)
    audioEngine.attach(echoEffect)
    // Set up a mixer node
    audioEngine.attach(audioMixer)
    // Chain all these up, ending with the output
    audioEngine.connect(audioPlayerNode, to: playbackRateEffect, format: audioFile.processingFormat)
    audioEngine.connect(playbackRateEffect, to: pitchEffect, format: audioFile.processingFormat)
    audioEngine.connect(pitchEffect, to: reverbEffect, format: audioFile.processingFormat)
    audioEngine.connect(reverbEffect, to: echoEffect, format: audioFile.processingFormat)
    audioEngine.connect(echoEffect, to: audioMixer, format: audioFile.processingFormat)
    audioEngine.connect(audioMixer, to: audioEngine.mainMixerNode, format: audioFile.processingFormat)

    audioPlayerNode.scheduleFile(audioFile, at: nil)
    try! audioEngine.start()

    let dirPaths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
    let tmpFileUrl = URL(fileURLWithPath: dirPaths.appending("/effectedSound\(Int(Date().timeIntervalSince1970)).caf"))
    print(tmpFileUrl)

    do {
        var settings: [String : Any] = [:]
        settings[AVFormatIDKey] = kAudioFormatLinearPCM
        settings[AVAudioFileTypeKey] = kAudioFileCAFType
        settings[AVSampleRateKey] = audioFile.fileFormat.sampleRate // buffer.format.sampleRate
        settings[AVNumberOfChannelsKey] = 2
        settings[AVLinearPCMIsFloatKey] = (audioFile.fileFormat.commonFormat == .pcmFormatInt32)
        newAudio = try AVAudioFile(forWriting: tmpFileUrl, settings: settings)
    } catch {
        print("Unable to open output audio file: \(error).")
    }

    // Tap the mixer output and append each rendered buffer to the output file
    audioMixer.installTap(onBus: 0, bufferSize: AVAudioFrameCount(audioMixer.outputFormat(forBus: 0).sampleRate), format: self.audioMixer.outputFormat(forBus: 0)) { buffer, when in
        do {
            if self.audioFile.length > self.newAudio.length {
                try self.newAudio.write(from: buffer)
            }
        } catch let error {
            print(error)
        }
    }

    audioPlayerNode.play()
}
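For completeness, I call the function roughly like this (the values are only examples, not the exact ones from my app):
playAudio(pitch: 1000, rate: 1.0, reverb: 50, echo: 0.2)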