diff --git a/korge-core/src/android/korlibs/audio/sound/AndroidNativeSoundProvider.kt b/korge-core/src/android/korlibs/audio/sound/AndroidNativeSoundProvider.kt index 0986b99ba2..19c1d0302f 100644 --- a/korge-core/src/android/korlibs/audio/sound/AndroidNativeSoundProvider.kt +++ b/korge-core/src/android/korlibs/audio/sound/AndroidNativeSoundProvider.kt @@ -3,22 +3,15 @@ package korlibs.audio.sound import android.content.* import android.media.* import android.os.* -import korlibs.datastructure.* -import korlibs.datastructure.lock.* +import korlibs.datastructure.event.* +import korlibs.datastructure.pauseable.* +import korlibs.datastructure.thread.* import korlibs.io.android.* -import korlibs.io.async.* -import korlibs.time.* -import kotlinx.coroutines.* import kotlin.coroutines.* -import kotlin.coroutines.cancellation.CancellationException actual val nativeSoundProvider: NativeSoundProvider by lazy { AndroidNativeSoundProvider() } -class AndroidNativeSoundProvider : NativeSoundProvider() { - companion object { - val MAX_CHANNELS = 16 - } - +class AndroidNativeSoundProvider : NativeSoundProviderNew() { override val target: String = "android" private var audioManager: AudioManager? = null @@ -26,193 +19,126 @@ class AndroidNativeSoundProvider : NativeSoundProvider() { if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) audioManager!!.generateAudioSessionId() else -1 } - //val audioSessionId get() = audioManager!!.generateAudioSessionId() - //private val activeOutputs = LinkedHashSet() - private val threadPool = Pool { id -> - //Console.info("Creating AudioThread[$id]") - AudioThread(this, id = id).also { it.isDaemon = true }.also { it.start() } + override fun createNewPlatformAudioOutput(coroutineContext: CoroutineContext, channels: Int, frequency: Int, gen: (AudioSamplesInterleaved) -> Unit): NewPlatformAudioOutput { + ensureAudioManager(coroutineContext) + return AndroidNewPlatformAudioOutput(this, coroutineContext, channels, frequency, gen) } - override var paused: Boolean = false - set(value) { - if (field != value) { - field = value - //(this as java.lang.Object).notifyAll() - } + private val pauseable = SyncPauseable() + override var paused: Boolean by pauseable::paused + + fun ensureAudioManager(coroutineContext: CoroutineContext) { + if (audioManager == null) { + val ctx = coroutineContext[AndroidCoroutineContext.Key]?.context ?: error("Can't find the Android Context on the CoroutineContext. Must call withAndroidContext first") + audioManager = ctx.getSystemService(Context.AUDIO_SERVICE) as AudioManager } + } - class AudioThread(val provider: AndroidNativeSoundProvider, var freq: Int = 44100, val id: Int = -1) : Thread() { - var props: SoundProps = DummySoundProps - val deque = AudioSamplesDeque(2) - val lock = Lock() - @Volatile - var running = true + class AndroidNewPlatformAudioOutput( + val provider: AndroidNativeSoundProvider, + coroutineContext: CoroutineContext, + channels: Int, + frequency: Int, + gen: (AudioSamplesInterleaved) -> Unit + ) : NewPlatformAudioOutput(coroutineContext, channels, frequency, gen) { + var thread: NativeThread? 
= null + + override fun internalStart() { + thread = nativeThread(isDaemon = true) { thread -> + //val bufferSamples = 4096 + val bufferSamples = 1024 + + val atChannelSize = Short.SIZE_BYTES * channels * bufferSamples + val atChannel = if (channels >= 2) AudioFormat.CHANNEL_OUT_STEREO else AudioFormat.CHANNEL_OUT_MONO + val atMode = AudioTrack.MODE_STREAM + val at = if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) { + AudioTrack( + AudioAttributes.Builder() + .setUsage(AudioAttributes.USAGE_GAME) + //.setContentType(AudioAttributes.CONTENT_TYPE_MUSIC) + .setContentType(AudioAttributes.CONTENT_TYPE_UNKNOWN) + .build(), + AudioFormat.Builder() + .setChannelMask(atChannel) + .setSampleRate(frequency) + .setEncoding(AudioFormat.ENCODING_PCM_16BIT) + .build(), + atChannelSize, + atMode, + provider.audioSessionId + ) + } else { + @Suppress("DEPRECATION") + AudioTrack( + AudioManager.STREAM_MUSIC, + frequency, + atChannel, + AudioFormat.ENCODING_PCM_16BIT, + atChannelSize, + atMode + ) + } + if (at.state == AudioTrack.STATE_UNINITIALIZED) { + System.err.println("Audio track was not initialized correctly frequency=$frequency, bufferSamples=$bufferSamples") + } - init { - this.isDaemon = true - } + val buffer = AudioSamplesInterleaved(channels, bufferSamples) + at.play() + + var lastVolL = Float.NaN + var lastVolR = Float.NaN - override fun run() { - val bufferSamples = 4096 - - val at = if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) { - AudioTrack( - AudioAttributes.Builder() - .setUsage(AudioAttributes.USAGE_GAME) - //.setContentType(AudioAttributes.CONTENT_TYPE_MUSIC) - .setContentType(AudioAttributes.CONTENT_TYPE_UNKNOWN) - .build(), - AudioFormat.Builder() - .setChannelMask(AudioFormat.CHANNEL_IN_STEREO) - .setSampleRate(freq) - .setEncoding(AudioFormat.ENCODING_PCM_16BIT) - .build(), - 2 * 2 * bufferSamples, - AudioTrack.MODE_STREAM, - provider.audioSessionId - ) - } else { - @Suppress("DEPRECATION") - AudioTrack( - AudioManager.STREAM_MUSIC, - freq, - AudioFormat.CHANNEL_OUT_STEREO, - AudioFormat.ENCODING_PCM_16BIT, - 2 * 2 * bufferSamples, - AudioTrack.MODE_STREAM - ) - } - if (at.state == AudioTrack.STATE_UNINITIALIZED) { - System.err.println("Audio track was not initialized correctly freq=$freq, bufferSamples=$bufferSamples") - } - //if (at.state == AudioTrack.STATE_INITIALIZED) at.play() - while (running) { try { - val temp = AudioSamplesInterleaved(2, bufferSamples) - //val tempEmpty = ShortArray(1024) - var paused = true - var lastVolume = Float.NaN - while (running) { - //println("Android sound thread running = ${currentThreadId} ${currentThreadName}") - - if (provider.paused) { - at.stop() - //at.pause() - //at.flush() - while (provider.paused && running) { - //(provider as java.lang.Object).wait(10_000L) - Thread.sleep(250L) - } - at.play() - } + while (thread.threadSuggestRunning) { + provider.pauseable.checkPaused() - val readCount = lock { deque.read(temp) } - if (at.state == AudioTrack.STATE_UNINITIALIZED) { - Thread.sleep(50L) + if (this.paused) { + at.pause() + Thread.sleep(20L) continue + } else { + at.play() } - if (readCount > 0) { - if (paused) { - //println("[KORAU] Resume $id") - paused = false - at.play() + + when (at.state) { + AudioTrack.STATE_UNINITIALIZED -> { + Thread.sleep(20L) } - //println("AUDIO CHUNK: $readCount : ${temp.data.toList()}") - if (at.state == AudioTrack.STATE_INITIALIZED) { - at.playbackRate = freq + AudioTrack.STATE_INITIALIZED -> { + at.playbackRate = frequency if 
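// Sketch (not part of this patch): the loop above requests a fixed 1024-sample buffer
// (atChannelSize = Short.SIZE_BYTES * channels * bufferSamples). A common extra safeguard is to
// floor that against AudioTrack.getMinBufferSize() so the track does not end up in
// STATE_UNINITIALIZED on devices that need a larger buffer. The helper name computeBufferSizeBytes
// is illustrative only.
import android.media.AudioFormat
import android.media.AudioTrack

fun computeBufferSizeBytes(channels: Int, frequency: Int, bufferSamples: Int = 1024): Int {
    val requested = Short.SIZE_BYTES * channels * bufferSamples
    val mask = if (channels >= 2) AudioFormat.CHANNEL_OUT_STEREO else AudioFormat.CHANNEL_OUT_MONO
    val min = AudioTrack.getMinBufferSize(frequency, mask, AudioFormat.ENCODING_PCM_16BIT)
    // getMinBufferSize() returns a negative error code on failure; keep the requested size then.
    return if (min > 0) maxOf(requested, min) else requested
}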
(Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { - at.playbackParams.speed = props.pitch.toFloat() + at.playbackParams.speed = this.pitch.toFloat() } - val vol = props.volume.toFloat() - if (lastVolume != vol) { + val volL = this.volumeForChannel(0).toFloat() + val volR = this.volumeForChannel(1).toFloat() + if (lastVolL != volL || lastVolR != volR) { + lastVolL = volL + lastVolR = volR if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) { - at.setVolume(vol) + at.setVolume(volL) } else { - @Suppress("DEPRECATION") - at.setStereoVolume(vol, vol) + at.setStereoVolume(volL, volR) } } - lastVolume = vol - at.write(temp.data, 0, readCount * 2) - } - } else { - //at.write(tempEmpty, 0, tempEmpty.size) - if (!paused) { - //println("[KORAU] Stop $id") - //at.flush() - at.stop() - paused = true + + genSafe(buffer) + at.write(buffer.data, 0, buffer.data.size) } - Thread.sleep(2L) } } - } catch (e: Throwable) { - e.printStackTrace() } finally { - //println("[KORAU] Completed $id") - try { - at.stop() - } catch (e: CancellationException) { - throw e - } catch (e: Throwable) { - e.printStackTrace() - } + at.flush() + at.stop() + at.release() } - } - at.release() - } - } - fun ensureAudioManager(coroutineContext: CoroutineContext) { - if (audioManager == null) { - val ctx = coroutineContext[AndroidCoroutineContext.Key]?.context ?: error("Can't find the Android Context on the CoroutineContext. Must call withAndroidContext first") - audioManager = ctx.getSystemService(Context.AUDIO_SERVICE) as AudioManager - } - } - - override fun createPlatformAudioOutput(coroutineContext: CoroutineContext, freq: Int): PlatformAudioOutput { - ensureAudioManager(coroutineContext) - return AndroidPlatformAudioOutput(coroutineContext, freq, this) - } - - class AndroidPlatformAudioOutput(coroutineContext: CoroutineContext, frequency: Int, val provider: AndroidNativeSoundProvider) : PlatformAudioOutput(coroutineContext, frequency) { - private var started = false - internal var thread: AudioThread? = null - private val threadDeque get() = thread?.deque - - override val availableSamples: Int get() = threadDeque?.availableRead ?: 0 - - override suspend fun add(samples: AudioSamples, offset: Int, size: Int) { - //println("AndroidPlatformAudioOutput.add") - while (thread == null) delay(10.milliseconds) - while (threadDeque!!.availableRead >= 44100) delay(1.milliseconds) - - thread!!.lock { threadDeque!!.write(samples, offset, size) } - } - - override fun start() { - if (started) return - started = true - launchImmediately(coroutineContext) { - while (provider.threadPool.totalItemsInUse >= MAX_CHANNELS) { - delay(10.milliseconds) - } - thread = provider.threadPool.alloc() - thread?.props = this - thread?.freq = frequency - threadDeque?.clear() + //val temp = AudioSamplesInterleaved(2, bufferSamples) } - //provider.activeOutputs += this } - override fun stop() { - if (!started) return - //provider.activeOutputs -= this - started = false - if (thread != null) { - provider.threadPool.free(thread!!) 
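// Sketch: the volume branch above extracted into a small helper (helper name illustrative).
// Android API note: AudioTrack.setVolume(gain) (API 21+) applies a single gain to all channels,
// so volR (derived from volumeForChannel(1)) only takes effect on the pre-21 setStereoVolume() path.
import android.media.AudioTrack
import android.os.Build

@Suppress("DEPRECATION")
fun applyTrackVolume(at: AudioTrack, volL: Float, volR: Float) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
        at.setVolume(volL)             // single gain for every channel
    } else {
        at.setStereoVolume(volL, volR) // separate left/right gains (deprecated since API 21)
    }
}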
- } + override fun internalStop() { + thread?.threadSuggestRunning = false thread = null } } diff --git a/korge-core/src/common/korlibs/audio/sound/AudioData.kt b/korge-core/src/common/korlibs/audio/sound/AudioData.kt index d7e3b5201d..0e89dd4ee4 100644 --- a/korge-core/src/common/korlibs/audio/sound/AudioData.kt +++ b/korge-core/src/common/korlibs/audio/sound/AudioData.kt @@ -1,19 +1,11 @@ package korlibs.audio.sound -import korlibs.time.TimeSpan -import korlibs.time.seconds -import korlibs.memory.arraycopy -import korlibs.audio.format.AudioDecodingProps -import korlibs.audio.format.AudioEncodingProps -import korlibs.audio.format.AudioFormat -import korlibs.audio.format.AudioFormats -import korlibs.audio.format.defaultAudioFormats -import korlibs.io.file.VfsFile -import korlibs.io.file.VfsOpenMode -import korlibs.io.file.baseName -import korlibs.io.lang.invalidOp -import korlibs.io.stream.openUse -import kotlin.math.min +import korlibs.audio.format.* +import korlibs.io.file.* +import korlibs.io.lang.* +import korlibs.memory.* +import korlibs.time.* +import kotlin.math.* class AudioData( val rate: Int, @@ -32,6 +24,7 @@ class AudioData( val totalSamples: Int get() = samples.totalSamples val totalTime: TimeSpan get() = timeAtSample(totalSamples) fun timeAtSample(sample: Int): TimeSpan = ((sample).toDouble() / rate.toDouble()).seconds + fun sampleAtTime(time: TimeSpan): Int = (time.seconds * rate.toDouble()).toInt() operator fun get(channel: Int): ShortArray = samples.data[channel] operator fun get(channel: Int, sample: Int): Short = samples.data[channel][sample] diff --git a/korge-core/src/common/korlibs/audio/sound/AudioSamples.kt b/korge-core/src/common/korlibs/audio/sound/AudioSamples.kt index 90fb524b9c..2679050911 100644 --- a/korge-core/src/common/korlibs/audio/sound/AudioSamples.kt +++ b/korge-core/src/common/korlibs/audio/sound/AudioSamples.kt @@ -1,15 +1,11 @@ package korlibs.audio.sound -import korlibs.memory.arraycopy -import korlibs.memory.arrayinterleave -import korlibs.math.clamp01 -import korlibs.audio.internal.SampleConvert -import korlibs.audio.internal.coerceToShort +import korlibs.audio.internal.* import korlibs.datastructure.iterators.* -import korlibs.io.lang.assert -import kotlin.math.absoluteValue -import kotlin.math.max -import kotlin.math.min +import korlibs.io.lang.* +import korlibs.math.* +import korlibs.memory.* +import kotlin.math.* interface IAudioSamples { val channels: Int @@ -19,11 +15,19 @@ interface IAudioSamples { operator fun get(channel: Int, sample: Int): Short operator fun set(channel: Int, sample: Int, value: Short): Unit fun getFloat(channel: Int, sample: Int): Float = SampleConvert.shortToFloat(this[channel, sample]) - fun setFloat(channel: Int, sample: Int, value: Float) { this[channel, sample] = SampleConvert.floatToShort(value) } + fun setFloat(channel: Int, sample: Int, value: Float) { + this[channel, sample] = SampleConvert.floatToShort(value) + } + fun setFloatStereo(sample: Int, l: Float, r: Float) { setFloat(0, sample, l) setFloat(1, sample, r) } + + fun scaleVolume(scale: Double): IAudioSamples = scaleVolume(scale.toFloat()) + fun scaleVolume(channelScales: DoubleArray): IAudioSamples = scaleVolume(FloatArray(channelScales.size) { channelScales[it].toFloat() }) + fun scaleVolume(scale: Float): IAudioSamples + fun scaleVolume(channelScales: FloatArray): IAudioSamples } internal fun AudioSamples.resample(scale: Double, totalSamples: Int = (this.totalSamples * scale).toInt(), out: AudioSamples = AudioSamples(channels, totalSamples)): 
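// Sketch: the new sampleAtTime() is the inverse of timeAtSample() above, up to toInt() truncation.
// Assumes the AudioData(rate, samples) constructor and the korlibs.time imports already used in this diff.
fun demoSampleTimeRoundTrip() {
    val data = AudioData(44100, AudioSamples(2, 44100))   // one second of stereo silence
    println(data.timeAtSample(22050))                     // about 0.5 seconds
    println(data.sampleAtTime(data.timeAtSample(22050)))  // 22050
}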
AudioSamples { @@ -98,10 +102,8 @@ class AudioSamples(override val channels: Int, override val totalSamples: Int, v this[1, sample] = valueRight } - fun scaleVolume(scale: Double): AudioSamples = scaleVolume(scale.toFloat()) - fun scaleVolume(channelScales: DoubleArray): AudioSamples = scaleVolume(FloatArray(channelScales.size) { channelScales[it].toFloat() }) - fun scaleVolume(scale: Float): AudioSamples { + override fun scaleVolume(scale: Float): AudioSamples { data.fastForEach { channel -> for (n in channel.indices) { channel[n] = (channel[n] * scale).toInt().coerceToShort() @@ -109,7 +111,7 @@ class AudioSamples(override val channels: Int, override val totalSamples: Int, v } return this } - fun scaleVolume(channelScales: FloatArray): AudioSamples { + override fun scaleVolume(channelScales: FloatArray): AudioSamples { data.fastForEachWithIndex { ch, channel -> for (n in channel.indices) { channel[n] = (channel[n] * channelScales[ch]).toInt().coerceToShort() @@ -146,6 +148,21 @@ class AudioSamplesInterleaved(override val channels: Int, override val totalSamp override operator fun get(channel: Int, sample: Int): Short = data[index(channel, sample)] override operator fun set(channel: Int, sample: Int, value: Short) { data[index(channel, sample)] = value } + override fun scaleVolume(scale: Float): AudioSamplesInterleaved { + for (n in data.indices) data[n] = (data[n] * scale).toInt().coerceToShort() + return this + } + override fun scaleVolume(channelScales: FloatArray): AudioSamplesInterleaved { + for (ch in 0 until channels) { + val chVolume = channelScales[ch] + for (n in 0 until totalSamples) { + val i = n * channels + ch + data[i] = (data[i] * chVolume).toInt().coerceToShort() + } + } + return this + } + override fun toString(): String = "AudioSamplesInterleaved(channels=$channels, totalSamples=$totalSamples)" } diff --git a/korge-core/src/common/korlibs/audio/sound/PlatformAudioOutput.kt b/korge-core/src/common/korlibs/audio/sound/PlatformAudioOutput.kt index 04f7a9dd9f..f3bb3a4d74 100644 --- a/korge-core/src/common/korlibs/audio/sound/PlatformAudioOutput.kt +++ b/korge-core/src/common/korlibs/audio/sound/PlatformAudioOutput.kt @@ -2,12 +2,79 @@ package korlibs.audio.sound import korlibs.datastructure.lock.* import korlibs.datastructure.thread.* +import korlibs.io.async.* import korlibs.io.lang.* import korlibs.math.* import korlibs.time.* import kotlinx.coroutines.* import kotlin.coroutines.* +open class NewPlatformAudioOutput( + val coroutineContext: CoroutineContext, + val channels: Int, + val frequency: Int, + private val gen: (AudioSamplesInterleaved) -> Unit, +) : Disposable, SoundProps { + var onCancel: Cancellable? 
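// Sketch: scaleVolume() was hoisted into IAudioSamples above, so interleaved buffers can now be
// attenuated in place as well. Minimal usage (values illustrative):
fun demoScaleVolume() {
    val samples = AudioSamplesInterleaved(channels = 2, totalSamples = 4)
    samples.setFloatStereo(0, 1f, -1f)
    samples.scaleVolume(0.5f)                  // uniform gain, modifies the buffer in place
    samples.scaleVolume(floatArrayOf(1f, 0f))  // per-channel gains: mute the right channel
    println(samples[0, 0])
}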
= null + var paused: Boolean = false + + private val lock = Lock() + fun genSafe(buffer: AudioSamplesInterleaved) { + lock { + try { + gen(buffer) + applyPropsTo(buffer) + } catch (e: Throwable) { + e.printStackTrace() + } + } + } + + override var pitch: Double = 1.0 + override var volume: Double = 1.0 + override var panning: Double = 0.0 + + protected open fun internalStart() = Unit + protected open fun internalStop() = Unit + + fun start() { + stop() + onCancel = coroutineContext.onCancel { stop() } + internalStart() + } + fun stop() { + onCancel?.cancel() + onCancel = null + internalStop() + } + final override fun dispose() = stop() +} + +open class PlatformAudioOutputBasedOnNew( + val soundProvider: NativeSoundProvider, + coroutineContext: CoroutineContext, + frequency: Int, +) : DequeBasedPlatformAudioOutput(coroutineContext, frequency) { + init{ + println("PlatformAudioOutputBasedOnNew[$frequency] = $soundProvider") + } + + val new = soundProvider.createNewPlatformAudioOutput(coroutineContext, 2, frequency) { buffer -> + //println("availableRead=$availableRead") + //if (availableRead >= buffer.data.size) { + readSamplesInterleaved(buffer, fully = true) + //} + } + + override fun start() { + new.start() + } + + override fun stop() { + new.stop() + } +} + open class PlatformAudioOutput( val coroutineContext: CoroutineContext, val frequency: Int @@ -195,15 +262,27 @@ open class DequeBasedPlatformAudioOutput( } } + protected fun readSamplesInterleaved(out: IAudioSamples, offset: Int = 0, count: Int = out.totalSamples - offset, nchannels: Int = out.channels, fully: Boolean): Int { + lock { + val totalRead = if (fully) count else minOf(availableRead, count) + + for (n in 0 until totalRead) { + for (ch in 0 until nchannels) { + out[ch, offset + n] = _readShort(ch) + } + } + + return totalRead + } + } + protected fun readSamples(samples: AudioSamples, offset: Int = 0, count: Int = samples.totalSamples - offset, fully: Boolean = false): Int { return _readShorts(samples.data, offset, count, fully = fully) } override val availableSamples: Int get() = lock { deque.availableRead } final override suspend fun add(samples: AudioSamples, offset: Int, size: Int) { - while (deque.availableRead >= 441 * 4) { - delay(10.milliseconds) - } + while (deque.availableRead >= 1024 * 16) delay(1.milliseconds) lock { deque.write(samples, offset, size) } } } diff --git a/korge-core/src/common/korlibs/audio/sound/Sound.kt b/korge-core/src/common/korlibs/audio/sound/Sound.kt index 359f822bd0..2b419b3222 100644 --- a/korge-core/src/common/korlibs/audio/sound/Sound.kt +++ b/korge-core/src/common/korlibs/audio/sound/Sound.kt @@ -1,31 +1,18 @@ package korlibs.audio.sound -import korlibs.datastructure.Extra -import korlibs.time.DateTime -import korlibs.time.TimeSpan -import korlibs.time.milliseconds -import korlibs.time.seconds -import korlibs.audio.format.AudioDecodingProps -import korlibs.audio.format.AudioFormats -import korlibs.audio.format.WAV -import korlibs.audio.format.defaultAudioFormats -import korlibs.io.async.Signal -import korlibs.io.async.delay -import korlibs.io.concurrent.atomic.korAtomic -import korlibs.io.file.FinalVfsFile -import korlibs.io.file.Vfs -import korlibs.io.file.VfsFile -import korlibs.io.file.baseName -import korlibs.io.lang.Disposable -import korlibs.io.lang.unsupported -import korlibs.io.stream.AsyncStream -import korlibs.io.stream.openAsync +import korlibs.audio.format.* +import korlibs.datastructure.* +import korlibs.datastructure.pauseable.* +import korlibs.io.async.* +import 
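// Sketch: the minimal shape of a backend built on the NewPlatformAudioOutput base above.
// internalStart() owns the platform resource and repeatedly pulls samples via genSafe();
// internalStop() signals the thread to finish. LoggingAudioOutput is an illustrative name, not part
// of this patch; a real backend blocks by handing buffer.data to the device (see the ALSA and
// WaveOut backends later in this diff). Assumes kotlin.coroutines.CoroutineContext and the
// korlibs.datastructure.thread imports those backends already use.
class LoggingAudioOutput(
    coroutineContext: CoroutineContext,
    channels: Int,
    frequency: Int,
    gen: (AudioSamplesInterleaved) -> Unit,
) : NewPlatformAudioOutput(coroutineContext, channels, frequency, gen) {
    private var thread: NativeThread? = null

    override fun internalStart() {
        thread = nativeThread(isDaemon = true) { t ->
            val buffer = AudioSamplesInterleaved(channels, 1024)
            while (t.threadSuggestRunning) {
                genSafe(buffer)  // runs the generator under a lock and applies the panning gain
                println("would submit ${buffer.data.size} shorts to the device here")
            }
        }
    }

    override fun internalStop() {
        thread?.threadSuggestRunning = false
        thread = null
    }
}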
korlibs.io.file.* +import korlibs.io.lang.* +import korlibs.io.stream.* +import korlibs.math.* +import korlibs.time.* import kotlinx.coroutines.* -import kotlin.coroutines.CoroutineContext -import kotlin.native.concurrent.ThreadLocal +import kotlin.coroutines.* import kotlin.coroutines.coroutineContext as coroutineContextKt -@ThreadLocal expect val nativeSoundProvider: NativeSoundProvider open class LazyNativeSoundProvider(val gen: () -> NativeSoundProvider) : NativeSoundProvider() { @@ -34,6 +21,8 @@ open class LazyNativeSoundProvider(val gen: () -> NativeSoundProvider) : NativeS override val target: String get() = parent.target override fun createPlatformAudioOutput(coroutineContext: CoroutineContext, freq: Int): PlatformAudioOutput = parent.createPlatformAudioOutput(coroutineContext, freq) + override fun createNewPlatformAudioOutput(coroutineContext: CoroutineContext, channels: Int, frequency: Int, gen: (AudioSamplesInterleaved) -> Unit): NewPlatformAudioOutput = + parent.createNewPlatformAudioOutput(coroutineContext, channels, frequency, gen) override suspend fun createSound(data: ByteArray, streaming: Boolean, props: AudioDecodingProps, name: String): Sound = parent.createSound(data, streaming, props, name) @@ -51,16 +40,29 @@ open class LazyNativeSoundProvider(val gen: () -> NativeSoundProvider) : NativeS override fun dispose() = parent.dispose() } -open class NativeSoundProvider : Disposable { +open class NativeSoundProviderNew : NativeSoundProvider() { + final override fun createPlatformAudioOutput(coroutineContext: CoroutineContext, freq: Int): PlatformAudioOutput = + PlatformAudioOutputBasedOnNew(this, coroutineContext, freq) +} + +open class NativeSoundProvider() : Disposable, Pauseable { open val target: String = "unknown" - open var paused: Boolean = false + override var paused: Boolean = false + @Deprecated("") open fun createPlatformAudioOutput(coroutineContext: CoroutineContext, freq: Int = 44100): PlatformAudioOutput = PlatformAudioOutput(coroutineContext, freq) - + @Deprecated("") suspend fun createPlatformAudioOutput(freq: Int = 44100): PlatformAudioOutput = createPlatformAudioOutput(coroutineContextKt, freq) - open suspend fun createSound(data: ByteArray, streaming: Boolean = false, props: AudioDecodingProps = AudioDecodingProps.DEFAULT, name: String = "Unknown"): Sound { + open fun createNewPlatformAudioOutput(coroutineContext: CoroutineContext, channels: Int, frequency: Int = 44100, gen: (AudioSamplesInterleaved) -> Unit): NewPlatformAudioOutput { + //println("createNewPlatformAudioOutput: ${this::class}") + return NewPlatformAudioOutput(coroutineContext, channels, frequency, gen) + } + + suspend fun createNewPlatformAudioOutput(nchannels: Int, freq: Int = 44100, gen: (AudioSamplesInterleaved) -> Unit): NewPlatformAudioOutput = createNewPlatformAudioOutput(coroutineContextKt, nchannels, freq, gen) + + open suspend fun createSound(data: ByteArray, streaming: Boolean = false, props: AudioDecodingProps = AudioDecodingProps.DEFAULT, name: String = "Unknown"): Sound { val format = props.formats ?: audioFormats val stream = format.decodeStreamOrError(data.openAsync(), props) return if (streaming) { @@ -74,6 +76,7 @@ open class NativeSoundProvider : Disposable { //open val audioFormats: AudioFormats = AudioFormats(WAV, MP3Decoder, OGG) open suspend fun createSound(vfs: Vfs, path: String, streaming: Boolean = false, props: AudioDecodingProps = AudioDecodingProps.DEFAULT): Sound { + println("createSound.coroutineContext: $coroutineContextKt") return if (streaming) { //val 
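// Sketch: consuming the new pull-based createNewPlatformAudioOutput() API added above with a
// 440 Hz test tone (values illustrative). Assumes a platform nativeSoundProvider is available.
suspend fun playTestTone() {
    var phase = 0.0
    val out = nativeSoundProvider.createNewPlatformAudioOutput(nchannels = 2, freq = 44100) { buffer ->
        for (n in 0 until buffer.totalSamples) {
            val v = (kotlin.math.sin(phase) * 0.25).toFloat()
            buffer.setFloatStereo(n, v, v)
            phase += 2 * kotlin.math.PI * 440.0 / 44100.0
        }
    }
    out.start()
    kotlinx.coroutines.delay(1000L)  // keep the generator running for about one second
    out.stop()
}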
stream = vfs.file(path).open() //createStreamingSound(audioFormats.decodeStreamOrError(stream, props)) { @@ -94,7 +97,8 @@ open class NativeSoundProvider : Disposable { open suspend fun createNonStreamingSound( data: AudioData, name: String = "Unknown" - ): Sound = createStreamingSound(data.toStream(), true, name) + //): Sound = createStreamingSound(data.toStream(), true, name) + ): Sound = SoundAudioData(coroutineContextKt, data, this, true, name) open suspend fun createSound( data: AudioData, @@ -186,6 +190,35 @@ fun SoundProps.copySoundPropsFrom(other: ReadonlySoundProps) { this.panning = other.panning } +fun SoundProps.volumeForChannel(channel: Int): Double { + return when (channel) { + 0 -> panning.convertRangeClamped(-1.0, 0.0, 0.0, 1.0) + else -> 1.0 - panning.convertRangeClamped(0.0, 1.0, 0.0, 1.0) + } +} + +fun SoundProps.applyPropsTo(samples: AudioSamplesInterleaved) { + for (ch in 0 until samples.channels) { + val volume01 = volumeForChannel(ch) + for (n in 0 until samples.totalSamples) { + var sample = samples[ch, n] + sample = (sample * volume01).toInt().toShort() + samples[ch, n] = sample + } + } +} + +fun SoundProps.applyPropsTo(samples: AudioSamples) { + for (ch in 0 until samples.channels) { + val volume01 = volumeForChannel(ch) + for (n in 0 until samples.totalSamples) { + var sample = samples[ch, n] + sample = (sample * volume01).toInt().toShort() + samples[ch, n] = sample + } + } +} + fun SoundProps.copySoundPropsFromCombined(l: ReadonlySoundProps, r: ReadonlySoundProps) { this.volume = l.volume * r.volume this.pitch = l.pitch * r.pitch diff --git a/korge-core/src/common/korlibs/audio/sound/SoundAudioStream.kt b/korge-core/src/common/korlibs/audio/sound/SoundAudioStream.kt index 09751d1cbb..10a6d83fc5 100644 --- a/korge-core/src/common/korlibs/audio/sound/SoundAudioStream.kt +++ b/korge-core/src/common/korlibs/audio/sound/SoundAudioStream.kt @@ -10,6 +10,72 @@ import kotlinx.coroutines.* import kotlin.coroutines.* import kotlin.coroutines.cancellation.CancellationException +@OptIn(ExperimentalCoroutinesApi::class) +class SoundAudioData( + coroutineContext: CoroutineContext, + val audioData: AudioData, + var soundProvider: NativeSoundProvider, + val closeStream: Boolean = false, + override val name: String = "Unknown", + val onComplete: (suspend () -> Unit)? = null +) : Sound(coroutineContext) { + override suspend fun decode(maxSamples: Int): AudioData = audioData + + override fun play(coroutineContext: CoroutineContext, params: PlaybackParameters): SoundChannel { + var pos = 0 + var paused = false + var times = params.times + var nas: NewPlatformAudioOutput? = null + nas = soundProvider.createNewPlatformAudioOutput(coroutineContext, audioData.channels, audioData.rate) { it -> + if (paused) { + // @TODO: paused should not even call this right? 
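// Worked example of the volumeForChannel() mapping defined above, re-derived standalone.
// Assumption: the korlibs convertRangeClamped() is a clamped linear remap from [srcMin, srcMax]
// to [dstMin, dstMax].
fun panGain(panning: Double, channel: Int): Double {
    fun remap(v: Double, srcMin: Double, srcMax: Double, dstMin: Double, dstMax: Double): Double {
        val t = ((v - srcMin) / (srcMax - srcMin)).coerceIn(0.0, 1.0)
        return dstMin + (dstMax - dstMin) * t
    }
    return when (channel) {
        0 -> remap(panning, -1.0, 0.0, 0.0, 1.0)         // left channel
        else -> 1.0 - remap(panning, 0.0, 1.0, 0.0, 1.0) // right channel
    }
}
// panning = -1.0 -> left 0.0, right 1.0; panning = 0.0 -> 1.0, 1.0; panning = +1.0 -> 1.0, 0.0.
// Note: applyPropsTo() folds in only this panning gain; the volume property is not referenced there.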
+ for (ch in 0 until it.channels) { + audioData[ch].fill(0) + } + return@createNewPlatformAudioOutput + } + loop@for (ch in 0 until it.channels) { + val audioDataCh = audioData[ch] + for (n in 0 until it.totalSamples) { + val audioDataPos = pos + n + val sample = if (audioDataPos < audioDataCh.size) audioDataCh[audioDataPos] else 0 + it[ch, n] = sample + } + } + pos += it.totalSamples + if (pos >= audioData.totalSamples) { + pos = 0 + times = times.oneLess + + if (times == PlaybackTimes.ZERO) { + nas?.stop() + } + } + } + nas.copySoundPropsFromCombined(params, this) + nas.start() + return object : SoundChannel(this) { + override var volume: Double by nas::volume + override var pitch: Double by nas::pitch + override var panning: Double by nas::panning + override var current: TimeSpan + get() = audioData.timeAtSample(pos) + set(value) { + pos = audioData.sampleAtTime(value) + } + override val total: TimeSpan get() = audioData.totalTime + override val state: SoundChannelState get() = when { + paused -> SoundChannelState.PAUSED + playing -> SoundChannelState.PLAYING + else -> SoundChannelState.STOPPED + } + override fun pause() { nas.paused = true } + override fun resume() { nas.paused = false } + override fun stop() { nas.stop() } + } + } +} + @OptIn(ExperimentalCoroutinesApi::class) class SoundAudioStream( coroutineContext: CoroutineContext, diff --git a/korge-core/src/common/korlibs/audio/sound/backend/AL.kt b/korge-core/src/common/korlibs/audio/sound/backend/AL.kt deleted file mode 100644 index f465138aff..0000000000 --- a/korge-core/src/common/korlibs/audio/sound/backend/AL.kt +++ /dev/null @@ -1,687 +0,0 @@ -package korlibs.audio.sound.backend - -import korlibs.audio.internal.* -import korlibs.audio.sound.* -import korlibs.datastructure.* -import korlibs.ffi.* -import korlibs.io.async.* -import korlibs.io.lang.* -import korlibs.logger.* -import korlibs.math.* -import korlibs.memory.* -import korlibs.platform.* -import korlibs.time.* -import kotlinx.coroutines.* -import kotlin.coroutines.* -import kotlin.math.* - -class OpenALException(message: String) : RuntimeException(message) - -class FFIOpenALNativeSoundProvider : NativeSoundProvider() { - companion object { - val MAX_AVAILABLE_SOURCES = 100 - } - - val device = (AL.alcOpenDevice(null) ?: throw OpenALException("Can't open OpenAL device")) - val context = (AL.alcCreateContext(device, null) ?: throw OpenALException("Can't get OpenAL context")) - - val sourcePool = Pool { - alGenSourceAndInitialize() - //.also { println("CREATED OpenAL source $it") } - } - val bufferPool = Pool { - AL.alGenBuffer() - //.also { println("CREATED OpenAL buffer $it") } - } - - fun makeCurrent() { - AL.alcMakeContextCurrent(context) - } - - fun unmakeCurrent() { - AL.alcMakeContextCurrent(null) - } - - init { - makeCurrent() - - AL.alListener3f(AL.AL_POSITION, 0f, 0f, 1.0f) - checkAlErrors("alListener3f", 0) - AL.alListener3f(AL.AL_VELOCITY, 0f, 0f, 0f) - checkAlErrors("alListener3f", 0) - AL.alListenerfv(AL.AL_ORIENTATION, floatArrayOf(0.0f, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f)) - checkAlErrors("alListenerfv", 0) - - //java.lang.Runtime.getRuntime().addShutdownHook(Thread { - // unmakeCurrent() - // AL.alcDestroyContext(context) - // AL.alcCloseDevice(device) - //}) - } - - override suspend fun createNonStreamingSound(data: AudioData, name: String): Sound { - if (!AL.loaded) return super.createNonStreamingSound(data, name) - return FFIOpenALSoundNoStream(this, coroutineContext, data, name = name) - } - - override fun createPlatformAudioOutput(coroutineContext: 
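// Sketch: what the SoundAudioData-backed channel above exposes to callers. Hedged: this relies on
// play(...) defaults and the korlibs.time seconds helpers that live outside this diff.
suspend fun demoPlayDecoded(data: AudioData) {
    val sound = nativeSoundProvider.createNonStreamingSound(data)  // now backed by SoundAudioData
    val channel = sound.play(kotlin.coroutines.coroutineContext)
    channel.current = 2.seconds  // seeking is now pos = audioData.sampleAtTime(2.seconds)
    channel.pause()              // state reports PAUSED
    channel.resume()
    channel.stop()               // stops the underlying NewPlatformAudioOutput
}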
CoroutineContext, freq: Int): PlatformAudioOutput { - if (!AL.loaded) return super.createPlatformAudioOutput(coroutineContext, freq) - return FFIOpenALPlatformAudioOutput(this, coroutineContext, freq) - } -} - -class FFIOpenALPlatformAudioOutput( - val provider: FFIOpenALNativeSoundProvider, - coroutineContext: CoroutineContext, - freq: Int, -) : PlatformAudioOutput(coroutineContext, freq) { - var source = 0 - val sourceProv = JnaSoundPropsProvider { source } - override var availableSamples: Int = 0 - - override var pitch: Double by sourceProv::pitch - override var volume: Double by sourceProv::volume - override var panning: Double by sourceProv::panning - - //val source - - //alSourceQueueBuffers - - //val buffersPool = Pool(6) { all.alGenBuffer() } - //val buffers = IntArray(32) - //val buffers = IntArray(6) - - init { - start() - } - - override suspend fun add(samples: AudioSamples, offset: Int, size: Int) { - println("OpenALPlatformAudioOutput.add") - availableSamples += samples.totalSamples - try { - provider.makeCurrent() - val tempBuffers = IntArray(1) - ensureSource() - while (true) { - //val buffer = al.alGetSourcei(source, AL.AL_BUFFER) - //val sampleOffset = al.alGetSourcei(source, AL.AL_SAMPLE_OFFSET) - val processed = AL.alGetSourcei(source, AL.AL_BUFFERS_PROCESSED) - val queued = AL.alGetSourcei(source, AL.AL_BUFFERS_QUEUED) - val total = processed + queued - val state = AL.alGetSourceState(source) - val playing = state == AL.AL_PLAYING - - //println("buffer=$buffer, processed=$processed, queued=$queued, state=$state, playing=$playing, sampleOffset=$sampleOffset") - //println("Samples.add") - - if (processed <= 0 && total >= 6) { - delay(10.milliseconds) - continue - } - - if (total < 6) { - AL.alGenBuffers(1, tempBuffers) - checkAlErrors("alGenBuffers", tempBuffers[0]) - //println("alGenBuffers: ${tempBuffers[0]}") - } else { - AL.alSourceUnqueueBuffers(source, 1, tempBuffers) - checkAlErrors("alSourceUnqueueBuffers", source) - //println("alSourceUnqueueBuffers: ${tempBuffers[0]}") - } - //println("samples: $samples - $offset, $size") - //al.alBufferData(tempBuffers[0], samples.copyOfRange(offset, offset + size), frequency, panning, volume) - AL.alBufferData(tempBuffers[0], samples.copyOfRange(offset, offset + size), frequency, panning) - checkAlErrors("alBufferData", tempBuffers[0]) - AL.alSourceQueueBuffers(source, 1, tempBuffers) - checkAlErrors("alSourceQueueBuffers", tempBuffers[0]) - - //val gain = al.alGetSourcef(source, AL.AL_GAIN) - //val pitch = al.alGetSourcef(source, AL.AL_PITCH) - //println("gain=$gain, pitch=$pitch") - if (!playing) { - AL.alSourcePlay(source) - } - break - } - } finally { - availableSamples -= samples.totalSamples - } - } - - fun ensureSource() { - if (source != 0) return - provider.makeCurrent() - - source = alGenSourceAndInitialize() - //al.alGenBuffers(buffers.size, buffers) - } - - override fun start() { - ensureSource() - AL.alSourcePlay(source) - checkAlErrors("alSourcePlay", source) - //checkAlErrors() - } - - //override fun pause() { - // al.alSourcePause(source) - //} - - override fun stop() { - provider.makeCurrent() - - AL.alSourceStop(source) - if (source != 0) { - AL.alDeleteSource(source) - source = 0 - } - //for (n in buffers.indices) { - // if (buffers[n] != 0) { - // al.alDeleteBuffer(buffers[n]) - // buffers[n] = 0 - // } - //} - } -} - -private class MyStopwatch { - private var running = false - private var ns = 0L - //private val now get() = System.nanoTime() - private val now get() = 
PerformanceCounter.reference.nanoseconds.toLong() - - fun resume() { - if (running) return - toggle() - } - - fun pause() { - if (!running) return - toggle() - } - - fun toggle() { - running = !running - ns = now - ns - } - - val elapsedNanoseconds: Long get() = if (running) now - ns else ns -} - -// https://ffainelli.github.io/openal-example/ -class FFIOpenALSoundNoStream( - val provider: FFIOpenALNativeSoundProvider, - coroutineContext: CoroutineContext, - val data: AudioData?, - override val name: String = "Unknown" -) : Sound(coroutineContext), SoundProps { - private val logger = Logger("OpenALSoundNoStream") - - override suspend fun decode(maxSamples: Int): AudioData = data ?: AudioData.DUMMY - - override var volume: Double = 1.0 - override var pitch: Double = 1.0 - override var panning: Double = 0.0 - - override val length: TimeSpan get() = data?.totalTime ?: 0.seconds - override val nchannels: Int get() = data?.channels ?: 1 - - override fun play(coroutineContext: CoroutineContext, params: PlaybackParameters): SoundChannel { - val data = data ?: return DummySoundChannel(this) - //println("provider.sourcePool.totalItemsInUse=${provider.sourcePool.totalItemsInUse}, provider.sourcePool.totalAllocatedItems=${provider.sourcePool.totalAllocatedItems}, provider.sourcePool.itemsInPool=${provider.sourcePool.itemsInPool}") - if (provider.sourcePool.totalItemsInUse >= FFIOpenALNativeSoundProvider.MAX_AVAILABLE_SOURCES) { - error("OpenAL too many sources in use") - } - provider.makeCurrent() - var buffer = provider.bufferPool.alloc() - var source = provider.sourcePool.alloc() - if (source == -1) logger.warn { "UNEXPECTED[0] source=-1" } - - AL.alBufferData(buffer, data, panning, volume) - - AL.alSourcei(source, AL.AL_BUFFER, buffer) - checkAlErrors("alSourcei", source) - - var stopped = false - - val sourceProvider: () -> Int = { source } - - val channel = object : SoundChannel(this), SoundProps by JnaSoundPropsProvider(sourceProvider) { - private val stopWatch = MyStopwatch() - val totalSamples get() = data.totalSamples - var currentSampleOffset: Int - get() { - if (source < 0) return 0 - return AL.alGetSourcei(source, AL.AL_SAMPLE_OFFSET) - } - set(value) { - if (source < 0) return - AL.alSourcei(source, AL.AL_SAMPLE_OFFSET, value) - } - - val estimatedTotalNanoseconds: Long - get() = total.nanoseconds.toLong() - val estimatedCurrentNanoseconds: Long - get() = stopWatch.elapsedNanoseconds - - override var current: TimeSpan - get() = data.timeAtSample(currentSampleOffset) - set(value) { - if (source < 0) return - AL.alSourcef(source, AL.AL_SEC_OFFSET, value.seconds.toFloat()) - } - override val total: TimeSpan get() = data.totalTime - - override val state: SoundChannelState - get() { - if (source < 0) return SoundChannelState.STOPPED - val result = AL.alGetSourceState(source) - checkAlErrors("alGetSourceState", source) - return when (result) { - AL.AL_INITIAL -> SoundChannelState.INITIAL - AL.AL_PLAYING -> SoundChannelState.PLAYING - AL.AL_PAUSED -> SoundChannelState.PAUSED - AL.AL_STOPPED -> SoundChannelState.STOPPED - else -> SoundChannelState.STOPPED - } - } - - override fun stop() { - if (stopped) return - stopped = true - if (source == -1) logger.warn { "UNEXPECTED[1] source=-1" } - AL.alSourceStop(source) - AL.alSourcei(source, AL.AL_BUFFER, 0) - provider.sourcePool.free(source) - provider.bufferPool.free(buffer) - source = -1 - buffer = -1 - stopWatch.pause() - // We reuse them from the pool - //AL.alDeleteSource(source) - //AL.alDeleteBuffer(buffer) - } - - override fun pause() { - 
AL.alSourcePause(source) - stopWatch.pause() - } - - override fun resume() { - AL.alSourcePlay(source) - stopWatch.resume() - } - }.also { - it.copySoundPropsFromCombined(this@FFIOpenALSoundNoStream, params) - } - launchImmediately(coroutineContext[ContinuationInterceptor] ?: coroutineContext) { - var times = params.times - var startTime = params.startTime - try { - while (times.hasMore && !stopped) { - times = times.oneLess - channel.reset() - AL.alSourcef(source, AL.AL_SEC_OFFSET, startTime.seconds.toFloat()) - channel.resume() - //checkAlErrors("alSourcePlay") - startTime = 0.seconds - while (channel.playingOrPaused) delay(10L) - } - } catch (e: CancellationException) { - params.onCancel?.invoke() - } catch (e: Throwable) { - e.printStackTrace() - } finally { - channel.stop() - params.onFinish?.invoke() - } - } - return channel - } -} - -class JnaSoundPropsProvider(val sourceProvider: () -> Int) : SoundProps { - val source get() = sourceProvider() - - private val temp1 = FloatArray(3) - private val temp2 = FloatArray(3) - private val temp3 = FloatArray(3) - - override var pitch: Double - get() = if (source < 0) 1.0 else AL.alGetSourcef(source, AL.AL_PITCH).toDouble() - set(value) { - if (source < 0) return - AL.alSourcef(source, AL.AL_PITCH, value.toFloat()) - } - override var volume: Double - get() = if (source < 0) 1.0 else AL.alGetSourcef(source, AL.AL_GAIN).toDouble() - set(value) { - if (source < 0) return - AL.alSourcef(source, AL.AL_GAIN, value.toFloat()) - } - override var panning: Double - get() { - if (source < 0) return 0.0 - AL.alGetSource3f(source, AL.AL_POSITION, temp1, temp2, temp3) - return temp1[0].toDouble() - } - set(value) { - if (source < 0) return - val pan = value.toFloat() - AL.alSourcef(source, AL.AL_ROLLOFF_FACTOR, 0.0f) - AL.alSourcei(source, AL.AL_SOURCE_RELATIVE, 1) - AL.alSource3f(source, AL.AL_POSITION, pan, 0f, -sqrt(1.0f - pan * pan)) - //println("SET PANNING: source=$source, pan=$pan") - } -} - -private fun AL.alBufferData(buffer: Int, data: AudioSamples, freq: Int, panning: Double = 0.0, volume: Double = 1.0) { - alBufferData(buffer, AudioData(freq, data), panning, volume) -} - -private fun applyStereoPanningInline(interleaved: ShortArray, panning: Double = 0.0, volume: Double = 1.0) { - if (panning == 0.0 || volume != 1.0) return - val vvolume = volume.clamp01() - val rratio = (((panning + 1.0) / 2.0).clamp01() * vvolume).toFloat() - val lratio = ((1.0 - rratio) * vvolume).toFloat() - //println("panning=$panning, lratio=$lratio, rratio=$rratio, vvolume=$vvolume") - for (n in interleaved.indices step 2) { - interleaved[n + 0] = (interleaved[n + 0] * lratio).coerceToShort() - interleaved[n + 1] = (interleaved[n + 1] * rratio).coerceToShort() - } -} - -private fun AL.alBufferData(buffer: Int, data: AudioData, panning: Double = 0.0, volume: Double = 1.0) { - val samples = data.samplesInterleaved.data - if (data.stereo && panning != 0.0) applyStereoPanningInline(samples, panning, volume) - //val bufferData = ShortBuffer.wrap(samples) - //val bufferData = Int16Buffer(samples) - val bufferData = samples - val format = if (data.stereo) AL.AL_FORMAT_STEREO16 else AL.AL_FORMAT_MONO16 - val samplesData = if (samples.isNotEmpty()) bufferData else null - val bytesSize = samples.size * 2 - val rate = data.rate - AL.alBufferData(buffer, format, samplesData, bytesSize, rate) - checkAlErrors("alBufferData", buffer) -} - -private fun alGenSourceAndInitialize() = AL.alGenSource().also { source -> - AL.alSourcef(source, AL.AL_PITCH, 1f) - AL.alSourcef(source, AL.AL_GAIN, 
1f) - AL.alSource3f(source, AL.AL_POSITION, 0f, 0f, 0f) - AL.alSource3f(source, AL.AL_VELOCITY, 0f, 0f, 0f) - AL.alSourcei(source, AL.AL_LOOPING, AL.AL_FALSE) - AL.alSourceStop(source) -} - -fun ALerrorToString(value: Int): String = when (value) { - AL.AL_INVALID_NAME -> "AL_INVALID_NAME" - AL.AL_INVALID_ENUM -> "AL_INVALID_ENUM" - AL.AL_INVALID_VALUE -> "AL_INVALID_VALUE" - AL.AL_INVALID_OPERATION -> "AL_INVALID_OPERATION" - AL.AL_OUT_OF_MEMORY -> "AL_OUT_OF_MEMORY" - else -> "UNKNOWN" -} - -//fun checkAlErrors(name: String, value: Int = -1) { -fun checkAlErrors(name: String, value: Int) { - //AL.alGetError().also { error -> if (error != AL.AL_NO_ERROR) Console.error("OpenAL error ${error.shex} (${ALerrorToString(error)}) '$name' (value=$value)") } -} - -@Suppress("unused") -//object AL : FFILib(nativeOpenALLibraryPath, "/System/Library/Frameworks/OpenAL.framework/OpenAL") { -object AL : FFILib(nativeOpenALLibraryPath, "OpenAL", "AL") { - private val logger = Logger("AL") - - val alDopplerFactor: (value: Float) -> Unit by func() - val alDopplerVelocity: (value: Float) -> Unit by func() - val alSpeedOfSound: (value: Float) -> Unit by func() - val alDistanceModel: (distanceModel: Int) -> Unit by func() - val alEnable: (capability: Int) -> Unit by func() - val alDisable: (capability: Int) -> Unit by func() - val alIsEnabled: (capability: Int) -> Boolean by func() - val alGetString: (param: Int) -> String by func() - val alGetBooleanv: (param: Int, values: BooleanArray) -> Unit by func() - val alGetIntegerv: (param: Int, values: IntArray) -> Unit by func() - val alGetFloatv: (param: Int, values: FloatArray) -> Unit by func() - val alGetDoublev: (param: Int, values: DoubleArray) -> Unit by func() - val alGetBoolean: (param: Int) -> Boolean by func() - val alGetInteger: (param: Int) -> Int by func() - val alGetFloat: (param: Int) -> Float by func() - val alGetDouble: (param: Int) -> Double by func() - val alGetError: () -> Int by func() - val alIsExtensionPresent: (extname: String) -> Boolean by func() - val alGetProcAddress: (fname: String) -> FFIPointer? 
by func() - val alGetEnumValue: (ename: String) -> Int by func() - val alListenerf: (param: Int, value: Float) -> Unit by func() - val alListener3f: (param: Int, value1: Float, value2: Float, value3: Float) -> Unit by func() - val alListenerfv: (param: Int, values: FloatArray) -> Unit by func() - val alListeneri: (param: Int, value: Int) -> Unit by func() - val alListener3i: (param: Int, value1: Int, value2: Int, value3: Int) -> Unit by func() - val alListeneriv: (param: Int, values: IntArray) -> Unit by func() - val alGetListenerf: (param: Int, value: FloatArray) -> Unit by func() - val alGetListener3f: (param: Int, value1: FloatArray, value2: FloatArray, value3: FloatArray) -> Unit by func() - val alGetListenerfv: (param: Int, values: FloatArray) -> Unit by func() - val alGetListeneri: (param: Int, value: IntArray) -> Unit by func() - val alGetListener3i: (param: Int, value1: IntArray, value2: IntArray, value3: IntArray) -> Unit by func() - val alGetListeneriv: (param: Int, values: IntArray) -> Unit by func() - val alGenSources: (n: Int, sources: IntArray) -> Unit by func() - val alDeleteSources: (n: Int, sources: IntArray) -> Unit by func() - val alIsSource: (source: Int) -> Boolean by func() - val alSourcef: (source: Int, param: Int, value: Float) -> Unit by func() - val alSource3f: (source: Int, param: Int, value1: Float, value2: Float, value3: Float) -> Unit by func() - val alSourcefv: (source: Int, param: Int, values: FloatArray) -> Unit by func() - val alSourcei: (source: Int, param: Int, value: Int) -> Unit by func() - val alSource3i: (source: Int, param: Int, value1: Int, value2: Int, value3: Int) -> Unit by func() - val alSourceiv: (source: Int, param: Int, values: IntArray) -> Unit by func() - val alGetSourcef: (source: Int, param: Int, value: FloatArray) -> Unit by func() - val alGetSource3f: (source: Int, param: Int, value1: FloatArray, value2: FloatArray, value3: FloatArray) -> Unit by func() - val alGetSourcefv: (source: Int, param: Int, values: FloatArray) -> Unit by func() - val alGetSourcei: (source: Int, param: Int, value: IntArray) -> Unit by func() - val alGetSource3i: (source: Int, param: Int, value1: IntArray, value2: IntArray, value3: IntArray) -> Unit by func() - val alGetSourceiv: (source: Int, param: Int, values: IntArray) -> Unit by func() - val alSourcePlayv: (n: Int, sources: IntArray) -> Unit by func() - val alSourceStopv: (n: Int, sources: IntArray) -> Unit by func() - val alSourceRewindv: (n: Int, sources: IntArray) -> Unit by func() - val alSourcePausev: (n: Int, sources: IntArray) -> Unit by func() - val alSourcePlay: (source: Int) -> Unit by func() - val alSourceStop: (source: Int) -> Unit by func() - val alSourceRewind: (source: Int) -> Unit by func() - val alSourcePause: (source: Int) -> Unit by func() - val alSourceQueueBuffers: (source: Int, nb: Int, buffers: IntArray) -> Unit by func() - val alSourceUnqueueBuffers: (source: Int, nb: Int, buffers: IntArray) -> Unit by func() - val alGenBuffers: (n: Int, buffers: IntArray) -> Unit by func() - val alDeleteBuffers: (n: Int, buffers: IntArray) -> Unit by func() - val alIsBuffer: (buffer: Int) -> Boolean by func() - //val alBufferData: (buffer: Int, format: Int, data: Buffer?, size: Int, freq: Int) -> Unit by func() - val alBufferData: (buffer: Int, format: Int, data: ShortArray?, size: Int, freq: Int) -> Unit by func() - val alBufferf: (buffer: Int, param: Int, value: Float) -> Unit by func() - val alBuffer3f: (buffer: Int, param: Int, value1: Float, value2: Float, value3: Float) -> Unit by func() - val 
alBufferfv: (buffer: Int, param: Int, values: FloatArray) -> Unit by func() - val alBufferi: (buffer: Int, param: Int, value: Int) -> Unit by func() - val alBuffer3i: (buffer: Int, param: Int, value1: Int, value2: Int, value3: Int) -> Unit by func() - val alBufferiv: (buffer: Int, param: Int, values: IntArray) -> Unit by func() - val alGetBufferf: (buffer: Int, param: Int, value: FloatArray) -> Unit by func() - val alGetBuffer3f: (buffer: Int, param: Int, value1: FloatArray, value2: FloatArray, value3: FloatArray) -> Unit by func() - val alGetBufferfv: (buffer: Int, param: Int, values: FloatArray) -> Unit by func() - val alGetBufferi: (buffer: Int, param: Int, value: IntArray) -> Unit by func() - val alGetBuffer3i: (buffer: Int, param: Int, value1: IntArray, value2: IntArray, value3: IntArray) -> Unit by func() - val alGetBufferiv: (buffer: Int, param: Int, values: IntArray) -> Unit by func() - - private val tempF = FloatArray(1) - private val tempI = IntArray(1) - - fun alGenBuffer(): Int = tempI.also { alGenBuffers(1, it) }[0] - fun alGenSource(): Int = tempI.also { alGenSources(1, it) }[0] - fun alDeleteBuffer(buffer: Int) { alDeleteBuffers(1, tempI.also { it[0] = buffer }) } - fun alDeleteSource(buffer: Int) { alDeleteSources(1, tempI.also { it[0] = buffer }) } - fun alGetSourcef(source: Int, param: Int): Float = tempF.also { alGetSourcef(source, param, it) }[0] - fun alGetSourcei(source: Int, param: Int): Int = tempI.also { alGetSourcei(source, param, it) }[0] - fun alGetSourceState(source: Int): Int = alGetSourcei(source, AL.AL_SOURCE_STATE) - - const val AL_NONE = 0 - const val AL_FALSE = 0 - const val AL_TRUE = 1 - const val AL_SOURCE_RELATIVE = 0x202 - const val AL_CONE_INNER_ANGLE = 0x1001 - const val AL_CONE_OUTER_ANGLE = 0x1002 - const val AL_PITCH = 0x1003 - const val AL_POSITION = 0x1004 - const val AL_DIRECTION = 0x1005 - const val AL_VELOCITY = 0x1006 - const val AL_LOOPING = 0x1007 - const val AL_BUFFER = 0x1009 - const val AL_GAIN = 0x100A - const val AL_MIN_GAIN = 0x100D - const val AL_MAX_GAIN = 0x100E - const val AL_ORIENTATION = 0x100F - const val AL_SOURCE_STATE = 0x1010 - const val AL_INITIAL = 0x1011 - const val AL_PLAYING = 0x1012 - const val AL_PAUSED = 0x1013 - const val AL_STOPPED = 0x1014 - const val AL_BUFFERS_QUEUED = 0x1015 - const val AL_BUFFERS_PROCESSED = 0x1016 - const val AL_REFERENCE_DISTANCE = 0x1020 - const val AL_ROLLOFF_FACTOR = 0x1021 - const val AL_CONE_OUTER_GAIN = 0x1022 - const val AL_MAX_DISTANCE = 0x1023 - const val AL_SEC_OFFSET = 0x1024 - const val AL_SAMPLE_OFFSET = 0x1025 - const val AL_BYTE_OFFSET = 0x1026 - const val AL_SOURCE_TYPE = 0x1027 - const val AL_STATIC = 0x1028 - const val AL_STREAMING = 0x1029 - const val AL_UNDETERMINED = 0x1030 - const val AL_FORMAT_MONO8 = 0x1100 - const val AL_FORMAT_MONO16 = 0x1101 - const val AL_FORMAT_STEREO8 = 0x1102 - const val AL_FORMAT_STEREO16 = 0x1103 - const val AL_FREQUENCY = 0x2001 - const val AL_BITS = 0x2002 - const val AL_CHANNELS = 0x2003 - const val AL_SIZE = 0x2004 - const val AL_UNUSED = 0x2010 - const val AL_PENDING = 0x2011 - const val AL_PROCESSED = 0x2012 - const val AL_NO_ERROR = 0 - const val AL_INVALID_NAME = 0xA001 - const val AL_INVALID_ENUM = 0xA002 - const val AL_INVALID_VALUE = 0xA003 - const val AL_INVALID_OPERATION = 0xA004 - const val AL_OUT_OF_MEMORY = 0xA005 - const val AL_VENDOR = 0xB001 - const val AL_VERSION = 0xB002 - const val AL_RENDERER = 0xB003 - const val AL_EXTENSIONS = 0xB004 - const val AL_DOPPLER_FACTOR = 0xC000 - const val AL_DOPPLER_VELOCITY = 0xC001 - 
const val AL_SPEED_OF_SOUND = 0xC003 - const val AL_DISTANCE_MODEL = 0xD000 - const val AL_INVERSE_DISTANCE = 0xD001 - const val AL_INVERSE_DISTANCE_CLAMPED = 0xD002 - const val AL_LINEAR_DISTANCE = 0xD003 - const val AL_LINEAR_DISTANCE_CLAMPED = 0xD004 - const val AL_EXPONENT_DISTANCE = 0xD005 - const val AL_EXPONENT_DISTANCE_CLAMPED = 0xD006 - - // ALC - - val alcCreateContext: (device: FFIPointer?, attrlist: IntArray?) -> FFIPointer? by func() - val alcMakeContextCurrent: (context: FFIPointer?) -> Boolean by func() - val alcProcessContext: (context: FFIPointer?) -> Unit by func() - val alcSuspendContext: (context: FFIPointer?) -> Unit by func() - val alcDestroyContext: (context: FFIPointer?) -> Unit by func() - val alcGetCurrentContext: () -> FFIPointer? by func() - val alcGetContextsDevice: (context: FFIPointer?) -> FFIPointer? by func() - val alcOpenDevice: (devicename: String?) -> FFIPointer? by func() - val alcCloseDevice: (device: FFIPointer?) -> Boolean by func() - val alcGetError: (device: FFIPointer?) -> Int by func() - val alcIsExtensionPresent: (device: FFIPointer?, extname: String) -> Boolean by func() - val alcGetProcAddress: (device: FFIPointer?, funcname: String) -> FFIPointer? by func() - val alcGetEnumValue: (device: FFIPointer?, enumname: String) -> Int by func() - val alcGetString: (device: FFIPointer?, param: Int) -> String by func() - val alcGetIntegerv: (device: FFIPointer?, param: Int, size: Int, values: IntArray) -> Unit by func() - val alcCaptureOpenDevice: (devicename: String, frequency: Int, format: Int, buffersize: Int) -> FFIPointer? by func() - val alcCaptureCloseDevice: (device: FFIPointer?) -> Boolean by func() - val alcCaptureStart: (device: FFIPointer?) -> Unit by func() - val alcCaptureStop: (device: FFIPointer?) -> Unit by func() - val alcCaptureSamples: (device: FFIPointer?, buffer: Buffer, samples: Int) -> Unit by func() - - const val ALC_FALSE = 0 - const val ALC_TRUE = 1 - const val ALC_FREQUENCY = 0x1007 - const val ALC_REFRESH = 0x1008 - const val ALC_SYNC = 0x1009 - const val ALC_MONO_SOURCES = 0x1010 - const val ALC_STEREO_SOURCES = 0x1011 - const val ALC_NO_ERROR = 0 - const val ALC_INVALID_DEVICE = 0xA001 - const val ALC_INVALID_CONTEXT = 0xA002 - const val ALC_INVALID_ENUM = 0xA003 - const val ALC_INVALID_VALUE = 0xA004 - const val ALC_OUT_OF_MEMORY = 0xA005 - const val ALC_MAJOR_VERSION = 0x1000 - const val ALC_MINOR_VERSION = 0x1001 - const val ALC_ATTRIBUTES_SIZE = 0x1002 - const val ALC_ALL_ATTRIBUTES = 0x1003 - const val ALC_DEFAULT_DEVICE_SPECIFIER = 0x1004 - const val ALC_DEVICE_SPECIFIER = 0x1005 - const val ALC_EXTENSIONS = 0x1006 - const val ALC_EXT_CAPTURE = 1 - const val ALC_CAPTURE_DEVICE_SPECIFIER = 0x310 - const val ALC_CAPTURE_DEFAULT_DEVICE_SPECIFIER = 0x311 - const val ALC_CAPTURE_SAMPLES = 0x312 - const val ALC_ENUMERATE_ALL_EXT = 1 - const val ALC_DEFAULT_ALL_DEVICES_SPECIFIER = 0x1012 - const val ALC_ALL_DEVICES_SPECIFIER = 0x1013 -} - -val nativeOpenALLibraryPath: String? 
by lazy { - Environment["OPENAL_LIB_PATH"]?.let { path -> - return@lazy path - } - if (Environment["KORAU_JVM_DUMMY_SOUND"] == "true") { - return@lazy null - } - when { - Platform.isMac -> "OpenAL" // Mac already includes the OpenAL library - Platform.isLinux -> "libopenal.so.1" - Platform.isWindows -> "soft_oal.dll" - else -> { - println(" - Unknown/Unsupported OS") - null - } - } -} - -/* -//private val arch by lazy { System.getProperty("os.arch").toLowerCase() } -//private val alClassLoader by lazy { AL::class.java.classLoader } -//private fun getNativeFileURL(path: String): URL? = alClassLoader.getResource(path) -private fun getNativeFile(path: String): ByteArray = getNativeFileURL(path)?.readBytes() ?: error("Can't find '$path'") -private fun getNativeFileLocalPath(path: String): String { - val tempDir = File(System.getProperty("java.io.tmpdir")) - //val tempFile = File.createTempFile("libopenal_", ".${File(path).extension}") - val tempFile = File(tempDir, "korau_openal.${File(path).extension}") - - val expectedSize = getNativeFileURL(path)?.openStream()?.use { it.available().toLong() } - - if (!tempFile.exists() || tempFile.length() != expectedSize) { - try { - tempFile.writeBytes(getNativeFile(path)) - } catch (e: Throwable) { - e.printStackTrace() - } - } - return tempFile.absolutePath -} - -internal inline fun runCatchingAl(block: () -> T): T? { - val result = runCatching { block() } - if (result.isFailure) { - result.exceptionOrNull()?.printStackTrace() - } - return result.getOrNull() -} -*/ diff --git a/korge-core/src/common/korlibs/audio/sound/backend/ALSA.kt b/korge-core/src/common/korlibs/audio/sound/backend/ALSA.kt index 95d713969a..1da6f5680b 100644 --- a/korge-core/src/common/korlibs/audio/sound/backend/ALSA.kt +++ b/korge-core/src/common/korlibs/audio/sound/backend/ALSA.kt @@ -4,120 +4,78 @@ import korlibs.audio.sound.* import korlibs.datastructure.thread.* import korlibs.ffi.* import korlibs.io.lang.* -import korlibs.time.* -import kotlinx.coroutines.* import kotlin.coroutines.* object FFIALSANativeSoundProvider : NativeSoundProvider() { - override fun createPlatformAudioOutput(coroutineContext: CoroutineContext, freq: Int): PlatformAudioOutput { + override fun createNewPlatformAudioOutput(coroutineContext: CoroutineContext, channels: Int, frequency: Int, gen: (AudioSamplesInterleaved) -> Unit): NewPlatformAudioOutput { //println("ALSANativeSoundProvider.createPlatformAudioOutput(freq=$freq)") - return ALSAPlatformAudioOutput(this, coroutineContext, freq) + return ALSAPlatformAudioOutput(this, coroutineContext, channels, frequency, gen) } } class ALSAPlatformAudioOutput( val soundProvider: FFIALSANativeSoundProvider, coroutineContext: CoroutineContext, + channels: Int, frequency: Int, -) : DequeBasedPlatformAudioOutput(coroutineContext, frequency) { + gen: (AudioSamplesInterleaved) -> Unit, +) : NewPlatformAudioOutput(coroutineContext, channels, frequency, gen) { //var nativeThread: Job? = null var nativeThread: NativeThread? = null - var running = false - - var pcm: FFIPointer? 
= null - - override suspend fun wait() { - running = false - //println("WAITING") - val time = measureTime { - while (pcm.address != 0L) { - delay(10.milliseconds) - } - } - //println("WAITED: time=$time") - //super.wait() - } - override fun start() { - if (running) return - running = true + override fun internalStart() { //nativeThread = launchImmediately(coroutineContext) { - nativeThread = nativeThread(isDaemon = true) { - // @TODO: + nativeThread = nativeThread(isDaemon = true) { thread -> + val buffer = AudioSamplesInterleaved(channels, 1024) + val pcm = A2.snd_pcm_open("default", A2.SND_PCM_STREAM_PLAYBACK, 0) + if (pcm.address == 0L) { + error("Can't initialize ALSA") + //running = false + //return@nativeThread + } - val temp = AudioSamplesInterleaved(nchannels, 1024) + //val latency = 8 * 4096 + val latency = 32 * 4096 + A2.snd_pcm_set_params( + pcm, + A2.SND_PCM_FORMAT_S16_LE, + A2.SND_PCM_ACCESS_RW_INTERLEAVED, + channels, + frequency, + 1, + latency + ) try { - while (running || availableRead > 0) { - val readCount = readShortsInterleaved(temp) - - if (readCount == 0) { + while (thread.threadSuggestRunning) { + genSafe(buffer) + val written = A2.snd_pcm_writei(pcm, buffer.data, 0, buffer.totalSamples * channels, buffer.totalSamples) + //println("offset=$offset, pending=$pending, written=$written") + if (written == -A2.EPIPE) { + //println("ALSA: EPIPE error") + //A2.snd_pcm_prepare(pcm) + A2.snd_pcm_recover(pcm, written, 0) + continue + //blockingSleep(1.milliseconds) + } else if (written < 0) { + println("ALSA: OTHER error: $written") //delay(1.milliseconds) Thread_sleep(1L) - continue - } - - //println("readCount=$readCount") - var offset = 0 - var pending = readCount - while (pending > 0) { - if (pcm == null) { - pcm = A2.snd_pcm_open("default", A2.SND_PCM_STREAM_PLAYBACK, 0) - if (pcm.address == 0L) { - error("Can't initialize ALSA") - //running = false - //return@nativeThread - } - - //val latency = 8 * 4096 - val latency = 32 * 4096 - A2.snd_pcm_set_params( - pcm, - A2.SND_PCM_FORMAT_S16_LE, - A2.SND_PCM_ACCESS_RW_INTERLEAVED, - nchannels, - frequency, - 1, - latency - ) - } - - val written = A2.snd_pcm_writei(pcm, temp.data, offset * nchannels, pending * nchannels, pending) - //println("offset=$offset, pending=$pending, written=$written") - if (written == -A2.EPIPE) { - //println("ALSA: EPIPE error") - //A2.snd_pcm_prepare(pcm) - A2.snd_pcm_recover(pcm, written, 0) - offset = 0 - pending = readCount - continue - //blockingSleep(1.milliseconds) - } else if (written < 0) { - println("ALSA: OTHER error: $written") - //delay(1.milliseconds) - Thread_sleep(1L) - break - } else { - offset += written - pending -= written - } + break } } } finally { //println("!!COMPLETED : pcm=$pcm") - if (pcm.address != 0L) { - A2.snd_pcm_wait(pcm, 1000) - A2.snd_pcm_drain(pcm) - A2.snd_pcm_close(pcm) - pcm = null - //println("!!CLOSED = $pcm") - } + A2.snd_pcm_wait(pcm, 1000) + A2.snd_pcm_drain(pcm) + A2.snd_pcm_close(pcm) + //println("!!CLOSED = $pcm") } } } - override fun stop() { - running = false - super.stop() + override fun internalStop() { + nativeThread?.threadSuggestRunning = false + nativeThread = null } } diff --git a/korge-core/src/common/korlibs/audio/sound/backend/JvmWaveOutImpl.kt b/korge-core/src/common/korlibs/audio/sound/backend/JvmWaveOutImpl.kt index 2265c345ed..2d2a3e82e9 100644 --- a/korge-core/src/common/korlibs/audio/sound/backend/JvmWaveOutImpl.kt +++ b/korge-core/src/common/korlibs/audio/sound/backend/JvmWaveOutImpl.kt @@ -3,20 +3,21 @@ package 
korlibs.audio.sound.backend import korlibs.audio.sound.* import korlibs.datastructure.thread.* import korlibs.ffi.* -import korlibs.io.async.* import korlibs.io.lang.* import korlibs.memory.* -import korlibs.time.* -import kotlinx.coroutines.* import kotlin.coroutines.* val jvmWaveOutNativeSoundProvider: NativeSoundProvider? by lazy { JvmWaveOutNativeSoundProvider() } -class JvmWaveOutNativeSoundProvider : NativeSoundProvider() { - override fun createPlatformAudioOutput(coroutineContext: CoroutineContext, freq: Int): PlatformAudioOutput = - JvmWaveOutPlatformAudioOutput(this, coroutineContext, freq) +class JvmWaveOutNativeSoundProvider : NativeSoundProviderNew() { + override fun createNewPlatformAudioOutput( + coroutineContext: CoroutineContext, + channels: Int, + frequency: Int, + gen: (AudioSamplesInterleaved) -> Unit + ): NewPlatformAudioOutput = JvmWaveOutNewPlatformAudioOutput(coroutineContext, channels, frequency, gen) } /* @@ -68,39 +69,40 @@ class JvmWaveOutPlatformAudioOutputNew( } */ -class JvmWaveOutPlatformAudioOutput( - val provider: JvmWaveOutNativeSoundProvider, +class JvmWaveOutNewPlatformAudioOutput( coroutineContext: CoroutineContext, - frequency: Int -) : DequeBasedPlatformAudioOutput(coroutineContext, frequency) { + nchannels: Int, + freq: Int, + gen: (AudioSamplesInterleaved) -> Unit +) : NewPlatformAudioOutput(coroutineContext, nchannels, freq, gen) { val samplesLock = korlibs.datastructure.lock.NonRecursiveLock() var nativeThread: NativeThread? = null var running = false var totalEmittedSamples = 0L - override suspend fun wait() { - // @TODO: Get samples not reproduced - //println("WAITING...") - for (n in 0 until 1000) { - var currentPositionInSamples: Long = 0L - var totalEmittedSamples: Long = 0L - var availableRead = 0 - samplesLock { - currentPositionInSamples = WINMM.waveOutGetPositionInSamples(handle) - availableRead = this.availableRead - totalEmittedSamples = this.totalEmittedSamples - } - //println("availableRead=$availableRead, waveOutGetPosition=$currentPositionInSamples, totalEmittedSamples=$totalEmittedSamples") - if (availableRead <= 0 && currentPositionInSamples >= totalEmittedSamples) break - delay(1.milliseconds) - } - //println("DONE WAITING") - } + //override suspend fun wait() { + // // @TODO: Get samples not reproduced + // //println("WAITING...") + // for (n in 0 until 1000) { + // var currentPositionInSamples: Long = 0L + // var totalEmittedSamples: Long = 0L + // var availableRead = 0 + // samplesLock { + // currentPositionInSamples = WINMM.waveOutGetPositionInSamples(handle) + // availableRead = this.availableRead + // totalEmittedSamples = this.totalEmittedSamples + // } + // //println("availableRead=$availableRead, waveOutGetPosition=$currentPositionInSamples, totalEmittedSamples=$totalEmittedSamples") + // if (availableRead <= 0 && currentPositionInSamples >= totalEmittedSamples) break + // delay(1.milliseconds) + // } + // //println("DONE WAITING") + //} private var handle: FFIPointer? 
= null private var headers = emptyArray() - override fun start() { + override fun internalStart() { //println("TRYING TO START") if (running) return //println("STARTED") @@ -110,13 +112,13 @@ class JvmWaveOutPlatformAudioOutput( val arena = this val handlePtr = allocBytes(8).typed() val freq = frequency - val blockAlign = (nchannels * Short.SIZE_BYTES) + val blockAlign = (channels * Short.SIZE_BYTES) val format = WAVEFORMATEX(allocBytes(WAVEFORMATEX().size)).also { format -> format.wFormatTag = WINMM.WAVE_FORMAT_PCM.toShort() - format.nChannels = nchannels.toShort() // 2? + format.nChannels = channels.toShort() // 2? format.nSamplesPerSec = freq.toInt() format.wBitsPerSample = Short.SIZE_BITS.toShort() // 16 - format.nBlockAlign = ((nchannels * Short.SIZE_BYTES).toShort()) + format.nBlockAlign = ((channels * Short.SIZE_BYTES).toShort()) format.nAvgBytesPerSec = freq * blockAlign format.cbSize = format.size.toShort() } @@ -126,28 +128,26 @@ class JvmWaveOutPlatformAudioOutput( handle = handlePtr[0] //println("handle=$handle") - headers = Array(3) { WaveHeader(it, handle, 1024, nchannels, arena) } + headers = Array(4) { WaveHeader(it, handle, 1024, channels, arena) } try { - while (running || availableRead > 0) { + while (running) { + var queued = 0 for (header in headers) { if (!header.hdr.isInQueue) { + genSafe(header.samples) + header.prepareAndWrite() + queued++ //println("Sending running=$running, availableRead=$availableRead, header=${header}") - if (availableRead > 0) { - samplesLock { - totalEmittedSamples += header.totalSamples - readShortsFully(header.data) - } - header.prepareAndWrite() - } } - Thread_sleep(1L) } + if (queued == 0) Thread_sleep(1L) } } finally { - runBlockingNoJs { - wait() - } + for (header in headers) header.dispose() + //runBlockingNoJs { + // wait() + //} WINMM.waveOutReset(handle) WINMM.waveOutClose(handle) handle = null @@ -160,51 +160,53 @@ class JvmWaveOutPlatformAudioOutput( } } - override fun stop() { + override fun internalStop() { running = false //println("STOPPING") } +} - private class WaveHeader( - val id: Int, - val handle: FFIPointer?, - val totalSamples: Int, - val nchannels: Int, - val arena: FFIArena, - ) { - val data = Array(nchannels) { ShortArray(totalSamples) } - val totalBytes = (totalSamples * nchannels * Short.SIZE_BYTES) - val dataMem = arena.allocBytes(totalBytes).typed() - val hdr = WAVEHDR(arena.allocBytes(WAVEHDR().size)).also { hdr -> - hdr.lpData = dataMem.reinterpret() - hdr.dwBufferLength = totalBytes - hdr.dwFlags = 0 - } - fun prepareAndWrite(totalSamples: Int = this.totalSamples) { - //println(data[0].toList()) +private class WaveHeader( + val id: Int, + val handle: FFIPointer?, + val totalSamples: Int, + val channels: Int, + val arena: FFIArena, +) { + val samples = AudioSamplesInterleaved(channels, totalSamples) + + val totalBytes = (totalSamples * channels * Short.SIZE_BYTES) + val dataMem = arena.allocBytes(totalBytes).typed() + val hdr = WAVEHDR(arena.allocBytes(WAVEHDR().size)).also { hdr -> + hdr.lpData = dataMem.reinterpret() + hdr.dwBufferLength = totalBytes + hdr.dwFlags = 0 + } - hdr.dwBufferLength = (totalSamples * nchannels * Short.SIZE_BYTES) + fun prepareAndWrite(totalSamples: Int = this.totalSamples) { + //println(data[0].toList()) - for (ch in 0 until nchannels) { - for (n in 0 until totalSamples) { - dataMem[n * nchannels + ch] = data[ch][n] - } - } - //if (hdr.isPrepared) dispose() - if (!hdr.isPrepared) { - //println("-> prepare") - WINMM.waveOutPrepareHeader(handle, hdr.ptr, hdr.size) - } - 
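The waveOut loop above keeps a small pool of headers in flight: on every pass it refills whichever headers the driver has finished with, and it only sleeps when nothing was free. The shape of that scheduling, sketched without any WINMM types (`Buffer`, `fill` and `submit` are illustrative stand-ins, not korlibs API):

```kotlin
class Buffer(size: Int) {
    var inFlight = false          // set on submit, cleared once the "device" has consumed it
    val data = ShortArray(size)
}

fun pumpLoop(
    buffers: List<Buffer>,
    running: () -> Boolean,
    fill: (ShortArray) -> Unit,   // analogous to genSafe(header.samples)
    submit: (Buffer) -> Unit,     // analogous to header.prepareAndWrite()
) {
    while (running()) {
        var queued = 0
        for (buffer in buffers) {
            if (!buffer.inFlight) {           // free buffer: refill it and hand it back
                fill(buffer.data)
                buffer.inFlight = true
                submit(buffer)
                queued++
            }
        }
        if (queued == 0) Thread.sleep(1L)     // everything still queued: let the device drain
    }
}
```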
WINMM.waveOutWrite(handle, hdr.ptr, hdr.size) - } + val channels = this.channels + hdr.dwBufferLength = (totalSamples * channels * Short.SIZE_BYTES) - fun dispose() { - WINMM.waveOutUnprepareHeader(handle, hdr.ptr, hdr.size) + val samplesData = samples.data + for (n in 0 until channels * totalSamples) { + dataMem[n] = samplesData[n] + } + //if (hdr.isPrepared) dispose() + if (!hdr.isPrepared) { + //println("-> prepare") + WINMM.waveOutPrepareHeader(handle, hdr.ptr, hdr.size) } + WINMM.waveOutWrite(handle, hdr.ptr, hdr.size) + } - override fun toString(): String = "WaveHeader(id=$id, totalSamples=$totalSamples, nchannels=$nchannels, hdr=$hdr)" + fun dispose() { + WINMM.waveOutUnprepareHeader(handle, hdr.ptr, hdr.size) } + + override fun toString(): String = "WaveHeader(id=$id, totalSamples=$totalSamples, nchannels=$channels, hdr=$hdr)" } internal typealias LPHWAVEOUT = FFIPointer diff --git a/korge-core/src/common/korlibs/audio/sound/fade/SoundChannelFade.kt b/korge-core/src/common/korlibs/audio/sound/fade/SoundChannelFade.kt index aab794a717..93bc9ccc6e 100644 --- a/korge-core/src/common/korlibs/audio/sound/fade/SoundChannelFade.kt +++ b/korge-core/src/common/korlibs/audio/sound/fade/SoundChannelFade.kt @@ -7,14 +7,11 @@ import korlibs.math.* import korlibs.math.interpolation.* import korlibs.time.* import kotlinx.coroutines.* -import kotlin.native.concurrent.* val DEFAULT_FADE_TIME get() = 0.5.seconds val DEFAULT_FADE_EASING get() = Easing.LINEAR -@ThreadLocal private val SoundChannel.fadeThread by extraProperty { AsyncThread() } -@ThreadLocal private var SoundChannel.changing by extraProperty { false } private inline fun SoundChannel.changing(block: () -> T): T { changing = true diff --git a/korge-core/src/common/korlibs/io/async/AsyncExt.kt b/korge-core/src/common/korlibs/io/async/AsyncExt.kt index 2a3ec9d64f..e017a62307 100644 --- a/korge-core/src/common/korlibs/io/async/AsyncExt.kt +++ b/korge-core/src/common/korlibs/io/async/AsyncExt.kt @@ -6,7 +6,6 @@ import korlibs.platform.* import korlibs.time.* import kotlinx.coroutines.* import kotlin.coroutines.* -import kotlin.native.concurrent.* private val logger = Logger("AsyncExt") @@ -22,6 +21,18 @@ suspend fun CoroutineContext.launchUnscopedAndWait(block: suspend () -> T): return deferred.await() } +fun CoroutineContext.onCancel(block: () -> Unit): Cancellable { + var running = true + launchUnscoped { + try { + while (running) kotlinx.coroutines.delay(1.seconds) + } catch (e: CancellationException) { + if (running) block() + } + } + return Cancellable { running = false } +} + fun CoroutineContext.launchUnscoped(block: suspend () -> Unit) { block.startCoroutine(object : Continuation { override val context: CoroutineContext = this@launchUnscoped @@ -66,7 +77,6 @@ fun suspendTest(cond: () -> Boolean, timeout: TimeSpan? 
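The new `CoroutineContext.onCancel` helper added above registers a block that runs only if the surrounding context is cancelled and returns a `Cancellable` to unregister it. A hedged usage sketch: the `FakeDevice` resource is invented for illustration, and the no-argument `cancel()` is assumed to be the usual korlibs extension over `Cancellable`.

```kotlin
import korlibs.io.async.*
import korlibs.io.lang.*
import kotlin.coroutines.coroutineContext

class FakeDevice {                           // illustrative stand-in for a native audio handle
    fun close() = println("device closed")
}

suspend fun useDevice(block: suspend (FakeDevice) -> Unit) {
    val device = FakeDevice()
    // Clean up if the calling coroutine is cancelled while block() is suspended.
    val registration = coroutineContext.onCancel { device.close() }
    try {
        block(device)
    } finally {
        registration.cancel()                // normal completion: unregister and close ourselves
        device.close()
    }
}
```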
= DEFAULT_SUSPEND_TEST_T fun suspendTestNoBrowser(callback: suspend CoroutineScope.() -> Unit) = suspendTest({ !Platform.isJsBrowser }, callback = callback) fun suspendTestNoJs(callback: suspend CoroutineScope.() -> Unit) = suspendTest({ !Platform.isJs && !Platform.isWasm }, callback = callback) -@ThreadLocal val DEBUG_ASYNC_LAUNCH_ERRORS by lazy { Environment["DEBUG_ASYNC_LAUNCH_ERRORS"] == "true" } private fun CoroutineScope._launch(start: CoroutineStart, callback: suspend () -> Unit): Job = launch(coroutineContext, start = start) { diff --git a/korge-core/src/common/korlibs/io/concurrent/atomic/KorAtomic.kt b/korge-core/src/common/korlibs/io/concurrent/atomic/KorAtomic.kt index 5a20eb017a..c1ce261381 100644 --- a/korge-core/src/common/korlibs/io/concurrent/atomic/KorAtomic.kt +++ b/korge-core/src/common/korlibs/io/concurrent/atomic/KorAtomic.kt @@ -1,6 +1,6 @@ package korlibs.io.concurrent.atomic -import kotlin.reflect.KProperty +import kotlin.reflect.* expect fun korAtomic(initial: T): KorAtomicRef expect fun korAtomic(initial: Boolean): KorAtomicBoolean @@ -22,6 +22,14 @@ interface KorAtomicBase { fun compareAndSet(expect: T, update: T): Boolean } +inline fun KorAtomicBase.update(transform: (T) -> T): T { + while (true) { + val value = this.value + val next = transform(value) + if (compareAndSet(value, next)) return next + } +} + interface KorAtomicNumber : KorAtomicBase { fun addAndGet(delta: T): T } @@ -68,11 +76,31 @@ open class KorAtomicInt internal constructor(initial: Int, dummy: Boolean) : Kor } } - override fun addAndGet(delta: Int): Int { - this.value += delta - return this.value - } + override fun addAndGet(delta: Int): Int = update { it + delta } + fun addAndGetMod(delta: Int, modulo: Int): Int = update { (it + delta) % modulo } + + override fun toString(): String = "$value" +} +class KorAtomicFloat(initial: Float) : KorAtomicNumber { + private val atomic = KorAtomicInt(initial.toRawBits()) + override var value: Float + get() = Float.fromBits(atomic.value) + set(value) { + atomic.value = value.toRawBits() + } + + override fun compareAndSet(expect: Float, update: Float): Boolean { + return if (value == expect) { + value = update + true + } else { + false + } + } + + override fun addAndGet(delta: Float): Float = update { it + delta } + fun addAndGetMod(delta: Float, modulo: Float): Float = update { (it + delta) % modulo } override fun toString(): String = "$value" } diff --git a/korge-core/src/darwin/korlibs/audio/sound/CoreAudioSoundProvider.kt b/korge-core/src/darwin/korlibs/audio/sound/CoreAudioSoundProvider.kt index 4c594b08c8..62c1400083 100644 --- a/korge-core/src/darwin/korlibs/audio/sound/CoreAudioSoundProvider.kt +++ b/korge-core/src/darwin/korlibs/audio/sound/CoreAudioSoundProvider.kt @@ -2,101 +2,59 @@ package korlibs.audio.sound //import mystdio.* import cnames.structs.OpaqueAudioQueue -import kotlinx.cinterop.Arena +import korlibs.memory.* +import kotlinx.cinterop.* import kotlinx.cinterop.COpaquePointer -import kotlinx.cinterop.CPointer -import kotlinx.cinterop.CPointerVarOf import kotlinx.cinterop.ShortVar -import kotlinx.cinterop.StableRef -import kotlinx.cinterop.alloc -import kotlinx.cinterop.allocArray -import kotlinx.cinterop.asStableRef import kotlinx.cinterop.convert -import kotlinx.cinterop.get -import kotlinx.cinterop.memScoped -import kotlinx.cinterop.plus -import kotlinx.cinterop.pointed -import kotlinx.cinterop.ptr -import kotlinx.cinterop.reinterpret -import kotlinx.cinterop.set -import kotlinx.cinterop.staticCFunction -import 
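The `update` extension above is a standard compare-and-set retry loop, and `addAndGet`/`addAndGetMod` are now expressed on top of it; `KorAtomicFloat` stores the float bits inside a `KorAtomicInt`. A small sketch of how the wrapped accumulator reads in practice (the phase-accumulator use case is illustrative, not part of this change):

```kotlin
import korlibs.io.concurrent.atomic.*
import kotlin.math.PI

class PhaseAccumulator {
    private val phase = KorAtomicFloat(0f)
    private val twoPi = (2 * PI).toFloat()

    // Advance the phase and keep it wrapped into [0, 2π) with the new addAndGetMod helper.
    fun advance(delta: Float): Float = phase.addAndGetMod(delta, twoPi)

    // The same operation spelled out with update {}: retry the transform until CAS succeeds.
    fun advanceExplicit(delta: Float): Float = phase.update { (it + delta) % twoPi }
}
```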
kotlinx.cinterop.value -import platform.AudioToolbox.AudioQueueAllocateBuffer -import platform.AudioToolbox.AudioQueueBuffer +import platform.AudioToolbox.* import platform.AudioToolbox.AudioQueueBufferRef -import platform.AudioToolbox.AudioQueueDispose -import platform.AudioToolbox.AudioQueueEnqueueBuffer -import platform.AudioToolbox.AudioQueueNewOutput import platform.AudioToolbox.AudioQueueRef -import platform.AudioToolbox.AudioQueueStart -import platform.AudioToolbox.kAudioFormatUnsupportedDataFormatError -import platform.AudioToolbox.kAudioQueueErr_BufferEmpty -import platform.AudioToolbox.kAudioQueueErr_BufferInQueue -import platform.AudioToolbox.kAudioQueueErr_CannotStart -import platform.AudioToolbox.kAudioQueueErr_CodecNotFound -import platform.AudioToolbox.kAudioQueueErr_DisposalPending -import platform.AudioToolbox.kAudioQueueErr_EnqueueDuringReset -import platform.AudioToolbox.kAudioQueueErr_InvalidBuffer -import platform.AudioToolbox.kAudioQueueErr_InvalidCodecAccess -import platform.AudioToolbox.kAudioQueueErr_InvalidDevice -import platform.AudioToolbox.kAudioQueueErr_InvalidOfflineMode -import platform.AudioToolbox.kAudioQueueErr_InvalidParameter -import platform.AudioToolbox.kAudioQueueErr_InvalidProperty -import platform.AudioToolbox.kAudioQueueErr_InvalidPropertySize -import platform.AudioToolbox.kAudioQueueErr_InvalidPropertyValue -import platform.AudioToolbox.kAudioQueueErr_InvalidQueueType -import platform.AudioToolbox.kAudioQueueErr_InvalidRunState -import platform.AudioToolbox.kAudioQueueErr_Permissions -import platform.AudioToolbox.kAudioQueueErr_PrimeTimedOut -import platform.AudioToolbox.kAudioQueueErr_QueueInvalidated -import platform.AudioToolbox.kAudioQueueErr_RecordUnderrun -import platform.CoreAudioTypes.AudioStreamBasicDescription -import platform.CoreAudioTypes.kAudioFormatFlagIsPacked -import platform.CoreAudioTypes.kAudioFormatLinearPCM -import platform.CoreAudioTypes.kLinearPCMFormatFlagIsSignedInteger -import platform.CoreFoundation.CFRunLoopGetCurrent -import platform.CoreFoundation.kCFRunLoopCommonModes +import platform.CoreAudioTypes.* import platform.darwin.OSStatus -import kotlin.coroutines.CoroutineContext +import platform.posix.* +import kotlin.Int +import kotlin.String +import kotlin.Unit +import kotlin.coroutines.* actual val nativeSoundProvider: NativeSoundProvider get() = CORE_AUDIO_NATIVE_SOUND_PROVIDER expect fun appleInitAudio() -@ThreadLocal val CORE_AUDIO_NATIVE_SOUND_PROVIDER: CoreAudioNativeSoundProvider by lazy { CoreAudioNativeSoundProvider() } -class CoreAudioNativeSoundProvider : NativeSoundProvider() { +class CoreAudioNativeSoundProvider : NativeSoundProviderNew() { init { appleInitAudio() } //override suspend fun createSound(data: ByteArray, streaming: Boolean, props: AudioDecodingProps): NativeSound = AVFoundationNativeSoundNoStream(CoroutineScope(coroutineContext), audioFormats.decode(data)) - override fun createPlatformAudioOutput(coroutineContext: CoroutineContext, freq: Int): PlatformAudioOutput = CoreAudioPlatformAudioOutput(coroutineContext, freq) + override fun createNewPlatformAudioOutput(coroutineContext: CoroutineContext, channels: Int, frequency: Int, gen: (AudioSamplesInterleaved) -> Unit): CoreAudioNewPlatformAudioOutput = CoreAudioNewPlatformAudioOutput(coroutineContext, frequency, channels, gen) } -class CoreAudioPlatformAudioOutput( +class CoreAudioNewPlatformAudioOutput( coroutineContext: CoroutineContext, - freq: Int -) : DequeBasedPlatformAudioOutput(coroutineContext, freq) { + freq: Int, + nchannels: Int, + 
gen: (AudioSamplesInterleaved) -> Unit, +) : NewPlatformAudioOutput(coroutineContext, nchannels, freq, gen) { + //private var samples: AudioSamplesInterleaved? = null + val generator = CoreAudioGenerator(freq, nchannels, coroutineContext = coroutineContext) { data, dataSize -> - val temp = ShortArray(dataSize / nchannels) - for (m in 0 until nchannels) { - readShorts(m, temp) - for (n in 0 until dataSize / nchannels) { - data[n * nchannels + m] = temp[n] - } - } - //for (n in 0 until dataSize / nchannels) { - // for (m in 0 until nchannels) { - // data[n * nchannels + m] = readShort(m) - // } + val totalSamples = dataSize / nchannels + //if (samples == null || samples!!.totalSamples != totalSamples || samples!!.channels != channels) { //} + val samples = AudioSamplesInterleaved(nchannels, totalSamples) + genSafe(samples) + samples.data.usePinned { + memcpy(data, it.startAddressOf, (dataSize * Short.SIZE_BYTES).convert()) + } } - override fun start() { + override fun internalStart() { generator.start() } - override fun stop() { + override fun internalStop() { generator.dispose() } } @@ -185,7 +143,7 @@ class CoreAudioGenerator( AudioQueueNewOutput( format.ptr, staticCFunction(::coreAudioOutputCallback), thisStableRef!!.asCPointer(), - CFRunLoopGetCurrent(), kCFRunLoopCommonModes, 0.convert(), queue!!.ptr + null, null, 0.convert(), queue!!.ptr ).also { if (it != 0) error("Error in AudioQueueNewOutput") } diff --git a/korge-core/src/js/korlibs/audio/sound/HtmlNativeSoundProvider.kt b/korge-core/src/js/korlibs/audio/sound/HtmlNativeSoundProvider.kt index c194b7e27a..48938f4e23 100644 --- a/korge-core/src/js/korlibs/audio/sound/HtmlNativeSoundProvider.kt +++ b/korge-core/src/js/korlibs/audio/sound/HtmlNativeSoundProvider.kt @@ -1,122 +1,109 @@ package korlibs.audio.sound -import korlibs.time.TimeSpan -import korlibs.time.seconds -import korlibs.audio.format.AudioDecodingProps -import korlibs.audio.internal.SampleConvert -import korlibs.io.file.Vfs -import korlibs.io.file.std.LocalVfs -import korlibs.io.file.std.UrlVfs -import korlibs.io.lang.invalidOp -import kotlinx.coroutines.CompletableDeferred -import org.w3c.dom.HTMLAudioElement -import kotlin.coroutines.CoroutineContext -import kotlin.coroutines.coroutineContext - -class HtmlNativeSoundProvider : NativeSoundProvider() { +import korlibs.audio.format.* +import korlibs.audio.internal.* +import korlibs.io.file.* +import korlibs.io.file.std.* +import korlibs.io.lang.* +import korlibs.memory.* +import korlibs.platform.* +import korlibs.time.* +import kotlin.coroutines.* + +actual val nativeSoundProvider: NativeSoundProvider by lazy { + if (Platform.isJsBrowser) { + HtmlNativeSoundProvider() + } else { + DummyNativeSoundProvider + } +} + +class HtmlNativeSoundProvider : NativeSoundProviderNew() { init { HtmlSimpleSound.ensureUnlockStart() } - override fun createPlatformAudioOutput(coroutineContext: CoroutineContext, freq: Int): PlatformAudioOutput = JsPlatformAudioOutput(coroutineContext, freq) + override fun createNewPlatformAudioOutput(coroutineContext: CoroutineContext, channels: Int, frequency: Int, gen: (AudioSamplesInterleaved) -> Unit): NewPlatformAudioOutput { + return JsNewPlatformAudioOutput(coroutineContext, channels, frequency, gen) + } - override suspend fun createSound(data: ByteArray, streaming: Boolean, props: AudioDecodingProps, name: String): Sound = + override suspend fun createSound(data: ByteArray, streaming: Boolean, props: AudioDecodingProps, name: String): Sound = 
AudioBufferSound(AudioBufferOrHTMLMediaElement(HtmlSimpleSound.loadSound(data)), "#bytes", coroutineContext, name) - override suspend fun createSound(vfs: Vfs, path: String, streaming: Boolean, props: AudioDecodingProps): Sound = when (vfs) { - is LocalVfs, is UrlVfs -> { + override suspend fun createSound(vfs: Vfs, path: String, streaming: Boolean, props: AudioDecodingProps): Sound = when (vfs) { + is LocalVfs, is UrlVfs -> { //println("createSound[1]") - val url = when (vfs) { - is LocalVfs -> path - is UrlVfs -> vfs.getFullUrl(path) - else -> invalidOp - } + val url = when (vfs) { + is LocalVfs -> path + is UrlVfs -> vfs.getFullUrl(path) + else -> invalidOp + } if (streaming) { AudioBufferSound(AudioBufferOrHTMLMediaElement(HtmlSimpleSound.loadSoundBuffer(url)), url, coroutineContext) //HtmlElementAudio(url) } else { AudioBufferSound(AudioBufferOrHTMLMediaElement(HtmlSimpleSound.loadSound(url)), url, coroutineContext) } - } - else -> { + } + else -> { //println("createSound[2]") - super.createSound(vfs, path) - } - } + super.createSound(vfs, path) + } + } } -class HtmlElementAudio( - val audio: HTMLAudioElement, +class JsNewPlatformAudioOutput( coroutineContext: CoroutineContext, -) : Sound(coroutineContext) { - override val length: TimeSpan get() = audio.duration.seconds - - override suspend fun decode(maxSamples: Int): AudioData = - AudioBufferSound(AudioBufferOrHTMLMediaElement(HtmlSimpleSound.loadSound(audio.src)), audio.src, defaultCoroutineContext).decode() - - companion object { - suspend operator fun invoke(url: String): HtmlElementAudio { - val audio = createAudioElement(url) - val promise = CompletableDeferred() - audio.oncanplay = { promise.complete(Unit) } - audio.oncanplaythrough = { promise.complete(Unit) } - promise.await() - //HtmlSimpleSound.waitUnlocked() - return HtmlElementAudio(audio, coroutineContext) - } + nchannels: Int, + frequency: Int, + gen: (AudioSamplesInterleaved) -> Unit +) : NewPlatformAudioOutput( + coroutineContext, nchannels, frequency, gen +) { + init { + nativeSoundProvider // Ensure it is created } - override fun play(coroutineContext: CoroutineContext, params: PlaybackParameters): SoundChannel { - val audioCopy = audio.clone() - audioCopy.volume = params.volume - HtmlSimpleSound.callOnUnlocked { - audioCopy.play() - } - audioCopy.oncancel = { - params.onCancel?.invoke() - } - audioCopy.onended = { - params.onFinish?.invoke() - } - return object : SoundChannel(this@HtmlElementAudio) { - override var volume: Double - get() = audioCopy.volume - set(value) { - audioCopy.volume = value - } + var missingDataCount = 0 + var nodeRunning = false + var node: ScriptProcessorNode? = null + + private var startPromise: Cancellable? 
= null + + override fun internalStart() { + if (nodeRunning) return + startPromise = HtmlSimpleSound.callOnUnlocked { + val ctx = HtmlSimpleSound.ctx + if (ctx != null) { + val bufferSize = 1024 + val scale = (frequency / ctx.sampleRate).toFloat() + val samples = AudioSamplesInterleaved(channels, (bufferSize * scale).toInt()) + node = ctx.createScriptProcessor(bufferSize, channels, channels) + //Console.log("sampleRate", ctx.sampleRate, "bufferSize", bufferSize, "totalSamples", samples.totalSamples, "scale", scale) + node?.onaudioprocess = { e -> + genSafe(samples) + val separated = samples.separated() + for (ch in 0 until channels) { + val outCh = e.outputBuffer.getChannelData(ch) + val data = separated[ch] + for (n in 0 until bufferSize) { + outCh[n] = SampleConvert.shortToFloat(data.getSampled(n * scale)) + } + } - override var pitch: Double - get() = 1.0 - set(value) {} - - override var panning: Double - get() = 0.0 - set(value) {} - - override val total: TimeSpan get() = audioCopy.duration.seconds - override var current: TimeSpan - get() = audioCopy.currentTime.seconds - set(value) { audioCopy.currentTime = value.seconds } - - override val state: SoundChannelState get() = when { - audioCopy.paused -> SoundChannelState.PAUSED - audioCopy.ended -> SoundChannelState.STOPPED - else -> SoundChannelState.PLAYING - } - - override fun pause() { - audioCopy.pause() - } - - override fun resume() { - audioCopy.play() - } - - override fun stop() { - audioCopy.pause() - current = 0.seconds + } + this.node?.connect(ctx.destination) } } + nodeRunning = true + missingDataCount = 0 + } + + override fun internalStop() { + if (!nodeRunning) return + startPromise?.cancel() + this.node?.disconnect() + nodeRunning = false } } diff --git a/korge-core/src/js/korlibs/audio/sound/NativeAudioStreamJs.kt b/korge-core/src/js/korlibs/audio/sound/NativeAudioStreamJs.kt deleted file mode 100644 index b2094b4ac0..0000000000 --- a/korge-core/src/js/korlibs/audio/sound/NativeAudioStreamJs.kt +++ /dev/null @@ -1,123 +0,0 @@ -package korlibs.audio.sound - -import korlibs.datastructure.FloatArrayDeque -import korlibs.time.milliseconds -import korlibs.time.seconds -import korlibs.memory.* -import korlibs.audio.internal.SampleConvert -import korlibs.audio.internal.write -import korlibs.io.async.delay -import korlibs.io.lang.Cancellable -import korlibs.io.lang.cancel -import korlibs.platform.* -import kotlinx.browser.document -import kotlin.coroutines.CoroutineContext - -actual val nativeSoundProvider: NativeSoundProvider by lazy { - if (Platform.isJsBrowser) { - HtmlNativeSoundProvider() - } else { - DummyNativeSoundProvider - } -} - -class JsPlatformAudioOutput(coroutineContext: CoroutineContext, val freq: Int) : PlatformAudioOutput(coroutineContext, freq) { - val id = lastId++ - - init { - nativeSoundProvider // Ensure it is created - } - - companion object { - var lastId = 0 - } - - var missingDataCount = 0 - var nodeRunning = false - var node: ScriptProcessorNode? 
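The browser output above runs at the AudioContext's own sample rate, so it generates `bufferSize * scale` frames at the requested frequency and reads them back at index `n * scale` when filling the WebAudio channel data. A self-contained sketch of that index-scaling resample (the real code goes through `getSampled`, which may interpolate rather than pick the nearest frame):

```kotlin
// Nearest-neighbour resample from src.size frames to dstSize frames by index scaling.
fun resampleNearest(src: FloatArray, dstSize: Int): FloatArray {
    if (src.isEmpty() || dstSize <= 0) return FloatArray(0)
    val scale = src.size.toFloat() / dstSize
    return FloatArray(dstSize) { n ->
        src[(n * scale).toInt().coerceAtMost(src.size - 1)]
    }
}
```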
= null - - private val nchannels = 2 - private val deques = Array(nchannels) { FloatArrayDeque() } - - private fun process(e: AudioProcessingEvent) { - //val outChannels = Array(e.outputBuffer.numberOfChannels) { e.outputBuffer.getChannelData(it) } - val outChannels = Array(e.outputBuffer.numberOfChannels) { e.outputBuffer.getChannelData(it) } - var hasData = true - - if (!document.asDynamic().hidden) { - for (channel in 0 until nchannels) { - val deque = deques[channel] - val outChannel = outChannels[channel] - val read = deque.read(outChannel) - if (read < outChannel.size) hasData = false - } - } - - if (!hasData) { - missingDataCount++ - } - - if (missingDataCount >= 500) { - stop() - } - } - - private fun ensureInit() { node } - - private var startPromise: Cancellable? = null - - override fun start() { - if (nodeRunning) return - startPromise = HtmlSimpleSound.callOnUnlocked { - node = HtmlSimpleSound.ctx?.createScriptProcessor(1024, 2, 2) - node?.onaudioprocess = { process(it) } - if (HtmlSimpleSound.ctx != null) this.node?.connect(HtmlSimpleSound.ctx.destination) - } - missingDataCount = 0 - nodeRunning = true - } - - override fun stop() { - if (!nodeRunning) return - startPromise?.cancel() - this.node?.disconnect() - nodeRunning = false - } - - fun ensureRunning() { - ensureInit() - if (!nodeRunning) { - start() - } - } - - var totalShorts = 0 - override val availableSamples get() = totalShorts - - override suspend fun add(samples: AudioSamples, offset: Int, size: Int) { - //println("addSamples: $available, $size") - //println(samples.sliceArray(offset until offset + size).toList()) - totalShorts += size - if (!HtmlSimpleSound.available) { - // Delay simulating consuming samples - val sampleCount = (size / 2) - val timeSeconds = sampleCount.toDouble() / 41_000.0 - coroutineContext.delay(timeSeconds.seconds) - } else { - ensureRunning() - - val schannels = samples.channels - for (channel in 0 until nchannels) { - val sample = samples[channel % schannels] - val deque = deques[channel] - for (n in 0 until size) { - deque.write(SampleConvert.shortToFloat(sample[offset + n])) - } - } - - while (deques[0].availableRead > samples.totalSamples * 4) { - coroutineContext.delay(4.milliseconds) - } - } - } -} diff --git a/korge-core/src/jvm/korlibs/audio/sound/NativeSoundProviderJvm.kt b/korge-core/src/jvm/korlibs/audio/sound/NativeSoundProviderJvm.kt index 38b77a42ad..305ac73711 100644 --- a/korge-core/src/jvm/korlibs/audio/sound/NativeSoundProviderJvm.kt +++ b/korge-core/src/jvm/korlibs/audio/sound/NativeSoundProviderJvm.kt @@ -1,9 +1,7 @@ package korlibs.audio.sound import korlibs.audio.sound.backend.* -import korlibs.audio.sound.impl.awt.* -import korlibs.audio.sound.impl.jna.* -import korlibs.audio.sound.impl.jna.OpenALException +import korlibs.datastructure.thread.* import korlibs.io.time.* import korlibs.logger.* import korlibs.platform.* @@ -15,17 +13,17 @@ private val nativeSoundProviderDeferred: NativeSoundProvider by lazy { traceTime("SoundProvider") { when { //Platform.isLinux -> FFIALSANativeSoundProvider - Platform.isLinux -> AwtNativeSoundProvider Platform.isApple -> jvmCoreAudioNativeSoundProvider Platform.isWindows -> jvmWaveOutNativeSoundProvider - else -> JnaOpenALNativeSoundProvider() + //else -> JnaOpenALNativeSoundProvider() + else -> AwtNativeSoundProvider } ?: AwtNativeSoundProvider } } catch (e: UnsatisfiedLinkError) { DummyNativeSoundProvider - } catch (e: OpenALException) { - logger.error { "OpenALException: ${e.message}" } - DummyNativeSoundProvider + //} catch (e: 
OpenALException) { + // logger.error { "OpenALException: ${e.message}" } + // DummyNativeSoundProvider } catch (e: Throwable) { e.printStackTrace() DummyNativeSoundProvider @@ -33,7 +31,7 @@ private val nativeSoundProviderDeferred: NativeSoundProvider by lazy { } actual val nativeSoundProvider: NativeSoundProvider by lazy { - Thread { nativeSoundProviderDeferred }.apply { isDaemon = true }.start() + nativeThread(isDaemon = true, start = true) { nativeSoundProviderDeferred } LazyNativeSoundProvider { nativeSoundProviderDeferred } } //actual val nativeSoundProvider: NativeSoundProvider by lazy { JogampNativeSoundProvider() } diff --git a/korge-core/src/jvm/korlibs/audio/sound/backend/AwtNativeSoundProvider.kt b/korge-core/src/jvm/korlibs/audio/sound/backend/AwtNativeSoundProvider.kt new file mode 100644 index 0000000000..4e786ed2e5 --- /dev/null +++ b/korge-core/src/jvm/korlibs/audio/sound/backend/AwtNativeSoundProvider.kt @@ -0,0 +1,80 @@ +package korlibs.audio.sound.backend + +import korlibs.audio.sound.* +import korlibs.datastructure.thread.* +import korlibs.memory.* +import javax.sound.sampled.* +import kotlin.coroutines.* + +private val mixer by lazy { AudioSystem.getMixer(null) } + +object AwtNativeSoundProvider : NativeSoundProviderNew() { + override fun createNewPlatformAudioOutput( + coroutineContext: CoroutineContext, + nchannels: Int, + freq: Int, + gen: (AudioSamplesInterleaved) -> Unit + ): NewPlatformAudioOutput { + return JvmNewPlatformAudioOutput(this, coroutineContext, nchannels, freq, gen) + } +} + +class JvmNewPlatformAudioOutput( + val provider: AwtNativeSoundProvider, + coroutineContext: CoroutineContext, + nchannels: Int, + freq: Int, + gen: (AudioSamplesInterleaved) -> Unit +) : NewPlatformAudioOutput(coroutineContext, nchannels, freq, gen) { + var nativeThread: NativeThread? 
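The JVM provider is still resolved lazily, but a daemon thread now touches the lazy value up front so backend detection does not stall the first sound. The generic shape of that warm-up trick, sketched without any korlibs types:

```kotlin
// Kick off initialization of a Lazy<T> on a daemon thread; readers still just use .value.
fun <T : Any> warmLazy(lazyValue: Lazy<T>): Lazy<T> {
    Thread { lazyValue.value }.apply { isDaemon = true }.start()
    return lazyValue
}

val warmedGreeting: Lazy<String> = warmLazy(lazy {
    Thread.sleep(100) // pretend this is expensive backend probing
    "ready"
})
```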
= null + + val BYTES_PER_SAMPLE = nchannels * Short.SIZE_BYTES + + private fun bytesToSamples(bytes: Int): Int = bytes / BYTES_PER_SAMPLE + private fun samplesToBytes(samples: Int): Int = samples * BYTES_PER_SAMPLE + + override fun internalStart() { + //println("TRYING TO START") + if (nativeThread?.threadSuggestRunning == true) return + + //println("STARTED") + + // SAMPLE -> Short, FRAME -> nchannels * SAMPLE + nativeThread = nativeThread(isDaemon = true) { + it.threadSuggestRunning = true + val nchannels = this.channels + val format = AudioFormat(frequency.toFloat(), Short.SIZE_BITS, nchannels, true, false) + //val format = AudioFormat(44100.toFloat(), Short.SIZE_BITS, nchannels, true, false) + //val line = AudioSystem.getSourceDataLine(format) + val line = (mixer.getLine(DataLine.Info(SourceDataLine::class.java, format)) as SourceDataLine) + line.open() + line.start() + try { + val info = AudioSamplesInterleaved(nchannels, 1024) + val bytes = ByteArray(samplesToBytes(1024)) + while (it.threadSuggestRunning) { + if (paused) { + Thread.sleep(10L) + } else { + genSafe(info) + bytes.setArrayLE(0, info.data) + //println(bytes.count { it == 0.toByte() }) + line.write(bytes, 0, bytes.size) + } + } + } catch (e: Throwable) { + e.printStackTrace() + } finally { + line.drain() + line.stop() + line.close() + } + } + } + + override fun internalStop() { + nativeThread?.threadSuggestRunning = false + nativeThread = null + //println("STOPPING") + } +} diff --git a/korge-core/src/jvm/korlibs/audio/sound/backend/CoreAudioImpl.kt b/korge-core/src/jvm/korlibs/audio/sound/backend/JvmCoreAudioNativeSoundProvider.kt similarity index 58% rename from korge-core/src/jvm/korlibs/audio/sound/backend/CoreAudioImpl.kt rename to korge-core/src/jvm/korlibs/audio/sound/backend/JvmCoreAudioNativeSoundProvider.kt index 5725ea0857..8a2dac5fff 100644 --- a/korge-core/src/jvm/korlibs/audio/sound/backend/CoreAudioImpl.kt +++ b/korge-core/src/jvm/korlibs/audio/sound/backend/JvmCoreAudioNativeSoundProvider.kt @@ -5,6 +5,7 @@ import korlibs.audio.sound.* import korlibs.ffi.* import korlibs.internal.osx.* import korlibs.io.annotations.* +import korlibs.io.concurrent.atomic.* import java.util.concurrent.* import java.util.concurrent.atomic.* import kotlin.coroutines.* @@ -18,92 +19,75 @@ val jvmCoreAudioNativeSoundProvider: JvmCoreAudioNativeSoundProvider? 
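The new AWT backend above sits directly on `javax.sound.sampled`. For reference, this is the minimal standalone shape of that pattern using only plain JDK APIs (no korlibs types): open a signed 16-bit little-endian stereo line and push interleaved frames to it.

```kotlin
import javax.sound.sampled.AudioFormat
import javax.sound.sampled.AudioSystem
import kotlin.math.PI
import kotlin.math.sin

fun playToneBlocking(frequencyHz: Double = 440.0, sampleRate: Int = 44100, seconds: Int = 1) {
    val format = AudioFormat(sampleRate.toFloat(), 16, 2, true, false) // 16-bit, stereo, LE
    val line = AudioSystem.getSourceDataLine(format)
    line.open(format)
    line.start()
    val frames = sampleRate * seconds
    val bytes = ByteArray(frames * 2 * Short.SIZE_BYTES)
    for (n in 0 until frames) {
        val s = (sin(2 * PI * frequencyHz * n / sampleRate) * Short.MAX_VALUE * 0.25).toInt()
        for (ch in 0 until 2) {
            val base = (n * 2 + ch) * Short.SIZE_BYTES
            bytes[base] = (s and 0xFF).toByte()              // little-endian low byte
            bytes[base + 1] = ((s shr 8) and 0xFF).toByte()  // high byte
        }
    }
    line.write(bytes, 0, bytes.size) // blocks until the device has consumed the data
    line.drain()
    line.stop()
    line.close()
}
```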
by lazy { } } -class JvmCoreAudioNativeSoundProvider : NativeSoundProvider() { - override fun createPlatformAudioOutput(coroutineContext: CoroutineContext, freq: Int): PlatformAudioOutput = JvmCoreAudioPlatformAudioOutput(coroutineContext, freq) +class JvmCoreAudioNativeSoundProvider : NativeSoundProviderNew() { + override fun createNewPlatformAudioOutput(coroutineContext: CoroutineContext, nchannels: Int, freq: Int, gen: (AudioSamplesInterleaved) -> Unit): NewPlatformAudioOutput { + return JvmCoreAudioNewPlatformAudioOutput(coroutineContext, nchannels, freq, gen) + } } -private val audioOutputsById = ConcurrentHashMap() - -private val cti by lazy { CallbackThreadInitializer() } -private val jnaCoreAudioCallback by lazy { +private val newAudioOutputsById = ConcurrentHashMap() +private val jnaNewCoreAudioCallback by lazy { AudioQueueNewOutputCallback { inUserData, inAQ, inBuffer -> - val output = audioOutputsById[(inUserData?.address ?: 0L).toInt()] ?: return@AudioQueueNewOutputCallback 0 - - //val tone = AudioTone.generate(1.seconds, 41000.0) - val queue = AudioQueueBuffer(inBuffer) - val ptr = queue.mAudioData - val samplesCount = (queue.mAudioDataByteSize / Short.SIZE_BYTES) / 2 - - if (output.left.size != samplesCount) output.left = ShortArray(samplesCount) - if (output.right.size != samplesCount) output.right = ShortArray(samplesCount) - - val left: ShortArray = output.left - val right: ShortArray = output.right - - //val availableRead = this@JvmCoreAudioPlatformAudioOutput.availableRead - output._readShorts(0, left) - output._readShorts(1, right) - - //println("callback: availableRead=$availableRead, completed=$completed, inUserData=$inUserData, inAQ=$inAQ, inBuffer=$inBuffer, thread=${Thread.currentThread()}") - - //println(queue.mAudioDataByteSize) - if (ptr != null) { - for (n in 0 until samplesCount) { - ptr[n * DequeBasedPlatformAudioOutput.nchannels + 0] = left[n] - ptr[n * DequeBasedPlatformAudioOutput.nchannels + 1] = right[n] + try { + val output = newAudioOutputsById[(inUserData?.address ?: 0L).toLong()] ?: return@AudioQueueNewOutputCallback 0 + val nchannels = output.channels + + //val tone = AudioTone.generate(1.seconds, 41000.0) + val queue = AudioQueueBuffer(inBuffer) + val ptr = queue.mAudioData + val samplesCount = (queue.mAudioDataByteSize / Short.SIZE_BYTES) / nchannels + //println("samplesCount=$samplesCount") + + if (ptr != null) { + // Reuse instances as much as possible + if (output.buffer.totalSamples != samplesCount) output.buffer = AudioSamplesInterleaved(nchannels, samplesCount) + val samples = output.buffer + output.genSafe(samples) + + val samplesData = samples.data + for (n in 0 until samplesCount * nchannels) { + ptr[n] = samplesData[n] + } } - } - //println("queue.mAudioData=${queue.mAudioData}") - - if (!output.completed) { - CoreAudioKit.AudioQueueEnqueueBuffer(inAQ, queue.ptr, 0, null).also { - if (it != 0) println("CoreAudioKit.AudioQueueEnqueueBuffer -> $it") + //println("queue.mAudioData=${queue.mAudioData}") + + if (!output.completed) { + CoreAudioKit.AudioQueueEnqueueBuffer(inAQ, queue.ptr, 0, null).also { + if (it != 0) println("CoreAudioKit.AudioQueueEnqueueBuffer -> $it") + } + } else { + Unit } - } else { - Unit - //println("COMPLETED!") + } catch (e: Throwable) { + e.printStackTrace() } - - //initRuntimeIfNeeded() - //val output = custom_data?.asStableRef() ?: return println("outputCallback null[0]") - //val buf = buffer?.pointed ?: return println("outputCallback null[1]") - //val dat = buf.mAudioDataByteSize.toInt() / Short.SIZE_BYTES - 
//val shortBuf = buf.mAudioData?.reinterpret() ?: return println("outputCallback null[2]") - //output.get().generateOutput(shortBuf, dat) - //AudioQueueEnqueueBuffer(queue, buffer, 0.convert(), null).checkError("AudioQueueEnqueueBuffer") 0 }.also { - Native.setCallbackThreadInitializer(it, cti) + Native.setCallbackThreadInitializer(it, CallbackThreadInitializer(false, false)) } } -private class JvmCoreAudioPlatformAudioOutput( +private class JvmCoreAudioNewPlatformAudioOutput( coroutineContext: CoroutineContext, - frequency: Int -) : DequeBasedPlatformAudioOutput(coroutineContext, frequency) { + nchannels: Int, + freq: Int, + gen: (AudioSamplesInterleaved) -> Unit, +) : NewPlatformAudioOutput(coroutineContext, nchannels, freq, gen) { val id = lastId.incrementAndGet() companion object { - private var lastId = AtomicInteger(0) + private var lastId = AtomicLong(0L) const val bufferSizeInBytes = 2048 const val numBuffers = 3 } - init { - audioOutputsById[id] = this - } - internal var completed = false - var queue: Pointer? = null - - var left: ShortArray = ShortArray(0) - var right: ShortArray = ShortArray(0) + internal var buffer by KorAtomicRef(AudioSamplesInterleaved(nchannels, 0)) - internal fun _readShorts(channel: Int, out: ShortArray, offset: Int = 0, count: Int = out.size - offset) { - readShorts(channel, out, offset, count) - } + var queue: Pointer? = null - override fun start() { + override fun internalStart() { + newAudioOutputsById[id] = this completed = false val queueRef = Memory(16).also { it.clear() } val format = AudioStreamBasicDescription(Memory(40).also { it.clear() }) @@ -112,7 +96,7 @@ private class JvmCoreAudioPlatformAudioOutput( format.mFormatID = CoreAudioKit.kAudioFormatLinearPCM format.mFormatFlags = CoreAudioKit.kLinearPCMFormatFlagIsSignedInteger or CoreAudioKit.kAudioFormatFlagIsPacked format.mBitsPerChannel = (8 * Short.SIZE_BYTES) - format.mChannelsPerFrame = nchannels + format.mChannelsPerFrame = channels format.mBytesPerFrame = (Short.SIZE_BYTES * format.mChannelsPerFrame) format.mFramesPerPacket = 1 format.mBytesPerPacket = format.mBytesPerFrame * format.mFramesPerPacket @@ -121,10 +105,7 @@ private class JvmCoreAudioPlatformAudioOutput( val userDefinedPtr = Pointer(id.toLong()) CoreAudioKit.AudioQueueNewOutput( - format.ptr, jnaCoreAudioCallback, userDefinedPtr, - //CoreFoundation.CFRunLoopGetCurrent(), - CoreFoundation.CFRunLoopGetMain(), - CoreFoundation.kCFRunLoopCommonModes, 0, queueRef + format.ptr, jnaNewCoreAudioCallback, userDefinedPtr, null, null, 0, queueRef ).also { if (it != 0) println("CoreAudioKit.AudioQueueNewOutput -> $it") } @@ -139,22 +120,24 @@ private class JvmCoreAudioPlatformAudioOutput( val ptr = AudioQueueBuffer(bufferPtr.getPointer(0)) //println("AudioQueueAllocateBuffer=$res, ptr.pointer=${ptr.pointer}") ptr.mAudioDataByteSize = bufferSizeInBytes - jnaCoreAudioCallback.callback(userDefinedPtr, queue, ptr.ptr) + jnaNewCoreAudioCallback.callback(userDefinedPtr, queue, ptr.ptr) } CoreAudioKit.AudioQueueStart(queue, null).also { if (it != 0) println("CoreAudioKit.AudioQueueStart -> $it") } - } - override fun stop() { + override fun internalStop() { completed = true - CoreAudioKit.AudioQueueDispose(queue, false) - audioOutputsById.remove(id) + + if (queue != null) { + CoreAudioKit.AudioQueueDispose(queue, false) + queue = null + } + newAudioOutputsById.remove(id) } } - private class AudioQueueBuffer(p: FFIPointer? 
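Because a JVM object cannot be passed through the native `inUserData` pointer, the callback above only receives a numeric id and resolves the actual output in a `ConcurrentHashMap`. The registry pattern in isolation (names are illustrative, not korlibs API):

```kotlin
import java.util.concurrent.ConcurrentHashMap
import java.util.concurrent.atomic.AtomicLong

object CallbackRegistry {
    private val lastId = AtomicLong(0L)
    private val byId = ConcurrentHashMap<Long, (samplesNeeded: Int) -> Unit>()

    // Register a handler and get back a small token that is safe to pass through native code.
    fun register(handler: (Int) -> Unit): Long =
        lastId.incrementAndGet().also { byId[it] = handler }

    fun unregister(id: Long) { byId.remove(id) }

    // What the native-side callback does with the token it was handed as user data.
    fun dispatch(id: Long, samplesNeeded: Int) { byId[id]?.invoke(samplesNeeded) }
}
```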
= null) : FFIStructure(p) { var mAudioDataBytesCapacity by int() var mAudioData by pointer() diff --git a/korge-core/src/jvm/korlibs/audio/sound/impl/alsa/Alsa.kt b/korge-core/src/jvm/korlibs/audio/sound/impl/alsa/Alsa.kt deleted file mode 100644 index b085b254f3..0000000000 --- a/korge-core/src/jvm/korlibs/audio/sound/impl/alsa/Alsa.kt +++ /dev/null @@ -1,377 +0,0 @@ -package korlibs.audio.sound.impl.alsa - -import korlibs.datastructure.lock.* -import korlibs.time.* -import korlibs.audio.sound.* -import korlibs.io.async.* -import korlibs.io.file.std.* -import com.sun.jna.Memory -import com.sun.jna.Native -import com.sun.jna.Pointer -import korlibs.io.annotations.* -import korlibs.math.* -import kotlinx.coroutines.* -import kotlin.coroutines.* - -object ALSAExample { - @JvmStatic - fun main(args: Array) { - runBlocking { - val sp = ALSANativeSoundProvider() - //val sp = JnaOpenALNativeSoundProvider() - val job1 = launch(coroutineContext) { - //sp.playAndWait(AudioTone.generate(10.seconds, 400.0).toStream()) - sp.playAndWait(resourcesVfs["Snowland.mp3"].readMusic().toStream()) - } - val job2 = launch(coroutineContext) { - //sp.playAndWait(AudioTone.generate(10.seconds, 200.0).toStream()) - } - println("Waiting...") - job1.join() - job2.join() - println("Done") - } - } -} - -class ALSANativeSoundProvider : NativeSoundProvider() { - override fun createPlatformAudioOutput(coroutineContext: CoroutineContext, freq: Int): PlatformAudioOutput { - return ALSAPlatformAudioOutput(this, coroutineContext, freq) - } -} - -class ALSAPlatformAudioOutput( - val soundProvider: ALSANativeSoundProvider, - coroutineContext: CoroutineContext, - frequency: Int, -) : PlatformAudioOutput(coroutineContext, frequency) { - val channels = 2 - val cmpPtr = Memory(1024L).also { it.clear() } - val params = Memory(1024L).also { it.clear() } - val temp = Memory(1024L).also { it.clear() } - var pcm: Pointer? = Pointer.NULL - private val lock = Lock() - val sdeque = AudioSamplesDeque(channels) - var running = true - var thread: Thread? 
= null - - init { - start() - } - - override suspend fun add(samples: AudioSamples, offset: Int, size: Int) { - if (!ASound2.initialized) return super.add(samples, offset, size) - - while (running && lock { sdeque.availableRead > 4 * 1024 }) { - delay(10.milliseconds) - } - lock { sdeque.write(samples, offset, size) } - } - - override fun start() { - sdeque.clear() - running = true - - if (!ASound2.initialized) return - - //cmpPtr.clear() - //cmpPtr.setLong(0L, 0L) - //println("test") - ASound2.snd_pcm_open(cmpPtr, "default", ASound2.SND_PCM_STREAM_PLAYBACK, 0).also { - if (it != 0) error("Can't initialize ALSA") - } - pcm = cmpPtr.getPointer(0L) - //println("pcm=$pcm") - ASound2.snd_pcm_hw_params_any(pcm, params) - ASound2.snd_pcm_hw_params_set_access(pcm, params, ASound2.SND_PCM_ACCESS_RW_INTERLEAVED).also { - if (it != 0) error("Error calling snd_pcm_hw_params_set_access=$it") - } - ASound2.snd_pcm_hw_params_set_format(pcm, params, ASound2.SND_PCM_FORMAT_S16_LE).also { - if (it != 0) error("Error calling snd_pcm_hw_params_set_format=$it") - } - ASound2.snd_pcm_hw_params_set_channels(pcm, params, channels).also { - if (it != 0) error("Error calling snd_pcm_hw_params_set_channels=$it") - } - ASound2.snd_pcm_hw_params_set_rate(pcm, params, frequency, +1).also { - if (it != 0) error("Error calling snd_pcm_hw_params_set_rate=$it") - } - ASound2.snd_pcm_hw_params(pcm, params).also { - if (it != 0) error("Error calling snd_pcm_hw_params=$it") - } - - //println(ASound2.snd_pcm_name(pcm)) - //println(ASound2.snd_pcm_state_name(ASound2.snd_pcm_state(pcm))) - ASound2.snd_pcm_hw_params_get_channels(params, temp).also { - if (it != 0) error("Error calling snd_pcm_hw_params_get_channels=$it") - } - val cchannels = temp.getInt(0L) - ASound2.snd_pcm_hw_params_get_rate(params, temp, null).also { if (it != 0) error("Error calling snd_pcm_hw_params_get_rate=$it") } - val crate = temp.getInt(0L) - ASound2.snd_pcm_hw_params_get_period_size(params, temp, null).also { if (it != 0) error("Error calling snd_pcm_hw_params_get_period_size=$it") } - val frames = temp.getInt(0L) - //println("cchannels: $cchannels, rate=$crate, frames=$frames") - val buff = Memory((frames * channels * 2).toLong()).also { it.clear() } - ASound2.snd_pcm_hw_params_get_period_time(params, temp, null).also { if (it != 0) error("Error calling snd_pcm_hw_params_get_period_size=$it") } - //val random = Random(0L) - thread = Thread { - val samples = AudioSamplesInterleaved(channels, frames) - try { - mainLoop@ while (running) { - while (lock { sdeque.availableRead < frames }) { - if (!running) break@mainLoop - Thread.sleep(1L) - } - val readCount = lock { sdeque.read(samples, 0, frames) } - //println("readCount=$readCount") - val panning = this.panning.toFloat() - //val panning = -1f - //val panning = +0f - //val panning = +1f - val volume = this.volume.toFloat().clamp01() - for (ch in 0 until channels) { - val pan = (if (ch == 0) -panning else +panning) + 1f - val npan = pan.clamp01() - val rscale: Float = npan * volume - //println("panning=$panning, volume=$volume, pan=$pan, npan=$npan, rscale=$rscale") - for (n in 0 until readCount) { - buff.setShort( - ((n * channels + ch) * 2).toLong(), - (samples[ch, n] * rscale).toInt().toShort() - ) - } - } - val result = ASound2.snd_pcm_writei(pcm, buff, frames) - //println("result=$result") - if (result == -ASound2.EPIPE) { - ASound2.snd_pcm_prepare(pcm) - } - } - } catch (e: InterruptedException) { - // Done - } - }.also { - it.isDaemon = true - it.start() - } - } - - override fun stop() { - 
running = false - thread?.interrupt() - if (!ASound2.initialized) return - - ASound2.snd_pcm_drain(pcm) - ASound2.snd_pcm_close(pcm) - } -} - -object AlsaTest { - @JvmStatic fun main(args: Array) { - /* - val data = AudioTone.generate(1.seconds, 400.0) - - var nn = 0 - while (true) { - for (n in 0 until frames * channels) { - val value = data[0, nn] - buff.setShort((n * 2).toLong(), value) - nn++ - if (nn >= data.totalSamples) nn = 0 - } - val result = ASound2.snd_pcm_writei(pcm, buff, frames) - println("result=$result") - if (result == -ASound2.EPIPE) { - ASound2.snd_pcm_prepare(pcm) - } - } - */ - - } -} - -@Keep -object ASound2 { - var initialized = false - - @JvmStatic external fun snd_pcm_open(pcmPtr: Pointer?, name: String, stream: Int, mode: Int): Int - @JvmStatic external fun snd_pcm_hw_params_any(pcm: Pointer?, params: Pointer): Int - @JvmStatic external fun snd_pcm_hw_params_set_access(pcm: Pointer?, params: Pointer, access: Int): Int - @JvmStatic external fun snd_pcm_hw_params_set_format(pcm: Pointer?, params: Pointer, format: Int): Int - @JvmStatic external fun snd_pcm_hw_params_set_channels(pcm: Pointer?, params: Pointer, channels: Int): Int - @JvmStatic external fun snd_pcm_hw_params_set_rate(pcm: Pointer?, params: Pointer, rate: Int, dir: Int): Int - @JvmStatic external fun snd_pcm_hw_params(pcm: Pointer?, params: Pointer): Int - @JvmStatic external fun snd_pcm_name(pcm: Pointer?): String - @JvmStatic external fun snd_pcm_state(pcm: Pointer?): Int - @JvmStatic external fun snd_pcm_state_name(state: Int): String - @JvmStatic external fun snd_pcm_hw_params_get_channels(params: Pointer, out: Pointer): Int - @JvmStatic external fun snd_pcm_hw_params_get_rate(params: Pointer?, value: Pointer?, dir: Pointer?): Int - @JvmStatic external fun snd_pcm_hw_params_get_period_size(params: Pointer?, value: Pointer?, dir: Pointer?): Int - @JvmStatic external fun snd_pcm_hw_params_get_period_time(params: Pointer?, value: Pointer?, dir: Pointer?): Int - @JvmStatic external fun snd_pcm_writei(pcm: Pointer?, buffer: Pointer, size: Int): Int - @JvmStatic external fun snd_pcm_prepare(pcm: Pointer?): Int - @JvmStatic external fun snd_pcm_drain(pcm: Pointer?): Int - @JvmStatic external fun snd_pcm_close(pcm: Pointer?): Int - - const val EPIPE = 32 // Broken pipe - const val EBADFD = 77 // File descriptor in bad state - const val ESTRPIPE = 86 // Streams pipe error - - const val SND_PCM_STREAM_PLAYBACK = 0 - const val SND_PCM_STREAM_CAPTURE = 1 - - const val SND_PCM_ACCESS_MMAP_INTERLEAVED = 0 // mmap access with simple interleaved channels - const val SND_PCM_ACCESS_MMAP_NONINTERLEAVED = 1 // mmap access with simple non interleaved channels - const val SND_PCM_ACCESS_MMAP_COMPLEX = 2 // mmap access with complex placement - const val SND_PCM_ACCESS_RW_INTERLEAVED = 3 // snd_pcm_readi/snd_pcm_writei access - const val SND_PCM_ACCESS_RW_NONINTERLEAVED = 4 // /snd_pcm_writen access - - const val SND_PCM_FORMAT_S16_LE = 2 - - const val SND_PCM_STATE_OPEN = 0 // Open - const val SND_PCM_STATE_SETUP = 1 // Setup installed - const val SND_PCM_STATE_PREPARED = 2 // Ready to start - const val SND_PCM_STATE_RUNNING = 3 // Running - const val SND_PCM_STATE_XRUN = 4 // Stopped: underrun (playback) or overrun (capture) detected - const val SND_PCM_STATE_DRAINING = 5 // Draining: running (playback) or stopped (capture) - const val SND_PCM_STATE_PAUSED = 6 // Paused - const val SND_PCM_STATE_SUSPENDED = 7 // Hardware is suspended - const val SND_PCM_STATE_DISCONNECTED = 8 // Hardware is disconnected - - init { - 
try { - Native.register("libasound.so.2") - initialized = true - } catch (e: Throwable) { - e.printStackTrace() - } - } -} - -/* -➜ korge git:(main) ✗ cat ~/alsatest.c -/* - * Simple sound playback using ALSA API and libasound. - * - * Compile: - * $ cc -o play sound_playback.c -lasound - * - * Usage: - * $ ./play < - * - * Examples: - * $ ./play 44100 2 5 < /dev/urandom - * $ ./play 22050 1 8 < /path/to/file.wav - * - * Copyright (C) 2009 Alessandro Ghedini - * -------------------------------------------------------------- - * "THE BEER-WARE LICENSE" (Revision 42): - * Alessandro Ghedini wrote this file. As long as you retain this - * notice you can do whatever you want with this stuff. If we - * meet some day, and you think this stuff is worth it, you can - * buy me a beer in return. - * -------------------------------------------------------------- - */ - -#include -#include - -#define PCM_DEVICE "default" - -int main(int argc, char **argv) { - unsigned int pcm, tmp, dir; - int rate, channels, seconds; - snd_pcm_t *pcm_handle; - snd_pcm_hw_params_t *params; - snd_pcm_uframes_t frames; - char *buff; - int buff_size, loops; - - if (argc < 4) { - printf("Usage: %s \n", - argv[0]); - return -1; - } - - rate = atoi(argv[1]); - channels = atoi(argv[2]); - seconds = atoi(argv[3]); - - /* Open the PCM device in playback mode */ - if (pcm = snd_pcm_open(&pcm_handle, PCM_DEVICE, - SND_PCM_STREAM_PLAYBACK, 0) < 0) - printf("ERROR: Can't open \"%s\" PCM device. %s\n", - PCM_DEVICE, snd_strerror(pcm)); - - /* Allocate parameters object and fill it with default values*/ - snd_pcm_hw_params_alloca(¶ms); - - snd_pcm_hw_params_any(pcm_handle, params); - - /* Set parameters */ - if (pcm = snd_pcm_hw_params_set_access(pcm_handle, params, - SND_PCM_ACCESS_RW_INTERLEAVED) < 0) - printf("ERROR: Can't set interleaved mode. %s\n", snd_strerror(pcm)); - - if (pcm = snd_pcm_hw_params_set_format(pcm_handle, params, - SND_PCM_FORMAT_S16_LE) < 0) - printf("ERROR: Can't set format. %s\n", snd_strerror(pcm)); - - if (pcm = snd_pcm_hw_params_set_channels(pcm_handle, params, channels) < 0) - printf("ERROR: Can't set channels number. %s\n", snd_strerror(pcm)); - - if (pcm = snd_pcm_hw_params_set_rate_near(pcm_handle, params, &rate, 0) < 0) - printf("ERROR: Can't set rate. %s\n", snd_strerror(pcm)); - - /* Write parameters */ - if (pcm = snd_pcm_hw_params(pcm_handle, params) < 0) - printf("ERROR: Can't set harware parameters. %s\n", snd_strerror(pcm)); - - /* Resume information */ - printf("PCM name: '%s'\n", snd_pcm_name(pcm_handle)); - - printf("PCM state: %s\n", snd_pcm_state_name(snd_pcm_state(pcm_handle))); - - snd_pcm_hw_params_get_channels(params, &tmp); - printf("channels: %i ", tmp); - - if (tmp == 1) - printf("(mono)\n"); - else if (tmp == 2) - printf("(stereo)\n"); - - snd_pcm_hw_params_get_rate(params, &tmp, 0); - printf("rate: %d bps\n", tmp); - - printf("seconds: %d\n", seconds); - - /* Allocate buffer to hold single period */ - snd_pcm_hw_params_get_period_size(params, &frames, 0); - - buff_size = frames * channels * 2 /* 2 -> sample size */; - buff = (char *) malloc(buff_size); - - snd_pcm_hw_params_get_period_time(params, &tmp, NULL); - - for (loops = (seconds * 1000000) / tmp; loops > 0; loops--) { - - if (pcm = read(0, buff, buff_size) == 0) { - printf("Early end of file.\n"); - return 0; - } - - if (pcm = snd_pcm_writei(pcm_handle, buff, frames) == -EPIPE) { - printf("XRUN.\n"); - snd_pcm_prepare(pcm_handle); - } else if (pcm < 0) { - printf("ERROR. Can't write to PCM device. 
%s\n", snd_strerror(pcm)); - } - - } - - snd_pcm_drain(pcm_handle); - snd_pcm_close(pcm_handle); - free(buff); - - return 0; -}% - */ diff --git a/korge-core/src/jvm/korlibs/audio/sound/impl/awt/AwtNativeSoundProvider.kt b/korge-core/src/jvm/korlibs/audio/sound/impl/awt/AwtNativeSoundProvider.kt deleted file mode 100644 index 76bca059e9..0000000000 --- a/korge-core/src/jvm/korlibs/audio/sound/impl/awt/AwtNativeSoundProvider.kt +++ /dev/null @@ -1,262 +0,0 @@ -package korlibs.audio.sound.impl.awt - -import korlibs.audio.sound.* -import korlibs.datastructure.* -import korlibs.datastructure.thread.* -import korlibs.memory.* -import korlibs.time.* -import kotlinx.coroutines.* -import javax.sound.sampled.* -import kotlin.coroutines.* -import kotlin.time.* - -// AudioSystem.getMixerInfo() -private val mixer by lazy { AudioSystem.getMixer(null) } - -object AwtNativeSoundProvider : NativeSoundProvider() { - val format = AudioFormat(44100f, 16, 2, true, false) - - val linePool = ConcurrentPool { (mixer.getLine(DataLine.Info(SourceDataLine::class.java, format)) as SourceDataLine).also { it.open() } } - - init { - // warming and preparing - mixer.mixerInfo - val info = DataLine.Info(SourceDataLine::class.java, format) - val line = AudioSystem.getLine(info) as SourceDataLine - line.open(format, 4096) - line.start() - line.write(ByteArray(4), 0, 4) - line.drain() - line.stop() - line.close() - } - - override fun createPlatformAudioOutput(coroutineContext: CoroutineContext, freq: Int): PlatformAudioOutput = - JvmPlatformAudioOutput(this, coroutineContext, freq) -} - -data class SampleBuffer(val timestamp: Long, val data: AudioSamples) - -/* -private class JvmCoreAudioPlatformAudioOutput( - coroutineContext: CoroutineContext, - frequency: Int -) : DequeBasedPlatformAudioOutput(coroutineContext, frequency) { - val id = lastId.incrementAndGet() - companion object { - private var lastId = AtomicInteger(0) - const val bufferSizeInBytes = 2048 - const val numBuffers = 3 - } - - init { - audioOutputsById[id] = this - } - - internal var completed = false - - var queue: Pointer? 
= null - - var left: ShortArray = ShortArray(0) - var right: ShortArray = ShortArray(0) - - internal fun _readShorts(channel: Int, out: ShortArray, offset: Int = 0, count: Int = out.size - offset) { - readShorts(channel, out, offset, count) - } - - override fun start() { - completed = false - val queueRef = Memory(16).also { it.clear() } - val format = AudioStreamBasicDescription(Memory(40).also { it.clear() }) - - format.mSampleRate = frequency.toDouble() - format.mFormatID = CoreAudioKit.kAudioFormatLinearPCM - format.mFormatFlags = CoreAudioKit.kLinearPCMFormatFlagIsSignedInteger or CoreAudioKit.kAudioFormatFlagIsPacked - format.mBitsPerChannel = (8 * Short.SIZE_BYTES) - format.mChannelsPerFrame = nchannels - format.mBytesPerFrame = (Short.SIZE_BYTES * format.mChannelsPerFrame) - format.mFramesPerPacket = 1 - format.mBytesPerPacket = format.mBytesPerFrame * format.mFramesPerPacket - format.mReserved = 0 - - val userDefinedPtr = Pointer(id.toLong()) - - CoreAudioKit.AudioQueueNewOutput( - format.ptr, jnaCoreAudioCallback, userDefinedPtr, - //CoreFoundation.CFRunLoopGetCurrent(), - CoreFoundation.CFRunLoopGetMain(), - CoreFoundation.kCFRunLoopCommonModes, 0, queueRef - ).also { - if (it != 0) println("CoreAudioKit.AudioQueueNewOutput -> $it") - } - queue = queueRef.getPointer(0L) - //println("result=$result, queue=$queue") - val buffersArray = Memory((8 * numBuffers).toLong()).also { it.clear() } - for (buf in 0 until numBuffers) { - val bufferPtr = Pointer(buffersArray.address + 8 * buf) - CoreAudioKit.AudioQueueAllocateBuffer(queue, bufferSizeInBytes, bufferPtr).also { - if (it != 0) println("CoreAudioKit.AudioQueueAllocateBuffer -> $it") - } - val ptr = AudioQueueBuffer(bufferPtr.getPointer(0)) - //println("AudioQueueAllocateBuffer=$res, ptr.pointer=${ptr.pointer}") - ptr.mAudioDataByteSize = bufferSizeInBytes - jnaCoreAudioCallback.callback(userDefinedPtr, queue, ptr.ptr) - } - CoreAudioKit.AudioQueueStart(queue, null).also { - if (it != 0) println("CoreAudioKit.AudioQueueStart -> $it") - } - - } - - override fun stop() { - completed = true - CoreAudioKit.AudioQueueDispose(queue, false) - audioOutputsById.remove(id) - } -} -*/ - - -class JvmPlatformAudioOutput( - val provider: AwtNativeSoundProvider, - coroutineContext: CoroutineContext, - frequency: Int -) : DequeBasedPlatformAudioOutput(coroutineContext, frequency) { - val samplesLock = korlibs.datastructure.lock.NonRecursiveLock() - var nativeThread: NativeThread? = null - var running = false - var totalEmittedSamples = 0L - - override suspend fun wait() { - if (line == null) return - for (n in 0 until 1000) { - var currentPositionInSamples: Long = 0L - var totalEmittedSamples: Long = 0L - var availableRead = 0 - samplesLock { - currentPositionInSamples = line?.longFramePosition ?: 0L - availableRead = this.availableRead - totalEmittedSamples = this.totalEmittedSamples - } - //println("availableRead=$availableRead, waveOutGetPosition=$currentPositionInSamples, totalEmittedSamples=$totalEmittedSamples") - if (availableRead <= 0 && currentPositionInSamples >= totalEmittedSamples) break - delay(1.milliseconds) - } - } - - val format = provider.format - var line: SourceDataLine? 
= null - - val BYTES_PER_SAMPLE = nchannels * Short.SIZE_BYTES - - fun bytesToSamples(bytes: Int): Int = bytes / BYTES_PER_SAMPLE - fun samplesToBytes(samples: Int): Int = samples * BYTES_PER_SAMPLE - - override fun start() { - //println("TRYING TO START") - if (running) return - //println("STARTED") - running = true - nativeThread = nativeThread(isDaemon = true) { - try { - var timesWithoutBuffers = 0 - while (running || availableRead > 0) { - while (availableRead > 0) { - timesWithoutBuffers = 0 - while (availableRead > 0) { - if (line == null) { - val prepareLineTime = measureTimedValue { - line = provider.linePool.alloc() - //println("OPEN LINE: $line") - line!!.stop() - line!!.flush() - line!!.start() - } - //println("prepareLineTime=$prepareLineTime") - } - val availableBytes = line!!.available() - //val availableSamples = minOf(availableRead, bytesToSamples(availableBytes)) - val availableSamples = minOf(441, minOf(availableRead, bytesToSamples(availableBytes))) - - val info = AudioSamplesInterleaved(nchannels, availableSamples) - val readCount = readShortsInterleaved(info) - val bytes = ByteArray(samplesToBytes(readCount)) - bytes.setArrayLE(0, info.data) - //println(bytes.hex) - val (written, time) = measureTimedValue { line!!.write(bytes, 0, bytes.size) } - if (written != bytes.size) { - println("NOT FULLY WRITTEN $written != ${bytes.size}") - } - //println("written=$written, write time=$time") - samplesLock { - this.totalEmittedSamples += readCount - } - } - //println(bytes.hex) - Thread.sleep(1L) - } - //println("SHUT($id)!") - //Thread.sleep(500L) // 0.5 seconds of grace before shutting down this thread! - Thread.sleep(50L) // 0.5 seconds of grace before shutting down this thread! - timesWithoutBuffers++ - if (timesWithoutBuffers >= 10) break - } - } catch (e: Throwable) { - e.printStackTrace() - } finally { - //println("CLOSED_LINE: $line running=$running!") - if (line != null) { - //line?.drain() - //line?.stop() - //line?.close() - provider.linePool.free(line!!) - line = null - } - } - } - } - - override fun stop() { - running = false - //println("STOPPING") - } - - /* - val line by lazy { mixer.getLine(DataLine.Info(SourceDataLine::class.java, format)) as SourceDataLine } - line.open() - line.start() - //println("OPENED_LINE($id)!") - try { - var timesWithoutBuffers = 0 - while (running) { - while (availableBuffers > 0) { - timesWithoutBuffers = 0 - val buf = synchronized(buffers) { buffers.dequeue() } - synchronized(buffers) { totalShorts -= buf.data.totalSamples * buf.data.channels } - val bdata = convertFromShortToByte(buf.data.interleaved().data) - - val msChunk = (((bdata.size / 2) * 1000.0) / frequency.toDouble()).toInt() - - _msElapsed += msChunk - val now = System.currentTimeMillis() - val latency = now - buf.timestamp - //val drop = latency >= 150 - val start = System.currentTimeMillis() - line.write(bdata, 0, bdata.size) - //line.drain() - val end = System.currentTimeMillis() - //println("LINE($id): ${end - start} :: msChunk=$msChunk :: start=$start, end=$end :: available=${line.available()} :: framePosition=${line.framePosition} :: availableBuffers=$availableBuffers") - } - //println("SHUT($id)!") - //Thread.sleep(500L) // 0.5 seconds of grace before shutting down this thread! - Thread.sleep(50L) // 0.5 seconds of grace before shutting down this thread! 
- timesWithoutBuffers++ - if (timesWithoutBuffers >= 10) break - } - } finally { - //println("CLOSED_LINE($id)!") - line.stop() - line.close() - } - */ -} diff --git a/korge-core/src/jvm/korlibs/audio/sound/impl/jna/AL.kt b/korge-core/src/jvm/korlibs/audio/sound/impl/jna/AL.kt deleted file mode 100644 index fe84c6b76b..0000000000 --- a/korge-core/src/jvm/korlibs/audio/sound/impl/jna/AL.kt +++ /dev/null @@ -1,281 +0,0 @@ -package korlibs.audio.sound.impl.jna - -import com.sun.jna.* -import korlibs.io.annotations.* -import korlibs.io.lang.* -import korlibs.io.time.* -import korlibs.logger.* -import korlibs.platform.Platform -import java.io.* -import java.net.* -import java.nio.* - -@Suppress("unused") -@Keep -object AL { - private val logger = Logger("AL") - - @JvmStatic external fun alDopplerFactor(value: Float) - @JvmStatic external fun alDopplerVelocity(value: Float) - @JvmStatic external fun alSpeedOfSound(value: Float) - @JvmStatic external fun alDistanceModel(distanceModel: Int) - @JvmStatic external fun alEnable(capability: Int) - @JvmStatic external fun alDisable(capability: Int) - @JvmStatic external fun alIsEnabled(capability: Int): Boolean - @JvmStatic external fun alGetString(param: Int): String - @JvmStatic external fun alGetBooleanv(param: Int, values: BooleanArray) - @JvmStatic external fun alGetIntegerv(param: Int, values: IntArray) - @JvmStatic external fun alGetFloatv(param: Int, values: FloatArray) - @JvmStatic external fun alGetDoublev(param: Int, values: DoubleArray) - @JvmStatic external fun alGetBoolean(param: Int): Boolean - @JvmStatic external fun alGetInteger(param: Int): Int - @JvmStatic external fun alGetFloat(param: Int): Float - @JvmStatic external fun alGetDouble(param: Int): Double - @JvmStatic external fun alGetError(): Int - @JvmStatic external fun alIsExtensionPresent(extname: String): Boolean - @JvmStatic external fun alGetProcAddress(fname: String): Pointer - @JvmStatic external fun alGetEnumValue(ename: String): Int - @JvmStatic external fun alListenerf(param: Int, value: Float) - @JvmStatic external fun alListener3f(param: Int, value1: Float, value2: Float, value3: Float) - @JvmStatic external fun alListenerfv(param: Int, values: FloatArray) - @JvmStatic external fun alListeneri(param: Int, value: Int) - @JvmStatic external fun alListener3i(param: Int, value1: Int, value2: Int, value3: Int) - @JvmStatic external fun alListeneriv(param: Int, values: IntArray) - @JvmStatic external fun alGetListenerf(param: Int, value: FloatArray) - @JvmStatic external fun alGetListener3f(param: Int, value1: FloatArray, value2: FloatArray, value3: FloatArray) - @JvmStatic external fun alGetListenerfv(param: Int, values: FloatArray) - @JvmStatic external fun alGetListeneri(param: Int, value: IntArray) - @JvmStatic external fun alGetListener3i(param: Int, value1: IntArray, value2: IntArray, value3: IntArray) - @JvmStatic external fun alGetListeneriv(param: Int, values: IntArray) - @JvmStatic external fun alGenSources(n: Int, sources: IntArray) - @JvmStatic external fun alDeleteSources(n: Int, sources: IntArray) - @JvmStatic external fun alIsSource(source: Int): Boolean - @JvmStatic external fun alSourcef(source: Int, param: Int, value: Float) - @JvmStatic external fun alSource3f(source: Int, param: Int, value1: Float, value2: Float, value3: Float) - @JvmStatic external fun alSourcefv(source: Int, param: Int, values: FloatArray) - @JvmStatic external fun alSourcei(source: Int, param: Int, value: Int) - @JvmStatic external fun alSource3i(source: Int, param: Int, value1: Int, 
value2: Int, value3: Int) - @JvmStatic external fun alSourceiv(source: Int, param: Int, values: IntArray) - @JvmStatic external fun alGetSourcef(source: Int, param: Int, value: FloatArray) - @JvmStatic external fun alGetSource3f(source: Int, param: Int, value1: FloatArray, value2: FloatArray, value3: FloatArray) - @JvmStatic external fun alGetSourcefv(source: Int, param: Int, values: FloatArray) - @JvmStatic external fun alGetSourcei(source: Int, param: Int, value: IntArray) - @JvmStatic external fun alGetSource3i(source: Int, param: Int, value1: IntArray, value2: IntArray, value3: IntArray) - @JvmStatic external fun alGetSourceiv(source: Int, param: Int, values: IntArray) - @JvmStatic external fun alSourcePlayv(n: Int, sources: IntArray) - @JvmStatic external fun alSourceStopv(n: Int, sources: IntArray) - @JvmStatic external fun alSourceRewindv(n: Int, sources: IntArray) - @JvmStatic external fun alSourcePausev(n: Int, sources: IntArray) - @JvmStatic external fun alSourcePlay(source: Int) - @JvmStatic external fun alSourceStop(source: Int) - @JvmStatic external fun alSourceRewind(source: Int) - @JvmStatic external fun alSourcePause(source: Int) - @JvmStatic external fun alSourceQueueBuffers(source: Int, nb: Int, buffers: IntArray) - @JvmStatic external fun alSourceUnqueueBuffers(source: Int, nb: Int, buffers: IntArray) - @JvmStatic external fun alGenBuffers(n: Int, buffers: IntArray) - @JvmStatic external fun alDeleteBuffers(n: Int, buffers: IntArray) - @JvmStatic external fun alIsBuffer(buffer: Int): Boolean - @JvmStatic external fun alBufferData(buffer: Int, format: Int, data: Buffer?, size: Int, freq: Int) - @JvmStatic external fun alBufferf(buffer: Int, param: Int, value: Float) - @JvmStatic external fun alBuffer3f(buffer: Int, param: Int, value1: Float, value2: Float, value3: Float) - @JvmStatic external fun alBufferfv(buffer: Int, param: Int, values: FloatArray) - @JvmStatic external fun alBufferi(buffer: Int, param: Int, value: Int) - @JvmStatic external fun alBuffer3i(buffer: Int, param: Int, value1: Int, value2: Int, value3: Int) - @JvmStatic external fun alBufferiv(buffer: Int, param: Int, values: IntArray) - @JvmStatic external fun alGetBufferf(buffer: Int, param: Int, value: FloatArray) - @JvmStatic external fun alGetBuffer3f(buffer: Int, param: Int, value1: FloatArray, value2: FloatArray, value3: FloatArray) - @JvmStatic external fun alGetBufferfv(buffer: Int, param: Int, values: FloatArray) - @JvmStatic external fun alGetBufferi(buffer: Int, param: Int, value: IntArray) - @JvmStatic external fun alGetBuffer3i(buffer: Int, param: Int, value1: IntArray, value2: IntArray, value3: IntArray) - @JvmStatic external fun alGetBufferiv(buffer: Int, param: Int, values: IntArray) - - private val tempF = FloatArray(1) - private val tempI = IntArray(1) - - fun alGenBuffer(): Int = tempI.also { alGenBuffers(1, it) }[0] - fun alGenSource(): Int = tempI.also { alGenSources(1, it) }[0] - fun alDeleteBuffer(buffer: Int) { alDeleteBuffers(1, tempI.also { it[0] = buffer }) } - fun alDeleteSource(buffer: Int) { alDeleteSources(1, tempI.also { it[0] = buffer }) } - fun alGetSourcef(source: Int, param: Int): Float = tempF.also { alGetSourcef(source, param, it) }[0] - fun alGetSourcei(source: Int, param: Int): Int = tempI.also { alGetSourcei(source, param, it) }[0] - fun alGetSourceState(source: Int): Int = alGetSourcei(source, AL.AL_SOURCE_STATE) - - const val AL_NONE = 0 - const val AL_FALSE = 0 - const val AL_TRUE = 1 - const val AL_SOURCE_RELATIVE = 0x202 - const val AL_CONE_INNER_ANGLE = 0x1001 
- const val AL_CONE_OUTER_ANGLE = 0x1002 - const val AL_PITCH = 0x1003 - const val AL_POSITION = 0x1004 - const val AL_DIRECTION = 0x1005 - const val AL_VELOCITY = 0x1006 - const val AL_LOOPING = 0x1007 - const val AL_BUFFER = 0x1009 - const val AL_GAIN = 0x100A - const val AL_MIN_GAIN = 0x100D - const val AL_MAX_GAIN = 0x100E - const val AL_ORIENTATION = 0x100F - const val AL_SOURCE_STATE = 0x1010 - const val AL_INITIAL = 0x1011 - const val AL_PLAYING = 0x1012 - const val AL_PAUSED = 0x1013 - const val AL_STOPPED = 0x1014 - const val AL_BUFFERS_QUEUED = 0x1015 - const val AL_BUFFERS_PROCESSED = 0x1016 - const val AL_REFERENCE_DISTANCE = 0x1020 - const val AL_ROLLOFF_FACTOR = 0x1021 - const val AL_CONE_OUTER_GAIN = 0x1022 - const val AL_MAX_DISTANCE = 0x1023 - const val AL_SEC_OFFSET = 0x1024 - const val AL_SAMPLE_OFFSET = 0x1025 - const val AL_BYTE_OFFSET = 0x1026 - const val AL_SOURCE_TYPE = 0x1027 - const val AL_STATIC = 0x1028 - const val AL_STREAMING = 0x1029 - const val AL_UNDETERMINED = 0x1030 - const val AL_FORMAT_MONO8 = 0x1100 - const val AL_FORMAT_MONO16 = 0x1101 - const val AL_FORMAT_STEREO8 = 0x1102 - const val AL_FORMAT_STEREO16 = 0x1103 - const val AL_FREQUENCY = 0x2001 - const val AL_BITS = 0x2002 - const val AL_CHANNELS = 0x2003 - const val AL_SIZE = 0x2004 - const val AL_UNUSED = 0x2010 - const val AL_PENDING = 0x2011 - const val AL_PROCESSED = 0x2012 - const val AL_NO_ERROR = 0 - const val AL_INVALID_NAME = 0xA001 - const val AL_INVALID_ENUM = 0xA002 - const val AL_INVALID_VALUE = 0xA003 - const val AL_INVALID_OPERATION = 0xA004 - const val AL_OUT_OF_MEMORY = 0xA005 - const val AL_VENDOR = 0xB001 - const val AL_VERSION = 0xB002 - const val AL_RENDERER = 0xB003 - const val AL_EXTENSIONS = 0xB004 - const val AL_DOPPLER_FACTOR = 0xC000 - const val AL_DOPPLER_VELOCITY = 0xC001 - const val AL_SPEED_OF_SOUND = 0xC003 - const val AL_DISTANCE_MODEL = 0xD000 - const val AL_INVERSE_DISTANCE = 0xD001 - const val AL_INVERSE_DISTANCE_CLAMPED = 0xD002 - const val AL_LINEAR_DISTANCE = 0xD003 - const val AL_LINEAR_DISTANCE_CLAMPED = 0xD004 - const val AL_EXPONENT_DISTANCE = 0xD005 - const val AL_EXPONENT_DISTANCE_CLAMPED = 0xD006 - - // ALC - - @JvmStatic external fun alcCreateContext(device: Pointer, attrlist: IntArray?): Pointer? - @JvmStatic external fun alcMakeContextCurrent(context: Pointer?): Boolean - @JvmStatic external fun alcProcessContext(context: Pointer) - @JvmStatic external fun alcSuspendContext(context: Pointer) - @JvmStatic external fun alcDestroyContext(context: Pointer) - @JvmStatic external fun alcGetCurrentContext(): Pointer - @JvmStatic external fun alcGetContextsDevice(context: Pointer): Pointer - @JvmStatic external fun alcOpenDevice(devicename: String?): Pointer? 
- @JvmStatic external fun alcCloseDevice(device: Pointer): Boolean - @JvmStatic external fun alcGetError(device: Pointer): Int - @JvmStatic external fun alcIsExtensionPresent(device: Pointer, extname: String): Boolean - @JvmStatic external fun alcGetProcAddress(device: Pointer, funcname: String): Pointer - @JvmStatic external fun alcGetEnumValue(device: Pointer, enumname: String): Int - @JvmStatic external fun alcGetString(device: Pointer, param: Int): String - @JvmStatic external fun alcGetIntegerv(device: Pointer, param: Int, size: Int, values: IntArray) - @JvmStatic external fun alcCaptureOpenDevice(devicename: String, frequency: Int, format: Int, buffersize: Int): Pointer - @JvmStatic external fun alcCaptureCloseDevice(device: Pointer): Boolean - @JvmStatic external fun alcCaptureStart(device: Pointer) - @JvmStatic external fun alcCaptureStop(device: Pointer) - @JvmStatic external fun alcCaptureSamples(device: Pointer, buffer: Buffer, samples: Int) - - const val ALC_FALSE = 0 - const val ALC_TRUE = 1 - const val ALC_FREQUENCY = 0x1007 - const val ALC_REFRESH = 0x1008 - const val ALC_SYNC = 0x1009 - const val ALC_MONO_SOURCES = 0x1010 - const val ALC_STEREO_SOURCES = 0x1011 - const val ALC_NO_ERROR = 0 - const val ALC_INVALID_DEVICE = 0xA001 - const val ALC_INVALID_CONTEXT = 0xA002 - const val ALC_INVALID_ENUM = 0xA003 - const val ALC_INVALID_VALUE = 0xA004 - const val ALC_OUT_OF_MEMORY = 0xA005 - const val ALC_MAJOR_VERSION = 0x1000 - const val ALC_MINOR_VERSION = 0x1001 - const val ALC_ATTRIBUTES_SIZE = 0x1002 - const val ALC_ALL_ATTRIBUTES = 0x1003 - const val ALC_DEFAULT_DEVICE_SPECIFIER = 0x1004 - const val ALC_DEVICE_SPECIFIER = 0x1005 - const val ALC_EXTENSIONS = 0x1006 - const val ALC_EXT_CAPTURE = 1 - const val ALC_CAPTURE_DEVICE_SPECIFIER = 0x310 - const val ALC_CAPTURE_DEFAULT_DEVICE_SPECIFIER = 0x311 - const val ALC_CAPTURE_SAMPLES = 0x312 - const val ALC_ENUMERATE_ALL_EXT = 1 - const val ALC_DEFAULT_ALL_DEVICES_SPECIFIER = 0x1012 - const val ALC_ALL_DEVICES_SPECIFIER = 0x1013 - - internal var loaded = false - - init { - try { - if (nativeOpenALLibraryPath == null) error("Can't get OpenAL library") - traceTime("OpenAL Native.register") { - Native.register(nativeOpenALLibraryPath) - } - loaded = true - } catch (e: Throwable) { - logger.error { "Failed to initialize OpenAL: arch=$arch, OS.rawName=${Platform.rawOsName}, nativeOpenALLibraryPath=$nativeOpenALLibraryPath, message=${e.message}" } - //e.printStackTrace() - } - } -} - -val nativeOpenALLibraryPath: String? by lazy { - Environment["OPENAL_LIB_PATH"]?.let { path -> - return@lazy path - } - if (Environment["KORAU_JVM_DUMMY_SOUND"] == "true") { - return@lazy null - } - when { - Platform.isMac -> "OpenAL" // Mac already includes the OpenAL library - Platform.isLinux -> "libopenal.so.1" - Platform.isWindows -> "soft_oal.dll" - else -> { - println(" - Unknown/Unsupported OS") - null - } - } -} - -private val arch by lazy { System.getProperty("os.arch").toLowerCase() } -private val alClassLoader by lazy { AL::class.java.classLoader } -private fun getNativeFileURL(path: String): URL? 
= alClassLoader.getResource(path) -private fun getNativeFile(path: String): ByteArray = getNativeFileURL(path)?.readBytes() ?: error("Can't find '$path'") -private fun getNativeFileLocalPath(path: String): String { - val tempDir = File(System.getProperty("java.io.tmpdir")) - //val tempFile = File.createTempFile("libopenal_", ".${File(path).extension}") - val tempFile = File(tempDir, "korau_openal.${File(path).extension}") - - val expectedSize = getNativeFileURL(path)?.openStream()?.use { it.available().toLong() } - - if (!tempFile.exists() || tempFile.length() != expectedSize) { - try { - tempFile.writeBytes(getNativeFile(path)) - } catch (e: Throwable) { - e.printStackTrace() - } - } - return tempFile.absolutePath -} - -internal inline fun <T> runCatchingAl(block: () -> T): T? { - val result = runCatching { block() } - if (result.isFailure) { - result.exceptionOrNull()?.printStackTrace() - } - return result.getOrNull() -} diff --git a/korge-core/src/jvm/korlibs/audio/sound/impl/jna/JnaSoundProvider.kt b/korge-core/src/jvm/korlibs/audio/sound/impl/jna/JnaSoundProvider.kt deleted file mode 100644 index c143e20980..0000000000 --- a/korge-core/src/jvm/korlibs/audio/sound/impl/jna/JnaSoundProvider.kt +++ /dev/null @@ -1,423 +0,0 @@ -package korlibs.audio.sound.impl.jna - -import com.sun.jna.* -import korlibs.audio.internal.* -import korlibs.audio.sound.* -import korlibs.datastructure.* -import korlibs.io.async.* -import korlibs.logger.* -import korlibs.math.* -import korlibs.time.* -import kotlinx.coroutines.* -import java.nio.* -import kotlin.coroutines.* -import kotlin.math.* - -class OpenALException(message: String) : RuntimeException(message) - -class JnaOpenALNativeSoundProvider : NativeSoundProvider() { - companion object { - val MAX_AVAILABLE_SOURCES = 100 - } - - val device = (AL.alcOpenDevice(null) ?: throw OpenALException("Can't open OpenAL device")) - val context = (AL.alcCreateContext(device, null) ?: throw OpenALException("Can't get OpenAL context")) - - val sourcePool = Pool { - alGenSourceAndInitialize() - //.also { println("CREATED OpenAL source $it") } - } - val bufferPool = Pool { - AL.alGenBuffer() - //.also { println("CREATED OpenAL buffer $it") } - } - - fun makeCurrent() { - AL.alcMakeContextCurrent(context) - } - - fun unmakeCurrent() { - AL.alcMakeContextCurrent(Pointer.NULL) - } - - init { - makeCurrent() - - AL.alListener3f(AL.AL_POSITION, 0f, 0f, 1.0f) - checkAlErrors("alListener3f", 0) - AL.alListener3f(AL.AL_VELOCITY, 0f, 0f, 0f) - checkAlErrors("alListener3f", 0) - AL.alListenerfv(AL.AL_ORIENTATION, floatArrayOf(0.0f, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f)) - checkAlErrors("alListenerfv", 0) - - java.lang.Runtime.getRuntime().addShutdownHook(Thread { - unmakeCurrent() - AL.alcDestroyContext(context) - AL.alcCloseDevice(device) - }) - } - - override suspend fun createNonStreamingSound(data: AudioData, name: String): Sound { - if (!AL.loaded) return super.createNonStreamingSound(data, name) - return OpenALSoundNoStream(this, coroutineContext, data, name = name) - } - - override fun createPlatformAudioOutput(coroutineContext: CoroutineContext, freq: Int): PlatformAudioOutput { - if (!AL.loaded) return super.createPlatformAudioOutput(coroutineContext, freq) - return OpenALPlatformAudioOutput(this, coroutineContext, freq) - } -} - -class OpenALPlatformAudioOutput( - val provider: JnaOpenALNativeSoundProvider, - coroutineContext: CoroutineContext, - freq: Int, -) : PlatformAudioOutput(coroutineContext, freq) { - var source = 0 - val sourceProv = JnaSoundPropsProvider { source } -
override var availableSamples: Int = 0 - - override var pitch: Double by sourceProv::pitch - override var volume: Double by sourceProv::volume - override var panning: Double by sourceProv::panning - - //val source - - //alSourceQueueBuffers - - //val buffersPool = Pool(6) { all.alGenBuffer() } - //val buffers = IntArray(32) - //val buffers = IntArray(6) - - init { - start() - } - - override suspend fun add(samples: AudioSamples, offset: Int, size: Int) { - //println("OpenALPlatformAudioOutput.add") - availableSamples += samples.totalSamples - try { - provider.makeCurrent() - val tempBuffers = IntArray(1) - ensureSource() - while (true) { - //val buffer = al.alGetSourcei(source, AL.AL_BUFFER) - //val sampleOffset = al.alGetSourcei(source, AL.AL_SAMPLE_OFFSET) - val processed = AL.alGetSourcei(source, AL.AL_BUFFERS_PROCESSED) - val queued = AL.alGetSourcei(source, AL.AL_BUFFERS_QUEUED) - val total = processed + queued - val state = AL.alGetSourceState(source) - val playing = state == AL.AL_PLAYING - - //println("buffer=$buffer, processed=$processed, queued=$queued, state=$state, playing=$playing, sampleOffset=$sampleOffset") - //println("Samples.add") - - if (processed <= 0 && total >= 6) { - delay(10.milliseconds) - continue - } - - if (total < 6) { - AL.alGenBuffers(1, tempBuffers) - checkAlErrors("alGenBuffers", tempBuffers[0]) - //println("alGenBuffers: ${tempBuffers[0]}") - } else { - AL.alSourceUnqueueBuffers(source, 1, tempBuffers) - checkAlErrors("alSourceUnqueueBuffers", source) - //println("alSourceUnqueueBuffers: ${tempBuffers[0]}") - } - //println("samples: $samples - $offset, $size") - //al.alBufferData(tempBuffers[0], samples.copyOfRange(offset, offset + size), frequency, panning, volume) - AL.alBufferData(tempBuffers[0], samples.copyOfRange(offset, offset + size), frequency, panning) - checkAlErrors("alBufferData", tempBuffers[0]) - AL.alSourceQueueBuffers(source, 1, tempBuffers) - checkAlErrors("alSourceQueueBuffers", tempBuffers[0]) - - //val gain = al.alGetSourcef(source, AL.AL_GAIN) - //val pitch = al.alGetSourcef(source, AL.AL_PITCH) - //println("gain=$gain, pitch=$pitch") - if (!playing) { - AL.alSourcePlay(source) - } - break - } - } finally { - availableSamples -= samples.totalSamples - } - } - - fun ensureSource() { - if (source != 0) return - provider.makeCurrent() - - source = alGenSourceAndInitialize() - //al.alGenBuffers(buffers.size, buffers) - } - - override fun start() { - ensureSource() - AL.alSourcePlay(source) - checkAlErrors("alSourcePlay", source) - //checkAlErrors() - } - - //override fun pause() { - // al.alSourcePause(source) - //} - - override fun stop() { - provider.makeCurrent() - - AL.alSourceStop(source) - if (source != 0) { - AL.alDeleteSource(source) - source = 0 - } - //for (n in buffers.indices) { - // if (buffers[n] != 0) { - // al.alDeleteBuffer(buffers[n]) - // buffers[n] = 0 - // } - //} - } -} - -private class MyStopwatch { - private var running = false - private var ns = 0L - private val now get() = System.nanoTime() - - fun resume() { - if (running) return - toggle() - } - - fun pause() { - if (!running) return - toggle() - } - - fun toggle() { - running = !running - ns = now - ns - } - - val elapsedNanoseconds: Long get() = if (running) now - ns else ns -} - -// https://ffainelli.github.io/openal-example/ -class OpenALSoundNoStream( - val provider: JnaOpenALNativeSoundProvider, - coroutineContext: CoroutineContext, - val data: AudioData?, - override val name: String = "Unknown" -) : Sound(coroutineContext), SoundProps { - private val 
logger = Logger("OpenALSoundNoStream") - - override suspend fun decode(maxSamples: Int): AudioData = data ?: AudioData.DUMMY - - override var volume: Double = 1.0 - override var pitch: Double = 1.0 - override var panning: Double = 0.0 - - override val length: TimeSpan get() = data?.totalTime ?: 0.seconds - override val nchannels: Int get() = data?.channels ?: 1 - - override fun play(coroutineContext: CoroutineContext, params: PlaybackParameters): SoundChannel { - val data = data ?: return DummySoundChannel(this) - //println("provider.sourcePool.totalItemsInUse=${provider.sourcePool.totalItemsInUse}, provider.sourcePool.totalAllocatedItems=${provider.sourcePool.totalAllocatedItems}, provider.sourcePool.itemsInPool=${provider.sourcePool.itemsInPool}") - if (provider.sourcePool.totalItemsInUse >= JnaOpenALNativeSoundProvider.MAX_AVAILABLE_SOURCES) { - error("OpenAL too many sources in use") - } - provider.makeCurrent() - var buffer = provider.bufferPool.alloc() - var source = provider.sourcePool.alloc() - if (source == -1) logger.warn { "UNEXPECTED[0] source=-1" } - - AL.alBufferData(buffer, data, panning, volume) - - AL.alSourcei(source, AL.AL_BUFFER, buffer) - checkAlErrors("alSourcei", source) - - var stopped = false - - val sourceProvider: () -> Int = { source } - - val channel = object : SoundChannel(this), SoundProps by JnaSoundPropsProvider(sourceProvider) { - private val stopWatch = MyStopwatch() - val totalSamples get() = data.totalSamples - var currentSampleOffset: Int - get() { - if (source < 0) return 0 - return AL.alGetSourcei(source, AL.AL_SAMPLE_OFFSET) - } - set(value) { - if (source < 0) return - AL.alSourcei(source, AL.AL_SAMPLE_OFFSET, value) - } - - val estimatedTotalNanoseconds: Long - get() = total.nanoseconds.toLong() - val estimatedCurrentNanoseconds: Long - get() = stopWatch.elapsedNanoseconds - - override var current: TimeSpan - get() = data.timeAtSample(currentSampleOffset) - set(value) { - if (source < 0) return - AL.alSourcef(source, AL.AL_SEC_OFFSET, value.seconds.toFloat()) - } - override val total: TimeSpan get() = data.totalTime - - override val state: SoundChannelState get() { - if (source < 0) return SoundChannelState.STOPPED - val result = AL.alGetSourceState(source) - checkAlErrors("alGetSourceState", source) - return when (result) { - AL.AL_INITIAL -> SoundChannelState.INITIAL - AL.AL_PLAYING -> SoundChannelState.PLAYING - AL.AL_PAUSED -> SoundChannelState.PAUSED - AL.AL_STOPPED -> SoundChannelState.STOPPED - else -> SoundChannelState.STOPPED - } - } - - override fun stop() { - if (stopped) return - stopped = true - if (source == -1) logger.warn { "UNEXPECTED[1] source=-1" } - AL.alSourceStop(source) - AL.alSourcei(source, AL.AL_BUFFER, 0) - provider.sourcePool.free(source) - provider.bufferPool.free(buffer) - source = -1 - buffer = -1 - stopWatch.pause() - // We reuse them from the pool - //AL.alDeleteSource(source) - //AL.alDeleteBuffer(buffer) - } - - override fun pause() { - AL.alSourcePause(source) - stopWatch.pause() - } - - override fun resume() { - AL.alSourcePlay(source) - stopWatch.resume() - } - }.also { - it.copySoundPropsFromCombined(this@OpenALSoundNoStream, params) - } - launchImmediately(coroutineContext[ContinuationInterceptor] ?: coroutineContext) { - var times = params.times - var startTime = params.startTime - try { - while (times.hasMore && !stopped) { - times = times.oneLess - channel.reset() - AL.alSourcef(source, AL.AL_SEC_OFFSET, startTime.seconds.toFloat()) - channel.resume() - //checkAlErrors("alSourcePlay") - startTime = 
0.seconds - while (channel.playingOrPaused) delay(10L) - } - } catch (e: CancellationException) { - params.onCancel?.invoke() - } catch (e: Throwable) { - e.printStackTrace() - } finally { - channel.stop() - params.onFinish?.invoke() - } - } - return channel - } -} - -class JnaSoundPropsProvider(val sourceProvider: () -> Int) : SoundProps { - val source get() = sourceProvider() - - private val temp1 = FloatArray(3) - private val temp2 = FloatArray(3) - private val temp3 = FloatArray(3) - - override var pitch: Double - get() = if (source < 0) 1.0 else AL.alGetSourcef(source, AL.AL_PITCH).toDouble() - set(value) { - if (source < 0) return - AL.alSourcef(source, AL.AL_PITCH, value.toFloat()) - } - override var volume: Double - get() = if (source < 0) 1.0 else AL.alGetSourcef(source, AL.AL_GAIN).toDouble() - set(value) { - if (source < 0) return - AL.alSourcef(source, AL.AL_GAIN, value.toFloat()) - } - override var panning: Double - get() { - if (source < 0) return 0.0 - AL.alGetSource3f(source, AL.AL_POSITION, temp1, temp2, temp3) - return temp1[0].toDouble() - } - set(value) { - if (source < 0) return - val pan = value.toFloat() - AL.alSourcef(source, AL.AL_ROLLOFF_FACTOR, 0.0f) - AL.alSourcei(source, AL.AL_SOURCE_RELATIVE, 1) - AL.alSource3f(source, AL.AL_POSITION, pan, 0f, -sqrt(1.0f - pan * pan)) - //println("SET PANNING: source=$source, pan=$pan") - } -} - -private fun AL.alBufferData(buffer: Int, data: AudioSamples, freq: Int, panning: Double = 0.0, volume: Double = 1.0) { - alBufferData(buffer, AudioData(freq, data), panning, volume) -} - -private fun applyStereoPanningInline(interleaved: ShortArray, panning: Double = 0.0, volume: Double = 1.0) { - if (panning == 0.0 || volume != 1.0) return - val vvolume = volume.clamp01() - val rratio = (((panning + 1.0) / 2.0).clamp01() * vvolume).toFloat() - val lratio = ((1.0 - rratio) * vvolume).toFloat() - //println("panning=$panning, lratio=$lratio, rratio=$rratio, vvolume=$vvolume") - for (n in interleaved.indices step 2) { - interleaved[n + 0] = (interleaved[n + 0] * lratio).coerceToShort() - interleaved[n + 1] = (interleaved[n + 1] * rratio).coerceToShort() - } -} - -private fun AL.alBufferData(buffer: Int, data: AudioData, panning: Double = 0.0, volume: Double = 1.0) { - val samples = data.samplesInterleaved.data - if (data.stereo && panning != 0.0) applyStereoPanningInline(samples, panning, volume) - val bufferData = ShortBuffer.wrap(samples) - val format = if (data.stereo) AL.AL_FORMAT_STEREO16 else AL.AL_FORMAT_MONO16 - val samplesData = if (samples.isNotEmpty()) bufferData else null - val bytesSize = samples.size * 2 - val rate = data.rate - AL.alBufferData(buffer, format, samplesData, bytesSize, rate) - checkAlErrors("alBufferData", buffer) -} - -private fun alGenSourceAndInitialize() = AL.alGenSource().also { source -> - AL.alSourcef(source, AL.AL_PITCH, 1f) - AL.alSourcef(source, AL.AL_GAIN, 1f) - AL.alSource3f(source, AL.AL_POSITION, 0f, 0f, 0f) - AL.alSource3f(source, AL.AL_VELOCITY, 0f, 0f, 0f) - AL.alSourcei(source, AL.AL_LOOPING, AL.AL_FALSE) - AL.alSourceStop(source) -} - -fun ALerrorToString(value: Int): String = when (value) { - AL.AL_INVALID_NAME -> "AL_INVALID_NAME" - AL.AL_INVALID_ENUM -> "AL_INVALID_ENUM" - AL.AL_INVALID_VALUE -> "AL_INVALID_VALUE" - AL.AL_INVALID_OPERATION -> "AL_INVALID_OPERATION" - AL.AL_OUT_OF_MEMORY -> "AL_OUT_OF_MEMORY" - else -> "UNKNOWN" -} - -//fun checkAlErrors(name: String, value: Int = -1) { -fun checkAlErrors(name: String, value: Int) { - //AL.alGetError().also { error -> if (error != 
AL.AL_NO_ERROR) Console.error("OpenAL error ${error.shex} (${ALerrorToString(error)}) '$name' (value=$value)") } -} diff --git a/korge-core/src/jvm/korlibs/audio/sound/impl/jogamp/JogampSoundProvider.kt b/korge-core/src/jvm/korlibs/audio/sound/impl/jogamp/JogampSoundProvider.kt deleted file mode 100644 index a354f9de95..0000000000 --- a/korge-core/src/jvm/korlibs/audio/sound/impl/jogamp/JogampSoundProvider.kt +++ /dev/null @@ -1,182 +0,0 @@ -package korlibs.audio.sound.impl.jogamp - -/* -import com.jogamp.openal.* -import com.jogamp.openal.util.* -import korlibs.time.* -import korlibs.audio.format.* -import korlibs.audio.sound.* -import korlibs.io.async.* -import korlibs.io.file.* -import kotlinx.coroutines.* -import java.nio.* -import kotlin.coroutines.* - -internal inline fun <T> runCatchingAl(block: () -> T): T? { - val result = runCatching { block() } - if (result.isFailure) { - result.exceptionOrNull()?.printStackTrace() - } - return result.getOrNull() -} - -val al: AL? by lazy { - runCatchingAl { - ALFactory.getAL().also { al -> - //val error = al.alGetError() - //if (error != AL.AL_NO_ERROR) error("Error initializing OpenAL ${error.shex}") - } - } -} - -class JogampNativeSoundProvider : NativeSoundProvider() { - init { - //println("ALut.alutInit: ${Thread.currentThread()}") - runCatchingAl { - ALut.alutInit() - } - //alc.alcMakeContextCurrent(context) - al?.alListener3f(AL.AL_POSITION, 0f, 0f, 1.0f) - checkAlErrors() - al?.alListener3f(AL.AL_VELOCITY, 0f, 0f, 0f) - checkAlErrors() - al?.alListenerfv(AL.AL_ORIENTATION, floatArrayOf(0.0f, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f), 0) - checkAlErrors() - } - - override suspend fun createSound(data: ByteArray, streaming: Boolean): NativeSound { - return OpenALNativeSoundNoStream(coroutineContext, nativeAudioFormats.decode(data)) - } - - override suspend fun createSound(vfs: Vfs, path: String, streaming: Boolean): NativeSound { - return super.createSound(vfs, path, streaming) - } - - override suspend fun createSound(data: AudioData, formats: AudioFormats, streaming: Boolean): NativeSound { - return super.createSound(data, formats, streaming) - } -} - -// https://ffainelli.github.io/openal-example/ -class OpenALNativeSoundNoStream(val coroutineContext: CoroutineContext, val data: AudioData?)
: NativeSound() { - override suspend fun decode(): AudioData = data ?: AudioData.DUMMY - - override fun play(): NativeSoundChannel { - //if (openalNativeSoundProvider.device == null || openalNativeSoundProvider.context == null) return DummyNativeSoundChannel(this, data) - //println("OpenALNativeSoundNoStream.play : $data") - //alc.alcMakeContextCurrent(context) - val data = data ?: return DummyNativeSoundChannel(this) - - val buffer = alGenBuffer() - alBufferData(buffer, data) - - val source = alGenSource() - al?.alSourcef(source, AL.AL_PITCH, 1f) - al?.alSourcef(source, AL.AL_GAIN, 1f) - al?.alSource3f(source, AL.AL_POSITION, 0f, 0f, 0f) - al?.alSource3f(source, AL.AL_VELOCITY, 0f, 0f, 0f) - al?.alSourcei(source, AL.AL_LOOPING, AL.AL_FALSE) - al?.alSourcei(source, AL.AL_BUFFER, buffer) - checkAlErrors() - - al?.alSourcePlay(source) - checkAlErrors() - - var stopped = false - - val channel = object : NativeSoundChannel(this) { - val totalSamples get() = data.totalSamples - val currentSampleOffset get() = alGetSourcei(source, AL.AL_SAMPLE_OFFSET) - - override var volume: Double - get() = run { alGetSourcef(source, AL.AL_GAIN).toDouble() } - set(value) = run { al?.alSourcef(source, AL.AL_GAIN, value.toFloat()) } - override var pitch: Double - get() = run { alGetSourcef(source, AL.AL_PITCH).toDouble() } - set(value) = run { al?.alSourcef(source, AL.AL_PITCH, value.toFloat()) } - override var panning: Double = 0.0 - set(value) = run { - field = value - al?.alSource3f(source, AL.AL_POSITION, panning.toFloat(), 0f, 0f) - } - - override val current: TimeSpan get() = data.timeAtSample(currentSampleOffset) - override val total: TimeSpan get() = data.totalTime - override val playing: Boolean get() { - val result = alGetSourceState(source) == AL.AL_PLAYING - checkAlErrors() - return result - } - - override fun stop() { - if (!stopped) { - stopped = true - alDeleteSource(source) - alDeleteBuffer(buffer) - } - } - } - launchImmediately(coroutineContext[ContinuationInterceptor] ?: coroutineContext) { - try { - do { - delay(1L) - } while (channel.playing) - } catch (e: Throwable) { - e.printStackTrace() - } finally { - channel.stop() - } - } - return channel - - } -} - -private val tempF = FloatArray(1) -private val tempI = IntArray(1) -private fun alGetSourcef(source: Int, param: Int): Float = tempF.apply { al?.alGetSourcef(source, param, this, 0) }[0] -private fun alGetSourcei(source: Int, param: Int): Int = tempI.apply { al?.alGetSourcei(source, param, this, 0) }[0] -private fun alGetSourceState(source: Int): Int = alGetSourcei(source, AL.AL_SOURCE_STATE) - -private fun alBufferData(buffer: Int, data: AudioData) { - val samples = data.samplesInterleaved.data - - val bufferData = ShortBuffer.wrap(samples) - //val bufferData = ByteBuffer.allocateDirect(samples.size * 2).order(ByteOrder.nativeOrder()) - //bufferData.asShortBuffer().put(samples) - - al?.alBufferData( - buffer, - if (data.channels == 1) AL.AL_FORMAT_MONO16 else AL.AL_FORMAT_STEREO16, - if (samples.isNotEmpty()) bufferData else null, - samples.size * 2, - data.rate - ) - checkAlErrors() -} - -private fun alGenBuffer(): Int = tempI.apply { al?.alGenBuffers(1, this, 0) }[0] -private fun alGenSource(): Int = tempI.apply { al?.alGenSources(1, this, 0) }[0] -private fun alDeleteBuffer(buffer: Int): Unit = run { al?.alDeleteBuffers(1, tempI.also { it[0] = buffer }, 0) } -private fun alDeleteSource(buffer: Int): Unit = run { al?.alDeleteSources(1, tempI.also { it[0] = buffer }, 0) } - -/* -val alc by lazy { - ALFactory.getALC().also { alc -> - 
//val error = alc.alcGetError() - //if (error != AL.AL_NO_ERROR) error("Error initializing OpenAL ${error.shex}") - } } - -private val device by lazy { alc.alcOpenDevice(null).also { - println("alc.alcOpenDevice: $it") -} } -private val context by lazy { alc.alcCreateContext(device, null).also { - println("alc.alcCreateContext: $it with device=$device") -} } -*/ - -fun checkAlErrors() { -// val error = al.alGetError() -// if (error != AL.AL_NO_ERROR) error("OpenAL error ${error.shex}") -} -*/ diff --git a/korge-core/src/jvm/korlibs/audio/sound/internal/jvm/JvmTools.kt b/korge-core/src/jvm/korlibs/audio/sound/internal/jvm/JvmTools.kt deleted file mode 100644 index 79260eb2e0..0000000000 --- a/korge-core/src/jvm/korlibs/audio/sound/internal/jvm/JvmTools.kt +++ /dev/null @@ -1,17 +0,0 @@ -package korlibs.audio.sound.internal.jvm - -import korlibs.datastructure.ByteArrayDeque -import java.io.InputStream - -internal fun ByteArrayDeque.inputStream() = object : InputStream() { - override fun read(b: ByteArray, off: Int, len: Int): Int { - val out = this@inputStream.read(b, off, len) - //if (out <= 0) return -1 - if (out <= 0) return 0 - return out - } - - override fun read(): Int { - return this@inputStream.readByte() - } -} diff --git a/korge-sandbox/src/commonMain/kotlin/samples/MainPolyphonic.kt b/korge-sandbox/src/commonMain/kotlin/samples/MainPolyphonic.kt index 32bb30737d..4513d2e948 100644 --- a/korge-sandbox/src/commonMain/kotlin/samples/MainPolyphonic.kt +++ b/korge-sandbox/src/commonMain/kotlin/samples/MainPolyphonic.kt @@ -1,14 +1,11 @@ package samples -import korlibs.memory.arraycopy -import korlibs.math.clamp -import korlibs.audio.sound.AudioSamples -import korlibs.audio.sound.nativeSoundProvider -import korlibs.korge.scene.Scene -import korlibs.korge.ui.uiVerticalStack -import korlibs.korge.view.SContainer -import korlibs.korge.view.text -import korlibs.io.async.launchImmediately +import korlibs.audio.sound.* +import korlibs.io.concurrent.atomic.* +import korlibs.korge.scene.* +import korlibs.korge.ui.* +import korlibs.korge.view.* +import korlibs.math.* import kotlin.math.* class MainPolyphonic : Scene() { @@ -22,65 +19,50 @@ class MainPolyphonic : Scene() { text("by Yann Tiersen") } - val maxAt = SAMPLE_COUNT / 16 - for (i in 0 until SAMPLE_COUNT) { - sample[i] = when { - i < maxAt -> (i.toFloat() / maxAt.toFloat() * 2f - 1f) - else -> (1f - (i - maxAt).toFloat() / (SAMPLE_COUNT - maxAt).toFloat() * 2f) - } - } - var base = 40.0f - for (i in 0 until OCTAVE_COUNT) { - createPitches(base, octaves[i]) - base *= 2f - } - channelStates[0].noteIndex = 0; nextNote(0) - channelStates[1].noteIndex = 0; nextNote(1) + channelStates[0].noteIndex.value = 0; nextNote(0) + channelStates[1].noteIndex.value = 0; nextNote(1) for (nchannel in 0 until 2) { - //for (nchannel in 0 until 1) { - launchImmediately { - //AudioTone.generate(0.25.seconds, 440.0).playAndWait() - val stream = nativeSoundProvider.createPlatformAudioOutput(44100) - stream.start() - while (true) { - //val samples = AudioSamples(1, 44100 * 6) - val samples = AudioSamples(1, 4410) - //val samples = AudioSamples(2, 44100) - //val samples = AudioSamples(1, 44100) - audioOutCallback(nchannel, samples.data[0], samples.data[0].size) - for (n in 1 until samples.channels) { - arraycopy(samples.data[0], 0, samples.data[n], 0, samples.data[0].size) - } - samples.scaleVolume(.05f) - //MemorySyncStream().apply { writeShortArrayLE(samples.data[0]) }.toByteArray().writeToFile("/tmp/data.raw") - //for (n in 0 until 44100) 
println(samples.data[0][n]) - stream.add(samples) - } + val stream2 = nativeSoundProvider.createNewPlatformAudioOutput(1, 44100) { samples -> + audioOutCallback(nchannel, samples.data, samples.data.size) + samples.scaleVolume(.05f) } + stream2.start() } } companion object { - const val SAMPLE_COUNT = 0x10000 - val sample = FloatArray(SAMPLE_COUNT) + const val SAMPLE_COUNT = 0x1000 + val SAMPLE = FloatArray(SAMPLE_COUNT).also { SAMPLE -> + val maxAt = SAMPLE_COUNT / 16 + for (i in 0 until SAMPLE_COUNT) { + SAMPLE[i] = when { + i < maxAt -> (i.toFloat() / maxAt.toFloat() * 2f - 1f) + else -> (1f - (i - maxAt).toFloat() / (SAMPLE_COUNT - maxAt).toFloat() * 2f) + } + } + } const val SAMPLE_RATE = 44100 - const val OCTAVE_COUNT = 6 - - val octaves = Array(6) { FloatArray(12) } + val OCTAVES = Array(6) { FloatArray(12) }.also { OCTAVES -> + var base = 40.0f + for (element in OCTAVES) { + createPitches(base, element) + base *= 2f + } + } data class Note_t(val note: Int, val octave: Int, val duration: Int) data class ChannelState_t( - var currentNote: Note_t = Note_t(0, 0, 0), - var noteIndex: Int = 0, - var currentTime: Int = 0, - var currentsampleIndex: Float = 0f, - var currentsampleIncrement: Float = 0f + val currentNote: KorAtomicRef<Note_t> = KorAtomicRef(Note_t(0, 0, 0)), + val noteIndex: KorAtomicInt = KorAtomicInt(0), + val currentTime: KorAtomicInt = KorAtomicInt(0), + val currentsampleIndex: KorAtomicFloat = KorAtomicFloat(0f), + val currentsampleIncrement: KorAtomicFloat = KorAtomicFloat(0f) ) - val channelStates = Array(3) { ChannelState_t() } + val channelStates = Array(2) { ChannelState_t() } // "S" means "#" const val NOTE_END = -2 @@ -359,18 +341,18 @@ class MainPolyphonic : Scene() { fun nextNote(channel: Int) { val state = channelStates[channel] - state.currentNote = channels[channel][state.noteIndex] - state.currentTime = 0 - state.currentsampleIndex = 0f - val note = state.currentNote.note + state.currentNote.value = channels[channel][state.noteIndex.value] + state.currentTime.value = 0 + state.currentsampleIndex.value = 0f + val note = state.currentNote.value.note if (note == NOTE_PAUSE) { - state.currentsampleIncrement = 0f + state.currentsampleIncrement.value = 0f } else { - state.currentsampleIncrement = octaves[state.currentNote.octave][note] * (SAMPLE_COUNT.toFloat()) / (SAMPLE_RATE.toFloat()) + state.currentsampleIncrement.value = OCTAVES[state.currentNote.value.octave][note] * (SAMPLE_COUNT.toFloat()) / (SAMPLE_RATE.toFloat()) } - state.noteIndex++ - if (channels[channel][state.noteIndex].note == NOTE_END) state.noteIndex = 0 + state.noteIndex.incrementAndGet() + if (channels[channel][state.noteIndex.value].note == NOTE_END) state.noteIndex.value = 0 } // calculate current value of attack/delay/sustain/release envelope @@ -397,18 +379,17 @@ class MainPolyphonic : Scene() { val state = channelStates[channel] var bufn = bufn for (i in 0 until reqn) { - val time = (state.currentTime.toFloat()) / (SAMPLE_RATE.toFloat()) - if (state.currentTime++ == state.currentNote.duration) { + val time = (state.currentTime.value.toFloat()) / (SAMPLE_RATE.toFloat()) + if (state.currentTime.getAndIncrement() == state.currentNote.value.duration) { nextNote(channel) } var value: Float - if (state.currentsampleIncrement == 0.0f) { + if (state.currentsampleIncrement.value == 0.0f) { value = 0.0f } else { - value = sample[state.currentsampleIndex.toInt()] * adsr(time, (state.currentNote.duration.toFloat()) / (SAMPLE_RATE.toFloat())) + value = SAMPLE[state.currentsampleIndex.value.toInt()] * adsr(time,
(state.currentNote.value.duration.toFloat()) / (SAMPLE_RATE.toFloat())) value *= 0x7000f - state.currentsampleIndex += state.currentsampleIncrement - if (state.currentsampleIndex >= SAMPLE_COUNT) state.currentsampleIndex -= SAMPLE_COUNT.toFloat() + state.currentsampleIndex.addAndGetMod(state.currentsampleIncrement.value, SAMPLE_COUNT.toFloat()) } val rvalue = value.clamp(Short.MIN_VALUE.toFloat(), Short.MAX_VALUE.toInt().toFloat()).toInt().toShort() //for (n in 0 until nchannels) buf[bufn++] = value.toShort() diff --git a/korge-sandbox/src/commonMain/kotlin/samples/MainSound.kt b/korge-sandbox/src/commonMain/kotlin/samples/MainSound.kt index 710f283c76..26e29fad74 100644 --- a/korge-sandbox/src/commonMain/kotlin/samples/MainSound.kt +++ b/korge-sandbox/src/commonMain/kotlin/samples/MainSound.kt @@ -8,8 +8,8 @@ import korlibs.time.* class MainSound : Scene() { override suspend fun SContainer.sceneMain() { - - val music = resourcesVfs["sounds/Snowland.mp3"].readMusic() + //val music = resourcesVfs["sounds/Snowland.mp3"].readMusic() + val music = resourcesVfs["sounds/Snowland.mp3"].readSound() //val music = resourcesVfs["sounds/click.wav"].readSound() //val music = resourcesVfs["sounds/click.wav"].readMusic()