Compare commits

..

14 Commits

14 changed files with 489 additions and 74 deletions

1
.gitignore vendored
View File

@@ -12,3 +12,4 @@
app/src/main/obj/
.cxx
/txts
/data

View File

@@ -1,8 +1,16 @@
## TODO
* do not open oboe upon app startup, only if we are actually recording
* PlaybackEngine - Buffer overrun on output for channel (1.000000)
- we are feeding too much data into 'stretcher'
* E attributionTag not declared in manifest of at.lockstep
* analyze the (secondly or so) noise beeps in the mp3 playback
- introduced with this commit
- is it librubberband, my failure to feed it properly (buffer exhaustion), or something else?
- the sizes of my buffers?
* correct sampling rate of libmpg123 vs. 48000 Hz using librubberband

View File

@@ -76,6 +76,7 @@ dependencies {
implementation libs.oboe
implementation libs.slf4j.api
implementation libs.logback.android
implementation libs.gson
implementation libs.androidx.core.ktx
implementation libs.androidx.lifecycle.runtime.ktx

View File

@@ -6,6 +6,7 @@
<uses-permission android:name="android.permission.FOREGROUND_SERVICE_MEDIA_PLAYBACK" />
<uses-permission android:name="android.permission.WAKE_LOCK" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.READ_MEDIA_AUDIO" />

View File

@@ -13,7 +13,12 @@
class AudioCallbackProvider {
public:
virtual ~AudioCallbackProvider() {}
/** in current impl, this passes a buffer where data may already live. the provider may add to it and re-normalize to [-1.0, 1.0]. */
/**
* in current impl, this passes a buffer where data may already live.
* the provider may add to it and re-normalize to [-1.0, 1.0].
*
* upon is_finished=true it will not do anything - the caller is responsible for 0-ing the data buffer.
* */
virtual void onAudioReady(float *data, int32_t frames) {}
};

View File

@@ -22,8 +22,9 @@ protected:
std::vector<int> beatIdx;
std::atomic<int> startBeat;
int numBeatsPlaying;
bool mIsPlaying;
public:
explicit MixingPlayer(std::vector<float> beatSound) : beatSound(beatSound), startBeat(0), numBeatsPlaying(0), mHaveMusic(false) {}
explicit MixingPlayer(std::vector<float> beatSound) : beatSound(beatSound), startBeat(0), numBeatsPlaying(0), mIsPlaying(false), mHaveMusic(false) {}
virtual ~MixingPlayer() = default;
@@ -52,9 +53,13 @@ public:
// Typically, start the stream after querying some stream information, as well as some input from the user
result = mStream->requestStart();
mIsPlaying = (result == Result::OK);
return (int32_t) result;
}
int getRate() { return kSampleRate; }
int getNumChannels() { return kChannelCount; }
// Call this from Activity onPause()
void stopAudio() {
// Stop, close and delete in case not already closed.
@@ -64,8 +69,11 @@ public:
mStream->close();
mStream.reset();
}
mIsPlaying = false;
}
bool isPlaying() { return mIsPlaying; }
void setMusic(std::shared_ptr<AudioCallbackProvider> cb) {
std::lock_guard<std::mutex> lock(mLock);
mMusic = std::move(cb);
@@ -112,6 +120,8 @@ public:
}
if(mHaveMusic.load()) {
// note: the contract for onAudioReady() upon is_finished=true implies it will not do anything
// (the buffer must be set to all-0 here in the caller)
mMusic->onAudioReady(floatData, numFrames);
}

View File

@@ -54,7 +54,6 @@ static bool read_mp3(std::string filename, std::vector<float>& samples) {
return ok1 && ok2;
}
struct RbLogger : public RubberBand::RubberBandStretcher::Logger {
virtual void log(const char *s) {
LOGI("%s", s);
@@ -80,24 +79,38 @@ PlaybackEngine::PlaybackEngine(std::string filesDir, int resid):
mFilesDir(filesDir),
haveMusicFile(false),
exitMusicFeedThread(false),
android_fd(0)
isSetMusic(false),
android_fd(0),
haveTimeRatio(false),
timeRatio(1.0),
// these 3 values are preliminary -- will be set from MixingPlayer defaults in the ctor body below
playbackRate(48000),
numOutChannels(2),
numInChannels(2),
back_pressure(0)
{
LOGI("PlaybackEngine()");
LOGI("NDK LOG_LEVEL=%d", LOG_LEVEL);
// NDK LOG_LEVEL=3 (DEBUG)
// load "bump" sound effect
std::vector<float> samples;
bool is_ok = read_mp3(mFilesDir + "/" + std::to_string(resid) + ".mp3", samples);
LOGI("read_mp3() is_ok=%d", is_ok);
LOGI("read_mp3() for bump effect, is_ok=%d", is_ok);
mPlayer = new MixingPlayer(samples);
int32_t res = mPlayer->startAudio();
LOGI("startAudio() = %d", res);
// configure stretcher and start musicFeedThread()
initRubberBand();
}
void PlaybackEngine::initRubberBand() {
// TODO: check mp3 actual sample rate, and adapt to 48000 Hz playback here
// we do not yet have a music file with actual sampling rate, so set the default ratio
stretcher.reset();
stretcher.setTimeRatio(1.0);
stretcher.setPitchScale(1.0);
stretcher.setDebugLevel(1); // 1: errors only. generally 0..4
// feed samples into 'stretcher' and read bogus output
@@ -145,55 +158,146 @@ void PlaybackEngine::closeMusicFile() {
close(android_fd);
android_fd = 0;
}
isSetMusic.store(false);
mPlayer->setMusic(nullptr);
}
/**
 * Fill 'channel_map' (length num_ch_out) so that output channel i reads from
 * input channel channel_map[i].
 *
 *  - equal counts, or >= 2 inputs: identity mapping (channel i <- i)
 *  - mono input (or a degenerate layout): fan input channel 0 out to every output
 *
 * Degenerate layouts (num_ch_in <= 0 with a differing output count) are logged
 * and fall back to the mono fan-out.
 */
void PlaybackEngine::mapChannels(int *channel_map, int num_ch_in, int num_ch_out) {
    // identity holds for matching layouts and for any multi-channel input
    // (a stereo-style mapping onto however many outputs exist)
    const bool identity = (num_ch_in == num_ch_out) || (num_ch_in >= 2);
    if (!identity && num_ch_in != 1) {
        LOGE("mapChannels(): strange channel layout, mapping to mono. num_ch_in=%d num_ch_out=%d", num_ch_in, num_ch_out);
    }
    for (int ch = 0; ch < num_ch_out; ch++) {
        channel_map[ch] = identity ? ch : 0;
    }
    // TODO: check broken input (0 channels etc) and bubble up an error to app
}
void PlaybackEngine::musicFeedThread() {
// refactor: rename to 'num_buf_samples'
size_t num_pad = 48000; // hack! how much to actually reserve? is getPreferredStartPad() always < getSamplesRequired()?
size_t buf_stride = num_pad;
float* buf = (float*) malloc(num_pad*2*sizeof(float));
float* buf_ptr[] {buf, buf + num_pad};
memset(buf, 0, num_pad*2*sizeof(float));
unsigned char* cbuf = (unsigned char*) malloc(num_pad*2*sizeof(int16_t));
memset(cbuf, 0, num_pad*2*sizeof(int16_t));
size_t cbuf_size_bytes = num_pad*2*sizeof(int16_t);
LOGI("starting musicFeedThread()");
// stretcher num channels: same as output num channels
// (this is because we play silence even without any input file, so we cannot set stretcher
// channel count for the music file's channel count)
int num_ch_in = numInChannels.load();
int num_ch_out = numOutChannels.load();
size_t num_buf_samples = buf_size_samples;
size_t buf_stride = num_buf_samples;
size_t buf_size_bytes = num_buf_samples * num_ch_out * sizeof(float);
float* buf = (float*) malloc(buf_size_bytes);
float** buf_ptr = (float**) malloc(num_ch_out * sizeof(float*));
for(int i = 0; i < num_ch_out; i++) {
buf_ptr[i] = buf + i * num_buf_samples;
}
memset(buf, 0, buf_size_bytes);
// preliminary allocation (actual music file buffer is unknown due to unknown channel count)
size_t cbuf_size_bytes = num_buf_samples * num_ch_in * sizeof(int16_t);
//size_t cbuf_load_bytes = num_buf_samples * num_ch_in * sizeof(int16_t);
unsigned char* cbuf = (unsigned char*) malloc(cbuf_size_bytes);
memset(cbuf, 0, cbuf_size_bytes);
int* channel_map = (int*) malloc(num_ch_out * sizeof(int));
// initial guess, as long as we do not have a music file (otherwise we should divide by mp3->rate)
size_t loop_delay_us = 1000000 * buf_size_samples / playbackRate.load();
int idebug = 0;
// thread 2: polling for decoding more mp3 -> process() -- getSamplesRequired()
while(!exitMusicFeedThread.load()) {
if(!haveMusicFile.load()) {
// while no MusicProvider is connected, no samples will be read from 'stretcher'
// therefore, we do not write any samples into it!
std::this_thread::sleep_for(std::chrono::milliseconds(50));
continue;
}
if(!isSetMusic.load()) {
mPlayer->setMusic(std::make_shared<MusicProvider>(&stretcher, buf_size_samples, numOutChannels.load(), &back_pressure));
isSetMusic.store(true);
}
if(haveTimeRatio.load()) {
double ratio = timeRatio.load();
stretcher.setTimeRatio(ratio);
stretcher.setPitchScale(1.0 / ratio);
haveTimeRatio.store(false);
}
// change buffer size, if necessary (changed input channel count)
if(numInChannels.load() != num_ch_in) {
LOGD("changed buffer size (changed input channel count)");
num_ch_in = numInChannels.load();
free(cbuf);
cbuf_size_bytes = num_buf_samples * num_ch_in * sizeof(int16_t);
cbuf = (unsigned char*) malloc(cbuf_size_bytes);
memset(cbuf, 0, cbuf_size_bytes);
}
mapChannels(channel_map, num_ch_in, num_ch_out);
// do work ...
// note: getSamplesRequired() itself only gives us how many samples to create another
// output buffer increment, not if the output buffer has been emptied.
// We need to manage the buffer sizes ourselves.
// this draft should always keep the output buffers filled at 50-100 ms
int target_output_buffer_frames = 100 * playbackRate.load() / 1000; // 100 ms worth of audio
if(idebug < 10) {
LOGI("back_pressure available=%d target=%d", back_pressure.load(), target_output_buffer_frames);
}
if(back_pressure.load() >= target_output_buffer_frames) {
std::this_thread::sleep_for(std::chrono::milliseconds(50));
}
// at 48000 Hz playbackRate, the 512-1024 frames returned here give us additional (10-21 ms) output buffer
// (this is somewhat approximate, but the above control loop should keep us within a reasonable buffer size)
size_t num_samples = stretcher.getSamplesRequired();
// note: how much to sleep until output has played x samples...?
// can we just measure the wall time here, instead of calculating? -- that will be imprecise.
// how large is one buffer, and when do we feed it more data?
// (is it like double-buffering implemented in 'stretcher'?)
// can we just wait some bogus interval here, for a first version?
if (num_samples == 0) {
//LOGD("waiting for getSamplesRequired()");
std::this_thread::sleep_for(std::chrono::milliseconds(20));
// this was never the case in actual testing -- see note above.
LOGD("waiting %d us for getSamplesRequired()", loop_delay_us);
std::this_thread::sleep_for(std::chrono::microseconds(loop_delay_us));
continue;
}
if (num_samples > num_pad) {
LOGE("wanted %d samples but buf is only %d samples", num_samples, num_pad);
continue;
if (num_samples > num_buf_samples) {
LOGE("wanted %d samples but buf is only %d samples", num_samples, num_buf_samples);
num_samples = num_buf_samples;
}
if (!haveMusicFile.load()) {
loop_delay_us = 1000000 * num_samples / playbackRate.load();
if(idebug++ < 10) {
LOGI("feed %d silence samples", num_samples);
// 1024, 512, 512
// 7 x 512
}
memset(buf, 0, num_samples*2*sizeof(float));
memset(buf, 0, num_samples*num_ch_out*sizeof(float));
stretcher.process(buf_ptr, num_samples, false);
continue;
}
if(idebug++ < 10) {
loop_delay_us = 1000000 * num_samples / musicFile->rate;
LOGI("feed %d music samples", num_samples);
// feed 1024 music samples
// => stretcher is asking for 1024 = getSamplesRequired()
@@ -201,39 +305,60 @@ void PlaybackEngine::musicFeedThread() {
}
size_t done = 0; // bytes!
int err = mpg123_read(musicFile->handle, cbuf, cbuf_size_bytes, &done);
size_t read_size_bytes = std::min(num_samples * num_ch_in * sizeof(int16_t), cbuf_size_bytes);
int err = mpg123_read(musicFile->handle, cbuf, read_size_bytes, &done);
musicFile->remaining_samples -= done / sizeof(int16_t);
musicFile->offset = 0;
musicFile->offset = 0; // unused here
if (err != MPG123_OK && err != MPG123_DONE) {
// error!
LOGE("mpg123_read() err=%d done=%d", err, done);
LOGE("error reading mp3 file: mpg123_read() err=%d done=%d", err, done);
// next iteration will play silence
closeMusicFile();
stretcher.setTimeRatio(1.0); // buffer size for playing silence is computed from 'playbackRate', so reset timeRatio
stretcher.setPitchScale(1.0);
stretcher.process(buf_ptr, 0, true); // set end of playback
mPlayer->stopAudio();
continue;
}
if(err == MPG123_DONE) {
// next iteration will play silence
LOGI("finished reading mp3 file (MPG123_DONE)");
closeMusicFile();
stretcher.setTimeRatio(1.0); // buffer size for playing silence is computed from 'playbackRate', so reset timeRatio
stretcher.setPitchScale(1.0);
stretcher.process(buf_ptr, 0, true); // set end of playback
mPlayer->stopAudio();
continue;
}
size_t num_decoded_samples = done / sizeof(int16_t) / 2; // 2 channels - TODO: actually use mp3 channels!! below, too. 2.
LOGI("num_decoded_samples = %d", num_decoded_samples);
// convert interleaved int16 to de-interleaved float [-1.0, 1.0] format
size_t num_decoded_samples = done / sizeof(int16_t) / num_ch_in;
//LOGD("num_decoded_samples = %d", num_decoded_samples);
// * convert interleaved int16 to de-interleaved float [-1.0, 1.0] format
// * map input to output channels
for(size_t i = 0; i < num_decoded_samples; i++) {
for(size_t j = 0; j < 2; j++) {
buf[i + buf_stride * j] = static_cast<float>(*(reinterpret_cast<int16_t*>(cbuf) + i*2 + j)) / 32768.0f;
for(size_t j = 0; j < num_ch_out; j++) {
buf[i + buf_stride * j] = static_cast<float>(*(reinterpret_cast<int16_t*>(cbuf) + i * num_ch_in + channel_map[j])) / 32768.0f;
}
}
LOGI("calling stretcher.process()");
//LOGD("calling stretcher.process()");
stretcher.process(buf_ptr, num_decoded_samples, false);
}
LOGI("musicFeedThread() exiting ...");
free(buf);
free(buf_ptr);
free(cbuf);
free(channel_map);
LOGI("musicFeedThread() exited.");
}
PlaybackEngine::~PlaybackEngine() {
closeRubberBand();
LOGI("~PlaybackEngine()");
closeRubberBand();
mPlayer->stopAudio();
delete mPlayer;
mPlayer = nullptr;
@@ -245,36 +370,51 @@ void PlaybackEngine::playBeat() {
void PlaybackEngine::playMusic(int fd) {
if(!mPlayer) return;
// TODO: fetch sampling rate from mp3 file, and use librubberband to correct for it
// MixingPlayer::kSampleRate (48000)
// mp3->rate
// feed samples to librubberband
// fetch resamples out of librubberband
//if(mPlayer) mPlayer->playMusic();
// TODO: fd is opened; dispose of fd when stopping or being discarded ...
LOGI("PlaybackEngine::playMusic(fd=%d)", fd);
//close(fd); // for now, nothing is implemented. we just close it again.
// we will use mp3file_open_fd() later.
android_fd = fd;
musicFile.reset(mp3file_open_fd(android_fd, 0));
haveMusicFile.store(true);
mPlayer->setMusic(std::make_shared<MusicProvider>(&stretcher));
if(musicFile) {
timeRatio.store(((double) playbackRate.load()) / ((double) musicFile->rate));
haveTimeRatio.store(true);
numInChannels.store(musicFile->channels);
haveMusicFile.store(true);
}
bool is_finished = (stretcher.available() == -1);
if(is_finished) {
// so that we may play again after "final chunk"
closeRubberBand();
initRubberBand();
}
if(!mPlayer->isPlaying()) {
int32_t res = mPlayer->startAudio();
playbackRate.store(mPlayer->getRate());
numOutChannels.store(mPlayer->getNumChannels());
LOGI("startAudio() = %d rate=%d channels=%d", res, playbackRate.load(), numOutChannels.load());
}
// to wait the 50 ms that the musicFeedThread() is idling when it first receives a file
// we don't call mPlayer->setMusic() here, but in the musicFeedThread()
}
MusicProvider::MusicProvider(RubberBand::RubberBandStretcher *stretcher) : stretcher(stretcher), idebug(0) {
// refactor: rename to 'num_buf_samples'
// TODO: for cache-friendliness, it would be better to have smaller 'num_buf_samples'
// hack! how much to actually reserve? is getPreferredStartPad() always < getSamplesRequired()?
//size_t buf_stride = num_pad;
buf = (float*) malloc(num_buf_samples*2*sizeof(float));
//float* buf_ptr[] {buf, buf + num_pad};
// Constructs the provider with a pre-allocated, de-interleaved retrieve buffer:
// 'buf' is one contiguous allocation partitioned into num_ch_out planes of
// buf_size_samples floats each, and 'buf_ptr' is the per-channel pointer table
// handed to stretcher->retrieve() in onAudioReady().
// 'stretcher' and 'back_pressure' are borrowed (owned by PlaybackEngine) and must
// outlive this object - they are accessed from the oboe audio callback thread.
MusicProvider::MusicProvider(RubberBand::RubberBandStretcher *stretcher, size_t buf_size_samples, int num_ch_out, std::atomic<int> *back_pressure) :
stretcher(stretcher),
idebug(0),
buf_size_samples(buf_size_samples),
num_ch_out(num_ch_out),
back_pressure(back_pressure)
{
// NOTE(review): malloc results are not checked; a std::vector<float> member
// would give the same layout with automatic cleanup (Rule of Zero).
buf = (float*) malloc(buf_size_samples*num_ch_out*sizeof(float));
buf_ptr = (float**) malloc(num_ch_out * sizeof(float*));
// point each channel plane into the contiguous block
for(int i = 0; i < num_ch_out; i++) {
buf_ptr[i] = buf + i * buf_size_samples;
}
}
// Releases the buffers malloc'd in the constructor. The borrowed 'stretcher'
// and 'back_pressure' pointers are deliberately NOT freed here - they are
// owned by PlaybackEngine.
MusicProvider::~MusicProvider() {
free(buf);
free(buf_ptr);
}
void MusicProvider::onAudioReady(float *data, int32_t frames) {
@@ -283,18 +423,38 @@ void MusicProvider::onAudioReady(float *data, int32_t frames) {
// frames=96 (48 kHz => 2 ms!!)
}
if(frames > buf_size_samples) {
LOGE("audio buffer too small! adapt PlaybackEngine::buf_size_samples!! asked for frames=%d but buf_size=%d", frames, buf_size_samples);
}
// 1. read from oboe into our temp de-interleaved buffer 'buf'
size_t num_frames = std::min((size_t) frames, num_buf_samples);
float* buf_ptr[] {buf, buf + num_buf_samples};
int num_frames_requested = std::min((int) frames, (int) buf_size_samples);
int num_frames_available = stretcher->available();
bool is_finished = (num_frames_available == -1);
(*back_pressure).store((int) num_frames_available);
if(is_finished) {
return;
}
if(idebug < 10) {
LOGI("onAudioReady() available=%d", num_frames_available);
}
if(num_frames_available < num_frames_requested) {
// this is an audio glitch
// TODO: bubble info upwards, in a counter (so we can collect device-specific glitch stats)
LOGI("stretcher lag: %d requested, %d available", num_frames_requested, num_frames_available);
}
size_t num_frames = std::min(num_frames_available, num_frames_requested);
stretcher->retrieve(buf_ptr, num_frames);
// 2. convert to add samples to interleaved *data
for(size_t i = 0; i < num_frames; i++) {
for(size_t j = 0; j < 2; j++) {
float sample = data[i*2 + j];
for(size_t j = 0; j < num_ch_out; j++) {
float sample = data[i*num_ch_out + j];
sample += buf_ptr[j][i];
sample /= 2.0;
data[i*2 + j] = sample;
data[i*num_ch_out + j] = sample;
}
}
}

View File

@@ -18,16 +18,20 @@
/** Provides music through a regular callback to oboe. Called from separate oboe thread. */
class MusicProvider : public AudioCallbackProvider {
public:
explicit MusicProvider(RubberBand::RubberBandStretcher *stretcher);
explicit MusicProvider(RubberBand::RubberBandStretcher *stretcher, size_t buf_size_samples, int num_ch_out, std::atomic<int> *back_pressure);
~MusicProvider() override;
/** Called from separate oboe thread. */
void onAudioReady(float *data, int32_t frames) override;
private:
const size_t num_buf_samples = 48000;
RubberBand::RubberBandStretcher *stretcher;
float *buf;
float **buf_ptr;
int idebug;
size_t buf_size_samples;
int num_ch_out;
/** contains the current available() frames from 'stretcher' in the audio callback thread 2 (oboe) */
std::atomic<int> *back_pressure;
};
class PlaybackEngine : public StepListener {
@@ -45,11 +49,23 @@ private:
std::atomic<bool> haveMusicFile;
std::unique_ptr<std::thread> musicFeed;
std::atomic<bool> exitMusicFeedThread;
/** where musicFeedThread() keeps track of the fact that we have music set -- will start the audio cb */
std::atomic<bool> isSetMusic;
int android_fd;
std::atomic<bool> haveTimeRatio;
std::atomic<double> timeRatio;
std::atomic<int> playbackRate;
std::atomic<int> numOutChannels;
std::atomic<int> numInChannels;
/** contains the current available() frames from 'stretcher' in the audio callback thread 2 (oboe) */
std::atomic<int> back_pressure;
/** this is actually in frames, not samples */
static size_t constexpr buf_size_samples = 1024;
void initRubberBand();
void closeRubberBand();
void closeMusicFile();
void musicFeedThread();
void mapChannels(int *channel_map, int num_ch_in, int num_ch_out);
};
#endif //LOCKSTEP_PLAYBACKENGINE_H

View File

@@ -13,6 +13,7 @@ struct MP3File
int android_fd;
int channels;
long rate;
/** num samples in total (stereo of 10 frames will have 20 'samples' here) */
long num_samples;
int samples_per_frame;
double secs_per_frame;
@@ -20,6 +21,7 @@ struct MP3File
double duration;
size_t buffer_size;
unsigned char* buffer;
/** total samples (stereo of 10 frames remaining will have 20 'remaining_samples' here) */
int remaining_samples;
size_t offset;
};

View File

@@ -11,17 +11,21 @@ import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.net.Uri;
import android.os.Binder;
import android.os.Build;
import android.os.IBinder;
import android.os.ParcelFileDescriptor;
import android.os.PowerManager;
import android.util.Log;
import android.widget.Toast;
import android.os.SystemClock;
import androidx.annotation.Nullable;
import androidx.core.app.NotificationCompat;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import at.lockstep.filter.StepDetector;
import at.lockstep.pb.PlaybackEngine;
@@ -59,6 +63,8 @@ public class LstForegroundService extends Service implements SensorEventListener
public void onCreate() {
super.onCreate();
Log.i("LstForegroundService", "onCreate()");
int resid = R.raw.track_beat;
PlaybackEngine.create(this, resid);
stepDetector = new StepDetector(PlaybackEngine.getEngineHandle());
@@ -85,6 +91,8 @@ public class LstForegroundService extends Service implements SensorEventListener
if (intent != null) {
String action = intent.getAction();
if (ACTION_START.equals(action)) {
Log.i("LstForegroundService", "onStartCommand() ACTION_START");
String contentUri = intent.getStringExtra("content_uri");
try {
if(contentUri != null) {
@@ -95,8 +103,9 @@ public class LstForegroundService extends Service implements SensorEventListener
Toast.makeText(this, "Could not open music file contentUri", Toast.LENGTH_LONG).show();
throw new RuntimeException(e);
}
startCollection();
startCollection(contentUri);
} else if (ACTION_STOP.equals(action)) {
Log.i("LstForegroundService", "ACTION_STOP");
stopCollectionAndSelf();
}
}
@@ -123,12 +132,12 @@ public class LstForegroundService extends Service implements SensorEventListener
return fd;
}
private void startCollection() {
private void startCollection(String meta) {
if (isCollecting) {
return;
}
startForeground(NOTIFICATION_ID, buildNotification("Collecting sensor data"));
startForeground(NOTIFICATION_ID, buildNotification());
if (wakeLock != null && !wakeLock.isHeld()) {
// TODO: provide a timeout reasonable for a run
@@ -136,12 +145,15 @@ public class LstForegroundService extends Service implements SensorEventListener
}
if (accelerometer != null && sensorManager != null) {
// TODO: use a HandlerThread to handle sensor events in background thread, not the main thread
// see https://stackoverflow.com/q/17681870/1616948
sensorManager.registerListener(
this,
accelerometer,
SensorManager.SENSOR_DELAY_GAME
);
isCollecting = true;
onStartRecording(meta);
} else {
stopCollectionAndSelf();
}
@@ -151,6 +163,7 @@ public class LstForegroundService extends Service implements SensorEventListener
if (isCollecting && sensorManager != null) {
sensorManager.unregisterListener(this);
isCollecting = false;
onStopRecording();
}
if (wakeLock != null && wakeLock.isHeld()) {
@@ -174,6 +187,8 @@ public class LstForegroundService extends Service implements SensorEventListener
// TODO: check threading to see if these run in separate threads - if so, deleting PlaybackEngine will leave a dangling pointer in StepDetector.
// 2026-03-04 01:26:11.741 12507-12507 libc at.lockstep A Fatal signal 11 (SIGSEGV), code 2 (SEGV_ACCERR), fault addr 0xb4000071d3a79000 in tid 12507 (at.lockstep), pid 12507 (at.lockstep)
Log.d("LstForegroundService", "onDestroy(), calling PlaybackEngine.delete()");
if(stepDetector != null) {
stepDetector.close();
PlaybackEngine.delete();
@@ -183,25 +198,79 @@ public class LstForegroundService extends Service implements SensorEventListener
super.onDestroy();
}
// Same-process binder: a bound client (MainActivity) uses getService() to reach
// the live service instance and attach an OnResultListener.
public class LocalBinder extends Binder {
LstForegroundService getService() { return LstForegroundService.this; }
}
private final LocalBinder binder = new LocalBinder();
@Nullable
@Override
public IBinder onBind(Intent intent) {
return null;
Log.i("LstForegroundService", "onBind()");
return binder;
}
public interface OnResultListener {
void onResult(SensorDataArray recording);
}
private OnResultListener listener;
public void setOnResultListener(OnResultListener listener) { this.listener = listener; }
/** single sensor sample */
// Serialized by Gson via field reflection: 'timestamp' and 'values' become JSON keys.
public static class SensorData {
// nanosecond timestamp (raw SensorEvent timebase, or caller-adjusted to recording start)
private long timestamp;
private float[] values;
// snapshot a live SensorEvent; copies 'values' so later reuse of the event
// array by the framework cannot mutate this sample
public SensorData(SensorEvent event) {
timestamp = event.timestamp;
values = Arrays.copyOf(event.values, event.values.length);
}
// NOTE(review): this ctor does NOT copy 'values' - the caller must hand over
// an array it will not reuse (see onSensorChanged(), which passes event.values)
public SensorData(long timestamp, float[] values) {
this.timestamp = timestamp;
this.values = values;
}
}
/** array of sensor samples */
// Gson-serialized container: 'data' and 'meta' field names define the JSON schema.
public static class SensorDataArray {
private ArrayList<SensorData> data = new ArrayList<SensorData>();
// free-form metadata (currently the music content URI) - may be null
private String meta;
public void add(SensorEvent event) { data.add(new SensorData(event)); }
public void add(SensorData d) { data.add(d); }
public void clear() { data.clear(); }
public void setMeta(String meta) { this.meta = meta; }
}
private final SensorDataArray recording = new SensorDataArray();
private long recordingStartTime = 0;
// Marks the start of a recording session: captures the timebase origin (so samples
// can be stored relative to it in onSensorChanged()) and attaches the metadata
// string (the music content URI passed through startCollection()).
private void onStartRecording(String meta) {
recordingStartTime = SystemClock.elapsedRealtimeNanos();
recording.setMeta(meta);
}
// Hands the finished recording to the bound listener (if any), then empties it.
// NOTE(review): 'recording' is passed by reference and cleared immediately after -
// the listener must consume it synchronously (MainActivity.onResult serializes it
// inline, so this currently holds) or the data is lost. Confirm intended.
private void onStopRecording() {
if(listener != null) {
listener.onResult(recording);
}
recording.clear();
}
@Override
public void onSensorChanged(SensorEvent event) {
// pass on to C++ filter bank
stepDetector.filter(event.timestamp, event.values);
// collect accelerometer recording - adjust timebase to 0.0 sec beginning
// NOTE(review): assumes event.timestamp shares the elapsedRealtimeNanos timebase
// captured in onStartRecording() - per Android docs this holds for SensorEvent, confirm.
// NOTE(review): SensorData(long, float[]) stores the array WITHOUT copying, and the
// framework may reuse event.values - consider Arrays.copyOf() here. The recording
// list also grows unbounded for the duration of a run.
recording.add(new SensorData(event.timestamp - recordingStartTime, event.values));
// TODO: acquires at 8 ms intervals ... 125 Hz?!
// TODO: must compute actual sampling rate. and either downsample, or adapt the IIR filter parameters. (& length??) - easier to resample.
}
@Override
public void onAccuracyChanged(Sensor sensor, int accuracy) {}
private Notification buildNotification(String contentText) {
private Notification buildNotification() {
return new NotificationCompat.Builder(this, CHANNEL_ID)
.setContentTitle("Lockstep is reading your pace.")
.setContentText(contentText)
.setSmallIcon(android.R.drawable.ic_menu_compass)
.setContentTitle(getString(R.string.app_name))
.setContentText(getString(R.string.notification_text))
.setSmallIcon(getApplicationInfo().icon)
.setOngoing(true)
.build();
}
@@ -210,7 +279,7 @@ public class LstForegroundService extends Service implements SensorEventListener
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
NotificationChannel channel = new NotificationChannel(
CHANNEL_ID,
"Lockstep",
getString(R.string.app_name),
NotificationManager.IMPORTANCE_LOW
);

View File

@@ -1,8 +1,13 @@
package at.lockstep.app;
import android.app.Activity;
import android.content.ComponentName;
import android.content.Intent;
import android.content.ServiceConnection;
import android.os.Bundle;
import android.os.Environment;
import android.os.IBinder;
import android.util.Log;
import android.widget.Button;
import androidx.activity.result.ActivityResultLauncher;
@@ -16,7 +21,15 @@ import at.lockstep.saf.SafPickerActivity;
import at.lockstep.ui.SongPickerActivity;
import android.widget.Toast;
public class MainActivity extends AppCompatActivity {
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
public class MainActivity extends AppCompatActivity implements LstForegroundService.OnResultListener {
private Button btnStart;
private Button btnStop;
private Button btnMediaStoreBenchmark;
@@ -80,4 +93,84 @@ public class MainActivity extends AppCompatActivity {
launcher.launch(intent);
});
}
private ServiceConnection conn = new ServiceConnection() {
@Override
public void onServiceConnected(ComponentName componentName, IBinder iBinder) {
LstForegroundService service = ((LstForegroundService.LocalBinder) iBinder).getService();
service.setOnResultListener(MainActivity.this);
}
@Override
public void onServiceDisconnected(ComponentName componentName) {
}
};
@Override
protected void onStart() {
super.onStart();
// attach ServiceConnection (so we can attach a listener). incidentally, it seems to also create the service. (will currently create a PlaybackEngine, etc.)
// TODO: check if this delays starting the application
bindService(new Intent(this, LstForegroundService.class), conn, BIND_AUTO_CREATE);
}
@Override
protected void onStop() {
super.onStop();
unbindService(conn);
Log.i("MainActivity", "onStop()");
}
@Override
protected void onDestroy() {
super.onDestroy();
Log.i("MainActivity", "onDestroy()");
// TODO: since the Service keeps running, we must signal oboe to stop playing
// TODO: signal the pause to the C++ lib
startService(LstForegroundService.stopIntent(MainActivity.this));
}
private boolean isForeground = false;
@Override
protected void onPause() {
super.onPause();
isForeground = false;
//
// telltale signs: logcat: "PlaybackEngine - Buffer overrun on output for channel (0.000000)" or (1.000000)
Log.i("MainActivity", "onPause()");
}
@Override
protected void onResume() {
super.onResume();
isForeground = true;
}
LstForegroundService.SensorDataArray recording;
@Override
// Receives the finished accelerometer recording from LstForegroundService and
// persists it as JSON under the app's external Downloads dir.
// Drops the result when the Activity is backgrounded (deliberate: only a
// foreground-finished run is saved).
public void onResult(LstForegroundService.SensorDataArray recording) {
    if(!isForeground) {
        Log.i("MainActivity", "ignore onResult() from LstForegroundService due to backgrounded MainActivity");
        return;
    }
    // NOTE(review): the service clears this object right after onResult() returns,
    // so the stored reference will be empty afterwards - verify it is still needed.
    this.recording = recording;
    //
    // write accelero recording to file
    //
    File f = getExternalFilesDir(Environment.DIRECTORY_DOWNLOADS);
    String dir = f != null ? f.toString() : "/"; // make compiler happy
    long unixTime = System.currentTimeMillis() / 1000L;
    String fileName = dir + "/acc_" + unixTime + ".json";
    try (Writer writer = new FileWriter(fileName)) {
        Gson gson = new GsonBuilder().create();
        gson.toJson(recording, writer);
    } catch (IOException e) {
        // TODO error handling
        Log.e("MainActivity", "IOException writing recording: " + e.getMessage());
        throw new RuntimeException(e);
    }
    // fix: log success only AFTER the file has actually been written
    // (previously this ran before the try block, claiming success even on IOException)
    Log.i("MainActivity", "written acc rec to " + fileName);
}
}

View File

@@ -1,3 +1,4 @@
<resources>
<string name="app_name">Lockstep</string>
<string name="notification_text">Reading your steps …</string>
</resources>

View File

@@ -0,0 +1,46 @@
package at.lockstep;
import android.hardware.SensorEvent;
import org.junit.Test;
import java.util.ArrayList;
import java.util.Arrays;
import com.google.gson.Gson;
// Local JVM unit test demonstrating that Gson serializes the nested
// SensorData/SensorDataArray shape (mirrored from LstForegroundService)
// purely via field reflection - private fields become JSON keys.
public class GsonUnitTest {
/** single sensor sample */
static class SensorData {
private long timestamp;
private float[] values;
public SensorData(SensorEvent event) {
timestamp = event.timestamp;
values = Arrays.copyOf(event.values, event.values.length);
}
public SensorData(long timestamp, float[] values) {
this.timestamp = timestamp;
this.values = values;
}
}
/** array of sensor samples */
public static class SensorDataArray {
private ArrayList<SensorData> data = new ArrayList<SensorData>();
public void add(long timestamp, float[] values) { data.add(new SensorData(timestamp, values)); }
public void add(SensorEvent event) { data.add(new SensorData(event)); }
public void clear() { data.clear(); }
}
@Test
public void testGson() {
// two synthetic samples exercise both the array nesting and the float[] field
SensorDataArray recording = new SensorDataArray();
recording.add(0, new float[]{1, 2, 3});
recording.add(1, new float[]{10, 20, 30});
Gson gson = new Gson();
String json = gson.toJson(recording);
// NOTE(review): output is printed but not asserted - compare against the
// expected string below to turn this into a real regression test
System.out.println(json);
// {"data":[{"timestamp":0,"values":[1.0,2.0,3.0]},{"timestamp":1,"values":[10.0,20.0,30.0]}]}
}
}

View File

@@ -13,6 +13,7 @@ oboe = "1.10.0"
slf4jApi = "1.7.30"
recyclerview = "1.3.1"
appcompat = "1.7.1"
gson = "2.11.0"
[libraries]
androidx-core-ktx = { group = "androidx.core", name = "core-ktx", version.ref = "coreKtx" }
@@ -34,6 +35,7 @@ oboe = { module = "com.google.oboe:oboe", version.ref = "oboe" }
slf4j-api = { module = "org.slf4j:slf4j-api", version.ref = "slf4jApi" }
androidx-recyclerview = { group = "androidx.recyclerview", name = "recyclerview", version.ref = "recyclerview" }
androidx-appcompat = { group = "androidx.appcompat", name = "appcompat", version.ref = "appcompat" }
gson = { group = "com.google.code.gson", name="gson", version.ref = "gson" }
[plugins]
android-application = { id = "com.android.application", version.ref = "agp" }