Auditory Neurofeedback + Focus - 5.0.6 #982

Merged: 6 commits, Jul 31, 2021
4 changes: 4 additions & 0 deletions CHANGELOG.md
@@ -1,9 +1,13 @@
# v5.0.6

### Improvements
* Add Auditory Feedback to the Focus Widget Fixes #709

### Bug Fixes
* Fix drawing error in Control Panel WiFi Shield static IP Textfield
* Accommodate high-DPI screens Fixes #968
* Add Arduino Focus Fan example to networking test kit on GitHub repo
* Allow synthetic square wave expert mode keyboard shortcut for Cyton and Ganglion Fixes #976

# v5.0.5

130 changes: 130 additions & 0 deletions OpenBCI_GUI/AuditoryNeurofeedback.pde
@@ -0,0 +1,130 @@
//Used in the Focus Widget to provide auditory neurofeedback
//Adjust amplitude of calming audio samples using normalized band power data or predicted metric

Minim minim;
FilePlayer[] auditoryNfbFilePlayers;
ddf.minim.ugens.Gain[] auditoryNfbGains;
AudioOutput audioOutput;

//Pre-load audio files into memory in delayedSetup so playback can start without delay
void asyncLoadAudioFiles() {
final int _numSoundFiles = 5;
minim = new Minim(this);
auditoryNfbFilePlayers = new FilePlayer[_numSoundFiles];
auditoryNfbGains = new ddf.minim.ugens.Gain[_numSoundFiles];
audioOutput = minim.getLineOut();
println("OpenBCI_GUI: AuditoryFeedback: Loading Audio...");
for (int i = 0; i < _numSoundFiles; i++) {
//Use large buffer size and cache files in memory
auditoryNfbFilePlayers[i] = new FilePlayer( minim.loadFileStream("bp" + (i+1) + ".mp3", 2048, true) );
auditoryNfbGains[i] = new ddf.minim.ugens.Gain(-15.0f);
auditoryNfbFilePlayers[i].patch(auditoryNfbGains[i]).patch(audioOutput);
}
println("OpenBCI_GUI: AuditoryFeedback: Done Loading Audio!");
}
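//Clarifying note (illustrative, not part of this PR's diff): Minim's loadFileStream(filename, bufferSize, inMemory)
//reads "bp1.mp3".."bp5.mp3" from the sketch's data folder. The 2048-frame buffer is the playback read size,
//and inMemory = true caches the whole file up front, so looping playback never touches disk.
//Each FilePlayer is then patched through its own Gain ugen into the shared line out,
//which lets the five samples be attenuated independently.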

class AuditoryNeurofeedback {

private int x, y, w, h;
private ControlP5 localCP5;
public Button startStopButton;
public Button modeButton;
private boolean usingBandPowers = false;
//There will always be 5 band powers, and 5 possible concurrent audio files for playback
private final int NUM_SOUND_FILES = auditoryNfbFilePlayers.length;
private final float MIN_GAIN = -42.0;
private final float MAX_GAIN = -7.0;
private final int MAX_BUTTON_W = 120;
private int buttonW = 120;
private int buttonH;

AuditoryNeurofeedback(int _x, int _y, int _w, int _h) {
localCP5 = new ControlP5(ourApplet);
localCP5.setGraphics(ourApplet, 0,0);
localCP5.setAutoDraw(false);
buttonH = _h;
createStartStopButton(_x, _y, buttonW, buttonH);
createModeButton(_x, _y, buttonW, buttonH);
}

//Use band powers or prediction value to control volume of each sound file
public void update(double[] bandPowers, float predictionVal) {
if (usingBandPowers) {
for (int i = 0; i < NUM_SOUND_FILES; i++) {
float gain = map((float)bandPowers[i], 0.1, .7, MIN_GAIN + 20f, MAX_GAIN);
auditoryNfbGains[i].setValue(gain);
}
} else {
float gain = map(predictionVal, 0.0, 1.0, MIN_GAIN, MAX_GAIN);
for (int i = 0; i < NUM_SOUND_FILES; i++) {
auditoryNfbGains[i].setValue(gain);
}
}
}
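//Worked example (illustrative, not part of this PR's diff): map() interpolates linearly, so in
//band-power mode a normalized power of 0.4 gives
//  gain = -22.0 + ((0.4 - 0.1) / (0.7 - 0.1)) * (-7.0 - (-22.0)) = -14.5 dB
//while metric mode maps the 0.0..1.0 prediction across the full -42 dB..-7 dB range,
//raising or lowering all five samples together.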

public void draw() {
localCP5.draw();
}

public void screenResized(int _x, int _y, int _w, int _h) {
localCP5.setGraphics(ourApplet, 0, 0);
buttonW = (_w - 6) / 2;
buttonW = buttonW > MAX_BUTTON_W ? MAX_BUTTON_W : buttonW;
startStopButton.setPosition(_x - buttonW - 3, _y);
startStopButton.setSize(buttonW, _h);
modeButton.setPosition(_x + 3, _y);
modeButton.setSize(buttonW, _h);
}

public void killAudio() {
for (int i = 0; i < NUM_SOUND_FILES; i++) {
auditoryNfbFilePlayers[i].pause();
auditoryNfbFilePlayers[i].rewind();
}
}

private void createStartStopButton(int _x, int _y, int _w, int _h) {
//Use the generalized createButton helper, which saves code through shared patterns and method overloading
startStopButton = createButton(localCP5, "startStopButton", "Turn Audio On", _x, _y, _w, _h, p5, 12, colorNotPressed, OPENBCI_DARKBLUE);
//Set the border color explicitly
startStopButton.setBorderColor(OBJECT_BORDER_GREY);
//For this button, only call the callback listener on mouse release
startStopButton.onRelease(new CallbackListener() {
public void controlEvent(CallbackEvent theEvent) {
//Ignore button interaction while a TopNav dropdown (config or layout selector) is open
if (!topNav.configSelector.isVisible && !topNav.layoutSelector.isVisible) {
if (auditoryNfbFilePlayers[0].isPlaying()) {
killAudio();
startStopButton.getCaptionLabel().setText("Turn Audio On");
} else {
for (int i = 0; i < NUM_SOUND_FILES; i++) {
auditoryNfbFilePlayers[i].loop();
}
startStopButton.getCaptionLabel().setText("Turn Audio Off");
}
}
}
});
startStopButton.setDescription("Start and Stop Auditory Feedback.");
}

private void createModeButton(int _x, int _y, int _w, int _h) {
//Use the generalized createButton helper, which saves code through shared patterns and method overloading
modeButton = createButton(localCP5, "modeButton", "Use Band Powers", _x, _y, _w, _h, p5, 12, colorNotPressed, OPENBCI_DARKBLUE);
//Set the border color explicitly
modeButton.setBorderColor(OBJECT_BORDER_GREY);
//For this button, only call the callback listener on mouse release
modeButton.onRelease(new CallbackListener() {
public void controlEvent(CallbackEvent theEvent) {
//Ignore button interaction while a TopNav dropdown (config or layout selector) is open
if (!topNav.configSelector.isVisible && !topNav.layoutSelector.isVisible) {
String s = !usingBandPowers ? "Use Metric" : "Use Band Powers";
modeButton.getCaptionLabel().setText(s);
usingBandPowers = !usingBandPowers;
}
}
});
modeButton.setDescription("Change Auditory Feedback mode. Use the Metric to control all notes at once, or use Band Powers to control certain notes of the chord.");
}

}
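For context, a minimal sketch of how a widget might drive this class, assuming the GUI globals used above (ourApplet, navBarHeight, the preloaded file players). The helper names here are hypothetical; W_Focus.pde below shows the real wiring.

//Illustrative usage sketch (hypothetical helper names, not part of the PR)
AuditoryNeurofeedback nfb;

void setupFocusAudio(int x, int y, int w, int h) {
    asyncLoadAudioFiles();                                //load bp1..bp5.mp3 once at startup
    nfb = new AuditoryNeurofeedback(x + 5, y + 5, w/2 - 10, navBarHeight/2);
}

void onNewFocusData(double[] normalizedBandPowers, double prediction) {
    nfb.update(normalizedBandPowers, (float)prediction);  //re-scales the five Gain ugens
}

void drawFocusAudio() {
    nfb.draw();                                           //draws the Start/Stop and Mode buttons
}

void onSessionHalt() {
    nfb.killAudio();                                      //pause and rewind all players
}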
1 change: 1 addition & 0 deletions OpenBCI_GUI/ControlPanel.pde
@@ -2786,6 +2786,7 @@ class InitBox {
controlPanel.dataLogBoxGanglion.setSessionTextfieldText(directoryManager.getFileNameDateTime());
controlPanel.dataLogBoxGalea.setSessionTextfieldText(directoryManager.getFileNameDateTime());
controlPanel.wifiBox.setStaticIPTextfield(wifi_ipAddress);
w_focus.killAuditoryFeedback();
haltSystem();
}
}
6 changes: 3 additions & 3 deletions OpenBCI_GUI/DataProcessing.pde
@@ -64,12 +64,12 @@ void processNewData() {
}
}

void initializeFFTObjects(FFT[] fftBuff, float[][] dataProcessingRawBuffer, int Nfft, float fs_Hz) {
void initializeFFTObjects(ddf.minim.analysis.FFT[] fftBuff, float[][] dataProcessingRawBuffer, int Nfft, float fs_Hz) {

float[] fooData;
for (int Ichan=0; Ichan < nchan; Ichan++) {
//make the FFT objects...Following "SoundSpectrum" example that came with the Minim library
fftBuff[Ichan].window(FFT.HAMMING);
fftBuff[Ichan].window(ddf.minim.analysis.FFT.HAMMING);

//do the FFT on the initial data
if (isFFTFiltered == true) {
@@ -237,7 +237,7 @@ class DataProcessing {
}
}

public void process(float[][] data_forDisplay_uV, FFT[] fftData) { //holds the FFT (frequency spectrum) of the latest data
public void process(float[][] data_forDisplay_uV, ddf.minim.analysis.FFT[] fftData) { //holds the FFT (frequency spectrum) of the latest data

float prevFFTdata[] = new float[fftBuff[0].specSize()];

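A note on the ddf.minim.analysis.FFT changes above (my reading, not stated in the PR): with more of Minim now in use, the short name FFT is presumably ambiguous with another FFT type on the classpath, so Minim's class is spelled out wherever it appears. A minimal sketch of the pattern, with placeholder values:

//Fully qualify Minim's FFT so the short name cannot collide with another FFT type
ddf.minim.analysis.FFT[] fftBuff = new ddf.minim.analysis.FFT[nchan];
for (int Ichan = 0; Ichan < nchan; Ichan++) {
    fftBuff[Ichan] = new ddf.minim.analysis.FFT(1024, 250f);   //Nfft and sample rate (Hz) are placeholders
    fftBuff[Ichan].window(ddf.minim.analysis.FFT.HAMMING);     //Hamming window, as in DataProcessing.pde
}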
4 changes: 2 additions & 2 deletions OpenBCI_GUI/Info.plist.tmpl
@@ -23,7 +23,7 @@
<key>CFBundleShortVersionString</key>
<string>5</string>
<key>CFBundleVersion</key>
<string>5.0.5</string>
<string>5.0.6</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>NSHumanReadableCopyright</key>
@@ -32,7 +32,7 @@
Copyright © 2021 OpenBCI
</string>
<key>CFBundleGetInfoString</key>
<string>May 2021</string>
<string>July 2021</string>
<!-- End of the set that can be customized -->

@@jvm_runtime@@
12 changes: 7 additions & 5 deletions OpenBCI_GUI/OpenBCI_GUI.pde
@@ -60,8 +60,8 @@ import http.requests.*;
// Global Variables & Instances
//------------------------------------------------------------------------
//Used to check GUI version in TopNav.pde and displayed on the splash screen on startup
String localGUIVersionString = "v5.0.6-alpha.2";
String localGUIVersionDate = "June 2021";
String localGUIVersionString = "v5.0.6";
String localGUIVersionDate = "July 2021";
String guiLatestVersionGithubAPI = "https://api.github.com/repos/OpenBCI/OpenBCI_GUI/releases/latest";
String guiLatestReleaseLocation = "https://github.com/OpenBCI/OpenBCI_GUI/releases/latest";

@@ -285,7 +285,7 @@ DirectoryManager directoryManager;
final int navBarHeight = 32;
TopNav topNav;

FFT[] fftBuff = new FFT[nchan]; //from the minim library
ddf.minim.analysis.FFT[] fftBuff = new ddf.minim.analysis.FFT[nchan]; //from the minim library
boolean isFFTFiltered = true; //yes by default ... this is used in dataProcessing.pde to determine which uV array feeds the FFT calculation

//------------------------------------------------------------------------
@@ -432,6 +432,8 @@ void delayedSetup() {
sessionTimeElapsed = new StopWatch();
streamTimeElapsed = new StopWatch();

asyncLoadAudioFiles();

synchronized(this) {
// Instantiate ControlPanel in the synchronized block.
// It's important to avoid instantiating a ControlP5 during a draw() call
@@ -695,7 +697,7 @@ void initFFTObjectsAndBuffer() {
//initialize the FFT objects
for (int Ichan=0; Ichan < nchan; Ichan++) {
// verbosePrint("Init FFT Buff – " + Ichan);
fftBuff[Ichan] = new FFT(getNfftSafe(), currentBoard.getSampleRate());
fftBuff[Ichan] = new ddf.minim.analysis.FFT(getNfftSafe(), currentBoard.getSampleRate());
} //make the FFT objects

//Attempt initialization. If error, print to console and exit function.
@@ -912,7 +914,7 @@ void systemInitSession() {
void updateToNChan(int _nchan) {
nchan = _nchan;
settings.slnchan = _nchan; //used in SoftwareSettings.pde only
fftBuff = new FFT[nchan]; //reinitialize the FFT buffer
fftBuff = new ddf.minim.analysis.FFT[nchan]; //reinitialize the FFT buffer
println("OpenBCI_GUI: Channel count set to " + str(nchan));
}

2 changes: 1 addition & 1 deletion OpenBCI_GUI/W_EMG.pde
@@ -268,7 +268,7 @@ class W_emg extends Widget {

public void process(
float[][] data_forDisplay_uV, //this data has been filtered and is ready for plotting on the screen
FFT[] fftData) { //holds the FFT (frequency spectrum) of the latest data
ddf.minim.analysis.FFT[] fftData) { //holds the FFT (frequency spectrum) of the latest data

//for example, you could loop over each EEG channel to do some sort of time-domain processing
//using the sample values that have already been filtered, as will be plotted on the display
47 changes: 35 additions & 12 deletions OpenBCI_GUI/W_Focus.pde
@@ -25,15 +25,17 @@ class W_Focus extends Widget {

//to see all core variables/methods of the Widget class, refer to Widget.pde
//put your custom variables here...
private ControlP5 focus_cp5;
private Button widgetTemplateButton;
//private ControlP5 focus_cp5;
//private Button widgetTemplateButton;
private ChannelSelect focusChanSelect;
private boolean prevChanSelectIsVisible = false;
private AuditoryNeurofeedback auditoryNeurofeedback;


private Grid dataGrid;
private final int numTableRows = 6;
private final int numTableColumns = 2;
private final int tableWidth = 142;
private final int NUM_TABLE_ROWS = 6;
private final int NUM_TABLE_COLUMNS = 2;
//private final int TABLE_WIDTH = 142;
private int tableHeight = 0;
private int cellHeight = 10;
private DecimalFormat df = new DecimalFormat("#.0000");
@@ -71,6 +73,8 @@ class W_Focus extends Widget {
focusChanSelect = new ChannelSelect(pApplet, this, x, y, w, navH, "FocusChannelSelect");
focusChanSelect.activateAllButtons();

auditoryNeurofeedback = new AuditoryNeurofeedback(x + PAD_FIVE, y + PAD_FIVE, w/2 - PAD_FIVE*2, navBarHeight/2);

exgChannels = currentBoard.getEXGChannels();
channelCount = currentBoard.getNumEXGChannels();
dataArray = new double[channelCount][];
@@ -87,7 +91,7 @@


//Create data table
dataGrid = new Grid(numTableRows, numTableColumns, cellHeight);
dataGrid = new Grid(NUM_TABLE_ROWS, NUM_TABLE_COLUMNS, cellHeight);
dataGrid.setTableFontAndSize(p6, 10);
dataGrid.setDrawTableBorder(true);
dataGrid.setString("Metric Value", 0, 0);
@@ -98,9 +102,9 @@
dataGrid.setString("Gamma (30-45Hz)", 5, 0);

//Instantiate local cp5 for this box. This allows extra control of drawing cp5 elements specifically inside this class.
focus_cp5 = new ControlP5(ourApplet);
focus_cp5.setGraphics(ourApplet, 0,0);
focus_cp5.setAutoDraw(false);
//focus_cp5 = new ControlP5(ourApplet);
//focus_cp5.setGraphics(ourApplet, 0,0);
//focus_cp5.setAutoDraw(false);

//create our focus graph
updateGraphDims();
@@ -128,6 +132,9 @@
predictionExceedsThreshold = metricPrediction > focusThreshold.getValue();
}

//ignore top left button interaction when widgetSelector dropdown is active
lockElementOnOverlapCheck(auditoryNeurofeedback.startStopButton);

//put your code here...
}

Expand All @@ -154,7 +161,8 @@ class W_Focus extends Widget {
}

//This draws all cp5 objects in the local instance
focus_cp5.draw();
//focus_cp5.draw();
auditoryNeurofeedback.draw();

//Draw the graph
focusBar.draw();
@@ -166,14 +174,15 @@
super.screenResized(); //calls the parent screenResized() method of Widget (DON'T REMOVE)

//Very important to allow users to interact with objects after app resize
focus_cp5.setGraphics(ourApplet, 0, 0);
//focus_cp5.setGraphics(ourApplet, 0, 0);

resizeTable();

//We need to set the position of our Cp5 object after the screen is resized
//widgetTemplateButton.setPosition(x + w/2 - widgetTemplateButton.getWidth()/2, y + h/2 - widgetTemplateButton.getHeight()/2);

updateStatusCircle();
updateAuditoryNeurofeedbackPosition();

updateGraphDims();
focusBar.screenResized(graphX, graphY, graphW, graphH);
@@ -212,12 +221,18 @@
dataGrid.setHorizontalCenterTextInCells(true);
}

private void updateAuditoryNeurofeedbackPosition() {
int extraPadding = focusChanSelect.isVisible() ? navHeight : 0;
int subContainerMiddleX = x + w/4;
auditoryNeurofeedback.screenResized(subContainerMiddleX, (int)(y + h/2 - navHeight + extraPadding), w/2 - PAD_FIVE*2, navBarHeight/2);
}

private void updateStatusCircle() {
float upperLeftContainerW = w/2;
float upperLeftContainerH = h/2;
float min = min(upperLeftContainerW, upperLeftContainerH);
xc = x + w/4;
yc = y + h/4;
yc = y + h/4 - navHeight;
wc = min * (3f/5);
hc = wc;
}
@@ -263,6 +278,9 @@
//Keep this here
double prediction = mlModel.predict(featureVector);
//println("Concentration: " + prediction);

//Send band power and prediction data to AuditoryNeurofeedback class
auditoryNeurofeedback.update(bands.getLeft(), (float)prediction);

return prediction;

@@ -361,6 +379,7 @@
int factor = focusChanSelect.isVisible() ? 1 : -1;
yc += navHeight * factor;
resizeTable();
updateAuditoryNeurofeedbackPosition();
}

public void setFocusHorizScale(int n) {
@@ -387,6 +406,10 @@
public int getMetricExceedsThreshold() {
return predictionExceedsThreshold ? 1 : 0;
}

public void killAuditoryFeedback() {
auditoryNeurofeedback.killAudio();
}
}; //end of class

//The following global functions are used by the Focus widget dropdowns. This approach keeps the code to a minimum.
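To summarize the data flow added to the Focus widget (a sketch of my reading of the hunks above; it assumes bands.getLeft() returns the five normalized band powers and mlModel.predict() returns a value in roughly 0..1, as the surrounding code suggests):

//Illustrative summary, not part of the diff
double[] bandPowers = bands.getLeft();                     //delta, theta, alpha, beta, gamma
double prediction = mlModel.predict(featureVector);        //focus metric, roughly 0.0..1.0
auditoryNeurofeedback.update(bandPowers, (float)prediction);
//"Use Band Powers" mode: each of the five looping samples gets its own gain from its band power.
//"Use Metric" mode: a single gain derived from the prediction is applied to all five samples.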
Binary file added OpenBCI_GUI/data/bp1.mp3 (not shown)
Binary file added OpenBCI_GUI/data/bp2.mp3 (not shown)
Binary file added OpenBCI_GUI/data/bp3.mp3 (not shown)
Binary file added OpenBCI_GUI/data/bp4.mp3 (not shown)
Binary file added OpenBCI_GUI/data/bp5.mp3 (not shown)