Samsung Gear Live audio encoding - audio-recording

I'm currently working on an Android Wear app, and I'm looking into audio recording. I've followed the tutorial on the Android developer website, and it works well on my Nexus 7, but not on the Samsung Gear Live I have for testing. The application simply crashes every time.
Digging into the problem, I narrowed it down to two parameters the recorder needs in order to work: the OutputFormat and the AudioEncoder. I tried every combination of the available OutputFormats and AudioEncoders, but without any luck.
So here's my question: has anyone encountered the same problem? And if so, did you find the right combination of format/encoder?
I'm not pasting my code, as it's exactly the same as in the documentation. Here is the link if you want to have a look: http://developer.android.com/guide/topics/media/audio-capture.html
Thank you in advance for your answers and your time :)

The root problem is that you cannot use MediaRecorder on the watch, even though the Android audio capture example does; you need to use the AudioRecord class instead.
I'd also recommend streaming the raw data back to your phone and assembling it into an audio file there, as doing that on a wearable is very thorny.
For more detail, see this answer.
I have included a sample below that I got working.
import android.app.Activity;
import android.content.Intent;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.speech.RecognizerIntent;
import android.support.wearable.view.WatchViewStub;
import android.util.Log;
import android.widget.TextView;
import android.view.View;
import java.util.List;
public class MainActivity extends Activity {
private static final String TAG = MainActivity.class.getName();
private static final int SPEECH_REQUEST_CODE = 1;
private static final int RECORDER_SAMPLERATE = 44100;
private static final int RECORDER_CHANNELS = AudioFormat.CHANNEL_IN_STEREO;
private static final int RECORDER_AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;
private TextView mTextView;
private AudioRecord recorder;
private int bufferSize = 0;
private Thread recordingThread = null;
private volatile boolean isRecording;
@Override
protected void onCreate(Bundle savedInstanceState) {
Log.v(TAG, "Creating MainActivity");
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
final WatchViewStub stub = (WatchViewStub) findViewById(R.id.watch_view_stub);
stub.setOnLayoutInflatedListener(new WatchViewStub.OnLayoutInflatedListener() {
@Override
public void onLayoutInflated(WatchViewStub stub) {
mTextView = (TextView) stub.findViewById(R.id.text);
}
});
bufferSize =
AudioRecord.getMinBufferSize(RECORDER_SAMPLERATE,
RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING);
}
public void handleRecordButtonClick(View view) {
startAudioCapture();
}
public void handleStopButtonClick(View view) {
stopAudioCapture();
}
private void startAudioCapture() {
Log.v(TAG, "Starting audio capture");
recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
RECORDER_SAMPLERATE, RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING, bufferSize);
if (recorder.getState() == AudioRecord.STATE_INITIALIZED) {
recorder.startRecording();
isRecording = true;
Log.v(TAG, "Successfully started recording");
recordingThread = new Thread(new Runnable() {
@Override
public void run() {
processRawAudioData();
}
}, "AudioRecorder Thread");
recordingThread.start();
} else {
Log.v(TAG, "Failed to started recording");
}
}
private void stopAudioCapture() {
Log.v(TAG, "Stop audio capture");
recorder.stop();
isRecording = false;
recorder.release();
}
private void processRawAudioData() {
byte data[] = new byte[bufferSize];
int read = 0;
while(isRecording) {
read = recorder.read(data, 0, bufferSize);
if(AudioRecord.ERROR_INVALID_OPERATION != read) {
Log.v(TAG, "Successfully read " + data.length + " bytes of audio");
}
}
}
}
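To illustrate the "stream the raw data back to your phone" part, here is a minimal sketch using the GoogleApiClient-based ChannelApi from Play Services Wearable. The class name AudioStreamer and the "/audio" channel path are placeholders of mine, not part of the original sample, and every call here blocks, so run it from the recording thread rather than the main thread; treat it as an outline, not drop-in code.
import android.content.Context;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.wearable.Channel;
import com.google.android.gms.wearable.Node;
import com.google.android.gms.wearable.Wearable;
import java.io.IOException;
import java.io.OutputStream;
import java.util.List;
public class AudioStreamer {
    // Arbitrary path; the handheld side must listen for the same path.
    private static final String AUDIO_CHANNEL_PATH = "/audio";
    private final GoogleApiClient client;
    private OutputStream output;
    public AudioStreamer(Context context) {
        client = new GoogleApiClient.Builder(context)
                .addApi(Wearable.API)
                .build();
    }
    // Blocks while connecting and opening the channel; returns null if no phone is connected.
    public OutputStream open() throws IOException {
        client.blockingConnect();
        List<Node> nodes = Wearable.NodeApi.getConnectedNodes(client).await().getNodes();
        if (nodes.isEmpty()) {
            return null;
        }
        Channel channel = Wearable.ChannelApi
                .openChannel(client, nodes.get(0).getId(), AUDIO_CHANNEL_PATH)
                .await()
                .getChannel();
        output = channel.getOutputStream(client).await().getOutputStream();
        return output;
    }
    // Call this from processRawAudioData() with the buffer and the number of bytes read.
    public void write(byte[] data, int length) throws IOException {
        if (output != null) {
            output.write(data, 0, length);
        }
    }
    public void close() throws IOException {
        if (output != null) {
            output.close();
        }
        client.disconnect();
    }
}
On the phone side, a WearableListenerService can pick the channel up in onChannelOpened and read the same bytes back out; that is the place to assemble them into an actual audio file.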

Related

Restarting or resuming a clip hangs the music player's GUI, while pressing pause and then play resumes playing from the stopping point

This program is a music player that allows the user to pick a .wav file and play, pause, resume, and restart it through a Clip object and an AudioInputStream. The audio input stream loads a file chosen by the user via a FileChooser. The program can play, pause, and resume by selecting a file, pressing play, pause, then play again, but it does not play when the restart or resume methods are invoked via their respective buttons. Instead, the program hangs until the X button is clicked. I think it has something to do with the resetAudioStream method, but I am unsure what. Maybe something to do with ending the old clip and creating a new Clip instance. Please review the logic and let me know what is making it hang and how that could be remedied.
package sample;
import javafx.application.Application;
import javafx.fxml.FXMLLoader;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.scene.control.Button;
import javafx.scene.layout.GridPane;
import javafx.scene.layout.VBox;
import javafx.stage.FileChooser;
import javafx.stage.Stage;
import javax.sound.sampled.*;
import java.io.File;
import java.io.IOException;
public class Main extends Application {
static File musicfile;
static Long currentFrame;
static Clip clip;
static String status = "play";
static AudioInputStream audioInputStream;
static String filePath;
public void SimpleAudioPlayer()
throws UnsupportedAudioFileException,
IOException, LineUnavailableException
{
// create AudioInputStream object
audioInputStream =
AudioSystem.getAudioInputStream(new File(filePath).getAbsoluteFile());
// create clip reference
clip = AudioSystem.getClip();
// open audioInputStream to the clip
clip.open(audioInputStream);
clip.loop(Clip.LOOP_CONTINUOUSLY);
}
@Override
public void start(Stage primaryStage) throws Exception{
Parent root = FXMLLoader.load(getClass().getResource("sample.fxml"));
primaryStage.setTitle("Music Player");
GridPane gp = new GridPane();
Button selectFile = new Button("Select File");
GridPane.setConstraints(selectFile, 0,0);
selectFile.setOnAction(event->{
FileChooser filechooser = new FileChooser();
// create AudioInputStream object
try {
musicfile = filechooser.showOpenDialog(null);
audioInputStream = AudioSystem.getAudioInputStream(musicfile);
clip = AudioSystem.getClip();
// open audioInputStream to the clip
clip.open(audioInputStream);
}catch(IOException | UnsupportedAudioFileException | LineUnavailableException e){
e.printStackTrace();
}
});
Button play = new Button("Play");
GridPane.setConstraints(play, 1,0);
play.setOnAction(event->{
if(status == "play") {
clip.loop(Clip.LOOP_CONTINUOUSLY);
}
play();
});
Button pause = new Button("Pause");
GridPane.setConstraints(pause, 2,0);
pause.setOnAction(event -> pause());
Button restart = new Button("Restart");
GridPane.setConstraints(restart, 0,1);
restart.setOnAction(event -> {
try{
restart();
}
catch(IOException | UnsupportedAudioFileException | LineUnavailableException e){
e.printStackTrace();}
});
Button resume = new Button("Resume");
GridPane.setConstraints(resume, 1,1);
resume.setOnAction(event -> {
try {
resumeAudio();
}catch(IOException | LineUnavailableException | UnsupportedAudioFileException e){
e.printStackTrace();
}
});
gp.getChildren().addAll(play,selectFile, pause, restart, resume);
primaryStage.setScene(new Scene(gp, 300, 275));
primaryStage.show();
}
public void play()
{
//start the clip
clip.start();
status = "play";
}
// Method to pause the audio
public void pause()
{
if (status.equals("paused"))
{
System.out.println("audio is already paused");
return;
}
currentFrame =
clip.getMicrosecondPosition();
clip.stop();
status = "paused";
}
// Method to resume the audio
public void resumeAudio() throws UnsupportedAudioFileException,
IOException, LineUnavailableException
{
if (status.equals("play"))
{
System.out.println("Audio is already "+
"being played");
return;
}
clip.close();
resetAudioStream();
clip.setMicrosecondPosition(currentFrame);
status = "play";
play();
}
// Method to restart the audio
public void restart() throws IOException, LineUnavailableException,
UnsupportedAudioFileException
{
clip.stop();
clip.close();
resetAudioStream();
currentFrame = 0L;
clip.setMicrosecondPosition(0);
status = "play";
play();
}
// Method to stop the audio
public void stop() throws UnsupportedAudioFileException,
IOException, LineUnavailableException
{
currentFrame = 0L;
clip.stop();
clip.close();
}
// Method to jump over a specific part
public void jump(long c) throws UnsupportedAudioFileException, IOException,
LineUnavailableException
{
if (c > 0 && c < clip.getMicrosecondLength())
{
clip.stop();
clip.close();
resetAudioStream();
currentFrame = c;
clip.setMicrosecondPosition(c);
this.play();
}
}
// Method to reset audio stream
public void resetAudioStream() throws UnsupportedAudioFileException, IOException,
LineUnavailableException
{
audioInputStream = AudioSystem.getAudioInputStream(musicfile);
clip = AudioSystem.getClip();
clip.open(audioInputStream);
clip.loop(Clip.LOOP_CONTINUOUSLY);
}
public static void main(String[] args) {
launch(args);
}
}
It is quite simple to get the required functionality with a MediaPlayer:
import java.net.URI;
import javafx.application.Application;
import javafx.scene.Scene;
import javafx.scene.control.Button;
import javafx.scene.control.Label;
import javafx.scene.layout.GridPane;
import javafx.scene.media.Media;
import javafx.scene.media.MediaPlayer;
import javafx.scene.media.MediaPlayer.Status;
import javafx.stage.Stage;
import javafx.util.Duration;
/*
* If you get "cannot access class com.sun.glass.utils.NativeLibLoader" exception you may need to
* add a VM argument: --add-modules javafx.controls,javafx.media as explained here:
* https://stackoverflow.com/questions/53237287/module-error-when-running-javafx-media-application
*/
public class Main extends Application {
private MediaPlayer player;
private static final long JUMP_BY = 5000;//millis
@Override
public void start(Stage primaryStage) throws Exception{
URI uri = new URI("https://www.soundhelix.com/examples/mp3/SoundHelix-Song-5.mp3");
Media media = new Media(uri.toString());
//OR Media media = new Media("https://www.soundhelix.com/examples/mp3/SoundHelix-Song-5.mp3");
player = new MediaPlayer(media);
player.setOnError(() -> System.out.println(media.getError().toString()));
GridPane gp = new GridPane();
gp.setHgap(10);
Button play = new Button("Play");
GridPane.setConstraints(play, 0,0);
play.setOnAction(event-> playAudio());
Button pause = new Button("Pause");
GridPane.setConstraints(pause, 1,0);
pause.setOnAction(event -> pauseAudio());
Button resume = new Button("Resume");
GridPane.setConstraints(resume, 2,0);
resume.setOnAction(event -> resumeAudio());
Button stop = new Button("Stop");
GridPane.setConstraints(stop, 3,0);
stop.setOnAction(event -> stopAudio());
Button restart = new Button("Restart");
GridPane.setConstraints(restart, 4,0);
restart.setOnAction(event -> restartAudio());
Button jump = new Button("Jump >");
GridPane.setConstraints(jump, 5,0);
jump.setOnAction(event -> jump(JUMP_BY));
Label time = new Label();
GridPane.setConstraints(time, 6,0);
time.textProperty().bind( player.currentTimeProperty().asString("%.4s") );
gp.getChildren().addAll(play, pause, resume, stop, restart, jump, time);
primaryStage.setScene(new Scene(gp, 400, 45));
primaryStage.show();
}
//play audio
public void playAudio()
{
player.play();
}
//pause audio
public void pauseAudio()
{
if (player.getStatus().equals(Status.PAUSED))
{
System.out.println("audio is already paused");
return;
}
player.pause();
}
//resume audio
public void resumeAudio()
{
if (player.getStatus().equals(Status.PLAYING))
{
System.out.println("Audio is already playing");
return;
}
playAudio();
}
//restart audio
public void restartAudio()
{
player.seek(Duration.ZERO);
playAudio();
}
// stop audio
public void stopAudio()
{
player.stop();
}
//jump by c millis
public void jump(long c)
{
player.seek(player.getCurrentTime().add(Duration.millis(c)));
}
public static void main(String[] args) {
launch(args);
}
}

HERE SDK TrafficUpdater.request(GeoCoordinate, TrafficUpdater.Listener) not returning traffic results

I have been working on integrating several HERE features into an app I am working on. Right now I am trying to add traffic data to the application. The default auto-updates aren't quite frequent enough for me (~1 min), so I am trying to use the TrafficUpdater.request(GeoCoordinate, TrafficUpdater.Listener) to manually retrieve traffic information every 5 seconds or so. The problem is, although the request line executes, the listener is never called, and I never receive any traffic updates. Below is my activity:
import android.os.CountDownTimer;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
import com.here.android.mpa.common.GeoCoordinate;
import com.here.android.mpa.common.GeoPosition;
import com.here.android.mpa.common.MapSettings;
import com.here.android.mpa.common.OnEngineInitListener;
import com.here.android.mpa.common.PositioningManager;
import com.here.android.mpa.guidance.TrafficUpdater;
import com.here.android.mpa.mapping.Map;
import com.here.android.mpa.mapping.MapFragment;
import com.here.android.mpa.mapping.MapTrafficLayer;
import com.here.android.mpa.mapping.MapView;
import com.here.android.mpa.mapping.TrafficEvent;
import java.io.File;
import java.lang.ref.WeakReference;
public class MainActivity extends AppCompatActivity {
private Map map;
private MapFragment mapFragment;
private TrafficUpdater trafficUpdater;
private PositioningManager.OnPositionChangedListener onPositionChangedListener = new PositioningManager.OnPositionChangedListener() {
@Override
public void onPositionUpdated(PositioningManager.LocationMethod locationMethod, GeoPosition geoPosition, boolean b) {
onLocationUpdate(geoPosition);
}
@Override
public void onPositionFixChanged(PositioningManager.LocationMethod locationMethod, PositioningManager.LocationStatus locationStatus) {
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
MapSettings.setIsolatedDiskCacheRootPath(
getApplicationContext().getExternalFilesDir(null) + File.separator + ".here-maps",
"MAP_SERVICE");
mapFragment = (MapFragment) getFragmentManager().findFragmentById(R.id.map);
mapFragment.init(new OnEngineInitListener() {
@Override
public void onEngineInitializationCompleted(Error error) {
if (error == Error.NONE) {
map = mapFragment.getMap();
initTracker();
}
}
});
}
private void initTracker() {
trafficUpdater = TrafficUpdater.getInstance();
trafficUpdater.enableUpdate(false);
PositioningManager positioningManager = PositioningManager.getInstance();
positioningManager.addListener(new WeakReference<PositioningManager.OnPositionChangedListener>(onPositionChangedListener));
mapFragment.getPositionIndicator().setVisible(true);
positioningManager.start(PositioningManager.LocationMethod.GPS_NETWORK);
}
private boolean isTimerRunning = false;
CountDownTimer trafficTimer = new CountDownTimer(5000,5000) {
@Override
public void onTick(long millisUntilFinished) {
}
@Override
public void onFinish() {
isTimerRunning = false;
getTrafficInfo();
}
};
private GeoPosition lastGeoPosition;
private void onLocationUpdate(GeoPosition geoPosition) {
map.setCenter(geoPosition.getCoordinate(), Map.Animation.NONE);
Log.i("____MAINACTIVITY", "location update");
lastGeoPosition = geoPosition;
if(!isTimerRunning) {
trafficTimer.cancel();
trafficTimer.start();
isTimerRunning = true;
}
}
private TrafficUpdater.Listener trafficListener = new TrafficUpdater.Listener() {
@Override
public void onStatusChanged(TrafficUpdater.RequestState requestState) {
Log.i("____MAINACTIVITY", requestState.name());
}
};
private void getTrafficInfo() {
if(lastGeoPosition != null) {
TrafficUpdater.RequestInfo requestInfo = trafficUpdater.request(lastGeoPosition.getCoordinate(), trafficListener);
Log.i("___MAINACTIVITY", requestInfo.getError().name());
}
}
}
I have tried several things to remedy this issue. First, I checked all of my app permissions and the project dashboard on the developer portal to ensure everything is set up properly, and it is. I was originally providing the listener as an anonymous class on the line where the request is executed, and that did not work. I moved the listener to a private member variable of the activity and provided it that way, but it still isn't working. I've checked the RequestInfo returned by the method, and it always indicates an error code of NONE, so it seems as though no errors are occurring. Lastly, I set my update frequency to once every 1.5 seconds (well above the default), and I still receive nothing. Does anyone know a solution to this problem? I feel as though it's something simple that I'm missing. Updates from the PositioningManager are coming through just fine, and the app is talking to our server with no problems, so I don't think it's a connectivity issue.
The traffic feed only provides updates on a roughly one-minute cycle, so forcing the application to request it more frequently won't yield fresher data. I would recommend keeping the default auto-updates.
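If you just want traffic shown and refreshed on the SDK's own schedule, a minimal sketch reusing the trafficUpdater and map fields from the question could look like this (setTrafficInfoVisible is assumed to be available on com.here.android.mpa.mapping.Map in the SDK version in use):
// Keep the SDK's built-in refresh cycle instead of polling every few seconds.
trafficUpdater = TrafficUpdater.getInstance();
trafficUpdater.enableUpdate(true);   // re-enable the default auto-updates
map.setTrafficInfoVisible(true);     // render the auto-updated traffic layer on the map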

JOGL installation to native library

How can we add JOGL to the native Java library so that we can run a JOGL program like any other Java program, using Notepad and without an IDE like NetBeans or Eclipse?
I have downloaded JOGL. To test it I have run the following example program given in the documentation.
But I get an error saying the package javax.media.opengl does not exist.
I read the installation guide on tutorialspoint, but it is written for an IDE.
Can't we run the JOGL program using Notepad?
import javax.media.opengl.GLAutoDrawable;
import javax.media.opengl.GLCapabilities;
import javax.media.opengl.GLEventListener;
import javax.media.opengl.GLProfile;
import javax.media.opengl.awt.GLCanvas;
import javax.swing.JFrame;
public class BasicFrame implements GLEventListener
{
@Override
public void display(GLAutoDrawable arg0)
{
// method body
}
@Override
public void dispose(GLAutoDrawable arg0)
{ //method body
}
@Override
public void init(GLAutoDrawable arg0)
{
// method body
}
@Override
public void reshape(GLAutoDrawable arg0, int arg1, int arg2, int arg3, int arg4) {
// method body
}
public static void main(String[] args)
{
//getting the capabilities object of GL2 profile
final GLProfile profile = GLProfile.get(GLProfile.GL2);
GLCapabilities capabilities = new GLCapabilities(profile);
// The canvas
final GLCanvas glcanvas = new GLCanvas(capabilities);
BasicFrame b = new BasicFrame();
glcanvas.addGLEventListener(b);
glcanvas.setSize(400, 400);
//creating frame
final JFrame frame = new JFrame(" Basic Frame");
//adding canvas to frame
frame.add(glcanvas);
frame.setSize( 640, 480 );
frame.setVisible(true);
}
}

Display progress bar while compressing video file

I have to compress video files, so I used this link http://whaticode.com/tag/audio/ and Xuggler for the compression. Now I want to show a progress bar in JavaFX while the video file is being compressed.
import java.io.File;
import com.xuggle.mediatool.IMediaReader;
import com.xuggle.mediatool.IMediaWriter;
import com.xuggle.mediatool.MediaToolAdapter;
import com.xuggle.mediatool.ToolFactory;
import com.xuggle.mediatool.event.AudioSamplesEvent;
import com.xuggle.mediatool.event.IAddStreamEvent;
import com.xuggle.mediatool.event.IAudioSamplesEvent;
import com.xuggle.mediatool.event.IVideoPictureEvent;
import com.xuggle.mediatool.event.VideoPictureEvent;
import com.xuggle.xuggler.IAudioResampler;
import com.xuggle.xuggler.IAudioSamples;
import com.xuggle.xuggler.IRational;
import com.xuggle.xuggler.IStreamCoder;
import com.xuggle.xuggler.IVideoPicture;
import com.xuggle.xuggler.IVideoResampler;
import com.xuggle.xuggler.ICodec;
public class ConvertVideo extends MediaToolAdapter implements Runnable{
private int VIDEO_WIDTH = 640;
private int VIDEO_HEIGHT = 360;
private IMediaWriter writer;
private IMediaReader reader;
private File outputFile;
public ConvertVideo(File inputFile, File outputFile) {
this.outputFile = outputFile;
reader = ToolFactory.makeReader(inputFile.getAbsolutePath());
reader.addListener(this);
}
private IVideoResampler videoResampler = null;
private IAudioResampler audioResampler = null;
@Override
public void onAddStream(IAddStreamEvent event) {
int streamIndex = event.getStreamIndex();
IStreamCoder streamCoder = event.getSource().getContainer().getStream(streamIndex).getStreamCoder();
if (streamCoder.getCodecType() == ICodec.Type.CODEC_TYPE_AUDIO) {
writer.addAudioStream(streamIndex, streamIndex, 2, 44100);
} else if (streamCoder.getCodecType() == ICodec.Type.CODEC_TYPE_VIDEO) {
streamCoder.setWidth(VIDEO_WIDTH);
streamCoder.setHeight(VIDEO_HEIGHT);
streamCoder.setBitRate(100);
streamCoder.setBitRateTolerance(100);
writer.addVideoStream(streamIndex, streamIndex, ICodec.ID.CODEC_ID_H264,IRational.make((double)15),VIDEO_WIDTH, VIDEO_HEIGHT);
}
super.onAddStream(event);
}
@Override
public void onVideoPicture(IVideoPictureEvent event) {
IVideoPicture pic = event.getPicture();
if (videoResampler == null) {
videoResampler = IVideoResampler.make(VIDEO_WIDTH, VIDEO_HEIGHT, pic.getPixelType(), pic.getWidth(), pic.getHeight(), pic.getPixelType());
}
IVideoPicture out = IVideoPicture.make(pic.getPixelType(), VIDEO_WIDTH, VIDEO_HEIGHT);
videoResampler.resample(out, pic);
IVideoPictureEvent asc = new VideoPictureEvent(event.getSource(), out, event.getStreamIndex());
super.onVideoPicture(asc);
out.delete();
}
@Override
public void onAudioSamples(IAudioSamplesEvent event) {
IAudioSamples samples = event.getAudioSamples();
if (audioResampler == null) {
audioResampler = IAudioResampler.make(2, samples.getChannels(), 44100, samples.getSampleRate());
}
if (event.getAudioSamples().getNumSamples() > 0) {
IAudioSamples out = IAudioSamples.make(samples.getNumSamples(), samples.getChannels());
audioResampler.resample(out, samples, samples.getNumSamples());
AudioSamplesEvent asc = new AudioSamplesEvent(event.getSource(), out, event.getStreamIndex());
super.onAudioSamples(asc);
out.delete();
}
}
public void run() {
writer = ToolFactory.makeWriter(outputFile.getAbsolutePath(), reader);
this.addListener(writer);
while (reader.readPacket() == null) {
System.out.println("Converting file..");
}
}
public static void main(String[] args) {
try {
System.out.println("Converting process started");
File file = new File("C:\\Development\\1.mp4");
file.createNewFile();
ConvertVideo obj = new ConvertVideo(new File("C:\\Development\\camera 1_record_1417702745727.wmv"),file);
obj.run();
System.out.println("Converting process end");
} catch (Exception e) {
e.printStackTrace();
}
}
}
Thanks for any help!
I would suggest that you read this documentation from Oracle. It describes the basics of concurrency in JavaFX.
Use a Task or a Service instead of a bare Runnable.
Call updateProgress() inside the Task to report the current progress / work done.
Bind the progressProperty of your running Task to a ProgressBar or ProgressIndicator, as sketched below.
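A minimal sketch of that pattern, where the loop body is a placeholder standing in for the reader.readPacket() loop from the question and the step total is an assumed estimate:
import javafx.concurrent.Task;
import javafx.scene.control.ProgressBar;
// Run the conversion in a background Task and bind its progress to a ProgressBar.
Task<Void> convertTask = new Task<Void>() {
    @Override
    protected Void call() throws Exception {
        long totalSteps = 100; // placeholder: e.g. an estimated packet/frame count
        for (long done = 0; done < totalSteps; done++) {
            // ... convert one unit of work here (one readPacket() call in the question's code) ...
            updateProgress(done + 1, totalSteps); // safe to call from this background thread
        }
        return null;
    }
};
ProgressBar progressBar = new ProgressBar();
progressBar.progressProperty().bind(convertTask.progressProperty());
new Thread(convertTask, "convert-thread").start();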

Using two Buttons on one Activity

I'm still new to Android. I have a kids' book that I have built with two buttons, "READ TO ME" and "READ TO MYSELF".
"Read to me", on click, plays a recording as it reads the book, but it goes to the SoundOne activity.
"Read to myself" displays the text and leads to the PageOne activity.
How do I create some sort of "if statement" so that when I click "read to me" it plays the recording but leads to the PageOne activity, and when I click "read to myself" it displays the text but still leads to the PageOne activity?
This might help reduce the number of classes I have created so far and, I assume, avoid ANRs. Some source code or a tutorial would help. Thanks in advance; my code is below:
package com.inerds.donkiejoukie;
import android.content.Intent;
import android.media.AudioManager;
import android.media.MediaPlayer;
import android.os.Bundle;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.ImageButton;
import android.content.Context;
public class Fbone extends MainActivity {
MediaPlayer one;
MediaPlayer mb;
MediaPlayer mp;
@Override
public void onCreate(Bundle savedInstanceState) {
// TODO Auto-generated method stub
super.onCreate(savedInstanceState);
setContentView(R.layout.fbone);
one = MediaPlayer.create(this, R.raw.pageone);
mb = MediaPlayer.create(this, R.raw.menubar);
mp = MediaPlayer.create(this, R.raw.pageflip);
ImageButton imageButton1 = (ImageButton) findViewById(R.id.imageButton1);
imageButton1.setOnClickListener (new OnClickListener() {
public void onClick(View vone) {
mb.pause();
mb.stop();
mp.start();
startActivity(new Intent(getApplicationContext(), PageOne.class));
finish();
}
});
ImageButton readtome = (ImageButton) findViewById(R.id.readtome);
readtome.setOnClickListener (new OnClickListener() {
public void onClick(View v) {
mb.pause();
mb.stop();
one.start();
startActivity(new Intent(getApplicationContext(), SoundOne.class));
AudioManager audioManager = (AudioManager)getSystemService(Context.AUDIO_SERVICE);
audioManager.setStreamVolume(AudioManager.STREAM_MUSIC, 100, 0);
finish();
}
});
}
}
The Intent mechanism is used to communicate from one Activity to another. Besides the class to start, an Intent can carry the name of an action to perform and other options. In this case, you might want to use an extra to tell the following class whether to play sound or not. When you start the intent:
Intent nextActivity = new Intent(getApplicationContext(), SoundOne.class);
// Put true or false in the next line according to which click handler you're in.
nextActivity.putExtra(SoundOne.extra_enableSound, true);
startActivity(nextActivity);
and then in your SoundOne activity, you need a constant field for the extra name:
static final String extra_enableSound = "enableSound";
and you can find the value of this extra from your onCreate, or wherever you want to start the sound:
if (getIntent().getBooleanExtra(extra_enableSound, false)) {
// start the sound
}
Now your PageOne activity is unused and can be deleted. The documentation for Intent tells you what other information you can stash in there.
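Put together, both click handlers in Fbone can then target the same activity; here is a sketch using the field names from the question's code (PageOne is no longer needed):
// Both buttons launch SoundOne; the boolean extra decides whether the recording is played.
readtome.setOnClickListener(new OnClickListener() {
    public void onClick(View v) {
        Intent next = new Intent(getApplicationContext(), SoundOne.class);
        next.putExtra(SoundOne.extra_enableSound, true);   // "READ TO ME": play the recording
        startActivity(next);
        finish();
    }
});
imageButton1.setOnClickListener(new OnClickListener() {
    public void onClick(View v) {
        Intent next = new Intent(getApplicationContext(), SoundOne.class);
        next.putExtra(SoundOne.extra_enableSound, false);  // "READ TO MYSELF": show the text only
        startActivity(next);
        finish();
    }
});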
I'm not a professional programmer so this might not be the best way to do this, but it works well for me with minimal code.
Here's what I'm doing.
Set an "onClick" for the buttons in my XML
<Button
android:id="@+id/button2"
android:layout_width="144dp"
android:layout_height="wrap_content"
android:layout_toRightOf="@id/button1"
android:onClick="sendMessage1"
android:text="@string/cancel"
/>
Then in the activity I set up my buttons:
Button button1;
Button button2;
Then use a sendMessage method instead of an onButtonClick listener:
public void sendMessage(View view)
{
Intent intent = new Intent(this, ThirdActivity.class);
startActivity(intent);
}
public void sendMessage1(View view)
{
Intent intent = new Intent(this, MainActivity.class);
startActivity(intent);
}
package com.inerds.donkiejoukie;
import android.content.Context;
import android.content.Intent;
import android.media.AudioManager;
import android.media.MediaPlayer;
import android.os.Bundle;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.ImageButton;
public class Fbone extends MainActivity {
MediaPlayer one;
MediaPlayer mb;
MediaPlayer mp;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.fbone);
one = MediaPlayer.create(this, R.raw.pageone);
mb = MediaPlayer.create(this, R.raw.menubar);
mp = MediaPlayer.create(this, R.raw.pageflip);
ImageButton imageButton1 = (ImageButton) findViewById(R.id.imageButton1);
ImageButton readtome = (ImageButton) findViewById(R.id.readtome);
imageButton1.setOnClickListener (handleOnClickListener);
readtome.setOnClickListener (handleOnClickListener);
}
private OnClickListener handleOnClickListener = new OnClickListener() {
public void onClick(View v) {
switch(v.getId()){
case R.id.imageButton1:
mb.pause();
mb.stop();
mp.start();
startActivity(new Intent(getApplicationContext(), PageOne.class));
finish();
break;
case R.id.readtome:
mb.pause();
mb.stop();
one.start();
startActivity(new Intent(getApplicationContext(), SoundOne.class));
AudioManager audioManager = (AudioManager)getSystemService(Context.AUDIO_SERVICE);
audioManager.setStreamVolume(AudioManager.STREAM_MUSIC, 100, 0);
finish();
break;
}
}
};
}