diff --git a/app/build.gradle b/app/build.gradle
index 024db50..4aa164e 100644
--- a/app/build.gradle
+++ b/app/build.gradle
@@ -1,12 +1,12 @@
 apply plugin: 'com.android.application'
 
 android {
-    compileSdkVersion 24
-    buildToolsVersion "24.0.2"
+    compileSdkVersion 29
+    buildToolsVersion "24.0.3"
     defaultConfig {
         applicationId "cafe.adriel.androidaudiorecorder.example"
-        minSdkVersion 15
-        targetSdkVersion 24
+        minSdkVersion 24
+        targetSdkVersion 29
         versionCode 1
         versionName "1.0"
     }
@@ -19,9 +19,8 @@ android {
 }
 
 dependencies {
-    compile 'com.android.support:appcompat-v7:24.2.1'
-    compile project(':lib')
-//    compile 'com.github.adrielcafe:AndroidAudioRecorder:0.1.0'
+    implementation fileTree(dir: "libs", include: ["*.jar"])
+    implementation 'androidx.appcompat:appcompat:1.1.0'
 }
 
 repositories {
diff --git a/build.gradle b/build.gradle
index ccb76fa..adeafa8 100644
--- a/build.gradle
+++ b/build.gradle
@@ -1,15 +1,17 @@
 buildscript {
     repositories {
         jcenter()
+        google()
     }
     dependencies {
-        classpath 'com.android.tools.build:gradle:2.2.0'
+        classpath 'com.android.tools.build:gradle:4.0.1'
     }
 }
 
 allprojects {
     repositories {
         jcenter()
+        google()
     }
 }
 
diff --git a/gradle.properties b/gradle.properties
index aac7c9b..e78e65c 100644
--- a/gradle.properties
+++ b/gradle.properties
@@ -15,3 +15,4 @@ org.gradle.jvmargs=-Xmx1536m
 # This option should only be used with decoupled projects. More details, visit
 # http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
 # org.gradle.parallel=true
+android.useAndroidX=true
diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties
index 2fac210..0fde763 100644
--- a/gradle/wrapper/gradle-wrapper.properties
+++ b/gradle/wrapper/gradle-wrapper.properties
@@ -1,6 +1,6 @@
-#Thu Aug 25 11:20:16 BRT 2016
+#Fri Jul 24 22:18:11 IST 2020
 distributionBase=GRADLE_USER_HOME
 distributionPath=wrapper/dists
 zipStoreBase=GRADLE_USER_HOME
 zipStorePath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-2.14.1-all.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-6.1.1-all.zip
diff --git a/lib/build.gradle b/lib/build.gradle
index 2a9c22c..9afa5ef 100644
--- a/lib/build.gradle
+++ b/lib/build.gradle
@@ -1,12 +1,12 @@
 apply plugin: 'com.android.library'
 
 android {
-    compileSdkVersion 24
-    buildToolsVersion "24.0.2"
+    compileSdkVersion 29
+    buildToolsVersion "24.0.3"
 
     defaultConfig {
-        minSdkVersion 15
-        targetSdkVersion 24
+        minSdkVersion 24
+        targetSdkVersion 29
         versionCode 1
         versionName "1.0"
     }
@@ -19,7 +19,8 @@ android {
 }
 
 dependencies {
-    compile 'com.android.support:appcompat-v7:24.2.1'
-    compile 'com.kailashdabhi:om-recorder:1.1.0'
-    compile 'com.cleveroad:audiovisualization:1.0.0'
+    implementation fileTree(dir: "libs", include: ["*.jar"])
+    implementation 'androidx.appcompat:appcompat:1.1.0'
+    implementation 'com.kailashdabhi:om-recorder:1.1.5'
+    implementation 'com.cleveroad:audiovisualization:1.0.1'
 }
\ No newline at end of file
diff --git a/lib/src/main/java/cafe/adriel/androidaudiorecorder/AndroidAudioRecorder.java b/lib/src/main/java/cafe/adriel/androidaudiorecorder/AndroidAudioRecorder.java
index 27c6375..302c250 100644
--- a/lib/src/main/java/cafe/adriel/androidaudiorecorder/AndroidAudioRecorder.java
+++ b/lib/src/main/java/cafe/adriel/androidaudiorecorder/AndroidAudioRecorder.java
@@ -11,105 +11,105 @@ import cafe.adriel.androidaudiorecorder.model.AudioSource;
 
 public class AndroidAudioRecorder {
-
-    protected static final String EXTRA_FILE_PATH =
"filePath"; - protected static final String EXTRA_COLOR = "color"; - protected static final String EXTRA_SOURCE = "source"; - protected static final String EXTRA_CHANNEL = "channel"; - protected static final String EXTRA_SAMPLE_RATE = "sampleRate"; - protected static final String EXTRA_AUTO_START = "autoStart"; - protected static final String EXTRA_KEEP_DISPLAY_ON = "keepDisplayOn"; - - private Activity activity; - private Fragment fragment; - - private String filePath = Environment.getExternalStorageDirectory() + "/recorded_audio.wav"; - private AudioSource source = AudioSource.MIC; - private AudioChannel channel = AudioChannel.STEREO; - private AudioSampleRate sampleRate = AudioSampleRate.HZ_44100; - private int color = Color.parseColor("#546E7A"); - private int requestCode = 0; - private boolean autoStart = false; - private boolean keepDisplayOn = false; - - private AndroidAudioRecorder(Activity activity) { - this.activity = activity; - } - - private AndroidAudioRecorder(Fragment fragment) { - this.fragment = fragment; - } - - public static AndroidAudioRecorder with(Activity activity) { - return new AndroidAudioRecorder(activity); - } - - public static AndroidAudioRecorder with(Fragment fragment) { - return new AndroidAudioRecorder(fragment); - } - - public AndroidAudioRecorder setFilePath(String filePath) { - this.filePath = filePath; - return this; - } - - public AndroidAudioRecorder setColor(int color) { - this.color = color; - return this; - } - - public AndroidAudioRecorder setRequestCode(int requestCode) { - this.requestCode = requestCode; - return this; - } - - public AndroidAudioRecorder setSource(AudioSource source) { - this.source = source; - return this; - } - - public AndroidAudioRecorder setChannel(AudioChannel channel) { - this.channel = channel; - return this; - } - - public AndroidAudioRecorder setSampleRate(AudioSampleRate sampleRate) { - this.sampleRate = sampleRate; - return this; - } - - public AndroidAudioRecorder setAutoStart(boolean autoStart) { - this.autoStart = autoStart; - return this; - } - - public AndroidAudioRecorder setKeepDisplayOn(boolean keepDisplayOn) { - this.keepDisplayOn = keepDisplayOn; - return this; - } - - public void record() { - Intent intent = new Intent(activity, AudioRecorderActivity.class); - intent.putExtra(EXTRA_FILE_PATH, filePath); - intent.putExtra(EXTRA_COLOR, color); - intent.putExtra(EXTRA_SOURCE, source); - intent.putExtra(EXTRA_CHANNEL, channel); - intent.putExtra(EXTRA_SAMPLE_RATE, sampleRate); - intent.putExtra(EXTRA_AUTO_START, autoStart); - intent.putExtra(EXTRA_KEEP_DISPLAY_ON, keepDisplayOn); - activity.startActivityForResult(intent, requestCode); - } - - public void recordFromFragment() { - Intent intent = new Intent(fragment.getActivity(), AudioRecorderActivity.class); - intent.putExtra(EXTRA_FILE_PATH, filePath); - intent.putExtra(EXTRA_COLOR, color); - intent.putExtra(EXTRA_SOURCE, source); - intent.putExtra(EXTRA_CHANNEL, channel); - intent.putExtra(EXTRA_SAMPLE_RATE, sampleRate); - intent.putExtra(EXTRA_AUTO_START, autoStart); - intent.putExtra(EXTRA_KEEP_DISPLAY_ON, keepDisplayOn); - fragment.startActivityForResult(intent, requestCode); - } - -} + + protected static final String EXTRA_FILE_PATH = "filePath"; + protected static final String EXTRA_COLOR = "color"; + protected static final String EXTRA_SOURCE = "source"; + protected static final String EXTRA_CHANNEL = "channel"; + protected static final String EXTRA_SAMPLE_RATE = "sampleRate"; + protected static final String EXTRA_AUTO_START = "autoStart"; + protected 
static final String EXTRA_KEEP_DISPLAY_ON = "keepDisplayOn"; + + private Activity activity; + private Fragment fragment; + + private String filePath = Environment.getExternalStorageDirectory() + "/recorded_audio.wav"; + private AudioSource source = AudioSource.MIC; + private AudioChannel channel = AudioChannel.STEREO; + private AudioSampleRate sampleRate = AudioSampleRate.HZ_44100; + private int color = Color.parseColor("#546E7A"); + private int requestCode = 0; + private boolean autoStart = false; + private boolean keepDisplayOn = false; + + private AndroidAudioRecorder(Activity activity) { + this.activity = activity; + } + + private AndroidAudioRecorder(Fragment fragment) { + this.fragment = fragment; + } + + public static AndroidAudioRecorder with(Activity activity) { + return new AndroidAudioRecorder(activity); + } + + public static AndroidAudioRecorder with(Fragment fragment) { + return new AndroidAudioRecorder(fragment); + } + + public AndroidAudioRecorder setFilePath(String filePath) { + this.filePath = filePath; + return this; + } + + public AndroidAudioRecorder setColor(int color) { + this.color = color; + return this; + } + + public AndroidAudioRecorder setRequestCode(int requestCode) { + this.requestCode = requestCode; + return this; + } + + public AndroidAudioRecorder setSource(AudioSource source) { + this.source = source; + return this; + } + + public AndroidAudioRecorder setChannel(AudioChannel channel) { + this.channel = channel; + return this; + } + + public AndroidAudioRecorder setSampleRate(AudioSampleRate sampleRate) { + this.sampleRate = sampleRate; + return this; + } + + public AndroidAudioRecorder setAutoStart(boolean autoStart) { + this.autoStart = autoStart; + return this; + } + + public AndroidAudioRecorder setKeepDisplayOn(boolean keepDisplayOn) { + this.keepDisplayOn = keepDisplayOn; + return this; + } + + public void record() { + Intent intent = new Intent(activity, AudioRecorderActivity.class); + intent.putExtra(EXTRA_FILE_PATH, filePath); + intent.putExtra(EXTRA_COLOR, color); + intent.putExtra(EXTRA_SOURCE, source); + intent.putExtra(EXTRA_CHANNEL, channel); + intent.putExtra(EXTRA_SAMPLE_RATE, sampleRate); + intent.putExtra(EXTRA_AUTO_START, autoStart); + intent.putExtra(EXTRA_KEEP_DISPLAY_ON, keepDisplayOn); + activity.startActivityForResult(intent, requestCode); + } + + public void recordFromFragment() { + Intent intent = new Intent(fragment.getActivity(), AudioRecorderActivity.class); + intent.putExtra(EXTRA_FILE_PATH, filePath); + intent.putExtra(EXTRA_COLOR, color); + intent.putExtra(EXTRA_SOURCE, source); + intent.putExtra(EXTRA_CHANNEL, channel); + intent.putExtra(EXTRA_SAMPLE_RATE, sampleRate); + intent.putExtra(EXTRA_AUTO_START, autoStart); + intent.putExtra(EXTRA_KEEP_DISPLAY_ON, keepDisplayOn); + fragment.startActivityForResult(intent, requestCode); + } + +} \ No newline at end of file diff --git a/lib/src/main/java/cafe/adriel/androidaudiorecorder/AudioRecorderActivity.java b/lib/src/main/java/cafe/adriel/androidaudiorecorder/AudioRecorderActivity.java index e5a7d9a..c2c4235 100644 --- a/lib/src/main/java/cafe/adriel/androidaudiorecorder/AudioRecorderActivity.java +++ b/lib/src/main/java/cafe/adriel/androidaudiorecorder/AudioRecorderActivity.java @@ -15,10 +15,10 @@ import android.widget.RelativeLayout; import android.widget.TextView; -import com.cleveroad.audiovisualization.DbmHandler; import com.cleveroad.audiovisualization.GLAudioVisualizationView; import java.io.File; +import java.io.IOException; import java.util.Timer; import 
java.util.TimerTask; @@ -31,389 +31,390 @@ import omrecorder.Recorder; public class AudioRecorderActivity extends AppCompatActivity - implements PullTransport.OnAudioChunkPulledListener, MediaPlayer.OnCompletionListener { - - private String filePath; - private AudioSource source; - private AudioChannel channel; - private AudioSampleRate sampleRate; - private int color; - private boolean autoStart; - private boolean keepDisplayOn; - - private MediaPlayer player; - private Recorder recorder; - private VisualizerHandler visualizerHandler; - - private Timer timer; - private MenuItem saveMenuItem; - private int recorderSecondsElapsed; - private int playerSecondsElapsed; - private boolean isRecording; - - private RelativeLayout contentLayout; - private GLAudioVisualizationView visualizerView; - private TextView statusView; - private TextView timerView; - private ImageButton restartView; - private ImageButton recordView; - private ImageButton playView; - - @Override - protected void onCreate(Bundle savedInstanceState) { - super.onCreate(savedInstanceState); - setContentView(R.layout.aar_activity_audio_recorder); - - if(savedInstanceState != null) { - filePath = savedInstanceState.getString(AndroidAudioRecorder.EXTRA_FILE_PATH); - source = (AudioSource) savedInstanceState.getSerializable(AndroidAudioRecorder.EXTRA_SOURCE); - channel = (AudioChannel) savedInstanceState.getSerializable(AndroidAudioRecorder.EXTRA_CHANNEL); - sampleRate = (AudioSampleRate) savedInstanceState.getSerializable(AndroidAudioRecorder.EXTRA_SAMPLE_RATE); - color = savedInstanceState.getInt(AndroidAudioRecorder.EXTRA_COLOR); - autoStart = savedInstanceState.getBoolean(AndroidAudioRecorder.EXTRA_AUTO_START); - keepDisplayOn = savedInstanceState.getBoolean(AndroidAudioRecorder.EXTRA_KEEP_DISPLAY_ON); - } else { - filePath = getIntent().getStringExtra(AndroidAudioRecorder.EXTRA_FILE_PATH); - source = (AudioSource) getIntent().getSerializableExtra(AndroidAudioRecorder.EXTRA_SOURCE); - channel = (AudioChannel) getIntent().getSerializableExtra(AndroidAudioRecorder.EXTRA_CHANNEL); - sampleRate = (AudioSampleRate) getIntent().getSerializableExtra(AndroidAudioRecorder.EXTRA_SAMPLE_RATE); - color = getIntent().getIntExtra(AndroidAudioRecorder.EXTRA_COLOR, Color.BLACK); - autoStart = getIntent().getBooleanExtra(AndroidAudioRecorder.EXTRA_AUTO_START, false); - keepDisplayOn = getIntent().getBooleanExtra(AndroidAudioRecorder.EXTRA_KEEP_DISPLAY_ON, false); - } - - if(keepDisplayOn){ - getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON); - } - - if (getSupportActionBar() != null) { - getSupportActionBar().setHomeButtonEnabled(true); - getSupportActionBar().setDisplayHomeAsUpEnabled(true); - getSupportActionBar().setDisplayShowTitleEnabled(false); - getSupportActionBar().setElevation(0); - getSupportActionBar().setBackgroundDrawable( - new ColorDrawable(Util.getDarkerColor(color))); - getSupportActionBar().setHomeAsUpIndicator( - ContextCompat.getDrawable(this, R.drawable.aar_ic_clear)); - } - - visualizerView = new GLAudioVisualizationView.Builder(this) - .setLayersCount(1) - .setWavesCount(6) - .setWavesHeight(R.dimen.aar_wave_height) - .setWavesFooterHeight(R.dimen.aar_footer_height) - .setBubblesPerLayer(20) - .setBubblesSize(R.dimen.aar_bubble_size) - .setBubblesRandomizeSize(true) - .setBackgroundColor(Util.getDarkerColor(color)) - .setLayerColors(new int[]{color}) - .build(); - - contentLayout = (RelativeLayout) findViewById(R.id.content); - statusView = (TextView) findViewById(R.id.status); - timerView = (TextView) 
findViewById(R.id.timer); - restartView = (ImageButton) findViewById(R.id.restart); - recordView = (ImageButton) findViewById(R.id.record); - playView = (ImageButton) findViewById(R.id.play); - - contentLayout.setBackgroundColor(Util.getDarkerColor(color)); - contentLayout.addView(visualizerView, 0); - restartView.setVisibility(View.INVISIBLE); - playView.setVisibility(View.INVISIBLE); - - if(Util.isBrightColor(color)) { - ContextCompat.getDrawable(this, R.drawable.aar_ic_clear) - .setColorFilter(Color.BLACK, PorterDuff.Mode.SRC_ATOP); - ContextCompat.getDrawable(this, R.drawable.aar_ic_check) - .setColorFilter(Color.BLACK, PorterDuff.Mode.SRC_ATOP); - statusView.setTextColor(Color.BLACK); - timerView.setTextColor(Color.BLACK); - restartView.setColorFilter(Color.BLACK); - recordView.setColorFilter(Color.BLACK); - playView.setColorFilter(Color.BLACK); - } - } - - @Override - public void onPostCreate(Bundle savedInstanceState) { - super.onPostCreate(savedInstanceState); - if(autoStart && !isRecording){ - toggleRecording(null); - } - } - - @Override - public void onResume() { - super.onResume(); - try { - visualizerView.onResume(); - } catch (Exception e){ } - } - - @Override - protected void onPause() { - restartRecording(null); - try { - visualizerView.onPause(); - } catch (Exception e){ } - super.onPause(); - } - - @Override - protected void onDestroy() { - restartRecording(null); - setResult(RESULT_CANCELED); - try { - visualizerView.release(); - } catch (Exception e){ } - super.onDestroy(); - } - - @Override - protected void onSaveInstanceState(Bundle outState) { - outState.putString(AndroidAudioRecorder.EXTRA_FILE_PATH, filePath); - outState.putInt(AndroidAudioRecorder.EXTRA_COLOR, color); - super.onSaveInstanceState(outState); - } - - @Override - public boolean onCreateOptionsMenu(Menu menu) { - getMenuInflater().inflate(R.menu.aar_audio_recorder, menu); - saveMenuItem = menu.findItem(R.id.action_save); - saveMenuItem.setIcon(ContextCompat.getDrawable(this, R.drawable.aar_ic_check)); - return super.onCreateOptionsMenu(menu); - } - - @Override - public boolean onOptionsItemSelected(MenuItem item) { - int i = item.getItemId(); - if (i == android.R.id.home) { - finish(); - } else if (i == R.id.action_save) { - selectAudio(); - } - return super.onOptionsItemSelected(item); - } - - @Override - public void onAudioChunkPulled(AudioChunk audioChunk) { - float amplitude = isRecording ? 
(float) audioChunk.maxAmplitude() : 0f; - visualizerHandler.onDataReceived(amplitude); - } - - @Override - public void onCompletion(MediaPlayer mediaPlayer) { - stopPlaying(); - } - - private void selectAudio() { - stopRecording(); - setResult(RESULT_OK); - finish(); - } - - public void toggleRecording(View v) { - stopPlaying(); - Util.wait(100, new Runnable() { - @Override - public void run() { - if (isRecording) { - pauseRecording(); - } else { - resumeRecording(); - } - } - }); - } - - public void togglePlaying(View v){ - pauseRecording(); - Util.wait(100, new Runnable() { - @Override - public void run() { - if(isPlaying()){ - stopPlaying(); - } else { - startPlaying(); - } - } - }); - } - - public void restartRecording(View v){ - if(isRecording) { - stopRecording(); - } else if(isPlaying()) { - stopPlaying(); - } else { - visualizerHandler = new VisualizerHandler(); - visualizerView.linkTo(visualizerHandler); - visualizerView.release(); - if(visualizerHandler != null) { - visualizerHandler.stop(); - } - } - saveMenuItem.setVisible(false); - statusView.setVisibility(View.INVISIBLE); - restartView.setVisibility(View.INVISIBLE); - playView.setVisibility(View.INVISIBLE); - recordView.setImageResource(R.drawable.aar_ic_rec); - timerView.setText("00:00:00"); - recorderSecondsElapsed = 0; - playerSecondsElapsed = 0; - } - - private void resumeRecording() { - isRecording = true; - saveMenuItem.setVisible(false); - statusView.setText(R.string.aar_recording); - statusView.setVisibility(View.VISIBLE); - restartView.setVisibility(View.INVISIBLE); - playView.setVisibility(View.INVISIBLE); - recordView.setImageResource(R.drawable.aar_ic_pause); - playView.setImageResource(R.drawable.aar_ic_play); - - visualizerHandler = new VisualizerHandler(); - visualizerView.linkTo(visualizerHandler); - - if(recorder == null) { - timerView.setText("00:00:00"); - - recorder = OmRecorder.wav( - new PullTransport.Default(Util.getMic(source, channel, sampleRate), AudioRecorderActivity.this), - new File(filePath)); - } - recorder.resumeRecording(); - - startTimer(); - } - - private void pauseRecording() { - isRecording = false; - if(!isFinishing()) { - saveMenuItem.setVisible(true); - } - statusView.setText(R.string.aar_paused); - statusView.setVisibility(View.VISIBLE); - restartView.setVisibility(View.VISIBLE); - playView.setVisibility(View.VISIBLE); - recordView.setImageResource(R.drawable.aar_ic_rec); - playView.setImageResource(R.drawable.aar_ic_play); - - visualizerView.release(); - if(visualizerHandler != null) { - visualizerHandler.stop(); - } - - if (recorder != null) { - recorder.pauseRecording(); - } - - stopTimer(); - } - - private void stopRecording(){ - visualizerView.release(); - if(visualizerHandler != null) { - visualizerHandler.stop(); - } - - recorderSecondsElapsed = 0; - if (recorder != null) { - recorder.stopRecording(); - recorder = null; - } - - stopTimer(); - } - - private void startPlaying(){ - try { - stopRecording(); - player = new MediaPlayer(); - player.setDataSource(filePath); - player.prepare(); - player.start(); - - visualizerView.linkTo(DbmHandler.Factory.newVisualizerHandler(this, player)); - visualizerView.post(new Runnable() { - @Override - public void run() { - player.setOnCompletionListener(AudioRecorderActivity.this); - } - }); - - timerView.setText("00:00:00"); - statusView.setText(R.string.aar_playing); - statusView.setVisibility(View.VISIBLE); - playView.setImageResource(R.drawable.aar_ic_stop); - - playerSecondsElapsed = 0; - startTimer(); - } catch (Exception e){ - 
e.printStackTrace(); - } - } - - private void stopPlaying(){ - statusView.setText(""); - statusView.setVisibility(View.INVISIBLE); - playView.setImageResource(R.drawable.aar_ic_play); - - visualizerView.release(); - if(visualizerHandler != null) { - visualizerHandler.stop(); - } - - if(player != null){ - try { - player.stop(); - player.reset(); - } catch (Exception e){ } - } - - stopTimer(); - } - - private boolean isPlaying(){ - try { - return player != null && player.isPlaying() && !isRecording; - } catch (Exception e){ - return false; - } - } - - private void startTimer(){ - stopTimer(); - timer = new Timer(); - timer.scheduleAtFixedRate(new TimerTask() { - @Override - public void run() { - updateTimer(); - } - }, 0, 1000); - } - - private void stopTimer(){ - if (timer != null) { - timer.cancel(); - timer.purge(); - timer = null; - } - } - - private void updateTimer() { - runOnUiThread(new Runnable() { - @Override - public void run() { - if(isRecording) { - recorderSecondsElapsed++; - timerView.setText(Util.formatSeconds(recorderSecondsElapsed)); - } else if(isPlaying()){ - playerSecondsElapsed++; - timerView.setText(Util.formatSeconds(playerSecondsElapsed)); - } - } - }); - } -} + implements PullTransport.OnAudioChunkPulledListener, MediaPlayer.OnCompletionListener { + + private String filePath; + private AudioSource source; + private AudioChannel channel; + private AudioSampleRate sampleRate; + private int color; + private boolean autoStart; + private boolean keepDisplayOn; + + private Recorder recorder; + private VisualizerHandler visualizerHandler; + + private Timer timer; + private MenuItem saveMenuItem; + private int recorderSecondsElapsed; + private int playerSecondsElapsed; + private boolean isRecording; + + private RelativeLayout contentLayout; + private GLAudioVisualizationView visualizerView; + private TextView statusView; + private TextView timerView; + private ImageButton restartView; + private ImageButton recordView; + private ImageButton playView; + + @Override + protected void onCreate(Bundle savedInstanceState) { + super.onCreate(savedInstanceState); + setContentView(R.layout.aar_activity_audio_recorder); + + if(savedInstanceState != null) { + filePath = savedInstanceState.getString(AndroidAudioRecorder.EXTRA_FILE_PATH); + source = (AudioSource) savedInstanceState.getSerializable(AndroidAudioRecorder.EXTRA_SOURCE); + channel = (AudioChannel) savedInstanceState.getSerializable(AndroidAudioRecorder.EXTRA_CHANNEL); + sampleRate = (AudioSampleRate) savedInstanceState.getSerializable(AndroidAudioRecorder.EXTRA_SAMPLE_RATE); + color = savedInstanceState.getInt(AndroidAudioRecorder.EXTRA_COLOR); + autoStart = savedInstanceState.getBoolean(AndroidAudioRecorder.EXTRA_AUTO_START); + keepDisplayOn = savedInstanceState.getBoolean(AndroidAudioRecorder.EXTRA_KEEP_DISPLAY_ON); + } else { + filePath = getIntent().getStringExtra(AndroidAudioRecorder.EXTRA_FILE_PATH); + source = (AudioSource) getIntent().getSerializableExtra(AndroidAudioRecorder.EXTRA_SOURCE); + channel = (AudioChannel) getIntent().getSerializableExtra(AndroidAudioRecorder.EXTRA_CHANNEL); + sampleRate = (AudioSampleRate) getIntent().getSerializableExtra(AndroidAudioRecorder.EXTRA_SAMPLE_RATE); + color = getIntent().getIntExtra(AndroidAudioRecorder.EXTRA_COLOR, Color.BLACK); + autoStart = getIntent().getBooleanExtra(AndroidAudioRecorder.EXTRA_AUTO_START, false); + keepDisplayOn = getIntent().getBooleanExtra(AndroidAudioRecorder.EXTRA_KEEP_DISPLAY_ON, false); + } + + if(keepDisplayOn){ + 
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON); + } + + if (getSupportActionBar() != null) { + getSupportActionBar().setHomeButtonEnabled(true); + getSupportActionBar().setDisplayHomeAsUpEnabled(true); + getSupportActionBar().setDisplayShowTitleEnabled(false); + getSupportActionBar().setElevation(0); + getSupportActionBar().setBackgroundDrawable( + new ColorDrawable(Util.getDarkerColor(color))); + getSupportActionBar().setHomeAsUpIndicator( + ContextCompat.getDrawable(this, R.drawable.aar_ic_clear)); + } + + visualizerView = new GLAudioVisualizationView.Builder(this) + .setLayersCount(1) + .setWavesCount(6) + .setWavesHeight(R.dimen.aar_wave_height) + .setWavesFooterHeight(R.dimen.aar_footer_height) + .setBubblesPerLayer(20) + .setBubblesSize(R.dimen.aar_bubble_size) + .setBubblesRandomizeSize(true) + .setBackgroundColor(Util.getDarkerColor(color)) + .setLayerColors(new int[]{color}) + .build(); + + contentLayout = (RelativeLayout) findViewById(R.id.content); + statusView = (TextView) findViewById(R.id.status); + timerView = (TextView) findViewById(R.id.timer); + restartView = (ImageButton) findViewById(R.id.restart); + recordView = (ImageButton) findViewById(R.id.record); + playView = (ImageButton) findViewById(R.id.play); + + contentLayout.setBackgroundColor(Util.getDarkerColor(color)); + contentLayout.addView(visualizerView, 0); + restartView.setVisibility(View.INVISIBLE); + playView.setVisibility(View.INVISIBLE); + + if(Util.isBrightColor(color)) { + ContextCompat.getDrawable(this, R.drawable.aar_ic_clear) + .setColorFilter(Color.BLACK, PorterDuff.Mode.SRC_ATOP); + ContextCompat.getDrawable(this, R.drawable.aar_ic_check) + .setColorFilter(Color.BLACK, PorterDuff.Mode.SRC_ATOP); + statusView.setTextColor(Color.BLACK); + timerView.setTextColor(Color.BLACK); + restartView.setColorFilter(Color.BLACK); + recordView.setColorFilter(Color.BLACK); + playView.setColorFilter(Color.BLACK); + } + } + + @Override + public void onPostCreate(Bundle savedInstanceState) { + super.onPostCreate(savedInstanceState); + if(autoStart && !isRecording){ + toggleRecording(null); + } + } + + @Override + public void onResume() { + super.onResume(); + try { + visualizerView.onResume(); + } catch (Exception ignored){ } + } + + @Override + protected void onPause() { + try { + restartRecording(null); + } catch (IOException e) { + e.printStackTrace(); + } + try { + visualizerView.onPause(); + } catch (Exception ignored){ } + super.onPause(); + } + + @Override + protected void onDestroy() { + try { + restartRecording(null); + } catch (IOException e) { + e.printStackTrace(); + } + setResult(RESULT_CANCELED); + try { + visualizerView.release(); + } catch (Exception ignored){ } + super.onDestroy(); + } + + @Override + protected void onSaveInstanceState(Bundle outState) { + outState.putString(AndroidAudioRecorder.EXTRA_FILE_PATH, filePath); + outState.putInt(AndroidAudioRecorder.EXTRA_COLOR, color); + super.onSaveInstanceState(outState); + } + + @Override + public boolean onCreateOptionsMenu(Menu menu) { + getMenuInflater().inflate(R.menu.aar_audio_recorder, menu); + saveMenuItem = menu.findItem(R.id.action_save); + saveMenuItem.setIcon(ContextCompat.getDrawable(this, R.drawable.aar_ic_check)); + return super.onCreateOptionsMenu(menu); + } + + @Override + public boolean onOptionsItemSelected(MenuItem item) { + int i = item.getItemId(); + if (i == android.R.id.home) { + finish(); + } else if (i == R.id.action_save) { + try { + selectAudio(); + } catch (IOException e) { + e.printStackTrace(); + } + } + 
return super.onOptionsItemSelected(item); + } + + @Override + public void onAudioChunkPulled(AudioChunk audioChunk) { + float amplitude = isRecording ? (float) audioChunk.maxAmplitude() : 0f; + visualizerHandler.onDataReceived(amplitude); + } + + @Override + public void onCompletion(MediaPlayer mediaPlayer) { + stopPlaying(); + } + + private void selectAudio() throws IOException { + stopRecording(); + setResult(RESULT_OK); + finish(); + } + + public void toggleRecording(View v) { + stopPlaying(); + Util.wait(100, new Runnable() { + @Override + public void run() { + if (isRecording) { + pauseRecording(); + } else { + resumeRecording(); + } + } + }); + } + + public void togglePlaying(View v){ + pauseRecording(); + Util.wait(100, new Runnable() { + @Override + public void run() { + if(isPlaying()){ + stopPlaying(); + } else { + try { + startPlaying(); + } catch (IOException e) { + e.printStackTrace(); + } + } + } + }); + } + + public void restartRecording(View v) throws IOException { + if(isRecording) { + stopRecording(); + } else if(isPlaying()) { + stopPlaying(); + } else { + visualizerHandler = new VisualizerHandler(); + visualizerView.linkTo(visualizerHandler); + visualizerView.release(); + if(visualizerHandler != null) { + visualizerHandler.stop(); + } + } + saveMenuItem.setVisible(false); + statusView.setVisibility(View.INVISIBLE); + restartView.setVisibility(View.INVISIBLE); + playView.setVisibility(View.INVISIBLE); + recordView.setImageResource(R.drawable.aar_ic_rec); + timerView.setText("00:00:00"); + recorderSecondsElapsed = 0; + playerSecondsElapsed = 0; + } + + private void resumeRecording() { + isRecording = true; + saveMenuItem.setVisible(false); + statusView.setText(R.string.aar_recording); + statusView.setVisibility(View.VISIBLE); + restartView.setVisibility(View.INVISIBLE); + playView.setVisibility(View.INVISIBLE); + recordView.setImageResource(R.drawable.aar_ic_pause); + playView.setImageResource(R.drawable.aar_ic_play); + + visualizerHandler = new VisualizerHandler(); + visualizerView.linkTo(visualizerHandler); + + if(recorder == null) { + timerView.setText("00:00:00"); + + recorder = OmRecorder.wav( + new PullTransport.Default(Util.getMic(source, channel, sampleRate), AudioRecorderActivity.this), + new File(filePath)); + } + recorder.resumeRecording(); + + startTimer(); + } + + private void pauseRecording() { + isRecording = false; + if(!isFinishing()) { + saveMenuItem.setVisible(true); + } + statusView.setText(R.string.aar_paused); + statusView.setVisibility(View.VISIBLE); + restartView.setVisibility(View.VISIBLE); + playView.setVisibility(View.VISIBLE); + recordView.setImageResource(R.drawable.aar_ic_rec); + playView.setImageResource(R.drawable.aar_ic_play); + + visualizerView.release(); + if(visualizerHandler != null) { + visualizerHandler.stop(); + } + + if (recorder != null) { + recorder.pauseRecording(); + } + + stopTimer(); + } + + private void stopRecording() throws IOException { + visualizerView.release(); + if(visualizerHandler != null) { + visualizerHandler.stop(); + } + + recorderSecondsElapsed = 0; + if (recorder != null) { + recorder.stopRecording(); + recorder = null; + } + + stopTimer(); + } + + private void startPlaying() throws IOException { + + stopRecording(); + PlayerManager.getInstance().play(filePath, new PlayerManager.PlayerManagerPlayCallBack() { + @Override + public void onPlayFinished() { + stopPlaying(); + } + }); + + timerView.setText("00:00:00"); + statusView.setText(R.string.aar_playing); + statusView.setVisibility(View.VISIBLE); + 
playView.setImageResource(R.drawable.aar_ic_stop); + + playerSecondsElapsed = 0; + startTimer(); + } + + private void stopPlaying(){ + statusView.setText(""); + statusView.setVisibility(View.INVISIBLE); + playView.setImageResource(R.drawable.aar_ic_play); + + visualizerView.release(); + if(visualizerHandler != null) { + visualizerHandler.stop(); + } + + PlayerManager.getInstance().stop(); + + + stopTimer(); + } + + private boolean isPlaying(){ + if (PlayerManager.getInstance().isPlaying() && !isRecording) { + return true; + } + return false; + } + + private void startTimer(){ + stopTimer(); + timer = new Timer(); + timer.scheduleAtFixedRate(new TimerTask() { + @Override + public void run() { + updateTimer(); + } + }, 0, 1000); + } + + private void stopTimer(){ + if (timer != null) { + timer.cancel(); + timer.purge(); + timer = null; + } + } + + private void updateTimer() { + runOnUiThread(new Runnable() { + @Override + public void run() { + if(isRecording) { + recorderSecondsElapsed++; + timerView.setText(Util.formatSeconds(recorderSecondsElapsed)); + } else if(isPlaying()){ + playerSecondsElapsed++; + timerView.setText(Util.formatSeconds(playerSecondsElapsed)); + } + } + }); + } +} \ No newline at end of file diff --git a/lib/src/main/java/cafe/adriel/androidaudiorecorder/PlayerManager.java b/lib/src/main/java/cafe/adriel/androidaudiorecorder/PlayerManager.java new file mode 100644 index 0000000..73fd7e4 --- /dev/null +++ b/lib/src/main/java/cafe/adriel/androidaudiorecorder/PlayerManager.java @@ -0,0 +1,90 @@ +package cafe.adriel.androidaudiorecorder; + +import android.media.AudioFormat; +import android.media.AudioManager; +import android.media.AudioTrack; +import android.os.Handler; +import android.os.Looper; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; + +public class PlayerManager { + + public interface PlayerManagerPlayCallBack{ + void onPlayFinished(); + } + + private boolean isPlaying = false; + private AudioTrack at = null; + + private static final PlayerManager ourInstance = new PlayerManager(); + + public static PlayerManager getInstance() { + return ourInstance; + } + + private PlayerManager() { + } + + public void play(final String filePath, final PlayerManagerPlayCallBack playerManagerPlayCallBack){ + new Thread(new Runnable() { + @Override + public void run() { + try { + isPlaying = true; + // just put here your wav file + File yourWavFile = new File(filePath); + FileInputStream fis = new FileInputStream(yourWavFile); + int minBufferSize = AudioTrack.getMinBufferSize(48000, + AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT); + at = new AudioTrack(AudioManager.STREAM_MUSIC, 48000, + AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT, + minBufferSize, AudioTrack.MODE_STREAM); + + int i = 0; + byte[] music = null; + try { + + music = new byte[512]; + at.play(); + + while ((i = fis.read(music)) != -1 && isPlaying){ + at.write(music, 0, i); + } + + + } catch (IOException e) { + e.printStackTrace(); + } + + at.stop(); + at.release(); + isPlaying = false; + new Handler(Looper.getMainLooper()).post(new Runnable() { + @Override + public void run() { + playerManagerPlayCallBack.onPlayFinished(); + } + }); + + } catch (IOException ex) { + ex.printStackTrace(); + } + } + }).start(); + } + + public void stop(){ + if (at == null){ + return; + } + isPlaying = false; + + } + + public boolean isPlaying() { + return isPlaying; + } +} \ No newline at end of file diff --git 
a/lib/src/main/java/cafe/adriel/androidaudiorecorder/Util.java b/lib/src/main/java/cafe/adriel/androidaudiorecorder/Util.java index c0b68c6..4ca6545 100644 --- a/lib/src/main/java/cafe/adriel/androidaudiorecorder/Util.java +++ b/lib/src/main/java/cafe/adriel/androidaudiorecorder/Util.java @@ -7,64 +7,69 @@ import cafe.adriel.androidaudiorecorder.model.AudioChannel; import cafe.adriel.androidaudiorecorder.model.AudioSampleRate; import cafe.adriel.androidaudiorecorder.model.AudioSource; +import omrecorder.AudioRecordConfig; +import omrecorder.PullableSource; public class Util { - private static final Handler HANDLER = new Handler(); - - private Util() { - } - - public static void wait(int millis, Runnable callback){ - HANDLER.postDelayed(callback, millis); - } - - public static omrecorder.AudioSource getMic(AudioSource source, - AudioChannel channel, - AudioSampleRate sampleRate) { - return new omrecorder.AudioSource.Smart( - source.getSource(), - AudioFormat.ENCODING_PCM_16BIT, - channel.getChannel(), - sampleRate.getSampleRate()); - } - - public static boolean isBrightColor(int color) { - if(android.R.color.transparent == color) { - return true; - } - int [] rgb = {Color.red(color), Color.green(color), Color.blue(color)}; - int brightness = (int) Math.sqrt( - rgb[0] * rgb[0] * 0.241 + - rgb[1] * rgb[1] * 0.691 + - rgb[2] * rgb[2] * 0.068); - return brightness >= 200; - } - - public static int getDarkerColor(int color) { - float factor = 0.8f; - int a = Color.alpha(color); - int r = Color.red(color); - int g = Color.green(color); - int b = Color.blue(color); - return Color.argb(a, - Math.max((int) (r * factor), 0), - Math.max((int) (g * factor), 0), - Math.max((int) (b * factor), 0)); - } - - public static String formatSeconds(int seconds) { - return getTwoDecimalsValue(seconds / 3600) + ":" - + getTwoDecimalsValue(seconds / 60) + ":" - + getTwoDecimalsValue(seconds % 60); - } - - private static String getTwoDecimalsValue(int value) { - if (value >= 0 && value <= 9) { - return "0" + value; - } else { - return value + ""; - } - } - + private static final Handler HANDLER = new Handler(); + + private Util() { + } + + public static void wait(int millis, Runnable callback){ + HANDLER.postDelayed(callback, millis); + } + + public static PullableSource.Default getMic(AudioSource source, + AudioChannel channel, + AudioSampleRate sampleRate) { + return new PullableSource.Default( + new AudioRecordConfig.Default( + source.getSource(), + AudioFormat.ENCODING_PCM_16BIT, + channel.getChannel(), + sampleRate.getSampleRate() + ) + ); + } + + public static boolean isBrightColor(int color) { + if(android.R.color.transparent == color) { + return true; + } + int [] rgb = {Color.red(color), Color.green(color), Color.blue(color)}; + int brightness = (int) Math.sqrt( + rgb[0] * rgb[0] * 0.241 + + rgb[1] * rgb[1] * 0.691 + + rgb[2] * rgb[2] * 0.068); + return brightness >= 200; + } + + public static int getDarkerColor(int color) { + float factor = 0.8f; + int a = Color.alpha(color); + int r = Color.red(color); + int g = Color.green(color); + int b = Color.blue(color); + return Color.argb(a, + Math.max((int) (r * factor), 0), + Math.max((int) (g * factor), 0), + Math.max((int) (b * factor), 0)); + } + + public static String formatSeconds(int seconds) { + return getTwoDecimalsValue(seconds / 3600) + ":" + + getTwoDecimalsValue(seconds / 60) + ":" + + getTwoDecimalsValue(seconds % 60); + } + + private static String getTwoDecimalsValue(int value) { + if (value >= 0 && value <= 9) { + return "0" + value; + } else { + 
+            return value + "";
+        }
+    }
+}
\ No newline at end of file
diff --git a/lib/src/main/java/cafe/adriel/androidaudiorecorder/model/AudioSource.java b/lib/src/main/java/cafe/adriel/androidaudiorecorder/model/AudioSource.java
index daae49c..66d9743 100644
--- a/lib/src/main/java/cafe/adriel/androidaudiorecorder/model/AudioSource.java
+++ b/lib/src/main/java/cafe/adriel/androidaudiorecorder/model/AudioSource.java
@@ -7,11 +7,9 @@ public enum AudioSource {
     CAMCORDER;
 
     public int getSource(){
-        switch (this){
-            case CAMCORDER:
-                return MediaRecorder.AudioSource.CAMCORDER;
-            default:
-                return MediaRecorder.AudioSource.MIC;
+        if (this == AudioSource.CAMCORDER) {
+            return MediaRecorder.AudioSource.CAMCORDER;
         }
+        return MediaRecorder.AudioSource.MIC;
     }
 }
\ No newline at end of file
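For reference, a minimal caller-side sketch of the builder API that this change set migrates. MainActivity and REQUEST_RECORD_AUDIO are assumed names, and runtime-permission handling (RECORD_AUDIO, plus storage access for the external-storage path used here) is omitted; the builder calls and result handling follow the signatures shown in AndroidAudioRecorder.java and AudioRecorderActivity.java above, not code added by this patch.

// Usage sketch only: assumes permissions are already granted.
import android.content.Intent;
import android.graphics.Color;
import android.os.Environment;

import androidx.appcompat.app.AppCompatActivity;

import cafe.adriel.androidaudiorecorder.AndroidAudioRecorder;
import cafe.adriel.androidaudiorecorder.model.AudioChannel;
import cafe.adriel.androidaudiorecorder.model.AudioSampleRate;
import cafe.adriel.androidaudiorecorder.model.AudioSource;

public class MainActivity extends AppCompatActivity {

    private static final int REQUEST_RECORD_AUDIO = 0; // assumed request code

    private final String filePath =
            Environment.getExternalStorageDirectory() + "/recorded_audio.wav";

    private void startRecording() {
        AndroidAudioRecorder.with(this)
                .setFilePath(filePath)
                .setColor(Color.parseColor("#546E7A"))
                .setRequestCode(REQUEST_RECORD_AUDIO)
                .setSource(AudioSource.MIC)
                .setChannel(AudioChannel.STEREO)
                .setSampleRate(AudioSampleRate.HZ_44100)
                .setAutoStart(true)
                .setKeepDisplayOn(true)
                .record();  // launches AudioRecorderActivity for a result
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        if (requestCode == REQUEST_RECORD_AUDIO && resultCode == RESULT_OK) {
            // The WAV file is now at filePath; it could be played back with
            // PlayerManager.getInstance().play(filePath, callback) from the diff above.
        }
    }
}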