Friday, January 3, 2014

Batch rename files from OS X terminal

As an example, let’s say you want to rename *.cxx files to *.cpp. The code below will do it:

for file in *.cxx
do
    # Strip the ".cxx" suffix and append ".cpp". The original used
    # "${file/.cxx/.cpp}", which replaces the FIRST ".cxx" anywhere in
    # the name and so mangles names like "old.cxx.bak.cxx"; the
    # suffix-removal form "${file%.cxx}" only touches the extension.
    echo mv "$file" "${file%.cxx}.cpp"   # preview the command
    mv "$file" "${file%.cxx}.cpp"
done

Written with StackEdit.

Wednesday, January 1, 2014

How to: Convert a file with raw PCM samples to MP4

Note: I have been working on some audio processing related projects at work and thought I could share some code which others find useful. This one is the third in the series. First in the series is here. Second is here.

We use in-built android classes like MediaMuxer, MediaFormat and MediaCodec for conversion. MediaFormat and MediaCodec are available from the Jelly-Bean release (API level 16), while MediaMuxer requires API level 18 (Android 4.3). For older versions you will need to rely on native libraries and JNI to get the job done.

As with recording and playback, do the conversion in a background thread or an AsyncTask. Below is the code snippet:

public static final String AUDIO_RECORDING_FILE_NAME = "recording.raw"; // Input: raw 16-bit PCM samples
public static final String COMPRESSED_AUDIO_FILE_NAME = "compressed.mp4"; // Output: AAC audio in an MP4 container
public static final String COMPRESSED_AUDIO_FILE_MIME_TYPE = "audio/mp4a-latm"; // AAC MIME type for MediaCodec/MediaFormat
public static final int COMPRESSED_AUDIO_FILE_BIT_RATE = 128000; // 128kbps
public static final int SAMPLING_RATE = 44100; // Hz
// NOTE(review): the name says milliseconds, but MediaCodec.dequeueInputBuffer /
// dequeueOutputBuffer take their timeout in MICROseconds, so 5000 here is
// really 5 ms — confirm which duration was intended.
public static final int CODEC_TIMEOUT_IN_MS = 5000;
public static final int BUFFER_SIZE = 88200; // One second of 16-bit mono PCM at 44.1 kHz (44100 * 2 bytes)

@Override
public void run() {
    android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_BACKGROUND);

    try {
        String filePath = Environment.getExternalStorageDirectory().getPath() + "/" + AUDIO_RECORDING_FILE_NAME;
        File inputFile = new File(filePath);
        FileInputStream fis = new FileInputStream(inputFile);

        File outputFile = new File(Environment.getExternalStorageDirectory().getAbsolutePath() + "/" + COMPRESSED_AUDIO_FILE_NAME);
        if (outputFile.exists()) outputFile.delete();

         MediaMuxer mux = new MediaMuxer(outputFile.getAbsolutePath(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);

        MediaFormat outputFormat = MediaFormat.createAudioFormat(COMPRESSED_AUDIO_FILE_MIME_TYPE,
                SAMPLING_RATE, 1);
        outputFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
        outputFormat.setInteger(MediaFormat.KEY_BIT_RATE, COMPRESSED_AUDIO_FILE_BIT_RATE);

        MediaCodec codec = MediaCodec.createEncoderByType(COMPRESSED_AUDIO_FILE_MIME_TYPE);
        codec.configure(outputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        codec.start();

        ByteBuffer[] codecInputBuffers = codec.getInputBuffers(); // Note: Array of buffers
        ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();

        MediaCodec.BufferInfo outBuffInfo = new MediaCodec.BufferInfo();

        byte[] tempBuffer = new byte[BUFFER_SIZE];
        boolean hasMoreData = true;
        double presentationTimeUs = 0;
        int audioTrackIdx = 0;
        int totalBytesRead = 0;
        int percentComplete;

        do {

            int inputBufIndex = 0;
            while (inputBufIndex != -1 && hasMoreData) {
                inputBufIndex = codec.dequeueInputBuffer(CODEC_TIMEOUT_IN_MS);

                if (inputBufIndex >= 0) {
                    ByteBuffer dstBuf = codecInputBuffers[inputBufIndex];
                    dstBuf.clear();

                    int bytesRead = fis.read(tempBuffer, 0, dstBuf.limit());
                    if (bytesRead == -1) { // -1 implies EOS
                        hasMoreData = false;
                        codec.queueInputBuffer(inputBufIndex, 0, 0, (long) presentationTimeUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                    } else {
                        totalBytesRead += bytesRead;
                        dstBuf.put(tempBuffer, 0, bytesRead);
                        codec.queueInputBuffer(inputBufIndex, 0, bytesRead, (long) presentationTimeUs, 0);
                        presentationTimeUs = 1000000l * (totalBytesRead / 2) / SAMPLING_RATE;
                    }
                }
            }

            // Drain audio
            int outputBufIndex = 0;
            while (outputBufIndex != MediaCodec.INFO_TRY_AGAIN_LATER) {

                outputBufIndex = codec.dequeueOutputBuffer(outBuffInfo, CODEC_TIMEOUT_IN_MS);
                if (outputBufIndex >= 0) {
                    ByteBuffer encodedData = codecOutputBuffers[outputBufIndex];
                    encodedData.position(outBuffInfo.offset);
                    encodedData.limit(outBuffInfo.offset + outBuffInfo.size);

                    if ((outBuffInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0 && outBuffInfo.size != 0) {
                        codec.releaseOutputBuffer(outputBufIndex, false);
                    } else {
                        mux.writeSampleData(audioTrackIdx, codecOutputBuffers[outputBufIndex], outBuffInfo);
                        codec.releaseOutputBuffer(outputBufIndex, false);
                    }
                } else if (outputBufIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    outputFormat = codec.getOutputFormat();
                    Log.v(LOGTAG, "Output format changed - " + outputFormat);
                    audioTrackIdx = mux.addTrack(outputFormat);
                    mux.start();
                } else if (outputBufIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    Log.e(LOGTAG, "Output buffers changed during encode!");
                } else if (outputBufIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // NO OP
                } else {
                    Log.e(LOGTAG, "Unknown return code from dequeueOutputBuffer - " + outputBufIndex);
                }
            }
            percentComplete = (int) Math.round(((float) totalBytesRead / (float) inputFile.length()) * 100.0);
            Log.v(LOGTAG, "Conversion % - " percentComplete);
        } while (outBuffInfo.flags != MediaCodec.BUFFER_FLAG_END_OF_STREAM && !mStop);

        fis.close();
        mux.stop();
        mux.release();
        Log.v(LOGTAG, "Compression done ...");
    } catch (FileNotFoundException e) {
        Log.e(LOGTAG, "File not found!", e);
    } catch (IOException e) {
        Log.e(LOGTAG, "IO exception!", e);
    }

    mStop = false;
    // Notify UI thread...
}

Written with StackEdit.

How to: Playback file with PCM samples in android

Note: I have been working on some audio processing related projects at work and thought I could share some code which others find useful. This one is the second in the series. First in the series is here.

As with recording, it makes sense to do audio playback in a separate thread or AsyncTask. Below is the code; should be self-explanatory:

public static final int AUDIO_FORMAT = AudioFormat.ENCODING_PCM_16BIT; // Matches the recorded raw data
public static final int CHANNEL_IN_CONFIG = AudioFormat.CHANNEL_IN_MONO;
public static final int CHANNEL_OUT_CONFIG = AudioFormat.CHANNEL_OUT_MONO; // Declared before BUFFER_SIZE, which uses it
public static final int SAMPLING_RATE = 44100; // Hz
// BUG FIX: the playback buffer must be sized with AudioTrack.getMinBufferSize
// and an OUTPUT channel mask; the original used AudioRecord.getMinBufferSize
// with the input mask, which sizes a capture buffer, not a playback one.
public static final int BUFFER_SIZE = AudioTrack.getMinBufferSize(SAMPLING_RATE, CHANNEL_OUT_CONFIG, AUDIO_FORMAT);
public static final String AUDIO_RECORDING_FILE_NAME = "recording.raw";


@Override
public void run() {
    // Audio rendering is latency-sensitive; request the urgent-audio priority band.
    android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
    Log.v(LOGTAG, "Starting playback...");

    AudioTrack audioTrack;
    String filePath = Environment.getExternalStorageDirectory().getPath()
                + "/" + AudioConstants.AUDIO_RECORDING_FILE_NAME;
    File f = new File(filePath);

    try {

        // Streaming-mode track: we push PCM chunks as we read them from disk.
        audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
                SAMPLING_RATE,
                CHANNEL_OUT_CONFIG,
                AUDIO_FORMAT,
                BUFFER_SIZE, AudioTrack.MODE_STREAM);
        byte[] audioData = new byte[AudioConstants.BUFFER_SIZE];
        int bytesRead = 0;
        int readSize;
        int fileSize = (int) f.length();

        DataInputStream dis = new DataInputStream(new BufferedInputStream(new FileInputStream(f)));

        audioTrack.play();

        while (bytesRead < fileSize && !mStop) {
            readSize = dis.read(audioData, 0, audioData.length);
            if (readSize == -1) {
                // BUG FIX: the original skipped -1 and kept looping, spinning
                // forever if the file ended before fileSize bytes were read
                // (e.g. the file was truncated after length() was sampled).
                break;
            }
            // Write the byte array to the track.
            audioTrack.write(audioData, 0, readSize);
            bytesRead += readSize;
        }
        dis.close();
        audioTrack.stop();
        audioTrack.release();

        Log.v(LOGTAG, "Playback done...");

        mStop = false;
    } catch (IOException e) {
        Log.e(LOGTAG, "Error playing file ", e);
    } catch (IllegalStateException e) {
        Log.e(LOGTAG, "Audio flinger doesn't like our track", e);
    }
}

Written with StackEdit.

How to: Record audio from MIC and save raw PCM values into a file (in Android)

Note: I have been working on some audio processing related projects at work and thought I could share some code which others find useful. This one is the first in the series. Please let me know if you find this useful; I can share more reusable code.
First thing to remember is that it is better to run the audio recording code in a background thread or in an AsyncTask. You can set the priority of that thread to THREAD_PRIORITY_URGENT_AUDIO.
Below is the code snippet; code is pretty self-explanatory.

public static final int SAMPLING_RATE = 44100; // Hz
public static final int AUDIO_SOURCE = MediaRecorder.AudioSource.MIC;
public static final int CHANNEL_IN_CONFIG = AudioFormat.CHANNEL_IN_MONO; // Single input channel
public static final int AUDIO_FORMAT = AudioFormat.ENCODING_PCM_16BIT; // 2 bytes per sample
// Smallest capture buffer AudioRecord will accept for this configuration.
public static final int BUFFER_SIZE = AudioRecord.getMinBufferSize(SAMPLING_RATE, CHANNEL_IN_CONFIG, AUDIO_FORMAT);
public static final String AUDIO_RECORDING_FILE_NAME = "recording.raw"; // Written to external storage

@Override
public void run() {
    // Capture is latency-sensitive; request the urgent-audio priority band.
    android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
    Log.v(LOGTAG, "Starting recording…");

    byte[] audioData = new byte[BUFFER_SIZE];
    AudioRecord recorder = new AudioRecord(AUDIO_SOURCE,
                SAMPLING_RATE, CHANNEL_IN_CONFIG,
                AUDIO_FORMAT, BUFFER_SIZE);
    recorder.startRecording();

    String filePath = Environment.getExternalStorageDirectory().getPath()
                + "/" + AUDIO_RECORDING_FILE_NAME;
    BufferedOutputStream os = null;
    try {
        os = new BufferedOutputStream(new FileOutputStream(filePath));
    } catch (FileNotFoundException e) {
        Log.e(LOGTAG, "File not found for recording ", e);
        // BUG FIX: the original fell through with os == null and crashed with
        // an NPE on the first write. Release the recorder and bail out.
        recorder.stop();
        recorder.release();
        return;
    }

    try {
        while (!mStop) {
            int status = recorder.read(audioData, 0, audioData.length);

            if (status == AudioRecord.ERROR_INVALID_OPERATION ||
                status == AudioRecord.ERROR_BAD_VALUE) {
                Log.e(LOGTAG, "Error reading audio data!");
                return;
            }

            try {
                // BUG FIX: write only the bytes this read() actually returned;
                // the original wrote the whole buffer, persisting stale bytes
                // after a short read.
                os.write(audioData, 0, status);
            } catch (IOException e) {
                Log.e(LOGTAG, "Error saving recording ", e);
                return;
            }
        }
    } finally {
        // BUG FIX: the early returns above leaked the stream and left the
        // recorder running; always close and release on the way out.
        try {
            os.close();
        } catch (IOException e) {
            Log.e(LOGTAG, "Error when releasing", e);
        }
        recorder.stop();
        recorder.release();
    }

    Log.v(LOGTAG, "Recording done…");
    mStop = false;
}

You will need to add the following permissions in AndroidManifest.xml.
<uses-permission android:name="android.permission.RECORD_AUDIO"/>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>

Friday, August 10, 2012

JNI : Step-by-step tutorial on OS X with GCC

Here is a simple step-by-step tutorial to write a sample JNI application on Mac OS X using gcc compiler.
Stand alone GCC  (i.e, without Xcode) can be installed from here.

Step 1: Write the java code

/** Minimal JNI demo: loads a native library and calls into it from main. */
public class JniSample {

   /** Implemented in libJniSample (see the accompanying C file); returns a status code. */
   public native int sayHello();

   public static void main(String[] args) {

       // The library must be on java.library.path / LD_LIBRARY_PATH.
       System.loadLibrary("JniSample");
       System.out.println("In java main");

       new JniSample().sayHello();
   }
}

Step 2: Compile the java code

$ javac JniSample.java

Step 3: Create the header file

$ javah JniSample

It looks like:
/* DO NOT EDIT THIS FILE - it is machine generated */
#include <jni.h>
/* Header for class JniSample */

#ifndef _Included_JniSample
#define _Included_JniSample
#ifdef __cplusplus
extern "C" {
#endif
/*
* Class:     JniSample
* Method:    sayHello
* Signature: ()I
*/
JNIEXPORT jint JNICALL Java_JniSample_sayHello
 (JNIEnv *, jobject);

#ifdef __cplusplus
}
#endif
#endif

Step 4: Write C code

#include <stdio.h>
#include "JniSample.h"

/*
 * Native implementation of JniSample.sayHello(): prints a greeting on
 * stdout and returns 0 to the Java caller. The mangled name and the
 * (JNIEnv *, jobject) parameters must match the javah-generated header.
 */
JNIEXPORT jint JNICALL Java_JniSample_sayHello (JNIEnv *env, jobject obj) {
 printf("Hello World\n");
 return 0;
}

Step 5: Compile C code and create native library

$ cc -v -c -fPIC -I/System/Library/Frameworks/JavaVM.framework/Versions/A/Headers/ JniSample.c -o libJniSample.o
$ libtool -dynamic -lSystem libJniSample.o -o libJniSample.dylib

Step 6: Set LD_LIBRARY_PATH env variable:

$ LD_LIBRARY_PATH=.
$ export LD_LIBRARY_PATH

Step 7: Run

$ java JniSample
In java main
Hello World

Thursday, July 5, 2012

Dired on steroids - Sunrise command mode

Found a nice extension to emacs dired: Sunrise.

If you want to open files in external apps from Sunrise mode add the lines below in your ".emacs"

(defun lx-sunrise-display-external ()
      "Open file at point in an external application."
      (interactive)
      ;; Grab the file name under point from the dired/sunrise buffer,
      ;; then hand it to the external viewer via the user's shell.
      (let ((file (dired-get-filename)))
        (call-process shell-file-name nil nil nil shell-command-switch
                  (format "%s \"%s\"" lx-sunrise-external-viewer file))))
    ;; "open" is the OS X launcher; substitute e.g. "xdg-open" on Linux.
    (setq lx-sunrise-external-viewer "open")
    ;; Bind C-return in Sunrise buffers to the command defined above.
    (define-key sr-mode-map '[\C-return] 'lx-sunrise-display-external)
Now you should be able to open files with external apps with C-return. I have only tried this with OS X Emacs 24.