Live video streaming from a Wireless IP Camera to an Android mobile - android

Here, I have to get live video streaming from a wireless IP camera to an Android mobile using the RTSP protocol. The camera is connected to a wireless router, and the mobile is on the same Wi-Fi network. Now I need to implement live video streaming from the camera.
What should I do for this purpose? This is a new concept for me. How do I connect the Android mobile and the camera programmatically and get live streaming? Any help would be appreciated.

You can access the image live feed from your Ip Cam to your PC, mine was
String URL = "http://192.168.1.8/image/jpeg.cgi";
or some sort. You should check whether your device supports it. You can then download each image and put it on the ImageView — not the actual image file, just its decoded graphical data. You can search for MJpegInputStream for that; here's the sample code for it:
// Wraps an HTTP MJPEG (motion JPEG) feed and extracts individual JPEG frames.
public class MjpegInputStream extends DataInputStream {
// JPEG start-of-image marker bytes (0xFF 0xD8).
private final byte[] SOI_MARKER = { (byte) 0xFF, (byte) 0xD8 };
// JPEG end-of-image marker bytes (0xFF 0xD9).
private final byte[] EOF_MARKER = { (byte) 0xFF, (byte) 0xD9 };
// Part-header field naming the size of the next JPEG frame in bytes.
private final String CONTENT_LENGTH = "Content-Length";
// Upper bound assumed for one part header.
private final static int HEADER_MAX_LENGTH = 100;
// Upper bound for one whole part (frame + header); also the mark/reset buffer size.
private final static int FRAME_MAX_LENGTH = 40000 + HEADER_MAX_LENGTH;
// Frame size parsed from the current part header; -1 until known.
private int mContentLength = -1;
/**
 * Opens the camera's MJPEG HTTP feed at {@code url} and wraps the response
 * body in an MjpegInputStream. Returns null on any failure, so callers
 * must null-check the result.
 */
public static MjpegInputStream read(Context context, String url) {
    HttpResponse res;
    MyHttpClient httpclient = new MyHttpClient(context);
    try {
        res = httpclient.execute(new HttpGet(URI.create(url)));
        return new MjpegInputStream(res.getEntity().getContent());
    } catch (ClientProtocolException e) {
        e.printStackTrace(); // was silently swallowed; surface for debugging
    } catch (IOException e) {
        e.printStackTrace(); // was silently swallowed; surface for debugging
    }
    return null;
}
// Buffer the feed so mark()/reset() can rewind across a whole frame (FRAME_MAX_LENGTH bytes).
public MjpegInputStream(InputStream in) { super(new BufferedInputStream(in, FRAME_MAX_LENGTH)); }
/**
 * Scans the stream for {@code sequence} and returns the offset just past
 * its last byte, or -1 if it is not found within FRAME_MAX_LENGTH bytes.
 *
 * @throws IOException on EOF or read error (readUnsignedByte throws EOFException).
 */
private int getEndOfSeqeunce(DataInputStream in, byte[] sequence) throws IOException {
    int seqIndex = 0;
    byte c;
    for (int i = 0; i < FRAME_MAX_LENGTH; i++) {
        c = (byte) in.readUnsignedByte();
        if (c == sequence[seqIndex]) {
            seqIndex++;
            if (seqIndex == sequence.length)
                return i + 1;
        } else {
            // Bug fix: after a failed partial match the current byte may itself
            // start a new match (e.g. 0xFF 0xFF 0xD8 against the 0xFF 0xD8 SOI
            // marker); the original unconditionally reset and missed the marker.
            seqIndex = (c == sequence[0]) ? 1 : 0;
        }
    }
    return -1;
}
/**
 * Returns the offset of the first byte of {@code sequence} in the stream,
 * or -1 when the sequence is not found.
 */
private int getStartOfSequence(DataInputStream in, byte[] sequence) throws IOException {
    final int markerEnd = getEndOfSeqeunce(in, sequence);
    if (markerEnd < 0) {
        return -1;
    }
    return markerEnd - sequence.length;
}
/**
 * Parses the MJPEG part header as a properties list and returns the value
 * of its Content-Length field.
 *
 * @throws NumberFormatException when the field is missing or not a number.
 */
private int parseContentLength(byte[] headerBytes) throws IOException, NumberFormatException {
    final Properties headerFields = new Properties();
    headerFields.load(new ByteArrayInputStream(headerBytes));
    final String lengthValue = headerFields.getProperty(CONTENT_LENGTH);
    return Integer.parseInt(lengthValue);
}
/**
 * Reads the next JPEG frame out of the multipart stream and decodes it to
 * a Bitmap. Relies on mark()/reset() over the BufferedInputStream set up
 * in the constructor.
 *
 * @throws IOException when no frame boundary can be located in the stream.
 */
public Bitmap readMjpegFrame() throws IOException {
    mark(FRAME_MAX_LENGTH);
    int headerLen = getStartOfSequence(this, SOI_MARKER);
    // Bug fix: a missing SOI marker used to yield new byte[-1]
    // (NegativeArraySizeException) instead of a meaningful error.
    if (headerLen < 0)
        throw new IOException("JPEG SOI marker not found in stream");
    reset();
    byte[] header = new byte[headerLen];
    readFully(header);
    try {
        mContentLength = parseContentLength(header);
    } catch (NumberFormatException nfe) {
        // No usable Content-Length header: measure the frame by scanning
        // forward to the end-of-image marker instead.
        mContentLength = getEndOfSeqeunce(this, EOF_MARKER);
    }
    if (mContentLength < 0)
        throw new IOException("JPEG EOI marker not found in stream");
    reset();
    byte[] frameData = new byte[mContentLength];
    skipBytes(headerLen);
    readFully(frameData);
    return BitmapFactory.decodeStream(new ByteArrayInputStream(frameData));
}
You can look more about MJpegInput stream here and here
hope it's helpful, happy coding.

Related

Implement echo cancellation in audio recorder android

I am using AudioRecord class to read voice input.
// Captures microphone PCM audio and streams it as raw UDP datagrams.
public class AudioSession {
// Set to true externally to keep the capture loop running.
private boolean isRecording=false;
public AudioRecord recorder;
// Destination UDP port for the audio datagrams.
private int port =50005;
// Destination host (hard-coded receiver address).
public String ipAddress="10.105.14.252";
// Capture rate taken from the device's native output rate.
private int sampleRate =AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_SYSTEM);;
private int channelConfig =AudioFormat.CHANNEL_IN_MONO;
private int encodingFormat=AudioFormat.ENCODING_PCM_16BIT;
// Minimum AudioRecord buffer for the chosen format; also the datagram payload size.
int minBufSize=AudioRecord.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
// Bytes returned by the last recorder.read() call.
int bufferSize=0;
DatagramSocket socket;
public void startStreaming(){
new Thread(new Runnable(){
#Override
public
void run(){
try{
socket=new DatagramSocket();
byte[] buffer=new byte[minBufSize];
DatagramPacket packet;
final InetAddress destination=InetAddress.getByName(ipAddress);
recorder=new AudioRecord(MIC,sampleRate,channelConfig,encodingFormat,minBufSize);
if(recorder.getState()==AudioRecord.STATE_INITIALIZED)
recorder.startRecording();
while(isRecording){
bufferSize=recorder.read(buffer,0,minBufSize);
packet=new DatagramPacket(buffer,minBufSize,destination,port);
socket.send(packet);
}
}
catch(Exception e){
e.printStackTrace();
}
finally{
socket.close();
}
}
}).start();
}
and at server side
I am receiving it and giving it as output to speakers
/**
 * UDP audio receiver: accepts PCM datagrams from clients, plays the current
 * speaker's audio through the local sound card, and manages a simple
 * raise-hand / withdraw speaker queue keyed by sender address.
 */
class Server {
    static AudioInputStream ais;
    static AudioFormat audioFormat;
    static int port = 50005;
    static int sampleRate = 44100;
    static SourceDataLine sourceDataLine;
    static boolean streaming = true;
    // Packet counter; 0 means "no speaker elected yet".
    static int i = 0;
    // FIFO of clients waiting to speak.
    static Queue<InetAddress> q = new LinkedList<InetAddress>();
    private static InetAddress currentSpeakerAddress;
    // Set mirror of the queue for O(1) membership checks.
    static Set<InetAddress> h = new HashSet<InetAddress>();
    public static final String PERMISSION_TEXT = "You may start talking";

    public static void main(String args[]) throws Exception {
        while (true) {
            DatagramSocket serverSocket = new DatagramSocket(port);
            byte[] receiveData = new byte[8192];
            // 16-bit mono little-endian PCM.
            audioFormat = new AudioFormat(sampleRate, 16, 1, true, false);
            sourceDataLine = (SourceDataLine) AudioSystem
                    .getLine(new DataLine.Info(SourceDataLine.class,
                            audioFormat));
            sourceDataLine.open(audioFormat);
            sourceDataLine.start();
            DatagramPacket receivePacket = new DatagramPacket(receiveData,
                    receiveData.length);
            ByteArrayInputStream baiss = new ByteArrayInputStream(
                    receivePacket.getData());
            while (streaming == true) {
                serverSocket.receive(receivePacket);
                // Bug fix: decode only the received bytes; the original used the
                // whole 8192-byte buffer, so commands matched against stale data.
                String requestText = new String(receivePacket.getData(), 0,
                        receivePacket.getLength());
                InetAddress requestAddress = receivePacket.getAddress();
                if (i == 0) {
                    // First packet elects the sender as the current speaker.
                    currentSpeakerAddress = requestAddress;
                    notifyToTalk(currentSpeakerAddress);
                }
                if (requestText.contains("Raise Hand")) {
                    if (currentSpeakerAddress.equals(requestAddress)) {
                        System.out.println(requestAddress.getHostAddress()
                                + " is online");
                    } else {
                        storeID(requestAddress);
                    }
                } else if (requestText.contains("Withdraw")) {
                    if (currentSpeakerAddress.equals(requestAddress)) {
                        if (h.isEmpty()) {
                            // break;
                            i = 0;
                        }
                        // currentSpeakerAddress = getNext();
                    } else {
                        if (h.remove(requestAddress)) {
                            q.remove(requestAddress);
                        }
                    }
                } else if (currentSpeakerAddress.equals(requestAddress)) {
                    ais = new AudioInputStream(baiss, audioFormat,
                            receivePacket.getLength());
                    // Bug fix: play only the received bytes, not the full
                    // buffer (stale tail bytes caused audible artifacts).
                    toSpeaker(java.util.Arrays.copyOfRange(
                            receivePacket.getData(), 0, receivePacket.getLength()));
                    System.out.println(i++ + " " + receivePacket.getLength());
                }
            }
            sourceDataLine.drain();
            sourceDataLine.close();
            // Bug fix: release the port before the outer loop re-binds it,
            // otherwise the second iteration throws BindException.
            serverSocket.close();
        }
    }

    /** Writes raw PCM bytes to the local sound card line. */
    public static void toSpeaker(byte soundbytes[]) {
        try {
            sourceDataLine.write(soundbytes, 0, soundbytes.length);
        } catch (Exception e) {
            System.out.println("Not working in speakers...");
        }
    }
}
but using this,
there is a lot of echo
I read somewhere to use short[], but sourceDataLine and Datagram uses byte[] to write.
What should I do so that it could work even for API less than 11...
If you want to do it on the DSP level, you can check the hexagon SDK. I think it requires some extra licensing.

Converting a string sent via SMS into RSA Public/Private key in android

I've been working on it for several days but without success, I' m doing SMS encryption in android using RSA encryption, the problem is how can I convert a String to public key and use it for encryption, I'm sending the publicKey converted into String via SMS but I'm not able to get the key back.
This is how my algorithm works but when I send "puk" as a string via Sms and make encryption it doesn't work anymore!!! plz help me . . .
// JCA algorithm name for the cipher/key generator.
private final static String RSA = "RSA";
// Public key used for encryption.
public static PublicKey uk;
// Private key used for decryption.
public static PrivateKey rk;
// String form of the public key intended for transmission via SMS.
public static String puk;
/**
 * Generates a fresh RSA key pair and stores it in the static fields
 * {@code uk} (public) and {@code rk} (private).
 */
public static void generateKey() throws Exception
{
    KeyPairGenerator gen = KeyPairGenerator.getInstance(RSA);
    // Bug fix: 128 bits is below the minimum RSA modulus size providers
    // accept (and would be trivially breakable anyway); use 2048 bits.
    gen.initialize(2048, new SecureRandom());
    KeyPair keyPair = gen.generateKeyPair();
    uk = keyPair.getPublic();
    rk = keyPair.getPrivate();
    // NOTE(review): toString() is NOT a serialized key and cannot be turned
    // back into a PublicKey on the receiving side - this is the asker's bug.
    // To send the key via SMS, transmit Base64(uk.getEncoded()) and rebuild
    // it with KeyFactory.generatePublic(new X509EncodedKeySpec(bytes)).
    puk = uk.toString();
}
/**
 * Encrypts the bytes of {@code text} with the given RSA public key and
 * returns the raw ciphertext.
 */
private static byte[] encrypt(String text, PublicKey pubRSA) throws Exception
{
    final Cipher rsaCipher = Cipher.getInstance(RSA);
    rsaCipher.init(Cipher.ENCRYPT_MODE, pubRSA);
    final byte[] plainBytes = text.getBytes();
    return rsaCipher.doFinal(plainBytes);
}
/**
 * Encrypts {@code text} with the static public key {@code uk} and returns
 * the ciphertext as an upper-case hex string, or null on failure.
 */
public final static String encrypt(String text)
{
    try {
        final byte[] cipherBytes = encrypt(text, uk);
        return byte2hex(cipherBytes);
    } catch (Exception e) {
        e.printStackTrace();
    }
    return null;
}
/**
 * Hex-decodes {@code data}, decrypts it with the static private key, and
 * returns the plaintext suffixed with " OK"; on failure returns a string
 * starting with "Error: " followed by the exception.
 */
public final static String decrypt(String data)
{
    try {
        final byte[] cipherBytes = hex2byte(data.getBytes());
        final byte[] plainBytes = decrypt(cipherBytes);
        return new String(plainBytes) + " OK";
    } catch (Exception e) {
        return "Error: " + e;
    }
}
/**
 * Decrypts raw RSA ciphertext with the static private key {@code rk}.
 */
private static byte[] decrypt(byte[] src) throws Exception
{
    final Cipher rsaCipher = Cipher.getInstance(RSA);
    rsaCipher.init(Cipher.DECRYPT_MODE, rk);
    return rsaCipher.doFinal(src);
}
/**
 * Encodes a byte array as an upper-case hex string (two digits per byte,
 * zero-padded).
 */
public static String byte2hex(byte[] b)
{
    // Use StringBuilder instead of the original repeated String
    // concatenation, which was O(n^2) in the input length.
    StringBuilder hex = new StringBuilder(b.length * 2);
    for (int n = 0; n < b.length; n++)
        hex.append(String.format("%02X", b[n]));
    return hex.toString();
}
/**
 * Decodes an ASCII hex string (as bytes) back into the raw bytes it
 * represents. Inverse of {@link #byte2hex}.
 *
 * @throws IllegalArgumentException when the input length is odd.
 * @throws NumberFormatException when a character is not a hex digit.
 */
public static byte[] hex2byte(byte[] b)
{
    if ((b.length % 2) != 0)
        // Bug fix: the original threw with the meaningless message "hello".
        throw new IllegalArgumentException("hex input must have an even number of digits");
    byte[] decoded = new byte[b.length / 2];
    for (int n = 0; n < b.length; n += 2)
    {
        String pair = new String(b, n, 2);
        decoded[n / 2] = (byte) Integer.parseInt(pair, 16);
    }
    return decoded;
}

Upload live android webcam video to RTP/RTSP Server

I have already done proper research, but still lack information on the thing I would like to achieve.
So I would like to program an application where the user can record a video and instantly (live) upload the video to a RTP/RTSP Server.
The server side will not be a problem. The thing I am unclear about is how to achieve this on the phone-side.
My research so far is that I have to write the video on recording to a local socket rather than to a file, because the 3gp files if written to a file cannot be accessed, until finalized (when the video is stopped and the header information have been written to the video about length and others).
When the socket receives the continuous data, I will need to wrap it into a RTP packet and send it to the remote server. I possibly will also have to do basic encoding first (which is not so important yet).
Does anybody have any idea, if this theory is correct so far.
I would also like to know if someone could point me to a few code-snippets of similar approaches, especially for sending the video on the fly to the server. I am not sure yet how to do that.
Thank you very much and best regards
Your overall approach sounds correct, but there are a couple of things you need to consider.
So I would like to program an application where the user can record a video and instantly (live) upload the video to a RTP/RTSP Server.
I'm assuming you want to upload to an RTSP server so that it can redistribute the content to multiple clients?
How will you handle the signaling/setup of the RTP session to the
RTSP server? You need to notify the RTSP server somehow that a user
is going to upload live media so that it can open the appropriate
RTP/RTCP sockets etc.
How will you handle authentication? Multiple client devices?
My research so far is that I have to write the video on recording to a local socket rather than to a file, because the 3gp files if written to a file cannot be accessed, until finalized (when the video is stopped and the header information have been written to the video about length and others).
Sending frames in real-time over RTP/RTCP is the correct approach. As the capture device captures each frame, you need to encode/compress it and send it over the socket. 3gp, like mp4, is a container format used for file storage. For live capture there is no need to write to a file. The only time this makes sense is e.g. in HTTP Live Streaming or DASH approaches, where media is written to a transport stream or mp4 file, before being served over HTTP.
When the socket receives the continuous data, I will need to wrap it into a RTP packet and send it to the remote server. I possibly will also have to do basic encoding first (which is not so important yet).
I would disagree, encoding is very important, you'll likely never manage to send the video otherwise, and you'll have to deal with issues such as cost (over mobile networks) and just the sheer volume of media depending on resolution and framerate.
Does anybody have any idea, if this theory is correct so far. I would also like to know if someone could point me to a few code-snippets of similar approaches, especially for sending the video on the fly to the server. I am not sure yet how to do that.
Take a look at the spydroid open source project as a starting point.
It contains many of the necessary steps including how to configure the encoder, packetise to RTP, send RTCP, as well as some RTSP server functionality. Spydroid sets up an RTSP server so media is encoded and sent once an RTSP client such as VLC is used to setup an RTSP session. Since your application is driven by the phone user wanting to send media to a server, you may need to consider another approach to start the sending, even if you send some kind of message to the server to for instance setup an RTSP session like in spydroid.
A year ago I created android app that could stream its camera/microphone using rtsp over tcp to wowza media server.
The general approach is to create a unix socket, get its file descriptor and feed it to the android media recorder component. The media recorder is then instructed to record camera video in mp4/h264 format to that file descriptor. Now, your app reads the client socket, parses the mp4 to remove the header, gets the iframes from it, and wraps them into an rtsp stream on the fly.
Something similar can also be done for sound (normally AAC). Of course you have to handle timestamping yourself, and the trickiest thing in the entire approach is video/audio synchronisation.
So here is first part of it. Something that can be called rtspsocket. It negotiates with media server in connect method and after that you can write into it the stream itself. I will show it later.
package com.example.android.streaming.streaming.rtsp;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.math.BigInteger;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.SocketException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Locale;
import java.util.concurrent.ConcurrentHashMap;
import android.util.Base64;
import android.util.Log;
import com.example.android.streaming.StreamingApp;
import com.example.android.streaming.streaming.Session;
import com.example.android.streaming.BuildConfig;
public class RtspSocket extends Socket {
public static final int RTSP_HEADER_LENGTH = 4;
public static final int RTP_HEADER_LENGTH = 12;
public static final int MTU = 1400;
public static final int PAYLOAD_OFFSET = RTSP_HEADER_LENGTH + RTP_HEADER_LENGTH;
public static final int RTP_OFFSET = RTSP_HEADER_LENGTH;
private ConcurrentHashMap<String, String> headerMap = new ConcurrentHashMap<String, String>();
static private final String kCRLF = "\r\n";
// RTSP request format strings
static private final String kOptions = "OPTIONS %s RTSP/1.0\r\n";
static private final String kDescribe = "DESCRIBE %s RTSP/1.0\r\n";
static private final String kAnnounce = "ANNOUNCE %s RTSP/1.0\r\n";
static private final String kSetupPublish = "SETUP %s/trackid=%d RTSP/1.0\r\n";
#SuppressWarnings("unused")
static private final String kSetupPlay = "SETUP %s/trackid=%d RTSP/1.0\r\n";
static private final String kRecord = "RECORD %s RTSP/1.0\r\n";
static private final String kPlay = "PLAY %s RTSP/1.0\r\n";
static private final String kTeardown = "TEARDOWN %s RTSP/1.0\r\n";
// RTSP header format strings
static private final String kCseq = "Cseq: %d\r\n";
static private final String kContentLength = "Content-Length: %d\r\n";
static private final String kContentType = "Content-Type: %s\r\n";
static private final String kTransport = "Transport: RTP/AVP/%s;unicast;mode=%s;%s\r\n";
static private final String kSession = "Session: %s\r\n";
static private final String kRange = "range: %s\r\n";
static private final String kAccept = "Accept: %s\r\n";
static private final String kAuthBasic = "Authorization: Basic %s\r\n";
static private final String kAuthDigest = "Authorization: Digest username=\"%s\",realm=\"%s\",nonce=\"%s\",uri=\"%s\",response=\"%s\"\r\n";
// RTSP header keys
static private final String kSessionKey = "Session";
static private final String kWWWAuthKey = "WWW-Authenticate";
byte header[] = new byte[RTSP_MAX_HEADER + 1];
static private final int RTSP_MAX_HEADER = 4095;
static private final int RTSP_MAX_BODY = 4095;
static private final int RTSP_RESP_ERR = -6;
// static private final int RTSP_RESP_ERR_SESSION = -7;
static public final int RTSP_OK = 200;
static private final int RTSP_BAD_USER_PASS = 401;
static private final int SOCK_ERR_READ = -5;
/* Number of channels including control ones. */
private int channelCount = 0;
/* RTSP negotiation cmd seq counter */
private int seq = 0;
private String authentication = null;
private String session = null;
private String path = null;
private String url = null;
private String user = null;
private String pass = null;
private String sdp = null;
private byte[] buffer = new byte[MTU];
public RtspSocket() {
super();
try {
setTcpNoDelay(true);
setSoTimeout(60000);
} catch (SocketException e) {
Log.e(StreamingApp.TAG, "Failed to set socket params.");
}
buffer[RTSP_HEADER_LENGTH] = (byte) Integer.parseInt("10000000", 2);
}
public byte[] getBuffer() {
return buffer;
}
public static final void setLong(byte[] buffer, long n, int begin, int end) {
for (end--; end >= begin; end--) {
buffer[end] = (byte) (n % 256);
n >>= 8;
}
}
public void setSequence(int seq) {
setLong(buffer, seq, RTP_OFFSET + 2, RTP_OFFSET + 4);
}
public void setSSRC(int ssrc) {
setLong(buffer, ssrc, RTP_OFFSET + 8, RTP_OFFSET + 12);
}
public void setPayload(int payload) {
buffer[RTP_OFFSET + 1] = (byte) (payload & 0x7f);
}
public void setRtpTimestamp(long timestamp) {
setLong(buffer, timestamp, RTP_OFFSET + 4, RTP_OFFSET + 8);
}
/** Sends the RTP packet over the network */
private void send(int length, int stream) throws IOException {
buffer[0] = '$';
buffer[1] = (byte) stream;
setLong(buffer, length, 2, 4);
OutputStream s = getOutputStream();
s.write(buffer, 0, length + RTSP_HEADER_LENGTH);
s.flush();
}
public void sendReport(int length, int ssrc, int stream) throws IOException {
setPayload(200);
setLong(buffer, ssrc, RTP_OFFSET + 4, RTP_OFFSET + 8);
send(length + RTP_HEADER_LENGTH, stream);
}
public void sendData(int length, int ssrc, int seq, int payload, int stream, boolean last) throws IOException {
setSSRC(ssrc);
setSequence(seq);
setPayload(payload);
buffer[RTP_OFFSET + 1] |= (((last ? 1 : 0) & 0x01) << 7);
send(length + RTP_HEADER_LENGTH, stream);
}
public int getChannelCount() {
return channelCount;
}
private void write(String request) throws IOException {
try {
String asci = new String(request.getBytes(), "US-ASCII");
OutputStream out = getOutputStream();
out.write(asci.getBytes());
} catch (IOException e) {
throw new IOException("Error writing to socket.");
}
}
private String read() throws IOException {
String response = null;
try {
InputStream in = getInputStream();
int i = 0, len = 0, crlf_count = 0;
boolean parsedHeader = false;
for (; i < RTSP_MAX_BODY && !parsedHeader && len > -1; i++) {
len = in.read(header, i, 1);
if (header[i] == '\r' || header[i] == '\n') {
crlf_count++;
if (crlf_count == 4)
parsedHeader = true;
} else {
crlf_count = 0;
}
}
if (len != -1) {
len = i;
header[len] = '\0';
response = new String(header, 0, len, "US-ASCII");
}
} catch (IOException e) {
throw new IOException("Connection timed out. Check your network settings.");
}
return response;
}
private int parseResponse(String response) {
String[] lines = response.split(kCRLF);
String[] items = response.split(" ");
String tempString, key, value;
headerMap.clear();
if (items.length < 2)
return RTSP_RESP_ERR;
int responseCode = RTSP_RESP_ERR;
try {
responseCode = Integer.parseInt(items[1]);
} catch (Exception e) {
Log.w(StreamingApp.TAG, e.getMessage());
Log.w(StreamingApp.TAG, response);
}
if (responseCode == RTSP_RESP_ERR)
return responseCode;
// Parse response header into key value pairs.
for (int i = 1; i < lines.length; i++) {
tempString = lines[i];
if (tempString.length() == 0)
break;
int idx = tempString.indexOf(":");
if (idx == -1)
continue;
key = tempString.substring(0, idx);
value = tempString.substring(idx + 1);
headerMap.put(key, value);
}
tempString = headerMap.get(kSessionKey);
if (tempString != null) {
// Parse session
items = tempString.split(";");
tempString = items[0];
session = tempString.trim();
}
return responseCode;
}
private void generateBasicAuth() throws UnsupportedEncodingException {
String userpass = String.format("%s:%s", user, pass);
authentication = String.format(kAuthBasic, Base64.encodeToString(userpass.getBytes("US-ASCII"), Base64.DEFAULT));
}
public static String md5(String s) {
MessageDigest digest;
try {
digest = MessageDigest.getInstance("MD5");
digest.update(s.getBytes(), 0, s.length());
String hash = new BigInteger(1, digest.digest()).toString(16);
return hash;
} catch (NoSuchAlgorithmException e) {
e.printStackTrace();
}
return "";
}
static private final int CC_MD5_DIGEST_LENGTH = 16;
private String md5HexDigest(String input) {
byte digest[] = md5(input).getBytes();
String result = new String();
for (int i = 0; i < CC_MD5_DIGEST_LENGTH; i++)
result = result.concat(String.format("%02x", digest[i]));
return result;
}
private void generateDigestAuth(String method) {
String nonce, realm;
String ha1, ha2, response;
// WWW-Authenticate: Digest realm="Streaming Server",
// nonce="206351b944cb28fe37a0794848c2e36f"
String wwwauth = headerMap.get(kWWWAuthKey);
int idx = wwwauth.indexOf("Digest");
String authReq = wwwauth.substring(idx + "Digest".length() + 1);
if (BuildConfig.DEBUG)
Log.d(StreamingApp.TAG, String.format("Auth Req: %s", authReq));
String[] split = authReq.split(",");
realm = split[0];
nonce = split[1];
split = realm.split("=");
realm = split[1];
realm = realm.substring(1, 1 + realm.length() - 2);
split = nonce.split("=");
nonce = split[1];
nonce = nonce.substring(1, 1 + nonce.length() - 2);
if (BuildConfig.DEBUG) {
Log.d(StreamingApp.TAG, String.format("realm=%s", realm));
Log.d(StreamingApp.TAG, String.format("nonce=%s", nonce));
}
ha1 = md5HexDigest(String.format("%s:%s:%s", user, realm, pass));
ha2 = md5HexDigest(String.format("%s:%s", method, url));
response = md5HexDigest(String.format("%s:%s:%s", ha1, nonce, ha2));
authentication = md5HexDigest(String.format(kAuthDigest, user, realm, nonce, url, response));
}
private int options() throws IOException {
seq++;
StringBuilder request = new StringBuilder();
request.append(String.format(kOptions, url));
request.append(String.format(kCseq, seq));
request.append(kCRLF);
if (BuildConfig.DEBUG)
Log.d(StreamingApp.TAG, "--- OPTIONS Request ---\n\n" + request);
write(request.toString());
String response = read();
if (response == null)
return SOCK_ERR_READ;
if (BuildConfig.DEBUG)
Log.d(StreamingApp.TAG, "--- OPTIONS Response ---\n\n" + response);
return parseResponse(response);
}
#SuppressWarnings("unused")
private int describe() throws IOException {
seq++;
StringBuilder request = new StringBuilder();
request.append(String.format(kDescribe, url));
request.append(String.format(kAccept, "application/sdp"));
request.append(String.format(kCseq, seq));
request.append(kCRLF);
if (BuildConfig.DEBUG)
Log.d(StreamingApp.TAG, "--- DESCRIBE Request ---\n\n" + request);
write(request.toString());
String response = read();
if (response == null)
return SOCK_ERR_READ;
if (BuildConfig.DEBUG)
Log.d(StreamingApp.TAG, "--- DESCRIBE Response ---\n\n" + response);
return parseResponse(response);
}
private int recurseDepth = 0;
private int announce() throws IOException {
seq++;
recurseDepth = 0;
StringBuilder request = new StringBuilder();
request.append(String.format(kAnnounce, url));
request.append(String.format(kCseq, seq));
request.append(String.format(kContentLength, sdp.length()));
request.append(String.format(kContentType, "application/sdp"));
request.append(kCRLF);
if (sdp.length() > 0)
request.append(sdp);
if (BuildConfig.DEBUG)
Log.d(StreamingApp.TAG, "--- ANNOUNCE Request ---\n\n" + request);
write(request.toString());
String response = read();
if (response == null)
return SOCK_ERR_READ;
if (BuildConfig.DEBUG)
Log.d(StreamingApp.TAG, "--- ANNOUNCE Response ---\n\n" + response);
int ret = parseResponse(response);
if (ret == RTSP_BAD_USER_PASS && recurseDepth == 0) {
String wwwauth = headerMap.get(kWWWAuthKey);
if (wwwauth != null) {
if (BuildConfig.DEBUG)
Log.d(StreamingApp.TAG, String.format("WWW Auth Value: %s", wwwauth));
int idx = wwwauth.indexOf("Basic");
recurseDepth++;
if (idx != -1) {
generateBasicAuth();
} else {
// We are assuming Digest here.
generateDigestAuth("ANNOUNCE");
}
ret = announce();
recurseDepth--;
}
}
return ret;
}
private int setup(int trackId) throws IOException {
seq++;
recurseDepth = 0;
StringBuilder request = new StringBuilder();
request.append(String.format(kSetupPublish, url, trackId));
request.append(String.format(kCseq, seq));
/* One channel for rtp (data) and one for rtcp (control) */
String tempString = String.format(Locale.getDefault(), "interleaved=%d-%d", channelCount++, channelCount++);
request.append(String.format(kTransport, "TCP", "record", tempString));
request.append(kCRLF);
if (BuildConfig.DEBUG)
Log.d(StreamingApp.TAG, "--- SETUP Request ---\n\n" + request);
write(request.toString());
String response = read();
if (response == null)
return SOCK_ERR_READ;
if (BuildConfig.DEBUG)
Log.d(StreamingApp.TAG, "--- SETUP Response ---\n\n" + response);
int ret = parseResponse(response);
if (ret == RTSP_BAD_USER_PASS && recurseDepth == 0) {
String wwwauth = headerMap.get(kWWWAuthKey);
if (wwwauth != null) {
if (BuildConfig.DEBUG)
Log.d(StreamingApp.TAG, String.format("WWW Auth Value: %s", wwwauth));
int idx = wwwauth.indexOf("Basic");
recurseDepth++;
if (idx != -1) {
generateBasicAuth();
} else {
// We are assuming Digest here.
generateDigestAuth("SETUP");
}
ret = setup(trackId);
authentication = null;
recurseDepth--;
}
}
return ret;
}
private int record() throws IOException {
seq++;
recurseDepth = 0;
StringBuilder request = new StringBuilder();
request.append(String.format(kRecord, url));
request.append(String.format(kCseq, seq));
request.append(String.format(kRange, "npt=0.000-"));
if (authentication != null)
request.append(authentication);
if (session != null)
request.append(String.format(kSession, session));
request.append(kCRLF);
if (BuildConfig.DEBUG)
Log.d(StreamingApp.TAG, "--- RECORD Request ---\n\n" + request);
write(request.toString());
String response = read();
if (response == null)
return SOCK_ERR_READ;
if (BuildConfig.DEBUG)
Log.d(StreamingApp.TAG, "--- RECORD Response ---\n\n" + response);
int ret = parseResponse(response);
if (ret == RTSP_BAD_USER_PASS && recurseDepth == 0) {
String wwwauth = headerMap.get(kWWWAuthKey);
if (wwwauth != null) {
if (BuildConfig.DEBUG)
Log.d(StreamingApp.TAG, String.format("WWW Auth Value: %s", wwwauth));
int idx = wwwauth.indexOf("Basic");
recurseDepth++;
if (idx != -1) {
generateBasicAuth();
} else {
// We are assuming Digest here.
generateDigestAuth("RECORD");
}
ret = record();
authentication = null;
recurseDepth--;
}
}
return ret;
}
#SuppressWarnings("unused")
private int play() throws IOException {
seq++;
recurseDepth = 0;
StringBuilder request = new StringBuilder();
request.append(String.format(kPlay, url));
request.append(String.format(kCseq, seq));
request.append(String.format(kRange, "npt=0.000-"));
if (authentication != null)
request.append(authentication);
if (session != null)
request.append(String.format(kSession, session));
request.append(kCRLF);
if (BuildConfig.DEBUG)
Log.d(StreamingApp.TAG, "--- PLAY Request ---\n\n" + request);
write(request.toString());
String response = read();
if (response == null)
return SOCK_ERR_READ;
if (BuildConfig.DEBUG)
Log.d(StreamingApp.TAG, "--- PLAY Response ---\n\n" + response);
int ret = parseResponse(response);
if (ret == RTSP_BAD_USER_PASS && recurseDepth == 0) {
String wwwauth = headerMap.get(kWWWAuthKey);
if (wwwauth != null) {
if (BuildConfig.DEBUG)
Log.d(StreamingApp.TAG, String.format("WWW Auth Value: %s", wwwauth));
int idx = wwwauth.indexOf("Basic");
recurseDepth++;
if (idx != -1) {
generateBasicAuth();
} else {
// We are assuming Digest here.
generateDigestAuth("PLAY");
}
ret = record();
authentication = null;
recurseDepth--;
}
}
return ret;
}
private int teardown() throws IOException {
seq++;
recurseDepth = 0;
StringBuilder request = new StringBuilder();
request.append(String.format(kTeardown, url));
request.append(String.format(kCseq, seq));
if (authentication != null)
request.append(authentication);
if (session != null)
request.append(String.format(kSession, session));
request.append(kCRLF);
if (BuildConfig.DEBUG)
Log.d(StreamingApp.TAG, "--- TEARDOWN Request ---\n\n" + request);
write(request.toString());
String response = read();
if (response == null)
return SOCK_ERR_READ;
if (BuildConfig.DEBUG)
Log.d(StreamingApp.TAG, "--- TEARDOWN Response ---\n\n" + response);
int ret = parseResponse(response);
if (ret == RTSP_BAD_USER_PASS && recurseDepth == 0) {
String wwwauth = headerMap.get(kWWWAuthKey);
if (wwwauth != null) {
if (BuildConfig.DEBUG)
Log.d(StreamingApp.TAG, String.format("WWW Auth Value: %s", wwwauth));
int idx = wwwauth.indexOf("Basic");
recurseDepth++;
if (idx != -1) {
generateBasicAuth();
} else {
// We are assuming Digest here.
generateDigestAuth("TEARDOWN");
}
ret = record();
authentication = null;
recurseDepth--;
}
}
return ret;
}
public void connect(String dest, int port, Session session) throws IOException {
int trackId = 1;
int responseCode;
if (isConnected())
return;
if (!session.hasAudioTrack() && !session.hasVideoTrack())
throw new IOException("No tracks found in session.");
InetSocketAddress addr = null;
try {
addr = new InetSocketAddress(dest, port);
} catch (Exception e) {
throw new IOException("Failed to resolve rtsp server address.");
}
this.sdp = session.getSDP();
this.user = session.getUser();
this.pass = session.getPass();
this.path = session.getPath();
this.url = String.format("rtsp://%s:%d%s", dest, addr.getPort(), this.path);
try {
super.connect(addr);
} catch (IOException e) {
throw new IOException("Failed to connect rtsp server.");
}
responseCode = announce();
if (responseCode != RTSP_OK) {
close();
throw new IOException("RTSP announce failed: " + responseCode);
}
responseCode = options();
if (responseCode != RTSP_OK) {
close();
throw new IOException("RTSP options failed: " + responseCode);
}
/* Setup audio */
if (session.hasAudioTrack()) {
session.getAudioTrack().setStreamId(channelCount);
responseCode = setup(trackId++);
if (responseCode != RTSP_OK) {
close();
throw new IOException("RTSP video failed: " + responseCode);
}
}
/* Setup video */
if (session.hasVideoTrack()) {
session.getVideoTrack().setStreamId(channelCount);
responseCode = setup(trackId++);
if (responseCode != RTSP_OK) {
close();
throw new IOException("RTSP audio setup failed: " + responseCode);
}
}
responseCode = record();
if (responseCode != RTSP_OK) {
close();
throw new IOException("RTSP record failed: " + responseCode);
}
}
public void close() throws IOException {
if (!isConnected())
return;
teardown();
super.close();
}
}
I tried to achieve the same result (but abandoned due to lack of experience). My way was to use ffmpeg and/or avlib because it already has working rtmp stack. So in theory all you need is to route video stream to ffmpeg process which will stream to server.
is there a reason for using 3gp on the client side? With mp4 (with MOOV atom set in header) you can read the temp file in chunks and send over to the server, there will likely be a slight time delay though, all depends on your connection speed as well. Your rtsp server should be able to re-encode the mp4 back to 3gp for low bandwidth viewing.
At this point, if i had to accept camera ( raw stream ) and immediately make it available to a set of clients, i would go the google hangouts route and use WebRTC. see ondello 'platform section' for the toolset/SDK. During your evaluation, you should have looked at comparative merit of WebRTC v RTSP.
IMO with its statefulness, RTSP will be a nightmare behind firewalls and with NAT. AFAIK on 3G/4G the use of RTP in 3rd party apps is a bit risky.
That said, i put up on git an old android/rtp/rtsp/sdp project using libs from netty and 'efflux'. I think that this project was trying to retrieve and play just the audio track within the container ( vid track ignored and not pulled via network ) from Youtube videos all of which were encoded for RTSP at the time. I think there were some packet and frame header issues and i got fed up with RTSP and dropped it.
If you must pursue RTP/RTSP some of the packet and frame level stuff that other posters have mentioned is right there in the android classes and in the test cases that come with efflux
And here is rtsp session class. It uses rtsp socket to talk to media server. Its purpose is also to hold session params, such as, what streams it can send (video and/or audio), queues, somewhat audio/video sync code.
Used interface.
package com.example.android.streaming.streaming.rtsp;
/**
 * Callback invoked for every media packet produced by a stream so the
 * session can queue it for delivery over RTSP.
 */
public interface PacketListener {
    void onPacketReceived(Packet p);   // interface methods are implicitly public
}
Session itself.
package com.example.android.streaming.streaming;
import static java.util.EnumSet.of;
import java.io.IOException;
import java.util.EnumSet;
import java.util.concurrent.BlockingDeque;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;
import android.app.Activity;
import android.content.SharedPreferences;
import android.hardware.Camera;
import android.hardware.Camera.CameraInfo;
import android.os.SystemClock;
import android.preference.PreferenceManager;
import android.util.Log;
import android.view.SurfaceHolder;
import com.example.android.streaming.BuildConfig;
import com.example.android.streaming.StreamingApp;
import com.example.android.streaming.streaming.audio.AACStream;
import com.example.android.streaming.streaming.rtsp.Packet;
import com.example.android.streaming.streaming.rtsp.Packet.PacketType;
import com.example.android.streaming.streaming.rtsp.PacketListener;
import com.example.android.streaming.streaming.rtsp.RtspSocket;
import com.example.android.streaming.streaming.video.H264Stream;
import com.example.android.streaming.streaming.video.VideoConfig;
import com.example.android.streaming.streaming.video.VideoStream;
public class Session implements PacketListener, Runnable {
public final static int MESSAGE_START = 0x03;
public final static int MESSAGE_STOP = 0x04;
public final static int VIDEO_H264 = 0x01;
public final static int AUDIO_AAC = 0x05;
public final static int VIDEO_TRACK = 1;
public final static int AUDIO_TRACK = 0;
private static VideoConfig defaultVideoQuality = VideoConfig.defaultVideoQualiy.clone();
private static int defaultVideoEncoder = VIDEO_H264, defaultAudioEncoder = AUDIO_AAC;
private static Session sessionUsingTheCamera = null;
private static Session sessionUsingTheCamcorder = null;
private static int startedStreamCount = 0;
private int sessionTrackCount = 0;
private static SurfaceHolder surfaceHolder;
private Stream[] streamList = new Stream[2];
protected RtspSocket socket = null;
private Activity context = null;
private String host = null;
private String path = null;
private String user = null;
private String pass = null;
private int port;
public interface SessionListener {
public void startSession(Session session);
public void stopSession(Session session);
};
public Session(Activity context, String host, int port, String path, String user, String pass) {
this.context = context;
this.host = host;
this.port = port;
this.path = path;
this.pass = pass;
}
public boolean isConnected() {
return socket != null && socket.isConnected();
}
/**
* Connect to rtsp server and start new session. This should be called when
* all the streams are added so that proper sdp can be generated.
*/
public void connect() throws IOException {
try {
socket = new RtspSocket();
socket.connect(host, port, this);
} catch (IOException e) {
socket = null;
throw e;
}
}
public void close() throws IOException {
if (socket != null) {
socket.close();
socket = null;
}
}
public static void setDefaultVideoQuality(VideoConfig quality) {
defaultVideoQuality = quality;
}
public static void setDefaultAudioEncoder(int encoder) {
defaultAudioEncoder = encoder;
}
public static void setDefaultVideoEncoder(int encoder) {
defaultVideoEncoder = encoder;
}
public static void setSurfaceHolder(SurfaceHolder sh) {
surfaceHolder = sh;
}
public boolean hasVideoTrack() {
return getVideoTrack() != null;
}
public MediaStream getVideoTrack() {
return (MediaStream) streamList[VIDEO_TRACK];
}
public void addVideoTrack(Camera camera, CameraInfo info) throws IllegalStateException, IOException {
addVideoTrack(camera, info, defaultVideoEncoder, defaultVideoQuality, false);
}
public synchronized void addVideoTrack(Camera camera, CameraInfo info, int encoder, VideoConfig quality,
boolean flash) throws IllegalStateException, IOException {
if (isCameraInUse())
throw new IllegalStateException("Camera already in use by another client.");
Stream stream = null;
VideoConfig.merge(quality, defaultVideoQuality);
switch (encoder) {
case VIDEO_H264:
if (BuildConfig.DEBUG)
Log.d(StreamingApp.TAG, "Video streaming: H.264");
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context.getApplicationContext());
stream = new H264Stream(camera, info, this, prefs);
break;
}
if (stream != null) {
if (BuildConfig.DEBUG)
Log.d(StreamingApp.TAG, "Quality is: " + quality.resX + "x" + quality.resY + "px " + quality.framerate
+ "fps, " + quality.bitrate + "bps");
((VideoStream) stream).setVideoQuality(quality);
((VideoStream) stream).setPreviewDisplay(surfaceHolder.getSurface());
streamList[VIDEO_TRACK] = stream;
sessionUsingTheCamera = this;
sessionTrackCount++;
}
}
public boolean hasAudioTrack() {
return getAudioTrack() != null;
}
public MediaStream getAudioTrack() {
return (MediaStream) streamList[AUDIO_TRACK];
}
public void addAudioTrack() throws IOException {
addAudioTrack(defaultAudioEncoder);
}
public synchronized void addAudioTrack(int encoder) throws IOException {
if (sessionUsingTheCamcorder != null)
throw new IllegalStateException("Audio device is already in use by another client.");
Stream stream = null;
switch (encoder) {
case AUDIO_AAC:
if (android.os.Build.VERSION.SDK_INT < 14)
throw new IllegalStateException("This device does not support AAC.");
if (BuildConfig.DEBUG)
Log.d(StreamingApp.TAG, "Audio streaming: AAC");
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context.getApplicationContext());
stream = new AACStream(this, prefs);
break;
}
if (stream != null) {
streamList[AUDIO_TRACK] = stream;
sessionUsingTheCamcorder = this;
sessionTrackCount++;
}
}
public synchronized String getSDP() throws IllegalStateException, IOException {
StringBuilder sdp = new StringBuilder();
sdp.append("v=0\r\n");
/*
* The RFC 4566 (5.2) suggests to use an NTP timestamp here but we will
* simply use a UNIX timestamp.
*/
//sdp.append("o=- " + timestamp + " " + timestamp + " IN IP4 127.0.0.1\r\n");
sdp.append("o=- 0 0 IN IP4 127.0.0.1\r\n");
sdp.append("s=Vedroid\r\n");
sdp.append("c=IN IP4 " + host + "\r\n");
sdp.append("i=N/A\r\n");
sdp.append("t=0 0\r\n");
sdp.append("a=tool:Vedroid RTP\r\n");
int payload = 96;
int trackId = 1;
for (int i = 0; i < streamList.length; i++) {
if (streamList[i] != null) {
streamList[i].setPayloadType(payload++);
sdp.append(streamList[i].generateSDP());
sdp.append("a=control:trackid=" + trackId++ + "\r\n");
}
}
return sdp.toString();
}
public String getDest() {
return host;
}
public int getTrackCount() {
return sessionTrackCount;
}
public static boolean isCameraInUse() {
return sessionUsingTheCamera != null;
}
/** Indicates whether or not the microphone is being used in a session. **/
public static boolean isMicrophoneInUse() {
return sessionUsingTheCamcorder != null;
}
private SessionListener listener = null;
public synchronized void prepare(int trackId) throws IllegalStateException, IOException {
Stream stream = streamList[trackId];
if (stream != null && !stream.isStreaming())
stream.prepare();
}
public synchronized void start(int trackId) throws IllegalStateException, IOException {
Stream stream = streamList[trackId];
if (stream != null && !stream.isStreaming()) {
stream.start();
if (BuildConfig.DEBUG)
Log.d(StreamingApp.TAG, "Started " + (trackId == VIDEO_TRACK ? "video" : "audio") + " channel.");
// if (++startedStreamCount == 1 && listener != null)
// listener.startSession(this);
}
}
public void startAll(SessionListener listener) throws IllegalStateException, IOException {
this.listener = listener;
startThread();
for (int i = 0; i < streamList.length; i++)
prepare(i);
/*
* Important to start video capture before audio capture. This makes
* audio/video de-sync smaller.
*/
for (int i = 0; i < streamList.length; i++)
start(streamList.length - i - 1);
}
public synchronized void stopAll() {
for (int i = 0; i < streamList.length; i++) {
if (streamList[i] != null && streamList[i].isStreaming()) {
streamList[i].stop();
if (BuildConfig.DEBUG)
Log.d(StreamingApp.TAG, "Stopped " + (i == VIDEO_TRACK ? "video" : "audio") + " channel.");
if (--startedStreamCount == 0 && listener != null)
listener.stopSession(this);
}
}
stopThread();
this.listener = null;
if (BuildConfig.DEBUG)
Log.d(StreamingApp.TAG, "Session stopped.");
}
public synchronized void flush() {
for (int i = 0; i < streamList.length; i++) {
if (streamList[i] != null) {
streamList[i].release();
if (i == VIDEO_TRACK)
sessionUsingTheCamera = null;
else
sessionUsingTheCamcorder = null;
streamList[i] = null;
}
}
}
public String getPath() {
return path;
}
public String getUser() {
return user;
}
public String getPass() {
return pass;
}
private BlockingDeque<Packet> audioQueue = new LinkedBlockingDeque<Packet>(MAX_QUEUE_SIZE);
private BlockingDeque<Packet> videoQueue = new LinkedBlockingDeque<Packet>(MAX_QUEUE_SIZE);
private final static int MAX_QUEUE_SIZE = 1000;
private void sendPacket(Packet p) {
try {
MediaStream channel = (p.type == PacketType.AudioPacketType ? getAudioTrack() : getVideoTrack());
p.packetizer.send(p, socket, channel.getPayloadType(), channel.getStreamId());
getPacketQueue(p.type).remove(p);
} catch (IOException e) {
Log.e(StreamingApp.TAG, "Failed to send packet: " + e.getMessage());
}
}
private final ReentrantLock queueLock = new ReentrantLock();
private final Condition morePackets = queueLock.newCondition();
private AtomicBoolean stopped = new AtomicBoolean(true);
private Thread t = null;
private final void wakeupThread() {
queueLock.lock();
try {
morePackets.signalAll();
} finally {
queueLock.unlock();
}
}
public void startThread() {
if (t == null) {
t = new Thread(this);
stopped.set(false);
t.start();
}
}
public void stopThread() {
stopped.set(true);
if (t != null) {
t.interrupt();
try {
wakeupThread();
t.join();
} catch (InterruptedException e) {
}
t = null;
}
audioQueue.clear();
videoQueue.clear();
}
private long getStreamEndSampleTimestamp(BlockingDeque<Packet> queue) {
long sample = 0;
try {
sample = queue.getLast().getSampleTimestamp() + queue.getLast().getFrameLen();
} catch (Exception e) {
}
return sample;
}
private PacketType syncType = PacketType.AnyPacketType;
private boolean aligned = false;
private final BlockingDeque<Packet> getPacketQueue(PacketType type) {
return (type == PacketType.AudioPacketType ? audioQueue : videoQueue);
}
private void setPacketTimestamp(Packet p) {
/* Don't sync on SEI packet. */
if (!aligned && p.type != syncType) {
long shift = getStreamEndSampleTimestamp(getPacketQueue(syncType));
Log.w(StreamingApp.TAG, "Set shift +" + shift + "ms to "
+ (p.type == PacketType.VideoPacketType ? "video" : "audio") + " stream ("
+ (getPacketQueue(syncType).size() + 1) + ") packets.");
p.setTimestamp(p.getDuration(shift));
p.setSampleTimestamp(shift);
if (listener != null)
listener.startSession(this);
aligned = true;
} else {
p.setTimestamp(p.packetizer.getTimestamp());
p.setSampleTimestamp(p.packetizer.getSampleTimestamp());
}
p.packetizer.setSampleTimestamp(p.getSampleTimestamp() + p.getFrameLen());
p.packetizer.setTimestamp(p.getTimestamp() + p.getDuration());
// if (BuildConfig.DEBUG) {
// Log.d(StreamingApp.TAG, (p.type == PacketType.VideoPacketType ? "Video" : "Audio") + " packet timestamp: "
// + p.getTimestamp() + "; sampleTimestamp: " + p.getSampleTimestamp());
// }
}
/*
* Drop first frames if len is less than this. First sync frame will have
* frame len >= 10 ms.
*/
private final static int MinimalSyncFrameLength = 15;
#Override
public void onPacketReceived(Packet p) {
queueLock.lock();
try {
/*
* We always synchronize on video stream. Some devices have video
* coming faster than audio, this is ok. Audio stream time stamps
* will be adjusted. Other devices that have audio come first will
* see all audio packets dropped until first video packet comes.
* Then upon first video packet we again adjust the audio stream by
* time stamp of the last video packet in the queue.
*/
if (syncType == PacketType.AnyPacketType && p.type == PacketType.VideoPacketType
&& p.getFrameLen() >= MinimalSyncFrameLength)
syncType = p.type;
if (syncType == PacketType.VideoPacketType) {
setPacketTimestamp(p);
if (getPacketQueue(p.type).size() > MAX_QUEUE_SIZE - 1) {
Log.w(StreamingApp.TAG, "Queue (" + p.type + ") is full, dropping packet.");
} else {
/*
* Wakeup sending thread only if channels synchronization is
* already done.
*/
getPacketQueue(p.type).add(p);
if (aligned)
morePackets.signalAll();
}
}
} finally {
queueLock.unlock();
}
}
private boolean hasMorePackets(EnumSet<Packet.PacketType> mask) {
boolean gotPackets;
if (mask.contains(PacketType.AudioPacketType) && mask.contains(PacketType.VideoPacketType)) {
gotPackets = (audioQueue.size() > 0 && videoQueue.size() > 0) && aligned;
} else {
if (mask.contains(PacketType.AudioPacketType))
gotPackets = (audioQueue.size() > 0);
else if (mask.contains(PacketType.VideoPacketType))
gotPackets = (videoQueue.size() > 0);
else
gotPackets = (videoQueue.size() > 0 || audioQueue.size() > 0);
}
return gotPackets;
}
private void waitPackets(EnumSet<Packet.PacketType> mask) {
queueLock.lock();
try {
do {
if (!stopped.get() && !hasMorePackets(mask)) {
try {
morePackets.await();
} catch (InterruptedException e) {
}
}
} while (!stopped.get() && !hasMorePackets(mask));
} finally {
queueLock.unlock();
}
}
private void sendPackets() {
boolean send;
Packet a, v;
/*
* Wait for any type of packet and send asap. With time stamps correctly
* set, the real send moment is not important and may be quite
* different. Media server will only check for time stamps.
*/
waitPackets(of(PacketType.AnyPacketType));
v = videoQueue.peek();
if (v != null) {
sendPacket(v);
do {
a = audioQueue.peek();
if ((send = (a != null && a.getSampleTimestamp() <= v.getSampleTimestamp())))
sendPacket(a);
} while (!stopped.get() && send);
} else {
a = audioQueue.peek();
if (a != null)
sendPacket(a);
}
}
#Override
public void run() {
Log.w(StreamingApp.TAG, "Session thread started.");
/*
* Wait for both types of front packets to come and synchronize on each
* other.
*/
waitPackets(of(PacketType.AudioPacketType, PacketType.VideoPacketType));
while (!stopped.get())
sendPackets();
Log.w(StreamingApp.TAG, "Flushing session queues.");
Log.w(StreamingApp.TAG, " " + audioQueue.size() + " audio packets.");
Log.w(StreamingApp.TAG, " " + videoQueue.size() + " video packets.");
long start = SystemClock.elapsedRealtime();
while (audioQueue.size() > 0 || videoQueue.size() > 0)
sendPackets();
Log.w(StreamingApp.TAG, "Session thread stopped.");
Log.w(StreamingApp.TAG, "Queues flush took " + (SystemClock.elapsedRealtime() - start) + " ms.");
}
}
Check this answer: Video streaming over WIFI?
Then if u want to see the live streaming in android phone then include vlc plugin inside your application and connect through real time streaming protocol(rtsp).
// Launch VLC for Android (if installed) to play the RTSP stream.
Intent i = new Intent("org.videolan.vlc.VLCApplication.gui.video.VideoPlayerActivity");
// NOTE(review): setAction() replaces the action string passed to the
// constructor above, so only ACTION_VIEW + the data URI take effect -- verify
// the intended VLC activity is still resolved.
i.setAction(Intent.ACTION_VIEW);
// rtsp://<camera-ip>:<port>/ -- adjust to your camera's address.
i.setData(Uri.parse("rtsp://10.0.0.179:8086/"));
startActivity(i);
If u have installed VLC on your android phone, then you can stream using intent and pass the ip address and port no as shown above.

Recording .Wav with Android AudioRecorder

I have read a lot of pages about Android's AudioRecorder. You can see a list of them below the question.
I'm trying to record audio with AudioRecorder, but it's not working well.
public class MainActivity extends Activity {
AudioRecord ar = null;
int buffsize = 0;
int blockSize = 256;
boolean isRecording = false;
private Thread recordingThread = null;
#Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
}
/**
 * START button handler ("baslat" = Turkish for "start"): opens the
 * microphone at 44.1 kHz / mono / 16-bit PCM and spawns the writer thread.
 */
public void baslat(View v)
{
    buffsize = AudioRecord.getMinBufferSize(44100, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
    // Fix: guard against unsupported parameters instead of constructing an
    // AudioRecord with a negative buffer size.
    if (buffsize == AudioRecord.ERROR || buffsize == AudioRecord.ERROR_BAD_VALUE) {
        Log.e("AudioRecorder", "Unsupported recording parameters");
        return;
    }
    ar = new AudioRecord(MediaRecorder.AudioSource.MIC, 44100, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, buffsize);
    // Fix: verify the recorder actually initialized (fails without the
    // RECORD_AUDIO permission or when the mic is busy).
    if (ar.getState() != AudioRecord.STATE_INITIALIZED) {
        Log.e("AudioRecorder", "AudioRecord failed to initialize");
        ar.release();
        ar = null;
        return;
    }
    ar.startRecording();
    isRecording = true;
    recordingThread = new Thread(new Runnable() {
        public void run() {
            writeAudioDataToFile();
        }
    }, "AudioRecorder Thread");
    recordingThread.start();
}
/**
 * STOP button handler ("durdur" = Turkish for "stop").
 *
 * Fix: clear the flag BEFORE stopping the recorder so the writer thread's
 * loop exits cleanly instead of read()ing from a stopped recorder, and
 * release the native recorder resources (the original leaked them).
 */
public void durdur(View v)
{
    isRecording = false;
    if (recordingThread != null) {
        try {
            recordingThread.join(1000);   // writer exits after its current read()
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
        recordingThread = null;
    }
    if (ar != null) {
        ar.stop();
        ar.release();
        ar = null;
    }
}
/**
 * Writer-thread body: drains the microphone into a file until
 * {@code isRecording} is cleared.
 *
 * NOTE(review): despite the .wav extension this writes raw PCM with no
 * RIFF/WAVE header, which is why players reject the file -- a 44-byte
 * header must be prepended (see the other answers).
 */
private void writeAudioDataToFile() {
    String filePath = "/sdcard/voice8K16bitmono.wav";
    short sData[] = new short[buffsize/2];
    FileOutputStream os;
    try {
        os = new FileOutputStream(filePath);
    } catch (FileNotFoundException e) {
        e.printStackTrace();
        return;   // fix: the original fell through and NPE'd on os.write()
    }
    try {
        while (isRecording) {
            // Fix: honor the actual sample count returned by read(); the
            // original always wrote buffsize bytes, including stale data
            // after a short or failed read.
            int read = ar.read(sData, 0, sData.length);
            if (read > 0) {
                byte bData[] = short2byte(sData);
                os.write(bData, 0, read * 2);   // 2 bytes per 16-bit sample
            }
        }
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        // Fix: close in finally so the file is closed even on write failure.
        try {
            os.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
/**
 * Converts 16-bit PCM samples to little-endian bytes, zeroing the source
 * buffer as it goes so the next read starts from silence.
 */
private byte[] short2byte(short[] sData) {
    byte[] out = new byte[sData.length * 2];
    int pos = 0;
    for (int i = 0; i < sData.length; i++) {
        short sample = sData[i];
        out[pos++] = (byte) (sample & 0x00FF);   // low byte first (little endian)
        out[pos++] = (byte) (sample >> 8);       // then the high byte
        sData[i] = 0;
    }
    return out;
}
It's creating a .wav file but, when I try to listen to it, it's not opening. I'm getting a "file not supported" error. I've tried to play the file with quite a few media player applications.
NOTE : I have to use AudioRecorder instead of MediaRecorder because my app will be doing another process while recording (displaying an equalizer) .
Here is the list of pages that I've read about this subject:
http://developer.android.com/reference/android/media/AudioRecord.html#read(short[],%20int,%20int)
Android AudioRecord example
http://audiorecordandroid.blogspot.in
AudioRecord object not initializing
Recording a wav file from the mic in Android - problems
http://i-liger.com/article/android-wav-audio-recording
Creating a WAV file from raw PCM data using the Android SDK
Capturing Sound for Analysis and Visualizing Frequencies in Android
There are a lot of different ways to go about this. I've tried lots of them but nothing works for me. I've been working on this problem for about 6 hours now so I would appreciate a definitive answer, ideally some sample code.
I wrote a simple (by which you should read, not to professional standards) class to do this yesterday, and it works.
/**
 * Minimal in-memory 16-bit PCM WAV builder: lays out the 44-byte
 * RIFF/fmt/data header followed by the samples, then dumps the whole
 * buffer to a file.  All multi-byte fields are little-endian per the
 * RIFF/WAVE specification.
 */
private class Wave {
    private final int LONGINT = 4;
    private final int SMALLINT = 2;
    private final int INTEGER = 4;
    private final int ID_STRING_SIZE = 4;
    private final int WAV_RIFF_SIZE = LONGINT + ID_STRING_SIZE;
    private final int WAV_FMT_SIZE = (4 * SMALLINT) + (INTEGER * 2) + LONGINT + ID_STRING_SIZE;
    private final int WAV_DATA_SIZE = ID_STRING_SIZE + LONGINT;
    private final int WAV_HDR_SIZE = WAV_RIFF_SIZE + ID_STRING_SIZE + WAV_FMT_SIZE + WAV_DATA_SIZE;
    private final short PCM = 1;        // wFormatTag: uncompressed PCM
    private final int SAMPLE_SIZE = 2;  // bytes per 16-bit sample
    int cursor, nSamples;
    byte[] output;

    /**
     * Builds the header and copies samples data[start..end] (inclusive)
     * into the in-memory buffer.
     */
    public Wave(int sampleRate, short nChannels, short[] data, int start, int end) {
        nSamples = end - start + 1;
        cursor = 0;
        output = new byte[nSamples * SMALLINT + WAV_HDR_SIZE];
        buildHeader(sampleRate, nChannels);
        writeData(data, start, end);
    }

    // ------------------------------------------------------------
    private void buildHeader(int sampleRate, short nChannels) {
        write("RIFF");
        // Fix: the RIFF chunk-size field excludes the "RIFF" id and the size
        // field itself (8 bytes); the original wrote the full file length,
        // which violates the spec (some players tolerate it, others do not).
        write(output.length - WAV_RIFF_SIZE);
        write("WAVE");
        writeFormat(sampleRate, nChannels);
    }

    // ------------------------------------------------------------
    /** Writes the 24-byte "fmt " chunk (16-byte body). */
    public void writeFormat(int sampleRate, short nChannels) {
        write("fmt ");
        write(WAV_FMT_SIZE - WAV_DATA_SIZE);   // fmt body size = 16
        write(PCM);
        write(nChannels);
        write(sampleRate);
        write(nChannels * sampleRate * SAMPLE_SIZE);      // byte rate
        write((short) (nChannels * SAMPLE_SIZE));         // block align
        write((short) 16);                                // bits per sample
    }

    // ------------------------------------------------------------
    /** Writes the "data" chunk header followed by the samples. */
    public void writeData(short[] data, int start, int end) {
        write("data");
        write(nSamples * SMALLINT);
        for (int i = start; i <= end; write(data[i++])) ;
    }

    // ------------------------------------------------------------
    private void write(byte b) {
        output[cursor++] = b;
    }

    // ------------------------------------------------------------
    /** Writes a 4-character chunk id verbatim. */
    private void write(String id) {
        if (id.length() != ID_STRING_SIZE)
            Utils.logError("String " + id + " must have four characters.");
        else {
            for (int i = 0; i < ID_STRING_SIZE; ++i) write((byte) id.charAt(i));
        }
    }

    // ------------------------------------------------------------
    /** Writes a 32-bit value, least-significant byte first. */
    private void write(int i) {
        write((byte) (i & 0xFF));
        i >>= 8;
        write((byte) (i & 0xFF));
        i >>= 8;
        write((byte) (i & 0xFF));
        i >>= 8;
        write((byte) (i & 0xFF));
    }

    // ------------------------------------------------------------
    /** Writes a 16-bit value, least-significant byte first. */
    private void write(short i) {
        write((byte) (i & 0xFF));
        i >>= 8;
        write((byte) (i & 0xFF));
    }

    // ------------------------------------------------------------
    /**
     * Dumps the buffer into app-private storage.
     * NOTE(review): relies on the enclosing Activity's getFilesDir() --
     * this is a non-static inner class; confirm its enclosing context.
     */
    public boolean wroteToFile(String filename) {
        boolean ok = false;
        try {
            File path = new File(getFilesDir(), filename);
            FileOutputStream outFile = new FileOutputStream(path);
            outFile.write(output);
            outFile.close();
            ok = true;
        } catch (FileNotFoundException e) {
            e.printStackTrace();
            ok = false;
        } catch (IOException e) {
            ok = false;
            e.printStackTrace();
        }
        return ok;
    }
}
Hope this helps
PCMAudioHelper solved my problem. I'll modify this answer and explain it but firstly i have to do some tests over this class.
You might find this OMRECORDER helpful for recording .WAV format.
In case if .aac works with you then check out this WhatsappAudioRecorder:
On startRecording button click :
Initialise new thread.
Create file with .aac extension.
Create output stream of file.
Set output
SetListener and execute thread.
OnStopClick :
Interrupt the thread and audio will be saved in file.
Here is full gist of for reference :
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaRecorder;
import android.os.Build;
import android.util.Log;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
public class AudioRecordThread implements Runnable {
private static final String TAG = AudioRecordThread.class.getSimpleName();
private static final int SAMPLE_RATE = 44100;
private static final int SAMPLE_RATE_INDEX = 4;
private static final int CHANNELS = 1;
private static final int BIT_RATE = 32000;
private final int bufferSize;
private final MediaCodec mediaCodec;
private final AudioRecord audioRecord;
private final OutputStream outputStream;
private OnRecorderFailedListener onRecorderFailedListener;
AudioRecordThread(OutputStream outputStream, OnRecorderFailedListener onRecorderFailedListener) throws IOException {
this.bufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
this.audioRecord = createAudioRecord(this.bufferSize);
this.mediaCodec = createMediaCodec(this.bufferSize);
this.outputStream = outputStream;
this.onRecorderFailedListener = onRecorderFailedListener;
this.mediaCodec.start();
try {
audioRecord.startRecording();
} catch (Exception e) {
Log.w(TAG, e);
mediaCodec.release();
throw new IOException(e);
}
}
#Override
public void run() {
if (onRecorderFailedListener != null) {
Log.d(TAG, "onRecorderStarted");
onRecorderFailedListener.onRecorderStarted();
}
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
ByteBuffer[] codecInputBuffers = mediaCodec.getInputBuffers();
ByteBuffer[] codecOutputBuffers = mediaCodec.getOutputBuffers();
try {
while (!Thread.interrupted()) {
boolean success = handleCodecInput(audioRecord, mediaCodec, codecInputBuffers, Thread.currentThread().isAlive());
if (success)
handleCodecOutput(mediaCodec, codecOutputBuffers, bufferInfo, outputStream);
}
} catch (IOException e) {
Log.w(TAG, e);
} finally {
mediaCodec.stop();
audioRecord.stop();
mediaCodec.release();
audioRecord.release();
try {
outputStream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
private boolean handleCodecInput(AudioRecord audioRecord,
MediaCodec mediaCodec, ByteBuffer[] codecInputBuffers,
boolean running) throws IOException {
byte[] audioRecordData = new byte[bufferSize];
int length = audioRecord.read(audioRecordData, 0, audioRecordData.length);
if (length == AudioRecord.ERROR_BAD_VALUE ||
length == AudioRecord.ERROR_INVALID_OPERATION ||
length != bufferSize) {
if (length != bufferSize) {
if (onRecorderFailedListener != null) {
Log.d(TAG, "length != BufferSize calling onRecordFailed");
onRecorderFailedListener.onRecorderFailed();
}
return false;
}
}
int codecInputBufferIndex = mediaCodec.dequeueInputBuffer(10 * 1000);
if (codecInputBufferIndex >= 0) {
ByteBuffer codecBuffer = codecInputBuffers[codecInputBufferIndex];
codecBuffer.clear();
codecBuffer.put(audioRecordData);
mediaCodec.queueInputBuffer(codecInputBufferIndex, 0, length, 0, running ? 0 : MediaCodec.BUFFER_FLAG_END_OF_STREAM);
}
return true;
}
private void handleCodecOutput(MediaCodec mediaCodec,
ByteBuffer[] codecOutputBuffers,
MediaCodec.BufferInfo bufferInfo,
OutputStream outputStream)
throws IOException {
int codecOutputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
while (codecOutputBufferIndex != MediaCodec.INFO_TRY_AGAIN_LATER) {
if (codecOutputBufferIndex >= 0) {
ByteBuffer encoderOutputBuffer = codecOutputBuffers[codecOutputBufferIndex];
encoderOutputBuffer.position(bufferInfo.offset);
encoderOutputBuffer.limit(bufferInfo.offset + bufferInfo.size);
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != MediaCodec.BUFFER_FLAG_CODEC_CONFIG) {
byte[] header = createAdtsHeader(bufferInfo.size - bufferInfo.offset);
outputStream.write(header);
byte[] data = new byte[encoderOutputBuffer.remaining()];
encoderOutputBuffer.get(data);
outputStream.write(data);
}
encoderOutputBuffer.clear();
mediaCodec.releaseOutputBuffer(codecOutputBufferIndex, false);
} else if (codecOutputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
codecOutputBuffers = mediaCodec.getOutputBuffers();
}
codecOutputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
}
}
private byte[] createAdtsHeader(int length) {
int frameLength = length + 7;
byte[] adtsHeader = new byte[7];
adtsHeader[0] = (byte) 0xFF; // Sync Word
adtsHeader[1] = (byte) 0xF1; // MPEG-4, Layer (0), No CRC
adtsHeader[2] = (byte) ((MediaCodecInfo.CodecProfileLevel.AACObjectLC - 1) << 6);
adtsHeader[2] |= (((byte) SAMPLE_RATE_INDEX) << 2);
adtsHeader[2] |= (((byte) CHANNELS) >> 2);
adtsHeader[3] = (byte) (((CHANNELS & 3) << 6) | ((frameLength >> 11) & 0x03));
adtsHeader[4] = (byte) ((frameLength >> 3) & 0xFF);
adtsHeader[5] = (byte) (((frameLength & 0x07) << 5) | 0x1f);
adtsHeader[6] = (byte) 0xFC;
return adtsHeader;
}
private AudioRecord createAudioRecord(int bufferSize) {
AudioRecord audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT, bufferSize * 10);
if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
Log.d(TAG, "Unable to initialize AudioRecord");
throw new RuntimeException("Unable to initialize AudioRecord");
}
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
if (android.media.audiofx.NoiseSuppressor.isAvailable()) {
android.media.audiofx.NoiseSuppressor noiseSuppressor = android.media.audiofx.NoiseSuppressor
.create(audioRecord.getAudioSessionId());
if (noiseSuppressor != null) {
noiseSuppressor.setEnabled(true);
}
}
}
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
if (android.media.audiofx.AutomaticGainControl.isAvailable()) {
android.media.audiofx.AutomaticGainControl automaticGainControl = android.media.audiofx.AutomaticGainControl
.create(audioRecord.getAudioSessionId());
if (automaticGainControl != null) {
automaticGainControl.setEnabled(true);
}
}
}
return audioRecord;
}
private MediaCodec createMediaCodec(int bufferSize) throws IOException {
MediaCodec mediaCodec = MediaCodec.createEncoderByType("audio/mp4a-latm");
MediaFormat mediaFormat = new MediaFormat();
mediaFormat.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
mediaFormat.setInteger(MediaFormat.KEY_SAMPLE_RATE, SAMPLE_RATE);
mediaFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, CHANNELS);
mediaFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, bufferSize);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
mediaFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
try {
mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
} catch (Exception e) {
Log.w(TAG, e);
mediaCodec.release();
throw new IOException(e);
}
return mediaCodec;
}
interface OnRecorderFailedListener {
void onRecorderFailed();
void onRecorderStarted();
}
}
I would add this as a comment but I don't yet have enough Stackoverflow rep points...
Opiatefuchs's link takes you to sample code that shows you the exact header formatting necessary to create a .wav file. I've been all over that code myself. Very helpful.
First you need know that wav file has its format -- header. so you can't just write the pure data to the .wav file.
Second the wav file header include the length of file . so you need write the header after recording.
My solution is: use AudioRecorder to record into a raw PCM file first.
// Raw PCM capture loop: keep read()ing from the AudioRecord and appending to
// a temporary .pcm file until recording is stopped; the WAV header is written
// afterwards, once the total data length is known.
// Fix: the original snippet had a stray `catch` without a matching `try`
// and unbalanced braces, so it did not compile.
byte[] audiodata = new byte[bufferSizeInBytes];
FileOutputStream fos = null;
int readsize = 0;
try {
    fos = new FileOutputStream(pcmFileName, true);   // append mode
} catch (FileNotFoundException e) {
    Log.e("AudioRecorder", e.getMessage());
}
status = Status.STATUS_START;
while (status == Status.STATUS_START && audioRecord != null) {
    readsize = audioRecord.read(audiodata, 0, bufferSizeInBytes);
    if (AudioRecord.ERROR_INVALID_OPERATION != readsize && fos != null) {
        try {
            // Only write the bytes actually read this iteration.
            if (readsize > 0 && readsize <= audiodata.length)
                fos.write(audiodata, 0, readsize);
        } catch (IOException e) {
            Log.e("AudioRecorder", e.getMessage());
        }
    }
}
try {
    if (fos != null) {
        fos.close();
    }
} catch (IOException e) {
    Log.e("AudioRecorder", e.getMessage());
}
then convert it to wav file.
// Converts the recorded raw PCM file to WAV: prepends a 44-byte header whose
// length fields are computed from the PCM file size, then copies the data.
// Header parameters below assume 8 kHz / mono / 16-bit -- they must match
// the AudioRecord configuration used during capture.
byte buffer[] = null;
int TOTAL_SIZE = 0;
File file = new File(pcmPath);
if (!file.exists()) {
    return false;
}
TOTAL_SIZE = (int) file.length();
WaveHeader header = new WaveHeader();
header.fileLength = TOTAL_SIZE + (44 - 8);   // RIFF size = file size minus "RIFF"+size fields
header.FmtHdrLeth = 16;
header.BitsPerSample = 16;
header.Channels = 1;
header.FormatTag = 0x0001;                   // PCM
header.SamplesPerSec = 8000;
header.BlockAlign = (short) (header.Channels * header.BitsPerSample / 8);
header.AvgBytesPerSec = header.BlockAlign * header.SamplesPerSec;
header.DataHdrLeth = TOTAL_SIZE;
byte[] h = null;
try {
    h = header.getHeader();
} catch (IOException e1) {
    Log.e("PcmToWav", e1.getMessage());
    return false;
}
if (h.length != 44)   // sanity-check: standard PCM WAV header is 44 bytes
    return false;
File destfile = new File(destinationPath);
if (destfile.exists())
    destfile.delete();
InputStream inStream = null;
OutputStream ouStream = null;
try {
    buffer = new byte[1024 * 4];
    ouStream = new BufferedOutputStream(new FileOutputStream(destinationPath));
    ouStream.write(h, 0, h.length);
    inStream = new BufferedInputStream(new FileInputStream(file));
    int size = inStream.read(buffer);
    while (size != -1) {
        // Fix: the original wrote the whole buffer regardless of how many
        // bytes were read, appending garbage after a short final read.
        ouStream.write(buffer, 0, size);
        size = inStream.read(buffer);
    }
} catch (FileNotFoundException e) {
    Log.e("PcmToWav", e.getMessage());
    return false;
} catch (IOException ioe) {
    Log.e("PcmToWav", ioe.getMessage());
    return false;
} finally {
    // Fix: close both streams even when copying fails part-way.
    try {
        if (inStream != null)
            inStream.close();
        if (ouStream != null)
            ouStream.close();
    } catch (IOException e) {
        Log.e("PcmToWav", e.getMessage());
    }
}
if (deletePcmFile) {
    file.delete();
}
Log.i("PcmToWav", "makePCMFileToWAVFile success!" + new SimpleDateFormat("yyyy-MM-dd hh:mm").format(new Date()));
return true;

Android: onRecieve's intent getSerializableExtra(String key) returns null

So i have this code here;
myIntent.putExtra("schedule",serializableClass);
and this intent goes to my Broadcast Reciever and i did get that serializable as below,
public void onRecieve(Context context, Intent intent)
{
Schedule s = (Schedule) intent.getSerializableExtra("schedule");
}
but it always returns null, even though the extra is not null when I put it in — I even checked the value right before calling myIntent.putExtra(). I really don't know why it always returns null. Does anyone know what causes this problem?
The cast fails here; it would be easier to pass a serialized string and do the deserialization yourself. I'm using this class:
/**
 * Serializes a {@link Serializable} object to a plain ASCII string and back,
 * so it can be carried through an Intent extra as a String.
 *
 * <p>Wire format: each byte is split into two 4-bit nibbles and each nibble is
 * mapped to a character in 'a'..'p' (two chars per byte). Keep this encoding
 * stable — strings produced by earlier versions must remain decodable.
 *
 * <p>SECURITY NOTE(review): never pass untrusted data to
 * {@link #deserialize(String)} — Java native deserialization of attacker
 * controlled input can lead to arbitrary code execution.
 */
public final class ObjectSerializer {

    private ObjectSerializer() {
        // Static utility class; no instances.
    }

    /**
     * Serializes {@code obj} into an encoded ASCII string.
     *
     * @param obj the object to serialize; {@code null} is encoded as ""
     * @return the encoded representation, never {@code null}
     * @throws IOException if serialization fails; the original cause is preserved
     */
    public static String serialize(Serializable obj) throws IOException {
        if (obj == null)
            return "";
        try {
            ByteArrayOutputStream serialObj = new ByteArrayOutputStream();
            ObjectOutputStream objStream = new ObjectOutputStream(serialObj);
            objStream.writeObject(obj);
            objStream.close();
            return encodeBytes(serialObj.toByteArray());
        } catch (Exception e) {
            throw new IOException("Serialization error: " + e.getMessage(), e);
        }
    }

    /**
     * Reverses {@link #serialize(Serializable)}.
     *
     * @param str the encoded string; {@code null} or "" yields {@code null}
     * @return the reconstructed object, or {@code null} for empty input
     * @throws IOException if decoding or deserialization fails
     */
    public static Object deserialize(String str) throws IOException {
        if (str == null || str.length() == 0)
            return null;
        try {
            ByteArrayInputStream serialObj = new ByteArrayInputStream(
                    decodeBytes(str));
            ObjectInputStream objStream = new ObjectInputStream(serialObj);
            return objStream.readObject();
        } catch (Exception e) {
            throw new IOException("Serialization error: " + e.getMessage(), e);
        }
    }

    /** Encodes bytes as two characters per byte, one 'a'..'p' char per nibble. */
    public static String encodeBytes(byte[] bytes) {
        // StringBuilder (unsynchronized) presized to the exact output length
        // replaces the original unsized, synchronized StringBuffer.
        StringBuilder strBuf = new StringBuilder(bytes.length * 2);
        for (int i = 0; i < bytes.length; i++) {
            strBuf.append((char) (((bytes[i] >> 4) & 0xF) + ('a'))); // high nibble
            strBuf.append((char) (((bytes[i]) & 0xF) + ('a')));      // low nibble
        }
        return strBuf.toString();
    }

    /** Decodes a string produced by {@link #encodeBytes(byte[])}. */
    public static byte[] decodeBytes(String str) {
        byte[] bytes = new byte[str.length() / 2];
        for (int i = 0; i < str.length(); i += 2) {
            char c = str.charAt(i);
            bytes[i / 2] = (byte) ((c - 'a') << 4); // high nibble
            c = str.charAt(i + 1);
            bytes[i / 2] += (c - 'a');              // low nibble
        }
        return bytes;
    }
}
After that, use it like this:
String scheduleSerialization = ObjectSerializer.serialize(schedule);
myIntent.putExtra("schedule",scheduleSerialization);
The last thing to do is:
public void onRecieve(Context context, Intent intent)
{
String serial = intent.getStringExtra("schedule");
if(serial!=null)
Schedule s = (Schedule) ObjectSerializer.deserialize(serial) ;
}
Using Serializable on Android is discouraged because it is slow. If you look at the Android source code, you will see that
they usually break the information down into multiple keys and send them as primitive types (Integer, String, etc.);
when that can't be done, they use a Parcelable object.

Categories

Resources