How to print an image with text from an Android phone to a Bluetooth printer? - android

I want to print some text along with an image stored on my Android phone to a Bluetooth printer. The text prints successfully, but the image is never printed on paper.
I am using the following code:
public class SendingdataActivity extends Activity {
/** Called when the activity is first created. */
private BluetoothAdapter mBluetoothAdapter = null;
static final UUID MY_UUID =
UUID.fromString("fa87c0d0-afac-11de-8a39-0800200c9a66");
static String address = "50:C3:00:00:00:00";
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
mBluetoothAdapter = BluetoothAdapter.getDefaultAdapter();
if (mBluetoothAdapter == null) {
Toast.makeText(this,
"Bluetooth is not available.",
Toast.LENGTH_LONG).show();
finish();
return;
}
if (!mBluetoothAdapter.isEnabled()) {
Toast.makeText(this,
"Please enable your BT and re-run this program.",
Toast.LENGTH_LONG).show();
finish();
return;
}
final SendData sendData = new SendData();
Button sendButton = (Button) findViewById(R.id.send);
sendButton.setOnClickListener(new OnClickListener() {
public void onClick(View view) {
sendData.sendMessage();
}
});
}
class SendData extends Thread {
private BluetoothDevice device = null;
private BluetoothSocket btSocket = null;
private OutputStream outStream = null;
public SendData(){
device = mBluetoothAdapter.getRemoteDevice(address);
try
{
btSocket = device.createRfcommSocketToServiceRecord(MY_UUID);
}
catch (Exception e) {
// socket could not be created; nothing to send in that case
}
mBluetoothAdapter.cancelDiscovery();
try {
btSocket.connect();
} catch (IOException e) {
try {
btSocket.close();
} catch (IOException e2) {
}
}
Toast.makeText(getBaseContext(), "Connected to " + device.getName(),
Toast.LENGTH_SHORT).show();
try {
outStream = btSocket.getOutputStream();
} catch (IOException e) {
}
}
public void sendMessage()
{
try {
mBluetoothAdapter = BluetoothAdapter.getDefaultAdapter();
Bitmap bm = BitmapFactory.decodeResource(getResources(), R.drawable.white);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
bm.compress(Bitmap.CompressFormat.JPEG, 100,baos); //bm is the bitmap object
byte[] b = baos.toByteArray();
Toast.makeText(getBaseContext(), String.valueOf(b.length), Toast.LENGTH_SHORT).show();
outStream.write(b);
outStream.flush();
} catch (IOException e) {
}
}
}
}
This code runs, but only text comes out on paper; the image is never printed.
Thanks.

After a long time I created an app that prints an image on a Bluetooth printer, using the following steps:
1. Connect your Bluetooth device (the printer) to the Android phone (a minimal connection sketch is shown after the code listing below).
2. Call the senddatatodevice() method whenever you want to send data to the printer.
3. The code executed inside senddatatodevice() is shown below.
private static void senddatatodevice() {
try {
//base64 image string
String sig="/9j/4AAQSkZJRgABAgEBLAEsAAD/4QoORXhpZgAATU0AKgAAAAgABwESAAMAAAABAAEAAAEaAAUAAAABAAAAYgEbAAUAAAABAAAAagEoAAMAAAABAAIAAAExAAIAAAAcAAAAcgEyAAIAAAAUAAAAjodpAAQAAAABAAAApAAAANAALcbAAAAnEAAtxsAAACcQQWRvYmUgUGhvdG9zaG9wIENTMyBXaW5kb3dzADIwMTQ6MDI6MjUgMTI6MjQ6MzYAAAAAA6ABAAMAAAABAAEAAKACAAQAAAABAAAAcaADAAQAAAABAAAAMAAAAAAAAAAGAQMAAwAAAAEABgAAARoABQAAAAEAAAEeARsABQAAAAEAAAEmASgAAwAAAAEAAgAAAgEABAAAAAEAAAEuAgIABAAAAAEAAAjYAAAAAAAAAEgAAAABAAAASAAAAAH/2P/gABBKRklGAAECAABIAEgAAP/tAAxBZG9iZV9DTQAB/+4ADkFkb2JlAGSAAAAAAf/bAIQADAgICAkIDAkJDBELCgsRFQ8MDA8VGBMTFRMTGBEMDAwMDAwRDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAENCwsNDg0QDg4QFA4ODhQUDg4ODhQRDAwMDAwREQwMDAwMDBEMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwM/8AAEQgAMABxAwEiAAIRAQMRAf/dAAQACP/EAT8AAAEFAQEBAQEBAAAAAAAAAAMAAQIEBQYHCAkKCwEAAQUBAQEBAQEAAAAAAAAAAQACAwQFBgcICQoLEAABBAEDAgQCBQcGCAUDDDMBAAIRAwQhEjEFQVFhEyJxgTIGFJGhsUIjJBVSwWIzNHKC0UMHJZJT8OHxY3M1FqKygyZEk1RkRcKjdDYX0lXiZfKzhMPTdePzRieUpIW0lcTU5PSltcXV5fVWZnaGlqa2xtbm9jdHV2d3h5ent8fX5/cRAAICAQIEBAMEBQYHBwYFNQEAAhEDITESBEFRYXEiEwUygZEUobFCI8FS0fAzJGLhcoKSQ1MVY3M08SUGFqKygwcmNcLSRJNUoxdkRVU2dGXi8rOEw9N14/NGlKSFtJXE1OT0pbXF1eX1VmZ2hpamtsbW5vYnN0dXZ3eHl6e3x//aAAwDAQACEQMRAD8A9VULra6Kn3WnbXU0ve7mGtG5x0U1V6mwuwnwC7YW2OYNS5tbm22Vgf8ACMZ6aSnMNvW3HKzuovGDgUg+hiVOZ6jmbQ/1snKsb7LvU/RehV6Vdf8ApshcZ03/ABqZeP8AWA4PWKgzptzmtrtM76gdGWmx1dP2ij99/pf8LXYus+u/S+qdW6GX9H6g/Dtxwclvokj1tjfUrr9ap7HM93vreuO+qX1Uw+v9Cd9Y/rQX573h9eK0Pcxxax5aJfWa/VyLcn1q2bvU/nP30lPqqSDh1PpxKKbHbn11tY53iWgNc5GSUpJJJJSkkkklKSSSSUpJJJJSkkkklP8A/9D0Dpn1j6f1TqXUOmY4sbk9LeGZAe2GkkvbNTtztzf0a0MrIZi4t2VYCWUMdY8NEuIYC921v73tXD9Gf+zv8avWMJ/tZ1PHZfTP5zmtre7/AN2v+213ORS2+iyh/wBG1jmO+DhtSU5HRM3G6301nU+j2uqxcgvH2e+sOZLXOrs/QhzXs3bfo15Hpf8ABqv1DL6T9VqME5u91VlzcbDx8etrKanuBM10NLP+3LrL7GLI/wAUeSf2Bk9Mt0yOnZVlb2eAdDx/4L66h/jBeM76y/VjobPc5+UMm9vhW1zBu/zGZSSnvVjZv1q6dh9Ws6Q+u+zMrxX5u2pgdurYHOLK/dufc70/ZXtWyvLeo2ftH63fW/NqM1dM6PkYzXjtZ6W13/gjcpJT6T03OZ1HCqza6raG3AkVXs9OwQS39JWfo/RVleSdXvyW/wCK36uWVWuZcc5kWgyQf12D/KWp9demv+r/AE/pmB059lXS87NL+s5V1tn6SxwpYx2blM3W1VXtZZ63o7P5tJT6OsvC+sWBm9Zzei0iz7X08NdeXNAZDwC303bvd9L91cJiYlGJ9Y+ljo+b03CyH2D1cbp1uTe3IpkeozIYyu3Hr9m707bvT/8AA1YxLM6r64/XS3pzd2czEa7GbEk2Cthr2t/Odu/NSU+jpLxrCo6dlfV09Qzsrpzc9we63OyMrI/aFdsug+jUHWb6/wAymqv3rX67Rk5XTvq9X1DrGJde2tz3Y2cb6cbMbP6G620sqs9b0gz+k7P0n/GfpUp9OSXmXQupYGPkdX6RTQzpNr8J9ruodOyn5eJVps9X7O07Me1m7fub+m/4tY1Z6f0bp9GfkU9O6t6T2uGbg591GdYS72l9btuT6nu/SVtakp9mSXO/876P/K3qH/bJ/wDJpJKf/9HV/wAZ2Ll9NzOl/XDAbut6bYKskcTW4zXvP+idvuof/wCGF2XResYPW+nU9RwLBZTcJI/OY78+qwfm2V/nKxmYmNnYtuHlVi3HvYa7a3cFrhBXkvUfqb9dvqjn25P1VtvvwbDLfQh7wPzWZOI4Obc9n+kZVZ/1v6CSnfrxM76r/X/qHUW17fq/1Gh2Vm5DjtrqIlznOd+df9q3+lT9N9eV7FX+pDr/AK1fXHP+t97HMxMQHG6e13aRsj+szHc993/CZawqfq5/jG+uWRUzrtl+Ngsdue/JaKWjzrwmNp9S39z9F/1xXess/wAYf1eyh0/6t4l9HSMdoqxRjVsyvUElz8vIPpWubkXvO9/6Or00kPffW76zYn1b6Pbm2uDshwLMSgnV9hHt0/0bPp2uXM/UnCyejfUjqn1gy2C7O6hVfnvbcJD2MrssobcPzm3/AKW3/i71kfV//F/9Yuv9TZ1f65Ps9FkEUXOm2yNW1em3242P++z2P/4P/CL0X6x49l31b6pi41ZfZZhZFdNTBqXOqeyutjR/mpJebf8AWZ7vqZ0nqzqMCh2XeGGnIa/0G65A/V2VNte279F7f+uJvrX9fW4XWaeg4X2abA8ZuRmV22VVwCfS9DH2WW7tv5v6P3rI6n0LrNn+LjoHT68K52Zj5jH344YS9jR9r972fmt/SMW/1bpufb/jK6H1CvHsfhY+Ncy7IDSWMc5mU1rXv+i3d6jElN5vWvqr0OrHFrsbGysultnp4dDtzwQHeo3Gxq7MltLvzPVWh0fqfQ+rNszOlW1XuJ23vY3bYD2bexzWXM4/wrVxub0jqPTPrj1LqeS3qT8PqTWGjK6W0WPbtDQ7Hyagy65tft9m1v8A6j1fqX0t7Oo9R6xZh5uIcvawPz7Wm29rfo3WYddNX2Z7Nv8AhLHpKelf0vpj8j7U/EodkTPrGthfPj6m3ei5GLjZVfpZNLL6zrssaHt/zXgoqSSkONh4mIz08SivHrJksqY1gn+qwNQmdJ6XXf8Aaa8Ohl8z6ramB8+PqBu9W0klKSSSSU//2f/tD0ZQaG90b3Nob3AgMy4wADhCSU0EJQAAAAAAEAAAAAAAAAAAAAAAAAAAAAA4QklNBC8AAAAAAEqoyAE
ASAAAAEgAAAAAAAAAAAAAANACAABAAgAAAAAAAAAAAAAYAwAAZAIAAAABwAMAALAEAAABAA8nAQBsbHVuAAAAAAAAAAAAADhCSU0D7QAAAAAAEAEsAAAAAQABASwAAAABAAE4QklNBCYAAAAAAA4AAAAAAAAAAAAAP4AAADhCSU0EDQAAAAAABAAAAHg4QklNBBkAAAAAAAQAAAAeOEJJTQPzAAAAAAAJAAAAAAAAAAABADhCSU0ECgAAAAAAAQAAOEJJTScQAAAAAAAKAAEAAAAAAAAAAjhCSU0D9QAAAAAASAAvZmYAAQBsZmYABgAAAAAAAQAvZmYAAQChmZoABgAAAAAAAQAyAAAAAQBaAAAABgAAAAAAAQA1AAAAAQAtAAAABgAAAAAAAThCSU0D+AAAAAAAcAAA/////////////////////////////wPoAAAAAP////////////////////////////8D6AAAAAD/////////////////////////////A+gAAAAA/////////////////////////////wPoAAA4QklNBAAAAAAAAAIAAThCSU0EAgAAAAAACAAAAAAAAAAAOEJJTQQwAAAAAAAEAQEBAThCSU0ELQAAAAAABgABAAAABThCSU0ECAAAAAAAEAAAAAEAAAJAAAACQAAAAAA4QklNBB4AAAAAAAQAAAAAOEJJTQQaAAAAAANJAAAABgAAAAAAAAAAAAAAMAAAAHEAAAAKAFUAbgB0AGkAdABsAGUAZAAtADEAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAHEAAAAwAAAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAEAAAAAEAAAAAAABudWxsAAAAAgAAAAZib3VuZHNPYmpjAAAAAQAAAAAAAFJjdDEAAAAEAAAAAFRvcCBsb25nAAAAAAAAAABMZWZ0bG9uZwAAAAAAAAAAQnRvbWxvbmcAAAAwAAAAAFJnaHRsb25nAAAAcQAAAAZzbGljZXNWbExzAAAAAU9iamMAAAABAAAAAAAFc2xpY2UAAAASAAAAB3NsaWNlSURsb25nAAAAAAAAAAdncm91cElEbG9uZwAAAAAAAAAGb3JpZ2luZW51bQAAAAxFU2xpY2VPcmlnaW4AAAANYXV0b0dlbmVyYXRlZAAAAABUeXBlZW51bQAAAApFU2xpY2VUeXBlAAAAAEltZyAAAAAGYm91bmRzT2JqYwAAAAEAAAAAAABSY3QxAAAABAAAAABUb3AgbG9uZwAAAAAAAAAATGVmdGxvbmcAAAAAAAAAAEJ0b21sb25nAAAAMAAAAABSZ2h0bG9uZwAAAHEAAAADdXJsVEVYVAAAAAEAAAAAAABudWxsVEVYVAAAAAEAAAAAAABNc2dlVEVYVAAAAAEAAAAAAAZhbHRUYWdURVhUAAAAAQAAAAAADmNlbGxUZXh0SXNIVE1MYm9vbAEAAAAIY2VsbFRleHRURVhUAAAAAQAAAAAACWhvcnpBbGlnbmVudW0AAAAPRVNsaWNlSG9yekFsaWduAAAAB2RlZmF1bHQAAAAJdmVydEFsaWduZW51bQAAAA9FU2xpY2VWZXJ0QWxpZ24AAAAHZGVmYXVsdAAAAAtiZ0NvbG9yVHlwZWVudW0AAAARRVNsaWNlQkdDb2xvclR5cGUAAAAATm9uZQAAAAl0b3BPdXRzZXRsb25nAAAAAAAAAApsZWZ0T3V0c2V0bG9uZwAAAAAAAAAMYm90dG9tT3V0c2V0bG9uZwAAAAAAAAALcmlnaHRPdXRzZXRsb25nAAAAAAA4QklNBCgAAAAAAAwAAAABP/AAAAAAAAA4QklNBBQAAAAAAAQAAAAFOEJJTQQMAAAAAAj0AAAAAQAAAHEAAAAwAAABVAAAP8AAAAjYABgAAf/Y/+AAEEpGSUYAAQIAAEgASAAA/+0ADEFkb2JlX0NNAAH/7gAOQWRvYmUAZIAAAAAB/9sAhAAMCAgICQgMCQkMEQsKCxEVDwwMDxUYExMVExMYEQwMDAwMDBEMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMAQ0LCw0ODRAODhAUDg4OFBQODg4OFBEMDAwMDBERDAwMDAwMEQwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAz/wAARCAAwAHEDASIAAhEBAxEB/90ABAAI/8QBPwAAAQUBAQEBAQEAAAAAAAAAAwABAgQFBgcICQoLAQABBQEBAQEBAQAAAAAAAAABAAIDBAUGBwgJCgsQAAEEAQMCBAIFBwYIBQMMMwEAAhEDBCESMQVBUWETInGBMgYUkaGxQiMkFVLBYjM0coLRQwclklPw4fFjczUWorKDJkSTVGRFwqN0NhfSVeJl8rOEw9N14/NGJ5SkhbSVxNTk9KW1xdXl9VZmdoaWprbG1ub2N0dXZ3eHl6e3x9fn9xEAAgIBAgQEAwQFBgcHBgU1AQACEQMhMRIEQVFhcSITBTKBkRShsUIjwVLR8DMkYuFygpJDUxVjczTxJQYWorKDByY1wtJEk1SjF2RFVTZ0ZeLys4TD03Xj80aUpIW0lcTU5PSltcXV5fVWZnaGlqa2xtbm9ic3R1dnd4eXp7fH/9oADAMBAAIRAxEAPwD1VQutroqfdadtdTS97uYa0bnHRTVXqbC7CfALthbY5g1Lm1ubbZWB/wAIxnppKcw29bccrO6i8YOBSD6GJU5nqOZtD/Wycqxvsu9T9F6FXpV1/wCmyFxnTf8AGpl4/wBYDg9YqDOm3Oa2u0zvqB0ZabHV0/aKP33+l/wtdi6z679L6p1boZf0fqD8O3HByW+iSPW2N9Suv1qnscz3e+t6476pfVTD6/0J31j+tBfnveH14rQ9zHFrHlol9Zr9XItyfWrZu9T+c/fSU+qpIOHU+nEopsdufXW1jneJaA1zkZJSkkkklKSSSSUpJJJJSkkkklKSSSSU/wD/0PQOmfWPp/VOpdQ6ZjixuT0t4ZkB7YaSS9s1O3O3N/RrQyshmLi3ZVgJZQx1jw0S4hgL3bW/ve1cP0Z/7O/xq9Ywn+1nU8dl9M/nOa2t7v8A3a/7bXc5FLb6LKH/AEbWOY74OG1JTkdEzcbrfTWdT6Pa6rFyC8fZ76w5ktc6uz9CHNezdt+jXkel/wAGq/UMvpP1WowTm73VWXNxsPHx62spqe4EzXQ0s/7cusvsYsj/ABR5J/YGT0y3TI6dlWVvZ4B0PH/gvrqH+MF4zvrL9WOhs9zn5Qyb2+FbXMG7/MZlJKe9WNm/Wrp2H1azpD677MyvFfm7amB26tgc4sr9259zvT9le1bK8t6jZ+0frd9b82ozV0zo+RjNeO1npbXf+CNyklPpPTc5nUcKrNrqtobcCRVez07BBLf0lZ+j9FWV5J1e/Jb/AIrfq5ZVa5lxzmRaDJB/XYP8pan116a/6v8AT+mYHTn2VdLzs0v6zlXW2fpLHCljHZuUzdbVVe1lnrejs/m0lPo6y8L6xYGb1nN6LS
LPtfTw115c0BkPALfTdu930v3VwmJiUYn1j6WOj5vTcLIfYPVxunW5N7cimR6jMhjK7cev2bvTtu9P/wADVjEszqvrj9dLenN3ZzMRrsZsSTYK2Gva3852781JT6OkvGsKjp2V9XT1DOyunNz3B7rc7Iysj9oV2y6D6NQdZvr/ADKaq/etfrtGTldO+r1fUOsYl17a3PdjZxvpxsxs/obrbSyqz1vSDP6Ts/Sf8Z+lSn05JeZdC6lgY+R1fpFNDOk2vwn2u6h07Kfl4lWmz1fs7Tsx7Wbt+5v6b/i1jVnp/Run0Z+RT07q3pPa4ZuDn3UZ1hLvaX1u25Pqe79JW1qSn2ZJc7/zvo/8reof9sn/AMmkkp//0dX/ABnYuX03M6X9cMBu63ptgqyRxNbjNe8/6J2+6h//AIYXZdF6xg9b6dT1HAsFlNwkj85jvz6rB+bZX+crGZiY2di24eVWLce9hrtrdwWuEFeS9R+pv12+qOfbk/VW2+/BsMt9CHvA/NZk4jg5tz2f6RlVn/W/oJKd+vEzvqv9f+odRbXt+r/UaHZWbkOO2uoiXOc5351/2rf6VP0315XsVf6kOv8ArV9cc/633sczExAcbp7XdpGyP6zMdz33f8JlrCp+rn+Mb65ZFTOu2X42Cx2578lopaPOvCY2n1Lf3P0X/XFd6yz/ABh/V7KHT/q3iX0dIx2irFGNWzK9QSXPy8g+la5uRe873/o6vTSQ999bvrNifVvo9uba4OyHAsxKCdX2Ee3T/Rs+na5cz9ScLJ6N9SOqfWDLYLs7qFV+e9twkPYyuyyhtw/Obf8Apbf+LvWR9X/8X/1i6/1NnV/rk+z0WQRRc6bbI1bV6bfbjY/77PY//g/8IvRfrHj2XfVvqmLjVl9lmFkV01MGpc6p7K62NH+akl5t/wBZnu+pnSerOowKHZd4Yachr/QbrkD9XZU217bv0Xt/64m+tf19bhdZp6DhfZpsDxm5GZXbZVXAJ9L0MfZZbu2/m/o/esjqfQus2f4uOgdPrwrnZmPmMffjhhL2NH2v3vZ+a39Ixb/Vum59v+MrofUK8ex+Fj41zLsgNJYxzmZTWte/6Ld3qMSU3m9a+qvQ6scWuxsbKy6W2enh0O3PBAd6jcbGrsyW0u/M9VaHR+p9D6s2zM6VbVe4nbe9jdtgPZt7HNZczj/CtXG5vSOo9M+uPUup5LepPw+pNYaMrpbRY9u0NDsfJqDLrm1+32bW/wDqPV+pfS3s6j1HrFmHm4hy9rA/Ptabb2t+jdZh101fZns2/wCEsekp6V/S+mPyPtT8Sh2RM+sa2F8+Pqbd6LkYuNlV+lk0svrOuyxoe3/NeCipJKQ42HiYjPTxKK8esmSypjWCf6rA1CZ0npdd/wBprw6GXzPqtqYHz4+oG71bSSUpJJJJT//ZOEJJTQQhAAAAAABVAAAAAQEAAAAPAEEAZABvAGIAZQAgAFAAaABvAHQAbwBzAGgAbwBwAAAAEwBBAGQAbwBiAGUAIABQAGgAbwB0AG8AcwBoAG8AcAAgAEMAUwAzAAAAAQA4QklNBAYAAAAAAAcACAEBAAMBAP/hD89odHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDQuMS1jMDM2IDQ2LjI3NjcyMCwgTW9uIEZlYiAxOSAyMDA3IDIyOjQwOjA4ICAgICAgICAiPiA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPiA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIiB4bWxuczpkYz0iaHR0cDovL3B1cmwub3JnL2RjL2VsZW1lbnRzLzEuMS8iIHhtbG5zOnhhcD0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wLyIgeG1sbnM6eGFwTU09Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9tbS8iIHhtbG5zOnN0UmVmPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvc1R5cGUvUmVzb3VyY2VSZWYjIiB4bWxuczpwaG90b3Nob3A9Imh0dHA6Ly9ucy5hZG9iZS5jb20vcGhvdG9zaG9wLzEuMC8iIHhtbG5zOnRpZmY9Imh0dHA6Ly9ucy5hZG9iZS5jb20vdGlmZi8xLjAvIiB4bWxuczpleGlmPSJodHRwOi8vbnMuYWRvYmUuY29tL2V4aWYvMS4wLyIgZGM6Zm9ybWF0PSJpbWFnZS9qcGVnIiB4YXA6Q3JlYXRvclRvb2w9IkFkb2JlIFBob3Rvc2hvcCBDUzMgV2luZG93cyIgeGFwOkNyZWF0ZURhdGU9IjIwMTQtMDItMjVUMTI6MjQ6MzYrMDU6MzAiIHhhcDpNb2RpZnlEYXRlPSIyMDE0LTAyLTI1VDEyOjI0OjM2KzA1OjMwIiB4YXA6TWV0YWRhdGFEYXRlPSIyMDE0LTAyLTI1VDEyOjI0OjM2KzA1OjMwIiB4YXBNTTpEb2N1bWVudElEPSJ1dWlkOkQ3MkUyOTYyRTM5REUzMTFBQ0QxOUQzNUM5MzAwREIzIiB4YXBNTTpJbnN0YW5jZUlEPSJ1dWlkOkQ4MkUyOTYyRTM5REUzMTFBQ0QxOUQzNUM5MzAwREIzIiBwaG90b3Nob3A6Q29sb3JNb2RlPSIzIiBwaG90b3Nob3A6SUNDUHJvZmlsZT0ic1JHQiBJRUM2MTk2Ni0yLjEiIHBob3Rvc2hvcDpIaXN0b3J5PSIiIHRpZmY6T3JpZW50YXRpb249IjEiIHRpZmY6WFJlc29sdXRpb249IjMwMDAwMDAvMTAwMDAiIHRpZmY6WVJlc29sdXRpb249IjMwMDAwMDAvMTAwMDAiIHRpZmY6UmVzb2x1dGlvblVuaXQ9IjIiIHRpZmY6TmF0aXZlRGlnZXN0PSIyNTYsMjU3LDI1OCwyNTksMjYyLDI3NCwyNzcsMjg0LDUzMCw1MzEsMjgyLDI4MywyOTYsMzAxLDMxOCwzMTksNTI5LDUzMiwzMDYsMjcwLDI3MSwyNzIsMzA1LDMxNSwzMzQzMjs1NzYxNzA0NkUxNUE0RThGODE1NzcxNTIyREJFQUIzQiIgZXhpZjpQaXhlbFhEaW1lbnNpb249IjExMyIgZXhpZjpQaXhlbFlEaW1lbnNpb249IjQ4IiBleGlmOkNvbG9yU3BhY2U9IjEiIGV4aWY6TmF0aXZlRGlnZXN0PSIzNjg2NCw0MDk2MCw0MDk2MSwzNzEyMSwzNzEyMiw0MDk2Miw0MDk2MywzNzUxMCw0MDk2NCwzNjg2NywzNjg2OCwzMzQzNCwzMzQzNywzNDg1MCwzNDg1MiwzNDg1N
SwzNDg1NiwzNzM3NywzNzM3OCwzNzM3OSwzNzM4MCwzNzM4MSwzNzM4MiwzNzM4MywzNzM4NCwzNzM4NSwzNzM4NiwzNzM5Niw0MTQ4Myw0MTQ4NCw0MTQ4Niw0MTQ4Nyw0MTQ4OCw0MTQ5Miw0MTQ5Myw0MTQ5NSw0MTcyOCw0MTcyOSw0MTczMCw0MTk4NSw0MTk4Niw0MTk4Nyw0MTk4OCw0MTk4OSw0MTk5MCw0MTk5MSw0MTk5Miw0MTk5Myw0MTk5NCw0MTk5NSw0MTk5Niw0MjAxNiwwLDIsNCw1LDYsNyw4LDksMTAsMTEsMTIsMTMsMTQsMTUsMTYsMTcsMTgsMjAsMjIsMjMsMjQsMjUsMjYsMjcsMjgsMzA7RjY5RTI3MkY2NzczN0M2MkI3MjNCQTdFQzg4QTM3OEIiPiA8eGFwTU06RGVyaXZlZEZyb20gc3RSZWY6aW5zdGFuY2VJRD0idXVpZDozRThEMUZDRUUwOURFMzExQUNEMTlEMzVDOTMwMERCMyIgc3RSZWY6ZG9jdW1lbnRJRD0idXVpZDozRThEMUZDRUUwOURFMzExQUNEMTlEMzVDOTMwMERCMyIvPiA8L3JkZjpEZXNjcmlwdGlvbj4gPC9yZGY6UkRGPiA8L3g6eG1wbWV0YT4gICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICA8P3hwYWNrZXQgZW5kPSJ3Ij8+/+IMWElDQ19QUk9GSUxFAAEBAAAMSExpbm8CEAAAbW50clJHQiBYWVogB84AAgAJAAYAMQAAYWNzcE1TRlQAAAAASUVDIHNSR0IAAAAAAAAAAAAAAAEAAPbWAAEAAAAA0y1I
UCAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAARY3BydAAAAVAAAAAzZGVzYwAAAYQAAABsd3RwdAAAAfAAAAAUYmtwdAAAAgQAAAAUclhZWgAAAhgAAAAUZ1hZWgAAAiwAAAAUYlhZWgAAAkAAAAAUZG1uZAAAAlQAAABwZG1kZAAAAsQAAACIdnVlZAAAA0wAAACGdmlldwAAA9QAAAAkbHVtaQAAA/gAAAAUbWVhcwAABAwAAAAkdGVjaAAABDAAAAAMclRSQwAABDwAAAgMZ1RSQwAABDwAAAgMYlRSQwAABDwAAAgMdGV4dAAAAABDb3B5cmlnaHQgKGMpIDE5OTggSGV3bGV0dC1QYWNrYXJkIENvbXBhbnkAAGRlc2MAAAAAAAAAEnNSR0IgSUVDNjE5NjYtMi4xAAAAAAAAAAAAAAASc1JHQiBJRUM2MTk2Ni0yLjEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFhZWiAAAAAAAADzUQABAAAAARbMWFlaIAAAAAAAAAAAAAAAAAAAAABYWVogAAAAAAAAb6IAADj1AAADkFhZWiAAAAAAAABimQAAt4UAABjaWFlaIAAAAAAAACSgAAAPhAAAts9kZXNjAAAAAAAAABZJRUMgaHR0cDovL3d3dy5pZWMuY2gAAAAAAAAAAAAAABZJRUMgaHR0cDovL3d3dy5pZWMuY2gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAZGVzYwAAAAAAAAAuSUVDIDYxOTY2LTIuMSBEZWZhdWx0IFJHQiBjb2xvdXIgc3BhY2UgLSBzUkdCAAAAAAAAAAAAAAAuSUVDIDYxOTY2LTIuMSBEZWZhdWx0IFJHQiBjb2xvdXIgc3BhY2UgLSBzUkdCAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGRlc2MAAAAAAAAALFJlZmVyZW5jZSBWaWV3aW5nIENvbmRpdGlvbiBpbiBJRUM2MTk2Ni0yLjEAAAAAAAAAAAAAACxSZWZlcmVuY2UgVmlld2luZyBDb25kaXRpb24gaW4gSUVDNjE5NjYtMi4xAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB2aWV3AAAAAAATpP4AFF8uABDPFAAD7cwABBMLAANcngAAAAFYWVogAAAAAABMCVYAUAAAAFcf521lYXMAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAKPAAAAAnNpZyAAAAAAQ1JUIGN1cnYAAAAAAAAEAAAAAAUACgAPABQAGQAeACMAKAAtADIANwA7AEAARQBKAE8AVABZAF4AYwBoAG0AcgB3AHwAgQCGAIsAkACVAJoAnwCkAKkArgCyALcAvADBAMYAywDQANUA2wDgAOUA6wDwAPYA+wEBAQcBDQETARkBHwElASsBMgE4AT4BRQFMAVIBWQFgAWcBbgF1AXwBgwGLAZIBmgGhAakBsQG5AcEByQHRAdkB4QHpAfIB+gIDAgwCFAIdAiYCLwI4AkECSwJUAl0CZwJxAnoChAKOApgCogKsArYCwQLLAtUC4ALrAvUDAAMLAxYDIQMtAzgDQwNPA1oDZgNyA34DigOWA6IDrgO6A8cD0wPgA+wD+QQGBBMEIAQtBDsESARVBGMEcQR+BIwEmgSoBLYExATTBOEE8AT+BQ0FHAUrBToFSQVYBWcFdwWGBZYFpgW1BcUF1QXlBfYGBgYWBicGNwZIBlkGagZ7BowGnQavBsAG0QbjBvUHBwcZBysHPQdPB2EHdAeGB5kHrAe/B9IH5Qf4CAsIHwgyCEYIWghuCIIIlgiqCL4I0gjnCPsJEAklCToJTwlkCXkJjwmkCboJzwnlCfsKEQonCj0KVApqCoEKmAquCsUK3ArzCwsLIgs5C1ELaQuAC5gLsAvIC+EL+QwSDCoMQwxcDHUMjgynDMAM2QzzDQ0NJg1ADVoNdA2ODakNww3eDfgOEw4uDkkOZA5/DpsOtg7SDu4PCQ8lD0EPXg96D5YPsw/PD+wQCRAmEEMQYRB+EJsQuRDXEPURExExEU8RbRGMEaoRyRHoEgcSJhJFEmQShBKjEsMS4xMDEyMTQxNjE4MTpBPFE+UUBhQnFEkUahSLFK0UzhTwFRIVNBVWFXgVmxW9FeAWAxYmFkkWbBaPFrIW1hb6Fx0XQRdlF4kXrhfSF/cYGxhAGGUYihivGNUY+hkgGUUZaxmRGbcZ3RoEGioaURp3Gp4axRrsGxQbOxtjG4obshvaHAIcKhxSHHscoxzMHPUdHh1HHXAdmR3DHeweFh5AHmoelB6+HukfEx8+H2kflB+/H+ogFSBBIGwgmCDEIPAhHCFIIXUhoSHOIfsiJyJVIoIiryLdIwojOCNmI5QjwiPwJB8kTSR8JKsk2iUJJTglaCWXJccl9yYnJlcmhya3JugnGCdJJ3onqyfcKA0oPyhxKKIo1CkGKTgpaymdKdAqAio1KmgqmyrPKwIrNitpK50r0SwFLDksbiyiLNctDC1BLXYtqy3hLhYuTC6CLrcu7i8kL1ovkS/HL/4wNTBsMKQw2zESMUoxgjG6MfIyKjJjMpsy1DMNM0YzfzO4M/E0KzRlNJ402DUTNU01hzXCNf02NzZyNq426TckN2A3nDfXOBQ4UDiMOMg5BTlCOX85vDn5OjY6dDqyOu87LTtrO6o76DwnPGU8pDzjPSI9YT2hPeA+ID5gPqA+4D8hP2E/oj/iQCNAZECmQOdBKUFqQaxB7kIwQnJCtUL3QzpDfUPARANER0SKRM5FEkVVRZpF3kYiRmdGq0bwRzVHe0fASAVIS0iRSNdJHUljSalJ8Eo3Sn1KxEsMS1NLmkviTCpMcky6TQJNSk2TTdxOJU5uTrdPAE9JT5NP3VAnUHFQu1EGUVBRm1HmUjFSfFLHUxNTX1OqU/ZUQlSPVNtVKFV1VcJWD1ZcVqlW91dEV5JX4FgvWH1Yy1kaWWlZuFoHWlZaplr1W0VblVvlXDVchlzWXSddeF3JXhpebF69Xw9fYV+zYAVgV2CqYPxhT2GiYfViSWKcYvBjQ2OXY+tkQGSUZOllPWWSZedmPWaSZuhnPWeTZ+loP2iWaOxpQ2maafFqSGqfavdrT2una/9sV2yvbQhtYG25bhJua27Ebx5veG/RcCtwhnDgcTpxlXHwcktypnMBc11zuHQUdHB0zHUodYV14XY+dpt2+HdWd7N4EXhueMx5KnmJeed6RnqlewR7Y3vCfCF8gXzhfUF9oX4BfmJ+wn8jf4R/5YBHgKiBCoFrgc2CMIKSgvSDV4O6hB2EgITjhUeFq4YOhnKG14c7h5+IBIhpiM6JM4mZif6KZIrKizCLlov8jGOMyo0xjZiN/45mjs6PNo+ekAaQbpDWkT+RqJIRknqS45NNk7aUIJSKlPSVX5XJljSWn5cKl3WX4JhMmLiZJJmQmfyaaJrVm0Kbr5wcnImc951
kndKeQJ6unx2fi5/6oGmg2KFHobaiJqKWowajdqPmpFakx6U4pammGqaLpv2nbqfgqFKoxKk3qamqHKqPqwKrdavprFys0K1ErbiuLa6hrxavi7AAsHWw6rFgsdayS7LCszizrrQltJy1E7WKtgG2ebbwt2i34LhZuNG5SrnCuju6tbsuu6e8IbybvRW9j74KvoS+/796v/XAcMDswWfB48JfwtvDWMPUxFHEzsVLxcjGRsbDx0HHv8g9yLzJOsm5yjjKt8s2y7bMNcy1zTXNtc42zrbPN8+40DnQutE80b7SP9LB00TTxtRJ1MvVTtXR1lXW2Ndc1+DYZNjo2WzZ8dp22vvbgNwF3IrdEN2W3hzeot8p36/gNuC94UThzOJT4tvjY+Pr5HPk/OWE5g3mlucf56noMui86Ubp0Opb6uXrcOv77IbtEe2c7ijutO9A78zwWPDl8XLx//KM8xnzp/Q09ML1UPXe9m32+/eK+Bn4qPk4+cf6V/rn+3f8B/yY/Sn9uv5L/tz/bf///+4AJkFkb2JlAGRAAAAAAQMAFQQDBgoNAAAAAAAAAAAAAAAAAAAAAP/bAIQAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQICAgICAgICAgICAwMDAwMDAwMDAwEBAQEBAQEBAQEBAgIBAgIDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMD/8IAEQgAMABxAwERAAIRAQMRAf/EAMAAAAIDAAMBAAAAAAAAAAAAAAAJBgcIAwQFCgEBAQEAAAAAAAAAAAAAAAAAAAECEAACAgEDAwMFAAAAAAAAAAAFBgQHAwACCCAwARBBFkBQYBEXEQABBAECBAUBBAcJAAAAAAADAQIEBQYREgAhEwcxIiMUFVEQcTMkIDBBYYEyCEJSU3PUlRY2FxIBAQEAAAAAAAAAAAAAAAAAUGABEwEAAgICAwEAAgMAAAAAAAABABEhMRBBIDBRYVChQHGB/9oADAMBAwIRAxEAAAF/h5xmRFm3L3psAAAAAAAgiZlRa9j3JqQqAAAAABQiW8ufUiCbOazUlvE3UKJS9lAABSVw1abV+z5A25pDFxBC9yKkvMlpoleYzKjmpvJNyy+aWfcw8YjNYaubBl6poguBZYe+vCeGk+WOJ88VxHyzDsjnZtSlzriWi7NASsVaAAD/2gAIAQEAAQUC1LlYIMXzJd8m5a5Tlxtgdxlxb8oa7lhncEepqqEWMjCIuaCK7SzYy+1shQhhEjEo0Me1tgLKlSQtGbTXQjYuHMLKF9AthgTrl0p2b4xypnw8ZCDxGJbvgHILNtP2VphkfJrcbppHZxbupbzVwAEioIexhUg5DuIJCXitdvMEiXXkZjBDZ8feASF/+uwdcmxhVWMJjgDe13AJOVNftI751vXHbdmCqtUKTCkkekc1mZctM2rfOwG5Y3OrECMoMyO4bMqus555AYNLRxogSGw4VRXjTdFxI48LYqbuulzsOuuRl4T3LDyFrUpX9AWJYrNYo+RNrhmRXKTxybFs9L5KGVJiVbjpdYz4WHp//9oACAECAAEFAvxr/9oACAEDAAEFAvX9/avfXjo9ux5+u//aAAgBAgIGPwKa/9oACAEDAgY/AltE/9oACAEBAQY/AuJM6UTpRYcc0qSXa53TBHG4pibWI57tg2KuiIq8XmQZNIDjONQGv+Lx+sl1/wAhLhNhjkssrq9lAe2NYvmuQHtQOAEOjtxpDdquWgzaAMOJ2EiPGhz3dX3tKMyoyPPJLJX1ny9UuvqkUCPVNSse9PK79ZKVo3m9uSFPLHG1z3y41bOjWEqExjdVI+bGjOEjf7W/Th5sKzCdjsuoaTIhfElMP/kLYUV8uHCbYwZcY8XUzGkCRqkZ1Ebq3wc03c/u0+dlJ5AZ8CiA2fMhy3xoNkUTHEPDfDJOtp117gI96l6nVTXV+m2shSCdY8OvhRTm1VeqaPGGIhNXeZd7268/1eX4pWssR22FSgxLdJsVoAEeckoTCQStMVTC1iLzcjF0c1UReeljayWlfHrIMuwOyOzqHeGHHJIK0I9W9QrmDXamqarxGyvCZ8qDTW757PiLqtDKgtKCYeFMc+vZKFIiqZ4VdsDLaBd25R7nP1xdbxJhYUq3i4/jdRRVcGHR1E+UMr2vi1IjR0Gx213qyCyyjVV2Km9+vEvCzxLuXexcVm5f0K6vZLbJroIZRnxomklp5Fmb2jmjEjPO5UTXnxAvI8G0rQ2AyEZCuoTq60AgzFArZcJ7nuA9yi1RNebVRftyfBYTLBLrEhRDWjjxxshObMYEgvanbIeQq7Tprqxv6XciiP6QM1x6Bd1m5zvzUqLGr5BWta53PRVn/XRBctE4mwC/hTokiGXx/DkheF/grV/lf9U4u8Vl6Ms8PyyzgyY27c4QZew7HLz5I6eyU3wRPT+/jsXgIdSkPlLMitAIvNlbFmQ2NLt567osWdz/AGdP9/2f1GXkVyuh4X2ayigBJYvmDaJQsCViO8RvHKFPbyXXyffx2YkRbCTGnPzqAjZrXueZr9c36b3bnesjHtau13JdOMHx3GZVhCw3Jc2JL7lXttd3SutLKQykjRj5Lex3msIFfahjF9z7dBs9JFa1HeOCNwjJ+2WOWcme1Z9JgN5meRRsppUeL3kW4jx4E+oiv9tvUR5Lg+bzbl6aK3+puVjAlNkUbEYp6USCQ732YqeI+IgwORWnL1kTYxeTnaJw/JL+/wC2wcmIKaawym/zLMv/AEqruGyDtYRK6EI81sqI/TpRo4nsKNPB+5yr2bj5H3JxOxshV8qWajzQ2VUOK57HUr0rLGbYPhQZfyKQBjarZqD6hn666EXqdxcLhVkPApkjCrCxPl+A5pZ5nhtK1A+3ZPSoC90anmxyF6m9n5j/AC925avIrKs7bZ77OdHkNyjC+5+Q4/3Bs3nmtQJJUQyhuSTE6vrBYxuxo1XyIzfx/wBJ7i/7Eb/W8YH3sx0HVm4dPFW3LU1b1auQd5IbJD2tVWQivkSYpV8fzjdOK7JsflMkwJ4muVu5vXhSka1ZFfNGir0ZkR7tr2/xTVqoq5fkzInT7YZdSSMkyi4K/wBvWUhmKQxSmM7yHtfnOp0Y7NSFFP8AIiq1dMs70T45gUlEJ+P4gIyJ6LjAWO0bHeZELGqDEJIanLrT+S6cuJ17LIEloUZI2P1bnp1bK0exeinT13+zjKvUO/waNNP5lai573IuYo7DI8rqsizeQC1Z1BWEGvqbCZVCshtVOqG0I88h/NNwpWnJeO32ZEq+31aS/vwQ3Vt9Eskx6LuJkDNKgFfHnnFZqkDcxX+TRxNV8OK3t5QrjOskU1uTW+W1eQWVRWdIRiNr/iqlseZYPOwGm5ikFqRuuiI9UpmyzY1T3N7UQ7BYmKY1N93YBNHGV00VLTV026BWleiqP3DdUa3RVVWu4mXeHzqmyIpGx7STEjpGsxlTXphtQHBHs45NB+Vp2NXy8vDj5U+O0RrRHoRLItRAJP
R6eD/ePjrI3p9d3CxLWvg2cVV3LGsIgJkdXJ4KoZAyD1T7uFjU9XXVUdXb1j1sKNBCr18XKKKMTFd+/Tj5ONjdBHst/U+QDT14pu/l5/dsjoffy8d32T6W3ijnVlpFNCnRDa7DxzsVhGaoqOY7ReTmqjmrzRUXifadop97Z45MJ1BJTqOdPYNNXCi3WOlGUNmeNzawwwFa5vP01crOIQO4Eu9p8dAdpZMjIooqSOD+8SJjEYFc6XYdNVQaqBrU8FI1F4Zjfa/HryrwWqjNq6JMfqa7LVtWdQhJN/cE+MnFBcWsojiE3CAgW7WN5JudHzPvhKsfj4zhkZU2stpbW1Qbt4oSxROcGkp9/wCKPQZHc0axu7qJndVVxHyJUvCMnr66BEFuIaQeimxokSMAac3kI5rGNT9vLjtHjkfGbo19UZpBm2dQOAd0+BEYuX75EqMjeoETfeC5r/iJ9eO1mSxqiwPQVeMX0axuBRiPr4Ug9blYwhkyUTpiIUkoaIi+KvT68ZtldmDuXIoMwjwXVV92ziitJ8VYoI7CVFzBZDsbEMUfSRonMZsXYxfr08yzWRjmb0D7720NkrOLuAa1yIEVU6NlNxyHTQH0sqOwW31SkcqEXx1Vf0v/2gAIAQEDAT8hh+msG6h9YEVgWYjSoob2Ffpg7VLD2TnpAIA09uBJIQjWoHCKwRSVdy4AzQMQUYJqYIqfbKPzq65nsO6Dj9ttsm8+u71sUzB3LcQGAYe0uy5i2kSyAlbjL0wqO4kW7tJ6LqFfWtnGvJKAapA1NV0UhA+SBMnL6jmn0Q3uYujrt78iaDoApjI3MZClgXNjIZFBZ2zSfpFrkGYc82ONIU7hHej5IeLiKi7dqi0J1YJxY0JWvixLNj3ZlFy1GuMRA7KNnBedyke7nkkJi3/LwPHcSdIuBwbaI1A7cAWUjbgIWRBU3OBKbGJwoZKP9HRgD4xdQh9mx2ysqsxicBIuiOXdAM+UUDbBZrNHJkvww4eHOjfeHB9km8ojLCPqtrBZVaWvmrFnaUVhVhaUz9t4YBjOqIUdAZXLc3bhFgBjVQ20GPVgULVd6gaZf/rg/YeoJYF5qIUW9BdBgqDdxg1dYOxrIQcJ7ZgKUThyjsH2ty0Vv6SrkUW7U5vI7McHpqBNkysCDDAmLmz8gyrqWA078ETELuKu6rl5d8JUnuy0wI9Hqun2bwUNurHO1sk3MHR5QCPhCRzqOaDA/FGpmYv+7jwQ+kXbk11/XEV8uCMIrhwC4HFT5H//2gAIAQIDAT8h/j3+L//aAAgBAwMBPyH/AATAvL7L43NcX4X5duTsOO2dI4nc7jqfMw74v8n1NzT+Qy3FqGrl4IvUweik1KWZNSl3HU6J2TuHl//aAAwDAQMCEQMRAAAQEUAAAAAArhAAAAAwzGoFAAFiwEMJg2Nt12ZoBBupM9QAA//aAAgBAQMBPxCL6a9u4ywF0YBAnc8egeQaGJ0bdt7Y/M/M2PqVRnDbkk9pSDhALh00hsToJw1+J2fWCwBH6s2TkY3xzUt+s2A/YK8bbKkEOkk4iSRDk2QsF1hGJTNSUOmDbeEMQU2S2fCuS/CYr8e4HtaH7EAVpIFIGwQ4DEthsawgCQGgvjdKN606aLZZlMktBjbFVytmNJkWK3AD/UtQlbg9TdbP+GsIAo8NA5W+6YYdrUEz1SZOaMXKMoKbqa1mq4U8QQxYY97/ANzsGwh1ipqAHGpH5IoUa8VqWOYoVNZdyL8NAviCSZ+3If5sCSacyWOgsKAP2vpFiKNmxlls9Om5Esn19gKqEfDtDdJ1d7pe5Vey4++i/sFPlGBgYLGscj4xwgiIZp3O7BGUptEUFTG0AqUnRH+1e3C2hvZUcRV+/MXRhFEtE59pBGbiWQyyUB+UHQDC1Oc05hAgRaIsxEQFpzEuO4qS6lWk7bYdYANyIKTOahvBwJvPpOKSfNQ8hBDEG+kYnrSAibaawISC+Ugwp+KBqxX3OnKsqEBr8+kgXEpCAIcl1aZ+TQQS/ByseNM6l2Fpe6JTVN4AuyiNfI+WuxTJZmb0K7WUoKzeHx//2gAIAQIDAT8Q5r57iLWD2Vxqb4rwry6ORpeOidsMzqdE7n3EeuK/YfJqbP2OCoFx3UrLA7mX0WO5YTDuWGobnbOmdR8v/9oACAEDAwE/EOM5XBBDSY9rqFTDAGyGj1gVOyOMwaWOI1S+EDXcGy+QKnZ5GA+8PCdkz4TlfhH+2GgGrzAAUl/kLxbqFZKX/cRS0IwqsdTAXh/7P9kCIeoILJlXtMyQF9w01tmxRKUEuj7BHJKPk3KrUo+cJZTH8advUdQxBlxsPyI1Vm4jdWIiJzT8hytPl//Z";
byte[] imageAsBytes = android.util.Base64
.decode(sig, android.util.Base64.DEFAULT);
Bitmap btMap = BitmapFactory.decodeByteArray(imageAsBytes, 0,
imageAsBytes.length);
Bitmap bitmapOrg = resizeImage(btMap, 384,
150); // scale to the printer's line width in dots (384 here)
byte[] sendbuf = StartBmpToPrintCode(bitmapOrg, 0);
/*byte[] a=t2.getBytes();
byte[] combined = new byte[a.length + sendbuf.length];
System.arraycopy(a,0,combined,0 ,a.length);
System.arraycopy(sendbuf,0,combined,a.length,sendbuf.length);*/
mbtOutputStream = mbtSocket.getOutputStream();
mbtOutputStream.write(sendbuf);
mbtOutputStream.flush();
} catch (IOException e) {
e.printStackTrace();
}
}
public static Bitmap resizeImage(Bitmap bitmap, int w, int h) {
Bitmap BitmapOrg = bitmap;
int width = BitmapOrg.getWidth();
int height = BitmapOrg.getHeight();
int newWidth = w;
int newHeight = h;
float scaleWidth = ((float) newWidth) / width;
float scaleHeight = ((float) newHeight) / height;
Matrix matrix = new Matrix();
matrix.postScale(scaleWidth, scaleHeight);
// start from (0, 0); an offset larger than 0 with the full width/height would throw
Bitmap resizedBitmap = Bitmap.createBitmap(BitmapOrg, 0, 0, width,
height, matrix, true);
return resizedBitmap;
}
private static byte[] StartBmpToPrintCode(Bitmap bitmap, int t) {
byte temp = 0;
int j = 7;
int start = 0;
if (bitmap != null) {
int mWidth = bitmap.getWidth();
int mHeight = bitmap.getHeight();
int[] mIntArray = new int[mWidth * mHeight];
byte[] data = new byte[mWidth * mHeight];
bitmap.getPixels(mIntArray, 0, mWidth, 0, 0, mWidth, mHeight);
encodeYUV420SP(data, mIntArray, mWidth, mHeight, t);
byte[] result = new byte[mWidth * mHeight / 8];
for (int i = 0; i < mWidth * mHeight; i++) {
temp = (byte) ((byte) (data[i] << j) + temp);
j--;
if (j < 0) {
j = 7;
}
if (i % 8 == 7) {
result[start++] = temp;
temp = 0;
}
}
if (j != 7) {
result[start++] = temp;
}
int aHeight = 24 - mHeight % 24;
byte[] add = new byte[aHeight * 48];
byte[] nresult = new byte[mWidth * mHeight / 8 + aHeight * 48];
System.arraycopy(result, 0, nresult, 0, result.length);
System.arraycopy(add, 0, nresult, result.length, add.length);
byte[] byteContent = new byte[(mWidth / 8 + 4)
* (mHeight + aHeight)];// print buffer: 4 header bytes + (mWidth / 8) data bytes per row
byte[] bytehead = new byte[4];// per-line print header
bytehead[0] = (byte) 0x1f;
bytehead[1] = (byte) 0x10;
bytehead[2] = (byte) (mWidth / 8);
bytehead[3] = (byte) 0x00;
for (int index = 0; index < mHeight + aHeight; index++) {
System.arraycopy(bytehead, 0, byteContent, index * 52, 4);
System.arraycopy(nresult, index * 48, byteContent,
index * 52 + 4, 48);
}
return byteContent;
}
return null;
}
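// Note added for clarity (not part of the original answer): each printed row in byteContent
// is (mWidth / 8 + 4) bytes long: a 4-byte header 0x1F 0x10 (mWidth / 8) 0x00 followed by
// (mWidth / 8) data bytes, each packing 8 horizontal dots with the most significant bit on the left.
// For the 384-dot-wide bitmap produced in senddatatodevice() that is 4 + 48 = 52 bytes per row,
// which is why the copy loop above uses offsets of index * 52. Whether a printer accepts this
// exact 0x1F 0x10 raster command depends on the model; many printers use a different raster
// command, so check the printer's programming manual.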
public static void encodeYUV420SP(byte[] yuv420sp, int[] rgba, int width,
int height, int t) {
final int frameSize = width * height;
int[] U, V;
U = new int[frameSize];
V = new int[frameSize];
final int uvwidth = width / 2;
int r, g, b, y, u, v;
int bits = 8;
int index = 0;
int f = 0;
for (int j = 0; j < height; j++) {
for (int i = 0; i < width; i++) {
// getPixels() returns packed ARGB; take the R, G and B channels. (The original shifted the
// masked values without re-masking, which picked up the alpha byte as "red".)
r = (rgba[index] >> 16) & 0xff;
g = (rgba[index] >> 8) & 0xff;
b = rgba[index] & 0xff;
// rgb to yuv
y = ((66 * r + 129 * g + 25 * b + 128) >> 8) + 16;
u = ((-38 * r - 74 * g + 112 * b + 128) >> 8) + 128;
v = ((112 * r - 94 * g - 18 * b + 128) >> 8) + 128;
// clip y
// yuv420sp[index++] = (byte) ((y < 0) ? 0 : ((y > 255) ? 255 :
// y));
byte temp = (byte) ((y < 0) ? 0 : ((y > 255) ? 255 : y));
if (t == 0) {
yuv420sp[index++] = temp > 0 ? (byte) 1 : (byte) 0;
} else {
yuv420sp[index++] = temp > 0 ? (byte) 0 : (byte) 1;
}
// {
// if (f == 0) {
// yuv420sp[index++] = 0;
// f = 1;
// } else {
// yuv420sp[index++] = 1;
// f = 0;
// }
// }
}
}
f = 0;
}
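For reference, here is a rough sketch (not part of the original answer) of how the mbtSocket field used by senddatatodevice() could be opened, reusing the device address and SPP-style UUID from the question code. Run it off the UI thread and add proper exception handling:
BluetoothAdapter adapter = BluetoothAdapter.getDefaultAdapter();
BluetoothDevice printer = adapter.getRemoteDevice("50:C3:00:00:00:00"); // printer MAC from the question
mbtSocket = printer.createRfcommSocketToServiceRecord(
        UUID.fromString("fa87c0d0-afac-11de-8a39-0800200c9a66")); // SPP-style UUID from the question
adapter.cancelDiscovery(); // discovery slows down the connection
mbtSocket.connect();       // blocking call; throws IOException on failure
senddatatodevice();        // builds the raster data and writes it to mbtSocket's output stream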

Related

Encoding ARGB_8888 bitmaps to video file using mediacodec android fails on Pixel 3A device

I'm decoding and encoding a video file using Android MediaCodec. Both decoding and encoding work fine with the following code, except on the Pixel 3A device. For encoding, the encoder uses a set of bitmaps to create a video file, but only on the Pixel 3A does encoding the bitmaps fail and produce a distorted video file.
Device details:
Name: Pixel 3A, Android version: 11
public class ImageProcessor implements Runnable {
private static final String VIDEO = "video/";
private static final String TAG = "VideoDecoder";
private static final long DEFAULT_TIMEOUT_US = 0;
private final String inputFile;
private final String outputFile;
private MediaCodec mDecoder;
private MediaExtractor mExtractor;
private RenderScript rs;
private ScriptIntrinsicYuvToRGB yuvToRgbIntrinsic;
private int width;
private int height;
private MediaCodec mEncoder;
private MediaMuxer mediaMuxer;
private int mTrackIndex;
private ScriptC_rotators rotateScript;
private int newWidth = 0, newHeight = 0;
private int preRotateHeight;
private int preRotateWidth;
private Allocation fromRotateAllocation;
private Allocation toRotateAllocation;
private int frameIndex;
private int deviceOrientation;
private int sensorOrientation;
private final Handler handler;
boolean sawOutputEOS = false;
boolean sawInputEOS = false;
private static final int SENSOR_ORIENTATION_DEFAULT_DEGREES = 90;
private FrameObject defaultObject;
private int faceBlurCount;
private long startTime;
private float frameRate;
private int generateIndex;
public ImageProcessor(Handler handler, String inputFile, String outputFile) {
this.inputFile = inputFile;
this.outputFile = outputFile;
this.handler = handler;
}
public void setDeviceOrientation(int deviceOrientation) {
this.deviceOrientation = deviceOrientation;
}
public void setSensorOrientation(int sensorOrientation) {
this.sensorOrientation = sensorOrientation;
}
public void setDefaultObject(FrameObject frameObject) {
this.defaultObject = frameObject;
}
private void init() {
try {
mExtractor = new MediaExtractor();
mExtractor.setDataSource(inputFile);
MediaMetadataRetriever retriever = new MediaMetadataRetriever();
retriever.setDataSource(inputFile);
FFmpegMediaMetadataRetriever metadataRetriever = new FFmpegMediaMetadataRetriever();
metadataRetriever.setDataSource(inputFile);
rs = RenderScript.create(Globals.getAppContext());
yuvToRgbIntrinsic = ScriptIntrinsicYuvToRGB.create(rs, Element.U8_4(rs));
rotateScript = new ScriptC_rotators(rs);
for (int i = 0; i < mExtractor.getTrackCount(); i++) {
MediaFormat format = mExtractor.getTrackFormat(i);
String mimeType = format.getString(MediaFormat.KEY_MIME);
width = format.getInteger(MediaFormat.KEY_WIDTH);
height = format.getInteger(MediaFormat.KEY_HEIGHT);
frameRate = Float.parseFloat(metadataRetriever.extractMetadata(
FFmpegMediaMetadataRetriever.METADATA_KEY_FRAMERATE));
int bitRate = Integer.parseInt(retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_BITRATE));
if (mimeType != null && mimeType.startsWith(VIDEO)) {
mExtractor.selectTrack(i);
mDecoder = MediaCodec.createDecoderByType(mimeType);
mDecoder.configure(format, null, null, 0 /* Decoder */);
mDecoder.start();
MediaCodecInfo mediaCodecInfo = selectCodec(mimeType);
if (mediaCodecInfo == null) {
throw new RuntimeException("Failed to initialise codec");
}
switch (deviceOrientation) {
case Surface.ROTATION_0:
case Surface.ROTATION_180:
newWidth = height;
newHeight = width;
break;
case Surface.ROTATION_90:
case Surface.ROTATION_270:
newWidth = width;
newHeight = height;
break;
}
MediaFormat mediaFormat = MediaFormat.createVideoFormat(mimeType, newWidth, newHeight);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
mediaFormat.setFloat(MediaFormat.KEY_FRAME_RATE, frameRate);
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
mEncoder.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mEncoder.start();
mediaMuxer = new MediaMuxer(outputFile, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
break;
}
}
} catch (IOException e) {
throw new RuntimeException("Failed to initialise codec");
}
}
/**
* Returns the first codec capable of encoding the specified MIME type, or null if no
* match was found.
*/
private MediaCodecInfo selectCodec(String mimeType) throws IOException {
MediaCodecList list = new MediaCodecList(MediaCodecList.ALL_CODECS);
MediaCodecInfo[] codecInfos = list.getCodecInfos();
for (MediaCodecInfo info : codecInfos) {
if (info.isEncoder()) {
mEncoder = MediaCodec.createByCodecName(info.getName());
String[] types = info.getSupportedTypes();
for (String type : types) {
if (type.equalsIgnoreCase(mimeType)) {
return info;
}
}
}
}
return null;
}
public void startProcessing() {
init();
MediaCodec.BufferInfo decoderBufferInfo = new MediaCodec.BufferInfo();
MediaCodec.BufferInfo encoderBufferInfo = new MediaCodec.BufferInfo();
startTime = System.currentTimeMillis();
while (!sawOutputEOS) {
Log.d(TAG, "startProcessing: " + frameIndex);
if (!sawInputEOS && mDecoder != null) {
int inputBufferId = mDecoder.dequeueInputBuffer(DEFAULT_TIMEOUT_US);
if (inputBufferId >= 0) {
ByteBuffer inputBuffer = mDecoder.getInputBuffer(inputBufferId);
int sampleSize = mExtractor.readSampleData(inputBuffer, 0);
if (sampleSize < 0) {
mDecoder.queueInputBuffer(inputBufferId, 0, 0, 0L, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
sawInputEOS = true;
} else {
if (mExtractor != null) {
long presentationTimeUs = mExtractor.getSampleTime();
mDecoder.queueInputBuffer(inputBufferId, 0, sampleSize, presentationTimeUs, 0);
mExtractor.advance();
}
}
}
}
int outputBufferId = mDecoder.dequeueOutputBuffer(decoderBufferInfo, DEFAULT_TIMEOUT_US);
if (outputBufferId >= 0) {
if ((decoderBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
sawOutputEOS = true;
Log.d(TAG, "endProcessing: " + TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis() - startTime));
}
boolean doRender = (decoderBufferInfo.size != 0);
if (doRender && mDecoder != null) {
Image image = mDecoder.getOutputImage(outputBufferId);
if (image != null) {
try {
frameIndex++;
byte[] frameData = quarterNV21(convertYUV420888ToNV21(image), image.getWidth(), image.getHeight());
byte[] data = getDataFromImage(image);
Type.Builder yuvType = new Type.Builder(rs, Element.U8(rs)).setX(data.length);
Allocation in = Allocation.createTyped(rs, yuvType.create(), Allocation.USAGE_SCRIPT);
Type.Builder rgbaType = new Type.Builder(rs, Element.RGBA_8888(rs)).setX(width).setY(height);
Allocation out = Allocation.createTyped(rs, rgbaType.create(), Allocation.USAGE_SCRIPT);
Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
in.copyFromUnchecked(data);
yuvToRgbIntrinsic.setInput(in);
yuvToRgbIntrinsic.forEach(out);
out.copyTo(bitmap);
image.close();
encodeBitmaps(bitmap, encoderBufferInfo);
} catch (Exception e) {
Log.d(TAG, "startProcessing: " + e.getMessage());
}
}
if (mDecoder != null) {
mDecoder.releaseOutputBuffer(outputBufferId, false);
}
}
}
}
}
private long computePresentationTime(int frameIndex) {
return 132 + frameIndex * 1000000 / (int)frameRate;
}
private byte[] convertYUV420888ToNV21(Image image) {
byte[] data;
ByteBuffer buffer0 = image.getPlanes()[0].getBuffer();
ByteBuffer buffer2 = image.getPlanes()[2].getBuffer();
int buffer0_size = buffer0.remaining();
int buffer2_size = buffer2.remaining();
data = new byte[buffer0_size + buffer2_size];
buffer0.get(data, 0, buffer0_size);
buffer2.get(data, buffer0_size, buffer2_size);
return data;
}
private byte[] quarterNV21(byte[] data, int iWidth, int iHeight) {
byte[] yuv = new byte[iWidth * iHeight * 3 / 2];
// copy the full-resolution luma (Y) plane; the chroma part of the buffer is left zeroed
int i = 0;
for (int y = 0; y < iHeight; y++) {
for (int x = 0; x < iWidth; x++) {
yuv[i] = data[y * iWidth + x];
i++;
}
}
return yuv;
}
private void release() {
try {
if (mExtractor != null) {
mExtractor.release();
mExtractor = null;
}
if (mDecoder != null) {
mDecoder.stop();
mDecoder.release();
mDecoder = null;
}
if (mEncoder != null) {
mEncoder.stop();
mEncoder.release();
mEncoder = null;
}
if (mediaMuxer != null) {
mediaMuxer.stop();
mediaMuxer.release();
mediaMuxer = null;
}
} catch (Exception e) {
Log.d(TAG, "imageprocessor release: " + e.fillInStackTrace());
}
Message message = handler.obtainMessage();
Bundle bundle = new Bundle();
bundle.putString(FrameUtil.COMPUTATION_SUCCESS_KEY, this.outputFile);
bundle.putInt(FrameUtil.FACE_BLUR_COUNT, faceBlurCount);
message.setData(bundle);
handler.sendMessage(message);
}
// encode the bitmap to a new video file
private void encodeBitmaps(Bitmap bitmap, MediaCodec.BufferInfo encoderBufferInfo) {
Bitmap rotatedBitmap = null;
switch (deviceOrientation) {
case Surface.ROTATION_0:
if (sensorOrientation == SENSOR_ORIENTATION_DEFAULT_DEGREES) {
rotatedBitmap = rotateBitmap(bitmap, 270);
} else {
rotatedBitmap = rotateBitmap(bitmap, 90);
}
break;
case Surface.ROTATION_90:
Bitmap newBitmap = rotateBitmap(bitmap, 90);
bitmap.recycle();
rotatedBitmap = rotateBitmap(newBitmap, 90);
break;
default:
rotatedBitmap = bitmap;
}
byte[] bytes = getNV21(rotatedBitmap.getWidth(), rotatedBitmap.getHeight(), rotatedBitmap);
int inputBufIndex = mEncoder.dequeueInputBuffer(DEFAULT_TIMEOUT_US);
long ptsUsec = computePresentationTime(generateIndex);
if (inputBufIndex >= 0) {
ByteBuffer inputBuffer = mEncoder.getInputBuffer(inputBufIndex);
if (inputBuffer != null) {
inputBuffer.clear();
inputBuffer.put(bytes);
mEncoder.queueInputBuffer(inputBufIndex, 0, bytes.length,
ptsUsec, 0);
generateIndex++;
}
}
int encoderStatus = mEncoder.dequeueOutputBuffer(encoderBufferInfo, DEFAULT_TIMEOUT_US);
if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
MediaFormat newFormat = mEncoder.getOutputFormat();
mTrackIndex = mediaMuxer.addTrack(newFormat);
mediaMuxer.start();
} else if (encoderBufferInfo.size != 0) {
ByteBuffer outputBuffer = mEncoder.getOutputBuffer(encoderStatus);
if (outputBuffer != null) {
outputBuffer.position(encoderBufferInfo.offset);
outputBuffer.limit(encoderBufferInfo.offset + encoderBufferInfo.size);
mediaMuxer.writeSampleData(mTrackIndex, outputBuffer, encoderBufferInfo);
mEncoder.releaseOutputBuffer(encoderStatus, false);
}
if ((encoderBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
mEncoder.signalEndOfInputStream();
}
}
}
private Allocation getFromRotateAllocation(Bitmap bitmap) {
int targetHeight = bitmap.getWidth();
int targetWidth = bitmap.getHeight();
if (targetHeight != preRotateHeight || targetWidth != preRotateWidth) {
preRotateHeight = targetHeight;
preRotateWidth = targetWidth;
fromRotateAllocation = Allocation.createFromBitmap(rs, bitmap,
Allocation.MipmapControl.MIPMAP_NONE,
Allocation.USAGE_SCRIPT);
}
return fromRotateAllocation;
}
private Allocation getToRotateAllocation(Bitmap bitmap) {
int targetHeight = bitmap.getWidth();
int targetWidth = bitmap.getHeight();
if (targetHeight != preRotateHeight || targetWidth != preRotateWidth) {
toRotateAllocation = Allocation.createFromBitmap(rs, bitmap,
Allocation.MipmapControl.MIPMAP_NONE,
Allocation.USAGE_SCRIPT);
}
return toRotateAllocation;
}
private Bitmap rotateBitmap(Bitmap bitmap, int angle) {
Bitmap.Config config = bitmap.getConfig();
int targetHeight = bitmap.getWidth();
int targetWidth = bitmap.getHeight();
rotateScript.set_inWidth(bitmap.getWidth());
rotateScript.set_inHeight(bitmap.getHeight());
Allocation sourceAllocation = getFromRotateAllocation(bitmap);
sourceAllocation.copyFrom(bitmap);
rotateScript.set_inImage(sourceAllocation);
Bitmap target = Bitmap.createBitmap(targetWidth, targetHeight, config);
final Allocation targetAllocation = getToRotateAllocation(target);
if (angle == 90) {
rotateScript.forEach_rotate_90_clockwise(targetAllocation, targetAllocation);
} else {
rotateScript.forEach_rotate_270_clockwise(targetAllocation, targetAllocation);
}
targetAllocation.copyTo(target);
return target;
}
private byte[] getNV21(int inputWidth, int inputHeight, Bitmap bitmap) {
int[] argb = new int[inputWidth * inputHeight];
bitmap.getPixels(argb, 0, inputWidth, 0, 0, inputWidth, inputHeight);
byte[] yuv = new byte[inputWidth * inputHeight * 3 / 2];
encodeYUV420SP(yuv, argb, inputWidth, inputHeight);
bitmap.recycle();
return yuv;
}
private void encodeYUV420SP(byte[] yuv420sp, int[] rgb, int width, int height) {
final int frameSize = width * height;
int yIndex = 0;
int uvIndex = frameSize;
int R, G, B, Y, U, V;
int index = 0;
for (int j = 0; j < height; j++) {
for (int i = 0; i < width; i++) {
//a = (aRGB[index] & 0xff000000) >> 24; //not using it right now
R = (rgb[index] & 0xff0000) >> 16;
G = (rgb[index] & 0xff00) >> 8;
B = (rgb[index] & 0xff);
Y = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16;
U = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128;
V = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128;
yuv420sp[yIndex++] = (byte) ((Y < 0) ? 0 : (Math.min(Y, 255)));
if (j % 2 == 0 && index % 2 == 0) {
yuv420sp[uvIndex++] = (byte) ((U < 0) ? 0 : (Math.min(U, 255)));
yuv420sp[uvIndex++] = (byte) ((V < 0) ? 0 : (Math.min(V, 255)));
}
index++;
}
}
}
private static byte[] getDataFromImage(Image image) {
Rect crop = image.getCropRect();
int format = image.getFormat();
int width = crop.width();
int height = crop.height();
Image.Plane[] planes = image.getPlanes();
byte[] data = new byte[width * height * ImageFormat.getBitsPerPixel(format) / 8];
byte[] rowData = new byte[planes[0].getRowStride()];
int channelOffset = 0;
int outputStride = 1;
for (int i = 0; i < planes.length; i++) {
switch (i) {
case 0:
channelOffset = 0;
outputStride = 1;
break;
case 1:
channelOffset = width * height + 1;
outputStride = 2;
break;
case 2:
channelOffset = width * height;
outputStride = 2;
break;
}
ByteBuffer buffer = planes[i].getBuffer();
int rowStride = planes[i].getRowStride();
int pixelStride = planes[i].getPixelStride();
int shift = (i == 0) ? 0 : 1;
int w = width >> shift;
int h = height >> shift;
buffer.position(rowStride * (crop.top >> shift) + pixelStride * (crop.left >> shift));
for (int row = 0; row < h; row++) {
int length;
if (pixelStride == 1 && outputStride == 1) {
length = w;
buffer.get(data, channelOffset, length);
channelOffset += length;
} else {
length = (w - 1) * pixelStride + 1;
buffer.get(rowData, 0, length);
for (int col = 0; col < w; col++) {
data[channelOffset] = rowData[col * pixelStride];
channelOffset += outputStride;
}
}
if (row < h - 1) {
buffer.position(buffer.position() + rowStride - length);
}
}
}
return data;
}
@Override
public void run() {
try {
startProcessing();
} catch (Exception ex) {
Log.d(TAG, "run: " + ex.getCause());
} finally {
release();
}
}
public void stopProcessing() {
sawOutputEOS = true;
}
}
Kindly have a look at the code and tell me what I am doing wrong.
Distorted video frame

Unable to change ARGB to YUV420Planar

I'm attempting to use MediaCodec and MediaMuxer to turn a series of JPEGs into an MP4.
No matter what I do, I always get a green, static-filled picture as output in the MP4.
Code Follows:
public class AvcEncoder
{
public bool CanEncode = true;
MediaCodec codec;
MediaMuxer muxer;
MediaFormat format;
public AvcEncoder()
{
codec = MediaCodec.CreateEncoderByType("video/avc");
format = MediaFormat.CreateVideoFormat("video/avc", 720, 480);
format.SetInteger(MediaFormat.KeyBitRate, 700000);
format.SetInteger(MediaFormat.KeyFrameRate, 10);
format.SetInteger(MediaFormat.KeyColorFormat, (int)Android.Media.MediaCodecCapabilities.Formatyuv420planar);
format.SetInteger(MediaFormat.KeyIFrameInterval, 5);
codec.Configure(format, null, null, MediaCodecConfigFlags.Encode);
codec.Start();
Java.IO.File f = new Java.IO.File(Android.OS.Environment.ExternalStorageDirectory, "Parkingdom");
if (!f.Exists())
{
f.Mkdirs();
}
muxer = new MediaMuxer(f.ToString() + "/test.mp4", MuxerOutputType.Mpeg4);
}
public void EncodeFrame(Bitmap image)
{
int mWidth = image.Width;
int mHeight = image.Height;
int[] mIntArray = new int[mWidth * mHeight];
// Copy pixel data from the Bitmap into the 'intArray' array
image.GetPixels(mIntArray, 0, mWidth, 0, 0, mWidth, mHeight);
byte[] byteArray = new byte[mWidth * mHeight * 3 / 2];
// Call to encoding function : convert intArray to Yuv Binary data
EncodeYUV420P(byteArray, mIntArray, mWidth, mHeight);
using (var stream = new MemoryStream())
{
image.Compress(Bitmap.CompressFormat.Png, 100, stream);
byteArray = stream.ToArray();
}
int inputBufferIndex = codec.DequeueInputBuffer(-1);
if (inputBufferIndex >= 0)
{
ByteBuffer buffer = codec.GetInputBuffer(inputBufferIndex);
buffer.Clear();
buffer.Put(byteArray);
codec.QueueInputBuffer(inputBufferIndex, 0, byteArray.Length, 0, 0);
}
}
public void SaveMp4()
{
CanEncode = false;
bool running = true;
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
int track = -1;
while (running)
{
int index = codec.DequeueOutputBuffer(bufferInfo, 10000);
if (index == (int)MediaCodecInfoState.OutputFormatChanged)
{
MediaFormat format = codec.OutputFormat;
track = muxer.AddTrack(format);
muxer.Start();
}
else if (index == (int)MediaCodecInfoState.TryAgainLater)
{
break;
}
else if (index >= 0)
{
if ((bufferInfo.Flags & MediaCodecBufferFlags.CodecConfig) != 0)
{
bufferInfo.Size = 0;
}
if (track != -1)
{
ByteBuffer outBuffer = codec.GetOutputBuffer(index);
outBuffer.Position(bufferInfo.Offset);
outBuffer.Limit(bufferInfo.Offset + bufferInfo.Size);
muxer.WriteSampleData(track, outBuffer, bufferInfo);
codec.ReleaseOutputBuffer(index, false);
}
}
}
codec.Stop();
codec.Release();
muxer.Stop();
muxer.Release();
CanEncode = true;
}
void EncodeYUV420P(byte[] yuv420p, int[] argb, int width, int height)
{
int frameSize = width * height;
int chromasize = frameSize / 4;
int yIndex = 0;
int uIndex = frameSize;
int vIndex = frameSize + chromasize;
int a, R, G, B, Y, U, V;
int index = 0;
for (int j = 0; j < height; j++)
{
for (int i = 0; i < width; i++)
{
a = (int)(argb[index] & 0xff000000) >> 24; // a is not used obviously
R = (argb[index] & 0xff0000) >> 16;
G = (argb[index] & 0xff00) >> 8;
B = (argb[index] & 0xff) >> 0;
Y = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16;
U = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128;
V = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128;
yuv420p[yIndex++] = (byte)((Y < 0) ? 0 : ((Y > 255) ? 255 : Y));
if (j % 2 == 0 && index % 2 == 0)
{
yuv420p[uIndex++] = (byte)((U < 0) ? 0 : ((U > 255) ? 255 : U));
yuv420p[vIndex++] = (byte)((V < 0) ? 0 : ((V > 255) ? 255 : V));
}
index++;
}
}
}
}
Each time a new JPEG is generated, "EncodeFrame" is called, which is supposed to convert it into YUV420Planar format for the media codec. The codec I'm testing with doesn't support semiplanar.
In case someone comes across this later: I changed EncodeFrame to use a Surface instead and just used DrawBitmap(). It's slower than the byte copy, but it works for my purposes. (A rough sketch of that approach is shown below.)
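A minimal sketch of that Surface-based approach, written here in Java rather than the C#/Xamarin code above. It assumes API 23+ for lockHardwareCanvas and that the encoder is configured with COLOR_FormatSurface instead of a YUV color format; treat it as an outline under those assumptions, not a drop-in replacement:
// Configure the encoder for Surface input instead of YUV byte buffers.
format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
        MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
Surface inputSurface = codec.createInputSurface(); // must be called after configure() and before start()
codec.start();

// For each JPEG/Bitmap, draw it onto the input surface; each post becomes one encoder input frame.
void encodeFrame(Bitmap image) {
    Canvas canvas = inputSurface.lockHardwareCanvas(); // API 23+
    try {
        canvas.drawBitmap(image, 0f, 0f, null);
    } finally {
        inputSurface.unlockCanvasAndPost(canvas);
    }
}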

How to convert & rotate raw NV21 array image (android.media.Image) from front cam portrait mode in onImageAvailable (android Camera2)?

Note: All info in my post applies only to the Samsung Galaxy S7 device. I do not know how emulators and other devices behave.
In onImageAvailable I continuously convert each image to an NV21 byte array and forward it to an API that expects raw NV21 format.
This is how I initialize the image reader and receive the images:
private void openCamera() {
...
mImageReader = ImageReader.newInstance(WIDTH, HEIGHT,
ImageFormat.YUV_420_888, 1); // only 1 for best performance
mImageReader.setOnImageAvailableListener(
mOnImageAvailableListener, mBackgroundHandler);
...
}
private final ImageReader.OnImageAvailableListener mOnImageAvailableListener
= new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Image image = reader.acquireLatestImage();
if (image != null) {
byte[] data = convertYUV420ToNV21_ALL_PLANES(image); // this image is turned 90 deg using front cam in portrait mode
byte[] data_rotated = rotateNV21_working(data, WIDTH, HEIGHT, 270);
ForwardToAPI(data_rotated); // image data is being forwarded to api and received later on
image.close();
}
}
};
The function that converts the image to raw NV21 (from here) works fine, but the image is (due to Android?) rotated by 90 degrees when using the front camera in portrait mode:
(I modified it slightly, according to comments by Alex Cohn)
private byte[] convertYUV420ToNV21_ALL_PLANES(Image imgYUV420) {
byte[] rez;
ByteBuffer buffer0 = imgYUV420.getPlanes()[0].getBuffer();
ByteBuffer buffer1 = imgYUV420.getPlanes()[1].getBuffer();
ByteBuffer buffer2 = imgYUV420.getPlanes()[2].getBuffer();
// actually here should be something like each second byte
// however I simply get the last byte of buffer 2 and the entire buffer 1
int buffer0_size = buffer0.remaining();
int buffer1_size = buffer1.remaining(); // / 2 + 1;
int buffer2_size = 1;//buffer2.remaining(); // / 2 + 1;
byte[] buffer0_byte = new byte[buffer0_size];
byte[] buffer1_byte = new byte[buffer1_size];
byte[] buffer2_byte = new byte[buffer2_size];
buffer0.get(buffer0_byte, 0, buffer0_size);
buffer1.get(buffer1_byte, 0, buffer1_size);
buffer2.get(buffer2_byte, buffer2_size-1, buffer2_size);
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
try {
// swap 1 and 2 as blue and red colors are swapped
outputStream.write(buffer0_byte);
outputStream.write(buffer2_byte);
outputStream.write(buffer1_byte);
} catch (IOException e) {
e.printStackTrace();
}
rez = outputStream.toByteArray();
return rez;
}
Hence "data" needs to be rotated. Using this function (from here), I get a weird 3-times interlaced picture error:
public static byte[] rotateNV21(byte[] input, int width, int height, int rotation) {
byte[] output = new byte[input.length];
boolean swap = (rotation == 90 || rotation == 270);
// **EDIT:** in portrait mode & front cam this needs to be set to true:
boolean yflip = true;// (rotation == 90 || rotation == 180);
boolean xflip = (rotation == 270 || rotation == 180);
for (int x = 0; x < width; x++) {
for (int y = 0; y < height; y++) {
int xo = x, yo = y;
int w = width, h = height;
int xi = xo, yi = yo;
if (swap) {
xi = w * yo / h;
yi = h * xo / w;
}
if (yflip) {
yi = h - yi - 1;
}
if (xflip) {
xi = w - xi - 1;
}
output[w * yo + xo] = input[w * yi + xi];
int fs = w * h;
int qs = (fs >> 2);
xi = (xi >> 1);
yi = (yi >> 1);
xo = (xo >> 1);
yo = (yo >> 1);
w = (w >> 1);
h = (h >> 1);
// adjust for interleave here
int ui = fs + (w * yi + xi) * 2;
int uo = fs + (w * yo + xo) * 2;
// and here
int vi = ui + 1;
int vo = uo + 1;
output[uo] = input[ui];
output[vo] = input[vi];
}
}
return output;
}
Resulting in this picture:
Note: it is still the same cup, but you see it 3-4 times.
Using another suggested rotate function from here gives the proper result:
public static byte[] rotateNV21_working(final byte[] yuv,
final int width,
final int height,
final int rotation)
{
if (rotation == 0) return yuv;
if (rotation % 90 != 0 || rotation < 0 || rotation > 270) {
throw new IllegalArgumentException("0 <= rotation < 360, rotation % 90 == 0");
}
final byte[] output = new byte[yuv.length];
final int frameSize = width * height;
final boolean swap = rotation % 180 != 0;
final boolean xflip = rotation % 270 != 0;
final boolean yflip = rotation >= 180;
for (int j = 0; j < height; j++) {
for (int i = 0; i < width; i++) {
final int yIn = j * width + i;
final int uIn = frameSize + (j >> 1) * width + (i & ~1);
final int vIn = uIn + 1;
final int wOut = swap ? height : width;
final int hOut = swap ? width : height;
final int iSwapped = swap ? j : i;
final int jSwapped = swap ? i : j;
final int iOut = xflip ? wOut - iSwapped - 1 : iSwapped;
final int jOut = yflip ? hOut - jSwapped - 1 : jSwapped;
final int yOut = jOut * wOut + iOut;
final int uOut = frameSize + (jOut >> 1) * wOut + (iOut & ~1);
final int vOut = uOut + 1;
output[yOut] = (byte)(0xff & yuv[yIn]);
output[uOut] = (byte)(0xff & yuv[uIn]);
output[vOut] = (byte)(0xff & yuv[vIn]);
}
}
return output;
}
The result is fine now:
The top image shows the direct stream using a texture view's surface and adding it to the captureRequestBuilder. The bottom image shows the raw image data after rotating.
The questions are:
Does this hack in "convertYUV420ToNV21_ALL_PLANES" work on any
device/emulator?
Why does rotateNV21 not work, while rotateNV21_working works fine?
Edit: The mirror issue is fixed, see the code comment. The squeeze issue is fixed; it was caused by the API the data gets forwarded to.
The actual open issue is a proper, not-too-expensive function that converts and rotates an image into raw NV21 and works on any device.
Here is the code to convert the Image to NV21 byte[]. This will work when the imgYUV420 U and V planes have pixelStride=1 (as on emulator) or pixelStride=2 (as on Nexus):
private byte[] convertYUV420ToNV21_ALL_PLANES(Image imgYUV420) {
assert(imgYUV420.getFormat() == ImageFormat.YUV_420_888);
Log.d(TAG, "image: " + imgYUV420.getWidth() + "x" + imgYUV420.getHeight() + " " + imgYUV420.getFormat());
Log.d(TAG, "planes: " + imgYUV420.getPlanes().length);
for (int nplane = 0; nplane < imgYUV420.getPlanes().length; nplane++) {
Log.d(TAG, "plane[" + nplane + "]: length " + imgYUV420.getPlanes()[nplane].getBuffer().remaining() + ", strides: " + imgYUV420.getPlanes()[nplane].getPixelStride() + " " + imgYUV420.getPlanes()[nplane].getRowStride());
}
byte[] rez = new byte[imgYUV420.getWidth() * imgYUV420.getHeight() * 3 / 2];
ByteBuffer buffer0 = imgYUV420.getPlanes()[0].getBuffer();
ByteBuffer buffer1 = imgYUV420.getPlanes()[1].getBuffer();
ByteBuffer buffer2 = imgYUV420.getPlanes()[2].getBuffer();
int n = 0;
assert(imgYUV420.getPlanes()[0].getPixelStride() == 1);
for (int row = 0; row < imgYUV420.getHeight(); row++) {
for (int col = 0; col < imgYUV420.getWidth(); col++) {
rez[n++] = buffer0.get();
}
}
assert(imgYUV420.getPlanes()[2].getPixelStride() == imgYUV420.getPlanes()[1].getPixelStride());
int stride = imgYUV420.getPlanes()[1].getPixelStride();
for (int row = 0; row < imgYUV420.getHeight(); row += 2) {
for (int col = 0; col < imgYUV420.getWidth(); col += 2) {
rez[n++] = buffer1.get();
rez[n++] = buffer2.get();
for (int skip = 1; skip < stride; skip++) {
if (buffer1.remaining() > 0) {
buffer1.get();
}
if (buffer2.remaining() > 0) {
buffer2.get();
}
}
}
}
Log.w(TAG, "total: " + rez.length);
return rez;
}
optimized Java code is available here.
As you can see, it is very easy to change this code to produce a rotated image in a single step:
private byte[] rotateYUV420ToNV21(Image imgYUV420) {
Log.d(TAG, "image: " + imgYUV420.getWidth() + "x" + imgYUV420.getHeight() + " " + imgYUV420.getFormat());
Log.d(TAG, "planes: " + imgYUV420.getPlanes().length);
for (int nplane = 0; nplane < imgYUV420.getPlanes().length; nplane++) {
Log.d(TAG, "plane[" + nplane + "]: length " + imgYUV420.getPlanes()[nplane].getBuffer().remaining() + ", strides: " + imgYUV420.getPlanes()[nplane].getPixelStride() + " " + imgYUV420.getPlanes()[nplane].getRowStride());
}
byte[] rez = new byte[imgYUV420.getWidth() * imgYUV420.getHeight() * 3 / 2];
ByteBuffer buffer0 = imgYUV420.getPlanes()[0].getBuffer();
ByteBuffer buffer1 = imgYUV420.getPlanes()[1].getBuffer();
ByteBuffer buffer2 = imgYUV420.getPlanes()[2].getBuffer();
int width = imgYUV420.getHeight();
assert(imgYUV420.getPlanes()[0].getPixelStride() == 1);
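// rotate the luma plane by 90 degrees: each source row is written into a destination column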
for (int row = imgYUV420.getHeight()-1; row >=0; row--) {
for (int col = 0; col < imgYUV420.getWidth(); col++) {
rez[col*width+row] = buffer0.get();
}
}
int uv_offset = imgYUV420.getWidth()*imgYUV420.getHeight();
assert(imgYUV420.getPlanes()[2].getPixelStride() == imgYUV420.getPlanes()[1].getPixelStride());
int stride = imgYUV420.getPlanes()[1].getPixelStride();
for (int row = imgYUV420.getHeight() - 2; row >= 0; row -= 2) {
for (int col = 0; col < imgYUV420.getWidth(); col += 2) {
rez[uv_offset+col/2*width+row] = buffer1.get();
rez[uv_offset+col/2*width+row+1] = buffer2.get();
for (int skip = 1; skip < stride; skip++) {
if (buffer1.remaining() > 0) {
buffer1.get();
}
if (buffer2.remaining() > 0) {
buffer2.get();
}
}
}
}
Log.w(TAG, "total rotated: " + rez.length);
return rez;
}
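For context, here is a minimal sketch of how either converter above could be wired into an ImageReader callback; the reader size, buffer count and null handler are illustrative assumptions, not part of the original answer:
// Hypothetical wiring: feed YUV_420_888 frames into the converters shown above.
ImageReader reader = ImageReader.newInstance(previewWidth, previewHeight, ImageFormat.YUV_420_888, 2);
reader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
    @Override
    public void onImageAvailable(ImageReader r) {
        Image image = r.acquireLatestImage();
        if (image == null) return;
        try {
            byte[] nv21 = rotateYUV420ToNV21(image); // or convertYUV420ToNV21_ALL_PLANES(image)
            // ... hand nv21 to whatever consumer expects NV21 data
        } finally {
            image.close(); // release the Image so the reader can reuse its buffer
        }
    }
}, null);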
I sincerely recommend the site http://rawpixels.net/ to see the actual structure of your raw images.
With OpenCV and the Android Camera2 API this task is very fast: you don't need the YUV420-to-NV21 Java conversion, and with OpenCV the conversion is about 4x faster:
Java side:
//Starts a builtin camera with api camera 2
public void startCamera() {
CameraManager manager = (CameraManager) AppData.getAppContext().getSystemService(Context.CAMERA_SERVICE);
try {
String pickedCamera = getCamera(manager);
manager.openCamera(pickedCamera, cameraStateCallback, null);
// set image format on YUV
mImageReader = ImageReader.newInstance(mWidth,mHeight, ImageFormat.YUV_420_888, 4);
mImageReader.setOnImageAvailableListener(onImageAvailableListener, null);
Log.d(TAG, "imageReader created");
} catch (CameraAccessException e) {
Log.e(TAG, e.getMessage());
}
}
//Listens for frames and send them to be processed
protected ImageReader.OnImageAvailableListener onImageAvailableListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Image image = null;
try {
image = reader.acquireLatestImage();
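// only plane 0 (the Y / luma plane) is copied here; that is all the grayscale Mat on the native side needs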
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] frameData = new byte[buffer.capacity()];
buffer.get(frameData);
// Native process (see below)
processAndRotateFrame(frameData);
image.close();
} catch (Exception e) {
Logger.e(TAG, "imageReader exception: "+e.getMessage());
} finally {
if (image != null) {
image.close();
}
}
}
};
Native side (NDK or Cmake):
JNIEXPORT jint JNICALL com_android_mvf_Utils_ProccessAndRotateFrame
(JNIEnv *env, jobject object, jint width, jint height, jbyteArray frame, jint rotation) {
// load data from JAVA side
jbyte *pFrameData = env->GetByteArrayElements(frame, 0);
// convert array to Mat, for example GRAY or COLOR
Mat mGray(height, width, CV_8UC1, (unsigned char *) pFrameData); // CV_8UC1 = single 8-bit channel (the Y plane)
// rotate image
rotateMat(mGray, rotation);
int objects = your_function(env, mGray);
env->ReleaseByteArrayElements(frame, pFrameData, 0);
return objects;
}
void rotateMat(cv::Mat &matImage, int rotFlag) {
if (rotFlag != 0 && rotFlag != 360) {
if (rotFlag == 90) {
cv::transpose(matImage, matImage);
cv::flip(matImage, matImage, 1);
} else if (rotFlag == 270 || rotFlag == -90) {
cv::transpose(matImage, matImage);
cv::flip(matImage, matImage, 0);
} else if (rotFlag == 180) {
cv::flip(matImage, matImage, -1);
}
}
}
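For completeness, a minimal sketch of the Java-side glue such a native function needs; the library name, package and wrapper class below are assumptions, not taken from the original code:
package com.android.mvf;

public class Utils {
    static {
        System.loadLibrary("native-lib"); // library built from the C++ source above; the name is an assumption
    }
    // Declaration matching the native entry point; it returns the number of detected objects.
    public static native int ProccessAndRotateFrame(int width, int height, byte[] frame, int rotation);
}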

Preserving red part on Android Camera Preview and other part on screen is Grayscale

I am writing an Android camera app.
The feature I want to implement is a preview that keeps red in color on screen while the rest of the screen is grayscale.
Like this picture
After changing the raw data format to RGB, I multiply the R, G and B components by particular scalars to get grayscale and try to preserve the red part of the preview.
But after I override the onDraw(Canvas canvas) method in my custom View, the result looks like a blue filter effect.
Can anybody give me a hint about which step is wrong?
THANK YOU.
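For reference, a minimal sketch of the per-pixel operation described above, assuming the frame has already been decoded to ARGB_8888 ints; the "red" test below is an illustrative threshold, not the check from the original code:
// Keep sufficiently red pixels, turn everything else into a packed grayscale ARGB value.
static void keepRedMakeRestGray(int[] argb) {
    for (int k = 0; k < argb.length; k++) {
        int r = Color.red(argb[k]), g = Color.green(argb[k]), b = Color.blue(argb[k]);
        boolean isRed = r > 150 && r > 2 * g && r > 2 * b; // illustrative "red enough" threshold
        if (!isRed) {
            int y = (int) (0.2126 * r + 0.7152 * g + 0.0722 * b);
            argb[k] = 0xff000000 | (y << 16) | (y << 8) | y; // write the gray value into all three channels
        }
    }
}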
The code of my custom View is below
package com.example.macampreviewdemo;
public class CamPreviewDemoActivity extends Activity {
/** Called when the activity is first created. */
int h, w;
public String tag = "tag";
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Log.i(tag, "onCreate");
DisplayMetrics metrics = new DisplayMetrics();
getWindowManager().getDefaultDisplay().getMetrics(metrics);
h = metrics.heightPixels;
w = metrics.widthPixels;
this.requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
Display display = ((WindowManager)
getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
setContentView(R.layout.main);
setRequestedOrientation(0);
ViewToDraw dtw = (ViewToDraw) findViewById(R.id.vtd);
CameraView cameraView = new CameraView(this, dtw, w, h);
((FrameLayout) findViewById(R.id.preview)).addView(cameraView);
}
}
And this:
package com.example.macampreviewdemo;
public class ViewToDraw extends View{
public String tag = "tag";
public byte[] image;
public boolean isCameraSet = false;
public int imgWidth, imgHeight;
Bitmap overlayBitmap;
Matrix matrix;
public ViewToDraw(Context context, AttributeSet attrs) {
super(context, attrs);
matrix = new Matrix();
}
public void cameraSet(){
isCameraSet = true;
}
public void putImage(byte[] img){
image = img;
}
@Override
protected void onDraw(Canvas canvas){
Log.i(tag, "onDraw() ");
int size = imgWidth * imgHeight;
int[] rgb = new int[imgWidth * imgHeight];
if(isCameraSet){
rgb = convertYUV420_NV21toARGB8888(image, imgWidth, imgHeight);
for (int k = 0; k < size; k++) {
if(Color.red(rgb[k]) == 255 &&
Color.green(rgb[k]) == 0 &&
Color.blue(rgb[k]) == 50){}
else{
rgb[k] = (int) (
(0.2126 * Color.red(rgb[k])) +
(0.7152 * Color.green(rgb[k])) +
(0.0722 * Color.blue(rgb[k]))
);
}
}
Log.i("tag", "rgb length = " + rgb.length);
overlayBitmap =
Bitmap.createBitmap(rgb, 0, imgWidth,
imgWidth, imgHeight,
Bitmap.Config.RGB_565);
canvas.drawBitmap(overlayBitmap, matrix, null);
overlayBitmap.recycle();
}
}
static public void decodeYUV420SP(int[] rgb, byte[] yuv420sp, int width, int height) {
final int frameSize = width * height;
int rtmp, gtmp, btmp;
for (int j = 0, yp = 0; j < height; j++) {
int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;
for (int i = 0; i < width; i++, yp++) {
int y = (0xff & ((int) yuv420sp[yp])) - 16;
if (y < 0)y = 0;
if ((i & 1) == 0) {
v = (0xff & yuv420sp[uvp++]) - 128;
u = (0xff & yuv420sp[uvp++]) - 128;
}
int y1192 = 1192 * y;
int r = (y1192 + 1634 * v);
int g = (y1192 - 833 * v - 400 * u);
int b = (y1192 + 2066 * u);
if (r < 0)r = 0;
else if (r > 262143)r = 262143;
if (g < 0)g = 0;
else if (g > 262143)g = 262143;
if (b < 0)b = 0;
else if (b > 262143)b = 262143;
rtmp = ((r << 6) & 0xff0000);
gtmp = ((g >> 2) & 0xff00);
btmp = ((b >> 10) & 0xff);
rgb[yp] = 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00)
| ((b >> 10) & 0xff);
}
}
}
int[] yuv420ToGrayScale(byte[] yuv420, int width, int height){
int size = width * height;
int y1, y2, y3, y4;
int[] pixel = new int[size];
for (int i = 0; i < size; i+=2) {
y1 = yuv420[i]&0xff;
y2 = yuv420[i + 1]&0xff;
y3 = yuv420[i + width]&0xff;
y4 = yuv420[i + width +1]&0xff;
pixel[i] = yuv420[i];
pixel[i + 1] = yuv420[i +1];
pixel[i + width ] = yuv420[width + i];
pixel[i + width + 1] = yuv420[i + width + 1];
if (i!=0 && (i+2)%width==0)
i+=width;
}
return pixel;
}
/**
* Converts YUV420 NV21 to ARGB8888
*
* @param data byte array in YUV420 NV21 format.
* @param width pixels width
* @param height pixels height
* @return an ARGB8888 int array, where each int is one pixel's ARGB value.
*/
public static int[] convertYUV420_NV21toARGB8888(byte [] data, int width, int height) {
int size = width*height;
int offset = size;
int[] pixels = new int[size];
int u, v, y1, y2, y3, y4;
// i along Y and the final pixels
// k along pixels U and V
for(int i=0, k=0; i < size; i+=2, k+=1) {
y1 = data[i ]&0xff;
y2 = data[i+1]&0xff;
y3 = data[width+i ]&0xff;
y4 = data[width+i+1]&0xff;
v = data[offset+k ]&0xff;
u = data[offset+k+1]&0xff;
v = v-128;
u = u-128;
pixels[i ] = convertYUVtoARGB(y1, u, v);
pixels[i+1] = convertYUVtoARGB(y2, u, v);
pixels[width+i ] = convertYUVtoARGB(y3, u, v);
pixels[width+i+1] = convertYUVtoARGB(y4, u, v);
if (i!=0 && (i+2)%width==0)
i+=width;
}
return pixels;
}
private static int convertYUVtoARGB(int y, int u, int v) {
int r,g,b;
r = y + (int)(1.402f*u);
g = y - (int)(0.344f*v + 0.714f*u);
b = y + (int)(1.772f*v);
r = r>255? 255 : r<0 ? 0 : r;
g = g>255? 255 : g<0 ? 0 : g;
b = b>255? 255 : b<0 ? 0 : b;
return 0xff000000 | (r<<16) | (g<<8) | b;
}
public static void myNV21ToRGB(int width, int height){
int yy, u, v;
int frame_size = width * height;
}
}
And this:
package com.example.macampreviewdemo;
public class CameraView extends SurfaceView implements SurfaceHolder.Callback{
public Camera mycamera;
List<Camera.Size> cameraSize;
private SurfaceHolder mHolder;
public ViewToDraw vtd;
int pickedH, pickedW;
int defaultH, defaultW;
public String tag = "tag";
public CameraView(Context context, ViewToDraw _vtd, int width, int height) {
super(context);
// TODO Auto-generated constructor stub
mHolder = getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
vtd = _vtd;
defaultH = height;
defaultW = width;
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
Log.i("tag"," surfaceCreated");
int i;
mycamera = Camera.open();
cameraSize = mycamera.getParameters().getSupportedPreviewSizes();
if(cameraSize != null){
// pick resolution
pickedH = defaultH;
pickedW = defaultW;
for(i=0;i<cameraSize.size();i++){
if(cameraSize.get(i).width < defaultW){
break;
}else{
pickedH = cameraSize.get(i).height;
pickedW = cameraSize.get(i).width;
}
}
}else{
Log.e("tag","null");
};
try {
mycamera.setPreviewDisplay(holder);
} catch (IOException e) {
e.printStackTrace();
}
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
Log.i("tag","surfaceChanged");
Camera.Parameters parameters = mycamera.getParameters();
parameters.setPreviewSize(pickedW, pickedH);
mycamera.setParameters(parameters);
//create buffer
PixelFormat p = new PixelFormat();
PixelFormat.getPixelFormatInfo(parameters.getPreviewFormat(),p);
int bufSize = (pickedW*pickedH*p.bitsPerPixel)/8;
//add buffers
byte[] buffer = new byte[bufSize];
mycamera.addCallbackBuffer(buffer);
buffer = new byte[bufSize];
mycamera.addCallbackBuffer(buffer);
buffer = new byte[bufSize];
mycamera.addCallbackBuffer(buffer);
mycamera.setPreviewCallbackWithBuffer(new PreviewCallback() {
public void onPreviewFrame(byte[] data, Camera camera) {
Log.i("tag", "onPreviewFrame");
Log.i("tag", "pickedH = " + pickedH);
Log.i("tag", "pickedW = " + pickedW);
vtd.putImage(data);
vtd.cameraSet();
vtd.imgHeight = pickedH;
vtd.imgWidth = pickedW;
vtd.invalidate();
mycamera.addCallbackBuffer(data);
}
});
mycamera.startPreview();
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i("tag", "surfaceDestroyed");
mycamera.setPreviewCallback(null);
mycamera.release();
mycamera = null;
}
}
And this:
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:paddingBottom="@dimen/activity_vertical_margin"
android:paddingLeft="@dimen/activity_horizontal_margin"
android:paddingRight="@dimen/activity_horizontal_margin"
android:paddingTop="@dimen/activity_vertical_margin"
tools:context=".CamPreviewDemoActivity" >
<FrameLayout android:id="@+id/FrameLayout01"
android:layout_height="fill_parent" android:layout_width="fill_parent">
<FrameLayout
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:id="#+id/preview">
</FrameLayout>
<com.example.macampreviewdemo.ViewToDraw
android:id="#+id/vtd"
android:layout_height="fill_parent"
android:layout_width="fill_parent"/>
</FrameLayout>
</RelativeLayout>

Android Convert Raw data to Png image color is not good?

I have tried to convert pure raw data to a PNG image, but I am not able to get an output image with the correct colors.
For reference I have attached both the raw file and the image. Please advise me on how to get the image with the correct colors.
CODE
File screensPath = new File(SCREENSHOT_FOLDER);
screensPath.mkdirs();
// construct screenshot file name
StringBuilder sb = new StringBuilder();
sb.append(SCREENSHOT_FOLDER);
sb.append(Math.abs(UUID.randomUUID().hashCode())); // hash code of UUID should be quite random yet short
sb.append(".png");
String file = sb.toString();
// fetch the screen and save it
Screenshot ss = null;
try {
ss = retreiveRawScreenshot();
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
if(ss!=null)
{
writeImageFile(ss, file);
}
incre++;
private Screenshot retreiveRawScreenshot() throws Exception {
try {
InputStream is = new FileInputStream("/mnt/sdcard/screenshots/ss"+incre+".raw");
// retrieve response -- first the size and BPP of the screenshot
StringBuilder sb = new StringBuilder();
int c;
while ((c = is.read()) != -1) {
if (c == 0) break;
sb.append((char)c);
}
//========================================= not used =====================================
// parse it
String[] screenData = sb.toString().split(" ");
if (screenData.length >= 3) {
System.out.println("$$$$$$$$$$$$$$$$$$$$$$$$$$$ ");
Screenshot ss = new Screenshot();
ss.width = Integer.parseInt(screenData[0]);
ss.height = Integer.parseInt(screenData[1]);
ss.bpp = Integer.parseInt(screenData[2]);
System.out.println("$$$$$$$$$$$$$$$$$$$$$$$$$$$ ");
// retrieve the screenshot
// (this method - via ByteBuffer - seems to be the fastest)
ByteBuffer bytes = ByteBuffer.allocate (ss.width * ss.height * ss.bpp / 8);
is = new BufferedInputStream(is); // buffering is very important apparently
byte[] rgbsnew = null;
toRGB565(bytes.array(), ss.width, ss.height, rgbsnew);
// is.read(bytes.array()); // reading all at once for speed
is.read(rgbsnew); // reading all at once for speed
bytes.position(0); // reset position to the beginning of ByteBuffer
ss.pixels =ByteBuffer.wrap(rgbsnew);
// convert byte-buffer to integer
return ss;
}
//========================================= not used ==========================================
Screenshot ss = new Screenshot();
ss.width = 320;
ss.height = 480;
ss.bpp = 16;
ByteBuffer bytes = ByteBuffer.allocate (ss.width * ss.height * ss.bpp / 8);
is = new BufferedInputStream(is); // buffering is very important apparently
is.read(bytes.array()); // reading all at once for speed
bytes.position(0); // reset position to the beginning of ByteBuffer
ss.pixels = bytes;
//============================= newly tried to set raw to image view ==============================
/*mRawImage = new RawImage();
mRawImage.readHeader(1, bytes);
// Receive framebuffer data.
byte[] data = new byte[mRawImage.size];
bytes = ByteBuffer.wrap(data);
mRawImage.data = data;
Bitmap bmp=BitmapFactory.decodeByteArray(mRawImage.data,0,mRawImage.data.length);
imageView1.setImageBitmap(bmp);*/
//============================newly tried to set raw to image view ===============================
return ss;
}
catch (Exception e) {
// throw new Exception(e);
return null;
}
finally {}
//return null;
}
class Screenshot {
public Buffer pixels;
public int width;
public int height;
public int bpp;
public boolean isValid() {
if (pixels == null || pixels.capacity() == 0 || pixels.limit() == 0) return false;
if (width <= 0 || height <= 0) return false;
return true;
}
}
private void writeImageFile(Screenshot ss, String file) {
//if (ss == null || !ss.isValid()) throw new IllegalArgumentException();
//if (file == null || file.length() == 0) throw new IllegalArgumentException();
// resolve screenshot's BPP to actual bitmap pixel format
Bitmap.Config pf;
switch (ss.bpp) {
//case 16: pf = Config.RGB_565; break;
case 16: pf = Config.RGB_565; break;
case 32: pf = Config.ARGB_8888; break;
default: pf = Config.ARGB_8888; break;
}
//=====================================================================
/*int[] rgb24 = new int[ss.pixels.capacity()];
int i = 0;
for (;i<320*480;i++)
{
//uint16_t pixel16 = ((uint16_t *)gr_framebuffer[0].data)[i];
//int pixel16=(IntBuffer)
int pixel16=Integer.parseInt(ss.pixels.position(i).toString());
// RRRRRGGGGGGBBBBBB -> RRRRRRRRGGGGGGGGBBBBBBBB
// in rgb24 color max is 2^8 per channel (*255/32 *255/64 *255/32)
rgb24[3*i+2] = (255*(pixel16 & 0x001F))/ 32; //Blue
rgb24[3*i+1] = (255*((pixel16 & 0x07E0) >> 5))/64; //Green
rgb24[3*i] = (255*((pixel16 & 0xF800) >> 11))/32; //Red
}
//ss.pixels=rgb24;
*///=====================================================================
// create appropriate bitmap and fill it wit data
Bitmap bmp = Bitmap.createBitmap(ss.width, ss.height, pf);
bmp.copyPixelsFromBuffer(ss.pixels);
// handle the screen rotation
int rot = getScreenRotation();
if (rot != 0) {
Matrix matrix = new Matrix();
matrix.postRotate(-rot);
bmp = Bitmap.createBitmap(bmp, 0, 0, bmp.getWidth(), bmp.getHeight(), matrix, true);
}
// save it in PNG format
FileOutputStream fos;
try {
fos = new FileOutputStream(file);
} catch (FileNotFoundException e) {
throw new InvalidParameterException();
}
bmp.compress(CompressFormat.PNG, 100, fos);
}
private int getScreenRotation() {
WindowManager wm = (WindowManager)getSystemService(WINDOW_SERVICE);
Display disp = wm.getDefaultDisplay();
// check whether we operate under Android 2.2 or later
try {
Class<?> displayClass = disp.getClass();
Method getRotation = displayClass.getMethod("getRotation");
int rot = ((Integer)getRotation.invoke(disp)).intValue();
switch (rot) {
case Surface.ROTATION_0: return 0;
case Surface.ROTATION_90: return 90;
case Surface.ROTATION_180: return 180;
case Surface.ROTATION_270: return 270;
default: return 0;
}
} catch (NoSuchMethodException e) {
// no getRotation() method -- fall back to disp.getOrientation()
int orientation = disp.getOrientation();
// Sometimes you may get undefined orientation Value is 0
// simple logic solves the problem compare the screen
// X,Y Co-ordinates and determine the Orientation in such cases
if(orientation==Configuration.ORIENTATION_UNDEFINED){
Configuration config = getResources().getConfiguration();
orientation = config.orientation;
if(orientation==Configuration.ORIENTATION_UNDEFINED){
//if height and width of screen are equal then
// it is square orientation
if(disp.getWidth()==disp.getHeight()){
orientation = Configuration.ORIENTATION_SQUARE;
}else{ //if width is less than height then it is portrait
if(disp.getWidth() < disp.getHeight()){
orientation = Configuration.ORIENTATION_PORTRAIT;
}else{ // if it is not any of the above it will definitely be landscape
orientation = Configuration.ORIENTATION_LANDSCAPE;
}
}
}
}
return orientation == 1 ? 0 : 90; // 1 for portrait, 2 for landscape
} catch (Exception e) {
return 0; // bad, I know ;P
}
}
//===========================================================================
/**
* Converts semi-planar YUV420 as generated for camera preview into RGB565
* format for use as an OpenGL ES texture. It assumes that both the input
* and output data are contiguous and start at zero.
*
* @param yuvs the array of YUV420 semi-planar data
* @param rgbs an array into which the RGB565 data will be written
* @param width the number of pixels horizontally
* @param height the number of pixels vertically
*/
//we tackle the conversion two pixels at a time for greater speed
private void toRGB565(byte[] yuvs, int width, int height, byte[] rgbs) {
//the end of the luminance data
final int lumEnd = width * height;
//points to the next luminance value pair
int lumPtr = 0;
//points to the next chromiance value pair
int chrPtr = lumEnd;
//points to the next byte output pair of RGB565 value
int outPtr = 0;
//the end of the current luminance scanline
int lineEnd = width;
while (true) {
//skip back to the start of the chromiance values when necessary
if (lumPtr == lineEnd) {
if (lumPtr == lumEnd) break; //we've reached the end
//division here is a bit expensive, but it's only done once per scanline
chrPtr = lumEnd + ((lumPtr >> 1) / width) * width;
lineEnd += width;
}
//read the luminance and chromiance values
final int Y1 = yuvs[lumPtr++] & 0xff;
final int Y2 = yuvs[lumPtr++] & 0xff;
final int Cr = (yuvs[chrPtr++] & 0xff) - 128;
final int Cb = (yuvs[chrPtr++] & 0xff) - 128;
int R, G, B;
//generate first RGB components
B = Y1 + ((454 * Cb) >> 8);
if(B < 0) B = 0; else if(B > 255) B = 255;
G = Y1 - ((88 * Cb + 183 * Cr) >> 8);
if(G < 0) G = 0; else if(G > 255) G = 255;
R = Y1 + ((359 * Cr) >> 8);
if(R < 0) R = 0; else if(R > 255) R = 255;
//NOTE: this assumes little-endian encoding
rgbs[outPtr++] = (byte) (((G & 0x3c) << 3) | (B >> 3));
rgbs[outPtr++] = (byte) ((R & 0xf8) | (G >> 5));
//generate second RGB components
B = Y2 + ((454 * Cb) >> 8);
if(B < 0) B = 0; else if(B > 255) B = 255;
G = Y2 - ((88 * Cb + 183 * Cr) >> 8);
if(G < 0) G = 0; else if(G > 255) G = 255;
R = Y2 + ((359 * Cr) >> 8);
if(R < 0) R = 0; else if(R > 255) R = 255;
//NOTE: this assumes little-endian encoding
rgbs[outPtr++] = (byte) (((G & 0x3c) << 3) | (B >> 3));
rgbs[outPtr++] = (byte) ((R & 0xf8) | (G >> 5));
}
}
Thanks.
RAW file
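For reference, a minimal sketch of decoding a headerless RGB565 dump into a Bitmap, assuming the file holds exactly width*height*2 bytes of pixel data (the 320x480 size from the code above); the path and method name are illustrative:
// Read a bare RGB565 raw dump and wrap it in a Bitmap of the same size.
static Bitmap decodeRawRgb565(String path, int width, int height) throws IOException {
    byte[] raw = new byte[width * height * 2];
    InputStream in = new BufferedInputStream(new FileInputStream(path));
    try {
        int off = 0, n;
        while (off < raw.length && (n = in.read(raw, off, raw.length - off)) != -1) {
            off += n; // read() may return fewer bytes than requested, so keep reading
        }
    } finally {
        in.close();
    }
    Bitmap bmp = Bitmap.createBitmap(width, height, Bitmap.Config.RGB_565);
    bmp.copyPixelsFromBuffer(ByteBuffer.wrap(raw)); // bytes are copied as-is into the RGB_565 bitmap
    return bmp;
}
// Usage with the question's dimensions: Bitmap bmp = decodeRawRgb565("/mnt/sdcard/screenshots/ss0.raw", 320, 480);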
