I am using QRCodeReaderView (https://github.com/dlazaro66/QRCodeReaderView) to implement my own QR code scanner, and it works well, but the camera still starts slowly (3-4 seconds). I came up with the idea of pre-starting the camera preview before it is needed (keeping the camera open while the fragment with the "start scanning" button has focus, so the preview can appear right away), but I have tried everything and it still starts slowly, so it seems I am not understanding the concept.
Here is the code for the QRCodeReaderView:
import android.content.Context;
import android.content.pm.PackageManager;
import android.graphics.PointF;
import android.hardware.Camera;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import com.google.zxing.BinaryBitmap;
import com.google.zxing.ChecksumException;
import com.google.zxing.FormatException;
import com.google.zxing.NotFoundException;
import com.google.zxing.PlanarYUVLuminanceSource;
import com.google.zxing.Result;
import com.google.zxing.ResultPoint;
import com.google.zxing.client.android.camera.open.CameraManager;
import com.google.zxing.common.HybridBinarizer;
import com.google.zxing.qrcode.QRCodeReader;
import java.io.IOException;
public class QRCodeReaderView extends SurfaceView implements SurfaceHolder.Callback, Camera.PreviewCallback {
public interface OnQRCodeReadListener {
public void onQRCodeRead(String text, PointF[] points);
public void cameraNotFound();
public void QRCodeNotFoundOnCamImage();
}
private OnQRCodeReadListener mOnQRCodeReadListener;
private static final String TAG = QRCodeReaderView.class.getName();
private QRCodeReader mQRCodeReader;
private int mPreviewWidth;
private int mPreviewHeight;
private SurfaceHolder mHolder;
private CameraManager mCameraManager;
public QRCodeReaderView(Context context) {
super(context);
init();
}
public QRCodeReaderView(Context context, AttributeSet attrs) {
super(context, attrs);
init();
}
public void setOnQRCodeReadListener(OnQRCodeReadListener onQRCodeReadListener) {
mOnQRCodeReadListener = onQRCodeReadListener;
}
public CameraManager getCameraManager() {
return mCameraManager;
}
@SuppressWarnings("deprecation")
private void init() {
if (checkCameraHardware(getContext())) {
mCameraManager = new CameraManager(getContext());
mHolder = this.getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); // this flag is still required on old API levels even though it is deprecated
} else {
Log.e(TAG, "Error: Camera not found");
if (mOnQRCodeReadListener != null) {
mOnQRCodeReadListener.cameraNotFound();
}
}
}
/**
* *************************************************
* SurfaceHolder.Callback,Camera.PreviewCallback
* **************************************************
*/
@Override
public void surfaceCreated(SurfaceHolder holder) {
try {
// Open the camera driver, passing our View dimensions
mCameraManager.openDriver(holder, this.getWidth(), this.getHeight());
} catch (IOException e) {
Log.w(TAG, "Can not openDriver: " + e.getMessage());
mCameraManager.closeDriver();
}
try {
mQRCodeReader = new QRCodeReader();
mCameraManager.startPreview();
} catch (Exception e) {
Log.e(TAG, "Exception: " + e.getMessage());
mCameraManager.closeDriver();
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
Log.d(TAG, "surfaceDestroyed");
mCameraManager.getCamera().setPreviewCallback(null);
mCameraManager.getCamera().stopPreview();
mCameraManager.getCamera().release();
mCameraManager.closeDriver();
}
// Called each time the camera delivers a preview frame
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
PlanarYUVLuminanceSource source = mCameraManager.buildLuminanceSource(data, mPreviewWidth, mPreviewHeight);
HybridBinarizer hybBin = new HybridBinarizer(source);
BinaryBitmap bitmap = new BinaryBitmap(hybBin);
try {
Result result = mQRCodeReader.decode(bitmap);
// Notify we found a QRCode
if (mOnQRCodeReadListener != null) {
// Transform resultPoints to View coordinates
PointF[] transformedPoints = transformToViewCoordinates(result.getResultPoints());
mOnQRCodeReadListener.onQRCodeRead(result.getText(), transformedPoints);
}
} catch (ChecksumException e) {
Log.d(TAG, "ChecksumException");
e.printStackTrace();
} catch (NotFoundException e) {
// Notify QR not found
if (mOnQRCodeReadListener != null) {
mOnQRCodeReadListener.QRCodeNotFoundOnCamImage();
}
} catch (FormatException e) {
Log.d(TAG, "FormatException");
e.printStackTrace();
} finally {
mQRCodeReader.reset();
}
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
Log.d(TAG, "surfaceChanged");
if (mHolder.getSurface() == null) {
Log.e(TAG, "Error: preview surface does not exist");
return;
}
//preview_width = width;
//preview_height = height;
mPreviewWidth = mCameraManager.getPreviewSize().x;
mPreviewHeight = mCameraManager.getPreviewSize().y;
mCameraManager.stopPreview();
mCameraManager.getCamera().setPreviewCallback(this);
mCameraManager.getCamera().setDisplayOrientation(90); // Portrait mode
mCameraManager.startPreview();
}
/**
* Transform result points to SurfaceView coordinates.
* <p/>
* This method is needed because the camera returns its coordinates in landscape orientation.
* It works, but the transform operations are not well documented.
* <p/>
* TODO re-write this method explaining each single value
*
* @return a new PointF array with the transformed points
*/
private PointF[] transformToViewCoordinates(ResultPoint[] resultPoints) {
// Check for null before allocating, otherwise this would throw a NullPointerException
PointF[] transformedPoints = new PointF[resultPoints == null ? 0 : resultPoints.length];
int index = 0;
if (resultPoints != null) {
float previewX = mCameraManager.getPreviewSize().x;
float previewY = mCameraManager.getPreviewSize().y;
float scaleX = this.getWidth() / previewY;
float scaleY = this.getHeight() / previewX;
for (ResultPoint point : resultPoints) {
PointF tmppoint = new PointF((previewY - point.getY()) * scaleX, point.getX() * scaleY);
transformedPoints[index] = tmppoint;
index++;
}
}
return transformedPoints;
}
/**
* Check if this device has a camera
*/
private boolean checkCameraHardware(Context context) {
if (context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA)) {
// this device has a camera
return true;
} else if (context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA_FRONT)) {
// this device has a front camera
return true;
} else if (context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA_ANY)) {
// this device has any camera
return true;
} else {
// no camera on this device
return false;
}
}
}
and here is my fragment that uses it:
package com.breadwallet.presenter.fragments;
import com.breadwallet.R;
import com.breadwallet.presenter.activities.ScanResultActivity;
import com.breadwallet.tools.animation.SpringAnimator;
import com.breadwallet.tools.qrcode.QRCodeReaderView;
public class MainFragmentDecoder extends Fragment implements QRCodeReaderView.OnQRCodeReadListener {
public static final String TAG = "MainFragmentDecoder";
private boolean accessGranted = true;
private TextView myTextView;
private static QRCodeReaderView mydecoderview;
private ImageView camera_guide_image;
private Intent intent;
public static MainFragmentDecoder mainFragmentDecoder;
private RelativeLayout layout;
public MainFragmentDecoder() {
mainFragmentDecoder = this;
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View rootView = inflater.inflate(R.layout.fragment_decoder, container, false);
intent = new Intent(getActivity(), ScanResultActivity.class);
myTextView = (TextView) rootView.findViewById(R.id.exampleTextView);
camera_guide_image = (ImageView) rootView.findViewById(R.id.camera_guide_image);
SpringAnimator.showExpandCameraGuide(camera_guide_image);
// Inflate the layout for this fragment
return rootView;
}
@Override
public void onActivityCreated(Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
layout = (RelativeLayout) getView().findViewById(R.id.fragment_decoder_layout);
mydecoderview = new QRCodeReaderView(getActivity().getApplicationContext());
mydecoderview.setOnQRCodeReadListener(mainFragmentDecoder);
if (mydecoderview != null)
mydecoderview.getCameraManager().startPreview();
}
/**
* Called when a QR is decoded
* "text" : the text encoded in QR
* "points" : points where QR control points are placed
*/
@Override
public void onQRCodeRead(String text, PointF[] points) {
synchronized (this) {
if (accessGranted) {
new Handler().postDelayed(new Runnable() {
@Override
public void run() {
accessGranted = true;
}
}, 300);
accessGranted = false;
// Log.e(TAG, "Activity STARTED!!!!!");
intent.putExtra("result", text);
startActivity(intent);
}
}
}
// Called when the device has no camera
@Override
public void cameraNotFound() {
Log.d(TAG, "No Camera found!");
}
// Called when there are no QR codes in the camera preview image
@Override
public void QRCodeNotFoundOnCamImage() {
// Log.d(TAG, "No QR Code found!");
}
@Override
public void onResume() {
super.onResume();
new CameraOpenerTask().execute();
}
@Override
public void onPause() {
super.onPause();
Log.e(TAG, "In onPause");
mydecoderview.getCameraManager().stopPreview();
layout.removeView(mydecoderview);
}
private class CameraOpenerTask extends AsyncTask {
@Override
protected Object doInBackground(Object[] params) {
return null;
}
@Override
protected void onPostExecute(Object o) {
new Handler().postDelayed(new Runnable() {
@Override
public void run() {
layout.addView(mydecoderview, 0);
}
}, 1300);
Log.e(TAG, "The camera started");
}
}
public void stopCamera() {
if (mydecoderview != null) {
mydecoderview.getCameraManager().stopPreview();
}
mydecoderview = null;
}
}
I tried:
calling camera.startPreview() earlier, before it is actually needed;
pre-creating mydecoderview and then simply making it visible when
the button is pressed, but it still takes 3-4 seconds to start.
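To make the idea concrete, here is a minimal sketch of the kind of pre-warming I am trying to get working. CameraWarmer and its listener are hypothetical helper names of mine, not part of QRCodeReaderView; the point is only to pay the expensive Camera.open() cost early, on a background thread, so the already-open camera can be attached to the preview the moment the scanner is shown.
import android.hardware.Camera;
import android.os.Handler;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

// Sketch only: hypothetical pre-warming helper, not part of QRCodeReaderView.
public class CameraWarmer {
    public interface Listener {
        void onCameraReady(Camera camera);
    }

    private final ExecutorService executor = Executors.newSingleThreadExecutor();

    public void warmUp(final Handler mainHandler, final Listener listener) {
        executor.execute(new Runnable() {
            @Override
            public void run() {
                // Camera.open() is the slow call; doing it here keeps the UI thread free
                final Camera camera = Camera.open();
                mainHandler.post(new Runnable() {
                    @Override
                    public void run() {
                        // Hand the already-open camera back on the main thread,
                        // where it can be attached to the preview SurfaceHolder
                        listener.onCameraReady(camera);
                    }
                });
            }
        });
    }
}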
You could try the camera app from the CyanogenMod 11 firmware; maybe that is exactly what you are searching for?
Download its source and add it to your code.
I use the dm77/barcodescanner library to scan QR codes. But when using it in my app, the camera focus interval is 1000 ms, and this is not an optimal parameter for all phones.
How can I improve the focus speed of the camera?
I found the answer to this question with help from @TeunVR on GitHub.
You must create a class that extends ZXingScannerView and override setupCameraPreview and setAutoFocus.
public class ZXingAutofocusScannerView extends ZXingScannerView {
private boolean callbackFocus = false ;
public ZXingAutofocusScannerView(Context context) {
super(context);
}
@Override
public void setupCameraPreview(CameraWrapper cameraWrapper) {
Camera.Parameters parameters = cameraWrapper.mCamera.getParameters();
if (parameters != null) {
try {
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
cameraWrapper.mCamera.setParameters(parameters);
} catch (Exception e) {
// Continuous focus is unsupported on this device; fall back to the periodic auto-focus callback
callbackFocus = true;
}
}
super.setupCameraPreview(cameraWrapper);
}
@Override
public void setAutoFocus(boolean state) {
// Only enable the periodic auto-focus callback when continuous focus could not be set
super.setAutoFocus(callbackFocus);
}
}
Now you must use this class instead of ZXingScannerView.
public class SimpleScannerActivity extends AppCompatActivity implements
ZXingAutofocusScannerView.ResultHandler {
private ZXingAutofocusScannerView mScannerView;
@Override
public void onCreate(Bundle state) {
super.onCreate(state);
mScannerView = new ZXingAutofocusScannerView(this);
setContentView(mScannerView);
}
@Override
public void onResume() {
super.onResume();
mScannerView.setResultHandler(this);
mScannerView.startCamera();
}
@Override
public void onPause() {
super.onPause();
mScannerView.stopCamera();
}
@Override
public void handleResult(Result rawResult) {
Toast.makeText(this, ""+rawResult.getText(), Toast.LENGTH_SHORT).show();
mScannerView.resumeCameraPreview(this);
}
}
If you use Kotlin, see this version:
class ZXingAutofocusScannerView(context: Context) :
ZXingScannerView(context) {
private val TAG = ZXingAutofocusScannerView::class.qualifiedName
private var callbackFocus = false
override fun setupCameraPreview(cameraWrapper: CameraWrapper?) {
cameraWrapper?.mCamera?.parameters?.let{parameters->
try {
parameters.focusMode =
Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE
cameraWrapper.mCamera.parameters = parameters
} catch (ex: Exception) {
Log.e(TAG, "Failed to set FOCUS_MODE_CONTINUOUS_PICTURE", ex)
callbackFocus = true
}
}
super.setupCameraPreview(cameraWrapper)
}
override fun setAutoFocus(state: Boolean) {
super.setAutoFocus(callbackFocus)
}
}
I have been searching for a solution to my problem but am not satisfied with any of the ones I have found.
I created an Android library which shows an ad view layout, and now I want to display this layout from Unity, but I can't find any solution. Can anyone please help me call my layout from Unity?
public class RedeemLayout extends LinearLayout implements View.OnClickListener,HttpCallBacks {
public Dialog dialogBox;
public ImageButton close;
public ImageView advert;
public TextView location_text;
TextView tv_massagetext;
EditText et_redeemdetial;
Button btn_redem;
ImageButton btn_closead;
DeviceInfo device = new DeviceInfo();
String adClickUrl;
HttpNetworkCalls httpNetworkCalls;
Context context;
Activity activity;
ImageView bmImage;
FrameLayout redeemLayout;
AdInfo ad;
private AdInfo adInfo;
private UserInfo user;
public RedeemLayout(Context context) {
super(context);
initialize(context);
this.context = context;
}
public RedeemLayout(Activity activity, Context context) {
super(context);
initialize(context);
this.context = context;
this.activity = activity;
}
public RedeemLayout(Context context, AttributeSet attr) {
super(context, attr);
initialize(context);
this.context = context;
}
private void initialize(Context context) {
inflate(context, R.layout.ad_lyout, this);
tv_massagetext = (TextView) findViewById(R.id.massagetext);
et_redeemdetial = (EditText) findViewById(R.id.redeemdetail);
btn_redem = (Button) findViewById(R.id.btn_redeem);
btn_closead = (ImageButton) findViewById(R.id.btn_CloseFullScreenAd);
bmImage = (ImageView) findViewById(R.id.adimage);
redeemLayout = (FrameLayout) findViewById(R.id.redeemLayout);
httpNetworkCalls = new HttpNetworkCalls(this);
btn_redem.setOnClickListener(this);
btn_closead.setOnClickListener(this);
DownloadAdAccordingToLocation();
}
public void onClick(View view) {
int i = view.getId();
if (i == R.id.btn_redeem) {
Toast.makeText(getContext(), "Thanks for Redeem You will get Massage soon...", Toast.LENGTH_LONG).show();
Map<String, String> data = new HashMap<>();
data.put("ad_id",ad.getAdId());
data.put("app_id","1");
data.put("location","lahore");
data.put("session","1");
try {
// calls an AsyncTask to perform the network operation on a separate thread
httpNetworkCalls.post(data, API.UPDATE_IMPRESSIONS);
} catch (Exception e) {
e.printStackTrace();
}
}
if (i == R.id.btn_CloseFullScreenAd) {
redeemLayout.removeAllViews();
redeemLayout.setVisibility(View.GONE);
Map<String, String> data = new HashMap<>();
data.put("ad_id",ad.getAdId());
data.put("app_id","1");
data.put("location","lahore");
data.put("session","1");
Toast.makeText(getContext(), "Thanks for Redeem You will get Massage soon...", Toast.LENGTH_LONG).show();
try {
httpNetworkCalls.post(data, API.UPDATE_IMPRESSIONS);
// calls an AsyncTask to perform the network operation on a separate thread
} catch (Exception e) {
e.printStackTrace();
}
}
}
private void DownloadAdAccordingToLocation() {
try {
httpNetworkCalls.get(API.RANDOM_ADVERTISEMENT);
} catch (IOException e) {
e.printStackTrace();
}
}
public void adButtonClicked(View v) {
// FullScreenAdDialog db = new FullScreenAdDialog(this, ad, updateAdClick);
// db.show();
// Intent x = new Intent(xcontext, AdActivity.class);
// x.putExtra("image_link", ad.getImage_link());
// x.putExtra("url", ad.getUrl());
// x.putExtra("adid", ad.getAdId());
// x.putExtra("adclickurl", updateAdClick);
// startActivity(x);
}
@Override
public void HttpResponse(final int apiCode, final JSONObject response, final boolean isSuccess) {
activity.runOnUiThread(new Runnable() {
@Override
public void run() {
try {
if (apiCode == API.RANDOM_ADVERTISEMENT) {
if (response.has("networkError")) {
Log.e("Error", response.getString("networkError"));
} else {
ad = AdInfo.fromJson(response);
if (ad.isSuccess()) {
Picasso.Builder builder = new Picasso.Builder(context);
builder.listener(new Picasso.Listener() {
@Override
public void onImageLoadFailed(Picasso picasso, Uri uri, Exception exception) {
redeemLayout.removeAllViews();
redeemLayout.setVisibility(View.GONE);
}
});
Picasso pic = builder.build();
pic.load(ad.getImage_url()).into(bmImage);
// Picasso.with(context)
// .load(ad.getImage_url())
// .error(R.drawable.imagecross)
// .into(bmImage);
} else {
Log.e("Error", response.getString("parseError"));
}
}
}
} catch (Exception e) {
e.printStackTrace();
}
}
});
}
}
And I call my library in an Android app like this:
public class MainActivity extends Activity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
RelativeLayout layout= (RelativeLayout)findViewById(R.id.test);
RedeemLayout redeemLayout= new RedeemLayout(this, this);
redeemLayout.setGravity(Gravity.CENTER);
layout.addView(redeemLayout);
}
}
This is more of a Unity3D question. You cannot call your layout directly; you can only send messages to the Android code. Read the Unity3D scripting documentation (or search for sample code) for AndroidJavaClass and AndroidJavaObject.
From the Android perspective, I think you should implement some kind of static method that you can call from Unity; it should broadcast, or post an event on an event bus, that is then handled by your advertisement engine.
The Unity part of the code should be similar to this:
AndroidJavaClass javaClass = new AndroidJavaClass("com.mypackage.MyClassWithMyStaticMethod");
javaClass.CallStatic("MyStaticMethod", 42);
The Android class MyClassWithMyStaticMethod should then implement:
public static void MyStaticMethod(int param) {...}
Check this doc: AndroidJavaClass
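For completeness, here is a rough sketch of what such a static entry point could look like on the Android side. GameBridge, register(), and attaching to android.R.id.content are placeholder choices of mine, not an existing API, so adapt them to your project.
import android.app.Activity;
import android.view.Gravity;
import android.view.ViewGroup;

// Sketch only: hypothetical bridge class. register() must be called once from the
// hosting Activity before Unity invokes showRedeemLayout() via CallStatic.
public class GameBridge {
    private static Activity sActivity;

    public static void register(Activity activity) {
        sActivity = activity;
    }

    // The static method Unity would call with AndroidJavaClass.CallStatic("showRedeemLayout", 42)
    public static void showRedeemLayout(final int param) {
        final Activity activity = sActivity;
        if (activity == null) return;
        activity.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                // Views must be touched on the UI thread, so hop over to it here
                ViewGroup content = (ViewGroup) activity.findViewById(android.R.id.content);
                RedeemLayout redeemLayout = new RedeemLayout(activity, activity);
                redeemLayout.setGravity(Gravity.CENTER);
                content.addView(redeemLayout);
            }
        });
    }
}
From Unity this would then be invoked with new AndroidJavaClass("com.mypackage.GameBridge").CallStatic("showRedeemLayout", 42).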
My Android application has barcode scanning functionality, for which I am using the https://github.com/dm77/barcodescanner library. It works well, but if I scan repeatedly, it sometimes returns a wrong value (not the actual value of the barcode). I would like to know why this happens and how to resolve the issue. I have googled but unfortunately did not find a good solution. Can anyone guide me to a fix?
Note: I am using the latest version, 1.8.4.
SimpleScannerActivity.java
import com.google.zxing.Result;
import me.dm7.barcodescanner.core.IViewFinder;
import me.dm7.barcodescanner.core.ViewFinderView;
import me.dm7.barcodescanner.zxing.ZXingScannerView;
public class SimpleScannerActivity extends BaseScannerActivity implements ZXingScannerView.ResultHandler {
private ZXingScannerView mScannerView;
LoadingFlowScreen loadingFlowScreen;
NextScanScreen nextScanScreen;
String shipmentin,locationin;
@Override
public void onCreate(Bundle state) {
super.onCreate(state);
setContentView(R.layout.activity_simple_scanner);
setupToolbar();
loadingFlowScreen = new LoadingFlowScreen();
nextScanScreen = new NextScanScreen();
ViewGroup contentFrame = (ViewGroup) findViewById(R.id.content_frame);
mScannerView = new ZXingScannerView(this) {
@Override
protected IViewFinder createViewFinderView(Context context) {
return new CustomViewFinderView(context);
}
};
contentFrame.addView(mScannerView);
}
@Override
public void onResume() {
super.onResume();
mScannerView.setResultHandler(this);
mScannerView.startCamera();
}
@Override
public void onPause() {
super.onPause();
mScannerView.stopCamera();
}
@Override
public void handleResult(Result rawResult) {
Intent in = new Intent(SimpleScannerActivity.this, NextScreen.class); // forwarding to another activity once the barcode is scanned
in.putExtra("scannedText", rawResult.getText()); // passing the scanned value to the next screen
startActivity(in);
finish();
mScannerView.resumeCameraPreview(SimpleScannerActivity.this);
}
private static class CustomViewFinderView extends ViewFinderView {
public static final String TRADE_MARK_TEXT = "";
public static final int TRADE_MARK_TEXT_SIZE_SP = 40;
public final Paint PAINT = new Paint();
public CustomViewFinderView(Context context) {
super(context);
init();
}
public CustomViewFinderView(Context context, AttributeSet attrs) {
super(context, attrs);
init();
}
private void init() {
PAINT.setColor(Color.WHITE);
PAINT.setAntiAlias(true);
float textPixelSize = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_SP,
TRADE_MARK_TEXT_SIZE_SP, getResources().getDisplayMetrics());
PAINT.setTextSize(textPixelSize);
}
@Override
public void onDraw(Canvas canvas) {
super.onDraw(canvas);
drawTradeMark(canvas);
}
private void drawTradeMark(Canvas canvas) {
Rect framingRect = getFramingRect();
float tradeMarkTop;
float tradeMarkLeft;
if (framingRect != null) {
tradeMarkTop = framingRect.bottom + PAINT.getTextSize() + 10;
tradeMarkLeft = framingRect.left;
} else {
tradeMarkTop = 10;
tradeMarkLeft = canvas.getHeight() - PAINT.getTextSize() - 10;
}
canvas.drawText(TRADE_MARK_TEXT, tradeMarkLeft, tradeMarkTop, PAINT);
}
}
}
BaseScannerActivity.java
public class BaseScannerActivity extends AppCompatActivity {
public void setupToolbar() {
Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
setSupportActionBar(toolbar);
final ActionBar ab = getSupportActionBar();
if(ab != null) {
ab.setDisplayHomeAsUpEnabled(true);
}
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
// Respond to the action bar's Up/Home button
case android.R.id.home:
finish();
return true;
}
return super.onOptionsItemSelected(item);
}
}
I would suggest you use ZBar (from the same repo).
We had some performance issues with ZXing and had to switch to ZBar.
We have been using it in production for about two years with no issues.
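If you try that, the switch is mostly mechanical. A sketch along these lines, using the zbar module of the same dm77/barcodescanner library (class and method names are taken from its README, so double-check them against the version you use):
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.widget.Toast;
import me.dm7.barcodescanner.zbar.Result;
import me.dm7.barcodescanner.zbar.ZBarScannerView;

// Sketch of swapping ZXingScannerView for the ZBar variant from the same repo.
public class ZBarScannerActivity extends AppCompatActivity implements ZBarScannerView.ResultHandler {
    private ZBarScannerView mScannerView;

    @Override
    protected void onCreate(Bundle state) {
        super.onCreate(state);
        mScannerView = new ZBarScannerView(this);
        setContentView(mScannerView);
    }

    @Override
    public void onResume() {
        super.onResume();
        mScannerView.setResultHandler(this);
        mScannerView.startCamera();
    }

    @Override
    public void onPause() {
        super.onPause();
        mScannerView.stopCamera();
    }

    @Override
    public void handleResult(Result rawResult) {
        // ZBar results expose the decoded text via getContents() rather than getText()
        Toast.makeText(this, rawResult.getContents(), Toast.LENGTH_SHORT).show();
        mScannerView.resumeCameraPreview(this);
    }
}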
I am making a slot machine app and using kankan's wheel library for it. I want to modify the library so that when the rotation stops, the item it points to is the one I set. I have done this, but there is a glitch that reveals we have swapped the actual image for the one we want. How can I achieve this cleanly?
Update:
I have researched this a lot and, if I am right, Android scrolling is based on duration and distance, not on items. From kankan's wheel library I can get the current item. Now I am trying to stop the animation and the scroll as soon as a certain duration has been reached and the item is the one I want (via its index), but this is not working. Please help!
GameActivity
public class GameActivity extends Activity {
float mDeviceDensity;
String mUuid, mTitle, mContent, mReward;
ImageButton play;
SlotMachineAdapter slotAdapter;
private List<HashMap<String, Object>> slotImages = new ArrayList<HashMap<String, Object>>();
ArrayList<String> imagesWinId = new ArrayList<String>();
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_filler_up_game);
DisplayMetrics display = getResources().getDisplayMetrics();
mDeviceDensity = display.density;
slotAdapter = new SlotMachineAdapter(this);
getPassedData();
setSoundPlayer(R.raw.clicks,true);
initWheel(R.id.slot_1, false, 0);
initWheel(R.id.slot_2, false, 1);
initWheel(R.id.slot_3, true, 2);
play = (ImageButton) findViewById(R.id.btn_mix);
play.setOnClickListener(new OnClickListener() {
public void onClick(View v) {
shuffle(R.id.slot_1, 5000);
shuffle(R.id.slot_2, 7000);
shuffle(R.id.slot_3, 9000);
}
});
}
protected ImageLoader imageLoader;
ArrayList<SlotItem> arrListSlotItems;
private void getPassedData() {
try {
mUuid = getIntent().getStringExtra(getString(R.string.FILLER_UP_UUID));
imageLoader = ImageLoader.getInstance();
Uuid slotImagesExtra = (Uuid) (getIntent()
.getSerializableExtra(getString(R.string.FILLER_UP_IMAGES)));
arrListSlotItems = slotImagesExtra.getArrSlotItemArray();
for (int i = 0; i < arrListSlotItems.size(); i++)
downloadSlotImages(arrListSlotItems.get(i).getSlotId(), arrListSlotItems.get(i).getImageUrl());
} catch (Exception e) {
e.printStackTrace();
}
}
// Wheel scrolled flag
private boolean wheelScrolled = false;
// Wheel scrolled listener
OnWheelScrollListener scrolledListener = new OnWheelScrollListener() {
public void onScrollingStarted(WheelView wheel) {
wheelScrolled = true;
}
public void onScrollingFinished(WheelView wheel) {
wheelScrolled = false;
setStatus(wheel.getId(), getWheel(wheel.getId()).getWinningIndex());
}
};
// Wheel changed listener
private OnWheelChangedListener changedListener = new OnWheelChangedListener() {
public void onChanged(WheelView wheel, int oldValue, int newValue) {
if (!wheelScrolled) {
}
}
};
/**
* Updates status
*/
private void updateStatus() {
myThread();
}
public void myThread(){
Thread th=new Thread(){
@Override
public void run(){
try
{
Thread.sleep(2000);
GameActivity.this.runOnUiThread(new Runnable() {
@Override
public void run() {
showAlertDialogWithSingleButton(GameActivity.this, mTitle, mContent, success);
}
});
}catch (InterruptedException e) {
// TODO: handle exception
}
}
};
th.start();
}
android.content.DialogInterface.OnClickListener success = new android.content.DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
if (mContent != null && mContent.contains("again"))
startHomeActivity();
else
startNewsActivity();
}
};
private void startHomeActivity() {
}
private void startNewsActivity() {
}
android.content.DialogInterface.OnClickListener fail = new android.content.DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
//
}
};
public void showAlertDialogWithSingleButton(final Activity ctx, final String title, final String message,
DialogInterface.OnClickListener onClickListener) {
// show dialog
}
private void initWheel(int id, boolean monitorScroll, int itemIndex) {
Random randomGenerator = new Random();
int index = randomGenerator.nextInt(arrListSlotItems.size());
WheelView wheel = getWheel(id);
wheel.setViewAdapter(slotAdapter);
wheel.setCurrentItem((index ));
wheel.setVisibleItems(1);
wheel.setWinningIndex(itemIndex);
wheel.addChangingListener(changedListener);
wheel.addScrollingListener(scrolledListener);
wheel.setCyclic(true);
wheel.setEnabled(false);
}
private WheelView getWheel(int id) {
return (WheelView) findViewById(id);
}
private void setStatus(int id, int item) {
int index = 0;
for (int i = 0; i < arrListSlotItems.size(); i++) {
SlotItem d = arrListSlotItems.get(i);
if (d.getSlotId() != 0 && d.getSlotId() == Integer.parseInt(imagesWinId.get(item)))
index = arrListSlotItems.indexOf(d);
}
getWheel(id).setCurrentItem(index, true);
if (id == R.id.slot_3) {
if(player.isPlaying())
{
stopBackgroundAudio();
}
updateStatus();
}
}
private void shuffle(int id, int duration) {
WheelView wheel = getWheel(id);
wheel.scroll(450 + (int) (Math.random() * 50), duration);
}
private class SlotMachineAdapter extends AbstractWheelAdapter {
final int IMAGE_WIDTH = getImageWidth(mDeviceDensity);
final int IMAGE_HEIGHT = getImageHeight(mDeviceDensity);
private Context context;
/**
* Constructor
*/
public SlotMachineAdapter(Context context) {
this.context = context;
}
/**
* Loads image from resources
*/
private Bitmap loadImage(Bitmap bitmap) {
Bitmap scaled = Bitmap.createScaledBitmap(bitmap, IMAGE_WIDTH, IMAGE_HEIGHT, true);
return scaled;
}
@Override
public int getItemsCount() {
return slotImages.size();
}
// Layout params for image view
final LayoutParams params = new LayoutParams(IMAGE_WIDTH, IMAGE_HEIGHT);
@Override
public View getItem(int index, View cachedView, ViewGroup parent) {
ImageView img;
if (cachedView != null) {
img = (ImageView) cachedView;
} else {
img = new ImageView(context);
}
img.setPadding(0, 5, 0, 5);
img.setLayoutParams(params);
#SuppressWarnings("unchecked")
SoftReference<Bitmap> bitmapRef = (SoftReference<Bitmap>) slotImages.get(index).get("image");
Bitmap bitmap = bitmapRef.get();
if (bitmap == null) {
bitmap = loadImage(bitmap);
}
img.setImageBitmap(bitmap);
return img;
}
}
private int getImageWidth(float density) {
}
private int getImageHeight(float density) {
}
private void downloadSlotImages(final int id, String slotObj) {
//downloading slot images from server
}
}
This is the code. With it, when the slot stops I want it to scroll a little further until it reaches the image position I received from the server. I can do this, but it produces a small glitch. Is there any way to stop scrolling once the image is reached, as soon as a certain duration has elapsed?
P.S. If you need any more detail I can provide it.
P.P.S. Screenshots won't give you any detailed insight into the issue.
After days of searching I finally did it. All I had to do was set the interpolator to a LinearInterpolator and, when calling setCurrentItem, pass true for the animation flag.
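For reference, roughly what that looks like against the GameActivity code in the question; getWheel() is the helper shown above, and I am assuming kankan's WheelView exposes setInterpolator(Interpolator) and the animated setCurrentItem(int, boolean) overload, so check the signatures in your copy of the library.
// Sketch of the fix described above (names refer to the GameActivity code in the question).
// LinearInterpolator is android.view.animation.LinearInterpolator.
private void startSpin(int wheelId, int duration) {
    WheelView wheel = getWheel(wheelId);
    wheel.setInterpolator(new LinearInterpolator()); // constant speed, no ease-out snap at the end
    wheel.scroll(450 + (int) (Math.random() * 50), duration);
}

private void snapToResult(int wheelId, int targetIndex) {
    // Call from onScrollingFinished(): animate (true) to the server-chosen item
    // instead of jumping to it, which is what caused the visible image swap.
    getWheel(wheelId).setCurrentItem(targetIndex, true);
}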
As in my previous question, I am trying "GLSurfaceView + TextureView" to show the camera preview in one GLSurfaceView and several TextureViews, but I am facing some problems...
In the GLSurfaceView render thread, I tried to share the built-in EGLContext with the TextureViews, create an EGL surface from each TextureView's SurfaceTexture, and then use GLES to draw on it.
@Override
public void onDrawFrame(final GL10 gl) {
// GLES draw on GLSurfaceView
renderToTextureView();
}
private void renderToTextureView() {
saveEGLState();
for(TextureViewItem item : mTextureViewItemList) {
item.render(mSavedEglContext);
}
restoreEGLState();
}
private void saveEGLState() {
mSavedEglDisplay = EGL14.eglGetCurrentDisplay();
mSavedEglContext = EGL14.eglGetCurrentContext();
mSavedEglDrawSurface = EGL14.eglGetCurrentSurface(EGL14.EGL_DRAW);
mSavedEglReadSurface = EGL14.eglGetCurrentSurface(EGL14.EGL_READ);
}
private void restoreEGLState() {
if (!EGL14.eglMakeCurrent(mSavedEglDisplay, mSavedEglDrawSurface, mSavedEglReadSurface, mSavedEglContext)) {
throw new RuntimeException("eglMakeCurrent failed");
}
}
public class TextureViewItem implements TextureView.SurfaceTextureListener {
private static EglCore sEglCore;
private WindowSurface mWindowSurface;
private SurfaceTexture mSavedSurfaceTexture;
public void render(EGLContext sharedContext) {
if(mSavedSurfaceTexture == null) return;
getWindowSurface(sharedContext).makeCurrent();
// GLES draw on TextureView
getWindowSurface(sharedContext).swapBuffers();
}
private WindowSurface getWindowSurface(EGLContext sharedContext) {
if(sEglCore == null) {
sEglCore = new EglCore(sharedContext, EglCore.FLAG_TRY_GLES3);
}
if(mWindowSurface == null) {
mWindowSurface = new WindowSurface(sEglCore, mSavedSurfaceTexture);
}
return mWindowSurface;
}
@Override
public void onSurfaceTextureAvailable(SurfaceTexture st, int width, int height) {
if (mSavedSurfaceTexture == null) {
mSavedSurfaceTexture = st;
}
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture st) {
if (mWindowSurface != null) {
mWindowSurface.release();
}
if (sEglCore != null) {
sEglCore.release();
}
mSavedSurfaceTexture = null;
return true;
}
}
Everything works fine except when I press the "back" key. I call GLSurfaceView's onPause() when the activity pauses, and then swapBuffers (EGL14.eglSwapBuffers) never returns...
There are also some suspicious logcat messages:
W/WindowManager(1077): Window freeze timeout expired.
I/WindowManager(1077): Screen frozen for +2s42ms due to Window ..
Does anyone know why, and is there any way to solve this problem?
Thanks.