LibGDX - Cannot find Preferences location in Android Device - android

I am learning Preferences in LibGDX. I can run the following code successfully on both Windows and an Android device. On Windows, the file "MyDemo" is stored in my C: drive user directory, which works perfectly. The code also runs successfully on the Android device, but there I can't find the file "MyDemo". There is nothing
in Android/data/...
package com.hkprogram.mydemo;
import com.badlogic.gdx.ApplicationListener;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Preferences;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.graphics.GL20;
import com.badlogic.gdx.graphics.g2d.BitmapFont;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
public class MyDemo implements ApplicationListener {
private SpriteBatch batch;
private BitmapFont font1;
int screenWidth, screenHeight;
public Preferences prefs;
String name;
@Override
public void create() {
batch = new SpriteBatch();
font1 = new BitmapFont();
font1.setColor(Color.BLACK);
font1.setScale(5);
screenWidth=Gdx.graphics.getWidth();
screenHeight=Gdx.graphics.getHeight();
Preferences prefs = Gdx.app.getPreferences("MyDemo");
prefs.putString("Name", "Peter");
prefs.flush();
prefs = Gdx.app.getPreferences("MyDemo");
name=prefs.getString("Name","no name stored");
System.out.println("Name="+name);
}
@Override
public void dispose() {
}
@Override
public void pause() {
}
@Override
public void render() {
Gdx.gl.glClearColor(159/255.0f,220/255.0f,235/255.0f,0xff/255.0f);
Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT);
batch.begin();
font1.draw(batch, name, screenWidth/2, screenHeight/2);
batch.end();
}
@Override
public void resize(int arg0, int arg1) {
}
@Override
public void resume() {
}
}

You can get the path where your MyDemo file is stored by doing this:
File MyDemoFile = getDatabasePath("MyDemo.txt"); //choose your extension
if (MyDemoFile != null){
Log.d("Absolute path : ", MyDemoFile.getAbsolutePath());
}
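Note that on Android, LibGDX backs Preferences with SharedPreferences, so "MyDemo" ends up as an XML file in the app's private internal storage (not under Android/data/ on the SD card), and a file browser on an unrooted device won't show it. A rough sketch of how to log that location from the Android launcher Activity, assuming the usual shared_prefs layout (the exact path can vary by device):
// Hypothetical check, run from the Android launcher Activity (not the core project).
File prefsDir = new File(getFilesDir().getParent(), "shared_prefs");
File myDemoPrefs = new File(prefsDir, "MyDemo.xml"); // the name passed to Gdx.app.getPreferences(...)
Log.d("Prefs path", myDemoPrefs.getAbsolutePath() + ", exists=" + myDemoPrefs.exists());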

Agora.io audio working but video not transmitting

I am new to Agora.io and I am creating an Android app for 1-to-1 video calling. Everything works fine in my app: I can join/leave the channel and even see my own local image through my camera. However, in the call only the audio is transmitted; the video is not.
When I debug the app, I can see that the event 'onFirstRemoteVideoFrame' is not being triggered. I tried changing it to 'onFirstRemoteVideoDecoded' (which I saw in many tutorials, but Android Studio says the method is deprecated), but it is still not working.
Also, please note that when I add the line '-keep class io.agora.**{;}' in my proguard-rules.pro file, it says that it can't find the class. So instead I am using '-keep class io.agora.{*;}'.
Below is the Java code for my activity. I am using Agora 3.1.3.
package com.guideu.helloagora;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import android.Manifest;
import android.content.pm.PackageManager;
import android.os.Bundle;
import android.text.TextUtils;
import android.util.Log;
import android.view.SurfaceView;
import android.view.View;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.RelativeLayout;
import android.widget.Toast;
import io.agora.rtc.IRtcEngineEventHandler;
import io.agora.rtc.RtcEngine;
import io.agora.rtc.video.VideoCanvas;
import io.agora.rtc.video.VideoEncoderConfiguration;
public class MainActivity extends AppCompatActivity {
private static final String TAG=MainActivity.class.getSimpleName();
private static final int PERMISSION_REQ_ID=22;
private static final String[] REQUESTED_PERMISSIONS={
Manifest.permission.RECORD_AUDIO,
Manifest.permission.CAMERA,
Manifest.permission.WRITE_EXTERNAL_STORAGE
};
private RtcEngine mRtcEngine; // Agora engine reference
private FrameLayout mLocalContainer;
private RelativeLayout mRemoteContainer;
private SurfaceView mLocalView;
private SurfaceView mRemoteView;
private ImageView mCallBtn;
private ImageView mMuteBtn;
private ImageView mSwitchCameraBtn;
private boolean mCallEnd;
private boolean mMuted;
//Agora engine event handler
private final IRtcEngineEventHandler mRTCHandler=new IRtcEngineEventHandler() {
@Override
public void onJoinChannelSuccess(String channel, final int uid, int elapsed) {
super.onJoinChannelSuccess(channel, uid, elapsed);
runOnUiThread(new Runnable() {
@Override
public void run() {
Log.i("agora","Join channel success, uid: " + (uid & 0xFFFFFFFFL));
}
});
}
@Override
public void onUserOffline(final int uid, int reason) {
super.onUserOffline(uid, reason);
runOnUiThread(new Runnable() {
@Override
public void run() {
Log.i("agora","User offline, uid: " + (uid & 0xFFFFFFFFL));
removeRemoteView();
}
});
}
@Override
public void onFirstRemoteVideoFrame(final int uid, int width, int height, int elapsed) {
super.onFirstRemoteVideoFrame(uid, width, height, elapsed);
runOnUiThread(new Runnable() {
@Override
public void run() {
Log.i("agora","First remote video decoded, uid: " + (uid & 0xFFFFFFFFL));
setupRemoteVideo(uid);
}
});
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
initUI();
if(checkSelfPermission(REQUESTED_PERMISSIONS[0],PERMISSION_REQ_ID) &&
checkSelfPermission(REQUESTED_PERMISSIONS[1],PERMISSION_REQ_ID) &&
checkSelfPermission(REQUESTED_PERMISSIONS[2],PERMISSION_REQ_ID)){
//init engine
initEngineAndJoinChannel();
}
}
@Override
protected void onDestroy(){
super.onDestroy();
if(!mCallEnd){
leaveChannel();
}
RtcEngine.destroy();
}
private void initUI(){
mLocalContainer=findViewById(R.id.local_video_view_container);
mRemoteContainer=findViewById(R.id.remote_video_view_container);
mCallBtn=findViewById(R.id.btn_call);
mMuteBtn=findViewById(R.id.btn_mute);
mSwitchCameraBtn=findViewById(R.id.btn_switch_camera);
}
private void initEngineAndJoinChannel(){
//initialize engine
initializeEngine();
//setup video config
setupVideoConfig();
//setup local video
setupLocalVideo();
//join channel
joinChannel();
}
private void initializeEngine(){
try {
mRtcEngine = RtcEngine.create(getBaseContext(), getString(R.string.agora_app_id), mRTCHandler);
}
catch (Exception e){
Log.e(TAG,Log.getStackTraceString(e));
throw new RuntimeException("Need to check rtc sdk init fatal error\n" + Log.getStackTraceString(e));
}
}
private void setupVideoConfig(){
mRtcEngine.enableVideo();
mRtcEngine.setVideoEncoderConfiguration(new VideoEncoderConfiguration(
VideoEncoderConfiguration.VD_640x360,
VideoEncoderConfiguration.FRAME_RATE.FRAME_RATE_FPS_15,
VideoEncoderConfiguration.STANDARD_BITRATE,
VideoEncoderConfiguration.ORIENTATION_MODE.ORIENTATION_MODE_FIXED_PORTRAIT
));
}
private void setupLocalVideo(){
mRtcEngine.enableVideo();
mLocalView=RtcEngine.CreateRendererView(getBaseContext());
mLocalView.setZOrderMediaOverlay(true);
mLocalContainer.addView(mLocalView);
VideoCanvas localVideoCanvas=new VideoCanvas(mLocalView,VideoCanvas.RENDER_MODE_HIDDEN,0);
mRtcEngine.setupLocalVideo(localVideoCanvas);
}
private void setupRemoteVideo(int uid){
/*int count=mRemoteContainer.getChildCount();
View view=null;
for(int i=0;i<count;i++){
View v=mRemoteContainer.getChildAt(i);
if(v.getTag() instanceof Integer && ((int) v.getTag())==uid){
view=v;
}
}
if(view!=null){
return;
}*/
mRemoteView=RtcEngine.CreateRendererView(getBaseContext());
mRemoteContainer.addView(mRemoteView);
mRtcEngine.setupRemoteVideo(new VideoCanvas(mRemoteView,VideoCanvas.RENDER_MODE_HIDDEN,uid));
mRemoteView.setTag(uid);
}
private void removeRemoteView(){
if(mRemoteView!=null){
mRemoteContainer.removeView(mRemoteView);
}
mRemoteView=null;
}
private void joinChannel(){
String token=getString(R.string.agora_access_token);
if(TextUtils.isEmpty(token)){
token=null;
}
mRtcEngine.joinChannel(token,"HelloAgora","",0);
}
private void leaveChannel(){
mRtcEngine.leaveChannel();
}
public void onLocalAudioMuteClicked(View view){
mMuted=!mMuted;
mRtcEngine.muteLocalAudioStream(mMuted);
int res=mMuted?R.drawable.btn_mutecall:R.drawable.btn_unmute;
mMuteBtn.setImageResource(res);
}
public void onSwitchCameraClicked(View view){
mRtcEngine.switchCamera();
}
public void onCallClicked(View view){
if(mCallEnd){
startCall();
mCallEnd=false;
mCallBtn.setImageResource(R.drawable.btn_endcall);
}
else{
endCall();
mCallEnd=true;
mCallBtn.setImageResource(R.drawable.btn_startcall);
}
showButtons(!mCallEnd);
}
private void startCall(){
setupLocalVideo();
joinChannel();
}
private void endCall(){
removeLocalVideo();
removeRemoteView();
leaveChannel();
}
private void removeLocalVideo(){
if(mLocalView!=null){
mLocalContainer.removeView(mLocalView);
}
mLocalView=null;
}
private void showButtons(boolean show){
int visibility=show?View.VISIBLE:View.GONE;
mMuteBtn.setVisibility(visibility);
mSwitchCameraBtn.setVisibility(visibility);
}
private boolean checkSelfPermission(String permission, int requestCode){
if(ContextCompat.checkSelfPermission(this,permission)!= PackageManager.PERMISSION_GRANTED){
ActivityCompat.requestPermissions(this,REQUESTED_PERMISSIONS,requestCode);
Toast.makeText(this,"No Permission",Toast.LENGTH_LONG).show();
return false;
}
return true;
}
}
You can try "onRemoteVideoStateChanged" callback instead of "onFirstRemoteVideoFrame".
Here is the code snippet:
@Override
public void onRemoteVideoStateChanged(final int uid, int state, int reason, int elapsed) {
super.onRemoteVideoStateChanged(uid, state, reason, elapsed);
if (state == Constants.REMOTE_VIDEO_STATE_STARTING) {
runOnUiThread(new Runnable() {
@Override
public void run() {
setupRemoteVideo(uid);
}
});
}
}
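If you use this callback, note that REMOTE_VIDEO_STATE_STARTING comes from Agora's Constants class, so the activity also needs that import next to the other io.agora.rtc imports already in the question (a one-line sketch, assuming the 3.x package layout):
import io.agora.rtc.Constants; // provides REMOTE_VIDEO_STATE_STARTING and the other remote video states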

how to create a tablet background using android studio

I have been following this tutorial:
Page.
I have been getting a lot of errors.
Can you please help me by sharing the code on GitHub?
You can also post your answer on Stack Overflow.
This is my Gif
I have used this in my java file:
package com.example.background;
import android.graphics.Canvas;
import android.graphics.Movie;
import android.os.Handler;
import android.service.wallpaper.WallpaperService;
import android.util.Log;
import android.view.SurfaceHolder;
import java.io.IOException;
public class GIFWallpaperEngine extends WallpaperService{
private final int frameDuration = 20;
private SurfaceHolder holder;
private Movie movie;
private boolean visible;
private Handler handler;
public GIFWallpaperEngine(Movie movie) {
this.movie = movie;
handler = new Handler();
}
public void onCreate(SurfaceHolder surfaceHolder) {
super.onCreate(surfaceHolder);
this.holder = surfaceHolder;
}
private Runnable drawGIF = new Runnable() {
public void run() {
draw();
}
};
private void draw() {
if (visible) {
Canvas canvas = holder.lockCanvas();
canvas.save();
// Adjust size and position so that
// the image looks good on your screen
canvas.scale(3f, 3f);
movie.draw(canvas, -100, 0);
canvas.restore();
holder.unlockCanvasAndPost(canvas);
movie.setTime((int) (System.currentTimeMillis() % movie.duration()));
handler.removeCallbacks(drawGIF);
handler.postDelayed(drawGIF, frameDuration);
}
}
@Override
public void onVisibilityChanged(boolean visible) {
this.visible = visible;
if (visible) {
handler.post(drawGIF);
} else {
handler.removeCallbacks(drawGIF);
}
}
@Override
public void onDestroy() {
super.onDestroy();
handler.removeCallbacks(drawGIF);
}
@Override
public Engine onCreateEngine() {
try {
Movie movie = Movie.decodeStream(
getResources().getAssets().open("wolverine.gif"));
return new Engine(movie);
}catch(IOException e){
Log.d("GIF", "Could not load asset");
return null;
}
}
}
Is there anything wrong? The @Override annotations are showing an error saying these methods do not override methods from the superclass.
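The @Override errors point at the underlying problem: onCreate(SurfaceHolder), onVisibilityChanged() and the drawing loop are declared on WallpaperService.Engine, not on WallpaperService itself, and onCreateEngine() has to return your own Engine subclass. A rough sketch of the nesting the framework expects (class names are illustrative, not taken from the tutorial):
import android.graphics.Canvas;
import android.graphics.Movie;
import android.os.Handler;
import android.service.wallpaper.WallpaperService;
import android.util.Log;
import java.io.IOException;
public class GIFWallpaperService extends WallpaperService {
    @Override
    public Engine onCreateEngine() {
        try {
            // Load the animated GIF from assets and hand it to the engine.
            Movie movie = Movie.decodeStream(getResources().getAssets().open("wolverine.gif"));
            return new GIFWallpaperEngine(movie);
        } catch (IOException e) {
            Log.d("GIF", "Could not load asset");
            return null;
        }
    }
    // The drawing callbacks live on WallpaperService.Engine, so they go in a nested Engine subclass.
    private class GIFWallpaperEngine extends Engine {
        private final Movie movie;
        private final Handler handler = new Handler();
        private boolean visible;
        GIFWallpaperEngine(Movie movie) {
            this.movie = movie;
        }
        private final Runnable drawGIF = new Runnable() {
            public void run() {
                draw();
            }
        };
        private void draw() {
            if (!visible) return;
            Canvas canvas = getSurfaceHolder().lockCanvas();
            if (canvas != null) {
                canvas.save();
                canvas.scale(3f, 3f);
                movie.draw(canvas, -100, 0);
                canvas.restore();
                getSurfaceHolder().unlockCanvasAndPost(canvas);
            }
            movie.setTime((int) (System.currentTimeMillis() % movie.duration()));
            handler.removeCallbacks(drawGIF);
            handler.postDelayed(drawGIF, 20); // frame duration in ms
        }
        @Override
        public void onVisibilityChanged(boolean visible) {
            this.visible = visible;
            if (visible) {
                handler.post(drawGIF);
            } else {
                handler.removeCallbacks(drawGIF);
            }
        }
        @Override
        public void onDestroy() {
            super.onDestroy();
            handler.removeCallbacks(drawGIF);
        }
    }
}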
Well, this is the solution for me: GitLink.
Solution to your problem: when you try to run the app, first click on the app module and open Edit Configurations.
Now, in Launch Options, select Nothing.
As the last step, press OK.
Finally, you can run your app. Try it!

Android - Real-Time get image data from camera?

I want to do some image processing jobs, and I want to get real-time image data from my camera (during the preview state) instead of after taking pictures.
I looked at this post but don't know how to use it.
After setting this,
SurfaceHolder.Callback surfaceCallback = new SurfaceHolder.Callback()
{
public void surfaceCreated(SurfaceHolder holder) {
camera.setPreviewCallback(previewCallback);
}
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
public void surfaceDestroyed(SurfaceHolder holder) {
}
};
and this
private Camera.PreviewCallback previewCallback = new Camera.PreviewCallback()
{
@Override
public void onPreviewFrame(byte[] data, Camera cam)
{
// Each preview frame arrives here as raw NV21 bytes.
Camera.Size previewSize = cam.getParameters().getPreviewSize();
YuvImage yuvImage = new YuvImage(data, ImageFormat.NV21, previewSize.width, previewSize.height, null);
// Compress the YUV frame to JPEG, then decode it into a Bitmap for processing.
ByteArrayOutputStream baos = new ByteArrayOutputStream();
yuvImage.compressToJpeg(new Rect(0, 0, previewSize.width, previewSize.height), 80, baos);
byte[] jdata = baos.toByteArray();
Bitmap bitmap = BitmapFactory.decodeByteArray(jdata, 0, jdata.length);
}
};
How can I get the image data in real time? Could anyone please give me a short example?
Thanks
For image-processing operations you can use the OpenCV library.
Here is sample code that processes frames directly from the camera using OpenCV:
import android.Manifest;
import android.content.pm.PackageManager;
import android.os.Bundle;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.View;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.JavaCameraView;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Mat;
import static org.opencv.core.CvType.CV_8UC4;
public class MainActivity extends AppCompatActivity implements CameraBridgeViewBase.CvCameraViewListener2 {
private static final String TAG = "MainActivity";
JavaCameraView javaCameraView;
Mat frame;
public static final int CAMERA_PERMISSION_REQUEST_CODE = 3;
static {
System.loadLibrary("MyOpenCVLibs");
}
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch(status){
case LoaderCallbackInterface.SUCCESS:
{
javaCameraView.enableView();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
if (ContextCompat.checkSelfPermission(this,Manifest.permission.CAMERA)!= PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(this,new String[]{Manifest.permission.CAMERA},CAMERA_PERMISSION_REQUEST_CODE);
}
javaCameraView = (JavaCameraView) findViewById(R.id.java_camera_view);
javaCameraView.setVisibility(View.VISIBLE);
javaCameraView.setCvCameraViewListener(this);
}
@Override
protected void onPause(){
super.onPause();
if(javaCameraView!=null)
javaCameraView.disableView();
}
@Override
protected void onDestroy(){
super.onDestroy();
if(javaCameraView!=null)
javaCameraView.disableView();
}
@Override
protected void onResume(){
super.onResume();
if (OpenCVLoader.initDebug()) {
Log.i(TAG, "OpenCV loaded successfully.");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
} else {
Log.i(TAG, "OpenCV not loaded.");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_1_0, this, mLoaderCallback);
}
}
@Override
public void onCameraViewStarted(int width, int height) {
frame=new Mat(height,width,CV_8UC4);
}
@Override
public void onCameraViewStopped() {
frame.release();
}
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
frame=inputFrame.rgba();
return frame;
}
}
This code checks whether OpenCV was imported properly and requests the camera permission.
Each input frame is saved in the frame variable.
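From there you do your processing inside onCameraFrame() and return the Mat you want displayed. A minimal sketch, assuming the setup above and OpenCV's Imgproc module, that converts each frame to grayscale (swap in your own processing):
import org.opencv.imgproc.Imgproc;

@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    Mat rgba = inputFrame.rgba();
    Mat gray = new Mat();
    // Convert the RGBA preview frame to a single-channel grayscale image.
    Imgproc.cvtColor(rgba, gray, Imgproc.COLOR_RGBA2GRAY);
    return gray; // the returned Mat is what gets rendered on screen
}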

From extending from AndroidApplication to Game

When you start a project with libgdx, it automatically makes the class extend AndroidApplication. I did not think about this until later, and now I want to change it to use the Game and Screen classes, but unfortunately without success.
My first question is: how do I change the Android project?
import android.os.Bundle;
import com.badlogic.gdx.backends.android.AndroidApplication;
import com.badlogic.gdx.backends.android.AndroidApplicationConfiguration;
public class AndroidClass extends AndroidApplication {
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
AndroidApplicationConfiguration cfg = new AndroidApplicationConfiguration();
cfg.useGL20 = false;
initialize(new SplashScreen(), cfg);
}
}
My second question: how do I change the desktop project?
import com.badlogic.gdx.backends.lwjgl.LwjglApplication;
import com.badlogic.gdx.backends.lwjgl.LwjglApplicationConfiguration;
public class DeskopClass {
public static void main(String[] args) {
LwjglApplicationConfiguration cfg = new LwjglApplicationConfiguration();
cfg.title = "MyApp";
cfg.useGL20 = false;
cfg.width = 800;
cfg.height = 480;
new LwjglApplication(new SplashScreen(), cfg);
}
}
Third question: How do I change the SplashScreen:
import com.badlogic.gdx.ApplicationListener;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.graphics.Texture;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
public class SplashScreen implements ApplicationListener{
@Override
public void create() {
// TODO Auto-generated method stub
}
Thanks!
The Game class is just an ApplicationListener. AndroidApplication and Game are not interchangeable classes as they accomplish two different things.
You need an AndroidApplication class to pass events on to your ApplicationListener classes. If you want a Game class in your app then you can always create your own.
public class Game implements ApplicationListener {
@Override
public void create () {
}
@Override
public void dispose () {
}
@Override
public void pause () {
}
@Override
public void resume () {
}
@Override
public void render () {
}
@Override
public void resize (int width, int height) {
}
}
The Game class in libgdx is itself an ApplicationListener.
You can create a class that extends Game and pass an instance of it directly to initialize (for Android) and to LwjglApplication (for desktop).
This way you can use setScreen without a problem.
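A minimal sketch of that setup (the names MyGame, SplashScreen and MenuScreen below are illustrative, not from your project):
// MyGame.java (core project): the entry point extends Game instead of implementing ApplicationListener.
public class MyGame extends com.badlogic.gdx.Game {
    @Override
    public void create() {
        setScreen(new SplashScreen(this));
    }
}
// SplashScreen.java: Game forwards render/resize/pause/resume/dispose to the active Screen.
public class SplashScreen extends com.badlogic.gdx.ScreenAdapter { // or implement Screen directly
    private final MyGame game;
    public SplashScreen(MyGame game) {
        this.game = game;
    }
    @Override
    public void render(float delta) {
        // Draw the splash here; when it finishes, e.g. game.setScreen(new MenuScreen(game));
    }
}
// Launchers: pass the Game subclass instead of the old listener.
// Android: initialize(new MyGame(), cfg);
// Desktop: new LwjglApplication(new MyGame(), cfg);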

Android App freezing on second launch

I'm developing a game for Android, and I've been trying to implement a dialog that shows a WebView for web banner ads (I'm using cocos2d for my development). I'm using the static method ads() to open the dialog statically. Since doing this, however, I've started having problems launching.
On first launch it runs fine, but when the app is closed and reopened, the "openscreen" scene starts and then doesn't run its fade-ins and fade-outs or continue into the menu scene.
Activity (where the ads() method is)
package wingdev.defence;
import java.util.Timer;
import java.util.TimerTask;
import org.cocos2d.layers.CCScene;
import org.cocos2d.nodes.CCDirector;
import org.cocos2d.opengl.CCGLSurfaceView;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.webkit.WebView;
import android.widget.Button;
public class DefenceActivity extends Activity{
protected CCGLSurfaceView _glSurfaceView;
public static int highscore = 0;
public static int mode= Activity.MODE_PRIVATE;
public static SharedPreferences mySharedPreferences ;
public static String CurrentLevel = "";
public static boolean firstlaunch=true;
public static int screenforchoice = 0;
public static boolean choicemade=false;
public static boolean choice=false;
public static DefenceActivity context;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON, WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
_glSurfaceView = new CCGLSurfaceView(this);
setContentView(_glSurfaceView);
CCScene scene = OpenScreen.scene();
CCDirector.sharedDirector().runWithScene(scene);
}
@Override
public void onStart()
{
super.onStart();
CCDirector.sharedDirector().attachInView(_glSurfaceView);
CCDirector.sharedDirector().setDeviceOrientation(CCDirector.kCCDeviceOrientationLandscapeLeft);
CCDirector.sharedDirector().setAnimationInterval(1.0f / 60.0f);
mySharedPreferences=getSharedPreferences("HighScore",mode);
highscore= mySharedPreferences.getInt("HighScore",0);
SharedPreferences.Editor editor= mySharedPreferences.edit();
editor.remove("HighScore");
editor.putInt("HighScore", highscore);
editor.commit();
CCScene scene = OpenScreen.scene();
CCDirector.sharedDirector().runWithScene(scene);
Config.context = this;
context = this;
}
public void startgame()
{
CCDirector.sharedDirector().attachInView(_glSurfaceView);
CCDirector.sharedDirector().setDeviceOrientation(CCDirector.kCCDeviceOrientationLandscapeLeft);
CCDirector.sharedDirector().setAnimationInterval(1.0f / 60.0f);
CCScene scene = GameLayerSurvival.scene();
CCDirector.sharedDirector().runWithScene(scene);
}
@Override
public void onPause()
{
super.onPause();
CCDirector.sharedDirector().pause();
}
@Override
public void onResume()
{
super.onResume();
}
@Override
public void onStop()
{
super.onStop();
finish();
CCDirector.sharedDirector().end();
}
static public void newhighscore()
{
if(GameLayerSurvival.score>highscore){
highscore=GameLayerSurvival.score;
}
SharedPreferences.Editor editor= mySharedPreferences.edit();
editor.remove("HighScore");
editor.putInt("HighScore", highscore);
editor.commit();
}
public static void ads(){
context.runOnUiThread(new Runnable() {
public void run() {
final Dialog dialog = new Dialog(context);
dialog.setContentView(R.layout.main);
dialog.setTitle("Advert");
dialog.setCancelable(true);
WebView mWebView = (WebView) dialog.findViewById(R.id.webbanner);
mWebView.getSettings().setJavaScriptEnabled(true);
mWebView.loadUrl("http://www.example.co.uk/");
Button bc_btn1 = (Button) dialog.findViewById(R.id.button1);
bc_btn1.setOnClickListener(new View.OnClickListener() {
public void onClick(View v){
killDialog(dialog);
}
});
dialog.show();
}
});
}
public static void killDialog(Dialog dialog){
dialog.cancel();
}
}
Openscreen- the first scene used on launch
package wingdev.defence;
import org.cocos2d.actions.instant.CCCallFunc;
import org.cocos2d.actions.interval.CCDelayTime;
import org.cocos2d.actions.interval.CCFadeIn;
import org.cocos2d.actions.interval.CCFadeOut;
import org.cocos2d.actions.interval.CCSequence;
import org.cocos2d.layers.CCColorLayer;
import org.cocos2d.layers.CCScene;
import org.cocos2d.nodes.CCDirector;
import org.cocos2d.nodes.CCLabel;
import org.cocos2d.nodes.CCSprite;
import org.cocos2d.types.CGSize;
import org.cocos2d.types.ccColor4B;
public class OpenScreen extends CCColorLayer {
CCLabel _start;
CCLabel _easy;
CCLabel _normal;
CCLabel _highscore;
CCLabel _howto;
CCLabel _hard;
CCLabel _reset;
CCSprite wingdev = CCSprite.sprite("wingdevpresents.png");
CCSprite darkorange = CCSprite.sprite("PirateTea.png");
CCSprite plane = CCSprite.sprite("plane.png");
CCSprite back2 = CCSprite.sprite("black.png");
public static CCScene scene()
{
//
CCScene scene = CCScene.node();
CCColorLayer layer = new OpenScreen(ccColor4B.ccc4(255, 255, 255, 255));
scene.addChild(layer);
return scene;
}
protected OpenScreen(ccColor4B color) {
super(color);
CGSize winSize = CCDirector.sharedDirector().displaySize();
this.setIsTouchEnabled(true);
back2.setPosition(winSize.width / 2.0f, winSize.height / 2.0f);
back2.setScaleX(winSize.width / back2.getContentSize().width);
back2.setScaleY(winSize.height / back2.getContentSize().height);
addChild(back2);
wingdev.setPosition(winSize.width / 2.0f, winSize.height / 2.0f);
wingdev.setScaleX(winSize.width / wingdev.getContentSize().width);
wingdev.setScaleY(winSize.height / wingdev.getContentSize().height);
addChild(wingdev);
wingdev.runAction(CCSequence.actions(CCFadeIn.action(1.0f),CCDelayTime.action(2.0f),CCFadeOut.action(1.0f), CCCallFunc.action(this,"openscreen2")));
}
public void openscreen()
{
CCDirector.sharedDirector().replaceScene(Menu.scene());
}
public void openscreen2(){
CGSize winSize = CCDirector.sharedDirector().displaySize();
removeChild(wingdev,true);
darkorange.setPosition(winSize.width / 2.0f, winSize.height / 2.0f);
darkorange.setScaleX(winSize.width / darkorange.getContentSize().width);
darkorange.setScaleY(winSize.height / darkorange.getContentSize().height);
addChild(darkorange);
darkorange.runAction(CCSequence.actions(CCFadeIn.action(1.0f),CCDelayTime.action(2.0f),CCFadeOut.action(1.0f),CCCallFunc.action(this,"openscreen")));
}
}
Any help in understanding where I'm going wrong would be greatly appreciated!
You pause your CCDirector in
@Override
public void onPause()
{
super.onPause();
CCDirector.sharedDirector().pause();
}
I think you need to start it again in onResume()
@Override
public void onResume()
{
super.onResume();
CCDirector.sharedDirector().resume();
}
onStart() won't get called if the app is not killed.
http://developer.android.com/reference/android/app/Activity.html#ActivityLifecycle
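Putting the two lifecycle callbacks together, based on the snippets above (this assumes CCDirector exposes the matching pause()/resume() pair used here):
@Override
public void onPause()
{
    super.onPause();
    // Stop the cocos2d main loop while the activity is in the background.
    CCDirector.sharedDirector().pause();
}
@Override
public void onResume()
{
    super.onResume();
    // Restart the main loop so the scheduled fade in/out actions run again.
    CCDirector.sharedDirector().resume();
}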
