Reading an image file from device storage and converting it to Base64 results in invalid Base64 in Flutter (Android)

I'm trying to read a file from a known path and get a Base64 string from the image, but the resulting Base64 seems to be broken and I still don't understand why. This happens only in Flutter code: when I do the same conversion in a simple Dart program, I get the desired Base64 string.
onPressed: () async {
File imageFile =
await ImagePicker.pickImage(source: ImageSource.camera);
if (imageFile != null) {
Uint8List bytes = await imageFile.readAsBytes();
String base64data = base64.encode(bytes);
print(base64data);
}
}
Following is my console output.
W/ExifInterface(15331): Skip the tag entry since tag number is not defined: 2
W/ExifInterface(15331): Stop reading file since a wrong offset may cause an infinite loop: 0
I/chatty (15331): uid=10271(com.crowdanalytix.datax_app) Binder:15331_5 identical 2 lines
W/ExifInterface(15331): Stop reading file since a wrong offset may cause an infinite loop: 0
I/flutter (15331): /9j/4QGLRXhpZgAATU0AKgAAAAgACAEAAAQAAAABAAAKMgEQAAIAAAAOAAAAbgEBAAQAAAABAAASIAEPAAIAAAAIAAAAfIdpAAQAAAABAAAAmAESAAMAAAABAAAAAAEyAAIAAAAUAAAAhIglAAQAAAABAAABCgAAAABPTkVQTFVTIEEzMDAzAE9uZVBsdXMAMjAxOTowOTowMiAxOTo0MDo1MgAAB6QDAAMAAAABAAAAAIgnAAMAAAABD6AAAJIKAAUAAAABAAAA8oKaAAUAAAABAAAA+pIJAAMAAAABABAAAJIIAAQAAAABAAAAAIKdAAUAAAABAAABAgAAAAAAAAGqAAAAZAAAAkwAACcQAABOIAAAJxAAAQAFAAEAAAAHAAABHAAAAAAyMDAvMTAwAAQBEAACAAAADgAAAVkBDwACAAAACAAAAWcBEgADAAAAAQAAAAABMgACAAAAFAAAAW8AAAAAT05FUExVUyBBMzAwMwBPbmVQbHVzADIwMTk6MDk6MDIgMTk6NDA6NTIA/+AAEEpGSUYAAQEAAAEAAQAA/9sAQwABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEB/9sAQwEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEB/8AAEQgSIAoyAwEiAAIRAQMRAf/EAB8AAAIDAQEBAQEBAQAAAAAAAAQFAwYHAggAAQkKC//EAF0QAAEBBAYHCAIBAwMDAQECHwIBAAMREgQhIjFBUQUyYXGBkfAGE0KhscHR4VLxIwcUYjNDchVTgghjkhYkc4OiNJOjF0RUstIlZISzwsPilMTTGHSkNVW01PP0/8QAHAEAAwEBAQEBAQAAAAAAAAAAAgMEAAEFBgcI/8QAQhEAAAQDBgUFAQEAAQQBAgENAAECEQMhMRJBUWFx8IG
The Base64 string I'm getting is invalid. Can someone try this?
I'm using a OnePlus 3T (ONEPLUS A3003).
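A quick way to check whether the encoding itself is broken, rather than just the console output, is to decode the string back and compare it with the original bytes. A minimal sketch, assuming the bytes and base64data variables from the snippet above:
import 'dart:convert';
import 'dart:typed_data';

// Returns true if decoding the Base64 string reproduces the original bytes.
// If the round trip matches, the encoding is fine and only the printed
// output is incomplete.
bool roundTripMatches(Uint8List bytes, String base64data) {
  final decoded = base64.decode(base64data);
  if (decoded.length != bytes.length) return false;
  for (var i = 0; i < bytes.length; i++) {
    if (decoded[i] != bytes[i]) return false;
  }
  return true;
}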

The print function does not print everything; you can see the full result in debug mode.
Code snippet:
List<int> imageBytes = await _imageFile.readAsBytes();
String base64Image = base64Encode(imageBytes);
print(base64Image);
Copy the Base64 string from debug mode and paste it into an online converter; you can see the result is correct.
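If you just need the full string in the console, a workaround (a sketch, not part of the original answer) is to print it in chunks, since the Android logger truncates very long lines:
import 'package:flutter/foundation.dart';

// Print a long string in fixed-size chunks so the Android logger does not
// truncate it. debugPrint also accepts a wrapWidth parameter for similar
// line wrapping.
void printLong(String text, {int chunkSize = 800}) {
  for (var i = 0; i < text.length; i += chunkSize) {
    final end = (i + chunkSize < text.length) ? i + chunkSize : text.length;
    debugPrint(text.substring(i, end));
  }
}
Calling printLong(base64Image) in place of print(base64Image) should show the whole encoded string.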
Full code, from the official demo with the Base64 conversion added:
// Copyright 2019 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:async';
import 'dart:io';
import 'dart:convert';
import 'package:flutter/material.dart';
import 'package:image_picker/image_picker.dart';
import 'package:video_player/video_player.dart';
import 'dart:typed_data';
import 'package:image/image.dart' as ImageProcess;
void main() {
runApp(MyApp());
}
class MyApp extends StatelessWidget {
@override
Widget build(BuildContext context) {
return MaterialApp(
title: 'Image Picker Demo',
home: MyHomePage(title: 'Image Picker Example'),
);
}
}
class MyHomePage extends StatefulWidget {
MyHomePage({Key key, this.title}) : super(key: key);
final String title;
@override
_MyHomePageState createState() => _MyHomePageState();
}
class _MyHomePageState extends State<MyHomePage> {
File _imageFile;
dynamic _pickImageError;
bool isVideo = false;
VideoPlayerController _controller;
String _retrieveDataError;
Future<void> _playVideo(File file) async {
if (file != null && mounted) {
await _disposeVideoController();
_controller = VideoPlayerController.file(file);
await _controller.setVolume(1.0);
await _controller.initialize();
await _controller.setLooping(true);
await _controller.play();
setState(() {});
}
}
void _onImageButtonPressed(ImageSource source) async {
if (_controller != null) {
await _controller.setVolume(0.0);
}
if (isVideo) {
final File file = await ImagePicker.pickVideo(source: source);
await _playVideo(file);
} else {
try {
_imageFile = await ImagePicker.pickImage(source: source);
List<int> imageBytes = await _imageFile.readAsBytes();
String base64Image = base64Encode(imageBytes);
print(base64Image);
setState(() {});
} catch (e) {
_pickImageError = e;
}
}
}
@override
void deactivate() {
if (_controller != null) {
_controller.setVolume(0.0);
_controller.pause();
}
super.deactivate();
}
@override
void dispose() {
_disposeVideoController();
super.dispose();
}
Future<void> _disposeVideoController() async {
if (_controller != null) {
await _controller.dispose();
_controller = null;
}
}
Widget _previewVideo() {
final Text retrieveError = _getRetrieveErrorWidget();
if (retrieveError != null) {
return retrieveError;
}
if (_controller == null) {
return const Text(
'You have not yet picked a video',
textAlign: TextAlign.center,
);
}
return Padding(
padding: const EdgeInsets.all(10.0),
child: AspectRatioVideo(_controller),
);
}
Widget _previewImage() {
final Text retrieveError = _getRetrieveErrorWidget();
if (retrieveError != null) {
return retrieveError;
}
if (_imageFile != null) {
return Image.file(_imageFile);
} else if (_pickImageError != null) {
return Text(
'Pick image error: $_pickImageError',
textAlign: TextAlign.center,
);
} else {
return const Text(
'You have not yet picked an image.',
textAlign: TextAlign.center,
);
}
}
Future<void> retrieveLostData() async {
final LostDataResponse response = await ImagePicker.retrieveLostData();
if (response.isEmpty) {
return;
}
if (response.file != null) {
if (response.type == RetrieveType.video) {
isVideo = true;
await _playVideo(response.file);
} else {
isVideo = false;
setState(() {
_imageFile = response.file;
});
}
} else {
_retrieveDataError = response.exception.code;
}
}
@override
Widget build(BuildContext context) {
return Scaffold(
appBar: AppBar(
title: Text(widget.title),
),
body: Center(
child: Platform.isAndroid
? FutureBuilder<void>(
future: retrieveLostData(),
builder: (BuildContext context, AsyncSnapshot<void> snapshot) {
switch (snapshot.connectionState) {
case ConnectionState.none:
case ConnectionState.waiting:
return const Text(
'You have not yet picked an image.',
textAlign: TextAlign.center,
);
case ConnectionState.done:
return isVideo ? _previewVideo() : _previewImage();
default:
if (snapshot.hasError) {
return Text(
'Pick image/video error: ${snapshot.error}',
textAlign: TextAlign.center,
);
} else {
return const Text(
'You have not yet picked an image.',
textAlign: TextAlign.center,
);
}
}
},
)
: (isVideo ? _previewVideo() : _previewImage()),
),
floatingActionButton: Column(
mainAxisAlignment: MainAxisAlignment.end,
children: <Widget>[
FloatingActionButton(
onPressed: () {
isVideo = false;
_onImageButtonPressed(ImageSource.gallery);
},
heroTag: 'image0',
tooltip: 'Pick Image from gallery',
child: const Icon(Icons.photo_library),
),
Padding(
padding: const EdgeInsets.only(top: 16.0),
child: FloatingActionButton(
onPressed: () {
isVideo = false;
_onImageButtonPressed(ImageSource.camera);
},
heroTag: 'image1',
tooltip: 'Take a Photo',
child: const Icon(Icons.camera_alt),
),
),
Padding(
padding: const EdgeInsets.only(top: 16.0),
child: FloatingActionButton(
backgroundColor: Colors.red,
onPressed: () {
isVideo = true;
_onImageButtonPressed(ImageSource.gallery);
},
heroTag: 'video0',
tooltip: 'Pick Video from gallery',
child: const Icon(Icons.video_library),
),
),
Padding(
padding: const EdgeInsets.only(top: 16.0),
child: FloatingActionButton(
backgroundColor: Colors.red,
onPressed: () {
isVideo = true;
_onImageButtonPressed(ImageSource.camera);
},
heroTag: 'video1',
tooltip: 'Take a Video',
child: const Icon(Icons.videocam),
),
),
],
),
);
}
Text _getRetrieveErrorWidget() {
if (_retrieveDataError != null) {
final Text result = Text(_retrieveDataError);
_retrieveDataError = null;
return result;
}
return null;
}
}
class AspectRatioVideo extends StatefulWidget {
AspectRatioVideo(this.controller);
final VideoPlayerController controller;
@override
AspectRatioVideoState createState() => AspectRatioVideoState();
}
class AspectRatioVideoState extends State<AspectRatioVideo> {
VideoPlayerController get controller => widget.controller;
bool initialized = false;
void _onVideoControllerUpdate() {
if (!mounted) {
return;
}
if (initialized != controller.value.initialized) {
initialized = controller.value.initialized;
setState(() {});
}
}
@override
void initState() {
super.initState();
controller.addListener(_onVideoControllerUpdate);
}
@override
void dispose() {
controller.removeListener(_onVideoControllerUpdate);
super.dispose();
}
@override
Widget build(BuildContext context) {
if (initialized) {
return Center(
child: AspectRatio(
aspectRatio: controller.value?.aspectRatio,
child: VideoPlayer(controller),
),
);
} else {
return Container();
}
}
}

Related

Voice Commands On Flutter Application

I tried to give a command to the Flutter application using voice, and the application also has a text-to-speech function, so when I give the read command by voice that function keeps repeating. How do I avoid the repeat?
I have tried to terminate the listening function after getting one input, but it doesn't work; I still get the same behaviour.
import 'package:flutter/material.dart';
import 'package:http/http.dart' as http;
import 'package:newsapp/screens/News_Screen.dart';
import 'package:newsapp/utils/app_colors.dart';
import 'package:newsapp/utils/assets_constants.dart';
import 'package:webfeed/webfeed.dart';
import 'package:flutter_tts/flutter_tts.dart';
import 'package:url_launcher/url_launcher.dart';
import 'dart:io';
import 'package:flutter/foundation.dart';
import 'package:flutter_speech/flutter_speech.dart';
const languages = const [
// const Language('Francais', 'fr_FR'),
const Language('English', 'en_US'),
const Language('Pусский', 'ru_RU'),
const Language('Italiano', 'it_IT'),
const Language('Español', 'es_ES'),
];
class Language {
final String name;
final String code;
const Language(this.name, this.code);
}
class BBCNews extends StatefulWidget {
@override
_BBCNewsState createState() => _BBCNewsState();
}
class _BBCNewsState extends State<BBCNews> {
late SpeechRecognition _speech;
FlutterTts flutterTts = FlutterTts();
bool _speechRecognitionAvailable = false;
bool _isListening = false;
String transcription = '';
String _currentLocale = 'en_US';
//Language selectedLang = languages.first;
@override
initState() {
super.initState();
activateSpeechRecognizer();
}
void activateSpeechRecognizer() {
//print('_MyAppState.activateSpeechRecognizer... ');
_speech = SpeechRecognition();
_speech.setAvailabilityHandler(onSpeechAvailability);
_speech.setRecognitionStartedHandler(onRecognitionStarted);
_speech.setRecognitionResultHandler(onRecognitionResult);
_speech.setRecognitionCompleteHandler(onRecognitionComplete);
_speech.setErrorHandler(errorHandler);
_speech.activate('en_US').then((res) {
setState(() => _speechRecognitionAvailable = res);
});
}
var client = http.Client();
Future myDevBlog() async {
var response = await client
.get(Uri.parse('http://feeds.bbci.co.uk/news/world/rss.xml'));
var channel = RssFeed.parse(response.body);
final item = channel.items;
return item;
}
Future<void> openFeed(String url) async {
if (await canLaunch(url)) {
await launch(
url,
forceSafariVC: true,
forceWebView: false,
);
return;
}
}
rightIcon() {
return const Icon(
Icons.keyboard_arrow_right,
color: Colors.grey,
size: 30,
);
}
@override
Widget build(BuildContext context) {
var currentIndex;
return Scaffold(
appBar: AppBar(
backgroundColor: AppColors.primaryColor,
//title: const Text('VoiceNews'),
title: Image.asset(
AssetConstants.iconPath,
width: 150,
),
elevation: 10.0,
actions: [
IconButton(
onPressed: _speechRecognitionAvailable && !_isListening
? () => start()
: null,
icon: const Icon(Icons.play_arrow_rounded)),
IconButton(
onPressed: _isListening ? () => stop() : null,
icon: const Icon(Icons.stop_circle))
],
),
body: StreamBuilder(
stream: myDevBlog().asStream(),
builder: (context, snapshot) {
if (snapshot.hasData) {
return ListView.builder(
itemCount: snapshot.data.length,
itemBuilder: (context, int index) {
if (transcription.toLowerCase() == "play") {
// _isListening = false;
//stop();
print(transcription);
speak(snapshot.data[1].title);
var currentIndex = 0;
} else if (transcription.toLowerCase() == "next") {
// _isListening = false;
speak(snapshot.data[2].title);
print(transcription);
} else if (transcription.toLowerCase() == "all") {
// _isListening = false;
errorHandler();
//speak("Wrong Input");
//cancel();
}
return Padding(
padding: const EdgeInsets.all(8.0),
child: Card(
child: ListTile(
title: Text(snapshot.data[index].title),
subtitle:
Text(snapshot.data[index].description),
// onTap: () => speak(snapshot.data[index].title),
// onTap: () => openFeed(snapshot.data[index].link),
onTap: () {
Navigator.push(
context,
MaterialPageRoute(
builder: (context) =>
newsScreen(snapshot.data[index].link),
// Pass the arguments as part of the RouteSettings. The
// DetailScreen reads the arguments from these settings.
// settings: RouteSettings(
// arguments: todos[index],
// ),
),
);
print(snapshot.data[index].link);
},
onLongPress: () =>
speak(snapshot.data[index].description),
leading: Text((1 + index).toString()),
trailing: rightIcon(),
),
),
);
},
);
} else if (snapshot.hasError ||
snapshot.connectionState == ConnectionState.none) {
return Center(
child: Text(snapshot.error.toString()),
);
}
return const Center(
child: CircularProgressIndicator(),
);
},
),
);
}
// List<CheckedPopupMenuItem<Language>> get _buildLanguagesWidgets => languages
void start() => _speech.activate(_currentLocale).then((_) {
return _speech.listen().then((result) {
print('_MyAppState.start => result $result');
setState(() {
_isListening = result;
});
});
});
void cancel() =>
    _speech.cancel().then((_) => setState(() => _isListening = false));
void stop() => _speech.stop().then((_) {
setState(() => _isListening = false);
});
void onSpeechAvailability(bool result) =>
setState(() => _speechRecognitionAvailable = result);
// void onCurrentLocale(String locale) {
// // print('_MyAppState.onCurrentLocale... $locale');
// setState(
//     () => _currentLocale = languages.firstWhere((l) => l.code == locale));
// }
void onRecognitionStarted() {
setState(() => _isListening = true);
}
void onRecognitionResult(String text) {
// print('_MyAppState.onRecognitionResult... $text');
setState(() => transcription = text);
}
void onRecognitionComplete(String text) {
//print('_MyAppState.onRecognitionComplete... $text');
setState(() => _isListening = false);
}
void errorHandler() => activateSpeechRecognizer();
void speak(String text) async {
await flutterTts.speak(text);
await flutterTts.setLanguage("en-US");
await flutterTts.setPitch(1);
await flutterTts.setSpeechRate(0.6);
}
}
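One way to avoid the repeated text-to-speech call, sketched only from the flutter_speech and flutter_tts APIs already used above, is to act on the command once in the recognition callbacks rather than inside build, and to stop listening as soon as a final result arrives:
// Sketch: handle a recognized command exactly once, outside build().
// _speech, speak() and _isListening are the same members as in _BBCNewsState;
// _handleCommand is a hypothetical helper.
void onRecognitionComplete(String text) {
  setState(() => _isListening = false);
  // Stop the recognizer so it no longer feeds results, then act on the
  // final transcription a single time.
  _speech.stop().then((_) => _handleCommand(text.toLowerCase()));
}

void _handleCommand(String command) {
  if (command == 'play') {
    // Call speak(...) for the chosen headline here instead of inside
    // itemBuilder, so list rebuilds no longer re-trigger text-to-speech.
  }
}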

The flutter audio_waveforms package does not display waveforms

I hope you are well. I have a big problem with the flutter audio_waveforms package: I want to display the waveforms of the voice files in a folder, but nothing is displayed, and when I print the value of playerController.bufferData I get null. How can I deal with this problem? I have already spent days on it.
Sincerely
Below are my classes:
import 'dart:async';
import 'dart:io';
import 'package:audio_recorder/AudioWaveFormsWidget.dart';
import 'package:audio_waveforms/audio_waveforms.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:path_provider/path_provider.dart';
import 'package:permission_handler/permission_handler.dart';
import 'package:record/record.dart';
void main() {
runApp(const MyApp());
}
class MyApp extends StatelessWidget {
const MyApp({Key? key}) : super(key: key);
// This widget is the root of your application.
@override
Widget build(BuildContext context) {
return MaterialApp(
title: 'Audio Recorder',
debugShowCheckedModeBanner: false,
theme: ThemeData(
primarySwatch: Colors.blue,
),
home: const MyHomePage(title: 'Audio Recorder'),
);
}
}
class MyHomePage extends StatefulWidget {
const MyHomePage({Key? key, required this.title}) : super(key: key);
final String title;
@override
State<MyHomePage> createState() => _MyHomePageState();
}
class _MyHomePageState extends State<MyHomePage> {
//late final PlayerController playerController1;
final record = Record();
int _recordDuration = 0;
Timer? _timer;
RecordState _recordState = RecordState.stop;
StreamSubscription<RecordState>? _recordSub;
final List<File> _audioFile = [];
@override
void initState() {
super.initState();
//Listening record state
_recordSub = record.onStateChanged().listen((recordState) {
setState(() => _recordState = recordState);
});
initCheckPermission();
setState(() {
initFileInDir();
});
}
@override
void dispose() {
_timer?.cancel();
_recordSub?.cancel();
record.dispose();
super.dispose();
}
Future initCheckPermission() async {
final statusMicrophone = await Permission.microphone.request();
final statusStorage = await Permission.storage.request();
if (statusMicrophone != PermissionStatus.granted ||
statusStorage != PermissionStatus.granted) {
await Permission.microphone.request();
await Permission.storage.request();
}
}
void _startTimer() {
_timer?.cancel();
_timer = Timer.periodic(const Duration(seconds: 1), (Timer t) {
setState(() => _recordDuration++);
});
}
Widget _buildText() {
if (_recordState != RecordState.stop) {
return _buildTimer();
}
return const Text("Waiting to record");
}
String _formatNumber(int number) {
String numberStr = number.toString();
if (number < 10) {
numberStr = '0$numberStr';
}
return numberStr;
}
Widget _buildTimer() {
final String minutes = _formatNumber(_recordDuration ~/ 60);
final String seconds = _formatNumber(_recordDuration % 60);
return Text(
'$minutes : $seconds',
style: const TextStyle(color: Colors.red),
);
}
Future<Directory> _getDir() async {
//Check platform and adjust settings
final tempDir = Platform.isAndroid
? await getExternalStorageDirectory()
: await getApplicationDocumentsDirectory();
Directory finalPath;
if (Platform.isAndroid) {
String buildDir =
tempDir!.path.replaceFirst("/data/", "/media/").split("files").first;
buildDir += "AudioRecorder"; // Plus ajout du nom de l'application
return finalPath = await Directory(buildDir).create(recursive: true);
} else {
return finalPath = await Directory(tempDir!.path).create(recursive: true);
}
}
void initFileInDir() async {
Directory finalPath = await _getDir();
var exists = await finalPath.exists();
if (exists) {
finalPath.list(recursive: false).forEach((element) {
File file = File(element.path);
_audioFile.add(file);
});
}
}
Widget _buildStopAndPlayIcon() {
if (_recordState != RecordState.stop) {
return const Icon(Icons.stop);
} else {
return const Icon(Icons.mic);
}
}
Future<void> _start() async {
Directory finalPath = await _getDir();
//Start recording
await record.start(
path: '${finalPath.path}/${DateTime.now().millisecondsSinceEpoch}.m4a',
encoder: AudioEncoder.aacLc, // by default
bitRate: 128000, // by default
samplingRate: 44100);
_recordDuration = 0;
_startTimer();
}
Future<File?> _stop() async {
_timer?.cancel();
_recordDuration = 0;
final path = await record.stop();
File? file;
if (path != null) {
file = File(path);
if (kDebugMode) {
print("--------------------------$path");
}
}
return file;
}
Future<void> _pause() async {
_timer?.cancel();
await record.pause();
}
Future<void> _resume() async {
_startTimer();
await record.resume();
}
@override
Widget build(BuildContext context) {
return Scaffold(
appBar: AppBar(
title: Text(widget.title),
),
body: Padding(
padding: const EdgeInsets.all(10.0),
child: Column(
mainAxisAlignment: MainAxisAlignment.end,
children: [
LimitedBox(
maxHeight: 650,
child: ListView.builder(
shrinkWrap: true,
itemCount: _audioFile.length,
itemBuilder: (context, index) {
var data = _audioFile[index];
return AudioWaveFormsWidget(path: data);
},
),
),
const Spacer(),
Row(
mainAxisAlignment: MainAxisAlignment.spaceBetween,
children: [
_buildText(),
ElevatedButton(
onPressed: () async {
if (_recordState == RecordState.stop) {
_start();
} else {
File? newFile = await _stop();
setState((){ _audioFile.add(newFile!);});
}
},
style: ButtonStyle(
elevation: MaterialStateProperty.all(0),
shape: MaterialStateProperty.all(RoundedRectangleBorder(
borderRadius: BorderRadius.circular(100),
side: BorderSide.none))),
child: _buildStopAndPlayIcon(),
)
],
),
],
),
),
// This trailing comma makes auto-formatting nicer for build methods.
);
}
}
import 'dart:io';
import 'package:audio_waveforms/audio_waveforms.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
class AudioWaveFormsWidget extends StatefulWidget {
final File path;
//final PlayerController playerController1;
const AudioWaveFormsWidget({Key? key, required this.path}) : super(key: key);
@override
State<AudioWaveFormsWidget> createState() => _AudioWaveFormsWidgetState();
}
class _AudioWaveFormsWidgetState extends State<AudioWaveFormsWidget> {
late final PlayerController playerController1;
@override
void initState() {
playerController1 = PlayerController()
..addListener(() {
if (mounted) setState(() {});
});
_preparePlayer();
super.initState();
}
void _preparePlayer() async {
bool exist = await widget.path.exists();
if (exist) {
playerController1.preparePlayer(widget.path.path);
}
print("--------------------------------${playerController1.bufferData}");
}
void _playOrPausePlayer(PlayerController controller) async {
debugPrint(
'****************************************${controller.playerState.toString()}');
controller.playerState == PlayerState.playing
? await controller.pausePlayer()
: await controller.startPlayer(finishMode: FinishMode.pause);
}
@override
Widget build(BuildContext context) {
return Padding(
padding: const EdgeInsets.all(8.0),
child: Container(
decoration: BoxDecoration(
borderRadius: BorderRadius.circular(10.0),
shape: BoxShape.rectangle,
color: Colors.lightGreenAccent,
),
child: Row(
children: [
IconButton(
onPressed: () {
_playOrPausePlayer(playerController1);
},
icon: playerController1.playerState == PlayerState.playing
? const Icon(Icons.pause)
: const Icon(Icons.play_arrow)),
AudioFileWaveforms(
density: 1.0,
enableSeekGesture: true,
size: const Size(100.0, 70.0),
playerController: playerController1),
],
)),
);
}
}
If you're trying to record, try setting the bitRate to 48000 (this worked for me on iOS when the waveforms weren't showing while recording).
final RecorderController recorderController = RecorderController()
..androidEncoder = AndroidEncoder.aac
..androidOutputFormat = AndroidOutputFormat.mpeg4
..iosEncoder = IosEncoder.kAudioFormatMPEG4AAC
..sampleRate = 44100
..bitRate = 48000;
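On the playback side, it is also worth checking whether bufferData is read before the player has finished preparing. A sketch, assuming preparePlayer returns a Future (its use without await in the question suggests it does), using the same members as _AudioWaveFormsWidgetState above:
void _preparePlayer() async {
  final exists = await widget.path.exists();
  if (!exists) return;
  // Wait for preparation to finish before inspecting the buffer, then
  // rebuild so AudioFileWaveforms can draw the prepared data.
  await playerController1.preparePlayer(widget.path.path);
  debugPrint('buffer length: ${playerController1.bufferData?.length}');
  if (mounted) setState(() {});
}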

Flutter qr_code_scanner showing black screen instead of the scanner

I am using qr_code_scanner to scan a QR code in Flutter. The problem is that it shows a black screen instead of the scanner; I can't seem to start the QR reader. But if I send the app to the background and then come back to it, the camera suddenly starts. I can't figure out why the camera doesn't start when the app starts.
class Home extends StatefulWidget {
const Home({Key? key}) : super(key: key);
@override
State<Home> createState() => _HomeState();
}
class _HomeState extends State<Home> {
@override
Widget build(BuildContext context) {
return Scaffold(
appBar: AppBar(),
body: QrScan(),
);
}
}
class QrScan extends StatefulWidget {
const QrScan({Key? key}) : super(key: key);
@override
State<QrScan> createState() => _QrScanState();
}
class _QrScanState extends State<QrScan> {
final GlobalKey qrKey = GlobalKey(debugLabel: 'QR');
Barcode? result;
QRViewController? controller;
@override
void initState() {
super.initState();
controller?.resumeCamera();
}
// In order to get hot reload to work we need to pause the camera if the platform
// is android, or resume the camera if the platform is iOS.
@override
void reassemble() {
super.reassemble();
if (Platform.isAndroid) {
controller?.pauseCamera();
} else if (Platform.isIOS) {
controller?.resumeCamera();
}
}
@override
Widget build(BuildContext context) {
return Scaffold(
body: Column(
children: <Widget>[
Expanded(
flex: 5,
child: QRView(
key: qrKey,
onQRViewCreated: _onQRViewCreated,
overlay: QrScannerOverlayShape(
borderRadius: 10, borderWidth: 5, borderColor: Colors.white),
),
),
Expanded(
flex: 1,
child: Center(
child: (result != null)
? Text(
'Barcode Type: ${describeEnum(result!.format)} Data: ${result?.code}')
: Text('Scan a code'),
),
)
],
),
);
}
void _onQRViewCreated(QRViewController controller) {
this.controller = controller;
controller.scannedDataStream.listen((scanData) {
setState(() {
result = scanData;
});
});
}
@override
void dispose() {
controller?.dispose();
super.dispose();
}
}
Try to pause and resume the camera inside your _onQRViewCreated:
void _onQRViewCreated(QRViewController controller) {
this.controller = controller;
controller.scannedDataStream.listen((scanData) {
setState(() {
result = scanData;
});
});
controller.pauseCamera();
controller.resumeCamera();
}
Try this; it will surely solve the problem!
@override
Widget build(BuildContext context) {
if (controller != null && mounted) {
controller!.pauseCamera();
controller!.resumeCamera();
}
Use the below code if you are using the flashlight feature!
class QRScanner extends StatefulWidget {
const QRScanner({Key? key}) : super(key: key);
@override
State<StatefulWidget> createState() => _QRScannerState();
}
class _QRScannerState extends State<QRScanner> {
Barcode? result;
QRViewController? controller;
final GlobalKey qrKey = GlobalKey(debugLabel: 'QR');
bool isflash = false;
// In order to get hot reload to work we need to pause the camera if the platform
// is android, or resume the camera if the platform is iOS.
@override
void reassemble() {
super.reassemble();
if (Platform.isAndroid) {
controller!.pauseCamera();
controller!.resumeCamera();
}
controller!.resumeCamera();
}
@override
Widget build(BuildContext context) {
if (controller != null && mounted && !isflash) {
controller!.pauseCamera();
controller!.resumeCamera();
}
return Scaffold(
appBar:
appBar(context, backbutton: true, icon: true, notifications: true),
body: Stack(
children: <Widget>[
_buildQrView(context),
Column(
mainAxisAlignment: MainAxisAlignment.start,
children: <Widget>[
// if (result != null)
// Text(
// 'Barcode Type: ${describeEnum(result!.format)} Data: ${result!.code}',
// style: const TextStyle(color: Colors.white),
// )
// else
// const Text('Scan a code'),
Row(
mainAxisAlignment: MainAxisAlignment.center,
crossAxisAlignment: CrossAxisAlignment.center,
children: <Widget>[
GestureDetector(
onTap: () async {
isflash = !isflash;
await controller?.toggleFlash();
setState(() {});
},
child: Container(
margin: const EdgeInsets.all(8),
child: FutureBuilder(
future: controller?.getFlashStatus(),
builder: (context, snapshot) {
return snapshot.data == false
? const Icon(
Icons.flash_off,
color: Colors.white,
)
: const Icon(Icons.flash_on, color: Colors.white);
},
),
),
),
GestureDetector(
onTap: () async {
await controller?.flipCamera();
setState(() {});
},
child: Container(
margin: const EdgeInsets.all(8),
child: FutureBuilder(
future: controller?.getCameraInfo(),
builder: (context, snapshot) {
if (snapshot.data != null) {
return const Icon(Icons.cameraswitch_rounded,
color: Colors.white);
} else {
return const SizedBox();
}
},
),
),
)
],
),
],
)
],
),
);
}
Widget _buildQrView(BuildContext context) {
var scanArea = (Get.width < 400 || Get.height < 400) ? 230.0 : 330.0;
return QRView(
key: qrKey,
onQRViewCreated: _onQRViewCreated,
overlay: QrScannerOverlayShape(
borderColor: buttonColor,
borderRadius: 2,
borderLength: 35,
borderWidth: 6,
cutOutSize: scanArea),
onPermissionSet: (ctrl, p) => _onPermissionSet(context, ctrl, p),
);
}
void _onQRViewCreated(QRViewController controller) {
setState(() {
this.controller = controller;
});
controller.scannedDataStream.listen((scanData) {
if (scanData != null) {
setState(() {
result = scanData;
});
Get.find<QRService>().updateResult(scanData.code);
// Get.defaultDialog(
// content: Text("${scanData.code}"),
// );
Get.back();
// Get.offAll(() => const QRmainScreen());
}
});
}
void _onPermissionSet(BuildContext context, QRViewController ctrl, bool p) {
log('${DateTime.now().toIso8601String()}_onPermissionSet $p');
if (!p) {
ScaffoldMessenger.of(context).showSnackBar(
const SnackBar(content: Text('no Permission')),
);
}
}
@override
void dispose() {
controller?.dispose();
super.dispose();
}
}
Remove resumeCamera from initState:
@override
void initState() {
super.initState();
//controller?.resumeCamera();
}

ImagePicker PlatformException that occurs only on packaged build

I'm building a simple Flutter app which needs to take pictures. For that I'm using ImagePicker.
When built into an APK, an error is returned when _getImage is called:
"Pick image error.PlatformException(error,null,null)[...]"
When executed through flutter run on the same device, it works as expected.
Relevant part of the code:
class _BodyState extends State<Body> {
final ImagePicker _picker = ImagePicker();
XFile? _imageFile;
dynamic _pickImageError;
String? _retrieveDataError;
void _getImage(ImageSource source) async {
try {
final pickedFile = await _picker.pickImage(
source: source,
maxWidth: 720,
maxHeight: 1280,
imageQuality: 80,
// preferredCameraDevice: CameraDevice.rear,
);
setState(() {
_imageFile = pickedFile;
});
} catch (e) {
setState(() {
_pickImageError = e;
});
}
}
Widget _previewImages() {
final Text? retrieveError = _getRetrieveErrorWidget();
Size size = MediaQuery.of(context).size;
if (retrieveError != null) {
return retrieveError;
}
if (_imageFile != null) {
return CircleAvatar(
backgroundColor: Colors.transparent,
radius: 150,
backgroundImage: FileImage(File(_imageFile!.path)),
);
} else if (_pickImageError != null) {
return Text(
'Pick image error: $_pickImageError',
textAlign: TextAlign.center,
);
} else {
return CircleAvatar(
backgroundColor: Colors.transparent,
radius: 150,
child: Image.asset(
"assets/images/user.png",
width: size.width,
),
);
}
}
Text? _getRetrieveErrorWidget() {
if (_retrieveDataError != null) {
final Text result = Text(_retrieveDataError!);
_retrieveDataError = null;
return result;
}
return null;
}
Future<void> retrieveLostData() async {
final LostDataResponse response = await _picker.retrieveLostData();
if (response.isEmpty) {
return;
}
if (response.file != null) {
setState(() {
_imageFile = response.file;
});
} else {
_retrieveDataError = response.exception!.code;
}
}
@override
Widget build(BuildContext context) {
return Background(
child: Column(
mainAxisAlignment: MainAxisAlignment.center,
children: <Widget>[
Center(
child: FutureBuilder<void>(
future: retrieveLostData(),
builder: (BuildContext context, AsyncSnapshot<dynamic> snapshot) {
switch (snapshot.connectionState) {
case ConnectionState.none:
case ConnectionState.waiting:
return const Text(
'You have not yet picked an image.',
textAlign: TextAlign.center,
);
case ConnectionState.done:
return _previewImages();
default:
if (snapshot.hasError) {
return Text(
'Pick image/video error: ${snapshot.error}',
textAlign: TextAlign.center,
);
} else {
return const Text(
'You have not yet picked an image.',
textAlign: TextAlign.center,
);
}
}
},
)
//: _previewImages(),
),
Error log (couldn't copy it)
How it should look
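Since the exception only appears in the packaged build, logging the individual PlatformException fields can help narrow down the cause (permissions, intent resolution, file access). A sketch, not part of the original post; logPickError is a hypothetical helper that could be called from the catch (e) block in _getImage:
import 'package:flutter/foundation.dart';
import 'package:flutter/services.dart';

// Log each field of a PlatformException separately so a release-only
// failure is easier to attribute.
void logPickError(Object e) {
  if (e is PlatformException) {
    debugPrint('pickImage failed: code=${e.code} '
        'message=${e.message} details=${e.details}');
  } else {
    debugPrint('pickImage failed: $e');
  }
}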

Flutter Directory.systemTemp is not working in the release APK

I'm trying to capture an image from the camera and save it in the cache (i.e., Directory.systemTemp, available from the dart:io package). It works fine in debug mode, but when I build and install the release APK, it does not work. This is the code:
import 'dart:async';
import 'dart:io';
import 'package:camera/camera.dart';
import 'package:flutter/material.dart';
import 'package:path_provider/path_provider.dart';
class CameraExampleHome extends StatefulWidget {
@override
_CameraExampleHomeState createState() {
return new _CameraExampleHomeState();
}
}
/// Returns a suitable camera icon for [direction].
IconData getCameraLensIcon(CameraLensDirection direction) {
switch (direction) {
case CameraLensDirection.back:
return Icons.camera_rear;
case CameraLensDirection.front:
return Icons.camera_front;
case CameraLensDirection.external:
return Icons.camera;
}
throw new ArgumentError('Unknown lens direction');
}
void logError(String code, String message) =>
print('Error: $code\nError Message: $message');
class _CameraExampleHomeState extends State<CameraExampleHome> {
CameraController controller;
String imagePath;
final GlobalKey<ScaffoldState> _scaffoldKey = new GlobalKey<ScaffoldState>();
Future<List<CameraDescription>> getCameras() async {
return await availableCameras();
}
@override
Widget build(BuildContext context) {
return new Scaffold(
key: _scaffoldKey,
appBar: new AppBar(
title: const Text('Camera example'),
),
body: FutureBuilder(
future: getCameras(),
builder: (BuildContext context, AsyncSnapshot snapshot) {
switch (snapshot.connectionState) {
case ConnectionState.none:
case ConnectionState.waiting:
return new Text('loading...');
default:
if (snapshot.hasError)
return new Text('Error: ${snapshot.error}');
else {
if (snapshot.hasData)
return createCameraView(context, snapshot);
else
return new Text('Null');
}
}
},
),
);
}
/// Display the preview from the camera (or a message if the preview is not available).
Widget _cameraPreviewWidget() {
if (controller == null || !controller.value.isInitialized) {
return const Text(
'Tap a camera',
style: const TextStyle(
color: Colors.white,
fontSize: 24.0,
fontWeight: FontWeight.w900,
),
);
} else {
return new AspectRatio(
aspectRatio: controller.value.aspectRatio,
child: new CameraPreview(controller),
);
}
}
/// Display the thumbnail of the captured image or video
/// Display the control bar with buttons to take pictures and record videos.
Widget _captureControlRowWidget() {
return new Row(
mainAxisAlignment: MainAxisAlignment.spaceEvenly,
mainAxisSize: MainAxisSize.max,
children: <Widget>[
new IconButton(
icon: const Icon(Icons.camera_alt),
color: Colors.blue,
onPressed: controller != null &&
controller.value.isInitialized &&
!controller.value.isRecordingVideo
? onTakePictureButtonPressed
: null,
),
],
);
}
/// Display a row of toggle to select the camera (or a message if no camera is available).
Widget _cameraTogglesRowWidget(var cameras) {
final List<Widget> toggles = <Widget>[];
if (cameras.isEmpty) {
return const Text('No camera found');
} else {
for (CameraDescription cameraDescription in cameras) {
toggles.add(
new SizedBox(
width: 90.0,
child: new RadioListTile<CameraDescription>(
title:
new Icon(getCameraLensIcon(cameraDescription.lensDirection)),
groupValue: controller?.description,
value: cameraDescription,
onChanged: controller != null && controller.value.isRecordingVideo
? null
: onNewCameraSelected,
),
),
);
}
}
return new Row(children: toggles);
}
String timestamp() => new DateTime.now().millisecondsSinceEpoch.toString();
void showInSnackBar(String message) {
_scaffoldKey.currentState
.showSnackBar(new SnackBar(content: new Text(message)));
}
void onNewCameraSelected(CameraDescription cameraDescription) async {
if (controller != null) {
await controller.dispose();
}
controller = new CameraController(cameraDescription, ResolutionPreset.high);
// If the controller is updated then update the UI.
controller.addListener(() {
if (mounted) setState(() {});
if (controller.value.hasError) {
showInSnackBar('Camera error ${controller.value.errorDescription}');
}
});
try {
await controller.initialize();
} on CameraException catch (e) {
_showCameraException(e);
}
if (mounted) {
setState(() {});
}
}
void onTakePictureButtonPressed() {
takePicture().then((String filePath) {
if (mounted) {
setState(() {
imagePath = filePath;
// videoController?.dispose();
// videoController = null;
});
if (filePath != null) showInSnackBar('Picture saved to $filePath');
}
});
}
Future<String> takePicture() async {
if (!controller.value.isInitialized) {
showInSnackBar('Error: select a camera first.');
return null;
}
//final Directory extDir = await getApplicationDocumentsDirectory();
final Directory extDir = Directory.systemTemp;
final String dirPath = '${extDir.path}/Pictures/flutter_test';
await new Directory(dirPath).create(recursive: true);
final String filePath = '$dirPath/${timestamp()}.jpg';
if (controller.value.isTakingPicture) {
// A capture is already pending, do nothing.
return null;
}
try {
await controller.takePicture(filePath);
} on CameraException catch (e) {
_showCameraException(e);
return null;
}
return filePath;
}
void _showCameraException(CameraException e) {
logError(e.code, e.description);
showInSnackBar('Error: ${e.code}\n${e.description}');
}
Widget createCameraView(BuildContext context, AsyncSnapshot snapshot) {
return new Column(
children: <Widget>[
new Expanded(
child: new Container(
width: MediaQuery.of(context).size.width,
child: _cameraPreviewWidget(),
),
),
_captureControlRowWidget(),
new Padding(
padding: const EdgeInsets.all(5.0),
child: new Row(
mainAxisAlignment: MainAxisAlignment.spaceEvenly,
children: <Widget>[
_cameraTogglesRowWidget(snapshot.data),
//_thumbnailWidget(),
],
),
),
],
);
}
}
class CameraApp extends StatelessWidget {
@override
Widget build(BuildContext context) {
return new MaterialApp(
home: new CameraExampleHome(),
);
}
}
List<CameraDescription> cameras;
void main() {
// Fetch the available cameras before initializing the app.
runApp(new CameraApp());
}
I've tried setting the directory to await getApplicationDocumentsDirectory() like this:
final Directory extDir = await getApplicationDocumentsDirectory();
//final Directory extDir = Directory.systemTemp;
final String dirPath = '${extDir.path}/Pictures/flutter_test';
await new Directory(dirPath).create(recursive: true);
final String filePath = '$dirPath/${timestamp()}.jpg';
Then the pictures are saved in both debug mode and the release APK, but they are lost once I relaunch the app. I want them to be accessible even after the app is relaunched (that is the main reason why I want to use the cache).
So what is going wrong when I try to save to the cache in the release build of the app?
Use the path_provider package instead to get the temp path: https://pub.dartlang.org/packages/path_provider
Directory tempDir = await getTemporaryDirectory();
String tempPath = tempDir.path;
Directory.systemTemp is only supposed to be used on the server/console.
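Applied to the takePicture method above, that would look roughly like this (a sketch; note that the temporary directory can still be cleared by the OS, so for pictures that must survive relaunches the getApplicationDocumentsDirectory variant already shown is the persistent choice):
// Build the capture path from path_provider's temporary directory
// instead of Directory.systemTemp.
final Directory extDir = await getTemporaryDirectory();
final String dirPath = '${extDir.path}/Pictures/flutter_test';
await new Directory(dirPath).create(recursive: true);
final String filePath = '$dirPath/${timestamp()}.jpg';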
