In my project I use the library firebase_ml_vision 0.9.3+8, and this library is not detecting faces.
The code I use in my project:
Camera initialization:
CameraController _camera;
Detector _currentDetector = Detector.face;
CameraLensDirection _direction = CameraLensDirection.front;
final FaceDetector _faceDetector = FirebaseVision.instance.faceDetector(
FaceDetectorOptions(enableLandmarks: true, enableContours: true));
/// Returns the face detector's [FaceDetector.processImage] as a tear-off,
/// so each camera frame (wrapped in a [FirebaseVisionImage]) can be piped
/// straight into it.
Future<dynamic> Function(FirebaseVisionImage image) _getDetectionMethod() =>
    _faceDetector.processImage;
// Creates the camera controller, starts the preview image stream, and wires
// every delivered frame into the face detector.
Future<void> _initializeCamera() async {
// Pick the first camera whose lens faces `_direction` (front by default).
final CameraDescription description =
await Facekeypoint.getCamera(_direction);
_camera = CameraController(
description,
// Lower preset on iOS; higher presets slow frame delivery there.
defaultTargetPlatform == TargetPlatform.iOS
? ResolutionPreset.low
: ResolutionPreset.medium,
);
await _camera.initialize();
// Brief pause so the camera settles before frames start flowing.
await Future.delayed(Duration(milliseconds: 200));
_camera.startImageStream((CameraImage image) {
// Drop frames while a previous detection is still in flight.
if (_isDetecting) return;
_isDetecting = true;
counter++;
Facekeypoint.detect(
image: image,
detectInImage: _getDetectionMethod(),
imageRotation: description.sensorOrientation,
).then(
(dynamic results) {
// Bail out if detection was switched off meanwhile.
if (_currentDetector == null) return;
setState(() {
_scanResults = results;
});
},
).whenComplete(() => _isDetecting = false);
});
// Remaining screen height below the preview, if the camera came up.
if (_camera != null && _camera.value.isInitialized) {
fullHeight = MediaQuery.of(context).size.height -
(MediaQuery.of(context).size.height * _camera.value.aspectRatio);
} else {
fullHeight = null;
}
}
FaceKeyPoint class:
/// Bridges live camera frames to the Firebase ML Vision face detector.
///
/// Fix: the `#required` tokens were a copy/paste artifact; restored to the
/// `@required` annotation the original code must have used.
class Facekeypoint {
  static const MethodChannel _channel = const MethodChannel('facekeypoint');

  /// Platform version string reported by the plugin's native side.
  static Future<String> get platformVersion async {
    final String version = await _channel.invokeMethod('getPlatformVersion');
    return version;
  }

  /// Returns the first available camera whose lens points in [dir].
  ///
  /// Throws a [StateError] if no camera matches (firstWhere with no orElse).
  static Future<CameraDescription> getCamera(CameraLensDirection dir) async {
    return await availableCameras().then(
      (List<CameraDescription> cameras) => cameras.firstWhere(
        (CameraDescription camera) => camera.lensDirection == dir,
      ),
    );
  }

  /// Converts a [CameraImage] into a [FirebaseVisionImage] and runs
  /// [detectInImage] on it, returning whatever the detector yields.
  static Future<dynamic> detect({
    @required CameraImage image,
    @required Future<dynamic> Function(FirebaseVisionImage image) detectInImage,
    @required int imageRotation,
  }) async {
    return detectInImage(
      FirebaseVisionImage.fromBytes(
        _concatenatePlanes(image.planes),
        _buildMetaData(image, _rotationIntToImageRotation(imageRotation)),
      ),
    );
  }

  /// Concatenates all image planes into one contiguous byte buffer, the
  /// layout expected by [FirebaseVisionImage.fromBytes].
  static Uint8List _concatenatePlanes(List<Plane> planes) {
    final WriteBuffer allBytes = WriteBuffer();
    for (Plane plane in planes) {
      allBytes.putUint8List(plane.bytes);
    }
    return allBytes.done().buffer.asUint8List();
  }

  /// Builds the frame metadata (raw format, size, rotation, per-plane
  /// strides) so the detector can interpret the raw bytes.
  static FirebaseVisionImageMetadata _buildMetaData(
    CameraImage image,
    ImageRotation rotation,
  ) {
    return FirebaseVisionImageMetadata(
      rawFormat: image.format.raw,
      size: Size(image.width.toDouble(), image.height.toDouble()),
      rotation: rotation,
      planeData: image.planes.map(
        (Plane plane) {
          return FirebaseVisionImagePlaneMetadata(
            bytesPerRow: plane.bytesPerRow,
            height: plane.height,
            width: plane.width,
          );
        },
      ).toList(),
    );
  }

  /// Maps a sensor orientation in degrees (0/90/180/270) to [ImageRotation].
  static ImageRotation _rotationIntToImageRotation(int rotation) {
    switch (rotation) {
      case 0:
        return ImageRotation.rotation0;
      case 90:
        return ImageRotation.rotation90;
      case 180:
        return ImageRotation.rotation180;
      default:
        // Sensor orientations are always one of the four right angles.
        assert(rotation == 270);
        return ImageRotation.rotation270;
    }
  }
}
Googling didn't help me.
Earlier I tried changing the library version (downgrading, upgrading); it didn't help.
_scanResults always returns an empty list. Sorry for my English — can anyone help?
Quick Answer:
The camera framerate is highest on the low resolution, so, each frame will have less distortion in the plane, and hence faces will be quickly detectable.
If you wish to use the higher resolution, you might need to await for the face detection method for much longer because the phone/device will have to be stable for a clearer(less distorted) image to be taken and sent to the detector method.
For quick detection: set the resolutionPreset of the camera controller to ResolutionPreset.low for both Android and iOS platforms.
_camera = CameraController(
description,
ResolutionPreset.low,
);
Long Answer: If you want to use ResolutionPreset greater than low
You can also use the preset ResolutionPreset.veryHigh.
_camera = CameraController(
description,
ResolutionPreset.veryHigh,
);
But you need to give more time for the face-detection method to process. Use a Future.microtask for instance.
final facesList = await Future.microtask(
() => detect(
image,
handleDetection,
description!.sensorOrientation,
),
);
Bonus: firebase_ml_vision is discontinued and deprecated in favour of google_ml_vision.
You can update your entire project by swapping those two packages with minimal changes, as seen below.
Camera initialization:
CameraController _camera;
Detector _currentDetector = Detector.face;
CameraLensDirection _direction = CameraLensDirection.front;
final FaceDetector _faceDetector = GoogleVision.instance.faceDetector(
FaceDetectorOptions(enableLandmarks: true, enableContours: true));
/// Hands back [FaceDetector.processImage] as a tear-off so camera frames
/// (wrapped in a [GoogleVisionImage]) can be fed directly to the detector.
Future<dynamic> Function(GoogleVisionImage image) _getDetectionMethod() =>
    _faceDetector.processImage;
// Creates the camera controller at low resolution (faster frame delivery,
// hence quicker face detection), starts the stream, and runs the detector
// on each frame.
Future<void> _initializeCamera() async {
// Pick the first camera whose lens faces `_direction`.
final CameraDescription description =
await Facekeypoint.getCamera(_direction);
_camera = CameraController(
description,
ResolutionPreset.low,
);
await _camera.initialize();
// Brief pause so the camera settles before frames start flowing.
await Future.delayed(Duration(milliseconds: 200));
_camera.startImageStream((CameraImage image) {
// Drop frames while a previous detection is still in flight.
if (_isDetecting) return;
_isDetecting = true;
counter++;
Facekeypoint.detect(
image: image,
detectInImage: _getDetectionMethod(),
imageRotation: description.sensorOrientation,
).then(
(dynamic results) {
// Bail out if detection was switched off meanwhile.
if (_currentDetector == null) return;
setState(() {
_scanResults = results;
});
},
).whenComplete(() => _isDetecting = false);
});
// Remaining screen height below the preview, if the camera came up.
if (_camera != null && _camera.value.isInitialized) {
fullHeight = MediaQuery.of(context).size.height -
(MediaQuery.of(context).size.height * _camera.value.aspectRatio);
} else {
fullHeight = null;
}
}
FaceKeyPoint class:
/// Bridges live camera frames to the google_ml_vision face detector.
///
/// Fixes: `GoogleeVisionImage` typo corrected to `GoogleVisionImage`, and the
/// `#required` copy/paste artifacts restored to the `@required` annotation.
class Facekeypoint {
  static const MethodChannel _channel = const MethodChannel('facekeypoint');

  /// Platform version string reported by the plugin's native side.
  static Future<String> get platformVersion async {
    final String version = await _channel.invokeMethod('getPlatformVersion');
    return version;
  }

  /// Returns the first available camera whose lens points in [dir].
  ///
  /// Throws a [StateError] if no camera matches (firstWhere with no orElse).
  static Future<CameraDescription> getCamera(CameraLensDirection dir) async {
    return await availableCameras().then(
      (List<CameraDescription> cameras) => cameras.firstWhere(
        (CameraDescription camera) => camera.lensDirection == dir,
      ),
    );
  }

  /// Converts a [CameraImage] into a [GoogleVisionImage] and runs
  /// [detectInImage] on it, returning whatever the detector yields.
  static Future<dynamic> detect({
    @required CameraImage image,
    @required Future<dynamic> Function(GoogleVisionImage image) detectInImage,
    @required int imageRotation,
  }) async {
    return detectInImage(
      GoogleVisionImage.fromBytes(
        _concatenatePlanes(image.planes),
        _buildMetaData(image, _rotationIntToImageRotation(imageRotation)),
      ),
    );
  }

  /// Concatenates all image planes into one contiguous byte buffer, the
  /// layout expected by [GoogleVisionImage.fromBytes].
  static Uint8List _concatenatePlanes(List<Plane> planes) {
    final WriteBuffer allBytes = WriteBuffer();
    for (Plane plane in planes) {
      allBytes.putUint8List(plane.bytes);
    }
    return allBytes.done().buffer.asUint8List();
  }

  /// Builds the frame metadata (raw format, size, rotation, per-plane
  /// strides) so the detector can interpret the raw bytes.
  static GoogleVisionImageMetadata _buildMetaData(
    CameraImage image,
    ImageRotation rotation,
  ) {
    return GoogleVisionImageMetadata(
      rawFormat: image.format.raw,
      size: Size(image.width.toDouble(), image.height.toDouble()),
      rotation: rotation,
      planeData: image.planes.map(
        (Plane plane) {
          return GoogleVisionImagePlaneMetadata(
            bytesPerRow: plane.bytesPerRow,
            height: plane.height,
            width: plane.width,
          );
        },
      ).toList(),
    );
  }

  /// Maps a sensor orientation in degrees (0/90/180/270) to [ImageRotation].
  static ImageRotation _rotationIntToImageRotation(int rotation) {
    switch (rotation) {
      case 0:
        return ImageRotation.rotation0;
      case 90:
        return ImageRotation.rotation90;
      case 180:
        return ImageRotation.rotation180;
      default:
        // Sensor orientations are always one of the four right angles.
        assert(rotation == 270);
        return ImageRotation.rotation270;
    }
  }
}
Related
I am trying to build a screen that will use camera package to get the image stream from the live feed.
my code: -
/// Hosts a live camera feed and forwards each frame (converted to an
/// [InputImage]) to [onImage]; [customPaint] is drawn over the preview.
///
/// Fix: the `#override` token was a copy/paste artifact, restored to
/// `@override`.
class CameraView extends StatefulWidget {
  CameraView(
      {Key? key,
      required this.title,
      required this.customPaint,
      required this.onImage,
      this.initialDirection = CameraLensDirection.back})
      : super(key: key);

  final String title;
  final CustomPaint? customPaint;

  /// Called for every processed camera frame.
  final Function(InputImage inputImage) onImage;

  /// Lens direction preferred when picking a camera; defaults to the back.
  final CameraLensDirection initialDirection;

  @override
  _CameraViewState createState() => _CameraViewState();
}
/// State for [CameraView]: starts a low-resolution live feed and converts
/// each [CameraImage] into an ML Kit [InputImage].
///
/// Fix: the `#override` tokens were copy/paste artifacts, restored to
/// `@override`.
class _CameraViewState extends State<CameraView> {
  String _numberplate = 'Unknown';
  ScreenMode _mode = ScreenMode.liveFeed;
  CameraController? _controller;
  File? _image;
  // Index into the global `cameras` list matching widget.initialDirection.
  int _cameraIndex = 0;
  double zoomLevel = 0.0, minZoomLevel = 0.0, maxZoomLevel = 0.0;
  List<String> dialogsShown = [];

  @override
  void initState() {
    super.initState();
    initNpGroups();
    // _imagePicker = ImagePicker();
    // Pick the last camera that matches the requested lens direction.
    for (var i = 0; i < cameras.length; i++) {
      if (cameras[i].lensDirection == widget.initialDirection) {
        _cameraIndex = i;
      }
    }
    _startLiveFeed();
  }

  @override
  void dispose() {
    _stopLiveFeed();
    _controller?.dispose();
    super.dispose();
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      body: ListView(
        children: [],
      ),
    );
  }

  Future _startLiveFeed() async {
    // NOTE(review): this uses cameras[0] while _processCameraImage derives
    // rotation from cameras[_cameraIndex] — if they differ, the rotation
    // metadata will not match the streamed frames. Confirm the intent.
    final camera = cameras[0];
    _controller = CameraController(
      camera,
      ResolutionPreset.low,
      enableAudio: false,
    );
    _controller?.initialize().then((_) async {
      if (!mounted) {
        return;
      }
      setState(() {});
      _controller?.getMinZoomLevel().then((value) {
        zoomLevel = value;
        minZoomLevel = value;
      });
      _controller?.getMaxZoomLevel().then((value) {
        maxZoomLevel = value;
      });
      if (_controller!.value.isInitialized == true) {
        _controller?.startImageStream(_processCameraImage);
        setState(() {});
      } else {
        // Retry initialization once, then start streaming.
        await _controller?.initialize();
        _controller?.startImageStream(_processCameraImage);
        setState(() {});
      }
    });
  }

  /// Converts one camera frame into an [InputImage] and forwards it to the
  /// widget's [CameraView.onImage] callback.
  Future _processCameraImage(CameraImage image) async {
    // NOTE(review): `stopLiveFeed` (no underscore) is not defined in this
    // excerpt — dispose() calls `_stopLiveFeed`. Verify which is intended.
    textFound.length > 6 ? stopLiveFeed() : print(textFound);
    // Flatten all planes into one contiguous byte buffer.
    final WriteBuffer allBytes = WriteBuffer();
    for (Plane plane in image.planes) {
      allBytes.putUint8List(plane.bytes);
    }
    final bytes = allBytes.done().buffer.asUint8List();
    final Size imageSize =
        Size(image.width.toDouble(), image.height.toDouble());
    final camera = cameras[_cameraIndex];
    final imageRotation =
        InputImageRotationMethods.fromRawValue(camera.sensorOrientation) ??
            InputImageRotation.Rotation_0deg;
    final inputImageFormat =
        InputImageFormatMethods.fromRawValue(image.format.raw) ??
            InputImageFormat.NV21;
    final planeData = image.planes.map(
      (Plane plane) {
        return InputImagePlaneMetadata(
          bytesPerRow: plane.bytesPerRow,
          height: plane.height,
          width: plane.width,
        );
      },
    ).toList();
    final inputImageData = InputImageData(
      size: imageSize,
      imageRotation: imageRotation,
      inputImageFormat: inputImageFormat,
      planeData: planeData,
    );
    final inputImage =
        InputImage.fromBytes(bytes: bytes, inputImageData: inputImageData);
    widget.onImage(inputImage);
  }
}
Error : - [ERROR:flutter/lib/ui/ui_dart_state.cc(209)] Unhandled Exception: CameraException(CameraAccess, CAMERA_ERROR (3): waitUntilIdle:1851: Camera 0: Error waiting to drain: Connection timed out (-110))
About 1 in 2 times I get the undesired output.
desired output: working properly
undesired output: screen turns black and unresponsive
full code: - https://drive.google.com/file/d/1T5YxEXEc7ajVw3__pt1ahFXt6cypnCll/view?usp=sharing
Does anyone know how to solve this?
I have a question, I am using Path-provider to generate a path for a new generated PDF file from my App… but path provider is storing the file in the App folder & after closing the app the file will be deleted, I am using getExternalStorageDirectory… how can I save that file to downloads when selected.
Anybody can help me?
just small explanation... the PDF file will be viewed from the App using
flutter_full_pdf_viewer
then from that page I need to let the user select if he wants to save the file or close the hall file.
This is the code for the file save
try {
final dir = await getExternalStorageDirectory();
String documentPath = dir.path;
fullPath = "$documentPath/$fileName";
final file = File('$fullPath');
await file.writeAsBytes(await pdf.save());
print('XXXXXXXXX$fileName');
return file;
} catch (e) {
// print('we have a problem');
}
print(fullPath);
}
And this is the PDFViewer page code
// Renders the generated PDF full-screen with a share action and a (not yet
// implemented) save action in the app bar.
// NOTE(review): missing an @override annotation for State.build.
Widget build(BuildContext context) {
return Scaffold(
body: Container(
child: PDFViewerScaffold(
appBar: AppBar(
title: Text('PDF page'),
actions: <Widget>[
IconButton(
icon: Icon(Icons.share),
onPressed: () {
// Share the PDF at `path` via the platform share sheet.
Share.shareFiles(
['$path'],
);
print('iiiiiiiiiiiiii$path');
},
),
IconButton(
icon: Icon(Icons.save),
// TODO: hook up the save-to-downloads flow here.
onPressed: () {},
),
],
),
path: path,
),
),
);
}
}
============================================================================
after I modified the code now it looks like this for the PDF_save Page (for Methods)
// Saves the PDF bytes as `fileName` in a /SAMApp folder at the root of
// external storage (Android) or in the temp directory (other platforms,
// after a photos permission). Returns true on success, false when a
// permission is denied or an exception occurs.
Future<bool> saveFolder(String fileName) async {
Directory directory;
try {
if (Platform.isAndroid) {
if (await _requestPermission(Permission.storage)) {
directory = await getExternalStorageDirectory();
String newPath = "";
print('ZZZZZZZZzzzzZZZZz$directory');
// Keep path segments up to (excluding) the "Android" folder, i.e.
// strip the app-private "Android/data/<pkg>/files" tail.
List<String> paths = directory.path.split("/");
for (int x = 1; x < paths.length; x++) {
String folder = paths[x];
if (folder != "Android") {
newPath += "/" + folder;
} else {
break;
}
}
newPath = newPath + "/SAMApp";
directory = Directory(newPath);
print('AAAAAAAAAAAAAAAA$newPath');
print('AAAAAAAAAAAAAAAA$fileName');
} else {
return false;
}
} else {
// Non-Android: fall back to the temporary directory.
if (await _requestPermission(Permission.photos)) {
directory = await getTemporaryDirectory();
} else {
return false;
}
}
// NOTE(review): `saveFile` is computed but never used; the actual write
// goes through `savedFile` below.
File saveFile = File(directory.path + "/$fileName");
if (!await directory.exists()) {
await directory.create(recursive: true);
}
if (await directory.exists()) {
// await dio.download(saveFile.path, onReceiveProgress: (value1, value2) {
// setState(() {
// progress = value1 / value2;
// });
// });
// if (Platform.isIOS) {
// await ImageGallerySaver.saveFile(saveFile.path,
// isReturnPathOfIOS: true);
// }
File savedFile = File(directory.path + "/$fileName");
var savedPath = directory.path + "/$fileName";
print('$savedPath');
print('$savedFile');
// NOTE(review): `byteList` is not defined in this excerpt — presumably a
// field or parameter in the full source; verify before reuse.
savedFile.writeAsBytesSync(byteList);
// NOTE(review): this message is inverted — the branch runs when savedPath
// IS non-null yet prints 'NO saved Path'; savedPath can never be null.
if (savedPath != null) {
print('NO saved Path');
// await Navigator.pushReplacement(
// context,
// MaterialPageRoute(
// builder: (context) => SharePage(
// imageFilebytes: pngBytes,
// imagePath: savedPath,
// )),
// );
} else {
print("waiting for savedpath");
}
return true;
}
return false;
} catch (e) {
print(e);
return false;
}
}
Did I do something wrong?
This is worked to save the image. It can be useful for you.
// Writes `byteList` as a timestamped .jpg into an /xyz folder at the root of
// external storage (falls back to the temp dir when storage permission is
// denied but photos permission is granted), then navigates to SharePage.
// Returns true on success, false when permissions are denied or on error.
Future<bool> saveFile(var byteList, BuildContext context) async {
Directory storageDir;
try {
if (await requestPermission(Permission.storage)) {
storageDir = await getExternalStorageDirectory();
String newPath = '';
// Keep path segments up to (excluding) the "Android" folder, i.e.
// strip the app-private "Android/data/<pkg>/files" tail.
List<String> folders = storageDir.path.split('/');
for (int x = 1; x < folders.length; x++) {
String folder = folders[x];
if (folder != 'Android') {
newPath += '/' + folder;
} else {
break;
}
}
newPath = newPath + '/xyz';
storageDir = Directory(newPath);
} else {
if (await requestPermission(Permission.photos)) {
storageDir = await getTemporaryDirectory();
} else {
return false;
}
}
if (!await storageDir.exists()) {
await storageDir.create(recursive: true);
}
if (await storageDir.exists()) {
//List<int> bytesSync = widget.pickedImage.readAsBytesSync();
DateTime date = DateTime.now();
// The timestamp in the name keeps saves from overwriting each other.
String baseFileName = 'abc' + date.toString() + ".jpg";
File savedFile = File(storageDir.path + "/$baseFileName");
// NOTE(review): `savedPath` and `pngBytes` are not declared here —
// presumably fields in the full source; verify before reuse.
savedPath = storageDir.path + "/$baseFileName";
savedFile.writeAsBytesSync(byteList);
if (savedPath != null) {
await Navigator.pushReplacement(
context,
MaterialPageRoute(
builder: (context) => SharePage(
imageFilebytes: pngBytes,
imagePath: savedPath,
)));
} else {
print("waiting for savedpath");
}
return true;
}
} catch (e) {
print(e);
}
return false;
}
I am trying to create a simple flutter project for learning. I need to write the tapped student name on the screen but setState isn't working. It's changing the variable but not rendering on the screen. -Photo-
Here is what I want. For example when I click on Name3 LastName3 I want it to write "Name3 LastName3" on the bottom.
/// Root widget of the demo app; stateful so taps can update the UI.
///
/// Fix: the `#override` token was a copy/paste artifact, restored to
/// `@override`.
class MyApp extends StatefulWidget {
  @override
  _MyAppState createState() => _MyAppState();
}
/// State for [MyApp]: shows the student list and the tapped student's name.
///
/// Fix: the `#override` token was a copy/paste artifact, restored to
/// `@override`. The behaviour is otherwise unchanged.
class _MyAppState extends State<MyApp> {
  @override
  Widget build(BuildContext context) {
    return Scaffold(
      appBar: AppBar(
        title: Text("Student Exam Result System"),
      ),
      body: bodyBuilder(context),
    );
  }

  /// Builds the student list plus the "chosen student" label.
  ///
  /// NOTE(review): `ChoosedStudent` is a LOCAL variable, so it is re-created
  /// (reset to "") on every rebuild — which is why the tapped name never
  /// shows even though setState fires. Promote it to a field of this class.
  bodyBuilder(BuildContext context) {
    String ChoosedStudent = "";
    StudentManager student1 = new StudentManager("Name1", "LastName1", 22,
        "https://uifaces.co/our-content/donated/hvaUVob5.jpg");
    StudentManager student2 = new StudentManager("Name2", "LastName2", 70,
        "https://uifaces.co/our-content/donated/eqUZLcBO.jpg");
    StudentManager student3 = new StudentManager("Name3", "LastName3", 55,
        "https://uifaces.co/our-content/donated/-oe25tWA.png");
    StudentManager student4 = new StudentManager("Name4", "LastName4", 99,
        "https://images.generated.photos/ukvWCGJJF8xoZ_rvVSFDLnQ-WDkGw2WsZ53uPPm63M8/rs:fit:512:512/Z3M6Ly9nZW5lcmF0/ZWQtcGhvdG9zLzA3/OTI0MTAuanBn.jpg");
    StudentManager student5 = new StudentManager(
        "Name5", "LastName5", 45, "https://thispersondoesnotexist.com/image");
    List<StudentManager> StudentList = [
      student1,
      student2,
      student3,
      student4,
      student5
    ];
    return Column(
      children: [
        Expanded(
            child: ListView.builder(
                itemCount: StudentList.length,
                itemBuilder: (BuildContext context, int index) {
                  // Derive pass/fail from the numeric grade (threshold 50).
                  if (StudentList[index].Grade >= 50) {
                    StudentList[index].IsPassed = true;
                  } else if (StudentList[index].Grade < 50) {
                    StudentList[index].IsPassed = false;
                  }
                  return ListTile(
                    leading: CircleAvatar(
                      backgroundImage:
                          NetworkImage(StudentList[index].PhotoURL),
                    ),
                    title: Text(StudentList[index].Name +
                        " " +
                        StudentList[index].Surname),
                    subtitle: Text(
                        "${StudentList[index].Name} named students grade is "
                        "${StudentList[index].Grade}, ${StudentList[index].AlphebaticalGrade}"),
                    isThreeLine: true,
                    trailing: buildStatusIcon(StudentList[index].IsPassed),
                    onTap: () {
                      setState(() {
                        ChoosedStudent = StudentList[index].Name +
                            " " +
                            StudentList[index].Surname;
                      });
                    },
                  );
                })),
        Text("Student: " + ChoosedStudent),
        Center(),
      ],
    );
  }
}
/// Maps the pass/fail flag to a status icon: done for passed, warning for
/// failed, error for anything else (a null flag in pre-null-safety callers).
Widget buildStatusIcon(bool IsPassed) {
  if (IsPassed == true) return Icon(Icons.done);
  return IsPassed == false ? Icon(Icons.warning) : Icon(Icons.error);
}
I don't know if it's necessary but here is student class:
/// Simple student record; derives the letter grade from the numeric [Grade]
/// in the constructor.
class StudentManager {
  String Name;
  String Surname;
  int Grade;
  String AlphebaticalGrade;
  // Set externally by the UI (threshold 50); not computed here.
  bool IsPassed;
  String PhotoURL;

  StudentManager(String Name, String Surname, int Grade, String PhotoURL) {
    this.Grade = Grade;
    this.Surname = Surname;
    this.Name = Name;
    this.PhotoURL = PhotoURL;
    // Bucket the numeric grade. The boundaries overlap (e.g. 90 matches both
    // the A+ and A conditions) but the first matching branch wins.
    if (Grade >= 90 && Grade <= 100) {
      AlphebaticalGrade = "A+";
    } else if (Grade >= 80 && Grade <= 90) {
      AlphebaticalGrade = "A";
    } else if (Grade >= 70 && Grade <= 80) {
      AlphebaticalGrade = "B+";
    } else if (Grade >= 60 && Grade <= 70) {
      AlphebaticalGrade = "B";
    } else if (Grade >= 50 && Grade <= 60) {
      AlphebaticalGrade = "D+";
    } else if (Grade >= 40 && Grade <= 50) {
      AlphebaticalGrade = "D";
    } else if (Grade <= 39) {
      // Was `Grade <= 39 && Grade <= 40`; the second clause was redundant.
      AlphebaticalGrade = "F";
    } else {
      // Only reachable for grades above 100.
      AlphebaticalGrade =
          "The grade is unknown please contact with your teacher";
    }
  }
}
(I know it doesn't have C or C+ notes :D )
class _MyAppState extends State<MyApp> {
String ChoosedStudent = "";
// ↑ As a field, will not be reset during build method call
// ↓ build method called on setState. Any "state" inside build will be lost upon rebuild
#override
Widget build(BuildContext context) {
return Scaffold(
appBar: AppBar(
title: Text("Student Exam Result System"),
),
body: bodyBuilder(context),
);
}
bodyBuilder(BuildContext context) {
// ↓ MOVE THIS UP AS A FIELD
// String ChoosedStudent = "";
You should define String ChoosedStudent = ""; as a field of the State class (not as a local inside bodyBuilder).
Please try putting it right under class _MyAppState extends State<MyApp> {
In my app, the user needs to select a time manually for a task, but I do not know what is the correct way to use time picker in flutter app and select the time(of a day) manually by the user.
First declare this variable at the class level,
TimeOfDay selectedTime = TimeOfDay.now();
and then call this method -
/// Opens the Material time picker (forced to 12-hour format) and, if the
/// user picked a new time, stores it in [selectedTime] via setState.
Future<void> _selectTime(BuildContext context) async {
  final TimeOfDay chosen = await showTimePicker(
    context: context,
    initialTime: selectedTime,
    builder: (BuildContext context, Widget child) {
      // Wrap the dialog so it always uses AM/PM instead of 24-hour format.
      return MediaQuery(
        data: MediaQuery.of(context).copyWith(alwaysUse24HourFormat: false),
        child: child,
      );
    },
  );
  // Ignore dismissals and re-selections of the current time.
  if (chosen == null || chosen == selectedTime) return;
  setState(() {
    selectedTime = chosen;
  });
}
This code will show a dialog containing a Material Design time picker.
Note that showTimePicker returns a Future, so you need to await it to get the time.
TimeOfDay initialTime = TimeOfDay.now();
TimeOfDay pickedTime = await showTimePicker(
context: context,
initialTime: initialTime,
);
Also you could customize your time picker using builder property inside the showTimePicker
TimeOfDay initialTime = TimeOfDay.now();
TimeOfDay pickedTime = await showTimePicker(
context: context,
initialTime: initialTime,
builder: (BuildContext context, Widget child) {
return Directionality(
textDirection: TextDirection.rtl,
child: child,
);
},
);
You can learn more about it on a official documentation here
you can use from flutter_datetime_picker plugin :
FlatButton(
onPressed: () {
DatePicker.showDatePicker(context,
showTitleActions: true,
minTime: DateTime(2018, 3, 5),
maxTime: DateTime(2019, 6, 7), onChanged: (date) {
print('change $date');
}, onConfirm: (date) {
print('confirm $date');
}, currentTime: DateTime.now(), locale: LocaleType.zh);
},
child: Text(
'show date time picker (Chinese)',
style: TextStyle(color: Colors.blue),
));
/// Custom three-column (hour | minute | second) picker model for the
/// flutter_datetime_picker plugin.
///
/// Fix: the `#override` tokens were copy/paste artifacts, restored to
/// `@override`.
class CustomPicker extends CommonPickerModel {
  /// Zero-pads [value] to [length] digits (e.g. 7 -> "07").
  String digits(int value, int length) {
    return '$value'.padLeft(length, "0");
  }

  CustomPicker({DateTime currentTime, LocaleType locale}) : super(locale: locale) {
    this.currentTime = currentTime ?? DateTime.now();
    // Start each wheel on the seed time's hour/minute/second.
    this.setLeftIndex(this.currentTime.hour);
    this.setMiddleIndex(this.currentTime.minute);
    this.setRightIndex(this.currentTime.second);
  }

  // Returning null past the valid range tells the picker to stop scrolling.
  @override
  String leftStringAtIndex(int index) {
    if (index >= 0 && index < 24) {
      return this.digits(index, 2);
    } else {
      return null;
    }
  }

  @override
  String middleStringAtIndex(int index) {
    if (index >= 0 && index < 60) {
      return this.digits(index, 2);
    } else {
      return null;
    }
  }

  @override
  String rightStringAtIndex(int index) {
    if (index >= 0 && index < 60) {
      return this.digits(index, 2);
    } else {
      return null;
    }
  }

  @override
  String leftDivider() {
    return "|";
  }

  @override
  String rightDivider() {
    return "|";
  }

  @override
  List<int> layoutProportions() {
    return [1, 2, 1];
  }

  /// Assembles the picked time on the seed date, preserving UTC-ness.
  @override
  DateTime finalTime() {
    return currentTime.isUtc
        ? DateTime.utc(currentTime.year, currentTime.month, currentTime.day,
            this.currentLeftIndex(), this.currentMiddleIndex(),
            this.currentRightIndex())
        : DateTime(currentTime.year, currentTime.month, currentTime.day,
            this.currentLeftIndex(),
            this.currentMiddleIndex(), this.currentRightIndex());
  }
}
You can use this for timepicker in flutter
/// Shows a Material time picker (forced to 12-hour format) and guards the
/// result.
///
/// Fix: the original snippet's final `if` had no body at all — a syntax
/// error. The guard is kept with a placeholder body; store `picked` wherever
/// your state needs it.
Future<void> _selectTime(BuildContext context) async {
  TimeOfDay? picked = await showTimePicker(
      context: context,
      initialTime: TimeOfDay.now(),
      builder: (BuildContext context, Widget child) {
        return MediaQuery(
          data: MediaQuery.of(context).copyWith(alwaysUse24HourFormat: false),
          child: child,
        );
      });
  if (picked != null && picked != TimeOfDay.now()) {
    // TODO: persist `picked` (e.g. setState(() => selectedTime = picked)).
  }
}
How to make a function in Flutter which will Read audio files from SD card and display them to a ListView?
If you're asking about showing a list of audio files — here is an example of my code:
import 'dart:async';
import 'dart:io';
import 'package:flutter/material.dart';
import 'package:flutter_app/storage.dart';
import 'package:path_provider/path_provider.dart';
import 'package:simple_permissions/simple_permissions.dart';
/// File-browser screen for picking audio files; all state lives in
/// [_BrowserScaffoldState].
///
/// Fix: the `#override` token was a copy/paste artifact, restored to
/// `@override`.
class BrowserScaffold extends StatefulWidget {
  @override
  State<StatefulWidget> createState() {
    return _BrowserScaffoldState();
  }
}
/// State for [BrowserScaffold]: walks the external storage tree, lists files
/// and directories, and lets the user tick audio files to import.
///
/// Fix: the `#override` token was a copy/paste artifact, restored to
/// `@override`.
class _BrowserScaffoldState extends State<BrowserScaffold> {
  // Entries of the directory currently shown.
  final List<FileSystemEntity> files = List<FileSystemEntity>();
  // Files the user has ticked for import.
  final savedFiles = Set<File>();
  // Directory being browsed; null until the storage permission is granted.
  Directory parent;
  ScrollController controller = ScrollController();

  @override
  Widget build(BuildContext context) {
    // NOTE(review): requesting permissions from build() is a side effect in a
    // method that may run many times; consider moving this to initState().
    if (parent == null) {
      SimplePermissions
          .requestPermission(Permission.WriteExternalStorage)
          .then((value) {
        if (value == PermissionStatus.authorized) {
          localPath.then((String value) {
            Directory dir = Directory(value);
            // Walk up until the filesystem root (a root is its own parent).
            while (dir.path != dir.parent.path) {
              dir.isAbsolute;
              dir = dir.parent;
            }
            parent = dir;
            setState(() {
              files.addAll(dir.parent.listSync());
              sortFiles();
            });
          });
        } else {
          SimplePermissions.openSettings();
        }
      });
    }
    return Scaffold(
      appBar: AppBar(
        title: Text('Files'),
        actions: <Widget>[
          IconButton(icon: Icon(Icons.check), onPressed: apply),
        ],
      ),
      body: buildList(),
    );
  }

  /// Drops links and unreadable directories, then sorts directories first
  /// and alphabetically within each kind.
  void sortFiles() {
    for (int i = files.length - 1; i >= 0; i--) {
      FileSystemEntity entity = files[i];
      if (entity is Link) {
        files.remove(entity);
      } else if (entity is Directory) {
        try {
          entity.listSync();
        } catch (ex) {
          print('catch: ${entity.path}, $ex');
          files.remove(entity);
        }
      }
    }
    files.sort((FileSystemEntity a, FileSystemEntity b) {
      if (a is Directory && b is File) return -1;
      if (a is File && b is Directory) return 1;
      return a.path.compareTo(b.path);
    });
  }

  /// Builds the file list; odd rows are dividers, and a "../" row is
  /// prepended unless we are already at the filesystem root.
  Widget buildList() {
    if (parent == null) return ListView();
    bool isRoot = parent.path == parent.parent.path;
    return ListView.builder(
      itemBuilder: (context, i) {
        if (i.isOdd)
          return Divider(
            height: 4.0,
          );
        final index = isRoot ? i ~/ 2 : i ~/ 2 - 1;
        return buildRow(index);
      },
      itemCount: isRoot ? files.length * 2 : (files.length + 1) * 2,
      controller: controller,
    );
  }

  /// Row factory: index -1 is the "../" tile; links render as empty space.
  Widget buildRow(int index) {
    if (index == -1) return getRootTile();
    FileSystemEntity file = files[index];
    if (file is Directory) return getDirectoryTile(file);
    if (file is File) return getFileTile(file);
    if (file is Link)
      return Container(
        height: 0.0,
      );
    return Container(
      height: 0.0,
    );
  }

  /// Tile that navigates one level up the directory tree.
  Widget getRootTile() {
    return ListTile(
      title: Text('../'),
      trailing: Icon(Icons.keyboard_backspace),
      onTap: () {
        setState(() {
          parent = parent.parent;
          List<FileSystemEntity> rootList = parent.listSync();
          files.clear();
          files.addAll(rootList);
          sortFiles();
        });
      },
    );
  }

  /// Tile that descends into [dir] on tap.
  Widget getDirectoryTile(Directory dir) {
    return ListTile(
      title: Text(dir.path.split('/').last),
      trailing: Icon(
        Icons.folder_open,
        color: Colors.grey,
      ),
      onTap: () {
        setState(() {
          parent = dir;
          files.clear();
          files.addAll(dir.listSync());
          sortFiles();
        });
      },
    );
  }

  /// Tile for a plain file; only audio extensions get a checkbox, but any
  /// file toggles its saved state on tap.
  Widget getFileTile(File file) {
    bool isSaved = savedFiles.map((file) => file.path).contains(file.path);
    final List<String> extensions = <String>[
      "MP3",
      "WAV",
      "AAC",
      "WMA",
      "AMR",
      "OGG",
      "MIDI"
    ];
    bool isRightType =
        extensions.contains(file.path.split('.').last.toUpperCase());
    return ListTile(
      title: Text(file.path.split('/').last),
      trailing: isRightType
          // ignore: missing_required_param
          ? IconButton(
              icon: Icon(
                isSaved ? Icons.check_box : Icons.check_box_outline_blank,
                color: isSaved ? Colors.blueGrey : Colors.grey,
              ),
            )
          : null,
      onTap: () {
        setState(() {
          if (isSaved) {
            savedFiles.remove(file);
          } else {
            savedFiles.add(file);
          }
        });
      },
    );
  }

  /// Returns the ticked files to the previous screen as Track objects.
  void apply() async {
    final List<Track> list = List();
    list.addAll(savedFiles.map((file) => Track(file.path)));
    Navigator.of(context).pop(list);
  }

  /// App documents directory path; the starting point for the root walk.
  Future<String> get localPath async {
    final directory = await getApplicationDocumentsDirectory();
    return directory.path;
  }
}
In my case I show all files, but only audio is selectable (with checkbox)
if you're talking about being able to identify a file extension, then you first need to install Path and import it:
import 'package:path/path.dart' as p;
then check against this:
String _extension = p.extension(_someFile).split('?').first;
I suggest you read the documentation of Android Studio | Files
You can use fileList() to get an ArrayList
Further, use a for loop to get the names and paths -> file.getName() | file.getPath()