2

I'm working with both Flutter and Swift in order to build a custom camera.

My Flutter version flutter doctor -v

My XCode version XCode version

I've made my camera as below custom camera screen

I want to take a picture when button is pressed. How can I complete this?

Here is my code

.dart file

/// Hosts the custom iOS camera screen (native preview + Flutter chrome).
class IOSCompositionWidget extends StatefulWidget {
  const IOSCompositionWidget({super.key});

  @override
  State<IOSCompositionWidget> createState() {
    return _IOSCompositionWidgetState();
  }
}

/// State for [IOSCompositionWidget].
///
/// Lays out the camera UI around an embedded native [UiKitView] and forwards
/// the shutter-button tap to the iOS side over a [MethodChannel].
class _IOSCompositionWidgetState extends State<IOSCompositionWidget> {
  /// Channel shared with the native side; the iOS NativeView must register a
  /// handler for 'takePicture' under this exact name.
  static const platformChannel =
      MethodChannel('com.vrin.methodchannel/cameraButton');

  // Photo payload returned by the native side (bytes or a file path).
  dynamic nativePhoto;
  final String text = '';

  /// Asks the native camera to capture a photo and stores the result.
  ///
  /// The original code left the shutter `onTap` as a TODO and never used
  /// [platformChannel]; this wires the button to the native side.
  Future<void> _takePicture() async {
    try {
      final result = await platformChannel.invokeMethod('takePicture');
      if (!mounted) return;
      setState(() => nativePhoto = result);
    } on PlatformException catch (e) {
      // Surface channel failures instead of silently ignoring them.
      debugPrint('takePicture failed: ${e.message}');
    }
  }

  @override
  Widget build(BuildContext context) {
    // Creation params are delivered to the native factory via the codec below.
    Map<String, dynamic> creationParams = <String, dynamic>{};
    creationParams["text"] = text;

    return Scaffold(
      backgroundColor: Colors.black,
      body: Column(
        mainAxisAlignment: MainAxisAlignment.start,
        children: [
          SizedBox(
            height: MediaQuery.of(context).size.height * 0.068,
          ),
          // Top toolbar: help / timer icons, a placeholder strip, close icon.
          Padding(
            padding: EdgeInsets.only(
              left: MediaQuery.of(context).size.width * 0.062,
              right: MediaQuery.of(context).size.width * 0.038,
            ),
            child: Row(
              mainAxisAlignment: MainAxisAlignment.spaceBetween,
              children: [
                Row(
                  mainAxisAlignment: MainAxisAlignment.center,
                  children: [
                    SvgPicture.asset(
                      'images/help.svg',
                      height: 24,
                      width: 24,
                    ),
                    SizedBox(
                      width: MediaQuery.of(context).size.width * 0.026,
                    ),
                    SvgPicture.asset(
                      'images/timer.svg',
                      height: 24,
                      width: 24,
                    ),
                  ],
                ),
                Container(
                  height: MediaQuery.of(context).size.height * 0.028,
                  width: MediaQuery.of(context).size.width * 0.262,
                  color: Colors.white,
                ),
                SvgPicture.asset(
                  'images/close.svg',
                  height: 24,
                  width: 24,
                ),
              ],
            ),
          ),
          SizedBox(
            height: MediaQuery.of(context).size.height * 0.017,
          ),
          // Native camera preview rendered by the iOS platform view.
          SizedBox(
            height: MediaQuery.of(context).size.height * 0.646,
            width: MediaQuery.of(context).size.width,
            child: UiKitView(
              viewType: 'NativeView',
              creationParams: creationParams,
              creationParamsCodec: const StandardMessageCodec(),
            ),
          ),
          SizedBox(
            height: MediaQuery.of(context).size.height * 0.063,
          ),
          // Shutter button: triggers the native capture over the channel.
          Row(
            mainAxisAlignment: MainAxisAlignment.center,
            children: [
              GestureDetector(
                onTap: _takePicture,
                child: Container(
                  height: 60,
                  width: 60,
                  decoration: BoxDecoration(
                    color: Colors.white,
                    borderRadius: BorderRadius.circular(50),
                  ),
                ),
              ),
            ],
          ),
        ],
      ),
    );
  }
}

My Swift Code

AppDelegate.swift

@UIApplicationMain
@objc class AppDelegate: FlutterAppDelegate {

  /// Registers generated plugins, Firebase, background fetch, and the
  /// platform-view factory for the 'NativeView' camera preview.
  override func application(
    _ application: UIApplication,
    didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?
  ) -> Bool {
    if #available(iOS 12.0, *) {
      UNUserNotificationCenter.current().delegate = self as? UNUserNotificationCenterDelegate
    }

    FirebaseApp.configure()
    GeneratedPluginRegistrant.register(with: self)

    // Hand the binary messenger to the factory so NativeView can open a
    // MethodChannel back to Dart. The original called NativeViewFactory()
    // (no messenger, so the channel could never be created) and force-
    // unwrapped the registrar.
    if let registrar = self.registrar(forPlugin: "Runner") {
      let nativeViewFactory = NativeViewFactory(messenger: registrar.messenger())
      registrar.register(nativeViewFactory, withId: "NativeView")
    }

    UIApplication.shared.setMinimumBackgroundFetchInterval(TimeInterval(60 * 15))

    return super.application(application, didFinishLaunchingWithOptions: launchOptions)
  }
}

NativeViewFactory.swift

import Foundation
import Flutter
import UIKit
import AVFoundation


/// Factory that creates `NativeView` instances for the 'NativeView'
/// platform-view type embedded from Flutter via `UiKitView`.
class NativeViewFactory: NSObject, FlutterPlatformViewFactory {

    /// Messenger forwarded to each created view so it can open channels
    /// back to Dart; nil when the no-arg initializer was used.
    private var messenger: FlutterBinaryMessenger?

    override init() {
        super.init()
    }

    init(messenger: FlutterBinaryMessenger) {
        self.messenger = messenger
        super.init()
    }

    /// Required so `creationParams` sent from Dart can be decoded.
    func createArgsCodec() -> FlutterMessageCodec & NSObjectProtocol {
        return FlutterStandardMessageCodec.sharedInstance()
    }

    func create(withFrame frame: CGRect, viewIdentifier viewId: Int64, arguments args: Any?) -> FlutterPlatformView {
        // Build exactly one view. The original assigned a fresh NativeView to
        // a stored property and then built a second, unreachable NativeView
        // as the right-hand side of `?? ...` — wasted work, never used.
        return NativeView(
            frame: frame,
            viewIdentifier: viewId,
            arguments: args,
            binaryMessenger: messenger)
    }
}



/// UIKit view embedded in Flutter that shows a live camera preview and
/// captures a still photo when Dart invokes 'takePicture' on the shared
/// MethodChannel (the original discarded the messenger, so the Flutter
/// button had no way to reach this view).
class NativeView: NSObject, FlutterPlatformView {

    var likeAction: (() -> Void)?
    // Root view returned to Flutter; non-optional removes the force-unwraps
    // the original needed in `view()` and `createNativeView`.
    private let returnView: UIView
    var previewView: UIView!
    var boxView: UIView!
    let myButton: UIButton = UIButton()

    // Camera capture required properties.
    var videoDataOutput: AVCaptureVideoDataOutput!
    var videoDataOutputQueue: DispatchQueue!
    var previewLayer: AVCaptureVideoPreviewLayer!
    var captureDevice: AVCaptureDevice!
    let session = AVCaptureSession()
    var photoOutput: AVCapturePhotoOutput?
    var imageData: Data?
    var depthData: AVDepthData?
    var depthDataMap: CVPixelBuffer?

    /// Channel used to receive 'takePicture' calls from the Dart button;
    /// the name must match the Dart-side MethodChannel exactly.
    private var methodChannel: FlutterMethodChannel?
    /// Pending Flutter result, completed once the photo finishes processing.
    private var pendingResult: FlutterResult?

    override init() {
        returnView = UIView()
        super.init()
    }

    init(
        frame: CGRect,
        viewIdentifier viewId: Int64,
        arguments args: Any?,
        binaryMessenger messenger: FlutterBinaryMessenger?
    ) {
        returnView = UIView()
        super.init()
        createNativeView(view: returnView, args: args)

        // Listen for the Dart shutter button.
        if let messenger = messenger {
            methodChannel = FlutterMethodChannel(
                name: "com.vrin.methodchannel/cameraButton",
                binaryMessenger: messenger)
            methodChannel?.setMethodCallHandler { [weak self] call, result in
                guard let self = self else { return }
                switch call.method {
                case "takePicture":
                    self.takePicture(result: result)
                default:
                    result(FlutterMethodNotImplemented)
                }
            }
        }
    }

    func view() -> UIView {
        return returnView
    }

    func receiveGyeomViewMethod() {
        print("receiveGyeomViewMethod")
    }

    @objc func onClickMyButton(sender: UIButton) {
        print("button pressed")
    }

    /// Captures a still photo and completes `result` with the encoded bytes.
    func takePicture(result: @escaping FlutterResult) {
        guard let photoOutput = photoOutput else {
            result(FlutterError(code: "NO_OUTPUT",
                                message: "Photo output is not configured",
                                details: nil))
            return
        }
        pendingResult = result
        photoOutput.capturePhoto(with: AVCapturePhotoSettings(), delegate: self)
    }

    /// Builds the preview container and kicks off the capture session.
    func createNativeView(view _view: UIView, args: Any?) {
        _view.backgroundColor = UIColor.black

        // NOTE(review): the -150 y-offset matches the original layout —
        // confirm it is intentional cropping and not a leftover hack.
        previewView = UIView(frame: CGRect(x: 0,
                                           y: -150,
                                           width: UIScreen.main.bounds.size.width,
                                           height: UIScreen.main.bounds.size.height))
        previewView.contentMode = UIView.ContentMode.scaleAspectFit
        _view.addSubview(previewView)

        self.setupAVCapture()
    }
}

// MARK: - AVCapturePhotoCaptureDelegate

extension NativeView: AVCapturePhotoCaptureDelegate {
    /// Completes the pending Flutter result with the photo bytes, or a
    /// FlutterError if capture/encoding failed.
    func photoOutput(_ output: AVCapturePhotoOutput,
                     didFinishProcessingPhoto photo: AVCapturePhoto,
                     error: Error?) {
        defer { pendingResult = nil }
        if let error = error {
            pendingResult?(FlutterError(code: "CAPTURE_FAILED",
                                        message: error.localizedDescription,
                                        details: nil))
            return
        }
        guard let data = photo.fileDataRepresentation() else {
            pendingResult?(FlutterError(code: "NO_DATA",
                                        message: "Could not encode photo data",
                                        details: nil))
            return
        }
        imageData = data
        pendingResult?(FlutterStandardTypedData(bytes: data))
    }
}



// MARK: - AVCaptureVideoDataOutputSampleBufferDelegate

extension NativeView: AVCaptureVideoDataOutputSampleBufferDelegate {

    /// Configures the session with a back-camera input and a photo output,
    /// then hands off to `beginSession()`.
    func setupAVCapture() {
        session.sessionPreset = AVCaptureSession.Preset.photo

        let devices = AVCaptureDevice.DiscoverySession(
            deviceTypes: [.builtInDualCamera, .builtInWideAngleCamera],
            mediaType: AVMediaType.video,
            position: .back).devices
        print(devices)

        // Original force-unwrapped `captureDevice!`, crashing on devices
        // (or the Simulator) with no matching camera.
        guard let device = devices.first else {
            print("error: no back camera available")
            return
        }
        captureDevice = device

        do {
            let captureDeviceInput = try AVCaptureDeviceInput(device: device)
            if session.canAddInput(captureDeviceInput) {
                session.addInput(captureDeviceInput)
            }

            let output = AVCapturePhotoOutput()
            photoOutput = output
            output.setPreparedPhotoSettingsArray(
                [AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.hevc])],
                completionHandler: nil)

            if session.canAddOutput(output) {
                session.addOutput(output)
                // Depth delivery only when the hardware supports it.
                output.isDepthDataDeliveryEnabled = output.isDepthDataDeliverySupported
            }
        } catch {
            print(error)
        }
        beginSession()
    }

    /// Adds the video-frame output, attaches the preview layer, and starts
    /// the session.
    ///
    /// The original also created a second `AVCaptureDeviceInput` for the same
    /// device here; `setupAVCapture` already added it, so that work was
    /// redundant and has been removed.
    func beginSession() {
        videoDataOutput = AVCaptureVideoDataOutput()
        videoDataOutput.alwaysDiscardsLateVideoFrames = true
        videoDataOutputQueue = DispatchQueue(label: "VideoDataOutputQueue")
        videoDataOutput.setSampleBufferDelegate(self, queue: videoDataOutputQueue)

        if session.canAddOutput(videoDataOutput) {
            session.addOutput(videoDataOutput)
        }
        videoDataOutput.connection(with: .video)?.isEnabled = true

        previewLayer = AVCaptureVideoPreviewLayer(session: session)
        previewLayer.videoGravity = AVLayerVideoGravity.resizeAspect

        let rootLayer: CALayer = previewView.layer
        rootLayer.masksToBounds = true
        previewLayer.frame = rootLayer.bounds
        rootLayer.addSublayer(previewLayer)

        // startRunning() blocks; Apple documents it should be called off the
        // main thread to avoid UI stalls.
        DispatchQueue.global(qos: .userInitiated).async { [weak self] in
            self?.session.startRunning()
        }
    }

    /// Cleans up by stopping the capture session.
    func stopCamera() {
        session.stopRunning()
    }
}

I'm making my custom camera and I need to take a picture when the Flutter button is tapped.

1 Answer 1

1

I think you can create a Flutter Plugin for this feature.

To call Swift from Flutter you can use MethodChannel.

To send results from Swift back to Flutter you can use an EventChannel.

Sign up to request clarification or add additional context in comments.

1 Comment

I'm using hosting view right now but Thx :) I'll try to make a Flutter Plugin after I finish my work :) Thx for the suggestion.

Your Answer

By clicking “Post Your Answer”, you agree to our terms of service and acknowledge you have read our privacy policy.

Start asking to get answers

Find the answer to your question by asking.

Ask question

Explore related questions

See similar questions with these tags.