Flutter:基于URL字符串合并一个视频文件和一个音频文件!使用ffmpeg或任何其他工具

hwamh0ep  于 2023-04-07  发布在  Flutter
关注(0)|答案(1)|浏览(209)

这是我尝试的代码,但我遇到了一些随机错误。

我想做的只是:看看能否将一个来自URL的视频与多条音频轨道合并(concat),合并后作为单一来源播放,之后还能通过AirPlay投放!
用ffmpeg或其他任何工具能做到吗?请帮我找到合适的软件包和工具!或者这一切都应该放在服务器端、用云函数来实现?这里最好的做法是什么?谢谢!
注:这是我第一次使用Flutter;如果用React Native也能实现,请一并说明!

import 'dart:io';
import 'dart:async';
import 'dart:developer';
import 'package:flutter/material.dart';
import 'package:flutter_ffmpeg/flutter_ffmpeg.dart';
import 'package:video_player/video_player.dart';
import 'package:path_provider/path_provider.dart';

/// Screen that plays [videoUrl] and, once ffmpeg has merged it with
/// [audioUrl], switches over to the combined local file.
class VideoPlayerScreen extends StatefulWidget {
  /// URL of the video stream to play and merge.
  final String videoUrl;

  /// URL of the audio track to mux into the video.
  final String audioUrl;

  // const constructor + Key parameter: satisfies the
  // use_key_in_widget_constructors lint and lets callers build this widget
  // as a compile-time constant. Backward-compatible: `key` is optional.
  const VideoPlayerScreen({
    Key? key,
    required this.videoUrl,
    required this.audioUrl,
  }) : super(key: key);

  @override
  _VideoPlayerScreenState createState() => _VideoPlayerScreenState();
}

class _VideoPlayerScreenState extends State<VideoPlayerScreen> {
  // Plays the plain network video immediately, and in the background muxes
  // widget.videoUrl + widget.audioUrl into a local mp4 with ffmpeg; once the
  // merge succeeds, the player is swapped over to the merged file.

  late VideoPlayerController _controller;
  late Future<void> _initializeVideoPlayerFuture;

  @override
  void initState() {
    super.initState();
    // Start streaming the un-merged video right away so the user is not
    // stuck on a spinner while ffmpeg runs.
    _controller = VideoPlayerController.network(widget.videoUrl);
    _initializeVideoPlayerFuture = _controller.initialize();
    _controller.setLooping(true);
    _combineVideoAndAudio();
  }

  @override
  void dispose() {
    _controller.dispose();
    super.dispose();
  }

  /// Creates (if needed) and returns a private scratch directory inside the
  /// platform temp directory, used for the ffmpeg output file.
  Future<Directory> createTempDir() async {
    Directory tempDir = await getTemporaryDirectory();
    return Directory('${tempDir.path}/my_temp_dir').create(recursive: true);
  }

  /// Muxes the remote video and audio into a local mp4, then switches the
  /// player to the merged file.
  ///
  /// Fixes over the original:
  ///  * input/output paths are quoted so URLs containing spaces, `?` or `&`
  ///    survive ffmpeg's argument parsing;
  ///  * `-map 0:v:0 -map 1:a:0` explicitly takes the video from input 0 and
  ///    the audio from input 1 — ffmpeg's default stream selection may
  ///    otherwise keep the original video's audio track;
  ///  * `-y` overwrites a leftover output file so a re-run does not fail;
  ///  * the ffmpeg return code is checked before the output file is played;
  ///  * the old controller is disposed after being replaced (resource leak);
  ///  * `mounted` is checked before calling setState after the awaits.
  Future<void> _combineVideoAndAudio() async {
    log('combineVideoAndAudio');
    final Directory tempDir = await createTempDir();
    final String outputPath = '${tempDir.path}/output.mp4';
    // -c:v copy keeps the video stream untouched; only the audio is encoded
    // to AAC. (`-strict experimental` is no longer needed for the built-in
    // aac encoder.) -shortest trims the output to the shorter input.
    final String command =
        '-i "${widget.videoUrl}" -i "${widget.audioUrl}" '
        '-map 0:v:0 -map 1:a:0 -c:v copy -c:a aac -shortest -y "$outputPath"';
    log('Running the command');
    final int rc = await FlutterFFmpeg().execute(command);
    if (rc != 0) {
      // Keep playing the plain network stream instead of pointing the
      // player at a file that was never produced.
      log('ffmpeg failed with return code $rc; keeping the network stream');
      return;
    }
    log('Command done!');
    if (!mounted) return; // screen was closed while ffmpeg was running
    final VideoPlayerController oldController = _controller;
    setState(() {
      _controller = VideoPlayerController.file(File(outputPath));
      _initializeVideoPlayerFuture = _controller.initialize();
      _controller.setLooping(true);
    });
    // Dispose the previous controller only after the new one has been
    // installed, so the widget tree never holds a disposed controller.
    await oldController.dispose();
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      appBar: AppBar(
        title: Text('Video Player'),
      ),
      body: Center(
        child: FutureBuilder(
          future: _initializeVideoPlayerFuture,
          builder: (context, snapshot) {
            if (snapshot.connectionState == ConnectionState.done) {
              return AspectRatio(
                aspectRatio: _controller.value.aspectRatio,
                child: VideoPlayer(_controller),
              );
            } else {
              return CircularProgressIndicator();
            }
          },
        ),
      ),
      floatingActionButton: FloatingActionButton(
        onPressed: () {
          // setState rebuilds the FAB so the icon tracks the play state.
          setState(() {
            if (_controller.value.isPlaying) {
              _controller.pause();
            } else {
              _controller.play();
            }
          });
        },
        child: Icon(
          _controller.value.isPlaying ? Icons.pause : Icons.play_arrow,
        ),
      ),
    );
  }
}

相关问题