Flutter: calling FFmpeg to extract audio, clipping to a fixed duration, rendering, and the utility class methods


Flutter calls FFmpeg to extract audio, clips it to a fixed duration, and renders the result. This post covers the full flow and all of the clipping utilities: cache handling, wave-bar data extraction and parsing, and render adaptation. A short usage sketch follows the utility class below.

Package versions used

audio_session: 0.1.18
ffmpeg_kit_flutter_full: 6.0.3
audioplayers: 5.2.1
...

Utility class methods (ffmpeg_utils_kit.dart)

import 'dart:async';
import 'dart:convert';
import 'dart:io';
import 'package:ffmpeg_kit_flutter_full/session_state.dart';
import 'package:file_picker/file_picker.dart';
import 'package:flutter/material.dart';
import 'package:path_provider/path_provider.dart';
import 'package:ffmpeg_kit_flutter_full/ffmpeg_kit.dart';
import 'package:ffmpeg_kit_flutter_full/ffmpeg_session.dart';
import 'package:ffmpeg_kit_flutter_full/return_code.dart';
import 'package:ffmpeg_kit_flutter_full/ffprobe_kit.dart';
import 'package:uuid/uuid.dart';

int defaultFfmpegKitExecuteTimeoutCancelMilliseconds = 30000;

class AudioClipFixedDurationClipConfig {
  final double boxLRMargin; // left/right margin of the outer box
  final double clipMaxDuration;  // maximum duration kept after clipping
  final double clipAreaRTPadding; // horizontal padding on both sides of the clip area
  final double waveItemWidth; // width of each wave bar
  final double waveItemHeight; // maximum height of each wave bar
  final double waveItemSpacing; // spacing between wave bars
  final Color waveItemColor; // wave bar color
  final Color waveItemHighColor; // wave bar highlight color

  final double clipAreaRenderHeight; // render height of the clip area
  final double playOpBtnSize; // play button size
  final double playOpBtnBorderRadius; // play button corner radius
  final double playOpBtnMargin; // play button margin
  final double playOpIconSize; // play button icon size

  final double clipAreaLeftMargin; // left margin of the clip area


  AudioClipFixedDurationClipConfig({
    this.boxLRMargin = 16,
    this.clipMaxDuration = 30,
    this.clipAreaRTPadding = 12,
    this.waveItemWidth = 2,
    this.waveItemHeight = 36,
    this.waveItemSpacing = 1,
    this.waveItemColor = const Color.fromRGBO(151, 159, 171, 1),
    this.waveItemHighColor = const Color.fromRGBO(0, 87, 255, 1),
    this.clipAreaRenderHeight = 56,
    this.playOpBtnSize = 44,
    this.playOpIconSize = 24,
    this.playOpBtnBorderRadius = 44,
    this.playOpBtnMargin = 2,
    this.clipAreaLeftMargin = 8,
  });

  AudioClipFixedDurationClipConfig copyWith({
    double? boxLRMargin,
    double? clipMaxDuration,
    double? clipAreaRTPadding,
    double? waveItemWidth,
    double? waveItemHeight,
    double? waveItemSpacing,
    Color? waveItemColor,
    Color? waveItemHighColor,
    double? clipAreaRenderHeight,
    double? playOpBtnSize,
    double? playOpIconSize,
    double? playOpBtnBorderRadius,
    double? playOpBtnMargin,
    double? clipAreaLeftMargin,
  }) {
    return AudioClipFixedDurationClipConfig(
      boxLRMargin: boxLRMargin ?? this.boxLRMargin,
      clipMaxDuration: clipMaxDuration ?? this.clipMaxDuration,
      clipAreaRTPadding: clipAreaRTPadding ?? this.clipAreaRTPadding,
      waveItemWidth: waveItemWidth ?? this.waveItemWidth,
      waveItemHeight: waveItemHeight ?? this.waveItemHeight,
      waveItemSpacing: waveItemSpacing ?? this.waveItemSpacing,
      waveItemColor: waveItemColor ?? this.waveItemColor,
      waveItemHighColor: waveItemHighColor ?? this.waveItemHighColor,
      clipAreaRenderHeight: clipAreaRenderHeight ?? this.clipAreaRenderHeight,
      playOpBtnSize: playOpBtnSize ?? this.playOpBtnSize,
      playOpIconSize: playOpIconSize ?? this.playOpIconSize,
      playOpBtnBorderRadius: playOpBtnBorderRadius ?? this.playOpBtnBorderRadius,
      playOpBtnMargin: playOpBtnMargin ?? this.playOpBtnMargin,
      clipAreaLeftMargin: clipAreaLeftMargin ?? this.clipAreaLeftMargin,
    );
  }
}

AudioClipFixedDurationClipConfig audioClipFixedDurationClipConfig = AudioClipFixedDurationClipConfig();

// AudioClipFixedDurationClipConfig().copyWith(
//   boxLRMargin: 16,
//   clipMaxDuration: 30,
//   clipAreaRTPadding: 12,
//   waveItemWidth: 2,
//   waveItemHeight: 36,
//   waveItemSpacing: 1,
//   waveItemColor: const Color.fromRGBO(151, 159, 171, 1),
//   waveItemHighColor: const Color.fromRGBO(0, 87, 255, 1),
//   playOpIconSize: 24,

//   clipAreaRenderHeight: 56,
//   playOpBtnSize: 44,
//   playOpBtnBorderRadius: 44,
//   playOpBtnMargin: 2,
//   clipAreaLeftMargin: 8,
// );
class FfmpegKitExecuteResData {
  final String command;
  final FFmpegSession? session;
  final dynamic error;
  final int code;

  FfmpegKitExecuteResData({
    required this.command,
    this.session,
    this.error,
    required this.code,
  });
}

class FfmpegKitInitClipAudioData {
  String souceFilePath;
  String mp3FilePath;
  double duration;
  List<double> waveBarData;
  FfmpegKitInitClipAudioData({
    required this.souceFilePath,
    required this.mp3FilePath,
    required this.duration,
    required this.waveBarData
  });
}

class AudioClipFixedDurationClipedData {
  final String mp3FilePath;
  double clipStartTime = 0;
  double clipEndTime = 0;
  double clipDuaration = 0;
  double totalDuration = 0;


  AudioClipFixedDurationClipedData({
    required this.mp3FilePath,
    required this.clipStartTime,
    required this.clipEndTime,
    required this.clipDuaration,
    required this.totalDuration,
  });
}


class FfmpegUtilsKit {
  static getUuid() {
    return const Uuid().v4().replaceAll(RegExp("-"), '');
  }

  static String formatSeconds(double seconds) {
    int totalSeconds = seconds.ceil();
    int minutes = (totalSeconds ~/ 60) % 60;
    int remainingSeconds = totalSeconds % 60;

    String minutesStr = minutes.toString().padLeft(2, '0');
    String secondsStr = remainingSeconds.toString().padLeft(2, '0');

    return '$minutesStr:$secondsStr';
  }
  
  static String formatDoubleSecondsToTime(double seconds) {
    int totalSeconds = seconds.floor();
    int hours = (totalSeconds ~/ 3600) % 24;
    int minutes = (totalSeconds ~/ 60) % 60;
    int secondsRemainder = totalSeconds % 60;
    int milliseconds = ((seconds - totalSeconds) * 1000).round();

    String hoursString = hours.toString().padLeft(2, '0');
    String minutesString = minutes.toString().padLeft(2, '0');
    String secondsString = secondsRemainder.toString().padLeft(2, '0');
    String millisecondsString = milliseconds.toString().padLeft(3, '0');

    return '$hoursString:$minutesString:$secondsString.$millisecondsString';
  }

  static Future<Directory> createSubdirectoryInTemporaryDirectory() async {
    DateTime now = DateTime.now();
    String formattedDate = '${now.year}_${now.month.toString().padLeft(2, '0')}_${now.day.toString().padLeft(2, '0')}';
    Directory tempDir = await getTemporaryDirectory();
    String subDirName = 'audio_edit_temp/$formattedDate';
    String subDirPath = '${tempDir.path}/$subDirName';
    Directory subDir = Directory(subDirPath);
    if (!await subDir.exists()) {
      await subDir.create(recursive: true);
      debugPrint('Subdirectory created: $subDirPath');
    } else {
      debugPrint('Subdirectory already exists: $subDirPath');
    }
    return subDir;
  }

  static Future<bool> deleteDirectory(String path) async {
    Directory directory = Directory(path);
    if (await directory.exists()) {
      await directory.delete(recursive: true);
      debugPrint('Directory deleted: $path');
      return true;
    } else {
      debugPrint('Directory does not exist: $path');
      return false;
    }
  }

  static Future<void> deleteAllTempFiles() async {
    Directory tempDir = await getTemporaryDirectory();
    await deleteDirectory(tempDir.path);
  }

  static Future<void> deleteAudioDirectoriesStartingWith(String prefix) async {
    Directory tempDir = await getTemporaryDirectory();
    String subDirName = 'audio_edit_temp';
    String subDirPath = '${tempDir.path}/$subDirName';
    Directory directory = Directory(subDirPath);
    if (await directory.exists()) {
      List<FileSystemEntity> contents = directory.listSync();
      for (FileSystemEntity entity in contents) {
        if (entity is Directory) {
          String directoryName = entity.path.split('/').last;
          if (directoryName.startsWith(prefix)) {
            await entity.delete(recursive: true);
            debugPrint('Directory deleted: ${entity.path}');
          }
        }
      }
    } else {
      debugPrint('Directory does not exist: $subDirPath');
    }
  }

  static Future<bool> deleteAudioTempDirectory() async {
    Directory tempDir = await getTemporaryDirectory();
    String subDirName = 'audio_edit_temp';
    String subDirPath = '${tempDir.path}/$subDirName';
    Directory directory = Directory(subDirPath);
    if (await directory.exists()) {
      await directory.delete(recursive: true);
      debugPrint('Directory deleted: $subDirPath');
      return true;
    } else {
      debugPrint('Directory does not exist: $subDirPath');
      return true;
    }
  }

  static Future<void> deleteAudioTempFilesByDate(String formattedDate) async {
    Directory tempDir = await getTemporaryDirectory();
    String subDirName = 'audio_edit_temp/$formattedDate';
    String subDirPath = '${tempDir.path}/$subDirName';
    Directory directory = Directory(subDirPath);
    if (await directory.exists()) {
      await directory.delete(recursive: true);
      debugPrint('Directory deleted: $subDirPath');
    } else {
      debugPrint('Directory does not exist or is not a directory: $subDirPath');
    }
  }

  static Future<void> copyFileFromCacheToDirectory(String cacheFilePath) async {
    try {
      String filename = cacheFilePath.split('/').last;
      String? filePath = await FilePicker.platform.getDirectoryPath();
      if (filePath == null) {
        debugPrint('🍎🍎🍎🍎🍎FfmpegUtilsKit_copyFileFromCacheToDirectory: user cancelled picking a destination folder');
        return;
      }
      String destinationDirectoryPath = filePath;
      // debugPrint('🍎🍎🍎🍎🍎FfmpegUtilsKit_copyFileFromCacheToDirectory: destination folder picked by the user: $destinationDirectoryPath');
      // Build the destination file path
      String destinationFilePath = '$destinationDirectoryPath/$filename';
      await File(cacheFilePath).copy(destinationFilePath);
      // debugPrint('🍎🍎🍎🍎🍎FfmpegUtilsKit_copyFileFromCacheToDirectory: file copied to: $destinationFilePath');
    } catch (e) {
      debugPrint('🍎🍎🍎🍎🍎FfmpegUtilsKit_copyFileFromCacheToDirectory: error while copying the file: $e');
    }
  }

  static List<List<T>> splitList<T>(List<T> list, int totalChunks) {
    if (list.isEmpty || totalChunks <= 0) {
      return [];
    } else if (totalChunks == 1) {
      return [list];
    } else {
      final chunkSize = (list.length / totalChunks).ceil();
      final result = List<List<T>>.generate(totalChunks, (index) {
        final start = index * chunkSize;
        final end = (index + 1) * chunkSize;
        // Clamp both bounds so the last chunks never run past the end of the list.
        final safeStart = start > list.length ? list.length : start;
        final safeEnd = end > list.length ? list.length : end;
        return list.sublist(safeStart, safeEnd);
      });
      return result;
    }
  }

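  /// Downsamples the decoded PCM bytes into `points` wave-bar values: the data
  /// is split into `points` chunks and each chunk is reduced to the average of
  /// roughly `averageCount` sampled values.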
  static List<double> getWavebarDataByDecodeData<T extends num>(
    List<T> decodedata, {
    int points = 200,
    int averageCount = 100,
  }) {
    try{
      List<List<T>> channelDatasChunk = splitList<T>(decodedata, points).toList();
      // debugPrint("🍎🍎🍎🍎🍎FfmpegUtilsKit_getWavebarDataByDecodeData with channelDatasChunk: 🍉🍉🍉<----->>>${channelDatasChunk.length}<<<----->🍉🍉🍉");
      List<double> res = [];
      for(int index=0; index < channelDatasChunk.length; index++){
        final chunk = channelDatasChunk[index];
        if (chunk.isEmpty) {
          res.add(0);
          continue;
        }
        // Sample every `step` values; keep step >= 1 so short chunks cannot cause an infinite loop.
        int step = (chunk.length / averageCount).floor();
        if (step < 1) step = 1;
        double sum = 0;
        int sampled = 0;
        for (int i = 0; i < chunk.length; i += step) {
          sum += chunk[i].toDouble();
          sampled++;
        }
        final double average = sum / sampled;
        res.add(average);
      }
      return res;
    } catch (e){
      debugPrint("🍎🍎🍎🍎🍎FfmpegUtilsKit_getWavebarDataByDecodeData with catchError: 🐛🐛🐛$e🐛🐛🐛");
      return [];
    }
  }

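  /// Scales the wave-bar values so that the tallest bar equals `halfHeight`,
  /// clamping zero or negative values to a minimum visible height of 1.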
  static List<double> fixedPaintGainMaxHeightWaveBarData(List<double> data, double halfHeight) {
    if (data.isEmpty) {
      return [];
    }
    double max = data.reduce((value, element) => value > element ? value : element);
    if (max == 0) {
      return List.generate(data.length, (index) => 1);
    }
    double gainRatio = halfHeight / max;
    return List.generate(data.length, (index) {
      double gainHeight = data[index] * gainRatio;
      return gainHeight <= 0 ? 1 : gainHeight;
    });
  }

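  /// Full waveform pipeline: decode the file to raw PCM, downsample it to
  /// `points` bar values, then scale the bars up to `gainMaxHeight`.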
  static Future<List<double>> getRenderWaveData(String filePath, {
    int points = 200,
    double gainMaxHeight = 100,
    int averageCount = 10,
    int? timeoutCancelMilliseconds,
  }) {
    Completer<List<double>> completer = Completer<List<double>>();
    getDecodedAudioData(filePath, timeoutCancelMilliseconds: timeoutCancelMilliseconds).then((decodedata) {
      if(decodedata.isNotEmpty){
        // debugPrint("FfmpegUtilsKit_getRenderWaveData data: 🍉🍉🍉<----->>>decodedata.length:${decodedata.length} points:$points<<<----->🍉🍉🍉");
        List<double> waveData = getWavebarDataByDecodeData<int>(decodedata, points: points, averageCount: averageCount);
        // debugPrint("FfmpegUtilsKit_getRenderWaveData data: 🍉🍉🍉🍉🍉<----->>>decodedata.length:${decodedata.length} points:$points ${waveData.length}<<<----->🍉🍉🍉");
        completer.complete(fixedPaintGainMaxHeightWaveBarData(waveData, gainMaxHeight));
      }else{
        debugPrint('🍎🍎🍎🍎🍎FfmpegUtilsKit_getRenderWaveData: decoded data is empty');
        completer.complete([]);
      }
    }).catchError((onError){
       debugPrint('🍎🍎🍎🍎🍎FfmpegUtilsKit_getRenderWaveData with catchError: 🐛🐛🐛$onError🐛🐛🐛');
       completer.complete([]);
    });
    return completer.future;
  }

  /// Get the media duration in seconds
  static Future<double> getMediaDuration(String filepath) async {
    Completer<double> completer = Completer<double>();

    try {
      String command = '-i "$filepath" -show_entries format=duration -v quiet -of json';
      final session = await FFprobeKit.getMediaInformationFromCommand(command);
      final information = session.getMediaInformation();

      if (information != null) {
        final output = await session.getOutput();
        if (output != null) {
          var outputData = jsonDecode(output);
          double duration = double.parse(outputData["format"]["duration"]);
          completer.complete(duration);
        } else {
          debugPrint('🍎🍎🍎🍎🍎FfmpegUtilsKit_getMediaDuration with null output: 🐛🐛🐛null output🐛🐛🐛');
          completer.complete(0);
        }
      } else {
        debugPrint('🍎🍎🍎🍎🍎FfmpegUtilsKit_getMediaDuration with null information: 🐛🐛🐛null information🐛🐛🐛');
        completer.complete(0);
      }

      session.cancel();

    } catch (error, stackTrace) {
      debugPrint('🍎🍎🍎🍎🍎FfmpegUtilsKit_getMediaDuration with error: 🐛🐛🐛$error🐛🐛🐛');
      debugPrint('🍎🍎🍎🍎🍎FfmpegUtilsKit_getMediaDuration with stackTrace: 🐛🐛🐛$stackTrace');
      completer.complete(0);
    }

    return completer.future;
    // FFprobeKit.getMediaInformation(filepath).then((session) async {
    //   final information = await session.getMediaInformation();
    //   if (information != null) {
    //     // CHECK THE FOLLOWING ATTRIBUTES ON ERROR
    //     final output = await session.getOutput();
    //     if(output != null){
    //       // debugPrint("getMediaInformation: $output");
    //       var outputData = jsonDecode(output!);
    //       debugPrint("getMediaInformation: $outputData");
    //       debugPrint("getMediaInformation: ${outputData["streams"][0]["duration"]}");
    //     }
    //   }
    // }).onError((error, stackTrace){
    //   debugPrint('getMediaInformation failed with error: $error');
    // });
  }

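  /// Runs an FFmpeg command via FFmpegKit.execute and arms a timer that cancels
  /// the session once the timeout elapses; the result is wrapped in
  /// FfmpegKitExecuteResData (code 1 = session obtained, code 0 = error).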
  static Future<FfmpegKitExecuteResData?> ffmpegExecute(String command, {int? timeoutCancelMilliseconds}) {
    debugPrint("🍎🍎🍎🍎🍎FfmpegUtilsKit_ffmpegKitExecute with command: 🍉🍉🍉<----->>>$command<<<----->🍉🍉🍉");
    Completer<FfmpegKitExecuteResData?> completer = Completer<FfmpegKitExecuteResData?>();
    try {
      FFmpegKit.execute(command).then((session) {
        Timer? timer;
        void handleTimeout() {
          timer?.cancel();
          session.cancel();
          timer = null;
          debugPrint('🍎🍎🍎🍎🍎FfmpegUtilsKit_ffmpegExecute timed out and the session was cancelled');
        }
        timer = Timer(Duration(milliseconds: timeoutCancelMilliseconds ?? defaultFfmpegKitExecuteTimeoutCancelMilliseconds), handleTimeout);
        session.getState().then((sessionState) async {
          timer?.cancel();
          timer = null;
          if (sessionState == SessionState.completed) {
            debugPrint('🍎🍎🍎🍎🍎FfmpegUtilsKit_ffmpegExecute completed ${await session.getAllLogsAsString()}');
          } else if (sessionState == SessionState.failed) {
            debugPrint('🍎🍎🍎🍎🍎FfmpegUtilsKit_ffmpegExecute failed ${await session.getAllLogsAsString()}');
          } else if (sessionState == SessionState.created) {
            debugPrint('🍎🍎🍎🍎🍎FfmpegUtilsKit_ffmpegExecute created');
          }
        });
        // debugPrint("🍎🍎🍎🍎🍎FfmpegUtilsKit_ffmpegExecute session:  🍑🍑🍑$session🍑🍑🍑");
        completer.complete(FfmpegKitExecuteResData(command: command, session: session, error: null, code: 1));
      }).catchError((error, stackTrace) {
        debugPrint('🍎🍎🍎🍎🍎FfmpegUtilsKit_ffmpegExecute with catchError: 🐛🐛🐛$error🐛🐛🐛');
        debugPrint('🍎🍎🍎🍎🍎FfmpegUtilsKit_ffmpegExecute with catchError: 🐛🐛🐛$stackTrace');
        completer.complete(FfmpegKitExecuteResData(command: command, session: null, error: error, code: 0));
      });
    } catch (error) {
      debugPrint('🍎🍎🍎🍎🍎FfmpegUtilsKit_ffmpegExecute with Exceptionerror: 🐛🐛🐛$error🐛🐛🐛');
      completer.complete(FfmpegKitExecuteResData(command: command, session: null, error: error, code: 0));
    }

    return completer.future;
  }

  static Future<String?> transToMp3(String filePath,{ int? timeoutCancelMilliseconds }) async {
    Completer<String?> completer = Completer<String?>();
    Directory tempDir = await createSubdirectoryInTemporaryDirectory();
    final outputPath = '${tempDir.path}/trans_out_audio__${getUuid()}.mp3';

    try {
      // Run the conversion command, with timeout handling
      ffmpegExecute('-i "$filePath" -c:a libmp3lame -q:a 2 "$outputPath"', timeoutCancelMilliseconds: timeoutCancelMilliseconds)
        .then((FfmpegKitExecuteResData? data) async {
          if (data != null && data.code == 1) {
            FFmpegSession session = data.session!;
            final returnCode = await session.getReturnCode();
            if (ReturnCode.isSuccess(returnCode)) {
              debugPrint("🍎🍎🍎🍎🍎FfmpegUtilsKit_transToMp3 Successfully with path: 🍉🍉🍉<----->>>$outputPath<<<----->🍉🍉🍉");
              completer.complete(outputPath);
            } else {
              debugPrint('🍎🍎🍎🍎🍎FfmpegUtilsKit_transToMp3 with failure: 🐛🐛🐛null🐛🐛🐛');
              completer.complete(null);
            }
            // Cancel the session
            session.cancel();
          } else {
            completer.complete(null);
            debugPrint('🍎🍎🍎🍎🍎FfmpegUtilsKit_transToMp3 with failure: 🐛🐛🐛null🐛🐛🐛');
          }
        }).catchError((error) {
          debugPrint('🍎🍎🍎🍎🍎FfmpegUtilsKit_transToMp3 with failure: 🐛🐛🐛$error🐛🐛🐛');
          completer.complete(null);
        });

    } catch (error) {
      debugPrint('🍎🍎🍎🍎🍎FfmpegUtilsKit_transToMp3 with failure: 🐛🐛🐛$error🐛🐛🐛');
      completer.complete(null);
    }

    return completer.future;
  }


  static Future<String?> transToWav(String filePath, { int? timeoutCancelMilliseconds }) async {
    Completer<String?> completer = Completer<String?>();
    Directory tempDir = await createSubdirectoryInTemporaryDirectory();
    final outputPath = '${tempDir.path}/trans_out_audio__${getUuid()}.wav';

    try {
      // Run the conversion command, with timeout handling
      ffmpegExecute('-i "$filePath" "$outputPath"', timeoutCancelMilliseconds: timeoutCancelMilliseconds)
        .then((FfmpegKitExecuteResData? data) async {
          if (data != null && data.code == 1) {
            FFmpegSession session = data.session!;
            final returnCode = await session.getReturnCode();
            if (ReturnCode.isSuccess(returnCode)) {
              // debugPrint("🍎🍎🍎🍎🍎FfmpegUtilsKit_transToWav Successfully with path: 🍉🍉🍉<----->>>$outputPath<<<----->🍉🍉🍉");
              completer.complete(outputPath);
            } else {
              debugPrint('🍎🍎🍎🍎🍎FfmpegUtilsKit_transToWav with failure: 🐛🐛🐛null🐛🐛🐛');
              completer.complete(null);
            }
            // Cancel the session
            session.cancel();
          } else {
            debugPrint('🍎🍎🍎🍎🍎FfmpegUtilsKit_transToWav with failure: 🐛🐛🐛null🐛🐛🐛');
            completer.complete(null);
          }
        }).catchError((error) {
          debugPrint('🍎🍎🍎🍎🍎FfmpegUtilsKit_transToWav with failure: 🐛🐛🐛$error🐛🐛🐛');
          completer.complete(null);
        });

    } catch (error) {
      debugPrint('🍎🍎🍎🍎🍎FfmpegUtilsKit_transToWav with failure: 🐛🐛🐛$error🐛🐛🐛');
      completer.complete(null);
    }

    return completer.future;
  }

  // Clip a time range; both the input and the output are mp3
  static Future<String?> cropMedia({
    required String filePath,
    required double clipStartTime,
    required double clipEndTime,
    int? timeoutCancelMilliseconds
  }) async {
    Completer<String?> completer = Completer<String?>();
    Directory tempDir = await createSubdirectoryInTemporaryDirectory();
    final outputPath = '${tempDir.path}/trans_out_audio__${getUuid()}.mp3';

    try {
      // Run the clipping command, with timeout handling
      ffmpegExecute('-i "$filePath" -ss ${formatDoubleSecondsToTime(clipStartTime)} -to ${formatDoubleSecondsToTime(clipEndTime)} -c:v copy -c:a copy "$outputPath"', timeoutCancelMilliseconds: timeoutCancelMilliseconds)
        .then((FfmpegKitExecuteResData? data) async {
          if (data != null && data.code == 1) {
            FFmpegSession session = data.session!;
            final returnCode = await session.getReturnCode();
            if (ReturnCode.isSuccess(returnCode)) {
              debugPrint("🍎🍎🍎🍎🍎FfmpegUtilsKit_cropMedia Successfully with path: 🍉🍉🍉<----->>>$outputPath<<<----->🍉🍉🍉");
              completer.complete(outputPath);
            } else {
              debugPrint('🍎🍎🍎🍎🍎FfmpegUtilsKit_cropMedia with failure: 🐛🐛🐛null🐛🐛🐛');
              completer.complete(null);
            }
            // Cancel the session
            session.cancel();
          } else {
            debugPrint('🍎🍎🍎🍎🍎FfmpegUtilsKit_cropMedia with failure: 🐛🐛🐛null🐛🐛🐛');
            completer.complete(null);
          }
        }).catchError((error) {
          debugPrint('🍎🍎🍎🍎🍎FfmpegUtilsKit_cropMedia with failure: 🐛🐛🐛$error🐛🐛🐛');
          completer.complete(null);
        });

    } catch (error) {
      debugPrint('🍎🍎🍎🍎🍎FfmpegUtilsKit_cropMedia with failure: 🐛🐛🐛$error🐛🐛🐛');
      completer.complete(null);
    }

    return completer.future;
  }


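  /// Decodes the input file to raw signed 16-bit little-endian PCM and returns
  /// the resulting bytes, which feed the waveform calculation above.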
  static Future<List<int>> getDecodedAudioData(String filePath, {int? timeoutCancelMilliseconds}) async {
    Completer<List<int>> completer = Completer<List<int>>();
    Directory tempDir = await createSubdirectoryInTemporaryDirectory();
    final outputPath = '${tempDir.path}/trans_out_audio__${getUuid()}.pcm';

    try {
      // Run the decode command, with timeout handling
      ffmpegExecute('-i "$filePath" -f s16le -acodec pcm_s16le -ar 4800 "$outputPath"', timeoutCancelMilliseconds: timeoutCancelMilliseconds)
        .then((FfmpegKitExecuteResData? data) async {
          if (data != null && data.code == 1) {
            FFmpegSession session = data.session!;
            final returnCode = await session.getReturnCode();
            if (ReturnCode.isSuccess(returnCode)) {
              File file = File(outputPath);
              if (await file.exists()) {
                // debugPrint("🍎🍎🍎🍎🍎FfmpegUtilsKit_getDecodedAudioData PCM file is exist: 🍉🍉🍉<----->>>$outputPath<<<----->🍉🍉🍉");
                List<int> decodedData = await file.readAsBytes();
                // debugPrint("🍎🍎🍎🍎🍎FfmpegUtilsKit_getDecodedAudioData output: 🍉🍉🍉<----->>>${decodedData.length}<<<----->🍉🍉🍉");
                completer.complete(decodedData);
              } else {
                debugPrint("🍎🍎🍎🍎🍎FfmpegUtilsKit_getDecodedAudioData PCM file does not exist.: 🍉🍉🍉<----->>>null<<<----->🍉🍉🍉");
                completer.complete([]);
              }
            } else {
              debugPrint('🍎🍎🍎🍎🍎FfmpegUtilsKit_getDecodedAudioData with failure: 🐛🐛🐛null🐛🐛🐛');
              completer.complete([]);
            }
            // Cancel the session
            session.cancel();
          } else {
            debugPrint('🍎🍎🍎🍎🍎FfmpegUtilsKit_getDecodedAudioData with failure: 🐛🐛🐛null🐛🐛🐛');
            completer.complete([]);
          }
        }).catchError((error) {
          debugPrint('🍎🍎🍎🍎🍎FfmpegUtilsKit_getDecodedAudioData with failure: 🐛🐛🐛$error🐛🐛🐛');
          completer.complete([]);
        });

    } catch (e) {
      debugPrint('🍎🍎🍎🍎🍎FfmpegUtilsKit_getDecodedAudioData with Exceptionerror: 🐛🐛🐛$e🐛🐛🐛');
      completer.complete([]);
    }

    return completer.future;
  }

  // testWaveBarData
  // static Future<List<double>> getWaveBarData({
  //   required double waveRenderWidth,
  //   required double waveBarMaxheight,
  //   required double waveItemWidth,
  //   required double waveItemSpacing,  
  // }) async {
  //   List<double> waveBarData = [];
  //   Random random = Random();
  //   int count = ((waveRenderWidth + waveItemSpacing) / (waveItemWidth + waveItemSpacing)).truncate();
  //   for(int i = 0; i < count; i++) {
  //     double randomNumber = random.nextDouble() * (waveBarMaxheight - 1) + 1;
  //     waveBarData.add(randomNumber.toDouble());
  //   }
  //   return waveBarData;
  //   // waveBarData = await FfmpegUtilsKit.getWaveBarData(
  //   //    waveRenderWidth: waveRenderMaxWidth.toDouble(),
  //   //    waveBarMaxheight: clipConfig['waveItemHeight'].toDouble(),
  //   //    waveItemWidth: clipConfig['waveItemWidth'].toDouble(),
  //   //    waveItemSpacing: clipConfig['waveItemSpacing'].toDouble(),
  //   // );
  // }
  
  // Compute the maximum render width of the waveform
  static double calculateWaveRenderMaxWidth(double duration, double windowWidth, AudioClipFixedDurationClipConfig clipConfig) {
    double left = clipConfig.playOpBtnSize + clipConfig.boxLRMargin + clipConfig.clipAreaLeftMargin + clipConfig.playOpBtnMargin * 2;
    double waveBarRenderViewMaxBoxWidth = windowWidth - left - clipConfig.clipAreaRTPadding * 2;
    double waveRenderWidthRatio =  duration / clipConfig.clipMaxDuration;
    return waveBarRenderViewMaxBoxWidth * waveRenderWidthRatio;
  }

  // Compute the number of wave bars (render points)
  static int calculateWaveRenderPoints(double waveRenderMaxWidth, AudioClipFixedDurationClipConfig clipConfig) {
    return ((waveRenderMaxWidth + clipConfig.waveItemSpacing) ~/ (clipConfig.waveItemWidth + clipConfig.waveItemSpacing));
  }
  
  // Build the initial render data for the clip widget
  static Future<FfmpegKitInitClipAudioData?> getAudioDataByFile({
   required String filepath,
   required double windowWidth,
   AudioClipFixedDurationClipConfig? clipConfig,
   int? timeoutCancelMilliseconds,
  }) async {
    try {
      clipConfig ??= audioClipFixedDurationClipConfig;
      
      // Convert to MP3
      String? mp3FilePath = await transToMp3(filepath, timeoutCancelMilliseconds: timeoutCancelMilliseconds);
      if (mp3FilePath == null) {
        debugPrint('🍎🍎🍎🍎🍎FfmpegUtilsKit_getAudioDataByFile: mp3 conversion failed, filePath: $filepath');
        return null;
      }
      
      // Get the audio duration
      double duration = await getMediaDuration(mp3FilePath);
      
      // Compute the waveform data
      double waveRenderMaxWidth = calculateWaveRenderMaxWidth(duration, windowWidth, clipConfig);
      List<double> waveBarData = await getRenderWaveData(
        mp3FilePath,
        points: calculateWaveRenderPoints(waveRenderMaxWidth, clipConfig),
        gainMaxHeight: clipConfig.waveItemHeight,
        timeoutCancelMilliseconds: timeoutCancelMilliseconds,
      );

      return FfmpegKitInitClipAudioData(
        souceFilePath: filepath,
        mp3FilePath: mp3FilePath,
        duration: duration,
        waveBarData: waveBarData,
      );
    } catch (e) {
      debugPrint('🍎🍎🍎🍎🍎FfmpegUtilsKit_getAudioDataByFile: failed to build the initial clip data: $e');
      return null;
    } 
  }
}
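
A minimal, hypothetical usage sketch of the utility class on its own (the input path and window width are placeholders, not part of the code above; error handling is trimmed):

import 'package:flutter/material.dart';

import 'ffmpeg_utils_kit.dart';

// Hypothetical demo flow; inputPath and windowWidth are placeholders.
Future<void> demoClipFlow() async {
  const inputPath = '/path/to/source_video.mp4'; // placeholder source file
  const windowWidth = 390.0;                     // placeholder screen width

  // 1. Build the initial clip data: extracted mp3 path, duration, wave bars.
  final initData = await FfmpegUtilsKit.getAudioDataByFile(
    filepath: inputPath,
    windowWidth: windowWidth,
  );
  if (initData == null) return;

  // 2. Clip the first 30 seconds (or the whole file if it is shorter).
  final clipEnd = initData.duration < 30 ? initData.duration : 30.0;
  final clippedPath = await FfmpegUtilsKit.cropMedia(
    filePath: initData.mp3FilePath,
    clipStartTime: 0,
    clipEndTime: clipEnd,
  );
  debugPrint('clipped file: $clippedPath');

  // 3. Clean up the temp files created under audio_edit_temp.
  await FfmpegUtilsKit.deleteAudioTempDirectory();
}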




GetX controllers for targeted render updates (audio_clip_fixed_duration_controller.dart)


import 'package:get/get.dart';

class ClipTimeController extends GetxController {
  RxDouble clipStartTime = 0.0.obs;
  RxDouble clipEndTime = 0.0.obs;
  RxDouble clipDuaration = 0.0.obs;
  RxDouble totalDuration = 0.0.obs;

  void setData({
    required double startTime,
    required double endTime,
    required double cduration,
    required double tDuration,
  }) {
    // Update the clip times
    clipStartTime.value = startTime;
    clipEndTime.value = endTime;
    clipDuaration.value = cduration;
    totalDuration.value = tDuration;
  }
}

class ClipRenderWaveBarScrollX extends GetxController {
  RxDouble renderWaveBarScrollX = 0.0.obs;
  RxBool disabledDrag = false.obs;

  void setData({
    required double scrollX,
    required bool disabledD,
  }) {
    // Update the waveBar scroll position
    disabledDrag.value = disabledD;
    renderWaveBarScrollX.value = scrollX;
  }
}


class ClipIsPlayingController extends GetxController {
  RxBool isPlaying = false.obs;

  void setData({
    required bool isPlay,
  }) {
    // Update the playing state
   isPlaying.value = isPlay;
  }
}

class ClipPlayProgressController extends GetxController {
  RxDouble playProgress = 0.0.obs;
  RxInt highStartIndex = 0.obs;
  RxInt highEndIndex = 0.obs;

  void setData({
    required double progress,
    required int startIndex,
    required int endIndex,
  }) {
    // Update the playback progress
    playProgress.value = progress;
    highStartIndex.value = startIndex;
    highEndIndex.value = endIndex;
  }
}


class ClipWaveDataController extends GetxController {
  RxList<double> waveBarData = <double>[].obs;

  void setData({
    required List<double> wData,
  }) {
    // Update the wave data
    waveBarData.value = wData;
    update();
  }
}

Wave bar painter (wave_bar_painter.dart)

import 'package:flutter/material.dart';

class WaveBarPainter extends CustomPainter {
  List<double> waveData;
  int highStartIndex;
  int highEndIndex;
  Color waveItemColor;
  Color waveItemHighColor;
  double waveItemWidth;
  double waveItemSpacing;

  WaveBarPainter({
    required this.waveData,
    required this.highStartIndex,
    required this.highEndIndex,
    required this.waveItemColor,
    required this.waveItemHighColor,
    required this.waveItemWidth,
    required this.waveItemSpacing,
  });

  @override
  void paint(Canvas canvas, Size size) {
     Paint paint = Paint()..style = PaintingStyle.fill;

    double centerY = size.height / 2;
    double startX = 0;

    for (int i = 0; i < waveData.length; i++) {
      double waveHeight = centerY - waveData[i] / 2;
      paint.color= (i >= highStartIndex && i < highEndIndex) ? waveItemHighColor : waveItemColor;
      // Draw each bar as a rounded rectangle
      RRect rect = RRect.fromLTRBR(
        startX, 
        waveHeight, 
        startX + waveItemWidth, 
        waveHeight + waveData[i], 
        Radius.circular(waveItemWidth),
      );
      canvas.drawRRect(rect, paint);
      startX += waveItemWidth + waveItemSpacing;
    }
  }

  @override
  bool shouldRepaint(covariant CustomPainter oldDelegate) {
    // print("object shouldRepaint $highStartIndex $highEndIndex");
    return true;
  }
}
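
A minimal sketch of mounting the painter in a CustomPaint with illustrative values (WaveBarPreview is a hypothetical widget; the real component below drives these parameters from the GetX controllers and the decoded wave data):

import 'package:flutter/material.dart';

import 'wave_bar_painter.dart';

// Hypothetical preview widget: a fixed wave with the first 10 bars highlighted.
class WaveBarPreview extends StatelessWidget {
  const WaveBarPreview({super.key});

  @override
  Widget build(BuildContext context) {
    final waveData = List<double>.generate(60, (i) => 4.0 + (i % 9) * 4);
    return CustomPaint(
      // width ≈ bars * (waveItemWidth + waveItemSpacing), height = bar max height
      size: const Size(180, 36),
      painter: WaveBarPainter(
        waveData: waveData,
        highStartIndex: 0,
        highEndIndex: 10,
        waveItemColor: const Color.fromRGBO(151, 159, 171, 1),
        waveItemHighColor: const Color.fromRGBO(0, 87, 255, 1),
        waveItemWidth: 2,
        waveItemSpacing: 1,
      ),
    );
  }
}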

Render widget

import 'dart:async';
import 'package:flutter/material.dart';
import 'package:get/get.dart';
import 'package:audioplayers/audioplayers.dart';
import 'package:audio_session/audio_session.dart';

import 'audio_clip_fixed_duration_controller.dart';
import 'ffmpeg_utils_kit.dart';

class AudioClipFixedDuration extends StatefulWidget {
  String? filePath; 
  final Function(AudioClipFixedDurationClipedData)? onClipChange;
  final double? windowWidth;
  FfmpegKitInitClipAudioData? initClipAudioData;
  AudioClipFixedDurationClipConfig? clipConfig;
  final bool? hideLabel;
  final Widget Function(BuildContext, ClipTimeController)? builderTimeRow;
  bool? renderDebug;
  bool? initNoPlay=false;
  bool? onlyInitClipOne = true;
  int? ffmpegExecuteTimeoutCancelMilliseconds;
  

  AudioClipFixedDuration({
    this.filePath,
    this.onClipChange,
    this.windowWidth,
    this.initClipAudioData,
    this.clipConfig,
    this.hideLabel,
    this.builderTimeRow,
    this.renderDebug,
    this.initNoPlay = false,
    this.onlyInitClipOne = true,
    this.ffmpegExecuteTimeoutCancelMilliseconds,
    super.key,
  });
  
  @override
  AudioClipFixedDurationState createState() => AudioClipFixedDurationState();
}

class AudioClipFixedDurationState extends State<AudioClipFixedDuration> with AutomaticKeepAliveClientMixin {

  String filePath = '';
  FfmpegKitInitClipAudioData? initClipAudioData;

  
  AudioPlayer? audioPlayer;
  StreamSubscription<Duration>? audioPositionStream;
  StreamSubscription<PlayerState>? audioPlayerStateStream;
  StreamSubscription<AudioInterruptionEvent>? sessionInterruptionEventStream;
  ScrollController? scrollController;

  
  String mp3FilePath = '';
  String mp3ClipedOutputPath = '';
  String curPlayPath = '';

  double clipStartTime = 0;
  double clipEndTime = 0;
  double totalDuration = 0;
  double clipDuaration = 0;
  bool disabledDrag =  true;

  bool isPlaying = false;
  double playProgress = 0;
  int highStartIndex = 0;
  int highEndIndex = 0;

  List<double> waveBarData = [];
  double waveBarRenderViewMaxBoxWidth = 0;
  double waveRenderTotalWidth = 0;
  double renderWaveBarScrollX = 0;
  
  double dragEndX = 0;
  double dragStartX = 0;
  String gestureStatus = '';
  double scrollCurrentPosition = 0;
  double scrollTotalWidth = 0;
  double scrollViewWidth = 0;

  late AudioClipFixedDurationClipConfig audioClipFixedDurationClipConfig;
  @override
  bool get wantKeepAlive => true;

  @override
  void initState() {
    print("🍎🍎🍎🍎🍎🍎🍎🍎🍎AudioClipFixedDuration initState");
    super.initState();
    audioClipFixedDurationClipConfig = widget.clipConfig ?? AudioClipFixedDurationClipConfig();
    if(widget.windowWidth!=null){
       double left = audioClipFixedDurationClipConfig.playOpBtnSize + audioClipFixedDurationClipConfig.boxLRMargin  + audioClipFixedDurationClipConfig.clipAreaLeftMargin +  audioClipFixedDurationClipConfig.playOpBtnMargin * 2 ;
       waveBarRenderViewMaxBoxWidth = widget.windowWidth! - left - audioClipFixedDurationClipConfig.clipAreaRTPadding * 2;
    }
    scrollController = ScrollController();
    initGetXControler();
    initAudioPlayer();
    initClip();
  }

  @override
  void dispose() {
    print("🍎🍎🍎🍎🍎🍎🍎🍎🍎AudioClipFixedDuration dispose");
    destroyPlayer();
    scrollController?.dispose();
    scrollController = null;
    super.dispose();
    
  }

  @override
  void didUpdateWidget(covariant AudioClipFixedDuration oldWidget) {
    print("🍎🍎🍎🍎🍎🍎🍎🍎🍎AudioClipFixedDuration didUpdateWidget");
    super.didUpdateWidget(oldWidget);
    if(widget.onlyInitClipOne != true && (filePath != widget.filePath || initClipAudioData != widget.initClipAudioData)){
      initClip();
      print("🍎🍎🍎🍎🍎🍎🍎🍎🍎AudioClipFixedDuration didUpdateWidget1");
    }
  }

  initGetXControler(){
    Get.put(ClipPlayProgressController());
    Get.put(ClipTimeController());
    Get.put(ClipIsPlayingController());
    Get.put(ClipWaveDataController());
    Get.put(ClipRenderWaveBarScrollX());
  }

  updateClipIsPlayingController(){
    Get.find<ClipIsPlayingController>().setData(
      isPlay: isPlaying,
    );
  }

  updateClipTimeController(){
    Get.find<ClipTimeController>().setData(
      startTime: clipStartTime,
      endTime: clipEndTime,
      cduration: clipDuaration,
      tDuration: totalDuration,
    );
  }

  updateClipPlayProgressController(){
    Get.find<ClipPlayProgressController>().setData(
      progress: playProgress,
      startIndex: highStartIndex,
      endIndex: highEndIndex,
    );
  }

  updateClipWaveDataController(){
    Get.find<ClipWaveDataController>().setData(
      wData: waveBarData,
    );
  }

  updateClipRenderWaveBarScrollX(){
    Get.find<ClipRenderWaveBarScrollX>().setData(
      scrollX: renderWaveBarScrollX,
      disabledD: disabledDrag,
    );
  }
  
  Future resetData() async {
    
    mp3FilePath = '';
    mp3ClipedOutputPath = '';

    clipStartTime = 0;
    clipEndTime = 0;
    totalDuration = 0;
    clipDuaration = 0;
    disabledDrag =  true;

    isPlaying = false;

    playProgress = 0;
    highStartIndex = 0;
    highEndIndex = 0;
    
    waveBarData = [];
    waveRenderTotalWidth = 0;
    renderWaveBarScrollX = 0;
    scrollController?.jumpTo(0);

    dragEndX = 0;
    dragStartX = 0;
    gestureStatus = '';
    scrollCurrentPosition = 0;
    scrollTotalWidth = 0;
    scrollViewWidth = 0;
    
    updateClipIsPlayingController();
    updateClipTimeController();
    updateClipPlayProgressController();
    updateClipWaveDataController();
    updateClipRenderWaveBarScrollX();
  }
  
  onClipChange(){
    if(widget.onClipChange!=null){
      widget.onClipChange!(AudioClipFixedDurationClipedData(
        mp3FilePath: mp3FilePath,
        clipStartTime: clipStartTime,
        clipEndTime: clipEndTime,
        clipDuaration: clipDuaration,
        totalDuration: totalDuration,
      ));
    }
  }

  initAudioPlayer() async {
    audioPlayer = AudioPlayer();
    await audioPlayer?.setReleaseMode(ReleaseMode.stop);
    audioPositionStream = audioPlayer?.onPositionChanged.listen((position) {
      // debugPrint('🍎🍎🍎🍎🍎🍎Current Position: ${position.inMilliseconds/1000}');
      onPositionChange(position);
    });
    audioPlayerStateStream  = audioPlayer?.onPlayerStateChanged.listen((PlayerState playerState) async {
      isPlaying = playerState == PlayerState.playing;
      if (playerState == PlayerState.completed) {
         Duration seekTo = Duration(milliseconds: (clipStartTime * 1000).toInt());
         await audioPlayer?.seek(seekTo);
         await audioPlayer?.pause();
      }
      // debugPrint('🍎🍎🍎🍎🍎🍎Current PlayerState: $playerState isPlaying: $isPlaying');
      updateClipIsPlayingController();
    });
    final session = await AudioSession.instance;
    bool isPlay = false;
    sessionInterruptionEventStream  =  session.interruptionEventStream.listen((event) async {
      if (event.begin) {
        isPlay = isPlaying;
        await audioPlayer?.pause();
      } else {
        if(isPlay){
          audioPlayer?.play(DeviceFileSource(mp3FilePath), position: (await audioPlayer?.getCurrentPosition() ?? Duration(milliseconds: (clipStartTime * 1000).toInt())));
        }
      }
    });
  }

  destroyPlayer() async {
    await audioPositionStream?.cancel(); 
    await audioPlayerStateStream?.cancel();
    await sessionInterruptionEventStream?.cancel();
    await audioPlayer?.stop();
    await audioPlayer?.dispose();
    audioPlayer = null;
  }

  reInitPlayer() async {
    await destroyPlayer();
    await initAudioPlayer();
  }

  setPlayUrl() async {
    curPlayPath = mp3FilePath;
    debugPrint("setPlayUrl path: $mp3FilePath");
    Duration seekto = Duration(milliseconds: (clipStartTime * 1000).toInt());
    audioPlayer?.play(DeviceFileSource(mp3FilePath), position: seekto);
  }
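  // Decides how to initialise the clip: prefer externally supplied
  // initClipAudioData, otherwise build everything from widget.filePath,
  // otherwise reset to an empty state.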
  initClip() async {
    await audioPlayer?.pause();
    if(widget.initClipAudioData != null && widget.initClipAudioData?.mp3FilePath != mp3FilePath){
      initClipAudioData = widget.initClipAudioData;

      debugPrint("🍎🍎🍎🍎🍎🍎 initClip with initClipAudioData start");
      filePath = widget.initClipAudioData!.souceFilePath;
      mp3FilePath =  widget.initClipAudioData!.mp3FilePath;
      totalDuration =  widget.initClipAudioData!.duration;
      waveBarData = widget.initClipAudioData!.waveBarData;
      await calcRenderSizeData();
      onClipChange();
      updateClipTimeController();
      updateClipPlayProgressController();
      updateClipWaveDataController();
      updateClipRenderWaveBarScrollX();
      if(widget.initNoPlay!=true){
        await setPlayUrl();
      }
      // debugPrint("🍎🍎🍎🍎🍎🍎 initClip with initClipAudioData start");
    }else if(widget.filePath != null && widget.filePath !='' && filePath != widget.filePath ){
      // debugPrint("🍎🍎🍎🍎🍎🍎 initClip with initClipAudioData start0");
      filePath = widget.filePath!;
      if(filePath!='') {
        await initClipByFilePath();
        
      }else{
        resetData();
        onClipChange();
      }
    } else {
      // debugPrint("🍎🍎🍎🍎🍎🍎 initClip with initClipAudioData start2 ${widget.initClipAudioData?.mp3FilePath}");
      resetData();
      onClipChange();
    }
  }

  Future initClipByFilePath() async {
    await resetData();
    await transToMp3();
    await getMediaDuration();
    try{
      if(totalDuration > 0){
        await calcRenderSizeData();
        onClipChange();
        updateClipTimeController();
        updateClipPlayProgressController();
        updateClipRenderWaveBarScrollX();
        await getMediaWaveData();
        updateClipWaveDataController();
        if(widget.initNoPlay!=true){
          await setPlayUrl();
        }
      }
    }catch(e){
      debugPrint("🍎🍎🍎🍎🍎initClipByFilePath with error $e");
    }
  }

  Future calcRenderSizeData() async {
    if(totalDuration > audioClipFixedDurationClipConfig.clipMaxDuration){
      clipEndTime = audioClipFixedDurationClipConfig.clipMaxDuration;
      disabledDrag =  false;
      clipDuaration = audioClipFixedDurationClipConfig.clipMaxDuration;
    }else{
      clipEndTime = totalDuration;
      clipDuaration = totalDuration;
      disabledDrag =  true;      
    }
    highEndIndex = 0;
    double waveRenderWidthRatio =  totalDuration / audioClipFixedDurationClipConfig.clipMaxDuration;
    waveRenderTotalWidth = waveBarRenderViewMaxBoxWidth * waveRenderWidthRatio;
    scrollTotalWidth = waveRenderTotalWidth + audioClipFixedDurationClipConfig.clipAreaRTPadding * 2;
    scrollViewWidth = waveBarRenderViewMaxBoxWidth + audioClipFixedDurationClipConfig.clipAreaRTPadding * 2;
    debugPrint("""🍎🍎🍎🍎🍎getWaveBarData  
       totalDuration: $totalDuration
       clipMaxDuration: ${audioClipFixedDurationClipConfig.clipMaxDuration}
       clipDuaration $clipDuaration
       waveBarRenderViewMaxBoxWidth: $waveBarRenderViewMaxBoxWidth
       waveRenderWidthRatio: $waveRenderWidthRatio
       waveRenderTotalWidth: $waveRenderTotalWidth
       scrollViewWidth: $scrollViewWidth,
       scrollTotalWidth: $scrollTotalWidth
       """);
  }


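  // Player position callback: converts the position into a progress ratio within
  // the clip window and jumps back to clipStartTime once it passes clipEndTime.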
  onPositionChange(Duration position) async {
    if(clipDuaration == 0) {
        playProgress = 0;
    } else {
        playProgress = (position.inMilliseconds / 1000 - clipStartTime) / clipDuaration; 
    }
    
    Duration seekTo = Duration(milliseconds: (clipStartTime * 1000).toInt());
    if(position.inMilliseconds < (clipStartTime * 1000).toInt()) {
        debugPrint('🍎🍎🍎🍎🍎🍎Current Position1: ${position.inMilliseconds / 1000} $playProgress');
    } else if(position.inMilliseconds > (clipEndTime * 1000).toInt()) {
        playProgress = 1;
        debugPrint('🍎🍎🍎🍎🍎🍎Current Position2: ${position.inMilliseconds / 1000} $playProgress');
        await audioPlayer?.seek(seekTo);
        await audioPlayer?.pause();
    } else {
    }
    
    renderPlayerProgres();
    updateClipPlayProgressController();
  }

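  // Maps the playback progress onto the index range of highlighted wave bars.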
  renderPlayerProgres(){
    // debugPrint("renderPlayerProgres playProgress: $playProgress");
    if(disabledDrag==true){
      highEndIndex = highStartIndex + (playProgress * waveBarData.length).floor();
    }else{
      highEndIndex =  highStartIndex +  (waveBarData.length  * (waveBarRenderViewMaxBoxWidth/waveRenderTotalWidth) * playProgress).ceil();
    }
  }


  getMediaDuration() async {
    double duration = await FfmpegUtilsKit.getMediaDuration(mp3FilePath);
    totalDuration = duration;
    debugPrint("🍎🍎🍎getMediaDuration: $duration");
  }

  transToMp3() async {
    String? outputPath = await FfmpegUtilsKit.transToMp3(filePath, timeoutCancelMilliseconds: widget.ffmpegExecuteTimeoutCancelMilliseconds);
    mp3FilePath = outputPath ?? '';
    debugPrint("🍎🍎🍎transToMp3 end: $outputPath");
  }

  transToWav() async {
    String? outputPath = await FfmpegUtilsKit.transToWav(filePath, timeoutCancelMilliseconds: widget.ffmpegExecuteTimeoutCancelMilliseconds);
    mp3FilePath = outputPath ?? '';
    debugPrint("🍎🍎🍎transToMp3 end: $outputPath");
  }

  getMediaWaveData() async {
    int count = ((waveRenderTotalWidth + audioClipFixedDurationClipConfig.waveItemSpacing) / (audioClipFixedDurationClipConfig.waveItemWidth + audioClipFixedDurationClipConfig.waveItemSpacing)).truncate();
    waveBarData =  await FfmpegUtilsKit.getRenderWaveData(
      mp3FilePath,
      points: count,
      gainMaxHeight: audioClipFixedDurationClipConfig.waveItemHeight,
      timeoutCancelMilliseconds: widget.ffmpegExecuteTimeoutCancelMilliseconds
    );
    debugPrint("🍎🍎🍎🍎🍎getMediaWaveData  length: ${waveBarData.length}");
    
    return  waveBarData;
  }

  Future doSetClipArea() async {
    double start =  renderWaveBarScrollX;
    double totalEnd =  waveRenderTotalWidth;
    highStartIndex = (start / totalEnd * waveBarData.length).ceil();
    highEndIndex = highStartIndex;
    clipStartTime = start / totalEnd * totalDuration;
    clipEndTime = disabledDrag ? clipStartTime + totalDuration : clipStartTime+ audioClipFixedDurationClipConfig.clipMaxDuration;
    if(clipEndTime > totalDuration){
      clipEndTime = totalDuration;
      clipStartTime = (clipEndTime - audioClipFixedDurationClipConfig.clipMaxDuration).clamp(0, totalDuration).toDouble();
    }
    debugPrint("doSetClipArea start: $start totalEnd: $totalEnd highStartIndex: $highStartIndex highEndIndex: $highEndIndex clipStartTime: $clipStartTime clipEndTime: $clipEndTime");
    updateClipTimeController();
    // await reInitPlayer();
    Duration seekto = Duration(milliseconds: (clipStartTime * 1000).toInt());
    print("🍎🍎🍎 doSetClipArea_seekto: ${seekto.inMilliseconds} ");
    await audioPlayer?.play(DeviceFileSource(mp3FilePath), position: seekto);
    onClipChange();
  }



  togglePlay([bool isPlay = false]) async {
    updateClipIsPlayingController();
    if (isPlay==true) {
      audioPlayer?.play(DeviceFileSource(mp3FilePath), position: (await audioPlayer?.getCurrentPosition() ?? Duration(milliseconds: (clipStartTime * 1000).toInt())));
    }else{
      audioPlayer?.pause();
    }
    debugPrint("togglePlay: isPlaying: $isPlaying");
  }

  cropMedia() async {
    String? outputPath = await FfmpegUtilsKit.cropMedia(
      filePath: mp3FilePath,
      clipStartTime: clipStartTime, 
      clipEndTime: clipEndTime,
      timeoutCancelMilliseconds: widget.ffmpegExecuteTimeoutCancelMilliseconds
    );
    mp3ClipedOutputPath = outputPath??'';
    debugPrint("cropAudio: $outputPath");
  }
  copyFileFromCacheToDirectory(){
    FfmpegUtilsKit.copyFileFromCacheToDirectory(mp3FilePath);
  }

  @override
  Widget build(BuildContext context) {
    super.build(context);
    double left = audioClipFixedDurationClipConfig.playOpBtnSize + audioClipFixedDurationClipConfig.boxLRMargin  + audioClipFixedDurationClipConfig.clipAreaLeftMargin +  audioClipFixedDurationClipConfig.playOpBtnMargin * 2 ;
    waveBarRenderViewMaxBoxWidth = waveBarRenderViewMaxBoxWidth == 0 ? MediaQuery.of(context).size.width - left - audioClipFixedDurationClipConfig.clipAreaRTPadding * 2 : waveBarRenderViewMaxBoxWidth;
    return Column(
      mainAxisAlignment: MainAxisAlignment.center,
      children: [
          ...(
            widget.hideLabel == true
              ? [] 
              : [
                Container(
                  padding: EdgeInsets.symmetric(vertical: 0,horizontal: audioClipFixedDurationClipConfig.boxLRMargin),
                  child: const Row(
                    children: [
                      Expanded(
                        child: Text(
                          '截取歌曲',
                          style: TextStyle(
                            fontSize: 14,
                            color: Colors.black,
                            fontFamily: 'PingFang SC',
                            fontWeight: FontWeight.w500,
                          ),
                        )
                      ),
                    ],
                  ),
                ),
                const SizedBox(height: 8)
              ]
        ),
        Container(
          padding: EdgeInsets.only(left: audioClipFixedDurationClipConfig.boxLRMargin),
          height: audioClipFixedDurationClipConfig.clipAreaRenderHeight,
          child: Row(
            mainAxisAlignment: MainAxisAlignment.center,
            crossAxisAlignment: CrossAxisAlignment.stretch,
            children: [
              buildMainClipPlayControlWidget(context),
              const SizedBox(width: 8),
              Expanded(
                child: buildMainCropWidget(context)
              )
            ],
          ),

        ),
        GetX<ClipTimeController>(
          init: ClipTimeController(), // initialise the controller
          builder: (controller) {
            double left = audioClipFixedDurationClipConfig.playOpBtnSize + audioClipFixedDurationClipConfig.boxLRMargin  + audioClipFixedDurationClipConfig.clipAreaLeftMargin +  audioClipFixedDurationClipConfig.playOpBtnMargin * 2 ;
            return  widget.builderTimeRow !=null
              ? widget.builderTimeRow!(context, controller)
              : Container(
                  padding: EdgeInsets.only(left: left,right: audioClipFixedDurationClipConfig.boxLRMargin, top: 8),
                  child:  Row(
                    children: [
                      Text(
                        FfmpegUtilsKit.formatSeconds(controller.clipStartTime.value),
                        style: const TextStyle(
                          fontSize: 12,
                          height: 20/12,
                          color: Colors.black,
                          fontFamily: 'PingFang SC',
                          fontWeight: FontWeight.w400,
                        ),
                      ),
                      ...(
                        widget.renderDebug ==true 
                          ? [
                              Text(
                                '/${FfmpegUtilsKit.formatSeconds(controller.clipEndTime.value)}',
                                style: const TextStyle(
                                  fontSize: 12,
                                  height: 20/12,
                                  color: Color.fromRGBO(151, 159, 171, 1),
                                  fontFamily: 'PingFang SC',
                                  fontWeight: FontWeight.w400,
                              ),
                            )
                          ]
                        : []
                      ),
                      Text(
                        '/${FfmpegUtilsKit.formatSeconds(controller.totalDuration.value)}',
                        style: const TextStyle(
                          fontSize: 12,
                          height: 20/12,
                          color: Color.fromRGBO(151, 159, 171, 1),
                          fontFamily: 'PingFang SC',
                          fontWeight: FontWeight.w400,
                        ),
                      ),
                      Expanded(
                        child: Text(
                          '已选取${controller.clipDuaration.value.ceil()}秒音频',
                          textAlign: TextAlign.right,
                          style: const TextStyle(
                            fontSize: 12,
                            height: 20/12,
                            color: Color.fromRGBO(151, 159, 171, 1),
                            fontFamily: 'PingFang SC',
                            fontWeight: FontWeight.w400,
                          ),
                        )
                      ),
                    ],
                  ),
                );
          },
        ),  
        ...(widget.renderDebug==true ? [builderTestBtnWidgets(context)] :[])
      ],
    );
  }

  builderTestBtnWidgets(BuildContext context){
    return Container(
      padding: const EdgeInsets.only(left: 16,right: 16, top: 16),
      child: Wrap(
        spacing: 10,
        children: [
          ElevatedButton(
            onPressed:  () async {
              initClipByFilePath();
            }, 
            child: const Text("initClipByFilePath")
          ),
          ElevatedButton(
            onPressed: ()=>{
              getMediaDuration()
            }, 
            child: const Text("getDduration")
          ),

          ElevatedButton(
            onPressed:  () async {
              calcRenderSizeData();
              updateClipTimeController();
              updateClipPlayProgressController();
              updateClipRenderWaveBarScrollX();
            }, 
            child: const Text("calcRenderSizeData")
          ),
          ElevatedButton(
            onPressed:  () async {
              transToMp3();
            }, 
            child: const Text("TrasToMp3")
          ),
          ElevatedButton(
            onPressed:  () async {
              transToWav();
            }, 
            child: const Text("transToWav")
          ),
          ElevatedButton(
            onPressed:  () async {
              getMediaWaveData();
              updateClipWaveDataController();
            }, 
            child: const Text("getWaveData")
          ),
          ElevatedButton(
            onPressed:  () async {
               audioPlayer?.play(DeviceFileSource(filePath), position: Duration.zero);
            }, 
            child: const Text("setlayUrlFilePath")
          ),
          ElevatedButton(
            onPressed:  () async {
               audioPlayer?.play(DeviceFileSource(mp3FilePath), position: Duration.zero);
            }, 
            child: const Text("setlayUrlMp3FilePath")
          ),
          ElevatedButton(
            onPressed:  () async {
               audioPlayer?.play(DeviceFileSource(mp3ClipedOutputPath), position: Duration.zero);
            }, 
            child: const Text("setPlayUrlCliped")
          ),
          ElevatedButton(
            onPressed: ()=>{
              cropMedia()
            }, 
            child: const Text("Clip")
          ),
          // ElevatedButton(
          //   onPressed: ()=>{
          //     setPlayProgress()
          //   }, 
          //   child: const Text("testPlayProgressRender")
          // ),
          ElevatedButton(
            onPressed: ()=>{
              copyFileFromCacheToDirectory()
            }, 
            child: const Text("copyFileFromCacheToDirectory")
          )
          
        ]
      ),
    );
  }

  buildMainClipPlayControlWidget(BuildContext context){
    return InkWell(
      focusColor: Colors.transparent,
      highlightColor: Colors.transparent,
      hoverColor: Colors.transparent,
      splashColor: Colors.transparent,
      overlayColor:MaterialStateProperty.resolveWith<Color?>(
        (Set<MaterialState> states) {
          if (states.contains(MaterialState.hovered)) {
            return Colors.transparent; // overlay color when hovered
          }
          if (states.contains(MaterialState.pressed)) {
            return Colors.transparent; // overlay color when pressed
          }
          return Colors.transparent; // no overlay color by default
        },
      ),
      onTap: (){
        // Toggle playback
        if(!isPlaying){
          togglePlay(true);
        }else{
          togglePlay(false);
        }
      },
      child: Container(
        margin: EdgeInsets.all(audioClipFixedDurationClipConfig.playOpBtnMargin),
        alignment: Alignment.center,
        child:  Container(
          height: audioClipFixedDurationClipConfig.playOpBtnSize,
          width: audioClipFixedDurationClipConfig.playOpBtnSize,
          decoration: BoxDecoration(
            color: const Color.fromRGBO(225, 227, 231, 1),
            borderRadius: BorderRadius.circular(audioClipFixedDurationClipConfig.playOpBtnBorderRadius),
          ),
          child: GetX<ClipIsPlayingController>(
              init: ClipIsPlayingController(), // initialise the controller
              builder: (controller) {
                return  controller.isPlaying.value 
                  ? Icon(
                      Icons.pause,
                      size: audioClipFixedDurationClipConfig.playOpIconSize,
                      color:  Colors.black,
                    ) 
                  : Icon(
                      Icons.play_arrow,
                      size: audioClipFixedDurationClipConfig.playOpIconSize,
                      color: Colors.black,
                    );
              },
            ),
          
         
        )
      )
    );

  }
  // done will test
  buildMainCropWidget(BuildContext context){
     return SizedBox(
      height: audioClipFixedDurationClipConfig.clipAreaRenderHeight,
      child: Stack(
        fit: StackFit.expand,
        children: [
          Positioned(
            left: 0,
            right: 0,
            top: 0,
            bottom: 0,
            child: Container(
              height: audioClipFixedDurationClipConfig.clipAreaRenderHeight,
              decoration: const BoxDecoration(
                color: Color.fromRGBO(245, 246, 247, 1),
              ),
            )
          ),
          Positioned(
            left: audioClipFixedDurationClipConfig.clipAreaRTPadding,
            right: audioClipFixedDurationClipConfig.clipAreaRTPadding,
            top: 0,
            bottom: 0,
            child: Container(
              height: audioClipFixedDurationClipConfig.clipAreaRenderHeight,
              decoration: const BoxDecoration(
                color: Color.fromRGBO(225, 227, 231, 1),
              ),
            )
          ),
          buildRenderWaveBarWidgets(context),
          Positioned(
            left: audioClipFixedDurationClipConfig.clipAreaRTPadding,
            top: 0,
            bottom: 0,
            child: Container(
              height: audioClipFixedDurationClipConfig.clipAreaRenderHeight,
              width: 2,
              decoration: const BoxDecoration(
                color: Colors.black
              ),
            )
          ),
          buildRenderClipAreaOpWidget(context),
        ]
      )
    );
  }

  buildRenderWaveBarWidgets(BuildContext context){
    return Positioned(
      left: 0,
      right: 0,
      top: 0,
      bottom: 0,
      child: SizedBox(
        height: audioClipFixedDurationClipConfig.clipAreaRenderHeight,
        child: SingleChildScrollView(
          controller: scrollController, // scroll position is driven programmatically via the ScrollController
          scrollDirection: Axis.horizontal,
          physics: const NeverScrollableScrollPhysics(), // disable manual scrolling
          child: buildWaveBarWidgets(context),
        ),
      )
    );
  }

  buildRenderClipAreaOpWidget(BuildContext context){
    return Positioned(
      left: 0,
      right: 0,
      top: 0,
      bottom: 0,
      child:  GestureDetector(
        onTapDown: (details) {
          // scrollTotalWidth = scrollController.position.maxScrollExtent;
        },
        onPanUpdate: (details) {
        },
        onTapUp: (details) {
        },
        onHorizontalDragStart: (details) async {
          if(disabledDrag){
            return;
          }
          gestureStatus = 'Tap Down';
          dragStartX = details.globalPosition.dx;
          scrollCurrentPosition = scrollController?.position.pixels ?? 0;
          await audioPlayer?.pause();
          playProgress = 0;
          renderPlayerProgres();
          updateClipPlayProgressController();
          // debugPrint("🍎🍎🍎🍎🍎scrollCurrentPosition, $scrollCurrentPosition dragStartX: $dragStartX");
        },
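        // Summary of the drag handlers (descriptive only): drag start (above)
        // pauses playback and records the touch x plus the current scroll
        // offset; each drag update below converts the horizontal delta into a
        // scroll offset clamped to [0, scrollTotalWidth - scrollViewWidth] and
        // jumps the waveform there; drag end calls doSetClipArea() to update
        // the clip selection from the final position.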
        onHorizontalDragUpdate: (details) {
          if(gestureStatus == 'Tap Down'){
            dragEndX = details.globalPosition.dx;
            // debugPrint("🍎🍎🍎🍎🍎scrollCurrentPosition, $scrollCurrentPosition dragStartX: $dragStartX dragStartX: $dragEndX");
            double distance = dragEndX - dragStartX;
            double scrollX = scrollCurrentPosition - distance;
            if (scrollX > (scrollTotalWidth - scrollViewWidth)) {
              scrollX = scrollTotalWidth - scrollViewWidth;
            } else if (scrollX < 0) {
              scrollX = 0;
            }
            renderWaveBarScrollX = scrollX;
            scrollController?.jumpTo(scrollX);
            updateClipRenderWaveBarScrollX();
            // debugPrint("scrollController.jumpTo, $scrollX");
            // if (distance > 0) {
            //   debugPrint('Right swipe distance: $distance');
            // } else {
            //   debugPrint('Left swipe distance: $distance');
            // }
          }
        },
        onHorizontalDragEnd: (details) async {
          if(gestureStatus == 'Tap Down'){
            doSetClipArea();
          }else{
            Duration seekto = Duration(milliseconds: (clipStartTime * 1000).toInt());
            await audioPlayer?.play(DeviceFileSource(mp3FilePath), position: seekto);
          }
          gestureStatus = 'Tap Up';
        
        },
        child: Container(
          // Fully transparent layer so the GestureDetector receives taps and drags across the whole clip area
          color: Colors.amber.withOpacity(0),
        )
      )
 
    );
  }
  buildWaveBarWidgets(BuildContext context){
    final ClipPlayProgressController pcontroller = Get.put(ClipPlayProgressController());
    return GetX<ClipWaveDataController>(
      init: ClipWaveDataController(), // initialize the controller
      builder: (controller) {
        // List<Widget> list = [];
        // // print("🍎🍎🍎:buildWaveBarWidgets__controller.waveBarData.value.length ${controller.waveBarData.length}");
        // EdgeInsets  margin = EdgeInsets.only(right: audioClipFixedDurationClipConfig.waveItemSpacing);
        // for (var index = 0; index < controller.waveBarData.length; index++) {
        //   list.add(
        //     Obx(() => Container(
        //       margin: index == controller.waveBarData.length-1 ? const EdgeInsets.all(0) : margin,
        //       width: audioClipFixedDurationClipConfig.waveItemWidth,
        //       height: controller.waveBarData[index],
        //       decoration: BoxDecoration(
        //         borderRadius: BorderRadius.circular(2),
        //         color: (index >= pcontroller.highStartIndex.value && index < pcontroller.highEndIndex.value)
        //             ? audioClipFixedDurationClipConfig.waveItemHighColor
        //             : audioClipFixedDurationClipConfig.waveItemColor,
        //       ),
        //     ))
        //   );
        // }
        return SizedBox(
          width: scrollTotalWidth, // container width
          height: audioClipFixedDurationClipConfig.clipAreaRenderHeight,
          child: Row(
            mainAxisAlignment: MainAxisAlignment.start,
            crossAxisAlignment: CrossAxisAlignment.center,
            children:[
              SizedBox(width: audioClipFixedDurationClipConfig.clipAreaRTPadding),
              //...list,
              CustomPaint(
                painter: WaveBarPainter(
                  waveData: controller.waveBarData,
                  highStartIndex: pcontroller.highStartIndex.value,
                  highEndIndex: pcontroller.highEndIndex.value,
                  waveItemColor: audioClipFixedDurationClipConfig.waveItemColor,
                  waveItemHighColor: audioClipFixedDurationClipConfig.waveItemHighColor,
                  waveItemWidth: audioClipFixedDurationClipConfig.waveItemWidth,
                  waveItemSpacing: audioClipFixedDurationClipConfig.waveItemSpacing,
                ),
                size: waveRenderTotalWidth > 0 ? Size(waveRenderTotalWidth, audioClipFixedDurationClipConfig.clipAreaRenderHeight) : Size.zero,
              ),
              SizedBox(width: audioClipFixedDurationClipConfig.clipAreaRTPadding),
            ]
          )
        );
      },
    );
  }
}
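
The CustomPaint in buildWaveBarWidgets delegates all bar drawing to WaveBarPainter. If you are adapting this code and do not already have that painter, the sketch below is a minimal stand-in that matches the constructor call used above. The field names come from that call site; the drawing style (rounded bars centered vertically) is an assumption, not the original implementation.

import 'package:flutter/material.dart';

class SimpleWaveBarPainter extends CustomPainter {
  // Field names mirror the WaveBarPainter call site above; the paint logic is illustrative.
  final List<double> waveData;
  final int highStartIndex;
  final int highEndIndex;
  final Color waveItemColor;
  final Color waveItemHighColor;
  final double waveItemWidth;
  final double waveItemSpacing;

  SimpleWaveBarPainter({
    required this.waveData,
    required this.highStartIndex,
    required this.highEndIndex,
    required this.waveItemColor,
    required this.waveItemHighColor,
    required this.waveItemWidth,
    required this.waveItemSpacing,
  });

  @override
  void paint(Canvas canvas, Size size) {
    final normalPaint = Paint()..color = waveItemColor;
    final highPaint = Paint()..color = waveItemHighColor;
    for (var i = 0; i < waveData.length; i++) {
      final barHeight = waveData[i];
      final left = i * (waveItemWidth + waveItemSpacing);
      final top = (size.height - barHeight) / 2; // center each bar vertically
      final bar = RRect.fromRectAndRadius(
        Rect.fromLTWH(left, top, waveItemWidth, barHeight),
        const Radius.circular(2),
      );
      final highlighted = i >= highStartIndex && i < highEndIndex;
      canvas.drawRRect(bar, highlighted ? highPaint : normalPaint);
    }
  }

  @override
  bool shouldRepaint(covariant SimpleWaveBarPainter oldDelegate) =>
      oldDelegate.waveData != waveData ||
      oldDelegate.highStartIndex != highStartIndex ||
      oldDelegate.highEndIndex != highEndIndex;
}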

Usage example

import 'package:file_picker/file_picker.dart';
import 'package:image_picker/image_picker.dart';
import 'package:permission_handler/permission_handler.dart';
import 'package:flutter/material.dart';

import 'audio_clip_fixed_duration.dart';
import 'ffmpeg_utils_kit.dart';

class TestAudioClipFixedDuration extends StatefulWidget {
  const TestAudioClipFixedDuration({
    super.key,
  });
  
  @override
  TestAudioClipFixedDurationState createState() => TestAudioClipFixedDurationState();
}

class TestAudioClipFixedDurationState extends State<TestAudioClipFixedDuration> {
  
  @override
  void initState() {
    super.initState();
  }

  @override
  void dispose() {
    super.dispose();
  }

  
  // Selected audio file and the latest clip result
  String _selectedFilePath = '';
  AudioClipFixedDurationClipedData? _clipData;

  FfmpegKitInitClipAudioData? _initClipAudioData;
  final picker = ImagePicker();
  
  AudioClipFixedDurationClipConfig audioClipFixedDurationClipConfig = AudioClipFixedDurationClipConfig().copyWith(
    boxLRMargin: 16,
    clipMaxDuration: 30,
    clipAreaRTPadding: 12,
    waveItemWidth: 2,
    waveItemHeight: 36,
    waveItemSpacing: 1,
    waveItemColor: const Color.fromRGBO(151, 159, 171, 1),
    waveItemHighColor: const Color.fromRGBO(0, 87, 255, 1),
    playOpIconSize: 24,

    clipAreaRenderHeight: 56,
    playOpBtnSize: 44,
    playOpBtnBorderRadius: 44,
    playOpBtnMargin: 2,
    clipAreaLeftMargin: 8,
  );


  Future<void> _selectAudioFile(BuildContext context) async {
    print("_selectAudioFile");
    
    // Check storage permission
    // if (await Permission.storage.request().isGranted) {
    //   // Storage permission granted; file selection can proceed
    //   _selectAudioFile();
    // } else {
    //   // Storage permission denied; file selection is not possible
    // }
    // if (await Permission.storage.request().isGranted) {
      XFile? pickedFile = await picker.pickVideo(source: ImageSource.gallery);
      print("🍎🍎🍎🍎🍎🍎pickedFile: ${pickedFile?.path}");


      _selectedFilePath = pickedFile?.path ?? '';
      setState(() {});
       
      
      // XFile? pickedFile = await picker.pickVideo(source: ImageSource.gallery);
      // double windowWidth = MediaQuery.of(context).size.width;
      // _initClipAudioData = await FfmpegUtilsKit.getAudioDataByFile(
      //   filepath: pickedFile?.path ?? '',
      //   windowWidth: windowWidth,
      //   clipConfig: audioClipFixedDurationClipConfig
      // );
      // if (_initClipAudioData == null) {
      //   print("Failed to extract audio");
      // } else {
      //   print("🍎🍎🍎🍎🍎🍎_initClipAudioData: ${_initClipAudioData}");
      //   setState(() {});
      // }
      

      // FilePickerResult? result = await FilePicker.platform.pickFiles(
      // );
      // if (result != null) {
      //   print("🍎🍎🍎🍎🍎🍎_selectAudioFile_result: ${result.files.single.path}");
      //   _selectedFilePath = result.files.single.path ?? '';
      //   setState(() {
      //     //  print("🍎🍎🍎🍎🍎🍎_selectAudioFiler_esult: ${result.files[0].bytes?.buffer.lengthInBytes}");
      //   });
      // }
    // } else {
    //   // Storage permission denied
    // }
   
  }

  Future<void> _clearAllAudioEditTempData(BuildContext context) async {
    await FfmpegUtilsKit.deleteAudioTempDirectory();
    return;
  }

  Future<void> _getClipedFile(BuildContext context) async {
    String? outputPath = await FfmpegUtilsKit.cropMedia(
      filePath: _clipData?.mp3FilePath??'',
      clipStartTime: _clipData?.clipStartTime??0, 
      clipEndTime: _clipData?.clipEndTime??0,
    );
    debugPrint("cropAudio: $outputPath");
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      appBar: AppBar(
        title: const Text('Audio Crop'),
      ),
      body:  Container(
        padding: const EdgeInsets.all(0),
        child:  Column(
          children: [
            ElevatedButton(
              onPressed: ()=>{
                _selectAudioFile(context)
              },
              child: const Text('Select Audio File'),
            ),
            ElevatedButton(
              onPressed: ()=>{
                _getClipedFile(context)
              },
              child: const Text('Get clipped file'),
            ),
            ElevatedButton(
              onPressed: ()=>{
                _clearAllAudioEditTempData(context)
              },
              child: const Text('clear all audio edit temp data'),
            ),
            const SizedBox(height: 20),
            Text(
              _selectedFilePath, 
              style: const TextStyle(fontSize: 16)
            ),
            const SizedBox(height: 20),
            AudioClipFixedDuration(
              initNoPlay: true,
              clipConfig: audioClipFixedDurationClipConfig,
              renderDebug: true,
              onlyInitClipOne: false,
              initClipAudioData: _initClipAudioData,
              windowWidth: MediaQuery.of(context).size.width,
              filePath: _selectedFilePath,
              // builderTimeRow: (BuildContext context, ClipTimeController controller){
              //   return Container(
              //     height: 20,
              //     child: Text("${controller.clipStartTime}---${controller.clipEndTime}---${controller.totalDuration}---${controller.clipDuaration}"),
              //   );
              // },
              onClipChange: (AudioClipFixedDurationClipedData clipData) {
                _clipData = clipData;
                debugPrint('🍎🍎🍎🍎🍎🍎clipData_mp3FilePath: ${clipData.mp3FilePath}');
                debugPrint('🍎🍎🍎🍎🍎🍎clipData_clipStartTime: ${clipData.clipStartTime}');
                debugPrint('🍎🍎🍎🍎🍎🍎clipData_clipEndTime: ${clipData.clipEndTime}');
                debugPrint('🍎🍎🍎🍎🍎🍎clipData_clipDuaration: ${clipData.clipDuaration}');
                debugPrint('🍎🍎🍎🍎🍎🍎clipData_totalDuration: ${clipData.totalDuration}');
                
              },
            )
          ]
        )
      )
    );
  }
}
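
For reference, _getClipedFile hands the clip range to FfmpegUtilsKit.cropMedia, which runs an ffmpeg command through ffmpeg_kit_flutter_full. The snippet below is a minimal standalone sketch of that kind of call and is not the article's cropMedia implementation: the function name, parameter names, and the libmp3lame/mp3 output are illustrative assumptions.

import 'package:ffmpeg_kit_flutter_full/ffmpeg_kit.dart';
import 'package:ffmpeg_kit_flutter_full/return_code.dart';

// Illustrative sketch only; FfmpegUtilsKit.cropMedia in this article may differ.
Future<String?> cropAudioSketch({
  required String inputPath,
  required String outputPath,
  required double startSeconds,
  required double endSeconds,
}) async {
  // -ss/-to select the time range, -vn drops any video stream, libmp3lame re-encodes the audio to mp3.
  final command =
      '-i "$inputPath" -ss $startSeconds -to $endSeconds -vn -acodec libmp3lame -y "$outputPath"';
  final session = await FFmpegKit.execute(command);
  final returnCode = await session.getReturnCode();
  return ReturnCode.isSuccess(returnCode) ? outputPath : null;
}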


Reprinted from: https://juejin.cn/post/7362029084878716928