using System.Runtime.InteropServices;
using Bmp.Core.FFMpeg.CsCoreExt;
namespace Bmp.Core.FFMpeg.CsCorePorts.FFMpegWrap;
/// <summary>
/// Generic FFmpeg based decoder.
/// </summary>
/// <remarks>
/// The <see cref="FfmpegDecoder"/> uses the FFmpeg libraries to decode audio files.
/// In order to make sure that the FFmpeg libraries are compatible with the <see cref="FfmpegDecoder"/>,
/// use the binaries shipped with the CSCore.Ffmpeg project.
/// If a custom build is necessary, use the FFmpeg source code from the CSCore git repository
/// (https://github.com/filoe/cscore).
/// </remarks>
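/// <example>
/// A minimal usage sketch (the file path is illustrative and playback plumbing through an
/// <see cref="IWaveSource"/> consumer is omitted):
/// <code>
/// using var decoder = new FfmpegDecoder(@"C:\music\track.flac");
/// // read one block-aligned chunk of decoded PCM
/// var buffer = new byte[decoder.WaveFormat.BlockAlign * 4096];
/// int read = decoder.Read(buffer, 0, buffer.Length);
/// </code>
/// </example>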
public class FfmpegDecoder : IWaveSource
{
// BmpMod: expose attached pictures, raw bit depth, stream/file bit rates and a format description
public FFMpegAttachedPicCollection? AttachedPics => _formatContext == null ? null : new FFMpegAttachedPicCollection(_formatContext.FormatContext);
public unsafe int? BitPerRawSample => _formatContext != null ? _formatContext.SelectedStream.Stream.codec->bits_per_raw_sample : null;
public unsafe long? StreamBitPerSecond => _formatContext != null ? _formatContext.SelectedStream.Stream.codec->bit_rate : null;
public long? FileBitPerSecond => _formatContext?.FormatContext.bit_rate;
public unsafe string? FileFormat
{
get
{
if (_formatContext == null) return null;
var codec = Marshal.PtrToStringUTF8((nint)_formatContext.SelectedStream.Stream.codec->codec->long_name);
var container = Marshal.PtrToStringUTF8((nint)_formatContext.FormatContext.iformat->name);
return $"{codec} @ {container}";
}
}
// Orig
private readonly object _lockObject = new object();
private readonly Uri _uri;
private FfmpegStream? _ffmpegStream;
private AvFormatContext? _formatContext;
private bool _disposeStream = false;
private byte[] _overflowBuffer = Array.Empty<byte>();
private int _overflowCount;
private int _overflowOffset;
private long _position;
private Stream? _stream;
/// <summary>
/// Gets a dictionary with found metadata.
/// </summary>
public Dictionary<string, string> Metadata
{
get
{
if (_formatContext == null)
return new Dictionary<string, string>();
return _formatContext.Metadata;
}
}
/// <summary>
/// Initializes a new instance of the <see cref="FfmpegDecoder"/> class based on a specified filename or url.
/// </summary>
/// <param name="url">A url containing a filename or web url.</param>
/// <exception cref="FfmpegException">Any ffmpeg error.</exception>
/// <exception cref="NotSupportedException">
/// DBL format is not supported.
/// or
/// Audio Sample Format not supported.
/// </exception>
/// <exception cref="ArgumentNullException">url</exception>
public FfmpegDecoder(string url)
{
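// AVERROR(EINVAL): 0xffffffea is -22, returned by avformat_open_input when it cannot
// open the url directly; in that case we retry below by feeding the file as a stream.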
const int invalidArgument = unchecked((int)0xffffffea);
_uri = new Uri(url);
try
{
_formatContext = new AvFormatContext(url);
Initialize();
}
catch (FfmpegException ex)
{
if (ex.ErrorCode == invalidArgument && "avformat_open_input".Equals(ex.Function, StringComparison.OrdinalIgnoreCase))
{
if (!TryInitializeWithFileAsStream(url))
throw;
}
else
{
throw;
}
}
}
/// <summary>
/// Initializes a new instance of the <see cref="FfmpegDecoder"/> class based on a <paramref name="stream"/>.
/// </summary>
/// <param name="stream">The stream.</param>
/// <exception cref="FfmpegException">Any ffmpeg error.</exception>
/// <exception cref="ArgumentNullException">stream</exception>
/// <exception cref="ArgumentException">Stream is not readable.</exception>
/// <exception cref="Exception">Could not allocate FormatContext.</exception>
/// <exception cref="NotSupportedException">
/// DBL format is not supported.
/// or
/// Audio Sample Format not supported.
/// </exception>
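/// <example>
/// A minimal sketch of decoding from a managed stream; the decoder does not take ownership
/// of the stream, so the caller disposes it:
/// <code>
/// using var fileStream = File.OpenRead("track.mp3");
/// using var decoder = new FfmpegDecoder(fileStream);
/// </code>
/// </example>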
public FfmpegDecoder(Stream stream)
{
if (stream == null)
throw new ArgumentNullException(nameof(stream));
InitializeWithStream(stream, false);
}
/// <summary>
/// Reads a sequence of bytes from the <see cref="FfmpegDecoder"/> and advances the position within the
/// stream by the number of bytes read.
/// </summary>
/// <param name="buffer">
/// An array of bytes. When this method returns, the <paramref name="buffer"/> contains the specified
/// array of bytes with the values between <paramref name="offset"/> and (<paramref name="offset"/> +
/// <paramref name="count"/> - 1) replaced by the bytes read from the current source.
/// </param>
/// <param name="offset">
/// The zero-based offset in the <paramref name="buffer"/> at which to begin storing the data
/// read from the current stream.
/// </param>
/// <param name="count">The maximum number of bytes to read from the current source.</param>
/// <returns>The total number of bytes read into the buffer.</returns>
public int Read(byte[] buffer, int offset, int count)
{
var read = 0;
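//only request whole sample frames -> truncate count to a multiple of the block alignment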
count -= count % WaveFormat.BlockAlign;
var fetchedOverflows = GetOverflows(buffer, ref offset, count);
read += fetchedOverflows;
while (read < count)
{
long packetPosition;
int bufferLength;
lock (_lockObject)
{
using (var frame = new AvFrame(_formatContext))
{
double seconds;
bufferLength = frame.ReadNextFrame(out seconds, ref _overflowBuffer);
packetPosition = this.GetRawElements(TimeSpan.FromSeconds(seconds));
}
}
if (bufferLength <= 0)
{
//if (_uri != null && !_uri.IsFile)
//{
// //webstream: don't exit, maybe the connection was lost -> give it a try to recover
// Thread.Sleep(10);
//}
//else
break; //no webstream -> exit
}
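//copy as much of the decoded frame as fits; anything left over stays in
//_overflowBuffer and is served by GetOverflows on the next Read call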
var bytesToCopy = Math.Min(count - read, bufferLength);
Array.Copy(_overflowBuffer, 0, buffer, offset, bytesToCopy);
read += bytesToCopy;
offset += bytesToCopy;
_overflowCount = bufferLength > bytesToCopy ? bufferLength - bytesToCopy : 0;
_overflowOffset = bufferLength > bytesToCopy ? bytesToCopy : 0;
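//derive the new byte position from the timestamp of the last decoded packet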
_position = packetPosition + read - fetchedOverflows;
}
if (fetchedOverflows == read)
{
//no new packet was decoded -> add the read bytes to the position
_position += read;
}
return read;
}
/// <summary>
/// Gets a value indicating whether the <see cref="FfmpegDecoder"/> supports seeking.
/// </summary>
public bool CanSeek
{
get
{
if (_formatContext == null)
return false;
return _formatContext.CanSeek;
}
}
/// <summary>
/// Gets the <see cref="WaveFormat"/> of the waveform-audio data.
/// </summary>
public WaveFormat WaveFormat { get; private set; } = null!;
/// <summary>
/// Gets or sets the current position in bytes.
/// </summary>
public long Position
{
get { return _position; }
set { SeekPosition(value); }
}
/// <summary>
/// Gets the length of the waveform-audio data in bytes.
/// </summary>
public long Length
{
get
{
if (_formatContext == null || _formatContext.SelectedStream == null)
return 0;
return this.GetRawElements(TimeSpan.FromSeconds(_formatContext.LengthInSeconds));
}
}
/// <summary>
/// Releases all allocated resources used by the <see cref="FfmpegDecoder"/>.
/// </summary>
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
}
/// <summary>
/// Releases unmanaged and - optionally - managed resources.
/// </summary>
/// <param name="disposing"><c>true</c> to release both managed and unmanaged resources; <c>false</c> to release only unmanaged resources.</param>
protected virtual void Dispose(bool disposing)
{
if (disposing)
{
if (_disposeStream && _stream != null)
{
_stream.Dispose();
_stream = null;
}
if (_formatContext != null)
{
_formatContext.Dispose();
_formatContext = null;
}
if (_ffmpegStream != null)
{
_ffmpegStream.Dispose();
_ffmpegStream = null;
}
}
}
private unsafe void Initialize()
{
if (_formatContext == null) throw new InvalidOperationException();
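//derive the output WaveFormat from the selected audio stream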
WaveFormat = _formatContext.SelectedStream.GetSuggestedWaveFormat();
}
private void InitializeWithStream(Stream stream, bool disposeStream)
{
_stream = stream;
_disposeStream = disposeStream;
_ffmpegStream = new FfmpegStream(stream, false);
_formatContext = new AvFormatContext(_ffmpegStream);
Initialize();
}
private bool TryInitializeWithFileAsStream(string filename)
{
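//fallback for local files that avformat_open_input rejected by url:
//open the file ourselves and feed it to ffmpeg through FfmpegStream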
if (!File.Exists(filename))
return false;
Stream? stream = null;
try
{
stream = File.OpenRead(filename);
InitializeWithStream(stream, true);
return true;
}
catch (Exception)
{
if (stream != null)
{
stream.Dispose();
}
return false;
}
}
/// <summary>
/// Finalizes an instance of the <see cref="FfmpegDecoder"/> class.
/// </summary>
~FfmpegDecoder()
{
Dispose(false);
}
private void SeekPosition(long position)
{
//https://ffmpeg.org/doxygen/trunk/seek-test_8c-source.html
var seconds = this.GetMilliseconds(position) / 1000.0;
lock (_lockObject)
{
_formatContext.SeekFile(seconds);
_position = position;
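//drop any decoded bytes buffered from before the seek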
_overflowCount = 0;
_overflowOffset = 0;
}
}
private int GetOverflows(byte[] buffer, ref int offset, int count)
{
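//serve bytes decoded by a previous Read call that did not fit into the caller's buffer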
if (_overflowCount != 0 && _overflowBuffer != null && count > 0)
{
var bytesToCopy = Math.Min(count, _overflowCount);
Array.Copy(_overflowBuffer, _overflowOffset, buffer, offset, bytesToCopy);
_overflowCount -= bytesToCopy;
_overflowOffset += bytesToCopy;
offset += bytesToCopy;
return bytesToCopy;
}
return 0;
}
}