[FFmpeg-devel] [PATCH 4/7] dshow: add audio/video options
Ramiro Polla
ramiro.polla at gmail.com
Fri Sep 2 06:45:12 CEST 2011
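Add options to request a specific video size, frame rate, audio sample
rate, sample size and channel count from DirectShow capture devices.
The requested values are checked against the capabilities reported by
each capture pin's IAMStreamConfig interface and applied with
SetFormat() when the capture pin is selected.

Example usage (device names are illustrative):

  ffmpeg -f dshow -video_size 640x480 -framerate 30 -i video="Camera"
  ffmpeg -f dshow -sample_rate 44100 -sample_size 16 -channels 2 -i audio="Microphone"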
---
libavdevice/dshow.c | 150 ++++++++++++++++++++++++++++++++++++++++++++
libavdevice/dshow.h | 2 +
libavdevice/dshow_common.c | 49 ++++++++++++++
3 files changed, 201 insertions(+), 0 deletions(-)
diff --git a/libavdevice/dshow.c b/libavdevice/dshow.c
index 555a2b8..4cda218 100644
--- a/libavdevice/dshow.c
+++ b/libavdevice/dshow.c
@@ -19,6 +19,7 @@
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
+#include "libavutil/parseutils.h"
#include "libavutil/opt.h"
#include "avdevice.h"
@@ -46,6 +47,17 @@ struct dshow_ctx {
unsigned int video_frame_num;
IMediaControl *control;
+
+ char *video_size;
+ char *framerate;
+
+ int requested_width;
+ int requested_height;
+ AVRational requested_framerate;
+
+ int sample_rate;
+ int sample_size;
+ int channels;
};
static enum PixelFormat dshow_pixfmt(DWORD biCompression, WORD biBitCount)
@@ -210,6 +222,109 @@ fail:
}
static int
+dshow_set_format(AVFormatContext *avctx, enum dshowDeviceType devtype,
+ IPin *pin, AM_MEDIA_TYPE *type)
+{
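+ /* Iterate over the stream capabilities exposed through IAMStreamConfig and
+ * apply the first one that can accommodate the requested format. */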
+ struct dshow_ctx *ctx = avctx->priv_data;
+ IAMStreamConfig *c = NULL;
+ void *caps = NULL;
+ int i, n, size;
+ int ret = 0;
+
+ if (IPin_QueryInterface(pin, &IID_IAMStreamConfig, (void **) &c) != S_OK)
+ return 0;
+ if (IAMStreamConfig_GetNumberOfCapabilities(c, &n, &size) != S_OK)
+ goto end;
+
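+ /* 'size' is the byte size of the VIDEO/AUDIO_STREAM_CONFIG_CAPS structure
+ * that GetStreamCaps() fills in for each capability. */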
+ caps = av_malloc(size);
+ if (!caps)
+ goto end;
+
+ for (i = 0; i < n; i++) {
+ IAMStreamConfig_GetStreamCaps(c, i, &type, (void *) caps);
+
+#if DSHOWDEBUG
+ ff_print_AM_MEDIA_TYPE(type);
+#endif
+
+ if (devtype == VideoDevice) {
+ VIDEO_STREAM_CONFIG_CAPS *vcaps = caps;
+ BITMAPINFOHEADER *bih;
+ int64_t *fr;
+#if DSHOWDEBUG
+ ff_print_VIDEO_STREAM_CONFIG_CAPS(vcaps);
+#endif
+ if (IsEqualGUID(&type->formattype, &FORMAT_VideoInfo)) {
+ VIDEOINFOHEADER *v = (void *) type->pbFormat;
+ fr = &v->AvgTimePerFrame;
+ bih = &v->bmiHeader;
+ } else if (IsEqualGUID(&type->formattype, &FORMAT_VideoInfo2)) {
+ VIDEOINFOHEADER2 *v = (void *) type->pbFormat;
+ fr = &v->AvgTimePerFrame;
+ bih = &v->bmiHeader;
+ } else {
+ continue;
+ }
+ if (ctx->framerate) {
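+ /* AvgTimePerFrame is expressed in 100-nanosecond units, so convert
+ * the requested frame rate (num/den fps) to a frame interval. */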
+ int64_t framerate = (ctx->requested_framerate.den*10000000)
+ / ctx->requested_framerate.num;
+ if (framerate > vcaps->MaxFrameInterval ||
+ framerate < vcaps->MinFrameInterval)
+ continue;
+ *fr = framerate;
+ }
+ if (ctx->video_size) {
+ if (ctx->requested_width > vcaps->MaxOutputSize.cx ||
+ ctx->requested_width < vcaps->MinOutputSize.cx ||
+ ctx->requested_height > vcaps->MaxOutputSize.cy ||
+ ctx->requested_height < vcaps->MinOutputSize.cy)
+ continue;
+ bih->biWidth = ctx->requested_width;
+ bih->biHeight = ctx->requested_height;
+ }
+ } else {
+ AUDIO_STREAM_CONFIG_CAPS *acaps = caps;
+ WAVEFORMATEX *fx;
+#if DSHOWDEBUG
+ ff_print_AUDIO_STREAM_CONFIG_CAPS(acaps);
+#endif
+ if (IsEqualGUID(&type->formattype, &FORMAT_WaveFormatEx)) {
+ fx = (void *) type->pbFormat;
+ } else {
+ continue;
+ }
+ if (ctx->sample_rate) {
+ if (ctx->sample_rate > acaps->MaximumSampleFrequency ||
+ ctx->sample_rate < acaps->MinimumSampleFrequency)
+ continue;
+ fx->nSamplesPerSec = ctx->sample_rate;
+ }
+ if (ctx->sample_size) {
+ if (ctx->sample_size > acaps->MaximumBitsPerSample ||
+ ctx->sample_size < acaps->MinimumBitsPerSample)
+ continue;
+ fx->wBitsPerSample = ctx->sample_size;
+ }
+ if (ctx->channels) {
+ if (ctx->channels > acaps->MaximumChannels ||
+ ctx->channels < acaps->MinimumChannels)
+ continue;
+ fx->nChannels = ctx->channels;
+ }
+ }
+ if (IAMStreamConfig_SetFormat(c, type) != S_OK)
+ continue;
+ ret = 1;
+ break;
+ }
+end:
+ if (c)
+ IAMStreamConfig_Release(c);
+ av_free(caps);
+ return ret;
+}
+
+static int
dshow_open_device(AVFormatContext *avctx, ICreateDevEnum *devenum,
enum dshowDeviceType devtype)
{
@@ -234,6 +349,10 @@ dshow_open_device(AVFormatContext *avctx, ICreateDevEnum *devenum,
const char *devtypename = (devtype == VideoDevice) ? "video" : "audio";
const wchar_t *filter_name[2] = { L"Audio capture filter", L"Video capture filter" };
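+ /* Only try to set a format when the user requested one; format_set
+ * defaults to 1 so the check after the pin loop only fails when a
+ * requested format could not be applied. */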
+ int set_format = (devtype == VideoDevice && (ctx->video_size || ctx->framerate))
+ || (devtype == AudioDevice && (ctx->channels || ctx->sample_rate || ctx->sample_size));
+ int format_set = 1;
+
r = ICreateDevEnum_CreateClassEnumerator(devenum, device_guid[devtype],
(IEnumMoniker **) &classenum, 0);
if (r != S_OK) {
@@ -322,6 +441,13 @@ fail1:
if (!IsEqualGUID(&category, &PIN_CATEGORY_CAPTURE))
goto next;
+ if (set_format) {
+ format_set = dshow_set_format(avctx, devtype, pin, type);
+ if (!format_set) {
+ goto next;
+ }
+ }
+
if (IPin_EnumMediaTypes(pin, &types) != S_OK)
goto next;
@@ -342,6 +468,10 @@ next:
if (device_pin != pin)
IPin_Release(pin);
}
+ if (!format_set) {
+ av_log(avctx, AV_LOG_ERROR, "Could not set %s options\n", devtypename);
+ goto error;
+ }
if (!device_pin) {
av_log(avctx, AV_LOG_ERROR,
@@ -543,6 +673,21 @@ static int dshow_read_header(AVFormatContext *avctx, AVFormatParameters *ap)
goto error;
}
+ if (ctx->video_size) {
+ r = av_parse_video_size(&ctx->requested_width, &ctx->requested_height, ctx->video_size);
+ if (r < 0) {
+ av_log(avctx, AV_LOG_ERROR, "Couldn't parse video size.\n");
+ goto error;
+ }
+ }
+ if (ctx->framerate) {
+ r = av_parse_video_rate(&ctx->requested_framerate, ctx->framerate);
+ if (r < 0) {
+ av_log(avctx, AV_LOG_ERROR, "Could not parse framerate '%s'.\n", ctx->framerate);
+ goto error;
+ }
+ }
+
CoInitialize(0);
r = CoCreateInstance(&CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER,
@@ -656,6 +801,11 @@ static int dshow_read_packet(AVFormatContext *s, AVPacket *pkt)
#define OFFSET(x) offsetof(struct dshow_ctx, x)
#define DEC AV_OPT_FLAG_DECODING_PARAM
static const AVOption options[] = {
+ { "video_size", "A string describing frame size, such as 640x480 or hd720.", OFFSET(video_size), FF_OPT_TYPE_STRING, {.str = NULL}, 0, 0, DEC },
+ { "framerate", "", OFFSET(framerate), FF_OPT_TYPE_STRING, {.str = NULL}, 0, 0, DEC },
+ { "sample_rate", "", OFFSET(sample_rate), FF_OPT_TYPE_INT, {.dbl = 0}, 0, INT_MAX, DEC },
+ { "sample_size", "", OFFSET(sample_size), FF_OPT_TYPE_INT, {.dbl = 0}, 0, 16, DEC },
+ { "channels", "", OFFSET(channels), FF_OPT_TYPE_INT, {.dbl = 0}, 0, INT_MAX, DEC },
{ "list_devices", "list available devices", OFFSET(list_devices), FF_OPT_TYPE_INT, {.dbl=0}, 0, 1, DEC, "list_devices" },
{ "true", "", 0, FF_OPT_TYPE_CONST, {.dbl=1}, 0, 0, DEC, "list_devices" },
{ "false", "", 0, FF_OPT_TYPE_CONST, {.dbl=0}, 0, 0, DEC, "list_devices" },
diff --git a/libavdevice/dshow.h b/libavdevice/dshow.h
index 4e79680..83c71c4 100644
--- a/libavdevice/dshow.h
+++ b/libavdevice/dshow.h
@@ -29,6 +29,8 @@
#include <dvdmedia.h>
long ff_copy_dshow_media_type(AM_MEDIA_TYPE *dst, const AM_MEDIA_TYPE *src);
+void ff_print_VIDEO_STREAM_CONFIG_CAPS(const VIDEO_STREAM_CONFIG_CAPS *caps);
+void ff_print_AUDIO_STREAM_CONFIG_CAPS(const AUDIO_STREAM_CONFIG_CAPS *caps);
void ff_print_AM_MEDIA_TYPE(const AM_MEDIA_TYPE *type);
void ff_printGUID(const GUID *g);
diff --git a/libavdevice/dshow_common.c b/libavdevice/dshow_common.c
index c813dc1..8fe2f77 100644
--- a/libavdevice/dshow_common.c
+++ b/libavdevice/dshow_common.c
@@ -82,6 +82,55 @@ static void dump_bih(void *s, BITMAPINFOHEADER *bih)
}
#endif
+void ff_print_VIDEO_STREAM_CONFIG_CAPS(const VIDEO_STREAM_CONFIG_CAPS *caps)
+{
+#if DSHOWDEBUG
+ dshowdebug(" VIDEO_STREAM_CONFIG_CAPS\n");
+ dshowdebug(" guid\t");
+ ff_printGUID(&caps->guid);
+ dshowdebug("\n");
+ dshowdebug(" VideoStandard\t%lu\n", caps->VideoStandard);
+ dshowdebug(" InputSize %ld\t%ld\n", caps->InputSize.cx, caps->InputSize.cy);
+ dshowdebug(" MinCroppingSize %ld\t%ld\n", caps->MinCroppingSize.cx, caps->MinCroppingSize.cy);
+ dshowdebug(" MaxCroppingSize %ld\t%ld\n", caps->MaxCroppingSize.cx, caps->MaxCroppingSize.cy);
+ dshowdebug(" CropGranularityX\t%d\n", caps->CropGranularityX);
+ dshowdebug(" CropGranularityY\t%d\n", caps->CropGranularityY);
+ dshowdebug(" CropAlignX\t%d\n", caps->CropAlignX);
+ dshowdebug(" CropAlignY\t%d\n", caps->CropAlignY);
+ dshowdebug(" MinOutputSize %ld\t%ld\n", caps->MinOutputSize.cx, caps->MinOutputSize.cy);
+ dshowdebug(" MaxOutputSize %ld\t%ld\n", caps->MaxOutputSize.cx, caps->MaxOutputSize.cy);
+ dshowdebug(" OutputGranularityX\t%d\n", caps->OutputGranularityX);
+ dshowdebug(" OutputGranularityY\t%d\n", caps->OutputGranularityY);
+ dshowdebug(" StretchTapsX\t%d\n", caps->StretchTapsX);
+ dshowdebug(" StretchTapsY\t%d\n", caps->StretchTapsY);
+ dshowdebug(" ShrinkTapsX\t%d\n", caps->ShrinkTapsX);
+ dshowdebug(" ShrinkTapsY\t%d\n", caps->ShrinkTapsY);
+ dshowdebug(" MinFrameInterval\t%"PRId64"\n", caps->MinFrameInterval);
+ dshowdebug(" MaxFrameInterval\t%"PRId64"\n", caps->MaxFrameInterval);
+ dshowdebug(" MinBitsPerSecond\t%ld\n", caps->MinBitsPerSecond);
+ dshowdebug(" MaxBitsPerSecond\t%ld\n", caps->MaxBitsPerSecond);
+#endif
+}
+
+void ff_print_AUDIO_STREAM_CONFIG_CAPS(const AUDIO_STREAM_CONFIG_CAPS *caps)
+{
+#if DSHOWDEBUG
+ dshowdebug(" AUDIO_STREAM_CONFIG_CAPS\n");
+ dshowdebug(" guid\t");
+ ff_printGUID(&caps->guid);
+ dshowdebug("\n");
+ dshowdebug(" MinimumChannels\t%lu\n", caps->MinimumChannels);
+ dshowdebug(" MaximumChannels\t%lu\n", caps->MaximumChannels);
+ dshowdebug(" ChannelsGranularity\t%lu\n", caps->ChannelsGranularity);
+ dshowdebug(" MinimumBitsPerSample\t%lu\n", caps->MinimumBitsPerSample);
+ dshowdebug(" MaximumBitsPerSample\t%lu\n", caps->MaximumBitsPerSample);
+ dshowdebug(" BitsPerSampleGranularity\t%lu\n", caps->BitsPerSampleGranularity);
+ dshowdebug(" MinimumSampleFrequency\t%lu\n", caps->MinimumSampleFrequency);
+ dshowdebug(" MaximumSampleFrequency\t%lu\n", caps->MaximumSampleFrequency);
+ dshowdebug(" SampleFrequencyGranularity\t%lu\n", caps->SampleFrequencyGranularity);
+#endif
+}
+
void ff_print_AM_MEDIA_TYPE(const AM_MEDIA_TYPE *type)
{
#if DSHOWDEBUG
--
1.7.4.1