adding support for setting bus parameters in sub device

Message ID 1244580891-24153-1-git-send-email-m-karicheri2@ti.com (mailing list archive)
State RFC

Commit Message

Murali Karicheri June 9, 2009, 8:54 p.m. UTC
From: Muralidharan Karicheri <a0868495@gt516km11.gt.design.ti.com>

This patch adds support for setting bus parameters such as bus type
(BT.656, BT.1120 etc.), width (for example, a 10-bit raw image data bus)
and polarities (vsync, hsync, field etc.) in a sub-device. This allows
the bridge driver to configure the sub-device for a specific set of bus
parameters through the s_bus() function call.

Reviewed-by: Hans Verkuil
Signed-off-by: Muralidharan Karicheri <m-karicheri2@ti.com>
---
Applies to v4l-dvb repository

 include/media/v4l2-subdev.h |   36 ++++++++++++++++++++++++++++++++++++
 1 files changed, 36 insertions(+), 0 deletions(-)
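For context, this is roughly how a bridge driver would use the new op (a
minimal sketch against this patch; the BT.656 values are illustrative, and
v4l2_subdev_call() is the existing helper that resolves to
sd->ops->core->s_bus()):

    #include <media/v4l2-subdev.h>

    /* Illustrative: configure a BT.656 decoder sub-device for an 8-bit
     * bus, active-low syncs, sampling on the rising pclk edge. */
    static int bridge_setup_bus(struct v4l2_subdev *sd)
    {
        struct v4l2_subdev_bus bus = {
            .type       = V4L2_SUBDEV_BUS_BT_656,
            .width      = 8,
            .pol_vsync  = 0,    /* active low */
            .pol_hsync  = 0,    /* active low */
            .pol_field  = 0,    /* low to high */
            .pol_pclock = 1,    /* sample on rising edge */
            .pol_data   = 1,    /* active high */
        };

        return v4l2_subdev_call(sd, core, s_bus, &bus);
    }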

Comments

Guennadi Liakhovetski June 10, 2009, 6:32 p.m. UTC | #1
On Tue, 9 Jun 2009, m-karicheri2@ti.com wrote:

> From: Muralidharan Karicheri <a0868495@gt516km11.gt.design.ti.com>
> 
> This patch adds support for setting bus parameters such as bus type
> (BT.656, BT.1120 etc), width (example 10 bit raw image data bus)
> and polarities (vsync, hsync, field etc) in sub device. This allows
> bridge driver to configure the sub device for a specific set of bus
> parameters through s_bus() function call.

Yes, this is required, but this is not enough. Firstly, you're missing at 
least one more flag - master or slave. Secondly, it is not enough to 
provide an s_bus function. Many hosts and sensors can configure one of 
several alternate configurations - they can select signal polarities, data 
widths, master / slave role, etc. Whereas others have some or all of these 
parameters fixed. That's why we have a query method in soc-camera, which 
delivers all supported configurations, and then the host can select some 
mutually acceptable subset. No, just returning an error code is not 
enough.

So, you would need to request supported flags, the sensor would return a 
bitmask, and the host would then issue a s_bus call with a selected subset 
and configure itself. And then you realise that one bit per parameter is 
not enough - you need a bit for both, e.g., vsync active low and active 
high.
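The flow Guennadi describes would look roughly like this - a sketch only;
the SDBUS_* flags and the query/set helpers below are hypothetical, not
part of this patch or of the tree:

    #include <linux/errno.h>
    #include <media/v4l2-subdev.h>

    /* One bit per option, so a chip that supports both hsync polarities
     * simply sets both bits (hypothetical flags). */
    #define SDBUS_HSYNC_HIGH    (1 << 0)
    #define SDBUS_HSYNC_LOW     (1 << 1)
    #define SDBUS_PCLK_RISING   (1 << 2)
    #define SDBUS_PCLK_FALLING  (1 << 3)

    /* hypothetical helpers standing in for a real query/set op pair */
    extern unsigned long sensor_query_bus_caps(struct v4l2_subdev *sd);
    extern void sensor_set_bus(struct v4l2_subdev *sd, unsigned long flags);
    extern void host_set_bus(unsigned long flags);

    static int negotiate_bus(struct v4l2_subdev *sd, unsigned long host_caps)
    {
        /* 1. the sensor returns a bitmask of everything it supports,
         *    which is intersected with the host's own capabilities */
        unsigned long common = sensor_query_bus_caps(sd) & host_caps;
        unsigned long chosen = 0;

        /* 2. no mutually acceptable option -> error */
        if (!(common & (SDBUS_HSYNC_HIGH | SDBUS_HSYNC_LOW)) ||
            !(common & (SDBUS_PCLK_RISING | SDBUS_PCLK_FALLING)))
            return -EINVAL;

        /* 3. pick one option per parameter, preferring high/rising */
        chosen |= (common & SDBUS_HSYNC_HIGH) ?
            SDBUS_HSYNC_HIGH : SDBUS_HSYNC_LOW;
        chosen |= (common & SDBUS_PCLK_RISING) ?
            SDBUS_PCLK_RISING : SDBUS_PCLK_FALLING;

        /* 4. program both ends with the agreed subset */
        sensor_set_bus(sd, chosen);
        host_set_bus(chosen);
        return 0;
    }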

Have a look at 
include/media/soc_camera.h::soc_camera_bus_param_compatible() and macros 
defined there, then you might want to have a look at 
drivers/media/video/pxa_camera.c::pxa_camera_set_bus_param() to see how 
the whole machinery works. And you also want inverter flags, see 
drivers/media/video/soc_camera.c::soc_camera_apply_sensor_flags().
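The inverter flags reduce to a bit swap on the polarity capabilities: the
board only says "this line passes through an inverter" and the driver
flips its view of that signal. A simplified sketch of the idea behind
soc_camera_apply_sensor_flags(), reusing the hypothetical SDBUS_* bits
from the sketch above (this is not the actual soc-camera API):

    /* board-specific inverter flag (hypothetical name) */
    #define INV_HSYNC   (1 << 0)

    /* swap two single-bit options within a capability mask */
    static unsigned long swap_bits(unsigned long v, unsigned long a,
                                   unsigned long b)
    {
        unsigned long t = v & (a | b);

        v &= ~(a | b);
        if (t & a)
            v |= b;
        if (t & b)
            v |= a;
        return v;
    }

    static unsigned long apply_inverters(unsigned long inv, unsigned long caps)
    {
        if (inv & INV_HSYNC)
            caps = swap_bits(caps, SDBUS_HSYNC_HIGH, SDBUS_HSYNC_LOW);
        /* an inverted vsync or pclk line would be handled the same way */
        return caps;
    }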

So, this is a step in the right direction, but it doesn't seem final yet.

Thanks
Guennadi

> 
> Reviewed By "Hans Verkuil".
> Signed-off-by: Muralidharan Karicheri <m-karicheri2@ti.com>
> ---
> Applies to v4l-dvb repository
> 
>  include/media/v4l2-subdev.h |   36 ++++++++++++++++++++++++++++++++++++
>  1 files changed, 36 insertions(+), 0 deletions(-)
> 
> diff --git a/include/media/v4l2-subdev.h b/include/media/v4l2-subdev.h
> index 1785608..c1cfb3b 100644
> --- a/include/media/v4l2-subdev.h
> +++ b/include/media/v4l2-subdev.h
> @@ -37,6 +37,41 @@ struct v4l2_decode_vbi_line {
>  	u32 type;		/* VBI service type (V4L2_SLICED_*). 0 if no service found */
>  };
>  
> +/*
> + * Some sub-devices are connected to the bridge device through a bus that
> + * carries the clock, vsync, hsync and data. Some interfaces such as BT.656
> + * carry the sync embedded in the data, whereas others have separate lines
> + * carrying the sync signals. The structure below is used by the bridge
> + * driver to set the desired bus parameters in the sub-device to work with it.
> + */
> +enum v4l2_subdev_bus_type {
> +	/* BT.656 interface. Embedded sync */
> +	V4L2_SUBDEV_BUS_BT_656,
> +	/* BT.1120 interface. Embedded sync */
> +	V4L2_SUBDEV_BUS_BT_1120,
> +	/* 8 bit muxed YCbCr bus, separate sync and field signals */
> +	V4L2_SUBDEV_BUS_YCBCR_8,
> +	/* 16 bit YCbCr bus, separate sync and field signals */
> +	V4L2_SUBDEV_BUS_YCBCR_16,
> +	/* Raw Bayer image data bus, 8-16 bits wide, separate sync signals */
> +	V4L2_SUBDEV_BUS_RAW_BAYER
> +};
> +
> +struct v4l2_subdev_bus	{
> +	enum v4l2_subdev_bus_type type;
> +	u8 width;
> +	/* 0 - active low, 1 - active high */
> +	unsigned pol_vsync:1;
> +	/* 0 - active low, 1 - active high */
> +	unsigned pol_hsync:1;
> +	/* 0 - low to high , 1 - high to low */
> +	unsigned pol_field:1;
> +	/* 0 - sample at falling edge , 1 - sample at rising edge */
> +	unsigned pol_pclock:1;
> +	/* 0 - active low , 1 - active high */
> +	unsigned pol_data:1;
> +};
> +
>  /* Sub-devices are devices that are connected somehow to the main bridge
>     device. These devices are usually audio/video muxers/encoders/decoders or
>     sensors and webcam controllers.
> @@ -109,6 +144,7 @@ struct v4l2_subdev_core_ops {
>  	int (*querymenu)(struct v4l2_subdev *sd, struct v4l2_querymenu *qm);
>  	int (*s_std)(struct v4l2_subdev *sd, v4l2_std_id norm);
>  	long (*ioctl)(struct v4l2_subdev *sd, unsigned int cmd, void *arg);
> +	int (*s_bus)(struct v4l2_subdev *sd, struct v4l2_subdev_bus *bus);
>  #ifdef CONFIG_VIDEO_ADV_DEBUG
>  	int (*g_register)(struct v4l2_subdev *sd, struct v4l2_dbg_register *reg);
>  	int (*s_register)(struct v4l2_subdev *sd, struct v4l2_dbg_register *reg);
> -- 
> 1.6.0.4

---
Guennadi Liakhovetski, Ph.D.
Freelance Open-Source Software Developer
http://www.open-technology.de/
Hans Verkuil June 10, 2009, 7:49 p.m. UTC | #2
On Wednesday 10 June 2009 20:32:25 Guennadi Liakhovetski wrote:
> On Tue, 9 Jun 2009, m-karicheri2@ti.com wrote:
> > From: Muralidharan Karicheri <a0868495@gt516km11.gt.design.ti.com>
> >
> > This patch adds support for setting bus parameters such as bus type
> > (BT.656, BT.1120 etc), width (example 10 bit raw image data bus)
> > and polarities (vsync, hsync, field etc) in sub device. This allows
> > bridge driver to configure the sub device for a specific set of bus
> > parameters through s_bus() function call.
>
> Yes, this is required, but this is not enough. Firstly, you're missing at
> least one more flag - master or slave. Secondly, it is not enough to
> provide a s_bus function. Many hosts and sensors can configure one of
> several alternate configurations - they can select signal polarities,
> data widths, master / slave role, etc. Whereas others have some or all of
> these parameters fixed. That's why we have a query method in soc-camera,
> which delivers all supported configurations, and then the host can select
> some mutually acceptable subset. No, just returning an error code is not
> enough.

Why would you want to query this? I would expect this to be fixed settings: 
something that is determined by the architecture. Actually, I would expect 
this to be part of the platform_data in many cases and setup when the i2c 
driver is initialized and not touched afterwards.

If we want to negotiate these bus settings, then we indeed need something 
more. But it seems unnecessarily complex to me to implement autonegotiation 
when this is in practice a fixed setting determined by how the sensor is 
hooked up to the host.

I may be missing something, of course.

Regards,

	Hans
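Concretely, Hans's model would put something like this in the board
support file, using the struct from this patch (a sketch; the evm_ name
and the wiring values are made up for illustration):

    /* Hypothetical board file: record how the sensor is actually wired,
     * and let the bridge driver pass this unchanged to the sub-device
     * via s_bus() at init time. */
    static struct v4l2_subdev_bus evm_sensor_bus = {
        .type       = V4L2_SUBDEV_BUS_RAW_BAYER,
        .width      = 10,
        .pol_vsync  = 1,    /* active high */
        .pol_hsync  = 1,    /* active high */
        .pol_pclock = 1,    /* sample on rising edge */
        .pol_data   = 1,    /* active high */
    };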

Guennadi Liakhovetski June 10, 2009, 7:59 p.m. UTC | #3
On Wed, 10 Jun 2009, Hans Verkuil wrote:

> On Wednesday 10 June 2009 20:32:25 Guennadi Liakhovetski wrote:
> > On Tue, 9 Jun 2009, m-karicheri2@ti.com wrote:
> > > From: Muralidharan Karicheri <a0868495@gt516km11.gt.design.ti.com>
> > >
> > > This patch adds support for setting bus parameters such as bus type
> > > (BT.656, BT.1120 etc), width (example 10 bit raw image data bus)
> > > and polarities (vsync, hsync, field etc) in sub device. This allows
> > > bridge driver to configure the sub device for a specific set of bus
> > > parameters through s_bus() function call.
> >
> > Yes, this is required, but this is not enough. Firstly, you're missing at
> > least one more flag - master or slave. Secondly, it is not enough to
> > provide a s_bus function. Many hosts and sensors can configure one of
> > several alternate configurations - they can select signal polarities,
> > data widths, master / slave role, etc. Whereas others have some or all of
> > these parameters fixed. That's why we have a query method in soc-camera,
> > which delivers all supported configurations, and then the host can select
> > some mutually acceptable subset. No, just returning an error code is not
> > enough.
> 
> Why would you want to query this? I would expect this to be fixed settings: 
> something that is determined by the architecture. Actually, I would expect 
> this to be part of the platform_data in many cases and setup when the i2c 
> driver is initialized and not touched afterwards.
> 
> If we want to negotiate these bus settings, then we indeed need something 
> more. But it seems unnecessarily complex to me to implement autonegotiation 
> when this is in practice a fixed setting determined by how the sensor is 
> hooked up to the host.

On the platform level I have so far seen two options: signal connected 
directly or via an inverter. For that you need platform data, yes. But 
otherwise - why? Say, if both your sensor and your host can select hsync
polarity in software - what should the platform say about it? This
knowledge belongs in the respective drivers - they know that they can
configure arbitrary polarity, and they advertise that. Another sensor that
is statically active high - what does the platform have to do with it? The
driver knows perfectly well that it can only do one polarity, and it
negotiates that. Earlier we also had these flags configured in platform
code, but then we realised that this wasn't correct. This information and
these configuration methods are chip-specific, not platform-specific.

And the negotiation code is not that complex - just copy respective 
soc-camera functions, later the originals will be removed.

Thanks
Guennadi
---
Guennadi Liakhovetski, Ph.D.
Freelance Open-Source Software Developer
http://www.open-technology.de/
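In code, the split Guennadi argues for comes down to something like this
(hypothetical, reusing the SDBUS_* bits sketched earlier): the chip
knowledge stays in the driver, and the platform contributes nothing.

    /* A sensor that can sample on either pclk edge but only drives its
     * hsync active high advertises exactly that. The driver knows this
     * from the datasheet; no platform data is involved. */
    static unsigned long mysensor_query_bus_caps(struct v4l2_subdev *sd)
    {
        return SDBUS_PCLK_RISING | SDBUS_PCLK_FALLING | SDBUS_HSYNC_HIGH;
    }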
Murali Karicheri June 10, 2009, 8:28 p.m. UTC | #4
Guennadi,

Thanks for responding. I acknowledge I need to add
master & slave information in the structure. Querying
the capabilities from the sub device is a good feature.
I will look into your references and let you know if I
have any questions.

I will send an updated patch based on this.

BTW, I have a question about the mt9t031.c driver. Could
I use this driver to stream VGA frames from the sensor?
Is it possible to configure the driver to stream at a
specific fps? We have a version of the driver used internally
and it can do capture and stream of Bayer RGB data at VGA,
480p, 576p and 720p resolutions. I have started integrating
your driver with my vpfe capture driver and it wasn't very
clear to me. It looks like the driver calculates the timing parameters
based on the width and height of the capture area. We need
streaming capability in the driver. This is how our driver works
with the mt9t031 sensor:
              raw-bus (10 bit)
vpfe-capture ------------------ mt9t031 driver
     |                               |
     V                               V
   VPFE                           MT9T031

VPFE hardware has internal timing and DMA controller to
copy data frame by frame from the sensor output to SDRAM.
The PCLK from the sensor is used to generate the internal
timing.

Thanks.

Murali
email: m-karicheri2@ti.com

Hans Verkuil June 10, 2009, 8:51 p.m. UTC | #5
On Wednesday 10 June 2009 21:59:13 Guennadi Liakhovetski wrote:
> On Wed, 10 Jun 2009, Hans Verkuil wrote:
> > On Wednesday 10 June 2009 20:32:25 Guennadi Liakhovetski wrote:
> > > On Tue, 9 Jun 2009, m-karicheri2@ti.com wrote:
> > > > From: Muralidharan Karicheri <a0868495@gt516km11.gt.design.ti.com>
> > > >
> > > > This patch adds support for setting bus parameters such as bus type
> > > > (BT.656, BT.1120 etc), width (example 10 bit raw image data bus)
> > > > and polarities (vsync, hsync, field etc) in sub device. This allows
> > > > bridge driver to configure the sub device for a specific set of bus
> > > > parameters through s_bus() function call.
> > >
> > > Yes, this is required, but this is not enough. Firstly, you're
> > > missing at least one more flag - master or slave. Secondly, it is not
> > > enough to provide a s_bus function. Many hosts and sensors can
> > > configure one of several alternate configurations - they can select
> > > signal polarities, data widths, master / slave role, etc. Whereas
> > > others have some or all of these parameters fixed. That's why we have
> > > a query method in soc-camera, which delivers all supported
> > > configurations, and then the host can select some mutually acceptable
> > > subset. No, just returning an error code is not enough.
> >
> > Why would you want to query this? I would expect this to be fixed
> > settings: something that is determined by the architecture. Actually, I
> > would expect this to be part of the platform_data in many cases and
> > setup when the i2c driver is initialized and not touched afterwards.
> >
> > If we want to negotiate these bus settings, then we indeed need
> > something more. But it seems unnecessarily complex to me to implement
> > autonegotiation when this is in practice a fixed setting determined by
> > how the sensor is hooked up to the host.
>
> On the platform level I have so far seen two options: signal connected
> directly or via an inverter. For that you need platform data, yes. But
> otherwise - why? say, if both your sensor and your host can select hsync
> polarity in software - what should platform tell about it? This knowledge
> belongs in the respective drivers - they know, that they can configure
> arbitrary polarity and they advertise that. Another sensor, that is
> statically active high - what does platform have to do with it? The
> driver knows perfectly, that it can only do one polarity, and it
> negotiates that. Earlier we also had this flags configured in platform
> code, but then we realised that this wasn't correct. This information and
> configuration methods are chip-specific, not platform-specific.
>
> And the negotiation code is not that complex - just copy respective
> soc-camera functions, later the originals will be removed.

My view of this would be that the board specification specifies the sensor 
(and possibly other chips) that are on the board. And to me it makes sense 
that that also supplies the bus settings. I agree that it is not complex 
code, but I think it is also unnecessary code. Why negotiate if you can 
just set it?

BTW, looking at the code there doesn't seem to be a bus type (probably only 
Bayer is used), correct? How is the datawidth negotiation done? Is the 
largest available width chosen? I assume that the datawidth is generally 
fixed by the host? I don't quite see how that can be negotiated since what 
is chosen there is linked to how the video data is transferred to memory. 
E.g., choosing a bus width of 8 or 10 bits can be the difference between 
transferring 8-bit or 16-bit data (with 6 bits of padding) when the image is 
transferred to memory. If I were to implement negotiation I would probably 
limit it to those one-bit settings and not include bus type and width.

Regards,

	Hans
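For the 8-versus-16-bit case Hans describes, the difference is only in how
the samples land in memory; a sketch of unpacking 10-bit samples stored
right-aligned in 16-bit words (illustrative, kernel types assumed):

    #include <linux/types.h>

    /* Each 16-bit word holds one 10-bit sample plus 6 padding bits. */
    static void unpack10_to_8(const u16 *raw, u8 *out, int n)
    {
        int i;

        for (i = 0; i < n; i++) {
            u16 sample = raw[i] & 0x03ff;   /* 10 valid bits */
            out[i] = sample >> 2;           /* keep the top 8 */
        }
    }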

Guennadi Liakhovetski June 10, 2009, 9:09 p.m. UTC | #6
On Wed, 10 Jun 2009, Karicheri, Muralidharan wrote:

> Guennadi,
> 
> Thanks for responding. I acknowledge I need to add
> master & slave information in the structure. Querying
> the capabilities from the sub device is a good feature.
> I will look into your references and let you know if I
> have any question.
> 
> I will send an updated patch based on this.
> 
> BTW, I have a question about the mt9t031.c driver. Could
> I use this driver to stream VGA frames from the sensor?

Sure, any size the chip supports (up to 2048x1536) and your host can 
handle.

> Is it possible to configure the driver to stream at a
> specific fps?

No, patches welcome.

> We have a version of the driver used internally
> and it can do capture and stream of Bayer RGB data at VGA,
> 480p, 576p and 720p resolutions. I have started integrating
> your driver with my vpfe capture driver and it wasn't very
> clear to me. Looks like driver calculates the timing parameters
> based on the width and height of the capture area.

Yes, it provides exposure control by setting shutter timing, and it 
emulates autoexposure by calculating shutter times from window geometry.

> We need
> streaming capability in the driver. This is how our driver works
> with mt9t031 sensor
>               raw-bus (10 bit)
> vpfe-capture ------------------ mt9t031 driver
>      |                               |
>      V                               V
>    VPFE                           MT9T031
> 
> VPFE hardware has internal timing and DMA controller to
> copy data frame by frame from the sensor output to SDRAM.
> The PCLK from the sensor is used to generate the internal
> timing.

So, what is missing in the driver apart from the ability to specify 
a frame-rate?

Thanks
Guennadi
---
Guennadi Liakhovetski, Ph.D.
Freelance Open-Source Software Developer
http://www.open-technology.de/
Murali Karicheri June 10, 2009, 9:15 p.m. UTC | #7
Hi,

Now that there is discussion on this, let me put down how I see it working.

The bridge driver is aware of all the sub-devices it is working with. So
for each sub-device, the bridge driver could define a per-sub-device
configuration (bus type, data width, and polarities) as platform data.
When the bridge driver loads the sub-device, the first thing it does is
read these settings and call s_bus() to configure the interface. vpfe
capture works with tvp514x and mt9t031: the first uses a YUV bus and the
second a Bayer bus. For each of these, I will have platform-specific bus
parameters. If any other bridge driver needs to use one of these
sub-devices, it is a matter of customizing the platform data. One example
is the mt9t031 driver, which needs to work with vpfe capture as well as
the soc-camera framework. What is the advantage of querying versus
defining them statically in the platform data?

Murali
email: m-karicheri2@ti.com
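A sketch of the table Murali describes, with a hypothetical
vpfe_subdev_info wrapper around the struct from this patch; the bridge
would walk this at probe time and issue one s_bus() call per sub-device:

    /* hypothetical per-sub-device platform data for the bridge */
    struct vpfe_subdev_info {
        const char *name;               /* sub-device driver name */
        struct v4l2_subdev_bus bus;     /* from this patch */
    };

    static struct vpfe_subdev_info evm_subdevs[] = {
        {
            .name = "tvp514x",
            .bus = {
                .type  = V4L2_SUBDEV_BUS_BT_656,  /* YUV, embedded sync */
                .width = 8,
            },
        }, {
            .name = "mt9t031",
            .bus = {
                .type  = V4L2_SUBDEV_BUS_RAW_BAYER,
                .width = 10,
            },
        },
    };

After loading each sub-device, the bridge would then apply the matching
entry with v4l2_subdev_call(sd, core, s_bus, &info->bus), as in the
sketch near the top of this page.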

Murali Karicheri June 10, 2009, 9:29 p.m. UTC | #8
>> We need
>> streaming capability in the driver. This is how our driver works
>> with mt9t031 sensor
>>               raw-bus (10 bit)
>> vpfe-capture ------------------ mt9t031 driver
>>      |                               |
>>      V                               V
>>    VPFE                           MT9T031
>>
>> VPFE hardware has internal timing and DMA controller to
>> copy data frame by frame from the sensor output to SDRAM.
>> The PCLK from the sensor is used to generate the internal
>> timing.
>
>So, what is missing in the driver apart from the ability to specify
>a frame-rate?
>
[MK] Does the mt9t031 output one frame (snapshot) like in a camera, or can
it output frames continuously along with PCLK, HSYNC and VSYNC signals like
a video streaming device? VPFE capture can accept frames continuously from
the sensor synchronized to PCLK, HSYNC and VSYNC and output frames to the
application using QBUF/DQBUF. In our implementation, we have timing
parameters for the sensor to do streaming at various resolutions and fps,
so the application calls S_STD to set these timings. I am not sure if this
is an acceptable way of implementing it. Any comments?

Thanks

Murali

Guennadi Liakhovetski June 10, 2009, 9:30 p.m. UTC | #9
On Wed, 10 Jun 2009, Hans Verkuil wrote:

> My view of this would be that the board specification specifies the sensor 
> (and possibly other chips) that are on the board. And to me it makes sense 
> that that also supplies the bus settings. I agree that it is not complex 
> code, but I think it is also unnecessary code. Why negotiate if you can 
> just set it?

Why force all platforms to set it if the driver is perfectly capable of
doing this itself? As I said - this is not a platform-specific feature, it's 
chip-specific. What good would it make to have all platforms using mt9t031 
to specify, that yes, the chip can use both falling and rising pclk edge, 
but only active high vsync and hsync?

> BTW, looking at the code there doesn't seem to be a bus type (probably only 
> Bayer is used), correct? How is the datawidth negotiation done? Is the 
> largest available width chosen? I assume that the datawidth is generally 
> fixed by the host? I don't quite see how that can be negotiated since what 
> is chosen there is linked to how the video data is transferred to memory. 
> E.g., chosing a bus width of 8 or 10 bits can be the difference between 
> transferring 8 bit or 16 bit data (with 6 bits padding) when the image is 
> transferred to memory. If I would implement negotiation I would probably 
> limit it to those one-bit settings and not include bus type and width.

Well, this is indeed hairy. And hardware developers compete in creativity,
making us invent ever new algorithms:-( I think, so far _practically_ I 
have only worked with the following setups:

8 bit parallel with syncs and clocks. Data can be either Bayer or YUV. 
This is most usual in my practice so far.

10 bit parallel (PXA270) with syncs and clocks bus with an 8 bit sensor 
connected to most significant lanes... This is achieved by providing an 
additional I2C controlled switch...

10 bit parallel with a 10 bit sensor, data 0-extended to 16 bit, raw 
Bayer.

15 bit parallel bus (i.MX31) with 8 bit sensor connected to least 
significant lanes, because i.MX31 is smart enough to use them correctly.
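To make the lane-wiring arithmetic concrete (illustrative only): an 8-bit
sensor on the most significant lanes of a 10-bit bus arrives pre-shifted,
whereas the least-significant-lane case needs no correction at all.

    #include <linux/types.h>

    /* 8-bit sensor wired to lanes 9..2 of a 10-bit bus (the PXA270 case
     * above): the host reads sensor_byte << 2, so a full-scale 0xff on
     * the sensor side shows up as 0x3fc. */
    static u16 msb_wired_sample(u8 sensor_byte)
    {
        return (u16)sensor_byte << 2;
    }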

ATM this is a part of soc-camera pixel format negotiation code, you can 
look at various .set_bus_param() methods in sensor drivers. E.g., look in 
mt9m001 and mt9v022 for drivers, using external bus-width switches...

Now, I do not know how standard these various data packing methods on the 
bus are, I think, we will have to give it a good thought when designing an 
API, comments from developers familiar with various setups are much 
appreciated.

Thanks
Guennadi
---
Guennadi Liakhovetski, Ph.D.
Freelance Open-Source Software Developer
http://www.open-technology.de/
Guennadi Liakhovetski June 10, 2009, 9:37 p.m. UTC | #10
On Wed, 10 Jun 2009, Karicheri, Muralidharan wrote:

> 
> 
> >> We need
> >> streaming capability in the driver. This is how our driver works
> >> with mt9t031 sensor
> >>               raw-bus (10 bit)
> >> vpfe-capture ------------------ mt9t031 driver
> >>      |                               |
> >>      V                               V
> >>    VPFE                           MT9T031
> >>
> >> VPFE hardware has internal timing and DMA controller to
> >> copy data frame by frame from the sensor output to SDRAM.
> >> The PCLK from the sensor is used to generate the internal
> >> timing.
> >
> >So, what is missing in the driver apart from the ability to specify
> >a frame-rate?
> >
> [MK] Does the mt9t031 output one frame (snapshot) like in a camera or 
> can it output frame continuously along with PCLK, Hsync and Vsync 
> signals like in a video streaming device. VPFE capture can accept frames 
> continuously from the sensor synchronized to PCLK, HSYNC and VSYNC and 
> output frames to application using QBUF/DQBUF. In our implementation, we 
> have timing parameters for the sensor to do streaming at various 
> resolutions and fps. So application calls S_STD to set these timings. I 
> am not sure if this is an acceptable way of implementing it. Any 
> comments?

Yes, it is streaming.

Thanks
Guennadi
---
Guennadi Liakhovetski, Ph.D.
Freelance Open-Source Software Developer
http://www.open-technology.de/
Murali Karicheri June 10, 2009, 9:45 p.m. UTC | #11
>> >So, what is missing in the driver apart from the ability to specify
>> >a frame-rate?
>> >
>> [MK] Does the mt9t031 output one frame (snapshot) like in a camera or
>> can it output frame continuously along with PCLK, Hsync and Vsync
>> signals like in a video streaming device. VPFE capture can accept frames
>> continuously from the sensor synchronized to PCLK, HSYNC and VSYNC and
>> output frames to application using QBUF/DQBUF. In our implementation, we
>> have timing parameters for the sensor to do streaming at various
>> resolutions and fps. So application calls S_STD to set these timings. I
>> am not sure if this is an acceptable way of implementing it. Any
>> comments?
>
>Yes, it is streaming.
>
So how do I know what frame rate I get? The sensor output frame rate
depends on the resolution of the frame, blanking, exposure time, etc.

Murali
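The dependency Murali points out is plain arithmetic, which is why the
rate moves with resolution and blanking. With made-up numbers (not taken
from the MT9T031 datasheet):

    /* fps = pclk / ((active width + hblank) * (active height + vblank));
     * exposure times longer than the frame period stretch it further. */
    static unsigned long approx_fps(void)
    {
        unsigned long pclk  = 48000000;           /* 48 MHz pixel clock */
        unsigned long line  = 640 + 120;          /* VGA width + h-blank */
        unsigned long frame = (480 + 40) * line;  /* height + v-blank */

        return pclk / frame;                      /* ~121 fps here */
    }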
Hans Verkuil June 10, 2009, 9:51 p.m. UTC | #12
On Wednesday 10 June 2009 23:30:55 Guennadi Liakhovetski wrote:
> On Wed, 10 Jun 2009, Hans Verkuil wrote:
> > My view of this would be that the board specification specifies the
> > sensor (and possibly other chips) that are on the board. And to me it
> > makes sense that that also supplies the bus settings. I agree that it
> > is not complex code, but I think it is also unnecessary code. Why
> > negotiate if you can just set it?
>
> Why force all platforms to set it if the driver is perfectly capable do
> this itself? As I said - this is not a platform-specific feature, it's
> chip-specific. What good would it make to have all platforms using
> mt9t031 to specify, that yes, the chip can use both falling and rising
> pclk edge, but only active high vsync and hsync?

???

You will just tell the chip what to use. So you set 'use falling edge' and 
either set 'active high vsync/hsync' or just leave that out since you know 
the mt9t031 has that fixed. You don't specify in the platform data what the 
chip can support, that's not relevant. You know what the host expects and 
you pass that information on to the chip.

A board designer knows what the host supports, knows what the sensor 
supports, and knows if he added any inverters on the board, and based on 
all that information he can just set up these parameters for the sensor 
chip. Settings that are fixed on the sensor chip he can just ignore; he 
only needs to specify those settings that the sensor really needs.

> > BTW, looking at the code there doesn't seem to be a bus type (probably
> > only Bayer is used), correct? How is the datawidth negotiation done? Is
> > the largest available width chosen? I assume that the datawidth is
> > generally fixed by the host? I don't quite see how that can be
> > negotiated since what is chosen there is linked to how the video data
> > is transferred to memory. E.g., chosing a bus width of 8 or 10 bits can
> > be the difference between transferring 8 bit or 16 bit data (with 6
> > bits padding) when the image is transferred to memory. If I would
> > implement negotiation I would probably limit it to those one-bit
> > settings and not include bus type and width.
>
> Well, this is indeed hairy. And hardware developers compete in creativity
> making us invent new and new algorithms:-( I think, so far _practically_
> I have only worked with the following setups:
>
> 8 bit parallel with syncs and clocks. Data can be either Bayer or YUV.
> This is most usual in my practice so far.
>
> 10 bit parallel (PXA270) with syncs and clocks bus with an 8 bit sensor
> connected to most significant lanes... This is achieved by providing an
> additional I2C controlled switch...
>
> 10 bit parallel with a 10 bit sensor, data 0-extended to 16 bit, raw
> Bayer.
>
> 15 bit parallel bus (i.MX31) with 8 bit sensor connected to least
> significant lanes, because i.MX31 is smart enough to use them correctly.
>
> ATM this is a part of soc-camera pixel format negotiation code, you can
> look at various .set_bus_param() methods in sensor drivers. E.g., look in
> mt9m001 and mt9v022 for drivers, using external bus-width switches...
>
> Now, I do not know how standard these various data packing methods on the
> bus are, I think, we will have to give it a good thought when designing
> an API, comments from developers familiar with various setups are much
> appreciated.

I think we should not attempt to be too fancy with this part of the API. 
Something like the pixel format API in the v4l2 spec, which is basically 
just a random number with an associated format specification and no 
attempt at high-level format descriptions. In this case a simple enum might 
be enough. I'm not even sure whether we should specify the bus width but 
instead have it implicit in the bus type.

I'm sure we are never going to be able to come up with something that will 
work on all hardware, so perhaps we shouldn't try to. Each different format 
gets its own type. Which is OK I think as long as that format is properly 
documented.

Regards,

	Hans

Guennadi Liakhovetski June 10, 2009, 11:12 p.m. UTC | #13
On Wed, 10 Jun 2009, Hans Verkuil wrote:

> On Wednesday 10 June 2009 23:30:55 Guennadi Liakhovetski wrote:
> > On Wed, 10 Jun 2009, Hans Verkuil wrote:
> > > My view of this would be that the board specification specifies the
> > > sensor (and possibly other chips) that are on the board. And to me it
> > > makes sense that that also supplies the bus settings. I agree that it
> > > is not complex code, but I think it is also unnecessary code. Why
> > > negotiate if you can just set it?
> >
> > Why force all platforms to set it if the driver is perfectly capable do
> > this itself? As I said - this is not a platform-specific feature, it's
> > chip-specific. What good would it make to have all platforms using
> > mt9t031 to specify, that yes, the chip can use both falling and rising
> > pclk edge, but only active high vsync and hsync?
> 
> ???
> 
> You will just tell the chip what to use. So you set 'use falling edge' and 
> either set 'active high vsync/hsync' or just leave that out since you know 
> the mt9t031 has that fixed. You don't specify in the platform data what the 
> chip can support, that's not relevant. You know what the host expects and 
> you pass that information on to the chip.
> 
> A board designer knows what the host supports,

No, he doesn't have to. That's not board specific, that's SoC specific.

> knows what the sensor supports,

Ditto, this is sensor-specific, not board-specific.

> and knows if he added any inverters on the board, and based on 
> all that information he can just setup these parameters for the sensor 
> chip. Settings that are fixed on the sensor chip he can just ignore, he 
> only need to specify those settings that the sensor really needs.

Of all the boards that I know of that use soc-camera only one (supposedly) 
had an inverter on one line, and even that one is not in the mainline. So, 
in the present soc-camera code not a single board has to bother with 
that. And now you want to add _all_ those polarity, master / slave flags 
to _all_ of them? Let me try again:

you have an HSYNC output on the sensor. Its capabilities are known to the 
sensor driver

you have an HSYNC input on the SoC. Its capabilities are known to the 
SoC-specific camera host driver

these two lines are routed on the board to connect either directly or over 
some logic, hopefully not more complex than an inverter. Now, this is 
_the_ _only_ bit of information that is specific to the board.

Thanks
Guennadi
---
Guennadi Liakhovetski, Ph.D.
Freelance Open-Source Software Developer
http://www.open-technology.de/
Guennadi Liakhovetski June 10, 2009, 11:13 p.m. UTC | #14
On Wed, 10 Jun 2009, Karicheri, Muralidharan wrote:

> So how do I know what frame-rate I get? Sensor output frame rate depends 
> on the resolution of the frame, blanking, exposure time etc.

This is not supported.

Thanks
Guennadi
---
Guennadi Liakhovetski, Ph.D.
Freelance Open-Source Software Developer
http://www.open-technology.de/
Jean-Philippe François June 11, 2009, 8:01 a.m. UTC | #15
Karicheri, Muralidharan wrote:
> 
>>> We need
>>> streaming capability in the driver. This is how our driver works
>>> with mt9t031 sensor
>>>               raw-bus (10 bit)
>>> vpfe-capture ------------------ mt9t031 driver
>>>      |                               |
>>>      V                               V
>>>    VPFE                           MT9T031
>>>
>>> VPFE hardware has internal timing and DMA controller to
>>> copy data frame by frame from the sensor output to SDRAM.
>>> The PCLK from the sensor is used to generate the internal
>>> timing.
>> So, what is missing in the driver apart from the ability to specify
>> a frame-rate?
>>
> [MK] Does the mt9t031 output one frame (snapshot) like in a camera, or
> can it output frames continuously along with PCLK, HSYNC and VSYNC
> signals like a video streaming device? VPFE capture can accept frames
> continuously from the sensor synchronized to PCLK, HSYNC and VSYNC and
> output frames to the application using QBUF/DQBUF. In our implementation,
> we have timing parameters for the sensor to do streaming at various
> resolutions and fps, so the application calls S_STD to set these timings.
> I am not sure if this is an acceptable way of implementing it. Any
> comments?
> 
PCLK, HSYNC and VSYNC are generated by the CMOS sensor. I don't think you
can set the timings. Depending on sensor settings, pixel clock speed etc.,
the frame rate will vary.

You could perhaps play with the CMOS sensor registers so that when setting
a standard, the driver somehow sets the various exposure parameters and
PLL settings to get a specified framerate.

This will vary with each sensor and each platform (because of the pixel
clock). Moreover, chances are that it will conflict with other controls.

For example, if you set a fixed gain and auto-exposure, some sensors will
see a drop in fps under low-light conditions. I think this kind of
arbitration should be left to userspace.

Unless the sensor supports a specific standard, I don't think the driver
should try to make behind-the-scenes modifications to camera sensor
registers in response to a S_STD ioctl.

JP François


Murali Karicheri June 11, 2009, 1:37 p.m. UTC | #16
>> Why force all platforms to set it if the driver is perfectly capable do
>> this itself? As I said - this is not a platform-specific feature, it's
>> chip-specific. What good would it make to have all platforms using
>> mt9t031 to specify, that yes, the chip can use both falling and rising
>> pclk edge, but only active high vsync and hsync?
>
>???
>
>You will just tell the chip what to use. So you set 'use falling edge' and
>either set 'active high vsync/hsync' or just leave that out since you know
>the mt9t031 has that fixed. You don't specify in the platform data what the
>chip can support, that's not relevant. You know what the host expects and
>you pass that information on to the chip.
>
>A board designer knows what the host supports, knows what the sensor
>supports, and knows if he added any inverters on the board, and based on
>all that information he can just setup these parameters for the sensor
>chip. Settings that are fixed on the sensor chip he can just ignore, he
>only need to specify those settings that the sensor really needs.
>
[MK] I agree with Hans. Looking at my experience with various platforms,
this is true. We have the following decoders connected to the VPFE bus
working in our internal release kernel. In all these cases, the bus
parameters are fixed given a board and the platform. Listed below are the
boards, the decoders connected, and the buses available:

DM6446 - Bayer RGB bus (10 bit connected to MSB), VSYNC, HSYNC, PCLK
         (MT9T001)
       - YUV bus (embedded sync / separate sync), PCLK, VSYNC, HSYNC, FIELD
         (TVP5146), 8/10 bit data bus
DM355  - same as above
       - VPFE also supports a 16 bit YUV bus (8 bit Y and 8 bit Cb/Cr),
         but no decoders tested with this interface
DM365  - Bayer RGB bus, same as above
       - YUV bus, similar to that in DM355
       - TVP7002, connected through a 16 bit YUV bus with embedded sync
         (BT.1120)

>> > BTW, looking at the code there doesn't seem to be a bus type (probably
>> > only Bayer is used), correct? How is the datawidth negotiation done? Is
>> > the largest available width chosen? I assume that the datawidth is
>> > generally fixed by the host? I don't quite see how that can be
>> > negotiated since what is chosen there is linked to how the video data
>> > is transferred to memory. E.g., chosing a bus width of 8 or 10 bits can
>> > be the difference between transferring 8 bit or 16 bit data (with 6
>> > bits padding) when the image is transferred to memory. If I would
>> > implement negotiation I would probably limit it to those one-bit
>> > settings and not include bus type and width.
>>
>> Well, this is indeed hairy. And hardware developers compete in creativity
>> making us invent new and new algorithms:-( I think, so far _practically_
>> I have only worked with the following setups:
>>
>> 8 bit parallel with syncs and clocks. Data can be either Bayer or YUV.
>> This is most usual in my practice so far.
>>
>> 10 bit parallel (PXA270) with syncs and clocks bus with an 8 bit sensor
>> connected to most significant lanes... This is achieved by providing an
>> additional I2C controlled switch...
>>
>> 10 bit parallel with a 10 bit sensor, data 0-extended to 16 bit, raw
>> Bayer.
>>
>> 15 bit parallel bus (i.MX31) with 8 bit sensor connected to least
>> significant lanes, because i.MX31 is smart enough to use them correctly.
>>
>> ATM this is a part of soc-camera pixel format negotiation code, you can
>> look at various .set_bus_param() methods in sensor drivers. E.g., look in
>> mt9m001 and mt9v022 for drivers, using external bus-width switches...
>>
>> Now, I do not know how standard these various data packing methods on the
>> bus are, I think, we will have to give it a good thought when designing
>> an API, comments from developers familiar with various setups are much
>> appreciated.
>
>I think we should not attempt to be too fancy with this part of the API.
>Something like the pixel format API in the v4l2 spec which is basically
>just a random number with an associated format specification and no attempt
>and trying high-level format descriptions. In this case a simple enum might
>be enough. I'm not even sure whether we should specify the bus width but
>instead have it implicit in the bus type.
>
[MK] VPFE at least needs to know whether the YUV bus has embedded sync or
separate sync lines, as well as whether it is 10 bit or 8 bit. Similarly,
it needs 16 bit for interfacing with the TVP7002. For raw Bayer, it needs
to know the size of the valid data on the bus and where the MSB of the
sensor is connected (this can be specified in the platform data of the
bridge driver, and is not applicable to the sensor device). So the data
width information could either be explicit in the bus type or specified in
the width parameter. Both work for our driver.

What about the embedded-sync bool I added in the latest patch? This can be
removed too if it is explicit in the bus type, but my preference is to keep
them as in my latest patch. I am also not sure whether a query capability
will really be useful versus defining them on a per-board/platform basis.

>I'm sure we are never going to be able to come up with something that will
>work on all hardware, so perhaps we shouldn't try to. Each different format
>gets its own type. Which is OK I think as long as that format is properly
>documented.
>
>Regards,
>
>	Hans
>
>>
>> Thanks
>> Guennadi
>> ---
>> Guennadi Liakhovetski, Ph.D.
>> Freelance Open-Source Software Developer
>> http://www.open-technology.de/
>> --
>> To unsubscribe from this list: send the line "unsubscribe linux-media" in
>> the body of a message to majordomo@vger.kernel.org
>> More majordomo info at  http://vger.kernel.org/majordomo-info.html
>
>
>
>--
>Hans Verkuil - video4linux developer - sponsored by TANDBERG Telecom

Murali Karicheri June 11, 2009, 3 p.m. UTC | #17
>On Wed, 10 Jun 2009, Karicheri, Muralidharan wrote:
>
>> So how do I know what frame-rate I get? Sensor output frame rate depends
>> on the resolution of the frame, blanking, exposure time etc.
>
>This is not supported.
>
I am still not clear. You said in an earlier email that it can support streaming, which means an application can stream frames from the capture device.
I know you don't have support for setting a specific frame rate, but it must be outputting frames at some rate, right?

Here is my use case.

open capture device,
set resolution (say VGA) for capture (S_FMT ???)
request buffers for streaming (REQBUFS) & QUERYBUF & mmap
start streaming (STREAMON)
DQBUF/QBUF in a loop -> get VGA buffers at some fps.
STREAMOFF
close device

Is this possible with the mt9t031 driver currently in the tree? It requires the sensor device to output frames continuously on the bus, using PCLK/HSYNC/VSYNC timing, to the bridge device connected to the bus. Can you give a use case like the one above that you are using? I just want to estimate how much effort is required to add this support to the mt9t031 driver.
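
For reference, the sequence above in (heavily trimmed) application code.
The device node, buffer count and pixel format are assumptions, and all
error checking is omitted for brevity:

#include <fcntl.h>
#include <string.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <linux/videodev2.h>

int capture_vga_frames(int nframes)
{
	int fd = open("/dev/video0", O_RDWR);	/* assumed device node */
	enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	struct v4l2_format fmt;
	struct v4l2_requestbuffers req;
	struct v4l2_buffer buf;
	void *mem[4];
	int i;

	/* S_FMT: ask for VGA; the pixel format is just an example */
	memset(&fmt, 0, sizeof(fmt));
	fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	fmt.fmt.pix.width = 640;
	fmt.fmt.pix.height = 480;
	fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_SGRBG10;
	ioctl(fd, VIDIOC_S_FMT, &fmt);

	/* REQBUFS: assume the driver grants all four buffers */
	memset(&req, 0, sizeof(req));
	req.count = 4;
	req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	req.memory = V4L2_MEMORY_MMAP;
	ioctl(fd, VIDIOC_REQBUFS, &req);

	/* QUERYBUF + mmap + QBUF for each buffer */
	for (i = 0; i < (int)req.count; i++) {
		memset(&buf, 0, sizeof(buf));
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_MMAP;
		buf.index = i;
		ioctl(fd, VIDIOC_QUERYBUF, &buf);
		mem[i] = mmap(NULL, buf.length, PROT_READ | PROT_WRITE,
			      MAP_SHARED, fd, buf.m.offset);
		ioctl(fd, VIDIOC_QBUF, &buf);
	}

	ioctl(fd, VIDIOC_STREAMON, &type);

	/* DQBUF/QBUF loop: frames arrive at whatever rate the sensor runs */
	for (i = 0; i < nframes; i++) {
		memset(&buf, 0, sizeof(buf));
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_MMAP;
		ioctl(fd, VIDIOC_DQBUF, &buf);	/* blocks until a frame */
		/* ... consume mem[buf.index] here ... */
		ioctl(fd, VIDIOC_QBUF, &buf);
	}

	ioctl(fd, VIDIOC_STREAMOFF, &type);
	close(fd);
	return 0;
}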

Thanks

Murali

>Thanks
>Guennadi
>---
>Guennadi Liakhovetski, Ph.D.
>Freelance Open-Source Software Developer
>http://www.open-technology.de/

Guennadi Liakhovetski June 11, 2009, 3:58 p.m. UTC | #18
On Thu, 11 Jun 2009, Karicheri, Muralidharan wrote:

> >On Wed, 10 Jun 2009, Karicheri, Muralidharan wrote:
> >
> >> So how do I know what frame-rate I get? Sensor output frame rate depends
> >> on the resolution of the frame, blanking, exposure time etc.
> >
> >This is not supported.
> >
> I am still not clear. You said in an earlier email that it can support
> streaming, which means an application can stream frames from the capture
> device.
> I know you don't have support for setting a specific frame rate, but it
> must be outputting frames at some rate, right?

I am sorry, I do not know how I can explain myself more clearly.

Yes, you can stream video with mt9t031.

No, you can neither get the frame rate measured by the driver nor set a
specific frame rate. Frames are produced as fast as the hardware goes,
depending on clock settings, frame size, black areas and autoexposure.
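
To a first approximation the resulting rate is just the pixel clock divided
by the total frame size including blanking. A sketch, with made-up numbers
(not mt9t031 datasheet values):

/*
 * First-order estimate of a free-running sensor's frame rate. Real
 * values additionally depend on the clock tree, binning/skipping and
 * the autoexposure-chosen integration time.
 */
static double estimate_fps(double pclk_hz,
			   unsigned int width, unsigned int hblank,
			   unsigned int height, unsigned int vblank)
{
	return pclk_hz / ((double)(width + hblank) * (height + vblank));
}

/* e.g. estimate_fps(48e6, 2048, 200, 1536, 50) is roughly 13.5 fps */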

Thanks
Guennadi

> 
> Here is my use case.
> 
> open capture device,
> set resolution (say VGA) for capture (S_FMT ???)
> request buffers for streaming (REQBUFS) & QUERYBUF & mmap
> start streaming (STREAMON)
> DQBUF/QBUF in a loop -> get VGA buffers at some fps.
> STREAMOFF
> close device
> 
> Is this possible with the mt9t031 driver currently in the tree? It requires the sensor device to output frames continuously on the bus, using PCLK/HSYNC/VSYNC timing, to the bridge device connected to the bus. Can you give a use case like the one above that you are using? I just want to estimate how much effort is required to add this support to the mt9t031 driver.
> 
> Thanks
> 
> Murali
> 
> >Thanks
> >Guennadi
> >---
> >Guennadi Liakhovetski, Ph.D.
> >Freelance Open-Source Software Developer
> >http://www.open-technology.de/
> 

---
Guennadi Liakhovetski, Ph.D.
Freelance Open-Source Software Developer
http://www.open-technology.de/
Murali Karicheri June 11, 2009, 4:30 p.m. UTC | #19
>I am sorry, I do not know how I can explain myself more clearly.
>
Thanks for helping me to understand better :)
>Yes, you can stream video with mt9t031.
>
>No, you can neither get the frame rate measured by the driver nor set a
>specific frame rate. Frames are produced as fast as the hardware goes,
>depending on clock settings, frame size, black areas and autoexposure.
>
Ok. It is now clear to me. 

Thanks for all your help.

>Thanks
>Guennadi
>
>>
>> Here is my use case.
>>
>> open capture device,
>> set resolution (say VGA) for capture (S_FMT ???)
>> request buffers for streaming (REQBUFS) & QUERYBUF & mmap
>> start streaming (STREAMON)
>> DQBUF/QBUF in a loop -> get VGA buffers at some fps.
>> STREAMOFF
>> close device
>>
>> Is this possible with the mt9t031 driver currently in the tree? It
>> requires the sensor device to output frames continuously on the bus,
>> using PCLK/HSYNC/VSYNC timing, to the bridge device connected to the
>> bus. Can you give a use case like the one above that you are using? I
>> just want to estimate how much effort is required to add this support
>> to the mt9t031 driver.
>>
>> Thanks
>>
>> Murali
>>
>> >Thanks
>> >Guennadi
>> >---
>> >Guennadi Liakhovetski, Ph.D.
>> >Freelance Open-Source Software Developer
>> >http://www.open-technology.de/
>>
>
>---
>Guennadi Liakhovetski, Ph.D.
>Freelance Open-Source Software Developer
>http://www.open-technology.de/

Guennadi Liakhovetski June 12, 2009, 12:15 p.m. UTC | #20
On Wed, 10 Jun 2009, Hans Verkuil wrote:

> On Wednesday 10 June 2009 23:30:55 Guennadi Liakhovetski wrote:
> > On Wed, 10 Jun 2009, Hans Verkuil wrote:
> > > My view of this would be that the board specification specifies the
> > > sensor (and possibly other chips) that are on the board. And to me it
> > > makes sense that that also supplies the bus settings. I agree that it
> > > is not complex code, but I think it is also unnecessary code. Why
> > > negotiate if you can just set it?
> >
> > Why force all platforms to set it if the driver is perfectly capable of
> > doing this itself? As I said - this is not a platform-specific feature,
> > it's chip-specific. What good would it do to have all platforms using
> > mt9t031 specify that, yes, the chip can use both falling and rising
> > pclk edges, but only active-high vsync and hsync?
> 
> ???
> 
> You will just tell the chip what to use. So you set 'use falling edge' and 
> either set 'active high vsync/hsync' or just leave that out since you know 
> the mt9t031 has that fixed. You don't specify in the platform data what the 
> chip can support; that's not relevant. You know what the host expects and 
> you pass that information on to the chip.
> 
> A board designer knows what the host supports, knows what the sensor 
> supports, and knows if he added any inverters on the board, and based on 
> all that information he can just set up these parameters for the sensor 
> chip. Settings that are fixed on the sensor chip he can just ignore; he 
> only needs to specify those settings that the sensor really needs.

I'd like to have this resolved somehow (preferably my way, of course :-)).
Here are, once again (plus some new ones), my main arguments:

1. it is very unusual that the board designer has to mandate which signal 
polarity is to be used - that is only necessary when there is additional 
logic between the capture device and the host. So we shouldn't burden all 
boards with this information. Board-code authors will be grateful to us!

2. what if you do have an inverter between the two? You'd have to tell the 
sensor to use active high, and the host to use active low, i.e., you need 
two sets of flags.

3. all soc-camera boards rely on this autonegotiation. Do we really want 
(and have) to add this useless information back to them? Back - because, 
yes, we have been there and done that before, but then we switched to the 
current autonegotiation, which we have been perfectly happy with so far 
(anyone dare to object? :-)).

4. the autonegotiation code is simple and small, so I really don't see a 
reason to hardcode something that we can perfectly well autoconfigure.
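
For concreteness, a sketch of what such a negotiation looks like, loosely
modelled on soc_camera_bus_param_compatible() and
soc_camera_apply_sensor_flags(). The BUS_* flag names are illustrative,
not the real SOCAM_* definitions:

/*
 * One bit per *setting*, so both polarities of each signal get a bit
 * and a device that supports both simply sets both bits.
 */
#define BUS_HSYNC_HIGH		(1 << 0)
#define BUS_HSYNC_LOW		(1 << 1)
#define BUS_VSYNC_HIGH		(1 << 2)
#define BUS_VSYNC_LOW		(1 << 3)
#define BUS_PCLK_RISING		(1 << 4)
#define BUS_PCLK_FALLING	(1 << 5)
#define BUS_INV_HSYNC		(1 << 6)	/* board-level inverter */

static unsigned long negotiate_bus(unsigned long host_caps,
				   unsigned long sensor_caps,
				   unsigned long board_flags)
{
	unsigned long f = sensor_caps & (BUS_HSYNC_HIGH | BUS_HSYNC_LOW);

	/*
	 * An inverter on the board flips the polarity the host actually
	 * sees. If the sensor supports only one hsync polarity, present
	 * the opposite one to the host; if it supports both, inversion
	 * changes nothing.
	 */
	if ((board_flags & BUS_INV_HSYNC) &&
	    (f == BUS_HSYNC_HIGH || f == BUS_HSYNC_LOW))
		sensor_caps ^= BUS_HSYNC_HIGH | BUS_HSYNC_LOW;

	/* The mutually acceptable subset is just the intersection... */
	f = host_caps & sensor_caps;

	/* ...but each signal group needs at least one common setting. */
	if (!(f & (BUS_HSYNC_HIGH | BUS_HSYNC_LOW)) ||
	    !(f & (BUS_VSYNC_HIGH | BUS_VSYNC_LOW)) ||
	    !(f & (BUS_PCLK_RISING | BUS_PCLK_FALLING)))
		return 0;	/* incompatible */

	return f;
}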

Thanks
Guennadi
---
Guennadi Liakhovetski, Ph.D.
Freelance Open-Source Software Developer
http://www.open-technology.de/
diff mbox

Patch

diff --git a/include/media/v4l2-subdev.h b/include/media/v4l2-subdev.h
index 1785608..c1cfb3b 100644
--- a/include/media/v4l2-subdev.h
+++ b/include/media/v4l2-subdev.h
@@ -37,6 +37,41 @@  struct v4l2_decode_vbi_line {
 	u32 type;		/* VBI service type (V4L2_SLICED_*). 0 if no service found */
 };
 
+/*
+ * Some sub-devices are connected to the bridge device through a bus that
+ * carries the clock, vsync, hsync and data. Some interfaces such as BT.656
+ * carry the sync embedded in the data, whereas others have separate lines
+ * carrying the sync signals. The structure below is used by the bridge
+ * driver to set the desired bus parameters in the sub-device to work with it.
+ */
+enum v4l2_subdev_bus_type {
+	/* BT.656 interface. Embedded sync */
+	V4L2_SUBDEV_BUS_BT_656,
+	/* BT.1120 interface. Embedded sync */
+	V4L2_SUBDEV_BUS_BT_1120,
+	/* 8 bit muxed YCbCr bus, separate sync and field signals */
+	V4L2_SUBDEV_BUS_YCBCR_8,
+	/* 16 bit YCbCr bus, separate sync and field signals */
+	V4L2_SUBDEV_BUS_YCBCR_16,
+	/* Raw Bayer image data bus, 8-16 bits wide, separate sync signals */
+	V4L2_SUBDEV_BUS_RAW_BAYER
+};
+
+struct v4l2_subdev_bus {
+	enum v4l2_subdev_bus_type type;
+	u8 width;
+	/* 0 - active low, 1 - active high */
+	unsigned pol_vsync:1;
+	/* 0 - active low, 1 - active high */
+	unsigned pol_hsync:1;
+	/* 0 - low to high , 1 - high to low */
+	unsigned pol_field:1;
+	/* 0 - sample at falling edge , 1 - sample at rising edge */
+	unsigned pol_pclock:1;
+	/* 0 - active low , 1 - active high */
+	unsigned pol_data:1;
+};
+
 /* Sub-devices are devices that are connected somehow to the main bridge
    device. These devices are usually audio/video muxers/encoders/decoders or
    sensors and webcam controllers.
@@ -109,6 +144,7 @@  struct v4l2_subdev_core_ops {
 	int (*querymenu)(struct v4l2_subdev *sd, struct v4l2_querymenu *qm);
 	int (*s_std)(struct v4l2_subdev *sd, v4l2_std_id norm);
 	long (*ioctl)(struct v4l2_subdev *sd, unsigned int cmd, void *arg);
+	int (*s_bus)(struct v4l2_subdev *sd, struct v4l2_subdev_bus *bus);
 #ifdef CONFIG_VIDEO_ADV_DEBUG
 	int (*g_register)(struct v4l2_subdev *sd, struct v4l2_dbg_register *reg);
 	int (*s_register)(struct v4l2_subdev *sd, struct v4l2_dbg_register *reg);
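
To round off the picture, a hypothetical sub-device implementation of the
proposed op. Everything here - the mysensor naming, the register macros and
the fixed-polarity constraints - is illustrative, not a real driver:

#include <linux/errno.h>
#include <media/v4l2-subdev.h>

#define REG_PCLK_CTRL	0x0a	/* made-up register */
#define PCLK_FALLING	0x00
#define PCLK_RISING	0x01

/* Stub: a real driver would issue an I2C register write here. */
static int mysensor_write(struct v4l2_subdev *sd, u8 reg, u8 val)
{
	return 0;
}

static int mysensor_s_bus(struct v4l2_subdev *sd, struct v4l2_subdev_bus *bus)
{
	/* This imaginary part only does 10-bit raw Bayer... */
	if (bus->type != V4L2_SUBDEV_BUS_RAW_BAYER || bus->width != 10)
		return -EINVAL;

	/* ...with vsync/hsync fixed active high in silicon. */
	if (!bus->pol_vsync || !bus->pol_hsync)
		return -EINVAL;

	/* The pixel clock edge is the only programmable parameter here. */
	return mysensor_write(sd, REG_PCLK_CTRL,
			      bus->pol_pclock ? PCLK_RISING : PCLK_FALLING);
}

static const struct v4l2_subdev_core_ops mysensor_core_ops = {
	.s_bus = mysensor_s_bus,
};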