Commit 760697be authored by Guennadi Liakhovetski, committed by Mauro Carvalho Chehab

V4L/DVB (13659): soc-camera: convert to the new mediabus API

Convert the soc-camera core and all soc-camera drivers to the new mediabus
API. This also takes the soc-camera client drivers one step closer to being
usable with generic v4l2-subdev host drivers.
Signed-off-by: Guennadi Liakhovetski <g.liakhovetski@gmx.de>
Acked-by: Hans Verkuil <hverkuil@xs4all.nl>
Signed-off-by: Mauro Carvalho Chehab <mchehab@redhat.com>
parent 9a74251d
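
For orientation before the hunks below: the new API replaces the fourcc-based struct v4l2_pix_format exchanged between client and host with a struct v4l2_mbus_framefmt keyed by an enum v4l2_mbus_pixelcode, and the v4l2_subdev_video_ops callbacks are renamed to their *_mbus_fmt variants. A minimal sketch of the client-side pattern this patch introduces (the sensor_* names are illustrative and not part of the patch; the fields and callbacks mirror those used in the mt9m001/mt9v022 hunks):

/* Sketch only: mirrors the pattern the converted sensor drivers use below. */
#include <linux/kernel.h>
#include <media/v4l2-subdev.h>

struct sensor_datafmt {
	enum v4l2_mbus_pixelcode code;		/* media-bus pixel code */
	enum v4l2_colorspace colorspace;	/* fixed per code */
};

static const struct sensor_datafmt sensor_fmts[] = {
	{V4L2_MBUS_FMT_SBGGR10_1X10, V4L2_COLORSPACE_SRGB},
	{V4L2_MBUS_FMT_SBGGR8_1X8, V4L2_COLORSPACE_SRGB},
};

/* .enum_mbus_fmt: report the supported media-bus codes one by one */
static int sensor_enum_fmt(struct v4l2_subdev *sd, int index,
			   enum v4l2_mbus_pixelcode *code)
{
	if ((unsigned int)index >= ARRAY_SIZE(sensor_fmts))
		return -EINVAL;

	*code = sensor_fmts[index].code;
	return 0;
}

/* .g_mbus_fmt: fill a v4l2_mbus_framefmt instead of a v4l2_pix_format */
static int sensor_g_fmt(struct v4l2_subdev *sd, struct v4l2_mbus_framefmt *mf)
{
	mf->width = 640;	/* current sensor window, hypothetical values */
	mf->height = 480;
	mf->code = sensor_fmts[0].code;
	mf->colorspace = sensor_fmts[0].colorspace;
	mf->field = V4L2_FIELD_NONE;
	return 0;
}

static struct v4l2_subdev_video_ops sensor_video_ops = {
	.g_mbus_fmt = sensor_g_fmt,	/* was .g_fmt */
	.enum_mbus_fmt = sensor_enum_fmt,	/* new callback */
};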
@@ -316,8 +316,9 @@ static struct soc_camera_platform_info camera_info = {
 	.format_name = "UYVY",
 	.format_depth = 16,
 	.format = {
-		.pixelformat = V4L2_PIX_FMT_UYVY,
+		.code = V4L2_MBUS_FMT_YUYV8_2X8_BE,
 		.colorspace = V4L2_COLORSPACE_SMPTE170M,
+		.field = V4L2_FIELD_NONE,
 		.width = 640,
 		.height = 480,
 	},
...
@@ -48,41 +48,46 @@
 #define MT9M001_COLUMN_SKIP 20
 #define MT9M001_ROW_SKIP 12
 
-static const struct soc_camera_data_format mt9m001_colour_formats[] = {
+/* MT9M001 has only one fixed colorspace per pixelcode */
+struct mt9m001_datafmt {
+	enum v4l2_mbus_pixelcode code;
+	enum v4l2_colorspace colorspace;
+};
+
+/* Find a data format by a pixel code in an array */
+static const struct mt9m001_datafmt *mt9m001_find_datafmt(
+	enum v4l2_mbus_pixelcode code, const struct mt9m001_datafmt *fmt,
+	int n)
+{
+	int i;
+	for (i = 0; i < n; i++)
+		if (fmt[i].code == code)
+			return fmt + i;
+
+	return NULL;
+}
+
+static const struct mt9m001_datafmt mt9m001_colour_fmts[] = {
 	/*
 	 * Order important: first natively supported,
 	 * second supported with a GPIO extender
 	 */
-	{
-		.name = "Bayer (sRGB) 10 bit",
-		.depth = 10,
-		.fourcc = V4L2_PIX_FMT_SBGGR16,
-		.colorspace = V4L2_COLORSPACE_SRGB,
-	}, {
-		.name = "Bayer (sRGB) 8 bit",
-		.depth = 8,
-		.fourcc = V4L2_PIX_FMT_SBGGR8,
-		.colorspace = V4L2_COLORSPACE_SRGB,
-	}
+	{V4L2_MBUS_FMT_SBGGR10_1X10, V4L2_COLORSPACE_SRGB},
+	{V4L2_MBUS_FMT_SBGGR8_1X8, V4L2_COLORSPACE_SRGB},
 };
 
-static const struct soc_camera_data_format mt9m001_monochrome_formats[] = {
+static const struct mt9m001_datafmt mt9m001_monochrome_fmts[] = {
 	/* Order important - see above */
-	{
-		.name = "Monochrome 10 bit",
-		.depth = 10,
-		.fourcc = V4L2_PIX_FMT_Y16,
-	}, {
-		.name = "Monochrome 8 bit",
-		.depth = 8,
-		.fourcc = V4L2_PIX_FMT_GREY,
-	},
+	{V4L2_MBUS_FMT_Y10_1X10, V4L2_COLORSPACE_JPEG},
+	{V4L2_MBUS_FMT_GREY8_1X8, V4L2_COLORSPACE_JPEG},
 };
 
 struct mt9m001 {
 	struct v4l2_subdev subdev;
 	struct v4l2_rect rect;	/* Sensor window */
-	__u32 fourcc;
+	const struct mt9m001_datafmt *fmt;
+	const struct mt9m001_datafmt *fmts;
+	int num_fmts;
 	int model;	/* V4L2_IDENT_MT9M001* codes from v4l2-chip-ident.h */
 	unsigned int gain;
 	unsigned int exposure;
@@ -209,8 +214,7 @@ static int mt9m001_s_crop(struct v4l2_subdev *sd, struct v4l2_crop *a)
 	const u16 hblank = 9, vblank = 25;
 	unsigned int total_h;
 
-	if (mt9m001->fourcc == V4L2_PIX_FMT_SBGGR8 ||
-	    mt9m001->fourcc == V4L2_PIX_FMT_SBGGR16)
+	if (mt9m001->fmts == mt9m001_colour_fmts)
 		/*
 		 * Bayer format - even number of rows for simplicity,
 		 * but let the user play with the top row.
@@ -290,32 +294,32 @@ static int mt9m001_cropcap(struct v4l2_subdev *sd, struct v4l2_cropcap *a)
 	return 0;
 }
 
-static int mt9m001_g_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
+static int mt9m001_g_fmt(struct v4l2_subdev *sd,
+			 struct v4l2_mbus_framefmt *mf)
 {
 	struct i2c_client *client = sd->priv;
 	struct mt9m001 *mt9m001 = to_mt9m001(client);
-	struct v4l2_pix_format *pix = &f->fmt.pix;
 
-	pix->width = mt9m001->rect.width;
-	pix->height = mt9m001->rect.height;
-	pix->pixelformat = mt9m001->fourcc;
-	pix->field = V4L2_FIELD_NONE;
-	pix->colorspace = V4L2_COLORSPACE_SRGB;
+	mf->width = mt9m001->rect.width;
+	mf->height = mt9m001->rect.height;
+	mf->code = mt9m001->fmt->code;
+	mf->colorspace = mt9m001->fmt->colorspace;
+	mf->field = V4L2_FIELD_NONE;
 
 	return 0;
 }
 
-static int mt9m001_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
+static int mt9m001_s_fmt(struct v4l2_subdev *sd,
+			 struct v4l2_mbus_framefmt *mf)
 {
 	struct i2c_client *client = sd->priv;
 	struct mt9m001 *mt9m001 = to_mt9m001(client);
-	struct v4l2_pix_format *pix = &f->fmt.pix;
 	struct v4l2_crop a = {
 		.c = {
 			.left = mt9m001->rect.left,
 			.top = mt9m001->rect.top,
-			.width = pix->width,
-			.height = pix->height,
+			.width = mf->width,
+			.height = mf->height,
 		},
 	};
 	int ret;
@@ -323,28 +327,39 @@ static int mt9m001_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
 	/* No support for scaling so far, just crop. TODO: use skipping */
 	ret = mt9m001_s_crop(sd, &a);
 	if (!ret) {
-		pix->width = mt9m001->rect.width;
-		pix->height = mt9m001->rect.height;
-		mt9m001->fourcc = pix->pixelformat;
+		mf->width = mt9m001->rect.width;
+		mf->height = mt9m001->rect.height;
+		mt9m001->fmt = mt9m001_find_datafmt(mf->code,
+					mt9m001->fmts, mt9m001->num_fmts);
+		mf->colorspace = mt9m001->fmt->colorspace;
 	}
 
 	return ret;
 }
 
-static int mt9m001_try_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
+static int mt9m001_try_fmt(struct v4l2_subdev *sd,
+			   struct v4l2_mbus_framefmt *mf)
 {
 	struct i2c_client *client = sd->priv;
 	struct mt9m001 *mt9m001 = to_mt9m001(client);
-	struct v4l2_pix_format *pix = &f->fmt.pix;
+	const struct mt9m001_datafmt *fmt;
 
-	v4l_bound_align_image(&pix->width, MT9M001_MIN_WIDTH,
+	v4l_bound_align_image(&mf->width, MT9M001_MIN_WIDTH,
 		MT9M001_MAX_WIDTH, 1,
-		&pix->height, MT9M001_MIN_HEIGHT + mt9m001->y_skip_top,
+		&mf->height, MT9M001_MIN_HEIGHT + mt9m001->y_skip_top,
 		MT9M001_MAX_HEIGHT + mt9m001->y_skip_top, 0, 0);
 
-	if (pix->pixelformat == V4L2_PIX_FMT_SBGGR8 ||
-	    pix->pixelformat == V4L2_PIX_FMT_SBGGR16)
-		pix->height = ALIGN(pix->height - 1, 2);
+	if (mt9m001->fmts == mt9m001_colour_fmts)
+		mf->height = ALIGN(mf->height - 1, 2);
+
+	fmt = mt9m001_find_datafmt(mf->code, mt9m001->fmts,
+				   mt9m001->num_fmts);
+	if (!fmt) {
+		fmt = mt9m001->fmt;
+		mf->code = fmt->code;
+	}
+
+	mf->colorspace = fmt->colorspace;
 
 	return 0;
 }
@@ -608,11 +623,11 @@ static int mt9m001_video_probe(struct soc_camera_device *icd,
 	case 0x8411:
 	case 0x8421:
 		mt9m001->model = V4L2_IDENT_MT9M001C12ST;
-		icd->formats = mt9m001_colour_formats;
+		mt9m001->fmts = mt9m001_colour_fmts;
 		break;
 	case 0x8431:
 		mt9m001->model = V4L2_IDENT_MT9M001C12STM;
-		icd->formats = mt9m001_monochrome_formats;
+		mt9m001->fmts = mt9m001_monochrome_fmts;
 		break;
 	default:
 		dev_err(&client->dev,
@@ -620,7 +635,7 @@ static int mt9m001_video_probe(struct soc_camera_device *icd,
 		return -ENODEV;
 	}
 
-	icd->num_formats = 0;
+	mt9m001->num_fmts = 0;
 
 	/*
 	 * This is a 10bit sensor, so by default we only allow 10bit.
@@ -633,14 +648,14 @@ static int mt9m001_video_probe(struct soc_camera_device *icd,
 		flags = SOCAM_DATAWIDTH_10;
 
 	if (flags & SOCAM_DATAWIDTH_10)
-		icd->num_formats++;
+		mt9m001->num_fmts++;
 	else
-		icd->formats++;
+		mt9m001->fmts++;
 
 	if (flags & SOCAM_DATAWIDTH_8)
-		icd->num_formats++;
+		mt9m001->num_fmts++;
 
-	mt9m001->fourcc = icd->formats->fourcc;
+	mt9m001->fmt = &mt9m001->fmts[0];
 
 	dev_info(&client->dev, "Detected a MT9M001 chip ID %x (%s)\n", data,
 		 data == 0x8431 ? "C12STM" : "C12ST");
@@ -686,14 +701,28 @@ static struct v4l2_subdev_core_ops mt9m001_subdev_core_ops = {
 #endif
 };
 
+static int mt9m001_enum_fmt(struct v4l2_subdev *sd, int index,
+			    enum v4l2_mbus_pixelcode *code)
+{
+	struct i2c_client *client = sd->priv;
+	struct mt9m001 *mt9m001 = to_mt9m001(client);
+
+	if ((unsigned int)index >= mt9m001->num_fmts)
+		return -EINVAL;
+
+	*code = mt9m001->fmts[index].code;
+	return 0;
+}
+
 static struct v4l2_subdev_video_ops mt9m001_subdev_video_ops = {
 	.s_stream = mt9m001_s_stream,
-	.s_fmt = mt9m001_s_fmt,
-	.g_fmt = mt9m001_g_fmt,
-	.try_fmt = mt9m001_try_fmt,
+	.s_mbus_fmt = mt9m001_s_fmt,
+	.g_mbus_fmt = mt9m001_g_fmt,
+	.try_mbus_fmt = mt9m001_try_fmt,
 	.s_crop = mt9m001_s_crop,
 	.g_crop = mt9m001_g_crop,
 	.cropcap = mt9m001_cropcap,
+	.enum_mbus_fmt = mt9m001_enum_fmt,
 };
 
 static struct v4l2_subdev_sensor_ops mt9m001_subdev_sensor_ops = {
...
@@ -123,23 +123,34 @@
 #define MT9M111_MAX_HEIGHT 1024
 #define MT9M111_MAX_WIDTH 1280
 
-#define COL_FMT(_name, _depth, _fourcc, _colorspace) \
-	{ .name = _name, .depth = _depth, .fourcc = _fourcc, \
-	.colorspace = _colorspace }
-
-#define RGB_FMT(_name, _depth, _fourcc) \
-	COL_FMT(_name, _depth, _fourcc, V4L2_COLORSPACE_SRGB)
-
-#define JPG_FMT(_name, _depth, _fourcc) \
-	COL_FMT(_name, _depth, _fourcc, V4L2_COLORSPACE_JPEG)
-
-static const struct soc_camera_data_format mt9m111_colour_formats[] = {
-	JPG_FMT("CbYCrY 16 bit", 16, V4L2_PIX_FMT_UYVY),
-	JPG_FMT("CrYCbY 16 bit", 16, V4L2_PIX_FMT_VYUY),
-	JPG_FMT("YCbYCr 16 bit", 16, V4L2_PIX_FMT_YUYV),
-	JPG_FMT("YCrYCb 16 bit", 16, V4L2_PIX_FMT_YVYU),
-	RGB_FMT("RGB 565", 16, V4L2_PIX_FMT_RGB565),
-	RGB_FMT("RGB 555", 16, V4L2_PIX_FMT_RGB555),
-	RGB_FMT("Bayer (sRGB) 10 bit", 10, V4L2_PIX_FMT_SBGGR16),
-	RGB_FMT("Bayer (sRGB) 8 bit", 8, V4L2_PIX_FMT_SBGGR8),
+/* MT9M111 has only one fixed colorspace per pixelcode */
+struct mt9m111_datafmt {
+	enum v4l2_mbus_pixelcode code;
+	enum v4l2_colorspace colorspace;
+};
+
+/* Find a data format by a pixel code in an array */
+static const struct mt9m111_datafmt *mt9m111_find_datafmt(
+	enum v4l2_mbus_pixelcode code, const struct mt9m111_datafmt *fmt,
+	int n)
+{
+	int i;
+	for (i = 0; i < n; i++)
+		if (fmt[i].code == code)
+			return fmt + i;
+
+	return NULL;
+}
+
+static const struct mt9m111_datafmt mt9m111_colour_fmts[] = {
+	{V4L2_MBUS_FMT_YUYV8_2X8_LE, V4L2_COLORSPACE_JPEG},
+	{V4L2_MBUS_FMT_YVYU8_2X8_LE, V4L2_COLORSPACE_JPEG},
+	{V4L2_MBUS_FMT_YUYV8_2X8_BE, V4L2_COLORSPACE_JPEG},
+	{V4L2_MBUS_FMT_YVYU8_2X8_BE, V4L2_COLORSPACE_JPEG},
+	{V4L2_MBUS_FMT_RGB555_2X8_PADHI_LE, V4L2_COLORSPACE_SRGB},
+	{V4L2_MBUS_FMT_RGB565_2X8_LE, V4L2_COLORSPACE_SRGB},
+	{V4L2_MBUS_FMT_SBGGR8_1X8, V4L2_COLORSPACE_SRGB},
+	{V4L2_MBUS_FMT_SBGGR10_2X8_PADHI_LE, V4L2_COLORSPACE_SRGB},
 };
 
 enum mt9m111_context {
@@ -152,7 +163,7 @@ struct mt9m111 {
 	int model;	/* V4L2_IDENT_MT9M11x* codes from v4l2-chip-ident.h */
 	enum mt9m111_context context;
 	struct v4l2_rect rect;
-	u32 pixfmt;
+	const struct mt9m111_datafmt *fmt;
 	unsigned int gain;
 	unsigned char autoexposure;
 	unsigned char datawidth;
@@ -258,8 +269,8 @@ static int mt9m111_setup_rect(struct i2c_client *client,
 	int width = rect->width;
 	int height = rect->height;
 
-	if (mt9m111->pixfmt == V4L2_PIX_FMT_SBGGR8 ||
-	    mt9m111->pixfmt == V4L2_PIX_FMT_SBGGR16)
+	if (mt9m111->fmt->code == V4L2_MBUS_FMT_SBGGR8_1X8 ||
+	    mt9m111->fmt->code == V4L2_MBUS_FMT_SBGGR10_2X8_PADHI_LE)
 		is_raw_format = 1;
 	else
 		is_raw_format = 0;
@@ -307,7 +318,8 @@ static int mt9m111_setup_pixfmt(struct i2c_client *client, u16 outfmt)
 
 static int mt9m111_setfmt_bayer8(struct i2c_client *client)
 {
-	return mt9m111_setup_pixfmt(client, MT9M111_OUTFMT_PROCESSED_BAYER);
+	return mt9m111_setup_pixfmt(client, MT9M111_OUTFMT_PROCESSED_BAYER |
+				    MT9M111_OUTFMT_RGB);
 }
 
 static int mt9m111_setfmt_bayer10(struct i2c_client *client)
@@ -401,8 +413,8 @@ static int mt9m111_make_rect(struct i2c_client *client,
 {
 	struct mt9m111 *mt9m111 = to_mt9m111(client);
 
-	if (mt9m111->pixfmt == V4L2_PIX_FMT_SBGGR8 ||
-	    mt9m111->pixfmt == V4L2_PIX_FMT_SBGGR16) {
+	if (mt9m111->fmt->code == V4L2_MBUS_FMT_SBGGR8_1X8 ||
+	    mt9m111->fmt->code == V4L2_MBUS_FMT_SBGGR10_2X8_PADHI_LE) {
 		/* Bayer format - even size lengths */
 		rect->width = ALIGN(rect->width, 2);
 		rect->height = ALIGN(rect->height, 2);
@@ -460,120 +472,139 @@ static int mt9m111_cropcap(struct v4l2_subdev *sd, struct v4l2_cropcap *a)
 	return 0;
 }
 
-static int mt9m111_g_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
+static int mt9m111_g_fmt(struct v4l2_subdev *sd,
+			 struct v4l2_mbus_framefmt *mf)
 {
 	struct i2c_client *client = sd->priv;
 	struct mt9m111 *mt9m111 = to_mt9m111(client);
-	struct v4l2_pix_format *pix = &f->fmt.pix;
 
-	pix->width = mt9m111->rect.width;
-	pix->height = mt9m111->rect.height;
-	pix->pixelformat = mt9m111->pixfmt;
-	pix->field = V4L2_FIELD_NONE;
-	pix->colorspace = V4L2_COLORSPACE_SRGB;
+	mf->width = mt9m111->rect.width;
+	mf->height = mt9m111->rect.height;
+	mf->code = mt9m111->fmt->code;
+	mf->field = V4L2_FIELD_NONE;
 
 	return 0;
 }
 
-static int mt9m111_set_pixfmt(struct i2c_client *client, u32 pixfmt)
+static int mt9m111_set_pixfmt(struct i2c_client *client,
+			      enum v4l2_mbus_pixelcode code)
 {
 	struct mt9m111 *mt9m111 = to_mt9m111(client);
 	int ret;
 
-	switch (pixfmt) {
-	case V4L2_PIX_FMT_SBGGR8:
+	switch (code) {
+	case V4L2_MBUS_FMT_SBGGR8_1X8:
 		ret = mt9m111_setfmt_bayer8(client);
 		break;
-	case V4L2_PIX_FMT_SBGGR16:
+	case V4L2_MBUS_FMT_SBGGR10_2X8_PADHI_LE:
 		ret = mt9m111_setfmt_bayer10(client);
 		break;
-	case V4L2_PIX_FMT_RGB555:
+	case V4L2_MBUS_FMT_RGB555_2X8_PADHI_LE:
 		ret = mt9m111_setfmt_rgb555(client);
 		break;
-	case V4L2_PIX_FMT_RGB565:
+	case V4L2_MBUS_FMT_RGB565_2X8_LE:
 		ret = mt9m111_setfmt_rgb565(client);
 		break;
-	case V4L2_PIX_FMT_UYVY:
+	case V4L2_MBUS_FMT_YUYV8_2X8_BE:
 		mt9m111->swap_yuv_y_chromas = 0;
 		mt9m111->swap_yuv_cb_cr = 0;
 		ret = mt9m111_setfmt_yuv(client);
 		break;
-	case V4L2_PIX_FMT_VYUY:
+	case V4L2_MBUS_FMT_YVYU8_2X8_BE:
 		mt9m111->swap_yuv_y_chromas = 0;
 		mt9m111->swap_yuv_cb_cr = 1;
 		ret = mt9m111_setfmt_yuv(client);
 		break;
-	case V4L2_PIX_FMT_YUYV:
+	case V4L2_MBUS_FMT_YUYV8_2X8_LE:
 		mt9m111->swap_yuv_y_chromas = 1;
 		mt9m111->swap_yuv_cb_cr = 0;
 		ret = mt9m111_setfmt_yuv(client);
 		break;
-	case V4L2_PIX_FMT_YVYU:
+	case V4L2_MBUS_FMT_YVYU8_2X8_LE:
 		mt9m111->swap_yuv_y_chromas = 1;
 		mt9m111->swap_yuv_cb_cr = 1;
 		ret = mt9m111_setfmt_yuv(client);
 		break;
 	default:
 		dev_err(&client->dev, "Pixel format not handled : %x\n",
-			pixfmt);
+			code);
 		ret = -EINVAL;
 	}
 
-	if (!ret)
-		mt9m111->pixfmt = pixfmt;
 
 	return ret;
 }
 
-static int mt9m111_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
+static int mt9m111_s_fmt(struct v4l2_subdev *sd,
+			 struct v4l2_mbus_framefmt *mf)
 {
 	struct i2c_client *client = sd->priv;
+	const struct mt9m111_datafmt *fmt;
 	struct mt9m111 *mt9m111 = to_mt9m111(client);
-	struct v4l2_pix_format *pix = &f->fmt.pix;
 	struct v4l2_rect rect = {
 		.left = mt9m111->rect.left,
 		.top = mt9m111->rect.top,
-		.width = pix->width,
-		.height = pix->height,
+		.width = mf->width,
+		.height = mf->height,
 	};
 	int ret;
 
+	fmt = mt9m111_find_datafmt(mf->code, mt9m111_colour_fmts,
+				   ARRAY_SIZE(mt9m111_colour_fmts));
+	if (!fmt)
+		return -EINVAL;
+
 	dev_dbg(&client->dev,
-		"%s fmt=%x left=%d, top=%d, width=%d, height=%d\n", __func__,
-		pix->pixelformat, rect.left, rect.top, rect.width, rect.height);
+		"%s code=%x left=%d, top=%d, width=%d, height=%d\n", __func__,
+		mf->code, rect.left, rect.top, rect.width, rect.height);
 
 	ret = mt9m111_make_rect(client, &rect);
 	if (!ret)
-		ret = mt9m111_set_pixfmt(client, pix->pixelformat);
-	if (!ret)
+		ret = mt9m111_set_pixfmt(client, mf->code);
+	if (!ret) {
 		mt9m111->rect = rect;
+		mt9m111->fmt = fmt;
+		mf->colorspace = fmt->colorspace;
+	}
 
 	return ret;
 }
 
-static int mt9m111_try_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
+static int mt9m111_try_fmt(struct v4l2_subdev *sd,
+			   struct v4l2_mbus_framefmt *mf)
 {
-	struct v4l2_pix_format *pix = &f->fmt.pix;
-	bool bayer = pix->pixelformat == V4L2_PIX_FMT_SBGGR8 ||
-		pix->pixelformat == V4L2_PIX_FMT_SBGGR16;
+	struct i2c_client *client = sd->priv;
+	struct mt9m111 *mt9m111 = to_mt9m111(client);
+	const struct mt9m111_datafmt *fmt;
+	bool bayer = mf->code == V4L2_MBUS_FMT_SBGGR8_1X8 ||
+		mf->code == V4L2_MBUS_FMT_SBGGR10_2X8_PADHI_LE;
+
+	fmt = mt9m111_find_datafmt(mf->code, mt9m111_colour_fmts,
+				   ARRAY_SIZE(mt9m111_colour_fmts));
+	if (!fmt) {
+		fmt = mt9m111->fmt;
+		mf->code = fmt->code;
+	}
 
 	/*
 	 * With Bayer format enforce even side lengths, but let the user play
 	 * with the starting pixel
 	 */
-	if (pix->height > MT9M111_MAX_HEIGHT)
-		pix->height = MT9M111_MAX_HEIGHT;
-	else if (pix->height < 2)
-		pix->height = 2;
+	if (mf->height > MT9M111_MAX_HEIGHT)
+		mf->height = MT9M111_MAX_HEIGHT;
+	else if (mf->height < 2)
+		mf->height = 2;
 	else if (bayer)
-		pix->height = ALIGN(pix->height, 2);
+		mf->height = ALIGN(mf->height, 2);
 
-	if (pix->width > MT9M111_MAX_WIDTH)
-		pix->width = MT9M111_MAX_WIDTH;
-	else if (pix->width < 2)
-		pix->width = 2;
+	if (mf->width > MT9M111_MAX_WIDTH)
+		mf->width = MT9M111_MAX_WIDTH;
+	else if (mf->width < 2)
+		mf->width = 2;
 	else if (bayer)
-		pix->width = ALIGN(pix->width, 2);
+		mf->width = ALIGN(mf->width, 2);
+
+	mf->colorspace = fmt->colorspace;
 
 	return 0;
 }
@@ -863,7 +894,7 @@ static int mt9m111_restore_state(struct i2c_client *client)
 	struct mt9m111 *mt9m111 = to_mt9m111(client);
 
 	mt9m111_set_context(client, mt9m111->context);
-	mt9m111_set_pixfmt(client, mt9m111->pixfmt);
+	mt9m111_set_pixfmt(client, mt9m111->fmt->code);
 	mt9m111_setup_rect(client, &mt9m111->rect);
 	mt9m111_set_flip(client, mt9m111->hflip, MT9M111_RMB_MIRROR_COLS);
 	mt9m111_set_flip(client, mt9m111->vflip, MT9M111_RMB_MIRROR_ROWS);
@@ -952,9 +983,6 @@ static int mt9m111_video_probe(struct soc_camera_device *icd,
 		goto ei2c;
 	}
 
-	icd->formats = mt9m111_colour_formats;
-	icd->num_formats = ARRAY_SIZE(mt9m111_colour_formats);
-
 	dev_info(&client->dev, "Detected a MT9M11x chip ID %x\n", data);
 
 ei2c:
@@ -971,13 +999,24 @@ static struct v4l2_subdev_core_ops mt9m111_subdev_core_ops = {
 #endif
 };
 
+static int mt9m111_enum_fmt(struct v4l2_subdev *sd, int index,
+			    enum v4l2_mbus_pixelcode *code)
+{
+	if ((unsigned int)index >= ARRAY_SIZE(mt9m111_colour_fmts))
+		return -EINVAL;
+
+	*code = mt9m111_colour_fmts[index].code;
+	return 0;
+}
+
 static struct v4l2_subdev_video_ops mt9m111_subdev_video_ops = {
-	.s_fmt = mt9m111_s_fmt,
-	.g_fmt = mt9m111_g_fmt,
-	.try_fmt = mt9m111_try_fmt,
+	.s_mbus_fmt = mt9m111_s_fmt,
+	.g_mbus_fmt = mt9m111_g_fmt,
+	.try_mbus_fmt = mt9m111_try_fmt,
 	.s_crop = mt9m111_s_crop,
 	.g_crop = mt9m111_g_crop,
 	.cropcap = mt9m111_cropcap,
+	.enum_mbus_fmt = mt9m111_enum_fmt,
 };
 
 static struct v4l2_subdev_ops mt9m111_subdev_ops = {
@@ -1024,6 +1063,7 @@ static int mt9m111_probe(struct i2c_client *client,
 	mt9m111->rect.top = MT9M111_MIN_DARK_ROWS;
 	mt9m111->rect.width = MT9M111_MAX_WIDTH;
 	mt9m111->rect.height = MT9M111_MAX_HEIGHT;
+	mt9m111->fmt = &mt9m111_colour_fmts[0];
 
 	ret = mt9m111_video_probe(icd, client);
 	if (ret) {
...
@@ -60,15 +60,6 @@
 	SOCAM_VSYNC_ACTIVE_HIGH | SOCAM_DATA_ACTIVE_HIGH | \
 	SOCAM_MASTER | SOCAM_DATAWIDTH_10)
 
-static const struct soc_camera_data_format mt9t031_colour_formats[] = {
-	{
-		.name = "Bayer (sRGB) 10 bit",
-		.depth = 10,
-		.fourcc = V4L2_PIX_FMT_SGRBG10,
-		.colorspace = V4L2_COLORSPACE_SRGB,
-	}
-};
-
 struct mt9t031 {
 	struct v4l2_subdev subdev;
 	struct v4l2_rect rect;	/* Sensor window */
@@ -378,27 +369,27 @@ static int mt9t031_cropcap(struct v4l2_subdev *sd, struct v4l2_cropcap *a)
 	return 0;
 }
 
-static int mt9t031_g_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
+static int mt9t031_g_fmt(struct v4l2_subdev *sd,
+			 struct v4l2_mbus_framefmt *mf)
 {
 	struct i2c_client *client = sd->priv;
 	struct mt9t031 *mt9t031 = to_mt9t031(client);
-	struct v4l2_pix_format *pix = &f->fmt.pix;
 
-	pix->width = mt9t031->rect.width / mt9t031->xskip;
-	pix->height = mt9t031->rect.height / mt9t031->yskip;
-	pix->pixelformat = V4L2_PIX_FMT_SGRBG10;
-	pix->field = V4L2_FIELD_NONE;
-	pix->colorspace = V4L2_COLORSPACE_SRGB;
+	mf->width = mt9t031->rect.width / mt9t031->xskip;
+	mf->height = mt9t031->rect.height / mt9t031->yskip;
+	mf->code = V4L2_MBUS_FMT_SBGGR10_1X10;
+	mf->colorspace = V4L2_COLORSPACE_SRGB;
+	mf->field = V4L2_FIELD_NONE;
 
 	return 0;
 }
 
-static int mt9t031_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
+static int mt9t031_s_fmt(struct v4l2_subdev *sd,
+			 struct v4l2_mbus_framefmt *mf)
 {
 	struct i2c_client *client = sd->priv;
 	struct mt9t031 *mt9t031 = to_mt9t031(client);
 	struct soc_camera_device *icd = client->dev.platform_data;
-	struct v4l2_pix_format *pix = &f->fmt.pix;
 	u16 xskip, yskip;
 	struct v4l2_rect rect = mt9t031->rect;
@@ -406,8 +397,11 @@ static int mt9t031_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
 	 * try_fmt has put width and height within limits.
 	 * S_FMT: use binning and skipping for scaling
 	 */
-	xskip = mt9t031_skip(&rect.width, pix->width, MT9T031_MAX_WIDTH);
-	yskip = mt9t031_skip(&rect.height, pix->height, MT9T031_MAX_HEIGHT);
+	xskip = mt9t031_skip(&rect.width, mf->width, MT9T031_MAX_WIDTH);
+	yskip = mt9t031_skip(&rect.height, mf->height, MT9T031_MAX_HEIGHT);
+
+	mf->code = V4L2_MBUS_FMT_SBGGR10_1X10;
+	mf->colorspace = V4L2_COLORSPACE_SRGB;
 
 	/* mt9t031_set_params() doesn't change width and height */
 	return mt9t031_set_params(icd, &rect, xskip, yskip);
@@ -417,13 +411,15 @@ static int mt9t031_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
  * If a user window larger than sensor window is requested, we'll increase the
  * sensor window.
  */
-static int mt9t031_try_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
+static int mt9t031_try_fmt(struct v4l2_subdev *sd,
+			   struct v4l2_mbus_framefmt *mf)
 {
-	struct v4l2_pix_format *pix = &f->fmt.pix;
-
 	v4l_bound_align_image(
-		&pix->width, MT9T031_MIN_WIDTH, MT9T031_MAX_WIDTH, 1,
-		&pix->height, MT9T031_MIN_HEIGHT, MT9T031_MAX_HEIGHT, 1, 0);
+		&mf->width, MT9T031_MIN_WIDTH, MT9T031_MAX_WIDTH, 1,
+		&mf->height, MT9T031_MIN_HEIGHT, MT9T031_MAX_HEIGHT, 1, 0);
+
+	mf->code = V4L2_MBUS_FMT_SBGGR10_1X10;
+	mf->colorspace = V4L2_COLORSPACE_SRGB;
 
 	return 0;
 }
@@ -684,7 +680,6 @@ static int mt9t031_s_ctrl(struct v4l2_subdev *sd, struct v4l2_control *ctrl)
  */
 static int mt9t031_video_probe(struct i2c_client *client)
 {
-	struct soc_camera_device *icd = client->dev.platform_data;
 	struct mt9t031 *mt9t031 = to_mt9t031(client);
 	s32 data;
 	int ret;
@@ -699,8 +694,6 @@ static int mt9t031_video_probe(struct i2c_client *client)
 	switch (data) {
 	case 0x1621:
 		mt9t031->model = V4L2_IDENT_MT9T031;
-		icd->formats = mt9t031_colour_formats;
-		icd->num_formats = ARRAY_SIZE(mt9t031_colour_formats);
 		break;
 	default:
 		dev_err(&client->dev,
@@ -741,14 +734,25 @@ static struct v4l2_subdev_core_ops mt9t031_subdev_core_ops = {
 #endif
 };
 
+static int mt9t031_enum_fmt(struct v4l2_subdev *sd, int index,
+			    enum v4l2_mbus_pixelcode *code)
+{
+	if (index)
+		return -EINVAL;
+
+	*code = V4L2_MBUS_FMT_SBGGR10_1X10;
+	return 0;
+}
+
 static struct v4l2_subdev_video_ops mt9t031_subdev_video_ops = {
 	.s_stream = mt9t031_s_stream,
-	.s_fmt = mt9t031_s_fmt,
-	.g_fmt = mt9t031_g_fmt,
-	.try_fmt = mt9t031_try_fmt,
+	.s_mbus_fmt = mt9t031_s_fmt,
+	.g_mbus_fmt = mt9t031_g_fmt,
+	.try_mbus_fmt = mt9t031_try_fmt,
 	.s_crop = mt9t031_s_crop,
 	.g_crop = mt9t031_g_crop,
 	.cropcap = mt9t031_cropcap,
+	.enum_mbus_fmt = mt9t031_enum_fmt,
 };
 
 static struct v4l2_subdev_sensor_ops mt9t031_subdev_sensor_ops = {
...
@@ -64,41 +64,46 @@ MODULE_PARM_DESC(sensor_type, "Sensor type: \"colour\" or \"monochrome\"");
 #define MT9V022_COLUMN_SKIP 1
 #define MT9V022_ROW_SKIP 4
 
-static const struct soc_camera_data_format mt9v022_colour_formats[] = {
+/* MT9V022 has only one fixed colorspace per pixelcode */
+struct mt9v022_datafmt {
+	enum v4l2_mbus_pixelcode code;
+	enum v4l2_colorspace colorspace;
+};
+
+/* Find a data format by a pixel code in an array */
+static const struct mt9v022_datafmt *mt9v022_find_datafmt(
+	enum v4l2_mbus_pixelcode code, const struct mt9v022_datafmt *fmt,
+	int n)
+{
+	int i;
+	for (i = 0; i < n; i++)
+		if (fmt[i].code == code)
+			return fmt + i;
+
+	return NULL;
+}
+
+static const struct mt9v022_datafmt mt9v022_colour_fmts[] = {
 	/*
 	 * Order important: first natively supported,
 	 * second supported with a GPIO extender
 	 */
-	{
-		.name = "Bayer (sRGB) 10 bit",
-		.depth = 10,
-		.fourcc = V4L2_PIX_FMT_SBGGR16,
-		.colorspace = V4L2_COLORSPACE_SRGB,
-	}, {
-		.name = "Bayer (sRGB) 8 bit",
-		.depth = 8,
-		.fourcc = V4L2_PIX_FMT_SBGGR8,
-		.colorspace = V4L2_COLORSPACE_SRGB,
-	}
+	{V4L2_MBUS_FMT_SBGGR10_1X10, V4L2_COLORSPACE_SRGB},
+	{V4L2_MBUS_FMT_SBGGR8_1X8, V4L2_COLORSPACE_SRGB},
 };
 
-static const struct soc_camera_data_format mt9v022_monochrome_formats[] = {
+static const struct mt9v022_datafmt mt9v022_monochrome_fmts[] = {
 	/* Order important - see above */
-	{
-		.name = "Monochrome 10 bit",
-		.depth = 10,
-		.fourcc = V4L2_PIX_FMT_Y16,
-	}, {
-		.name = "Monochrome 8 bit",
-		.depth = 8,
-		.fourcc = V4L2_PIX_FMT_GREY,
-	},
+	{V4L2_MBUS_FMT_Y10_1X10, V4L2_COLORSPACE_JPEG},
+	{V4L2_MBUS_FMT_GREY8_1X8, V4L2_COLORSPACE_JPEG},
 };
 
 struct mt9v022 {
 	struct v4l2_subdev subdev;
 	struct v4l2_rect rect;	/* Sensor window */
-	__u32 fourcc;
+	const struct mt9v022_datafmt *fmt;
+	const struct mt9v022_datafmt *fmts;
+	int num_fmts;
 	int model;	/* V4L2_IDENT_MT9V022* codes from v4l2-chip-ident.h */
 	u16 chip_control;
 	unsigned short y_skip_top;	/* Lines to skip at the top */
@@ -275,8 +280,7 @@ static int mt9v022_s_crop(struct v4l2_subdev *sd, struct v4l2_crop *a)
 	int ret;
 
 	/* Bayer format - even size lengths */
-	if (mt9v022->fourcc == V4L2_PIX_FMT_SBGGR8 ||
-	    mt9v022->fourcc == V4L2_PIX_FMT_SBGGR16) {
+	if (mt9v022->fmts == mt9v022_colour_fmts) {
 		rect.width = ALIGN(rect.width, 2);
 		rect.height = ALIGN(rect.height, 2);
 		/* Let the user play with the starting pixel */
@@ -354,32 +358,32 @@ static int mt9v022_cropcap(struct v4l2_subdev *sd, struct v4l2_cropcap *a)
 	return 0;
 }
 
-static int mt9v022_g_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
+static int mt9v022_g_fmt(struct v4l2_subdev *sd,
+			 struct v4l2_mbus_framefmt *mf)
 {
 	struct i2c_client *client = sd->priv;
 	struct mt9v022 *mt9v022 = to_mt9v022(client);
-	struct v4l2_pix_format *pix = &f->fmt.pix;
 
-	pix->width = mt9v022->rect.width;
-	pix->height = mt9v022->rect.height;
-	pix->pixelformat = mt9v022->fourcc;
-	pix->field = V4L2_FIELD_NONE;
-	pix->colorspace = V4L2_COLORSPACE_SRGB;
+	mf->width = mt9v022->rect.width;
+	mf->height = mt9v022->rect.height;
+	mf->code = mt9v022->fmt->code;
+	mf->colorspace = mt9v022->fmt->colorspace;
+	mf->field = V4L2_FIELD_NONE;
 
 	return 0;
 }
 
-static int mt9v022_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
+static int mt9v022_s_fmt(struct v4l2_subdev *sd,
+			 struct v4l2_mbus_framefmt *mf)
 {
 	struct i2c_client *client = sd->priv;
 	struct mt9v022 *mt9v022 = to_mt9v022(client);
-	struct v4l2_pix_format *pix = &f->fmt.pix;
 	struct v4l2_crop a = {
 		.c = {
 			.left = mt9v022->rect.left,
 			.top = mt9v022->rect.top,
-			.width = pix->width,
-			.height = pix->height,
+			.width = mf->width,
+			.height = mf->height,
 		},
 	};
 	int ret;
@@ -388,14 +392,14 @@ static int mt9v022_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
 	 * The caller provides a supported format, as verified per call to
 	 * icd->try_fmt(), datawidth is from our supported format list
 	 */
-	switch (pix->pixelformat) {
-	case V4L2_PIX_FMT_GREY:
-	case V4L2_PIX_FMT_Y16:
+	switch (mf->code) {
+	case V4L2_MBUS_FMT_GREY8_1X8:
+	case V4L2_MBUS_FMT_Y10_1X10:
 		if (mt9v022->model != V4L2_IDENT_MT9V022IX7ATM)
 			return -EINVAL;
 		break;
-	case V4L2_PIX_FMT_SBGGR8:
-	case V4L2_PIX_FMT_SBGGR16:
+	case V4L2_MBUS_FMT_SBGGR8_1X8:
+	case V4L2_MBUS_FMT_SBGGR10_1X10:
 		if (mt9v022->model != V4L2_IDENT_MT9V022IX7ATC)
 			return -EINVAL;
 		break;
@@ -409,27 +413,39 @@ static int mt9v022_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
 	/* No support for scaling on this camera, just crop. */
 	ret = mt9v022_s_crop(sd, &a);
 	if (!ret) {
-		pix->width = mt9v022->rect.width;
-		pix->height = mt9v022->rect.height;
-		mt9v022->fourcc = pix->pixelformat;
+		mf->width = mt9v022->rect.width;
+		mf->height = mt9v022->rect.height;
+		mt9v022->fmt = mt9v022_find_datafmt(mf->code,
+					mt9v022->fmts, mt9v022->num_fmts);
+		mf->colorspace = mt9v022->fmt->colorspace;
 	}
 
 	return ret;
 }
 
-static int mt9v022_try_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
+static int mt9v022_try_fmt(struct v4l2_subdev *sd,
+			   struct v4l2_mbus_framefmt *mf)
 {
 	struct i2c_client *client = sd->priv;
 	struct mt9v022 *mt9v022 = to_mt9v022(client);
-	struct v4l2_pix_format *pix = &f->fmt.pix;
-	int align = pix->pixelformat == V4L2_PIX_FMT_SBGGR8 ||
-		pix->pixelformat == V4L2_PIX_FMT_SBGGR16;
+	const struct mt9v022_datafmt *fmt;
+	int align = mf->code == V4L2_MBUS_FMT_SBGGR8_1X8 ||
+		mf->code == V4L2_MBUS_FMT_SBGGR10_1X10;
 
-	v4l_bound_align_image(&pix->width, MT9V022_MIN_WIDTH,
+	v4l_bound_align_image(&mf->width, MT9V022_MIN_WIDTH,
 		MT9V022_MAX_WIDTH, align,
-		&pix->height, MT9V022_MIN_HEIGHT + mt9v022->y_skip_top,
+		&mf->height, MT9V022_MIN_HEIGHT + mt9v022->y_skip_top,
 		MT9V022_MAX_HEIGHT + mt9v022->y_skip_top, align, 0);
 
+	fmt = mt9v022_find_datafmt(mf->code, mt9v022->fmts,
+				   mt9v022->num_fmts);
+	if (!fmt) {
+		fmt = mt9v022->fmt;
+		mf->code = fmt->code;
+	}
+
+	mf->colorspace = fmt->colorspace;
+
 	return 0;
 }
@@ -749,17 +765,17 @@ static int mt9v022_video_probe(struct soc_camera_device *icd,
 		 !strcmp("color", sensor_type))) {
 		ret = reg_write(client, MT9V022_PIXEL_OPERATION_MODE, 4 | 0x11);
 		mt9v022->model = V4L2_IDENT_MT9V022IX7ATC;
-		icd->formats = mt9v022_colour_formats;
+		mt9v022->fmts = mt9v022_colour_fmts;
 	} else {
 		ret = reg_write(client, MT9V022_PIXEL_OPERATION_MODE, 0x11);
 		mt9v022->model = V4L2_IDENT_MT9V022IX7ATM;
-		icd->formats = mt9v022_monochrome_formats;
+		mt9v022->fmts = mt9v022_monochrome_fmts;
 	}
 
 	if (ret < 0)
 		goto ei2c;
 
-	icd->num_formats = 0;
+	mt9v022->num_fmts = 0;
 
 	/*
 	 * This is a 10bit sensor, so by default we only allow 10bit.
@@ -772,14 +788,14 @@ static int mt9v022_video_probe(struct soc_camera_device *icd,
 		flags = SOCAM_DATAWIDTH_10;
 
 	if (flags & SOCAM_DATAWIDTH_10)
-		icd->num_formats++;
+		mt9v022->num_fmts++;
 	else
-		icd->formats++;
+		mt9v022->fmts++;
 
 	if (flags & SOCAM_DATAWIDTH_8)
-		icd->num_formats++;
+		mt9v022->num_fmts++;
 
-	mt9v022->fourcc = icd->formats->fourcc;
+	mt9v022->fmt = &mt9v022->fmts[0];
 
 	dev_info(&client->dev, "Detected a MT9V022 chip ID %x, %s sensor\n",
 		 data, mt9v022->model == V4L2_IDENT_MT9V022IX7ATM ?
@@ -823,14 +839,28 @@ static struct v4l2_subdev_core_ops mt9v022_subdev_core_ops = {
 #endif
 };
 
+static int mt9v022_enum_fmt(struct v4l2_subdev *sd, int index,
+			    enum v4l2_mbus_pixelcode *code)
+{
+	struct i2c_client *client = sd->priv;
+	struct mt9v022 *mt9v022 = to_mt9v022(client);
+
+	if ((unsigned int)index >= mt9v022->num_fmts)
+		return -EINVAL;
+
+	*code = mt9v022->fmts[index].code;
+	return 0;
+}
+
 static struct v4l2_subdev_video_ops mt9v022_subdev_video_ops = {
 	.s_stream = mt9v022_s_stream,
-	.s_fmt = mt9v022_s_fmt,
-	.g_fmt = mt9v022_g_fmt,
-	.try_fmt = mt9v022_try_fmt,
+	.s_mbus_fmt = mt9v022_s_fmt,
+	.g_mbus_fmt = mt9v022_g_fmt,
+	.try_mbus_fmt = mt9v022_try_fmt,
 	.s_crop = mt9v022_s_crop,
 	.g_crop = mt9v022_g_crop,
 	.cropcap = mt9v022_cropcap,
+	.enum_mbus_fmt = mt9v022_enum_fmt,
 };
 
 static struct v4l2_subdev_sensor_ops mt9v022_subdev_sensor_ops = {
...
@@ -37,6 +37,7 @@
 #include <media/v4l2-common.h>
 #include <media/v4l2-dev.h>
 #include <media/videobuf-dma-contig.h>
+#include <media/soc_mediabus.h>
 
 #include <asm/dma.h>
 #include <asm/fiq.h>
@@ -94,9 +95,9 @@
 /* buffer for one video frame */
 struct mx1_buffer {
 	/* common v4l buffer stuff -- must be first */
 	struct videobuf_buffer vb;
-	const struct soc_camera_data_format *fmt;
+	enum v4l2_mbus_pixelcode code;
 	int inwork;
 };
 
 /*
@@ -128,9 +129,13 @@ static int mx1_videobuf_setup(struct videobuf_queue *vq, unsigned int *count,
 			      unsigned int *size)
 {
 	struct soc_camera_device *icd = vq->priv_data;
+	int bytes_per_line = soc_mbus_bytes_per_line(icd->user_width,
+						icd->current_fmt->host_fmt);
 
-	*size = icd->user_width * icd->user_height *
-		((icd->current_fmt->depth + 7) >> 3);
+	if (bytes_per_line < 0)
+		return bytes_per_line;
+
+	*size = bytes_per_line * icd->user_height;
 
 	if (!*count)
 		*count = 32;
@@ -169,6 +174,11 @@ static int mx1_videobuf_prepare(struct videobuf_queue *vq,
 	struct soc_camera_device *icd = vq->priv_data;
 	struct mx1_buffer *buf = container_of(vb, struct mx1_buffer, vb);
 	int ret;
+	int bytes_per_line = soc_mbus_bytes_per_line(icd->user_width,
+						icd->current_fmt->host_fmt);
+
+	if (bytes_per_line < 0)
+		return bytes_per_line;
 
 	dev_dbg(icd->dev.parent, "%s (vb=0x%p) 0x%08lx %d\n", __func__,
 		vb, vb->baddr, vb->bsize);
@@ -184,18 +194,18 @@ static int mx1_videobuf_prepare(struct videobuf_queue *vq,
 	 */
 	buf->inwork = 1;
 
-	if (buf->fmt != icd->current_fmt ||
+	if (buf->code != icd->current_fmt->code ||
 	    vb->width != icd->user_width ||
 	    vb->height != icd->user_height ||
 	    vb->field != field) {
-		buf->fmt = icd->current_fmt;
+		buf->code = icd->current_fmt->code;
 		vb->width = icd->user_width;
 		vb->height = icd->user_height;
 		vb->field = field;
 		vb->state = VIDEOBUF_NEEDS_INIT;
 	}
 
-	vb->size = vb->width * vb->height * ((buf->fmt->depth + 7) >> 3);
+	vb->size = bytes_per_line * vb->height;
 	if (0 != vb->baddr && vb->bsize < vb->size) {
 		ret = -EINVAL;
 		goto out;
@@ -497,12 +507,10 @@ static int mx1_camera_set_bus_param(struct soc_camera_device *icd, __u32 pixfmt)
 	/* MX1 supports only 8bit buswidth */
 	common_flags = soc_camera_bus_param_compatible(camera_flags,
 						       CSI_BUS_FLAGS);
 
 	if (!common_flags)
 		return -EINVAL;
 
-	icd->buswidth = 8;
-
 	/* Make choises, based on platform choice */
 	if ((common_flags & SOCAM_VSYNC_ACTIVE_HIGH) &&
 	    (common_flags & SOCAM_VSYNC_ACTIVE_LOW)) {
@@ -555,7 +563,8 @@ static int mx1_camera_set_fmt(struct soc_camera_device *icd,
 	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
 	const struct soc_camera_format_xlate *xlate;
 	struct v4l2_pix_format *pix = &f->fmt.pix;
-	int ret;
+	struct v4l2_mbus_framefmt mf;
+	int ret, buswidth;
 
 	xlate = soc_camera_xlate_by_fourcc(icd, pix->pixelformat);
 	if (!xlate) {
@@ -564,12 +573,33 @@ static int mx1_camera_set_fmt(struct soc_camera_device *icd,
 		return -EINVAL;
 	}
 
-	ret = v4l2_subdev_call(sd, video, s_fmt, f);
-	if (!ret) {
-		icd->buswidth = xlate->buswidth;
-		icd->current_fmt = xlate->host_fmt;
+	buswidth = xlate->host_fmt->bits_per_sample;
+	if (buswidth > 8) {
+		dev_warn(icd->dev.parent,
+			 "bits-per-sample %d for format %x unsupported\n",
+			 buswidth, pix->pixelformat);
+		return -EINVAL;
 	}
 
+	mf.width = pix->width;
+	mf.height = pix->height;
+	mf.field = pix->field;
+	mf.colorspace = pix->colorspace;
+	mf.code = xlate->code;
+
+	ret = v4l2_subdev_call(sd, video, s_mbus_fmt, &mf);
+	if (ret < 0)
+		return ret;
+
+	if (mf.code != xlate->code)
+		return -EINVAL;
+
+	pix->width = mf.width;
+	pix->height = mf.height;
+	pix->field = mf.field;
+	pix->colorspace = mf.colorspace;
+	icd->current_fmt = xlate;
+
 	return ret;
 }
@@ -577,10 +607,36 @@ static int mx1_camera_try_fmt(struct soc_camera_device *icd,
 			      struct v4l2_format *f)
 {
 	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
+	const struct soc_camera_format_xlate *xlate;
+	struct v4l2_pix_format *pix = &f->fmt.pix;
+	struct v4l2_mbus_framefmt mf;
+	int ret;
+
 	/* TODO: limit to mx1 hardware capabilities */
 
+	xlate = soc_camera_xlate_by_fourcc(icd, pix->pixelformat);
+	if (!xlate) {
+		dev_warn(icd->dev.parent, "Format %x not found\n",
+			 pix->pixelformat);
+		return -EINVAL;
+	}
+
+	mf.width = pix->width;
+	mf.height = pix->height;
+	mf.field = pix->field;
+	mf.colorspace = pix->colorspace;
+	mf.code = xlate->code;
+
 	/* limit to sensor capabilities */
-	return v4l2_subdev_call(sd, video, try_fmt, f);
+	ret = v4l2_subdev_call(sd, video, try_mbus_fmt, &mf);
+	if (ret < 0)
+		return ret;
+
+	pix->width = mf.width;
+	pix->height = mf.height;
+	pix->field = mf.field;
+	pix->colorspace = mf.colorspace;
+
+	return 0;
 }
 
 static int mx1_camera_reqbufs(struct soc_camera_file *icf,
...
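Both i.MX host drivers in this patch size their video buffers from the negotiated media-bus format instead of a per-driver depth field. A minimal sketch of that calculation, using only the soc_mediabus helpers and soc_camera fields that appear in these hunks (the example_* name is illustrative):

#include <media/soc_camera.h>
#include <media/soc_mediabus.h>

/*
 * Sketch: size one video buffer for the current format, as the
 * mx1/mx3 videobuf_setup() callbacks in this patch do.
 */
static int example_sizeimage(struct soc_camera_device *icd, unsigned int *size)
{
	int bytes_per_line = soc_mbus_bytes_per_line(icd->user_width,
						icd->current_fmt->host_fmt);

	if (bytes_per_line < 0)
		return bytes_per_line;	/* unknown packing: propagate error */

	*size = bytes_per_line * icd->user_height;
	return 0;
}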
@@ -23,6 +23,7 @@
 #include <media/v4l2-dev.h>
 #include <media/videobuf-dma-contig.h>
 #include <media/soc_camera.h>
+#include <media/soc_mediabus.h>
 
 #include <mach/ipu.h>
 #include <mach/mx3_camera.h>
@@ -63,7 +64,7 @@
 struct mx3_camera_buffer {
 	/* common v4l buffer stuff -- must be first */
 	struct videobuf_buffer vb;
-	const struct soc_camera_data_format *fmt;
+	enum v4l2_mbus_pixelcode code;
 
 	/* One descriptot per scatterlist (per frame) */
 	struct dma_async_tx_descriptor *txd;
@@ -118,8 +119,6 @@ struct dma_chan_request {
 	enum ipu_channel id;
 };
 
-static int mx3_camera_set_bus_param(struct soc_camera_device *icd, __u32 pixfmt);
-
 static u32 csi_reg_read(struct mx3_camera_dev *mx3, off_t reg)
 {
 	return __raw_readl(mx3->base + reg);
@@ -211,17 +210,16 @@ static int mx3_videobuf_setup(struct videobuf_queue *vq, unsigned int *count,
 	struct soc_camera_device *icd = vq->priv_data;
 	struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent);
 	struct mx3_camera_dev *mx3_cam = ici->priv;
-	/*
-	 * bits-per-pixel (depth) as specified in camera's pixel format does
-	 * not necessarily match what the camera interface writes to RAM, but
-	 * it should be good enough for now.
-	 */
-	unsigned int bpp = DIV_ROUND_UP(icd->current_fmt->depth, 8);
+	int bytes_per_line = soc_mbus_bytes_per_line(icd->user_width,
+						icd->current_fmt->host_fmt);
+
+	if (bytes_per_line < 0)
+		return bytes_per_line;
 
 	if (!mx3_cam->idmac_channel[0])
 		return -EINVAL;
 
-	*size = icd->user_width * icd->user_height * bpp;
+	*size = bytes_per_line * icd->user_height;
 
 	if (!*count)
 		*count = 32;
@@ -241,21 +239,26 @@ static int mx3_videobuf_prepare(struct videobuf_queue *vq,
 	struct mx3_camera_dev *mx3_cam = ici->priv;
 	struct mx3_camera_buffer *buf =
 		container_of(vb, struct mx3_camera_buffer, vb);
-	/* current_fmt _must_ always be set */
-	size_t new_size = icd->user_width * icd->user_height *
-		((icd->current_fmt->depth + 7) >> 3);
+	size_t new_size;
 	int ret;
+	int bytes_per_line = soc_mbus_bytes_per_line(icd->user_width,
+						icd->current_fmt->host_fmt);
+
+	if (bytes_per_line < 0)
+		return bytes_per_line;
+
+	new_size = bytes_per_line * icd->user_height;
 
 	/*
 	 * I think, in buf_prepare you only have to protect global data,
 	 * the actual buffer is yours
 	 */
-	if (buf->fmt != icd->current_fmt ||
+	if (buf->code != icd->current_fmt->code ||
 	    vb->width != icd->user_width ||
 	    vb->height != icd->user_height ||
 	    vb->field != field) {
-		buf->fmt = icd->current_fmt;
+		buf->code = icd->current_fmt->code;
 		vb->width = icd->user_width;
 		vb->height = icd->user_height;
 		vb->field = field;
@@ -348,13 +351,13 @@ static void mx3_videobuf_queue(struct videobuf_queue *vq,
 	struct dma_async_tx_descriptor *txd = buf->txd;
 	struct idmac_channel *ichan = to_idmac_chan(txd->chan);
 	struct idmac_video_param *video = &ichan->params.video;
-	const struct soc_camera_data_format *data_fmt = icd->current_fmt;
 	dma_cookie_t cookie;
+	u32 fourcc = icd->current_fmt->host_fmt->fourcc;
 
 	BUG_ON(!irqs_disabled());
 
 	/* This is the configuration of one sg-element */
-	video->out_pixel_fmt = fourcc_to_ipu_pix(data_fmt->fourcc);
+	video->out_pixel_fmt = fourcc_to_ipu_pix(fourcc);
 	video->out_width = icd->user_width;
 	video->out_height = icd->user_height;
 	video->out_stride = icd->user_width;
@@ -568,28 +571,33 @@ static int test_platform_param(struct mx3_camera_dev *mx3_cam,
 	 * If requested data width is supported by the platform, use it or any
 	 * possible lower value - i.MX31 is smart enough to schift bits
 	 */
+	if (mx3_cam->platform_flags & MX3_CAMERA_DATAWIDTH_15)
+		*flags |= SOCAM_DATAWIDTH_15 | SOCAM_DATAWIDTH_10 |
+			SOCAM_DATAWIDTH_8 | SOCAM_DATAWIDTH_4;
+	else if (mx3_cam->platform_flags & MX3_CAMERA_DATAWIDTH_10)
+		*flags |= SOCAM_DATAWIDTH_10 | SOCAM_DATAWIDTH_8 |
+			SOCAM_DATAWIDTH_4;
+	else if (mx3_cam->platform_flags & MX3_CAMERA_DATAWIDTH_8)
+		*flags |= SOCAM_DATAWIDTH_8 | SOCAM_DATAWIDTH_4;
+	else if (mx3_cam->platform_flags & MX3_CAMERA_DATAWIDTH_4)
+		*flags |= SOCAM_DATAWIDTH_4;
+
 	switch (buswidth) {
 	case 15:
-		if (!(mx3_cam->platform_flags & MX3_CAMERA_DATAWIDTH_15))
+		if (!(*flags & SOCAM_DATAWIDTH_15))
 			return -EINVAL;
-		*flags |= SOCAM_DATAWIDTH_15 | SOCAM_DATAWIDTH_10 |
-			SOCAM_DATAWIDTH_8 | SOCAM_DATAWIDTH_4;
 		break;
 	case 10:
-		if (!(mx3_cam->platform_flags & MX3_CAMERA_DATAWIDTH_10))
+		if (!(*flags & SOCAM_DATAWIDTH_10))
 			return -EINVAL;
-		*flags |= SOCAM_DATAWIDTH_10 | SOCAM_DATAWIDTH_8 |
-			SOCAM_DATAWIDTH_4;
 		break;
 	case 8:
-		if (!(mx3_cam->platform_flags & MX3_CAMERA_DATAWIDTH_8))
+		if (!(*flags & SOCAM_DATAWIDTH_8))
 			return -EINVAL;
-		*flags |= SOCAM_DATAWIDTH_8 | SOCAM_DATAWIDTH_4;
 		break;
 	case 4:
-		if (!(mx3_cam->platform_flags & MX3_CAMERA_DATAWIDTH_4))
+		if (!(*flags & SOCAM_DATAWIDTH_4))
 			return -EINVAL;
-		*flags |= SOCAM_DATAWIDTH_4;
 		break;
 	default:
 		dev_warn(mx3_cam->soc_host.v4l2_dev.dev,
@@ -638,91 +646,92 @@ static bool chan_filter(struct dma_chan *chan, void *arg)
 		pdata->dma_dev == chan->device->dev;
 }
 
-static const struct soc_camera_data_format mx3_camera_formats[] = {
+static const struct soc_mbus_pixelfmt mx3_camera_formats[] = {
 	{
-		.name = "Bayer (sRGB) 8 bit",
-		.depth = 8,
-		.fourcc = V4L2_PIX_FMT_SBGGR8,
-		.colorspace = V4L2_COLORSPACE_SRGB,
+		.fourcc = V4L2_PIX_FMT_SBGGR8,
+		.name = "Bayer BGGR (sRGB) 8 bit",
+		.bits_per_sample = 8,
+		.packing = SOC_MBUS_PACKING_NONE,
+		.order = SOC_MBUS_ORDER_LE,
 	}, {
-		.name = "Monochrome 8 bit",
-		.depth = 8,
-		.fourcc = V4L2_PIX_FMT_GREY,
-		.colorspace = V4L2_COLORSPACE_JPEG,
+		.fourcc = V4L2_PIX_FMT_GREY,
+		.name = "Monochrome 8 bit",
+		.bits_per_sample = 8,
+		.packing = SOC_MBUS_PACKING_NONE,
+		.order = SOC_MBUS_ORDER_LE,
 	},
 };
 
-static bool buswidth_supported(struct soc_camera_host *ici, int depth)
+/* This will be corrected as we get more formats */
+static bool mx3_camera_packing_supported(const struct soc_mbus_pixelfmt *fmt)
 {
-	struct mx3_camera_dev *mx3_cam = ici->priv;
-
-	switch (depth) {
-	case 4:
-		return !!(mx3_cam->platform_flags & MX3_CAMERA_DATAWIDTH_4);
-	case 8:
-		return !!(mx3_cam->platform_flags & MX3_CAMERA_DATAWIDTH_8);
-	case 10:
-		return !!(mx3_cam->platform_flags & MX3_CAMERA_DATAWIDTH_10);
-	case 15:
-		return !!(mx3_cam->platform_flags & MX3_CAMERA_DATAWIDTH_15);
-	}
-	return false;
+	return fmt->packing == SOC_MBUS_PACKING_NONE ||
+		(fmt->bits_per_sample == 8 &&
+		 fmt->packing == SOC_MBUS_PACKING_2X8_PADHI) ||
+		(fmt->bits_per_sample > 8 &&
+		 fmt->packing == SOC_MBUS_PACKING_EXTEND16);
 }
 
static int mx3_camera_get_formats(struct soc_camera_device *icd, int idx, static int mx3_camera_get_formats(struct soc_camera_device *icd, int idx,
struct soc_camera_format_xlate *xlate) struct soc_camera_format_xlate *xlate)
{ {
struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
int formats = 0, buswidth, ret; struct device *dev = icd->dev.parent;
int formats = 0, ret;
enum v4l2_mbus_pixelcode code;
const struct soc_mbus_pixelfmt *fmt;
buswidth = icd->formats[idx].depth; ret = v4l2_subdev_call(sd, video, enum_mbus_fmt, idx, &code);
if (ret < 0)
/* No more formats */
return 0;
if (!buswidth_supported(ici, buswidth)) fmt = soc_mbus_get_fmtdesc(code);
if (!fmt) {
dev_err(icd->dev.parent,
"Invalid format code #%d: %d\n", idx, code);
return 0; return 0;
}
ret = mx3_camera_try_bus_param(icd, buswidth); /* This also checks support for the requested bits-per-sample */
ret = mx3_camera_try_bus_param(icd, fmt->bits_per_sample);
if (ret < 0) if (ret < 0)
return 0; return 0;
switch (icd->formats[idx].fourcc) { switch (code) {
case V4L2_PIX_FMT_SGRBG10: case V4L2_MBUS_FMT_SBGGR10_1X10:
formats++; formats++;
if (xlate) { if (xlate) {
xlate->host_fmt = &mx3_camera_formats[0]; xlate->host_fmt = &mx3_camera_formats[0];
xlate->cam_fmt = icd->formats + idx; xlate->code = code;
xlate->buswidth = buswidth;
xlate++; xlate++;
dev_dbg(icd->dev.parent, dev_dbg(dev, "Providing format %s using code %d\n",
"Providing format %s using %s\n", mx3_camera_formats[0].name, code);
mx3_camera_formats[0].name,
icd->formats[idx].name);
} }
goto passthrough; break;
case V4L2_PIX_FMT_Y16: case V4L2_MBUS_FMT_Y10_1X10:
formats++; formats++;
if (xlate) { if (xlate) {
xlate->host_fmt = &mx3_camera_formats[1]; xlate->host_fmt = &mx3_camera_formats[1];
xlate->cam_fmt = icd->formats + idx; xlate->code = code;
xlate->buswidth = buswidth;
xlate++; xlate++;
dev_dbg(icd->dev.parent, dev_dbg(dev, "Providing format %s using code %d\n",
"Providing format %s using %s\n", mx3_camera_formats[1].name, code);
mx3_camera_formats[0].name,
icd->formats[idx].name);
} }
break;
default: default:
passthrough: if (!mx3_camera_packing_supported(fmt))
/* Generic pass-through */ return 0;
formats++; }
if (xlate) {
xlate->host_fmt = icd->formats + idx; /* Generic pass-through */
xlate->cam_fmt = icd->formats + idx; formats++;
xlate->buswidth = buswidth; if (xlate) {
xlate++; xlate->host_fmt = fmt;
dev_dbg(icd->dev.parent, xlate->code = code;
"Providing format %s in pass-through mode\n", xlate++;
icd->formats[idx].name); dev_dbg(dev, "Providing format %x in pass-through mode\n",
} fmt->fourcc);
} }
return formats; return formats;
...@@ -806,8 +815,7 @@ static int mx3_camera_set_crop(struct soc_camera_device *icd, ...@@ -806,8 +815,7 @@ static int mx3_camera_set_crop(struct soc_camera_device *icd,
struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent);
struct mx3_camera_dev *mx3_cam = ici->priv; struct mx3_camera_dev *mx3_cam = ici->priv;
struct v4l2_subdev *sd = soc_camera_to_subdev(icd); struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
struct v4l2_format f = {.type = V4L2_BUF_TYPE_VIDEO_CAPTURE}; struct v4l2_mbus_framefmt mf;
struct v4l2_pix_format *pix = &f.fmt.pix;
int ret; int ret;
soc_camera_limit_side(&rect->left, &rect->width, 0, 2, 4096); soc_camera_limit_side(&rect->left, &rect->width, 0, 2, 4096);
...@@ -818,19 +826,19 @@ static int mx3_camera_set_crop(struct soc_camera_device *icd, ...@@ -818,19 +826,19 @@ static int mx3_camera_set_crop(struct soc_camera_device *icd,
return ret; return ret;
/* The capture device might have changed its output */ /* The capture device might have changed its output */
ret = v4l2_subdev_call(sd, video, g_fmt, &f); ret = v4l2_subdev_call(sd, video, g_mbus_fmt, &mf);
if (ret < 0) if (ret < 0)
return ret; return ret;
if (pix->width & 7) { if (mf.width & 7) {
/* Ouch! We can only handle 8-byte aligned width... */ /* Ouch! We can only handle 8-byte aligned width... */
stride_align(&pix->width); stride_align(&mf.width);
ret = v4l2_subdev_call(sd, video, s_fmt, &f); ret = v4l2_subdev_call(sd, video, s_mbus_fmt, &mf);
if (ret < 0) if (ret < 0)
return ret; return ret;
} }
if (pix->width != icd->user_width || pix->height != icd->user_height) { if (mf.width != icd->user_width || mf.height != icd->user_height) {
/* /*
* We now know pixel formats and can decide upon DMA-channel(s) * We now know pixel formats and can decide upon DMA-channel(s)
* So far only direct camera-to-memory is supported * So far only direct camera-to-memory is supported
...@@ -841,14 +849,14 @@ static int mx3_camera_set_crop(struct soc_camera_device *icd, ...@@ -841,14 +849,14 @@ static int mx3_camera_set_crop(struct soc_camera_device *icd,
return ret; return ret;
} }
configure_geometry(mx3_cam, pix->width, pix->height); configure_geometry(mx3_cam, mf.width, mf.height);
} }
dev_dbg(icd->dev.parent, "Sensor cropped %dx%d\n", dev_dbg(icd->dev.parent, "Sensor cropped %dx%d\n",
pix->width, pix->height); mf.width, mf.height);
icd->user_width = pix->width; icd->user_width = mf.width;
icd->user_height = pix->height; icd->user_height = mf.height;
return ret; return ret;
} }
...@@ -861,6 +869,7 @@ static int mx3_camera_set_fmt(struct soc_camera_device *icd, ...@@ -861,6 +869,7 @@ static int mx3_camera_set_fmt(struct soc_camera_device *icd,
struct v4l2_subdev *sd = soc_camera_to_subdev(icd); struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
const struct soc_camera_format_xlate *xlate; const struct soc_camera_format_xlate *xlate;
struct v4l2_pix_format *pix = &f->fmt.pix; struct v4l2_pix_format *pix = &f->fmt.pix;
struct v4l2_mbus_framefmt mf;
int ret; int ret;
xlate = soc_camera_xlate_by_fourcc(icd, pix->pixelformat); xlate = soc_camera_xlate_by_fourcc(icd, pix->pixelformat);
...@@ -885,11 +894,24 @@ static int mx3_camera_set_fmt(struct soc_camera_device *icd, ...@@ -885,11 +894,24 @@ static int mx3_camera_set_fmt(struct soc_camera_device *icd,
configure_geometry(mx3_cam, pix->width, pix->height); configure_geometry(mx3_cam, pix->width, pix->height);
ret = v4l2_subdev_call(sd, video, s_fmt, f); mf.width = pix->width;
if (!ret) { mf.height = pix->height;
icd->buswidth = xlate->buswidth; mf.field = pix->field;
icd->current_fmt = xlate->host_fmt; mf.colorspace = pix->colorspace;
} mf.code = xlate->code;
ret = v4l2_subdev_call(sd, video, s_mbus_fmt, &mf);
if (ret < 0)
return ret;
if (mf.code != xlate->code)
return -EINVAL;
pix->width = mf.width;
pix->height = mf.height;
pix->field = mf.field;
pix->colorspace = mf.colorspace;
icd->current_fmt = xlate;
dev_dbg(icd->dev.parent, "Sensor set %dx%d\n", pix->width, pix->height); dev_dbg(icd->dev.parent, "Sensor set %dx%d\n", pix->width, pix->height);
...@@ -902,8 +924,8 @@ static int mx3_camera_try_fmt(struct soc_camera_device *icd, ...@@ -902,8 +924,8 @@ static int mx3_camera_try_fmt(struct soc_camera_device *icd,
struct v4l2_subdev *sd = soc_camera_to_subdev(icd); struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
const struct soc_camera_format_xlate *xlate; const struct soc_camera_format_xlate *xlate;
struct v4l2_pix_format *pix = &f->fmt.pix; struct v4l2_pix_format *pix = &f->fmt.pix;
struct v4l2_mbus_framefmt mf;
__u32 pixfmt = pix->pixelformat; __u32 pixfmt = pix->pixelformat;
enum v4l2_field field;
int ret; int ret;
xlate = soc_camera_xlate_by_fourcc(icd, pixfmt); xlate = soc_camera_xlate_by_fourcc(icd, pixfmt);
...@@ -918,23 +940,37 @@ static int mx3_camera_try_fmt(struct soc_camera_device *icd, ...@@ -918,23 +940,37 @@ static int mx3_camera_try_fmt(struct soc_camera_device *icd,
if (pix->width > 4096) if (pix->width > 4096)
pix->width = 4096; pix->width = 4096;
pix->bytesperline = pix->width * pix->bytesperline = soc_mbus_bytes_per_line(pix->width,
DIV_ROUND_UP(xlate->host_fmt->depth, 8); xlate->host_fmt);
if (pix->bytesperline < 0)
return pix->bytesperline;
pix->sizeimage = pix->height * pix->bytesperline; pix->sizeimage = pix->height * pix->bytesperline;
/* camera has to see its format, but the user the original one */
pix->pixelformat = xlate->cam_fmt->fourcc;
/* limit to sensor capabilities */ /* limit to sensor capabilities */
ret = v4l2_subdev_call(sd, video, try_fmt, f); mf.width = pix->width;
pix->pixelformat = xlate->host_fmt->fourcc; mf.height = pix->height;
mf.field = pix->field;
mf.colorspace = pix->colorspace;
mf.code = xlate->code;
field = pix->field; ret = v4l2_subdev_call(sd, video, try_mbus_fmt, &mf);
if (ret < 0)
return ret;
if (field == V4L2_FIELD_ANY) { pix->width = mf.width;
pix->height = mf.height;
pix->colorspace = mf.colorspace;
switch (mf.field) {
case V4L2_FIELD_ANY:
pix->field = V4L2_FIELD_NONE; pix->field = V4L2_FIELD_NONE;
} else if (field != V4L2_FIELD_NONE) { break;
dev_err(icd->dev.parent, "Field type %d unsupported.\n", field); case V4L2_FIELD_NONE:
return -EINVAL; break;
default:
dev_err(icd->dev.parent, "Field type %d unsupported.\n",
mf.field);
ret = -EINVAL;
} }
return ret; return ret;
...@@ -970,18 +1006,26 @@ static int mx3_camera_set_bus_param(struct soc_camera_device *icd, __u32 pixfmt) ...@@ -970,18 +1006,26 @@ static int mx3_camera_set_bus_param(struct soc_camera_device *icd, __u32 pixfmt)
struct mx3_camera_dev *mx3_cam = ici->priv; struct mx3_camera_dev *mx3_cam = ici->priv;
unsigned long bus_flags, camera_flags, common_flags; unsigned long bus_flags, camera_flags, common_flags;
u32 dw, sens_conf; u32 dw, sens_conf;
int ret = test_platform_param(mx3_cam, icd->buswidth, &bus_flags); const struct soc_mbus_pixelfmt *fmt;
int buswidth;
int ret;
const struct soc_camera_format_xlate *xlate; const struct soc_camera_format_xlate *xlate;
struct device *dev = icd->dev.parent; struct device *dev = icd->dev.parent;
fmt = soc_mbus_get_fmtdesc(icd->current_fmt->code);
if (!fmt)
return -EINVAL;
buswidth = fmt->bits_per_sample;
ret = test_platform_param(mx3_cam, buswidth, &bus_flags);
xlate = soc_camera_xlate_by_fourcc(icd, pixfmt); xlate = soc_camera_xlate_by_fourcc(icd, pixfmt);
if (!xlate) { if (!xlate) {
dev_warn(dev, "Format %x not found\n", pixfmt); dev_warn(dev, "Format %x not found\n", pixfmt);
return -EINVAL; return -EINVAL;
} }
dev_dbg(dev, "requested bus width %d bit: %d\n", dev_dbg(dev, "requested bus width %d bit: %d\n", buswidth, ret);
icd->buswidth, ret);
if (ret < 0) if (ret < 0)
return ret; return ret;
...@@ -1082,7 +1126,7 @@ static int mx3_camera_set_bus_param(struct soc_camera_device *icd, __u32 pixfmt) ...@@ -1082,7 +1126,7 @@ static int mx3_camera_set_bus_param(struct soc_camera_device *icd, __u32 pixfmt)
sens_conf |= 1 << CSI_SENS_CONF_DATA_POL_SHIFT; sens_conf |= 1 << CSI_SENS_CONF_DATA_POL_SHIFT;
/* Just do what we're asked to do */ /* Just do what we're asked to do */
switch (xlate->host_fmt->depth) { switch (xlate->host_fmt->bits_per_sample) {
case 4: case 4:
dw = 0 << CSI_SENS_CONF_DATA_WIDTH_SHIFT; dw = 0 << CSI_SENS_CONF_DATA_WIDTH_SHIFT;
break; break;
......
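The mx3 host conversion above illustrates the new negotiation model: a host driver's .get_formats() no longer walks a client-supplied format array, it asks the subdev to enumerate media-bus codes and translates each one through soc_mediabus. The skeleton below is a schematic of that loop, not the mx3 driver itself; the function name is hypothetical and it only emits a single pass-through entry per code:

#include <media/soc_camera.h>
#include <media/soc_mediabus.h>
#include <media/v4l2-subdev.h>

/*
 * Schematic .get_formats(): ask the client for its idx-th media-bus code,
 * look up the corresponding host pixel format and build one xlate entry.
 * Returning 0 tells soc-camera that no (more) formats are available.
 */
static int example_host_get_formats(struct soc_camera_device *icd, int idx,
				    struct soc_camera_format_xlate *xlate)
{
	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
	enum v4l2_mbus_pixelcode code;
	const struct soc_mbus_pixelfmt *fmt;

	if (v4l2_subdev_call(sd, video, enum_mbus_fmt, idx, &code) < 0)
		return 0;			/* no more client formats */

	fmt = soc_mbus_get_fmtdesc(code);	/* fourcc, bits/sample, packing */
	if (!fmt)
		return 0;			/* code unknown to soc_mediabus */

	if (xlate) {
		xlate->host_fmt = fmt;		/* what user space will see */
		xlate->code = code;		/* what travels on the bus */
	}
	return 1;				/* one pass-through entry */
}

The real drivers above additionally special-case codes they can repack in hardware (Bayer/monochrome on mx3, YUV422 planar on pxa) before falling back to this pass-through path.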
...@@ -24,6 +24,7 @@ ...@@ -24,6 +24,7 @@
#include <media/v4l2-chip-ident.h> #include <media/v4l2-chip-ident.h>
#include <media/v4l2-subdev.h> #include <media/v4l2-subdev.h>
#include <media/soc_camera.h> #include <media/soc_camera.h>
#include <media/soc_mediabus.h>
#include <media/ov772x.h> #include <media/ov772x.h>
/* /*
...@@ -382,7 +383,8 @@ struct regval_list { ...@@ -382,7 +383,8 @@ struct regval_list {
}; };
struct ov772x_color_format { struct ov772x_color_format {
const struct soc_camera_data_format *format; enum v4l2_mbus_pixelcode code;
enum v4l2_colorspace colorspace;
u8 dsp3; u8 dsp3;
u8 com3; u8 com3;
u8 com7; u8 com7;
...@@ -399,7 +401,7 @@ struct ov772x_win_size { ...@@ -399,7 +401,7 @@ struct ov772x_win_size {
struct ov772x_priv { struct ov772x_priv {
struct v4l2_subdev subdev; struct v4l2_subdev subdev;
struct ov772x_camera_info *info; struct ov772x_camera_info *info;
const struct ov772x_color_format *fmt; const struct ov772x_color_format *cfmt;
const struct ov772x_win_size *win; const struct ov772x_win_size *win;
int model; int model;
unsigned short flag_vflip:1; unsigned short flag_vflip:1;
...@@ -434,93 +436,57 @@ static const struct regval_list ov772x_vga_regs[] = { ...@@ -434,93 +436,57 @@ static const struct regval_list ov772x_vga_regs[] = {
}; };
/* /*
* supported format list * supported color format list
*/
#define SETFOURCC(type) .name = (#type), .fourcc = (V4L2_PIX_FMT_ ## type)
static const struct soc_camera_data_format ov772x_fmt_lists[] = {
{
SETFOURCC(YUYV),
.depth = 16,
.colorspace = V4L2_COLORSPACE_JPEG,
},
{
SETFOURCC(YVYU),
.depth = 16,
.colorspace = V4L2_COLORSPACE_JPEG,
},
{
SETFOURCC(UYVY),
.depth = 16,
.colorspace = V4L2_COLORSPACE_JPEG,
},
{
SETFOURCC(RGB555),
.depth = 16,
.colorspace = V4L2_COLORSPACE_SRGB,
},
{
SETFOURCC(RGB555X),
.depth = 16,
.colorspace = V4L2_COLORSPACE_SRGB,
},
{
SETFOURCC(RGB565),
.depth = 16,
.colorspace = V4L2_COLORSPACE_SRGB,
},
{
SETFOURCC(RGB565X),
.depth = 16,
.colorspace = V4L2_COLORSPACE_SRGB,
},
};
/*
* color format list
*/ */
static const struct ov772x_color_format ov772x_cfmts[] = { static const struct ov772x_color_format ov772x_cfmts[] = {
{ {
.format = &ov772x_fmt_lists[0], .code = V4L2_MBUS_FMT_YUYV8_2X8_LE,
.dsp3 = 0x0, .colorspace = V4L2_COLORSPACE_JPEG,
.com3 = SWAP_YUV, .dsp3 = 0x0,
.com7 = OFMT_YUV, .com3 = SWAP_YUV,
.com7 = OFMT_YUV,
}, },
{ {
.format = &ov772x_fmt_lists[1], .code = V4L2_MBUS_FMT_YVYU8_2X8_LE,
.dsp3 = UV_ON, .colorspace = V4L2_COLORSPACE_JPEG,
.com3 = SWAP_YUV, .dsp3 = UV_ON,
.com7 = OFMT_YUV, .com3 = SWAP_YUV,
.com7 = OFMT_YUV,
}, },
{ {
.format = &ov772x_fmt_lists[2], .code = V4L2_MBUS_FMT_YUYV8_2X8_BE,
.dsp3 = 0x0, .colorspace = V4L2_COLORSPACE_JPEG,
.com3 = 0x0, .dsp3 = 0x0,
.com7 = OFMT_YUV, .com3 = 0x0,
.com7 = OFMT_YUV,
}, },
{ {
.format = &ov772x_fmt_lists[3], .code = V4L2_MBUS_FMT_RGB555_2X8_PADHI_LE,
.dsp3 = 0x0, .colorspace = V4L2_COLORSPACE_SRGB,
.com3 = SWAP_RGB, .dsp3 = 0x0,
.com7 = FMT_RGB555 | OFMT_RGB, .com3 = SWAP_RGB,
.com7 = FMT_RGB555 | OFMT_RGB,
}, },
{ {
.format = &ov772x_fmt_lists[4], .code = V4L2_MBUS_FMT_RGB555_2X8_PADHI_BE,
.dsp3 = 0x0, .colorspace = V4L2_COLORSPACE_SRGB,
.com3 = 0x0, .dsp3 = 0x0,
.com7 = FMT_RGB555 | OFMT_RGB, .com3 = 0x0,
.com7 = FMT_RGB555 | OFMT_RGB,
}, },
{ {
.format = &ov772x_fmt_lists[5], .code = V4L2_MBUS_FMT_RGB565_2X8_LE,
.dsp3 = 0x0, .colorspace = V4L2_COLORSPACE_SRGB,
.com3 = SWAP_RGB, .dsp3 = 0x0,
.com7 = FMT_RGB565 | OFMT_RGB, .com3 = SWAP_RGB,
.com7 = FMT_RGB565 | OFMT_RGB,
}, },
{ {
.format = &ov772x_fmt_lists[6], .code = V4L2_MBUS_FMT_RGB565_2X8_BE,
.dsp3 = 0x0, .colorspace = V4L2_COLORSPACE_SRGB,
.com3 = 0x0, .dsp3 = 0x0,
.com7 = FMT_RGB565 | OFMT_RGB, .com3 = 0x0,
.com7 = FMT_RGB565 | OFMT_RGB,
}, },
}; };
...@@ -642,15 +608,15 @@ static int ov772x_s_stream(struct v4l2_subdev *sd, int enable) ...@@ -642,15 +608,15 @@ static int ov772x_s_stream(struct v4l2_subdev *sd, int enable)
return 0; return 0;
} }
if (!priv->win || !priv->fmt) { if (!priv->win || !priv->cfmt) {
dev_err(&client->dev, "norm or win select error\n"); dev_err(&client->dev, "norm or win select error\n");
return -EPERM; return -EPERM;
} }
ov772x_mask_set(client, COM2, SOFT_SLEEP_MODE, 0); ov772x_mask_set(client, COM2, SOFT_SLEEP_MODE, 0);
dev_dbg(&client->dev, "format %s, win %s\n", dev_dbg(&client->dev, "format %d, win %s\n",
priv->fmt->format->name, priv->win->name); priv->cfmt->code, priv->win->name);
return 0; return 0;
} }
...@@ -806,8 +772,8 @@ static const struct ov772x_win_size *ov772x_select_win(u32 width, u32 height) ...@@ -806,8 +772,8 @@ static const struct ov772x_win_size *ov772x_select_win(u32 width, u32 height)
return win; return win;
} }
static int ov772x_set_params(struct i2c_client *client, static int ov772x_set_params(struct i2c_client *client, u32 *width, u32 *height,
u32 *width, u32 *height, u32 pixfmt) enum v4l2_mbus_pixelcode code)
{ {
struct ov772x_priv *priv = to_ov772x(client); struct ov772x_priv *priv = to_ov772x(client);
int ret = -EINVAL; int ret = -EINVAL;
...@@ -817,14 +783,14 @@ static int ov772x_set_params(struct i2c_client *client, ...@@ -817,14 +783,14 @@ static int ov772x_set_params(struct i2c_client *client,
/* /*
* select format * select format
*/ */
priv->fmt = NULL; priv->cfmt = NULL;
for (i = 0; i < ARRAY_SIZE(ov772x_cfmts); i++) { for (i = 0; i < ARRAY_SIZE(ov772x_cfmts); i++) {
if (pixfmt == ov772x_cfmts[i].format->fourcc) { if (code == ov772x_cfmts[i].code) {
priv->fmt = ov772x_cfmts + i; priv->cfmt = ov772x_cfmts + i;
break; break;
} }
} }
if (!priv->fmt) if (!priv->cfmt)
goto ov772x_set_fmt_error; goto ov772x_set_fmt_error;
/* /*
...@@ -894,7 +860,7 @@ static int ov772x_set_params(struct i2c_client *client, ...@@ -894,7 +860,7 @@ static int ov772x_set_params(struct i2c_client *client,
/* /*
* set DSP_CTRL3 * set DSP_CTRL3
*/ */
val = priv->fmt->dsp3; val = priv->cfmt->dsp3;
if (val) { if (val) {
ret = ov772x_mask_set(client, ret = ov772x_mask_set(client,
DSP_CTRL3, UV_MASK, val); DSP_CTRL3, UV_MASK, val);
...@@ -905,7 +871,7 @@ static int ov772x_set_params(struct i2c_client *client, ...@@ -905,7 +871,7 @@ static int ov772x_set_params(struct i2c_client *client,
/* /*
* set COM3 * set COM3
*/ */
val = priv->fmt->com3; val = priv->cfmt->com3;
if (priv->info->flags & OV772X_FLAG_VFLIP) if (priv->info->flags & OV772X_FLAG_VFLIP)
val |= VFLIP_IMG; val |= VFLIP_IMG;
if (priv->info->flags & OV772X_FLAG_HFLIP) if (priv->info->flags & OV772X_FLAG_HFLIP)
...@@ -923,9 +889,9 @@ static int ov772x_set_params(struct i2c_client *client, ...@@ -923,9 +889,9 @@ static int ov772x_set_params(struct i2c_client *client,
/* /*
* set COM7 * set COM7
*/ */
val = priv->win->com7_bit | priv->fmt->com7; val = priv->win->com7_bit | priv->cfmt->com7;
ret = ov772x_mask_set(client, ret = ov772x_mask_set(client,
COM7, (SLCT_MASK | FMT_MASK | OFMT_MASK), COM7, SLCT_MASK | FMT_MASK | OFMT_MASK,
val); val);
if (ret < 0) if (ret < 0)
goto ov772x_set_fmt_error; goto ov772x_set_fmt_error;
...@@ -951,7 +917,7 @@ static int ov772x_set_params(struct i2c_client *client, ...@@ -951,7 +917,7 @@ static int ov772x_set_params(struct i2c_client *client,
ov772x_reset(client); ov772x_reset(client);
priv->win = NULL; priv->win = NULL;
priv->fmt = NULL; priv->cfmt = NULL;
return ret; return ret;
} }
...@@ -981,54 +947,79 @@ static int ov772x_cropcap(struct v4l2_subdev *sd, struct v4l2_cropcap *a) ...@@ -981,54 +947,79 @@ static int ov772x_cropcap(struct v4l2_subdev *sd, struct v4l2_cropcap *a)
return 0; return 0;
} }
static int ov772x_g_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) static int ov772x_g_fmt(struct v4l2_subdev *sd,
struct v4l2_mbus_framefmt *mf)
{ {
struct i2c_client *client = sd->priv; struct i2c_client *client = sd->priv;
struct ov772x_priv *priv = to_ov772x(client); struct ov772x_priv *priv = to_ov772x(client);
struct v4l2_pix_format *pix = &f->fmt.pix;
if (!priv->win || !priv->fmt) { if (!priv->win || !priv->cfmt) {
u32 width = VGA_WIDTH, height = VGA_HEIGHT; u32 width = VGA_WIDTH, height = VGA_HEIGHT;
int ret = ov772x_set_params(client, &width, &height, int ret = ov772x_set_params(client, &width, &height,
V4L2_PIX_FMT_YUYV); V4L2_MBUS_FMT_YUYV8_2X8_LE);
if (ret < 0) if (ret < 0)
return ret; return ret;
} }
f->type = V4L2_BUF_TYPE_VIDEO_CAPTURE; mf->width = priv->win->width;
mf->height = priv->win->height;
pix->width = priv->win->width; mf->code = priv->cfmt->code;
pix->height = priv->win->height; mf->colorspace = priv->cfmt->colorspace;
pix->pixelformat = priv->fmt->format->fourcc; mf->field = V4L2_FIELD_NONE;
pix->colorspace = priv->fmt->format->colorspace;
pix->field = V4L2_FIELD_NONE;
return 0; return 0;
} }
static int ov772x_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) static int ov772x_s_fmt(struct v4l2_subdev *sd,
struct v4l2_mbus_framefmt *mf)
{ {
struct i2c_client *client = sd->priv; struct i2c_client *client = sd->priv;
struct v4l2_pix_format *pix = &f->fmt.pix; struct ov772x_priv *priv = to_ov772x(client);
int ret = ov772x_set_params(client, &mf->width, &mf->height,
mf->code);
if (!ret)
mf->colorspace = priv->cfmt->colorspace;
return ov772x_set_params(client, &pix->width, &pix->height, return ret;
pix->pixelformat);
} }
static int ov772x_try_fmt(struct v4l2_subdev *sd, static int ov772x_try_fmt(struct v4l2_subdev *sd,
struct v4l2_format *f) struct v4l2_mbus_framefmt *mf)
{ {
struct v4l2_pix_format *pix = &f->fmt.pix; struct i2c_client *client = sd->priv;
struct ov772x_priv *priv = to_ov772x(client);
const struct ov772x_win_size *win; const struct ov772x_win_size *win;
int i;
/* /*
* select suitable win * select suitable win
*/ */
win = ov772x_select_win(pix->width, pix->height); win = ov772x_select_win(mf->width, mf->height);
mf->width = win->width;
mf->height = win->height;
mf->field = V4L2_FIELD_NONE;
pix->width = win->width; for (i = 0; i < ARRAY_SIZE(ov772x_cfmts); i++)
pix->height = win->height; if (mf->code == ov772x_cfmts[i].code)
pix->field = V4L2_FIELD_NONE; break;
if (i == ARRAY_SIZE(ov772x_cfmts)) {
/* Unsupported format requested. Propose either */
if (priv->cfmt) {
/* the current one or */
mf->colorspace = priv->cfmt->colorspace;
mf->code = priv->cfmt->code;
} else {
/* the default one */
mf->colorspace = ov772x_cfmts[0].colorspace;
mf->code = ov772x_cfmts[0].code;
}
} else {
/* Also return the colorspace */
mf->colorspace = ov772x_cfmts[i].colorspace;
}
return 0; return 0;
} }
...@@ -1057,9 +1048,6 @@ static int ov772x_video_probe(struct soc_camera_device *icd, ...@@ -1057,9 +1048,6 @@ static int ov772x_video_probe(struct soc_camera_device *icd,
return -ENODEV; return -ENODEV;
} }
icd->formats = ov772x_fmt_lists;
icd->num_formats = ARRAY_SIZE(ov772x_fmt_lists);
/* /*
* check and show product ID and manufacturer ID * check and show product ID and manufacturer ID
*/ */
...@@ -1109,13 +1097,24 @@ static struct v4l2_subdev_core_ops ov772x_subdev_core_ops = { ...@@ -1109,13 +1097,24 @@ static struct v4l2_subdev_core_ops ov772x_subdev_core_ops = {
#endif #endif
}; };
static int ov772x_enum_fmt(struct v4l2_subdev *sd, int index,
enum v4l2_mbus_pixelcode *code)
{
if ((unsigned int)index >= ARRAY_SIZE(ov772x_cfmts))
return -EINVAL;
*code = ov772x_cfmts[index].code;
return 0;
}
static struct v4l2_subdev_video_ops ov772x_subdev_video_ops = { static struct v4l2_subdev_video_ops ov772x_subdev_video_ops = {
.s_stream = ov772x_s_stream, .s_stream = ov772x_s_stream,
.g_fmt = ov772x_g_fmt, .g_mbus_fmt = ov772x_g_fmt,
.s_fmt = ov772x_s_fmt, .s_mbus_fmt = ov772x_s_fmt,
.try_fmt = ov772x_try_fmt, .try_mbus_fmt = ov772x_try_fmt,
.cropcap = ov772x_cropcap, .cropcap = ov772x_cropcap,
.g_crop = ov772x_g_crop, .g_crop = ov772x_g_crop,
.enum_mbus_fmt = ov772x_enum_fmt,
}; };
static struct v4l2_subdev_ops ov772x_subdev_ops = { static struct v4l2_subdev_ops ov772x_subdev_ops = {
......
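On the client side the pattern is symmetrical: each sensor driver now keeps a small table keyed by media-bus code, with its fixed colorspace, and operates on struct v4l2_mbus_framefmt in its enum/try/s/g callbacks, as ov772x does above. A minimal sketch of the two table-driven callbacks, with a hypothetical two-entry table and example_* placeholder names, could look like this:

#include <linux/kernel.h>
#include <media/v4l2-mediabus.h>
#include <media/v4l2-subdev.h>

/* Hypothetical client table: one fixed colorspace per media-bus code */
static const struct {
	enum v4l2_mbus_pixelcode code;
	enum v4l2_colorspace colorspace;
} example_fmts[] = {
	{V4L2_MBUS_FMT_YUYV8_2X8_LE, V4L2_COLORSPACE_JPEG},
	{V4L2_MBUS_FMT_RGB565_2X8_LE, V4L2_COLORSPACE_SRGB},
};

static int example_enum_fmt(struct v4l2_subdev *sd, int index,
			    enum v4l2_mbus_pixelcode *code)
{
	if ((unsigned int)index >= ARRAY_SIZE(example_fmts))
		return -EINVAL;

	*code = example_fmts[index].code;
	return 0;
}

static int example_try_fmt(struct v4l2_subdev *sd,
			   struct v4l2_mbus_framefmt *mf)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(example_fmts); i++)
		if (mf->code == example_fmts[i].code)
			break;

	if (i == ARRAY_SIZE(example_fmts))
		i = 0;			/* unknown code: propose the default */

	mf->code	= example_fmts[i].code;
	mf->colorspace	= example_fmts[i].colorspace;
	mf->field	= V4L2_FIELD_NONE;
	return 0;
}

Wiring these into v4l2_subdev_video_ops as .enum_mbus_fmt and .try_mbus_fmt is what replaces the old icd->formats array plus .try_fmt pairing.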
...@@ -154,19 +154,10 @@ static const struct ov9640_reg ov9640_regs_rgb[] = { ...@@ -154,19 +154,10 @@ static const struct ov9640_reg ov9640_regs_rgb[] = {
{ OV9640_MTXS, 0x65 }, { OV9640_MTXS, 0x65 },
}; };
/* static enum v4l2_mbus_pixelcode ov9640_codes[] = {
* TODO: this sensor also supports RGB555 and RGB565 formats, but support for V4L2_MBUS_FMT_YUYV8_2X8_BE,
* them has not yet been sufficiently tested and so it is not included with V4L2_MBUS_FMT_RGB555_2X8_PADHI_LE,
* this version of the driver. To test and debug these formats add two entries V4L2_MBUS_FMT_RGB565_2X8_LE,
* to the below array, see ov772x.c for an example.
*/
static const struct soc_camera_data_format ov9640_fmt_lists[] = {
{
.name = "UYVY",
.fourcc = V4L2_PIX_FMT_UYVY,
.depth = 16,
.colorspace = V4L2_COLORSPACE_JPEG,
},
}; };
static const struct v4l2_queryctrl ov9640_controls[] = { static const struct v4l2_queryctrl ov9640_controls[] = {
...@@ -434,20 +425,22 @@ static void ov9640_res_roundup(u32 *width, u32 *height) ...@@ -434,20 +425,22 @@ static void ov9640_res_roundup(u32 *width, u32 *height)
} }
/* Prepare necessary register changes depending on color encoding */ /* Prepare necessary register changes depending on color encoding */
static void ov9640_alter_regs(u32 pixfmt, struct ov9640_reg_alt *alt) static void ov9640_alter_regs(enum v4l2_mbus_pixelcode code,
struct ov9640_reg_alt *alt)
{ {
switch (pixfmt) { switch (code) {
case V4L2_PIX_FMT_UYVY: default:
case V4L2_MBUS_FMT_YUYV8_2X8_BE:
alt->com12 = OV9640_COM12_YUV_AVG; alt->com12 = OV9640_COM12_YUV_AVG;
alt->com13 = OV9640_COM13_Y_DELAY_EN | alt->com13 = OV9640_COM13_Y_DELAY_EN |
OV9640_COM13_YUV_DLY(0x01); OV9640_COM13_YUV_DLY(0x01);
break; break;
case V4L2_PIX_FMT_RGB555: case V4L2_MBUS_FMT_RGB555_2X8_PADHI_LE:
alt->com7 = OV9640_COM7_RGB; alt->com7 = OV9640_COM7_RGB;
alt->com13 = OV9640_COM13_RGB_AVG; alt->com13 = OV9640_COM13_RGB_AVG;
alt->com15 = OV9640_COM15_RGB_555; alt->com15 = OV9640_COM15_RGB_555;
break; break;
case V4L2_PIX_FMT_RGB565: case V4L2_MBUS_FMT_RGB565_2X8_LE:
alt->com7 = OV9640_COM7_RGB; alt->com7 = OV9640_COM7_RGB;
alt->com13 = OV9640_COM13_RGB_AVG; alt->com13 = OV9640_COM13_RGB_AVG;
alt->com15 = OV9640_COM15_RGB_565; alt->com15 = OV9640_COM15_RGB_565;
...@@ -456,8 +449,8 @@ static void ov9640_alter_regs(u32 pixfmt, struct ov9640_reg_alt *alt) ...@@ -456,8 +449,8 @@ static void ov9640_alter_regs(u32 pixfmt, struct ov9640_reg_alt *alt)
} }
/* Setup registers according to resolution and color encoding */ /* Setup registers according to resolution and color encoding */
static int ov9640_write_regs(struct i2c_client *client, static int ov9640_write_regs(struct i2c_client *client, u32 width,
u32 width, u32 pixfmt, struct ov9640_reg_alt *alts) enum v4l2_mbus_pixelcode code, struct ov9640_reg_alt *alts)
{ {
const struct ov9640_reg *ov9640_regs, *matrix_regs; const struct ov9640_reg *ov9640_regs, *matrix_regs;
int ov9640_regs_len, matrix_regs_len; int ov9640_regs_len, matrix_regs_len;
...@@ -500,7 +493,7 @@ static int ov9640_write_regs(struct i2c_client *client, ...@@ -500,7 +493,7 @@ static int ov9640_write_regs(struct i2c_client *client,
} }
/* select color matrix configuration for given color encoding */ /* select color matrix configuration for given color encoding */
if (pixfmt == V4L2_PIX_FMT_UYVY) { if (code == V4L2_MBUS_FMT_YUYV8_2X8_BE) {
matrix_regs = ov9640_regs_yuv; matrix_regs = ov9640_regs_yuv;
matrix_regs_len = ARRAY_SIZE(ov9640_regs_yuv); matrix_regs_len = ARRAY_SIZE(ov9640_regs_yuv);
} else { } else {
...@@ -562,15 +555,17 @@ static int ov9640_prog_dflt(struct i2c_client *client) ...@@ -562,15 +555,17 @@ static int ov9640_prog_dflt(struct i2c_client *client)
} }
/* set the format we will capture in */ /* set the format we will capture in */
static int ov9640_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) static int ov9640_s_fmt(struct v4l2_subdev *sd,
struct v4l2_mbus_framefmt *mf)
{ {
struct i2c_client *client = sd->priv; struct i2c_client *client = sd->priv;
struct v4l2_pix_format *pix = &f->fmt.pix;
struct ov9640_reg_alt alts = {0}; struct ov9640_reg_alt alts = {0};
enum v4l2_colorspace cspace;
enum v4l2_mbus_pixelcode code = mf->code;
int ret; int ret;
ov9640_res_roundup(&pix->width, &pix->height); ov9640_res_roundup(&mf->width, &mf->height);
ov9640_alter_regs(pix->pixelformat, &alts); ov9640_alter_regs(mf->code, &alts);
ov9640_reset(client); ov9640_reset(client);
...@@ -578,19 +573,57 @@ static int ov9640_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) ...@@ -578,19 +573,57 @@ static int ov9640_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
if (ret) if (ret)
return ret; return ret;
return ov9640_write_regs(client, pix->width, pix->pixelformat, &alts); switch (code) {
case V4L2_MBUS_FMT_RGB555_2X8_PADHI_LE:
case V4L2_MBUS_FMT_RGB565_2X8_LE:
cspace = V4L2_COLORSPACE_SRGB;
break;
default:
code = V4L2_MBUS_FMT_YUYV8_2X8_BE;
case V4L2_MBUS_FMT_YUYV8_2X8_BE:
cspace = V4L2_COLORSPACE_JPEG;
}
ret = ov9640_write_regs(client, mf->width, code, &alts);
if (!ret) {
mf->code = code;
mf->colorspace = cspace;
}
return ret;
} }
static int ov9640_try_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) static int ov9640_try_fmt(struct v4l2_subdev *sd,
struct v4l2_mbus_framefmt *mf)
{ {
struct v4l2_pix_format *pix = &f->fmt.pix; ov9640_res_roundup(&mf->width, &mf->height);
ov9640_res_roundup(&pix->width, &pix->height); mf->field = V4L2_FIELD_NONE;
pix->field = V4L2_FIELD_NONE;
switch (mf->code) {
case V4L2_MBUS_FMT_RGB555_2X8_PADHI_LE:
case V4L2_MBUS_FMT_RGB565_2X8_LE:
mf->colorspace = V4L2_COLORSPACE_SRGB;
break;
default:
mf->code = V4L2_MBUS_FMT_YUYV8_2X8_BE;
case V4L2_MBUS_FMT_YUYV8_2X8_BE:
mf->colorspace = V4L2_COLORSPACE_JPEG;
}
return 0; return 0;
} }
static int ov9640_enum_fmt(struct v4l2_subdev *sd, int index,
enum v4l2_mbus_pixelcode *code)
{
if ((unsigned int)index >= ARRAY_SIZE(ov9640_codes))
return -EINVAL;
*code = ov9640_codes[index];
return 0;
}
static int ov9640_g_crop(struct v4l2_subdev *sd, struct v4l2_crop *a) static int ov9640_g_crop(struct v4l2_subdev *sd, struct v4l2_crop *a)
{ {
a->c.left = 0; a->c.left = 0;
...@@ -637,9 +670,6 @@ static int ov9640_video_probe(struct soc_camera_device *icd, ...@@ -637,9 +670,6 @@ static int ov9640_video_probe(struct soc_camera_device *icd,
goto err; goto err;
} }
icd->formats = ov9640_fmt_lists;
icd->num_formats = ARRAY_SIZE(ov9640_fmt_lists);
/* /*
* check and show product ID and manufacturer ID * check and show product ID and manufacturer ID
*/ */
...@@ -702,11 +732,12 @@ static struct v4l2_subdev_core_ops ov9640_core_ops = { ...@@ -702,11 +732,12 @@ static struct v4l2_subdev_core_ops ov9640_core_ops = {
}; };
static struct v4l2_subdev_video_ops ov9640_video_ops = { static struct v4l2_subdev_video_ops ov9640_video_ops = {
.s_stream = ov9640_s_stream, .s_stream = ov9640_s_stream,
.s_fmt = ov9640_s_fmt, .s_mbus_fmt = ov9640_s_fmt,
.try_fmt = ov9640_try_fmt, .try_mbus_fmt = ov9640_try_fmt,
.cropcap = ov9640_cropcap, .enum_mbus_fmt = ov9640_enum_fmt,
.g_crop = ov9640_g_crop, .cropcap = ov9640_cropcap,
.g_crop = ov9640_g_crop,
}; };
......
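The host .set_fmt() implementations in this patch (mx3 above, pxa below) end with the same handshake: build a v4l2_mbus_framefmt from the user's pix format plus the negotiated code, hand it to the client via s_mbus_fmt, reject a client that changes the code, and copy the adjusted geometry back. A condensed version of that tail, as a hypothetical helper:

#include <media/soc_camera.h>
#include <media/v4l2-subdev.h>

/*
 * Condensed host .set_fmt() tail (hypothetical helper): negotiate the
 * media-bus format with the client and copy the result back into the
 * user-visible pix format.  xlate comes from soc_camera_xlate_by_fourcc().
 */
static int example_host_set_fmt(struct soc_camera_device *icd,
				const struct soc_camera_format_xlate *xlate,
				struct v4l2_pix_format *pix)
{
	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
	struct v4l2_mbus_framefmt mf = {
		.width		= pix->width,
		.height		= pix->height,
		.field		= pix->field,
		.colorspace	= pix->colorspace,
		.code		= xlate->code,
	};
	int ret = v4l2_subdev_call(sd, video, s_mbus_fmt, &mf);

	if (ret < 0)
		return ret;
	if (mf.code != xlate->code)	/* clients must not switch codes */
		return -EINVAL;

	pix->width	= mf.width;
	pix->height	= mf.height;
	pix->field	= mf.field;
	pix->colorspace	= mf.colorspace;
	icd->current_fmt = xlate;	/* remember the active translation */
	return 0;
}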
...@@ -32,6 +32,7 @@ ...@@ -32,6 +32,7 @@
#include <media/v4l2-dev.h> #include <media/v4l2-dev.h>
#include <media/videobuf-dma-sg.h> #include <media/videobuf-dma-sg.h>
#include <media/soc_camera.h> #include <media/soc_camera.h>
#include <media/soc_mediabus.h>
#include <linux/videodev2.h> #include <linux/videodev2.h>
...@@ -183,16 +184,12 @@ struct pxa_cam_dma { ...@@ -183,16 +184,12 @@ struct pxa_cam_dma {
/* buffer for one video frame */ /* buffer for one video frame */
struct pxa_buffer { struct pxa_buffer {
/* common v4l buffer stuff -- must be first */ /* common v4l buffer stuff -- must be first */
struct videobuf_buffer vb; struct videobuf_buffer vb;
enum v4l2_mbus_pixelcode code;
const struct soc_camera_data_format *fmt;
/* our descriptor lists for Y, U and V channels */ /* our descriptor lists for Y, U and V channels */
struct pxa_cam_dma dmas[3]; struct pxa_cam_dma dmas[3];
int inwork;
int inwork; enum pxa_camera_active_dma active_dma;
enum pxa_camera_active_dma active_dma;
}; };
struct pxa_camera_dev { struct pxa_camera_dev {
...@@ -243,11 +240,15 @@ static int pxa_videobuf_setup(struct videobuf_queue *vq, unsigned int *count, ...@@ -243,11 +240,15 @@ static int pxa_videobuf_setup(struct videobuf_queue *vq, unsigned int *count,
unsigned int *size) unsigned int *size)
{ {
struct soc_camera_device *icd = vq->priv_data; struct soc_camera_device *icd = vq->priv_data;
int bytes_per_line = soc_mbus_bytes_per_line(icd->user_width,
icd->current_fmt->host_fmt);
if (bytes_per_line < 0)
return bytes_per_line;
dev_dbg(icd->dev.parent, "count=%d, size=%d\n", *count, *size); dev_dbg(icd->dev.parent, "count=%d, size=%d\n", *count, *size);
*size = roundup(icd->user_width * icd->user_height * *size = bytes_per_line * icd->user_height;
((icd->current_fmt->depth + 7) >> 3), 8);
if (0 == *count) if (0 == *count)
*count = 32; *count = 32;
...@@ -433,6 +434,11 @@ static int pxa_videobuf_prepare(struct videobuf_queue *vq, ...@@ -433,6 +434,11 @@ static int pxa_videobuf_prepare(struct videobuf_queue *vq,
struct pxa_buffer *buf = container_of(vb, struct pxa_buffer, vb); struct pxa_buffer *buf = container_of(vb, struct pxa_buffer, vb);
int ret; int ret;
int size_y, size_u = 0, size_v = 0; int size_y, size_u = 0, size_v = 0;
int bytes_per_line = soc_mbus_bytes_per_line(icd->user_width,
icd->current_fmt->host_fmt);
if (bytes_per_line < 0)
return bytes_per_line;
dev_dbg(dev, "%s (vb=0x%p) 0x%08lx %d\n", __func__, dev_dbg(dev, "%s (vb=0x%p) 0x%08lx %d\n", __func__,
vb, vb->baddr, vb->bsize); vb, vb->baddr, vb->bsize);
...@@ -456,18 +462,18 @@ static int pxa_videobuf_prepare(struct videobuf_queue *vq, ...@@ -456,18 +462,18 @@ static int pxa_videobuf_prepare(struct videobuf_queue *vq,
*/ */
buf->inwork = 1; buf->inwork = 1;
if (buf->fmt != icd->current_fmt || if (buf->code != icd->current_fmt->code ||
vb->width != icd->user_width || vb->width != icd->user_width ||
vb->height != icd->user_height || vb->height != icd->user_height ||
vb->field != field) { vb->field != field) {
buf->fmt = icd->current_fmt; buf->code = icd->current_fmt->code;
vb->width = icd->user_width; vb->width = icd->user_width;
vb->height = icd->user_height; vb->height = icd->user_height;
vb->field = field; vb->field = field;
vb->state = VIDEOBUF_NEEDS_INIT; vb->state = VIDEOBUF_NEEDS_INIT;
} }
vb->size = vb->width * vb->height * ((buf->fmt->depth + 7) >> 3); vb->size = bytes_per_line * vb->height;
if (0 != vb->baddr && vb->bsize < vb->size) { if (0 != vb->baddr && vb->bsize < vb->size) {
ret = -EINVAL; ret = -EINVAL;
goto out; goto out;
...@@ -1157,9 +1163,15 @@ static int pxa_camera_set_bus_param(struct soc_camera_device *icd, __u32 pixfmt) ...@@ -1157,9 +1163,15 @@ static int pxa_camera_set_bus_param(struct soc_camera_device *icd, __u32 pixfmt)
struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent);
struct pxa_camera_dev *pcdev = ici->priv; struct pxa_camera_dev *pcdev = ici->priv;
unsigned long bus_flags, camera_flags, common_flags; unsigned long bus_flags, camera_flags, common_flags;
int ret = test_platform_param(pcdev, icd->buswidth, &bus_flags); const struct soc_mbus_pixelfmt *fmt;
int ret;
struct pxa_cam *cam = icd->host_priv; struct pxa_cam *cam = icd->host_priv;
fmt = soc_mbus_get_fmtdesc(icd->current_fmt->code);
if (!fmt)
return -EINVAL;
ret = test_platform_param(pcdev, fmt->bits_per_sample, &bus_flags);
if (ret < 0) if (ret < 0)
return ret; return ret;
...@@ -1223,59 +1235,49 @@ static int pxa_camera_try_bus_param(struct soc_camera_device *icd, ...@@ -1223,59 +1235,49 @@ static int pxa_camera_try_bus_param(struct soc_camera_device *icd,
return soc_camera_bus_param_compatible(camera_flags, bus_flags) ? 0 : -EINVAL; return soc_camera_bus_param_compatible(camera_flags, bus_flags) ? 0 : -EINVAL;
} }
static const struct soc_camera_data_format pxa_camera_formats[] = { static const struct soc_mbus_pixelfmt pxa_camera_formats[] = {
{ {
.name = "Planar YUV422 16 bit", .fourcc = V4L2_PIX_FMT_YUV422P,
.depth = 16, .name = "Planar YUV422 16 bit",
.fourcc = V4L2_PIX_FMT_YUV422P, .bits_per_sample = 8,
.colorspace = V4L2_COLORSPACE_JPEG, .packing = SOC_MBUS_PACKING_2X8_PADHI,
.order = SOC_MBUS_ORDER_LE,
}, },
}; };
static bool buswidth_supported(struct soc_camera_device *icd, int depth) /* This will be corrected as we get more formats */
static bool pxa_camera_packing_supported(const struct soc_mbus_pixelfmt *fmt)
{ {
struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); return fmt->packing == SOC_MBUS_PACKING_NONE ||
struct pxa_camera_dev *pcdev = ici->priv; (fmt->bits_per_sample == 8 &&
fmt->packing == SOC_MBUS_PACKING_2X8_PADHI) ||
switch (depth) { (fmt->bits_per_sample > 8 &&
case 8: fmt->packing == SOC_MBUS_PACKING_EXTEND16);
return !!(pcdev->platform_flags & PXA_CAMERA_DATAWIDTH_8);
case 9:
return !!(pcdev->platform_flags & PXA_CAMERA_DATAWIDTH_9);
case 10:
return !!(pcdev->platform_flags & PXA_CAMERA_DATAWIDTH_10);
}
return false;
}
static int required_buswidth(const struct soc_camera_data_format *fmt)
{
switch (fmt->fourcc) {
case V4L2_PIX_FMT_UYVY:
case V4L2_PIX_FMT_VYUY:
case V4L2_PIX_FMT_YUYV:
case V4L2_PIX_FMT_YVYU:
case V4L2_PIX_FMT_RGB565:
case V4L2_PIX_FMT_RGB555:
return 8;
default:
return fmt->depth;
}
} }
static int pxa_camera_get_formats(struct soc_camera_device *icd, int idx, static int pxa_camera_get_formats(struct soc_camera_device *icd, int idx,
struct soc_camera_format_xlate *xlate) struct soc_camera_format_xlate *xlate)
{ {
struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
struct device *dev = icd->dev.parent; struct device *dev = icd->dev.parent;
int formats = 0, buswidth, ret; int formats = 0, ret;
struct pxa_cam *cam; struct pxa_cam *cam;
enum v4l2_mbus_pixelcode code;
const struct soc_mbus_pixelfmt *fmt;
buswidth = required_buswidth(icd->formats + idx); ret = v4l2_subdev_call(sd, video, enum_mbus_fmt, idx, &code);
if (ret < 0)
/* No more formats */
return 0;
if (!buswidth_supported(icd, buswidth)) fmt = soc_mbus_get_fmtdesc(code);
if (!fmt) {
dev_err(dev, "Invalid format code #%d: %d\n", idx, code);
return 0; return 0;
}
ret = pxa_camera_try_bus_param(icd, buswidth); /* This also checks support for the requested bits-per-sample */
ret = pxa_camera_try_bus_param(icd, fmt->bits_per_sample);
if (ret < 0) if (ret < 0)
return 0; return 0;
...@@ -1289,45 +1291,40 @@ static int pxa_camera_get_formats(struct soc_camera_device *icd, int idx, ...@@ -1289,45 +1291,40 @@ static int pxa_camera_get_formats(struct soc_camera_device *icd, int idx,
cam = icd->host_priv; cam = icd->host_priv;
} }
switch (icd->formats[idx].fourcc) { switch (code) {
case V4L2_PIX_FMT_UYVY: case V4L2_MBUS_FMT_YUYV8_2X8_BE:
formats++; formats++;
if (xlate) { if (xlate) {
xlate->host_fmt = &pxa_camera_formats[0]; xlate->host_fmt = &pxa_camera_formats[0];
xlate->cam_fmt = icd->formats + idx; xlate->code = code;
xlate->buswidth = buswidth;
xlate++; xlate++;
dev_dbg(dev, "Providing format %s using %s\n", dev_dbg(dev, "Providing format %s using code %d\n",
pxa_camera_formats[0].name, pxa_camera_formats[0].name, code);
icd->formats[idx].name);
} }
case V4L2_PIX_FMT_VYUY: case V4L2_MBUS_FMT_YVYU8_2X8_BE:
case V4L2_PIX_FMT_YUYV: case V4L2_MBUS_FMT_YUYV8_2X8_LE:
case V4L2_PIX_FMT_YVYU: case V4L2_MBUS_FMT_YVYU8_2X8_LE:
case V4L2_PIX_FMT_RGB565: case V4L2_MBUS_FMT_RGB565_2X8_LE:
case V4L2_PIX_FMT_RGB555: case V4L2_MBUS_FMT_RGB555_2X8_PADHI_LE:
formats++; if (xlate)
if (xlate) {
xlate->host_fmt = icd->formats + idx;
xlate->cam_fmt = icd->formats + idx;
xlate->buswidth = buswidth;
xlate++;
dev_dbg(dev, "Providing format %s packed\n", dev_dbg(dev, "Providing format %s packed\n",
icd->formats[idx].name); fmt->name);
}
break; break;
default: default:
/* Generic pass-through */ if (!pxa_camera_packing_supported(fmt))
formats++; return 0;
if (xlate) { if (xlate)
xlate->host_fmt = icd->formats + idx;
xlate->cam_fmt = icd->formats + idx;
xlate->buswidth = icd->formats[idx].depth;
xlate++;
dev_dbg(dev, dev_dbg(dev,
"Providing format %s in pass-through mode\n", "Providing format %s in pass-through mode\n",
icd->formats[idx].name); fmt->name);
} }
/* Generic pass-through */
formats++;
if (xlate) {
xlate->host_fmt = fmt;
xlate->code = code;
xlate++;
} }
return formats; return formats;
...@@ -1339,11 +1336,11 @@ static void pxa_camera_put_formats(struct soc_camera_device *icd) ...@@ -1339,11 +1336,11 @@ static void pxa_camera_put_formats(struct soc_camera_device *icd)
icd->host_priv = NULL; icd->host_priv = NULL;
} }
static int pxa_camera_check_frame(struct v4l2_pix_format *pix) static int pxa_camera_check_frame(u32 width, u32 height)
{ {
/* limit to pxa hardware capabilities */ /* limit to pxa hardware capabilities */
return pix->height < 32 || pix->height > 2048 || pix->width < 48 || return height < 32 || height > 2048 || width < 48 || width > 2048 ||
pix->width > 2048 || (pix->width & 0x01); (width & 0x01);
} }
static int pxa_camera_set_crop(struct soc_camera_device *icd, static int pxa_camera_set_crop(struct soc_camera_device *icd,
...@@ -1358,9 +1355,9 @@ static int pxa_camera_set_crop(struct soc_camera_device *icd, ...@@ -1358,9 +1355,9 @@ static int pxa_camera_set_crop(struct soc_camera_device *icd,
.master_clock = pcdev->mclk, .master_clock = pcdev->mclk,
.pixel_clock_max = pcdev->ciclk / 4, .pixel_clock_max = pcdev->ciclk / 4,
}; };
struct v4l2_format f; struct v4l2_mbus_framefmt mf;
struct v4l2_pix_format *pix = &f.fmt.pix, pix_tmp;
struct pxa_cam *cam = icd->host_priv; struct pxa_cam *cam = icd->host_priv;
u32 fourcc = icd->current_fmt->host_fmt->fourcc;
int ret; int ret;
/* If PCLK is used to latch data from the sensor, check sense */ /* If PCLK is used to latch data from the sensor, check sense */
...@@ -1377,27 +1374,23 @@ static int pxa_camera_set_crop(struct soc_camera_device *icd, ...@@ -1377,27 +1374,23 @@ static int pxa_camera_set_crop(struct soc_camera_device *icd,
return ret; return ret;
} }
f.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; ret = v4l2_subdev_call(sd, video, g_mbus_fmt, &mf);
ret = v4l2_subdev_call(sd, video, g_fmt, &f);
if (ret < 0) if (ret < 0)
return ret; return ret;
pix_tmp = *pix; if (pxa_camera_check_frame(mf.width, mf.height)) {
if (pxa_camera_check_frame(pix)) {
/* /*
* Camera cropping produced a frame beyond our capabilities. * Camera cropping produced a frame beyond our capabilities.
* FIXME: just extract a subframe, that we can process. * FIXME: just extract a subframe, that we can process.
*/ */
v4l_bound_align_image(&pix->width, 48, 2048, 1, v4l_bound_align_image(&mf.width, 48, 2048, 1,
&pix->height, 32, 2048, 0, &mf.height, 32, 2048, 0,
icd->current_fmt->fourcc == V4L2_PIX_FMT_YUV422P ? fourcc == V4L2_PIX_FMT_YUV422P ? 4 : 0);
4 : 0); ret = v4l2_subdev_call(sd, video, s_mbus_fmt, &mf);
ret = v4l2_subdev_call(sd, video, s_fmt, &f);
if (ret < 0) if (ret < 0)
return ret; return ret;
if (pxa_camera_check_frame(pix)) { if (pxa_camera_check_frame(mf.width, mf.height)) {
dev_warn(icd->dev.parent, dev_warn(icd->dev.parent,
"Inconsistent state. Use S_FMT to repair\n"); "Inconsistent state. Use S_FMT to repair\n");
return -EINVAL; return -EINVAL;
...@@ -1414,10 +1407,10 @@ static int pxa_camera_set_crop(struct soc_camera_device *icd, ...@@ -1414,10 +1407,10 @@ static int pxa_camera_set_crop(struct soc_camera_device *icd,
recalculate_fifo_timeout(pcdev, sense.pixel_clock); recalculate_fifo_timeout(pcdev, sense.pixel_clock);
} }
icd->user_width = pix->width; icd->user_width = mf.width;
icd->user_height = pix->height; icd->user_height = mf.height;
pxa_camera_setup_cicr(icd, cam->flags, icd->current_fmt->fourcc); pxa_camera_setup_cicr(icd, cam->flags, fourcc);
return ret; return ret;
} }
...@@ -1429,14 +1422,13 @@ static int pxa_camera_set_fmt(struct soc_camera_device *icd, ...@@ -1429,14 +1422,13 @@ static int pxa_camera_set_fmt(struct soc_camera_device *icd,
struct pxa_camera_dev *pcdev = ici->priv; struct pxa_camera_dev *pcdev = ici->priv;
struct device *dev = icd->dev.parent; struct device *dev = icd->dev.parent;
struct v4l2_subdev *sd = soc_camera_to_subdev(icd); struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
const struct soc_camera_data_format *cam_fmt = NULL;
const struct soc_camera_format_xlate *xlate = NULL; const struct soc_camera_format_xlate *xlate = NULL;
struct soc_camera_sense sense = { struct soc_camera_sense sense = {
.master_clock = pcdev->mclk, .master_clock = pcdev->mclk,
.pixel_clock_max = pcdev->ciclk / 4, .pixel_clock_max = pcdev->ciclk / 4,
}; };
struct v4l2_pix_format *pix = &f->fmt.pix; struct v4l2_pix_format *pix = &f->fmt.pix;
struct v4l2_format cam_f = *f; struct v4l2_mbus_framefmt mf;
int ret; int ret;
xlate = soc_camera_xlate_by_fourcc(icd, pix->pixelformat); xlate = soc_camera_xlate_by_fourcc(icd, pix->pixelformat);
...@@ -1445,26 +1437,31 @@ static int pxa_camera_set_fmt(struct soc_camera_device *icd, ...@@ -1445,26 +1437,31 @@ static int pxa_camera_set_fmt(struct soc_camera_device *icd,
return -EINVAL; return -EINVAL;
} }
cam_fmt = xlate->cam_fmt;
/* If PCLK is used to latch data from the sensor, check sense */ /* If PCLK is used to latch data from the sensor, check sense */
if (pcdev->platform_flags & PXA_CAMERA_PCLK_EN) if (pcdev->platform_flags & PXA_CAMERA_PCLK_EN)
/* The caller holds a mutex. */
icd->sense = &sense; icd->sense = &sense;
cam_f.fmt.pix.pixelformat = cam_fmt->fourcc; mf.width = pix->width;
ret = v4l2_subdev_call(sd, video, s_fmt, &cam_f); mf.height = pix->height;
cam_f.fmt.pix.pixelformat = pix->pixelformat; mf.field = pix->field;
*pix = cam_f.fmt.pix; mf.colorspace = pix->colorspace;
mf.code = xlate->code;
ret = v4l2_subdev_call(sd, video, s_mbus_fmt, &mf);
if (mf.code != xlate->code)
return -EINVAL;
icd->sense = NULL; icd->sense = NULL;
if (ret < 0) { if (ret < 0) {
dev_warn(dev, "Failed to configure for format %x\n", dev_warn(dev, "Failed to configure for format %x\n",
pix->pixelformat); pix->pixelformat);
} else if (pxa_camera_check_frame(pix)) { } else if (pxa_camera_check_frame(mf.width, mf.height)) {
dev_warn(dev, dev_warn(dev,
"Camera driver produced an unsupported frame %dx%d\n", "Camera driver produced an unsupported frame %dx%d\n",
pix->width, pix->height); mf.width, mf.height);
ret = -EINVAL; ret = -EINVAL;
} else if (sense.flags & SOCAM_SENSE_PCLK_CHANGED) { } else if (sense.flags & SOCAM_SENSE_PCLK_CHANGED) {
if (sense.pixel_clock > sense.pixel_clock_max) { if (sense.pixel_clock > sense.pixel_clock_max) {
...@@ -1476,10 +1473,14 @@ static int pxa_camera_set_fmt(struct soc_camera_device *icd, ...@@ -1476,10 +1473,14 @@ static int pxa_camera_set_fmt(struct soc_camera_device *icd,
recalculate_fifo_timeout(pcdev, sense.pixel_clock); recalculate_fifo_timeout(pcdev, sense.pixel_clock);
} }
if (!ret) { if (ret < 0)
icd->buswidth = xlate->buswidth; return ret;
icd->current_fmt = xlate->host_fmt;
} pix->width = mf.width;
pix->height = mf.height;
pix->field = mf.field;
pix->colorspace = mf.colorspace;
icd->current_fmt = xlate;
return ret; return ret;
} }
...@@ -1487,17 +1488,16 @@ static int pxa_camera_set_fmt(struct soc_camera_device *icd, ...@@ -1487,17 +1488,16 @@ static int pxa_camera_set_fmt(struct soc_camera_device *icd,
static int pxa_camera_try_fmt(struct soc_camera_device *icd, static int pxa_camera_try_fmt(struct soc_camera_device *icd,
struct v4l2_format *f) struct v4l2_format *f)
{ {
struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent);
struct v4l2_subdev *sd = soc_camera_to_subdev(icd); struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
const struct soc_camera_format_xlate *xlate; const struct soc_camera_format_xlate *xlate;
struct v4l2_pix_format *pix = &f->fmt.pix; struct v4l2_pix_format *pix = &f->fmt.pix;
struct v4l2_mbus_framefmt mf;
__u32 pixfmt = pix->pixelformat; __u32 pixfmt = pix->pixelformat;
enum v4l2_field field;
int ret; int ret;
xlate = soc_camera_xlate_by_fourcc(icd, pixfmt); xlate = soc_camera_xlate_by_fourcc(icd, pixfmt);
if (!xlate) { if (!xlate) {
dev_warn(ici->v4l2_dev.dev, "Format %x not found\n", pixfmt); dev_warn(icd->dev.parent, "Format %x not found\n", pixfmt);
return -EINVAL; return -EINVAL;
} }
...@@ -1511,22 +1511,36 @@ static int pxa_camera_try_fmt(struct soc_camera_device *icd, ...@@ -1511,22 +1511,36 @@ static int pxa_camera_try_fmt(struct soc_camera_device *icd,
&pix->height, 32, 2048, 0, &pix->height, 32, 2048, 0,
pixfmt == V4L2_PIX_FMT_YUV422P ? 4 : 0); pixfmt == V4L2_PIX_FMT_YUV422P ? 4 : 0);
pix->bytesperline = pix->width * pix->bytesperline = soc_mbus_bytes_per_line(pix->width,
DIV_ROUND_UP(xlate->host_fmt->depth, 8); xlate->host_fmt);
if (pix->bytesperline < 0)
return pix->bytesperline;
pix->sizeimage = pix->height * pix->bytesperline; pix->sizeimage = pix->height * pix->bytesperline;
/* camera has to see its format, but the user the original one */
pix->pixelformat = xlate->cam_fmt->fourcc;
/* limit to sensor capabilities */ /* limit to sensor capabilities */
ret = v4l2_subdev_call(sd, video, try_fmt, f); mf.width = pix->width;
pix->pixelformat = pixfmt; mf.height = pix->height;
mf.field = pix->field;
mf.colorspace = pix->colorspace;
mf.code = xlate->code;
field = pix->field; ret = v4l2_subdev_call(sd, video, try_mbus_fmt, &mf);
if (ret < 0)
return ret;
if (field == V4L2_FIELD_ANY) { pix->width = mf.width;
pix->field = V4L2_FIELD_NONE; pix->height = mf.height;
} else if (field != V4L2_FIELD_NONE) { pix->colorspace = mf.colorspace;
dev_err(icd->dev.parent, "Field type %d unsupported.\n", field);
switch (mf.field) {
case V4L2_FIELD_ANY:
case V4L2_FIELD_NONE:
pix->field = V4L2_FIELD_NONE;
break;
default:
/* TODO: support interlaced at least in pass-through mode */
dev_err(icd->dev.parent, "Field type %d unsupported.\n",
mf.field);
return -EINVAL; return -EINVAL;
} }
......
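With the per-format depth field gone, both the pxa and mx3 hosts above compute line and buffer sizes through soc_mbus_bytes_per_line(pix->width, xlate->host_fmt). For the packings that appear in this patch the helper behaves roughly like the model below, an approximation for illustration rather than the soc_mediabus implementation itself:

#include <linux/errno.h>
#include <linux/types.h>
#include <media/soc_mediabus.h>

/* Approximate model of the line-size calculation used above */
static int example_bytes_per_line(u32 width,
				  const struct soc_mbus_pixelfmt *fmt)
{
	switch (fmt->packing) {
	case SOC_MBUS_PACKING_NONE:
		/* samples are tightly packed, e.g. 8-bit Bayer or grey */
		return width * fmt->bits_per_sample / 8;
	case SOC_MBUS_PACKING_2X8_PADHI:
	case SOC_MBUS_PACKING_EXTEND16:
		/* each sample occupies two bytes in memory */
		return width * 2;
	default:
		return -EINVAL;
	}
}

pix->sizeimage then follows as pix->bytesperline * pix->height, which is exactly what both try_fmt routines and the pxa videobuf setup above do.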
...@@ -16,6 +16,7 @@ ...@@ -16,6 +16,7 @@
#include <media/v4l2-subdev.h> #include <media/v4l2-subdev.h>
#include <media/v4l2-chip-ident.h> #include <media/v4l2-chip-ident.h>
#include <media/soc_camera.h> #include <media/soc_camera.h>
#include <media/soc_mediabus.h>
#define RJ54N1_DEV_CODE 0x0400 #define RJ54N1_DEV_CODE 0x0400
#define RJ54N1_DEV_CODE2 0x0401 #define RJ54N1_DEV_CODE2 0x0401
...@@ -85,18 +86,35 @@ ...@@ -85,18 +86,35 @@
/* I2C addresses: 0x50, 0x51, 0x60, 0x61 */ /* I2C addresses: 0x50, 0x51, 0x60, 0x61 */
static const struct soc_camera_data_format rj54n1_colour_formats[] = { /* RJ54N1CB0C has only one fixed colorspace per pixelcode */
{ struct rj54n1_datafmt {
.name = "YUYV", enum v4l2_mbus_pixelcode code;
.depth = 16, enum v4l2_colorspace colorspace;
.fourcc = V4L2_PIX_FMT_YUYV, };
.colorspace = V4L2_COLORSPACE_JPEG,
}, { /* Find a data format by a pixel code in an array */
.name = "RGB565", static const struct rj54n1_datafmt *rj54n1_find_datafmt(
.depth = 16, enum v4l2_mbus_pixelcode code, const struct rj54n1_datafmt *fmt,
.fourcc = V4L2_PIX_FMT_RGB565, int n)
.colorspace = V4L2_COLORSPACE_SRGB, {
} int i;
for (i = 0; i < n; i++)
if (fmt[i].code == code)
return fmt + i;
return NULL;
}
static const struct rj54n1_datafmt rj54n1_colour_fmts[] = {
{V4L2_MBUS_FMT_YUYV8_2X8_LE, V4L2_COLORSPACE_JPEG},
{V4L2_MBUS_FMT_YVYU8_2X8_LE, V4L2_COLORSPACE_JPEG},
{V4L2_MBUS_FMT_RGB565_2X8_LE, V4L2_COLORSPACE_SRGB},
{V4L2_MBUS_FMT_RGB565_2X8_BE, V4L2_COLORSPACE_SRGB},
{V4L2_MBUS_FMT_SBGGR10_2X8_PADHI_LE, V4L2_COLORSPACE_SRGB},
{V4L2_MBUS_FMT_SBGGR10_2X8_PADLO_LE, V4L2_COLORSPACE_SRGB},
{V4L2_MBUS_FMT_SBGGR10_2X8_PADHI_BE, V4L2_COLORSPACE_SRGB},
{V4L2_MBUS_FMT_SBGGR10_2X8_PADLO_BE, V4L2_COLORSPACE_SRGB},
{V4L2_MBUS_FMT_SBGGR10_1X10, V4L2_COLORSPACE_SRGB},
}; };
struct rj54n1_clock_div { struct rj54n1_clock_div {
...@@ -109,12 +127,12 @@ struct rj54n1_clock_div { ...@@ -109,12 +127,12 @@ struct rj54n1_clock_div {
struct rj54n1 { struct rj54n1 {
struct v4l2_subdev subdev; struct v4l2_subdev subdev;
const struct rj54n1_datafmt *fmt;
struct v4l2_rect rect; /* Sensor window */ struct v4l2_rect rect; /* Sensor window */
unsigned short width; /* Output window */ unsigned short width; /* Output window */
unsigned short height; unsigned short height;
unsigned short resize; /* Sensor * 1024 / resize = Output */ unsigned short resize; /* Sensor * 1024 / resize = Output */
struct rj54n1_clock_div clk_div; struct rj54n1_clock_div clk_div;
u32 fourcc;
unsigned short scale; unsigned short scale;
u8 bank; u8 bank;
}; };
...@@ -440,6 +458,16 @@ static int reg_write_multiple(struct i2c_client *client, ...@@ -440,6 +458,16 @@ static int reg_write_multiple(struct i2c_client *client,
return 0; return 0;
} }
static int rj54n1_enum_fmt(struct v4l2_subdev *sd, int index,
enum v4l2_mbus_pixelcode *code)
{
if ((unsigned int)index >= ARRAY_SIZE(rj54n1_colour_fmts))
return -EINVAL;
*code = rj54n1_colour_fmts[index].code;
return 0;
}
static int rj54n1_s_stream(struct v4l2_subdev *sd, int enable) static int rj54n1_s_stream(struct v4l2_subdev *sd, int enable)
{ {
/* TODO: start / stop streaming */ /* TODO: start / stop streaming */
...@@ -527,16 +555,17 @@ static int rj54n1_cropcap(struct v4l2_subdev *sd, struct v4l2_cropcap *a) ...@@ -527,16 +555,17 @@ static int rj54n1_cropcap(struct v4l2_subdev *sd, struct v4l2_cropcap *a)
return 0; return 0;
} }
static int rj54n1_g_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) static int rj54n1_g_fmt(struct v4l2_subdev *sd,
struct v4l2_mbus_framefmt *mf)
{ {
struct i2c_client *client = sd->priv; struct i2c_client *client = sd->priv;
struct rj54n1 *rj54n1 = to_rj54n1(client); struct rj54n1 *rj54n1 = to_rj54n1(client);
struct v4l2_pix_format *pix = &f->fmt.pix;
pix->pixelformat = rj54n1->fourcc; mf->code = rj54n1->fmt->code;
pix->field = V4L2_FIELD_NONE; mf->colorspace = rj54n1->fmt->colorspace;
pix->width = rj54n1->width; mf->field = V4L2_FIELD_NONE;
pix->height = rj54n1->height; mf->width = rj54n1->width;
mf->height = rj54n1->height;
return 0; return 0;
} }
...@@ -787,26 +816,44 @@ static int rj54n1_reg_init(struct i2c_client *client) ...@@ -787,26 +816,44 @@ static int rj54n1_reg_init(struct i2c_client *client)
} }
/* FIXME: streaming output only up to 800x600 is functional */ /* FIXME: streaming output only up to 800x600 is functional */
static int rj54n1_try_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) static int rj54n1_try_fmt(struct v4l2_subdev *sd,
struct v4l2_mbus_framefmt *mf)
{ {
struct v4l2_pix_format *pix = &f->fmt.pix; struct i2c_client *client = sd->priv;
struct rj54n1 *rj54n1 = to_rj54n1(client);
const struct rj54n1_datafmt *fmt;
int align = mf->code == V4L2_MBUS_FMT_SBGGR10_1X10 ||
mf->code == V4L2_MBUS_FMT_SBGGR10_2X8_PADHI_BE ||
mf->code == V4L2_MBUS_FMT_SBGGR10_2X8_PADLO_BE ||
mf->code == V4L2_MBUS_FMT_SBGGR10_2X8_PADHI_LE ||
mf->code == V4L2_MBUS_FMT_SBGGR10_2X8_PADLO_LE;
dev_dbg(&client->dev, "%s: code = %d, width = %u, height = %u\n",
__func__, mf->code, mf->width, mf->height);
fmt = rj54n1_find_datafmt(mf->code, rj54n1_colour_fmts,
ARRAY_SIZE(rj54n1_colour_fmts));
if (!fmt) {
fmt = rj54n1->fmt;
mf->code = fmt->code;
}
pix->field = V4L2_FIELD_NONE; mf->field = V4L2_FIELD_NONE;
mf->colorspace = fmt->colorspace;
if (pix->width > 800) v4l_bound_align_image(&mf->width, 112, RJ54N1_MAX_WIDTH, align,
pix->width = 800; &mf->height, 84, RJ54N1_MAX_HEIGHT, align, 0);
if (pix->height > 600)
pix->height = 600;
return 0; return 0;
} }
static int rj54n1_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) static int rj54n1_s_fmt(struct v4l2_subdev *sd,
struct v4l2_mbus_framefmt *mf)
{ {
struct i2c_client *client = sd->priv; struct i2c_client *client = sd->priv;
struct rj54n1 *rj54n1 = to_rj54n1(client); struct rj54n1 *rj54n1 = to_rj54n1(client);
struct v4l2_pix_format *pix = &f->fmt.pix; const struct rj54n1_datafmt *fmt;
unsigned int output_w, output_h, unsigned int output_w, output_h, max_w, max_h,
input_w = rj54n1->rect.width, input_h = rj54n1->rect.height; input_w = rj54n1->rect.width, input_h = rj54n1->rect.height;
int ret; int ret;
...@@ -814,7 +861,7 @@ static int rj54n1_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) ...@@ -814,7 +861,7 @@ static int rj54n1_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
* The host driver can call us without .try_fmt(), so, we have to take * The host driver can call us without .try_fmt(), so, we have to take
* care ourselves * care ourselves
*/ */
ret = rj54n1_try_fmt(sd, f); ret = rj54n1_try_fmt(sd, mf);
/* /*
* Verify if the sensor has just been powered on. TODO: replace this * Verify if the sensor has just been powered on. TODO: replace this
...@@ -832,49 +879,101 @@ static int rj54n1_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) ...@@ -832,49 +879,101 @@ static int rj54n1_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
} }
/* RA_SEL_UL is only relevant for raw modes, ignored otherwise. */ /* RA_SEL_UL is only relevant for raw modes, ignored otherwise. */
switch (pix->pixelformat) { switch (mf->code) {
case V4L2_PIX_FMT_YUYV: case V4L2_MBUS_FMT_YUYV8_2X8_LE:
ret = reg_write(client, RJ54N1_OUT_SEL, 0); ret = reg_write(client, RJ54N1_OUT_SEL, 0);
if (!ret) if (!ret)
ret = reg_set(client, RJ54N1_BYTE_SWAP, 8, 8); ret = reg_set(client, RJ54N1_BYTE_SWAP, 8, 8);
break; break;
case V4L2_PIX_FMT_RGB565: case V4L2_MBUS_FMT_YVYU8_2X8_LE:
ret = reg_write(client, RJ54N1_OUT_SEL, 0);
if (!ret)
ret = reg_set(client, RJ54N1_BYTE_SWAP, 0, 8);
break;
case V4L2_MBUS_FMT_RGB565_2X8_LE:
ret = reg_write(client, RJ54N1_OUT_SEL, 0x11); ret = reg_write(client, RJ54N1_OUT_SEL, 0x11);
if (!ret) if (!ret)
ret = reg_set(client, RJ54N1_BYTE_SWAP, 8, 8); ret = reg_set(client, RJ54N1_BYTE_SWAP, 8, 8);
break; break;
case V4L2_MBUS_FMT_RGB565_2X8_BE:
ret = reg_write(client, RJ54N1_OUT_SEL, 0x11);
if (!ret)
ret = reg_set(client, RJ54N1_BYTE_SWAP, 0, 8);
break;
case V4L2_MBUS_FMT_SBGGR10_2X8_PADLO_LE:
ret = reg_write(client, RJ54N1_OUT_SEL, 4);
if (!ret)
ret = reg_set(client, RJ54N1_BYTE_SWAP, 8, 8);
if (!ret)
ret = reg_write(client, RJ54N1_RA_SEL_UL, 0);
break;
case V4L2_MBUS_FMT_SBGGR10_2X8_PADHI_LE:
ret = reg_write(client, RJ54N1_OUT_SEL, 4);
if (!ret)
ret = reg_set(client, RJ54N1_BYTE_SWAP, 8, 8);
if (!ret)
ret = reg_write(client, RJ54N1_RA_SEL_UL, 8);
break;
case V4L2_MBUS_FMT_SBGGR10_2X8_PADLO_BE:
ret = reg_write(client, RJ54N1_OUT_SEL, 4);
if (!ret)
ret = reg_set(client, RJ54N1_BYTE_SWAP, 0, 8);
if (!ret)
ret = reg_write(client, RJ54N1_RA_SEL_UL, 0);
break;
case V4L2_MBUS_FMT_SBGGR10_2X8_PADHI_BE:
ret = reg_write(client, RJ54N1_OUT_SEL, 4);
if (!ret)
ret = reg_set(client, RJ54N1_BYTE_SWAP, 0, 8);
if (!ret)
ret = reg_write(client, RJ54N1_RA_SEL_UL, 8);
break;
case V4L2_MBUS_FMT_SBGGR10_1X10:
ret = reg_write(client, RJ54N1_OUT_SEL, 5);
break;
default: default:
ret = -EINVAL; ret = -EINVAL;
} }
/* Special case: a raw mode with 10 bits of data per clock tick */
if (!ret)
ret = reg_set(client, RJ54N1_OCLK_SEL_EN,
(mf->code == V4L2_MBUS_FMT_SBGGR10_1X10) << 1, 2);
if (ret < 0) if (ret < 0)
return ret; return ret;
/* Supported scales 1:1 - 1:16 */ /* Supported scales 1:1 >= scale > 1:16 */
if (pix->width < input_w / 16) max_w = mf->width * (16 * 1024 - 1) / 1024;
pix->width = input_w / 16; if (input_w > max_w)
if (pix->height < input_h / 16) input_w = max_w;
pix->height = input_h / 16; max_h = mf->height * (16 * 1024 - 1) / 1024;
if (input_h > max_h)
input_h = max_h;
output_w = pix->width; output_w = mf->width;
output_h = pix->height; output_h = mf->height;
ret = rj54n1_sensor_scale(sd, &input_w, &input_h, &output_w, &output_h); ret = rj54n1_sensor_scale(sd, &input_w, &input_h, &output_w, &output_h);
if (ret < 0) if (ret < 0)
return ret; return ret;
rj54n1->fourcc = pix->pixelformat; fmt = rj54n1_find_datafmt(mf->code, rj54n1_colour_fmts,
ARRAY_SIZE(rj54n1_colour_fmts));
rj54n1->fmt = fmt;
rj54n1->resize = ret; rj54n1->resize = ret;
rj54n1->rect.width = input_w; rj54n1->rect.width = input_w;
rj54n1->rect.height = input_h; rj54n1->rect.height = input_h;
rj54n1->width = output_w; rj54n1->width = output_w;
rj54n1->height = output_h; rj54n1->height = output_h;
pix->width = output_w; mf->width = output_w;
pix->height = output_h; mf->height = output_h;
pix->field = V4L2_FIELD_NONE; mf->field = V4L2_FIELD_NONE;
mf->colorspace = fmt->colorspace;
return ret; return 0;
} }
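The clamp above keeps the sensor downscale strictly below 16:1; the factor itself is held in 1/1024 steps (the probe code further down initialises rj54n1->resize to 1024, i.e. 1:1). A minimal standalone sketch of that arithmetic, assuming the 1/1024 convention, not driver code:

#include <stdio.h>

/* Illustration only: clamp the input width so input/output stays < 16,
 * mirroring "max_w = mf->width * (16 * 1024 - 1) / 1024" above. */
static unsigned int clamp_input(unsigned int in, unsigned int out)
{
	unsigned int max_in = out * (16 * 1024 - 1) / 1024;

	return in > max_in ? max_in : in;
}

int main(void)
{
	unsigned int out = 100, in = clamp_input(1600, out);	/* 1600 -> 1599 */

	/* resize factor in 1/1024 units: 16373, i.e. just under 16 * 1024 */
	printf("input %u, resize %u/1024\n", in, in * 1024 / out);
	return 0;
}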
static int rj54n1_g_chip_ident(struct v4l2_subdev *sd, static int rj54n1_g_chip_ident(struct v4l2_subdev *sd,
...@@ -1054,9 +1153,10 @@ static struct v4l2_subdev_core_ops rj54n1_subdev_core_ops = { ...@@ -1054,9 +1153,10 @@ static struct v4l2_subdev_core_ops rj54n1_subdev_core_ops = {
static struct v4l2_subdev_video_ops rj54n1_subdev_video_ops = { static struct v4l2_subdev_video_ops rj54n1_subdev_video_ops = {
.s_stream = rj54n1_s_stream, .s_stream = rj54n1_s_stream,
.s_fmt = rj54n1_s_fmt, .s_mbus_fmt = rj54n1_s_fmt,
.g_fmt = rj54n1_g_fmt, .g_mbus_fmt = rj54n1_g_fmt,
.try_fmt = rj54n1_try_fmt, .try_mbus_fmt = rj54n1_try_fmt,
.enum_mbus_fmt = rj54n1_enum_fmt,
.g_crop = rj54n1_g_crop, .g_crop = rj54n1_g_crop,
.cropcap = rj54n1_cropcap, .cropcap = rj54n1_cropcap,
}; };
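With the ops renamed to their *_mbus_* variants, hosts negotiate through struct v4l2_mbus_framefmt instead of struct v4l2_format. A minimal sketch of such a call sequence, mirroring client_s_fmt() later in this patch; the helper name and error handling are illustrative, not part of the patch:

#include <media/v4l2-subdev.h>
#include <media/v4l2-mediabus.h>

/* Hypothetical helper: ask the subdev what it can do, then apply it. */
static int negotiate_mbus_fmt(struct v4l2_subdev *sd,
			      unsigned int width, unsigned int height)
{
	struct v4l2_mbus_framefmt mf = {
		.width		= width,
		.height		= height,
		.code		= V4L2_MBUS_FMT_YUYV8_2X8_LE,
		.field		= V4L2_FIELD_NONE,
		.colorspace	= V4L2_COLORSPACE_JPEG,
	};
	int ret;

	ret = v4l2_subdev_call(sd, video, try_mbus_fmt, &mf);
	if (ret < 0)
		return ret;

	/* mf now holds the geometry and code the sensor agreed to */
	return v4l2_subdev_call(sd, video, s_mbus_fmt, &mf);
}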
...@@ -1153,7 +1253,7 @@ static int rj54n1_probe(struct i2c_client *client, ...@@ -1153,7 +1253,7 @@ static int rj54n1_probe(struct i2c_client *client,
rj54n1->rect.height = RJ54N1_MAX_HEIGHT; rj54n1->rect.height = RJ54N1_MAX_HEIGHT;
rj54n1->width = RJ54N1_MAX_WIDTH; rj54n1->width = RJ54N1_MAX_WIDTH;
rj54n1->height = RJ54N1_MAX_HEIGHT; rj54n1->height = RJ54N1_MAX_HEIGHT;
rj54n1->fourcc = V4L2_PIX_FMT_YUYV; rj54n1->fmt = &rj54n1_colour_fmts[0];
rj54n1->resize = 1024; rj54n1->resize = 1024;
ret = rj54n1_video_probe(icd, client); ret = rj54n1_video_probe(icd, client);
...@@ -1164,9 +1264,6 @@ static int rj54n1_probe(struct i2c_client *client, ...@@ -1164,9 +1264,6 @@ static int rj54n1_probe(struct i2c_client *client,
return ret; return ret;
} }
icd->formats = rj54n1_colour_formats;
icd->num_formats = ARRAY_SIZE(rj54n1_colour_formats);
return ret; return ret;
} }
......
...@@ -38,6 +38,8 @@ ...@@ -38,6 +38,8 @@
#include <media/soc_camera.h> #include <media/soc_camera.h>
#include <media/sh_mobile_ceu.h> #include <media/sh_mobile_ceu.h>
#include <media/videobuf-dma-contig.h> #include <media/videobuf-dma-contig.h>
#include <media/v4l2-mediabus.h>
#include <media/soc_mediabus.h>
/* register offsets for sh7722 / sh7723 */ /* register offsets for sh7722 / sh7723 */
...@@ -85,7 +87,7 @@ ...@@ -85,7 +87,7 @@
/* per video frame buffer */ /* per video frame buffer */
struct sh_mobile_ceu_buffer { struct sh_mobile_ceu_buffer {
struct videobuf_buffer vb; /* v4l buffer must be first */ struct videobuf_buffer vb; /* v4l buffer must be first */
const struct soc_camera_data_format *fmt; enum v4l2_mbus_pixelcode code;
}; };
struct sh_mobile_ceu_dev { struct sh_mobile_ceu_dev {
...@@ -114,8 +116,8 @@ struct sh_mobile_ceu_cam { ...@@ -114,8 +116,8 @@ struct sh_mobile_ceu_cam {
struct v4l2_rect ceu_rect; struct v4l2_rect ceu_rect;
unsigned int cam_width; unsigned int cam_width;
unsigned int cam_height; unsigned int cam_height;
const struct soc_camera_data_format *extra_fmt; const struct soc_mbus_pixelfmt *extra_fmt;
const struct soc_camera_data_format *camera_fmt; enum v4l2_mbus_pixelcode code;
}; };
static unsigned long make_bus_param(struct sh_mobile_ceu_dev *pcdev) static unsigned long make_bus_param(struct sh_mobile_ceu_dev *pcdev)
...@@ -197,10 +199,13 @@ static int sh_mobile_ceu_videobuf_setup(struct videobuf_queue *vq, ...@@ -197,10 +199,13 @@ static int sh_mobile_ceu_videobuf_setup(struct videobuf_queue *vq,
struct soc_camera_device *icd = vq->priv_data; struct soc_camera_device *icd = vq->priv_data;
struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent);
struct sh_mobile_ceu_dev *pcdev = ici->priv; struct sh_mobile_ceu_dev *pcdev = ici->priv;
int bytes_per_pixel = (icd->current_fmt->depth + 7) >> 3; int bytes_per_line = soc_mbus_bytes_per_line(icd->user_width,
icd->current_fmt->host_fmt);
*size = PAGE_ALIGN(icd->user_width * icd->user_height * if (bytes_per_line < 0)
bytes_per_pixel); return bytes_per_line;
*size = PAGE_ALIGN(bytes_per_line * icd->user_height);
if (0 == *count) if (0 == *count)
*count = 2; *count = 2;
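The buffer size is now derived from soc_mbus_bytes_per_line() instead of a per-pixel depth. Roughly, the helper converts a width in pixels to bytes per line from the host format's bits_per_sample and packing; a sketch of that mapping for the packings appearing in this patch (illustration only, the real helper lives in soc_mediabus.c and handles more cases):

#include <linux/errno.h>
#include <media/soc_mediabus.h>

/* Illustrative reimplementation, not the in-tree helper. */
static int bytes_per_line_sketch(unsigned int width,
				 const struct soc_mbus_pixelfmt *fmt)
{
	switch (fmt->packing) {
	case SOC_MBUS_PACKING_NONE:
		/* e.g. NV16: 16 bits per sample -> 2 bytes per pixel */
		return width * fmt->bits_per_sample / 8;
	case SOC_MBUS_PACKING_2X8_PADHI:
	case SOC_MBUS_PACKING_EXTEND16:
		/* each sample is carried in two bytes on the bus */
		return width * 2;
	default:
		return -EINVAL;
	}
}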
...@@ -284,7 +289,7 @@ static int sh_mobile_ceu_capture(struct sh_mobile_ceu_dev *pcdev) ...@@ -284,7 +289,7 @@ static int sh_mobile_ceu_capture(struct sh_mobile_ceu_dev *pcdev)
ceu_write(pcdev, CDBYR, phys_addr_bottom); ceu_write(pcdev, CDBYR, phys_addr_bottom);
} }
switch (icd->current_fmt->fourcc) { switch (icd->current_fmt->host_fmt->fourcc) {
case V4L2_PIX_FMT_NV12: case V4L2_PIX_FMT_NV12:
case V4L2_PIX_FMT_NV21: case V4L2_PIX_FMT_NV21:
case V4L2_PIX_FMT_NV16: case V4L2_PIX_FMT_NV16:
...@@ -311,8 +316,13 @@ static int sh_mobile_ceu_videobuf_prepare(struct videobuf_queue *vq, ...@@ -311,8 +316,13 @@ static int sh_mobile_ceu_videobuf_prepare(struct videobuf_queue *vq,
{ {
struct soc_camera_device *icd = vq->priv_data; struct soc_camera_device *icd = vq->priv_data;
struct sh_mobile_ceu_buffer *buf; struct sh_mobile_ceu_buffer *buf;
int bytes_per_line = soc_mbus_bytes_per_line(icd->user_width,
icd->current_fmt->host_fmt);
int ret; int ret;
if (bytes_per_line < 0)
return bytes_per_line;
buf = container_of(vb, struct sh_mobile_ceu_buffer, vb); buf = container_of(vb, struct sh_mobile_ceu_buffer, vb);
dev_dbg(icd->dev.parent, "%s (vb=0x%p) 0x%08lx %zd\n", __func__, dev_dbg(icd->dev.parent, "%s (vb=0x%p) 0x%08lx %zd\n", __func__,
...@@ -331,18 +341,18 @@ static int sh_mobile_ceu_videobuf_prepare(struct videobuf_queue *vq, ...@@ -331,18 +341,18 @@ static int sh_mobile_ceu_videobuf_prepare(struct videobuf_queue *vq,
BUG_ON(NULL == icd->current_fmt); BUG_ON(NULL == icd->current_fmt);
if (buf->fmt != icd->current_fmt || if (buf->code != icd->current_fmt->code ||
vb->width != icd->user_width || vb->width != icd->user_width ||
vb->height != icd->user_height || vb->height != icd->user_height ||
vb->field != field) { vb->field != field) {
buf->fmt = icd->current_fmt; buf->code = icd->current_fmt->code;
vb->width = icd->user_width; vb->width = icd->user_width;
vb->height = icd->user_height; vb->height = icd->user_height;
vb->field = field; vb->field = field;
vb->state = VIDEOBUF_NEEDS_INIT; vb->state = VIDEOBUF_NEEDS_INIT;
} }
vb->size = vb->width * vb->height * ((buf->fmt->depth + 7) >> 3); vb->size = vb->height * bytes_per_line;
if (0 != vb->baddr && vb->bsize < vb->size) { if (0 != vb->baddr && vb->bsize < vb->size) {
ret = -EINVAL; ret = -EINVAL;
goto out; goto out;
...@@ -564,19 +574,30 @@ static void sh_mobile_ceu_set_rect(struct soc_camera_device *icd, ...@@ -564,19 +574,30 @@ static void sh_mobile_ceu_set_rect(struct soc_camera_device *icd,
in_width *= 2; in_width *= 2;
left_offset *= 2; left_offset *= 2;
} }
width = cdwdr_width = out_width; width = out_width;
cdwdr_width = out_width;
} else { } else {
unsigned int w_factor = (icd->current_fmt->depth + 7) >> 3; int bytes_per_line = soc_mbus_bytes_per_line(out_width,
icd->current_fmt->host_fmt);
unsigned int w_factor;
width = out_width * w_factor / 2; width = out_width;
if (!pcdev->is_16bit) switch (icd->current_fmt->host_fmt->packing) {
w_factor *= 2; case SOC_MBUS_PACKING_2X8_PADHI:
w_factor = 2;
break;
default:
w_factor = 1;
}
in_width = rect->width * w_factor / 2; in_width = rect->width * w_factor;
left_offset = left_offset * w_factor / 2; left_offset = left_offset * w_factor;
cdwdr_width = width * 2; if (bytes_per_line < 0)
cdwdr_width = out_width;
else
cdwdr_width = bytes_per_line;
} }
height = out_height; height = out_height;
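A worked example for the non-16-bit branch above, with illustrative numbers only: in a 2X8_PADHI-packed format one sample is spread over two bytes, so w_factor is 2 and both the fetch width and the CDWDR line stride double.

#include <stdio.h>

int main(void)
{
	unsigned int crop_width = 640, out_width = 640;
	unsigned int w_factor = 2;			/* SOC_MBUS_PACKING_2X8_PADHI */
	unsigned int in_width = crop_width * w_factor;	/* 1280 bytes fetched per line */
	unsigned int cdwdr_width = out_width * 2;	/* == soc_mbus_bytes_per_line() here */

	printf("in_width %u, CDWDR %u\n", in_width, cdwdr_width);
	return 0;
}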
...@@ -673,24 +694,24 @@ static int sh_mobile_ceu_set_bus_param(struct soc_camera_device *icd, ...@@ -673,24 +694,24 @@ static int sh_mobile_ceu_set_bus_param(struct soc_camera_device *icd,
value = 0x00000010; /* data fetch by default */ value = 0x00000010; /* data fetch by default */
yuv_lineskip = 0; yuv_lineskip = 0;
switch (icd->current_fmt->fourcc) { switch (icd->current_fmt->host_fmt->fourcc) {
case V4L2_PIX_FMT_NV12: case V4L2_PIX_FMT_NV12:
case V4L2_PIX_FMT_NV21: case V4L2_PIX_FMT_NV21:
yuv_lineskip = 1; /* skip for NV12/21, no skip for NV16/61 */ yuv_lineskip = 1; /* skip for NV12/21, no skip for NV16/61 */
/* fall-through */ /* fall-through */
case V4L2_PIX_FMT_NV16: case V4L2_PIX_FMT_NV16:
case V4L2_PIX_FMT_NV61: case V4L2_PIX_FMT_NV61:
switch (cam->camera_fmt->fourcc) { switch (cam->code) {
case V4L2_PIX_FMT_UYVY: case V4L2_MBUS_FMT_YUYV8_2X8_BE:
value = 0x00000000; /* Cb0, Y0, Cr0, Y1 */ value = 0x00000000; /* Cb0, Y0, Cr0, Y1 */
break; break;
case V4L2_PIX_FMT_VYUY: case V4L2_MBUS_FMT_YVYU8_2X8_BE:
value = 0x00000100; /* Cr0, Y0, Cb0, Y1 */ value = 0x00000100; /* Cr0, Y0, Cb0, Y1 */
break; break;
case V4L2_PIX_FMT_YUYV: case V4L2_MBUS_FMT_YUYV8_2X8_LE:
value = 0x00000200; /* Y0, Cb0, Y1, Cr0 */ value = 0x00000200; /* Y0, Cb0, Y1, Cr0 */
break; break;
case V4L2_PIX_FMT_YVYU: case V4L2_MBUS_FMT_YVYU8_2X8_LE:
value = 0x00000300; /* Y0, Cr0, Y1, Cb0 */ value = 0x00000300; /* Y0, Cr0, Y1, Cb0 */
break; break;
default: default:
...@@ -698,8 +719,8 @@ static int sh_mobile_ceu_set_bus_param(struct soc_camera_device *icd, ...@@ -698,8 +719,8 @@ static int sh_mobile_ceu_set_bus_param(struct soc_camera_device *icd,
} }
} }
if (icd->current_fmt->fourcc == V4L2_PIX_FMT_NV21 || if (icd->current_fmt->host_fmt->fourcc == V4L2_PIX_FMT_NV21 ||
icd->current_fmt->fourcc == V4L2_PIX_FMT_NV61) icd->current_fmt->host_fmt->fourcc == V4L2_PIX_FMT_NV61)
value ^= 0x00000100; /* swap U, V to change from NV1x->NVx1 */ value ^= 0x00000100; /* swap U, V to change from NV1x->NVx1 */
value |= common_flags & SOCAM_VSYNC_ACTIVE_LOW ? 1 << 1 : 0; value |= common_flags & SOCAM_VSYNC_ACTIVE_LOW ? 1 << 1 : 0;
...@@ -746,7 +767,8 @@ static int sh_mobile_ceu_set_bus_param(struct soc_camera_device *icd, ...@@ -746,7 +767,8 @@ static int sh_mobile_ceu_set_bus_param(struct soc_camera_device *icd,
return 0; return 0;
} }
static int sh_mobile_ceu_try_bus_param(struct soc_camera_device *icd) static int sh_mobile_ceu_try_bus_param(struct soc_camera_device *icd,
unsigned char buswidth)
{ {
struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent);
struct sh_mobile_ceu_dev *pcdev = ici->priv; struct sh_mobile_ceu_dev *pcdev = ici->priv;
...@@ -755,48 +777,75 @@ static int sh_mobile_ceu_try_bus_param(struct soc_camera_device *icd) ...@@ -755,48 +777,75 @@ static int sh_mobile_ceu_try_bus_param(struct soc_camera_device *icd)
camera_flags = icd->ops->query_bus_param(icd); camera_flags = icd->ops->query_bus_param(icd);
common_flags = soc_camera_bus_param_compatible(camera_flags, common_flags = soc_camera_bus_param_compatible(camera_flags,
make_bus_param(pcdev)); make_bus_param(pcdev));
if (!common_flags) if (!common_flags || buswidth > 16 ||
(buswidth > 8 && !(common_flags & SOCAM_DATAWIDTH_16)))
return -EINVAL; return -EINVAL;
return 0; return 0;
} }
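The added buswidth argument rejects sensors the CEU cannot fetch from: anything wider than 16 bits, and anything wider than 8 bits when the negotiated bus is not 16-bit capable. A standalone restatement of that condition; the flag value below is a placeholder, the real SOCAM_DATAWIDTH_* bits are defined in soc_camera.h:

#include <stdbool.h>

#define EXAMPLE_DATAWIDTH_16	(1UL << 1)	/* placeholder for SOCAM_DATAWIDTH_16 */

/* true when the host can actually fetch samples of this width */
static bool bus_width_ok(unsigned long common_flags, unsigned char buswidth)
{
	if (!common_flags || buswidth > 16)
		return false;
	return buswidth <= 8 || (common_flags & EXAMPLE_DATAWIDTH_16);
}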
static const struct soc_camera_data_format sh_mobile_ceu_formats[] = { static const struct soc_mbus_pixelfmt sh_mobile_ceu_formats[] = {
{
.name = "NV12",
.depth = 12,
.fourcc = V4L2_PIX_FMT_NV12,
.colorspace = V4L2_COLORSPACE_JPEG,
},
{
.name = "NV21",
.depth = 12,
.fourcc = V4L2_PIX_FMT_NV21,
.colorspace = V4L2_COLORSPACE_JPEG,
},
{
.name = "NV16",
.depth = 16,
.fourcc = V4L2_PIX_FMT_NV16,
.colorspace = V4L2_COLORSPACE_JPEG,
},
{ {
.name = "NV61", .fourcc = V4L2_PIX_FMT_NV12,
.depth = 16, .name = "NV12",
.fourcc = V4L2_PIX_FMT_NV61, .bits_per_sample = 12,
.colorspace = V4L2_COLORSPACE_JPEG, .packing = SOC_MBUS_PACKING_NONE,
.order = SOC_MBUS_ORDER_LE,
}, {
.fourcc = V4L2_PIX_FMT_NV21,
.name = "NV21",
.bits_per_sample = 12,
.packing = SOC_MBUS_PACKING_NONE,
.order = SOC_MBUS_ORDER_LE,
}, {
.fourcc = V4L2_PIX_FMT_NV16,
.name = "NV16",
.bits_per_sample = 16,
.packing = SOC_MBUS_PACKING_NONE,
.order = SOC_MBUS_ORDER_LE,
}, {
.fourcc = V4L2_PIX_FMT_NV61,
.name = "NV61",
.bits_per_sample = 16,
.packing = SOC_MBUS_PACKING_NONE,
.order = SOC_MBUS_ORDER_LE,
}, },
}; };
/* This will be corrected as we get more formats */
static bool sh_mobile_ceu_packing_supported(const struct soc_mbus_pixelfmt *fmt)
{
return fmt->packing == SOC_MBUS_PACKING_NONE ||
(fmt->bits_per_sample == 8 &&
fmt->packing == SOC_MBUS_PACKING_2X8_PADHI) ||
(fmt->bits_per_sample > 8 &&
fmt->packing == SOC_MBUS_PACKING_EXTEND16);
}
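For illustration, a hypothetical descriptor that would pass the check above: a sample wider than 8 bits extended to 16. This is not part of the patch and assumes the driver's usual includes; the NV formats in the table pass via SOC_MBUS_PACKING_NONE.

/* Hypothetical descriptor, illustration only. */
static const struct soc_mbus_pixelfmt example_bayer10 = {
	.fourcc			= V4L2_PIX_FMT_SBGGR10,
	.name			= "Bayer 10 BGGR",
	.bits_per_sample	= 10,
	.packing		= SOC_MBUS_PACKING_EXTEND16,
	.order			= SOC_MBUS_ORDER_LE,
};

static bool example_supported(void)
{
	/* true: bits_per_sample > 8 and packing == SOC_MBUS_PACKING_EXTEND16 */
	return sh_mobile_ceu_packing_supported(&example_bayer10);
}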
static int sh_mobile_ceu_get_formats(struct soc_camera_device *icd, int idx, static int sh_mobile_ceu_get_formats(struct soc_camera_device *icd, int idx,
struct soc_camera_format_xlate *xlate) struct soc_camera_format_xlate *xlate)
{ {
struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
struct device *dev = icd->dev.parent; struct device *dev = icd->dev.parent;
int ret, k, n; int ret, k, n;
int formats = 0; int formats = 0;
struct sh_mobile_ceu_cam *cam; struct sh_mobile_ceu_cam *cam;
enum v4l2_mbus_pixelcode code;
const struct soc_mbus_pixelfmt *fmt;
ret = sh_mobile_ceu_try_bus_param(icd); ret = v4l2_subdev_call(sd, video, enum_mbus_fmt, idx, &code);
if (ret < 0)
/* No more formats */
return 0;
fmt = soc_mbus_get_fmtdesc(code);
if (!fmt) {
dev_err(icd->dev.parent,
"Invalid format code #%d: %d\n", idx, code);
return -EINVAL;
}
ret = sh_mobile_ceu_try_bus_param(icd, fmt->bits_per_sample);
if (ret < 0) if (ret < 0)
return 0; return 0;
...@@ -814,13 +863,13 @@ static int sh_mobile_ceu_get_formats(struct soc_camera_device *icd, int idx, ...@@ -814,13 +863,13 @@ static int sh_mobile_ceu_get_formats(struct soc_camera_device *icd, int idx,
if (!idx) if (!idx)
cam->extra_fmt = NULL; cam->extra_fmt = NULL;
switch (icd->formats[idx].fourcc) { switch (code) {
case V4L2_PIX_FMT_UYVY: case V4L2_MBUS_FMT_YUYV8_2X8_BE:
case V4L2_PIX_FMT_VYUY: case V4L2_MBUS_FMT_YVYU8_2X8_BE:
case V4L2_PIX_FMT_YUYV: case V4L2_MBUS_FMT_YUYV8_2X8_LE:
case V4L2_PIX_FMT_YVYU: case V4L2_MBUS_FMT_YVYU8_2X8_LE:
if (cam->extra_fmt) if (cam->extra_fmt)
goto add_single_format; break;
/* /*
* Our case is simple so far: for any of the above four camera * Our case is simple so far: for any of the above four camera
...@@ -831,32 +880,31 @@ static int sh_mobile_ceu_get_formats(struct soc_camera_device *icd, int idx, ...@@ -831,32 +880,31 @@ static int sh_mobile_ceu_get_formats(struct soc_camera_device *icd, int idx,
* the host_priv pointer and check whether the format you're * the host_priv pointer and check whether the format you're
* going to add now is already there. * going to add now is already there.
*/ */
cam->extra_fmt = (void *)sh_mobile_ceu_formats; cam->extra_fmt = sh_mobile_ceu_formats;
n = ARRAY_SIZE(sh_mobile_ceu_formats); n = ARRAY_SIZE(sh_mobile_ceu_formats);
formats += n; formats += n;
for (k = 0; xlate && k < n; k++) { for (k = 0; xlate && k < n; k++) {
xlate->host_fmt = &sh_mobile_ceu_formats[k]; xlate->host_fmt = &sh_mobile_ceu_formats[k];
xlate->cam_fmt = icd->formats + idx; xlate->code = code;
xlate->buswidth = icd->formats[idx].depth;
xlate++; xlate++;
dev_dbg(dev, "Providing format %s using %s\n", dev_dbg(dev, "Providing format %s using code %d\n",
sh_mobile_ceu_formats[k].name, sh_mobile_ceu_formats[k].name, code);
icd->formats[idx].name);
} }
break;
default: default:
add_single_format: if (!sh_mobile_ceu_packing_supported(fmt))
/* Generic pass-through */ return 0;
formats++; }
if (xlate) {
xlate->host_fmt = icd->formats + idx; /* Generic pass-through */
xlate->cam_fmt = icd->formats + idx; formats++;
xlate->buswidth = icd->formats[idx].depth; if (xlate) {
xlate++; xlate->host_fmt = fmt;
dev_dbg(dev, xlate->code = code;
"Providing format %s in pass-through mode\n", xlate++;
icd->formats[idx].name); dev_dbg(dev, "Providing format %s in pass-through mode\n",
} fmt->name);
} }
return formats; return formats;
...@@ -1036,17 +1084,15 @@ static int client_s_crop(struct v4l2_subdev *sd, struct v4l2_crop *crop, ...@@ -1036,17 +1084,15 @@ static int client_s_crop(struct v4l2_subdev *sd, struct v4l2_crop *crop,
static int get_camera_scales(struct v4l2_subdev *sd, struct v4l2_rect *rect, static int get_camera_scales(struct v4l2_subdev *sd, struct v4l2_rect *rect,
unsigned int *scale_h, unsigned int *scale_v) unsigned int *scale_h, unsigned int *scale_v)
{ {
struct v4l2_format f; struct v4l2_mbus_framefmt mf;
int ret; int ret;
f.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; ret = v4l2_subdev_call(sd, video, g_mbus_fmt, &mf);
ret = v4l2_subdev_call(sd, video, g_fmt, &f);
if (ret < 0) if (ret < 0)
return ret; return ret;
*scale_h = calc_generic_scale(rect->width, f.fmt.pix.width); *scale_h = calc_generic_scale(rect->width, mf.width);
*scale_v = calc_generic_scale(rect->height, f.fmt.pix.height); *scale_v = calc_generic_scale(rect->height, mf.height);
return 0; return 0;
} }
...@@ -1061,32 +1107,29 @@ static int get_camera_subwin(struct soc_camera_device *icd, ...@@ -1061,32 +1107,29 @@ static int get_camera_subwin(struct soc_camera_device *icd,
if (!ceu_rect->width) { if (!ceu_rect->width) {
struct v4l2_subdev *sd = soc_camera_to_subdev(icd); struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
struct device *dev = icd->dev.parent; struct device *dev = icd->dev.parent;
struct v4l2_format f; struct v4l2_mbus_framefmt mf;
struct v4l2_pix_format *pix = &f.fmt.pix;
int ret; int ret;
/* First time */ /* First time */
f.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; ret = v4l2_subdev_call(sd, video, g_mbus_fmt, &mf);
ret = v4l2_subdev_call(sd, video, g_fmt, &f);
if (ret < 0) if (ret < 0)
return ret; return ret;
dev_geo(dev, "camera fmt %ux%u\n", pix->width, pix->height); dev_geo(dev, "camera fmt %ux%u\n", mf.width, mf.height);
if (pix->width > 2560) { if (mf.width > 2560) {
ceu_rect->width = 2560; ceu_rect->width = 2560;
ceu_rect->left = (pix->width - 2560) / 2; ceu_rect->left = (mf.width - 2560) / 2;
} else { } else {
ceu_rect->width = pix->width; ceu_rect->width = mf.width;
ceu_rect->left = 0; ceu_rect->left = 0;
} }
if (pix->height > 1920) { if (mf.height > 1920) {
ceu_rect->height = 1920; ceu_rect->height = 1920;
ceu_rect->top = (pix->height - 1920) / 2; ceu_rect->top = (mf.height - 1920) / 2;
} else { } else {
ceu_rect->height = pix->height; ceu_rect->height = mf.height;
ceu_rect->top = 0; ceu_rect->top = 0;
} }
...@@ -1103,13 +1146,12 @@ static int get_camera_subwin(struct soc_camera_device *icd, ...@@ -1103,13 +1146,12 @@ static int get_camera_subwin(struct soc_camera_device *icd,
return 0; return 0;
} }
static int client_s_fmt(struct soc_camera_device *icd, struct v4l2_format *f, static int client_s_fmt(struct soc_camera_device *icd,
bool ceu_can_scale) struct v4l2_mbus_framefmt *mf, bool ceu_can_scale)
{ {
struct v4l2_subdev *sd = soc_camera_to_subdev(icd); struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
struct device *dev = icd->dev.parent; struct device *dev = icd->dev.parent;
struct v4l2_pix_format *pix = &f->fmt.pix; unsigned int width = mf->width, height = mf->height, tmp_w, tmp_h;
unsigned int width = pix->width, height = pix->height, tmp_w, tmp_h;
unsigned int max_width, max_height; unsigned int max_width, max_height;
struct v4l2_cropcap cap; struct v4l2_cropcap cap;
int ret; int ret;
...@@ -1123,29 +1165,29 @@ static int client_s_fmt(struct soc_camera_device *icd, struct v4l2_format *f, ...@@ -1123,29 +1165,29 @@ static int client_s_fmt(struct soc_camera_device *icd, struct v4l2_format *f,
max_width = min(cap.bounds.width, 2560); max_width = min(cap.bounds.width, 2560);
max_height = min(cap.bounds.height, 1920); max_height = min(cap.bounds.height, 1920);
ret = v4l2_subdev_call(sd, video, s_fmt, f); ret = v4l2_subdev_call(sd, video, s_mbus_fmt, mf);
if (ret < 0) if (ret < 0)
return ret; return ret;
dev_geo(dev, "camera scaled to %ux%u\n", pix->width, pix->height); dev_geo(dev, "camera scaled to %ux%u\n", mf->width, mf->height);
if ((width == pix->width && height == pix->height) || !ceu_can_scale) if ((width == mf->width && height == mf->height) || !ceu_can_scale)
return 0; return 0;
/* Camera set a format, but geometry is not precise, try to improve */ /* Camera set a format, but geometry is not precise, try to improve */
tmp_w = pix->width; tmp_w = mf->width;
tmp_h = pix->height; tmp_h = mf->height;
/* width <= max_width && height <= max_height - guaranteed by try_fmt */ /* width <= max_width && height <= max_height - guaranteed by try_fmt */
while ((width > tmp_w || height > tmp_h) && while ((width > tmp_w || height > tmp_h) &&
tmp_w < max_width && tmp_h < max_height) { tmp_w < max_width && tmp_h < max_height) {
tmp_w = min(2 * tmp_w, max_width); tmp_w = min(2 * tmp_w, max_width);
tmp_h = min(2 * tmp_h, max_height); tmp_h = min(2 * tmp_h, max_height);
pix->width = tmp_w; mf->width = tmp_w;
pix->height = tmp_h; mf->height = tmp_h;
ret = v4l2_subdev_call(sd, video, s_fmt, f); ret = v4l2_subdev_call(sd, video, s_mbus_fmt, mf);
dev_geo(dev, "Camera scaled to %ux%u\n", dev_geo(dev, "Camera scaled to %ux%u\n",
pix->width, pix->height); mf->width, mf->height);
if (ret < 0) { if (ret < 0) {
/* This shouldn't happen */ /* This shouldn't happen */
dev_err(dev, "Client failed to set format: %d\n", ret); dev_err(dev, "Client failed to set format: %d\n", ret);
...@@ -1163,27 +1205,26 @@ static int client_s_fmt(struct soc_camera_device *icd, struct v4l2_format *f, ...@@ -1163,27 +1205,26 @@ static int client_s_fmt(struct soc_camera_device *icd, struct v4l2_format *f,
*/ */
static int client_scale(struct soc_camera_device *icd, struct v4l2_rect *rect, static int client_scale(struct soc_camera_device *icd, struct v4l2_rect *rect,
struct v4l2_rect *sub_rect, struct v4l2_rect *ceu_rect, struct v4l2_rect *sub_rect, struct v4l2_rect *ceu_rect,
struct v4l2_format *f, bool ceu_can_scale) struct v4l2_mbus_framefmt *mf, bool ceu_can_scale)
{ {
struct v4l2_subdev *sd = soc_camera_to_subdev(icd); struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
struct sh_mobile_ceu_cam *cam = icd->host_priv; struct sh_mobile_ceu_cam *cam = icd->host_priv;
struct device *dev = icd->dev.parent; struct device *dev = icd->dev.parent;
struct v4l2_format f_tmp = *f; struct v4l2_mbus_framefmt mf_tmp = *mf;
struct v4l2_pix_format *pix_tmp = &f_tmp.fmt.pix;
unsigned int scale_h, scale_v; unsigned int scale_h, scale_v;
int ret; int ret;
/* 5. Apply iterative camera S_FMT for camera user window. */ /* 5. Apply iterative camera S_FMT for camera user window. */
ret = client_s_fmt(icd, &f_tmp, ceu_can_scale); ret = client_s_fmt(icd, &mf_tmp, ceu_can_scale);
if (ret < 0) if (ret < 0)
return ret; return ret;
dev_geo(dev, "5: camera scaled to %ux%u\n", dev_geo(dev, "5: camera scaled to %ux%u\n",
pix_tmp->width, pix_tmp->height); mf_tmp.width, mf_tmp.height);
/* 6. Retrieve camera output window (g_fmt) */ /* 6. Retrieve camera output window (g_fmt) */
/* unneeded - it is already in "f_tmp" */ /* unneeded - it is already in "mf_tmp" */
/* 7. Calculate new camera scales. */ /* 7. Calculate new camera scales. */
ret = get_camera_scales(sd, rect, &scale_h, &scale_v); ret = get_camera_scales(sd, rect, &scale_h, &scale_v);
...@@ -1192,10 +1233,11 @@ static int client_scale(struct soc_camera_device *icd, struct v4l2_rect *rect, ...@@ -1192,10 +1233,11 @@ static int client_scale(struct soc_camera_device *icd, struct v4l2_rect *rect,
dev_geo(dev, "7: camera scales %u:%u\n", scale_h, scale_v); dev_geo(dev, "7: camera scales %u:%u\n", scale_h, scale_v);
cam->cam_width = pix_tmp->width; cam->cam_width = mf_tmp.width;
cam->cam_height = pix_tmp->height; cam->cam_height = mf_tmp.height;
f->fmt.pix.width = pix_tmp->width; mf->width = mf_tmp.width;
f->fmt.pix.height = pix_tmp->height; mf->height = mf_tmp.height;
mf->colorspace = mf_tmp.colorspace;
/* /*
* 8. Calculate new CEU crop - apply camera scales to previously * 8. Calculate new CEU crop - apply camera scales to previously
...@@ -1259,8 +1301,7 @@ static int sh_mobile_ceu_set_crop(struct soc_camera_device *icd, ...@@ -1259,8 +1301,7 @@ static int sh_mobile_ceu_set_crop(struct soc_camera_device *icd,
struct v4l2_rect *cam_rect = &cam_crop.c, *ceu_rect = &cam->ceu_rect; struct v4l2_rect *cam_rect = &cam_crop.c, *ceu_rect = &cam->ceu_rect;
struct v4l2_subdev *sd = soc_camera_to_subdev(icd); struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
struct device *dev = icd->dev.parent; struct device *dev = icd->dev.parent;
struct v4l2_format f; struct v4l2_mbus_framefmt mf;
struct v4l2_pix_format *pix = &f.fmt.pix;
unsigned int scale_comb_h, scale_comb_v, scale_ceu_h, scale_ceu_v, unsigned int scale_comb_h, scale_comb_v, scale_ceu_h, scale_ceu_v,
out_width, out_height; out_width, out_height;
u32 capsr, cflcr; u32 capsr, cflcr;
...@@ -1309,25 +1350,24 @@ static int sh_mobile_ceu_set_crop(struct soc_camera_device *icd, ...@@ -1309,25 +1350,24 @@ static int sh_mobile_ceu_set_crop(struct soc_camera_device *icd,
* 5. Using actual input window and calculated combined scales calculate * 5. Using actual input window and calculated combined scales calculate
* camera target output window. * camera target output window.
*/ */
pix->width = scale_down(cam_rect->width, scale_comb_h); mf.width = scale_down(cam_rect->width, scale_comb_h);
pix->height = scale_down(cam_rect->height, scale_comb_v); mf.height = scale_down(cam_rect->height, scale_comb_v);
dev_geo(dev, "5: camera target %ux%u\n", pix->width, pix->height); dev_geo(dev, "5: camera target %ux%u\n", mf.width, mf.height);
/* 6. - 9. */ /* 6. - 9. */
pix->pixelformat = cam->camera_fmt->fourcc; mf.code = cam->code;
pix->colorspace = cam->camera_fmt->colorspace; mf.field = pcdev->is_interlaced ? V4L2_FIELD_INTERLACED :
V4L2_FIELD_NONE;
capsr = capture_save_reset(pcdev); capsr = capture_save_reset(pcdev);
dev_dbg(dev, "CAPSR 0x%x, CFLCR 0x%x\n", capsr, pcdev->cflcr); dev_dbg(dev, "CAPSR 0x%x, CFLCR 0x%x\n", capsr, pcdev->cflcr);
/* Make relative to camera rectangle */ /* Make relative to camera rectangle */
rect->left -= cam_rect->left; rect->left -= cam_rect->left;
rect->top -= cam_rect->top; rect->top -= cam_rect->top;
f.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; ret = client_scale(icd, cam_rect, rect, ceu_rect, &mf,
ret = client_scale(icd, cam_rect, rect, ceu_rect, &f,
pcdev->image_mode && !pcdev->is_interlaced); pcdev->image_mode && !pcdev->is_interlaced);
dev_geo(dev, "6-9: %d\n", ret); dev_geo(dev, "6-9: %d\n", ret);
...@@ -1375,8 +1415,7 @@ static int sh_mobile_ceu_set_fmt(struct soc_camera_device *icd, ...@@ -1375,8 +1415,7 @@ static int sh_mobile_ceu_set_fmt(struct soc_camera_device *icd,
struct sh_mobile_ceu_dev *pcdev = ici->priv; struct sh_mobile_ceu_dev *pcdev = ici->priv;
struct sh_mobile_ceu_cam *cam = icd->host_priv; struct sh_mobile_ceu_cam *cam = icd->host_priv;
struct v4l2_pix_format *pix = &f->fmt.pix; struct v4l2_pix_format *pix = &f->fmt.pix;
struct v4l2_format cam_f = *f; struct v4l2_mbus_framefmt mf;
struct v4l2_pix_format *cam_pix = &cam_f.fmt.pix;
struct v4l2_subdev *sd = soc_camera_to_subdev(icd); struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
struct device *dev = icd->dev.parent; struct device *dev = icd->dev.parent;
__u32 pixfmt = pix->pixelformat; __u32 pixfmt = pix->pixelformat;
...@@ -1445,9 +1484,11 @@ static int sh_mobile_ceu_set_fmt(struct soc_camera_device *icd, ...@@ -1445,9 +1484,11 @@ static int sh_mobile_ceu_set_fmt(struct soc_camera_device *icd,
* 4. Calculate camera output window by applying combined scales to real * 4. Calculate camera output window by applying combined scales to real
* input window. * input window.
*/ */
cam_pix->width = scale_down(cam_rect->width, scale_h); mf.width = scale_down(cam_rect->width, scale_h);
cam_pix->height = scale_down(cam_rect->height, scale_v); mf.height = scale_down(cam_rect->height, scale_v);
cam_pix->pixelformat = xlate->cam_fmt->fourcc; mf.field = pix->field;
mf.colorspace = pix->colorspace;
mf.code = xlate->code;
switch (pixfmt) { switch (pixfmt) {
case V4L2_PIX_FMT_NV12: case V4L2_PIX_FMT_NV12:
...@@ -1460,11 +1501,10 @@ static int sh_mobile_ceu_set_fmt(struct soc_camera_device *icd, ...@@ -1460,11 +1501,10 @@ static int sh_mobile_ceu_set_fmt(struct soc_camera_device *icd,
image_mode = false; image_mode = false;
} }
dev_geo(dev, "4: camera output %ux%u\n", dev_geo(dev, "4: camera output %ux%u\n", mf.width, mf.height);
cam_pix->width, cam_pix->height);
/* 5. - 9. */ /* 5. - 9. */
ret = client_scale(icd, cam_rect, &cam_subrect, &ceu_rect, &cam_f, ret = client_scale(icd, cam_rect, &cam_subrect, &ceu_rect, &mf,
image_mode && !is_interlaced); image_mode && !is_interlaced);
dev_geo(dev, "5-9: client scale %d\n", ret); dev_geo(dev, "5-9: client scale %d\n", ret);
...@@ -1472,37 +1512,48 @@ static int sh_mobile_ceu_set_fmt(struct soc_camera_device *icd, ...@@ -1472,37 +1512,48 @@ static int sh_mobile_ceu_set_fmt(struct soc_camera_device *icd,
/* Done with the camera. Now see if we can improve the result */ /* Done with the camera. Now see if we can improve the result */
dev_dbg(dev, "Camera %d fmt %ux%u, requested %ux%u\n", dev_dbg(dev, "Camera %d fmt %ux%u, requested %ux%u\n",
ret, cam_pix->width, cam_pix->height, pix->width, pix->height); ret, mf.width, mf.height, pix->width, pix->height);
if (ret < 0) if (ret < 0)
return ret; return ret;
if (mf.code != xlate->code)
return -EINVAL;
/* 10. Use CEU scaling to scale to the requested user window. */ /* 10. Use CEU scaling to scale to the requested user window. */
/* We cannot scale up */ /* We cannot scale up */
if (pix->width > cam_pix->width) if (pix->width > mf.width)
pix->width = cam_pix->width; pix->width = mf.width;
if (pix->width > ceu_rect.width) if (pix->width > ceu_rect.width)
pix->width = ceu_rect.width; pix->width = ceu_rect.width;
if (pix->height > cam_pix->height) if (pix->height > mf.height)
pix->height = cam_pix->height; pix->height = mf.height;
if (pix->height > ceu_rect.height) if (pix->height > ceu_rect.height)
pix->height = ceu_rect.height; pix->height = ceu_rect.height;
/* Let's rock: scale pix->{width x height} down to width x height */ pix->colorspace = mf.colorspace;
scale_h = calc_scale(ceu_rect.width, &pix->width);
scale_v = calc_scale(ceu_rect.height, &pix->height); if (image_mode) {
/* Scale pix->{width x height} down to width x height */
scale_h = calc_scale(ceu_rect.width, &pix->width);
scale_v = calc_scale(ceu_rect.height, &pix->height);
pcdev->cflcr = scale_h | (scale_v << 16);
} else {
pix->width = ceu_rect.width;
pix->height = ceu_rect.height;
scale_h = scale_v = 0;
pcdev->cflcr = 0;
}
dev_geo(dev, "10: W: %u : 0x%x = %u, H: %u : 0x%x = %u\n", dev_geo(dev, "10: W: %u : 0x%x = %u, H: %u : 0x%x = %u\n",
ceu_rect.width, scale_h, pix->width, ceu_rect.width, scale_h, pix->width,
ceu_rect.height, scale_v, pix->height); ceu_rect.height, scale_v, pix->height);
pcdev->cflcr = scale_h | (scale_v << 16); cam->code = xlate->code;
cam->ceu_rect = ceu_rect;
icd->buswidth = xlate->buswidth; icd->current_fmt = xlate;
icd->current_fmt = xlate->host_fmt;
cam->camera_fmt = xlate->cam_fmt;
cam->ceu_rect = ceu_rect;
pcdev->is_interlaced = is_interlaced; pcdev->is_interlaced = is_interlaced;
pcdev->image_mode = image_mode; pcdev->image_mode = image_mode;
...@@ -1516,6 +1567,7 @@ static int sh_mobile_ceu_try_fmt(struct soc_camera_device *icd, ...@@ -1516,6 +1567,7 @@ static int sh_mobile_ceu_try_fmt(struct soc_camera_device *icd,
const struct soc_camera_format_xlate *xlate; const struct soc_camera_format_xlate *xlate;
struct v4l2_pix_format *pix = &f->fmt.pix; struct v4l2_pix_format *pix = &f->fmt.pix;
struct v4l2_subdev *sd = soc_camera_to_subdev(icd); struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
struct v4l2_mbus_framefmt mf;
__u32 pixfmt = pix->pixelformat; __u32 pixfmt = pix->pixelformat;
int width, height; int width, height;
int ret; int ret;
...@@ -1534,18 +1586,27 @@ static int sh_mobile_ceu_try_fmt(struct soc_camera_device *icd, ...@@ -1534,18 +1586,27 @@ static int sh_mobile_ceu_try_fmt(struct soc_camera_device *icd,
width = pix->width; width = pix->width;
height = pix->height; height = pix->height;
pix->bytesperline = pix->width * pix->bytesperline = soc_mbus_bytes_per_line(width, xlate->host_fmt);
DIV_ROUND_UP(xlate->host_fmt->depth, 8); if (pix->bytesperline < 0)
pix->sizeimage = pix->height * pix->bytesperline; return pix->bytesperline;
pix->sizeimage = height * pix->bytesperline;
pix->pixelformat = xlate->cam_fmt->fourcc;
/* limit to sensor capabilities */ /* limit to sensor capabilities */
ret = v4l2_subdev_call(sd, video, try_fmt, f); mf.width = pix->width;
pix->pixelformat = pixfmt; mf.height = pix->height;
mf.field = pix->field;
mf.code = xlate->code;
mf.colorspace = pix->colorspace;
ret = v4l2_subdev_call(sd, video, try_mbus_fmt, &mf);
if (ret < 0) if (ret < 0)
return ret; return ret;
pix->width = mf.width;
pix->height = mf.height;
pix->field = mf.field;
pix->colorspace = mf.colorspace;
switch (pixfmt) { switch (pixfmt) {
case V4L2_PIX_FMT_NV12: case V4L2_PIX_FMT_NV12:
case V4L2_PIX_FMT_NV21: case V4L2_PIX_FMT_NV21:
...@@ -1554,21 +1615,25 @@ static int sh_mobile_ceu_try_fmt(struct soc_camera_device *icd, ...@@ -1554,21 +1615,25 @@ static int sh_mobile_ceu_try_fmt(struct soc_camera_device *icd,
/* FIXME: check against rect_max after converting soc-camera */ /* FIXME: check against rect_max after converting soc-camera */
/* We can scale precisely, need a bigger image from camera */ /* We can scale precisely, need a bigger image from camera */
if (pix->width < width || pix->height < height) { if (pix->width < width || pix->height < height) {
int tmp_w = pix->width, tmp_h = pix->height; /*
pix->width = 2560; * We presume the sensor behaves sanely, i.e., if we
pix->height = 1920; * request a bigger rectangle, it will not return a
ret = v4l2_subdev_call(sd, video, try_fmt, f); * smaller one.
*/
mf.width = 2560;
mf.height = 1920;
ret = v4l2_subdev_call(sd, video, try_mbus_fmt, &mf);
if (ret < 0) { if (ret < 0) {
/* Shouldn't actually happen... */ /* Shouldn't actually happen... */
dev_err(icd->dev.parent, dev_err(icd->dev.parent,
"FIXME: try_fmt() returned %d\n", ret); "FIXME: client try_fmt() = %d\n", ret);
pix->width = tmp_w; return ret;
pix->height = tmp_h;
} }
} }
if (pix->width > width) /* We will scale exactly */
if (mf.width > width)
pix->width = width; pix->width = width;
if (pix->height > height) if (mf.height > height)
pix->height = height; pix->height = height;
} }
...@@ -1663,7 +1728,7 @@ static int sh_mobile_ceu_set_ctrl(struct soc_camera_device *icd, ...@@ -1663,7 +1728,7 @@ static int sh_mobile_ceu_set_ctrl(struct soc_camera_device *icd,
switch (ctrl->id) { switch (ctrl->id) {
case V4L2_CID_SHARPNESS: case V4L2_CID_SHARPNESS:
switch (icd->current_fmt->fourcc) { switch (icd->current_fmt->host_fmt->fourcc) {
case V4L2_PIX_FMT_NV12: case V4L2_PIX_FMT_NV12:
case V4L2_PIX_FMT_NV21: case V4L2_PIX_FMT_NV21:
case V4L2_PIX_FMT_NV16: case V4L2_PIX_FMT_NV16:
......
...@@ -31,6 +31,7 @@ ...@@ -31,6 +31,7 @@
#include <media/v4l2-ioctl.h> #include <media/v4l2-ioctl.h>
#include <media/v4l2-dev.h> #include <media/v4l2-dev.h>
#include <media/videobuf-core.h> #include <media/videobuf-core.h>
#include <media/soc_mediabus.h>
/* Default to VGA resolution */ /* Default to VGA resolution */
#define DEFAULT_WIDTH 640 #define DEFAULT_WIDTH 640
...@@ -40,18 +41,6 @@ static LIST_HEAD(hosts); ...@@ -40,18 +41,6 @@ static LIST_HEAD(hosts);
static LIST_HEAD(devices); static LIST_HEAD(devices);
static DEFINE_MUTEX(list_lock); /* Protects the list of hosts */ static DEFINE_MUTEX(list_lock); /* Protects the list of hosts */
const struct soc_camera_data_format *soc_camera_format_by_fourcc(
struct soc_camera_device *icd, unsigned int fourcc)
{
unsigned int i;
for (i = 0; i < icd->num_formats; i++)
if (icd->formats[i].fourcc == fourcc)
return icd->formats + i;
return NULL;
}
EXPORT_SYMBOL(soc_camera_format_by_fourcc);
const struct soc_camera_format_xlate *soc_camera_xlate_by_fourcc( const struct soc_camera_format_xlate *soc_camera_xlate_by_fourcc(
struct soc_camera_device *icd, unsigned int fourcc) struct soc_camera_device *icd, unsigned int fourcc)
{ {
...@@ -207,21 +196,26 @@ static int soc_camera_dqbuf(struct file *file, void *priv, ...@@ -207,21 +196,26 @@ static int soc_camera_dqbuf(struct file *file, void *priv,
/* Always entered with .video_lock held */ /* Always entered with .video_lock held */
static int soc_camera_init_user_formats(struct soc_camera_device *icd) static int soc_camera_init_user_formats(struct soc_camera_device *icd)
{ {
struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent);
int i, fmts = 0, ret; int i, fmts = 0, raw_fmts = 0, ret;
enum v4l2_mbus_pixelcode code;
while (!v4l2_subdev_call(sd, video, enum_mbus_fmt, raw_fmts, &code))
raw_fmts++;
if (!ici->ops->get_formats) if (!ici->ops->get_formats)
/* /*
* Fallback mode - the host will have to serve all * Fallback mode - the host will have to serve all
* sensor-provided formats one-to-one to the user * sensor-provided formats one-to-one to the user
*/ */
fmts = icd->num_formats; fmts = raw_fmts;
else else
/* /*
* First pass - only count formats this host-sensor * First pass - only count formats this host-sensor
* configuration can provide * configuration can provide
*/ */
for (i = 0; i < icd->num_formats; i++) { for (i = 0; i < raw_fmts; i++) {
ret = ici->ops->get_formats(icd, i, NULL); ret = ici->ops->get_formats(icd, i, NULL);
if (ret < 0) if (ret < 0)
return ret; return ret;
...@@ -242,11 +236,12 @@ static int soc_camera_init_user_formats(struct soc_camera_device *icd) ...@@ -242,11 +236,12 @@ static int soc_camera_init_user_formats(struct soc_camera_device *icd)
/* Second pass - actually fill data formats */ /* Second pass - actually fill data formats */
fmts = 0; fmts = 0;
for (i = 0; i < icd->num_formats; i++) for (i = 0; i < raw_fmts; i++)
if (!ici->ops->get_formats) { if (!ici->ops->get_formats) {
icd->user_formats[i].host_fmt = icd->formats + i; v4l2_subdev_call(sd, video, enum_mbus_fmt, i, &code);
icd->user_formats[i].cam_fmt = icd->formats + i; icd->user_formats[i].host_fmt =
icd->user_formats[i].buswidth = icd->formats[i].depth; soc_mbus_get_fmtdesc(code);
icd->user_formats[i].code = code;
} else { } else {
ret = ici->ops->get_formats(icd, i, ret = ici->ops->get_formats(icd, i,
&icd->user_formats[fmts]); &icd->user_formats[fmts]);
...@@ -255,7 +250,7 @@ static int soc_camera_init_user_formats(struct soc_camera_device *icd) ...@@ -255,7 +250,7 @@ static int soc_camera_init_user_formats(struct soc_camera_device *icd)
fmts += ret; fmts += ret;
} }
icd->current_fmt = icd->user_formats[0].host_fmt; icd->current_fmt = &icd->user_formats[0];
return 0; return 0;
...@@ -281,7 +276,7 @@ static void soc_camera_free_user_formats(struct soc_camera_device *icd) ...@@ -281,7 +276,7 @@ static void soc_camera_free_user_formats(struct soc_camera_device *icd)
#define pixfmtstr(x) (x) & 0xff, ((x) >> 8) & 0xff, ((x) >> 16) & 0xff, \ #define pixfmtstr(x) (x) & 0xff, ((x) >> 8) & 0xff, ((x) >> 16) & 0xff, \
((x) >> 24) & 0xff ((x) >> 24) & 0xff
/* Called with .vb_lock held */ /* Called with .vb_lock held, or from the first open(2), see comment there */
static int soc_camera_set_fmt(struct soc_camera_file *icf, static int soc_camera_set_fmt(struct soc_camera_file *icf,
struct v4l2_format *f) struct v4l2_format *f)
{ {
...@@ -302,7 +297,7 @@ static int soc_camera_set_fmt(struct soc_camera_file *icf, ...@@ -302,7 +297,7 @@ static int soc_camera_set_fmt(struct soc_camera_file *icf,
if (ret < 0) { if (ret < 0) {
return ret; return ret;
} else if (!icd->current_fmt || } else if (!icd->current_fmt ||
icd->current_fmt->fourcc != pix->pixelformat) { icd->current_fmt->host_fmt->fourcc != pix->pixelformat) {
dev_err(&icd->dev, dev_err(&icd->dev,
"Host driver hasn't set up current format correctly!\n"); "Host driver hasn't set up current format correctly!\n");
return -EINVAL; return -EINVAL;
...@@ -310,6 +305,7 @@ static int soc_camera_set_fmt(struct soc_camera_file *icf, ...@@ -310,6 +305,7 @@ static int soc_camera_set_fmt(struct soc_camera_file *icf,
icd->user_width = pix->width; icd->user_width = pix->width;
icd->user_height = pix->height; icd->user_height = pix->height;
icd->colorspace = pix->colorspace;
icf->vb_vidq.field = icf->vb_vidq.field =
icd->field = pix->field; icd->field = pix->field;
...@@ -369,8 +365,9 @@ static int soc_camera_open(struct file *file) ...@@ -369,8 +365,9 @@ static int soc_camera_open(struct file *file)
.width = icd->user_width, .width = icd->user_width,
.height = icd->user_height, .height = icd->user_height,
.field = icd->field, .field = icd->field,
.pixelformat = icd->current_fmt->fourcc, .colorspace = icd->colorspace,
.colorspace = icd->current_fmt->colorspace, .pixelformat =
icd->current_fmt->host_fmt->fourcc,
}, },
}; };
...@@ -390,7 +387,12 @@ static int soc_camera_open(struct file *file) ...@@ -390,7 +387,12 @@ static int soc_camera_open(struct file *file)
goto eiciadd; goto eiciadd;
} }
/* Try to configure with default parameters */ /*
* Try to configure with default parameters. Notice: this is the
* very first open, so we cannot race against other calls,
* apart from someone else calling open() simultaneously, but
* .video_lock is protecting us against it.
*/
ret = soc_camera_set_fmt(icf, &f); ret = soc_camera_set_fmt(icf, &f);
if (ret < 0) if (ret < 0)
goto esfmt; goto esfmt;
...@@ -534,7 +536,7 @@ static int soc_camera_enum_fmt_vid_cap(struct file *file, void *priv, ...@@ -534,7 +536,7 @@ static int soc_camera_enum_fmt_vid_cap(struct file *file, void *priv,
{ {
struct soc_camera_file *icf = file->private_data; struct soc_camera_file *icf = file->private_data;
struct soc_camera_device *icd = icf->icd; struct soc_camera_device *icd = icf->icd;
const struct soc_camera_data_format *format; const struct soc_mbus_pixelfmt *format;
WARN_ON(priv != file->private_data); WARN_ON(priv != file->private_data);
...@@ -543,7 +545,8 @@ static int soc_camera_enum_fmt_vid_cap(struct file *file, void *priv, ...@@ -543,7 +545,8 @@ static int soc_camera_enum_fmt_vid_cap(struct file *file, void *priv,
format = icd->user_formats[f->index].host_fmt; format = icd->user_formats[f->index].host_fmt;
strlcpy(f->description, format->name, sizeof(f->description)); if (format->name)
strlcpy(f->description, format->name, sizeof(f->description));
f->pixelformat = format->fourcc; f->pixelformat = format->fourcc;
return 0; return 0;
} }
...@@ -560,12 +563,15 @@ static int soc_camera_g_fmt_vid_cap(struct file *file, void *priv, ...@@ -560,12 +563,15 @@ static int soc_camera_g_fmt_vid_cap(struct file *file, void *priv,
pix->width = icd->user_width; pix->width = icd->user_width;
pix->height = icd->user_height; pix->height = icd->user_height;
pix->field = icf->vb_vidq.field; pix->field = icf->vb_vidq.field;
pix->pixelformat = icd->current_fmt->fourcc; pix->pixelformat = icd->current_fmt->host_fmt->fourcc;
pix->bytesperline = pix->width * pix->bytesperline = soc_mbus_bytes_per_line(pix->width,
DIV_ROUND_UP(icd->current_fmt->depth, 8); icd->current_fmt->host_fmt);
pix->colorspace = icd->colorspace;
if (pix->bytesperline < 0)
return pix->bytesperline;
pix->sizeimage = pix->height * pix->bytesperline; pix->sizeimage = pix->height * pix->bytesperline;
dev_dbg(&icd->dev, "current_fmt->fourcc: 0x%08x\n", dev_dbg(&icd->dev, "current_fmt->fourcc: 0x%08x\n",
icd->current_fmt->fourcc); icd->current_fmt->host_fmt->fourcc);
return 0; return 0;
} }
...@@ -894,7 +900,7 @@ static int soc_camera_probe(struct device *dev) ...@@ -894,7 +900,7 @@ static int soc_camera_probe(struct device *dev)
struct soc_camera_link *icl = to_soc_camera_link(icd); struct soc_camera_link *icl = to_soc_camera_link(icd);
struct device *control = NULL; struct device *control = NULL;
struct v4l2_subdev *sd; struct v4l2_subdev *sd;
struct v4l2_format f = {.type = V4L2_BUF_TYPE_VIDEO_CAPTURE}; struct v4l2_mbus_framefmt mf;
int ret; int ret;
dev_info(dev, "Probing %s\n", dev_name(dev)); dev_info(dev, "Probing %s\n", dev_name(dev));
...@@ -965,9 +971,11 @@ static int soc_camera_probe(struct device *dev) ...@@ -965,9 +971,11 @@ static int soc_camera_probe(struct device *dev)
/* Try to improve our guess of a reasonable window format */ /* Try to improve our guess of a reasonable window format */
sd = soc_camera_to_subdev(icd); sd = soc_camera_to_subdev(icd);
if (!v4l2_subdev_call(sd, video, g_fmt, &f)) { if (!v4l2_subdev_call(sd, video, g_mbus_fmt, &mf)) {
icd->user_width = f.fmt.pix.width; icd->user_width = mf.width;
icd->user_height = f.fmt.pix.height; icd->user_height = mf.height;
icd->colorspace = mf.colorspace;
icd->field = mf.field;
} }
/* Do we have to sysfs_remove_link() before device_unregister()? */ /* Do we have to sysfs_remove_link() before device_unregister()? */
......
...@@ -22,7 +22,6 @@ ...@@ -22,7 +22,6 @@
struct soc_camera_platform_priv { struct soc_camera_platform_priv {
struct v4l2_subdev subdev; struct v4l2_subdev subdev;
struct soc_camera_data_format format;
}; };
static struct soc_camera_platform_priv *get_priv(struct platform_device *pdev) static struct soc_camera_platform_priv *get_priv(struct platform_device *pdev)
...@@ -58,36 +57,36 @@ soc_camera_platform_query_bus_param(struct soc_camera_device *icd) ...@@ -58,36 +57,36 @@ soc_camera_platform_query_bus_param(struct soc_camera_device *icd)
} }
static int soc_camera_platform_try_fmt(struct v4l2_subdev *sd, static int soc_camera_platform_try_fmt(struct v4l2_subdev *sd,
struct v4l2_format *f) struct v4l2_mbus_framefmt *mf)
{ {
struct soc_camera_platform_info *p = v4l2_get_subdevdata(sd); struct soc_camera_platform_info *p = v4l2_get_subdevdata(sd);
struct v4l2_pix_format *pix = &f->fmt.pix;
pix->width = p->format.width; mf->width = p->format.width;
pix->height = p->format.height; mf->height = p->format.height;
mf->code = p->format.code;
mf->colorspace = p->format.colorspace;
return 0; return 0;
} }
static void soc_camera_platform_video_probe(struct soc_camera_device *icd, static struct v4l2_subdev_core_ops platform_subdev_core_ops;
struct platform_device *pdev)
static int soc_camera_platform_enum_fmt(struct v4l2_subdev *sd, int index,
enum v4l2_mbus_pixelcode *code)
{ {
struct soc_camera_platform_priv *priv = get_priv(pdev); struct soc_camera_platform_info *p = v4l2_get_subdevdata(sd);
struct soc_camera_platform_info *p = pdev->dev.platform_data;
priv->format.name = p->format_name; if (index)
priv->format.depth = p->format_depth; return -EINVAL;
priv->format.fourcc = p->format.pixelformat;
priv->format.colorspace = p->format.colorspace;
icd->formats = &priv->format; *code = p->format.code;
icd->num_formats = 1; return 0;
} }
static struct v4l2_subdev_core_ops platform_subdev_core_ops;
static struct v4l2_subdev_video_ops platform_subdev_video_ops = { static struct v4l2_subdev_video_ops platform_subdev_video_ops = {
.s_stream = soc_camera_platform_s_stream, .s_stream = soc_camera_platform_s_stream,
.try_fmt = soc_camera_platform_try_fmt, .try_mbus_fmt = soc_camera_platform_try_fmt,
.enum_mbus_fmt = soc_camera_platform_enum_fmt,
}; };
static struct v4l2_subdev_ops platform_subdev_ops = { static struct v4l2_subdev_ops platform_subdev_ops = {
...@@ -128,12 +127,10 @@ static int soc_camera_platform_probe(struct platform_device *pdev) ...@@ -128,12 +127,10 @@ static int soc_camera_platform_probe(struct platform_device *pdev)
/* Set the control device reference */ /* Set the control device reference */
dev_set_drvdata(&icd->dev, &pdev->dev); dev_set_drvdata(&icd->dev, &pdev->dev);
icd->ops = &soc_camera_platform_ops; icd->ops = &soc_camera_platform_ops;
ici = to_soc_camera_host(icd->dev.parent); ici = to_soc_camera_host(icd->dev.parent);
soc_camera_platform_video_probe(icd, pdev);
v4l2_subdev_init(&priv->subdev, &platform_subdev_ops); v4l2_subdev_init(&priv->subdev, &platform_subdev_ops);
v4l2_set_subdevdata(&priv->subdev, p); v4l2_set_subdevdata(&priv->subdev, p);
strncpy(priv->subdev.name, dev_name(&pdev->dev), V4L2_SUBDEV_NAME_SIZE); strncpy(priv->subdev.name, dev_name(&pdev->dev), V4L2_SUBDEV_NAME_SIZE);
......
...@@ -251,15 +251,6 @@ static const struct regval_list tw9910_default_regs[] = ...@@ -251,15 +251,6 @@ static const struct regval_list tw9910_default_regs[] =
ENDMARKER, ENDMARKER,
}; };
static const struct soc_camera_data_format tw9910_color_fmt[] = {
{
.name = "VYUY",
.fourcc = V4L2_PIX_FMT_VYUY,
.depth = 16,
.colorspace = V4L2_COLORSPACE_SMPTE170M,
}
};
static const struct tw9910_scale_ctrl tw9910_ntsc_scales[] = { static const struct tw9910_scale_ctrl tw9910_ntsc_scales[] = {
{ {
.name = "NTSC SQ", .name = "NTSC SQ",
...@@ -814,11 +805,11 @@ static int tw9910_cropcap(struct v4l2_subdev *sd, struct v4l2_cropcap *a) ...@@ -814,11 +805,11 @@ static int tw9910_cropcap(struct v4l2_subdev *sd, struct v4l2_cropcap *a)
return 0; return 0;
} }
static int tw9910_g_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) static int tw9910_g_fmt(struct v4l2_subdev *sd,
struct v4l2_mbus_framefmt *mf)
{ {
struct i2c_client *client = sd->priv; struct i2c_client *client = sd->priv;
struct tw9910_priv *priv = to_tw9910(client); struct tw9910_priv *priv = to_tw9910(client);
struct v4l2_pix_format *pix = &f->fmt.pix;
if (!priv->scale) { if (!priv->scale) {
int ret; int ret;
...@@ -835,74 +826,76 @@ static int tw9910_g_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) ...@@ -835,74 +826,76 @@ static int tw9910_g_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
return ret; return ret;
} }
f->type = V4L2_BUF_TYPE_VIDEO_CAPTURE; mf->width = priv->scale->width;
mf->height = priv->scale->height;
pix->width = priv->scale->width; mf->code = V4L2_MBUS_FMT_YVYU8_2X8_BE;
pix->height = priv->scale->height; mf->colorspace = V4L2_COLORSPACE_JPEG;
pix->pixelformat = V4L2_PIX_FMT_VYUY; mf->field = V4L2_FIELD_INTERLACED;
pix->colorspace = V4L2_COLORSPACE_SMPTE170M;
pix->field = V4L2_FIELD_INTERLACED;
return 0; return 0;
} }
static int tw9910_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) static int tw9910_s_fmt(struct v4l2_subdev *sd,
struct v4l2_mbus_framefmt *mf)
{ {
struct i2c_client *client = sd->priv; struct i2c_client *client = sd->priv;
struct tw9910_priv *priv = to_tw9910(client); struct tw9910_priv *priv = to_tw9910(client);
struct v4l2_pix_format *pix = &f->fmt.pix;
/* See tw9910_s_crop() - no proper cropping support */ /* See tw9910_s_crop() - no proper cropping support */
struct v4l2_crop a = { struct v4l2_crop a = {
.c = { .c = {
.left = 0, .left = 0,
.top = 0, .top = 0,
.width = pix->width, .width = mf->width,
.height = pix->height, .height = mf->height,
}, },
}; };
int i, ret; int ret;
WARN_ON(mf->field != V4L2_FIELD_ANY &&
mf->field != V4L2_FIELD_INTERLACED);
/* /*
* check color format * check color format
*/ */
for (i = 0; i < ARRAY_SIZE(tw9910_color_fmt); i++) if (mf->code != V4L2_MBUS_FMT_YVYU8_2X8_BE)
if (pix->pixelformat == tw9910_color_fmt[i].fourcc)
break;
if (i == ARRAY_SIZE(tw9910_color_fmt))
return -EINVAL; return -EINVAL;
mf->colorspace = V4L2_COLORSPACE_JPEG;
ret = tw9910_s_crop(sd, &a); ret = tw9910_s_crop(sd, &a);
if (!ret) { if (!ret) {
pix->width = priv->scale->width; mf->width = priv->scale->width;
pix->height = priv->scale->height; mf->height = priv->scale->height;
} }
return ret; return ret;
} }
static int tw9910_try_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) static int tw9910_try_fmt(struct v4l2_subdev *sd,
struct v4l2_mbus_framefmt *mf)
{ {
struct i2c_client *client = sd->priv; struct i2c_client *client = sd->priv;
struct soc_camera_device *icd = client->dev.platform_data; struct soc_camera_device *icd = client->dev.platform_data;
struct v4l2_pix_format *pix = &f->fmt.pix;
const struct tw9910_scale_ctrl *scale; const struct tw9910_scale_ctrl *scale;
if (V4L2_FIELD_ANY == pix->field) { if (V4L2_FIELD_ANY == mf->field) {
pix->field = V4L2_FIELD_INTERLACED; mf->field = V4L2_FIELD_INTERLACED;
} else if (V4L2_FIELD_INTERLACED != pix->field) { } else if (V4L2_FIELD_INTERLACED != mf->field) {
dev_err(&client->dev, "Field type invalid.\n"); dev_err(&client->dev, "Field type %d invalid.\n", mf->field);
return -EINVAL; return -EINVAL;
} }
mf->code = V4L2_MBUS_FMT_YVYU8_2X8_BE;
mf->colorspace = V4L2_COLORSPACE_JPEG;
/* /*
* select suitable norm * select suitable norm
*/ */
scale = tw9910_select_norm(icd, pix->width, pix->height); scale = tw9910_select_norm(icd, mf->width, mf->height);
if (!scale) if (!scale)
return -EINVAL; return -EINVAL;
pix->width = scale->width; mf->width = scale->width;
pix->height = scale->height; mf->height = scale->height;
return 0; return 0;
} }
...@@ -930,9 +923,6 @@ static int tw9910_video_probe(struct soc_camera_device *icd, ...@@ -930,9 +923,6 @@ static int tw9910_video_probe(struct soc_camera_device *icd,
return -ENODEV; return -ENODEV;
} }
icd->formats = tw9910_color_fmt;
icd->num_formats = ARRAY_SIZE(tw9910_color_fmt);
/* /*
* check and show Product ID * check and show Product ID
* So far only revisions 0 and 1 have been seen * So far only revisions 0 and 1 have been seen
...@@ -973,14 +963,25 @@ static struct v4l2_subdev_core_ops tw9910_subdev_core_ops = { ...@@ -973,14 +963,25 @@ static struct v4l2_subdev_core_ops tw9910_subdev_core_ops = {
#endif #endif
}; };
static int tw9910_enum_fmt(struct v4l2_subdev *sd, int index,
enum v4l2_mbus_pixelcode *code)
{
if (index)
return -EINVAL;
*code = V4L2_MBUS_FMT_YVYU8_2X8_BE;
return 0;
}
static struct v4l2_subdev_video_ops tw9910_subdev_video_ops = { static struct v4l2_subdev_video_ops tw9910_subdev_video_ops = {
.s_stream = tw9910_s_stream, .s_stream = tw9910_s_stream,
.g_fmt = tw9910_g_fmt, .g_mbus_fmt = tw9910_g_fmt,
.s_fmt = tw9910_s_fmt, .s_mbus_fmt = tw9910_s_fmt,
.try_fmt = tw9910_try_fmt, .try_mbus_fmt = tw9910_try_fmt,
.cropcap = tw9910_cropcap, .cropcap = tw9910_cropcap,
.g_crop = tw9910_g_crop, .g_crop = tw9910_g_crop,
.s_crop = tw9910_s_crop, .s_crop = tw9910_s_crop,
.enum_mbus_fmt = tw9910_enum_fmt,
}; };
static struct v4l2_subdev_ops tw9910_subdev_ops = { static struct v4l2_subdev_ops tw9910_subdev_ops = {
......
...@@ -24,15 +24,13 @@ struct soc_camera_device { ...@@ -24,15 +24,13 @@ struct soc_camera_device {
struct device *pdev; /* Platform device */ struct device *pdev; /* Platform device */
s32 user_width; s32 user_width;
s32 user_height; s32 user_height;
enum v4l2_colorspace colorspace;
unsigned char iface; /* Host number */ unsigned char iface; /* Host number */
unsigned char devnum; /* Device number per host */ unsigned char devnum; /* Device number per host */
unsigned char buswidth; /* See comment in .c */
struct soc_camera_sense *sense; /* See comment in struct definition */ struct soc_camera_sense *sense; /* See comment in struct definition */
struct soc_camera_ops *ops; struct soc_camera_ops *ops;
struct video_device *vdev; struct video_device *vdev;
const struct soc_camera_data_format *current_fmt; const struct soc_camera_format_xlate *current_fmt;
const struct soc_camera_data_format *formats;
int num_formats;
struct soc_camera_format_xlate *user_formats; struct soc_camera_format_xlate *user_formats;
int num_user_formats; int num_user_formats;
enum v4l2_field field; /* Preserve field over close() */ enum v4l2_field field; /* Preserve field over close() */
...@@ -161,23 +159,13 @@ static inline struct v4l2_subdev *soc_camera_to_subdev( ...@@ -161,23 +159,13 @@ static inline struct v4l2_subdev *soc_camera_to_subdev(
int soc_camera_host_register(struct soc_camera_host *ici); int soc_camera_host_register(struct soc_camera_host *ici);
void soc_camera_host_unregister(struct soc_camera_host *ici); void soc_camera_host_unregister(struct soc_camera_host *ici);
const struct soc_camera_data_format *soc_camera_format_by_fourcc(
struct soc_camera_device *icd, unsigned int fourcc);
const struct soc_camera_format_xlate *soc_camera_xlate_by_fourcc( const struct soc_camera_format_xlate *soc_camera_xlate_by_fourcc(
struct soc_camera_device *icd, unsigned int fourcc); struct soc_camera_device *icd, unsigned int fourcc);
struct soc_camera_data_format {
const char *name;
unsigned int depth;
__u32 fourcc;
enum v4l2_colorspace colorspace;
};
/** /**
* struct soc_camera_format_xlate - match between host and sensor formats * struct soc_camera_format_xlate - match between host and sensor formats
* @cam_fmt: sensor format provided by the sensor * @code: code of a sensor provided format
* @host_fmt: host format after host translation from cam_fmt * @host_fmt: host format after host translation from code
* @buswidth: bus width for this format
* *
* Host and sensor translation structure. Used in table of host and sensor * Host and sensor translation structure. Used in table of host and sensor
* formats matchings in soc_camera_device. A host can override the generic list * formats matchings in soc_camera_device. A host can override the generic list
...@@ -185,9 +173,8 @@ struct soc_camera_data_format { ...@@ -185,9 +173,8 @@ struct soc_camera_data_format {
* format setup. * format setup.
*/ */
struct soc_camera_format_xlate { struct soc_camera_format_xlate {
const struct soc_camera_data_format *cam_fmt; enum v4l2_mbus_pixelcode code;
const struct soc_camera_data_format *host_fmt; const struct soc_mbus_pixelfmt *host_fmt;
unsigned char buswidth;
}; };
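For illustration, one translation entry as the sh_mobile_ceu host fills it in get_formats() above: the sensor's media-bus code on one side, the host-produced fourcc descriptor on the other. This is only a sketch; in practice the table is built at runtime and the array reference assumes the CEU driver's format table.

/* Sketch only - the host writes these fields in its get_formats() hook. */
static const struct soc_camera_format_xlate example_xlate = {
	.code		= V4L2_MBUS_FMT_YUYV8_2X8_BE,
	.host_fmt	= &sh_mobile_ceu_formats[0],	/* V4L2_PIX_FMT_NV12 */
};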
struct soc_camera_ops { struct soc_camera_ops {
......
...@@ -19,7 +19,7 @@ struct device; ...@@ -19,7 +19,7 @@ struct device;
struct soc_camera_platform_info { struct soc_camera_platform_info {
const char *format_name; const char *format_name;
unsigned long format_depth; unsigned long format_depth;
struct v4l2_pix_format format; struct v4l2_mbus_framefmt format;
unsigned long bus_param; unsigned long bus_param;
struct device *dev; struct device *dev;
int (*set_capture)(struct soc_camera_platform_info *info, int enable); int (*set_capture)(struct soc_camera_platform_info *info, int enable);
......