author     Guennadi Liakhovetski <g.liakhovetski@gmx.de>   2009-12-11 09:46:49 -0500
committer  Mauro Carvalho Chehab <mchehab@redhat.com>      2009-12-16 06:27:29 -0500
commit     760697beca338599a65484389c7abbe54aedb664 (patch)
tree       515735429d2240629a6f048ab1a7fefaf5299e46
parent     9a74251d8bee7a25fee89a0be3ccea73e01c1a05 (diff)
V4L/DVB (13659): soc-camera: convert to the new mediabus API
Convert the soc-camera core and all soc-camera drivers to the new mediabus
API. This also takes soc-camera client drivers one step closer to being
usable with generic v4l2-subdev host drivers.
Signed-off-by: Guennadi Liakhovetski <g.liakhovetski@gmx.de>
Acked-by: Hans Verkuil <hverkuil@xs4all.nl>
Signed-off-by: Mauro Carvalho Chehab <mchehab@redhat.com>
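
The recurring client-driver pattern in the hunks below, condensed into one sketch: each sensor driver describes its formats as a media-bus pixel code plus a fixed colorspace, looks them up by code, and negotiates frame formats through struct v4l2_mbus_framefmt via the new *_mbus_fmt subdev ops. The sensor_* names are illustrative placeholders, not code from the patch, and the header names assume the 2.6.33-era tree; register setup and locking are omitted.

#include <linux/kernel.h>
#include <media/v4l2-mediabus.h>
#include <media/v4l2-subdev.h>

/* One fixed colorspace per media-bus pixel code, as in mt9m001/mt9v022 */
struct sensor_datafmt {
	enum v4l2_mbus_pixelcode code;
	enum v4l2_colorspace colorspace;
};

static const struct sensor_datafmt sensor_fmts[] = {
	{V4L2_MBUS_FMT_SBGGR10_1X10, V4L2_COLORSPACE_SRGB},
	{V4L2_MBUS_FMT_SBGGR8_1X8, V4L2_COLORSPACE_SRGB},
};

/* Find a data format by its pixel code, NULL if the code is unsupported */
static const struct sensor_datafmt *sensor_find_datafmt(
	enum v4l2_mbus_pixelcode code)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(sensor_fmts); i++)
		if (sensor_fmts[i].code == code)
			return &sensor_fmts[i];

	return NULL;
}

/* try_mbus_fmt: fall back to the first supported code, report colorspace */
static int sensor_try_fmt(struct v4l2_subdev *sd,
			  struct v4l2_mbus_framefmt *mf)
{
	const struct sensor_datafmt *fmt = sensor_find_datafmt(mf->code);

	if (!fmt) {
		fmt = &sensor_fmts[0];
		mf->code = fmt->code;
	}
	mf->colorspace = fmt->colorspace;
	mf->field = V4L2_FIELD_NONE;

	return 0;
}

/* enum_mbus_fmt: hosts build their fourcc translation tables from this */
static int sensor_enum_fmt(struct v4l2_subdev *sd, int index,
			   enum v4l2_mbus_pixelcode *code)
{
	if ((unsigned int)index >= ARRAY_SIZE(sensor_fmts))
		return -EINVAL;

	*code = sensor_fmts[index].code;
	return 0;
}

static struct v4l2_subdev_video_ops sensor_video_ops = {
	.try_mbus_fmt = sensor_try_fmt,
	.enum_mbus_fmt = sensor_enum_fmt,
	/* .s_mbus_fmt and .g_mbus_fmt follow the same shape */
};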
-rw-r--r--   arch/sh/boards/mach-ap325rxa/setup.c         |   3
-rw-r--r--   drivers/media/video/mt9m001.c                | 141
-rw-r--r--   drivers/media/video/mt9m111.c                | 188
-rw-r--r--   drivers/media/video/mt9t031.c                |  66
-rw-r--r--   drivers/media/video/mt9v022.c                | 150
-rw-r--r--   drivers/media/video/mx1_camera.c             |  90
-rw-r--r--   drivers/media/video/mx3_camera.c             | 278
-rw-r--r--   drivers/media/video/ov772x.c                 | 229
-rw-r--r--   drivers/media/video/ov9640.c                 | 107
-rw-r--r--   drivers/media/video/pxa_camera.c             | 272
-rw-r--r--   drivers/media/video/rj54n1cb0c.c             | 201
-rw-r--r--   drivers/media/video/sh_mobile_ceu_camera.c   | 403
-rw-r--r--   drivers/media/video/soc_camera.c             |  78
-rw-r--r--   drivers/media/video/soc_camera_platform.c    |  39
-rw-r--r--   drivers/media/video/tw9910.c                 |  91
-rw-r--r--   include/media/soc_camera.h                   |  25
-rw-r--r--   include/media/soc_camera_platform.h          |   2
17 files changed, 1383 insertions, 980 deletions
diff --git a/arch/sh/boards/mach-ap325rxa/setup.c b/arch/sh/boards/mach-ap325rxa/setup.c
index 4c8602884573..7a9f69663f1a 100644
--- a/arch/sh/boards/mach-ap325rxa/setup.c
+++ b/arch/sh/boards/mach-ap325rxa/setup.c
@@ -316,8 +316,9 @@ static struct soc_camera_platform_info camera_info = { | |||
316 | .format_name = "UYVY", | 316 | .format_name = "UYVY", |
317 | .format_depth = 16, | 317 | .format_depth = 16, |
318 | .format = { | 318 | .format = { |
319 | .pixelformat = V4L2_PIX_FMT_UYVY, | 319 | .code = V4L2_MBUS_FMT_YUYV8_2X8_BE, |
320 | .colorspace = V4L2_COLORSPACE_SMPTE170M, | 320 | .colorspace = V4L2_COLORSPACE_SMPTE170M, |
321 | .field = V4L2_FIELD_NONE, | ||
321 | .width = 640, | 322 | .width = 640, |
322 | .height = 480, | 323 | .height = 480, |
323 | }, | 324 | }, |
diff --git a/drivers/media/video/mt9m001.c b/drivers/media/video/mt9m001.c
index cc9066000c2d..b62c0bd3f8ea 100644
--- a/drivers/media/video/mt9m001.c
+++ b/drivers/media/video/mt9m001.c
@@ -48,41 +48,46 @@ | |||
48 | #define MT9M001_COLUMN_SKIP 20 | 48 | #define MT9M001_COLUMN_SKIP 20 |
49 | #define MT9M001_ROW_SKIP 12 | 49 | #define MT9M001_ROW_SKIP 12 |
50 | 50 | ||
51 | static const struct soc_camera_data_format mt9m001_colour_formats[] = { | 51 | /* MT9M001 has only one fixed colorspace per pixelcode */ |
52 | struct mt9m001_datafmt { | ||
53 | enum v4l2_mbus_pixelcode code; | ||
54 | enum v4l2_colorspace colorspace; | ||
55 | }; | ||
56 | |||
57 | /* Find a data format by a pixel code in an array */ | ||
58 | static const struct mt9m001_datafmt *mt9m001_find_datafmt( | ||
59 | enum v4l2_mbus_pixelcode code, const struct mt9m001_datafmt *fmt, | ||
60 | int n) | ||
61 | { | ||
62 | int i; | ||
63 | for (i = 0; i < n; i++) | ||
64 | if (fmt[i].code == code) | ||
65 | return fmt + i; | ||
66 | |||
67 | return NULL; | ||
68 | } | ||
69 | |||
70 | static const struct mt9m001_datafmt mt9m001_colour_fmts[] = { | ||
52 | /* | 71 | /* |
53 | * Order important: first natively supported, | 72 | * Order important: first natively supported, |
54 | * second supported with a GPIO extender | 73 | * second supported with a GPIO extender |
55 | */ | 74 | */ |
56 | { | 75 | {V4L2_MBUS_FMT_SBGGR10_1X10, V4L2_COLORSPACE_SRGB}, |
57 | .name = "Bayer (sRGB) 10 bit", | 76 | {V4L2_MBUS_FMT_SBGGR8_1X8, V4L2_COLORSPACE_SRGB}, |
58 | .depth = 10, | ||
59 | .fourcc = V4L2_PIX_FMT_SBGGR16, | ||
60 | .colorspace = V4L2_COLORSPACE_SRGB, | ||
61 | }, { | ||
62 | .name = "Bayer (sRGB) 8 bit", | ||
63 | .depth = 8, | ||
64 | .fourcc = V4L2_PIX_FMT_SBGGR8, | ||
65 | .colorspace = V4L2_COLORSPACE_SRGB, | ||
66 | } | ||
67 | }; | 77 | }; |
68 | 78 | ||
69 | static const struct soc_camera_data_format mt9m001_monochrome_formats[] = { | 79 | static const struct mt9m001_datafmt mt9m001_monochrome_fmts[] = { |
70 | /* Order important - see above */ | 80 | /* Order important - see above */ |
71 | { | 81 | {V4L2_MBUS_FMT_Y10_1X10, V4L2_COLORSPACE_JPEG}, |
72 | .name = "Monochrome 10 bit", | 82 | {V4L2_MBUS_FMT_GREY8_1X8, V4L2_COLORSPACE_JPEG}, |
73 | .depth = 10, | ||
74 | .fourcc = V4L2_PIX_FMT_Y16, | ||
75 | }, { | ||
76 | .name = "Monochrome 8 bit", | ||
77 | .depth = 8, | ||
78 | .fourcc = V4L2_PIX_FMT_GREY, | ||
79 | }, | ||
80 | }; | 83 | }; |
81 | 84 | ||
82 | struct mt9m001 { | 85 | struct mt9m001 { |
83 | struct v4l2_subdev subdev; | 86 | struct v4l2_subdev subdev; |
84 | struct v4l2_rect rect; /* Sensor window */ | 87 | struct v4l2_rect rect; /* Sensor window */ |
85 | __u32 fourcc; | 88 | const struct mt9m001_datafmt *fmt; |
89 | const struct mt9m001_datafmt *fmts; | ||
90 | int num_fmts; | ||
86 | int model; /* V4L2_IDENT_MT9M001* codes from v4l2-chip-ident.h */ | 91 | int model; /* V4L2_IDENT_MT9M001* codes from v4l2-chip-ident.h */ |
87 | unsigned int gain; | 92 | unsigned int gain; |
88 | unsigned int exposure; | 93 | unsigned int exposure; |
@@ -209,8 +214,7 @@ static int mt9m001_s_crop(struct v4l2_subdev *sd, struct v4l2_crop *a) | |||
209 | const u16 hblank = 9, vblank = 25; | 214 | const u16 hblank = 9, vblank = 25; |
210 | unsigned int total_h; | 215 | unsigned int total_h; |
211 | 216 | ||
212 | if (mt9m001->fourcc == V4L2_PIX_FMT_SBGGR8 || | 217 | if (mt9m001->fmts == mt9m001_colour_fmts) |
213 | mt9m001->fourcc == V4L2_PIX_FMT_SBGGR16) | ||
214 | /* | 218 | /* |
215 | * Bayer format - even number of rows for simplicity, | 219 | * Bayer format - even number of rows for simplicity, |
216 | * but let the user play with the top row. | 220 | * but let the user play with the top row. |
@@ -290,32 +294,32 @@ static int mt9m001_cropcap(struct v4l2_subdev *sd, struct v4l2_cropcap *a) | |||
290 | return 0; | 294 | return 0; |
291 | } | 295 | } |
292 | 296 | ||
293 | static int mt9m001_g_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | 297 | static int mt9m001_g_fmt(struct v4l2_subdev *sd, |
298 | struct v4l2_mbus_framefmt *mf) | ||
294 | { | 299 | { |
295 | struct i2c_client *client = sd->priv; | 300 | struct i2c_client *client = sd->priv; |
296 | struct mt9m001 *mt9m001 = to_mt9m001(client); | 301 | struct mt9m001 *mt9m001 = to_mt9m001(client); |
297 | struct v4l2_pix_format *pix = &f->fmt.pix; | ||
298 | 302 | ||
299 | pix->width = mt9m001->rect.width; | 303 | mf->width = mt9m001->rect.width; |
300 | pix->height = mt9m001->rect.height; | 304 | mf->height = mt9m001->rect.height; |
301 | pix->pixelformat = mt9m001->fourcc; | 305 | mf->code = mt9m001->fmt->code; |
302 | pix->field = V4L2_FIELD_NONE; | 306 | mf->colorspace = mt9m001->fmt->colorspace; |
303 | pix->colorspace = V4L2_COLORSPACE_SRGB; | 307 | mf->field = V4L2_FIELD_NONE; |
304 | 308 | ||
305 | return 0; | 309 | return 0; |
306 | } | 310 | } |
307 | 311 | ||
308 | static int mt9m001_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | 312 | static int mt9m001_s_fmt(struct v4l2_subdev *sd, |
313 | struct v4l2_mbus_framefmt *mf) | ||
309 | { | 314 | { |
310 | struct i2c_client *client = sd->priv; | 315 | struct i2c_client *client = sd->priv; |
311 | struct mt9m001 *mt9m001 = to_mt9m001(client); | 316 | struct mt9m001 *mt9m001 = to_mt9m001(client); |
312 | struct v4l2_pix_format *pix = &f->fmt.pix; | ||
313 | struct v4l2_crop a = { | 317 | struct v4l2_crop a = { |
314 | .c = { | 318 | .c = { |
315 | .left = mt9m001->rect.left, | 319 | .left = mt9m001->rect.left, |
316 | .top = mt9m001->rect.top, | 320 | .top = mt9m001->rect.top, |
317 | .width = pix->width, | 321 | .width = mf->width, |
318 | .height = pix->height, | 322 | .height = mf->height, |
319 | }, | 323 | }, |
320 | }; | 324 | }; |
321 | int ret; | 325 | int ret; |
@@ -323,28 +327,39 @@ static int mt9m001_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | |||
323 | /* No support for scaling so far, just crop. TODO: use skipping */ | 327 | /* No support for scaling so far, just crop. TODO: use skipping */ |
324 | ret = mt9m001_s_crop(sd, &a); | 328 | ret = mt9m001_s_crop(sd, &a); |
325 | if (!ret) { | 329 | if (!ret) { |
326 | pix->width = mt9m001->rect.width; | 330 | mf->width = mt9m001->rect.width; |
327 | pix->height = mt9m001->rect.height; | 331 | mf->height = mt9m001->rect.height; |
328 | mt9m001->fourcc = pix->pixelformat; | 332 | mt9m001->fmt = mt9m001_find_datafmt(mf->code, |
333 | mt9m001->fmts, mt9m001->num_fmts); | ||
334 | mf->colorspace = mt9m001->fmt->colorspace; | ||
329 | } | 335 | } |
330 | 336 | ||
331 | return ret; | 337 | return ret; |
332 | } | 338 | } |
333 | 339 | ||
334 | static int mt9m001_try_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | 340 | static int mt9m001_try_fmt(struct v4l2_subdev *sd, |
341 | struct v4l2_mbus_framefmt *mf) | ||
335 | { | 342 | { |
336 | struct i2c_client *client = sd->priv; | 343 | struct i2c_client *client = sd->priv; |
337 | struct mt9m001 *mt9m001 = to_mt9m001(client); | 344 | struct mt9m001 *mt9m001 = to_mt9m001(client); |
338 | struct v4l2_pix_format *pix = &f->fmt.pix; | 345 | const struct mt9m001_datafmt *fmt; |
339 | 346 | ||
340 | v4l_bound_align_image(&pix->width, MT9M001_MIN_WIDTH, | 347 | v4l_bound_align_image(&mf->width, MT9M001_MIN_WIDTH, |
341 | MT9M001_MAX_WIDTH, 1, | 348 | MT9M001_MAX_WIDTH, 1, |
342 | &pix->height, MT9M001_MIN_HEIGHT + mt9m001->y_skip_top, | 349 | &mf->height, MT9M001_MIN_HEIGHT + mt9m001->y_skip_top, |
343 | MT9M001_MAX_HEIGHT + mt9m001->y_skip_top, 0, 0); | 350 | MT9M001_MAX_HEIGHT + mt9m001->y_skip_top, 0, 0); |
344 | 351 | ||
345 | if (pix->pixelformat == V4L2_PIX_FMT_SBGGR8 || | 352 | if (mt9m001->fmts == mt9m001_colour_fmts) |
346 | pix->pixelformat == V4L2_PIX_FMT_SBGGR16) | 353 | mf->height = ALIGN(mf->height - 1, 2); |
347 | pix->height = ALIGN(pix->height - 1, 2); | 354 | |
355 | fmt = mt9m001_find_datafmt(mf->code, mt9m001->fmts, | ||
356 | mt9m001->num_fmts); | ||
357 | if (!fmt) { | ||
358 | fmt = mt9m001->fmt; | ||
359 | mf->code = fmt->code; | ||
360 | } | ||
361 | |||
362 | mf->colorspace = fmt->colorspace; | ||
348 | 363 | ||
349 | return 0; | 364 | return 0; |
350 | } | 365 | } |
@@ -608,11 +623,11 @@ static int mt9m001_video_probe(struct soc_camera_device *icd, | |||
608 | case 0x8411: | 623 | case 0x8411: |
609 | case 0x8421: | 624 | case 0x8421: |
610 | mt9m001->model = V4L2_IDENT_MT9M001C12ST; | 625 | mt9m001->model = V4L2_IDENT_MT9M001C12ST; |
611 | icd->formats = mt9m001_colour_formats; | 626 | mt9m001->fmts = mt9m001_colour_fmts; |
612 | break; | 627 | break; |
613 | case 0x8431: | 628 | case 0x8431: |
614 | mt9m001->model = V4L2_IDENT_MT9M001C12STM; | 629 | mt9m001->model = V4L2_IDENT_MT9M001C12STM; |
615 | icd->formats = mt9m001_monochrome_formats; | 630 | mt9m001->fmts = mt9m001_monochrome_fmts; |
616 | break; | 631 | break; |
617 | default: | 632 | default: |
618 | dev_err(&client->dev, | 633 | dev_err(&client->dev, |
@@ -620,7 +635,7 @@ static int mt9m001_video_probe(struct soc_camera_device *icd, | |||
620 | return -ENODEV; | 635 | return -ENODEV; |
621 | } | 636 | } |
622 | 637 | ||
623 | icd->num_formats = 0; | 638 | mt9m001->num_fmts = 0; |
624 | 639 | ||
625 | /* | 640 | /* |
626 | * This is a 10bit sensor, so by default we only allow 10bit. | 641 | * This is a 10bit sensor, so by default we only allow 10bit. |
@@ -633,14 +648,14 @@ static int mt9m001_video_probe(struct soc_camera_device *icd, | |||
633 | flags = SOCAM_DATAWIDTH_10; | 648 | flags = SOCAM_DATAWIDTH_10; |
634 | 649 | ||
635 | if (flags & SOCAM_DATAWIDTH_10) | 650 | if (flags & SOCAM_DATAWIDTH_10) |
636 | icd->num_formats++; | 651 | mt9m001->num_fmts++; |
637 | else | 652 | else |
638 | icd->formats++; | 653 | mt9m001->fmts++; |
639 | 654 | ||
640 | if (flags & SOCAM_DATAWIDTH_8) | 655 | if (flags & SOCAM_DATAWIDTH_8) |
641 | icd->num_formats++; | 656 | mt9m001->num_fmts++; |
642 | 657 | ||
643 | mt9m001->fourcc = icd->formats->fourcc; | 658 | mt9m001->fmt = &mt9m001->fmts[0]; |
644 | 659 | ||
645 | dev_info(&client->dev, "Detected a MT9M001 chip ID %x (%s)\n", data, | 660 | dev_info(&client->dev, "Detected a MT9M001 chip ID %x (%s)\n", data, |
646 | data == 0x8431 ? "C12STM" : "C12ST"); | 661 | data == 0x8431 ? "C12STM" : "C12ST"); |
@@ -686,14 +701,28 @@ static struct v4l2_subdev_core_ops mt9m001_subdev_core_ops = { | |||
686 | #endif | 701 | #endif |
687 | }; | 702 | }; |
688 | 703 | ||
704 | static int mt9m001_enum_fmt(struct v4l2_subdev *sd, int index, | ||
705 | enum v4l2_mbus_pixelcode *code) | ||
706 | { | ||
707 | struct i2c_client *client = sd->priv; | ||
708 | struct mt9m001 *mt9m001 = to_mt9m001(client); | ||
709 | |||
710 | if ((unsigned int)index >= mt9m001->num_fmts) | ||
711 | return -EINVAL; | ||
712 | |||
713 | *code = mt9m001->fmts[index].code; | ||
714 | return 0; | ||
715 | } | ||
716 | |||
689 | static struct v4l2_subdev_video_ops mt9m001_subdev_video_ops = { | 717 | static struct v4l2_subdev_video_ops mt9m001_subdev_video_ops = { |
690 | .s_stream = mt9m001_s_stream, | 718 | .s_stream = mt9m001_s_stream, |
691 | .s_fmt = mt9m001_s_fmt, | 719 | .s_mbus_fmt = mt9m001_s_fmt, |
692 | .g_fmt = mt9m001_g_fmt, | 720 | .g_mbus_fmt = mt9m001_g_fmt, |
693 | .try_fmt = mt9m001_try_fmt, | 721 | .try_mbus_fmt = mt9m001_try_fmt, |
694 | .s_crop = mt9m001_s_crop, | 722 | .s_crop = mt9m001_s_crop, |
695 | .g_crop = mt9m001_g_crop, | 723 | .g_crop = mt9m001_g_crop, |
696 | .cropcap = mt9m001_cropcap, | 724 | .cropcap = mt9m001_cropcap, |
725 | .enum_mbus_fmt = mt9m001_enum_fmt, | ||
697 | }; | 726 | }; |
698 | 727 | ||
699 | static struct v4l2_subdev_sensor_ops mt9m001_subdev_sensor_ops = { | 728 | static struct v4l2_subdev_sensor_ops mt9m001_subdev_sensor_ops = { |
diff --git a/drivers/media/video/mt9m111.c b/drivers/media/video/mt9m111.c
index 30db625455e4..d35f536f9fc3 100644
--- a/drivers/media/video/mt9m111.c
+++ b/drivers/media/video/mt9m111.c
@@ -123,23 +123,34 @@ | |||
123 | #define MT9M111_MAX_HEIGHT 1024 | 123 | #define MT9M111_MAX_HEIGHT 1024 |
124 | #define MT9M111_MAX_WIDTH 1280 | 124 | #define MT9M111_MAX_WIDTH 1280 |
125 | 125 | ||
126 | #define COL_FMT(_name, _depth, _fourcc, _colorspace) \ | 126 | /* MT9M111 has only one fixed colorspace per pixelcode */ |
127 | { .name = _name, .depth = _depth, .fourcc = _fourcc, \ | 127 | struct mt9m111_datafmt { |
128 | .colorspace = _colorspace } | 128 | enum v4l2_mbus_pixelcode code; |
129 | #define RGB_FMT(_name, _depth, _fourcc) \ | 129 | enum v4l2_colorspace colorspace; |
130 | COL_FMT(_name, _depth, _fourcc, V4L2_COLORSPACE_SRGB) | 130 | }; |
131 | #define JPG_FMT(_name, _depth, _fourcc) \ | 131 | |
132 | COL_FMT(_name, _depth, _fourcc, V4L2_COLORSPACE_JPEG) | 132 | /* Find a data format by a pixel code in an array */ |
133 | 133 | static const struct mt9m111_datafmt *mt9m111_find_datafmt( | |
134 | static const struct soc_camera_data_format mt9m111_colour_formats[] = { | 134 | enum v4l2_mbus_pixelcode code, const struct mt9m111_datafmt *fmt, |
135 | JPG_FMT("CbYCrY 16 bit", 16, V4L2_PIX_FMT_UYVY), | 135 | int n) |
136 | JPG_FMT("CrYCbY 16 bit", 16, V4L2_PIX_FMT_VYUY), | 136 | { |
137 | JPG_FMT("YCbYCr 16 bit", 16, V4L2_PIX_FMT_YUYV), | 137 | int i; |
138 | JPG_FMT("YCrYCb 16 bit", 16, V4L2_PIX_FMT_YVYU), | 138 | for (i = 0; i < n; i++) |
139 | RGB_FMT("RGB 565", 16, V4L2_PIX_FMT_RGB565), | 139 | if (fmt[i].code == code) |
140 | RGB_FMT("RGB 555", 16, V4L2_PIX_FMT_RGB555), | 140 | return fmt + i; |
141 | RGB_FMT("Bayer (sRGB) 10 bit", 10, V4L2_PIX_FMT_SBGGR16), | 141 | |
142 | RGB_FMT("Bayer (sRGB) 8 bit", 8, V4L2_PIX_FMT_SBGGR8), | 142 | return NULL; |
143 | } | ||
144 | |||
145 | static const struct mt9m111_datafmt mt9m111_colour_fmts[] = { | ||
146 | {V4L2_MBUS_FMT_YUYV8_2X8_LE, V4L2_COLORSPACE_JPEG}, | ||
147 | {V4L2_MBUS_FMT_YVYU8_2X8_LE, V4L2_COLORSPACE_JPEG}, | ||
148 | {V4L2_MBUS_FMT_YUYV8_2X8_BE, V4L2_COLORSPACE_JPEG}, | ||
149 | {V4L2_MBUS_FMT_YVYU8_2X8_BE, V4L2_COLORSPACE_JPEG}, | ||
150 | {V4L2_MBUS_FMT_RGB555_2X8_PADHI_LE, V4L2_COLORSPACE_SRGB}, | ||
151 | {V4L2_MBUS_FMT_RGB565_2X8_LE, V4L2_COLORSPACE_SRGB}, | ||
152 | {V4L2_MBUS_FMT_SBGGR8_1X8, V4L2_COLORSPACE_SRGB}, | ||
153 | {V4L2_MBUS_FMT_SBGGR10_2X8_PADHI_LE, V4L2_COLORSPACE_SRGB}, | ||
143 | }; | 154 | }; |
144 | 155 | ||
145 | enum mt9m111_context { | 156 | enum mt9m111_context { |
@@ -152,7 +163,7 @@ struct mt9m111 { | |||
152 | int model; /* V4L2_IDENT_MT9M11x* codes from v4l2-chip-ident.h */ | 163 | int model; /* V4L2_IDENT_MT9M11x* codes from v4l2-chip-ident.h */ |
153 | enum mt9m111_context context; | 164 | enum mt9m111_context context; |
154 | struct v4l2_rect rect; | 165 | struct v4l2_rect rect; |
155 | u32 pixfmt; | 166 | const struct mt9m111_datafmt *fmt; |
156 | unsigned int gain; | 167 | unsigned int gain; |
157 | unsigned char autoexposure; | 168 | unsigned char autoexposure; |
158 | unsigned char datawidth; | 169 | unsigned char datawidth; |
@@ -258,8 +269,8 @@ static int mt9m111_setup_rect(struct i2c_client *client, | |||
258 | int width = rect->width; | 269 | int width = rect->width; |
259 | int height = rect->height; | 270 | int height = rect->height; |
260 | 271 | ||
261 | if (mt9m111->pixfmt == V4L2_PIX_FMT_SBGGR8 || | 272 | if (mt9m111->fmt->code == V4L2_MBUS_FMT_SBGGR8_1X8 || |
262 | mt9m111->pixfmt == V4L2_PIX_FMT_SBGGR16) | 273 | mt9m111->fmt->code == V4L2_MBUS_FMT_SBGGR10_2X8_PADHI_LE) |
263 | is_raw_format = 1; | 274 | is_raw_format = 1; |
264 | else | 275 | else |
265 | is_raw_format = 0; | 276 | is_raw_format = 0; |
@@ -307,7 +318,8 @@ static int mt9m111_setup_pixfmt(struct i2c_client *client, u16 outfmt) | |||
307 | 318 | ||
308 | static int mt9m111_setfmt_bayer8(struct i2c_client *client) | 319 | static int mt9m111_setfmt_bayer8(struct i2c_client *client) |
309 | { | 320 | { |
310 | return mt9m111_setup_pixfmt(client, MT9M111_OUTFMT_PROCESSED_BAYER); | 321 | return mt9m111_setup_pixfmt(client, MT9M111_OUTFMT_PROCESSED_BAYER | |
322 | MT9M111_OUTFMT_RGB); | ||
311 | } | 323 | } |
312 | 324 | ||
313 | static int mt9m111_setfmt_bayer10(struct i2c_client *client) | 325 | static int mt9m111_setfmt_bayer10(struct i2c_client *client) |
@@ -401,8 +413,8 @@ static int mt9m111_make_rect(struct i2c_client *client, | |||
401 | { | 413 | { |
402 | struct mt9m111 *mt9m111 = to_mt9m111(client); | 414 | struct mt9m111 *mt9m111 = to_mt9m111(client); |
403 | 415 | ||
404 | if (mt9m111->pixfmt == V4L2_PIX_FMT_SBGGR8 || | 416 | if (mt9m111->fmt->code == V4L2_MBUS_FMT_SBGGR8_1X8 || |
405 | mt9m111->pixfmt == V4L2_PIX_FMT_SBGGR16) { | 417 | mt9m111->fmt->code == V4L2_MBUS_FMT_SBGGR10_2X8_PADHI_LE) { |
406 | /* Bayer format - even size lengths */ | 418 | /* Bayer format - even size lengths */ |
407 | rect->width = ALIGN(rect->width, 2); | 419 | rect->width = ALIGN(rect->width, 2); |
408 | rect->height = ALIGN(rect->height, 2); | 420 | rect->height = ALIGN(rect->height, 2); |
@@ -460,120 +472,139 @@ static int mt9m111_cropcap(struct v4l2_subdev *sd, struct v4l2_cropcap *a) | |||
460 | return 0; | 472 | return 0; |
461 | } | 473 | } |
462 | 474 | ||
463 | static int mt9m111_g_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | 475 | static int mt9m111_g_fmt(struct v4l2_subdev *sd, |
476 | struct v4l2_mbus_framefmt *mf) | ||
464 | { | 477 | { |
465 | struct i2c_client *client = sd->priv; | 478 | struct i2c_client *client = sd->priv; |
466 | struct mt9m111 *mt9m111 = to_mt9m111(client); | 479 | struct mt9m111 *mt9m111 = to_mt9m111(client); |
467 | struct v4l2_pix_format *pix = &f->fmt.pix; | ||
468 | 480 | ||
469 | pix->width = mt9m111->rect.width; | 481 | mf->width = mt9m111->rect.width; |
470 | pix->height = mt9m111->rect.height; | 482 | mf->height = mt9m111->rect.height; |
471 | pix->pixelformat = mt9m111->pixfmt; | 483 | mf->code = mt9m111->fmt->code; |
472 | pix->field = V4L2_FIELD_NONE; | 484 | mf->field = V4L2_FIELD_NONE; |
473 | pix->colorspace = V4L2_COLORSPACE_SRGB; | ||
474 | 485 | ||
475 | return 0; | 486 | return 0; |
476 | } | 487 | } |
477 | 488 | ||
478 | static int mt9m111_set_pixfmt(struct i2c_client *client, u32 pixfmt) | 489 | static int mt9m111_set_pixfmt(struct i2c_client *client, |
490 | enum v4l2_mbus_pixelcode code) | ||
479 | { | 491 | { |
480 | struct mt9m111 *mt9m111 = to_mt9m111(client); | 492 | struct mt9m111 *mt9m111 = to_mt9m111(client); |
481 | int ret; | 493 | int ret; |
482 | 494 | ||
483 | switch (pixfmt) { | 495 | switch (code) { |
484 | case V4L2_PIX_FMT_SBGGR8: | 496 | case V4L2_MBUS_FMT_SBGGR8_1X8: |
485 | ret = mt9m111_setfmt_bayer8(client); | 497 | ret = mt9m111_setfmt_bayer8(client); |
486 | break; | 498 | break; |
487 | case V4L2_PIX_FMT_SBGGR16: | 499 | case V4L2_MBUS_FMT_SBGGR10_2X8_PADHI_LE: |
488 | ret = mt9m111_setfmt_bayer10(client); | 500 | ret = mt9m111_setfmt_bayer10(client); |
489 | break; | 501 | break; |
490 | case V4L2_PIX_FMT_RGB555: | 502 | case V4L2_MBUS_FMT_RGB555_2X8_PADHI_LE: |
491 | ret = mt9m111_setfmt_rgb555(client); | 503 | ret = mt9m111_setfmt_rgb555(client); |
492 | break; | 504 | break; |
493 | case V4L2_PIX_FMT_RGB565: | 505 | case V4L2_MBUS_FMT_RGB565_2X8_LE: |
494 | ret = mt9m111_setfmt_rgb565(client); | 506 | ret = mt9m111_setfmt_rgb565(client); |
495 | break; | 507 | break; |
496 | case V4L2_PIX_FMT_UYVY: | 508 | case V4L2_MBUS_FMT_YUYV8_2X8_BE: |
497 | mt9m111->swap_yuv_y_chromas = 0; | 509 | mt9m111->swap_yuv_y_chromas = 0; |
498 | mt9m111->swap_yuv_cb_cr = 0; | 510 | mt9m111->swap_yuv_cb_cr = 0; |
499 | ret = mt9m111_setfmt_yuv(client); | 511 | ret = mt9m111_setfmt_yuv(client); |
500 | break; | 512 | break; |
501 | case V4L2_PIX_FMT_VYUY: | 513 | case V4L2_MBUS_FMT_YVYU8_2X8_BE: |
502 | mt9m111->swap_yuv_y_chromas = 0; | 514 | mt9m111->swap_yuv_y_chromas = 0; |
503 | mt9m111->swap_yuv_cb_cr = 1; | 515 | mt9m111->swap_yuv_cb_cr = 1; |
504 | ret = mt9m111_setfmt_yuv(client); | 516 | ret = mt9m111_setfmt_yuv(client); |
505 | break; | 517 | break; |
506 | case V4L2_PIX_FMT_YUYV: | 518 | case V4L2_MBUS_FMT_YUYV8_2X8_LE: |
507 | mt9m111->swap_yuv_y_chromas = 1; | 519 | mt9m111->swap_yuv_y_chromas = 1; |
508 | mt9m111->swap_yuv_cb_cr = 0; | 520 | mt9m111->swap_yuv_cb_cr = 0; |
509 | ret = mt9m111_setfmt_yuv(client); | 521 | ret = mt9m111_setfmt_yuv(client); |
510 | break; | 522 | break; |
511 | case V4L2_PIX_FMT_YVYU: | 523 | case V4L2_MBUS_FMT_YVYU8_2X8_LE: |
512 | mt9m111->swap_yuv_y_chromas = 1; | 524 | mt9m111->swap_yuv_y_chromas = 1; |
513 | mt9m111->swap_yuv_cb_cr = 1; | 525 | mt9m111->swap_yuv_cb_cr = 1; |
514 | ret = mt9m111_setfmt_yuv(client); | 526 | ret = mt9m111_setfmt_yuv(client); |
515 | break; | 527 | break; |
516 | default: | 528 | default: |
517 | dev_err(&client->dev, "Pixel format not handled : %x\n", | 529 | dev_err(&client->dev, "Pixel format not handled : %x\n", |
518 | pixfmt); | 530 | code); |
519 | ret = -EINVAL; | 531 | ret = -EINVAL; |
520 | } | 532 | } |
521 | 533 | ||
522 | if (!ret) | ||
523 | mt9m111->pixfmt = pixfmt; | ||
524 | |||
525 | return ret; | 534 | return ret; |
526 | } | 535 | } |
527 | 536 | ||
528 | static int mt9m111_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | 537 | static int mt9m111_s_fmt(struct v4l2_subdev *sd, |
538 | struct v4l2_mbus_framefmt *mf) | ||
529 | { | 539 | { |
530 | struct i2c_client *client = sd->priv; | 540 | struct i2c_client *client = sd->priv; |
541 | const struct mt9m111_datafmt *fmt; | ||
531 | struct mt9m111 *mt9m111 = to_mt9m111(client); | 542 | struct mt9m111 *mt9m111 = to_mt9m111(client); |
532 | struct v4l2_pix_format *pix = &f->fmt.pix; | ||
533 | struct v4l2_rect rect = { | 543 | struct v4l2_rect rect = { |
534 | .left = mt9m111->rect.left, | 544 | .left = mt9m111->rect.left, |
535 | .top = mt9m111->rect.top, | 545 | .top = mt9m111->rect.top, |
536 | .width = pix->width, | 546 | .width = mf->width, |
537 | .height = pix->height, | 547 | .height = mf->height, |
538 | }; | 548 | }; |
539 | int ret; | 549 | int ret; |
540 | 550 | ||
551 | fmt = mt9m111_find_datafmt(mf->code, mt9m111_colour_fmts, | ||
552 | ARRAY_SIZE(mt9m111_colour_fmts)); | ||
553 | if (!fmt) | ||
554 | return -EINVAL; | ||
555 | |||
541 | dev_dbg(&client->dev, | 556 | dev_dbg(&client->dev, |
542 | "%s fmt=%x left=%d, top=%d, width=%d, height=%d\n", __func__, | 557 | "%s code=%x left=%d, top=%d, width=%d, height=%d\n", __func__, |
543 | pix->pixelformat, rect.left, rect.top, rect.width, rect.height); | 558 | mf->code, rect.left, rect.top, rect.width, rect.height); |
544 | 559 | ||
545 | ret = mt9m111_make_rect(client, &rect); | 560 | ret = mt9m111_make_rect(client, &rect); |
546 | if (!ret) | 561 | if (!ret) |
547 | ret = mt9m111_set_pixfmt(client, pix->pixelformat); | 562 | ret = mt9m111_set_pixfmt(client, mf->code); |
548 | if (!ret) | 563 | if (!ret) { |
549 | mt9m111->rect = rect; | 564 | mt9m111->rect = rect; |
565 | mt9m111->fmt = fmt; | ||
566 | mf->colorspace = fmt->colorspace; | ||
567 | } | ||
568 | |||
550 | return ret; | 569 | return ret; |
551 | } | 570 | } |
552 | 571 | ||
553 | static int mt9m111_try_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | 572 | static int mt9m111_try_fmt(struct v4l2_subdev *sd, |
573 | struct v4l2_mbus_framefmt *mf) | ||
554 | { | 574 | { |
555 | struct v4l2_pix_format *pix = &f->fmt.pix; | 575 | struct i2c_client *client = sd->priv; |
556 | bool bayer = pix->pixelformat == V4L2_PIX_FMT_SBGGR8 || | 576 | struct mt9m111 *mt9m111 = to_mt9m111(client); |
557 | pix->pixelformat == V4L2_PIX_FMT_SBGGR16; | 577 | const struct mt9m111_datafmt *fmt; |
578 | bool bayer = mf->code == V4L2_MBUS_FMT_SBGGR8_1X8 || | ||
579 | mf->code == V4L2_MBUS_FMT_SBGGR10_2X8_PADHI_LE; | ||
580 | |||
581 | fmt = mt9m111_find_datafmt(mf->code, mt9m111_colour_fmts, | ||
582 | ARRAY_SIZE(mt9m111_colour_fmts)); | ||
583 | if (!fmt) { | ||
584 | fmt = mt9m111->fmt; | ||
585 | mf->code = fmt->code; | ||
586 | } | ||
558 | 587 | ||
559 | /* | 588 | /* |
560 | * With Bayer format enforce even side lengths, but let the user play | 589 | * With Bayer format enforce even side lengths, but let the user play |
561 | * with the starting pixel | 590 | * with the starting pixel |
562 | */ | 591 | */ |
563 | 592 | ||
564 | if (pix->height > MT9M111_MAX_HEIGHT) | 593 | if (mf->height > MT9M111_MAX_HEIGHT) |
565 | pix->height = MT9M111_MAX_HEIGHT; | 594 | mf->height = MT9M111_MAX_HEIGHT; |
566 | else if (pix->height < 2) | 595 | else if (mf->height < 2) |
567 | pix->height = 2; | 596 | mf->height = 2; |
568 | else if (bayer) | 597 | else if (bayer) |
569 | pix->height = ALIGN(pix->height, 2); | 598 | mf->height = ALIGN(mf->height, 2); |
570 | 599 | ||
571 | if (pix->width > MT9M111_MAX_WIDTH) | 600 | if (mf->width > MT9M111_MAX_WIDTH) |
572 | pix->width = MT9M111_MAX_WIDTH; | 601 | mf->width = MT9M111_MAX_WIDTH; |
573 | else if (pix->width < 2) | 602 | else if (mf->width < 2) |
574 | pix->width = 2; | 603 | mf->width = 2; |
575 | else if (bayer) | 604 | else if (bayer) |
576 | pix->width = ALIGN(pix->width, 2); | 605 | mf->width = ALIGN(mf->width, 2); |
606 | |||
607 | mf->colorspace = fmt->colorspace; | ||
577 | 608 | ||
578 | return 0; | 609 | return 0; |
579 | } | 610 | } |
@@ -863,7 +894,7 @@ static int mt9m111_restore_state(struct i2c_client *client) | |||
863 | struct mt9m111 *mt9m111 = to_mt9m111(client); | 894 | struct mt9m111 *mt9m111 = to_mt9m111(client); |
864 | 895 | ||
865 | mt9m111_set_context(client, mt9m111->context); | 896 | mt9m111_set_context(client, mt9m111->context); |
866 | mt9m111_set_pixfmt(client, mt9m111->pixfmt); | 897 | mt9m111_set_pixfmt(client, mt9m111->fmt->code); |
867 | mt9m111_setup_rect(client, &mt9m111->rect); | 898 | mt9m111_setup_rect(client, &mt9m111->rect); |
868 | mt9m111_set_flip(client, mt9m111->hflip, MT9M111_RMB_MIRROR_COLS); | 899 | mt9m111_set_flip(client, mt9m111->hflip, MT9M111_RMB_MIRROR_COLS); |
869 | mt9m111_set_flip(client, mt9m111->vflip, MT9M111_RMB_MIRROR_ROWS); | 900 | mt9m111_set_flip(client, mt9m111->vflip, MT9M111_RMB_MIRROR_ROWS); |
@@ -952,9 +983,6 @@ static int mt9m111_video_probe(struct soc_camera_device *icd, | |||
952 | goto ei2c; | 983 | goto ei2c; |
953 | } | 984 | } |
954 | 985 | ||
955 | icd->formats = mt9m111_colour_formats; | ||
956 | icd->num_formats = ARRAY_SIZE(mt9m111_colour_formats); | ||
957 | |||
958 | dev_info(&client->dev, "Detected a MT9M11x chip ID %x\n", data); | 986 | dev_info(&client->dev, "Detected a MT9M11x chip ID %x\n", data); |
959 | 987 | ||
960 | ei2c: | 988 | ei2c: |
@@ -971,13 +999,24 @@ static struct v4l2_subdev_core_ops mt9m111_subdev_core_ops = { | |||
971 | #endif | 999 | #endif |
972 | }; | 1000 | }; |
973 | 1001 | ||
1002 | static int mt9m111_enum_fmt(struct v4l2_subdev *sd, int index, | ||
1003 | enum v4l2_mbus_pixelcode *code) | ||
1004 | { | ||
1005 | if ((unsigned int)index >= ARRAY_SIZE(mt9m111_colour_fmts)) | ||
1006 | return -EINVAL; | ||
1007 | |||
1008 | *code = mt9m111_colour_fmts[index].code; | ||
1009 | return 0; | ||
1010 | } | ||
1011 | |||
974 | static struct v4l2_subdev_video_ops mt9m111_subdev_video_ops = { | 1012 | static struct v4l2_subdev_video_ops mt9m111_subdev_video_ops = { |
975 | .s_fmt = mt9m111_s_fmt, | 1013 | .s_mbus_fmt = mt9m111_s_fmt, |
976 | .g_fmt = mt9m111_g_fmt, | 1014 | .g_mbus_fmt = mt9m111_g_fmt, |
977 | .try_fmt = mt9m111_try_fmt, | 1015 | .try_mbus_fmt = mt9m111_try_fmt, |
978 | .s_crop = mt9m111_s_crop, | 1016 | .s_crop = mt9m111_s_crop, |
979 | .g_crop = mt9m111_g_crop, | 1017 | .g_crop = mt9m111_g_crop, |
980 | .cropcap = mt9m111_cropcap, | 1018 | .cropcap = mt9m111_cropcap, |
1019 | .enum_mbus_fmt = mt9m111_enum_fmt, | ||
981 | }; | 1020 | }; |
982 | 1021 | ||
983 | static struct v4l2_subdev_ops mt9m111_subdev_ops = { | 1022 | static struct v4l2_subdev_ops mt9m111_subdev_ops = { |
@@ -1024,6 +1063,7 @@ static int mt9m111_probe(struct i2c_client *client, | |||
1024 | mt9m111->rect.top = MT9M111_MIN_DARK_ROWS; | 1063 | mt9m111->rect.top = MT9M111_MIN_DARK_ROWS; |
1025 | mt9m111->rect.width = MT9M111_MAX_WIDTH; | 1064 | mt9m111->rect.width = MT9M111_MAX_WIDTH; |
1026 | mt9m111->rect.height = MT9M111_MAX_HEIGHT; | 1065 | mt9m111->rect.height = MT9M111_MAX_HEIGHT; |
1066 | mt9m111->fmt = &mt9m111_colour_fmts[0]; | ||
1027 | 1067 | ||
1028 | ret = mt9m111_video_probe(icd, client); | 1068 | ret = mt9m111_video_probe(icd, client); |
1029 | if (ret) { | 1069 | if (ret) { |
diff --git a/drivers/media/video/mt9t031.c b/drivers/media/video/mt9t031.c
index e3f664f21c48..69c227f65bcb 100644
--- a/drivers/media/video/mt9t031.c
+++ b/drivers/media/video/mt9t031.c
@@ -60,15 +60,6 @@ | |||
60 | SOCAM_VSYNC_ACTIVE_HIGH | SOCAM_DATA_ACTIVE_HIGH | \ | 60 | SOCAM_VSYNC_ACTIVE_HIGH | SOCAM_DATA_ACTIVE_HIGH | \ |
61 | SOCAM_MASTER | SOCAM_DATAWIDTH_10) | 61 | SOCAM_MASTER | SOCAM_DATAWIDTH_10) |
62 | 62 | ||
63 | static const struct soc_camera_data_format mt9t031_colour_formats[] = { | ||
64 | { | ||
65 | .name = "Bayer (sRGB) 10 bit", | ||
66 | .depth = 10, | ||
67 | .fourcc = V4L2_PIX_FMT_SGRBG10, | ||
68 | .colorspace = V4L2_COLORSPACE_SRGB, | ||
69 | } | ||
70 | }; | ||
71 | |||
72 | struct mt9t031 { | 63 | struct mt9t031 { |
73 | struct v4l2_subdev subdev; | 64 | struct v4l2_subdev subdev; |
74 | struct v4l2_rect rect; /* Sensor window */ | 65 | struct v4l2_rect rect; /* Sensor window */ |
@@ -378,27 +369,27 @@ static int mt9t031_cropcap(struct v4l2_subdev *sd, struct v4l2_cropcap *a) | |||
378 | return 0; | 369 | return 0; |
379 | } | 370 | } |
380 | 371 | ||
381 | static int mt9t031_g_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | 372 | static int mt9t031_g_fmt(struct v4l2_subdev *sd, |
373 | struct v4l2_mbus_framefmt *mf) | ||
382 | { | 374 | { |
383 | struct i2c_client *client = sd->priv; | 375 | struct i2c_client *client = sd->priv; |
384 | struct mt9t031 *mt9t031 = to_mt9t031(client); | 376 | struct mt9t031 *mt9t031 = to_mt9t031(client); |
385 | struct v4l2_pix_format *pix = &f->fmt.pix; | ||
386 | 377 | ||
387 | pix->width = mt9t031->rect.width / mt9t031->xskip; | 378 | mf->width = mt9t031->rect.width / mt9t031->xskip; |
388 | pix->height = mt9t031->rect.height / mt9t031->yskip; | 379 | mf->height = mt9t031->rect.height / mt9t031->yskip; |
389 | pix->pixelformat = V4L2_PIX_FMT_SGRBG10; | 380 | mf->code = V4L2_MBUS_FMT_SBGGR10_1X10; |
390 | pix->field = V4L2_FIELD_NONE; | 381 | mf->colorspace = V4L2_COLORSPACE_SRGB; |
391 | pix->colorspace = V4L2_COLORSPACE_SRGB; | 382 | mf->field = V4L2_FIELD_NONE; |
392 | 383 | ||
393 | return 0; | 384 | return 0; |
394 | } | 385 | } |
395 | 386 | ||
396 | static int mt9t031_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | 387 | static int mt9t031_s_fmt(struct v4l2_subdev *sd, |
388 | struct v4l2_mbus_framefmt *mf) | ||
397 | { | 389 | { |
398 | struct i2c_client *client = sd->priv; | 390 | struct i2c_client *client = sd->priv; |
399 | struct mt9t031 *mt9t031 = to_mt9t031(client); | 391 | struct mt9t031 *mt9t031 = to_mt9t031(client); |
400 | struct soc_camera_device *icd = client->dev.platform_data; | 392 | struct soc_camera_device *icd = client->dev.platform_data; |
401 | struct v4l2_pix_format *pix = &f->fmt.pix; | ||
402 | u16 xskip, yskip; | 393 | u16 xskip, yskip; |
403 | struct v4l2_rect rect = mt9t031->rect; | 394 | struct v4l2_rect rect = mt9t031->rect; |
404 | 395 | ||
@@ -406,8 +397,11 @@ static int mt9t031_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | |||
406 | * try_fmt has put width and height within limits. | 397 | * try_fmt has put width and height within limits. |
407 | * S_FMT: use binning and skipping for scaling | 398 | * S_FMT: use binning and skipping for scaling |
408 | */ | 399 | */ |
409 | xskip = mt9t031_skip(&rect.width, pix->width, MT9T031_MAX_WIDTH); | 400 | xskip = mt9t031_skip(&rect.width, mf->width, MT9T031_MAX_WIDTH); |
410 | yskip = mt9t031_skip(&rect.height, pix->height, MT9T031_MAX_HEIGHT); | 401 | yskip = mt9t031_skip(&rect.height, mf->height, MT9T031_MAX_HEIGHT); |
402 | |||
403 | mf->code = V4L2_MBUS_FMT_SBGGR10_1X10; | ||
404 | mf->colorspace = V4L2_COLORSPACE_SRGB; | ||
411 | 405 | ||
412 | /* mt9t031_set_params() doesn't change width and height */ | 406 | /* mt9t031_set_params() doesn't change width and height */ |
413 | return mt9t031_set_params(icd, &rect, xskip, yskip); | 407 | return mt9t031_set_params(icd, &rect, xskip, yskip); |
@@ -417,13 +411,15 @@ static int mt9t031_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | |||
417 | * If a user window larger than sensor window is requested, we'll increase the | 411 | * If a user window larger than sensor window is requested, we'll increase the |
418 | * sensor window. | 412 | * sensor window. |
419 | */ | 413 | */ |
420 | static int mt9t031_try_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | 414 | static int mt9t031_try_fmt(struct v4l2_subdev *sd, |
415 | struct v4l2_mbus_framefmt *mf) | ||
421 | { | 416 | { |
422 | struct v4l2_pix_format *pix = &f->fmt.pix; | ||
423 | |||
424 | v4l_bound_align_image( | 417 | v4l_bound_align_image( |
425 | &pix->width, MT9T031_MIN_WIDTH, MT9T031_MAX_WIDTH, 1, | 418 | &mf->width, MT9T031_MIN_WIDTH, MT9T031_MAX_WIDTH, 1, |
426 | &pix->height, MT9T031_MIN_HEIGHT, MT9T031_MAX_HEIGHT, 1, 0); | 419 | &mf->height, MT9T031_MIN_HEIGHT, MT9T031_MAX_HEIGHT, 1, 0); |
420 | |||
421 | mf->code = V4L2_MBUS_FMT_SBGGR10_1X10; | ||
422 | mf->colorspace = V4L2_COLORSPACE_SRGB; | ||
427 | 423 | ||
428 | return 0; | 424 | return 0; |
429 | } | 425 | } |
@@ -684,7 +680,6 @@ static int mt9t031_s_ctrl(struct v4l2_subdev *sd, struct v4l2_control *ctrl) | |||
684 | */ | 680 | */ |
685 | static int mt9t031_video_probe(struct i2c_client *client) | 681 | static int mt9t031_video_probe(struct i2c_client *client) |
686 | { | 682 | { |
687 | struct soc_camera_device *icd = client->dev.platform_data; | ||
688 | struct mt9t031 *mt9t031 = to_mt9t031(client); | 683 | struct mt9t031 *mt9t031 = to_mt9t031(client); |
689 | s32 data; | 684 | s32 data; |
690 | int ret; | 685 | int ret; |
@@ -699,8 +694,6 @@ static int mt9t031_video_probe(struct i2c_client *client) | |||
699 | switch (data) { | 694 | switch (data) { |
700 | case 0x1621: | 695 | case 0x1621: |
701 | mt9t031->model = V4L2_IDENT_MT9T031; | 696 | mt9t031->model = V4L2_IDENT_MT9T031; |
702 | icd->formats = mt9t031_colour_formats; | ||
703 | icd->num_formats = ARRAY_SIZE(mt9t031_colour_formats); | ||
704 | break; | 697 | break; |
705 | default: | 698 | default: |
706 | dev_err(&client->dev, | 699 | dev_err(&client->dev, |
@@ -741,14 +734,25 @@ static struct v4l2_subdev_core_ops mt9t031_subdev_core_ops = { | |||
741 | #endif | 734 | #endif |
742 | }; | 735 | }; |
743 | 736 | ||
737 | static int mt9t031_enum_fmt(struct v4l2_subdev *sd, int index, | ||
738 | enum v4l2_mbus_pixelcode *code) | ||
739 | { | ||
740 | if (index) | ||
741 | return -EINVAL; | ||
742 | |||
743 | *code = V4L2_MBUS_FMT_SBGGR10_1X10; | ||
744 | return 0; | ||
745 | } | ||
746 | |||
744 | static struct v4l2_subdev_video_ops mt9t031_subdev_video_ops = { | 747 | static struct v4l2_subdev_video_ops mt9t031_subdev_video_ops = { |
745 | .s_stream = mt9t031_s_stream, | 748 | .s_stream = mt9t031_s_stream, |
746 | .s_fmt = mt9t031_s_fmt, | 749 | .s_mbus_fmt = mt9t031_s_fmt, |
747 | .g_fmt = mt9t031_g_fmt, | 750 | .g_mbus_fmt = mt9t031_g_fmt, |
748 | .try_fmt = mt9t031_try_fmt, | 751 | .try_mbus_fmt = mt9t031_try_fmt, |
749 | .s_crop = mt9t031_s_crop, | 752 | .s_crop = mt9t031_s_crop, |
750 | .g_crop = mt9t031_g_crop, | 753 | .g_crop = mt9t031_g_crop, |
751 | .cropcap = mt9t031_cropcap, | 754 | .cropcap = mt9t031_cropcap, |
755 | .enum_mbus_fmt = mt9t031_enum_fmt, | ||
752 | }; | 756 | }; |
753 | 757 | ||
754 | static struct v4l2_subdev_sensor_ops mt9t031_subdev_sensor_ops = { | 758 | static struct v4l2_subdev_sensor_ops mt9t031_subdev_sensor_ops = { |
diff --git a/drivers/media/video/mt9v022.c b/drivers/media/video/mt9v022.c
index f60a9a107f20..91df7ec91fb6 100644
--- a/drivers/media/video/mt9v022.c
+++ b/drivers/media/video/mt9v022.c
@@ -64,41 +64,46 @@ MODULE_PARM_DESC(sensor_type, "Sensor type: \"colour\" or \"monochrome\""); | |||
64 | #define MT9V022_COLUMN_SKIP 1 | 64 | #define MT9V022_COLUMN_SKIP 1 |
65 | #define MT9V022_ROW_SKIP 4 | 65 | #define MT9V022_ROW_SKIP 4 |
66 | 66 | ||
67 | static const struct soc_camera_data_format mt9v022_colour_formats[] = { | 67 | /* MT9V022 has only one fixed colorspace per pixelcode */ |
68 | struct mt9v022_datafmt { | ||
69 | enum v4l2_mbus_pixelcode code; | ||
70 | enum v4l2_colorspace colorspace; | ||
71 | }; | ||
72 | |||
73 | /* Find a data format by a pixel code in an array */ | ||
74 | static const struct mt9v022_datafmt *mt9v022_find_datafmt( | ||
75 | enum v4l2_mbus_pixelcode code, const struct mt9v022_datafmt *fmt, | ||
76 | int n) | ||
77 | { | ||
78 | int i; | ||
79 | for (i = 0; i < n; i++) | ||
80 | if (fmt[i].code == code) | ||
81 | return fmt + i; | ||
82 | |||
83 | return NULL; | ||
84 | } | ||
85 | |||
86 | static const struct mt9v022_datafmt mt9v022_colour_fmts[] = { | ||
68 | /* | 87 | /* |
69 | * Order important: first natively supported, | 88 | * Order important: first natively supported, |
70 | * second supported with a GPIO extender | 89 | * second supported with a GPIO extender |
71 | */ | 90 | */ |
72 | { | 91 | {V4L2_MBUS_FMT_SBGGR10_1X10, V4L2_COLORSPACE_SRGB}, |
73 | .name = "Bayer (sRGB) 10 bit", | 92 | {V4L2_MBUS_FMT_SBGGR8_1X8, V4L2_COLORSPACE_SRGB}, |
74 | .depth = 10, | ||
75 | .fourcc = V4L2_PIX_FMT_SBGGR16, | ||
76 | .colorspace = V4L2_COLORSPACE_SRGB, | ||
77 | }, { | ||
78 | .name = "Bayer (sRGB) 8 bit", | ||
79 | .depth = 8, | ||
80 | .fourcc = V4L2_PIX_FMT_SBGGR8, | ||
81 | .colorspace = V4L2_COLORSPACE_SRGB, | ||
82 | } | ||
83 | }; | 93 | }; |
84 | 94 | ||
85 | static const struct soc_camera_data_format mt9v022_monochrome_formats[] = { | 95 | static const struct mt9v022_datafmt mt9v022_monochrome_fmts[] = { |
86 | /* Order important - see above */ | 96 | /* Order important - see above */ |
87 | { | 97 | {V4L2_MBUS_FMT_Y10_1X10, V4L2_COLORSPACE_JPEG}, |
88 | .name = "Monochrome 10 bit", | 98 | {V4L2_MBUS_FMT_GREY8_1X8, V4L2_COLORSPACE_JPEG}, |
89 | .depth = 10, | ||
90 | .fourcc = V4L2_PIX_FMT_Y16, | ||
91 | }, { | ||
92 | .name = "Monochrome 8 bit", | ||
93 | .depth = 8, | ||
94 | .fourcc = V4L2_PIX_FMT_GREY, | ||
95 | }, | ||
96 | }; | 99 | }; |
97 | 100 | ||
98 | struct mt9v022 { | 101 | struct mt9v022 { |
99 | struct v4l2_subdev subdev; | 102 | struct v4l2_subdev subdev; |
100 | struct v4l2_rect rect; /* Sensor window */ | 103 | struct v4l2_rect rect; /* Sensor window */ |
101 | __u32 fourcc; | 104 | const struct mt9v022_datafmt *fmt; |
105 | const struct mt9v022_datafmt *fmts; | ||
106 | int num_fmts; | ||
102 | int model; /* V4L2_IDENT_MT9V022* codes from v4l2-chip-ident.h */ | 107 | int model; /* V4L2_IDENT_MT9V022* codes from v4l2-chip-ident.h */ |
103 | u16 chip_control; | 108 | u16 chip_control; |
104 | unsigned short y_skip_top; /* Lines to skip at the top */ | 109 | unsigned short y_skip_top; /* Lines to skip at the top */ |
@@ -275,8 +280,7 @@ static int mt9v022_s_crop(struct v4l2_subdev *sd, struct v4l2_crop *a) | |||
275 | int ret; | 280 | int ret; |
276 | 281 | ||
277 | /* Bayer format - even size lengths */ | 282 | /* Bayer format - even size lengths */ |
278 | if (mt9v022->fourcc == V4L2_PIX_FMT_SBGGR8 || | 283 | if (mt9v022->fmts == mt9v022_colour_fmts) { |
279 | mt9v022->fourcc == V4L2_PIX_FMT_SBGGR16) { | ||
280 | rect.width = ALIGN(rect.width, 2); | 284 | rect.width = ALIGN(rect.width, 2); |
281 | rect.height = ALIGN(rect.height, 2); | 285 | rect.height = ALIGN(rect.height, 2); |
282 | /* Let the user play with the starting pixel */ | 286 | /* Let the user play with the starting pixel */ |
@@ -354,32 +358,32 @@ static int mt9v022_cropcap(struct v4l2_subdev *sd, struct v4l2_cropcap *a) | |||
354 | return 0; | 358 | return 0; |
355 | } | 359 | } |
356 | 360 | ||
357 | static int mt9v022_g_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | 361 | static int mt9v022_g_fmt(struct v4l2_subdev *sd, |
362 | struct v4l2_mbus_framefmt *mf) | ||
358 | { | 363 | { |
359 | struct i2c_client *client = sd->priv; | 364 | struct i2c_client *client = sd->priv; |
360 | struct mt9v022 *mt9v022 = to_mt9v022(client); | 365 | struct mt9v022 *mt9v022 = to_mt9v022(client); |
361 | struct v4l2_pix_format *pix = &f->fmt.pix; | ||
362 | 366 | ||
363 | pix->width = mt9v022->rect.width; | 367 | mf->width = mt9v022->rect.width; |
364 | pix->height = mt9v022->rect.height; | 368 | mf->height = mt9v022->rect.height; |
365 | pix->pixelformat = mt9v022->fourcc; | 369 | mf->code = mt9v022->fmt->code; |
366 | pix->field = V4L2_FIELD_NONE; | 370 | mf->colorspace = mt9v022->fmt->colorspace; |
367 | pix->colorspace = V4L2_COLORSPACE_SRGB; | 371 | mf->field = V4L2_FIELD_NONE; |
368 | 372 | ||
369 | return 0; | 373 | return 0; |
370 | } | 374 | } |
371 | 375 | ||
372 | static int mt9v022_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | 376 | static int mt9v022_s_fmt(struct v4l2_subdev *sd, |
377 | struct v4l2_mbus_framefmt *mf) | ||
373 | { | 378 | { |
374 | struct i2c_client *client = sd->priv; | 379 | struct i2c_client *client = sd->priv; |
375 | struct mt9v022 *mt9v022 = to_mt9v022(client); | 380 | struct mt9v022 *mt9v022 = to_mt9v022(client); |
376 | struct v4l2_pix_format *pix = &f->fmt.pix; | ||
377 | struct v4l2_crop a = { | 381 | struct v4l2_crop a = { |
378 | .c = { | 382 | .c = { |
379 | .left = mt9v022->rect.left, | 383 | .left = mt9v022->rect.left, |
380 | .top = mt9v022->rect.top, | 384 | .top = mt9v022->rect.top, |
381 | .width = pix->width, | 385 | .width = mf->width, |
382 | .height = pix->height, | 386 | .height = mf->height, |
383 | }, | 387 | }, |
384 | }; | 388 | }; |
385 | int ret; | 389 | int ret; |
@@ -388,14 +392,14 @@ static int mt9v022_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | |||
388 | * The caller provides a supported format, as verified per call to | 392 | * The caller provides a supported format, as verified per call to |
389 | * icd->try_fmt(), datawidth is from our supported format list | 393 | * icd->try_fmt(), datawidth is from our supported format list |
390 | */ | 394 | */ |
391 | switch (pix->pixelformat) { | 395 | switch (mf->code) { |
392 | case V4L2_PIX_FMT_GREY: | 396 | case V4L2_MBUS_FMT_GREY8_1X8: |
393 | case V4L2_PIX_FMT_Y16: | 397 | case V4L2_MBUS_FMT_Y10_1X10: |
394 | if (mt9v022->model != V4L2_IDENT_MT9V022IX7ATM) | 398 | if (mt9v022->model != V4L2_IDENT_MT9V022IX7ATM) |
395 | return -EINVAL; | 399 | return -EINVAL; |
396 | break; | 400 | break; |
397 | case V4L2_PIX_FMT_SBGGR8: | 401 | case V4L2_MBUS_FMT_SBGGR8_1X8: |
398 | case V4L2_PIX_FMT_SBGGR16: | 402 | case V4L2_MBUS_FMT_SBGGR10_1X10: |
399 | if (mt9v022->model != V4L2_IDENT_MT9V022IX7ATC) | 403 | if (mt9v022->model != V4L2_IDENT_MT9V022IX7ATC) |
400 | return -EINVAL; | 404 | return -EINVAL; |
401 | break; | 405 | break; |
@@ -409,27 +413,39 @@ static int mt9v022_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | |||
409 | /* No support for scaling on this camera, just crop. */ | 413 | /* No support for scaling on this camera, just crop. */ |
410 | ret = mt9v022_s_crop(sd, &a); | 414 | ret = mt9v022_s_crop(sd, &a); |
411 | if (!ret) { | 415 | if (!ret) { |
412 | pix->width = mt9v022->rect.width; | 416 | mf->width = mt9v022->rect.width; |
413 | pix->height = mt9v022->rect.height; | 417 | mf->height = mt9v022->rect.height; |
414 | mt9v022->fourcc = pix->pixelformat; | 418 | mt9v022->fmt = mt9v022_find_datafmt(mf->code, |
419 | mt9v022->fmts, mt9v022->num_fmts); | ||
420 | mf->colorspace = mt9v022->fmt->colorspace; | ||
415 | } | 421 | } |
416 | 422 | ||
417 | return ret; | 423 | return ret; |
418 | } | 424 | } |
419 | 425 | ||
420 | static int mt9v022_try_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | 426 | static int mt9v022_try_fmt(struct v4l2_subdev *sd, |
427 | struct v4l2_mbus_framefmt *mf) | ||
421 | { | 428 | { |
422 | struct i2c_client *client = sd->priv; | 429 | struct i2c_client *client = sd->priv; |
423 | struct mt9v022 *mt9v022 = to_mt9v022(client); | 430 | struct mt9v022 *mt9v022 = to_mt9v022(client); |
424 | struct v4l2_pix_format *pix = &f->fmt.pix; | 431 | const struct mt9v022_datafmt *fmt; |
425 | int align = pix->pixelformat == V4L2_PIX_FMT_SBGGR8 || | 432 | int align = mf->code == V4L2_MBUS_FMT_SBGGR8_1X8 || |
426 | pix->pixelformat == V4L2_PIX_FMT_SBGGR16; | 433 | mf->code == V4L2_MBUS_FMT_SBGGR10_1X10; |
427 | 434 | ||
428 | v4l_bound_align_image(&pix->width, MT9V022_MIN_WIDTH, | 435 | v4l_bound_align_image(&mf->width, MT9V022_MIN_WIDTH, |
429 | MT9V022_MAX_WIDTH, align, | 436 | MT9V022_MAX_WIDTH, align, |
430 | &pix->height, MT9V022_MIN_HEIGHT + mt9v022->y_skip_top, | 437 | &mf->height, MT9V022_MIN_HEIGHT + mt9v022->y_skip_top, |
431 | MT9V022_MAX_HEIGHT + mt9v022->y_skip_top, align, 0); | 438 | MT9V022_MAX_HEIGHT + mt9v022->y_skip_top, align, 0); |
432 | 439 | ||
440 | fmt = mt9v022_find_datafmt(mf->code, mt9v022->fmts, | ||
441 | mt9v022->num_fmts); | ||
442 | if (!fmt) { | ||
443 | fmt = mt9v022->fmt; | ||
444 | mf->code = fmt->code; | ||
445 | } | ||
446 | |||
447 | mf->colorspace = fmt->colorspace; | ||
448 | |||
433 | return 0; | 449 | return 0; |
434 | } | 450 | } |
435 | 451 | ||
@@ -749,17 +765,17 @@ static int mt9v022_video_probe(struct soc_camera_device *icd, | |||
749 | !strcmp("color", sensor_type))) { | 765 | !strcmp("color", sensor_type))) { |
750 | ret = reg_write(client, MT9V022_PIXEL_OPERATION_MODE, 4 | 0x11); | 766 | ret = reg_write(client, MT9V022_PIXEL_OPERATION_MODE, 4 | 0x11); |
751 | mt9v022->model = V4L2_IDENT_MT9V022IX7ATC; | 767 | mt9v022->model = V4L2_IDENT_MT9V022IX7ATC; |
752 | icd->formats = mt9v022_colour_formats; | 768 | mt9v022->fmts = mt9v022_colour_fmts; |
753 | } else { | 769 | } else { |
754 | ret = reg_write(client, MT9V022_PIXEL_OPERATION_MODE, 0x11); | 770 | ret = reg_write(client, MT9V022_PIXEL_OPERATION_MODE, 0x11); |
755 | mt9v022->model = V4L2_IDENT_MT9V022IX7ATM; | 771 | mt9v022->model = V4L2_IDENT_MT9V022IX7ATM; |
756 | icd->formats = mt9v022_monochrome_formats; | 772 | mt9v022->fmts = mt9v022_monochrome_fmts; |
757 | } | 773 | } |
758 | 774 | ||
759 | if (ret < 0) | 775 | if (ret < 0) |
760 | goto ei2c; | 776 | goto ei2c; |
761 | 777 | ||
762 | icd->num_formats = 0; | 778 | mt9v022->num_fmts = 0; |
763 | 779 | ||
764 | /* | 780 | /* |
765 | * This is a 10bit sensor, so by default we only allow 10bit. | 781 | * This is a 10bit sensor, so by default we only allow 10bit. |
@@ -772,14 +788,14 @@ static int mt9v022_video_probe(struct soc_camera_device *icd, | |||
772 | flags = SOCAM_DATAWIDTH_10; | 788 | flags = SOCAM_DATAWIDTH_10; |
773 | 789 | ||
774 | if (flags & SOCAM_DATAWIDTH_10) | 790 | if (flags & SOCAM_DATAWIDTH_10) |
775 | icd->num_formats++; | 791 | mt9v022->num_fmts++; |
776 | else | 792 | else |
777 | icd->formats++; | 793 | mt9v022->fmts++; |
778 | 794 | ||
779 | if (flags & SOCAM_DATAWIDTH_8) | 795 | if (flags & SOCAM_DATAWIDTH_8) |
780 | icd->num_formats++; | 796 | mt9v022->num_fmts++; |
781 | 797 | ||
782 | mt9v022->fourcc = icd->formats->fourcc; | 798 | mt9v022->fmt = &mt9v022->fmts[0]; |
783 | 799 | ||
784 | dev_info(&client->dev, "Detected a MT9V022 chip ID %x, %s sensor\n", | 800 | dev_info(&client->dev, "Detected a MT9V022 chip ID %x, %s sensor\n", |
785 | data, mt9v022->model == V4L2_IDENT_MT9V022IX7ATM ? | 801 | data, mt9v022->model == V4L2_IDENT_MT9V022IX7ATM ? |
@@ -823,14 +839,28 @@ static struct v4l2_subdev_core_ops mt9v022_subdev_core_ops = { | |||
823 | #endif | 839 | #endif |
824 | }; | 840 | }; |
825 | 841 | ||
842 | static int mt9v022_enum_fmt(struct v4l2_subdev *sd, int index, | ||
843 | enum v4l2_mbus_pixelcode *code) | ||
844 | { | ||
845 | struct i2c_client *client = sd->priv; | ||
846 | struct mt9v022 *mt9v022 = to_mt9v022(client); | ||
847 | |||
848 | if ((unsigned int)index >= mt9v022->num_fmts) | ||
849 | return -EINVAL; | ||
850 | |||
851 | *code = mt9v022->fmts[index].code; | ||
852 | return 0; | ||
853 | } | ||
854 | |||
826 | static struct v4l2_subdev_video_ops mt9v022_subdev_video_ops = { | 855 | static struct v4l2_subdev_video_ops mt9v022_subdev_video_ops = { |
827 | .s_stream = mt9v022_s_stream, | 856 | .s_stream = mt9v022_s_stream, |
828 | .s_fmt = mt9v022_s_fmt, | 857 | .s_mbus_fmt = mt9v022_s_fmt, |
829 | .g_fmt = mt9v022_g_fmt, | 858 | .g_mbus_fmt = mt9v022_g_fmt, |
830 | .try_fmt = mt9v022_try_fmt, | 859 | .try_mbus_fmt = mt9v022_try_fmt, |
831 | .s_crop = mt9v022_s_crop, | 860 | .s_crop = mt9v022_s_crop, |
832 | .g_crop = mt9v022_g_crop, | 861 | .g_crop = mt9v022_g_crop, |
833 | .cropcap = mt9v022_cropcap, | 862 | .cropcap = mt9v022_cropcap, |
863 | .enum_mbus_fmt = mt9v022_enum_fmt, | ||
834 | }; | 864 | }; |
835 | 865 | ||
836 | static struct v4l2_subdev_sensor_ops mt9v022_subdev_sensor_ops = { | 866 | static struct v4l2_subdev_sensor_ops mt9v022_subdev_sensor_ops = { |
diff --git a/drivers/media/video/mx1_camera.c b/drivers/media/video/mx1_camera.c
index 4c1a439373c5..2ba14fb5b031 100644
--- a/drivers/media/video/mx1_camera.c
+++ b/drivers/media/video/mx1_camera.c
@@ -37,6 +37,7 @@ | |||
37 | #include <media/v4l2-common.h> | 37 | #include <media/v4l2-common.h> |
38 | #include <media/v4l2-dev.h> | 38 | #include <media/v4l2-dev.h> |
39 | #include <media/videobuf-dma-contig.h> | 39 | #include <media/videobuf-dma-contig.h> |
40 | #include <media/soc_mediabus.h> | ||
40 | 41 | ||
41 | #include <asm/dma.h> | 42 | #include <asm/dma.h> |
42 | #include <asm/fiq.h> | 43 | #include <asm/fiq.h> |
@@ -94,9 +95,9 @@ | |||
94 | /* buffer for one video frame */ | 95 | /* buffer for one video frame */ |
95 | struct mx1_buffer { | 96 | struct mx1_buffer { |
96 | /* common v4l buffer stuff -- must be first */ | 97 | /* common v4l buffer stuff -- must be first */ |
97 | struct videobuf_buffer vb; | 98 | struct videobuf_buffer vb; |
98 | const struct soc_camera_data_format *fmt; | 99 | enum v4l2_mbus_pixelcode code; |
99 | int inwork; | 100 | int inwork; |
100 | }; | 101 | }; |
101 | 102 | ||
102 | /* | 103 | /* |
@@ -128,9 +129,13 @@ static int mx1_videobuf_setup(struct videobuf_queue *vq, unsigned int *count, | |||
128 | unsigned int *size) | 129 | unsigned int *size) |
129 | { | 130 | { |
130 | struct soc_camera_device *icd = vq->priv_data; | 131 | struct soc_camera_device *icd = vq->priv_data; |
132 | int bytes_per_line = soc_mbus_bytes_per_line(icd->user_width, | ||
133 | icd->current_fmt->host_fmt); | ||
131 | 134 | ||
132 | *size = icd->user_width * icd->user_height * | 135 | if (bytes_per_line < 0) |
133 | ((icd->current_fmt->depth + 7) >> 3); | 136 | return bytes_per_line; |
137 | |||
138 | *size = bytes_per_line * icd->user_height; | ||
134 | 139 | ||
135 | if (!*count) | 140 | if (!*count) |
136 | *count = 32; | 141 | *count = 32; |
@@ -169,6 +174,11 @@ static int mx1_videobuf_prepare(struct videobuf_queue *vq, | |||
169 | struct soc_camera_device *icd = vq->priv_data; | 174 | struct soc_camera_device *icd = vq->priv_data; |
170 | struct mx1_buffer *buf = container_of(vb, struct mx1_buffer, vb); | 175 | struct mx1_buffer *buf = container_of(vb, struct mx1_buffer, vb); |
171 | int ret; | 176 | int ret; |
177 | int bytes_per_line = soc_mbus_bytes_per_line(icd->user_width, | ||
178 | icd->current_fmt->host_fmt); | ||
179 | |||
180 | if (bytes_per_line < 0) | ||
181 | return bytes_per_line; | ||
172 | 182 | ||
173 | dev_dbg(icd->dev.parent, "%s (vb=0x%p) 0x%08lx %d\n", __func__, | 183 | dev_dbg(icd->dev.parent, "%s (vb=0x%p) 0x%08lx %d\n", __func__, |
174 | vb, vb->baddr, vb->bsize); | 184 | vb, vb->baddr, vb->bsize); |
@@ -184,18 +194,18 @@ static int mx1_videobuf_prepare(struct videobuf_queue *vq, | |||
184 | */ | 194 | */ |
185 | buf->inwork = 1; | 195 | buf->inwork = 1; |
186 | 196 | ||
187 | if (buf->fmt != icd->current_fmt || | 197 | if (buf->code != icd->current_fmt->code || |
188 | vb->width != icd->user_width || | 198 | vb->width != icd->user_width || |
189 | vb->height != icd->user_height || | 199 | vb->height != icd->user_height || |
190 | vb->field != field) { | 200 | vb->field != field) { |
191 | buf->fmt = icd->current_fmt; | 201 | buf->code = icd->current_fmt->code; |
192 | vb->width = icd->user_width; | 202 | vb->width = icd->user_width; |
193 | vb->height = icd->user_height; | 203 | vb->height = icd->user_height; |
194 | vb->field = field; | 204 | vb->field = field; |
195 | vb->state = VIDEOBUF_NEEDS_INIT; | 205 | vb->state = VIDEOBUF_NEEDS_INIT; |
196 | } | 206 | } |
197 | 207 | ||
198 | vb->size = vb->width * vb->height * ((buf->fmt->depth + 7) >> 3); | 208 | vb->size = bytes_per_line * vb->height; |
199 | if (0 != vb->baddr && vb->bsize < vb->size) { | 209 | if (0 != vb->baddr && vb->bsize < vb->size) { |
200 | ret = -EINVAL; | 210 | ret = -EINVAL; |
201 | goto out; | 211 | goto out; |
@@ -497,12 +507,10 @@ static int mx1_camera_set_bus_param(struct soc_camera_device *icd, __u32 pixfmt) | |||
497 | 507 | ||
498 | /* MX1 supports only 8bit buswidth */ | 508 | /* MX1 supports only 8bit buswidth */ |
499 | common_flags = soc_camera_bus_param_compatible(camera_flags, | 509 | common_flags = soc_camera_bus_param_compatible(camera_flags, |
500 | CSI_BUS_FLAGS); | 510 | CSI_BUS_FLAGS); |
501 | if (!common_flags) | 511 | if (!common_flags) |
502 | return -EINVAL; | 512 | return -EINVAL; |
503 | 513 | ||
504 | icd->buswidth = 8; | ||
505 | |||
506 | /* Make choises, based on platform choice */ | 514 | /* Make choises, based on platform choice */ |
507 | if ((common_flags & SOCAM_VSYNC_ACTIVE_HIGH) && | 515 | if ((common_flags & SOCAM_VSYNC_ACTIVE_HIGH) && |
508 | (common_flags & SOCAM_VSYNC_ACTIVE_LOW)) { | 516 | (common_flags & SOCAM_VSYNC_ACTIVE_LOW)) { |
@@ -555,7 +563,8 @@ static int mx1_camera_set_fmt(struct soc_camera_device *icd, | |||
555 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); | 563 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); |
556 | const struct soc_camera_format_xlate *xlate; | 564 | const struct soc_camera_format_xlate *xlate; |
557 | struct v4l2_pix_format *pix = &f->fmt.pix; | 565 | struct v4l2_pix_format *pix = &f->fmt.pix; |
558 | int ret; | 566 | struct v4l2_mbus_framefmt mf; |
567 | int ret, buswidth; | ||
559 | 568 | ||
560 | xlate = soc_camera_xlate_by_fourcc(icd, pix->pixelformat); | 569 | xlate = soc_camera_xlate_by_fourcc(icd, pix->pixelformat); |
561 | if (!xlate) { | 570 | if (!xlate) { |
@@ -564,12 +573,33 @@ static int mx1_camera_set_fmt(struct soc_camera_device *icd, | |||
564 | return -EINVAL; | 573 | return -EINVAL; |
565 | } | 574 | } |
566 | 575 | ||
567 | ret = v4l2_subdev_call(sd, video, s_fmt, f); | 576 | buswidth = xlate->host_fmt->bits_per_sample; |
568 | if (!ret) { | 577 | if (buswidth > 8) { |
569 | icd->buswidth = xlate->buswidth; | 578 | dev_warn(icd->dev.parent, |
570 | icd->current_fmt = xlate->host_fmt; | 579 | "bits-per-sample %d for format %x unsupported\n", |
580 | buswidth, pix->pixelformat); | ||
581 | return -EINVAL; | ||
571 | } | 582 | } |
572 | 583 | ||
584 | mf.width = pix->width; | ||
585 | mf.height = pix->height; | ||
586 | mf.field = pix->field; | ||
587 | mf.colorspace = pix->colorspace; | ||
588 | mf.code = xlate->code; | ||
589 | |||
590 | ret = v4l2_subdev_call(sd, video, s_mbus_fmt, &mf); | ||
591 | if (ret < 0) | ||
592 | return ret; | ||
593 | |||
594 | if (mf.code != xlate->code) | ||
595 | return -EINVAL; | ||
596 | |||
597 | pix->width = mf.width; | ||
598 | pix->height = mf.height; | ||
599 | pix->field = mf.field; | ||
600 | pix->colorspace = mf.colorspace; | ||
601 | icd->current_fmt = xlate; | ||
602 | |||
573 | return ret; | 603 | return ret; |
574 | } | 604 | } |
575 | 605 | ||
@@ -577,10 +607,36 @@ static int mx1_camera_try_fmt(struct soc_camera_device *icd, | |||
577 | struct v4l2_format *f) | 607 | struct v4l2_format *f) |
578 | { | 608 | { |
579 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); | 609 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); |
610 | const struct soc_camera_format_xlate *xlate; | ||
611 | struct v4l2_pix_format *pix = &f->fmt.pix; | ||
612 | struct v4l2_mbus_framefmt mf; | ||
613 | int ret; | ||
580 | /* TODO: limit to mx1 hardware capabilities */ | 614 | /* TODO: limit to mx1 hardware capabilities */ |
581 | 615 | ||
616 | xlate = soc_camera_xlate_by_fourcc(icd, pix->pixelformat); | ||
617 | if (!xlate) { | ||
618 | dev_warn(icd->dev.parent, "Format %x not found\n", | ||
619 | pix->pixelformat); | ||
620 | return -EINVAL; | ||
621 | } | ||
622 | |||
623 | mf.width = pix->width; | ||
624 | mf.height = pix->height; | ||
625 | mf.field = pix->field; | ||
626 | mf.colorspace = pix->colorspace; | ||
627 | mf.code = xlate->code; | ||
628 | |||
582 | /* limit to sensor capabilities */ | 629 | /* limit to sensor capabilities */ |
583 | return v4l2_subdev_call(sd, video, try_fmt, f); | 630 | ret = v4l2_subdev_call(sd, video, try_mbus_fmt, &mf); |
631 | if (ret < 0) | ||
632 | return ret; | ||
633 | |||
634 | pix->width = mf.width; | ||
635 | pix->height = mf.height; | ||
636 | pix->field = mf.field; | ||
637 | pix->colorspace = mf.colorspace; | ||
638 | |||
639 | return 0; | ||
584 | } | 640 | } |
585 | 641 | ||
586 | static int mx1_camera_reqbufs(struct soc_camera_file *icf, | 642 | static int mx1_camera_reqbufs(struct soc_camera_file *icf, |
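Each converted host driver repeats the same round trip when setting or trying a format: copy the geometry from the user's v4l2_pix_format into a v4l2_mbus_framefmt, substitute the mediabus code recorded in the xlate entry, call the subdev's s_mbus_fmt or try_mbus_fmt, then copy the result back while leaving the fourcc untouched. A sketch of that copy, factored into two hypothetical helpers (pix_to_mbus/mbus_to_pix are illustrative names, not part of this patch; the drivers open-code these assignments):

#include <linux/videodev2.h>
#include <media/v4l2-mediabus.h>

static void pix_to_mbus(const struct v4l2_pix_format *pix,
			enum v4l2_mbus_pixelcode code,
			struct v4l2_mbus_framefmt *mf)
{
	mf->width	= pix->width;
	mf->height	= pix->height;
	mf->field	= pix->field;
	mf->colorspace	= pix->colorspace;
	mf->code	= code;	/* mediabus code from the xlate entry, not a fourcc */
}

static void mbus_to_pix(const struct v4l2_mbus_framefmt *mf,
			struct v4l2_pix_format *pix)
{
	/* pix->pixelformat is left alone: userspace keeps its fourcc */
	pix->width	= mf->width;
	pix->height	= mf->height;
	pix->field	= mf->field;
	pix->colorspace	= mf->colorspace;
}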
diff --git a/drivers/media/video/mx3_camera.c b/drivers/media/video/mx3_camera.c index ae7d48324493..bd297f567dc7 100644 --- a/drivers/media/video/mx3_camera.c +++ b/drivers/media/video/mx3_camera.c | |||
@@ -23,6 +23,7 @@ | |||
23 | #include <media/v4l2-dev.h> | 23 | #include <media/v4l2-dev.h> |
24 | #include <media/videobuf-dma-contig.h> | 24 | #include <media/videobuf-dma-contig.h> |
25 | #include <media/soc_camera.h> | 25 | #include <media/soc_camera.h> |
26 | #include <media/soc_mediabus.h> | ||
26 | 27 | ||
27 | #include <mach/ipu.h> | 28 | #include <mach/ipu.h> |
28 | #include <mach/mx3_camera.h> | 29 | #include <mach/mx3_camera.h> |
@@ -63,7 +64,7 @@ | |||
63 | struct mx3_camera_buffer { | 64 | struct mx3_camera_buffer { |
64 | /* common v4l buffer stuff -- must be first */ | 65 | /* common v4l buffer stuff -- must be first */ |
65 | struct videobuf_buffer vb; | 66 | struct videobuf_buffer vb; |
66 | const struct soc_camera_data_format *fmt; | 67 | enum v4l2_mbus_pixelcode code; |
67 | 68 | ||
68 | /* One descriptor per scatterlist (per frame) */ | 69 | /* One descriptor per scatterlist (per frame) */ |
69 | struct dma_async_tx_descriptor *txd; | 70 | struct dma_async_tx_descriptor *txd; |
@@ -118,8 +119,6 @@ struct dma_chan_request { | |||
118 | enum ipu_channel id; | 119 | enum ipu_channel id; |
119 | }; | 120 | }; |
120 | 121 | ||
121 | static int mx3_camera_set_bus_param(struct soc_camera_device *icd, __u32 pixfmt); | ||
122 | |||
123 | static u32 csi_reg_read(struct mx3_camera_dev *mx3, off_t reg) | 122 | static u32 csi_reg_read(struct mx3_camera_dev *mx3, off_t reg) |
124 | { | 123 | { |
125 | return __raw_readl(mx3->base + reg); | 124 | return __raw_readl(mx3->base + reg); |
@@ -211,17 +210,16 @@ static int mx3_videobuf_setup(struct videobuf_queue *vq, unsigned int *count, | |||
211 | struct soc_camera_device *icd = vq->priv_data; | 210 | struct soc_camera_device *icd = vq->priv_data; |
212 | struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); | 211 | struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); |
213 | struct mx3_camera_dev *mx3_cam = ici->priv; | 212 | struct mx3_camera_dev *mx3_cam = ici->priv; |
214 | /* | 213 | int bytes_per_line = soc_mbus_bytes_per_line(icd->user_width, |
215 | * bits-per-pixel (depth) as specified in camera's pixel format does | 214 | icd->current_fmt->host_fmt); |
216 | * not necessarily match what the camera interface writes to RAM, but | 215 | |
217 | * it should be good enough for now. | 216 | if (bytes_per_line < 0) |
218 | */ | 217 | return bytes_per_line; |
219 | unsigned int bpp = DIV_ROUND_UP(icd->current_fmt->depth, 8); | ||
220 | 218 | ||
221 | if (!mx3_cam->idmac_channel[0]) | 219 | if (!mx3_cam->idmac_channel[0]) |
222 | return -EINVAL; | 220 | return -EINVAL; |
223 | 221 | ||
224 | *size = icd->user_width * icd->user_height * bpp; | 222 | *size = bytes_per_line * icd->user_height; |
225 | 223 | ||
226 | if (!*count) | 224 | if (!*count) |
227 | *count = 32; | 225 | *count = 32; |
@@ -241,21 +239,26 @@ static int mx3_videobuf_prepare(struct videobuf_queue *vq, | |||
241 | struct mx3_camera_dev *mx3_cam = ici->priv; | 239 | struct mx3_camera_dev *mx3_cam = ici->priv; |
242 | struct mx3_camera_buffer *buf = | 240 | struct mx3_camera_buffer *buf = |
243 | container_of(vb, struct mx3_camera_buffer, vb); | 241 | container_of(vb, struct mx3_camera_buffer, vb); |
244 | /* current_fmt _must_ always be set */ | 242 | size_t new_size; |
245 | size_t new_size = icd->user_width * icd->user_height * | ||
246 | ((icd->current_fmt->depth + 7) >> 3); | ||
247 | int ret; | 243 | int ret; |
244 | int bytes_per_line = soc_mbus_bytes_per_line(icd->user_width, | ||
245 | icd->current_fmt->host_fmt); | ||
246 | |||
247 | if (bytes_per_line < 0) | ||
248 | return bytes_per_line; | ||
249 | |||
250 | new_size = bytes_per_line * icd->user_height; | ||
248 | 251 | ||
249 | /* | 252 | /* |
250 | * I think, in buf_prepare you only have to protect global data, | 253 | * I think, in buf_prepare you only have to protect global data, |
251 | * the actual buffer is yours | 254 | * the actual buffer is yours |
252 | */ | 255 | */ |
253 | 256 | ||
254 | if (buf->fmt != icd->current_fmt || | 257 | if (buf->code != icd->current_fmt->code || |
255 | vb->width != icd->user_width || | 258 | vb->width != icd->user_width || |
256 | vb->height != icd->user_height || | 259 | vb->height != icd->user_height || |
257 | vb->field != field) { | 260 | vb->field != field) { |
258 | buf->fmt = icd->current_fmt; | 261 | buf->code = icd->current_fmt->code; |
259 | vb->width = icd->user_width; | 262 | vb->width = icd->user_width; |
260 | vb->height = icd->user_height; | 263 | vb->height = icd->user_height; |
261 | vb->field = field; | 264 | vb->field = field; |
@@ -348,13 +351,13 @@ static void mx3_videobuf_queue(struct videobuf_queue *vq, | |||
348 | struct dma_async_tx_descriptor *txd = buf->txd; | 351 | struct dma_async_tx_descriptor *txd = buf->txd; |
349 | struct idmac_channel *ichan = to_idmac_chan(txd->chan); | 352 | struct idmac_channel *ichan = to_idmac_chan(txd->chan); |
350 | struct idmac_video_param *video = &ichan->params.video; | 353 | struct idmac_video_param *video = &ichan->params.video; |
351 | const struct soc_camera_data_format *data_fmt = icd->current_fmt; | ||
352 | dma_cookie_t cookie; | 354 | dma_cookie_t cookie; |
355 | u32 fourcc = icd->current_fmt->host_fmt->fourcc; | ||
353 | 356 | ||
354 | BUG_ON(!irqs_disabled()); | 357 | BUG_ON(!irqs_disabled()); |
355 | 358 | ||
356 | /* This is the configuration of one sg-element */ | 359 | /* This is the configuration of one sg-element */ |
357 | video->out_pixel_fmt = fourcc_to_ipu_pix(data_fmt->fourcc); | 360 | video->out_pixel_fmt = fourcc_to_ipu_pix(fourcc); |
358 | video->out_width = icd->user_width; | 361 | video->out_width = icd->user_width; |
359 | video->out_height = icd->user_height; | 362 | video->out_height = icd->user_height; |
360 | video->out_stride = icd->user_width; | 363 | video->out_stride = icd->user_width; |
@@ -568,28 +571,33 @@ static int test_platform_param(struct mx3_camera_dev *mx3_cam, | |||
568 | * If requested data width is supported by the platform, use it or any | 571 | * If requested data width is supported by the platform, use it or any |
569 | * possible lower value - i.MX31 is smart enough to shift bits | 572 | * possible lower value - i.MX31 is smart enough to shift bits |
570 | */ | 573 | */ |
574 | if (mx3_cam->platform_flags & MX3_CAMERA_DATAWIDTH_15) | ||
575 | *flags |= SOCAM_DATAWIDTH_15 | SOCAM_DATAWIDTH_10 | | ||
576 | SOCAM_DATAWIDTH_8 | SOCAM_DATAWIDTH_4; | ||
577 | else if (mx3_cam->platform_flags & MX3_CAMERA_DATAWIDTH_10) | ||
578 | *flags |= SOCAM_DATAWIDTH_10 | SOCAM_DATAWIDTH_8 | | ||
579 | SOCAM_DATAWIDTH_4; | ||
580 | else if (mx3_cam->platform_flags & MX3_CAMERA_DATAWIDTH_8) | ||
581 | *flags |= SOCAM_DATAWIDTH_8 | SOCAM_DATAWIDTH_4; | ||
582 | else if (mx3_cam->platform_flags & MX3_CAMERA_DATAWIDTH_4) | ||
583 | *flags |= SOCAM_DATAWIDTH_4; | ||
584 | |||
571 | switch (buswidth) { | 585 | switch (buswidth) { |
572 | case 15: | 586 | case 15: |
573 | if (!(mx3_cam->platform_flags & MX3_CAMERA_DATAWIDTH_15)) | 587 | if (!(*flags & SOCAM_DATAWIDTH_15)) |
574 | return -EINVAL; | 588 | return -EINVAL; |
575 | *flags |= SOCAM_DATAWIDTH_15 | SOCAM_DATAWIDTH_10 | | ||
576 | SOCAM_DATAWIDTH_8 | SOCAM_DATAWIDTH_4; | ||
577 | break; | 589 | break; |
578 | case 10: | 590 | case 10: |
579 | if (!(mx3_cam->platform_flags & MX3_CAMERA_DATAWIDTH_10)) | 591 | if (!(*flags & SOCAM_DATAWIDTH_10)) |
580 | return -EINVAL; | 592 | return -EINVAL; |
581 | *flags |= SOCAM_DATAWIDTH_10 | SOCAM_DATAWIDTH_8 | | ||
582 | SOCAM_DATAWIDTH_4; | ||
583 | break; | 593 | break; |
584 | case 8: | 594 | case 8: |
585 | if (!(mx3_cam->platform_flags & MX3_CAMERA_DATAWIDTH_8)) | 595 | if (!(*flags & SOCAM_DATAWIDTH_8)) |
586 | return -EINVAL; | 596 | return -EINVAL; |
587 | *flags |= SOCAM_DATAWIDTH_8 | SOCAM_DATAWIDTH_4; | ||
588 | break; | 597 | break; |
589 | case 4: | 598 | case 4: |
590 | if (!(mx3_cam->platform_flags & MX3_CAMERA_DATAWIDTH_4)) | 599 | if (!(*flags & SOCAM_DATAWIDTH_4)) |
591 | return -EINVAL; | 600 | return -EINVAL; |
592 | *flags |= SOCAM_DATAWIDTH_4; | ||
593 | break; | 601 | break; |
594 | default: | 602 | default: |
595 | dev_warn(mx3_cam->soc_host.v4l2_dev.dev, | 603 | dev_warn(mx3_cam->soc_host.v4l2_dev.dev, |
@@ -638,91 +646,92 @@ static bool chan_filter(struct dma_chan *chan, void *arg) | |||
638 | pdata->dma_dev == chan->device->dev; | 646 | pdata->dma_dev == chan->device->dev; |
639 | } | 647 | } |
640 | 648 | ||
641 | static const struct soc_camera_data_format mx3_camera_formats[] = { | 649 | static const struct soc_mbus_pixelfmt mx3_camera_formats[] = { |
642 | { | 650 | { |
643 | .name = "Bayer (sRGB) 8 bit", | 651 | .fourcc = V4L2_PIX_FMT_SBGGR8, |
644 | .depth = 8, | 652 | .name = "Bayer BGGR (sRGB) 8 bit", |
645 | .fourcc = V4L2_PIX_FMT_SBGGR8, | 653 | .bits_per_sample = 8, |
646 | .colorspace = V4L2_COLORSPACE_SRGB, | 654 | .packing = SOC_MBUS_PACKING_NONE, |
655 | .order = SOC_MBUS_ORDER_LE, | ||
647 | }, { | 656 | }, { |
648 | .name = "Monochrome 8 bit", | 657 | .fourcc = V4L2_PIX_FMT_GREY, |
649 | .depth = 8, | 658 | .name = "Monochrome 8 bit", |
650 | .fourcc = V4L2_PIX_FMT_GREY, | 659 | .bits_per_sample = 8, |
651 | .colorspace = V4L2_COLORSPACE_JPEG, | 660 | .packing = SOC_MBUS_PACKING_NONE, |
661 | .order = SOC_MBUS_ORDER_LE, | ||
652 | }, | 662 | }, |
653 | }; | 663 | }; |
654 | 664 | ||
655 | static bool buswidth_supported(struct soc_camera_host *ici, int depth) | 665 | /* This will be corrected as we get more formats */ |
666 | static bool mx3_camera_packing_supported(const struct soc_mbus_pixelfmt *fmt) | ||
656 | { | 667 | { |
657 | struct mx3_camera_dev *mx3_cam = ici->priv; | 668 | return fmt->packing == SOC_MBUS_PACKING_NONE || |
658 | 669 | (fmt->bits_per_sample == 8 && | |
659 | switch (depth) { | 670 | fmt->packing == SOC_MBUS_PACKING_2X8_PADHI) || |
660 | case 4: | 671 | (fmt->bits_per_sample > 8 && |
661 | return !!(mx3_cam->platform_flags & MX3_CAMERA_DATAWIDTH_4); | 672 | fmt->packing == SOC_MBUS_PACKING_EXTEND16); |
662 | case 8: | ||
663 | return !!(mx3_cam->platform_flags & MX3_CAMERA_DATAWIDTH_8); | ||
664 | case 10: | ||
665 | return !!(mx3_cam->platform_flags & MX3_CAMERA_DATAWIDTH_10); | ||
666 | case 15: | ||
667 | return !!(mx3_cam->platform_flags & MX3_CAMERA_DATAWIDTH_15); | ||
668 | } | ||
669 | return false; | ||
670 | } | 673 | } |
671 | 674 | ||
672 | static int mx3_camera_get_formats(struct soc_camera_device *icd, int idx, | 675 | static int mx3_camera_get_formats(struct soc_camera_device *icd, int idx, |
673 | struct soc_camera_format_xlate *xlate) | 676 | struct soc_camera_format_xlate *xlate) |
674 | { | 677 | { |
675 | struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); | 678 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); |
676 | int formats = 0, buswidth, ret; | 679 | struct device *dev = icd->dev.parent; |
680 | int formats = 0, ret; | ||
681 | enum v4l2_mbus_pixelcode code; | ||
682 | const struct soc_mbus_pixelfmt *fmt; | ||
677 | 683 | ||
678 | buswidth = icd->formats[idx].depth; | 684 | ret = v4l2_subdev_call(sd, video, enum_mbus_fmt, idx, &code); |
685 | if (ret < 0) | ||
686 | /* No more formats */ | ||
687 | return 0; | ||
679 | 688 | ||
680 | if (!buswidth_supported(ici, buswidth)) | 689 | fmt = soc_mbus_get_fmtdesc(code); |
690 | if (!fmt) { | ||
691 | dev_err(icd->dev.parent, | ||
692 | "Invalid format code #%d: %d\n", idx, code); | ||
681 | return 0; | 693 | return 0; |
694 | } | ||
682 | 695 | ||
683 | ret = mx3_camera_try_bus_param(icd, buswidth); | 696 | /* This also checks support for the requested bits-per-sample */ |
697 | ret = mx3_camera_try_bus_param(icd, fmt->bits_per_sample); | ||
684 | if (ret < 0) | 698 | if (ret < 0) |
685 | return 0; | 699 | return 0; |
686 | 700 | ||
687 | switch (icd->formats[idx].fourcc) { | 701 | switch (code) { |
688 | case V4L2_PIX_FMT_SGRBG10: | 702 | case V4L2_MBUS_FMT_SBGGR10_1X10: |
689 | formats++; | 703 | formats++; |
690 | if (xlate) { | 704 | if (xlate) { |
691 | xlate->host_fmt = &mx3_camera_formats[0]; | 705 | xlate->host_fmt = &mx3_camera_formats[0]; |
692 | xlate->cam_fmt = icd->formats + idx; | 706 | xlate->code = code; |
693 | xlate->buswidth = buswidth; | ||
694 | xlate++; | 707 | xlate++; |
695 | dev_dbg(icd->dev.parent, | 708 | dev_dbg(dev, "Providing format %s using code %d\n", |
696 | "Providing format %s using %s\n", | 709 | mx3_camera_formats[0].name, code); |
697 | mx3_camera_formats[0].name, | ||
698 | icd->formats[idx].name); | ||
699 | } | 710 | } |
700 | goto passthrough; | 711 | break; |
701 | case V4L2_PIX_FMT_Y16: | 712 | case V4L2_MBUS_FMT_Y10_1X10: |
702 | formats++; | 713 | formats++; |
703 | if (xlate) { | 714 | if (xlate) { |
704 | xlate->host_fmt = &mx3_camera_formats[1]; | 715 | xlate->host_fmt = &mx3_camera_formats[1]; |
705 | xlate->cam_fmt = icd->formats + idx; | 716 | xlate->code = code; |
706 | xlate->buswidth = buswidth; | ||
707 | xlate++; | 717 | xlate++; |
708 | dev_dbg(icd->dev.parent, | 718 | dev_dbg(dev, "Providing format %s using code %d\n", |
709 | "Providing format %s using %s\n", | 719 | mx3_camera_formats[1].name, code); |
710 | mx3_camera_formats[0].name, | ||
711 | icd->formats[idx].name); | ||
712 | } | 720 | } |
721 | break; | ||
713 | default: | 722 | default: |
714 | passthrough: | 723 | if (!mx3_camera_packing_supported(fmt)) |
715 | /* Generic pass-through */ | 724 | return 0; |
716 | formats++; | 725 | } |
717 | if (xlate) { | 726 | |
718 | xlate->host_fmt = icd->formats + idx; | 727 | /* Generic pass-through */ |
719 | xlate->cam_fmt = icd->formats + idx; | 728 | formats++; |
720 | xlate->buswidth = buswidth; | 729 | if (xlate) { |
721 | xlate++; | 730 | xlate->host_fmt = fmt; |
722 | dev_dbg(icd->dev.parent, | 731 | xlate->code = code; |
723 | "Providing format %s in pass-through mode\n", | 732 | xlate++; |
724 | icd->formats[idx].name); | 733 | dev_dbg(dev, "Providing format %x in pass-through mode\n", |
725 | } | 734 | fmt->fourcc); |
726 | } | 735 | } |
727 | 736 | ||
728 | return formats; | 737 | return formats; |
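get_formats() now enumerates the client's mediabus codes instead of reading a static icd->formats[] table. The sketch below shows, roughly, how the soc-camera core is expected to drive this callback, assuming it keeps the count-then-fill pattern of soc_camera_init_user_formats(); treat it as an illustration of the contract, not the core's exact code.

#include <linux/errno.h>
#include <linux/vmalloc.h>
#include <media/soc_camera.h>
#include <media/v4l2-mediabus.h>

static int build_xlate_table(struct soc_camera_device *icd,
			     struct soc_camera_host *ici)
{
	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
	enum v4l2_mbus_pixelcode code;
	int i, raw_fmts = 0, fmts = 0, added = 0;

	/* How many mediabus codes does the client offer? */
	while (!v4l2_subdev_call(sd, video, enum_mbus_fmt, raw_fmts, &code))
		raw_fmts++;

	/* First pass: xlate == NULL means "count translations only" */
	for (i = 0; i < raw_fmts; i++)
		fmts += ici->ops->get_formats(icd, i, NULL);
	if (!fmts)
		return -ENXIO;

	icd->user_formats = vmalloc(fmts * sizeof(*icd->user_formats));
	if (!icd->user_formats)
		return -ENOMEM;

	/* Second pass: fill the fourcc <-> mediabus-code translation table */
	for (i = 0; i < raw_fmts; i++)
		added += ici->ops->get_formats(icd, i,
					       icd->user_formats + added);
	icd->num_user_formats = added;

	return 0;
}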
@@ -806,8 +815,7 @@ static int mx3_camera_set_crop(struct soc_camera_device *icd, | |||
806 | struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); | 815 | struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); |
807 | struct mx3_camera_dev *mx3_cam = ici->priv; | 816 | struct mx3_camera_dev *mx3_cam = ici->priv; |
808 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); | 817 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); |
809 | struct v4l2_format f = {.type = V4L2_BUF_TYPE_VIDEO_CAPTURE}; | 818 | struct v4l2_mbus_framefmt mf; |
810 | struct v4l2_pix_format *pix = &f.fmt.pix; | ||
811 | int ret; | 819 | int ret; |
812 | 820 | ||
813 | soc_camera_limit_side(&rect->left, &rect->width, 0, 2, 4096); | 821 | soc_camera_limit_side(&rect->left, &rect->width, 0, 2, 4096); |
@@ -818,19 +826,19 @@ static int mx3_camera_set_crop(struct soc_camera_device *icd, | |||
818 | return ret; | 826 | return ret; |
819 | 827 | ||
820 | /* The capture device might have changed its output */ | 828 | /* The capture device might have changed its output */ |
821 | ret = v4l2_subdev_call(sd, video, g_fmt, &f); | 829 | ret = v4l2_subdev_call(sd, video, g_mbus_fmt, &mf); |
822 | if (ret < 0) | 830 | if (ret < 0) |
823 | return ret; | 831 | return ret; |
824 | 832 | ||
825 | if (pix->width & 7) { | 833 | if (mf.width & 7) { |
826 | /* Ouch! We can only handle 8-byte aligned width... */ | 834 | /* Ouch! We can only handle 8-byte aligned width... */ |
827 | stride_align(&pix->width); | 835 | stride_align(&mf.width); |
828 | ret = v4l2_subdev_call(sd, video, s_fmt, &f); | 836 | ret = v4l2_subdev_call(sd, video, s_mbus_fmt, &mf); |
829 | if (ret < 0) | 837 | if (ret < 0) |
830 | return ret; | 838 | return ret; |
831 | } | 839 | } |
832 | 840 | ||
833 | if (pix->width != icd->user_width || pix->height != icd->user_height) { | 841 | if (mf.width != icd->user_width || mf.height != icd->user_height) { |
834 | /* | 842 | /* |
835 | * We now know pixel formats and can decide upon DMA-channel(s) | 843 | * We now know pixel formats and can decide upon DMA-channel(s) |
836 | * So far only direct camera-to-memory is supported | 844 | * So far only direct camera-to-memory is supported |
@@ -841,14 +849,14 @@ static int mx3_camera_set_crop(struct soc_camera_device *icd, | |||
841 | return ret; | 849 | return ret; |
842 | } | 850 | } |
843 | 851 | ||
844 | configure_geometry(mx3_cam, pix->width, pix->height); | 852 | configure_geometry(mx3_cam, mf.width, mf.height); |
845 | } | 853 | } |
846 | 854 | ||
847 | dev_dbg(icd->dev.parent, "Sensor cropped %dx%d\n", | 855 | dev_dbg(icd->dev.parent, "Sensor cropped %dx%d\n", |
848 | pix->width, pix->height); | 856 | mf.width, mf.height); |
849 | 857 | ||
850 | icd->user_width = pix->width; | 858 | icd->user_width = mf.width; |
851 | icd->user_height = pix->height; | 859 | icd->user_height = mf.height; |
852 | 860 | ||
853 | return ret; | 861 | return ret; |
854 | } | 862 | } |
@@ -861,6 +869,7 @@ static int mx3_camera_set_fmt(struct soc_camera_device *icd, | |||
861 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); | 869 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); |
862 | const struct soc_camera_format_xlate *xlate; | 870 | const struct soc_camera_format_xlate *xlate; |
863 | struct v4l2_pix_format *pix = &f->fmt.pix; | 871 | struct v4l2_pix_format *pix = &f->fmt.pix; |
872 | struct v4l2_mbus_framefmt mf; | ||
864 | int ret; | 873 | int ret; |
865 | 874 | ||
866 | xlate = soc_camera_xlate_by_fourcc(icd, pix->pixelformat); | 875 | xlate = soc_camera_xlate_by_fourcc(icd, pix->pixelformat); |
@@ -885,11 +894,24 @@ static int mx3_camera_set_fmt(struct soc_camera_device *icd, | |||
885 | 894 | ||
886 | configure_geometry(mx3_cam, pix->width, pix->height); | 895 | configure_geometry(mx3_cam, pix->width, pix->height); |
887 | 896 | ||
888 | ret = v4l2_subdev_call(sd, video, s_fmt, f); | 897 | mf.width = pix->width; |
889 | if (!ret) { | 898 | mf.height = pix->height; |
890 | icd->buswidth = xlate->buswidth; | 899 | mf.field = pix->field; |
891 | icd->current_fmt = xlate->host_fmt; | 900 | mf.colorspace = pix->colorspace; |
892 | } | 901 | mf.code = xlate->code; |
902 | |||
903 | ret = v4l2_subdev_call(sd, video, s_mbus_fmt, &mf); | ||
904 | if (ret < 0) | ||
905 | return ret; | ||
906 | |||
907 | if (mf.code != xlate->code) | ||
908 | return -EINVAL; | ||
909 | |||
910 | pix->width = mf.width; | ||
911 | pix->height = mf.height; | ||
912 | pix->field = mf.field; | ||
913 | pix->colorspace = mf.colorspace; | ||
914 | icd->current_fmt = xlate; | ||
893 | 915 | ||
894 | dev_dbg(icd->dev.parent, "Sensor set %dx%d\n", pix->width, pix->height); | 916 | dev_dbg(icd->dev.parent, "Sensor set %dx%d\n", pix->width, pix->height); |
895 | 917 | ||
@@ -902,8 +924,8 @@ static int mx3_camera_try_fmt(struct soc_camera_device *icd, | |||
902 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); | 924 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); |
903 | const struct soc_camera_format_xlate *xlate; | 925 | const struct soc_camera_format_xlate *xlate; |
904 | struct v4l2_pix_format *pix = &f->fmt.pix; | 926 | struct v4l2_pix_format *pix = &f->fmt.pix; |
927 | struct v4l2_mbus_framefmt mf; | ||
905 | __u32 pixfmt = pix->pixelformat; | 928 | __u32 pixfmt = pix->pixelformat; |
906 | enum v4l2_field field; | ||
907 | int ret; | 929 | int ret; |
908 | 930 | ||
909 | xlate = soc_camera_xlate_by_fourcc(icd, pixfmt); | 931 | xlate = soc_camera_xlate_by_fourcc(icd, pixfmt); |
@@ -918,23 +940,37 @@ static int mx3_camera_try_fmt(struct soc_camera_device *icd, | |||
918 | if (pix->width > 4096) | 940 | if (pix->width > 4096) |
919 | pix->width = 4096; | 941 | pix->width = 4096; |
920 | 942 | ||
921 | pix->bytesperline = pix->width * | 943 | pix->bytesperline = soc_mbus_bytes_per_line(pix->width, |
922 | DIV_ROUND_UP(xlate->host_fmt->depth, 8); | 944 | xlate->host_fmt); |
945 | if (pix->bytesperline < 0) | ||
946 | return pix->bytesperline; | ||
923 | pix->sizeimage = pix->height * pix->bytesperline; | 947 | pix->sizeimage = pix->height * pix->bytesperline; |
924 | 948 | ||
925 | /* camera has to see its format, but the user the original one */ | ||
926 | pix->pixelformat = xlate->cam_fmt->fourcc; | ||
927 | /* limit to sensor capabilities */ | 949 | /* limit to sensor capabilities */ |
928 | ret = v4l2_subdev_call(sd, video, try_fmt, f); | 950 | mf.width = pix->width; |
929 | pix->pixelformat = xlate->host_fmt->fourcc; | 951 | mf.height = pix->height; |
952 | mf.field = pix->field; | ||
953 | mf.colorspace = pix->colorspace; | ||
954 | mf.code = xlate->code; | ||
930 | 955 | ||
931 | field = pix->field; | 956 | ret = v4l2_subdev_call(sd, video, try_mbus_fmt, &mf); |
957 | if (ret < 0) | ||
958 | return ret; | ||
932 | 959 | ||
933 | if (field == V4L2_FIELD_ANY) { | 960 | pix->width = mf.width; |
961 | pix->height = mf.height; | ||
962 | pix->colorspace = mf.colorspace; | ||
963 | |||
964 | switch (mf.field) { | ||
965 | case V4L2_FIELD_ANY: | ||
934 | pix->field = V4L2_FIELD_NONE; | 966 | pix->field = V4L2_FIELD_NONE; |
935 | } else if (field != V4L2_FIELD_NONE) { | 967 | break; |
936 | dev_err(icd->dev.parent, "Field type %d unsupported.\n", field); | 968 | case V4L2_FIELD_NONE: |
937 | return -EINVAL; | 969 | break; |
970 | default: | ||
971 | dev_err(icd->dev.parent, "Field type %d unsupported.\n", | ||
972 | mf.field); | ||
973 | ret = -EINVAL; | ||
938 | } | 974 | } |
939 | 975 | ||
940 | return ret; | 976 | return ret; |
@@ -970,18 +1006,26 @@ static int mx3_camera_set_bus_param(struct soc_camera_device *icd, __u32 pixfmt) | |||
970 | struct mx3_camera_dev *mx3_cam = ici->priv; | 1006 | struct mx3_camera_dev *mx3_cam = ici->priv; |
971 | unsigned long bus_flags, camera_flags, common_flags; | 1007 | unsigned long bus_flags, camera_flags, common_flags; |
972 | u32 dw, sens_conf; | 1008 | u32 dw, sens_conf; |
973 | int ret = test_platform_param(mx3_cam, icd->buswidth, &bus_flags); | 1009 | const struct soc_mbus_pixelfmt *fmt; |
1010 | int buswidth; | ||
1011 | int ret; | ||
974 | const struct soc_camera_format_xlate *xlate; | 1012 | const struct soc_camera_format_xlate *xlate; |
975 | struct device *dev = icd->dev.parent; | 1013 | struct device *dev = icd->dev.parent; |
976 | 1014 | ||
1015 | fmt = soc_mbus_get_fmtdesc(icd->current_fmt->code); | ||
1016 | if (!fmt) | ||
1017 | return -EINVAL; | ||
1018 | |||
1019 | buswidth = fmt->bits_per_sample; | ||
1020 | ret = test_platform_param(mx3_cam, buswidth, &bus_flags); | ||
1021 | |||
977 | xlate = soc_camera_xlate_by_fourcc(icd, pixfmt); | 1022 | xlate = soc_camera_xlate_by_fourcc(icd, pixfmt); |
978 | if (!xlate) { | 1023 | if (!xlate) { |
979 | dev_warn(dev, "Format %x not found\n", pixfmt); | 1024 | dev_warn(dev, "Format %x not found\n", pixfmt); |
980 | return -EINVAL; | 1025 | return -EINVAL; |
981 | } | 1026 | } |
982 | 1027 | ||
983 | dev_dbg(dev, "requested bus width %d bit: %d\n", | 1028 | dev_dbg(dev, "requested bus width %d bit: %d\n", buswidth, ret); |
984 | icd->buswidth, ret); | ||
985 | 1029 | ||
986 | if (ret < 0) | 1030 | if (ret < 0) |
987 | return ret; | 1031 | return ret; |
@@ -1082,7 +1126,7 @@ static int mx3_camera_set_bus_param(struct soc_camera_device *icd, __u32 pixfmt) | |||
1082 | sens_conf |= 1 << CSI_SENS_CONF_DATA_POL_SHIFT; | 1126 | sens_conf |= 1 << CSI_SENS_CONF_DATA_POL_SHIFT; |
1083 | 1127 | ||
1084 | /* Just do what we're asked to do */ | 1128 | /* Just do what we're asked to do */ |
1085 | switch (xlate->host_fmt->depth) { | 1129 | switch (xlate->host_fmt->bits_per_sample) { |
1086 | case 4: | 1130 | case 4: |
1087 | dw = 0 << CSI_SENS_CONF_DATA_WIDTH_SHIFT; | 1131 | dw = 0 << CSI_SENS_CONF_DATA_WIDTH_SHIFT; |
1088 | break; | 1132 | break; |
diff --git a/drivers/media/video/ov772x.c b/drivers/media/video/ov772x.c index dcb690cb5ae7..3a45e945a528 100644 --- a/drivers/media/video/ov772x.c +++ b/drivers/media/video/ov772x.c | |||
@@ -24,6 +24,7 @@ | |||
24 | #include <media/v4l2-chip-ident.h> | 24 | #include <media/v4l2-chip-ident.h> |
25 | #include <media/v4l2-subdev.h> | 25 | #include <media/v4l2-subdev.h> |
26 | #include <media/soc_camera.h> | 26 | #include <media/soc_camera.h> |
27 | #include <media/soc_mediabus.h> | ||
27 | #include <media/ov772x.h> | 28 | #include <media/ov772x.h> |
28 | 29 | ||
29 | /* | 30 | /* |
@@ -382,7 +383,8 @@ struct regval_list { | |||
382 | }; | 383 | }; |
383 | 384 | ||
384 | struct ov772x_color_format { | 385 | struct ov772x_color_format { |
385 | const struct soc_camera_data_format *format; | 386 | enum v4l2_mbus_pixelcode code; |
387 | enum v4l2_colorspace colorspace; | ||
386 | u8 dsp3; | 388 | u8 dsp3; |
387 | u8 com3; | 389 | u8 com3; |
388 | u8 com7; | 390 | u8 com7; |
@@ -399,7 +401,7 @@ struct ov772x_win_size { | |||
399 | struct ov772x_priv { | 401 | struct ov772x_priv { |
400 | struct v4l2_subdev subdev; | 402 | struct v4l2_subdev subdev; |
401 | struct ov772x_camera_info *info; | 403 | struct ov772x_camera_info *info; |
402 | const struct ov772x_color_format *fmt; | 404 | const struct ov772x_color_format *cfmt; |
403 | const struct ov772x_win_size *win; | 405 | const struct ov772x_win_size *win; |
404 | int model; | 406 | int model; |
405 | unsigned short flag_vflip:1; | 407 | unsigned short flag_vflip:1; |
@@ -434,93 +436,57 @@ static const struct regval_list ov772x_vga_regs[] = { | |||
434 | }; | 436 | }; |
435 | 437 | ||
436 | /* | 438 | /* |
437 | * supported format list | 439 | * supported color format list |
438 | */ | ||
439 | |||
440 | #define SETFOURCC(type) .name = (#type), .fourcc = (V4L2_PIX_FMT_ ## type) | ||
441 | static const struct soc_camera_data_format ov772x_fmt_lists[] = { | ||
442 | { | ||
443 | SETFOURCC(YUYV), | ||
444 | .depth = 16, | ||
445 | .colorspace = V4L2_COLORSPACE_JPEG, | ||
446 | }, | ||
447 | { | ||
448 | SETFOURCC(YVYU), | ||
449 | .depth = 16, | ||
450 | .colorspace = V4L2_COLORSPACE_JPEG, | ||
451 | }, | ||
452 | { | ||
453 | SETFOURCC(UYVY), | ||
454 | .depth = 16, | ||
455 | .colorspace = V4L2_COLORSPACE_JPEG, | ||
456 | }, | ||
457 | { | ||
458 | SETFOURCC(RGB555), | ||
459 | .depth = 16, | ||
460 | .colorspace = V4L2_COLORSPACE_SRGB, | ||
461 | }, | ||
462 | { | ||
463 | SETFOURCC(RGB555X), | ||
464 | .depth = 16, | ||
465 | .colorspace = V4L2_COLORSPACE_SRGB, | ||
466 | }, | ||
467 | { | ||
468 | SETFOURCC(RGB565), | ||
469 | .depth = 16, | ||
470 | .colorspace = V4L2_COLORSPACE_SRGB, | ||
471 | }, | ||
472 | { | ||
473 | SETFOURCC(RGB565X), | ||
474 | .depth = 16, | ||
475 | .colorspace = V4L2_COLORSPACE_SRGB, | ||
476 | }, | ||
477 | }; | ||
478 | |||
479 | /* | ||
480 | * color format list | ||
481 | */ | 440 | */ |
482 | static const struct ov772x_color_format ov772x_cfmts[] = { | 441 | static const struct ov772x_color_format ov772x_cfmts[] = { |
483 | { | 442 | { |
484 | .format = &ov772x_fmt_lists[0], | 443 | .code = V4L2_MBUS_FMT_YUYV8_2X8_LE, |
485 | .dsp3 = 0x0, | 444 | .colorspace = V4L2_COLORSPACE_JPEG, |
486 | .com3 = SWAP_YUV, | 445 | .dsp3 = 0x0, |
487 | .com7 = OFMT_YUV, | 446 | .com3 = SWAP_YUV, |
447 | .com7 = OFMT_YUV, | ||
488 | }, | 448 | }, |
489 | { | 449 | { |
490 | .format = &ov772x_fmt_lists[1], | 450 | .code = V4L2_MBUS_FMT_YVYU8_2X8_LE, |
491 | .dsp3 = UV_ON, | 451 | .colorspace = V4L2_COLORSPACE_JPEG, |
492 | .com3 = SWAP_YUV, | 452 | .dsp3 = UV_ON, |
493 | .com7 = OFMT_YUV, | 453 | .com3 = SWAP_YUV, |
454 | .com7 = OFMT_YUV, | ||
494 | }, | 455 | }, |
495 | { | 456 | { |
496 | .format = &ov772x_fmt_lists[2], | 457 | .code = V4L2_MBUS_FMT_YUYV8_2X8_BE, |
497 | .dsp3 = 0x0, | 458 | .colorspace = V4L2_COLORSPACE_JPEG, |
498 | .com3 = 0x0, | 459 | .dsp3 = 0x0, |
499 | .com7 = OFMT_YUV, | 460 | .com3 = 0x0, |
461 | .com7 = OFMT_YUV, | ||
500 | }, | 462 | }, |
501 | { | 463 | { |
502 | .format = &ov772x_fmt_lists[3], | 464 | .code = V4L2_MBUS_FMT_RGB555_2X8_PADHI_LE, |
503 | .dsp3 = 0x0, | 465 | .colorspace = V4L2_COLORSPACE_SRGB, |
504 | .com3 = SWAP_RGB, | 466 | .dsp3 = 0x0, |
505 | .com7 = FMT_RGB555 | OFMT_RGB, | 467 | .com3 = SWAP_RGB, |
468 | .com7 = FMT_RGB555 | OFMT_RGB, | ||
506 | }, | 469 | }, |
507 | { | 470 | { |
508 | .format = &ov772x_fmt_lists[4], | 471 | .code = V4L2_MBUS_FMT_RGB555_2X8_PADHI_BE, |
509 | .dsp3 = 0x0, | 472 | .colorspace = V4L2_COLORSPACE_SRGB, |
510 | .com3 = 0x0, | 473 | .dsp3 = 0x0, |
511 | .com7 = FMT_RGB555 | OFMT_RGB, | 474 | .com3 = 0x0, |
475 | .com7 = FMT_RGB555 | OFMT_RGB, | ||
512 | }, | 476 | }, |
513 | { | 477 | { |
514 | .format = &ov772x_fmt_lists[5], | 478 | .code = V4L2_MBUS_FMT_RGB565_2X8_LE, |
515 | .dsp3 = 0x0, | 479 | .colorspace = V4L2_COLORSPACE_SRGB, |
516 | .com3 = SWAP_RGB, | 480 | .dsp3 = 0x0, |
517 | .com7 = FMT_RGB565 | OFMT_RGB, | 481 | .com3 = SWAP_RGB, |
482 | .com7 = FMT_RGB565 | OFMT_RGB, | ||
518 | }, | 483 | }, |
519 | { | 484 | { |
520 | .format = &ov772x_fmt_lists[6], | 485 | .code = V4L2_MBUS_FMT_RGB565_2X8_BE, |
521 | .dsp3 = 0x0, | 486 | .colorspace = V4L2_COLORSPACE_SRGB, |
522 | .com3 = 0x0, | 487 | .dsp3 = 0x0, |
523 | .com7 = FMT_RGB565 | OFMT_RGB, | 488 | .com3 = 0x0, |
489 | .com7 = FMT_RGB565 | OFMT_RGB, | ||
524 | }, | 490 | }, |
525 | }; | 491 | }; |
526 | 492 | ||
@@ -642,15 +608,15 @@ static int ov772x_s_stream(struct v4l2_subdev *sd, int enable) | |||
642 | return 0; | 608 | return 0; |
643 | } | 609 | } |
644 | 610 | ||
645 | if (!priv->win || !priv->fmt) { | 611 | if (!priv->win || !priv->cfmt) { |
646 | dev_err(&client->dev, "norm or win select error\n"); | 612 | dev_err(&client->dev, "norm or win select error\n"); |
647 | return -EPERM; | 613 | return -EPERM; |
648 | } | 614 | } |
649 | 615 | ||
650 | ov772x_mask_set(client, COM2, SOFT_SLEEP_MODE, 0); | 616 | ov772x_mask_set(client, COM2, SOFT_SLEEP_MODE, 0); |
651 | 617 | ||
652 | dev_dbg(&client->dev, "format %s, win %s\n", | 618 | dev_dbg(&client->dev, "format %d, win %s\n", |
653 | priv->fmt->format->name, priv->win->name); | 619 | priv->cfmt->code, priv->win->name); |
654 | 620 | ||
655 | return 0; | 621 | return 0; |
656 | } | 622 | } |
@@ -806,8 +772,8 @@ static const struct ov772x_win_size *ov772x_select_win(u32 width, u32 height) | |||
806 | return win; | 772 | return win; |
807 | } | 773 | } |
808 | 774 | ||
809 | static int ov772x_set_params(struct i2c_client *client, | 775 | static int ov772x_set_params(struct i2c_client *client, u32 *width, u32 *height, |
810 | u32 *width, u32 *height, u32 pixfmt) | 776 | enum v4l2_mbus_pixelcode code) |
811 | { | 777 | { |
812 | struct ov772x_priv *priv = to_ov772x(client); | 778 | struct ov772x_priv *priv = to_ov772x(client); |
813 | int ret = -EINVAL; | 779 | int ret = -EINVAL; |
@@ -817,14 +783,14 @@ static int ov772x_set_params(struct i2c_client *client, | |||
817 | /* | 783 | /* |
818 | * select format | 784 | * select format |
819 | */ | 785 | */ |
820 | priv->fmt = NULL; | 786 | priv->cfmt = NULL; |
821 | for (i = 0; i < ARRAY_SIZE(ov772x_cfmts); i++) { | 787 | for (i = 0; i < ARRAY_SIZE(ov772x_cfmts); i++) { |
822 | if (pixfmt == ov772x_cfmts[i].format->fourcc) { | 788 | if (code == ov772x_cfmts[i].code) { |
823 | priv->fmt = ov772x_cfmts + i; | 789 | priv->cfmt = ov772x_cfmts + i; |
824 | break; | 790 | break; |
825 | } | 791 | } |
826 | } | 792 | } |
827 | if (!priv->fmt) | 793 | if (!priv->cfmt) |
828 | goto ov772x_set_fmt_error; | 794 | goto ov772x_set_fmt_error; |
829 | 795 | ||
830 | /* | 796 | /* |
@@ -894,7 +860,7 @@ static int ov772x_set_params(struct i2c_client *client, | |||
894 | /* | 860 | /* |
895 | * set DSP_CTRL3 | 861 | * set DSP_CTRL3 |
896 | */ | 862 | */ |
897 | val = priv->fmt->dsp3; | 863 | val = priv->cfmt->dsp3; |
898 | if (val) { | 864 | if (val) { |
899 | ret = ov772x_mask_set(client, | 865 | ret = ov772x_mask_set(client, |
900 | DSP_CTRL3, UV_MASK, val); | 866 | DSP_CTRL3, UV_MASK, val); |
@@ -905,7 +871,7 @@ static int ov772x_set_params(struct i2c_client *client, | |||
905 | /* | 871 | /* |
906 | * set COM3 | 872 | * set COM3 |
907 | */ | 873 | */ |
908 | val = priv->fmt->com3; | 874 | val = priv->cfmt->com3; |
909 | if (priv->info->flags & OV772X_FLAG_VFLIP) | 875 | if (priv->info->flags & OV772X_FLAG_VFLIP) |
910 | val |= VFLIP_IMG; | 876 | val |= VFLIP_IMG; |
911 | if (priv->info->flags & OV772X_FLAG_HFLIP) | 877 | if (priv->info->flags & OV772X_FLAG_HFLIP) |
@@ -923,9 +889,9 @@ static int ov772x_set_params(struct i2c_client *client, | |||
923 | /* | 889 | /* |
924 | * set COM7 | 890 | * set COM7 |
925 | */ | 891 | */ |
926 | val = priv->win->com7_bit | priv->fmt->com7; | 892 | val = priv->win->com7_bit | priv->cfmt->com7; |
927 | ret = ov772x_mask_set(client, | 893 | ret = ov772x_mask_set(client, |
928 | COM7, (SLCT_MASK | FMT_MASK | OFMT_MASK), | 894 | COM7, SLCT_MASK | FMT_MASK | OFMT_MASK, |
929 | val); | 895 | val); |
930 | if (ret < 0) | 896 | if (ret < 0) |
931 | goto ov772x_set_fmt_error; | 897 | goto ov772x_set_fmt_error; |
@@ -951,7 +917,7 @@ ov772x_set_fmt_error: | |||
951 | 917 | ||
952 | ov772x_reset(client); | 918 | ov772x_reset(client); |
953 | priv->win = NULL; | 919 | priv->win = NULL; |
954 | priv->fmt = NULL; | 920 | priv->cfmt = NULL; |
955 | 921 | ||
956 | return ret; | 922 | return ret; |
957 | } | 923 | } |
@@ -981,54 +947,79 @@ static int ov772x_cropcap(struct v4l2_subdev *sd, struct v4l2_cropcap *a) | |||
981 | return 0; | 947 | return 0; |
982 | } | 948 | } |
983 | 949 | ||
984 | static int ov772x_g_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | 950 | static int ov772x_g_fmt(struct v4l2_subdev *sd, |
951 | struct v4l2_mbus_framefmt *mf) | ||
985 | { | 952 | { |
986 | struct i2c_client *client = sd->priv; | 953 | struct i2c_client *client = sd->priv; |
987 | struct ov772x_priv *priv = to_ov772x(client); | 954 | struct ov772x_priv *priv = to_ov772x(client); |
988 | struct v4l2_pix_format *pix = &f->fmt.pix; | ||
989 | 955 | ||
990 | if (!priv->win || !priv->fmt) { | 956 | if (!priv->win || !priv->cfmt) { |
991 | u32 width = VGA_WIDTH, height = VGA_HEIGHT; | 957 | u32 width = VGA_WIDTH, height = VGA_HEIGHT; |
992 | int ret = ov772x_set_params(client, &width, &height, | 958 | int ret = ov772x_set_params(client, &width, &height, |
993 | V4L2_PIX_FMT_YUYV); | 959 | V4L2_MBUS_FMT_YUYV8_2X8_LE); |
994 | if (ret < 0) | 960 | if (ret < 0) |
995 | return ret; | 961 | return ret; |
996 | } | 962 | } |
997 | 963 | ||
998 | f->type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | 964 | mf->width = priv->win->width; |
999 | 965 | mf->height = priv->win->height; | |
1000 | pix->width = priv->win->width; | 966 | mf->code = priv->cfmt->code; |
1001 | pix->height = priv->win->height; | 967 | mf->colorspace = priv->cfmt->colorspace; |
1002 | pix->pixelformat = priv->fmt->format->fourcc; | 968 | mf->field = V4L2_FIELD_NONE; |
1003 | pix->colorspace = priv->fmt->format->colorspace; | ||
1004 | pix->field = V4L2_FIELD_NONE; | ||
1005 | 969 | ||
1006 | return 0; | 970 | return 0; |
1007 | } | 971 | } |
1008 | 972 | ||
1009 | static int ov772x_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | 973 | static int ov772x_s_fmt(struct v4l2_subdev *sd, |
974 | struct v4l2_mbus_framefmt *mf) | ||
1010 | { | 975 | { |
1011 | struct i2c_client *client = sd->priv; | 976 | struct i2c_client *client = sd->priv; |
1012 | struct v4l2_pix_format *pix = &f->fmt.pix; | 977 | struct ov772x_priv *priv = to_ov772x(client); |
978 | int ret = ov772x_set_params(client, &mf->width, &mf->height, | ||
979 | mf->code); | ||
980 | |||
981 | if (!ret) | ||
982 | mf->colorspace = priv->cfmt->colorspace; | ||
1013 | 983 | ||
1014 | return ov772x_set_params(client, &pix->width, &pix->height, | 984 | return ret; |
1015 | pix->pixelformat); | ||
1016 | } | 985 | } |
1017 | 986 | ||
1018 | static int ov772x_try_fmt(struct v4l2_subdev *sd, | 987 | static int ov772x_try_fmt(struct v4l2_subdev *sd, |
1019 | struct v4l2_format *f) | 988 | struct v4l2_mbus_framefmt *mf) |
1020 | { | 989 | { |
1021 | struct v4l2_pix_format *pix = &f->fmt.pix; | 990 | struct i2c_client *client = sd->priv; |
991 | struct ov772x_priv *priv = to_ov772x(client); | ||
1022 | const struct ov772x_win_size *win; | 992 | const struct ov772x_win_size *win; |
993 | int i; | ||
1023 | 994 | ||
1024 | /* | 995 | /* |
1025 | * select suitable win | 996 | * select suitable win |
1026 | */ | 997 | */ |
1027 | win = ov772x_select_win(pix->width, pix->height); | 998 | win = ov772x_select_win(mf->width, mf->height); |
999 | |||
1000 | mf->width = win->width; | ||
1001 | mf->height = win->height; | ||
1002 | mf->field = V4L2_FIELD_NONE; | ||
1028 | 1003 | ||
1029 | pix->width = win->width; | 1004 | for (i = 0; i < ARRAY_SIZE(ov772x_cfmts); i++) |
1030 | pix->height = win->height; | 1005 | if (mf->code == ov772x_cfmts[i].code) |
1031 | pix->field = V4L2_FIELD_NONE; | 1006 | break; |
1007 | |||
1008 | if (i == ARRAY_SIZE(ov772x_cfmts)) { | ||
1009 | /* Unsupported format requested. Propose either */ | ||
1010 | if (priv->cfmt) { | ||
1011 | /* the current one or */ | ||
1012 | mf->colorspace = priv->cfmt->colorspace; | ||
1013 | mf->code = priv->cfmt->code; | ||
1014 | } else { | ||
1015 | /* the default one */ | ||
1016 | mf->colorspace = ov772x_cfmts[0].colorspace; | ||
1017 | mf->code = ov772x_cfmts[0].code; | ||
1018 | } | ||
1019 | } else { | ||
1020 | /* Also return the colorspace */ | ||
1021 | mf->colorspace = ov772x_cfmts[i].colorspace; | ||
1022 | } | ||
1032 | 1023 | ||
1033 | return 0; | 1024 | return 0; |
1034 | } | 1025 | } |
@@ -1057,9 +1048,6 @@ static int ov772x_video_probe(struct soc_camera_device *icd, | |||
1057 | return -ENODEV; | 1048 | return -ENODEV; |
1058 | } | 1049 | } |
1059 | 1050 | ||
1060 | icd->formats = ov772x_fmt_lists; | ||
1061 | icd->num_formats = ARRAY_SIZE(ov772x_fmt_lists); | ||
1062 | |||
1063 | /* | 1051 | /* |
1064 | * check and show product ID and manufacturer ID | 1052 | * check and show product ID and manufacturer ID |
1065 | */ | 1053 | */ |
@@ -1109,13 +1097,24 @@ static struct v4l2_subdev_core_ops ov772x_subdev_core_ops = { | |||
1109 | #endif | 1097 | #endif |
1110 | }; | 1098 | }; |
1111 | 1099 | ||
1100 | static int ov772x_enum_fmt(struct v4l2_subdev *sd, int index, | ||
1101 | enum v4l2_mbus_pixelcode *code) | ||
1102 | { | ||
1103 | if ((unsigned int)index >= ARRAY_SIZE(ov772x_cfmts)) | ||
1104 | return -EINVAL; | ||
1105 | |||
1106 | *code = ov772x_cfmts[index].code; | ||
1107 | return 0; | ||
1108 | } | ||
1109 | |||
1112 | static struct v4l2_subdev_video_ops ov772x_subdev_video_ops = { | 1110 | static struct v4l2_subdev_video_ops ov772x_subdev_video_ops = { |
1113 | .s_stream = ov772x_s_stream, | 1111 | .s_stream = ov772x_s_stream, |
1114 | .g_fmt = ov772x_g_fmt, | 1112 | .g_mbus_fmt = ov772x_g_fmt, |
1115 | .s_fmt = ov772x_s_fmt, | 1113 | .s_mbus_fmt = ov772x_s_fmt, |
1116 | .try_fmt = ov772x_try_fmt, | 1114 | .try_mbus_fmt = ov772x_try_fmt, |
1117 | .cropcap = ov772x_cropcap, | 1115 | .cropcap = ov772x_cropcap, |
1118 | .g_crop = ov772x_g_crop, | 1116 | .g_crop = ov772x_g_crop, |
1117 | .enum_mbus_fmt = ov772x_enum_fmt, | ||
1119 | }; | 1118 | }; |
1120 | 1119 | ||
1121 | static struct v4l2_subdev_ops ov772x_subdev_ops = { | 1120 | static struct v4l2_subdev_ops ov772x_subdev_ops = { |
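On the client side, the pattern ov772x now follows is small: export a table of mediabus codes through enum_mbus_fmt, and make try_mbus_fmt always come back with a member of that table together with its colorspace. A bare-bones sketch for a made-up sensor ("mysensor" and its two-entry table are illustrative, not a real driver):

#include <linux/kernel.h>
#include <linux/videodev2.h>
#include <media/v4l2-subdev.h>
#include <media/v4l2-mediabus.h>

static const enum v4l2_mbus_pixelcode mysensor_codes[] = {
	V4L2_MBUS_FMT_YUYV8_2X8_BE,
	V4L2_MBUS_FMT_RGB565_2X8_LE,
};

static int mysensor_enum_fmt(struct v4l2_subdev *sd, int index,
			     enum v4l2_mbus_pixelcode *code)
{
	if ((unsigned int)index >= ARRAY_SIZE(mysensor_codes))
		return -EINVAL;

	*code = mysensor_codes[index];
	return 0;
}

static int mysensor_try_fmt(struct v4l2_subdev *sd,
			    struct v4l2_mbus_framefmt *mf)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(mysensor_codes); i++)
		if (mf->code == mysensor_codes[i])
			break;
	if (i == ARRAY_SIZE(mysensor_codes))
		mf->code = mysensor_codes[0];	/* fall back to the default */

	mf->field = V4L2_FIELD_NONE;
	mf->colorspace = mf->code == V4L2_MBUS_FMT_RGB565_2X8_LE ?
		V4L2_COLORSPACE_SRGB : V4L2_COLORSPACE_JPEG;
	return 0;
}

static struct v4l2_subdev_video_ops mysensor_video_ops = {
	.enum_mbus_fmt	= mysensor_enum_fmt,
	.try_mbus_fmt	= mysensor_try_fmt,
};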
diff --git a/drivers/media/video/ov9640.c b/drivers/media/video/ov9640.c index c81ae2192887..47bf60ceb7a2 100644 --- a/drivers/media/video/ov9640.c +++ b/drivers/media/video/ov9640.c | |||
@@ -154,19 +154,10 @@ static const struct ov9640_reg ov9640_regs_rgb[] = { | |||
154 | { OV9640_MTXS, 0x65 }, | 154 | { OV9640_MTXS, 0x65 }, |
155 | }; | 155 | }; |
156 | 156 | ||
157 | /* | 157 | static enum v4l2_mbus_pixelcode ov9640_codes[] = { |
158 | * TODO: this sensor also supports RGB555 and RGB565 formats, but support for | 158 | V4L2_MBUS_FMT_YUYV8_2X8_BE, |
159 | * them has not yet been sufficiently tested and so it is not included with | 159 | V4L2_MBUS_FMT_RGB555_2X8_PADHI_LE, |
160 | * this version of the driver. To test and debug these formats add two entries | 160 | V4L2_MBUS_FMT_RGB565_2X8_LE, |
161 | * to the below array, see ov722x.c for an example. | ||
162 | */ | ||
163 | static const struct soc_camera_data_format ov9640_fmt_lists[] = { | ||
164 | { | ||
165 | .name = "UYVY", | ||
166 | .fourcc = V4L2_PIX_FMT_UYVY, | ||
167 | .depth = 16, | ||
168 | .colorspace = V4L2_COLORSPACE_JPEG, | ||
169 | }, | ||
170 | }; | 161 | }; |
171 | 162 | ||
172 | static const struct v4l2_queryctrl ov9640_controls[] = { | 163 | static const struct v4l2_queryctrl ov9640_controls[] = { |
@@ -434,20 +425,22 @@ static void ov9640_res_roundup(u32 *width, u32 *height) | |||
434 | } | 425 | } |
435 | 426 | ||
436 | /* Prepare necessary register changes depending on color encoding */ | 427 | /* Prepare necessary register changes depending on color encoding */ |
437 | static void ov9640_alter_regs(u32 pixfmt, struct ov9640_reg_alt *alt) | 428 | static void ov9640_alter_regs(enum v4l2_mbus_pixelcode code, |
429 | struct ov9640_reg_alt *alt) | ||
438 | { | 430 | { |
439 | switch (pixfmt) { | 431 | switch (code) { |
440 | case V4L2_PIX_FMT_UYVY: | 432 | default: |
433 | case V4L2_MBUS_FMT_YUYV8_2X8_BE: | ||
441 | alt->com12 = OV9640_COM12_YUV_AVG; | 434 | alt->com12 = OV9640_COM12_YUV_AVG; |
442 | alt->com13 = OV9640_COM13_Y_DELAY_EN | | 435 | alt->com13 = OV9640_COM13_Y_DELAY_EN | |
443 | OV9640_COM13_YUV_DLY(0x01); | 436 | OV9640_COM13_YUV_DLY(0x01); |
444 | break; | 437 | break; |
445 | case V4L2_PIX_FMT_RGB555: | 438 | case V4L2_MBUS_FMT_RGB555_2X8_PADHI_LE: |
446 | alt->com7 = OV9640_COM7_RGB; | 439 | alt->com7 = OV9640_COM7_RGB; |
447 | alt->com13 = OV9640_COM13_RGB_AVG; | 440 | alt->com13 = OV9640_COM13_RGB_AVG; |
448 | alt->com15 = OV9640_COM15_RGB_555; | 441 | alt->com15 = OV9640_COM15_RGB_555; |
449 | break; | 442 | break; |
450 | case V4L2_PIX_FMT_RGB565: | 443 | case V4L2_MBUS_FMT_RGB565_2X8_LE: |
451 | alt->com7 = OV9640_COM7_RGB; | 444 | alt->com7 = OV9640_COM7_RGB; |
452 | alt->com13 = OV9640_COM13_RGB_AVG; | 445 | alt->com13 = OV9640_COM13_RGB_AVG; |
453 | alt->com15 = OV9640_COM15_RGB_565; | 446 | alt->com15 = OV9640_COM15_RGB_565; |
@@ -456,8 +449,8 @@ static void ov9640_alter_regs(u32 pixfmt, struct ov9640_reg_alt *alt) | |||
456 | } | 449 | } |
457 | 450 | ||
458 | /* Setup registers according to resolution and color encoding */ | 451 | /* Setup registers according to resolution and color encoding */ |
459 | static int ov9640_write_regs(struct i2c_client *client, | 452 | static int ov9640_write_regs(struct i2c_client *client, u32 width, |
460 | u32 width, u32 pixfmt, struct ov9640_reg_alt *alts) | 453 | enum v4l2_mbus_pixelcode code, struct ov9640_reg_alt *alts) |
461 | { | 454 | { |
462 | const struct ov9640_reg *ov9640_regs, *matrix_regs; | 455 | const struct ov9640_reg *ov9640_regs, *matrix_regs; |
463 | int ov9640_regs_len, matrix_regs_len; | 456 | int ov9640_regs_len, matrix_regs_len; |
@@ -500,7 +493,7 @@ static int ov9640_write_regs(struct i2c_client *client, | |||
500 | } | 493 | } |
501 | 494 | ||
502 | /* select color matrix configuration for given color encoding */ | 495 | /* select color matrix configuration for given color encoding */ |
503 | if (pixfmt == V4L2_PIX_FMT_UYVY) { | 496 | if (code == V4L2_MBUS_FMT_YUYV8_2X8_BE) { |
504 | matrix_regs = ov9640_regs_yuv; | 497 | matrix_regs = ov9640_regs_yuv; |
505 | matrix_regs_len = ARRAY_SIZE(ov9640_regs_yuv); | 498 | matrix_regs_len = ARRAY_SIZE(ov9640_regs_yuv); |
506 | } else { | 499 | } else { |
@@ -562,15 +555,17 @@ static int ov9640_prog_dflt(struct i2c_client *client) | |||
562 | } | 555 | } |
563 | 556 | ||
564 | /* set the format we will capture in */ | 557 | /* set the format we will capture in */ |
565 | static int ov9640_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | 558 | static int ov9640_s_fmt(struct v4l2_subdev *sd, |
559 | struct v4l2_mbus_framefmt *mf) | ||
566 | { | 560 | { |
567 | struct i2c_client *client = sd->priv; | 561 | struct i2c_client *client = sd->priv; |
568 | struct v4l2_pix_format *pix = &f->fmt.pix; | ||
569 | struct ov9640_reg_alt alts = {0}; | 562 | struct ov9640_reg_alt alts = {0}; |
563 | enum v4l2_colorspace cspace; | ||
564 | enum v4l2_mbus_pixelcode code = mf->code; | ||
570 | int ret; | 565 | int ret; |
571 | 566 | ||
572 | ov9640_res_roundup(&pix->width, &pix->height); | 567 | ov9640_res_roundup(&mf->width, &mf->height); |
573 | ov9640_alter_regs(pix->pixelformat, &alts); | 568 | ov9640_alter_regs(mf->code, &alts); |
574 | 569 | ||
575 | ov9640_reset(client); | 570 | ov9640_reset(client); |
576 | 571 | ||
@@ -578,19 +573,57 @@ static int ov9640_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | |||
578 | if (ret) | 573 | if (ret) |
579 | return ret; | 574 | return ret; |
580 | 575 | ||
581 | return ov9640_write_regs(client, pix->width, pix->pixelformat, &alts); | 576 | switch (code) { |
577 | case V4L2_MBUS_FMT_RGB555_2X8_PADHI_LE: | ||
578 | case V4L2_MBUS_FMT_RGB565_2X8_LE: | ||
579 | cspace = V4L2_COLORSPACE_SRGB; | ||
580 | break; | ||
581 | default: | ||
582 | code = V4L2_MBUS_FMT_YUYV8_2X8_BE; | ||
583 | case V4L2_MBUS_FMT_YUYV8_2X8_BE: | ||
584 | cspace = V4L2_COLORSPACE_JPEG; | ||
585 | } | ||
586 | |||
587 | ret = ov9640_write_regs(client, mf->width, code, &alts); | ||
588 | if (!ret) { | ||
589 | mf->code = code; | ||
590 | mf->colorspace = cspace; | ||
591 | } | ||
592 | |||
593 | return ret; | ||
582 | } | 594 | } |
583 | 595 | ||
584 | static int ov9640_try_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | 596 | static int ov9640_try_fmt(struct v4l2_subdev *sd, |
597 | struct v4l2_mbus_framefmt *mf) | ||
585 | { | 598 | { |
586 | struct v4l2_pix_format *pix = &f->fmt.pix; | 599 | ov9640_res_roundup(&mf->width, &mf->height); |
587 | 600 | ||
588 | ov9640_res_roundup(&pix->width, &pix->height); | 601 | mf->field = V4L2_FIELD_NONE; |
589 | pix->field = V4L2_FIELD_NONE; | 602 | |
603 | switch (mf->code) { | ||
604 | case V4L2_MBUS_FMT_RGB555_2X8_PADHI_LE: | ||
605 | case V4L2_MBUS_FMT_RGB565_2X8_LE: | ||
606 | mf->colorspace = V4L2_COLORSPACE_SRGB; | ||
607 | break; | ||
608 | default: | ||
609 | mf->code = V4L2_MBUS_FMT_YUYV8_2X8_BE; | ||
610 | case V4L2_MBUS_FMT_YUYV8_2X8_BE: | ||
611 | mf->colorspace = V4L2_COLORSPACE_JPEG; | ||
612 | } | ||
590 | 613 | ||
591 | return 0; | 614 | return 0; |
592 | } | 615 | } |
593 | 616 | ||
617 | static int ov9640_enum_fmt(struct v4l2_subdev *sd, int index, | ||
618 | enum v4l2_mbus_pixelcode *code) | ||
619 | { | ||
620 | if ((unsigned int)index >= ARRAY_SIZE(ov9640_codes)) | ||
621 | return -EINVAL; | ||
622 | |||
623 | *code = ov9640_codes[index]; | ||
624 | return 0; | ||
625 | } | ||
626 | |||
594 | static int ov9640_g_crop(struct v4l2_subdev *sd, struct v4l2_crop *a) | 627 | static int ov9640_g_crop(struct v4l2_subdev *sd, struct v4l2_crop *a) |
595 | { | 628 | { |
596 | a->c.left = 0; | 629 | a->c.left = 0; |
@@ -637,9 +670,6 @@ static int ov9640_video_probe(struct soc_camera_device *icd, | |||
637 | goto err; | 670 | goto err; |
638 | } | 671 | } |
639 | 672 | ||
640 | icd->formats = ov9640_fmt_lists; | ||
641 | icd->num_formats = ARRAY_SIZE(ov9640_fmt_lists); | ||
642 | |||
643 | /* | 673 | /* |
644 | * check and show product ID and manufacturer ID | 674 | * check and show product ID and manufacturer ID |
645 | */ | 675 | */ |
@@ -702,11 +732,12 @@ static struct v4l2_subdev_core_ops ov9640_core_ops = { | |||
702 | }; | 732 | }; |
703 | 733 | ||
704 | static struct v4l2_subdev_video_ops ov9640_video_ops = { | 734 | static struct v4l2_subdev_video_ops ov9640_video_ops = { |
705 | .s_stream = ov9640_s_stream, | 735 | .s_stream = ov9640_s_stream, |
706 | .s_fmt = ov9640_s_fmt, | 736 | .s_mbus_fmt = ov9640_s_fmt, |
707 | .try_fmt = ov9640_try_fmt, | 737 | .try_mbus_fmt = ov9640_try_fmt, |
708 | .cropcap = ov9640_cropcap, | 738 | .enum_mbus_fmt = ov9640_enum_fmt, |
709 | .g_crop = ov9640_g_crop, | 739 | .cropcap = ov9640_cropcap, |
740 | .g_crop = ov9640_g_crop, | ||
710 | 741 | ||
711 | }; | 742 | }; |
712 | 743 | ||
diff --git a/drivers/media/video/pxa_camera.c b/drivers/media/video/pxa_camera.c index f063f5981f43..294f860ce2b0 100644 --- a/drivers/media/video/pxa_camera.c +++ b/drivers/media/video/pxa_camera.c | |||
@@ -32,6 +32,7 @@ | |||
32 | #include <media/v4l2-dev.h> | 32 | #include <media/v4l2-dev.h> |
33 | #include <media/videobuf-dma-sg.h> | 33 | #include <media/videobuf-dma-sg.h> |
34 | #include <media/soc_camera.h> | 34 | #include <media/soc_camera.h> |
35 | #include <media/soc_mediabus.h> | ||
35 | 36 | ||
36 | #include <linux/videodev2.h> | 37 | #include <linux/videodev2.h> |
37 | 38 | ||
@@ -183,16 +184,12 @@ struct pxa_cam_dma { | |||
183 | /* buffer for one video frame */ | 184 | /* buffer for one video frame */ |
184 | struct pxa_buffer { | 185 | struct pxa_buffer { |
185 | /* common v4l buffer stuff -- must be first */ | 186 | /* common v4l buffer stuff -- must be first */ |
186 | struct videobuf_buffer vb; | 187 | struct videobuf_buffer vb; |
187 | 188 | enum v4l2_mbus_pixelcode code; | |
188 | const struct soc_camera_data_format *fmt; | ||
189 | |||
190 | /* our descriptor lists for Y, U and V channels */ | 189 | /* our descriptor lists for Y, U and V channels */ |
191 | struct pxa_cam_dma dmas[3]; | 190 | struct pxa_cam_dma dmas[3]; |
192 | 191 | int inwork; | |
193 | int inwork; | 192 | enum pxa_camera_active_dma active_dma; |
194 | |||
195 | enum pxa_camera_active_dma active_dma; | ||
196 | }; | 193 | }; |
197 | 194 | ||
198 | struct pxa_camera_dev { | 195 | struct pxa_camera_dev { |
@@ -243,11 +240,15 @@ static int pxa_videobuf_setup(struct videobuf_queue *vq, unsigned int *count, | |||
243 | unsigned int *size) | 240 | unsigned int *size) |
244 | { | 241 | { |
245 | struct soc_camera_device *icd = vq->priv_data; | 242 | struct soc_camera_device *icd = vq->priv_data; |
243 | int bytes_per_line = soc_mbus_bytes_per_line(icd->user_width, | ||
244 | icd->current_fmt->host_fmt); | ||
245 | |||
246 | if (bytes_per_line < 0) | ||
247 | return bytes_per_line; | ||
246 | 248 | ||
247 | dev_dbg(icd->dev.parent, "count=%d, size=%d\n", *count, *size); | 249 | dev_dbg(icd->dev.parent, "count=%d, size=%d\n", *count, *size); |
248 | 250 | ||
249 | *size = roundup(icd->user_width * icd->user_height * | 251 | *size = bytes_per_line * icd->user_height; |
250 | ((icd->current_fmt->depth + 7) >> 3), 8); | ||
251 | 252 | ||
252 | if (0 == *count) | 253 | if (0 == *count) |
253 | *count = 32; | 254 | *count = 32; |
@@ -433,6 +434,11 @@ static int pxa_videobuf_prepare(struct videobuf_queue *vq, | |||
433 | struct pxa_buffer *buf = container_of(vb, struct pxa_buffer, vb); | 434 | struct pxa_buffer *buf = container_of(vb, struct pxa_buffer, vb); |
434 | int ret; | 435 | int ret; |
435 | int size_y, size_u = 0, size_v = 0; | 436 | int size_y, size_u = 0, size_v = 0; |
437 | int bytes_per_line = soc_mbus_bytes_per_line(icd->user_width, | ||
438 | icd->current_fmt->host_fmt); | ||
439 | |||
440 | if (bytes_per_line < 0) | ||
441 | return bytes_per_line; | ||
436 | 442 | ||
437 | dev_dbg(dev, "%s (vb=0x%p) 0x%08lx %d\n", __func__, | 443 | dev_dbg(dev, "%s (vb=0x%p) 0x%08lx %d\n", __func__, |
438 | vb, vb->baddr, vb->bsize); | 444 | vb, vb->baddr, vb->bsize); |
@@ -456,18 +462,18 @@ static int pxa_videobuf_prepare(struct videobuf_queue *vq, | |||
456 | */ | 462 | */ |
457 | buf->inwork = 1; | 463 | buf->inwork = 1; |
458 | 464 | ||
459 | if (buf->fmt != icd->current_fmt || | 465 | if (buf->code != icd->current_fmt->code || |
460 | vb->width != icd->user_width || | 466 | vb->width != icd->user_width || |
461 | vb->height != icd->user_height || | 467 | vb->height != icd->user_height || |
462 | vb->field != field) { | 468 | vb->field != field) { |
463 | buf->fmt = icd->current_fmt; | 469 | buf->code = icd->current_fmt->code; |
464 | vb->width = icd->user_width; | 470 | vb->width = icd->user_width; |
465 | vb->height = icd->user_height; | 471 | vb->height = icd->user_height; |
466 | vb->field = field; | 472 | vb->field = field; |
467 | vb->state = VIDEOBUF_NEEDS_INIT; | 473 | vb->state = VIDEOBUF_NEEDS_INIT; |
468 | } | 474 | } |
469 | 475 | ||
470 | vb->size = vb->width * vb->height * ((buf->fmt->depth + 7) >> 3); | 476 | vb->size = bytes_per_line * vb->height; |
471 | if (0 != vb->baddr && vb->bsize < vb->size) { | 477 | if (0 != vb->baddr && vb->bsize < vb->size) { |
472 | ret = -EINVAL; | 478 | ret = -EINVAL; |
473 | goto out; | 479 | goto out; |
@@ -1157,9 +1163,15 @@ static int pxa_camera_set_bus_param(struct soc_camera_device *icd, __u32 pixfmt) | |||
1157 | struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); | 1163 | struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); |
1158 | struct pxa_camera_dev *pcdev = ici->priv; | 1164 | struct pxa_camera_dev *pcdev = ici->priv; |
1159 | unsigned long bus_flags, camera_flags, common_flags; | 1165 | unsigned long bus_flags, camera_flags, common_flags; |
1160 | int ret = test_platform_param(pcdev, icd->buswidth, &bus_flags); | 1166 | const struct soc_mbus_pixelfmt *fmt; |
1167 | int ret; | ||
1161 | struct pxa_cam *cam = icd->host_priv; | 1168 | struct pxa_cam *cam = icd->host_priv; |
1162 | 1169 | ||
1170 | fmt = soc_mbus_get_fmtdesc(icd->current_fmt->code); | ||
1171 | if (!fmt) | ||
1172 | return -EINVAL; | ||
1173 | |||
1174 | ret = test_platform_param(pcdev, fmt->bits_per_sample, &bus_flags); | ||
1163 | if (ret < 0) | 1175 | if (ret < 0) |
1164 | return ret; | 1176 | return ret; |
1165 | 1177 | ||
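With icd->buswidth gone, the width used for bus-parameter negotiation is recovered from the mediabus descriptor of the currently selected format. A minimal fragment mirroring the lookup the pxa handler performs above (the helper name is illustrative):

#include <media/soc_camera.h>
#include <media/soc_mediabus.h>

static int current_bus_width(struct soc_camera_device *icd)
{
	const struct soc_mbus_pixelfmt *fmt;

	/* current_fmt now carries a mediabus code rather than a fourcc */
	fmt = soc_mbus_get_fmtdesc(icd->current_fmt->code);
	if (!fmt)
		return -EINVAL;

	return fmt->bits_per_sample;
}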
@@ -1223,59 +1235,49 @@ static int pxa_camera_try_bus_param(struct soc_camera_device *icd, | |||
1223 | return soc_camera_bus_param_compatible(camera_flags, bus_flags) ? 0 : -EINVAL; | 1235 | return soc_camera_bus_param_compatible(camera_flags, bus_flags) ? 0 : -EINVAL; |
1224 | } | 1236 | } |
1225 | 1237 | ||
1226 | static const struct soc_camera_data_format pxa_camera_formats[] = { | 1238 | static const struct soc_mbus_pixelfmt pxa_camera_formats[] = { |
1227 | { | 1239 | { |
1228 | .name = "Planar YUV422 16 bit", | 1240 | .fourcc = V4L2_PIX_FMT_YUV422P, |
1229 | .depth = 16, | 1241 | .name = "Planar YUV422 16 bit", |
1230 | .fourcc = V4L2_PIX_FMT_YUV422P, | 1242 | .bits_per_sample = 8, |
1231 | .colorspace = V4L2_COLORSPACE_JPEG, | 1243 | .packing = SOC_MBUS_PACKING_2X8_PADHI, |
1244 | .order = SOC_MBUS_ORDER_LE, | ||
1232 | }, | 1245 | }, |
1233 | }; | 1246 | }; |
1234 | 1247 | ||
1235 | static bool buswidth_supported(struct soc_camera_device *icd, int depth) | 1248 | /* This will be corrected as we get more formats */ |
1249 | static bool pxa_camera_packing_supported(const struct soc_mbus_pixelfmt *fmt) | ||
1236 | { | 1250 | { |
1237 | struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); | 1251 | return fmt->packing == SOC_MBUS_PACKING_NONE || |
1238 | struct pxa_camera_dev *pcdev = ici->priv; | 1252 | (fmt->bits_per_sample == 8 && |
1239 | 1253 | fmt->packing == SOC_MBUS_PACKING_2X8_PADHI) || | |
1240 | switch (depth) { | 1254 | (fmt->bits_per_sample > 8 && |
1241 | case 8: | 1255 | fmt->packing == SOC_MBUS_PACKING_EXTEND16); |
1242 | return !!(pcdev->platform_flags & PXA_CAMERA_DATAWIDTH_8); | ||
1243 | case 9: | ||
1244 | return !!(pcdev->platform_flags & PXA_CAMERA_DATAWIDTH_9); | ||
1245 | case 10: | ||
1246 | return !!(pcdev->platform_flags & PXA_CAMERA_DATAWIDTH_10); | ||
1247 | } | ||
1248 | return false; | ||
1249 | } | ||
1250 | |||
1251 | static int required_buswidth(const struct soc_camera_data_format *fmt) | ||
1252 | { | ||
1253 | switch (fmt->fourcc) { | ||
1254 | case V4L2_PIX_FMT_UYVY: | ||
1255 | case V4L2_PIX_FMT_VYUY: | ||
1256 | case V4L2_PIX_FMT_YUYV: | ||
1257 | case V4L2_PIX_FMT_YVYU: | ||
1258 | case V4L2_PIX_FMT_RGB565: | ||
1259 | case V4L2_PIX_FMT_RGB555: | ||
1260 | return 8; | ||
1261 | default: | ||
1262 | return fmt->depth; | ||
1263 | } | ||
1264 | } | 1256 | } |
1265 | 1257 | ||
1266 | static int pxa_camera_get_formats(struct soc_camera_device *icd, int idx, | 1258 | static int pxa_camera_get_formats(struct soc_camera_device *icd, int idx, |
1267 | struct soc_camera_format_xlate *xlate) | 1259 | struct soc_camera_format_xlate *xlate) |
1268 | { | 1260 | { |
1261 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); | ||
1269 | struct device *dev = icd->dev.parent; | 1262 | struct device *dev = icd->dev.parent; |
1270 | int formats = 0, buswidth, ret; | 1263 | int formats = 0, ret; |
1271 | struct pxa_cam *cam; | 1264 | struct pxa_cam *cam; |
1265 | enum v4l2_mbus_pixelcode code; | ||
1266 | const struct soc_mbus_pixelfmt *fmt; | ||
1272 | 1267 | ||
1273 | buswidth = required_buswidth(icd->formats + idx); | 1268 | ret = v4l2_subdev_call(sd, video, enum_mbus_fmt, idx, &code); |
1269 | if (ret < 0) | ||
1270 | /* No more formats */ | ||
1271 | return 0; | ||
1274 | 1272 | ||
1275 | if (!buswidth_supported(icd, buswidth)) | 1273 | fmt = soc_mbus_get_fmtdesc(code); |
1274 | if (!fmt) { | ||
1275 | dev_err(dev, "Invalid format code #%d: %d\n", idx, code); | ||
1276 | return 0; | 1276 | return 0; |
1277 | } | ||
1277 | 1278 | ||
1278 | ret = pxa_camera_try_bus_param(icd, buswidth); | 1279 | /* This also checks support for the requested bits-per-sample */ |
1280 | ret = pxa_camera_try_bus_param(icd, fmt->bits_per_sample); | ||
1279 | if (ret < 0) | 1281 | if (ret < 0) |
1280 | return 0; | 1282 | return 0; |
1281 | 1283 | ||
@@ -1289,45 +1291,40 @@ static int pxa_camera_get_formats(struct soc_camera_device *icd, int idx, | |||
1289 | cam = icd->host_priv; | 1291 | cam = icd->host_priv; |
1290 | } | 1292 | } |
1291 | 1293 | ||
1292 | switch (icd->formats[idx].fourcc) { | 1294 | switch (code) { |
1293 | case V4L2_PIX_FMT_UYVY: | 1295 | case V4L2_MBUS_FMT_YUYV8_2X8_BE: |
1294 | formats++; | 1296 | formats++; |
1295 | if (xlate) { | 1297 | if (xlate) { |
1296 | xlate->host_fmt = &pxa_camera_formats[0]; | 1298 | xlate->host_fmt = &pxa_camera_formats[0]; |
1297 | xlate->cam_fmt = icd->formats + idx; | 1299 | xlate->code = code; |
1298 | xlate->buswidth = buswidth; | ||
1299 | xlate++; | 1300 | xlate++; |
1300 | dev_dbg(dev, "Providing format %s using %s\n", | 1301 | dev_dbg(dev, "Providing format %s using code %d\n", |
1301 | pxa_camera_formats[0].name, | 1302 | pxa_camera_formats[0].name, code); |
1302 | icd->formats[idx].name); | ||
1303 | } | 1303 | } |
1304 | case V4L2_PIX_FMT_VYUY: | 1304 | case V4L2_MBUS_FMT_YVYU8_2X8_BE: |
1305 | case V4L2_PIX_FMT_YUYV: | 1305 | case V4L2_MBUS_FMT_YUYV8_2X8_LE: |
1306 | case V4L2_PIX_FMT_YVYU: | 1306 | case V4L2_MBUS_FMT_YVYU8_2X8_LE: |
1307 | case V4L2_PIX_FMT_RGB565: | 1307 | case V4L2_MBUS_FMT_RGB565_2X8_LE: |
1308 | case V4L2_PIX_FMT_RGB555: | 1308 | case V4L2_MBUS_FMT_RGB555_2X8_PADHI_LE: |
1309 | formats++; | 1309 | if (xlate) |
1310 | if (xlate) { | ||
1311 | xlate->host_fmt = icd->formats + idx; | ||
1312 | xlate->cam_fmt = icd->formats + idx; | ||
1313 | xlate->buswidth = buswidth; | ||
1314 | xlate++; | ||
1315 | dev_dbg(dev, "Providing format %s packed\n", | 1310 | dev_dbg(dev, "Providing format %s packed\n", |
1316 | icd->formats[idx].name); | 1311 | fmt->name); |
1317 | } | ||
1318 | break; | 1312 | break; |
1319 | default: | 1313 | default: |
1320 | /* Generic pass-through */ | 1314 | if (!pxa_camera_packing_supported(fmt)) |
1321 | formats++; | 1315 | return 0; |
1322 | if (xlate) { | 1316 | if (xlate) |
1323 | xlate->host_fmt = icd->formats + idx; | ||
1324 | xlate->cam_fmt = icd->formats + idx; | ||
1325 | xlate->buswidth = icd->formats[idx].depth; | ||
1326 | xlate++; | ||
1327 | dev_dbg(dev, | 1317 | dev_dbg(dev, |
1328 | "Providing format %s in pass-through mode\n", | 1318 | "Providing format %s in pass-through mode\n", |
1329 | icd->formats[idx].name); | 1319 | fmt->name); |
1330 | } | 1320 | } |
1321 | |||
1322 | /* Generic pass-through */ | ||
1323 | formats++; | ||
1324 | if (xlate) { | ||
1325 | xlate->host_fmt = fmt; | ||
1326 | xlate->code = code; | ||
1327 | xlate++; | ||
1331 | } | 1328 | } |
1332 | 1329 | ||
1333 | return formats; | 1330 | return formats; |
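With cam_fmt and buswidth gone from the translation table, each soc_camera_format_xlate entry is just a pair of host memory format and sensor media-bus code. A hedged sketch of how the soc-camera core can then resolve a user-requested fourcc back to such an entry (modelled on soc_camera_xlate_by_fourcc(); the exact field names in soc_camera.c may differ):

static const struct soc_camera_format_xlate *
xlate_by_fourcc_sketch(struct soc_camera_device *icd, unsigned int fourcc)
{
	unsigned int i;

	/* user_formats[] is what get_formats() callbacks like the one above fill in */
	for (i = 0; i < icd->num_user_formats; i++)
		if (icd->user_formats[i].host_fmt->fourcc == fourcc)
			return icd->user_formats + i;

	return NULL;
}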
@@ -1339,11 +1336,11 @@ static void pxa_camera_put_formats(struct soc_camera_device *icd) | |||
1339 | icd->host_priv = NULL; | 1336 | icd->host_priv = NULL; |
1340 | } | 1337 | } |
1341 | 1338 | ||
1342 | static int pxa_camera_check_frame(struct v4l2_pix_format *pix) | 1339 | static int pxa_camera_check_frame(u32 width, u32 height) |
1343 | { | 1340 | { |
1344 | /* limit to pxa hardware capabilities */ | 1341 | /* limit to pxa hardware capabilities */ |
1345 | return pix->height < 32 || pix->height > 2048 || pix->width < 48 || | 1342 | return height < 32 || height > 2048 || width < 48 || width > 2048 || |
1346 | pix->width > 2048 || (pix->width & 0x01); | 1343 | (width & 0x01); |
1347 | } | 1344 | } |
1348 | 1345 | ||
1349 | static int pxa_camera_set_crop(struct soc_camera_device *icd, | 1346 | static int pxa_camera_set_crop(struct soc_camera_device *icd, |
@@ -1358,9 +1355,9 @@ static int pxa_camera_set_crop(struct soc_camera_device *icd, | |||
1358 | .master_clock = pcdev->mclk, | 1355 | .master_clock = pcdev->mclk, |
1359 | .pixel_clock_max = pcdev->ciclk / 4, | 1356 | .pixel_clock_max = pcdev->ciclk / 4, |
1360 | }; | 1357 | }; |
1361 | struct v4l2_format f; | 1358 | struct v4l2_mbus_framefmt mf; |
1362 | struct v4l2_pix_format *pix = &f.fmt.pix, pix_tmp; | ||
1363 | struct pxa_cam *cam = icd->host_priv; | 1359 | struct pxa_cam *cam = icd->host_priv; |
1360 | u32 fourcc = icd->current_fmt->host_fmt->fourcc; | ||
1364 | int ret; | 1361 | int ret; |
1365 | 1362 | ||
1366 | /* If PCLK is used to latch data from the sensor, check sense */ | 1363 | /* If PCLK is used to latch data from the sensor, check sense */ |
@@ -1377,27 +1374,23 @@ static int pxa_camera_set_crop(struct soc_camera_device *icd, | |||
1377 | return ret; | 1374 | return ret; |
1378 | } | 1375 | } |
1379 | 1376 | ||
1380 | f.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | 1377 | ret = v4l2_subdev_call(sd, video, g_mbus_fmt, &mf); |
1381 | |||
1382 | ret = v4l2_subdev_call(sd, video, g_fmt, &f); | ||
1383 | if (ret < 0) | 1378 | if (ret < 0) |
1384 | return ret; | 1379 | return ret; |
1385 | 1380 | ||
1386 | pix_tmp = *pix; | 1381 | if (pxa_camera_check_frame(mf.width, mf.height)) { |
1387 | if (pxa_camera_check_frame(pix)) { | ||
1388 | /* | 1382 | /* |
1389 | * Camera cropping produced a frame beyond our capabilities. | 1383 | * Camera cropping produced a frame beyond our capabilities. |
1390 | * FIXME: just extract a subframe that we can process. | 1384 | * FIXME: just extract a subframe that we can process. |
1391 | */ | 1385 | */ |
1392 | v4l_bound_align_image(&pix->width, 48, 2048, 1, | 1386 | v4l_bound_align_image(&mf.width, 48, 2048, 1, |
1393 | &pix->height, 32, 2048, 0, | 1387 | &mf.height, 32, 2048, 0, |
1394 | icd->current_fmt->fourcc == V4L2_PIX_FMT_YUV422P ? | 1388 | fourcc == V4L2_PIX_FMT_YUV422P ? 4 : 0); |
1395 | 4 : 0); | 1389 | ret = v4l2_subdev_call(sd, video, s_mbus_fmt, &mf); |
1396 | ret = v4l2_subdev_call(sd, video, s_fmt, &f); | ||
1397 | if (ret < 0) | 1390 | if (ret < 0) |
1398 | return ret; | 1391 | return ret; |
1399 | 1392 | ||
1400 | if (pxa_camera_check_frame(pix)) { | 1393 | if (pxa_camera_check_frame(mf.width, mf.height)) { |
1401 | dev_warn(icd->dev.parent, | 1394 | dev_warn(icd->dev.parent, |
1402 | "Inconsistent state. Use S_FMT to repair\n"); | 1395 | "Inconsistent state. Use S_FMT to repair\n"); |
1403 | return -EINVAL; | 1396 | return -EINVAL; |
@@ -1414,10 +1407,10 @@ static int pxa_camera_set_crop(struct soc_camera_device *icd, | |||
1414 | recalculate_fifo_timeout(pcdev, sense.pixel_clock); | 1407 | recalculate_fifo_timeout(pcdev, sense.pixel_clock); |
1415 | } | 1408 | } |
1416 | 1409 | ||
1417 | icd->user_width = pix->width; | 1410 | icd->user_width = mf.width; |
1418 | icd->user_height = pix->height; | 1411 | icd->user_height = mf.height; |
1419 | 1412 | ||
1420 | pxa_camera_setup_cicr(icd, cam->flags, icd->current_fmt->fourcc); | 1413 | pxa_camera_setup_cicr(icd, cam->flags, fourcc); |
1421 | 1414 | ||
1422 | return ret; | 1415 | return ret; |
1423 | } | 1416 | } |
@@ -1429,14 +1422,13 @@ static int pxa_camera_set_fmt(struct soc_camera_device *icd, | |||
1429 | struct pxa_camera_dev *pcdev = ici->priv; | 1422 | struct pxa_camera_dev *pcdev = ici->priv; |
1430 | struct device *dev = icd->dev.parent; | 1423 | struct device *dev = icd->dev.parent; |
1431 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); | 1424 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); |
1432 | const struct soc_camera_data_format *cam_fmt = NULL; | ||
1433 | const struct soc_camera_format_xlate *xlate = NULL; | 1425 | const struct soc_camera_format_xlate *xlate = NULL; |
1434 | struct soc_camera_sense sense = { | 1426 | struct soc_camera_sense sense = { |
1435 | .master_clock = pcdev->mclk, | 1427 | .master_clock = pcdev->mclk, |
1436 | .pixel_clock_max = pcdev->ciclk / 4, | 1428 | .pixel_clock_max = pcdev->ciclk / 4, |
1437 | }; | 1429 | }; |
1438 | struct v4l2_pix_format *pix = &f->fmt.pix; | 1430 | struct v4l2_pix_format *pix = &f->fmt.pix; |
1439 | struct v4l2_format cam_f = *f; | 1431 | struct v4l2_mbus_framefmt mf; |
1440 | int ret; | 1432 | int ret; |
1441 | 1433 | ||
1442 | xlate = soc_camera_xlate_by_fourcc(icd, pix->pixelformat); | 1434 | xlate = soc_camera_xlate_by_fourcc(icd, pix->pixelformat); |
@@ -1445,26 +1437,31 @@ static int pxa_camera_set_fmt(struct soc_camera_device *icd, | |||
1445 | return -EINVAL; | 1437 | return -EINVAL; |
1446 | } | 1438 | } |
1447 | 1439 | ||
1448 | cam_fmt = xlate->cam_fmt; | ||
1449 | |||
1450 | /* If PCLK is used to latch data from the sensor, check sense */ | 1440 | /* If PCLK is used to latch data from the sensor, check sense */ |
1451 | if (pcdev->platform_flags & PXA_CAMERA_PCLK_EN) | 1441 | if (pcdev->platform_flags & PXA_CAMERA_PCLK_EN) |
1442 | /* The caller holds a mutex. */ | ||
1452 | icd->sense = &sense; | 1443 | icd->sense = &sense; |
1453 | 1444 | ||
1454 | cam_f.fmt.pix.pixelformat = cam_fmt->fourcc; | 1445 | mf.width = pix->width; |
1455 | ret = v4l2_subdev_call(sd, video, s_fmt, &cam_f); | 1446 | mf.height = pix->height; |
1456 | cam_f.fmt.pix.pixelformat = pix->pixelformat; | 1447 | mf.field = pix->field; |
1457 | *pix = cam_f.fmt.pix; | 1448 | mf.colorspace = pix->colorspace; |
1449 | mf.code = xlate->code; | ||
1450 | |||
1451 | ret = v4l2_subdev_call(sd, video, s_mbus_fmt, &mf); | ||
1452 | |||
1453 | if (mf.code != xlate->code) | ||
1454 | return -EINVAL; | ||
1458 | 1455 | ||
1459 | icd->sense = NULL; | 1456 | icd->sense = NULL; |
1460 | 1457 | ||
1461 | if (ret < 0) { | 1458 | if (ret < 0) { |
1462 | dev_warn(dev, "Failed to configure for format %x\n", | 1459 | dev_warn(dev, "Failed to configure for format %x\n", |
1463 | pix->pixelformat); | 1460 | pix->pixelformat); |
1464 | } else if (pxa_camera_check_frame(pix)) { | 1461 | } else if (pxa_camera_check_frame(mf.width, mf.height)) { |
1465 | dev_warn(dev, | 1462 | dev_warn(dev, |
1466 | "Camera driver produced an unsupported frame %dx%d\n", | 1463 | "Camera driver produced an unsupported frame %dx%d\n", |
1467 | pix->width, pix->height); | 1464 | mf.width, mf.height); |
1468 | ret = -EINVAL; | 1465 | ret = -EINVAL; |
1469 | } else if (sense.flags & SOCAM_SENSE_PCLK_CHANGED) { | 1466 | } else if (sense.flags & SOCAM_SENSE_PCLK_CHANGED) { |
1470 | if (sense.pixel_clock > sense.pixel_clock_max) { | 1467 | if (sense.pixel_clock > sense.pixel_clock_max) { |
@@ -1476,10 +1473,14 @@ static int pxa_camera_set_fmt(struct soc_camera_device *icd, | |||
1476 | recalculate_fifo_timeout(pcdev, sense.pixel_clock); | 1473 | recalculate_fifo_timeout(pcdev, sense.pixel_clock); |
1477 | } | 1474 | } |
1478 | 1475 | ||
1479 | if (!ret) { | 1476 | if (ret < 0) |
1480 | icd->buswidth = xlate->buswidth; | 1477 | return ret; |
1481 | icd->current_fmt = xlate->host_fmt; | 1478 | |
1482 | } | 1479 | pix->width = mf.width; |
1480 | pix->height = mf.height; | ||
1481 | pix->field = mf.field; | ||
1482 | pix->colorspace = mf.colorspace; | ||
1483 | icd->current_fmt = xlate; | ||
1483 | 1484 | ||
1484 | return ret; | 1485 | return ret; |
1485 | } | 1486 | } |
@@ -1487,17 +1488,16 @@ static int pxa_camera_set_fmt(struct soc_camera_device *icd, | |||
1487 | static int pxa_camera_try_fmt(struct soc_camera_device *icd, | 1488 | static int pxa_camera_try_fmt(struct soc_camera_device *icd, |
1488 | struct v4l2_format *f) | 1489 | struct v4l2_format *f) |
1489 | { | 1490 | { |
1490 | struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); | ||
1491 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); | 1491 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); |
1492 | const struct soc_camera_format_xlate *xlate; | 1492 | const struct soc_camera_format_xlate *xlate; |
1493 | struct v4l2_pix_format *pix = &f->fmt.pix; | 1493 | struct v4l2_pix_format *pix = &f->fmt.pix; |
1494 | struct v4l2_mbus_framefmt mf; | ||
1494 | __u32 pixfmt = pix->pixelformat; | 1495 | __u32 pixfmt = pix->pixelformat; |
1495 | enum v4l2_field field; | ||
1496 | int ret; | 1496 | int ret; |
1497 | 1497 | ||
1498 | xlate = soc_camera_xlate_by_fourcc(icd, pixfmt); | 1498 | xlate = soc_camera_xlate_by_fourcc(icd, pixfmt); |
1499 | if (!xlate) { | 1499 | if (!xlate) { |
1500 | dev_warn(ici->v4l2_dev.dev, "Format %x not found\n", pixfmt); | 1500 | dev_warn(icd->dev.parent, "Format %x not found\n", pixfmt); |
1501 | return -EINVAL; | 1501 | return -EINVAL; |
1502 | } | 1502 | } |
1503 | 1503 | ||
@@ -1511,22 +1511,36 @@ static int pxa_camera_try_fmt(struct soc_camera_device *icd, | |||
1511 | &pix->height, 32, 2048, 0, | 1511 | &pix->height, 32, 2048, 0, |
1512 | pixfmt == V4L2_PIX_FMT_YUV422P ? 4 : 0); | 1512 | pixfmt == V4L2_PIX_FMT_YUV422P ? 4 : 0); |
1513 | 1513 | ||
1514 | pix->bytesperline = pix->width * | 1514 | pix->bytesperline = soc_mbus_bytes_per_line(pix->width, |
1515 | DIV_ROUND_UP(xlate->host_fmt->depth, 8); | 1515 | xlate->host_fmt); |
1516 | if (pix->bytesperline < 0) | ||
1517 | return pix->bytesperline; | ||
1516 | pix->sizeimage = pix->height * pix->bytesperline; | 1518 | pix->sizeimage = pix->height * pix->bytesperline; |
1517 | 1519 | ||
1518 | /* camera has to see its format, but the user the original one */ | ||
1519 | pix->pixelformat = xlate->cam_fmt->fourcc; | ||
1520 | /* limit to sensor capabilities */ | 1520 | /* limit to sensor capabilities */ |
1521 | ret = v4l2_subdev_call(sd, video, try_fmt, f); | 1521 | mf.width = pix->width; |
1522 | pix->pixelformat = pixfmt; | 1522 | mf.height = pix->height; |
1523 | mf.field = pix->field; | ||
1524 | mf.colorspace = pix->colorspace; | ||
1525 | mf.code = xlate->code; | ||
1523 | 1526 | ||
1524 | field = pix->field; | 1527 | ret = v4l2_subdev_call(sd, video, try_mbus_fmt, &mf); |
1528 | if (ret < 0) | ||
1529 | return ret; | ||
1525 | 1530 | ||
1526 | if (field == V4L2_FIELD_ANY) { | 1531 | pix->width = mf.width; |
1527 | pix->field = V4L2_FIELD_NONE; | 1532 | pix->height = mf.height; |
1528 | } else if (field != V4L2_FIELD_NONE) { | 1533 | pix->colorspace = mf.colorspace; |
1529 | dev_err(icd->dev.parent, "Field type %d unsupported.\n", field); | 1534 | |
1535 | switch (mf.field) { | ||
1536 | case V4L2_FIELD_ANY: | ||
1537 | case V4L2_FIELD_NONE: | ||
1538 | pix->field = V4L2_FIELD_NONE; | ||
1539 | break; | ||
1540 | default: | ||
1541 | /* TODO: support interlaced at least in pass-through mode */ | ||
1542 | dev_err(icd->dev.parent, "Field type %d unsupported.\n", | ||
1543 | mf.field); | ||
1530 | return -EINVAL; | 1544 | return -EINVAL; |
1531 | } | 1545 | } |
1532 | 1546 | ||
diff --git a/drivers/media/video/rj54n1cb0c.c b/drivers/media/video/rj54n1cb0c.c index 373f2a30a677..7b08bff443f5 100644 --- a/drivers/media/video/rj54n1cb0c.c +++ b/drivers/media/video/rj54n1cb0c.c | |||
@@ -16,6 +16,7 @@ | |||
16 | #include <media/v4l2-subdev.h> | 16 | #include <media/v4l2-subdev.h> |
17 | #include <media/v4l2-chip-ident.h> | 17 | #include <media/v4l2-chip-ident.h> |
18 | #include <media/soc_camera.h> | 18 | #include <media/soc_camera.h> |
19 | #include <media/soc_mediabus.h> | ||
19 | 20 | ||
20 | #define RJ54N1_DEV_CODE 0x0400 | 21 | #define RJ54N1_DEV_CODE 0x0400 |
21 | #define RJ54N1_DEV_CODE2 0x0401 | 22 | #define RJ54N1_DEV_CODE2 0x0401 |
@@ -85,18 +86,35 @@ | |||
85 | 86 | ||
86 | /* I2C addresses: 0x50, 0x51, 0x60, 0x61 */ | 87 | /* I2C addresses: 0x50, 0x51, 0x60, 0x61 */ |
87 | 88 | ||
88 | static const struct soc_camera_data_format rj54n1_colour_formats[] = { | 89 | /* RJ54N1CB0C has only one fixed colorspace per pixelcode */ |
89 | { | 90 | struct rj54n1_datafmt { |
90 | .name = "YUYV", | 91 | enum v4l2_mbus_pixelcode code; |
91 | .depth = 16, | 92 | enum v4l2_colorspace colorspace; |
92 | .fourcc = V4L2_PIX_FMT_YUYV, | 93 | }; |
93 | .colorspace = V4L2_COLORSPACE_JPEG, | 94 | |
94 | }, { | 95 | /* Find a data format by a pixel code in an array */ |
95 | .name = "RGB565", | 96 | static const struct rj54n1_datafmt *rj54n1_find_datafmt( |
96 | .depth = 16, | 97 | enum v4l2_mbus_pixelcode code, const struct rj54n1_datafmt *fmt, |
97 | .fourcc = V4L2_PIX_FMT_RGB565, | 98 | int n) |
98 | .colorspace = V4L2_COLORSPACE_SRGB, | 99 | { |
99 | } | 100 | int i; |
101 | for (i = 0; i < n; i++) | ||
102 | if (fmt[i].code == code) | ||
103 | return fmt + i; | ||
104 | |||
105 | return NULL; | ||
106 | } | ||
107 | |||
108 | static const struct rj54n1_datafmt rj54n1_colour_fmts[] = { | ||
109 | {V4L2_MBUS_FMT_YUYV8_2X8_LE, V4L2_COLORSPACE_JPEG}, | ||
110 | {V4L2_MBUS_FMT_YVYU8_2X8_LE, V4L2_COLORSPACE_JPEG}, | ||
111 | {V4L2_MBUS_FMT_RGB565_2X8_LE, V4L2_COLORSPACE_SRGB}, | ||
112 | {V4L2_MBUS_FMT_RGB565_2X8_BE, V4L2_COLORSPACE_SRGB}, | ||
113 | {V4L2_MBUS_FMT_SBGGR10_2X8_PADHI_LE, V4L2_COLORSPACE_SRGB}, | ||
114 | {V4L2_MBUS_FMT_SBGGR10_2X8_PADLO_LE, V4L2_COLORSPACE_SRGB}, | ||
115 | {V4L2_MBUS_FMT_SBGGR10_2X8_PADHI_BE, V4L2_COLORSPACE_SRGB}, | ||
116 | {V4L2_MBUS_FMT_SBGGR10_2X8_PADLO_BE, V4L2_COLORSPACE_SRGB}, | ||
117 | {V4L2_MBUS_FMT_SBGGR10_1X10, V4L2_COLORSPACE_SRGB}, | ||
100 | }; | 118 | }; |
101 | 119 | ||
102 | struct rj54n1_clock_div { | 120 | struct rj54n1_clock_div { |
@@ -109,12 +127,12 @@ struct rj54n1_clock_div { | |||
109 | 127 | ||
110 | struct rj54n1 { | 128 | struct rj54n1 { |
111 | struct v4l2_subdev subdev; | 129 | struct v4l2_subdev subdev; |
130 | const struct rj54n1_datafmt *fmt; | ||
112 | struct v4l2_rect rect; /* Sensor window */ | 131 | struct v4l2_rect rect; /* Sensor window */ |
113 | unsigned short width; /* Output window */ | 132 | unsigned short width; /* Output window */ |
114 | unsigned short height; | 133 | unsigned short height; |
115 | unsigned short resize; /* Sensor * 1024 / resize = Output */ | 134 | unsigned short resize; /* Sensor * 1024 / resize = Output */ |
116 | struct rj54n1_clock_div clk_div; | 135 | struct rj54n1_clock_div clk_div; |
117 | u32 fourcc; | ||
118 | unsigned short scale; | 136 | unsigned short scale; |
119 | u8 bank; | 137 | u8 bank; |
120 | }; | 138 | }; |
@@ -440,6 +458,16 @@ static int reg_write_multiple(struct i2c_client *client, | |||
440 | return 0; | 458 | return 0; |
441 | } | 459 | } |
442 | 460 | ||
461 | static int rj54n1_enum_fmt(struct v4l2_subdev *sd, int index, | ||
462 | enum v4l2_mbus_pixelcode *code) | ||
463 | { | ||
464 | if ((unsigned int)index >= ARRAY_SIZE(rj54n1_colour_fmts)) | ||
465 | return -EINVAL; | ||
466 | |||
467 | *code = rj54n1_colour_fmts[index].code; | ||
468 | return 0; | ||
469 | } | ||
470 | |||
443 | static int rj54n1_s_stream(struct v4l2_subdev *sd, int enable) | 471 | static int rj54n1_s_stream(struct v4l2_subdev *sd, int enable) |
444 | { | 472 | { |
445 | /* TODO: start / stop streaming */ | 473 | /* TODO: start / stop streaming */ |
@@ -527,16 +555,17 @@ static int rj54n1_cropcap(struct v4l2_subdev *sd, struct v4l2_cropcap *a) | |||
527 | return 0; | 555 | return 0; |
528 | } | 556 | } |
529 | 557 | ||
530 | static int rj54n1_g_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | 558 | static int rj54n1_g_fmt(struct v4l2_subdev *sd, |
559 | struct v4l2_mbus_framefmt *mf) | ||
531 | { | 560 | { |
532 | struct i2c_client *client = sd->priv; | 561 | struct i2c_client *client = sd->priv; |
533 | struct rj54n1 *rj54n1 = to_rj54n1(client); | 562 | struct rj54n1 *rj54n1 = to_rj54n1(client); |
534 | struct v4l2_pix_format *pix = &f->fmt.pix; | ||
535 | 563 | ||
536 | pix->pixelformat = rj54n1->fourcc; | 564 | mf->code = rj54n1->fmt->code; |
537 | pix->field = V4L2_FIELD_NONE; | 565 | mf->colorspace = rj54n1->fmt->colorspace; |
538 | pix->width = rj54n1->width; | 566 | mf->field = V4L2_FIELD_NONE; |
539 | pix->height = rj54n1->height; | 567 | mf->width = rj54n1->width; |
568 | mf->height = rj54n1->height; | ||
540 | 569 | ||
541 | return 0; | 570 | return 0; |
542 | } | 571 | } |
@@ -787,26 +816,44 @@ static int rj54n1_reg_init(struct i2c_client *client) | |||
787 | } | 816 | } |
788 | 817 | ||
789 | /* FIXME: streaming output only up to 800x600 is functional */ | 818 | /* FIXME: streaming output only up to 800x600 is functional */ |
790 | static int rj54n1_try_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | 819 | static int rj54n1_try_fmt(struct v4l2_subdev *sd, |
820 | struct v4l2_mbus_framefmt *mf) | ||
791 | { | 821 | { |
792 | struct v4l2_pix_format *pix = &f->fmt.pix; | 822 | struct i2c_client *client = sd->priv; |
823 | struct rj54n1 *rj54n1 = to_rj54n1(client); | ||
824 | const struct rj54n1_datafmt *fmt; | ||
825 | int align = mf->code == V4L2_MBUS_FMT_SBGGR10_1X10 || | ||
826 | mf->code == V4L2_MBUS_FMT_SBGGR10_2X8_PADHI_BE || | ||
827 | mf->code == V4L2_MBUS_FMT_SBGGR10_2X8_PADLO_BE || | ||
828 | mf->code == V4L2_MBUS_FMT_SBGGR10_2X8_PADHI_LE || | ||
829 | mf->code == V4L2_MBUS_FMT_SBGGR10_2X8_PADLO_LE; | ||
830 | |||
831 | dev_dbg(&client->dev, "%s: code = %d, width = %u, height = %u\n", | ||
832 | __func__, mf->code, mf->width, mf->height); | ||
833 | |||
834 | fmt = rj54n1_find_datafmt(mf->code, rj54n1_colour_fmts, | ||
835 | ARRAY_SIZE(rj54n1_colour_fmts)); | ||
836 | if (!fmt) { | ||
837 | fmt = rj54n1->fmt; | ||
838 | mf->code = fmt->code; | ||
839 | } | ||
793 | 840 | ||
794 | pix->field = V4L2_FIELD_NONE; | 841 | mf->field = V4L2_FIELD_NONE; |
842 | mf->colorspace = fmt->colorspace; | ||
795 | 843 | ||
796 | if (pix->width > 800) | 844 | v4l_bound_align_image(&mf->width, 112, RJ54N1_MAX_WIDTH, align, |
797 | pix->width = 800; | 845 | &mf->height, 84, RJ54N1_MAX_HEIGHT, align, 0); |
798 | if (pix->height > 600) | ||
799 | pix->height = 600; | ||
800 | 846 | ||
801 | return 0; | 847 | return 0; |
802 | } | 848 | } |
803 | 849 | ||
804 | static int rj54n1_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | 850 | static int rj54n1_s_fmt(struct v4l2_subdev *sd, |
851 | struct v4l2_mbus_framefmt *mf) | ||
805 | { | 852 | { |
806 | struct i2c_client *client = sd->priv; | 853 | struct i2c_client *client = sd->priv; |
807 | struct rj54n1 *rj54n1 = to_rj54n1(client); | 854 | struct rj54n1 *rj54n1 = to_rj54n1(client); |
808 | struct v4l2_pix_format *pix = &f->fmt.pix; | 855 | const struct rj54n1_datafmt *fmt; |
809 | unsigned int output_w, output_h, | 856 | unsigned int output_w, output_h, max_w, max_h, |
810 | input_w = rj54n1->rect.width, input_h = rj54n1->rect.height; | 857 | input_w = rj54n1->rect.width, input_h = rj54n1->rect.height; |
811 | int ret; | 858 | int ret; |
812 | 859 | ||
@@ -814,7 +861,7 @@ static int rj54n1_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | |||
814 | * The host driver can call us without .try_fmt(), so we have to take | 861 | * The host driver can call us without .try_fmt(), so we have to take |
815 | * care ourselves | 862 | * care ourselves |
816 | */ | 863 | */ |
817 | ret = rj54n1_try_fmt(sd, f); | 864 | ret = rj54n1_try_fmt(sd, mf); |
818 | 865 | ||
819 | /* | 866 | /* |
820 | * Verify if the sensor has just been powered on. TODO: replace this | 867 | * Verify if the sensor has just been powered on. TODO: replace this |
@@ -832,49 +879,101 @@ static int rj54n1_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | |||
832 | } | 879 | } |
833 | 880 | ||
834 | /* RA_SEL_UL is only relevant for raw modes, ignored otherwise. */ | 881 | /* RA_SEL_UL is only relevant for raw modes, ignored otherwise. */ |
835 | switch (pix->pixelformat) { | 882 | switch (mf->code) { |
836 | case V4L2_PIX_FMT_YUYV: | 883 | case V4L2_MBUS_FMT_YUYV8_2X8_LE: |
837 | ret = reg_write(client, RJ54N1_OUT_SEL, 0); | 884 | ret = reg_write(client, RJ54N1_OUT_SEL, 0); |
838 | if (!ret) | 885 | if (!ret) |
839 | ret = reg_set(client, RJ54N1_BYTE_SWAP, 8, 8); | 886 | ret = reg_set(client, RJ54N1_BYTE_SWAP, 8, 8); |
840 | break; | 887 | break; |
841 | case V4L2_PIX_FMT_RGB565: | 888 | case V4L2_MBUS_FMT_YVYU8_2X8_LE: |
889 | ret = reg_write(client, RJ54N1_OUT_SEL, 0); | ||
890 | if (!ret) | ||
891 | ret = reg_set(client, RJ54N1_BYTE_SWAP, 0, 8); | ||
892 | break; | ||
893 | case V4L2_MBUS_FMT_RGB565_2X8_LE: | ||
842 | ret = reg_write(client, RJ54N1_OUT_SEL, 0x11); | 894 | ret = reg_write(client, RJ54N1_OUT_SEL, 0x11); |
843 | if (!ret) | 895 | if (!ret) |
844 | ret = reg_set(client, RJ54N1_BYTE_SWAP, 8, 8); | 896 | ret = reg_set(client, RJ54N1_BYTE_SWAP, 8, 8); |
845 | break; | 897 | break; |
898 | case V4L2_MBUS_FMT_RGB565_2X8_BE: | ||
899 | ret = reg_write(client, RJ54N1_OUT_SEL, 0x11); | ||
900 | if (!ret) | ||
901 | ret = reg_set(client, RJ54N1_BYTE_SWAP, 0, 8); | ||
902 | break; | ||
903 | case V4L2_MBUS_FMT_SBGGR10_2X8_PADLO_LE: | ||
904 | ret = reg_write(client, RJ54N1_OUT_SEL, 4); | ||
905 | if (!ret) | ||
906 | ret = reg_set(client, RJ54N1_BYTE_SWAP, 8, 8); | ||
907 | if (!ret) | ||
908 | ret = reg_write(client, RJ54N1_RA_SEL_UL, 0); | ||
909 | break; | ||
910 | case V4L2_MBUS_FMT_SBGGR10_2X8_PADHI_LE: | ||
911 | ret = reg_write(client, RJ54N1_OUT_SEL, 4); | ||
912 | if (!ret) | ||
913 | ret = reg_set(client, RJ54N1_BYTE_SWAP, 8, 8); | ||
914 | if (!ret) | ||
915 | ret = reg_write(client, RJ54N1_RA_SEL_UL, 8); | ||
916 | break; | ||
917 | case V4L2_MBUS_FMT_SBGGR10_2X8_PADLO_BE: | ||
918 | ret = reg_write(client, RJ54N1_OUT_SEL, 4); | ||
919 | if (!ret) | ||
920 | ret = reg_set(client, RJ54N1_BYTE_SWAP, 0, 8); | ||
921 | if (!ret) | ||
922 | ret = reg_write(client, RJ54N1_RA_SEL_UL, 0); | ||
923 | break; | ||
924 | case V4L2_MBUS_FMT_SBGGR10_2X8_PADHI_BE: | ||
925 | ret = reg_write(client, RJ54N1_OUT_SEL, 4); | ||
926 | if (!ret) | ||
927 | ret = reg_set(client, RJ54N1_BYTE_SWAP, 0, 8); | ||
928 | if (!ret) | ||
929 | ret = reg_write(client, RJ54N1_RA_SEL_UL, 8); | ||
930 | break; | ||
931 | case V4L2_MBUS_FMT_SBGGR10_1X10: | ||
932 | ret = reg_write(client, RJ54N1_OUT_SEL, 5); | ||
933 | break; | ||
846 | default: | 934 | default: |
847 | ret = -EINVAL; | 935 | ret = -EINVAL; |
848 | } | 936 | } |
849 | 937 | ||
938 | /* Special case: a raw mode with 10 bits of data per clock tick */ | ||
939 | if (!ret) | ||
940 | ret = reg_set(client, RJ54N1_OCLK_SEL_EN, | ||
941 | (mf->code == V4L2_MBUS_FMT_SBGGR10_1X10) << 1, 2); | ||
942 | |||
850 | if (ret < 0) | 943 | if (ret < 0) |
851 | return ret; | 944 | return ret; |
852 | 945 | ||
853 | /* Supported scales 1:1 - 1:16 */ | 946 | /* Supported scales 1:1 >= scale > 1:16 */ |
854 | if (pix->width < input_w / 16) | 947 | max_w = mf->width * (16 * 1024 - 1) / 1024; |
855 | pix->width = input_w / 16; | 948 | if (input_w > max_w) |
856 | if (pix->height < input_h / 16) | 949 | input_w = max_w; |
857 | pix->height = input_h / 16; | 950 | max_h = mf->height * (16 * 1024 - 1) / 1024; |
951 | if (input_h > max_h) | ||
952 | input_h = max_h; | ||
858 | 953 | ||
859 | output_w = pix->width; | 954 | output_w = mf->width; |
860 | output_h = pix->height; | 955 | output_h = mf->height; |
861 | 956 | ||
862 | ret = rj54n1_sensor_scale(sd, &input_w, &input_h, &output_w, &output_h); | 957 | ret = rj54n1_sensor_scale(sd, &input_w, &input_h, &output_w, &output_h); |
863 | if (ret < 0) | 958 | if (ret < 0) |
864 | return ret; | 959 | return ret; |
865 | 960 | ||
866 | rj54n1->fourcc = pix->pixelformat; | 961 | fmt = rj54n1_find_datafmt(mf->code, rj54n1_colour_fmts, |
962 | ARRAY_SIZE(rj54n1_colour_fmts)); | ||
963 | |||
964 | rj54n1->fmt = fmt; | ||
867 | rj54n1->resize = ret; | 965 | rj54n1->resize = ret; |
868 | rj54n1->rect.width = input_w; | 966 | rj54n1->rect.width = input_w; |
869 | rj54n1->rect.height = input_h; | 967 | rj54n1->rect.height = input_h; |
870 | rj54n1->width = output_w; | 968 | rj54n1->width = output_w; |
871 | rj54n1->height = output_h; | 969 | rj54n1->height = output_h; |
872 | 970 | ||
873 | pix->width = output_w; | 971 | mf->width = output_w; |
874 | pix->height = output_h; | 972 | mf->height = output_h; |
875 | pix->field = V4L2_FIELD_NONE; | 973 | mf->field = V4L2_FIELD_NONE; |
974 | mf->colorspace = fmt->colorspace; | ||
876 | 975 | ||
877 | return ret; | 976 | return 0; |
878 | } | 977 | } |
879 | 978 | ||
880 | static int rj54n1_g_chip_ident(struct v4l2_subdev *sd, | 979 | static int rj54n1_g_chip_ident(struct v4l2_subdev *sd, |
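The input-window clamping in the hunk above keeps the sensor scaler strictly below 1:16, matching the updated comment: for a requested output width of 640, max_w = 640 * (16 * 1024 - 1) / 1024 = 10239, one pixel short of a full 16x window, so rj54n1_sensor_scale() is never asked for a 1:16 or coarser ratio.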
@@ -1054,9 +1153,10 @@ static struct v4l2_subdev_core_ops rj54n1_subdev_core_ops = { | |||
1054 | 1153 | ||
1055 | static struct v4l2_subdev_video_ops rj54n1_subdev_video_ops = { | 1154 | static struct v4l2_subdev_video_ops rj54n1_subdev_video_ops = { |
1056 | .s_stream = rj54n1_s_stream, | 1155 | .s_stream = rj54n1_s_stream, |
1057 | .s_fmt = rj54n1_s_fmt, | 1156 | .s_mbus_fmt = rj54n1_s_fmt, |
1058 | .g_fmt = rj54n1_g_fmt, | 1157 | .g_mbus_fmt = rj54n1_g_fmt, |
1059 | .try_fmt = rj54n1_try_fmt, | 1158 | .try_mbus_fmt = rj54n1_try_fmt, |
1159 | .enum_mbus_fmt = rj54n1_enum_fmt, | ||
1060 | .g_crop = rj54n1_g_crop, | 1160 | .g_crop = rj54n1_g_crop, |
1061 | .cropcap = rj54n1_cropcap, | 1161 | .cropcap = rj54n1_cropcap, |
1062 | }; | 1162 | }; |
@@ -1153,7 +1253,7 @@ static int rj54n1_probe(struct i2c_client *client, | |||
1153 | rj54n1->rect.height = RJ54N1_MAX_HEIGHT; | 1253 | rj54n1->rect.height = RJ54N1_MAX_HEIGHT; |
1154 | rj54n1->width = RJ54N1_MAX_WIDTH; | 1254 | rj54n1->width = RJ54N1_MAX_WIDTH; |
1155 | rj54n1->height = RJ54N1_MAX_HEIGHT; | 1255 | rj54n1->height = RJ54N1_MAX_HEIGHT; |
1156 | rj54n1->fourcc = V4L2_PIX_FMT_YUYV; | 1256 | rj54n1->fmt = &rj54n1_colour_fmts[0]; |
1157 | rj54n1->resize = 1024; | 1257 | rj54n1->resize = 1024; |
1158 | 1258 | ||
1159 | ret = rj54n1_video_probe(icd, client); | 1259 | ret = rj54n1_video_probe(icd, client); |
@@ -1164,9 +1264,6 @@ static int rj54n1_probe(struct i2c_client *client, | |||
1164 | return ret; | 1264 | return ret; |
1165 | } | 1265 | } |
1166 | 1266 | ||
1167 | icd->formats = rj54n1_colour_formats; | ||
1168 | icd->num_formats = ARRAY_SIZE(rj54n1_colour_formats); | ||
1169 | |||
1170 | return ret; | 1267 | return ret; |
1171 | } | 1268 | } |
1172 | 1269 | ||
diff --git a/drivers/media/video/sh_mobile_ceu_camera.c b/drivers/media/video/sh_mobile_ceu_camera.c index 2a38e1d90613..273ad34f9d8d 100644 --- a/drivers/media/video/sh_mobile_ceu_camera.c +++ b/drivers/media/video/sh_mobile_ceu_camera.c | |||
@@ -38,6 +38,8 @@ | |||
38 | #include <media/soc_camera.h> | 38 | #include <media/soc_camera.h> |
39 | #include <media/sh_mobile_ceu.h> | 39 | #include <media/sh_mobile_ceu.h> |
40 | #include <media/videobuf-dma-contig.h> | 40 | #include <media/videobuf-dma-contig.h> |
41 | #include <media/v4l2-mediabus.h> | ||
42 | #include <media/soc_mediabus.h> | ||
41 | 43 | ||
42 | /* register offsets for sh7722 / sh7723 */ | 44 | /* register offsets for sh7722 / sh7723 */ |
43 | 45 | ||
@@ -85,7 +87,7 @@ | |||
85 | /* per video frame buffer */ | 87 | /* per video frame buffer */ |
86 | struct sh_mobile_ceu_buffer { | 88 | struct sh_mobile_ceu_buffer { |
87 | struct videobuf_buffer vb; /* v4l buffer must be first */ | 89 | struct videobuf_buffer vb; /* v4l buffer must be first */ |
88 | const struct soc_camera_data_format *fmt; | 90 | enum v4l2_mbus_pixelcode code; |
89 | }; | 91 | }; |
90 | 92 | ||
91 | struct sh_mobile_ceu_dev { | 93 | struct sh_mobile_ceu_dev { |
@@ -114,8 +116,8 @@ struct sh_mobile_ceu_cam { | |||
114 | struct v4l2_rect ceu_rect; | 116 | struct v4l2_rect ceu_rect; |
115 | unsigned int cam_width; | 117 | unsigned int cam_width; |
116 | unsigned int cam_height; | 118 | unsigned int cam_height; |
117 | const struct soc_camera_data_format *extra_fmt; | 119 | const struct soc_mbus_pixelfmt *extra_fmt; |
118 | const struct soc_camera_data_format *camera_fmt; | 120 | enum v4l2_mbus_pixelcode code; |
119 | }; | 121 | }; |
120 | 122 | ||
121 | static unsigned long make_bus_param(struct sh_mobile_ceu_dev *pcdev) | 123 | static unsigned long make_bus_param(struct sh_mobile_ceu_dev *pcdev) |
@@ -197,10 +199,13 @@ static int sh_mobile_ceu_videobuf_setup(struct videobuf_queue *vq, | |||
197 | struct soc_camera_device *icd = vq->priv_data; | 199 | struct soc_camera_device *icd = vq->priv_data; |
198 | struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); | 200 | struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); |
199 | struct sh_mobile_ceu_dev *pcdev = ici->priv; | 201 | struct sh_mobile_ceu_dev *pcdev = ici->priv; |
200 | int bytes_per_pixel = (icd->current_fmt->depth + 7) >> 3; | 202 | int bytes_per_line = soc_mbus_bytes_per_line(icd->user_width, |
203 | icd->current_fmt->host_fmt); | ||
201 | 204 | ||
202 | *size = PAGE_ALIGN(icd->user_width * icd->user_height * | 205 | if (bytes_per_line < 0) |
203 | bytes_per_pixel); | 206 | return bytes_per_line; |
207 | |||
208 | *size = PAGE_ALIGN(bytes_per_line * icd->user_height); | ||
204 | 209 | ||
205 | if (0 == *count) | 210 | if (0 == *count) |
206 | *count = 2; | 211 | *count = 2; |
@@ -284,7 +289,7 @@ static int sh_mobile_ceu_capture(struct sh_mobile_ceu_dev *pcdev) | |||
284 | ceu_write(pcdev, CDBYR, phys_addr_bottom); | 289 | ceu_write(pcdev, CDBYR, phys_addr_bottom); |
285 | } | 290 | } |
286 | 291 | ||
287 | switch (icd->current_fmt->fourcc) { | 292 | switch (icd->current_fmt->host_fmt->fourcc) { |
288 | case V4L2_PIX_FMT_NV12: | 293 | case V4L2_PIX_FMT_NV12: |
289 | case V4L2_PIX_FMT_NV21: | 294 | case V4L2_PIX_FMT_NV21: |
290 | case V4L2_PIX_FMT_NV16: | 295 | case V4L2_PIX_FMT_NV16: |
@@ -311,8 +316,13 @@ static int sh_mobile_ceu_videobuf_prepare(struct videobuf_queue *vq, | |||
311 | { | 316 | { |
312 | struct soc_camera_device *icd = vq->priv_data; | 317 | struct soc_camera_device *icd = vq->priv_data; |
313 | struct sh_mobile_ceu_buffer *buf; | 318 | struct sh_mobile_ceu_buffer *buf; |
319 | int bytes_per_line = soc_mbus_bytes_per_line(icd->user_width, | ||
320 | icd->current_fmt->host_fmt); | ||
314 | int ret; | 321 | int ret; |
315 | 322 | ||
323 | if (bytes_per_line < 0) | ||
324 | return bytes_per_line; | ||
325 | |||
316 | buf = container_of(vb, struct sh_mobile_ceu_buffer, vb); | 326 | buf = container_of(vb, struct sh_mobile_ceu_buffer, vb); |
317 | 327 | ||
318 | dev_dbg(icd->dev.parent, "%s (vb=0x%p) 0x%08lx %zd\n", __func__, | 328 | dev_dbg(icd->dev.parent, "%s (vb=0x%p) 0x%08lx %zd\n", __func__, |
@@ -331,18 +341,18 @@ static int sh_mobile_ceu_videobuf_prepare(struct videobuf_queue *vq, | |||
331 | 341 | ||
332 | BUG_ON(NULL == icd->current_fmt); | 342 | BUG_ON(NULL == icd->current_fmt); |
333 | 343 | ||
334 | if (buf->fmt != icd->current_fmt || | 344 | if (buf->code != icd->current_fmt->code || |
335 | vb->width != icd->user_width || | 345 | vb->width != icd->user_width || |
336 | vb->height != icd->user_height || | 346 | vb->height != icd->user_height || |
337 | vb->field != field) { | 347 | vb->field != field) { |
338 | buf->fmt = icd->current_fmt; | 348 | buf->code = icd->current_fmt->code; |
339 | vb->width = icd->user_width; | 349 | vb->width = icd->user_width; |
340 | vb->height = icd->user_height; | 350 | vb->height = icd->user_height; |
341 | vb->field = field; | 351 | vb->field = field; |
342 | vb->state = VIDEOBUF_NEEDS_INIT; | 352 | vb->state = VIDEOBUF_NEEDS_INIT; |
343 | } | 353 | } |
344 | 354 | ||
345 | vb->size = vb->width * vb->height * ((buf->fmt->depth + 7) >> 3); | 355 | vb->size = vb->height * bytes_per_line; |
346 | if (0 != vb->baddr && vb->bsize < vb->size) { | 356 | if (0 != vb->baddr && vb->bsize < vb->size) { |
347 | ret = -EINVAL; | 357 | ret = -EINVAL; |
348 | goto out; | 358 | goto out; |
@@ -564,19 +574,30 @@ static void sh_mobile_ceu_set_rect(struct soc_camera_device *icd, | |||
564 | in_width *= 2; | 574 | in_width *= 2; |
565 | left_offset *= 2; | 575 | left_offset *= 2; |
566 | } | 576 | } |
567 | width = cdwdr_width = out_width; | 577 | width = out_width; |
578 | cdwdr_width = out_width; | ||
568 | } else { | 579 | } else { |
569 | unsigned int w_factor = (icd->current_fmt->depth + 7) >> 3; | 580 | int bytes_per_line = soc_mbus_bytes_per_line(out_width, |
581 | icd->current_fmt->host_fmt); | ||
582 | unsigned int w_factor; | ||
570 | 583 | ||
571 | width = out_width * w_factor / 2; | 584 | width = out_width; |
572 | 585 | ||
573 | if (!pcdev->is_16bit) | 586 | switch (icd->current_fmt->host_fmt->packing) { |
574 | w_factor *= 2; | 587 | case SOC_MBUS_PACKING_2X8_PADHI: |
588 | w_factor = 2; | ||
589 | break; | ||
590 | default: | ||
591 | w_factor = 1; | ||
592 | } | ||
575 | 593 | ||
576 | in_width = rect->width * w_factor / 2; | 594 | in_width = rect->width * w_factor; |
577 | left_offset = left_offset * w_factor / 2; | 595 | left_offset = left_offset * w_factor; |
578 | 596 | ||
579 | cdwdr_width = width * 2; | 597 | if (bytes_per_line < 0) |
598 | cdwdr_width = out_width; | ||
599 | else | ||
600 | cdwdr_width = bytes_per_line; | ||
580 | } | 601 | } |
581 | 602 | ||
582 | height = out_height; | 603 | height = out_height; |
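A worked example for the data-fetch branch above: with a host format packed as SOC_MBUS_PACKING_2X8_PADHI every pixel crosses the bus as two 8-bit samples, so a 640-pixel-wide crop is programmed as in_width = 1280 samples (and left_offset doubles the same way), while the CDWDR line stride now comes from soc_mbus_bytes_per_line(), here 640 * 2 = 1280 bytes, instead of the old depth/is_16bit arithmetic.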
@@ -673,24 +694,24 @@ static int sh_mobile_ceu_set_bus_param(struct soc_camera_device *icd, | |||
673 | value = 0x00000010; /* data fetch by default */ | 694 | value = 0x00000010; /* data fetch by default */ |
674 | yuv_lineskip = 0; | 695 | yuv_lineskip = 0; |
675 | 696 | ||
676 | switch (icd->current_fmt->fourcc) { | 697 | switch (icd->current_fmt->host_fmt->fourcc) { |
677 | case V4L2_PIX_FMT_NV12: | 698 | case V4L2_PIX_FMT_NV12: |
678 | case V4L2_PIX_FMT_NV21: | 699 | case V4L2_PIX_FMT_NV21: |
679 | yuv_lineskip = 1; /* skip for NV12/21, no skip for NV16/61 */ | 700 | yuv_lineskip = 1; /* skip for NV12/21, no skip for NV16/61 */ |
680 | /* fall-through */ | 701 | /* fall-through */ |
681 | case V4L2_PIX_FMT_NV16: | 702 | case V4L2_PIX_FMT_NV16: |
682 | case V4L2_PIX_FMT_NV61: | 703 | case V4L2_PIX_FMT_NV61: |
683 | switch (cam->camera_fmt->fourcc) { | 704 | switch (cam->code) { |
684 | case V4L2_PIX_FMT_UYVY: | 705 | case V4L2_MBUS_FMT_YUYV8_2X8_BE: |
685 | value = 0x00000000; /* Cb0, Y0, Cr0, Y1 */ | 706 | value = 0x00000000; /* Cb0, Y0, Cr0, Y1 */ |
686 | break; | 707 | break; |
687 | case V4L2_PIX_FMT_VYUY: | 708 | case V4L2_MBUS_FMT_YVYU8_2X8_BE: |
688 | value = 0x00000100; /* Cr0, Y0, Cb0, Y1 */ | 709 | value = 0x00000100; /* Cr0, Y0, Cb0, Y1 */ |
689 | break; | 710 | break; |
690 | case V4L2_PIX_FMT_YUYV: | 711 | case V4L2_MBUS_FMT_YUYV8_2X8_LE: |
691 | value = 0x00000200; /* Y0, Cb0, Y1, Cr0 */ | 712 | value = 0x00000200; /* Y0, Cb0, Y1, Cr0 */ |
692 | break; | 713 | break; |
693 | case V4L2_PIX_FMT_YVYU: | 714 | case V4L2_MBUS_FMT_YVYU8_2X8_LE: |
694 | value = 0x00000300; /* Y0, Cr0, Y1, Cb0 */ | 715 | value = 0x00000300; /* Y0, Cr0, Y1, Cb0 */ |
695 | break; | 716 | break; |
696 | default: | 717 | default: |
@@ -698,8 +719,8 @@ static int sh_mobile_ceu_set_bus_param(struct soc_camera_device *icd, | |||
698 | } | 719 | } |
699 | } | 720 | } |
700 | 721 | ||
701 | if (icd->current_fmt->fourcc == V4L2_PIX_FMT_NV21 || | 722 | if (icd->current_fmt->host_fmt->fourcc == V4L2_PIX_FMT_NV21 || |
702 | icd->current_fmt->fourcc == V4L2_PIX_FMT_NV61) | 723 | icd->current_fmt->host_fmt->fourcc == V4L2_PIX_FMT_NV61) |
703 | value ^= 0x00000100; /* swap U, V to change from NV1x->NVx1 */ | 724 | value ^= 0x00000100; /* swap U, V to change from NV1x->NVx1 */ |
704 | 725 | ||
705 | value |= common_flags & SOCAM_VSYNC_ACTIVE_LOW ? 1 << 1 : 0; | 726 | value |= common_flags & SOCAM_VSYNC_ACTIVE_LOW ? 1 << 1 : 0; |
@@ -746,7 +767,8 @@ static int sh_mobile_ceu_set_bus_param(struct soc_camera_device *icd, | |||
746 | return 0; | 767 | return 0; |
747 | } | 768 | } |
748 | 769 | ||
749 | static int sh_mobile_ceu_try_bus_param(struct soc_camera_device *icd) | 770 | static int sh_mobile_ceu_try_bus_param(struct soc_camera_device *icd, |
771 | unsigned char buswidth) | ||
750 | { | 772 | { |
751 | struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); | 773 | struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); |
752 | struct sh_mobile_ceu_dev *pcdev = ici->priv; | 774 | struct sh_mobile_ceu_dev *pcdev = ici->priv; |
@@ -755,48 +777,75 @@ static int sh_mobile_ceu_try_bus_param(struct soc_camera_device *icd) | |||
755 | camera_flags = icd->ops->query_bus_param(icd); | 777 | camera_flags = icd->ops->query_bus_param(icd); |
756 | common_flags = soc_camera_bus_param_compatible(camera_flags, | 778 | common_flags = soc_camera_bus_param_compatible(camera_flags, |
757 | make_bus_param(pcdev)); | 779 | make_bus_param(pcdev)); |
758 | if (!common_flags) | 780 | if (!common_flags || buswidth > 16 || |
781 | (buswidth > 8 && !(common_flags & SOCAM_DATAWIDTH_16))) | ||
759 | return -EINVAL; | 782 | return -EINVAL; |
760 | 783 | ||
761 | return 0; | 784 | return 0; |
762 | } | 785 | } |
763 | 786 | ||
764 | static const struct soc_camera_data_format sh_mobile_ceu_formats[] = { | 787 | static const struct soc_mbus_pixelfmt sh_mobile_ceu_formats[] = { |
765 | { | ||
766 | .name = "NV12", | ||
767 | .depth = 12, | ||
768 | .fourcc = V4L2_PIX_FMT_NV12, | ||
769 | .colorspace = V4L2_COLORSPACE_JPEG, | ||
770 | }, | ||
771 | { | ||
772 | .name = "NV21", | ||
773 | .depth = 12, | ||
774 | .fourcc = V4L2_PIX_FMT_NV21, | ||
775 | .colorspace = V4L2_COLORSPACE_JPEG, | ||
776 | }, | ||
777 | { | ||
778 | .name = "NV16", | ||
779 | .depth = 16, | ||
780 | .fourcc = V4L2_PIX_FMT_NV16, | ||
781 | .colorspace = V4L2_COLORSPACE_JPEG, | ||
782 | }, | ||
783 | { | 788 | { |
784 | .name = "NV61", | 789 | .fourcc = V4L2_PIX_FMT_NV12, |
785 | .depth = 16, | 790 | .name = "NV12", |
786 | .fourcc = V4L2_PIX_FMT_NV61, | 791 | .bits_per_sample = 12, |
787 | .colorspace = V4L2_COLORSPACE_JPEG, | 792 | .packing = SOC_MBUS_PACKING_NONE, |
793 | .order = SOC_MBUS_ORDER_LE, | ||
794 | }, { | ||
795 | .fourcc = V4L2_PIX_FMT_NV21, | ||
796 | .name = "NV21", | ||
797 | .bits_per_sample = 12, | ||
798 | .packing = SOC_MBUS_PACKING_NONE, | ||
799 | .order = SOC_MBUS_ORDER_LE, | ||
800 | }, { | ||
801 | .fourcc = V4L2_PIX_FMT_NV16, | ||
802 | .name = "NV16", | ||
803 | .bits_per_sample = 16, | ||
804 | .packing = SOC_MBUS_PACKING_NONE, | ||
805 | .order = SOC_MBUS_ORDER_LE, | ||
806 | }, { | ||
807 | .fourcc = V4L2_PIX_FMT_NV61, | ||
808 | .name = "NV61", | ||
809 | .bits_per_sample = 16, | ||
810 | .packing = SOC_MBUS_PACKING_NONE, | ||
811 | .order = SOC_MBUS_ORDER_LE, | ||
788 | }, | 812 | }, |
789 | }; | 813 | }; |
790 | 814 | ||
815 | /* This will be corrected as we get more formats */ | ||
816 | static bool sh_mobile_ceu_packing_supported(const struct soc_mbus_pixelfmt *fmt) | ||
817 | { | ||
818 | return fmt->packing == SOC_MBUS_PACKING_NONE || | ||
819 | (fmt->bits_per_sample == 8 && | ||
820 | fmt->packing == SOC_MBUS_PACKING_2X8_PADHI) || | ||
821 | (fmt->bits_per_sample > 8 && | ||
822 | fmt->packing == SOC_MBUS_PACKING_EXTEND16); | ||
823 | } | ||
824 | |||
791 | static int sh_mobile_ceu_get_formats(struct soc_camera_device *icd, int idx, | 825 | static int sh_mobile_ceu_get_formats(struct soc_camera_device *icd, int idx, |
792 | struct soc_camera_format_xlate *xlate) | 826 | struct soc_camera_format_xlate *xlate) |
793 | { | 827 | { |
828 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); | ||
794 | struct device *dev = icd->dev.parent; | 829 | struct device *dev = icd->dev.parent; |
795 | int ret, k, n; | 830 | int ret, k, n; |
796 | int formats = 0; | 831 | int formats = 0; |
797 | struct sh_mobile_ceu_cam *cam; | 832 | struct sh_mobile_ceu_cam *cam; |
833 | enum v4l2_mbus_pixelcode code; | ||
834 | const struct soc_mbus_pixelfmt *fmt; | ||
798 | 835 | ||
799 | ret = sh_mobile_ceu_try_bus_param(icd); | 836 | ret = v4l2_subdev_call(sd, video, enum_mbus_fmt, idx, &code); |
837 | if (ret < 0) | ||
838 | /* No more formats */ | ||
839 | return 0; | ||
840 | |||
841 | fmt = soc_mbus_get_fmtdesc(code); | ||
842 | if (!fmt) { | ||
843 | dev_err(icd->dev.parent, | ||
844 | "Invalid format code #%d: %d\n", idx, code); | ||
845 | return -EINVAL; | ||
846 | } | ||
847 | |||
848 | ret = sh_mobile_ceu_try_bus_param(icd, fmt->bits_per_sample); | ||
800 | if (ret < 0) | 849 | if (ret < 0) |
801 | return 0; | 850 | return 0; |
802 | 851 | ||
@@ -814,13 +863,13 @@ static int sh_mobile_ceu_get_formats(struct soc_camera_device *icd, int idx, | |||
814 | if (!idx) | 863 | if (!idx) |
815 | cam->extra_fmt = NULL; | 864 | cam->extra_fmt = NULL; |
816 | 865 | ||
817 | switch (icd->formats[idx].fourcc) { | 866 | switch (code) { |
818 | case V4L2_PIX_FMT_UYVY: | 867 | case V4L2_MBUS_FMT_YUYV8_2X8_BE: |
819 | case V4L2_PIX_FMT_VYUY: | 868 | case V4L2_MBUS_FMT_YVYU8_2X8_BE: |
820 | case V4L2_PIX_FMT_YUYV: | 869 | case V4L2_MBUS_FMT_YUYV8_2X8_LE: |
821 | case V4L2_PIX_FMT_YVYU: | 870 | case V4L2_MBUS_FMT_YVYU8_2X8_LE: |
822 | if (cam->extra_fmt) | 871 | if (cam->extra_fmt) |
823 | goto add_single_format; | 872 | break; |
824 | 873 | ||
825 | /* | 874 | /* |
826 | * Our case is simple so far: for any of the above four camera | 875 | * Our case is simple so far: for any of the above four camera |
@@ -831,32 +880,31 @@ static int sh_mobile_ceu_get_formats(struct soc_camera_device *icd, int idx, | |||
831 | * the host_priv pointer and check whether the format you're | 880 | * the host_priv pointer and check whether the format you're |
832 | * going to add now is already there. | 881 | * going to add now is already there. |
833 | */ | 882 | */ |
834 | cam->extra_fmt = (void *)sh_mobile_ceu_formats; | 883 | cam->extra_fmt = sh_mobile_ceu_formats; |
835 | 884 | ||
836 | n = ARRAY_SIZE(sh_mobile_ceu_formats); | 885 | n = ARRAY_SIZE(sh_mobile_ceu_formats); |
837 | formats += n; | 886 | formats += n; |
838 | for (k = 0; xlate && k < n; k++) { | 887 | for (k = 0; xlate && k < n; k++) { |
839 | xlate->host_fmt = &sh_mobile_ceu_formats[k]; | 888 | xlate->host_fmt = &sh_mobile_ceu_formats[k]; |
840 | xlate->cam_fmt = icd->formats + idx; | 889 | xlate->code = code; |
841 | xlate->buswidth = icd->formats[idx].depth; | ||
842 | xlate++; | 890 | xlate++; |
843 | dev_dbg(dev, "Providing format %s using %s\n", | 891 | dev_dbg(dev, "Providing format %s using code %d\n", |
844 | sh_mobile_ceu_formats[k].name, | 892 | sh_mobile_ceu_formats[k].name, code); |
845 | icd->formats[idx].name); | ||
846 | } | 893 | } |
894 | break; | ||
847 | default: | 895 | default: |
848 | add_single_format: | 896 | if (!sh_mobile_ceu_packing_supported(fmt)) |
849 | /* Generic pass-through */ | 897 | return 0; |
850 | formats++; | 898 | } |
851 | if (xlate) { | 899 | |
852 | xlate->host_fmt = icd->formats + idx; | 900 | /* Generic pass-through */ |
853 | xlate->cam_fmt = icd->formats + idx; | 901 | formats++; |
854 | xlate->buswidth = icd->formats[idx].depth; | 902 | if (xlate) { |
855 | xlate++; | 903 | xlate->host_fmt = fmt; |
856 | dev_dbg(dev, | 904 | xlate->code = code; |
857 | "Providing format %s in pass-through mode\n", | 905 | xlate++; |
858 | icd->formats[idx].name); | 906 | dev_dbg(dev, "Providing format %s in pass-through mode\n", |
859 | } | 907 | xlate->host_fmt->name); |
860 | } | 908 | } |
861 | 909 | ||
862 | return formats; | 910 | return formats; |
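Both host drivers now rely on soc_mbus_get_fmtdesc() to turn a media-bus code into a soc_mbus_pixelfmt descriptor. A minimal sketch of such a lookup over a static, code-indexed table — the concrete entries and guard checks in soc_mediabus.c may differ, and the YUYV entry is only an assumed example:

static const struct soc_mbus_pixelfmt mbus_fmt_sketch[] = {
	[V4L2_MBUS_FMT_YUYV8_2X8_LE] = {
		.fourcc			= V4L2_PIX_FMT_YUYV,
		.name			= "YUYV",
		.bits_per_sample	= 8,
		.packing		= SOC_MBUS_PACKING_2X8_PADHI,	/* 2 bytes/pixel */
		.order			= SOC_MBUS_ORDER_LE,
	},
	/* ... one entry per V4L2_MBUS_FMT_* code ... */
};

static const struct soc_mbus_pixelfmt *get_fmtdesc_sketch(enum v4l2_mbus_pixelcode code)
{
	if ((unsigned int)code >= ARRAY_SIZE(mbus_fmt_sketch) ||
	    !mbus_fmt_sketch[code].name)
		return NULL;

	return &mbus_fmt_sketch[code];
}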
@@ -1036,17 +1084,15 @@ static int client_s_crop(struct v4l2_subdev *sd, struct v4l2_crop *crop, | |||
1036 | static int get_camera_scales(struct v4l2_subdev *sd, struct v4l2_rect *rect, | 1084 | static int get_camera_scales(struct v4l2_subdev *sd, struct v4l2_rect *rect, |
1037 | unsigned int *scale_h, unsigned int *scale_v) | 1085 | unsigned int *scale_h, unsigned int *scale_v) |
1038 | { | 1086 | { |
1039 | struct v4l2_format f; | 1087 | struct v4l2_mbus_framefmt mf; |
1040 | int ret; | 1088 | int ret; |
1041 | 1089 | ||
1042 | f.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | 1090 | ret = v4l2_subdev_call(sd, video, g_mbus_fmt, &mf); |
1043 | |||
1044 | ret = v4l2_subdev_call(sd, video, g_fmt, &f); | ||
1045 | if (ret < 0) | 1091 | if (ret < 0) |
1046 | return ret; | 1092 | return ret; |
1047 | 1093 | ||
1048 | *scale_h = calc_generic_scale(rect->width, f.fmt.pix.width); | 1094 | *scale_h = calc_generic_scale(rect->width, mf.width); |
1049 | *scale_v = calc_generic_scale(rect->height, f.fmt.pix.height); | 1095 | *scale_v = calc_generic_scale(rect->height, mf.height); |
1050 | 1096 | ||
1051 | return 0; | 1097 | return 0; |
1052 | } | 1098 | } |
@@ -1061,32 +1107,29 @@ static int get_camera_subwin(struct soc_camera_device *icd, | |||
1061 | if (!ceu_rect->width) { | 1107 | if (!ceu_rect->width) { |
1062 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); | 1108 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); |
1063 | struct device *dev = icd->dev.parent; | 1109 | struct device *dev = icd->dev.parent; |
1064 | struct v4l2_format f; | 1110 | struct v4l2_mbus_framefmt mf; |
1065 | struct v4l2_pix_format *pix = &f.fmt.pix; | ||
1066 | int ret; | 1111 | int ret; |
1067 | /* First time */ | 1112 | /* First time */ |
1068 | 1113 | ||
1069 | f.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | 1114 | ret = v4l2_subdev_call(sd, video, g_mbus_fmt, &mf); |
1070 | |||
1071 | ret = v4l2_subdev_call(sd, video, g_fmt, &f); | ||
1072 | if (ret < 0) | 1115 | if (ret < 0) |
1073 | return ret; | 1116 | return ret; |
1074 | 1117 | ||
1075 | dev_geo(dev, "camera fmt %ux%u\n", pix->width, pix->height); | 1118 | dev_geo(dev, "camera fmt %ux%u\n", mf.width, mf.height); |
1076 | 1119 | ||
1077 | if (pix->width > 2560) { | 1120 | if (mf.width > 2560) { |
1078 | ceu_rect->width = 2560; | 1121 | ceu_rect->width = 2560; |
1079 | ceu_rect->left = (pix->width - 2560) / 2; | 1122 | ceu_rect->left = (mf.width - 2560) / 2; |
1080 | } else { | 1123 | } else { |
1081 | ceu_rect->width = pix->width; | 1124 | ceu_rect->width = mf.width; |
1082 | ceu_rect->left = 0; | 1125 | ceu_rect->left = 0; |
1083 | } | 1126 | } |
1084 | 1127 | ||
1085 | if (pix->height > 1920) { | 1128 | if (mf.height > 1920) { |
1086 | ceu_rect->height = 1920; | 1129 | ceu_rect->height = 1920; |
1087 | ceu_rect->top = (pix->height - 1920) / 2; | 1130 | ceu_rect->top = (mf.height - 1920) / 2; |
1088 | } else { | 1131 | } else { |
1089 | ceu_rect->height = pix->height; | 1132 | ceu_rect->height = mf.height; |
1090 | ceu_rect->top = 0; | 1133 | ceu_rect->top = 0; |
1091 | } | 1134 | } |
1092 | 1135 | ||
@@ -1103,13 +1146,12 @@ static int get_camera_subwin(struct soc_camera_device *icd, | |||
1103 | return 0; | 1146 | return 0; |
1104 | } | 1147 | } |
1105 | 1148 | ||
1106 | static int client_s_fmt(struct soc_camera_device *icd, struct v4l2_format *f, | 1149 | static int client_s_fmt(struct soc_camera_device *icd, |
1107 | bool ceu_can_scale) | 1150 | struct v4l2_mbus_framefmt *mf, bool ceu_can_scale) |
1108 | { | 1151 | { |
1109 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); | 1152 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); |
1110 | struct device *dev = icd->dev.parent; | 1153 | struct device *dev = icd->dev.parent; |
1111 | struct v4l2_pix_format *pix = &f->fmt.pix; | 1154 | unsigned int width = mf->width, height = mf->height, tmp_w, tmp_h; |
1112 | unsigned int width = pix->width, height = pix->height, tmp_w, tmp_h; | ||
1113 | unsigned int max_width, max_height; | 1155 | unsigned int max_width, max_height; |
1114 | struct v4l2_cropcap cap; | 1156 | struct v4l2_cropcap cap; |
1115 | int ret; | 1157 | int ret; |
@@ -1123,29 +1165,29 @@ static int client_s_fmt(struct soc_camera_device *icd, struct v4l2_format *f, | |||
1123 | max_width = min(cap.bounds.width, 2560); | 1165 | max_width = min(cap.bounds.width, 2560); |
1124 | max_height = min(cap.bounds.height, 1920); | 1166 | max_height = min(cap.bounds.height, 1920); |
1125 | 1167 | ||
1126 | ret = v4l2_subdev_call(sd, video, s_fmt, f); | 1168 | ret = v4l2_subdev_call(sd, video, s_mbus_fmt, mf); |
1127 | if (ret < 0) | 1169 | if (ret < 0) |
1128 | return ret; | 1170 | return ret; |
1129 | 1171 | ||
1130 | dev_geo(dev, "camera scaled to %ux%u\n", pix->width, pix->height); | 1172 | dev_geo(dev, "camera scaled to %ux%u\n", mf->width, mf->height); |
1131 | 1173 | ||
1132 | if ((width == pix->width && height == pix->height) || !ceu_can_scale) | 1174 | if ((width == mf->width && height == mf->height) || !ceu_can_scale) |
1133 | return 0; | 1175 | return 0; |
1134 | 1176 | ||
1135 | /* Camera set a format, but geometry is not precise, try to improve */ | 1177 | /* Camera set a format, but geometry is not precise, try to improve */ |
1136 | tmp_w = pix->width; | 1178 | tmp_w = mf->width; |
1137 | tmp_h = pix->height; | 1179 | tmp_h = mf->height; |
1138 | 1180 | ||
1139 | /* width <= max_width && height <= max_height - guaranteed by try_fmt */ | 1181 | /* width <= max_width && height <= max_height - guaranteed by try_fmt */ |
1140 | while ((width > tmp_w || height > tmp_h) && | 1182 | while ((width > tmp_w || height > tmp_h) && |
1141 | tmp_w < max_width && tmp_h < max_height) { | 1183 | tmp_w < max_width && tmp_h < max_height) { |
1142 | tmp_w = min(2 * tmp_w, max_width); | 1184 | tmp_w = min(2 * tmp_w, max_width); |
1143 | tmp_h = min(2 * tmp_h, max_height); | 1185 | tmp_h = min(2 * tmp_h, max_height); |
1144 | pix->width = tmp_w; | 1186 | mf->width = tmp_w; |
1145 | pix->height = tmp_h; | 1187 | mf->height = tmp_h; |
1146 | ret = v4l2_subdev_call(sd, video, s_fmt, f); | 1188 | ret = v4l2_subdev_call(sd, video, s_mbus_fmt, mf); |
1147 | dev_geo(dev, "Camera scaled to %ux%u\n", | 1189 | dev_geo(dev, "Camera scaled to %ux%u\n", |
1148 | pix->width, pix->height); | 1190 | mf->width, mf->height); |
1149 | if (ret < 0) { | 1191 | if (ret < 0) { |
1150 | /* This shouldn't happen */ | 1192 | /* This shouldn't happen */ |
1151 | dev_err(dev, "Client failed to set format: %d\n", ret); | 1193 | dev_err(dev, "Client failed to set format: %d\n", ret); |
@@ -1163,27 +1205,26 @@ static int client_s_fmt(struct soc_camera_device *icd, struct v4l2_format *f, | |||
1163 | */ | 1205 | */ |
1164 | static int client_scale(struct soc_camera_device *icd, struct v4l2_rect *rect, | 1206 | static int client_scale(struct soc_camera_device *icd, struct v4l2_rect *rect, |
1165 | struct v4l2_rect *sub_rect, struct v4l2_rect *ceu_rect, | 1207 | struct v4l2_rect *sub_rect, struct v4l2_rect *ceu_rect, |
1166 | struct v4l2_format *f, bool ceu_can_scale) | 1208 | struct v4l2_mbus_framefmt *mf, bool ceu_can_scale) |
1167 | { | 1209 | { |
1168 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); | 1210 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); |
1169 | struct sh_mobile_ceu_cam *cam = icd->host_priv; | 1211 | struct sh_mobile_ceu_cam *cam = icd->host_priv; |
1170 | struct device *dev = icd->dev.parent; | 1212 | struct device *dev = icd->dev.parent; |
1171 | struct v4l2_format f_tmp = *f; | 1213 | struct v4l2_mbus_framefmt mf_tmp = *mf; |
1172 | struct v4l2_pix_format *pix_tmp = &f_tmp.fmt.pix; | ||
1173 | unsigned int scale_h, scale_v; | 1214 | unsigned int scale_h, scale_v; |
1174 | int ret; | 1215 | int ret; |
1175 | 1216 | ||
1176 | /* 5. Apply iterative camera S_FMT for camera user window. */ | 1217 | /* 5. Apply iterative camera S_FMT for camera user window. */ |
1177 | ret = client_s_fmt(icd, &f_tmp, ceu_can_scale); | 1218 | ret = client_s_fmt(icd, &mf_tmp, ceu_can_scale); |
1178 | if (ret < 0) | 1219 | if (ret < 0) |
1179 | return ret; | 1220 | return ret; |
1180 | 1221 | ||
1181 | dev_geo(dev, "5: camera scaled to %ux%u\n", | 1222 | dev_geo(dev, "5: camera scaled to %ux%u\n", |
1182 | pix_tmp->width, pix_tmp->height); | 1223 | mf_tmp.width, mf_tmp.height); |
1183 | 1224 | ||
1184 | /* 6. Retrieve camera output window (g_fmt) */ | 1225 | /* 6. Retrieve camera output window (g_fmt) */ |
1185 | 1226 | ||
1186 | /* unneeded - it is already in "f_tmp" */ | 1227 | /* unneeded - it is already in "mf_tmp" */ |
1187 | 1228 | ||
1188 | /* 7. Calculate new camera scales. */ | 1229 | /* 7. Calculate new camera scales. */ |
1189 | ret = get_camera_scales(sd, rect, &scale_h, &scale_v); | 1230 | ret = get_camera_scales(sd, rect, &scale_h, &scale_v); |
@@ -1192,10 +1233,11 @@ static int client_scale(struct soc_camera_device *icd, struct v4l2_rect *rect, | |||
1192 | 1233 | ||
1193 | dev_geo(dev, "7: camera scales %u:%u\n", scale_h, scale_v); | 1234 | dev_geo(dev, "7: camera scales %u:%u\n", scale_h, scale_v); |
1194 | 1235 | ||
1195 | cam->cam_width = pix_tmp->width; | 1236 | cam->cam_width = mf_tmp.width; |
1196 | cam->cam_height = pix_tmp->height; | 1237 | cam->cam_height = mf_tmp.height; |
1197 | f->fmt.pix.width = pix_tmp->width; | 1238 | mf->width = mf_tmp.width; |
1198 | f->fmt.pix.height = pix_tmp->height; | 1239 | mf->height = mf_tmp.height; |
1240 | mf->colorspace = mf_tmp.colorspace; | ||
1199 | 1241 | ||
1200 | /* | 1242 | /* |
1201 | * 8. Calculate new CEU crop - apply camera scales to previously | 1243 | * 8. Calculate new CEU crop - apply camera scales to previously |
@@ -1259,8 +1301,7 @@ static int sh_mobile_ceu_set_crop(struct soc_camera_device *icd, | |||
1259 | struct v4l2_rect *cam_rect = &cam_crop.c, *ceu_rect = &cam->ceu_rect; | 1301 | struct v4l2_rect *cam_rect = &cam_crop.c, *ceu_rect = &cam->ceu_rect; |
1260 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); | 1302 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); |
1261 | struct device *dev = icd->dev.parent; | 1303 | struct device *dev = icd->dev.parent; |
1262 | struct v4l2_format f; | 1304 | struct v4l2_mbus_framefmt mf; |
1263 | struct v4l2_pix_format *pix = &f.fmt.pix; | ||
1264 | unsigned int scale_comb_h, scale_comb_v, scale_ceu_h, scale_ceu_v, | 1305 | unsigned int scale_comb_h, scale_comb_v, scale_ceu_h, scale_ceu_v, |
1265 | out_width, out_height; | 1306 | out_width, out_height; |
1266 | u32 capsr, cflcr; | 1307 | u32 capsr, cflcr; |
@@ -1309,25 +1350,24 @@ static int sh_mobile_ceu_set_crop(struct soc_camera_device *icd, | |||
1309 | * 5. Using actual input window and calculated combined scales calculate | 1350 | * 5. Using actual input window and calculated combined scales calculate |
1310 | * camera target output window. | 1351 | * camera target output window. |
1311 | */ | 1352 | */ |
1312 | pix->width = scale_down(cam_rect->width, scale_comb_h); | 1353 | mf.width = scale_down(cam_rect->width, scale_comb_h); |
1313 | pix->height = scale_down(cam_rect->height, scale_comb_v); | 1354 | mf.height = scale_down(cam_rect->height, scale_comb_v); |
1314 | 1355 | ||
1315 | dev_geo(dev, "5: camera target %ux%u\n", pix->width, pix->height); | 1356 | dev_geo(dev, "5: camera target %ux%u\n", mf.width, mf.height); |
1316 | 1357 | ||
1317 | /* 6. - 9. */ | 1358 | /* 6. - 9. */ |
1318 | pix->pixelformat = cam->camera_fmt->fourcc; | 1359 | mf.code = cam->code; |
1319 | pix->colorspace = cam->camera_fmt->colorspace; | 1360 | mf.field = pcdev->is_interlaced ? V4L2_FIELD_INTERLACED : |
1361 | V4L2_FIELD_NONE; | ||
1320 | 1362 | ||
1321 | capsr = capture_save_reset(pcdev); | 1363 | capsr = capture_save_reset(pcdev); |
1322 | dev_dbg(dev, "CAPSR 0x%x, CFLCR 0x%x\n", capsr, pcdev->cflcr); | 1364 | dev_dbg(dev, "CAPSR 0x%x, CFLCR 0x%x\n", capsr, pcdev->cflcr); |
1323 | 1365 | ||
1324 | /* Make relative to camera rectangle */ | 1366 | /* Make relative to camera rectangle */ |
1325 | rect->left -= cam_rect->left; | 1367 | rect->left -= cam_rect->left; |
1326 | rect->top -= cam_rect->top; | 1368 | rect->top -= cam_rect->top; |
1327 | 1369 | ||
1328 | f.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | 1370 | ret = client_scale(icd, cam_rect, rect, ceu_rect, &mf, |
1329 | |||
1330 | ret = client_scale(icd, cam_rect, rect, ceu_rect, &f, | ||
1331 | pcdev->image_mode && !pcdev->is_interlaced); | 1371 | pcdev->image_mode && !pcdev->is_interlaced); |
1332 | 1372 | ||
1333 | dev_geo(dev, "6-9: %d\n", ret); | 1373 | dev_geo(dev, "6-9: %d\n", ret); |
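Two details are worth noting in the S_CROP path above: the media-bus code is no longer re-derived from a fourcc but taken from the code cached at S_FMT time, and the field order is now passed to the client explicitly. A sketch that collapses that setup into one hypothetical helper (scale_down(), sh_mobile_ceu_cam and sh_mobile_ceu_dev are the driver internals already used in the hunk):

static void ceu_build_crop_mbusfmt(struct v4l2_mbus_framefmt *mf,
				   const struct v4l2_rect *cam_rect,
				   unsigned int scale_comb_h,
				   unsigned int scale_comb_v,
				   const struct sh_mobile_ceu_cam *cam,
				   const struct sh_mobile_ceu_dev *pcdev)
{
	/* Target client output = actual input window / combined scales */
	mf->width  = scale_down(cam_rect->width, scale_comb_h);
	mf->height = scale_down(cam_rect->height, scale_comb_v);
	/* Re-use the code negotiated at S_FMT time */
	mf->code   = cam->code;
	mf->field  = pcdev->is_interlaced ? V4L2_FIELD_INTERLACED :
					    V4L2_FIELD_NONE;
}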
@@ -1375,8 +1415,7 @@ static int sh_mobile_ceu_set_fmt(struct soc_camera_device *icd, | |||
1375 | struct sh_mobile_ceu_dev *pcdev = ici->priv; | 1415 | struct sh_mobile_ceu_dev *pcdev = ici->priv; |
1376 | struct sh_mobile_ceu_cam *cam = icd->host_priv; | 1416 | struct sh_mobile_ceu_cam *cam = icd->host_priv; |
1377 | struct v4l2_pix_format *pix = &f->fmt.pix; | 1417 | struct v4l2_pix_format *pix = &f->fmt.pix; |
1378 | struct v4l2_format cam_f = *f; | 1418 | struct v4l2_mbus_framefmt mf; |
1379 | struct v4l2_pix_format *cam_pix = &cam_f.fmt.pix; | ||
1380 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); | 1419 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); |
1381 | struct device *dev = icd->dev.parent; | 1420 | struct device *dev = icd->dev.parent; |
1382 | __u32 pixfmt = pix->pixelformat; | 1421 | __u32 pixfmt = pix->pixelformat; |
@@ -1445,9 +1484,11 @@ static int sh_mobile_ceu_set_fmt(struct soc_camera_device *icd, | |||
1445 | * 4. Calculate camera output window by applying combined scales to real | 1484 | * 4. Calculate camera output window by applying combined scales to real |
1446 | * input window. | 1485 | * input window. |
1447 | */ | 1486 | */ |
1448 | cam_pix->width = scale_down(cam_rect->width, scale_h); | 1487 | mf.width = scale_down(cam_rect->width, scale_h); |
1449 | cam_pix->height = scale_down(cam_rect->height, scale_v); | 1488 | mf.height = scale_down(cam_rect->height, scale_v); |
1450 | cam_pix->pixelformat = xlate->cam_fmt->fourcc; | 1489 | mf.field = pix->field; |
1490 | mf.colorspace = pix->colorspace; | ||
1491 | mf.code = xlate->code; | ||
1451 | 1492 | ||
1452 | switch (pixfmt) { | 1493 | switch (pixfmt) { |
1453 | case V4L2_PIX_FMT_NV12: | 1494 | case V4L2_PIX_FMT_NV12: |
@@ -1460,11 +1501,10 @@ static int sh_mobile_ceu_set_fmt(struct soc_camera_device *icd, | |||
1460 | image_mode = false; | 1501 | image_mode = false; |
1461 | } | 1502 | } |
1462 | 1503 | ||
1463 | dev_geo(dev, "4: camera output %ux%u\n", | 1504 | dev_geo(dev, "4: camera output %ux%u\n", mf.width, mf.height); |
1464 | cam_pix->width, cam_pix->height); | ||
1465 | 1505 | ||
1466 | /* 5. - 9. */ | 1506 | /* 5. - 9. */ |
1467 | ret = client_scale(icd, cam_rect, &cam_subrect, &ceu_rect, &cam_f, | 1507 | ret = client_scale(icd, cam_rect, &cam_subrect, &ceu_rect, &mf, |
1468 | image_mode && !is_interlaced); | 1508 | image_mode && !is_interlaced); |
1469 | 1509 | ||
1470 | dev_geo(dev, "5-9: client scale %d\n", ret); | 1510 | dev_geo(dev, "5-9: client scale %d\n", ret); |
@@ -1472,37 +1512,48 @@ static int sh_mobile_ceu_set_fmt(struct soc_camera_device *icd, | |||
1472 | /* Done with the camera. Now see if we can improve the result */ | 1512 | /* Done with the camera. Now see if we can improve the result */ |
1473 | 1513 | ||
1474 | dev_dbg(dev, "Camera %d fmt %ux%u, requested %ux%u\n", | 1514 | dev_dbg(dev, "Camera %d fmt %ux%u, requested %ux%u\n", |
1475 | ret, cam_pix->width, cam_pix->height, pix->width, pix->height); | 1515 | ret, mf.width, mf.height, pix->width, pix->height); |
1476 | if (ret < 0) | 1516 | if (ret < 0) |
1477 | return ret; | 1517 | return ret; |
1478 | 1518 | ||
1519 | if (mf.code != xlate->code) | ||
1520 | return -EINVAL; | ||
1521 | |||
1479 | /* 10. Use CEU scaling to scale to the requested user window. */ | 1522 | /* 10. Use CEU scaling to scale to the requested user window. */ |
1480 | 1523 | ||
1481 | /* We cannot scale up */ | 1524 | /* We cannot scale up */ |
1482 | if (pix->width > cam_pix->width) | 1525 | if (pix->width > mf.width) |
1483 | pix->width = cam_pix->width; | 1526 | pix->width = mf.width; |
1484 | if (pix->width > ceu_rect.width) | 1527 | if (pix->width > ceu_rect.width) |
1485 | pix->width = ceu_rect.width; | 1528 | pix->width = ceu_rect.width; |
1486 | 1529 | ||
1487 | if (pix->height > cam_pix->height) | 1530 | if (pix->height > mf.height) |
1488 | pix->height = cam_pix->height; | 1531 | pix->height = mf.height; |
1489 | if (pix->height > ceu_rect.height) | 1532 | if (pix->height > ceu_rect.height) |
1490 | pix->height = ceu_rect.height; | 1533 | pix->height = ceu_rect.height; |
1491 | 1534 | ||
1492 | /* Let's rock: scale pix->{width x height} down to width x height */ | 1535 | pix->colorspace = mf.colorspace; |
1493 | scale_h = calc_scale(ceu_rect.width, &pix->width); | 1536 | |
1494 | scale_v = calc_scale(ceu_rect.height, &pix->height); | 1537 | if (image_mode) { |
1538 | /* Scale pix->{width x height} down to width x height */ | ||
1539 | scale_h = calc_scale(ceu_rect.width, &pix->width); | ||
1540 | scale_v = calc_scale(ceu_rect.height, &pix->height); | ||
1541 | |||
1542 | pcdev->cflcr = scale_h | (scale_v << 16); | ||
1543 | } else { | ||
1544 | pix->width = ceu_rect.width; | ||
1545 | pix->height = ceu_rect.height; | ||
1546 | scale_h = scale_v = 0; | ||
1547 | pcdev->cflcr = 0; | ||
1548 | } | ||
1495 | 1549 | ||
1496 | dev_geo(dev, "10: W: %u : 0x%x = %u, H: %u : 0x%x = %u\n", | 1550 | dev_geo(dev, "10: W: %u : 0x%x = %u, H: %u : 0x%x = %u\n", |
1497 | ceu_rect.width, scale_h, pix->width, | 1551 | ceu_rect.width, scale_h, pix->width, |
1498 | ceu_rect.height, scale_v, pix->height); | 1552 | ceu_rect.height, scale_v, pix->height); |
1499 | 1553 | ||
1500 | pcdev->cflcr = scale_h | (scale_v << 16); | 1554 | cam->code = xlate->code; |
1501 | 1555 | cam->ceu_rect = ceu_rect; | |
1502 | icd->buswidth = xlate->buswidth; | 1556 | icd->current_fmt = xlate; |
1503 | icd->current_fmt = xlate->host_fmt; | ||
1504 | cam->camera_fmt = xlate->cam_fmt; | ||
1505 | cam->ceu_rect = ceu_rect; | ||
1506 | 1557 | ||
1507 | pcdev->is_interlaced = is_interlaced; | 1558 | pcdev->is_interlaced = is_interlaced; |
1508 | pcdev->image_mode = image_mode; | 1559 | pcdev->image_mode = image_mode; |
@@ -1516,6 +1567,7 @@ static int sh_mobile_ceu_try_fmt(struct soc_camera_device *icd, | |||
1516 | const struct soc_camera_format_xlate *xlate; | 1567 | const struct soc_camera_format_xlate *xlate; |
1517 | struct v4l2_pix_format *pix = &f->fmt.pix; | 1568 | struct v4l2_pix_format *pix = &f->fmt.pix; |
1518 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); | 1569 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); |
1570 | struct v4l2_mbus_framefmt mf; | ||
1519 | __u32 pixfmt = pix->pixelformat; | 1571 | __u32 pixfmt = pix->pixelformat; |
1520 | int width, height; | 1572 | int width, height; |
1521 | int ret; | 1573 | int ret; |
@@ -1534,18 +1586,27 @@ static int sh_mobile_ceu_try_fmt(struct soc_camera_device *icd, | |||
1534 | width = pix->width; | 1586 | width = pix->width; |
1535 | height = pix->height; | 1587 | height = pix->height; |
1536 | 1588 | ||
1537 | pix->bytesperline = pix->width * | 1589 | pix->bytesperline = soc_mbus_bytes_per_line(width, xlate->host_fmt); |
1538 | DIV_ROUND_UP(xlate->host_fmt->depth, 8); | 1590 | if (pix->bytesperline < 0) |
1539 | pix->sizeimage = pix->height * pix->bytesperline; | 1591 | return pix->bytesperline; |
1540 | 1592 | pix->sizeimage = height * pix->bytesperline; | |
1541 | pix->pixelformat = xlate->cam_fmt->fourcc; | ||
1542 | 1593 | ||
1543 | /* limit to sensor capabilities */ | 1594 | /* limit to sensor capabilities */ |
1544 | ret = v4l2_subdev_call(sd, video, try_fmt, f); | 1595 | mf.width = pix->width; |
1545 | pix->pixelformat = pixfmt; | 1596 | mf.height = pix->height; |
1597 | mf.field = pix->field; | ||
1598 | mf.code = xlate->code; | ||
1599 | mf.colorspace = pix->colorspace; | ||
1600 | |||
1601 | ret = v4l2_subdev_call(sd, video, try_mbus_fmt, &mf); | ||
1546 | if (ret < 0) | 1602 | if (ret < 0) |
1547 | return ret; | 1603 | return ret; |
1548 | 1604 | ||
1605 | pix->width = mf.width; | ||
1606 | pix->height = mf.height; | ||
1607 | pix->field = mf.field; | ||
1608 | pix->colorspace = mf.colorspace; | ||
1609 | |||
1549 | switch (pixfmt) { | 1610 | switch (pixfmt) { |
1550 | case V4L2_PIX_FMT_NV12: | 1611 | case V4L2_PIX_FMT_NV12: |
1551 | case V4L2_PIX_FMT_NV21: | 1612 | case V4L2_PIX_FMT_NV21: |
@@ -1554,21 +1615,25 @@ static int sh_mobile_ceu_try_fmt(struct soc_camera_device *icd, | |||
1554 | /* FIXME: check against rect_max after converting soc-camera */ | 1615 | /* FIXME: check against rect_max after converting soc-camera */ |
1555 | /* We can scale precisely, need a bigger image from camera */ | 1616 | /* We can scale precisely, need a bigger image from camera */ |
1556 | if (pix->width < width || pix->height < height) { | 1617 | if (pix->width < width || pix->height < height) { |
1557 | int tmp_w = pix->width, tmp_h = pix->height; | 1618 | /* |
1558 | pix->width = 2560; | 1619 | * We presume, the sensor behaves sanely, i.e., if |
1559 | pix->height = 1920; | 1620 | * requested a bigger rectangle, it will not return a |
1560 | ret = v4l2_subdev_call(sd, video, try_fmt, f); | 1621 | * smaller one. |
1622 | */ | ||
1623 | mf.width = 2560; | ||
1624 | mf.height = 1920; | ||
1625 | ret = v4l2_subdev_call(sd, video, try_mbus_fmt, &mf); | ||
1561 | if (ret < 0) { | 1626 | if (ret < 0) { |
1562 | /* Shouldn't actually happen... */ | 1627 | /* Shouldn't actually happen... */ |
1563 | dev_err(icd->dev.parent, | 1628 | dev_err(icd->dev.parent, |
1564 | "FIXME: try_fmt() returned %d\n", ret); | 1629 | "FIXME: client try_fmt() = %d\n", ret); |
1565 | pix->width = tmp_w; | 1630 | return ret; |
1566 | pix->height = tmp_h; | ||
1567 | } | 1631 | } |
1568 | } | 1632 | } |
1569 | if (pix->width > width) | 1633 | /* We will scale exactly */ |
1634 | if (mf.width > width) | ||
1570 | pix->width = width; | 1635 | pix->width = width; |
1571 | if (pix->height > height) | 1636 | if (mf.height > height) |
1572 | pix->height = height; | 1637 | pix->height = height; |
1573 | } | 1638 | } |
1574 | 1639 | ||
@@ -1663,7 +1728,7 @@ static int sh_mobile_ceu_set_ctrl(struct soc_camera_device *icd, | |||
1663 | 1728 | ||
1664 | switch (ctrl->id) { | 1729 | switch (ctrl->id) { |
1665 | case V4L2_CID_SHARPNESS: | 1730 | case V4L2_CID_SHARPNESS: |
1666 | switch (icd->current_fmt->fourcc) { | 1731 | switch (icd->current_fmt->host_fmt->fourcc) { |
1667 | case V4L2_PIX_FMT_NV12: | 1732 | case V4L2_PIX_FMT_NV12: |
1668 | case V4L2_PIX_FMT_NV21: | 1733 | case V4L2_PIX_FMT_NV21: |
1669 | case V4L2_PIX_FMT_NV16: | 1734 | case V4L2_PIX_FMT_NV16: |
diff --git a/drivers/media/video/soc_camera.c b/drivers/media/video/soc_camera.c index 5fdedc766401..6b3fbcca7747 100644 --- a/drivers/media/video/soc_camera.c +++ b/drivers/media/video/soc_camera.c | |||
@@ -31,6 +31,7 @@ | |||
31 | #include <media/v4l2-ioctl.h> | 31 | #include <media/v4l2-ioctl.h> |
32 | #include <media/v4l2-dev.h> | 32 | #include <media/v4l2-dev.h> |
33 | #include <media/videobuf-core.h> | 33 | #include <media/videobuf-core.h> |
34 | #include <media/soc_mediabus.h> | ||
34 | 35 | ||
35 | /* Default to VGA resolution */ | 36 | /* Default to VGA resolution */ |
36 | #define DEFAULT_WIDTH 640 | 37 | #define DEFAULT_WIDTH 640 |
@@ -40,18 +41,6 @@ static LIST_HEAD(hosts); | |||
40 | static LIST_HEAD(devices); | 41 | static LIST_HEAD(devices); |
41 | static DEFINE_MUTEX(list_lock); /* Protects the list of hosts */ | 42 | static DEFINE_MUTEX(list_lock); /* Protects the list of hosts */ |
42 | 43 | ||
43 | const struct soc_camera_data_format *soc_camera_format_by_fourcc( | ||
44 | struct soc_camera_device *icd, unsigned int fourcc) | ||
45 | { | ||
46 | unsigned int i; | ||
47 | |||
48 | for (i = 0; i < icd->num_formats; i++) | ||
49 | if (icd->formats[i].fourcc == fourcc) | ||
50 | return icd->formats + i; | ||
51 | return NULL; | ||
52 | } | ||
53 | EXPORT_SYMBOL(soc_camera_format_by_fourcc); | ||
54 | |||
55 | const struct soc_camera_format_xlate *soc_camera_xlate_by_fourcc( | 44 | const struct soc_camera_format_xlate *soc_camera_xlate_by_fourcc( |
56 | struct soc_camera_device *icd, unsigned int fourcc) | 45 | struct soc_camera_device *icd, unsigned int fourcc) |
57 | { | 46 | { |
@@ -207,21 +196,26 @@ static int soc_camera_dqbuf(struct file *file, void *priv, | |||
207 | /* Always entered with .video_lock held */ | 196 | /* Always entered with .video_lock held */ |
208 | static int soc_camera_init_user_formats(struct soc_camera_device *icd) | 197 | static int soc_camera_init_user_formats(struct soc_camera_device *icd) |
209 | { | 198 | { |
199 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); | ||
210 | struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); | 200 | struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); |
211 | int i, fmts = 0, ret; | 201 | int i, fmts = 0, raw_fmts = 0, ret; |
202 | enum v4l2_mbus_pixelcode code; | ||
203 | |||
204 | while (!v4l2_subdev_call(sd, video, enum_mbus_fmt, raw_fmts, &code)) | ||
205 | raw_fmts++; | ||
212 | 206 | ||
213 | if (!ici->ops->get_formats) | 207 | if (!ici->ops->get_formats) |
214 | /* | 208 | /* |
215 | * Fallback mode - the host will have to serve all | 209 | * Fallback mode - the host will have to serve all |
216 | * sensor-provided formats one-to-one to the user | 210 | * sensor-provided formats one-to-one to the user |
217 | */ | 211 | */ |
218 | fmts = icd->num_formats; | 212 | fmts = raw_fmts; |
219 | else | 213 | else |
220 | /* | 214 | /* |
221 | * First pass - only count formats this host-sensor | 215 | * First pass - only count formats this host-sensor |
222 | * configuration can provide | 216 | * configuration can provide |
223 | */ | 217 | */ |
224 | for (i = 0; i < icd->num_formats; i++) { | 218 | for (i = 0; i < raw_fmts; i++) { |
225 | ret = ici->ops->get_formats(icd, i, NULL); | 219 | ret = ici->ops->get_formats(icd, i, NULL); |
226 | if (ret < 0) | 220 | if (ret < 0) |
227 | return ret; | 221 | return ret; |
@@ -242,11 +236,12 @@ static int soc_camera_init_user_formats(struct soc_camera_device *icd) | |||
242 | 236 | ||
243 | /* Second pass - actually fill data formats */ | 237 | /* Second pass - actually fill data formats */ |
244 | fmts = 0; | 238 | fmts = 0; |
245 | for (i = 0; i < icd->num_formats; i++) | 239 | for (i = 0; i < raw_fmts; i++) |
246 | if (!ici->ops->get_formats) { | 240 | if (!ici->ops->get_formats) { |
247 | icd->user_formats[i].host_fmt = icd->formats + i; | 241 | v4l2_subdev_call(sd, video, enum_mbus_fmt, i, &code); |
248 | icd->user_formats[i].cam_fmt = icd->formats + i; | 242 | icd->user_formats[i].host_fmt = |
249 | icd->user_formats[i].buswidth = icd->formats[i].depth; | 243 | soc_mbus_get_fmtdesc(code); |
244 | icd->user_formats[i].code = code; | ||
250 | } else { | 245 | } else { |
251 | ret = ici->ops->get_formats(icd, i, | 246 | ret = ici->ops->get_formats(icd, i, |
252 | &icd->user_formats[fmts]); | 247 | &icd->user_formats[fmts]); |
@@ -255,7 +250,7 @@ static int soc_camera_init_user_formats(struct soc_camera_device *icd) | |||
255 | fmts += ret; | 250 | fmts += ret; |
256 | } | 251 | } |
257 | 252 | ||
258 | icd->current_fmt = icd->user_formats[0].host_fmt; | 253 | icd->current_fmt = &icd->user_formats[0]; |
259 | 254 | ||
260 | return 0; | 255 | return 0; |
261 | 256 | ||
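The rewritten soc_camera_init_user_formats() no longer reads a client-provided icd->formats array; it asks the subdevice for its media-bus codes and, in the fallback case, exposes each one through the matching generic soc_mbus_pixelfmt descriptor. The fallback path, condensed into one hypothetical helper (it assumes icd->user_formats has already been allocated with room for every reported code):

static int count_and_fill_passthrough(struct soc_camera_device *icd)
{
	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
	enum v4l2_mbus_pixelcode code;
	int i, raw_fmts = 0;

	/* First pass: count the codes the client can produce */
	while (!v4l2_subdev_call(sd, video, enum_mbus_fmt, raw_fmts, &code))
		raw_fmts++;

	/* Second pass: expose each code one-to-one via its descriptor */
	for (i = 0; i < raw_fmts; i++) {
		v4l2_subdev_call(sd, video, enum_mbus_fmt, i, &code);
		icd->user_formats[i].code = code;
		icd->user_formats[i].host_fmt = soc_mbus_get_fmtdesc(code);
	}

	return raw_fmts;
}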
@@ -281,7 +276,7 @@ static void soc_camera_free_user_formats(struct soc_camera_device *icd) | |||
281 | #define pixfmtstr(x) (x) & 0xff, ((x) >> 8) & 0xff, ((x) >> 16) & 0xff, \ | 276 | #define pixfmtstr(x) (x) & 0xff, ((x) >> 8) & 0xff, ((x) >> 16) & 0xff, \ |
282 | ((x) >> 24) & 0xff | 277 | ((x) >> 24) & 0xff |
283 | 278 | ||
284 | /* Called with .vb_lock held */ | 279 | /* Called with .vb_lock held, or from the first open(2), see comment there */ |
285 | static int soc_camera_set_fmt(struct soc_camera_file *icf, | 280 | static int soc_camera_set_fmt(struct soc_camera_file *icf, |
286 | struct v4l2_format *f) | 281 | struct v4l2_format *f) |
287 | { | 282 | { |
@@ -302,7 +297,7 @@ static int soc_camera_set_fmt(struct soc_camera_file *icf, | |||
302 | if (ret < 0) { | 297 | if (ret < 0) { |
303 | return ret; | 298 | return ret; |
304 | } else if (!icd->current_fmt || | 299 | } else if (!icd->current_fmt || |
305 | icd->current_fmt->fourcc != pix->pixelformat) { | 300 | icd->current_fmt->host_fmt->fourcc != pix->pixelformat) { |
306 | dev_err(&icd->dev, | 301 | dev_err(&icd->dev, |
307 | "Host driver hasn't set up current format correctly!\n"); | 302 | "Host driver hasn't set up current format correctly!\n"); |
308 | return -EINVAL; | 303 | return -EINVAL; |
@@ -310,6 +305,7 @@ static int soc_camera_set_fmt(struct soc_camera_file *icf, | |||
310 | 305 | ||
311 | icd->user_width = pix->width; | 306 | icd->user_width = pix->width; |
312 | icd->user_height = pix->height; | 307 | icd->user_height = pix->height; |
308 | icd->colorspace = pix->colorspace; | ||
313 | icf->vb_vidq.field = | 309 | icf->vb_vidq.field = |
314 | icd->field = pix->field; | 310 | icd->field = pix->field; |
315 | 311 | ||
@@ -369,8 +365,9 @@ static int soc_camera_open(struct file *file) | |||
369 | .width = icd->user_width, | 365 | .width = icd->user_width, |
370 | .height = icd->user_height, | 366 | .height = icd->user_height, |
371 | .field = icd->field, | 367 | .field = icd->field, |
372 | .pixelformat = icd->current_fmt->fourcc, | 368 | .colorspace = icd->colorspace, |
373 | .colorspace = icd->current_fmt->colorspace, | 369 | .pixelformat = |
370 | icd->current_fmt->host_fmt->fourcc, | ||
374 | }, | 371 | }, |
375 | }; | 372 | }; |
376 | 373 | ||
@@ -390,7 +387,12 @@ static int soc_camera_open(struct file *file) | |||
390 | goto eiciadd; | 387 | goto eiciadd; |
391 | } | 388 | } |
392 | 389 | ||
393 | /* Try to configure with default parameters */ | 390 | /* |
391 | * Try to configure with default parameters. Notice: this is the | ||
392 | * very first open, so, we cannot race against other calls, | ||
392 | * very first open, so we cannot race against other calls, | ||
393 | * apart from someone else calling open() simultaneously, but | ||
394 | * .video_lock is protecting us against it. | ||
395 | */ | ||
394 | ret = soc_camera_set_fmt(icf, &f); | 396 | ret = soc_camera_set_fmt(icf, &f); |
395 | if (ret < 0) | 397 | if (ret < 0) |
396 | goto esfmt; | 398 | goto esfmt; |
@@ -534,7 +536,7 @@ static int soc_camera_enum_fmt_vid_cap(struct file *file, void *priv, | |||
534 | { | 536 | { |
535 | struct soc_camera_file *icf = file->private_data; | 537 | struct soc_camera_file *icf = file->private_data; |
536 | struct soc_camera_device *icd = icf->icd; | 538 | struct soc_camera_device *icd = icf->icd; |
537 | const struct soc_camera_data_format *format; | 539 | const struct soc_mbus_pixelfmt *format; |
538 | 540 | ||
539 | WARN_ON(priv != file->private_data); | 541 | WARN_ON(priv != file->private_data); |
540 | 542 | ||
@@ -543,7 +545,8 @@ static int soc_camera_enum_fmt_vid_cap(struct file *file, void *priv, | |||
543 | 545 | ||
544 | format = icd->user_formats[f->index].host_fmt; | 546 | format = icd->user_formats[f->index].host_fmt; |
545 | 547 | ||
546 | strlcpy(f->description, format->name, sizeof(f->description)); | 548 | if (format->name) |
549 | strlcpy(f->description, format->name, sizeof(f->description)); | ||
547 | f->pixelformat = format->fourcc; | 550 | f->pixelformat = format->fourcc; |
548 | return 0; | 551 | return 0; |
549 | } | 552 | } |
@@ -560,12 +563,15 @@ static int soc_camera_g_fmt_vid_cap(struct file *file, void *priv, | |||
560 | pix->width = icd->user_width; | 563 | pix->width = icd->user_width; |
561 | pix->height = icd->user_height; | 564 | pix->height = icd->user_height; |
562 | pix->field = icf->vb_vidq.field; | 565 | pix->field = icf->vb_vidq.field; |
563 | pix->pixelformat = icd->current_fmt->fourcc; | 566 | pix->pixelformat = icd->current_fmt->host_fmt->fourcc; |
564 | pix->bytesperline = pix->width * | 567 | pix->bytesperline = soc_mbus_bytes_per_line(pix->width, |
565 | DIV_ROUND_UP(icd->current_fmt->depth, 8); | 568 | icd->current_fmt->host_fmt); |
569 | pix->colorspace = icd->colorspace; | ||
570 | if (pix->bytesperline < 0) | ||
571 | return pix->bytesperline; | ||
566 | pix->sizeimage = pix->height * pix->bytesperline; | 572 | pix->sizeimage = pix->height * pix->bytesperline; |
567 | dev_dbg(&icd->dev, "current_fmt->fourcc: 0x%08x\n", | 573 | dev_dbg(&icd->dev, "current_fmt->fourcc: 0x%08x\n", |
568 | icd->current_fmt->fourcc); | 574 | icd->current_fmt->host_fmt->fourcc); |
569 | return 0; | 575 | return 0; |
570 | } | 576 | } |
571 | 577 | ||
@@ -894,7 +900,7 @@ static int soc_camera_probe(struct device *dev) | |||
894 | struct soc_camera_link *icl = to_soc_camera_link(icd); | 900 | struct soc_camera_link *icl = to_soc_camera_link(icd); |
895 | struct device *control = NULL; | 901 | struct device *control = NULL; |
896 | struct v4l2_subdev *sd; | 902 | struct v4l2_subdev *sd; |
897 | struct v4l2_format f = {.type = V4L2_BUF_TYPE_VIDEO_CAPTURE}; | 903 | struct v4l2_mbus_framefmt mf; |
898 | int ret; | 904 | int ret; |
899 | 905 | ||
900 | dev_info(dev, "Probing %s\n", dev_name(dev)); | 906 | dev_info(dev, "Probing %s\n", dev_name(dev)); |
@@ -965,9 +971,11 @@ static int soc_camera_probe(struct device *dev) | |||
965 | 971 | ||
966 | /* Try to improve our guess of a reasonable window format */ | 972 | /* Try to improve our guess of a reasonable window format */ |
967 | sd = soc_camera_to_subdev(icd); | 973 | sd = soc_camera_to_subdev(icd); |
968 | if (!v4l2_subdev_call(sd, video, g_fmt, &f)) { | 974 | if (!v4l2_subdev_call(sd, video, g_mbus_fmt, &mf)) { |
969 | icd->user_width = f.fmt.pix.width; | 975 | icd->user_width = mf.width; |
970 | icd->user_height = f.fmt.pix.height; | 976 | icd->user_height = mf.height; |
977 | icd->colorspace = mf.colorspace; | ||
978 | icd->field = mf.field; | ||
971 | } | 979 | } |
972 | 980 | ||
973 | /* Do we have to sysfs_remove_link() before device_unregister()? */ | 981 | /* Do we have to sysfs_remove_link() before device_unregister()? */ |
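The probe-time seeding above only works if the client answers g_mbus_fmt with its current geometry, media-bus code, field order and colorspace. A minimal client-side example (hypothetical sensor driver with a fixed VGA output; the code and colorspace values are placeholders):

static int mycam_g_fmt(struct v4l2_subdev *sd, struct v4l2_mbus_framefmt *mf)
{
	/* Report the format the sensor is currently configured for */
	mf->width	= 640;
	mf->height	= 480;
	mf->code	= V4L2_MBUS_FMT_YVYU8_2X8_BE;
	mf->field	= V4L2_FIELD_NONE;
	mf->colorspace	= V4L2_COLORSPACE_JPEG;

	return 0;
}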
diff --git a/drivers/media/video/soc_camera_platform.c b/drivers/media/video/soc_camera_platform.c index c7c91518c391..10b003a8be83 100644 --- a/drivers/media/video/soc_camera_platform.c +++ b/drivers/media/video/soc_camera_platform.c | |||
@@ -22,7 +22,6 @@ | |||
22 | 22 | ||
23 | struct soc_camera_platform_priv { | 23 | struct soc_camera_platform_priv { |
24 | struct v4l2_subdev subdev; | 24 | struct v4l2_subdev subdev; |
25 | struct soc_camera_data_format format; | ||
26 | }; | 25 | }; |
27 | 26 | ||
28 | static struct soc_camera_platform_priv *get_priv(struct platform_device *pdev) | 27 | static struct soc_camera_platform_priv *get_priv(struct platform_device *pdev) |
@@ -58,36 +57,36 @@ soc_camera_platform_query_bus_param(struct soc_camera_device *icd) | |||
58 | } | 57 | } |
59 | 58 | ||
60 | static int soc_camera_platform_try_fmt(struct v4l2_subdev *sd, | 59 | static int soc_camera_platform_try_fmt(struct v4l2_subdev *sd, |
61 | struct v4l2_format *f) | 60 | struct v4l2_mbus_framefmt *mf) |
62 | { | 61 | { |
63 | struct soc_camera_platform_info *p = v4l2_get_subdevdata(sd); | 62 | struct soc_camera_platform_info *p = v4l2_get_subdevdata(sd); |
64 | struct v4l2_pix_format *pix = &f->fmt.pix; | ||
65 | 63 | ||
66 | pix->width = p->format.width; | 64 | mf->width = p->format.width; |
67 | pix->height = p->format.height; | 65 | mf->height = p->format.height; |
66 | mf->code = p->format.code; | ||
67 | mf->colorspace = p->format.colorspace; | ||
68 | |||
68 | return 0; | 69 | return 0; |
69 | } | 70 | } |
70 | 71 | ||
71 | static void soc_camera_platform_video_probe(struct soc_camera_device *icd, | 72 | static struct v4l2_subdev_core_ops platform_subdev_core_ops; |
72 | struct platform_device *pdev) | 73 | |
74 | static int soc_camera_platform_enum_fmt(struct v4l2_subdev *sd, int index, | ||
75 | enum v4l2_mbus_pixelcode *code) | ||
73 | { | 76 | { |
74 | struct soc_camera_platform_priv *priv = get_priv(pdev); | 77 | struct soc_camera_platform_info *p = v4l2_get_subdevdata(sd); |
75 | struct soc_camera_platform_info *p = pdev->dev.platform_data; | ||
76 | 78 | ||
77 | priv->format.name = p->format_name; | 79 | if (index) |
78 | priv->format.depth = p->format_depth; | 80 | return -EINVAL; |
79 | priv->format.fourcc = p->format.pixelformat; | ||
80 | priv->format.colorspace = p->format.colorspace; | ||
81 | 81 | ||
82 | icd->formats = &priv->format; | 82 | *code = p->format.code; |
83 | icd->num_formats = 1; | 83 | return 0; |
84 | } | 84 | } |
85 | 85 | ||
86 | static struct v4l2_subdev_core_ops platform_subdev_core_ops; | ||
87 | |||
88 | static struct v4l2_subdev_video_ops platform_subdev_video_ops = { | 86 | static struct v4l2_subdev_video_ops platform_subdev_video_ops = { |
89 | .s_stream = soc_camera_platform_s_stream, | 87 | .s_stream = soc_camera_platform_s_stream, |
90 | .try_fmt = soc_camera_platform_try_fmt, | 88 | .try_mbus_fmt = soc_camera_platform_try_fmt, |
89 | .enum_mbus_fmt = soc_camera_platform_enum_fmt, | ||
91 | }; | 90 | }; |
92 | 91 | ||
93 | static struct v4l2_subdev_ops platform_subdev_ops = { | 92 | static struct v4l2_subdev_ops platform_subdev_ops = { |
@@ -128,12 +127,10 @@ static int soc_camera_platform_probe(struct platform_device *pdev) | |||
128 | /* Set the control device reference */ | 127 | /* Set the control device reference */ |
129 | dev_set_drvdata(&icd->dev, &pdev->dev); | 128 | dev_set_drvdata(&icd->dev, &pdev->dev); |
130 | 129 | ||
131 | icd->ops = &soc_camera_platform_ops; | 130 | icd->ops = &soc_camera_platform_ops; |
132 | 131 | ||
133 | ici = to_soc_camera_host(icd->dev.parent); | 132 | ici = to_soc_camera_host(icd->dev.parent); |
134 | 133 | ||
135 | soc_camera_platform_video_probe(icd, pdev); | ||
136 | |||
137 | v4l2_subdev_init(&priv->subdev, &platform_subdev_ops); | 134 | v4l2_subdev_init(&priv->subdev, &platform_subdev_ops); |
138 | v4l2_set_subdevdata(&priv->subdev, p); | 135 | v4l2_set_subdevdata(&priv->subdev, p); |
139 | strncpy(priv->subdev.name, dev_name(&pdev->dev), V4L2_SUBDEV_NAME_SIZE); | 136 | strncpy(priv->subdev.name, dev_name(&pdev->dev), V4L2_SUBDEV_NAME_SIZE); |
diff --git a/drivers/media/video/tw9910.c b/drivers/media/video/tw9910.c index 8ec1031dacc3..341d0e035cbf 100644 --- a/drivers/media/video/tw9910.c +++ b/drivers/media/video/tw9910.c | |||
@@ -251,15 +251,6 @@ static const struct regval_list tw9910_default_regs[] = | |||
251 | ENDMARKER, | 251 | ENDMARKER, |
252 | }; | 252 | }; |
253 | 253 | ||
254 | static const struct soc_camera_data_format tw9910_color_fmt[] = { | ||
255 | { | ||
256 | .name = "VYUY", | ||
257 | .fourcc = V4L2_PIX_FMT_VYUY, | ||
258 | .depth = 16, | ||
259 | .colorspace = V4L2_COLORSPACE_SMPTE170M, | ||
260 | } | ||
261 | }; | ||
262 | |||
263 | static const struct tw9910_scale_ctrl tw9910_ntsc_scales[] = { | 254 | static const struct tw9910_scale_ctrl tw9910_ntsc_scales[] = { |
264 | { | 255 | { |
265 | .name = "NTSC SQ", | 256 | .name = "NTSC SQ", |
@@ -814,11 +805,11 @@ static int tw9910_cropcap(struct v4l2_subdev *sd, struct v4l2_cropcap *a) | |||
814 | return 0; | 805 | return 0; |
815 | } | 806 | } |
816 | 807 | ||
817 | static int tw9910_g_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | 808 | static int tw9910_g_fmt(struct v4l2_subdev *sd, |
809 | struct v4l2_mbus_framefmt *mf) | ||
818 | { | 810 | { |
819 | struct i2c_client *client = sd->priv; | 811 | struct i2c_client *client = sd->priv; |
820 | struct tw9910_priv *priv = to_tw9910(client); | 812 | struct tw9910_priv *priv = to_tw9910(client); |
821 | struct v4l2_pix_format *pix = &f->fmt.pix; | ||
822 | 813 | ||
823 | if (!priv->scale) { | 814 | if (!priv->scale) { |
824 | int ret; | 815 | int ret; |
@@ -835,74 +826,76 @@ static int tw9910_g_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | |||
835 | return ret; | 826 | return ret; |
836 | } | 827 | } |
837 | 828 | ||
838 | f->type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | 829 | mf->width = priv->scale->width; |
839 | 830 | mf->height = priv->scale->height; | |
840 | pix->width = priv->scale->width; | 831 | mf->code = V4L2_MBUS_FMT_YVYU8_2X8_BE; |
841 | pix->height = priv->scale->height; | 832 | mf->colorspace = V4L2_COLORSPACE_JPEG; |
842 | pix->pixelformat = V4L2_PIX_FMT_VYUY; | 833 | mf->field = V4L2_FIELD_INTERLACED; |
843 | pix->colorspace = V4L2_COLORSPACE_SMPTE170M; | ||
844 | pix->field = V4L2_FIELD_INTERLACED; | ||
845 | 834 | ||
846 | return 0; | 835 | return 0; |
847 | } | 836 | } |
848 | 837 | ||
849 | static int tw9910_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | 838 | static int tw9910_s_fmt(struct v4l2_subdev *sd, |
839 | struct v4l2_mbus_framefmt *mf) | ||
850 | { | 840 | { |
851 | struct i2c_client *client = sd->priv; | 841 | struct i2c_client *client = sd->priv; |
852 | struct tw9910_priv *priv = to_tw9910(client); | 842 | struct tw9910_priv *priv = to_tw9910(client); |
853 | struct v4l2_pix_format *pix = &f->fmt.pix; | ||
854 | /* See tw9910_s_crop() - no proper cropping support */ | 843 | /* See tw9910_s_crop() - no proper cropping support */ |
855 | struct v4l2_crop a = { | 844 | struct v4l2_crop a = { |
856 | .c = { | 845 | .c = { |
857 | .left = 0, | 846 | .left = 0, |
858 | .top = 0, | 847 | .top = 0, |
859 | .width = pix->width, | 848 | .width = mf->width, |
860 | .height = pix->height, | 849 | .height = mf->height, |
861 | }, | 850 | }, |
862 | }; | 851 | }; |
863 | int i, ret; | 852 | int ret; |
853 | |||
854 | WARN_ON(mf->field != V4L2_FIELD_ANY && | ||
855 | mf->field != V4L2_FIELD_INTERLACED); | ||
864 | 856 | ||
865 | /* | 857 | /* |
866 | * check color format | 858 | * check color format |
867 | */ | 859 | */ |
868 | for (i = 0; i < ARRAY_SIZE(tw9910_color_fmt); i++) | 860 | if (mf->code != V4L2_MBUS_FMT_YVYU8_2X8_BE) |
869 | if (pix->pixelformat == tw9910_color_fmt[i].fourcc) | ||
870 | break; | ||
871 | |||
872 | if (i == ARRAY_SIZE(tw9910_color_fmt)) | ||
873 | return -EINVAL; | 861 | return -EINVAL; |
874 | 862 | ||
863 | mf->colorspace = V4L2_COLORSPACE_JPEG; | ||
864 | |||
875 | ret = tw9910_s_crop(sd, &a); | 865 | ret = tw9910_s_crop(sd, &a); |
876 | if (!ret) { | 866 | if (!ret) { |
877 | pix->width = priv->scale->width; | 867 | mf->width = priv->scale->width; |
878 | pix->height = priv->scale->height; | 868 | mf->height = priv->scale->height; |
879 | } | 869 | } |
880 | return ret; | 870 | return ret; |
881 | } | 871 | } |
882 | 872 | ||
883 | static int tw9910_try_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | 873 | static int tw9910_try_fmt(struct v4l2_subdev *sd, |
874 | struct v4l2_mbus_framefmt *mf) | ||
884 | { | 875 | { |
885 | struct i2c_client *client = sd->priv; | 876 | struct i2c_client *client = sd->priv; |
886 | struct soc_camera_device *icd = client->dev.platform_data; | 877 | struct soc_camera_device *icd = client->dev.platform_data; |
887 | struct v4l2_pix_format *pix = &f->fmt.pix; | ||
888 | const struct tw9910_scale_ctrl *scale; | 878 | const struct tw9910_scale_ctrl *scale; |
889 | 879 | ||
890 | if (V4L2_FIELD_ANY == pix->field) { | 880 | if (V4L2_FIELD_ANY == mf->field) { |
891 | pix->field = V4L2_FIELD_INTERLACED; | 881 | mf->field = V4L2_FIELD_INTERLACED; |
892 | } else if (V4L2_FIELD_INTERLACED != pix->field) { | 882 | } else if (V4L2_FIELD_INTERLACED != mf->field) { |
893 | dev_err(&client->dev, "Field type invalid.\n"); | 883 | dev_err(&client->dev, "Field type %d invalid.\n", mf->field); |
894 | return -EINVAL; | 884 | return -EINVAL; |
895 | } | 885 | } |
896 | 886 | ||
887 | mf->code = V4L2_MBUS_FMT_YVYU8_2X8_BE; | ||
888 | mf->colorspace = V4L2_COLORSPACE_JPEG; | ||
889 | |||
897 | /* | 890 | /* |
898 | * select suitable norm | 891 | * select suitable norm |
899 | */ | 892 | */ |
900 | scale = tw9910_select_norm(icd, pix->width, pix->height); | 893 | scale = tw9910_select_norm(icd, mf->width, mf->height); |
901 | if (!scale) | 894 | if (!scale) |
902 | return -EINVAL; | 895 | return -EINVAL; |
903 | 896 | ||
904 | pix->width = scale->width; | 897 | mf->width = scale->width; |
905 | pix->height = scale->height; | 898 | mf->height = scale->height; |
906 | 899 | ||
907 | return 0; | 900 | return 0; |
908 | } | 901 | } |
@@ -930,9 +923,6 @@ static int tw9910_video_probe(struct soc_camera_device *icd, | |||
930 | return -ENODEV; | 923 | return -ENODEV; |
931 | } | 924 | } |
932 | 925 | ||
933 | icd->formats = tw9910_color_fmt; | ||
934 | icd->num_formats = ARRAY_SIZE(tw9910_color_fmt); | ||
935 | |||
936 | /* | 926 | /* |
937 | * check and show Product ID | 927 | * check and show Product ID |
938 | * So far only revisions 0 and 1 have been seen | 928 | * So far only revisions 0 and 1 have been seen |
@@ -973,14 +963,25 @@ static struct v4l2_subdev_core_ops tw9910_subdev_core_ops = { | |||
973 | #endif | 963 | #endif |
974 | }; | 964 | }; |
975 | 965 | ||
966 | static int tw9910_enum_fmt(struct v4l2_subdev *sd, int index, | ||
967 | enum v4l2_mbus_pixelcode *code) | ||
968 | { | ||
969 | if (index) | ||
970 | return -EINVAL; | ||
971 | |||
972 | *code = V4L2_MBUS_FMT_YVYU8_2X8_BE; | ||
973 | return 0; | ||
974 | } | ||
975 | |||
976 | static struct v4l2_subdev_video_ops tw9910_subdev_video_ops = { | 976 | static struct v4l2_subdev_video_ops tw9910_subdev_video_ops = { |
977 | .s_stream = tw9910_s_stream, | 977 | .s_stream = tw9910_s_stream, |
978 | .g_fmt = tw9910_g_fmt, | 978 | .g_mbus_fmt = tw9910_g_fmt, |
979 | .s_fmt = tw9910_s_fmt, | 979 | .s_mbus_fmt = tw9910_s_fmt, |
980 | .try_fmt = tw9910_try_fmt, | 980 | .try_mbus_fmt = tw9910_try_fmt, |
981 | .cropcap = tw9910_cropcap, | 981 | .cropcap = tw9910_cropcap, |
982 | .g_crop = tw9910_g_crop, | 982 | .g_crop = tw9910_g_crop, |
983 | .s_crop = tw9910_s_crop, | 983 | .s_crop = tw9910_s_crop, |
984 | .enum_mbus_fmt = tw9910_enum_fmt, | ||
984 | }; | 985 | }; |
985 | 986 | ||
986 | static struct v4l2_subdev_ops tw9910_subdev_ops = { | 987 | static struct v4l2_subdev_ops tw9910_subdev_ops = { |
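tw9910 above shows the complete client-side wiring: the g_fmt/s_fmt/try_fmt entries move to their *_mbus_* counterparts and enum_mbus_fmt is added, while the crop operations stay untouched. For a new driver the video ops table ends up looking like this (all handler names hypothetical):

static struct v4l2_subdev_video_ops mycam_subdev_video_ops = {
	.s_stream	= mycam_s_stream,
	.g_mbus_fmt	= mycam_g_fmt,
	.s_mbus_fmt	= mycam_s_fmt,
	.try_mbus_fmt	= mycam_try_fmt,
	.enum_mbus_fmt	= mycam_enum_fmt,
	.cropcap	= mycam_cropcap,
	.g_crop		= mycam_g_crop,
	.s_crop		= mycam_s_crop,
};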
diff --git a/include/media/soc_camera.h b/include/media/soc_camera.h index 831efffaf2ae..dcc5b86bcb6c 100644 --- a/include/media/soc_camera.h +++ b/include/media/soc_camera.h | |||
@@ -24,15 +24,13 @@ struct soc_camera_device { | |||
24 | struct device *pdev; /* Platform device */ | 24 | struct device *pdev; /* Platform device */ |
25 | s32 user_width; | 25 | s32 user_width; |
26 | s32 user_height; | 26 | s32 user_height; |
27 | enum v4l2_colorspace colorspace; | ||
27 | unsigned char iface; /* Host number */ | 28 | unsigned char iface; /* Host number */ |
28 | unsigned char devnum; /* Device number per host */ | 29 | unsigned char devnum; /* Device number per host */ |
29 | unsigned char buswidth; /* See comment in .c */ | ||
30 | struct soc_camera_sense *sense; /* See comment in struct definition */ | 30 | struct soc_camera_sense *sense; /* See comment in struct definition */ |
31 | struct soc_camera_ops *ops; | 31 | struct soc_camera_ops *ops; |
32 | struct video_device *vdev; | 32 | struct video_device *vdev; |
33 | const struct soc_camera_data_format *current_fmt; | 33 | const struct soc_camera_format_xlate *current_fmt; |
34 | const struct soc_camera_data_format *formats; | ||
35 | int num_formats; | ||
36 | struct soc_camera_format_xlate *user_formats; | 34 | struct soc_camera_format_xlate *user_formats; |
37 | int num_user_formats; | 35 | int num_user_formats; |
38 | enum v4l2_field field; /* Preserve field over close() */ | 36 | enum v4l2_field field; /* Preserve field over close() */ |
@@ -161,23 +159,13 @@ static inline struct v4l2_subdev *soc_camera_to_subdev( | |||
161 | int soc_camera_host_register(struct soc_camera_host *ici); | 159 | int soc_camera_host_register(struct soc_camera_host *ici); |
162 | void soc_camera_host_unregister(struct soc_camera_host *ici); | 160 | void soc_camera_host_unregister(struct soc_camera_host *ici); |
163 | 161 | ||
164 | const struct soc_camera_data_format *soc_camera_format_by_fourcc( | ||
165 | struct soc_camera_device *icd, unsigned int fourcc); | ||
166 | const struct soc_camera_format_xlate *soc_camera_xlate_by_fourcc( | 162 | const struct soc_camera_format_xlate *soc_camera_xlate_by_fourcc( |
167 | struct soc_camera_device *icd, unsigned int fourcc); | 163 | struct soc_camera_device *icd, unsigned int fourcc); |
168 | 164 | ||
169 | struct soc_camera_data_format { | ||
170 | const char *name; | ||
171 | unsigned int depth; | ||
172 | __u32 fourcc; | ||
173 | enum v4l2_colorspace colorspace; | ||
174 | }; | ||
175 | |||
176 | /** | 165 | /** |
177 | * struct soc_camera_format_xlate - match between host and sensor formats | 166 | * struct soc_camera_format_xlate - match between host and sensor formats |
178 | * @cam_fmt: sensor format provided by the sensor | 167 | * @code: code of a sensor provided format |
179 | * @host_fmt: host format after host translation from cam_fmt | 168 | * @host_fmt: host format after host translation from code |
180 | * @buswidth: bus width for this format | ||
181 | * | 169 | * |
182 | * Host and sensor translation structure. Used in table of host and sensor | 170 | * Host and sensor translation structure. Used in table of host and sensor |
183 | * formats matchings in soc_camera_device. A host can override the generic list | 171 | * formats matchings in soc_camera_device. A host can override the generic list |
@@ -185,9 +173,8 @@ struct soc_camera_data_format { | |||
185 | * format setup. | 173 | * format setup. |
186 | */ | 174 | */ |
187 | struct soc_camera_format_xlate { | 175 | struct soc_camera_format_xlate { |
188 | const struct soc_camera_data_format *cam_fmt; | 176 | enum v4l2_mbus_pixelcode code; |
189 | const struct soc_camera_data_format *host_fmt; | 177 | const struct soc_mbus_pixelfmt *host_fmt; |
190 | unsigned char buswidth; | ||
191 | }; | 178 | }; |
192 | 179 | ||
193 | struct soc_camera_ops { | 180 | struct soc_camera_ops { |
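With buswidth and the old cam_fmt/host_fmt pair gone, a host's get_formats() callback now only has to pair each client media-bus code with the host pixel format it can turn that code into. A passthrough version might look like this (hypothetical host; real hosts such as the CEU add extra translated entries on top):

static int myhost_get_formats(struct soc_camera_device *icd, unsigned int idx,
			      struct soc_camera_format_xlate *xlate)
{
	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
	const struct soc_mbus_pixelfmt *fmt;
	enum v4l2_mbus_pixelcode code;
	int ret;

	ret = v4l2_subdev_call(sd, video, enum_mbus_fmt, idx, &code);
	if (ret < 0)
		/* No more client formats */
		return 0;

	fmt = soc_mbus_get_fmtdesc(code);
	if (!fmt)
		/* This host cannot handle the code */
		return 0;

	/* The counting pass calls us with xlate == NULL */
	if (xlate) {
		xlate->code = code;
		xlate->host_fmt = fmt;
	}

	return 1;	/* one xlate entry per client code */
}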
diff --git a/include/media/soc_camera_platform.h b/include/media/soc_camera_platform.h index 88b3b5747f62..0ecefe227b76 100644 --- a/include/media/soc_camera_platform.h +++ b/include/media/soc_camera_platform.h | |||
@@ -19,7 +19,7 @@ struct device; | |||
19 | struct soc_camera_platform_info { | 19 | struct soc_camera_platform_info { |
20 | const char *format_name; | 20 | const char *format_name; |
21 | unsigned long format_depth; | 21 | unsigned long format_depth; |
22 | struct v4l2_pix_format format; | 22 | struct v4l2_mbus_framefmt format; |
23 | unsigned long bus_param; | 23 | unsigned long bus_param; |
24 | struct device *dev; | 24 | struct device *dev; |
25 | int (*set_capture)(struct soc_camera_platform_info *info, int enable); | 25 | int (*set_capture)(struct soc_camera_platform_info *info, int enable); |