-rw-r--r--  Documentation/video4linux/soc-camera.txt | 40
-rw-r--r--  drivers/media/video/mt9m001.c | 142
-rw-r--r--  drivers/media/video/mt9m111.c | 112
-rw-r--r--  drivers/media/video/mt9t031.c | 220
-rw-r--r--  drivers/media/video/mt9v022.c | 131
-rw-r--r--  drivers/media/video/mx1_camera.c | 10
-rw-r--r--  drivers/media/video/mx3_camera.c | 114
-rw-r--r--  drivers/media/video/ov772x.c | 84
-rw-r--r--  drivers/media/video/pxa_camera.c | 201
-rw-r--r--  drivers/media/video/sh_mobile_ceu_camera.c | 828
-rw-r--r--  drivers/media/video/soc_camera.c | 130
-rw-r--r--  drivers/media/video/soc_camera_platform.c | 4
-rw-r--r--  drivers/media/video/tw9910.c | 120
-rw-r--r--  include/media/soc_camera.h | 21
14 files changed, 1524 insertions, 633 deletions
diff --git a/Documentation/video4linux/soc-camera.txt b/Documentation/video4linux/soc-camera.txt
index 178ef3c5e579..3f87c7da4ca2 100644
--- a/Documentation/video4linux/soc-camera.txt
+++ b/Documentation/video4linux/soc-camera.txt
@@ -116,5 +116,45 @@ functionality. | |||
116 | struct soc_camera_device also links to an array of struct soc_camera_data_format, | 116 | struct soc_camera_device also links to an array of struct soc_camera_data_format, |
117 | listing pixel formats, supported by the camera. | 117 | listing pixel formats, supported by the camera. |
118 | 118 | ||
119 | VIDIOC_S_CROP and VIDIOC_S_FMT behaviour | ||
120 | ---------------------------------------- | ||
121 | |||
122 | The above user ioctls modify image geometry as follows: | ||
123 | |||
124 | VIDIOC_S_CROP: sets the location and size of the sensor window. The unit is one | ||
125 | sensor pixel. Changing the sensor window size preserves any scaling factors, | ||
126 | therefore the user window size changes as well. | ||
127 | |||
128 | VIDIOC_S_FMT: sets the user window. It should preserve the previously set sensor | ||
129 | window as much as possible by modifying scaling factors. If the sensor window | ||
130 | cannot be preserved precisely, it may be changed too. | ||
131 | |||
132 | In soc-camera there are two locations where scaling and cropping can take place: | ||
133 | in the camera driver and in the host driver. User ioctls are first passed | ||
134 | to the host driver, which then generally passes them down to the camera driver. | ||
135 | It is more efficient to perform scaling and cropping in the camera driver to | ||
136 | save camera bus bandwidth and maximise the framerate. However, if the camera | ||
137 | driver fails to set the required parameters with sufficient precision, the host | ||
138 | driver may decide to also use its own scaling and cropping to fulfill the user's | ||
139 | request. | ||
140 | |||
141 | Camera drivers are interfaced to the soc-camera core and to host drivers over | ||
142 | the v4l2-subdev API, which is purely functional: it doesn't pass any data. | ||
143 | Therefore all camera drivers shall reply to .g_fmt() requests with their current | ||
144 | output geometry. This is necessary to correctly configure the camera bus. | ||
145 | .s_fmt() and .try_fmt() have to be implemented too. The sensor window and scaling | ||
146 | factors have to be maintained by camera drivers internally. According to the | ||
147 | V4L2 API all capture drivers must support the VIDIOC_CROPCAP ioctl, hence we | ||
148 | rely on camera drivers implementing .cropcap(). If the camera driver does not | ||
149 | support cropping, it may choose not to implement .s_crop(); however, to enable | ||
150 | cropping support in the camera host driver, at least the .g_crop() method must | ||
151 | be implemented. | ||
152 | |||
153 | User window geometry is kept in the .user_width and .user_height fields of | ||
154 | struct soc_camera_device and used by the soc-camera core and host drivers. The | ||
155 | core updates these fields upon successful completion of a .s_fmt() call, but if | ||
156 | these fields change elsewhere, e.g. during .s_crop() processing, the host | ||
157 | driver is responsible for updating them. | ||
158 | |||
119 | -- | 159 | -- |
120 | Author: Guennadi Liakhovetski <g.liakhovetski@gmx.de> | 160 | Author: Guennadi Liakhovetski <g.liakhovetski@gmx.de> |
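
To make the ordering described in the documentation above concrete, here is a minimal host-side sketch, assuming the standard v4l2_subdev_call() plumbing and the soc_camera_to_subdev() accessor; the function itself is hypothetical and is not part of this patch:

static int example_host_set_crop(struct soc_camera_device *icd,
				 struct v4l2_crop *a)
{
	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
	struct v4l2_format f = {
		.type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
	};
	int ret;

	/* Let the sensor apply (and clamp) the requested rectangle */
	ret = v4l2_subdev_call(sd, video, s_crop, a);
	if (ret < 0)
		return ret;

	/*
	 * The subdev API is purely functional, so read back the actual
	 * output geometry instead of assuming the requested one.
	 */
	ret = v4l2_subdev_call(sd, video, g_fmt, &f);
	if (ret < 0)
		return ret;

	/*
	 * The user window changed outside of .s_fmt(), so the host driver
	 * is responsible for updating these fields itself.
	 */
	icd->user_width = f.fmt.pix.width;
	icd->user_height = f.fmt.pix.height;

	return 0;
}

If the sensor could not crop precisely enough, this would also be the place for the host driver to configure its own cropping or scaling before updating the user window.
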
diff --git a/drivers/media/video/mt9m001.c b/drivers/media/video/mt9m001.c
index 775e1a3c98d3..e8cf56189ef1 100644
--- a/drivers/media/video/mt9m001.c
+++ b/drivers/media/video/mt9m001.c
@@ -39,6 +39,13 @@ | |||
39 | #define MT9M001_GLOBAL_GAIN 0x35 | 39 | #define MT9M001_GLOBAL_GAIN 0x35 |
40 | #define MT9M001_CHIP_ENABLE 0xF1 | 40 | #define MT9M001_CHIP_ENABLE 0xF1 |
41 | 41 | ||
42 | #define MT9M001_MAX_WIDTH 1280 | ||
43 | #define MT9M001_MAX_HEIGHT 1024 | ||
44 | #define MT9M001_MIN_WIDTH 48 | ||
45 | #define MT9M001_MIN_HEIGHT 32 | ||
46 | #define MT9M001_COLUMN_SKIP 20 | ||
47 | #define MT9M001_ROW_SKIP 12 | ||
48 | |||
42 | static const struct soc_camera_data_format mt9m001_colour_formats[] = { | 49 | static const struct soc_camera_data_format mt9m001_colour_formats[] = { |
43 | /* Order important: first natively supported, | 50 | /* Order important: first natively supported, |
44 | * second supported with a GPIO extender */ | 51 | * second supported with a GPIO extender */ |
@@ -70,6 +77,8 @@ static const struct soc_camera_data_format mt9m001_monochrome_formats[] = { | |||
70 | 77 | ||
71 | struct mt9m001 { | 78 | struct mt9m001 { |
72 | struct v4l2_subdev subdev; | 79 | struct v4l2_subdev subdev; |
80 | struct v4l2_rect rect; /* Sensor window */ | ||
81 | __u32 fourcc; | ||
73 | int model; /* V4L2_IDENT_MT9M001* codes from v4l2-chip-ident.h */ | 82 | int model; /* V4L2_IDENT_MT9M001* codes from v4l2-chip-ident.h */ |
74 | unsigned char autoexposure; | 83 | unsigned char autoexposure; |
75 | }; | 84 | }; |
@@ -196,13 +205,31 @@ static unsigned long mt9m001_query_bus_param(struct soc_camera_device *icd) | |||
196 | 205 | ||
197 | static int mt9m001_s_crop(struct v4l2_subdev *sd, struct v4l2_crop *a) | 206 | static int mt9m001_s_crop(struct v4l2_subdev *sd, struct v4l2_crop *a) |
198 | { | 207 | { |
199 | struct v4l2_rect *rect = &a->c; | ||
200 | struct i2c_client *client = sd->priv; | 208 | struct i2c_client *client = sd->priv; |
201 | struct mt9m001 *mt9m001 = to_mt9m001(client); | 209 | struct mt9m001 *mt9m001 = to_mt9m001(client); |
210 | struct v4l2_rect rect = a->c; | ||
202 | struct soc_camera_device *icd = client->dev.platform_data; | 211 | struct soc_camera_device *icd = client->dev.platform_data; |
203 | int ret; | 212 | int ret; |
204 | const u16 hblank = 9, vblank = 25; | 213 | const u16 hblank = 9, vblank = 25; |
205 | 214 | ||
215 | if (mt9m001->fourcc == V4L2_PIX_FMT_SBGGR8 || | ||
216 | mt9m001->fourcc == V4L2_PIX_FMT_SBGGR16) | ||
217 | /* | ||
218 | * Bayer format - even number of rows for simplicity, | ||
219 | * but let the user play with the top row. | ||
220 | */ | ||
221 | rect.height = ALIGN(rect.height, 2); | ||
222 | |||
223 | /* Datasheet requirement: see register description */ | ||
224 | rect.width = ALIGN(rect.width, 2); | ||
225 | rect.left = ALIGN(rect.left, 2); | ||
226 | |||
227 | soc_camera_limit_side(&rect.left, &rect.width, | ||
228 | MT9M001_COLUMN_SKIP, MT9M001_MIN_WIDTH, MT9M001_MAX_WIDTH); | ||
229 | |||
230 | soc_camera_limit_side(&rect.top, &rect.height, | ||
231 | MT9M001_ROW_SKIP, MT9M001_MIN_HEIGHT, MT9M001_MAX_HEIGHT); | ||
232 | |||
206 | /* Blanking and start values - default... */ | 233 | /* Blanking and start values - default... */ |
207 | ret = reg_write(client, MT9M001_HORIZONTAL_BLANKING, hblank); | 234 | ret = reg_write(client, MT9M001_HORIZONTAL_BLANKING, hblank); |
208 | if (!ret) | 235 | if (!ret) |
@@ -211,46 +238,98 @@ static int mt9m001_s_crop(struct v4l2_subdev *sd, struct v4l2_crop *a) | |||
211 | /* The caller provides a supported format, as verified per | 238 | /* The caller provides a supported format, as verified per |
212 | * call to icd->try_fmt() */ | 239 | * call to icd->try_fmt() */ |
213 | if (!ret) | 240 | if (!ret) |
214 | ret = reg_write(client, MT9M001_COLUMN_START, rect->left); | 241 | ret = reg_write(client, MT9M001_COLUMN_START, rect.left); |
215 | if (!ret) | 242 | if (!ret) |
216 | ret = reg_write(client, MT9M001_ROW_START, rect->top); | 243 | ret = reg_write(client, MT9M001_ROW_START, rect.top); |
217 | if (!ret) | 244 | if (!ret) |
218 | ret = reg_write(client, MT9M001_WINDOW_WIDTH, rect->width - 1); | 245 | ret = reg_write(client, MT9M001_WINDOW_WIDTH, rect.width - 1); |
219 | if (!ret) | 246 | if (!ret) |
220 | ret = reg_write(client, MT9M001_WINDOW_HEIGHT, | 247 | ret = reg_write(client, MT9M001_WINDOW_HEIGHT, |
221 | rect->height + icd->y_skip_top - 1); | 248 | rect.height + icd->y_skip_top - 1); |
222 | if (!ret && mt9m001->autoexposure) { | 249 | if (!ret && mt9m001->autoexposure) { |
223 | ret = reg_write(client, MT9M001_SHUTTER_WIDTH, | 250 | ret = reg_write(client, MT9M001_SHUTTER_WIDTH, |
224 | rect->height + icd->y_skip_top + vblank); | 251 | rect.height + icd->y_skip_top + vblank); |
225 | if (!ret) { | 252 | if (!ret) { |
226 | const struct v4l2_queryctrl *qctrl = | 253 | const struct v4l2_queryctrl *qctrl = |
227 | soc_camera_find_qctrl(icd->ops, | 254 | soc_camera_find_qctrl(icd->ops, |
228 | V4L2_CID_EXPOSURE); | 255 | V4L2_CID_EXPOSURE); |
229 | icd->exposure = (524 + (rect->height + icd->y_skip_top + | 256 | icd->exposure = (524 + (rect.height + icd->y_skip_top + |
230 | vblank - 1) * | 257 | vblank - 1) * |
231 | (qctrl->maximum - qctrl->minimum)) / | 258 | (qctrl->maximum - qctrl->minimum)) / |
232 | 1048 + qctrl->minimum; | 259 | 1048 + qctrl->minimum; |
233 | } | 260 | } |
234 | } | 261 | } |
235 | 262 | ||
263 | if (!ret) | ||
264 | mt9m001->rect = rect; | ||
265 | |||
236 | return ret; | 266 | return ret; |
237 | } | 267 | } |
238 | 268 | ||
269 | static int mt9m001_g_crop(struct v4l2_subdev *sd, struct v4l2_crop *a) | ||
270 | { | ||
271 | struct i2c_client *client = sd->priv; | ||
272 | struct mt9m001 *mt9m001 = to_mt9m001(client); | ||
273 | |||
274 | a->c = mt9m001->rect; | ||
275 | a->type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | ||
276 | |||
277 | return 0; | ||
278 | } | ||
279 | |||
280 | static int mt9m001_cropcap(struct v4l2_subdev *sd, struct v4l2_cropcap *a) | ||
281 | { | ||
282 | a->bounds.left = MT9M001_COLUMN_SKIP; | ||
283 | a->bounds.top = MT9M001_ROW_SKIP; | ||
284 | a->bounds.width = MT9M001_MAX_WIDTH; | ||
285 | a->bounds.height = MT9M001_MAX_HEIGHT; | ||
286 | a->defrect = a->bounds; | ||
287 | a->type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | ||
288 | a->pixelaspect.numerator = 1; | ||
289 | a->pixelaspect.denominator = 1; | ||
290 | |||
291 | return 0; | ||
292 | } | ||
293 | |||
294 | static int mt9m001_g_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | ||
295 | { | ||
296 | struct i2c_client *client = sd->priv; | ||
297 | struct mt9m001 *mt9m001 = to_mt9m001(client); | ||
298 | struct v4l2_pix_format *pix = &f->fmt.pix; | ||
299 | |||
300 | pix->width = mt9m001->rect.width; | ||
301 | pix->height = mt9m001->rect.height; | ||
302 | pix->pixelformat = mt9m001->fourcc; | ||
303 | pix->field = V4L2_FIELD_NONE; | ||
304 | pix->colorspace = V4L2_COLORSPACE_SRGB; | ||
305 | |||
306 | return 0; | ||
307 | } | ||
308 | |||
239 | static int mt9m001_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | 309 | static int mt9m001_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) |
240 | { | 310 | { |
241 | struct i2c_client *client = sd->priv; | 311 | struct i2c_client *client = sd->priv; |
242 | struct soc_camera_device *icd = client->dev.platform_data; | 312 | struct mt9m001 *mt9m001 = to_mt9m001(client); |
313 | struct v4l2_pix_format *pix = &f->fmt.pix; | ||
243 | struct v4l2_crop a = { | 314 | struct v4l2_crop a = { |
244 | .c = { | 315 | .c = { |
245 | .left = icd->rect_current.left, | 316 | .left = mt9m001->rect.left, |
246 | .top = icd->rect_current.top, | 317 | .top = mt9m001->rect.top, |
247 | .width = f->fmt.pix.width, | 318 | .width = pix->width, |
248 | .height = f->fmt.pix.height, | 319 | .height = pix->height, |
249 | }, | 320 | }, |
250 | }; | 321 | }; |
322 | int ret; | ||
251 | 323 | ||
252 | /* No support for scaling so far, just crop. TODO: use skipping */ | 324 | /* No support for scaling so far, just crop. TODO: use skipping */ |
253 | return mt9m001_s_crop(sd, &a); | 325 | ret = mt9m001_s_crop(sd, &a); |
326 | if (!ret) { | ||
327 | pix->width = mt9m001->rect.width; | ||
328 | pix->height = mt9m001->rect.height; | ||
329 | mt9m001->fourcc = pix->pixelformat; | ||
330 | } | ||
331 | |||
332 | return ret; | ||
254 | } | 333 | } |
255 | 334 | ||
256 | static int mt9m001_try_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | 335 | static int mt9m001_try_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) |
@@ -259,9 +338,14 @@ static int mt9m001_try_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | |||
259 | struct soc_camera_device *icd = client->dev.platform_data; | 338 | struct soc_camera_device *icd = client->dev.platform_data; |
260 | struct v4l2_pix_format *pix = &f->fmt.pix; | 339 | struct v4l2_pix_format *pix = &f->fmt.pix; |
261 | 340 | ||
262 | v4l_bound_align_image(&pix->width, 48, 1280, 1, | 341 | v4l_bound_align_image(&pix->width, MT9M001_MIN_WIDTH, |
263 | &pix->height, 32 + icd->y_skip_top, | 342 | MT9M001_MAX_WIDTH, 1, |
264 | 1024 + icd->y_skip_top, 0, 0); | 343 | &pix->height, MT9M001_MIN_HEIGHT + icd->y_skip_top, |
344 | MT9M001_MAX_HEIGHT + icd->y_skip_top, 0, 0); | ||
345 | |||
346 | if (pix->pixelformat == V4L2_PIX_FMT_SBGGR8 || | ||
347 | pix->pixelformat == V4L2_PIX_FMT_SBGGR16) | ||
348 | pix->height = ALIGN(pix->height - 1, 2); | ||
265 | 349 | ||
266 | return 0; | 350 | return 0; |
267 | } | 351 | } |
@@ -472,11 +556,11 @@ static int mt9m001_s_ctrl(struct v4l2_subdev *sd, struct v4l2_control *ctrl) | |||
472 | if (ctrl->value) { | 556 | if (ctrl->value) { |
473 | const u16 vblank = 25; | 557 | const u16 vblank = 25; |
474 | if (reg_write(client, MT9M001_SHUTTER_WIDTH, | 558 | if (reg_write(client, MT9M001_SHUTTER_WIDTH, |
475 | icd->rect_current.height + | 559 | mt9m001->rect.height + |
476 | icd->y_skip_top + vblank) < 0) | 560 | icd->y_skip_top + vblank) < 0) |
477 | return -EIO; | 561 | return -EIO; |
478 | qctrl = soc_camera_find_qctrl(icd->ops, V4L2_CID_EXPOSURE); | 562 | qctrl = soc_camera_find_qctrl(icd->ops, V4L2_CID_EXPOSURE); |
479 | icd->exposure = (524 + (icd->rect_current.height + | 563 | icd->exposure = (524 + (mt9m001->rect.height + |
480 | icd->y_skip_top + vblank - 1) * | 564 | icd->y_skip_top + vblank - 1) * |
481 | (qctrl->maximum - qctrl->minimum)) / | 565 | (qctrl->maximum - qctrl->minimum)) / |
482 | 1048 + qctrl->minimum; | 566 | 1048 + qctrl->minimum; |
@@ -548,6 +632,8 @@ static int mt9m001_video_probe(struct soc_camera_device *icd, | |||
548 | if (flags & SOCAM_DATAWIDTH_8) | 632 | if (flags & SOCAM_DATAWIDTH_8) |
549 | icd->num_formats++; | 633 | icd->num_formats++; |
550 | 634 | ||
635 | mt9m001->fourcc = icd->formats->fourcc; | ||
636 | |||
551 | dev_info(&client->dev, "Detected a MT9M001 chip ID %x (%s)\n", data, | 637 | dev_info(&client->dev, "Detected a MT9M001 chip ID %x (%s)\n", data, |
552 | data == 0x8431 ? "C12STM" : "C12ST"); | 638 | data == 0x8431 ? "C12STM" : "C12ST"); |
553 | 639 | ||
@@ -556,10 +642,9 @@ static int mt9m001_video_probe(struct soc_camera_device *icd, | |||
556 | 642 | ||
557 | static void mt9m001_video_remove(struct soc_camera_device *icd) | 643 | static void mt9m001_video_remove(struct soc_camera_device *icd) |
558 | { | 644 | { |
559 | struct i2c_client *client = to_i2c_client(to_soc_camera_control(icd)); | ||
560 | struct soc_camera_link *icl = to_soc_camera_link(icd); | 645 | struct soc_camera_link *icl = to_soc_camera_link(icd); |
561 | 646 | ||
562 | dev_dbg(&client->dev, "Video %x removed: %p, %p\n", client->addr, | 647 | dev_dbg(&icd->dev, "Video removed: %p, %p\n", |
563 | icd->dev.parent, icd->vdev); | 648 | icd->dev.parent, icd->vdev); |
564 | if (icl->free_bus) | 649 | if (icl->free_bus) |
565 | icl->free_bus(icl); | 650 | icl->free_bus(icl); |
@@ -578,8 +663,11 @@ static struct v4l2_subdev_core_ops mt9m001_subdev_core_ops = { | |||
578 | static struct v4l2_subdev_video_ops mt9m001_subdev_video_ops = { | 663 | static struct v4l2_subdev_video_ops mt9m001_subdev_video_ops = { |
579 | .s_stream = mt9m001_s_stream, | 664 | .s_stream = mt9m001_s_stream, |
580 | .s_fmt = mt9m001_s_fmt, | 665 | .s_fmt = mt9m001_s_fmt, |
666 | .g_fmt = mt9m001_g_fmt, | ||
581 | .try_fmt = mt9m001_try_fmt, | 667 | .try_fmt = mt9m001_try_fmt, |
582 | .s_crop = mt9m001_s_crop, | 668 | .s_crop = mt9m001_s_crop, |
669 | .g_crop = mt9m001_g_crop, | ||
670 | .cropcap = mt9m001_cropcap, | ||
583 | }; | 671 | }; |
584 | 672 | ||
585 | static struct v4l2_subdev_ops mt9m001_subdev_ops = { | 673 | static struct v4l2_subdev_ops mt9m001_subdev_ops = { |
@@ -621,15 +709,13 @@ static int mt9m001_probe(struct i2c_client *client, | |||
621 | 709 | ||
622 | /* Second stage probe - when a capture adapter is there */ | 710 | /* Second stage probe - when a capture adapter is there */ |
623 | icd->ops = &mt9m001_ops; | 711 | icd->ops = &mt9m001_ops; |
624 | icd->rect_max.left = 20; | ||
625 | icd->rect_max.top = 12; | ||
626 | icd->rect_max.width = 1280; | ||
627 | icd->rect_max.height = 1024; | ||
628 | icd->rect_current.left = 20; | ||
629 | icd->rect_current.top = 12; | ||
630 | icd->width_min = 48; | ||
631 | icd->height_min = 32; | ||
632 | icd->y_skip_top = 1; | 712 | icd->y_skip_top = 1; |
713 | |||
714 | mt9m001->rect.left = MT9M001_COLUMN_SKIP; | ||
715 | mt9m001->rect.top = MT9M001_ROW_SKIP; | ||
716 | mt9m001->rect.width = MT9M001_MAX_WIDTH; | ||
717 | mt9m001->rect.height = MT9M001_MAX_HEIGHT; | ||
718 | |||
633 | /* Simulated autoexposure. If enabled, we calculate shutter width | 719 | /* Simulated autoexposure. If enabled, we calculate shutter width |
634 | * ourselves in the driver based on vertical blanking and frame width */ | 720 | * ourselves in the driver based on vertical blanking and frame width */ |
635 | mt9m001->autoexposure = 1; | 721 | mt9m001->autoexposure = 1; |
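
All of the sensor drivers in this patch clamp the requested window with soc_camera_limit_side(), which is added to soc_camera.c elsewhere in the series and is not visible in this hunk. The snippet below is only an assumed model of that clamping, reconstructed from how the callers pass (start offset, minimum length, maximum length); it is not the kernel helper:

#include <stdio.h>

/*
 * Assumed semantics: keep one window side inside [start, start + max_len]
 * and bound its length to [min_len, max_len].
 */
static void limit_side(int *start_pos, unsigned int *length,
		       unsigned int start, unsigned int min_len,
		       unsigned int max_len)
{
	if (*length < min_len)
		*length = min_len;
	if (*length > max_len)
		*length = max_len;

	if (*start_pos < (int)start)
		*start_pos = start;
	if (*start_pos + *length > start + max_len)
		*start_pos = start + max_len - *length;
}

int main(void)
{
	/* MT9M001-like limits: column skip 20, width 48..1280 */
	int left = 1400;
	unsigned int width = 2000;

	limit_side(&left, &width, 20, 48, 1280);
	printf("left=%d width=%u\n", left, width);  /* left=20 width=1280 */
	return 0;
}
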
diff --git a/drivers/media/video/mt9m111.c b/drivers/media/video/mt9m111.c
index 3637376da755..920dd53c4cfa 100644
--- a/drivers/media/video/mt9m111.c
+++ b/drivers/media/video/mt9m111.c
@@ -194,7 +194,7 @@ static int mt9m111_reg_read(struct i2c_client *client, const u16 reg) | |||
194 | 194 | ||
195 | ret = reg_page_map_set(client, reg); | 195 | ret = reg_page_map_set(client, reg); |
196 | if (!ret) | 196 | if (!ret) |
197 | ret = swab16(i2c_smbus_read_word_data(client, (reg & 0xff))); | 197 | ret = swab16(i2c_smbus_read_word_data(client, reg & 0xff)); |
198 | 198 | ||
199 | dev_dbg(&client->dev, "read reg.%03x -> %04x\n", reg, ret); | 199 | dev_dbg(&client->dev, "read reg.%03x -> %04x\n", reg, ret); |
200 | return ret; | 200 | return ret; |
@@ -257,8 +257,8 @@ static int mt9m111_setup_rect(struct i2c_client *client, | |||
257 | int width = rect->width; | 257 | int width = rect->width; |
258 | int height = rect->height; | 258 | int height = rect->height; |
259 | 259 | ||
260 | if ((mt9m111->pixfmt == V4L2_PIX_FMT_SBGGR8) | 260 | if (mt9m111->pixfmt == V4L2_PIX_FMT_SBGGR8 || |
261 | || (mt9m111->pixfmt == V4L2_PIX_FMT_SBGGR16)) | 261 | mt9m111->pixfmt == V4L2_PIX_FMT_SBGGR16) |
262 | is_raw_format = 1; | 262 | is_raw_format = 1; |
263 | else | 263 | else |
264 | is_raw_format = 0; | 264 | is_raw_format = 0; |
@@ -395,23 +395,85 @@ static int mt9m111_set_bus_param(struct soc_camera_device *icd, unsigned long f) | |||
395 | return 0; | 395 | return 0; |
396 | } | 396 | } |
397 | 397 | ||
398 | static int mt9m111_make_rect(struct i2c_client *client, | ||
399 | struct v4l2_rect *rect) | ||
400 | { | ||
401 | struct mt9m111 *mt9m111 = to_mt9m111(client); | ||
402 | |||
403 | if (mt9m111->pixfmt == V4L2_PIX_FMT_SBGGR8 || | ||
404 | mt9m111->pixfmt == V4L2_PIX_FMT_SBGGR16) { | ||
405 | /* Bayer format - even size lengths */ | ||
406 | rect->width = ALIGN(rect->width, 2); | ||
407 | rect->height = ALIGN(rect->height, 2); | ||
408 | /* Let the user play with the starting pixel */ | ||
409 | } | ||
410 | |||
411 | /* FIXME: the datasheet doesn't specify minimum sizes */ | ||
412 | soc_camera_limit_side(&rect->left, &rect->width, | ||
413 | MT9M111_MIN_DARK_COLS, 2, MT9M111_MAX_WIDTH); | ||
414 | |||
415 | soc_camera_limit_side(&rect->top, &rect->height, | ||
416 | MT9M111_MIN_DARK_ROWS, 2, MT9M111_MAX_HEIGHT); | ||
417 | |||
418 | return mt9m111_setup_rect(client, rect); | ||
419 | } | ||
420 | |||
398 | static int mt9m111_s_crop(struct v4l2_subdev *sd, struct v4l2_crop *a) | 421 | static int mt9m111_s_crop(struct v4l2_subdev *sd, struct v4l2_crop *a) |
399 | { | 422 | { |
400 | struct v4l2_rect *rect = &a->c; | 423 | struct v4l2_rect rect = a->c; |
401 | struct i2c_client *client = sd->priv; | 424 | struct i2c_client *client = sd->priv; |
402 | struct mt9m111 *mt9m111 = to_mt9m111(client); | 425 | struct mt9m111 *mt9m111 = to_mt9m111(client); |
403 | int ret; | 426 | int ret; |
404 | 427 | ||
405 | dev_dbg(&client->dev, "%s left=%d, top=%d, width=%d, height=%d\n", | 428 | dev_dbg(&client->dev, "%s left=%d, top=%d, width=%d, height=%d\n", |
406 | __func__, rect->left, rect->top, rect->width, | 429 | __func__, rect.left, rect.top, rect.width, rect.height); |
407 | rect->height); | ||
408 | 430 | ||
409 | ret = mt9m111_setup_rect(client, rect); | 431 | ret = mt9m111_make_rect(client, &rect); |
410 | if (!ret) | 432 | if (!ret) |
411 | mt9m111->rect = *rect; | 433 | mt9m111->rect = rect; |
412 | return ret; | 434 | return ret; |
413 | } | 435 | } |
414 | 436 | ||
437 | static int mt9m111_g_crop(struct v4l2_subdev *sd, struct v4l2_crop *a) | ||
438 | { | ||
439 | struct i2c_client *client = sd->priv; | ||
440 | struct mt9m111 *mt9m111 = to_mt9m111(client); | ||
441 | |||
442 | a->c = mt9m111->rect; | ||
443 | a->type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | ||
444 | |||
445 | return 0; | ||
446 | } | ||
447 | |||
448 | static int mt9m111_cropcap(struct v4l2_subdev *sd, struct v4l2_cropcap *a) | ||
449 | { | ||
450 | a->bounds.left = MT9M111_MIN_DARK_COLS; | ||
451 | a->bounds.top = MT9M111_MIN_DARK_ROWS; | ||
452 | a->bounds.width = MT9M111_MAX_WIDTH; | ||
453 | a->bounds.height = MT9M111_MAX_HEIGHT; | ||
454 | a->defrect = a->bounds; | ||
455 | a->type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | ||
456 | a->pixelaspect.numerator = 1; | ||
457 | a->pixelaspect.denominator = 1; | ||
458 | |||
459 | return 0; | ||
460 | } | ||
461 | |||
462 | static int mt9m111_g_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | ||
463 | { | ||
464 | struct i2c_client *client = sd->priv; | ||
465 | struct mt9m111 *mt9m111 = to_mt9m111(client); | ||
466 | struct v4l2_pix_format *pix = &f->fmt.pix; | ||
467 | |||
468 | pix->width = mt9m111->rect.width; | ||
469 | pix->height = mt9m111->rect.height; | ||
470 | pix->pixelformat = mt9m111->pixfmt; | ||
471 | pix->field = V4L2_FIELD_NONE; | ||
472 | pix->colorspace = V4L2_COLORSPACE_SRGB; | ||
473 | |||
474 | return 0; | ||
475 | } | ||
476 | |||
415 | static int mt9m111_set_pixfmt(struct i2c_client *client, u32 pixfmt) | 477 | static int mt9m111_set_pixfmt(struct i2c_client *client, u32 pixfmt) |
416 | { | 478 | { |
417 | struct mt9m111 *mt9m111 = to_mt9m111(client); | 479 | struct mt9m111 *mt9m111 = to_mt9m111(client); |
@@ -478,7 +540,7 @@ static int mt9m111_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | |||
478 | __func__, pix->pixelformat, rect.left, rect.top, rect.width, | 540 | __func__, pix->pixelformat, rect.left, rect.top, rect.width, |
479 | rect.height); | 541 | rect.height); |
480 | 542 | ||
481 | ret = mt9m111_setup_rect(client, &rect); | 543 | ret = mt9m111_make_rect(client, &rect); |
482 | if (!ret) | 544 | if (!ret) |
483 | ret = mt9m111_set_pixfmt(client, pix->pixelformat); | 545 | ret = mt9m111_set_pixfmt(client, pix->pixelformat); |
484 | if (!ret) | 546 | if (!ret) |
@@ -489,11 +551,27 @@ static int mt9m111_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | |||
489 | static int mt9m111_try_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | 551 | static int mt9m111_try_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) |
490 | { | 552 | { |
491 | struct v4l2_pix_format *pix = &f->fmt.pix; | 553 | struct v4l2_pix_format *pix = &f->fmt.pix; |
554 | bool bayer = pix->pixelformat == V4L2_PIX_FMT_SBGGR8 || | ||
555 | pix->pixelformat == V4L2_PIX_FMT_SBGGR16; | ||
556 | |||
557 | /* | ||
558 | * With Bayer format enforce even side lengths, but let the user play | ||
559 | * with the starting pixel | ||
560 | */ | ||
492 | 561 | ||
493 | if (pix->height > MT9M111_MAX_HEIGHT) | 562 | if (pix->height > MT9M111_MAX_HEIGHT) |
494 | pix->height = MT9M111_MAX_HEIGHT; | 563 | pix->height = MT9M111_MAX_HEIGHT; |
564 | else if (pix->height < 2) | ||
565 | pix->height = 2; | ||
566 | else if (bayer) | ||
567 | pix->height = ALIGN(pix->height, 2); | ||
568 | |||
495 | if (pix->width > MT9M111_MAX_WIDTH) | 569 | if (pix->width > MT9M111_MAX_WIDTH) |
496 | pix->width = MT9M111_MAX_WIDTH; | 570 | pix->width = MT9M111_MAX_WIDTH; |
571 | else if (pix->width < 2) | ||
572 | pix->width = 2; | ||
573 | else if (bayer) | ||
574 | pix->width = ALIGN(pix->width, 2); | ||
497 | 575 | ||
498 | return 0; | 576 | return 0; |
499 | } | 577 | } |
@@ -906,8 +984,11 @@ static struct v4l2_subdev_core_ops mt9m111_subdev_core_ops = { | |||
906 | 984 | ||
907 | static struct v4l2_subdev_video_ops mt9m111_subdev_video_ops = { | 985 | static struct v4l2_subdev_video_ops mt9m111_subdev_video_ops = { |
908 | .s_fmt = mt9m111_s_fmt, | 986 | .s_fmt = mt9m111_s_fmt, |
987 | .g_fmt = mt9m111_g_fmt, | ||
909 | .try_fmt = mt9m111_try_fmt, | 988 | .try_fmt = mt9m111_try_fmt, |
910 | .s_crop = mt9m111_s_crop, | 989 | .s_crop = mt9m111_s_crop, |
990 | .g_crop = mt9m111_g_crop, | ||
991 | .cropcap = mt9m111_cropcap, | ||
911 | }; | 992 | }; |
912 | 993 | ||
913 | static struct v4l2_subdev_ops mt9m111_subdev_ops = { | 994 | static struct v4l2_subdev_ops mt9m111_subdev_ops = { |
@@ -949,16 +1030,13 @@ static int mt9m111_probe(struct i2c_client *client, | |||
949 | 1030 | ||
950 | /* Second stage probe - when a capture adapter is there */ | 1031 | /* Second stage probe - when a capture adapter is there */ |
951 | icd->ops = &mt9m111_ops; | 1032 | icd->ops = &mt9m111_ops; |
952 | icd->rect_max.left = MT9M111_MIN_DARK_COLS; | ||
953 | icd->rect_max.top = MT9M111_MIN_DARK_ROWS; | ||
954 | icd->rect_max.width = MT9M111_MAX_WIDTH; | ||
955 | icd->rect_max.height = MT9M111_MAX_HEIGHT; | ||
956 | icd->rect_current.left = icd->rect_max.left; | ||
957 | icd->rect_current.top = icd->rect_max.top; | ||
958 | icd->width_min = MT9M111_MIN_DARK_ROWS; | ||
959 | icd->height_min = MT9M111_MIN_DARK_COLS; | ||
960 | icd->y_skip_top = 0; | 1033 | icd->y_skip_top = 0; |
961 | 1034 | ||
1035 | mt9m111->rect.left = MT9M111_MIN_DARK_COLS; | ||
1036 | mt9m111->rect.top = MT9M111_MIN_DARK_ROWS; | ||
1037 | mt9m111->rect.width = MT9M111_MAX_WIDTH; | ||
1038 | mt9m111->rect.height = MT9M111_MAX_HEIGHT; | ||
1039 | |||
962 | ret = mt9m111_video_probe(icd, client); | 1040 | ret = mt9m111_video_probe(icd, client); |
963 | if (ret) { | 1041 | if (ret) { |
964 | icd->ops = NULL; | 1042 | icd->ops = NULL; |
diff --git a/drivers/media/video/mt9t031.c b/drivers/media/video/mt9t031.c
index cd3eb7731ac2..f234ba602049 100644
--- a/drivers/media/video/mt9t031.c
+++ b/drivers/media/video/mt9t031.c
@@ -47,7 +47,7 @@ | |||
47 | #define MT9T031_MAX_HEIGHT 1536 | 47 | #define MT9T031_MAX_HEIGHT 1536 |
48 | #define MT9T031_MAX_WIDTH 2048 | 48 | #define MT9T031_MAX_WIDTH 2048 |
49 | #define MT9T031_MIN_HEIGHT 2 | 49 | #define MT9T031_MIN_HEIGHT 2 |
50 | #define MT9T031_MIN_WIDTH 2 | 50 | #define MT9T031_MIN_WIDTH 18 |
51 | #define MT9T031_HORIZONTAL_BLANK 142 | 51 | #define MT9T031_HORIZONTAL_BLANK 142 |
52 | #define MT9T031_VERTICAL_BLANK 25 | 52 | #define MT9T031_VERTICAL_BLANK 25 |
53 | #define MT9T031_COLUMN_SKIP 32 | 53 | #define MT9T031_COLUMN_SKIP 32 |
@@ -69,10 +69,11 @@ static const struct soc_camera_data_format mt9t031_colour_formats[] = { | |||
69 | 69 | ||
70 | struct mt9t031 { | 70 | struct mt9t031 { |
71 | struct v4l2_subdev subdev; | 71 | struct v4l2_subdev subdev; |
72 | struct v4l2_rect rect; /* Sensor window */ | ||
72 | int model; /* V4L2_IDENT_MT9T031* codes from v4l2-chip-ident.h */ | 73 | int model; /* V4L2_IDENT_MT9T031* codes from v4l2-chip-ident.h */ |
73 | unsigned char autoexposure; | ||
74 | u16 xskip; | 74 | u16 xskip; |
75 | u16 yskip; | 75 | u16 yskip; |
76 | unsigned char autoexposure; | ||
76 | }; | 77 | }; |
77 | 78 | ||
78 | static struct mt9t031 *to_mt9t031(const struct i2c_client *client) | 79 | static struct mt9t031 *to_mt9t031(const struct i2c_client *client) |
@@ -218,56 +219,68 @@ static unsigned long mt9t031_query_bus_param(struct soc_camera_device *icd) | |||
218 | return soc_camera_apply_sensor_flags(icl, MT9T031_BUS_PARAM); | 219 | return soc_camera_apply_sensor_flags(icl, MT9T031_BUS_PARAM); |
219 | } | 220 | } |
220 | 221 | ||
221 | /* Round up minima and round down maxima */ | 222 | /* target must be _even_ */ |
222 | static void recalculate_limits(struct soc_camera_device *icd, | 223 | static u16 mt9t031_skip(s32 *source, s32 target, s32 max) |
223 | u16 xskip, u16 yskip) | ||
224 | { | 224 | { |
225 | icd->rect_max.left = (MT9T031_COLUMN_SKIP + xskip - 1) / xskip; | 225 | unsigned int skip; |
226 | icd->rect_max.top = (MT9T031_ROW_SKIP + yskip - 1) / yskip; | 226 | |
227 | icd->width_min = (MT9T031_MIN_WIDTH + xskip - 1) / xskip; | 227 | if (*source < target + target / 2) { |
228 | icd->height_min = (MT9T031_MIN_HEIGHT + yskip - 1) / yskip; | 228 | *source = target; |
229 | icd->rect_max.width = MT9T031_MAX_WIDTH / xskip; | 229 | return 1; |
230 | icd->rect_max.height = MT9T031_MAX_HEIGHT / yskip; | 230 | } |
231 | |||
232 | skip = min(max, *source + target / 2) / target; | ||
233 | if (skip > 8) | ||
234 | skip = 8; | ||
235 | *source = target * skip; | ||
236 | |||
237 | return skip; | ||
231 | } | 238 | } |
232 | 239 | ||
240 | /* rect is the sensor rectangle, the caller guarantees parameter validity */ | ||
233 | static int mt9t031_set_params(struct soc_camera_device *icd, | 241 | static int mt9t031_set_params(struct soc_camera_device *icd, |
234 | struct v4l2_rect *rect, u16 xskip, u16 yskip) | 242 | struct v4l2_rect *rect, u16 xskip, u16 yskip) |
235 | { | 243 | { |
236 | struct i2c_client *client = to_i2c_client(to_soc_camera_control(icd)); | 244 | struct i2c_client *client = to_i2c_client(to_soc_camera_control(icd)); |
237 | struct mt9t031 *mt9t031 = to_mt9t031(client); | 245 | struct mt9t031 *mt9t031 = to_mt9t031(client); |
238 | int ret; | 246 | int ret; |
239 | u16 xbin, ybin, width, height, left, top; | 247 | u16 xbin, ybin; |
240 | const u16 hblank = MT9T031_HORIZONTAL_BLANK, | 248 | const u16 hblank = MT9T031_HORIZONTAL_BLANK, |
241 | vblank = MT9T031_VERTICAL_BLANK; | 249 | vblank = MT9T031_VERTICAL_BLANK; |
242 | 250 | ||
243 | width = rect->width * xskip; | ||
244 | height = rect->height * yskip; | ||
245 | left = rect->left * xskip; | ||
246 | top = rect->top * yskip; | ||
247 | |||
248 | xbin = min(xskip, (u16)3); | 251 | xbin = min(xskip, (u16)3); |
249 | ybin = min(yskip, (u16)3); | 252 | ybin = min(yskip, (u16)3); |
250 | 253 | ||
251 | dev_dbg(&client->dev, "xskip %u, width %u/%u, yskip %u, height %u/%u\n", | 254 | /* |
252 | xskip, width, rect->width, yskip, height, rect->height); | 255 | * Could just do roundup(rect->left, [xy]bin * 2); but this is cheaper. |
253 | 256 | * There is always a valid suitably aligned value. The worst case is | |
254 | /* Could just do roundup(rect->left, [xy]bin * 2); but this is cheaper */ | 257 | * xbin = 3, width = 2048. Then we will start at 36, the last read out |
258 | * pixel will be 2083, which is < 2085 - first black pixel. | ||
259 | * | ||
260 | * MT9T031 datasheet imposes window left border alignment, depending on | ||
261 | * the selected xskip. Failing to conform to this requirement produces | ||
262 | * dark horizontal stripes in the image. However, even obeying to this | ||
263 | * requirement doesn't eliminate the stripes in all configurations. They | ||
264 | * appear "locally reproducibly," but can differ between tests under | ||
265 | * different lighting conditions. | ||
266 | */ | ||
255 | switch (xbin) { | 267 | switch (xbin) { |
256 | case 2: | 268 | case 1: |
257 | left = (left + 3) & ~3; | 269 | rect->left &= ~1; |
258 | break; | 270 | break; |
259 | case 3: | ||
260 | left = roundup(left, 6); | ||
261 | } | ||
262 | |||
263 | switch (ybin) { | ||
264 | case 2: | 271 | case 2: |
265 | top = (top + 3) & ~3; | 272 | rect->left &= ~3; |
266 | break; | 273 | break; |
267 | case 3: | 274 | case 3: |
268 | top = roundup(top, 6); | 275 | rect->left = rect->left > roundup(MT9T031_COLUMN_SKIP, 6) ? |
276 | (rect->left / 6) * 6 : roundup(MT9T031_COLUMN_SKIP, 6); | ||
269 | } | 277 | } |
270 | 278 | ||
279 | rect->top &= ~1; | ||
280 | |||
281 | dev_dbg(&client->dev, "skip %u:%u, rect %ux%u@%u:%u\n", | ||
282 | xskip, yskip, rect->width, rect->height, rect->left, rect->top); | ||
283 | |||
271 | /* Disable register update, reconfigure atomically */ | 284 | /* Disable register update, reconfigure atomically */ |
272 | ret = reg_set(client, MT9T031_OUTPUT_CONTROL, 1); | 285 | ret = reg_set(client, MT9T031_OUTPUT_CONTROL, 1); |
273 | if (ret < 0) | 286 | if (ret < 0) |
@@ -287,27 +300,29 @@ static int mt9t031_set_params(struct soc_camera_device *icd, | |||
287 | ret = reg_write(client, MT9T031_ROW_ADDRESS_MODE, | 300 | ret = reg_write(client, MT9T031_ROW_ADDRESS_MODE, |
288 | ((ybin - 1) << 4) | (yskip - 1)); | 301 | ((ybin - 1) << 4) | (yskip - 1)); |
289 | } | 302 | } |
290 | dev_dbg(&client->dev, "new physical left %u, top %u\n", left, top); | 303 | dev_dbg(&client->dev, "new physical left %u, top %u\n", |
304 | rect->left, rect->top); | ||
291 | 305 | ||
292 | /* The caller provides a supported format, as guaranteed by | 306 | /* The caller provides a supported format, as guaranteed by |
293 | * icd->try_fmt_cap(), soc_camera_s_crop() and soc_camera_cropcap() */ | 307 | * icd->try_fmt_cap(), soc_camera_s_crop() and soc_camera_cropcap() */ |
294 | if (ret >= 0) | 308 | if (ret >= 0) |
295 | ret = reg_write(client, MT9T031_COLUMN_START, left); | 309 | ret = reg_write(client, MT9T031_COLUMN_START, rect->left); |
296 | if (ret >= 0) | 310 | if (ret >= 0) |
297 | ret = reg_write(client, MT9T031_ROW_START, top); | 311 | ret = reg_write(client, MT9T031_ROW_START, rect->top); |
298 | if (ret >= 0) | 312 | if (ret >= 0) |
299 | ret = reg_write(client, MT9T031_WINDOW_WIDTH, width - 1); | 313 | ret = reg_write(client, MT9T031_WINDOW_WIDTH, rect->width - 1); |
300 | if (ret >= 0) | 314 | if (ret >= 0) |
301 | ret = reg_write(client, MT9T031_WINDOW_HEIGHT, | 315 | ret = reg_write(client, MT9T031_WINDOW_HEIGHT, |
302 | height + icd->y_skip_top - 1); | 316 | rect->height + icd->y_skip_top - 1); |
303 | if (ret >= 0 && mt9t031->autoexposure) { | 317 | if (ret >= 0 && mt9t031->autoexposure) { |
304 | ret = set_shutter(client, height + icd->y_skip_top + vblank); | 318 | ret = set_shutter(client, |
319 | rect->height + icd->y_skip_top + vblank); | ||
305 | if (ret >= 0) { | 320 | if (ret >= 0) { |
306 | const u32 shutter_max = MT9T031_MAX_HEIGHT + vblank; | 321 | const u32 shutter_max = MT9T031_MAX_HEIGHT + vblank; |
307 | const struct v4l2_queryctrl *qctrl = | 322 | const struct v4l2_queryctrl *qctrl = |
308 | soc_camera_find_qctrl(icd->ops, | 323 | soc_camera_find_qctrl(icd->ops, |
309 | V4L2_CID_EXPOSURE); | 324 | V4L2_CID_EXPOSURE); |
310 | icd->exposure = (shutter_max / 2 + (height + | 325 | icd->exposure = (shutter_max / 2 + (rect->height + |
311 | icd->y_skip_top + vblank - 1) * | 326 | icd->y_skip_top + vblank - 1) * |
312 | (qctrl->maximum - qctrl->minimum)) / | 327 | (qctrl->maximum - qctrl->minimum)) / |
313 | shutter_max + qctrl->minimum; | 328 | shutter_max + qctrl->minimum; |
@@ -318,27 +333,72 @@ static int mt9t031_set_params(struct soc_camera_device *icd, | |||
318 | if (ret >= 0) | 333 | if (ret >= 0) |
319 | ret = reg_clear(client, MT9T031_OUTPUT_CONTROL, 1); | 334 | ret = reg_clear(client, MT9T031_OUTPUT_CONTROL, 1); |
320 | 335 | ||
336 | if (ret >= 0) { | ||
337 | mt9t031->rect = *rect; | ||
338 | mt9t031->xskip = xskip; | ||
339 | mt9t031->yskip = yskip; | ||
340 | } | ||
341 | |||
321 | return ret < 0 ? ret : 0; | 342 | return ret < 0 ? ret : 0; |
322 | } | 343 | } |
323 | 344 | ||
324 | static int mt9t031_s_crop(struct v4l2_subdev *sd, struct v4l2_crop *a) | 345 | static int mt9t031_s_crop(struct v4l2_subdev *sd, struct v4l2_crop *a) |
325 | { | 346 | { |
326 | struct v4l2_rect *rect = &a->c; | 347 | struct v4l2_rect rect = a->c; |
327 | struct i2c_client *client = sd->priv; | 348 | struct i2c_client *client = sd->priv; |
328 | struct mt9t031 *mt9t031 = to_mt9t031(client); | 349 | struct mt9t031 *mt9t031 = to_mt9t031(client); |
329 | struct soc_camera_device *icd = client->dev.platform_data; | 350 | struct soc_camera_device *icd = client->dev.platform_data; |
330 | 351 | ||
331 | /* Make sure we don't exceed sensor limits */ | 352 | rect.width = ALIGN(rect.width, 2); |
332 | if (rect->left + rect->width > icd->rect_max.left + icd->rect_max.width) | 353 | rect.height = ALIGN(rect.height, 2); |
333 | rect->left = icd->rect_max.width + icd->rect_max.left - | 354 | |
334 | rect->width; | 355 | soc_camera_limit_side(&rect.left, &rect.width, |
356 | MT9T031_COLUMN_SKIP, MT9T031_MIN_WIDTH, MT9T031_MAX_WIDTH); | ||
357 | |||
358 | soc_camera_limit_side(&rect.top, &rect.height, | ||
359 | MT9T031_ROW_SKIP, MT9T031_MIN_HEIGHT, MT9T031_MAX_HEIGHT); | ||
360 | |||
361 | return mt9t031_set_params(icd, &rect, mt9t031->xskip, mt9t031->yskip); | ||
362 | } | ||
363 | |||
364 | static int mt9t031_g_crop(struct v4l2_subdev *sd, struct v4l2_crop *a) | ||
365 | { | ||
366 | struct i2c_client *client = sd->priv; | ||
367 | struct mt9t031 *mt9t031 = to_mt9t031(client); | ||
368 | |||
369 | a->c = mt9t031->rect; | ||
370 | a->type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | ||
335 | 371 | ||
336 | if (rect->top + rect->height > icd->rect_max.height + icd->rect_max.top) | 372 | return 0; |
337 | rect->top = icd->rect_max.height + icd->rect_max.top - | 373 | } |
338 | rect->height; | 374 | |
375 | static int mt9t031_cropcap(struct v4l2_subdev *sd, struct v4l2_cropcap *a) | ||
376 | { | ||
377 | a->bounds.left = MT9T031_COLUMN_SKIP; | ||
378 | a->bounds.top = MT9T031_ROW_SKIP; | ||
379 | a->bounds.width = MT9T031_MAX_WIDTH; | ||
380 | a->bounds.height = MT9T031_MAX_HEIGHT; | ||
381 | a->defrect = a->bounds; | ||
382 | a->type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | ||
383 | a->pixelaspect.numerator = 1; | ||
384 | a->pixelaspect.denominator = 1; | ||
339 | 385 | ||
340 | /* CROP - no change in scaling, or in limits */ | 386 | return 0; |
341 | return mt9t031_set_params(icd, rect, mt9t031->xskip, mt9t031->yskip); | 387 | } |
388 | |||
389 | static int mt9t031_g_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | ||
390 | { | ||
391 | struct i2c_client *client = sd->priv; | ||
392 | struct mt9t031 *mt9t031 = to_mt9t031(client); | ||
393 | struct v4l2_pix_format *pix = &f->fmt.pix; | ||
394 | |||
395 | pix->width = mt9t031->rect.width / mt9t031->xskip; | ||
396 | pix->height = mt9t031->rect.height / mt9t031->yskip; | ||
397 | pix->pixelformat = V4L2_PIX_FMT_SGRBG10; | ||
398 | pix->field = V4L2_FIELD_NONE; | ||
399 | pix->colorspace = V4L2_COLORSPACE_SRGB; | ||
400 | |||
401 | return 0; | ||
342 | } | 402 | } |
343 | 403 | ||
344 | static int mt9t031_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | 404 | static int mt9t031_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) |
@@ -346,40 +406,25 @@ static int mt9t031_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | |||
346 | struct i2c_client *client = sd->priv; | 406 | struct i2c_client *client = sd->priv; |
347 | struct mt9t031 *mt9t031 = to_mt9t031(client); | 407 | struct mt9t031 *mt9t031 = to_mt9t031(client); |
348 | struct soc_camera_device *icd = client->dev.platform_data; | 408 | struct soc_camera_device *icd = client->dev.platform_data; |
349 | int ret; | 409 | struct v4l2_pix_format *pix = &f->fmt.pix; |
350 | u16 xskip, yskip; | 410 | u16 xskip, yskip; |
351 | struct v4l2_rect rect = { | 411 | struct v4l2_rect rect = mt9t031->rect; |
352 | .left = icd->rect_current.left, | ||
353 | .top = icd->rect_current.top, | ||
354 | .width = f->fmt.pix.width, | ||
355 | .height = f->fmt.pix.height, | ||
356 | }; | ||
357 | 412 | ||
358 | /* | 413 | /* |
359 | * try_fmt has put rectangle within limits. | 414 | * try_fmt has put width and height within limits. |
360 | * S_FMT - use binning and skipping for scaling, recalculate | 415 | * S_FMT: use binning and skipping for scaling |
361 | * limits, used for cropping | ||
362 | */ | 416 | */ |
363 | /* Is this more optimal than just a division? */ | 417 | xskip = mt9t031_skip(&rect.width, pix->width, MT9T031_MAX_WIDTH); |
364 | for (xskip = 8; xskip > 1; xskip--) | 418 | yskip = mt9t031_skip(&rect.height, pix->height, MT9T031_MAX_HEIGHT); |
365 | if (rect.width * xskip <= MT9T031_MAX_WIDTH) | ||
366 | break; | ||
367 | |||
368 | for (yskip = 8; yskip > 1; yskip--) | ||
369 | if (rect.height * yskip <= MT9T031_MAX_HEIGHT) | ||
370 | break; | ||
371 | |||
372 | recalculate_limits(icd, xskip, yskip); | ||
373 | |||
374 | ret = mt9t031_set_params(icd, &rect, xskip, yskip); | ||
375 | if (!ret) { | ||
376 | mt9t031->xskip = xskip; | ||
377 | mt9t031->yskip = yskip; | ||
378 | } | ||
379 | 419 | ||
380 | return ret; | 420 | /* mt9t031_set_params() doesn't change width and height */ |
421 | return mt9t031_set_params(icd, &rect, xskip, yskip); | ||
381 | } | 422 | } |
382 | 423 | ||
424 | /* | ||
425 | * If a user window larger than sensor window is requested, we'll increase the | ||
426 | * sensor window. | ||
427 | */ | ||
383 | static int mt9t031_try_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | 428 | static int mt9t031_try_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) |
384 | { | 429 | { |
385 | struct v4l2_pix_format *pix = &f->fmt.pix; | 430 | struct v4l2_pix_format *pix = &f->fmt.pix; |
@@ -620,12 +665,12 @@ static int mt9t031_s_ctrl(struct v4l2_subdev *sd, struct v4l2_control *ctrl) | |||
620 | if (ctrl->value) { | 665 | if (ctrl->value) { |
621 | const u16 vblank = MT9T031_VERTICAL_BLANK; | 666 | const u16 vblank = MT9T031_VERTICAL_BLANK; |
622 | const u32 shutter_max = MT9T031_MAX_HEIGHT + vblank; | 667 | const u32 shutter_max = MT9T031_MAX_HEIGHT + vblank; |
623 | if (set_shutter(client, icd->rect_current.height + | 668 | if (set_shutter(client, mt9t031->rect.height + |
624 | icd->y_skip_top + vblank) < 0) | 669 | icd->y_skip_top + vblank) < 0) |
625 | return -EIO; | 670 | return -EIO; |
626 | qctrl = soc_camera_find_qctrl(icd->ops, V4L2_CID_EXPOSURE); | 671 | qctrl = soc_camera_find_qctrl(icd->ops, V4L2_CID_EXPOSURE); |
627 | icd->exposure = (shutter_max / 2 + | 672 | icd->exposure = (shutter_max / 2 + |
628 | (icd->rect_current.height + | 673 | (mt9t031->rect.height + |
629 | icd->y_skip_top + vblank - 1) * | 674 | icd->y_skip_top + vblank - 1) * |
630 | (qctrl->maximum - qctrl->minimum)) / | 675 | (qctrl->maximum - qctrl->minimum)) / |
631 | shutter_max + qctrl->minimum; | 676 | shutter_max + qctrl->minimum; |
@@ -645,12 +690,6 @@ static int mt9t031_video_probe(struct i2c_client *client) | |||
645 | struct mt9t031 *mt9t031 = to_mt9t031(client); | 690 | struct mt9t031 *mt9t031 = to_mt9t031(client); |
646 | s32 data; | 691 | s32 data; |
647 | 692 | ||
648 | /* We must have a parent by now. And it cannot be a wrong one. | ||
649 | * So this entire test is completely redundant. */ | ||
650 | if (!icd->dev.parent || | ||
651 | to_soc_camera_host(icd->dev.parent)->nr != icd->iface) | ||
652 | return -ENODEV; | ||
653 | |||
654 | /* Enable the chip */ | 693 | /* Enable the chip */ |
655 | data = reg_write(client, MT9T031_CHIP_ENABLE, 1); | 694 | data = reg_write(client, MT9T031_CHIP_ENABLE, 1); |
656 | dev_dbg(&client->dev, "write: %d\n", data); | 695 | dev_dbg(&client->dev, "write: %d\n", data); |
@@ -688,8 +727,11 @@ static struct v4l2_subdev_core_ops mt9t031_subdev_core_ops = { | |||
688 | static struct v4l2_subdev_video_ops mt9t031_subdev_video_ops = { | 727 | static struct v4l2_subdev_video_ops mt9t031_subdev_video_ops = { |
689 | .s_stream = mt9t031_s_stream, | 728 | .s_stream = mt9t031_s_stream, |
690 | .s_fmt = mt9t031_s_fmt, | 729 | .s_fmt = mt9t031_s_fmt, |
730 | .g_fmt = mt9t031_g_fmt, | ||
691 | .try_fmt = mt9t031_try_fmt, | 731 | .try_fmt = mt9t031_try_fmt, |
692 | .s_crop = mt9t031_s_crop, | 732 | .s_crop = mt9t031_s_crop, |
733 | .g_crop = mt9t031_g_crop, | ||
734 | .cropcap = mt9t031_cropcap, | ||
693 | }; | 735 | }; |
694 | 736 | ||
695 | static struct v4l2_subdev_ops mt9t031_subdev_ops = { | 737 | static struct v4l2_subdev_ops mt9t031_subdev_ops = { |
@@ -731,15 +773,13 @@ static int mt9t031_probe(struct i2c_client *client, | |||
731 | 773 | ||
732 | /* Second stage probe - when a capture adapter is there */ | 774 | /* Second stage probe - when a capture adapter is there */ |
733 | icd->ops = &mt9t031_ops; | 775 | icd->ops = &mt9t031_ops; |
734 | icd->rect_max.left = MT9T031_COLUMN_SKIP; | ||
735 | icd->rect_max.top = MT9T031_ROW_SKIP; | ||
736 | icd->rect_current.left = icd->rect_max.left; | ||
737 | icd->rect_current.top = icd->rect_max.top; | ||
738 | icd->width_min = MT9T031_MIN_WIDTH; | ||
739 | icd->rect_max.width = MT9T031_MAX_WIDTH; | ||
740 | icd->height_min = MT9T031_MIN_HEIGHT; | ||
741 | icd->rect_max.height = MT9T031_MAX_HEIGHT; | ||
742 | icd->y_skip_top = 0; | 776 | icd->y_skip_top = 0; |
777 | |||
778 | mt9t031->rect.left = MT9T031_COLUMN_SKIP; | ||
779 | mt9t031->rect.top = MT9T031_ROW_SKIP; | ||
780 | mt9t031->rect.width = MT9T031_MAX_WIDTH; | ||
781 | mt9t031->rect.height = MT9T031_MAX_HEIGHT; | ||
782 | |||
743 | /* Simulated autoexposure. If enabled, we calculate shutter width | 783 | /* Simulated autoexposure. If enabled, we calculate shutter width |
744 | * ourselves in the driver based on vertical blanking and frame width */ | 784 | * ourselves in the driver based on vertical blanking and frame width */ |
745 | mt9t031->autoexposure = 1; | 785 | mt9t031->autoexposure = 1; |
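
The new mt9t031_skip() shown above chooses a skip factor so that an integer multiple of the user width or height fits into the sensor limits, then grows or shrinks the sensor side to exactly target * skip. Restated as a standalone program so the arithmetic can be checked (only the sample sizes in main() are invented):

#include <stdio.h>

/* Same computation as mt9t031_skip() in the hunk above, with min() open-coded. */
static unsigned int skip_demo(int *source, int target, int max)
{
	unsigned int skip;
	int bound;

	/* Close enough to the target: no skipping, just crop */
	if (*source < target + target / 2) {
		*source = target;
		return 1;
	}

	bound = *source + target / 2;
	if (bound > max)
		bound = max;
	skip = bound / target;
	if (skip > 8)
		skip = 8;
	*source = target * skip;	/* sensor window that maps onto 'target' */

	return skip;
}

int main(void)
{
	int width = 2048, height = 1536;
	unsigned int xskip, yskip;

	/* Full 2048x1536 sensor window, 640x480 user window */
	xskip = skip_demo(&width, 640, 2048);
	yskip = skip_demo(&height, 480, 1536);

	/* Prints: xskip=3 width=1920, yskip=3 height=1440 */
	printf("xskip=%u width=%d, yskip=%u height=%d\n",
	       xskip, width, yskip, height);
	return 0;
}
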
diff --git a/drivers/media/video/mt9v022.c b/drivers/media/video/mt9v022.c
index ab1965425289..35ea0ddd0715 100644
--- a/drivers/media/video/mt9v022.c
+++ b/drivers/media/video/mt9v022.c
@@ -55,6 +55,13 @@ MODULE_PARM_DESC(sensor_type, "Sensor type: \"colour\" or \"monochrome\""); | |||
55 | /* Progressive scan, master, defaults */ | 55 | /* Progressive scan, master, defaults */ |
56 | #define MT9V022_CHIP_CONTROL_DEFAULT 0x188 | 56 | #define MT9V022_CHIP_CONTROL_DEFAULT 0x188 |
57 | 57 | ||
58 | #define MT9V022_MAX_WIDTH 752 | ||
59 | #define MT9V022_MAX_HEIGHT 480 | ||
60 | #define MT9V022_MIN_WIDTH 48 | ||
61 | #define MT9V022_MIN_HEIGHT 32 | ||
62 | #define MT9V022_COLUMN_SKIP 1 | ||
63 | #define MT9V022_ROW_SKIP 4 | ||
64 | |||
58 | static const struct soc_camera_data_format mt9v022_colour_formats[] = { | 65 | static const struct soc_camera_data_format mt9v022_colour_formats[] = { |
59 | /* Order important: first natively supported, | 66 | /* Order important: first natively supported, |
60 | * second supported with a GPIO extender */ | 67 | * second supported with a GPIO extender */ |
@@ -86,6 +93,8 @@ static const struct soc_camera_data_format mt9v022_monochrome_formats[] = { | |||
86 | 93 | ||
87 | struct mt9v022 { | 94 | struct mt9v022 { |
88 | struct v4l2_subdev subdev; | 95 | struct v4l2_subdev subdev; |
96 | struct v4l2_rect rect; /* Sensor window */ | ||
97 | __u32 fourcc; | ||
89 | int model; /* V4L2_IDENT_MT9V022* codes from v4l2-chip-ident.h */ | 98 | int model; /* V4L2_IDENT_MT9V022* codes from v4l2-chip-ident.h */ |
90 | u16 chip_control; | 99 | u16 chip_control; |
91 | }; | 100 | }; |
@@ -250,44 +259,101 @@ static unsigned long mt9v022_query_bus_param(struct soc_camera_device *icd) | |||
250 | 259 | ||
251 | static int mt9v022_s_crop(struct v4l2_subdev *sd, struct v4l2_crop *a) | 260 | static int mt9v022_s_crop(struct v4l2_subdev *sd, struct v4l2_crop *a) |
252 | { | 261 | { |
253 | struct v4l2_rect *rect = &a->c; | ||
254 | struct i2c_client *client = sd->priv; | 262 | struct i2c_client *client = sd->priv; |
263 | struct mt9v022 *mt9v022 = to_mt9v022(client); | ||
264 | struct v4l2_rect rect = a->c; | ||
255 | struct soc_camera_device *icd = client->dev.platform_data; | 265 | struct soc_camera_device *icd = client->dev.platform_data; |
256 | int ret; | 266 | int ret; |
257 | 267 | ||
268 | /* Bayer format - even size lengths */ | ||
269 | if (mt9v022->fourcc == V4L2_PIX_FMT_SBGGR8 || | ||
270 | mt9v022->fourcc == V4L2_PIX_FMT_SBGGR16) { | ||
271 | rect.width = ALIGN(rect.width, 2); | ||
272 | rect.height = ALIGN(rect.height, 2); | ||
273 | /* Let the user play with the starting pixel */ | ||
274 | } | ||
275 | |||
276 | soc_camera_limit_side(&rect.left, &rect.width, | ||
277 | MT9V022_COLUMN_SKIP, MT9V022_MIN_WIDTH, MT9V022_MAX_WIDTH); | ||
278 | |||
279 | soc_camera_limit_side(&rect.top, &rect.height, | ||
280 | MT9V022_ROW_SKIP, MT9V022_MIN_HEIGHT, MT9V022_MAX_HEIGHT); | ||
281 | |||
258 | /* Like in example app. Contradicts the datasheet though */ | 282 | /* Like in example app. Contradicts the datasheet though */ |
259 | ret = reg_read(client, MT9V022_AEC_AGC_ENABLE); | 283 | ret = reg_read(client, MT9V022_AEC_AGC_ENABLE); |
260 | if (ret >= 0) { | 284 | if (ret >= 0) { |
261 | if (ret & 1) /* Autoexposure */ | 285 | if (ret & 1) /* Autoexposure */ |
262 | ret = reg_write(client, MT9V022_MAX_TOTAL_SHUTTER_WIDTH, | 286 | ret = reg_write(client, MT9V022_MAX_TOTAL_SHUTTER_WIDTH, |
263 | rect->height + icd->y_skip_top + 43); | 287 | rect.height + icd->y_skip_top + 43); |
264 | else | 288 | else |
265 | ret = reg_write(client, MT9V022_TOTAL_SHUTTER_WIDTH, | 289 | ret = reg_write(client, MT9V022_TOTAL_SHUTTER_WIDTH, |
266 | rect->height + icd->y_skip_top + 43); | 290 | rect.height + icd->y_skip_top + 43); |
267 | } | 291 | } |
268 | /* Setup frame format: defaults apart from width and height */ | 292 | /* Setup frame format: defaults apart from width and height */ |
269 | if (!ret) | 293 | if (!ret) |
270 | ret = reg_write(client, MT9V022_COLUMN_START, rect->left); | 294 | ret = reg_write(client, MT9V022_COLUMN_START, rect.left); |
271 | if (!ret) | 295 | if (!ret) |
272 | ret = reg_write(client, MT9V022_ROW_START, rect->top); | 296 | ret = reg_write(client, MT9V022_ROW_START, rect.top); |
273 | if (!ret) | 297 | if (!ret) |
274 | /* Default 94, Phytec driver says: | 298 | /* Default 94, Phytec driver says: |
275 | * "width + horizontal blank >= 660" */ | 299 | * "width + horizontal blank >= 660" */ |
276 | ret = reg_write(client, MT9V022_HORIZONTAL_BLANKING, | 300 | ret = reg_write(client, MT9V022_HORIZONTAL_BLANKING, |
277 | rect->width > 660 - 43 ? 43 : | 301 | rect.width > 660 - 43 ? 43 : |
278 | 660 - rect->width); | 302 | 660 - rect.width); |
279 | if (!ret) | 303 | if (!ret) |
280 | ret = reg_write(client, MT9V022_VERTICAL_BLANKING, 45); | 304 | ret = reg_write(client, MT9V022_VERTICAL_BLANKING, 45); |
281 | if (!ret) | 305 | if (!ret) |
282 | ret = reg_write(client, MT9V022_WINDOW_WIDTH, rect->width); | 306 | ret = reg_write(client, MT9V022_WINDOW_WIDTH, rect.width); |
283 | if (!ret) | 307 | if (!ret) |
284 | ret = reg_write(client, MT9V022_WINDOW_HEIGHT, | 308 | ret = reg_write(client, MT9V022_WINDOW_HEIGHT, |
285 | rect->height + icd->y_skip_top); | 309 | rect.height + icd->y_skip_top); |
286 | 310 | ||
287 | if (ret < 0) | 311 | if (ret < 0) |
288 | return ret; | 312 | return ret; |
289 | 313 | ||
290 | dev_dbg(&client->dev, "Frame %ux%u pixel\n", rect->width, rect->height); | 314 | dev_dbg(&client->dev, "Frame %ux%u pixel\n", rect.width, rect.height); |
315 | |||
316 | mt9v022->rect = rect; | ||
317 | |||
318 | return 0; | ||
319 | } | ||
320 | |||
321 | static int mt9v022_g_crop(struct v4l2_subdev *sd, struct v4l2_crop *a) | ||
322 | { | ||
323 | struct i2c_client *client = sd->priv; | ||
324 | struct mt9v022 *mt9v022 = to_mt9v022(client); | ||
325 | |||
326 | a->c = mt9v022->rect; | ||
327 | a->type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | ||
328 | |||
329 | return 0; | ||
330 | } | ||
331 | |||
332 | static int mt9v022_cropcap(struct v4l2_subdev *sd, struct v4l2_cropcap *a) | ||
333 | { | ||
334 | a->bounds.left = MT9V022_COLUMN_SKIP; | ||
335 | a->bounds.top = MT9V022_ROW_SKIP; | ||
336 | a->bounds.width = MT9V022_MAX_WIDTH; | ||
337 | a->bounds.height = MT9V022_MAX_HEIGHT; | ||
338 | a->defrect = a->bounds; | ||
339 | a->type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | ||
340 | a->pixelaspect.numerator = 1; | ||
341 | a->pixelaspect.denominator = 1; | ||
342 | |||
343 | return 0; | ||
344 | } | ||
345 | |||
346 | static int mt9v022_g_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | ||
347 | { | ||
348 | struct i2c_client *client = sd->priv; | ||
349 | struct mt9v022 *mt9v022 = to_mt9v022(client); | ||
350 | struct v4l2_pix_format *pix = &f->fmt.pix; | ||
351 | |||
352 | pix->width = mt9v022->rect.width; | ||
353 | pix->height = mt9v022->rect.height; | ||
354 | pix->pixelformat = mt9v022->fourcc; | ||
355 | pix->field = V4L2_FIELD_NONE; | ||
356 | pix->colorspace = V4L2_COLORSPACE_SRGB; | ||
291 | 357 | ||
292 | return 0; | 358 | return 0; |
293 | } | 359 | } |
@@ -296,16 +362,16 @@ static int mt9v022_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | |||
296 | { | 362 | { |
297 | struct i2c_client *client = sd->priv; | 363 | struct i2c_client *client = sd->priv; |
298 | struct mt9v022 *mt9v022 = to_mt9v022(client); | 364 | struct mt9v022 *mt9v022 = to_mt9v022(client); |
299 | struct soc_camera_device *icd = client->dev.platform_data; | ||
300 | struct v4l2_pix_format *pix = &f->fmt.pix; | 365 | struct v4l2_pix_format *pix = &f->fmt.pix; |
301 | struct v4l2_crop a = { | 366 | struct v4l2_crop a = { |
302 | .c = { | 367 | .c = { |
303 | .left = icd->rect_current.left, | 368 | .left = mt9v022->rect.left, |
304 | .top = icd->rect_current.top, | 369 | .top = mt9v022->rect.top, |
305 | .width = pix->width, | 370 | .width = pix->width, |
306 | .height = pix->height, | 371 | .height = pix->height, |
307 | }, | 372 | }, |
308 | }; | 373 | }; |
374 | int ret; | ||
309 | 375 | ||
310 | /* The caller provides a supported format, as verified per call to | 376 | /* The caller provides a supported format, as verified per call to |
311 | * icd->try_fmt(), datawidth is from our supported format list */ | 377 | * icd->try_fmt(), datawidth is from our supported format list */ |
@@ -328,7 +394,14 @@ static int mt9v022_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | |||
328 | } | 394 | } |
329 | 395 | ||
330 | /* No support for scaling on this camera, just crop. */ | 396 | /* No support for scaling on this camera, just crop. */ |
331 | return mt9v022_s_crop(sd, &a); | 397 | ret = mt9v022_s_crop(sd, &a); |
398 | if (!ret) { | ||
399 | pix->width = mt9v022->rect.width; | ||
400 | pix->height = mt9v022->rect.height; | ||
401 | mt9v022->fourcc = pix->pixelformat; | ||
402 | } | ||
403 | |||
404 | return ret; | ||
332 | } | 405 | } |
333 | 406 | ||
334 | static int mt9v022_try_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | 407 | static int mt9v022_try_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) |
@@ -336,10 +409,13 @@ static int mt9v022_try_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | |||
336 | struct i2c_client *client = sd->priv; | 409 | struct i2c_client *client = sd->priv; |
337 | struct soc_camera_device *icd = client->dev.platform_data; | 410 | struct soc_camera_device *icd = client->dev.platform_data; |
338 | struct v4l2_pix_format *pix = &f->fmt.pix; | 411 | struct v4l2_pix_format *pix = &f->fmt.pix; |
412 | int align = pix->pixelformat == V4L2_PIX_FMT_SBGGR8 || | ||
413 | pix->pixelformat == V4L2_PIX_FMT_SBGGR16; | ||
339 | 414 | ||
340 | v4l_bound_align_image(&pix->width, 48, 752, 2 /* ? */, | 415 | v4l_bound_align_image(&pix->width, MT9V022_MIN_WIDTH, |
341 | &pix->height, 32 + icd->y_skip_top, | 416 | MT9V022_MAX_WIDTH, align, |
342 | 480 + icd->y_skip_top, 0, 0); | 417 | &pix->height, MT9V022_MIN_HEIGHT + icd->y_skip_top, |
418 | MT9V022_MAX_HEIGHT + icd->y_skip_top, align, 0); | ||
343 | 419 | ||
344 | return 0; | 420 | return 0; |
345 | } | 421 | } |
@@ -669,6 +745,8 @@ static int mt9v022_video_probe(struct soc_camera_device *icd, | |||
669 | if (flags & SOCAM_DATAWIDTH_8) | 745 | if (flags & SOCAM_DATAWIDTH_8) |
670 | icd->num_formats++; | 746 | icd->num_formats++; |
671 | 747 | ||
748 | mt9v022->fourcc = icd->formats->fourcc; | ||
749 | |||
672 | dev_info(&client->dev, "Detected a MT9V022 chip ID %x, %s sensor\n", | 750 | dev_info(&client->dev, "Detected a MT9V022 chip ID %x, %s sensor\n", |
673 | data, mt9v022->model == V4L2_IDENT_MT9V022IX7ATM ? | 751 | data, mt9v022->model == V4L2_IDENT_MT9V022IX7ATM ? |
674 | "monochrome" : "colour"); | 752 | "monochrome" : "colour"); |
@@ -679,10 +757,9 @@ ei2c: | |||
679 | 757 | ||
680 | static void mt9v022_video_remove(struct soc_camera_device *icd) | 758 | static void mt9v022_video_remove(struct soc_camera_device *icd) |
681 | { | 759 | { |
682 | struct i2c_client *client = to_i2c_client(to_soc_camera_control(icd)); | ||
683 | struct soc_camera_link *icl = to_soc_camera_link(icd); | 760 | struct soc_camera_link *icl = to_soc_camera_link(icd); |
684 | 761 | ||
685 | dev_dbg(&client->dev, "Video %x removed: %p, %p\n", client->addr, | 762 | dev_dbg(&icd->dev, "Video removed: %p, %p\n", |
686 | icd->dev.parent, icd->vdev); | 763 | icd->dev.parent, icd->vdev); |
687 | if (icl->free_bus) | 764 | if (icl->free_bus) |
688 | icl->free_bus(icl); | 765 | icl->free_bus(icl); |
@@ -701,8 +778,11 @@ static struct v4l2_subdev_core_ops mt9v022_subdev_core_ops = { | |||
701 | static struct v4l2_subdev_video_ops mt9v022_subdev_video_ops = { | 778 | static struct v4l2_subdev_video_ops mt9v022_subdev_video_ops = { |
702 | .s_stream = mt9v022_s_stream, | 779 | .s_stream = mt9v022_s_stream, |
703 | .s_fmt = mt9v022_s_fmt, | 780 | .s_fmt = mt9v022_s_fmt, |
781 | .g_fmt = mt9v022_g_fmt, | ||
704 | .try_fmt = mt9v022_try_fmt, | 782 | .try_fmt = mt9v022_try_fmt, |
705 | .s_crop = mt9v022_s_crop, | 783 | .s_crop = mt9v022_s_crop, |
784 | .g_crop = mt9v022_g_crop, | ||
785 | .cropcap = mt9v022_cropcap, | ||
706 | }; | 786 | }; |
707 | 787 | ||
708 | static struct v4l2_subdev_ops mt9v022_subdev_ops = { | 788 | static struct v4l2_subdev_ops mt9v022_subdev_ops = { |
@@ -745,16 +825,13 @@ static int mt9v022_probe(struct i2c_client *client, | |||
745 | mt9v022->chip_control = MT9V022_CHIP_CONTROL_DEFAULT; | 825 | mt9v022->chip_control = MT9V022_CHIP_CONTROL_DEFAULT; |
746 | 826 | ||
747 | icd->ops = &mt9v022_ops; | 827 | icd->ops = &mt9v022_ops; |
748 | icd->rect_max.left = 1; | ||
749 | icd->rect_max.top = 4; | ||
750 | icd->rect_max.width = 752; | ||
751 | icd->rect_max.height = 480; | ||
752 | icd->rect_current.left = 1; | ||
753 | icd->rect_current.top = 4; | ||
754 | icd->width_min = 48; | ||
755 | icd->height_min = 32; | ||
756 | icd->y_skip_top = 1; | 828 | icd->y_skip_top = 1; |
757 | 829 | ||
830 | mt9v022->rect.left = MT9V022_COLUMN_SKIP; | ||
831 | mt9v022->rect.top = MT9V022_ROW_SKIP; | ||
832 | mt9v022->rect.width = MT9V022_MAX_WIDTH; | ||
833 | mt9v022->rect.height = MT9V022_MAX_HEIGHT; | ||
834 | |||
758 | ret = mt9v022_video_probe(icd, client); | 835 | ret = mt9v022_video_probe(icd, client); |
759 | if (ret) { | 836 | if (ret) { |
760 | icd->ops = NULL; | 837 | icd->ops = NULL; |
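
The mt9v022 changes above follow the new soc-camera geometry model: the sensor driver keeps its active window in its own mt9v022->rect instead of the rect_current/rect_max fields of struct soc_camera_device, and answers the new .g_fmt(), .g_crop() and .cropcap() subdev calls from that cached rectangle. Since the chip cannot scale, .s_fmt() simply crops at the current offset and reports back whatever window was actually programmed. A minimal user-space sketch of that bookkeeping (stand-in types and names, not the kernel API):

    #include <stdio.h>

    struct rect { int left, top, width, height; };

    struct sensor_state {
        struct rect rect;       /* active sensor window, owned by the driver */
        unsigned int fourcc;    /* currently selected pixel format */
    };

    /*
     * s_fmt on a non-scaling sensor: crop at the current offset, then report
     * back the window the (here imaginary) hardware actually accepted.
     */
    static int sensor_s_fmt(struct sensor_state *s, int *width, int *height,
                            unsigned int fourcc)
    {
        struct rect want = {
            .left = s->rect.left, .top = s->rect.top,
            .width = *width, .height = *height,
        };

        /* a real driver clamps and aligns 'want' while programming registers */
        s->rect = want;
        s->fourcc = fourcc;

        *width = s->rect.width;
        *height = s->rect.height;
        return 0;
    }

    int main(void)
    {
        struct sensor_state s = { .rect = { 1, 4, 752, 480 } };
        int w = 640, h = 480;

        sensor_s_fmt(&s, &w, &h, 0);
        printf("user window now %dx%d at %d:%d\n", w, h, s.rect.left, s.rect.top);
        return 0;
    }
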
diff --git a/drivers/media/video/mx1_camera.c b/drivers/media/video/mx1_camera.c index 1f1324a1d493..3875483ab9d5 100644 --- a/drivers/media/video/mx1_camera.c +++ b/drivers/media/video/mx1_camera.c | |||
@@ -126,7 +126,7 @@ static int mx1_videobuf_setup(struct videobuf_queue *vq, unsigned int *count, | |||
126 | { | 126 | { |
127 | struct soc_camera_device *icd = vq->priv_data; | 127 | struct soc_camera_device *icd = vq->priv_data; |
128 | 128 | ||
129 | *size = icd->rect_current.width * icd->rect_current.height * | 129 | *size = icd->user_width * icd->user_height * |
130 | ((icd->current_fmt->depth + 7) >> 3); | 130 | ((icd->current_fmt->depth + 7) >> 3); |
131 | 131 | ||
132 | if (!*count) | 132 | if (!*count) |
@@ -178,12 +178,12 @@ static int mx1_videobuf_prepare(struct videobuf_queue *vq, | |||
178 | buf->inwork = 1; | 178 | buf->inwork = 1; |
179 | 179 | ||
180 | if (buf->fmt != icd->current_fmt || | 180 | if (buf->fmt != icd->current_fmt || |
181 | vb->width != icd->rect_current.width || | 181 | vb->width != icd->user_width || |
182 | vb->height != icd->rect_current.height || | 182 | vb->height != icd->user_height || |
183 | vb->field != field) { | 183 | vb->field != field) { |
184 | buf->fmt = icd->current_fmt; | 184 | buf->fmt = icd->current_fmt; |
185 | vb->width = icd->rect_current.width; | 185 | vb->width = icd->user_width; |
186 | vb->height = icd->rect_current.height; | 186 | vb->height = icd->user_height; |
187 | vb->field = field; | 187 | vb->field = field; |
188 | vb->state = VIDEOBUF_NEEDS_INIT; | 188 | vb->state = VIDEOBUF_NEEDS_INIT; |
189 | } | 189 | } |
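
With the rect_current fields gone, the host drivers above and below size their video buffers from icd->user_width and icd->user_height, i.e. the user-visible output window, rounding the per-pixel bit depth up to whole bytes. The computation is the one visible in the hunks (buf_size below is only an illustrative name):

    #include <stdio.h>

    /* Buffer size as the hosts compute it: depth is in bits per pixel. */
    static unsigned long buf_size(unsigned int width, unsigned int height,
                                  unsigned int depth)
    {
        return (unsigned long)width * height * ((depth + 7) >> 3);
    }

    int main(void)
    {
        /* 640x480 at 12 bits/pixel still occupies 2 bytes per pixel. */
        printf("%lu bytes\n", buf_size(640, 480, 12));  /* 614400 */
        return 0;
    }
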
diff --git a/drivers/media/video/mx3_camera.c b/drivers/media/video/mx3_camera.c index d5b51e9900bb..dff2e5e2d8c6 100644 --- a/drivers/media/video/mx3_camera.c +++ b/drivers/media/video/mx3_camera.c | |||
@@ -220,7 +220,7 @@ static int mx3_videobuf_setup(struct videobuf_queue *vq, unsigned int *count, | |||
220 | if (!mx3_cam->idmac_channel[0]) | 220 | if (!mx3_cam->idmac_channel[0]) |
221 | return -EINVAL; | 221 | return -EINVAL; |
222 | 222 | ||
223 | *size = icd->rect_current.width * icd->rect_current.height * bpp; | 223 | *size = icd->user_width * icd->user_height * bpp; |
224 | 224 | ||
225 | if (!*count) | 225 | if (!*count) |
226 | *count = 32; | 226 | *count = 32; |
@@ -241,7 +241,7 @@ static int mx3_videobuf_prepare(struct videobuf_queue *vq, | |||
241 | struct mx3_camera_buffer *buf = | 241 | struct mx3_camera_buffer *buf = |
242 | container_of(vb, struct mx3_camera_buffer, vb); | 242 | container_of(vb, struct mx3_camera_buffer, vb); |
243 | /* current_fmt _must_ always be set */ | 243 | /* current_fmt _must_ always be set */ |
244 | size_t new_size = icd->rect_current.width * icd->rect_current.height * | 244 | size_t new_size = icd->user_width * icd->user_height * |
245 | ((icd->current_fmt->depth + 7) >> 3); | 245 | ((icd->current_fmt->depth + 7) >> 3); |
246 | int ret; | 246 | int ret; |
247 | 247 | ||
@@ -251,12 +251,12 @@ static int mx3_videobuf_prepare(struct videobuf_queue *vq, | |||
251 | */ | 251 | */ |
252 | 252 | ||
253 | if (buf->fmt != icd->current_fmt || | 253 | if (buf->fmt != icd->current_fmt || |
254 | vb->width != icd->rect_current.width || | 254 | vb->width != icd->user_width || |
255 | vb->height != icd->rect_current.height || | 255 | vb->height != icd->user_height || |
256 | vb->field != field) { | 256 | vb->field != field) { |
257 | buf->fmt = icd->current_fmt; | 257 | buf->fmt = icd->current_fmt; |
258 | vb->width = icd->rect_current.width; | 258 | vb->width = icd->user_width; |
259 | vb->height = icd->rect_current.height; | 259 | vb->height = icd->user_height; |
260 | vb->field = field; | 260 | vb->field = field; |
261 | if (vb->state != VIDEOBUF_NEEDS_INIT) | 261 | if (vb->state != VIDEOBUF_NEEDS_INIT) |
262 | free_buffer(vq, buf); | 262 | free_buffer(vq, buf); |
@@ -354,9 +354,9 @@ static void mx3_videobuf_queue(struct videobuf_queue *vq, | |||
354 | 354 | ||
355 | /* This is the configuration of one sg-element */ | 355 | /* This is the configuration of one sg-element */ |
356 | video->out_pixel_fmt = fourcc_to_ipu_pix(data_fmt->fourcc); | 356 | video->out_pixel_fmt = fourcc_to_ipu_pix(data_fmt->fourcc); |
357 | video->out_width = icd->rect_current.width; | 357 | video->out_width = icd->user_width; |
358 | video->out_height = icd->rect_current.height; | 358 | video->out_height = icd->user_height; |
359 | video->out_stride = icd->rect_current.width; | 359 | video->out_stride = icd->user_width; |
360 | 360 | ||
361 | #ifdef DEBUG | 361 | #ifdef DEBUG |
362 | /* helps to see what DMA actually has written */ | 362 | /* helps to see what DMA actually has written */ |
@@ -541,7 +541,7 @@ static bool channel_change_requested(struct soc_camera_device *icd, | |||
541 | 541 | ||
542 | /* Do buffers have to be re-allocated or channel re-configured? */ | 542 | /* Do buffers have to be re-allocated or channel re-configured? */ |
543 | return ichan && rect->width * rect->height > | 543 | return ichan && rect->width * rect->height > |
544 | icd->rect_current.width * icd->rect_current.height; | 544 | icd->user_width * icd->user_height; |
545 | } | 545 | } |
546 | 546 | ||
547 | static int test_platform_param(struct mx3_camera_dev *mx3_cam, | 547 | static int test_platform_param(struct mx3_camera_dev *mx3_cam, |
@@ -589,8 +589,8 @@ static int test_platform_param(struct mx3_camera_dev *mx3_cam, | |||
589 | *flags |= SOCAM_DATAWIDTH_4; | 589 | *flags |= SOCAM_DATAWIDTH_4; |
590 | break; | 590 | break; |
591 | default: | 591 | default: |
592 | dev_info(mx3_cam->soc_host.v4l2_dev.dev, "Unsupported bus width %d\n", | 592 | dev_warn(mx3_cam->soc_host.v4l2_dev.dev, |
593 | buswidth); | 593 | "Unsupported bus width %d\n", buswidth); |
594 | return -EINVAL; | 594 | return -EINVAL; |
595 | } | 595 | } |
596 | 596 | ||
@@ -605,8 +605,7 @@ static int mx3_camera_try_bus_param(struct soc_camera_device *icd, | |||
605 | unsigned long bus_flags, camera_flags; | 605 | unsigned long bus_flags, camera_flags; |
606 | int ret = test_platform_param(mx3_cam, depth, &bus_flags); | 606 | int ret = test_platform_param(mx3_cam, depth, &bus_flags); |
607 | 607 | ||
608 | dev_dbg(icd->dev.parent, "requested bus width %d bit: %d\n", | 608 | dev_dbg(icd->dev.parent, "request bus width %d bit: %d\n", depth, ret); |
609 | depth, ret); | ||
610 | 609 | ||
611 | if (ret < 0) | 610 | if (ret < 0) |
612 | return ret; | 611 | return ret; |
@@ -727,13 +726,13 @@ passthrough: | |||
727 | } | 726 | } |
728 | 727 | ||
729 | static void configure_geometry(struct mx3_camera_dev *mx3_cam, | 728 | static void configure_geometry(struct mx3_camera_dev *mx3_cam, |
730 | struct v4l2_rect *rect) | 729 | unsigned int width, unsigned int height) |
731 | { | 730 | { |
732 | u32 ctrl, width_field, height_field; | 731 | u32 ctrl, width_field, height_field; |
733 | 732 | ||
734 | /* Setup frame size - this cannot be changed on-the-fly... */ | 733 | /* Setup frame size - this cannot be changed on-the-fly... */ |
735 | width_field = rect->width - 1; | 734 | width_field = width - 1; |
736 | height_field = rect->height - 1; | 735 | height_field = height - 1; |
737 | csi_reg_write(mx3_cam, width_field | (height_field << 16), CSI_SENS_FRM_SIZE); | 736 | csi_reg_write(mx3_cam, width_field | (height_field << 16), CSI_SENS_FRM_SIZE); |
738 | 737 | ||
739 | csi_reg_write(mx3_cam, width_field << 16, CSI_FLASH_STROBE_1); | 738 | csi_reg_write(mx3_cam, width_field << 16, CSI_FLASH_STROBE_1); |
@@ -745,11 +744,6 @@ static void configure_geometry(struct mx3_camera_dev *mx3_cam, | |||
745 | ctrl = csi_reg_read(mx3_cam, CSI_OUT_FRM_CTRL) & 0xffff0000; | 744 | ctrl = csi_reg_read(mx3_cam, CSI_OUT_FRM_CTRL) & 0xffff0000; |
746 | /* Sensor does the cropping */ | 745 | /* Sensor does the cropping */ |
747 | csi_reg_write(mx3_cam, ctrl | 0 | (0 << 8), CSI_OUT_FRM_CTRL); | 746 | csi_reg_write(mx3_cam, ctrl | 0 | (0 << 8), CSI_OUT_FRM_CTRL); |
748 | |||
749 | /* | ||
750 | * No need to free resources here if we fail, we'll see if we need to | ||
751 | * do this next time we are called | ||
752 | */ | ||
753 | } | 747 | } |
754 | 748 | ||
755 | static int acquire_dma_channel(struct mx3_camera_dev *mx3_cam) | 749 | static int acquire_dma_channel(struct mx3_camera_dev *mx3_cam) |
@@ -786,6 +780,22 @@ static int acquire_dma_channel(struct mx3_camera_dev *mx3_cam) | |||
786 | return 0; | 780 | return 0; |
787 | } | 781 | } |
788 | 782 | ||
783 | /* | ||
784 | * FIXME: learn to use stride != width, then we can keep stride properly aligned | ||
785 | * and support arbitrary (even) widths. | ||
786 | */ | ||
787 | static inline void stride_align(__s32 *width) | ||
788 | { | ||
789 | if (((*width + 7) & ~7) < 4096) | ||
790 | *width = (*width + 7) & ~7; | ||
791 | else | ||
792 | *width = *width & ~7; | ||
793 | } | ||
794 | |||
795 | /* | ||
796 | * As long as we don't implement host-side cropping and scaling, we can use | ||
797 | * default g_crop and cropcap from soc_camera.c | ||
798 | */ | ||
789 | static int mx3_camera_set_crop(struct soc_camera_device *icd, | 799 | static int mx3_camera_set_crop(struct soc_camera_device *icd, |
790 | struct v4l2_crop *a) | 800 | struct v4l2_crop *a) |
791 | { | 801 | { |
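
The new stride_align() helper works around the limitation noted in the FIXME above, namely that this driver still programs the IDMAC with stride == width: a requested width is rounded up to a multiple of 8 as long as the rounded value stays below 4096, and rounded down otherwise. The arithmetic can be checked in isolation:

    #include <stdio.h>

    /* Same arithmetic as the driver's stride_align(), using plain int. */
    static void stride_align(int *width)
    {
        if (((*width + 7) & ~7) < 4096)
            *width = (*width + 7) & ~7;
        else
            *width = *width & ~7;
    }

    int main(void)
    {
        int widths[] = { 640, 641, 2000, 4093 };
        unsigned int i;

        for (i = 0; i < sizeof(widths) / sizeof(widths[0]); i++) {
            int w = widths[i];
            stride_align(&w);
            printf("%4d -> %4d\n", widths[i], w);  /* 641 -> 648, 4093 -> 4088 */
        }
        return 0;
    }
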
@@ -793,20 +803,51 @@ static int mx3_camera_set_crop(struct soc_camera_device *icd, | |||
793 | struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); | 803 | struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); |
794 | struct mx3_camera_dev *mx3_cam = ici->priv; | 804 | struct mx3_camera_dev *mx3_cam = ici->priv; |
795 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); | 805 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); |
806 | struct v4l2_format f = {.type = V4L2_BUF_TYPE_VIDEO_CAPTURE}; | ||
807 | struct v4l2_pix_format *pix = &f.fmt.pix; | ||
808 | int ret; | ||
796 | 809 | ||
797 | /* | 810 | soc_camera_limit_side(&rect->left, &rect->width, 0, 2, 4096); |
798 | * We now know pixel formats and can decide upon DMA-channel(s) | 811 | soc_camera_limit_side(&rect->top, &rect->height, 0, 2, 4096); |
799 | * So far only direct camera-to-memory is supported | 812 | |
800 | */ | 813 | ret = v4l2_subdev_call(sd, video, s_crop, a); |
801 | if (channel_change_requested(icd, rect)) { | 814 | if (ret < 0) |
802 | int ret = acquire_dma_channel(mx3_cam); | 815 | return ret; |
816 | |||
817 | /* The capture device might have changed its output */ | ||
818 | ret = v4l2_subdev_call(sd, video, g_fmt, &f); | ||
819 | if (ret < 0) | ||
820 | return ret; | ||
821 | |||
822 | if (pix->width & 7) { | ||
823 | /* Ouch! We can only handle 8-byte aligned width... */ | ||
824 | stride_align(&pix->width); | ||
825 | ret = v4l2_subdev_call(sd, video, s_fmt, &f); | ||
803 | if (ret < 0) | 826 | if (ret < 0) |
804 | return ret; | 827 | return ret; |
805 | } | 828 | } |
806 | 829 | ||
807 | configure_geometry(mx3_cam, rect); | 830 | if (pix->width != icd->user_width || pix->height != icd->user_height) { |
831 | /* | ||
832 | * We now know pixel formats and can decide upon DMA-channel(s) | ||
833 | * So far only direct camera-to-memory is supported | ||
834 | */ | ||
835 | if (channel_change_requested(icd, rect)) { | ||
836 | int ret = acquire_dma_channel(mx3_cam); | ||
837 | if (ret < 0) | ||
838 | return ret; | ||
839 | } | ||
808 | 840 | ||
809 | return v4l2_subdev_call(sd, video, s_crop, a); | 841 | configure_geometry(mx3_cam, pix->width, pix->height); |
842 | } | ||
843 | |||
844 | dev_dbg(icd->dev.parent, "Sensor cropped %dx%d\n", | ||
845 | pix->width, pix->height); | ||
846 | |||
847 | icd->user_width = pix->width; | ||
848 | icd->user_height = pix->height; | ||
849 | |||
850 | return ret; | ||
810 | } | 851 | } |
811 | 852 | ||
812 | static int mx3_camera_set_fmt(struct soc_camera_device *icd, | 853 | static int mx3_camera_set_fmt(struct soc_camera_device *icd, |
@@ -817,12 +858,6 @@ static int mx3_camera_set_fmt(struct soc_camera_device *icd, | |||
817 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); | 858 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); |
818 | const struct soc_camera_format_xlate *xlate; | 859 | const struct soc_camera_format_xlate *xlate; |
819 | struct v4l2_pix_format *pix = &f->fmt.pix; | 860 | struct v4l2_pix_format *pix = &f->fmt.pix; |
820 | struct v4l2_rect rect = { | ||
821 | .left = icd->rect_current.left, | ||
822 | .top = icd->rect_current.top, | ||
823 | .width = pix->width, | ||
824 | .height = pix->height, | ||
825 | }; | ||
826 | int ret; | 861 | int ret; |
827 | 862 | ||
828 | xlate = soc_camera_xlate_by_fourcc(icd, pix->pixelformat); | 863 | xlate = soc_camera_xlate_by_fourcc(icd, pix->pixelformat); |
@@ -832,6 +867,9 @@ static int mx3_camera_set_fmt(struct soc_camera_device *icd, | |||
832 | return -EINVAL; | 867 | return -EINVAL; |
833 | } | 868 | } |
834 | 869 | ||
870 | stride_align(&pix->width); | ||
871 | dev_dbg(icd->dev.parent, "Set format %dx%d\n", pix->width, pix->height); | ||
872 | |||
835 | ret = acquire_dma_channel(mx3_cam); | 873 | ret = acquire_dma_channel(mx3_cam); |
836 | if (ret < 0) | 874 | if (ret < 0) |
837 | return ret; | 875 | return ret; |
@@ -842,7 +880,7 @@ static int mx3_camera_set_fmt(struct soc_camera_device *icd, | |||
842 | * mxc_v4l2_s_fmt() | 880 | * mxc_v4l2_s_fmt() |
843 | */ | 881 | */ |
844 | 882 | ||
845 | configure_geometry(mx3_cam, &rect); | 883 | configure_geometry(mx3_cam, pix->width, pix->height); |
846 | 884 | ||
847 | ret = v4l2_subdev_call(sd, video, s_fmt, f); | 885 | ret = v4l2_subdev_call(sd, video, s_fmt, f); |
848 | if (!ret) { | 886 | if (!ret) { |
@@ -850,6 +888,8 @@ static int mx3_camera_set_fmt(struct soc_camera_device *icd, | |||
850 | icd->current_fmt = xlate->host_fmt; | 888 | icd->current_fmt = xlate->host_fmt; |
851 | } | 889 | } |
852 | 890 | ||
891 | dev_dbg(icd->dev.parent, "Sensor set %dx%d\n", pix->width, pix->height); | ||
892 | |||
853 | return ret; | 893 | return ret; |
854 | } | 894 | } |
855 | 895 | ||
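
mx3_camera_set_crop() no longer assumes the sensor honoured the crop exactly: it clamps the request, forwards it with s_crop, re-reads the resulting geometry with g_fmt, issues an extra s_fmt if the returned width is not 8-aligned, and only then reprograms the CSI and updates icd->user_width/user_height. A toy model of that "apply, then re-query and adapt" sequence (stub client functions, not the real v4l2-subdev calls):

    #include <stdio.h>

    /* Stub standing in for the sensor subdev. */
    struct client { int width, height; };

    static void client_s_crop(struct client *c, int w, int h)
    {
        /* pretend the sensor only supports widths that are multiples of 12 */
        c->width = w - (w % 12);
        c->height = h;
    }

    static void client_g_fmt(const struct client *c, int *w, int *h)
    {
        *w = c->width;
        *h = c->height;
    }

    int main(void)
    {
        struct client cam = { 0, 0 };
        int want_w = 646, want_h = 482, got_w, got_h;

        client_s_crop(&cam, want_w, want_h);   /* 1. forward the crop        */
        client_g_fmt(&cam, &got_w, &got_h);    /* 2. re-read real geometry   */
        /* 3. the host would now align got_w and reprogram its own geometry */
        printf("requested %dx%d, client delivered %dx%d\n",
               want_w, want_h, got_w, got_h);
        return 0;
    }
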
diff --git a/drivers/media/video/ov772x.c b/drivers/media/video/ov772x.c index bbf5331a2eae..776a91dcfbe6 100644 --- a/drivers/media/video/ov772x.c +++ b/drivers/media/video/ov772x.c | |||
@@ -382,11 +382,10 @@ struct regval_list { | |||
382 | }; | 382 | }; |
383 | 383 | ||
384 | struct ov772x_color_format { | 384 | struct ov772x_color_format { |
385 | char *name; | 385 | const struct soc_camera_data_format *format; |
386 | __u32 fourcc; | 386 | u8 dsp3; |
387 | u8 dsp3; | 387 | u8 com3; |
388 | u8 com3; | 388 | u8 com7; |
389 | u8 com7; | ||
390 | }; | 389 | }; |
391 | 390 | ||
392 | struct ov772x_win_size { | 391 | struct ov772x_win_size { |
@@ -481,43 +480,43 @@ static const struct soc_camera_data_format ov772x_fmt_lists[] = { | |||
481 | */ | 480 | */ |
482 | static const struct ov772x_color_format ov772x_cfmts[] = { | 481 | static const struct ov772x_color_format ov772x_cfmts[] = { |
483 | { | 482 | { |
484 | SETFOURCC(YUYV), | 483 | .format = &ov772x_fmt_lists[0], |
485 | .dsp3 = 0x0, | 484 | .dsp3 = 0x0, |
486 | .com3 = SWAP_YUV, | 485 | .com3 = SWAP_YUV, |
487 | .com7 = OFMT_YUV, | 486 | .com7 = OFMT_YUV, |
488 | }, | 487 | }, |
489 | { | 488 | { |
490 | SETFOURCC(YVYU), | 489 | .format = &ov772x_fmt_lists[1], |
491 | .dsp3 = UV_ON, | 490 | .dsp3 = UV_ON, |
492 | .com3 = SWAP_YUV, | 491 | .com3 = SWAP_YUV, |
493 | .com7 = OFMT_YUV, | 492 | .com7 = OFMT_YUV, |
494 | }, | 493 | }, |
495 | { | 494 | { |
496 | SETFOURCC(UYVY), | 495 | .format = &ov772x_fmt_lists[2], |
497 | .dsp3 = 0x0, | 496 | .dsp3 = 0x0, |
498 | .com3 = 0x0, | 497 | .com3 = 0x0, |
499 | .com7 = OFMT_YUV, | 498 | .com7 = OFMT_YUV, |
500 | }, | 499 | }, |
501 | { | 500 | { |
502 | SETFOURCC(RGB555), | 501 | .format = &ov772x_fmt_lists[3], |
503 | .dsp3 = 0x0, | 502 | .dsp3 = 0x0, |
504 | .com3 = SWAP_RGB, | 503 | .com3 = SWAP_RGB, |
505 | .com7 = FMT_RGB555 | OFMT_RGB, | 504 | .com7 = FMT_RGB555 | OFMT_RGB, |
506 | }, | 505 | }, |
507 | { | 506 | { |
508 | SETFOURCC(RGB555X), | 507 | .format = &ov772x_fmt_lists[4], |
509 | .dsp3 = 0x0, | 508 | .dsp3 = 0x0, |
510 | .com3 = 0x0, | 509 | .com3 = 0x0, |
511 | .com7 = FMT_RGB555 | OFMT_RGB, | 510 | .com7 = FMT_RGB555 | OFMT_RGB, |
512 | }, | 511 | }, |
513 | { | 512 | { |
514 | SETFOURCC(RGB565), | 513 | .format = &ov772x_fmt_lists[5], |
515 | .dsp3 = 0x0, | 514 | .dsp3 = 0x0, |
516 | .com3 = SWAP_RGB, | 515 | .com3 = SWAP_RGB, |
517 | .com7 = FMT_RGB565 | OFMT_RGB, | 516 | .com7 = FMT_RGB565 | OFMT_RGB, |
518 | }, | 517 | }, |
519 | { | 518 | { |
520 | SETFOURCC(RGB565X), | 519 | .format = &ov772x_fmt_lists[6], |
521 | .dsp3 = 0x0, | 520 | .dsp3 = 0x0, |
522 | .com3 = 0x0, | 521 | .com3 = 0x0, |
523 | .com7 = FMT_RGB565 | OFMT_RGB, | 522 | .com7 = FMT_RGB565 | OFMT_RGB, |
@@ -648,8 +647,8 @@ static int ov772x_s_stream(struct v4l2_subdev *sd, int enable) | |||
648 | 647 | ||
649 | ov772x_mask_set(client, COM2, SOFT_SLEEP_MODE, 0); | 648 | ov772x_mask_set(client, COM2, SOFT_SLEEP_MODE, 0); |
650 | 649 | ||
651 | dev_dbg(&client->dev, | 650 | dev_dbg(&client->dev, "format %s, win %s\n", |
652 | "format %s, win %s\n", priv->fmt->name, priv->win->name); | 651 | priv->fmt->format->name, priv->win->name); |
653 | 652 | ||
654 | return 0; | 653 | return 0; |
655 | } | 654 | } |
@@ -818,7 +817,7 @@ static int ov772x_set_params(struct i2c_client *client, | |||
818 | */ | 817 | */ |
819 | priv->fmt = NULL; | 818 | priv->fmt = NULL; |
820 | for (i = 0; i < ARRAY_SIZE(ov772x_cfmts); i++) { | 819 | for (i = 0; i < ARRAY_SIZE(ov772x_cfmts); i++) { |
821 | if (pixfmt == ov772x_cfmts[i].fourcc) { | 820 | if (pixfmt == ov772x_cfmts[i].format->fourcc) { |
822 | priv->fmt = ov772x_cfmts + i; | 821 | priv->fmt = ov772x_cfmts + i; |
823 | break; | 822 | break; |
824 | } | 823 | } |
@@ -955,6 +954,56 @@ ov772x_set_fmt_error: | |||
955 | return ret; | 954 | return ret; |
956 | } | 955 | } |
957 | 956 | ||
957 | static int ov772x_g_crop(struct v4l2_subdev *sd, struct v4l2_crop *a) | ||
958 | { | ||
959 | a->c.left = 0; | ||
960 | a->c.top = 0; | ||
961 | a->c.width = VGA_WIDTH; | ||
962 | a->c.height = VGA_HEIGHT; | ||
963 | a->type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | ||
964 | |||
965 | return 0; | ||
966 | } | ||
967 | |||
968 | static int ov772x_cropcap(struct v4l2_subdev *sd, struct v4l2_cropcap *a) | ||
969 | { | ||
970 | a->bounds.left = 0; | ||
971 | a->bounds.top = 0; | ||
972 | a->bounds.width = VGA_WIDTH; | ||
973 | a->bounds.height = VGA_HEIGHT; | ||
974 | a->defrect = a->bounds; | ||
975 | a->type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | ||
976 | a->pixelaspect.numerator = 1; | ||
977 | a->pixelaspect.denominator = 1; | ||
978 | |||
979 | return 0; | ||
980 | } | ||
981 | |||
982 | static int ov772x_g_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | ||
983 | { | ||
984 | struct i2c_client *client = sd->priv; | ||
985 | struct ov772x_priv *priv = to_ov772x(client); | ||
986 | struct v4l2_pix_format *pix = &f->fmt.pix; | ||
987 | |||
988 | if (!priv->win || !priv->fmt) { | ||
989 | u32 width = VGA_WIDTH, height = VGA_HEIGHT; | ||
990 | int ret = ov772x_set_params(client, &width, &height, | ||
991 | V4L2_PIX_FMT_YUYV); | ||
992 | if (ret < 0) | ||
993 | return ret; | ||
994 | } | ||
995 | |||
996 | f->type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | ||
997 | |||
998 | pix->width = priv->win->width; | ||
999 | pix->height = priv->win->height; | ||
1000 | pix->pixelformat = priv->fmt->format->fourcc; | ||
1001 | pix->colorspace = priv->fmt->format->colorspace; | ||
1002 | pix->field = V4L2_FIELD_NONE; | ||
1003 | |||
1004 | return 0; | ||
1005 | } | ||
1006 | |||
958 | static int ov772x_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | 1007 | static int ov772x_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) |
959 | { | 1008 | { |
960 | struct i2c_client *client = sd->priv; | 1009 | struct i2c_client *client = sd->priv; |
@@ -1060,8 +1109,11 @@ static struct v4l2_subdev_core_ops ov772x_subdev_core_ops = { | |||
1060 | 1109 | ||
1061 | static struct v4l2_subdev_video_ops ov772x_subdev_video_ops = { | 1110 | static struct v4l2_subdev_video_ops ov772x_subdev_video_ops = { |
1062 | .s_stream = ov772x_s_stream, | 1111 | .s_stream = ov772x_s_stream, |
1112 | .g_fmt = ov772x_g_fmt, | ||
1063 | .s_fmt = ov772x_s_fmt, | 1113 | .s_fmt = ov772x_s_fmt, |
1064 | .try_fmt = ov772x_try_fmt, | 1114 | .try_fmt = ov772x_try_fmt, |
1115 | .cropcap = ov772x_cropcap, | ||
1116 | .g_crop = ov772x_g_crop, | ||
1065 | }; | 1117 | }; |
1066 | 1118 | ||
1067 | static struct v4l2_subdev_ops ov772x_subdev_ops = { | 1119 | static struct v4l2_subdev_ops ov772x_subdev_ops = { |
@@ -1110,8 +1162,6 @@ static int ov772x_probe(struct i2c_client *client, | |||
1110 | v4l2_i2c_subdev_init(&priv->subdev, client, &ov772x_subdev_ops); | 1162 | v4l2_i2c_subdev_init(&priv->subdev, client, &ov772x_subdev_ops); |
1111 | 1163 | ||
1112 | icd->ops = &ov772x_ops; | 1164 | icd->ops = &ov772x_ops; |
1113 | icd->rect_max.width = MAX_WIDTH; | ||
1114 | icd->rect_max.height = MAX_HEIGHT; | ||
1115 | 1165 | ||
1116 | ret = ov772x_video_probe(icd, client); | 1166 | ret = ov772x_video_probe(icd, client); |
1117 | if (ret) { | 1167 | if (ret) { |
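
Besides tying each ov772x_color_format to its soc_camera_data_format entry, the ov772x patch adds .g_fmt(), .g_crop() and .cropcap(), so window and crop can always be queried; if nothing has been configured yet, ov772x_g_fmt() first programs a VGA/YUYV default. A toy version of that lazy-default pattern (hypothetical names, not the driver's code):

    #include <stdio.h>

    struct priv { int configured, width, height; };

    static int set_params(struct priv *p, int w, int h)
    {
        p->width = w;
        p->height = h;
        p->configured = 1;
        return 0;
    }

    /* g_fmt must always be able to answer, so fall back to defaults once. */
    static int g_fmt(struct priv *p, int *w, int *h)
    {
        if (!p->configured) {
            int ret = set_params(p, 640, 480);   /* VGA default */
            if (ret < 0)
                return ret;
        }
        *w = p->width;
        *h = p->height;
        return 0;
    }

    int main(void)
    {
        struct priv p = { 0, 0, 0 };
        int w, h;

        g_fmt(&p, &w, &h);
        printf("reported %dx%d before any explicit S_FMT\n", w, h);
        return 0;
    }
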
diff --git a/drivers/media/video/pxa_camera.c b/drivers/media/video/pxa_camera.c index 1fd6ef392a54..a19bb76e175d 100644 --- a/drivers/media/video/pxa_camera.c +++ b/drivers/media/video/pxa_camera.c | |||
@@ -225,6 +225,10 @@ struct pxa_camera_dev { | |||
225 | u32 save_cicr[5]; | 225 | u32 save_cicr[5]; |
226 | }; | 226 | }; |
227 | 227 | ||
228 | struct pxa_cam { | ||
229 | unsigned long flags; | ||
230 | }; | ||
231 | |||
228 | static const char *pxa_cam_driver_description = "PXA_Camera"; | 232 | static const char *pxa_cam_driver_description = "PXA_Camera"; |
229 | 233 | ||
230 | static unsigned int vid_limit = 16; /* Video memory limit, in Mb */ | 234 | static unsigned int vid_limit = 16; /* Video memory limit, in Mb */ |
@@ -239,7 +243,7 @@ static int pxa_videobuf_setup(struct videobuf_queue *vq, unsigned int *count, | |||
239 | 243 | ||
240 | dev_dbg(icd->dev.parent, "count=%d, size=%d\n", *count, *size); | 244 | dev_dbg(icd->dev.parent, "count=%d, size=%d\n", *count, *size); |
241 | 245 | ||
242 | *size = roundup(icd->rect_current.width * icd->rect_current.height * | 246 | *size = roundup(icd->user_width * icd->user_height * |
243 | ((icd->current_fmt->depth + 7) >> 3), 8); | 247 | ((icd->current_fmt->depth + 7) >> 3), 8); |
244 | 248 | ||
245 | if (0 == *count) | 249 | if (0 == *count) |
@@ -443,12 +447,12 @@ static int pxa_videobuf_prepare(struct videobuf_queue *vq, | |||
443 | buf->inwork = 1; | 447 | buf->inwork = 1; |
444 | 448 | ||
445 | if (buf->fmt != icd->current_fmt || | 449 | if (buf->fmt != icd->current_fmt || |
446 | vb->width != icd->rect_current.width || | 450 | vb->width != icd->user_width || |
447 | vb->height != icd->rect_current.height || | 451 | vb->height != icd->user_height || |
448 | vb->field != field) { | 452 | vb->field != field) { |
449 | buf->fmt = icd->current_fmt; | 453 | buf->fmt = icd->current_fmt; |
450 | vb->width = icd->rect_current.width; | 454 | vb->width = icd->user_width; |
451 | vb->height = icd->rect_current.height; | 455 | vb->height = icd->user_height; |
452 | vb->field = field; | 456 | vb->field = field; |
453 | vb->state = VIDEOBUF_NEEDS_INIT; | 457 | vb->state = VIDEOBUF_NEEDS_INIT; |
454 | } | 458 | } |
@@ -839,7 +843,7 @@ static u32 mclk_get_divisor(struct platform_device *pdev, | |||
839 | struct pxa_camera_dev *pcdev) | 843 | struct pxa_camera_dev *pcdev) |
840 | { | 844 | { |
841 | unsigned long mclk = pcdev->mclk; | 845 | unsigned long mclk = pcdev->mclk; |
842 | struct device *dev = pcdev->soc_host.v4l2_dev.dev; | 846 | struct device *dev = &pdev->dev; |
843 | u32 div; | 847 | u32 div; |
844 | unsigned long lcdclk; | 848 | unsigned long lcdclk; |
845 | 849 | ||
@@ -1040,57 +1044,17 @@ static int test_platform_param(struct pxa_camera_dev *pcdev, | |||
1040 | return 0; | 1044 | return 0; |
1041 | } | 1045 | } |
1042 | 1046 | ||
1043 | static int pxa_camera_set_bus_param(struct soc_camera_device *icd, __u32 pixfmt) | 1047 | static void pxa_camera_setup_cicr(struct soc_camera_device *icd, |
1048 | unsigned long flags, __u32 pixfmt) | ||
1044 | { | 1049 | { |
1045 | struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); | 1050 | struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); |
1046 | struct pxa_camera_dev *pcdev = ici->priv; | 1051 | struct pxa_camera_dev *pcdev = ici->priv; |
1047 | unsigned long dw, bpp, bus_flags, camera_flags, common_flags; | 1052 | unsigned long dw, bpp; |
1048 | u32 cicr0, cicr1, cicr2, cicr3, cicr4 = 0; | 1053 | u32 cicr0, cicr1, cicr2, cicr3, cicr4 = 0; |
1049 | int ret = test_platform_param(pcdev, icd->buswidth, &bus_flags); | ||
1050 | |||
1051 | if (ret < 0) | ||
1052 | return ret; | ||
1053 | |||
1054 | camera_flags = icd->ops->query_bus_param(icd); | ||
1055 | |||
1056 | common_flags = soc_camera_bus_param_compatible(camera_flags, bus_flags); | ||
1057 | if (!common_flags) | ||
1058 | return -EINVAL; | ||
1059 | |||
1060 | pcdev->channels = 1; | ||
1061 | |||
1062 | /* Make choices, based on platform preferences */ | ||
1063 | if ((common_flags & SOCAM_HSYNC_ACTIVE_HIGH) && | ||
1064 | (common_flags & SOCAM_HSYNC_ACTIVE_LOW)) { | ||
1065 | if (pcdev->platform_flags & PXA_CAMERA_HSP) | ||
1066 | common_flags &= ~SOCAM_HSYNC_ACTIVE_HIGH; | ||
1067 | else | ||
1068 | common_flags &= ~SOCAM_HSYNC_ACTIVE_LOW; | ||
1069 | } | ||
1070 | |||
1071 | if ((common_flags & SOCAM_VSYNC_ACTIVE_HIGH) && | ||
1072 | (common_flags & SOCAM_VSYNC_ACTIVE_LOW)) { | ||
1073 | if (pcdev->platform_flags & PXA_CAMERA_VSP) | ||
1074 | common_flags &= ~SOCAM_VSYNC_ACTIVE_HIGH; | ||
1075 | else | ||
1076 | common_flags &= ~SOCAM_VSYNC_ACTIVE_LOW; | ||
1077 | } | ||
1078 | |||
1079 | if ((common_flags & SOCAM_PCLK_SAMPLE_RISING) && | ||
1080 | (common_flags & SOCAM_PCLK_SAMPLE_FALLING)) { | ||
1081 | if (pcdev->platform_flags & PXA_CAMERA_PCP) | ||
1082 | common_flags &= ~SOCAM_PCLK_SAMPLE_RISING; | ||
1083 | else | ||
1084 | common_flags &= ~SOCAM_PCLK_SAMPLE_FALLING; | ||
1085 | } | ||
1086 | |||
1087 | ret = icd->ops->set_bus_param(icd, common_flags); | ||
1088 | if (ret < 0) | ||
1089 | return ret; | ||
1090 | 1054 | ||
1091 | /* Datawidth is now guaranteed to be equal to one of the three values. | 1055 | /* Datawidth is now guaranteed to be equal to one of the three values. |
1092 | * We fix bit-per-pixel equal to data-width... */ | 1056 | * We fix bit-per-pixel equal to data-width... */ |
1093 | switch (common_flags & SOCAM_DATAWIDTH_MASK) { | 1057 | switch (flags & SOCAM_DATAWIDTH_MASK) { |
1094 | case SOCAM_DATAWIDTH_10: | 1058 | case SOCAM_DATAWIDTH_10: |
1095 | dw = 4; | 1059 | dw = 4; |
1096 | bpp = 0x40; | 1060 | bpp = 0x40; |
@@ -1111,18 +1075,18 @@ static int pxa_camera_set_bus_param(struct soc_camera_device *icd, __u32 pixfmt) | |||
1111 | cicr4 |= CICR4_PCLK_EN; | 1075 | cicr4 |= CICR4_PCLK_EN; |
1112 | if (pcdev->platform_flags & PXA_CAMERA_MCLK_EN) | 1076 | if (pcdev->platform_flags & PXA_CAMERA_MCLK_EN) |
1113 | cicr4 |= CICR4_MCLK_EN; | 1077 | cicr4 |= CICR4_MCLK_EN; |
1114 | if (common_flags & SOCAM_PCLK_SAMPLE_FALLING) | 1078 | if (flags & SOCAM_PCLK_SAMPLE_FALLING) |
1115 | cicr4 |= CICR4_PCP; | 1079 | cicr4 |= CICR4_PCP; |
1116 | if (common_flags & SOCAM_HSYNC_ACTIVE_LOW) | 1080 | if (flags & SOCAM_HSYNC_ACTIVE_LOW) |
1117 | cicr4 |= CICR4_HSP; | 1081 | cicr4 |= CICR4_HSP; |
1118 | if (common_flags & SOCAM_VSYNC_ACTIVE_LOW) | 1082 | if (flags & SOCAM_VSYNC_ACTIVE_LOW) |
1119 | cicr4 |= CICR4_VSP; | 1083 | cicr4 |= CICR4_VSP; |
1120 | 1084 | ||
1121 | cicr0 = __raw_readl(pcdev->base + CICR0); | 1085 | cicr0 = __raw_readl(pcdev->base + CICR0); |
1122 | if (cicr0 & CICR0_ENB) | 1086 | if (cicr0 & CICR0_ENB) |
1123 | __raw_writel(cicr0 & ~CICR0_ENB, pcdev->base + CICR0); | 1087 | __raw_writel(cicr0 & ~CICR0_ENB, pcdev->base + CICR0); |
1124 | 1088 | ||
1125 | cicr1 = CICR1_PPL_VAL(icd->rect_current.width - 1) | bpp | dw; | 1089 | cicr1 = CICR1_PPL_VAL(icd->user_width - 1) | bpp | dw; |
1126 | 1090 | ||
1127 | switch (pixfmt) { | 1091 | switch (pixfmt) { |
1128 | case V4L2_PIX_FMT_YUV422P: | 1092 | case V4L2_PIX_FMT_YUV422P: |
@@ -1151,7 +1115,7 @@ static int pxa_camera_set_bus_param(struct soc_camera_device *icd, __u32 pixfmt) | |||
1151 | } | 1115 | } |
1152 | 1116 | ||
1153 | cicr2 = 0; | 1117 | cicr2 = 0; |
1154 | cicr3 = CICR3_LPF_VAL(icd->rect_current.height - 1) | | 1118 | cicr3 = CICR3_LPF_VAL(icd->user_height - 1) | |
1155 | CICR3_BFW_VAL(min((unsigned short)255, icd->y_skip_top)); | 1119 | CICR3_BFW_VAL(min((unsigned short)255, icd->y_skip_top)); |
1156 | cicr4 |= pcdev->mclk_divisor; | 1120 | cicr4 |= pcdev->mclk_divisor; |
1157 | 1121 | ||
@@ -1165,6 +1129,59 @@ static int pxa_camera_set_bus_param(struct soc_camera_device *icd, __u32 pixfmt) | |||
1165 | CICR0_SIM_MP : (CICR0_SL_CAP_EN | CICR0_SIM_SP)); | 1129 | CICR0_SIM_MP : (CICR0_SL_CAP_EN | CICR0_SIM_SP)); |
1166 | cicr0 |= CICR0_DMAEN | CICR0_IRQ_MASK; | 1130 | cicr0 |= CICR0_DMAEN | CICR0_IRQ_MASK; |
1167 | __raw_writel(cicr0, pcdev->base + CICR0); | 1131 | __raw_writel(cicr0, pcdev->base + CICR0); |
1132 | } | ||
1133 | |||
1134 | static int pxa_camera_set_bus_param(struct soc_camera_device *icd, __u32 pixfmt) | ||
1135 | { | ||
1136 | struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); | ||
1137 | struct pxa_camera_dev *pcdev = ici->priv; | ||
1138 | unsigned long bus_flags, camera_flags, common_flags; | ||
1139 | int ret = test_platform_param(pcdev, icd->buswidth, &bus_flags); | ||
1140 | struct pxa_cam *cam = icd->host_priv; | ||
1141 | |||
1142 | if (ret < 0) | ||
1143 | return ret; | ||
1144 | |||
1145 | camera_flags = icd->ops->query_bus_param(icd); | ||
1146 | |||
1147 | common_flags = soc_camera_bus_param_compatible(camera_flags, bus_flags); | ||
1148 | if (!common_flags) | ||
1149 | return -EINVAL; | ||
1150 | |||
1151 | pcdev->channels = 1; | ||
1152 | |||
1153 | /* Make choices, based on platform preferences */ | ||
1154 | if ((common_flags & SOCAM_HSYNC_ACTIVE_HIGH) && | ||
1155 | (common_flags & SOCAM_HSYNC_ACTIVE_LOW)) { | ||
1156 | if (pcdev->platform_flags & PXA_CAMERA_HSP) | ||
1157 | common_flags &= ~SOCAM_HSYNC_ACTIVE_HIGH; | ||
1158 | else | ||
1159 | common_flags &= ~SOCAM_HSYNC_ACTIVE_LOW; | ||
1160 | } | ||
1161 | |||
1162 | if ((common_flags & SOCAM_VSYNC_ACTIVE_HIGH) && | ||
1163 | (common_flags & SOCAM_VSYNC_ACTIVE_LOW)) { | ||
1164 | if (pcdev->platform_flags & PXA_CAMERA_VSP) | ||
1165 | common_flags &= ~SOCAM_VSYNC_ACTIVE_HIGH; | ||
1166 | else | ||
1167 | common_flags &= ~SOCAM_VSYNC_ACTIVE_LOW; | ||
1168 | } | ||
1169 | |||
1170 | if ((common_flags & SOCAM_PCLK_SAMPLE_RISING) && | ||
1171 | (common_flags & SOCAM_PCLK_SAMPLE_FALLING)) { | ||
1172 | if (pcdev->platform_flags & PXA_CAMERA_PCP) | ||
1173 | common_flags &= ~SOCAM_PCLK_SAMPLE_RISING; | ||
1174 | else | ||
1175 | common_flags &= ~SOCAM_PCLK_SAMPLE_FALLING; | ||
1176 | } | ||
1177 | |||
1178 | cam->flags = common_flags; | ||
1179 | |||
1180 | ret = icd->ops->set_bus_param(icd, common_flags); | ||
1181 | if (ret < 0) | ||
1182 | return ret; | ||
1183 | |||
1184 | pxa_camera_setup_cicr(icd, common_flags, pixfmt); | ||
1168 | 1185 | ||
1169 | return 0; | 1186 | return 0; |
1170 | } | 1187 | } |
@@ -1230,6 +1247,7 @@ static int pxa_camera_get_formats(struct soc_camera_device *icd, int idx, | |||
1230 | { | 1247 | { |
1231 | struct device *dev = icd->dev.parent; | 1248 | struct device *dev = icd->dev.parent; |
1232 | int formats = 0, buswidth, ret; | 1249 | int formats = 0, buswidth, ret; |
1250 | struct pxa_cam *cam; | ||
1233 | 1251 | ||
1234 | buswidth = required_buswidth(icd->formats + idx); | 1252 | buswidth = required_buswidth(icd->formats + idx); |
1235 | 1253 | ||
@@ -1240,6 +1258,16 @@ static int pxa_camera_get_formats(struct soc_camera_device *icd, int idx, | |||
1240 | if (ret < 0) | 1258 | if (ret < 0) |
1241 | return 0; | 1259 | return 0; |
1242 | 1260 | ||
1261 | if (!icd->host_priv) { | ||
1262 | cam = kzalloc(sizeof(*cam), GFP_KERNEL); | ||
1263 | if (!cam) | ||
1264 | return -ENOMEM; | ||
1265 | |||
1266 | icd->host_priv = cam; | ||
1267 | } else { | ||
1268 | cam = icd->host_priv; | ||
1269 | } | ||
1270 | |||
1243 | switch (icd->formats[idx].fourcc) { | 1271 | switch (icd->formats[idx].fourcc) { |
1244 | case V4L2_PIX_FMT_UYVY: | 1272 | case V4L2_PIX_FMT_UYVY: |
1245 | formats++; | 1273 | formats++; |
@@ -1284,6 +1312,19 @@ static int pxa_camera_get_formats(struct soc_camera_device *icd, int idx, | |||
1284 | return formats; | 1312 | return formats; |
1285 | } | 1313 | } |
1286 | 1314 | ||
1315 | static void pxa_camera_put_formats(struct soc_camera_device *icd) | ||
1316 | { | ||
1317 | kfree(icd->host_priv); | ||
1318 | icd->host_priv = NULL; | ||
1319 | } | ||
1320 | |||
1321 | static int pxa_camera_check_frame(struct v4l2_pix_format *pix) | ||
1322 | { | ||
1323 | /* limit to pxa hardware capabilities */ | ||
1324 | return pix->height < 32 || pix->height > 2048 || pix->width < 48 || | ||
1325 | pix->width > 2048 || (pix->width & 0x01); | ||
1326 | } | ||
1327 | |||
1287 | static int pxa_camera_set_crop(struct soc_camera_device *icd, | 1328 | static int pxa_camera_set_crop(struct soc_camera_device *icd, |
1288 | struct v4l2_crop *a) | 1329 | struct v4l2_crop *a) |
1289 | { | 1330 | { |
@@ -1296,6 +1337,9 @@ static int pxa_camera_set_crop(struct soc_camera_device *icd, | |||
1296 | .master_clock = pcdev->mclk, | 1337 | .master_clock = pcdev->mclk, |
1297 | .pixel_clock_max = pcdev->ciclk / 4, | 1338 | .pixel_clock_max = pcdev->ciclk / 4, |
1298 | }; | 1339 | }; |
1340 | struct v4l2_format f; | ||
1341 | struct v4l2_pix_format *pix = &f.fmt.pix, pix_tmp; | ||
1342 | struct pxa_cam *cam = icd->host_priv; | ||
1299 | int ret; | 1343 | int ret; |
1300 | 1344 | ||
1301 | /* If PCLK is used to latch data from the sensor, check sense */ | 1345 | /* If PCLK is used to latch data from the sensor, check sense */ |
@@ -1309,7 +1353,37 @@ static int pxa_camera_set_crop(struct soc_camera_device *icd, | |||
1309 | if (ret < 0) { | 1353 | if (ret < 0) { |
1310 | dev_warn(dev, "Failed to crop to %ux%u@%u:%u\n", | 1354 | dev_warn(dev, "Failed to crop to %ux%u@%u:%u\n", |
1311 | rect->width, rect->height, rect->left, rect->top); | 1355 | rect->width, rect->height, rect->left, rect->top); |
1312 | } else if (sense.flags & SOCAM_SENSE_PCLK_CHANGED) { | 1356 | return ret; |
1357 | } | ||
1358 | |||
1359 | f.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | ||
1360 | |||
1361 | ret = v4l2_subdev_call(sd, video, g_fmt, &f); | ||
1362 | if (ret < 0) | ||
1363 | return ret; | ||
1364 | |||
1365 | pix_tmp = *pix; | ||
1366 | if (pxa_camera_check_frame(pix)) { | ||
1367 | /* | ||
1368 | * Camera cropping produced a frame beyond our capabilities. | ||
1369 | * FIXME: just extract a subframe, that we can process. | ||
1370 | */ | ||
1371 | v4l_bound_align_image(&pix->width, 48, 2048, 1, | ||
1372 | &pix->height, 32, 2048, 0, | ||
1373 | icd->current_fmt->fourcc == V4L2_PIX_FMT_YUV422P ? | ||
1374 | 4 : 0); | ||
1375 | ret = v4l2_subdev_call(sd, video, s_fmt, &f); | ||
1376 | if (ret < 0) | ||
1377 | return ret; | ||
1378 | |||
1379 | if (pxa_camera_check_frame(pix)) { | ||
1380 | dev_warn(icd->dev.parent, | ||
1381 | "Inconsistent state. Use S_FMT to repair\n"); | ||
1382 | return -EINVAL; | ||
1383 | } | ||
1384 | } | ||
1385 | |||
1386 | if (sense.flags & SOCAM_SENSE_PCLK_CHANGED) { | ||
1313 | if (sense.pixel_clock > sense.pixel_clock_max) { | 1387 | if (sense.pixel_clock > sense.pixel_clock_max) { |
1314 | dev_err(dev, | 1388 | dev_err(dev, |
1315 | "pixel clock %lu set by the camera too high!", | 1389 | "pixel clock %lu set by the camera too high!", |
@@ -1319,6 +1393,11 @@ static int pxa_camera_set_crop(struct soc_camera_device *icd, | |||
1319 | recalculate_fifo_timeout(pcdev, sense.pixel_clock); | 1393 | recalculate_fifo_timeout(pcdev, sense.pixel_clock); |
1320 | } | 1394 | } |
1321 | 1395 | ||
1396 | icd->user_width = pix->width; | ||
1397 | icd->user_height = pix->height; | ||
1398 | |||
1399 | pxa_camera_setup_cicr(icd, cam->flags, icd->current_fmt->fourcc); | ||
1400 | |||
1322 | return ret; | 1401 | return ret; |
1323 | } | 1402 | } |
1324 | 1403 | ||
@@ -1359,6 +1438,11 @@ static int pxa_camera_set_fmt(struct soc_camera_device *icd, | |||
1359 | if (ret < 0) { | 1438 | if (ret < 0) { |
1360 | dev_warn(dev, "Failed to configure for format %x\n", | 1439 | dev_warn(dev, "Failed to configure for format %x\n", |
1361 | pix->pixelformat); | 1440 | pix->pixelformat); |
1441 | } else if (pxa_camera_check_frame(pix)) { | ||
1442 | dev_warn(dev, | ||
1443 | "Camera driver produced an unsupported frame %dx%d\n", | ||
1444 | pix->width, pix->height); | ||
1445 | ret = -EINVAL; | ||
1362 | } else if (sense.flags & SOCAM_SENSE_PCLK_CHANGED) { | 1446 | } else if (sense.flags & SOCAM_SENSE_PCLK_CHANGED) { |
1363 | if (sense.pixel_clock > sense.pixel_clock_max) { | 1447 | if (sense.pixel_clock > sense.pixel_clock_max) { |
1364 | dev_err(dev, | 1448 | dev_err(dev, |
@@ -1402,7 +1486,7 @@ static int pxa_camera_try_fmt(struct soc_camera_device *icd, | |||
1402 | */ | 1486 | */ |
1403 | v4l_bound_align_image(&pix->width, 48, 2048, 1, | 1487 | v4l_bound_align_image(&pix->width, 48, 2048, 1, |
1404 | &pix->height, 32, 2048, 0, | 1488 | &pix->height, 32, 2048, 0, |
1405 | xlate->host_fmt->fourcc == V4L2_PIX_FMT_YUV422P ? 4 : 0); | 1489 | pixfmt == V4L2_PIX_FMT_YUV422P ? 4 : 0); |
1406 | 1490 | ||
1407 | pix->bytesperline = pix->width * | 1491 | pix->bytesperline = pix->width * |
1408 | DIV_ROUND_UP(xlate->host_fmt->depth, 8); | 1492 | DIV_ROUND_UP(xlate->host_fmt->depth, 8); |
@@ -1412,7 +1496,7 @@ static int pxa_camera_try_fmt(struct soc_camera_device *icd, | |||
1412 | pix->pixelformat = xlate->cam_fmt->fourcc; | 1496 | pix->pixelformat = xlate->cam_fmt->fourcc; |
1413 | /* limit to sensor capabilities */ | 1497 | /* limit to sensor capabilities */ |
1414 | ret = v4l2_subdev_call(sd, video, try_fmt, f); | 1498 | ret = v4l2_subdev_call(sd, video, try_fmt, f); |
1415 | pix->pixelformat = xlate->host_fmt->fourcc; | 1499 | pix->pixelformat = pixfmt; |
1416 | 1500 | ||
1417 | field = pix->field; | 1501 | field = pix->field; |
1418 | 1502 | ||
@@ -1525,6 +1609,7 @@ static struct soc_camera_host_ops pxa_soc_camera_host_ops = { | |||
1525 | .resume = pxa_camera_resume, | 1609 | .resume = pxa_camera_resume, |
1526 | .set_crop = pxa_camera_set_crop, | 1610 | .set_crop = pxa_camera_set_crop, |
1527 | .get_formats = pxa_camera_get_formats, | 1611 | .get_formats = pxa_camera_get_formats, |
1612 | .put_formats = pxa_camera_put_formats, | ||
1528 | .set_fmt = pxa_camera_set_fmt, | 1613 | .set_fmt = pxa_camera_set_fmt, |
1529 | .try_fmt = pxa_camera_try_fmt, | 1614 | .try_fmt = pxa_camera_try_fmt, |
1530 | .init_videobuf = pxa_camera_init_videobuf, | 1615 | .init_videobuf = pxa_camera_init_videobuf, |
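
On the PXA side, the CICR register setup is split out of pxa_camera_set_bus_param() into pxa_camera_setup_cicr(), and the negotiated bus flags are cached in the new struct pxa_cam so that pxa_camera_set_crop() can reprogram the capture geometry after a crop. pxa_camera_check_frame() centralises the QCI limits (width 48..2048 and even, height 32..2048) that both set_crop and set_fmt verify once the sensor has replied. The predicate itself is easy to sanity-check:

    #include <stdio.h>

    /* Same limits the driver enforces: non-zero means the frame is unusable. */
    static int check_frame(unsigned int width, unsigned int height)
    {
        return height < 32 || height > 2048 ||
               width < 48 || width > 2048 || (width & 0x01);
    }

    int main(void)
    {
        printf("640x480 bad? %d\n", check_frame(640, 480));   /* 0: fine        */
        printf("641x480 bad? %d\n", check_frame(641, 480));   /* 1: odd width   */
        printf("46x2049 bad? %d\n", check_frame(46, 2049));   /* 1: both limits */
        return 0;
    }
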
diff --git a/drivers/media/video/sh_mobile_ceu_camera.c b/drivers/media/video/sh_mobile_ceu_camera.c index 3457bababd36..5ab7c5aefd62 100644 --- a/drivers/media/video/sh_mobile_ceu_camera.c +++ b/drivers/media/video/sh_mobile_ceu_camera.c | |||
@@ -74,6 +74,13 @@ | |||
74 | #define CDBYR2 0x98 /* Capture data bottom-field address Y register 2 */ | 74 | #define CDBYR2 0x98 /* Capture data bottom-field address Y register 2 */ |
75 | #define CDBCR2 0x9c /* Capture data bottom-field address C register 2 */ | 75 | #define CDBCR2 0x9c /* Capture data bottom-field address C register 2 */ |
76 | 76 | ||
77 | #undef DEBUG_GEOMETRY | ||
78 | #ifdef DEBUG_GEOMETRY | ||
79 | #define dev_geo dev_info | ||
80 | #else | ||
81 | #define dev_geo dev_dbg | ||
82 | #endif | ||
83 | |||
77 | /* per video frame buffer */ | 84 | /* per video frame buffer */ |
78 | struct sh_mobile_ceu_buffer { | 85 | struct sh_mobile_ceu_buffer { |
79 | struct videobuf_buffer vb; /* v4l buffer must be first */ | 86 | struct videobuf_buffer vb; /* v4l buffer must be first */ |
@@ -103,8 +110,9 @@ struct sh_mobile_ceu_dev { | |||
103 | }; | 110 | }; |
104 | 111 | ||
105 | struct sh_mobile_ceu_cam { | 112 | struct sh_mobile_ceu_cam { |
106 | struct v4l2_rect camera_rect; | 113 | struct v4l2_rect ceu_rect; |
107 | struct v4l2_rect camera_max; | 114 | unsigned int cam_width; |
115 | unsigned int cam_height; | ||
108 | const struct soc_camera_data_format *extra_fmt; | 116 | const struct soc_camera_data_format *extra_fmt; |
109 | const struct soc_camera_data_format *camera_fmt; | 117 | const struct soc_camera_data_format *camera_fmt; |
110 | }; | 118 | }; |
@@ -156,7 +164,7 @@ static int sh_mobile_ceu_videobuf_setup(struct videobuf_queue *vq, | |||
156 | struct sh_mobile_ceu_dev *pcdev = ici->priv; | 164 | struct sh_mobile_ceu_dev *pcdev = ici->priv; |
157 | int bytes_per_pixel = (icd->current_fmt->depth + 7) >> 3; | 165 | int bytes_per_pixel = (icd->current_fmt->depth + 7) >> 3; |
158 | 166 | ||
159 | *size = PAGE_ALIGN(icd->rect_current.width * icd->rect_current.height * | 167 | *size = PAGE_ALIGN(icd->user_width * icd->user_height * |
160 | bytes_per_pixel); | 168 | bytes_per_pixel); |
161 | 169 | ||
162 | if (0 == *count) | 170 | if (0 == *count) |
@@ -176,8 +184,9 @@ static void free_buffer(struct videobuf_queue *vq, | |||
176 | struct sh_mobile_ceu_buffer *buf) | 184 | struct sh_mobile_ceu_buffer *buf) |
177 | { | 185 | { |
178 | struct soc_camera_device *icd = vq->priv_data; | 186 | struct soc_camera_device *icd = vq->priv_data; |
187 | struct device *dev = icd->dev.parent; | ||
179 | 188 | ||
180 | dev_dbg(icd->dev.parent, "%s (vb=0x%p) 0x%08lx %zd\n", __func__, | 189 | dev_dbg(dev, "%s (vb=0x%p) 0x%08lx %zd\n", __func__, |
181 | &buf->vb, buf->vb.baddr, buf->vb.bsize); | 190 | &buf->vb, buf->vb.baddr, buf->vb.bsize); |
182 | 191 | ||
183 | if (in_interrupt()) | 192 | if (in_interrupt()) |
@@ -185,7 +194,7 @@ static void free_buffer(struct videobuf_queue *vq, | |||
185 | 194 | ||
186 | videobuf_waiton(&buf->vb, 0, 0); | 195 | videobuf_waiton(&buf->vb, 0, 0); |
187 | videobuf_dma_contig_free(vq, &buf->vb); | 196 | videobuf_dma_contig_free(vq, &buf->vb); |
188 | dev_dbg(icd->dev.parent, "%s freed\n", __func__); | 197 | dev_dbg(dev, "%s freed\n", __func__); |
189 | buf->vb.state = VIDEOBUF_NEEDS_INIT; | 198 | buf->vb.state = VIDEOBUF_NEEDS_INIT; |
190 | } | 199 | } |
191 | 200 | ||
@@ -216,7 +225,7 @@ static void sh_mobile_ceu_capture(struct sh_mobile_ceu_dev *pcdev) | |||
216 | phys_addr_top = videobuf_to_dma_contig(pcdev->active); | 225 | phys_addr_top = videobuf_to_dma_contig(pcdev->active); |
217 | ceu_write(pcdev, CDAYR, phys_addr_top); | 226 | ceu_write(pcdev, CDAYR, phys_addr_top); |
218 | if (pcdev->is_interlaced) { | 227 | if (pcdev->is_interlaced) { |
219 | phys_addr_bottom = phys_addr_top + icd->rect_current.width; | 228 | phys_addr_bottom = phys_addr_top + icd->user_width; |
220 | ceu_write(pcdev, CDBYR, phys_addr_bottom); | 229 | ceu_write(pcdev, CDBYR, phys_addr_bottom); |
221 | } | 230 | } |
222 | 231 | ||
@@ -225,12 +234,12 @@ static void sh_mobile_ceu_capture(struct sh_mobile_ceu_dev *pcdev) | |||
225 | case V4L2_PIX_FMT_NV21: | 234 | case V4L2_PIX_FMT_NV21: |
226 | case V4L2_PIX_FMT_NV16: | 235 | case V4L2_PIX_FMT_NV16: |
227 | case V4L2_PIX_FMT_NV61: | 236 | case V4L2_PIX_FMT_NV61: |
228 | phys_addr_top += icd->rect_current.width * | 237 | phys_addr_top += icd->user_width * |
229 | icd->rect_current.height; | 238 | icd->user_height; |
230 | ceu_write(pcdev, CDACR, phys_addr_top); | 239 | ceu_write(pcdev, CDACR, phys_addr_top); |
231 | if (pcdev->is_interlaced) { | 240 | if (pcdev->is_interlaced) { |
232 | phys_addr_bottom = phys_addr_top + | 241 | phys_addr_bottom = phys_addr_top + |
233 | icd->rect_current.width; | 242 | icd->user_width; |
234 | ceu_write(pcdev, CDBCR, phys_addr_bottom); | 243 | ceu_write(pcdev, CDBCR, phys_addr_bottom); |
235 | } | 244 | } |
236 | } | 245 | } |
@@ -264,12 +273,12 @@ static int sh_mobile_ceu_videobuf_prepare(struct videobuf_queue *vq, | |||
264 | BUG_ON(NULL == icd->current_fmt); | 273 | BUG_ON(NULL == icd->current_fmt); |
265 | 274 | ||
266 | if (buf->fmt != icd->current_fmt || | 275 | if (buf->fmt != icd->current_fmt || |
267 | vb->width != icd->rect_current.width || | 276 | vb->width != icd->user_width || |
268 | vb->height != icd->rect_current.height || | 277 | vb->height != icd->user_height || |
269 | vb->field != field) { | 278 | vb->field != field) { |
270 | buf->fmt = icd->current_fmt; | 279 | buf->fmt = icd->current_fmt; |
271 | vb->width = icd->rect_current.width; | 280 | vb->width = icd->user_width; |
272 | vb->height = icd->rect_current.height; | 281 | vb->height = icd->user_height; |
273 | vb->field = field; | 282 | vb->field = field; |
274 | vb->state = VIDEOBUF_NEEDS_INIT; | 283 | vb->state = VIDEOBUF_NEEDS_INIT; |
275 | } | 284 | } |
@@ -451,18 +460,6 @@ static unsigned int size_dst(unsigned int src, unsigned int scale) | |||
451 | mant_pre * 4096 / scale + 1; | 460 | mant_pre * 4096 / scale + 1; |
452 | } | 461 | } |
453 | 462 | ||
454 | static unsigned int size_src(unsigned int dst, unsigned int scale) | ||
455 | { | ||
456 | unsigned int mant_pre = scale >> 12, tmp; | ||
457 | if (!dst || !scale) | ||
458 | return dst; | ||
459 | for (tmp = ((dst - 1) * scale + 2048 * mant_pre) / 4096 + 1; | ||
460 | size_dst(tmp, scale) < dst; | ||
461 | tmp++) | ||
462 | ; | ||
463 | return tmp; | ||
464 | } | ||
465 | |||
466 | static u16 calc_scale(unsigned int src, unsigned int *dst) | 463 | static u16 calc_scale(unsigned int src, unsigned int *dst) |
467 | { | 464 | { |
468 | u16 scale; | 465 | u16 scale; |
@@ -482,65 +479,46 @@ static u16 calc_scale(unsigned int src, unsigned int *dst) | |||
482 | 479 | ||
483 | /* rect is guaranteed to not exceed the scaled camera rectangle */ | 480 | /* rect is guaranteed to not exceed the scaled camera rectangle */ |
484 | static void sh_mobile_ceu_set_rect(struct soc_camera_device *icd, | 481 | static void sh_mobile_ceu_set_rect(struct soc_camera_device *icd, |
485 | struct v4l2_rect *rect) | 482 | unsigned int out_width, |
483 | unsigned int out_height) | ||
486 | { | 484 | { |
487 | struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); | 485 | struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); |
488 | struct sh_mobile_ceu_cam *cam = icd->host_priv; | 486 | struct sh_mobile_ceu_cam *cam = icd->host_priv; |
487 | struct v4l2_rect *rect = &cam->ceu_rect; | ||
489 | struct sh_mobile_ceu_dev *pcdev = ici->priv; | 488 | struct sh_mobile_ceu_dev *pcdev = ici->priv; |
490 | int width, height, cfszr_width, cdwdr_width, in_width, in_height; | 489 | unsigned int height, width, cdwdr_width, in_width, in_height; |
491 | unsigned int left_offset, top_offset, left, top; | 490 | unsigned int left_offset, top_offset; |
492 | unsigned int hscale = pcdev->cflcr & 0xffff; | ||
493 | unsigned int vscale = (pcdev->cflcr >> 16) & 0xffff; | ||
494 | u32 camor; | 491 | u32 camor; |
495 | 492 | ||
496 | /* Switch to the camera scale */ | 493 | dev_dbg(icd->dev.parent, "Crop %ux%u@%u:%u\n", |
497 | left = size_src(rect->left, hscale); | 494 | rect->width, rect->height, rect->left, rect->top); |
498 | top = size_src(rect->top, vscale); | ||
499 | |||
500 | dev_dbg(icd->dev.parent, "Left %u * 0x%x = %u, top %u * 0x%x = %u\n", | ||
501 | rect->left, hscale, left, rect->top, vscale, top); | ||
502 | |||
503 | if (left > cam->camera_rect.left) { | ||
504 | left_offset = left - cam->camera_rect.left; | ||
505 | } else { | ||
506 | left_offset = 0; | ||
507 | left = cam->camera_rect.left; | ||
508 | } | ||
509 | |||
510 | if (top > cam->camera_rect.top) { | ||
511 | top_offset = top - cam->camera_rect.top; | ||
512 | } else { | ||
513 | top_offset = 0; | ||
514 | top = cam->camera_rect.top; | ||
515 | } | ||
516 | 495 | ||
517 | dev_dbg(icd->dev.parent, "New left %u, top %u, offsets %u:%u\n", | 496 | left_offset = rect->left; |
518 | rect->left, rect->top, left_offset, top_offset); | 497 | top_offset = rect->top; |
519 | 498 | ||
520 | if (pcdev->image_mode) { | 499 | if (pcdev->image_mode) { |
521 | width = rect->width; | 500 | in_width = rect->width; |
522 | in_width = cam->camera_rect.width; | ||
523 | if (!pcdev->is_16bit) { | 501 | if (!pcdev->is_16bit) { |
524 | width *= 2; | ||
525 | in_width *= 2; | 502 | in_width *= 2; |
526 | left_offset *= 2; | 503 | left_offset *= 2; |
527 | } | 504 | } |
528 | cfszr_width = cdwdr_width = rect->width; | 505 | width = cdwdr_width = out_width; |
529 | } else { | 506 | } else { |
530 | unsigned int w_factor = (icd->current_fmt->depth + 7) >> 3; | 507 | unsigned int w_factor = (icd->current_fmt->depth + 7) >> 3; |
508 | |||
509 | width = out_width * w_factor / 2; | ||
510 | |||
531 | if (!pcdev->is_16bit) | 511 | if (!pcdev->is_16bit) |
532 | w_factor *= 2; | 512 | w_factor *= 2; |
533 | 513 | ||
534 | width = rect->width * w_factor / 2; | 514 | in_width = rect->width * w_factor / 2; |
535 | in_width = cam->camera_rect.width * w_factor / 2; | ||
536 | left_offset = left_offset * w_factor / 2; | 515 | left_offset = left_offset * w_factor / 2; |
537 | 516 | ||
538 | cfszr_width = pcdev->is_16bit ? width : width / 2; | 517 | cdwdr_width = width * 2; |
539 | cdwdr_width = pcdev->is_16bit ? width * 2 : width; | ||
540 | } | 518 | } |
541 | 519 | ||
542 | height = rect->height; | 520 | height = out_height; |
543 | in_height = cam->camera_rect.height; | 521 | in_height = rect->height; |
544 | if (pcdev->is_interlaced) { | 522 | if (pcdev->is_interlaced) { |
545 | height /= 2; | 523 | height /= 2; |
546 | in_height /= 2; | 524 | in_height /= 2; |
@@ -548,10 +526,17 @@ static void sh_mobile_ceu_set_rect(struct soc_camera_device *icd, | |||
548 | cdwdr_width *= 2; | 526 | cdwdr_width *= 2; |
549 | } | 527 | } |
550 | 528 | ||
529 | /* Set CAMOR, CAPWR, CFSZR, take care of CDWDR */ | ||
551 | camor = left_offset | (top_offset << 16); | 530 | camor = left_offset | (top_offset << 16); |
531 | |||
532 | dev_geo(icd->dev.parent, | ||
533 | "CAMOR 0x%x, CAPWR 0x%x, CFSZR 0x%x, CDWDR 0x%x\n", camor, | ||
534 | (in_height << 16) | in_width, (height << 16) | width, | ||
535 | cdwdr_width); | ||
536 | |||
552 | ceu_write(pcdev, CAMOR, camor); | 537 | ceu_write(pcdev, CAMOR, camor); |
553 | ceu_write(pcdev, CAPWR, (in_height << 16) | in_width); | 538 | ceu_write(pcdev, CAPWR, (in_height << 16) | in_width); |
554 | ceu_write(pcdev, CFSZR, (height << 16) | cfszr_width); | 539 | ceu_write(pcdev, CFSZR, (height << 16) | width); |
555 | ceu_write(pcdev, CDWDR, cdwdr_width); | 540 | ceu_write(pcdev, CDWDR, cdwdr_width); |
556 | } | 541 | } |
557 | 542 | ||
@@ -663,8 +648,8 @@ static int sh_mobile_ceu_set_bus_param(struct soc_camera_device *icd, | |||
663 | ceu_write(pcdev, CAPCR, 0x00300000); | 648 | ceu_write(pcdev, CAPCR, 0x00300000); |
664 | ceu_write(pcdev, CAIFR, pcdev->is_interlaced ? 0x101 : 0); | 649 | ceu_write(pcdev, CAIFR, pcdev->is_interlaced ? 0x101 : 0); |
665 | 650 | ||
651 | sh_mobile_ceu_set_rect(icd, icd->user_width, icd->user_height); | ||
666 | mdelay(1); | 652 | mdelay(1); |
667 | sh_mobile_ceu_set_rect(icd, &icd->rect_current); | ||
668 | 653 | ||
669 | ceu_write(pcdev, CFLCR, pcdev->cflcr); | 654 | ceu_write(pcdev, CFLCR, pcdev->cflcr); |
670 | 655 | ||
@@ -687,11 +672,10 @@ static int sh_mobile_ceu_set_bus_param(struct soc_camera_device *icd, | |||
687 | ceu_write(pcdev, CDOCR, value); | 672 | ceu_write(pcdev, CDOCR, value); |
688 | ceu_write(pcdev, CFWCR, 0); /* keep "datafetch firewall" disabled */ | 673 | ceu_write(pcdev, CFWCR, 0); /* keep "datafetch firewall" disabled */ |
689 | 674 | ||
690 | dev_dbg(icd->dev.parent, "S_FMT successful for %c%c%c%c %ux%u@%u:%u\n", | 675 | dev_dbg(icd->dev.parent, "S_FMT successful for %c%c%c%c %ux%u\n", |
691 | pixfmt & 0xff, (pixfmt >> 8) & 0xff, | 676 | pixfmt & 0xff, (pixfmt >> 8) & 0xff, |
692 | (pixfmt >> 16) & 0xff, (pixfmt >> 24) & 0xff, | 677 | (pixfmt >> 16) & 0xff, (pixfmt >> 24) & 0xff, |
693 | icd->rect_current.width, icd->rect_current.height, | 678 | icd->user_width, icd->user_height); |
694 | icd->rect_current.left, icd->rect_current.top); | ||
695 | 679 | ||
696 | capture_restore(pcdev, capsr); | 680 | capture_restore(pcdev, capsr); |
697 | 681 | ||
@@ -744,6 +728,7 @@ static const struct soc_camera_data_format sh_mobile_ceu_formats[] = { | |||
744 | static int sh_mobile_ceu_get_formats(struct soc_camera_device *icd, int idx, | 728 | static int sh_mobile_ceu_get_formats(struct soc_camera_device *icd, int idx, |
745 | struct soc_camera_format_xlate *xlate) | 729 | struct soc_camera_format_xlate *xlate) |
746 | { | 730 | { |
731 | struct device *dev = icd->dev.parent; | ||
747 | int ret, k, n; | 732 | int ret, k, n; |
748 | int formats = 0; | 733 | int formats = 0; |
749 | struct sh_mobile_ceu_cam *cam; | 734 | struct sh_mobile_ceu_cam *cam; |
@@ -758,7 +743,6 @@ static int sh_mobile_ceu_get_formats(struct soc_camera_device *icd, int idx, | |||
758 | return -ENOMEM; | 743 | return -ENOMEM; |
759 | 744 | ||
760 | icd->host_priv = cam; | 745 | icd->host_priv = cam; |
761 | cam->camera_max = icd->rect_max; | ||
762 | } else { | 746 | } else { |
763 | cam = icd->host_priv; | 747 | cam = icd->host_priv; |
764 | } | 748 | } |
@@ -793,8 +777,7 @@ static int sh_mobile_ceu_get_formats(struct soc_camera_device *icd, int idx, | |||
793 | xlate->cam_fmt = icd->formats + idx; | 777 | xlate->cam_fmt = icd->formats + idx; |
794 | xlate->buswidth = icd->formats[idx].depth; | 778 | xlate->buswidth = icd->formats[idx].depth; |
795 | xlate++; | 779 | xlate++; |
796 | dev_dbg(icd->dev.parent, | 780 | dev_dbg(dev, "Providing format %s using %s\n", |
797 | "Providing format %s using %s\n", | ||
798 | sh_mobile_ceu_formats[k].name, | 781 | sh_mobile_ceu_formats[k].name, |
799 | icd->formats[idx].name); | 782 | icd->formats[idx].name); |
800 | } | 783 | } |
@@ -807,7 +790,7 @@ add_single_format: | |||
807 | xlate->cam_fmt = icd->formats + idx; | 790 | xlate->cam_fmt = icd->formats + idx; |
808 | xlate->buswidth = icd->formats[idx].depth; | 791 | xlate->buswidth = icd->formats[idx].depth; |
809 | xlate++; | 792 | xlate++; |
810 | dev_dbg(icd->dev.parent, | 793 | dev_dbg(dev, |
811 | "Providing format %s in pass-through mode\n", | 794 | "Providing format %s in pass-through mode\n", |
812 | icd->formats[idx].name); | 795 | icd->formats[idx].name); |
813 | } | 796 | } |
@@ -836,176 +819,487 @@ static bool is_inside(struct v4l2_rect *r1, struct v4l2_rect *r2) | |||
836 | r1->top + r1->height < r2->top + r2->height; | 819 | r1->top + r1->height < r2->top + r2->height; |
837 | } | 820 | } |
838 | 821 | ||
822 | static unsigned int scale_down(unsigned int size, unsigned int scale) | ||
823 | { | ||
824 | return (size * 4096 + scale / 2) / scale; | ||
825 | } | ||
826 | |||
827 | static unsigned int scale_up(unsigned int size, unsigned int scale) | ||
828 | { | ||
829 | return (size * scale + 2048) / 4096; | ||
830 | } | ||
831 | |||
832 | static unsigned int calc_generic_scale(unsigned int input, unsigned int output) | ||
833 | { | ||
834 | return (input * 4096 + output / 2) / output; | ||
835 | } | ||
836 | |||
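The three helpers above express scale factors as 4096-based fixed point: 4096 is 1:1, 8192 is a 2:1 downscale, and scale_down()/scale_up() convert sizes between the two ends of such a scale with rounding. A minimal standalone sketch of the arithmetic (the helper bodies are copied from the hunk above; the 1280/640 geometry is only an example):

#include <stdio.h>

/* Copied from the hunk above: scale factors are fixed point, 4096 == 1:1 */
static unsigned int scale_down(unsigned int size, unsigned int scale)
{
	return (size * 4096 + scale / 2) / scale;
}

static unsigned int scale_up(unsigned int size, unsigned int scale)
{
	return (size * scale + 2048) / 4096;
}

static unsigned int calc_generic_scale(unsigned int input, unsigned int output)
{
	return (input * 4096 + output / 2) / output;
}

int main(void)
{
	/* 1280 input pixels scaled to 640 output pixels: scale == 8192 (2:1) */
	unsigned int scale = calc_generic_scale(1280, 640);

	printf("scale %u, 1280 down -> %u, 640 up -> %u\n",
	       scale, scale_down(1280, scale), scale_up(640, scale));
	return 0;
}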
837 | static int client_g_rect(struct v4l2_subdev *sd, struct v4l2_rect *rect) | ||
838 | { | ||
839 | struct v4l2_crop crop; | ||
840 | struct v4l2_cropcap cap; | ||
841 | int ret; | ||
842 | |||
843 | crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | ||
844 | |||
845 | ret = v4l2_subdev_call(sd, video, g_crop, &crop); | ||
846 | if (!ret) { | ||
847 | *rect = crop.c; | ||
848 | return ret; | ||
849 | } | ||
850 | |||
851 | /* Camera driver doesn't support .g_crop(), assume default rectangle */ | ||
852 | cap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | ||
853 | |||
854 | ret = v4l2_subdev_call(sd, video, cropcap, &cap); | ||
855 | if (ret < 0) | ||
856 | return ret; | ||
857 | |||
858 | *rect = cap.defrect; | ||
859 | |||
860 | return ret; | ||
861 | } | ||
862 | |||
839 | /* | 863 | /* |
840 | * CEU can scale and crop, but we don't want to waste bandwidth and kill the | 864 | * The iterative approach, common to both scaling and cropping, is: |
841 | * framerate by always requesting the maximum image from the client. For | ||
842 | * cropping we also have to take care of the current scale. The common for both | ||
843 | * scaling and cropping approach is: | ||
844 | * 1. try if the client can produce exactly what was requested by the user | 865 | * 1. try if the client can produce exactly what was requested by the user |
845 | * 2. if (1) failed, try to double the client image until we get one big enough | 866 | * 2. if (1) failed, try to double the client image until we get one big enough |
846 | * 3. if (2) failed, try to request the maximum image | 867 | * 3. if (2) failed, try to request the maximum image |
847 | */ | 868 | */ |
848 | static int sh_mobile_ceu_set_crop(struct soc_camera_device *icd, | 869 | static int client_s_crop(struct v4l2_subdev *sd, struct v4l2_crop *crop, |
849 | struct v4l2_crop *a) | 870 | struct v4l2_crop *cam_crop) |
850 | { | 871 | { |
851 | struct v4l2_rect *rect = &a->c; | 872 | struct v4l2_rect *rect = &crop->c, *cam_rect = &cam_crop->c; |
852 | struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); | 873 | struct device *dev = sd->v4l2_dev->dev; |
853 | struct sh_mobile_ceu_dev *pcdev = ici->priv; | 874 | struct v4l2_cropcap cap; |
854 | struct v4l2_crop cam_crop; | ||
855 | struct v4l2_rect *cam_rect = &cam_crop.c, target, cam_max; | ||
856 | struct sh_mobile_ceu_cam *cam = icd->host_priv; | ||
857 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); | ||
858 | unsigned int hscale = pcdev->cflcr & 0xffff; | ||
859 | unsigned int vscale = (pcdev->cflcr >> 16) & 0xffff; | ||
860 | unsigned short width, height; | ||
861 | u32 capsr; | ||
862 | int ret; | 875 | int ret; |
876 | unsigned int width, height; | ||
863 | 877 | ||
864 | /* Scale back up into client units */ | 878 | v4l2_subdev_call(sd, video, s_crop, crop); |
865 | cam_rect->left = size_src(rect->left, hscale); | 879 | ret = client_g_rect(sd, cam_rect); |
866 | cam_rect->width = size_src(rect->width, hscale); | 880 | if (ret < 0) |
867 | cam_rect->top = size_src(rect->top, vscale); | 881 | return ret; |
868 | cam_rect->height = size_src(rect->height, vscale); | ||
869 | |||
870 | target = *cam_rect; | ||
871 | 882 | ||
872 | capsr = capture_save_reset(pcdev); | 883 | /* |
873 | dev_dbg(icd->dev.parent, "CAPSR 0x%x, CFLCR 0x%x\n", | 884 | * Now cam_crop contains the current camera input rectangle, and it must |
874 | capsr, pcdev->cflcr); | 885 | * be within camera cropcap bounds |
875 | 886 | */ | |
876 | /* First attempt - see if the client can deliver a perfect result */ | 887 | if (!memcmp(rect, cam_rect, sizeof(*rect))) { |
877 | ret = v4l2_subdev_call(sd, video, s_crop, &cam_crop); | 888 | /* Even if camera S_CROP failed, the camera rectangle matches */ |
878 | if (!ret && !memcmp(&target, &cam_rect, sizeof(target))) { | 889 | dev_dbg(dev, "Camera S_CROP successful for %ux%u@%u:%u\n", |
879 | dev_dbg(icd->dev.parent, | 890 | rect->width, rect->height, rect->left, rect->top); |
880 | "Camera S_CROP successful for %ux%u@%u:%u\n", | 891 | return 0; |
881 | cam_rect->width, cam_rect->height, | ||
882 | cam_rect->left, cam_rect->top); | ||
883 | goto ceu_set_rect; | ||
884 | } | 892 | } |
885 | 893 | ||
886 | /* Try to fix cropping, that camera hasn't managed to do */ | 894 | /* Try to fix the cropping that the camera hasn't managed to set */ |
887 | dev_dbg(icd->dev.parent, "Fix camera S_CROP %d for %ux%u@%u:%u" | 895 | dev_geo(dev, "Fix camera S_CROP for %ux%u@%u:%u to %ux%u@%u:%u\n", |
888 | " to %ux%u@%u:%u\n", | 896 | cam_rect->width, cam_rect->height, |
889 | ret, cam_rect->width, cam_rect->height, | ||
890 | cam_rect->left, cam_rect->top, | 897 | cam_rect->left, cam_rect->top, |
891 | target.width, target.height, target.left, target.top); | 898 | rect->width, rect->height, rect->left, rect->top); |
899 | |||
900 | /* We need sensor maximum rectangle */ | ||
901 | ret = v4l2_subdev_call(sd, video, cropcap, &cap); | ||
902 | if (ret < 0) | ||
903 | return ret; | ||
904 | |||
905 | soc_camera_limit_side(&rect->left, &rect->width, cap.bounds.left, 2, | ||
906 | cap.bounds.width); | ||
907 | soc_camera_limit_side(&rect->top, &rect->height, cap.bounds.top, 4, | ||
908 | cap.bounds.height); | ||
892 | 909 | ||
893 | /* | 910 | /* |
894 | * Popular special case - some cameras can only handle fixed sizes like | 911 | * Popular special case - some cameras can only handle fixed sizes like |
895 | * QVGA, VGA,... Take care to avoid infinite loop. | 912 | * QVGA, VGA,... Take care to avoid infinite loop. |
896 | */ | 913 | */ |
897 | width = max(cam_rect->width, 1); | 914 | width = max(cam_rect->width, 2); |
898 | height = max(cam_rect->height, 1); | 915 | height = max(cam_rect->height, 2); |
899 | cam_max.width = size_src(icd->rect_max.width, hscale); | 916 | |
900 | cam_max.left = size_src(icd->rect_max.left, hscale); | 917 | while (!ret && (is_smaller(cam_rect, rect) || |
901 | cam_max.height = size_src(icd->rect_max.height, vscale); | 918 | is_inside(cam_rect, rect)) && |
902 | cam_max.top = size_src(icd->rect_max.top, vscale); | 919 | (cap.bounds.width > width || cap.bounds.height > height)) { |
903 | while (!ret && (is_smaller(cam_rect, &target) || | ||
904 | is_inside(cam_rect, &target)) && | ||
905 | cam_max.width >= width && cam_max.height >= height) { | ||
906 | 920 | ||
907 | width *= 2; | 921 | width *= 2; |
908 | height *= 2; | 922 | height *= 2; |
923 | |||
909 | cam_rect->width = width; | 924 | cam_rect->width = width; |
910 | cam_rect->height = height; | 925 | cam_rect->height = height; |
911 | 926 | ||
912 | /* We do not know what the camera is capable of, play safe */ | 927 | /* |
913 | if (cam_rect->left > target.left) | 928 | * We do not know what capabilities the camera has to set up |
914 | cam_rect->left = cam_max.left; | 929 | * left and top borders. We could try to be smarter in iterating |
930 | * them, e.g., if camera current left is to the right of the | ||
931 | * target left, set it to the middle point between the current | ||
932 | * left and minimum left. But that would add too much | ||
933 | * complexity: we would have to iterate each border separately. | ||
934 | */ | ||
935 | if (cam_rect->left > rect->left) | ||
936 | cam_rect->left = cap.bounds.left; | ||
915 | 937 | ||
916 | if (cam_rect->left + cam_rect->width < target.left + target.width) | 938 | if (cam_rect->left + cam_rect->width < rect->left + rect->width) |
917 | cam_rect->width = target.left + target.width - | 939 | cam_rect->width = rect->left + rect->width - |
918 | cam_rect->left; | 940 | cam_rect->left; |
919 | 941 | ||
920 | if (cam_rect->top > target.top) | 942 | if (cam_rect->top > rect->top) |
921 | cam_rect->top = cam_max.top; | 943 | cam_rect->top = cap.bounds.top; |
922 | 944 | ||
923 | if (cam_rect->top + cam_rect->height < target.top + target.height) | 945 | if (cam_rect->top + cam_rect->height < rect->top + rect->height) |
924 | cam_rect->height = target.top + target.height - | 946 | cam_rect->height = rect->top + rect->height - |
925 | cam_rect->top; | 947 | cam_rect->top; |
926 | 948 | ||
927 | if (cam_rect->width + cam_rect->left > | 949 | v4l2_subdev_call(sd, video, s_crop, cam_crop); |
928 | cam_max.width + cam_max.left) | 950 | ret = client_g_rect(sd, cam_rect); |
929 | cam_rect->left = max(cam_max.width + cam_max.left - | 951 | dev_geo(dev, "Camera S_CROP %d for %ux%u@%u:%u\n", ret, |
930 | cam_rect->width, cam_max.left); | 952 | cam_rect->width, cam_rect->height, |
931 | |||
932 | if (cam_rect->height + cam_rect->top > | ||
933 | cam_max.height + cam_max.top) | ||
934 | cam_rect->top = max(cam_max.height + cam_max.top - | ||
935 | cam_rect->height, cam_max.top); | ||
936 | |||
937 | ret = v4l2_subdev_call(sd, video, s_crop, &cam_crop); | ||
938 | dev_dbg(icd->dev.parent, "Camera S_CROP %d for %ux%u@%u:%u\n", | ||
939 | ret, cam_rect->width, cam_rect->height, | ||
940 | cam_rect->left, cam_rect->top); | 953 | cam_rect->left, cam_rect->top); |
941 | } | 954 | } |
942 | 955 | ||
943 | /* | 956 | /* S_CROP must not modify the rectangle */ |
944 | * If the camera failed to configure cropping, it should not modify the | 957 | if (is_smaller(cam_rect, rect) || is_inside(cam_rect, rect)) { |
945 | * rectangle | ||
946 | */ | ||
947 | if ((ret < 0 && (is_smaller(&icd->rect_current, rect) || | ||
948 | is_inside(&icd->rect_current, rect))) || | ||
949 | is_smaller(cam_rect, &target) || is_inside(cam_rect, &target)) { | ||
950 | /* | 958 | /* |
951 | * The camera failed to configure a suitable cropping, | 959 | * The camera failed to configure a suitable cropping, |
952 | * we cannot use the current rectangle, set to max | 960 | * we cannot use the current rectangle, set to max |
953 | */ | 961 | */ |
954 | *cam_rect = cam_max; | 962 | *cam_rect = cap.bounds; |
955 | ret = v4l2_subdev_call(sd, video, s_crop, &cam_crop); | 963 | v4l2_subdev_call(sd, video, s_crop, cam_crop); |
956 | dev_dbg(icd->dev.parent, | 964 | ret = client_g_rect(sd, cam_rect); |
957 | "Camera S_CROP %d for max %ux%u@%u:%u\n", | 965 | dev_geo(dev, "Camera S_CROP %d for max %ux%u@%u:%u\n", ret, |
958 | ret, cam_rect->width, cam_rect->height, | 966 | cam_rect->width, cam_rect->height, |
959 | cam_rect->left, cam_rect->top); | 967 | cam_rect->left, cam_rect->top); |
960 | if (ret < 0 && ret != -ENOIOCTLCMD) | ||
961 | /* All failed, hopefully resume current capture */ | ||
962 | goto resume_capture; | ||
963 | |||
964 | /* Finally, adjust the target rectangle */ | ||
965 | if (target.width > cam_rect->width) | ||
966 | target.width = cam_rect->width; | ||
967 | if (target.height > cam_rect->height) | ||
968 | target.height = cam_rect->height; | ||
969 | if (target.left + target.width > cam_rect->left + cam_rect->width) | ||
970 | target.left = cam_rect->left + cam_rect->width - | ||
971 | target.width; | ||
972 | if (target.top + target.height > cam_rect->top + cam_rect->height) | ||
973 | target.top = cam_rect->top + cam_rect->height - | ||
974 | target.height; | ||
975 | } | 968 | } |
976 | 969 | ||
977 | /* We now have a rectangle, larger than requested, let's crop */ | 970 | return ret; |
971 | } | ||
972 | |||
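The numbered comment before client_s_crop() describes the iteration in words; the sketch below is a self-contained model of the same loop, with the v4l2_subdev s_crop/g_crop round trip replaced by a hypothetical fake_s_crop() that behaves like a sensor supporting only a few fixed window widths (all names and sizes here are illustrative, not part of the driver):

#include <stdbool.h>
#include <stdio.h>

struct rect { int left, top; unsigned int width, height; };

/* Hypothetical stand-in for the s_crop/g_crop round trip: the "sensor" only
 * supports 320-, 640- and 1280-pixel wide 4:3 windows. */
static void fake_s_crop(struct rect *r)
{
	if (r->width >= 1280)
		r->width = 1280;
	else if (r->width >= 640)
		r->width = 640;
	else
		r->width = 320;
	r->height = r->width * 3 / 4;
}

static bool covers(const struct rect *o, const struct rect *i)
{
	return o->left <= i->left && o->top <= i->top &&
	       o->left + (int)o->width >= i->left + (int)i->width &&
	       o->top + (int)o->height >= i->top + (int)i->height;
}

int main(void)
{
	const struct rect bounds = { 0, 0, 1280, 960 };	/* cropcap bounds */
	const struct rect want = { 0, 0, 700, 500 };	/* user request */
	struct rect got = want;

	/* 1. try the exact rectangle */
	fake_s_crop(&got);

	/* 2. double until the delivered rectangle covers the request */
	while (!covers(&got, &want) &&
	       (bounds.width > got.width || bounds.height > got.height)) {
		got.width *= 2;
		got.height *= 2;
		fake_s_crop(&got);
	}

	/* 3. last resort: the maximum window */
	if (!covers(&got, &want))
		got = bounds;

	printf("asked %ux%u, client delivers %ux%u\n",
	       want.width, want.height, got.width, got.height);
	return 0;
}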
973 | static int get_camera_scales(struct v4l2_subdev *sd, struct v4l2_rect *rect, | ||
974 | unsigned int *scale_h, unsigned int *scale_v) | ||
975 | { | ||
976 | struct v4l2_format f; | ||
977 | int ret; | ||
978 | |||
979 | f.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | ||
980 | |||
981 | ret = v4l2_subdev_call(sd, video, g_fmt, &f); | ||
982 | if (ret < 0) | ||
983 | return ret; | ||
984 | |||
985 | *scale_h = calc_generic_scale(rect->width, f.fmt.pix.width); | ||
986 | *scale_v = calc_generic_scale(rect->height, f.fmt.pix.height); | ||
987 | |||
988 | return 0; | ||
989 | } | ||
990 | |||
991 | static int get_camera_subwin(struct soc_camera_device *icd, | ||
992 | struct v4l2_rect *cam_subrect, | ||
993 | unsigned int cam_hscale, unsigned int cam_vscale) | ||
994 | { | ||
995 | struct sh_mobile_ceu_cam *cam = icd->host_priv; | ||
996 | struct v4l2_rect *ceu_rect = &cam->ceu_rect; | ||
997 | |||
998 | if (!ceu_rect->width) { | ||
999 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); | ||
1000 | struct device *dev = icd->dev.parent; | ||
1001 | struct v4l2_format f; | ||
1002 | struct v4l2_pix_format *pix = &f.fmt.pix; | ||
1003 | int ret; | ||
1004 | /* First time */ | ||
1005 | |||
1006 | f.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | ||
1007 | |||
1008 | ret = v4l2_subdev_call(sd, video, g_fmt, &f); | ||
1009 | if (ret < 0) | ||
1010 | return ret; | ||
1011 | |||
1012 | dev_geo(dev, "camera fmt %ux%u\n", pix->width, pix->height); | ||
1013 | |||
1014 | if (pix->width > 2560) { | ||
1015 | ceu_rect->width = 2560; | ||
1016 | ceu_rect->left = (pix->width - 2560) / 2; | ||
1017 | } else { | ||
1018 | ceu_rect->width = pix->width; | ||
1019 | ceu_rect->left = 0; | ||
1020 | } | ||
1021 | |||
1022 | if (pix->height > 1920) { | ||
1023 | ceu_rect->height = 1920; | ||
1024 | ceu_rect->top = (pix->height - 1920) / 2; | ||
1025 | } else { | ||
1026 | ceu_rect->height = pix->height; | ||
1027 | ceu_rect->top = 0; | ||
1028 | } | ||
1029 | |||
1030 | dev_geo(dev, "initialised CEU rect %ux%u@%u:%u\n", | ||
1031 | ceu_rect->width, ceu_rect->height, | ||
1032 | ceu_rect->left, ceu_rect->top); | ||
1033 | } | ||
1034 | |||
1035 | cam_subrect->width = scale_up(ceu_rect->width, cam_hscale); | ||
1036 | cam_subrect->left = scale_up(ceu_rect->left, cam_hscale); | ||
1037 | cam_subrect->height = scale_up(ceu_rect->height, cam_vscale); | ||
1038 | cam_subrect->top = scale_up(ceu_rect->top, cam_vscale); | ||
1039 | |||
1040 | return 0; | ||
1041 | } | ||
1042 | |||
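The first-time branch above centres the CEU window inside the hardware maximum of 2560x1920; the fragment below isolates that clamp-and-centre step (the 3264x2448 camera output is just an example value):

#include <stdio.h>

struct rect { unsigned int left, top, width, height; };

/* Clamp a camera output of cam_w x cam_h to the 2560x1920 CEU limit and
 * centre the resulting window, as in the first-time path above. */
static void init_ceu_rect(struct rect *r, unsigned int cam_w, unsigned int cam_h)
{
	r->width = cam_w > 2560 ? 2560 : cam_w;
	r->left = (cam_w - r->width) / 2;
	r->height = cam_h > 1920 ? 1920 : cam_h;
	r->top = (cam_h - r->height) / 2;
}

int main(void)
{
	struct rect r;

	init_ceu_rect(&r, 3264, 2448);
	printf("%ux%u@%u:%u\n", r.width, r.height, r.left, r.top);
	return 0;
}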
1043 | static int client_s_fmt(struct soc_camera_device *icd, struct v4l2_format *f, | ||
1044 | bool ceu_can_scale) | ||
1045 | { | ||
1046 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); | ||
1047 | struct device *dev = icd->dev.parent; | ||
1048 | struct v4l2_pix_format *pix = &f->fmt.pix; | ||
1049 | unsigned int width = pix->width, height = pix->height, tmp_w, tmp_h; | ||
1050 | unsigned int max_width, max_height; | ||
1051 | struct v4l2_cropcap cap; | ||
1052 | int ret; | ||
1053 | |||
1054 | cap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | ||
1055 | |||
1056 | ret = v4l2_subdev_call(sd, video, cropcap, &cap); | ||
1057 | if (ret < 0) | ||
1058 | return ret; | ||
1059 | |||
1060 | max_width = min(cap.bounds.width, 2560); | ||
1061 | max_height = min(cap.bounds.height, 1920); | ||
1062 | |||
1063 | ret = v4l2_subdev_call(sd, video, s_fmt, f); | ||
1064 | if (ret < 0) | ||
1065 | return ret; | ||
1066 | |||
1067 | dev_geo(dev, "camera scaled to %ux%u\n", pix->width, pix->height); | ||
1068 | |||
1069 | if ((width == pix->width && height == pix->height) || !ceu_can_scale) | ||
1070 | return 0; | ||
1071 | |||
1072 | /* Camera set a format, but the geometry is not precise; try to improve it */ | ||
1073 | tmp_w = pix->width; | ||
1074 | tmp_h = pix->height; | ||
1075 | |||
1076 | /* width <= max_width && height <= max_height - guaranteed by try_fmt */ | ||
1077 | while ((width > tmp_w || height > tmp_h) && | ||
1078 | tmp_w < max_width && tmp_h < max_height) { | ||
1079 | tmp_w = min(2 * tmp_w, max_width); | ||
1080 | tmp_h = min(2 * tmp_h, max_height); | ||
1081 | pix->width = tmp_w; | ||
1082 | pix->height = tmp_h; | ||
1083 | ret = v4l2_subdev_call(sd, video, s_fmt, f); | ||
1084 | dev_geo(dev, "Camera scaled to %ux%u\n", | ||
1085 | pix->width, pix->height); | ||
1086 | if (ret < 0) { | ||
1087 | /* This shouldn't happen */ | ||
1088 | dev_err(dev, "Client failed to set format: %d\n", ret); | ||
1089 | return ret; | ||
1090 | } | ||
1091 | } | ||
1092 | |||
1093 | return 0; | ||
1094 | } | ||
1095 | |||
1096 | /** | ||
1097 | * @rect - camera cropped rectangle | ||
1098 | * @sub_rect - CEU cropped rectangle, mapped back to camera input area | ||
1099 | * @ceu_rect - on output, the calculated CEU crop rectangle | ||
1100 | */ | ||
1101 | static int client_scale(struct soc_camera_device *icd, struct v4l2_rect *rect, | ||
1102 | struct v4l2_rect *sub_rect, struct v4l2_rect *ceu_rect, | ||
1103 | struct v4l2_format *f, bool ceu_can_scale) | ||
1104 | { | ||
1105 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); | ||
1106 | struct sh_mobile_ceu_cam *cam = icd->host_priv; | ||
1107 | struct device *dev = icd->dev.parent; | ||
1108 | struct v4l2_format f_tmp = *f; | ||
1109 | struct v4l2_pix_format *pix_tmp = &f_tmp.fmt.pix; | ||
1110 | unsigned int scale_h, scale_v; | ||
1111 | int ret; | ||
1112 | |||
1113 | /* 5. Apply iterative camera S_FMT for camera user window. */ | ||
1114 | ret = client_s_fmt(icd, &f_tmp, ceu_can_scale); | ||
1115 | if (ret < 0) | ||
1116 | return ret; | ||
1117 | |||
1118 | dev_geo(dev, "5: camera scaled to %ux%u\n", | ||
1119 | pix_tmp->width, pix_tmp->height); | ||
1120 | |||
1121 | /* 6. Retrieve camera output window (g_fmt) */ | ||
1122 | |||
1123 | /* unneeded - it is already in "f_tmp" */ | ||
1124 | |||
1125 | /* 7. Calculate new camera scales. */ | ||
1126 | ret = get_camera_scales(sd, rect, &scale_h, &scale_v); | ||
1127 | if (ret < 0) | ||
1128 | return ret; | ||
1129 | |||
1130 | dev_geo(dev, "7: camera scales %u:%u\n", scale_h, scale_v); | ||
1131 | |||
1132 | cam->cam_width = pix_tmp->width; | ||
1133 | cam->cam_height = pix_tmp->height; | ||
1134 | f->fmt.pix.width = pix_tmp->width; | ||
1135 | f->fmt.pix.height = pix_tmp->height; | ||
978 | 1136 | ||
979 | /* | 1137 | /* |
980 | * We have to preserve camera rectangle between close() / open(), | 1138 | * 8. Calculate new CEU crop - apply camera scales to previously |
981 | * because soc-camera core calls .set_fmt() on each first open() with | 1139 | * calculated "effective" crop. |
982 | * last before last close() _user_ rectangle, which can be different | ||
983 | * from camera rectangle. | ||
984 | */ | 1140 | */ |
985 | dev_dbg(icd->dev.parent, | 1141 | ceu_rect->left = scale_down(sub_rect->left, scale_h); |
986 | "SH S_CROP from %ux%u@%u:%u to %ux%u@%u:%u, scale to %ux%u@%u:%u\n", | 1142 | ceu_rect->width = scale_down(sub_rect->width, scale_h); |
987 | cam_rect->width, cam_rect->height, cam_rect->left, cam_rect->top, | 1143 | ceu_rect->top = scale_down(sub_rect->top, scale_v); |
988 | target.width, target.height, target.left, target.top, | 1144 | ceu_rect->height = scale_down(sub_rect->height, scale_v); |
989 | rect->width, rect->height, rect->left, rect->top); | 1145 | |
1146 | dev_geo(dev, "8: new CEU rect %ux%u@%u:%u\n", | ||
1147 | ceu_rect->width, ceu_rect->height, | ||
1148 | ceu_rect->left, ceu_rect->top); | ||
1149 | |||
1150 | return 0; | ||
1151 | } | ||
1152 | |||
1153 | /* Get combined scales */ | ||
1154 | static int get_scales(struct soc_camera_device *icd, | ||
1155 | unsigned int *scale_h, unsigned int *scale_v) | ||
1156 | { | ||
1157 | struct sh_mobile_ceu_cam *cam = icd->host_priv; | ||
1158 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); | ||
1159 | struct v4l2_crop cam_crop; | ||
1160 | unsigned int width_in, height_in; | ||
1161 | int ret; | ||
990 | 1162 | ||
991 | ret = 0; | 1163 | cam_crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
992 | 1164 | ||
993 | ceu_set_rect: | 1165 | ret = client_g_rect(sd, &cam_crop.c); |
994 | cam->camera_rect = *cam_rect; | 1166 | if (ret < 0) |
1167 | return ret; | ||
995 | 1168 | ||
996 | rect->width = size_dst(target.width, hscale); | 1169 | ret = get_camera_scales(sd, &cam_crop.c, scale_h, scale_v); |
997 | rect->left = size_dst(target.left, hscale); | 1170 | if (ret < 0) |
998 | rect->height = size_dst(target.height, vscale); | 1171 | return ret; |
999 | rect->top = size_dst(target.top, vscale); | ||
1000 | 1172 | ||
1001 | sh_mobile_ceu_set_rect(icd, rect); | 1173 | width_in = scale_up(cam->ceu_rect.width, *scale_h); |
1174 | height_in = scale_up(cam->ceu_rect.height, *scale_v); | ||
1002 | 1175 | ||
1003 | resume_capture: | 1176 | *scale_h = calc_generic_scale(width_in, icd->user_width); |
1004 | /* Set CAMOR, CAPWR, CFSZR, take care of CDWDR */ | 1177 | *scale_v = calc_generic_scale(height_in, icd->user_height); |
1178 | |||
1179 | return 0; | ||
1180 | } | ||
1181 | |||
1182 | /* | ||
1183 | * CEU can scale and crop, but we don't want to waste bandwidth and kill the | ||
1184 | * framerate by always requesting the maximum image from the client. See | ||
1185 | * Documentation/video4linux/sh_mobile_camera_ceu.txt for a description of | ||
1186 | * scaling and cropping algorithms and for the meaning of the steps referenced here. | ||
1187 | */ | ||
1188 | static int sh_mobile_ceu_set_crop(struct soc_camera_device *icd, | ||
1189 | struct v4l2_crop *a) | ||
1190 | { | ||
1191 | struct v4l2_rect *rect = &a->c; | ||
1192 | struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); | ||
1193 | struct sh_mobile_ceu_dev *pcdev = ici->priv; | ||
1194 | struct v4l2_crop cam_crop; | ||
1195 | struct sh_mobile_ceu_cam *cam = icd->host_priv; | ||
1196 | struct v4l2_rect *cam_rect = &cam_crop.c, *ceu_rect = &cam->ceu_rect; | ||
1197 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); | ||
1198 | struct device *dev = icd->dev.parent; | ||
1199 | struct v4l2_format f; | ||
1200 | struct v4l2_pix_format *pix = &f.fmt.pix; | ||
1201 | unsigned int scale_comb_h, scale_comb_v, scale_ceu_h, scale_ceu_v, | ||
1202 | out_width, out_height; | ||
1203 | u32 capsr, cflcr; | ||
1204 | int ret; | ||
1205 | |||
1206 | /* 1. Calculate current combined scales. */ | ||
1207 | ret = get_scales(icd, &scale_comb_h, &scale_comb_v); | ||
1208 | if (ret < 0) | ||
1209 | return ret; | ||
1210 | |||
1211 | dev_geo(dev, "1: combined scales %u:%u\n", scale_comb_h, scale_comb_v); | ||
1212 | |||
1213 | /* 2. Apply iterative camera S_CROP for new input window. */ | ||
1214 | ret = client_s_crop(sd, a, &cam_crop); | ||
1215 | if (ret < 0) | ||
1216 | return ret; | ||
1217 | |||
1218 | dev_geo(dev, "2: camera cropped to %ux%u@%u:%u\n", | ||
1219 | cam_rect->width, cam_rect->height, | ||
1220 | cam_rect->left, cam_rect->top); | ||
1221 | |||
1222 | /* On success cam_crop contains current camera crop */ | ||
1223 | |||
1224 | /* | ||
1225 | * 3. If old combined scales applied to new crop produce an impossible | ||
1226 | * user window, adjust scales to produce nearest possible window. | ||
1227 | */ | ||
1228 | out_width = scale_down(rect->width, scale_comb_h); | ||
1229 | out_height = scale_down(rect->height, scale_comb_v); | ||
1230 | |||
1231 | if (out_width > 2560) | ||
1232 | out_width = 2560; | ||
1233 | else if (out_width < 2) | ||
1234 | out_width = 2; | ||
1235 | |||
1236 | if (out_height > 1920) | ||
1237 | out_height = 1920; | ||
1238 | else if (out_height < 4) | ||
1239 | out_height = 4; | ||
1240 | |||
1241 | dev_geo(dev, "3: Adjusted output %ux%u\n", out_width, out_height); | ||
1242 | |||
1243 | /* 4. Use G_CROP to retrieve actual input window: already in cam_crop */ | ||
1244 | |||
1245 | /* | ||
1246 | * 5. Using the actual input window and the calculated combined scales, | ||
1247 | * calculate the camera target output window. | ||
1248 | */ | ||
1249 | pix->width = scale_down(cam_rect->width, scale_comb_h); | ||
1250 | pix->height = scale_down(cam_rect->height, scale_comb_v); | ||
1251 | |||
1252 | dev_geo(dev, "5: camera target %ux%u\n", pix->width, pix->height); | ||
1253 | |||
1254 | /* 6. - 9. */ | ||
1255 | pix->pixelformat = cam->camera_fmt->fourcc; | ||
1256 | pix->colorspace = cam->camera_fmt->colorspace; | ||
1257 | |||
1258 | capsr = capture_save_reset(pcdev); | ||
1259 | dev_dbg(dev, "CAPSR 0x%x, CFLCR 0x%x\n", capsr, pcdev->cflcr); | ||
1260 | |||
1261 | /* Make relative to camera rectangle */ | ||
1262 | rect->left -= cam_rect->left; | ||
1263 | rect->top -= cam_rect->top; | ||
1264 | |||
1265 | f.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | ||
1266 | |||
1267 | ret = client_scale(icd, cam_rect, rect, ceu_rect, &f, | ||
1268 | pcdev->image_mode && !pcdev->is_interlaced); | ||
1269 | |||
1270 | dev_geo(dev, "6-9: %d\n", ret); | ||
1271 | |||
1272 | /* 10. Use CEU cropping to crop to the new window. */ | ||
1273 | sh_mobile_ceu_set_rect(icd, out_width, out_height); | ||
1274 | |||
1275 | dev_geo(dev, "10: CEU cropped to %ux%u@%u:%u\n", | ||
1276 | ceu_rect->width, ceu_rect->height, | ||
1277 | ceu_rect->left, ceu_rect->top); | ||
1278 | |||
1279 | /* | ||
1280 | * 11. Calculate CEU scales from the results of (10) and the user | ||
1281 | * window from (3) | ||
1282 | */ | ||
1283 | scale_ceu_h = calc_scale(ceu_rect->width, &out_width); | ||
1284 | scale_ceu_v = calc_scale(ceu_rect->height, &out_height); | ||
1285 | |||
1286 | dev_geo(dev, "11: CEU scales %u:%u\n", scale_ceu_h, scale_ceu_v); | ||
1287 | |||
1288 | /* 12. Apply CEU scales. */ | ||
1289 | cflcr = scale_ceu_h | (scale_ceu_v << 16); | ||
1290 | if (cflcr != pcdev->cflcr) { | ||
1291 | pcdev->cflcr = cflcr; | ||
1292 | ceu_write(pcdev, CFLCR, cflcr); | ||
1293 | } | ||
1294 | |||
1295 | /* Restore capture */ | ||
1005 | if (pcdev->active) | 1296 | if (pcdev->active) |
1006 | capsr |= 1; | 1297 | capsr |= 1; |
1007 | capture_restore(pcdev, capsr); | 1298 | capture_restore(pcdev, capsr); |
1008 | 1299 | ||
1300 | icd->user_width = out_width; | ||
1301 | icd->user_height = out_height; | ||
1302 | |||
1009 | /* Even if only camera cropping succeeded */ | 1303 | /* Even if only camera cropping succeeded */ |
1010 | return ret; | 1304 | return ret; |
1011 | } | 1305 | } |
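A worked pass through the scale arithmetic that sh_mobile_ceu_set_crop() performs, using the same 4096-based helpers. The geometry is hypothetical (camera output 1280x960 currently shown 1:1 into a 640x480 user window, then an S_CROP of an 800x600 sensor window with the camera assumed to pass the crop through unscaled), and calc_generic_scale() stands in for the driver's calc_scale(), which additionally clamps to what the CEU supports:

#include <stdio.h>

static unsigned int scale_down(unsigned int size, unsigned int scale)
{
	return (size * 4096 + scale / 2) / scale;
}

static unsigned int calc_generic_scale(unsigned int input, unsigned int output)
{
	return (input * 4096 + output / 2) / output;
}

int main(void)
{
	/* 1. combined scales: 1280x960 CEU input currently becomes 640x480 */
	unsigned int scale_comb_h = calc_generic_scale(1280, 640);
	unsigned int scale_comb_v = calc_generic_scale(960, 480);

	/* 3. apply the old combined scales to the new 800x600 crop request */
	unsigned int out_width = scale_down(800, scale_comb_h);
	unsigned int out_height = scale_down(600, scale_comb_v);

	/* 11. CEU scales from the 800x600 CEU crop down to the new output */
	unsigned int scale_ceu_h = calc_generic_scale(800, out_width);
	unsigned int scale_ceu_v = calc_generic_scale(600, out_height);

	/* 12. pack horizontal and vertical scales the way CFLCR is written */
	unsigned int cflcr = scale_ceu_h | (scale_ceu_v << 16);

	printf("combined %u:%u, user window %ux%u, CFLCR 0x%08x\n",
	       scale_comb_h, scale_comb_v, out_width, out_height, cflcr);
	return 0;
}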
@@ -1018,121 +1312,137 @@ static int sh_mobile_ceu_set_fmt(struct soc_camera_device *icd, | |||
1018 | struct sh_mobile_ceu_dev *pcdev = ici->priv; | 1312 | struct sh_mobile_ceu_dev *pcdev = ici->priv; |
1019 | struct sh_mobile_ceu_cam *cam = icd->host_priv; | 1313 | struct sh_mobile_ceu_cam *cam = icd->host_priv; |
1020 | struct v4l2_pix_format *pix = &f->fmt.pix; | 1314 | struct v4l2_pix_format *pix = &f->fmt.pix; |
1315 | struct v4l2_format cam_f = *f; | ||
1316 | struct v4l2_pix_format *cam_pix = &cam_f.fmt.pix; | ||
1021 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); | 1317 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); |
1318 | struct device *dev = icd->dev.parent; | ||
1022 | __u32 pixfmt = pix->pixelformat; | 1319 | __u32 pixfmt = pix->pixelformat; |
1023 | const struct soc_camera_format_xlate *xlate; | 1320 | const struct soc_camera_format_xlate *xlate; |
1024 | unsigned int width = pix->width, height = pix->height, tmp_w, tmp_h; | 1321 | struct v4l2_crop cam_crop; |
1025 | u16 vscale, hscale; | 1322 | struct v4l2_rect *cam_rect = &cam_crop.c, cam_subrect, ceu_rect; |
1026 | int ret, is_interlaced; | 1323 | unsigned int scale_cam_h, scale_cam_v; |
1324 | u16 scale_v, scale_h; | ||
1325 | int ret; | ||
1326 | bool is_interlaced, image_mode; | ||
1027 | 1327 | ||
1028 | switch (pix->field) { | 1328 | switch (pix->field) { |
1029 | case V4L2_FIELD_INTERLACED: | 1329 | case V4L2_FIELD_INTERLACED: |
1030 | is_interlaced = 1; | 1330 | is_interlaced = true; |
1031 | break; | 1331 | break; |
1032 | case V4L2_FIELD_ANY: | 1332 | case V4L2_FIELD_ANY: |
1033 | default: | 1333 | default: |
1034 | pix->field = V4L2_FIELD_NONE; | 1334 | pix->field = V4L2_FIELD_NONE; |
1035 | /* fall-through */ | 1335 | /* fall-through */ |
1036 | case V4L2_FIELD_NONE: | 1336 | case V4L2_FIELD_NONE: |
1037 | is_interlaced = 0; | 1337 | is_interlaced = false; |
1038 | break; | 1338 | break; |
1039 | } | 1339 | } |
1040 | 1340 | ||
1041 | xlate = soc_camera_xlate_by_fourcc(icd, pixfmt); | 1341 | xlate = soc_camera_xlate_by_fourcc(icd, pixfmt); |
1042 | if (!xlate) { | 1342 | if (!xlate) { |
1043 | dev_warn(icd->dev.parent, "Format %x not found\n", pixfmt); | 1343 | dev_warn(dev, "Format %x not found\n", pixfmt); |
1044 | return -EINVAL; | 1344 | return -EINVAL; |
1045 | } | 1345 | } |
1046 | 1346 | ||
1047 | pix->pixelformat = xlate->cam_fmt->fourcc; | 1347 | /* 1. Calculate current camera scales. */ |
1048 | ret = v4l2_subdev_call(sd, video, s_fmt, f); | 1348 | cam_crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; |
1049 | pix->pixelformat = pixfmt; | 1349 | |
1050 | dev_dbg(icd->dev.parent, | 1350 | ret = client_g_rect(sd, cam_rect); |
1051 | "Camera %d fmt %ux%u, requested %ux%u, max %ux%u\n", | 1351 | if (ret < 0) |
1052 | ret, pix->width, pix->height, width, height, | 1352 | return ret; |
1053 | icd->rect_max.width, icd->rect_max.height); | 1353 | |
1354 | ret = get_camera_scales(sd, cam_rect, &scale_cam_h, &scale_cam_v); | ||
1355 | if (ret < 0) | ||
1356 | return ret; | ||
1357 | |||
1358 | dev_geo(dev, "1: camera scales %u:%u\n", scale_cam_h, scale_cam_v); | ||
1359 | |||
1360 | /* | ||
1361 | * 2. Calculate "effective" input crop (sensor subwindow) - CEU crop | ||
1362 | * scaled back by the current camera scales onto the input window. | ||
1363 | */ | ||
1364 | ret = get_camera_subwin(icd, &cam_subrect, scale_cam_h, scale_cam_v); | ||
1054 | if (ret < 0) | 1365 | if (ret < 0) |
1055 | return ret; | 1366 | return ret; |
1056 | 1367 | ||
1368 | dev_geo(dev, "2: subwin %ux%u@%u:%u\n", | ||
1369 | cam_subrect.width, cam_subrect.height, | ||
1370 | cam_subrect.left, cam_subrect.top); | ||
1371 | |||
1372 | /* | ||
1373 | * 3. Calculate new combined scales from "effective" input window to | ||
1374 | * requested user window. | ||
1375 | */ | ||
1376 | scale_h = calc_generic_scale(cam_subrect.width, pix->width); | ||
1377 | scale_v = calc_generic_scale(cam_subrect.height, pix->height); | ||
1378 | |||
1379 | dev_geo(dev, "3: scales %u:%u\n", scale_h, scale_v); | ||
1380 | |||
1381 | /* | ||
1382 | * 4. Calculate camera output window by applying combined scales to real | ||
1383 | * input window. | ||
1384 | */ | ||
1385 | cam_pix->width = scale_down(cam_rect->width, scale_h); | ||
1386 | cam_pix->height = scale_down(cam_rect->height, scale_v); | ||
1387 | cam_pix->pixelformat = xlate->cam_fmt->fourcc; | ||
1388 | |||
1057 | switch (pixfmt) { | 1389 | switch (pixfmt) { |
1058 | case V4L2_PIX_FMT_NV12: | 1390 | case V4L2_PIX_FMT_NV12: |
1059 | case V4L2_PIX_FMT_NV21: | 1391 | case V4L2_PIX_FMT_NV21: |
1060 | case V4L2_PIX_FMT_NV16: | 1392 | case V4L2_PIX_FMT_NV16: |
1061 | case V4L2_PIX_FMT_NV61: | 1393 | case V4L2_PIX_FMT_NV61: |
1062 | pcdev->image_mode = 1; | 1394 | image_mode = true; |
1063 | break; | 1395 | break; |
1064 | default: | 1396 | default: |
1065 | pcdev->image_mode = 0; | 1397 | image_mode = false; |
1066 | } | 1398 | } |
1067 | 1399 | ||
1068 | if ((abs(width - pix->width) < 4 && abs(height - pix->height) < 4) || | 1400 | dev_geo(dev, "4: camera output %ux%u\n", |
1069 | !pcdev->image_mode || is_interlaced) { | 1401 | cam_pix->width, cam_pix->height); |
1070 | hscale = 0; | ||
1071 | vscale = 0; | ||
1072 | goto out; | ||
1073 | } | ||
1074 | 1402 | ||
1075 | /* Camera set a format, but geometry is not precise, try to improve */ | 1403 | /* 5. - 9. */ |
1076 | /* | 1404 | ret = client_scale(icd, cam_rect, &cam_subrect, &ceu_rect, &cam_f, |
1077 | * FIXME: when soc-camera is converted to implement traditional S_FMT | 1405 | image_mode && !is_interlaced); |
1078 | * and S_CROP semantics, replace CEU limits with camera maxima | 1406 | |
1079 | */ | 1407 | dev_geo(dev, "5-9: client scale %d\n", ret); |
1080 | tmp_w = pix->width; | 1408 | |
1081 | tmp_h = pix->height; | 1409 | /* Done with the camera. Now see if we can improve the result */ |
1082 | while ((width > tmp_w || height > tmp_h) && | 1410 | |
1083 | tmp_w < 2560 && tmp_h < 1920) { | 1411 | dev_dbg(dev, "Camera %d fmt %ux%u, requested %ux%u\n", |
1084 | tmp_w = min(2 * tmp_w, (__u32)2560); | 1412 | ret, cam_pix->width, cam_pix->height, pix->width, pix->height); |
1085 | tmp_h = min(2 * tmp_h, (__u32)1920); | 1413 | if (ret < 0) |
1086 | pix->width = tmp_w; | 1414 | return ret; |
1087 | pix->height = tmp_h; | 1415 | |
1088 | pix->pixelformat = xlate->cam_fmt->fourcc; | 1416 | /* 10. Use CEU scaling to scale to the requested user window. */ |
1089 | ret = v4l2_subdev_call(sd, video, s_fmt, f); | ||
1090 | pix->pixelformat = pixfmt; | ||
1091 | dev_dbg(icd->dev.parent, "Camera scaled to %ux%u\n", | ||
1092 | pix->width, pix->height); | ||
1093 | if (ret < 0) { | ||
1094 | /* This shouldn't happen */ | ||
1095 | dev_err(icd->dev.parent, | ||
1096 | "Client failed to set format: %d\n", ret); | ||
1097 | return ret; | ||
1098 | } | ||
1099 | } | ||
1100 | 1417 | ||
1101 | /* We cannot scale up */ | 1418 | /* We cannot scale up */ |
1102 | if (width > pix->width) | 1419 | if (pix->width > cam_pix->width) |
1103 | width = pix->width; | 1420 | pix->width = cam_pix->width; |
1421 | if (pix->width > ceu_rect.width) | ||
1422 | pix->width = ceu_rect.width; | ||
1104 | 1423 | ||
1105 | if (height > pix->height) | 1424 | if (pix->height > cam_pix->height) |
1106 | height = pix->height; | 1425 | pix->height = cam_pix->height; |
1426 | if (pix->height > ceu_rect.height) | ||
1427 | pix->height = ceu_rect.height; | ||
1107 | 1428 | ||
1108 | /* Let's rock: scale pix->{width x height} down to width x height */ | 1429 | /* Let's rock: scale pix->{width x height} down to width x height */ |
1109 | hscale = calc_scale(pix->width, &width); | 1430 | scale_h = calc_scale(ceu_rect.width, &pix->width); |
1110 | vscale = calc_scale(pix->height, &height); | 1431 | scale_v = calc_scale(ceu_rect.height, &pix->height); |
1111 | 1432 | ||
1112 | dev_dbg(icd->dev.parent, "W: %u : 0x%x = %u, H: %u : 0x%x = %u\n", | 1433 | dev_geo(dev, "10: W: %u : 0x%x = %u, H: %u : 0x%x = %u\n", |
1113 | pix->width, hscale, width, pix->height, vscale, height); | 1434 | ceu_rect.width, scale_h, pix->width, |
1435 | ceu_rect.height, scale_v, pix->height); | ||
1114 | 1436 | ||
1115 | out: | 1437 | pcdev->cflcr = scale_h | (scale_v << 16); |
1116 | pcdev->cflcr = hscale | (vscale << 16); | ||
1117 | 1438 | ||
1118 | icd->buswidth = xlate->buswidth; | 1439 | icd->buswidth = xlate->buswidth; |
1119 | icd->current_fmt = xlate->host_fmt; | 1440 | icd->current_fmt = xlate->host_fmt; |
1120 | cam->camera_fmt = xlate->cam_fmt; | 1441 | cam->camera_fmt = xlate->cam_fmt; |
1121 | cam->camera_rect.width = pix->width; | 1442 | cam->ceu_rect = ceu_rect; |
1122 | cam->camera_rect.height = pix->height; | ||
1123 | |||
1124 | icd->rect_max.left = size_dst(cam->camera_max.left, hscale); | ||
1125 | icd->rect_max.width = size_dst(cam->camera_max.width, hscale); | ||
1126 | icd->rect_max.top = size_dst(cam->camera_max.top, vscale); | ||
1127 | icd->rect_max.height = size_dst(cam->camera_max.height, vscale); | ||
1128 | |||
1129 | icd->rect_current.left = icd->rect_max.left; | ||
1130 | icd->rect_current.top = icd->rect_max.top; | ||
1131 | 1443 | ||
1132 | pcdev->is_interlaced = is_interlaced; | 1444 | pcdev->is_interlaced = is_interlaced; |
1133 | 1445 | pcdev->image_mode = image_mode; | |
1134 | pix->width = width; | ||
1135 | pix->height = height; | ||
1136 | 1446 | ||
1137 | return 0; | 1447 | return 0; |
1138 | } | 1448 | } |
diff --git a/drivers/media/video/soc_camera.c b/drivers/media/video/soc_camera.c index c6cccdf8daf5..86e0648f65a0 100644 --- a/drivers/media/video/soc_camera.c +++ b/drivers/media/video/soc_camera.c | |||
@@ -278,6 +278,9 @@ static void soc_camera_free_user_formats(struct soc_camera_device *icd) | |||
278 | icd->user_formats = NULL; | 278 | icd->user_formats = NULL; |
279 | } | 279 | } |
280 | 280 | ||
281 | #define pixfmtstr(x) (x) & 0xff, ((x) >> 8) & 0xff, ((x) >> 16) & 0xff, \ | ||
282 | ((x) >> 24) & 0xff | ||
283 | |||
281 | /* Called with .vb_lock held */ | 284 | /* Called with .vb_lock held */ |
282 | static int soc_camera_set_fmt(struct soc_camera_file *icf, | 285 | static int soc_camera_set_fmt(struct soc_camera_file *icf, |
283 | struct v4l2_format *f) | 286 | struct v4l2_format *f) |
@@ -287,6 +290,9 @@ static int soc_camera_set_fmt(struct soc_camera_file *icf, | |||
287 | struct v4l2_pix_format *pix = &f->fmt.pix; | 290 | struct v4l2_pix_format *pix = &f->fmt.pix; |
288 | int ret; | 291 | int ret; |
289 | 292 | ||
293 | dev_dbg(&icd->dev, "S_FMT(%c%c%c%c, %ux%u)\n", | ||
294 | pixfmtstr(pix->pixelformat), pix->width, pix->height); | ||
295 | |||
290 | /* We always call try_fmt() before set_fmt() or set_crop() */ | 296 | /* We always call try_fmt() before set_fmt() or set_crop() */ |
291 | ret = ici->ops->try_fmt(icd, f); | 297 | ret = ici->ops->try_fmt(icd, f); |
292 | if (ret < 0) | 298 | if (ret < 0) |
@@ -302,17 +308,17 @@ static int soc_camera_set_fmt(struct soc_camera_file *icf, | |||
302 | return -EINVAL; | 308 | return -EINVAL; |
303 | } | 309 | } |
304 | 310 | ||
305 | icd->rect_current.width = pix->width; | 311 | icd->user_width = pix->width; |
306 | icd->rect_current.height = pix->height; | 312 | icd->user_height = pix->height; |
307 | icf->vb_vidq.field = | 313 | icf->vb_vidq.field = |
308 | icd->field = pix->field; | 314 | icd->field = pix->field; |
309 | 315 | ||
310 | if (f->type != V4L2_BUF_TYPE_VIDEO_CAPTURE) | 316 | if (f->type != V4L2_BUF_TYPE_VIDEO_CAPTURE) |
311 | dev_warn(&icd->dev, "Attention! Wrong buf-type %d\n", | 317 | dev_warn(&icd->dev, "Attention! Wrong buf-type %d\n", |
312 | f->type); | 318 | f->type); |
313 | 319 | ||
314 | dev_dbg(&icd->dev, "set width: %d height: %d\n", | 320 | dev_dbg(&icd->dev, "set width: %d height: %d\n", |
315 | icd->rect_current.width, icd->rect_current.height); | 321 | icd->user_width, icd->user_height); |
316 | 322 | ||
317 | /* set physical bus parameters */ | 323 | /* set physical bus parameters */ |
318 | return ici->ops->set_bus_param(icd, pix->pixelformat); | 324 | return ici->ops->set_bus_param(icd, pix->pixelformat); |
@@ -355,8 +361,8 @@ static int soc_camera_open(struct file *file) | |||
355 | struct v4l2_format f = { | 361 | struct v4l2_format f = { |
356 | .type = V4L2_BUF_TYPE_VIDEO_CAPTURE, | 362 | .type = V4L2_BUF_TYPE_VIDEO_CAPTURE, |
357 | .fmt.pix = { | 363 | .fmt.pix = { |
358 | .width = icd->rect_current.width, | 364 | .width = icd->user_width, |
359 | .height = icd->rect_current.height, | 365 | .height = icd->user_height, |
360 | .field = icd->field, | 366 | .field = icd->field, |
361 | .pixelformat = icd->current_fmt->fourcc, | 367 | .pixelformat = icd->current_fmt->fourcc, |
362 | .colorspace = icd->current_fmt->colorspace, | 368 | .colorspace = icd->current_fmt->colorspace, |
@@ -557,8 +563,8 @@ static int soc_camera_g_fmt_vid_cap(struct file *file, void *priv, | |||
557 | 563 | ||
558 | WARN_ON(priv != file->private_data); | 564 | WARN_ON(priv != file->private_data); |
559 | 565 | ||
560 | pix->width = icd->rect_current.width; | 566 | pix->width = icd->user_width; |
561 | pix->height = icd->rect_current.height; | 567 | pix->height = icd->user_height; |
562 | pix->field = icf->vb_vidq.field; | 568 | pix->field = icf->vb_vidq.field; |
563 | pix->pixelformat = icd->current_fmt->fourcc; | 569 | pix->pixelformat = icd->current_fmt->fourcc; |
564 | pix->bytesperline = pix->width * | 570 | pix->bytesperline = pix->width * |
@@ -722,17 +728,9 @@ static int soc_camera_cropcap(struct file *file, void *fh, | |||
722 | { | 728 | { |
723 | struct soc_camera_file *icf = file->private_data; | 729 | struct soc_camera_file *icf = file->private_data; |
724 | struct soc_camera_device *icd = icf->icd; | 730 | struct soc_camera_device *icd = icf->icd; |
731 | struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); | ||
725 | 732 | ||
726 | a->type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | 733 | return ici->ops->cropcap(icd, a); |
727 | a->bounds = icd->rect_max; | ||
728 | a->defrect.left = icd->rect_max.left; | ||
729 | a->defrect.top = icd->rect_max.top; | ||
730 | a->defrect.width = DEFAULT_WIDTH; | ||
731 | a->defrect.height = DEFAULT_HEIGHT; | ||
732 | a->pixelaspect.numerator = 1; | ||
733 | a->pixelaspect.denominator = 1; | ||
734 | |||
735 | return 0; | ||
736 | } | 734 | } |
737 | 735 | ||
738 | static int soc_camera_g_crop(struct file *file, void *fh, | 736 | static int soc_camera_g_crop(struct file *file, void *fh, |
@@ -740,11 +738,14 @@ static int soc_camera_g_crop(struct file *file, void *fh, | |||
740 | { | 738 | { |
741 | struct soc_camera_file *icf = file->private_data; | 739 | struct soc_camera_file *icf = file->private_data; |
742 | struct soc_camera_device *icd = icf->icd; | 740 | struct soc_camera_device *icd = icf->icd; |
741 | struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); | ||
742 | int ret; | ||
743 | 743 | ||
744 | a->type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | 744 | mutex_lock(&icf->vb_vidq.vb_lock); |
745 | a->c = icd->rect_current; | 745 | ret = ici->ops->get_crop(icd, a); |
746 | mutex_unlock(&icf->vb_vidq.vb_lock); | ||
746 | 747 | ||
747 | return 0; | 748 | return ret; |
748 | } | 749 | } |
749 | 750 | ||
750 | /* | 751 | /* |
@@ -759,49 +760,33 @@ static int soc_camera_s_crop(struct file *file, void *fh, | |||
759 | struct soc_camera_file *icf = file->private_data; | 760 | struct soc_camera_file *icf = file->private_data; |
760 | struct soc_camera_device *icd = icf->icd; | 761 | struct soc_camera_device *icd = icf->icd; |
761 | struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); | 762 | struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); |
762 | struct v4l2_rect rect = a->c; | 763 | struct v4l2_rect *rect = &a->c; |
764 | struct v4l2_crop current_crop; | ||
763 | int ret; | 765 | int ret; |
764 | 766 | ||
765 | if (a->type != V4L2_BUF_TYPE_VIDEO_CAPTURE) | 767 | if (a->type != V4L2_BUF_TYPE_VIDEO_CAPTURE) |
766 | return -EINVAL; | 768 | return -EINVAL; |
767 | 769 | ||
770 | dev_dbg(&icd->dev, "S_CROP(%ux%u@%u:%u)\n", | ||
771 | rect->width, rect->height, rect->left, rect->top); | ||
772 | |||
768 | /* Cropping is allowed during a running capture, guard consistency */ | 773 | /* Cropping is allowed during a running capture, guard consistency */ |
769 | mutex_lock(&icf->vb_vidq.vb_lock); | 774 | mutex_lock(&icf->vb_vidq.vb_lock); |
770 | 775 | ||
776 | /* If get_crop fails, we'll let host and / or client drivers decide */ | ||
777 | ret = ici->ops->get_crop(icd, ¤t_crop); | ||
778 | |||
771 | /* Prohibit window size change with initialised buffers */ | 779 | /* Prohibit window size change with initialised buffers */ |
772 | if (icf->vb_vidq.bufs[0] && (rect.width != icd->rect_current.width || | 780 | if (icf->vb_vidq.bufs[0] && !ret && |
773 | rect.height != icd->rect_current.height)) { | 781 | (a->c.width != current_crop.c.width || |
782 | a->c.height != current_crop.c.height)) { | ||
774 | dev_err(&icd->dev, | 783 | dev_err(&icd->dev, |
775 | "S_CROP denied: queue initialised and sizes differ\n"); | 784 | "S_CROP denied: queue initialised and sizes differ\n"); |
776 | ret = -EBUSY; | 785 | ret = -EBUSY; |
777 | goto unlock; | 786 | } else { |
787 | ret = ici->ops->set_crop(icd, a); | ||
778 | } | 788 | } |
779 | 789 | ||
780 | if (rect.width > icd->rect_max.width) | ||
781 | rect.width = icd->rect_max.width; | ||
782 | |||
783 | if (rect.width < icd->width_min) | ||
784 | rect.width = icd->width_min; | ||
785 | |||
786 | if (rect.height > icd->rect_max.height) | ||
787 | rect.height = icd->rect_max.height; | ||
788 | |||
789 | if (rect.height < icd->height_min) | ||
790 | rect.height = icd->height_min; | ||
791 | |||
792 | if (rect.width + rect.left > icd->rect_max.width + icd->rect_max.left) | ||
793 | rect.left = icd->rect_max.width + icd->rect_max.left - | ||
794 | rect.width; | ||
795 | |||
796 | if (rect.height + rect.top > icd->rect_max.height + icd->rect_max.top) | ||
797 | rect.top = icd->rect_max.height + icd->rect_max.top - | ||
798 | rect.height; | ||
799 | |||
800 | ret = ici->ops->set_crop(icd, a); | ||
801 | if (!ret) | ||
802 | icd->rect_current = rect; | ||
803 | |||
804 | unlock: | ||
805 | mutex_unlock(&icf->vb_vidq.vb_lock); | 790 | mutex_unlock(&icf->vb_vidq.vb_lock); |
806 | 791 | ||
807 | return ret; | 792 | return ret; |
@@ -926,6 +911,8 @@ static int soc_camera_probe(struct device *dev) | |||
926 | struct soc_camera_host *ici = to_soc_camera_host(dev->parent); | 911 | struct soc_camera_host *ici = to_soc_camera_host(dev->parent); |
927 | struct soc_camera_link *icl = to_soc_camera_link(icd); | 912 | struct soc_camera_link *icl = to_soc_camera_link(icd); |
928 | struct device *control = NULL; | 913 | struct device *control = NULL; |
914 | struct v4l2_subdev *sd; | ||
915 | struct v4l2_format f = {.type = V4L2_BUF_TYPE_VIDEO_CAPTURE}; | ||
929 | int ret; | 916 | int ret; |
930 | 917 | ||
931 | dev_info(dev, "Probing %s\n", dev_name(dev)); | 918 | dev_info(dev, "Probing %s\n", dev_name(dev)); |
@@ -982,7 +969,6 @@ static int soc_camera_probe(struct device *dev) | |||
982 | if (ret < 0) | 969 | if (ret < 0) |
983 | goto eiufmt; | 970 | goto eiufmt; |
984 | 971 | ||
985 | icd->rect_current = icd->rect_max; | ||
986 | icd->field = V4L2_FIELD_ANY; | 972 | icd->field = V4L2_FIELD_ANY; |
987 | 973 | ||
988 | /* ..._video_start() will create a device node, so we have to protect */ | 974 | /* ..._video_start() will create a device node, so we have to protect */ |
@@ -992,9 +978,15 @@ static int soc_camera_probe(struct device *dev) | |||
992 | if (ret < 0) | 978 | if (ret < 0) |
993 | goto evidstart; | 979 | goto evidstart; |
994 | 980 | ||
981 | /* Try to improve our guess of a reasonable window format */ | ||
982 | sd = soc_camera_to_subdev(icd); | ||
983 | if (!v4l2_subdev_call(sd, video, g_fmt, &f)) { | ||
984 | icd->user_width = f.fmt.pix.width; | ||
985 | icd->user_height = f.fmt.pix.height; | ||
986 | } | ||
987 | |||
995 | /* Do we have to sysfs_remove_link() before device_unregister()? */ | 988 | /* Do we have to sysfs_remove_link() before device_unregister()? */ |
996 | if (to_soc_camera_control(icd) && | 989 | if (sysfs_create_link(&icd->dev.kobj, &to_soc_camera_control(icd)->kobj, |
997 | sysfs_create_link(&icd->dev.kobj, &to_soc_camera_control(icd)->kobj, | ||
998 | "control")) | 990 | "control")) |
999 | dev_warn(&icd->dev, "Failed creating the control symlink\n"); | 991 | dev_warn(&icd->dev, "Failed creating the control symlink\n"); |
1000 | 992 | ||
@@ -1103,6 +1095,25 @@ static void dummy_release(struct device *dev) | |||
1103 | { | 1095 | { |
1104 | } | 1096 | } |
1105 | 1097 | ||
1098 | static int default_cropcap(struct soc_camera_device *icd, | ||
1099 | struct v4l2_cropcap *a) | ||
1100 | { | ||
1101 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); | ||
1102 | return v4l2_subdev_call(sd, video, cropcap, a); | ||
1103 | } | ||
1104 | |||
1105 | static int default_g_crop(struct soc_camera_device *icd, struct v4l2_crop *a) | ||
1106 | { | ||
1107 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); | ||
1108 | return v4l2_subdev_call(sd, video, g_crop, a); | ||
1109 | } | ||
1110 | |||
1111 | static int default_s_crop(struct soc_camera_device *icd, struct v4l2_crop *a) | ||
1112 | { | ||
1113 | struct v4l2_subdev *sd = soc_camera_to_subdev(icd); | ||
1114 | return v4l2_subdev_call(sd, video, s_crop, a); | ||
1115 | } | ||
1116 | |||
1106 | int soc_camera_host_register(struct soc_camera_host *ici) | 1117 | int soc_camera_host_register(struct soc_camera_host *ici) |
1107 | { | 1118 | { |
1108 | struct soc_camera_host *ix; | 1119 | struct soc_camera_host *ix; |
@@ -1111,7 +1122,6 @@ int soc_camera_host_register(struct soc_camera_host *ici) | |||
1111 | if (!ici || !ici->ops || | 1122 | if (!ici || !ici->ops || |
1112 | !ici->ops->try_fmt || | 1123 | !ici->ops->try_fmt || |
1113 | !ici->ops->set_fmt || | 1124 | !ici->ops->set_fmt || |
1114 | !ici->ops->set_crop || | ||
1115 | !ici->ops->set_bus_param || | 1125 | !ici->ops->set_bus_param || |
1116 | !ici->ops->querycap || | 1126 | !ici->ops->querycap || |
1117 | !ici->ops->init_videobuf || | 1127 | !ici->ops->init_videobuf || |
@@ -1122,6 +1132,13 @@ int soc_camera_host_register(struct soc_camera_host *ici) | |||
1122 | !ici->v4l2_dev.dev) | 1132 | !ici->v4l2_dev.dev) |
1123 | return -EINVAL; | 1133 | return -EINVAL; |
1124 | 1134 | ||
1135 | if (!ici->ops->set_crop) | ||
1136 | ici->ops->set_crop = default_s_crop; | ||
1137 | if (!ici->ops->get_crop) | ||
1138 | ici->ops->get_crop = default_g_crop; | ||
1139 | if (!ici->ops->cropcap) | ||
1140 | ici->ops->cropcap = default_cropcap; | ||
1141 | |||
1125 | mutex_lock(&list_lock); | 1142 | mutex_lock(&list_lock); |
1126 | list_for_each_entry(ix, &hosts, list) { | 1143 | list_for_each_entry(ix, &hosts, list) { |
1127 | if (ix->nr == ici->nr) { | 1144 | if (ix->nr == ici->nr) { |
@@ -1321,6 +1338,9 @@ static int __devinit soc_camera_pdrv_probe(struct platform_device *pdev) | |||
1321 | if (ret < 0) | 1338 | if (ret < 0) |
1322 | goto escdevreg; | 1339 | goto escdevreg; |
1323 | 1340 | ||
1341 | icd->user_width = DEFAULT_WIDTH; | ||
1342 | icd->user_height = DEFAULT_HEIGHT; | ||
1343 | |||
1324 | return 0; | 1344 | return 0; |
1325 | 1345 | ||
1326 | escdevreg: | 1346 | escdevreg: |
diff --git a/drivers/media/video/soc_camera_platform.c b/drivers/media/video/soc_camera_platform.c index aec2cadbd2ee..3825c358172f 100644 --- a/drivers/media/video/soc_camera_platform.c +++ b/drivers/media/video/soc_camera_platform.c | |||
@@ -127,10 +127,6 @@ static int soc_camera_platform_probe(struct platform_device *pdev) | |||
127 | /* Set the control device reference */ | 127 | /* Set the control device reference */ |
128 | dev_set_drvdata(&icd->dev, &pdev->dev); | 128 | dev_set_drvdata(&icd->dev, &pdev->dev); |
129 | 129 | ||
130 | icd->width_min = 0; | ||
131 | icd->rect_max.width = p->format.width; | ||
132 | icd->height_min = 0; | ||
133 | icd->rect_max.height = p->format.height; | ||
134 | icd->y_skip_top = 0; | 130 | icd->y_skip_top = 0; |
135 | icd->ops = &soc_camera_platform_ops; | 131 | icd->ops = &soc_camera_platform_ops; |
136 | 132 | ||
diff --git a/drivers/media/video/tw9910.c b/drivers/media/video/tw9910.c index 94bd5b09f057..fbf4130dfc5d 100644 --- a/drivers/media/video/tw9910.c +++ b/drivers/media/video/tw9910.c | |||
@@ -715,8 +715,88 @@ tw9910_set_fmt_error: | |||
715 | return ret; | 715 | return ret; |
716 | } | 716 | } |
717 | 717 | ||
718 | static int tw9910_g_crop(struct v4l2_subdev *sd, struct v4l2_crop *a) | ||
719 | { | ||
720 | struct i2c_client *client = sd->priv; | ||
721 | struct tw9910_priv *priv = to_tw9910(client); | ||
722 | |||
723 | if (!priv->scale) { | ||
724 | int ret; | ||
725 | struct v4l2_crop crop = { | ||
726 | .c = { | ||
727 | .left = 0, | ||
728 | .top = 0, | ||
729 | .width = 640, | ||
730 | .height = 480, | ||
731 | }, | ||
732 | }; | ||
733 | ret = tw9910_s_crop(sd, &crop); | ||
734 | if (ret < 0) | ||
735 | return ret; | ||
736 | } | ||
737 | |||
738 | a->c.left = 0; | ||
739 | a->c.top = 0; | ||
740 | a->c.width = priv->scale->width; | ||
741 | a->c.height = priv->scale->height; | ||
742 | a->type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | ||
743 | |||
744 | return 0; | ||
745 | } | ||
746 | |||
747 | static int tw9910_cropcap(struct v4l2_subdev *sd, struct v4l2_cropcap *a) | ||
748 | { | ||
749 | a->bounds.left = 0; | ||
750 | a->bounds.top = 0; | ||
751 | a->bounds.width = 768; | ||
752 | a->bounds.height = 576; | ||
753 | a->defrect.left = 0; | ||
754 | a->defrect.top = 0; | ||
755 | a->defrect.width = 640; | ||
756 | a->defrect.height = 480; | ||
757 | a->type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | ||
758 | a->pixelaspect.numerator = 1; | ||
759 | a->pixelaspect.denominator = 1; | ||
760 | |||
761 | return 0; | ||
762 | } | ||
763 | |||
764 | static int tw9910_g_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | ||
765 | { | ||
766 | struct i2c_client *client = sd->priv; | ||
767 | struct tw9910_priv *priv = to_tw9910(client); | ||
768 | struct v4l2_pix_format *pix = &f->fmt.pix; | ||
769 | |||
770 | if (!priv->scale) { | ||
771 | int ret; | ||
772 | struct v4l2_crop crop = { | ||
773 | .c = { | ||
774 | .left = 0, | ||
775 | .top = 0, | ||
776 | .width = 640, | ||
777 | .height = 480, | ||
778 | }, | ||
779 | }; | ||
780 | ret = tw9910_s_crop(sd, &crop); | ||
781 | if (ret < 0) | ||
782 | return ret; | ||
783 | } | ||
784 | |||
785 | f->type = V4L2_BUF_TYPE_VIDEO_CAPTURE; | ||
786 | |||
787 | pix->width = priv->scale->width; | ||
788 | pix->height = priv->scale->height; | ||
789 | pix->pixelformat = V4L2_PIX_FMT_VYUY; | ||
790 | pix->colorspace = V4L2_COLORSPACE_SMPTE170M; | ||
791 | pix->field = V4L2_FIELD_INTERLACED; | ||
792 | |||
793 | return 0; | ||
794 | } | ||
795 | |||
718 | static int tw9910_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | 796 | static int tw9910_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) |
719 | { | 797 | { |
798 | struct i2c_client *client = sd->priv; | ||
799 | struct tw9910_priv *priv = to_tw9910(client); | ||
720 | struct v4l2_pix_format *pix = &f->fmt.pix; | 800 | struct v4l2_pix_format *pix = &f->fmt.pix; |
721 | /* See tw9910_s_crop() - no proper cropping support */ | 801 | /* See tw9910_s_crop() - no proper cropping support */ |
722 | struct v4l2_crop a = { | 802 | struct v4l2_crop a = { |
@@ -741,8 +821,8 @@ static int tw9910_s_fmt(struct v4l2_subdev *sd, struct v4l2_format *f) | |||
741 | 821 | ||
742 | ret = tw9910_s_crop(sd, &a); | 822 | ret = tw9910_s_crop(sd, &a); |
743 | if (!ret) { | 823 | if (!ret) { |
744 | pix->width = a.c.width; | 824 | pix->width = priv->scale->width; |
745 | pix->height = a.c.height; | 825 | pix->height = priv->scale->height; |
746 | } | 826 | } |
747 | return ret; | 827 | return ret; |
748 | } | 828 | } |
@@ -838,8 +918,11 @@ static struct v4l2_subdev_core_ops tw9910_subdev_core_ops = { | |||
838 | 918 | ||
839 | static struct v4l2_subdev_video_ops tw9910_subdev_video_ops = { | 919 | static struct v4l2_subdev_video_ops tw9910_subdev_video_ops = { |
840 | .s_stream = tw9910_s_stream, | 920 | .s_stream = tw9910_s_stream, |
921 | .g_fmt = tw9910_g_fmt, | ||
841 | .s_fmt = tw9910_s_fmt, | 922 | .s_fmt = tw9910_s_fmt, |
842 | .try_fmt = tw9910_try_fmt, | 923 | .try_fmt = tw9910_try_fmt, |
924 | .cropcap = tw9910_cropcap, | ||
925 | .g_crop = tw9910_g_crop, | ||
843 | .s_crop = tw9910_s_crop, | 926 | .s_crop = tw9910_s_crop, |
844 | }; | 927 | }; |
845 | 928 | ||
@@ -852,20 +935,6 @@ static struct v4l2_subdev_ops tw9910_subdev_ops = { | |||
852 | * i2c_driver function | 935 | * i2c_driver function |
853 | */ | 936 | */ |
854 | 937 | ||
855 | /* This is called during probe, so, setting rect_max is Ok here: scale == 1 */ | ||
856 | static void limit_to_scale(struct soc_camera_device *icd, | ||
857 | const struct tw9910_scale_ctrl *scale) | ||
858 | { | ||
859 | if (scale->width > icd->rect_max.width) | ||
860 | icd->rect_max.width = scale->width; | ||
861 | if (scale->width < icd->width_min) | ||
862 | icd->width_min = scale->width; | ||
863 | if (scale->height > icd->rect_max.height) | ||
864 | icd->rect_max.height = scale->height; | ||
865 | if (scale->height < icd->height_min) | ||
866 | icd->height_min = scale->height; | ||
867 | } | ||
868 | |||
869 | static int tw9910_probe(struct i2c_client *client, | 938 | static int tw9910_probe(struct i2c_client *client, |
870 | const struct i2c_device_id *did) | 939 | const struct i2c_device_id *did) |
871 | 940 | ||
@@ -876,8 +945,7 @@ static int tw9910_probe(struct i2c_client *client, | |||
876 | struct i2c_adapter *adapter = | 945 | struct i2c_adapter *adapter = |
877 | to_i2c_adapter(client->dev.parent); | 946 | to_i2c_adapter(client->dev.parent); |
878 | struct soc_camera_link *icl; | 947 | struct soc_camera_link *icl; |
879 | const struct tw9910_scale_ctrl *scale; | 948 | int ret; |
880 | int i, ret; | ||
881 | 949 | ||
882 | if (!icd) { | 950 | if (!icd) { |
883 | dev_err(&client->dev, "TW9910: missing soc-camera data!\n"); | 951 | dev_err(&client->dev, "TW9910: missing soc-camera data!\n"); |
@@ -908,22 +976,6 @@ static int tw9910_probe(struct i2c_client *client, | |||
908 | icd->ops = &tw9910_ops; | 976 | icd->ops = &tw9910_ops; |
909 | icd->iface = info->link.bus_id; | 977 | icd->iface = info->link.bus_id; |
910 | 978 | ||
911 | /* | ||
912 | * set width and height | ||
913 | */ | ||
914 | icd->rect_max.width = tw9910_ntsc_scales[0].width; /* set default */ | ||
915 | icd->width_min = tw9910_ntsc_scales[0].width; | ||
916 | icd->rect_max.height = tw9910_ntsc_scales[0].height; | ||
917 | icd->height_min = tw9910_ntsc_scales[0].height; | ||
918 | |||
919 | scale = tw9910_ntsc_scales; | ||
920 | for (i = 0; i < ARRAY_SIZE(tw9910_ntsc_scales); i++) | ||
921 | limit_to_scale(icd, scale + i); | ||
922 | |||
923 | scale = tw9910_pal_scales; | ||
924 | for (i = 0; i < ARRAY_SIZE(tw9910_pal_scales); i++) | ||
925 | limit_to_scale(icd, scale + i); | ||
926 | |||
927 | ret = tw9910_video_probe(icd, client); | 979 | ret = tw9910_video_probe(icd, client); |
928 | if (ret) { | 980 | if (ret) { |
929 | icd->ops = NULL; | 981 | icd->ops = NULL; |
diff --git a/include/media/soc_camera.h b/include/media/soc_camera.h index 344d89904774..3185e8daaa0a 100644 --- a/include/media/soc_camera.h +++ b/include/media/soc_camera.h | |||
@@ -22,8 +22,8 @@ struct soc_camera_device { | |||
22 | struct list_head list; | 22 | struct list_head list; |
23 | struct device dev; | 23 | struct device dev; |
24 | struct device *pdev; /* Platform device */ | 24 | struct device *pdev; /* Platform device */ |
25 | struct v4l2_rect rect_current; /* Current window */ | 25 | s32 user_width; |
26 | struct v4l2_rect rect_max; /* Maximum window */ | 26 | s32 user_height; |
27 | unsigned short width_min; | 27 | unsigned short width_min; |
28 | unsigned short height_min; | 28 | unsigned short height_min; |
29 | unsigned short y_skip_top; /* Lines to skip at the top */ | 29 | unsigned short y_skip_top; /* Lines to skip at the top */ |
@@ -76,6 +76,8 @@ struct soc_camera_host_ops { | |||
76 | int (*get_formats)(struct soc_camera_device *, int, | 76 | int (*get_formats)(struct soc_camera_device *, int, |
77 | struct soc_camera_format_xlate *); | 77 | struct soc_camera_format_xlate *); |
78 | void (*put_formats)(struct soc_camera_device *); | 78 | void (*put_formats)(struct soc_camera_device *); |
79 | int (*cropcap)(struct soc_camera_device *, struct v4l2_cropcap *); | ||
80 | int (*get_crop)(struct soc_camera_device *, struct v4l2_crop *); | ||
79 | int (*set_crop)(struct soc_camera_device *, struct v4l2_crop *); | 81 | int (*set_crop)(struct soc_camera_device *, struct v4l2_crop *); |
80 | int (*set_fmt)(struct soc_camera_device *, struct v4l2_format *); | 82 | int (*set_fmt)(struct soc_camera_device *, struct v4l2_format *); |
81 | int (*try_fmt)(struct soc_camera_device *, struct v4l2_format *); | 83 | int (*try_fmt)(struct soc_camera_device *, struct v4l2_format *); |
@@ -277,6 +279,21 @@ static inline unsigned long soc_camera_bus_param_compatible( | |||
277 | common_flags; | 279 | common_flags; |
278 | } | 280 | } |
279 | 281 | ||
282 | static inline void soc_camera_limit_side(unsigned int *start, | ||
283 | unsigned int *length, unsigned int start_min, | ||
284 | unsigned int length_min, unsigned int length_max) | ||
285 | { | ||
286 | if (*length < length_min) | ||
287 | *length = length_min; | ||
288 | else if (*length > length_max) | ||
289 | *length = length_max; | ||
290 | |||
291 | if (*start < start_min) | ||
292 | *start = start_min; | ||
293 | else if (*start > start_min + length_max - *length) | ||
294 | *start = start_min + length_max - *length; | ||
295 | } | ||
296 | |||
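A short usage sketch of the new inline helper: the body is copied from soc_camera_limit_side() above into a userspace function so it compiles standalone, and the 1280-pixel sensor line is only an example. A window that would hang over the right edge is slid back in, and one that is too small or too large is clamped first:

#include <stdio.h>

/* Body copied from soc_camera_limit_side() above */
static void limit_side(unsigned int *start, unsigned int *length,
		       unsigned int start_min, unsigned int length_min,
		       unsigned int length_max)
{
	if (*length < length_min)
		*length = length_min;
	else if (*length > length_max)
		*length = length_max;

	if (*start < start_min)
		*start = start_min;
	else if (*start > start_min + length_max - *length)
		*start = start_min + length_max - *length;
}

int main(void)
{
	/* 800 pixels starting at 600 on a 0..1279 line: start slides to 480 */
	unsigned int left = 600, width = 800;

	limit_side(&left, &width, 0, 2, 1280);
	printf("left %u, width %u\n", left, width);
	return 0;
}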
280 | extern unsigned long soc_camera_apply_sensor_flags(struct soc_camera_link *icl, | 297 | extern unsigned long soc_camera_apply_sensor_flags(struct soc_camera_link *icl, |
281 | unsigned long flags); | 298 | unsigned long flags); |
282 | 299 | ||