bcm27xx: 6.1: add kernel patches
[openwrt/staging/nbd.git] target/linux/bcm27xx/patches-6.1/950-0345-media-bcm2835-unicam-Add-support-for-configuration-v.patch
1 From 9556f5777a8961dec3259f4c09c267ca5c77c90a Mon Sep 17 00:00:00 2001
2 From: Dave Stevenson <dave.stevenson@raspberrypi.com>
3 Date: Fri, 15 Oct 2021 17:57:27 +0100
4 Subject: [PATCH] media/bcm2835-unicam: Add support for configuration
5 via MC API
6
7 Adds Media Controller API support for more complex pipelines.
8 libcamera is about to switch to using this mechanism for configuring
9 sensors.
10
11 This can be enabled by either a module parameter or device tree.
12
13 Various functions have been moved to group video-centric and
14 mc-centric functions together.
15
16 Based on a similar conversion done to ti-vpe.
17
18 Signed-off-by: Dave Stevenson <dave.stevenson@raspberrypi.com>
19
20 media: bcm2835-unicam: Fixup for 5.18 and new get_mbus_config struct
21
22 The number of active CSI2 data lanes has moved within the struct
23 v4l2_mbus_config used by the get_mbus_config API call.
24 Update the driver to match the changes in mainline.
25
26 Signed-off-by: Dave Stevenson <dave.stevenson@raspberrypi.com>
27 ---
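
Note for testers (not part of the commit message): a minimal sketch of how the
MC API gets opted into, based on the module_param() and of_property_read_bool()
calls added below. The media_controller=1 module parameter and the
"brcm,media-controller" property names come straight from this patch; the node
label used in the fragment below is an assumption, not something this patch
defines.

    /* Hypothetical overlay fragment; only the property name is from the patch */
    &csi1 {
            brcm,media-controller;
    };

Alternatively, loading the driver with media_controller=1 (e.g. on the kernel
command line, module name assumed) selects the same behaviour at runtime.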
28 .../media/platform/bcm2835/bcm2835-unicam.c | 2111 ++++++++++-------
29 1 file changed, 1306 insertions(+), 805 deletions(-)
30
31 --- a/drivers/media/platform/bcm2835/bcm2835-unicam.c
32 +++ b/drivers/media/platform/bcm2835/bcm2835-unicam.c
33 @@ -83,6 +83,10 @@ static int debug;
34 module_param(debug, int, 0644);
35 MODULE_PARM_DESC(debug, "Debug level 0-3");
36
37 +static int media_controller;
38 +module_param(media_controller, int, 0644);
39 +MODULE_PARM_DESC(media_controller, "Use media controller API");
40 +
41 #define unicam_dbg(level, dev, fmt, arg...) \
42 v4l2_dbg(level, debug, &(dev)->v4l2_dev, fmt, ##arg)
43 #define unicam_info(dev, fmt, arg...) \
44 @@ -119,7 +123,7 @@ MODULE_PARM_DESC(debug, "Debug level 0-3
45 #define MIN_WIDTH 16
46 #define MIN_HEIGHT 16
47 /* Default size of the embedded buffer */
48 -#define UNICAM_EMBEDDED_SIZE 8192
49 +#define UNICAM_EMBEDDED_SIZE 16384
50
51 /*
52 * Size of the dummy buffer. Can be any size really, but the DMA
53 @@ -133,6 +137,22 @@ enum pad_types {
54 MAX_NODES
55 };
56
57 +#define MASK_CS_DEFAULT BIT(V4L2_COLORSPACE_DEFAULT)
58 +#define MASK_CS_SMPTE170M BIT(V4L2_COLORSPACE_SMPTE170M)
59 +#define MASK_CS_SMPTE240M BIT(V4L2_COLORSPACE_SMPTE240M)
60 +#define MASK_CS_REC709 BIT(V4L2_COLORSPACE_REC709)
61 +#define MASK_CS_BT878 BIT(V4L2_COLORSPACE_BT878)
62 +#define MASK_CS_470_M BIT(V4L2_COLORSPACE_470_SYSTEM_M)
63 +#define MASK_CS_470_BG BIT(V4L2_COLORSPACE_470_SYSTEM_BG)
64 +#define MASK_CS_JPEG BIT(V4L2_COLORSPACE_JPEG)
65 +#define MASK_CS_SRGB BIT(V4L2_COLORSPACE_SRGB)
66 +#define MASK_CS_OPRGB BIT(V4L2_COLORSPACE_OPRGB)
67 +#define MASK_CS_BT2020 BIT(V4L2_COLORSPACE_BT2020)
68 +#define MASK_CS_RAW BIT(V4L2_COLORSPACE_RAW)
69 +#define MASK_CS_DCI_P3 BIT(V4L2_COLORSPACE_DCI_P3)
70 +
71 +#define MAX_COLORSPACE 32
72 +
73 /*
74 * struct unicam_fmt - Unicam media bus format information
75 * @pixelformat: V4L2 pixel format FCC identifier. 0 if n/a.
76 @@ -141,8 +161,14 @@ enum pad_types {
77 * @code: V4L2 media bus format code.
78 * @depth: Bits per pixel as delivered from the source.
79 * @csi_dt: CSI data type.
80 + * @valid_colorspaces: Bitmask of valid colorspaces so that the Media Controller
81 + * centric try_fmt can validate the colorspace and pass
82 + * v4l2-compliance.
83 * @check_variants: Flag to denote that there are multiple mediabus formats
84 * still in the list that could match this V4L2 format.
85 + * @mc_skip: Media Controller shouldn't list this format via ENUM_FMT as it is
86 + * a duplicate of an earlier format.
87 + * @metadata_fmt: This format only applies to the metadata pad.
88 */
89 struct unicam_fmt {
90 u32 fourcc;
91 @@ -150,7 +176,10 @@ struct unicam_fmt {
92 u32 code;
93 u8 depth;
94 u8 csi_dt;
95 - u8 check_variants;
96 + u32 valid_colorspaces;
97 + u8 check_variants:1;
98 + u8 mc_skip:1;
99 + u8 metadata_fmt:1;
100 };
101
102 static const struct unicam_fmt formats[] = {
103 @@ -161,173 +190,216 @@ static const struct unicam_fmt formats[]
104 .depth = 16,
105 .csi_dt = 0x1e,
106 .check_variants = 1,
107 + .valid_colorspaces = MASK_CS_SMPTE170M | MASK_CS_REC709 |
108 + MASK_CS_JPEG,
109 }, {
110 .fourcc = V4L2_PIX_FMT_UYVY,
111 .code = MEDIA_BUS_FMT_UYVY8_2X8,
112 .depth = 16,
113 .csi_dt = 0x1e,
114 .check_variants = 1,
115 + .valid_colorspaces = MASK_CS_SMPTE170M | MASK_CS_REC709 |
116 + MASK_CS_JPEG,
117 }, {
118 .fourcc = V4L2_PIX_FMT_YVYU,
119 .code = MEDIA_BUS_FMT_YVYU8_2X8,
120 .depth = 16,
121 .csi_dt = 0x1e,
122 .check_variants = 1,
123 + .valid_colorspaces = MASK_CS_SMPTE170M | MASK_CS_REC709 |
124 + MASK_CS_JPEG,
125 }, {
126 .fourcc = V4L2_PIX_FMT_VYUY,
127 .code = MEDIA_BUS_FMT_VYUY8_2X8,
128 .depth = 16,
129 .csi_dt = 0x1e,
130 .check_variants = 1,
131 + .valid_colorspaces = MASK_CS_SMPTE170M | MASK_CS_REC709 |
132 + MASK_CS_JPEG,
133 }, {
134 .fourcc = V4L2_PIX_FMT_YUYV,
135 .code = MEDIA_BUS_FMT_YUYV8_1X16,
136 .depth = 16,
137 .csi_dt = 0x1e,
138 + .mc_skip = 1,
139 + .valid_colorspaces = MASK_CS_SMPTE170M | MASK_CS_REC709 |
140 + MASK_CS_JPEG,
141 }, {
142 .fourcc = V4L2_PIX_FMT_UYVY,
143 .code = MEDIA_BUS_FMT_UYVY8_1X16,
144 .depth = 16,
145 .csi_dt = 0x1e,
146 + .mc_skip = 1,
147 + .valid_colorspaces = MASK_CS_SMPTE170M | MASK_CS_REC709 |
148 + MASK_CS_JPEG,
149 }, {
150 .fourcc = V4L2_PIX_FMT_YVYU,
151 .code = MEDIA_BUS_FMT_YVYU8_1X16,
152 .depth = 16,
153 .csi_dt = 0x1e,
154 + .mc_skip = 1,
155 + .valid_colorspaces = MASK_CS_SMPTE170M | MASK_CS_REC709 |
156 + MASK_CS_JPEG,
157 }, {
158 .fourcc = V4L2_PIX_FMT_VYUY,
159 .code = MEDIA_BUS_FMT_VYUY8_1X16,
160 .depth = 16,
161 .csi_dt = 0x1e,
162 + .mc_skip = 1,
163 + .valid_colorspaces = MASK_CS_SMPTE170M | MASK_CS_REC709 |
164 + MASK_CS_JPEG,
165 }, {
166 /* RGB Formats */
167 .fourcc = V4L2_PIX_FMT_RGB565, /* gggbbbbb rrrrrggg */
168 .code = MEDIA_BUS_FMT_RGB565_2X8_LE,
169 .depth = 16,
170 .csi_dt = 0x22,
171 + .valid_colorspaces = MASK_CS_SRGB,
172 }, {
173 .fourcc = V4L2_PIX_FMT_RGB565X, /* rrrrrggg gggbbbbb */
174 .code = MEDIA_BUS_FMT_RGB565_2X8_BE,
175 .depth = 16,
176 - .csi_dt = 0x22
177 + .csi_dt = 0x22,
178 + .valid_colorspaces = MASK_CS_SRGB,
179 }, {
180 .fourcc = V4L2_PIX_FMT_RGB555, /* gggbbbbb arrrrrgg */
181 .code = MEDIA_BUS_FMT_RGB555_2X8_PADHI_LE,
182 .depth = 16,
183 .csi_dt = 0x21,
184 + .valid_colorspaces = MASK_CS_SRGB,
185 }, {
186 .fourcc = V4L2_PIX_FMT_RGB555X, /* arrrrrgg gggbbbbb */
187 .code = MEDIA_BUS_FMT_RGB555_2X8_PADHI_BE,
188 .depth = 16,
189 .csi_dt = 0x21,
190 + .valid_colorspaces = MASK_CS_SRGB,
191 }, {
192 .fourcc = V4L2_PIX_FMT_RGB24, /* rgb */
193 .code = MEDIA_BUS_FMT_RGB888_1X24,
194 .depth = 24,
195 .csi_dt = 0x24,
196 + .valid_colorspaces = MASK_CS_SRGB,
197 }, {
198 .fourcc = V4L2_PIX_FMT_BGR24, /* bgr */
199 .code = MEDIA_BUS_FMT_BGR888_1X24,
200 .depth = 24,
201 .csi_dt = 0x24,
202 + .valid_colorspaces = MASK_CS_SRGB,
203 }, {
204 .fourcc = V4L2_PIX_FMT_RGB32, /* argb */
205 .code = MEDIA_BUS_FMT_ARGB8888_1X32,
206 .depth = 32,
207 .csi_dt = 0x0,
208 + .valid_colorspaces = MASK_CS_SRGB,
209 }, {
210 /* Bayer Formats */
211 .fourcc = V4L2_PIX_FMT_SBGGR8,
212 .code = MEDIA_BUS_FMT_SBGGR8_1X8,
213 .depth = 8,
214 .csi_dt = 0x2a,
215 + .valid_colorspaces = MASK_CS_RAW,
216 }, {
217 .fourcc = V4L2_PIX_FMT_SGBRG8,
218 .code = MEDIA_BUS_FMT_SGBRG8_1X8,
219 .depth = 8,
220 .csi_dt = 0x2a,
221 + .valid_colorspaces = MASK_CS_RAW,
222 }, {
223 .fourcc = V4L2_PIX_FMT_SGRBG8,
224 .code = MEDIA_BUS_FMT_SGRBG8_1X8,
225 .depth = 8,
226 .csi_dt = 0x2a,
227 + .valid_colorspaces = MASK_CS_RAW,
228 }, {
229 .fourcc = V4L2_PIX_FMT_SRGGB8,
230 .code = MEDIA_BUS_FMT_SRGGB8_1X8,
231 .depth = 8,
232 .csi_dt = 0x2a,
233 + .valid_colorspaces = MASK_CS_RAW,
234 }, {
235 .fourcc = V4L2_PIX_FMT_SBGGR10P,
236 .repacked_fourcc = V4L2_PIX_FMT_SBGGR10,
237 .code = MEDIA_BUS_FMT_SBGGR10_1X10,
238 .depth = 10,
239 .csi_dt = 0x2b,
240 + .valid_colorspaces = MASK_CS_RAW,
241 }, {
242 .fourcc = V4L2_PIX_FMT_SGBRG10P,
243 .repacked_fourcc = V4L2_PIX_FMT_SGBRG10,
244 .code = MEDIA_BUS_FMT_SGBRG10_1X10,
245 .depth = 10,
246 .csi_dt = 0x2b,
247 + .valid_colorspaces = MASK_CS_RAW,
248 }, {
249 .fourcc = V4L2_PIX_FMT_SGRBG10P,
250 .repacked_fourcc = V4L2_PIX_FMT_SGRBG10,
251 .code = MEDIA_BUS_FMT_SGRBG10_1X10,
252 .depth = 10,
253 .csi_dt = 0x2b,
254 + .valid_colorspaces = MASK_CS_RAW,
255 }, {
256 .fourcc = V4L2_PIX_FMT_SRGGB10P,
257 .repacked_fourcc = V4L2_PIX_FMT_SRGGB10,
258 .code = MEDIA_BUS_FMT_SRGGB10_1X10,
259 .depth = 10,
260 .csi_dt = 0x2b,
261 + .valid_colorspaces = MASK_CS_RAW,
262 }, {
263 .fourcc = V4L2_PIX_FMT_SBGGR12P,
264 .repacked_fourcc = V4L2_PIX_FMT_SBGGR12,
265 .code = MEDIA_BUS_FMT_SBGGR12_1X12,
266 .depth = 12,
267 .csi_dt = 0x2c,
268 + .valid_colorspaces = MASK_CS_RAW,
269 }, {
270 .fourcc = V4L2_PIX_FMT_SGBRG12P,
271 .repacked_fourcc = V4L2_PIX_FMT_SGBRG12,
272 .code = MEDIA_BUS_FMT_SGBRG12_1X12,
273 .depth = 12,
274 .csi_dt = 0x2c,
275 + .valid_colorspaces = MASK_CS_RAW,
276 }, {
277 .fourcc = V4L2_PIX_FMT_SGRBG12P,
278 .repacked_fourcc = V4L2_PIX_FMT_SGRBG12,
279 .code = MEDIA_BUS_FMT_SGRBG12_1X12,
280 .depth = 12,
281 .csi_dt = 0x2c,
282 + .valid_colorspaces = MASK_CS_RAW,
283 }, {
284 .fourcc = V4L2_PIX_FMT_SRGGB12P,
285 .repacked_fourcc = V4L2_PIX_FMT_SRGGB12,
286 .code = MEDIA_BUS_FMT_SRGGB12_1X12,
287 .depth = 12,
288 .csi_dt = 0x2c,
289 + .valid_colorspaces = MASK_CS_RAW,
290 }, {
291 .fourcc = V4L2_PIX_FMT_SBGGR14P,
292 .repacked_fourcc = V4L2_PIX_FMT_SBGGR14,
293 .code = MEDIA_BUS_FMT_SBGGR14_1X14,
294 .depth = 14,
295 .csi_dt = 0x2d,
296 + .valid_colorspaces = MASK_CS_RAW,
297 }, {
298 .fourcc = V4L2_PIX_FMT_SGBRG14P,
299 .repacked_fourcc = V4L2_PIX_FMT_SGBRG14,
300 .code = MEDIA_BUS_FMT_SGBRG14_1X14,
301 .depth = 14,
302 .csi_dt = 0x2d,
303 + .valid_colorspaces = MASK_CS_RAW,
304 }, {
305 .fourcc = V4L2_PIX_FMT_SGRBG14P,
306 .repacked_fourcc = V4L2_PIX_FMT_SGRBG14,
307 .code = MEDIA_BUS_FMT_SGRBG14_1X14,
308 .depth = 14,
309 .csi_dt = 0x2d,
310 + .valid_colorspaces = MASK_CS_RAW,
311 }, {
312 .fourcc = V4L2_PIX_FMT_SRGGB14P,
313 .repacked_fourcc = V4L2_PIX_FMT_SRGGB14,
314 .code = MEDIA_BUS_FMT_SRGGB14_1X14,
315 .depth = 14,
316 .csi_dt = 0x2d,
317 + .valid_colorspaces = MASK_CS_RAW,
318 }, {
319 /*
320 * 16 bit Bayer formats could be supported, but there is no CSI2
321 @@ -340,30 +412,35 @@ static const struct unicam_fmt formats[]
322 .code = MEDIA_BUS_FMT_Y8_1X8,
323 .depth = 8,
324 .csi_dt = 0x2a,
325 + .valid_colorspaces = MASK_CS_RAW,
326 }, {
327 .fourcc = V4L2_PIX_FMT_Y10P,
328 .repacked_fourcc = V4L2_PIX_FMT_Y10,
329 .code = MEDIA_BUS_FMT_Y10_1X10,
330 .depth = 10,
331 .csi_dt = 0x2b,
332 + .valid_colorspaces = MASK_CS_RAW,
333 }, {
334 .fourcc = V4L2_PIX_FMT_Y12P,
335 .repacked_fourcc = V4L2_PIX_FMT_Y12,
336 .code = MEDIA_BUS_FMT_Y12_1X12,
337 .depth = 12,
338 .csi_dt = 0x2c,
339 + .valid_colorspaces = MASK_CS_RAW,
340 }, {
341 .fourcc = V4L2_PIX_FMT_Y14P,
342 .repacked_fourcc = V4L2_PIX_FMT_Y14,
343 .code = MEDIA_BUS_FMT_Y14_1X14,
344 .depth = 14,
345 .csi_dt = 0x2d,
346 + .valid_colorspaces = MASK_CS_RAW,
347 },
348 /* Embedded data format */
349 {
350 .fourcc = V4L2_META_FMT_SENSOR_DATA,
351 .code = MEDIA_BUS_FMT_SENSOR_DATA,
352 .depth = 8,
353 + .metadata_fmt = 1,
354 }
355 };
356
357 @@ -408,6 +485,7 @@ struct unicam_node {
358 struct unicam_device *dev;
359 struct media_pad pad;
360 unsigned int embedded_lines;
361 + struct media_pipeline pipe;
362 /*
363 * Dummy buffer intended to be used by unicam
364 * if we have no other queued buffers to swap to.
365 @@ -459,6 +537,8 @@ struct unicam_device {
366
367 struct unicam_node node[MAX_NODES];
368 struct v4l2_ctrl_handler ctrl_handler;
369 +
370 + bool mc_api;
371 };
372
373 static inline struct unicam_device *
374 @@ -908,6 +988,7 @@ static irqreturn_t unicam_isr(int irq, v
375 return IRQ_HANDLED;
376 }
377
378 +/* V4L2 Common IOCTLs */
379 static int unicam_querycap(struct file *file, void *priv,
380 struct v4l2_capability *cap)
381 {
382 @@ -925,6 +1006,38 @@ static int unicam_querycap(struct file *
383 return 0;
384 }
385
386 +static int unicam_log_status(struct file *file, void *fh)
387 +{
388 + struct unicam_node *node = video_drvdata(file);
389 + struct unicam_device *dev = node->dev;
390 + u32 reg;
391 +
392 + /* status for sub devices */
393 + v4l2_device_call_all(&dev->v4l2_dev, 0, core, log_status);
394 +
395 + unicam_info(dev, "-----Receiver status-----\n");
396 + unicam_info(dev, "V4L2 width/height: %ux%u\n",
397 + node->v_fmt.fmt.pix.width, node->v_fmt.fmt.pix.height);
398 + unicam_info(dev, "Mediabus format: %08x\n", node->fmt->code);
399 + unicam_info(dev, "V4L2 format: %08x\n",
400 + node->v_fmt.fmt.pix.pixelformat);
401 + reg = reg_read(dev, UNICAM_IPIPE);
402 + unicam_info(dev, "Unpacking/packing: %u / %u\n",
403 + get_field(reg, UNICAM_PUM_MASK),
404 + get_field(reg, UNICAM_PPM_MASK));
405 + unicam_info(dev, "----Live data----\n");
406 + unicam_info(dev, "Programmed stride: %4u\n",
407 + reg_read(dev, UNICAM_IBLS));
408 + unicam_info(dev, "Detected resolution: %ux%u\n",
409 + reg_read(dev, UNICAM_IHSTA),
410 + reg_read(dev, UNICAM_IVSTA));
411 + unicam_info(dev, "Write pointer: %08x\n",
412 + reg_read(dev, UNICAM_IBWP));
413 +
414 + return 0;
415 +}
416 +
417 +/* V4L2 Video Centric IOCTLs */
418 static int unicam_enum_fmt_vid_cap(struct file *file, void *priv,
419 struct v4l2_fmtdesc *f)
420 {
421 @@ -1269,6 +1382,727 @@ static int unicam_g_fmt_meta_cap(struct
422 return 0;
423 }
424
425 +static int unicam_enum_input(struct file *file, void *priv,
426 + struct v4l2_input *inp)
427 +{
428 + struct unicam_node *node = video_drvdata(file);
429 + struct unicam_device *dev = node->dev;
430 + int ret;
431 +
432 + if (inp->index != 0)
433 + return -EINVAL;
434 +
435 + inp->type = V4L2_INPUT_TYPE_CAMERA;
436 + if (v4l2_subdev_has_op(dev->sensor, video, s_dv_timings)) {
437 + inp->capabilities = V4L2_IN_CAP_DV_TIMINGS;
438 + inp->std = 0;
439 + } else if (v4l2_subdev_has_op(dev->sensor, video, s_std)) {
440 + inp->capabilities = V4L2_IN_CAP_STD;
441 + if (v4l2_subdev_call(dev->sensor, video, g_tvnorms, &inp->std) < 0)
442 + inp->std = V4L2_STD_ALL;
443 + } else {
444 + inp->capabilities = 0;
445 + inp->std = 0;
446 + }
447 +
448 + if (v4l2_subdev_has_op(dev->sensor, video, g_input_status)) {
449 + ret = v4l2_subdev_call(dev->sensor, video, g_input_status,
450 + &inp->status);
451 + if (ret < 0)
452 + return ret;
453 + }
454 +
455 + snprintf(inp->name, sizeof(inp->name), "Camera 0");
456 + return 0;
457 +}
458 +
459 +static int unicam_g_input(struct file *file, void *priv, unsigned int *i)
460 +{
461 + *i = 0;
462 +
463 + return 0;
464 +}
465 +
466 +static int unicam_s_input(struct file *file, void *priv, unsigned int i)
467 +{
468 + /*
469 + * FIXME: Ideally we would like to be able to query the source
470 + * subdevice for information over the input connectors it supports,
471 + * and map that through in to a call to video_ops->s_routing.
472 + * There is no infrastructure support for defining that within
473 + * devicetree at present. Until that is implemented we can't
474 + * map a user physical connector number to s_routing input number.
475 + */
476 + if (i > 0)
477 + return -EINVAL;
478 +
479 + return 0;
480 +}
481 +
482 +static int unicam_querystd(struct file *file, void *priv,
483 + v4l2_std_id *std)
484 +{
485 + struct unicam_node *node = video_drvdata(file);
486 + struct unicam_device *dev = node->dev;
487 +
488 + return v4l2_subdev_call(dev->sensor, video, querystd, std);
489 +}
490 +
491 +static int unicam_g_std(struct file *file, void *priv, v4l2_std_id *std)
492 +{
493 + struct unicam_node *node = video_drvdata(file);
494 + struct unicam_device *dev = node->dev;
495 +
496 + return v4l2_subdev_call(dev->sensor, video, g_std, std);
497 +}
498 +
499 +static int unicam_s_std(struct file *file, void *priv, v4l2_std_id std)
500 +{
501 + struct unicam_node *node = video_drvdata(file);
502 + struct unicam_device *dev = node->dev;
503 + int ret;
504 + v4l2_std_id current_std;
505 +
506 + ret = v4l2_subdev_call(dev->sensor, video, g_std, &current_std);
507 + if (ret)
508 + return ret;
509 +
510 + if (std == current_std)
511 + return 0;
512 +
513 + if (vb2_is_busy(&node->buffer_queue))
514 + return -EBUSY;
515 +
516 + ret = v4l2_subdev_call(dev->sensor, video, s_std, std);
517 +
518 + /* Force recomputation of bytesperline */
519 + node->v_fmt.fmt.pix.bytesperline = 0;
520 +
521 + unicam_reset_format(node);
522 +
523 + return ret;
524 +}
525 +
526 +static int unicam_s_edid(struct file *file, void *priv, struct v4l2_edid *edid)
527 +{
528 + struct unicam_node *node = video_drvdata(file);
529 + struct unicam_device *dev = node->dev;
530 +
531 + return v4l2_subdev_call(dev->sensor, pad, set_edid, edid);
532 +}
533 +
534 +static int unicam_g_edid(struct file *file, void *priv, struct v4l2_edid *edid)
535 +{
536 + struct unicam_node *node = video_drvdata(file);
537 + struct unicam_device *dev = node->dev;
538 +
539 + return v4l2_subdev_call(dev->sensor, pad, get_edid, edid);
540 +}
541 +
542 +static int unicam_s_selection(struct file *file, void *priv,
543 + struct v4l2_selection *sel)
544 +{
545 + struct unicam_node *node = video_drvdata(file);
546 + struct unicam_device *dev = node->dev;
547 + struct v4l2_subdev_selection sdsel = {
548 + .which = V4L2_SUBDEV_FORMAT_ACTIVE,
549 + .target = sel->target,
550 + .flags = sel->flags,
551 + .r = sel->r,
552 + };
553 +
554 + if (sel->type != V4L2_BUF_TYPE_VIDEO_CAPTURE)
555 + return -EINVAL;
556 +
557 + return v4l2_subdev_call(dev->sensor, pad, set_selection, NULL, &sdsel);
558 +}
559 +
560 +static int unicam_g_selection(struct file *file, void *priv,
561 + struct v4l2_selection *sel)
562 +{
563 + struct unicam_node *node = video_drvdata(file);
564 + struct unicam_device *dev = node->dev;
565 + struct v4l2_subdev_selection sdsel = {
566 + .which = V4L2_SUBDEV_FORMAT_ACTIVE,
567 + .target = sel->target,
568 + };
569 + int ret;
570 +
571 + if (sel->type != V4L2_BUF_TYPE_VIDEO_CAPTURE)
572 + return -EINVAL;
573 +
574 + ret = v4l2_subdev_call(dev->sensor, pad, get_selection, NULL, &sdsel);
575 + if (!ret)
576 + sel->r = sdsel.r;
577 +
578 + return ret;
579 +}
580 +
581 +static int unicam_enum_framesizes(struct file *file, void *priv,
582 + struct v4l2_frmsizeenum *fsize)
583 +{
584 + struct unicam_node *node = video_drvdata(file);
585 + struct unicam_device *dev = node->dev;
586 + const struct unicam_fmt *fmt;
587 + struct v4l2_subdev_frame_size_enum fse;
588 + int ret;
589 +
590 + /* check for valid format */
591 + fmt = find_format_by_pix(dev, fsize->pixel_format);
592 + if (!fmt) {
593 + unicam_dbg(3, dev, "Invalid pixel code: %x\n",
594 + fsize->pixel_format);
595 + return -EINVAL;
596 + }
597 + fse.code = fmt->code;
598 +
599 + fse.which = V4L2_SUBDEV_FORMAT_ACTIVE;
600 + fse.index = fsize->index;
601 + fse.pad = node->src_pad_id;
602 +
603 + ret = v4l2_subdev_call(dev->sensor, pad, enum_frame_size, NULL, &fse);
604 + if (ret)
605 + return ret;
606 +
607 + unicam_dbg(1, dev, "%s: index: %d code: %x W:[%d,%d] H:[%d,%d]\n",
608 + __func__, fse.index, fse.code, fse.min_width, fse.max_width,
609 + fse.min_height, fse.max_height);
610 +
611 + fsize->type = V4L2_FRMSIZE_TYPE_DISCRETE;
612 + fsize->discrete.width = fse.max_width;
613 + fsize->discrete.height = fse.max_height;
614 +
615 + return 0;
616 +}
617 +
618 +static int unicam_enum_frameintervals(struct file *file, void *priv,
619 + struct v4l2_frmivalenum *fival)
620 +{
621 + struct unicam_node *node = video_drvdata(file);
622 + struct unicam_device *dev = node->dev;
623 + const struct unicam_fmt *fmt;
624 + struct v4l2_subdev_frame_interval_enum fie = {
625 + .index = fival->index,
626 + .pad = node->src_pad_id,
627 + .width = fival->width,
628 + .height = fival->height,
629 + .which = V4L2_SUBDEV_FORMAT_ACTIVE,
630 + };
631 + int ret;
632 +
633 + fmt = find_format_by_pix(dev, fival->pixel_format);
634 + if (!fmt)
635 + return -EINVAL;
636 +
637 + fie.code = fmt->code;
638 + ret = v4l2_subdev_call(dev->sensor, pad, enum_frame_interval,
639 + NULL, &fie);
640 + if (ret)
641 + return ret;
642 +
643 + fival->type = V4L2_FRMIVAL_TYPE_DISCRETE;
644 + fival->discrete = fie.interval;
645 +
646 + return 0;
647 +}
648 +
649 +static int unicam_g_parm(struct file *file, void *fh, struct v4l2_streamparm *a)
650 +{
651 + struct unicam_node *node = video_drvdata(file);
652 + struct unicam_device *dev = node->dev;
653 +
654 + return v4l2_g_parm_cap(video_devdata(file), dev->sensor, a);
655 +}
656 +
657 +static int unicam_s_parm(struct file *file, void *fh, struct v4l2_streamparm *a)
658 +{
659 + struct unicam_node *node = video_drvdata(file);
660 + struct unicam_device *dev = node->dev;
661 +
662 + return v4l2_s_parm_cap(video_devdata(file), dev->sensor, a);
663 +}
664 +
665 +static int unicam_g_dv_timings(struct file *file, void *priv,
666 + struct v4l2_dv_timings *timings)
667 +{
668 + struct unicam_node *node = video_drvdata(file);
669 + struct unicam_device *dev = node->dev;
670 +
671 + return v4l2_subdev_call(dev->sensor, video, g_dv_timings, timings);
672 +}
673 +
674 +static int unicam_s_dv_timings(struct file *file, void *priv,
675 + struct v4l2_dv_timings *timings)
676 +{
677 + struct unicam_node *node = video_drvdata(file);
678 + struct unicam_device *dev = node->dev;
679 + struct v4l2_dv_timings current_timings;
680 + int ret;
681 +
682 + ret = v4l2_subdev_call(dev->sensor, video, g_dv_timings,
683 + &current_timings);
684 +
685 + if (ret < 0)
686 + return ret;
687 +
688 + if (v4l2_match_dv_timings(timings, &current_timings, 0, false))
689 + return 0;
690 +
691 + if (vb2_is_busy(&node->buffer_queue))
692 + return -EBUSY;
693 +
694 + ret = v4l2_subdev_call(dev->sensor, video, s_dv_timings, timings);
695 +
696 + /* Force recomputation of bytesperline */
697 + node->v_fmt.fmt.pix.bytesperline = 0;
698 +
699 + unicam_reset_format(node);
700 +
701 + return ret;
702 +}
703 +
704 +static int unicam_query_dv_timings(struct file *file, void *priv,
705 + struct v4l2_dv_timings *timings)
706 +{
707 + struct unicam_node *node = video_drvdata(file);
708 + struct unicam_device *dev = node->dev;
709 +
710 + return v4l2_subdev_call(dev->sensor, video, query_dv_timings, timings);
711 +}
712 +
713 +static int unicam_enum_dv_timings(struct file *file, void *priv,
714 + struct v4l2_enum_dv_timings *timings)
715 +{
716 + struct unicam_node *node = video_drvdata(file);
717 + struct unicam_device *dev = node->dev;
718 + int ret;
719 +
720 + timings->pad = node->src_pad_id;
721 + ret = v4l2_subdev_call(dev->sensor, pad, enum_dv_timings, timings);
722 + timings->pad = node->pad_id;
723 +
724 + return ret;
725 +}
726 +
727 +static int unicam_dv_timings_cap(struct file *file, void *priv,
728 + struct v4l2_dv_timings_cap *cap)
729 +{
730 + struct unicam_node *node = video_drvdata(file);
731 + struct unicam_device *dev = node->dev;
732 + int ret;
733 +
734 + cap->pad = node->src_pad_id;
735 + ret = v4l2_subdev_call(dev->sensor, pad, dv_timings_cap, cap);
736 + cap->pad = node->pad_id;
737 +
738 + return ret;
739 +}
740 +
741 +static int unicam_subscribe_event(struct v4l2_fh *fh,
742 + const struct v4l2_event_subscription *sub)
743 +{
744 + switch (sub->type) {
745 + case V4L2_EVENT_FRAME_SYNC:
746 + return v4l2_event_subscribe(fh, sub, 2, NULL);
747 + case V4L2_EVENT_SOURCE_CHANGE:
748 + return v4l2_event_subscribe(fh, sub, 4, NULL);
749 + }
750 +
751 + return v4l2_ctrl_subscribe_event(fh, sub);
752 +}
753 +
754 +static void unicam_notify(struct v4l2_subdev *sd,
755 + unsigned int notification, void *arg)
756 +{
757 + struct unicam_device *dev = to_unicam_device(sd->v4l2_dev);
758 +
759 + switch (notification) {
760 + case V4L2_DEVICE_NOTIFY_EVENT:
761 + v4l2_event_queue(&dev->node[IMAGE_PAD].video_dev, arg);
762 + break;
763 + default:
764 + break;
765 + }
766 +}
767 +
768 +/* unicam capture ioctl operations */
769 +static const struct v4l2_ioctl_ops unicam_ioctl_ops = {
770 + .vidioc_querycap = unicam_querycap,
771 + .vidioc_enum_fmt_vid_cap = unicam_enum_fmt_vid_cap,
772 + .vidioc_g_fmt_vid_cap = unicam_g_fmt_vid_cap,
773 + .vidioc_s_fmt_vid_cap = unicam_s_fmt_vid_cap,
774 + .vidioc_try_fmt_vid_cap = unicam_try_fmt_vid_cap,
775 +
776 + .vidioc_enum_fmt_meta_cap = unicam_enum_fmt_meta_cap,
777 + .vidioc_g_fmt_meta_cap = unicam_g_fmt_meta_cap,
778 + .vidioc_s_fmt_meta_cap = unicam_g_fmt_meta_cap,
779 + .vidioc_try_fmt_meta_cap = unicam_g_fmt_meta_cap,
780 +
781 + .vidioc_enum_input = unicam_enum_input,
782 + .vidioc_g_input = unicam_g_input,
783 + .vidioc_s_input = unicam_s_input,
784 +
785 + .vidioc_querystd = unicam_querystd,
786 + .vidioc_s_std = unicam_s_std,
787 + .vidioc_g_std = unicam_g_std,
788 +
789 + .vidioc_g_edid = unicam_g_edid,
790 + .vidioc_s_edid = unicam_s_edid,
791 +
792 + .vidioc_enum_framesizes = unicam_enum_framesizes,
793 + .vidioc_enum_frameintervals = unicam_enum_frameintervals,
794 +
795 + .vidioc_g_selection = unicam_g_selection,
796 + .vidioc_s_selection = unicam_s_selection,
797 +
798 + .vidioc_g_parm = unicam_g_parm,
799 + .vidioc_s_parm = unicam_s_parm,
800 +
801 + .vidioc_s_dv_timings = unicam_s_dv_timings,
802 + .vidioc_g_dv_timings = unicam_g_dv_timings,
803 + .vidioc_query_dv_timings = unicam_query_dv_timings,
804 + .vidioc_enum_dv_timings = unicam_enum_dv_timings,
805 + .vidioc_dv_timings_cap = unicam_dv_timings_cap,
806 +
807 + .vidioc_reqbufs = vb2_ioctl_reqbufs,
808 + .vidioc_create_bufs = vb2_ioctl_create_bufs,
809 + .vidioc_prepare_buf = vb2_ioctl_prepare_buf,
810 + .vidioc_querybuf = vb2_ioctl_querybuf,
811 + .vidioc_qbuf = vb2_ioctl_qbuf,
812 + .vidioc_dqbuf = vb2_ioctl_dqbuf,
813 + .vidioc_expbuf = vb2_ioctl_expbuf,
814 + .vidioc_streamon = vb2_ioctl_streamon,
815 + .vidioc_streamoff = vb2_ioctl_streamoff,
816 +
817 + .vidioc_log_status = unicam_log_status,
818 + .vidioc_subscribe_event = unicam_subscribe_event,
819 + .vidioc_unsubscribe_event = v4l2_event_unsubscribe,
820 +};
821 +
822 +/* V4L2 Media Controller Centric IOCTLs */
823 +
824 +static int unicam_mc_enum_fmt_vid_cap(struct file *file, void *priv,
825 + struct v4l2_fmtdesc *f)
826 +{
827 + int i, j;
828 +
829 + for (i = 0, j = 0; i < ARRAY_SIZE(formats); i++) {
830 + if (f->mbus_code && formats[i].code != f->mbus_code)
831 + continue;
832 + if (formats[i].mc_skip || formats[i].metadata_fmt)
833 + continue;
834 +
835 + if (formats[i].fourcc) {
836 + if (j == f->index) {
837 + f->pixelformat = formats[i].fourcc;
838 + f->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
839 + return 0;
840 + }
841 + j++;
842 + }
843 + if (formats[i].repacked_fourcc) {
844 + if (j == f->index) {
845 + f->pixelformat = formats[i].repacked_fourcc;
846 + f->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
847 + return 0;
848 + }
849 + j++;
850 + }
851 + }
852 +
853 + return -EINVAL;
854 +}
855 +
856 +static int unicam_mc_g_fmt_vid_cap(struct file *file, void *priv,
857 + struct v4l2_format *f)
858 +{
859 + struct unicam_node *node = video_drvdata(file);
860 +
861 + if (node->pad_id != IMAGE_PAD)
862 + return -EINVAL;
863 +
864 + *f = node->v_fmt;
865 +
866 + return 0;
867 +}
868 +
869 +static void unicam_mc_try_fmt(struct unicam_node *node, struct v4l2_format *f,
870 + const struct unicam_fmt **ret_fmt)
871 +{
872 + struct v4l2_pix_format *v4l2_format = &f->fmt.pix;
873 + struct unicam_device *dev = node->dev;
874 + const struct unicam_fmt *fmt;
875 + int is_rgb;
876 +
877 + /*
878 + * Default to the first format if the requested pixel format code isn't
879 + * supported.
880 + */
881 + fmt = find_format_by_pix(dev, v4l2_format->pixelformat);
882 + if (!fmt) {
883 + fmt = &formats[0];
884 + v4l2_format->pixelformat = fmt->fourcc;
885 + }
886 +
887 + unicam_calc_format_size_bpl(dev, fmt, f);
888 +
889 + if (v4l2_format->field == V4L2_FIELD_ANY)
890 + v4l2_format->field = V4L2_FIELD_NONE;
891 +
892 + if (ret_fmt)
893 + *ret_fmt = fmt;
894 +
895 + if (v4l2_format->colorspace >= MAX_COLORSPACE ||
896 + !(fmt->valid_colorspaces & (1 << v4l2_format->colorspace))) {
897 + v4l2_format->colorspace = __ffs(fmt->valid_colorspaces);
898 +
899 + v4l2_format->xfer_func =
900 + V4L2_MAP_XFER_FUNC_DEFAULT(v4l2_format->colorspace);
901 + v4l2_format->ycbcr_enc =
902 + V4L2_MAP_YCBCR_ENC_DEFAULT(v4l2_format->colorspace);
903 + is_rgb = v4l2_format->colorspace == V4L2_COLORSPACE_SRGB;
904 + v4l2_format->quantization =
905 + V4L2_MAP_QUANTIZATION_DEFAULT(is_rgb,
906 + v4l2_format->colorspace,
907 + v4l2_format->ycbcr_enc);
908 + }
909 +
910 + unicam_dbg(3, dev, "%s: %08x %ux%u (bytesperline %u sizeimage %u)\n",
911 + __func__, v4l2_format->pixelformat,
912 + v4l2_format->width, v4l2_format->height,
913 + v4l2_format->bytesperline, v4l2_format->sizeimage);
914 +}
915 +
916 +static int unicam_mc_try_fmt_vid_cap(struct file *file, void *priv,
917 + struct v4l2_format *f)
918 +{
919 + struct unicam_node *node = video_drvdata(file);
920 +
921 + unicam_mc_try_fmt(node, f, NULL);
922 + return 0;
923 +}
924 +
925 +static int unicam_mc_s_fmt_vid_cap(struct file *file, void *priv,
926 + struct v4l2_format *f)
927 +{
928 + struct unicam_node *node = video_drvdata(file);
929 + struct unicam_device *dev = node->dev;
930 + const struct unicam_fmt *fmt;
931 +
932 + if (vb2_is_busy(&node->buffer_queue)) {
933 + unicam_dbg(3, dev, "%s device busy\n", __func__);
934 + return -EBUSY;
935 + }
936 +
937 + unicam_mc_try_fmt(node, f, &fmt);
938 +
939 + node->v_fmt = *f;
940 + node->fmt = fmt;
941 +
942 + return 0;
943 +}
944 +
945 +static int unicam_mc_enum_framesizes(struct file *file, void *fh,
946 + struct v4l2_frmsizeenum *fsize)
947 +{
948 + struct unicam_node *node = video_drvdata(file);
949 + struct unicam_device *dev = node->dev;
950 +
951 + if (fsize->index > 0)
952 + return -EINVAL;
953 +
954 + if (!find_format_by_pix(dev, fsize->pixel_format)) {
955 + unicam_dbg(3, dev, "Invalid pixel format 0x%08x\n",
956 + fsize->pixel_format);
957 + return -EINVAL;
958 + }
959 +
960 + fsize->type = V4L2_FRMSIZE_TYPE_STEPWISE;
961 + fsize->stepwise.min_width = MIN_WIDTH;
962 + fsize->stepwise.max_width = MAX_WIDTH;
963 + fsize->stepwise.step_width = 1;
964 + fsize->stepwise.min_height = MIN_HEIGHT;
965 + fsize->stepwise.max_height = MAX_HEIGHT;
966 + fsize->stepwise.step_height = 1;
967 +
968 + return 0;
969 +}
970 +
971 +static int unicam_mc_enum_fmt_meta_cap(struct file *file, void *priv,
972 + struct v4l2_fmtdesc *f)
973 +{
974 + int i, j;
975 +
976 + for (i = 0, j = 0; i < ARRAY_SIZE(formats); i++) {
977 + if (f->mbus_code && formats[i].code != f->mbus_code)
978 + continue;
979 + if (!formats[i].metadata_fmt)
980 + continue;
981 +
982 + if (formats[i].fourcc) {
983 + if (j == f->index) {
984 + f->pixelformat = formats[i].fourcc;
985 + f->type = V4L2_BUF_TYPE_META_CAPTURE;
986 + return 0;
987 + }
988 + j++;
989 + }
990 + }
991 +
992 + return -EINVAL;
993 +}
994 +
995 +static int unicam_mc_g_fmt_meta_cap(struct file *file, void *priv,
996 + struct v4l2_format *f)
997 +{
998 + struct unicam_node *node = video_drvdata(file);
999 +
1000 + if (node->pad_id != METADATA_PAD)
1001 + return -EINVAL;
1002 +
1003 + *f = node->v_fmt;
1004 +
1005 + return 0;
1006 +}
1007 +
1008 +static int unicam_mc_try_fmt_meta_cap(struct file *file, void *priv,
1009 + struct v4l2_format *f)
1010 +{
1011 + struct unicam_node *node = video_drvdata(file);
1012 +
1013 + if (node->pad_id != METADATA_PAD)
1014 + return -EINVAL;
1015 +
1016 + f->fmt.meta.dataformat = V4L2_META_FMT_SENSOR_DATA;
1017 +
1018 + return 0;
1019 +}
1020 +
1021 +static int unicam_mc_s_fmt_meta_cap(struct file *file, void *priv,
1022 + struct v4l2_format *f)
1023 +{
1024 + struct unicam_node *node = video_drvdata(file);
1025 +
1026 + if (node->pad_id != METADATA_PAD)
1027 + return -EINVAL;
1028 +
1029 + unicam_mc_try_fmt_meta_cap(file, priv, f);
1030 +
1031 + node->v_fmt = *f;
1032 +
1033 + return 0;
1034 +}
1035 +
1036 +static const struct v4l2_ioctl_ops unicam_mc_ioctl_ops = {
1037 + .vidioc_querycap = unicam_querycap,
1038 + .vidioc_enum_fmt_vid_cap = unicam_mc_enum_fmt_vid_cap,
1039 + .vidioc_g_fmt_vid_cap = unicam_mc_g_fmt_vid_cap,
1040 + .vidioc_try_fmt_vid_cap = unicam_mc_try_fmt_vid_cap,
1041 + .vidioc_s_fmt_vid_cap = unicam_mc_s_fmt_vid_cap,
1042 +
1043 + .vidioc_enum_fmt_meta_cap = unicam_mc_enum_fmt_meta_cap,
1044 + .vidioc_g_fmt_meta_cap = unicam_mc_g_fmt_meta_cap,
1045 + .vidioc_try_fmt_meta_cap = unicam_mc_try_fmt_meta_cap,
1046 + .vidioc_s_fmt_meta_cap = unicam_mc_s_fmt_meta_cap,
1047 +
1048 + .vidioc_enum_framesizes = unicam_mc_enum_framesizes,
1049 + .vidioc_reqbufs = vb2_ioctl_reqbufs,
1050 + .vidioc_create_bufs = vb2_ioctl_create_bufs,
1051 + .vidioc_prepare_buf = vb2_ioctl_prepare_buf,
1052 + .vidioc_querybuf = vb2_ioctl_querybuf,
1053 + .vidioc_qbuf = vb2_ioctl_qbuf,
1054 + .vidioc_dqbuf = vb2_ioctl_dqbuf,
1055 + .vidioc_expbuf = vb2_ioctl_expbuf,
1056 + .vidioc_streamon = vb2_ioctl_streamon,
1057 + .vidioc_streamoff = vb2_ioctl_streamoff,
1058 +
1059 + .vidioc_log_status = unicam_log_status,
1060 + .vidioc_subscribe_event = unicam_subscribe_event,
1061 + .vidioc_unsubscribe_event = v4l2_event_unsubscribe,
1062 +};
1063 +
1064 +static int
1065 +unicam_mc_subdev_link_validate_get_format(struct media_pad *pad,
1066 + struct v4l2_subdev_format *fmt)
1067 +{
1068 + if (is_media_entity_v4l2_subdev(pad->entity)) {
1069 + struct v4l2_subdev *sd =
1070 + media_entity_to_v4l2_subdev(pad->entity);
1071 +
1072 + fmt->which = V4L2_SUBDEV_FORMAT_ACTIVE;
1073 + fmt->pad = pad->index;
1074 + return v4l2_subdev_call(sd, pad, get_fmt, NULL, fmt);
1075 + }
1076 +
1077 + return -EINVAL;
1078 +}
1079 +
1080 +static int unicam_mc_video_link_validate(struct media_link *link)
1081 +{
1082 + struct video_device *vd = container_of(link->sink->entity,
1083 + struct video_device, entity);
1084 + struct unicam_node *node = container_of(vd, struct unicam_node,
1085 + video_dev);
1086 + struct unicam_device *unicam = node->dev;
1087 + struct v4l2_subdev_format source_fmt;
1088 + int ret;
1089 +
1090 + if (!media_entity_remote_source_pad_unique(link->sink->entity)) {
1091 + unicam_dbg(1, unicam,
1092 + "video node %s pad not connected\n", vd->name);
1093 + return -ENOTCONN;
1094 + }
1095 +
1096 + ret = unicam_mc_subdev_link_validate_get_format(link->source,
1097 + &source_fmt);
1098 + if (ret < 0)
1099 + return 0;
1100 +
1101 + if (node->pad_id == IMAGE_PAD) {
1102 + struct v4l2_pix_format *pix_fmt = &node->v_fmt.fmt.pix;
1103 + const struct unicam_fmt *fmt;
1104 +
1105 + if (source_fmt.format.width != pix_fmt->width ||
1106 + source_fmt.format.height != pix_fmt->height) {
1107 + unicam_err(unicam,
1108 + "Wrong width or height %ux%u (remote pad set to %ux%u)\n",
1109 + pix_fmt->width, pix_fmt->height,
1110 + source_fmt.format.width,
1111 + source_fmt.format.height);
1112 + return -EINVAL;
1113 + }
1114 +
1115 + fmt = find_format_by_code(source_fmt.format.code);
1116 +
1117 + if (!fmt || (fmt->fourcc != pix_fmt->pixelformat &&
1118 + fmt->repacked_fourcc != pix_fmt->pixelformat))
1119 + return -EINVAL;
1120 + } else {
1121 + struct v4l2_meta_format *meta_fmt = &node->v_fmt.fmt.meta;
1122 +
1123 + if (source_fmt.format.width != meta_fmt->buffersize ||
1124 + source_fmt.format.height != 1 ||
1125 + source_fmt.format.code != MEDIA_BUS_FMT_SENSOR_DATA) {
1126 + unicam_err(unicam,
1127 + "Wrong metadata width/height/code %ux%u %08x (remote pad set to %ux%u %08x)\n",
1128 + meta_fmt->buffersize, 1,
1129 + MEDIA_BUS_FMT_SENSOR_DATA,
1130 + source_fmt.format.width,
1131 + source_fmt.format.height,
1132 + source_fmt.format.code);
1133 + return -EINVAL;
1134 + }
1135 + }
1136 +
1137 + return 0;
1138 +}
1139 +
1140 +static const struct media_entity_operations unicam_mc_entity_ops = {
1141 + .link_validate = unicam_mc_video_link_validate,
1142 +};
1143 +
1144 +/* videobuf2 Operations */
1145 +
1146 static int unicam_queue_setup(struct vb2_queue *vq,
1147 unsigned int *nbuffers,
1148 unsigned int *nplanes,
1149 @@ -1495,7 +2329,7 @@ static void unicam_start_rx(struct unica
1150 /* CSI2 */
1151 set_field(&val, 1, UNICAM_CLE);
1152 set_field(&val, 1, UNICAM_CLLPE);
1153 - if (dev->bus_flags & V4L2_MBUS_CSI2_CONTINUOUS_CLOCK) {
1154 + if (!(dev->bus_flags & V4L2_MBUS_CSI2_NONCONTINUOUS_CLOCK)) {
1155 set_field(&val, 1, UNICAM_CLTRE);
1156 set_field(&val, 1, UNICAM_CLHSE);
1157 }
1158 @@ -1517,7 +2351,7 @@ static void unicam_start_rx(struct unica
1159 /* CSI2 */
1160 set_field(&val, 1, UNICAM_DLE);
1161 set_field(&val, 1, UNICAM_DLLPE);
1162 - if (dev->bus_flags & V4L2_MBUS_CSI2_CONTINUOUS_CLOCK) {
1163 + if (!(dev->bus_flags & V4L2_MBUS_CSI2_NONCONTINUOUS_CLOCK)) {
1164 set_field(&val, 1, UNICAM_DLTRE);
1165 set_field(&val, 1, UNICAM_DLHSE);
1166 }
1167 @@ -1666,6 +2500,12 @@ static int unicam_start_streaming(struct
1168 goto err_streaming;
1169 }
1170
1171 + ret = media_pipeline_start(&node->video_dev.entity, &node->pipe);
1172 + if (ret < 0) {
1173 + unicam_err(dev, "Failed to start media pipeline: %d\n", ret);
1174 + goto err_pm_put;
1175 + }
1176 +
1177 dev->active_data_lanes = dev->max_data_lanes;
1178
1179 if (dev->bus_type == V4L2_MBUS_CSI2_DPHY) {
1180 @@ -1675,12 +2515,10 @@ static int unicam_start_streaming(struct
1181 0, &mbus_config);
1182 if (ret < 0 && ret != -ENOIOCTLCMD) {
1183 unicam_dbg(3, dev, "g_mbus_config failed\n");
1184 - goto err_pm_put;
1185 + goto error_pipeline;
1186 }
1187
1188 - dev->active_data_lanes =
1189 - (mbus_config.flags & V4L2_MBUS_CSI2_LANE_MASK) >>
1190 - __ffs(V4L2_MBUS_CSI2_LANE_MASK);
1191 + dev->active_data_lanes = mbus_config.bus.mipi_csi2.num_data_lanes;
1192 if (!dev->active_data_lanes)
1193 dev->active_data_lanes = dev->max_data_lanes;
1194 if (dev->active_data_lanes > dev->max_data_lanes) {
1195 @@ -1688,7 +2526,7 @@ static int unicam_start_streaming(struct
1196 dev->active_data_lanes,
1197 dev->max_data_lanes);
1198 ret = -EINVAL;
1199 - goto err_pm_put;
1200 + goto error_pipeline;
1201 }
1202 }
1203
1204 @@ -1698,13 +2536,13 @@ static int unicam_start_streaming(struct
1205 ret = clk_set_min_rate(dev->vpu_clock, MIN_VPU_CLOCK_RATE);
1206 if (ret) {
1207 unicam_err(dev, "failed to set up VPU clock\n");
1208 - goto err_pm_put;
1209 + goto error_pipeline;
1210 }
1211
1212 ret = clk_prepare_enable(dev->vpu_clock);
1213 if (ret) {
1214 unicam_err(dev, "Failed to enable VPU clock: %d\n", ret);
1215 - goto err_pm_put;
1216 + goto error_pipeline;
1217 }
1218
1219 ret = clk_set_rate(dev->clock, 100 * 1000 * 1000);
1220 @@ -1755,6 +2593,8 @@ err_vpu_clock:
1221 if (clk_set_min_rate(dev->vpu_clock, 0))
1222 unicam_err(dev, "failed to reset the VPU clock\n");
1223 clk_disable_unprepare(dev->vpu_clock);
1224 +error_pipeline:
1225 + media_pipeline_stop(&node->video_dev.entity);
1226 err_pm_put:
1227 unicam_runtime_put(dev);
1228 err_streaming:
1229 @@ -1782,6 +2622,8 @@ static void unicam_stop_streaming(struct
1230
1231 unicam_disable(dev);
1232
1233 + media_pipeline_stop(&node->video_dev.entity);
1234 +
1235 if (dev->clocks_enabled) {
1236 if (clk_set_min_rate(dev->vpu_clock, 0))
1237 unicam_err(dev, "failed to reset the min VPU clock\n");
1238 @@ -1806,379 +2648,6 @@ static void unicam_stop_streaming(struct
1239 unicam_return_buffers(node, VB2_BUF_STATE_ERROR);
1240 }
1241
1242 -static int unicam_enum_input(struct file *file, void *priv,
1243 - struct v4l2_input *inp)
1244 -{
1245 - struct unicam_node *node = video_drvdata(file);
1246 - struct unicam_device *dev = node->dev;
1247 - int ret;
1248 -
1249 - if (inp->index != 0)
1250 - return -EINVAL;
1251 -
1252 - inp->type = V4L2_INPUT_TYPE_CAMERA;
1253 - if (v4l2_subdev_has_op(dev->sensor, video, s_dv_timings)) {
1254 - inp->capabilities = V4L2_IN_CAP_DV_TIMINGS;
1255 - inp->std = 0;
1256 - } else if (v4l2_subdev_has_op(dev->sensor, video, s_std)) {
1257 - inp->capabilities = V4L2_IN_CAP_STD;
1258 - if (v4l2_subdev_call(dev->sensor, video, g_tvnorms, &inp->std) < 0)
1259 - inp->std = V4L2_STD_ALL;
1260 - } else {
1261 - inp->capabilities = 0;
1262 - inp->std = 0;
1263 - }
1264 -
1265 - if (v4l2_subdev_has_op(dev->sensor, video, g_input_status)) {
1266 - ret = v4l2_subdev_call(dev->sensor, video, g_input_status,
1267 - &inp->status);
1268 - if (ret < 0)
1269 - return ret;
1270 - }
1271 -
1272 - snprintf(inp->name, sizeof(inp->name), "Camera 0");
1273 - return 0;
1274 -}
1275 -
1276 -static int unicam_g_input(struct file *file, void *priv, unsigned int *i)
1277 -{
1278 - *i = 0;
1279 -
1280 - return 0;
1281 -}
1282 -
1283 -static int unicam_s_input(struct file *file, void *priv, unsigned int i)
1284 -{
1285 - /*
1286 - * FIXME: Ideally we would like to be able to query the source
1287 - * subdevice for information over the input connectors it supports,
1288 - * and map that through in to a call to video_ops->s_routing.
1289 - * There is no infrastructure support for defining that within
1290 - * devicetree at present. Until that is implemented we can't
1291 - * map a user physical connector number to s_routing input number.
1292 - */
1293 - if (i > 0)
1294 - return -EINVAL;
1295 -
1296 - return 0;
1297 -}
1298 -
1299 -static int unicam_querystd(struct file *file, void *priv,
1300 - v4l2_std_id *std)
1301 -{
1302 - struct unicam_node *node = video_drvdata(file);
1303 - struct unicam_device *dev = node->dev;
1304 -
1305 - return v4l2_subdev_call(dev->sensor, video, querystd, std);
1306 -}
1307 -
1308 -static int unicam_g_std(struct file *file, void *priv, v4l2_std_id *std)
1309 -{
1310 - struct unicam_node *node = video_drvdata(file);
1311 - struct unicam_device *dev = node->dev;
1312 -
1313 - return v4l2_subdev_call(dev->sensor, video, g_std, std);
1314 -}
1315 -
1316 -static int unicam_s_std(struct file *file, void *priv, v4l2_std_id std)
1317 -{
1318 - struct unicam_node *node = video_drvdata(file);
1319 - struct unicam_device *dev = node->dev;
1320 - int ret;
1321 - v4l2_std_id current_std;
1322 -
1323 - ret = v4l2_subdev_call(dev->sensor, video, g_std, &current_std);
1324 - if (ret)
1325 - return ret;
1326 -
1327 - if (std == current_std)
1328 - return 0;
1329 -
1330 - if (vb2_is_busy(&node->buffer_queue))
1331 - return -EBUSY;
1332 -
1333 - ret = v4l2_subdev_call(dev->sensor, video, s_std, std);
1334 -
1335 - /* Force recomputation of bytesperline */
1336 - node->v_fmt.fmt.pix.bytesperline = 0;
1337 -
1338 - unicam_reset_format(node);
1339 -
1340 - return ret;
1341 -}
1342 -
1343 -static int unicam_s_edid(struct file *file, void *priv, struct v4l2_edid *edid)
1344 -{
1345 - struct unicam_node *node = video_drvdata(file);
1346 - struct unicam_device *dev = node->dev;
1347 -
1348 - return v4l2_subdev_call(dev->sensor, pad, set_edid, edid);
1349 -}
1350 -
1351 -static int unicam_g_edid(struct file *file, void *priv, struct v4l2_edid *edid)
1352 -{
1353 - struct unicam_node *node = video_drvdata(file);
1354 - struct unicam_device *dev = node->dev;
1355 -
1356 - return v4l2_subdev_call(dev->sensor, pad, get_edid, edid);
1357 -}
1358 -
1359 -static int unicam_s_selection(struct file *file, void *priv,
1360 - struct v4l2_selection *sel)
1361 -{
1362 - struct unicam_node *node = video_drvdata(file);
1363 - struct unicam_device *dev = node->dev;
1364 - struct v4l2_subdev_selection sdsel = {
1365 - .which = V4L2_SUBDEV_FORMAT_ACTIVE,
1366 - .target = sel->target,
1367 - .flags = sel->flags,
1368 - .r = sel->r,
1369 - };
1370 -
1371 - if (sel->type != V4L2_BUF_TYPE_VIDEO_CAPTURE)
1372 - return -EINVAL;
1373 -
1374 - return v4l2_subdev_call(dev->sensor, pad, set_selection, NULL, &sdsel);
1375 -}
1376 -
1377 -static int unicam_g_selection(struct file *file, void *priv,
1378 - struct v4l2_selection *sel)
1379 -{
1380 - struct unicam_node *node = video_drvdata(file);
1381 - struct unicam_device *dev = node->dev;
1382 - struct v4l2_subdev_selection sdsel = {
1383 - .which = V4L2_SUBDEV_FORMAT_ACTIVE,
1384 - .target = sel->target,
1385 - };
1386 - int ret;
1387 -
1388 - if (sel->type != V4L2_BUF_TYPE_VIDEO_CAPTURE)
1389 - return -EINVAL;
1390 -
1391 - ret = v4l2_subdev_call(dev->sensor, pad, get_selection, NULL, &sdsel);
1392 - if (!ret)
1393 - sel->r = sdsel.r;
1394 -
1395 - return ret;
1396 -}
1397 -
1398 -static int unicam_enum_framesizes(struct file *file, void *priv,
1399 - struct v4l2_frmsizeenum *fsize)
1400 -{
1401 - struct unicam_node *node = video_drvdata(file);
1402 - struct unicam_device *dev = node->dev;
1403 - const struct unicam_fmt *fmt;
1404 - struct v4l2_subdev_frame_size_enum fse;
1405 - int ret;
1406 -
1407 - /* check for valid format */
1408 - fmt = find_format_by_pix(dev, fsize->pixel_format);
1409 - if (!fmt) {
1410 - unicam_dbg(3, dev, "Invalid pixel code: %x\n",
1411 - fsize->pixel_format);
1412 - return -EINVAL;
1413 - }
1414 - fse.code = fmt->code;
1415 -
1416 - fse.which = V4L2_SUBDEV_FORMAT_ACTIVE;
1417 - fse.index = fsize->index;
1418 - fse.pad = node->src_pad_id;
1419 -
1420 - ret = v4l2_subdev_call(dev->sensor, pad, enum_frame_size, NULL, &fse);
1421 - if (ret)
1422 - return ret;
1423 -
1424 - unicam_dbg(1, dev, "%s: index: %d code: %x W:[%d,%d] H:[%d,%d]\n",
1425 - __func__, fse.index, fse.code, fse.min_width, fse.max_width,
1426 - fse.min_height, fse.max_height);
1427 -
1428 - fsize->type = V4L2_FRMSIZE_TYPE_DISCRETE;
1429 - fsize->discrete.width = fse.max_width;
1430 - fsize->discrete.height = fse.max_height;
1431 -
1432 - return 0;
1433 -}
1434 -
1435 -static int unicam_enum_frameintervals(struct file *file, void *priv,
1436 - struct v4l2_frmivalenum *fival)
1437 -{
1438 - struct unicam_node *node = video_drvdata(file);
1439 - struct unicam_device *dev = node->dev;
1440 - const struct unicam_fmt *fmt;
1441 - struct v4l2_subdev_frame_interval_enum fie = {
1442 - .index = fival->index,
1443 - .pad = node->src_pad_id,
1444 - .width = fival->width,
1445 - .height = fival->height,
1446 - .which = V4L2_SUBDEV_FORMAT_ACTIVE,
1447 - };
1448 - int ret;
1449 -
1450 - fmt = find_format_by_pix(dev, fival->pixel_format);
1451 - if (!fmt)
1452 - return -EINVAL;
1453 -
1454 - fie.code = fmt->code;
1455 - ret = v4l2_subdev_call(dev->sensor, pad, enum_frame_interval,
1456 - NULL, &fie);
1457 - if (ret)
1458 - return ret;
1459 -
1460 - fival->type = V4L2_FRMIVAL_TYPE_DISCRETE;
1461 - fival->discrete = fie.interval;
1462 -
1463 - return 0;
1464 -}
1465 -
1466 -static int unicam_g_parm(struct file *file, void *fh, struct v4l2_streamparm *a)
1467 -{
1468 - struct unicam_node *node = video_drvdata(file);
1469 - struct unicam_device *dev = node->dev;
1470 -
1471 - return v4l2_g_parm_cap(video_devdata(file), dev->sensor, a);
1472 -}
1473 -
1474 -static int unicam_s_parm(struct file *file, void *fh, struct v4l2_streamparm *a)
1475 -{
1476 - struct unicam_node *node = video_drvdata(file);
1477 - struct unicam_device *dev = node->dev;
1478 -
1479 - return v4l2_s_parm_cap(video_devdata(file), dev->sensor, a);
1480 -}
1481 -
1482 -static int unicam_g_dv_timings(struct file *file, void *priv,
1483 - struct v4l2_dv_timings *timings)
1484 -{
1485 - struct unicam_node *node = video_drvdata(file);
1486 - struct unicam_device *dev = node->dev;
1487 -
1488 - return v4l2_subdev_call(dev->sensor, video, g_dv_timings, timings);
1489 -}
1490 -
1491 -static int unicam_s_dv_timings(struct file *file, void *priv,
1492 - struct v4l2_dv_timings *timings)
1493 -{
1494 - struct unicam_node *node = video_drvdata(file);
1495 - struct unicam_device *dev = node->dev;
1496 - struct v4l2_dv_timings current_timings;
1497 - int ret;
1498 -
1499 - ret = v4l2_subdev_call(dev->sensor, video, g_dv_timings,
1500 - &current_timings);
1501 -
1502 - if (ret < 0)
1503 - return ret;
1504 -
1505 - if (v4l2_match_dv_timings(timings, &current_timings, 0, false))
1506 - return 0;
1507 -
1508 - if (vb2_is_busy(&node->buffer_queue))
1509 - return -EBUSY;
1510 -
1511 - ret = v4l2_subdev_call(dev->sensor, video, s_dv_timings, timings);
1512 -
1513 - /* Force recomputation of bytesperline */
1514 - node->v_fmt.fmt.pix.bytesperline = 0;
1515 -
1516 - unicam_reset_format(node);
1517 -
1518 - return ret;
1519 -}
1520 -
1521 -static int unicam_query_dv_timings(struct file *file, void *priv,
1522 - struct v4l2_dv_timings *timings)
1523 -{
1524 - struct unicam_node *node = video_drvdata(file);
1525 - struct unicam_device *dev = node->dev;
1526 -
1527 - return v4l2_subdev_call(dev->sensor, video, query_dv_timings, timings);
1528 -}
1529 -
1530 -static int unicam_enum_dv_timings(struct file *file, void *priv,
1531 - struct v4l2_enum_dv_timings *timings)
1532 -{
1533 - struct unicam_node *node = video_drvdata(file);
1534 - struct unicam_device *dev = node->dev;
1535 - int ret;
1536 -
1537 - timings->pad = node->src_pad_id;
1538 - ret = v4l2_subdev_call(dev->sensor, pad, enum_dv_timings, timings);
1539 - timings->pad = node->pad_id;
1540 -
1541 - return ret;
1542 -}
1543 -
1544 -static int unicam_dv_timings_cap(struct file *file, void *priv,
1545 - struct v4l2_dv_timings_cap *cap)
1546 -{
1547 - struct unicam_node *node = video_drvdata(file);
1548 - struct unicam_device *dev = node->dev;
1549 - int ret;
1550 -
1551 - cap->pad = node->src_pad_id;
1552 - ret = v4l2_subdev_call(dev->sensor, pad, dv_timings_cap, cap);
1553 - cap->pad = node->pad_id;
1554 -
1555 - return ret;
1556 -}
1557 -
1558 -static int unicam_subscribe_event(struct v4l2_fh *fh,
1559 - const struct v4l2_event_subscription *sub)
1560 -{
1561 - switch (sub->type) {
1562 - case V4L2_EVENT_FRAME_SYNC:
1563 - return v4l2_event_subscribe(fh, sub, 2, NULL);
1564 - case V4L2_EVENT_SOURCE_CHANGE:
1565 - return v4l2_event_subscribe(fh, sub, 4, NULL);
1566 - }
1567 -
1568 - return v4l2_ctrl_subscribe_event(fh, sub);
1569 -}
1570 -
1571 -static int unicam_log_status(struct file *file, void *fh)
1572 -{
1573 - struct unicam_node *node = video_drvdata(file);
1574 - struct unicam_device *dev = node->dev;
1575 - u32 reg;
1576 -
1577 - /* status for sub devices */
1578 - v4l2_device_call_all(&dev->v4l2_dev, 0, core, log_status);
1579 -
1580 - unicam_info(dev, "-----Receiver status-----\n");
1581 - unicam_info(dev, "V4L2 width/height: %ux%u\n",
1582 - node->v_fmt.fmt.pix.width, node->v_fmt.fmt.pix.height);
1583 - unicam_info(dev, "Mediabus format: %08x\n", node->fmt->code);
1584 - unicam_info(dev, "V4L2 format: %08x\n",
1585 - node->v_fmt.fmt.pix.pixelformat);
1586 - reg = reg_read(dev, UNICAM_IPIPE);
1587 - unicam_info(dev, "Unpacking/packing: %u / %u\n",
1588 - get_field(reg, UNICAM_PUM_MASK),
1589 - get_field(reg, UNICAM_PPM_MASK));
1590 - unicam_info(dev, "----Live data----\n");
1591 - unicam_info(dev, "Programmed stride: %4u\n",
1592 - reg_read(dev, UNICAM_IBLS));
1593 - unicam_info(dev, "Detected resolution: %ux%u\n",
1594 - reg_read(dev, UNICAM_IHSTA),
1595 - reg_read(dev, UNICAM_IVSTA));
1596 - unicam_info(dev, "Write pointer: %08x\n",
1597 - reg_read(dev, UNICAM_IBWP));
1598 -
1599 - return 0;
1600 -}
1601 -
1602 -static void unicam_notify(struct v4l2_subdev *sd,
1603 - unsigned int notification, void *arg)
1604 -{
1605 - struct unicam_device *dev = to_unicam_device(sd->v4l2_dev);
1606 -
1607 - switch (notification) {
1608 - case V4L2_DEVICE_NOTIFY_EVENT:
1609 - v4l2_event_queue(&dev->node[IMAGE_PAD].video_dev, arg);
1610 - break;
1611 - default:
1612 - break;
1613 - }
1614 -}
1615
1616 static const struct vb2_ops unicam_video_qops = {
1617 .wait_prepare = vb2_ops_wait_prepare,
1618 @@ -2261,60 +2730,6 @@ static const struct v4l2_file_operations
1619 .mmap = vb2_fop_mmap,
1620 };
1621
1622 -/* unicam capture ioctl operations */
1623 -static const struct v4l2_ioctl_ops unicam_ioctl_ops = {
1624 - .vidioc_querycap = unicam_querycap,
1625 - .vidioc_enum_fmt_vid_cap = unicam_enum_fmt_vid_cap,
1626 - .vidioc_g_fmt_vid_cap = unicam_g_fmt_vid_cap,
1627 - .vidioc_s_fmt_vid_cap = unicam_s_fmt_vid_cap,
1628 - .vidioc_try_fmt_vid_cap = unicam_try_fmt_vid_cap,
1629 -
1630 - .vidioc_enum_fmt_meta_cap = unicam_enum_fmt_meta_cap,
1631 - .vidioc_g_fmt_meta_cap = unicam_g_fmt_meta_cap,
1632 - .vidioc_s_fmt_meta_cap = unicam_g_fmt_meta_cap,
1633 - .vidioc_try_fmt_meta_cap = unicam_g_fmt_meta_cap,
1634 -
1635 - .vidioc_enum_input = unicam_enum_input,
1636 - .vidioc_g_input = unicam_g_input,
1637 - .vidioc_s_input = unicam_s_input,
1638 -
1639 - .vidioc_querystd = unicam_querystd,
1640 - .vidioc_s_std = unicam_s_std,
1641 - .vidioc_g_std = unicam_g_std,
1642 -
1643 - .vidioc_g_edid = unicam_g_edid,
1644 - .vidioc_s_edid = unicam_s_edid,
1645 -
1646 - .vidioc_enum_framesizes = unicam_enum_framesizes,
1647 - .vidioc_enum_frameintervals = unicam_enum_frameintervals,
1648 -
1649 - .vidioc_g_selection = unicam_g_selection,
1650 - .vidioc_s_selection = unicam_s_selection,
1651 -
1652 - .vidioc_g_parm = unicam_g_parm,
1653 - .vidioc_s_parm = unicam_s_parm,
1654 -
1655 - .vidioc_s_dv_timings = unicam_s_dv_timings,
1656 - .vidioc_g_dv_timings = unicam_g_dv_timings,
1657 - .vidioc_query_dv_timings = unicam_query_dv_timings,
1658 - .vidioc_enum_dv_timings = unicam_enum_dv_timings,
1659 - .vidioc_dv_timings_cap = unicam_dv_timings_cap,
1660 -
1661 - .vidioc_reqbufs = vb2_ioctl_reqbufs,
1662 - .vidioc_create_bufs = vb2_ioctl_create_bufs,
1663 - .vidioc_prepare_buf = vb2_ioctl_prepare_buf,
1664 - .vidioc_querybuf = vb2_ioctl_querybuf,
1665 - .vidioc_qbuf = vb2_ioctl_qbuf,
1666 - .vidioc_dqbuf = vb2_ioctl_dqbuf,
1667 - .vidioc_expbuf = vb2_ioctl_expbuf,
1668 - .vidioc_streamon = vb2_ioctl_streamon,
1669 - .vidioc_streamoff = vb2_ioctl_streamoff,
1670 -
1671 - .vidioc_log_status = unicam_log_status,
1672 - .vidioc_subscribe_event = unicam_subscribe_event,
1673 - .vidioc_unsubscribe_event = v4l2_event_unsubscribe,
1674 -};
1675 -
1676 static int
1677 unicam_async_bound(struct v4l2_async_notifier *notifier,
1678 struct v4l2_subdev *subdev,
1679 @@ -2365,11 +2780,11 @@ static void unicam_node_release(struct v
1680 unicam_put(node->dev);
1681 }
1682
1683 -static int register_node(struct unicam_device *unicam, struct unicam_node *node,
1684 - enum v4l2_buf_type type, int pad_id)
1685 +static int unicam_set_default_format(struct unicam_device *unicam,
1686 + struct unicam_node *node,
1687 + int pad_id,
1688 + const struct unicam_fmt **ret_fmt)
1689 {
1690 - struct video_device *vdev;
1691 - struct vb2_queue *q;
1692 struct v4l2_mbus_framefmt mbus_fmt = {0};
1693 const struct unicam_fmt *fmt;
1694 int ret;
1695 @@ -2414,15 +2829,69 @@ static int register_node(struct unicam_d
1696 node->v_fmt.fmt.meta.dataformat = fmt->fourcc;
1697 }
1698
1699 + *ret_fmt = fmt;
1700 +
1701 + return 0;
1702 +}
1703 +
1704 +static void unicam_mc_set_default_format(struct unicam_node *node, int pad_id)
1705 +{
1706 + if (pad_id == IMAGE_PAD) {
1707 + struct v4l2_pix_format *pix_fmt = &node->v_fmt.fmt.pix;
1708 +
1709 + pix_fmt->width = 640;
1710 + pix_fmt->height = 480;
1711 + pix_fmt->field = V4L2_FIELD_NONE;
1712 + pix_fmt->colorspace = V4L2_COLORSPACE_SRGB;
1713 + pix_fmt->ycbcr_enc = V4L2_YCBCR_ENC_601;
1714 + pix_fmt->quantization = V4L2_QUANTIZATION_LIM_RANGE;
1715 + pix_fmt->xfer_func = V4L2_XFER_FUNC_SRGB;
1716 + pix_fmt->pixelformat = formats[0].fourcc;
1717 + unicam_calc_format_size_bpl(node->dev, &formats[0],
1718 + &node->v_fmt);
1719 + node->v_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
1720 +
1721 + node->fmt = &formats[0];
1722 + } else {
1723 + const struct unicam_fmt *fmt;
1724 +
1725 + /* Fix this node format as embedded data. */
1726 + fmt = find_format_by_code(MEDIA_BUS_FMT_SENSOR_DATA);
1727 + node->v_fmt.fmt.meta.dataformat = fmt->fourcc;
1728 + node->fmt = fmt;
1729 +
1730 + node->v_fmt.fmt.meta.buffersize = UNICAM_EMBEDDED_SIZE;
1731 + node->embedded_lines = 1;
1732 + node->v_fmt.type = V4L2_BUF_TYPE_META_CAPTURE;
1733 + }
1734 +}
1735 +
1736 +static int register_node(struct unicam_device *unicam, struct unicam_node *node,
1737 + enum v4l2_buf_type type, int pad_id)
1738 +{
1739 + struct video_device *vdev;
1740 + struct vb2_queue *q;
1741 + int ret;
1742 +
1743 node->dev = unicam;
1744 node->pad_id = pad_id;
1745 - node->fmt = fmt;
1746
1747 - /* Read current subdev format */
1748 - if (fmt)
1749 - unicam_reset_format(node);
1750 + if (!unicam->mc_api) {
1751 + const struct unicam_fmt *fmt;
1752
1753 - if (v4l2_subdev_has_op(unicam->sensor, video, s_std)) {
1754 + ret = unicam_set_default_format(unicam, node, pad_id, &fmt);
1755 + if (ret)
1756 + return ret;
1757 + node->fmt = fmt;
1758 + /* Read current subdev format */
1759 + if (fmt)
1760 + unicam_reset_format(node);
1761 + } else {
1762 + unicam_mc_set_default_format(node, pad_id);
1763 + }
1764 +
1765 + if (!unicam->mc_api &&
1766 + v4l2_subdev_has_op(unicam->sensor, video, s_std)) {
1767 v4l2_std_id tvnorms;
1768
1769 if (WARN_ON(!v4l2_subdev_has_op(unicam->sensor, video,
1770 @@ -2445,12 +2914,15 @@ static int register_node(struct unicam_d
1771
1772 vdev = &node->video_dev;
1773 if (pad_id == IMAGE_PAD) {
1774 - /* Add controls from the subdevice */
1775 - ret = v4l2_ctrl_add_handler(&unicam->ctrl_handler,
1776 - unicam->sensor->ctrl_handler, NULL,
1777 - true);
1778 - if (ret < 0)
1779 - return ret;
1780 + if (!unicam->mc_api) {
1781 + /* Add controls from the subdevice */
1782 + ret = v4l2_ctrl_add_handler(&unicam->ctrl_handler,
1783 + unicam->sensor->ctrl_handler,
1784 + NULL,
1785 + true);
1786 + if (ret < 0)
1787 + return ret;
1788 + }
1789
1790 /*
1791 * If the sensor subdevice has any controls, associate the node
1792 @@ -2482,7 +2954,8 @@ static int register_node(struct unicam_d
1793
1794 vdev->release = unicam_node_release;
1795 vdev->fops = &unicam_fops;
1796 - vdev->ioctl_ops = &unicam_ioctl_ops;
1797 + vdev->ioctl_ops = unicam->mc_api ? &unicam_mc_ioctl_ops :
1798 + &unicam_ioctl_ops;
1799 vdev->v4l2_dev = &unicam->v4l2_dev;
1800 vdev->vfl_dir = VFL_DIR_RX;
1801 vdev->queue = q;
1802 @@ -2490,6 +2963,10 @@ static int register_node(struct unicam_d
1803 vdev->device_caps = (pad_id == IMAGE_PAD) ?
1804 V4L2_CAP_VIDEO_CAPTURE : V4L2_CAP_META_CAPTURE;
1805 vdev->device_caps |= V4L2_CAP_READWRITE | V4L2_CAP_STREAMING;
1806 + if (unicam->mc_api) {
1807 + vdev->device_caps |= V4L2_CAP_IO_MC;
1808 + vdev->entity.ops = &unicam_mc_entity_ops;
1809 + }
1810
1811 /* Define the device names */
1812 snprintf(vdev->name, sizeof(vdev->name), "%s-%s", UNICAM_MODULE_NAME,
1813 @@ -2509,48 +2986,61 @@ static int register_node(struct unicam_d
1814 unicam_err(unicam, "Unable to allocate dummy buffer.\n");
1815 return -ENOMEM;
1816 }
1817 -
1818 - if (pad_id == METADATA_PAD ||
1819 - !v4l2_subdev_has_op(unicam->sensor, video, s_std)) {
1820 - v4l2_disable_ioctl(&node->video_dev, VIDIOC_S_STD);
1821 - v4l2_disable_ioctl(&node->video_dev, VIDIOC_G_STD);
1822 - v4l2_disable_ioctl(&node->video_dev, VIDIOC_ENUMSTD);
1823 - }
1824 - if (pad_id == METADATA_PAD ||
1825 - !v4l2_subdev_has_op(unicam->sensor, video, querystd))
1826 - v4l2_disable_ioctl(&node->video_dev, VIDIOC_QUERYSTD);
1827 - if (pad_id == METADATA_PAD ||
1828 - !v4l2_subdev_has_op(unicam->sensor, video, s_dv_timings)) {
1829 - v4l2_disable_ioctl(&node->video_dev, VIDIOC_S_EDID);
1830 - v4l2_disable_ioctl(&node->video_dev, VIDIOC_G_EDID);
1831 - v4l2_disable_ioctl(&node->video_dev, VIDIOC_DV_TIMINGS_CAP);
1832 - v4l2_disable_ioctl(&node->video_dev, VIDIOC_G_DV_TIMINGS);
1833 - v4l2_disable_ioctl(&node->video_dev, VIDIOC_S_DV_TIMINGS);
1834 - v4l2_disable_ioctl(&node->video_dev, VIDIOC_ENUM_DV_TIMINGS);
1835 - v4l2_disable_ioctl(&node->video_dev, VIDIOC_QUERY_DV_TIMINGS);
1836 - }
1837 - if (pad_id == METADATA_PAD ||
1838 - !v4l2_subdev_has_op(unicam->sensor, pad, enum_frame_interval))
1839 - v4l2_disable_ioctl(&node->video_dev,
1840 - VIDIOC_ENUM_FRAMEINTERVALS);
1841 - if (pad_id == METADATA_PAD ||
1842 - !v4l2_subdev_has_op(unicam->sensor, video, g_frame_interval))
1843 - v4l2_disable_ioctl(&node->video_dev, VIDIOC_G_PARM);
1844 - if (pad_id == METADATA_PAD ||
1845 - !v4l2_subdev_has_op(unicam->sensor, video, s_frame_interval))
1846 - v4l2_disable_ioctl(&node->video_dev, VIDIOC_S_PARM);
1847 -
1848 - if (pad_id == METADATA_PAD ||
1849 - !v4l2_subdev_has_op(unicam->sensor, pad, enum_frame_size))
1850 - v4l2_disable_ioctl(&node->video_dev, VIDIOC_ENUM_FRAMESIZES);
1851 -
1852 - if (node->pad_id == METADATA_PAD ||
1853 - !v4l2_subdev_has_op(unicam->sensor, pad, set_selection))
1854 - v4l2_disable_ioctl(&node->video_dev, VIDIOC_S_SELECTION);
1855 -
1856 - if (node->pad_id == METADATA_PAD ||
1857 - !v4l2_subdev_has_op(unicam->sensor, pad, get_selection))
1858 - v4l2_disable_ioctl(&node->video_dev, VIDIOC_G_SELECTION);
1859 + if (!unicam->mc_api) {
1860 + if (pad_id == METADATA_PAD ||
1861 + !v4l2_subdev_has_op(unicam->sensor, video, s_std)) {
1862 + v4l2_disable_ioctl(&node->video_dev, VIDIOC_S_STD);
1863 + v4l2_disable_ioctl(&node->video_dev, VIDIOC_G_STD);
1864 + v4l2_disable_ioctl(&node->video_dev, VIDIOC_ENUMSTD);
1865 + }
1866 + if (pad_id == METADATA_PAD ||
1867 + !v4l2_subdev_has_op(unicam->sensor, video, querystd))
1868 + v4l2_disable_ioctl(&node->video_dev, VIDIOC_QUERYSTD);
1869 + if (pad_id == METADATA_PAD ||
1870 + !v4l2_subdev_has_op(unicam->sensor, video, s_dv_timings)) {
1871 + v4l2_disable_ioctl(&node->video_dev, VIDIOC_S_EDID);
1872 + v4l2_disable_ioctl(&node->video_dev, VIDIOC_G_EDID);
1873 + v4l2_disable_ioctl(&node->video_dev,
1874 + VIDIOC_DV_TIMINGS_CAP);
1875 + v4l2_disable_ioctl(&node->video_dev,
1876 + VIDIOC_G_DV_TIMINGS);
1877 + v4l2_disable_ioctl(&node->video_dev,
1878 + VIDIOC_S_DV_TIMINGS);
1879 + v4l2_disable_ioctl(&node->video_dev,
1880 + VIDIOC_ENUM_DV_TIMINGS);
1881 + v4l2_disable_ioctl(&node->video_dev,
1882 + VIDIOC_QUERY_DV_TIMINGS);
1883 + }
1884 + if (pad_id == METADATA_PAD ||
1885 + !v4l2_subdev_has_op(unicam->sensor, pad,
1886 + enum_frame_interval))
1887 + v4l2_disable_ioctl(&node->video_dev,
1888 + VIDIOC_ENUM_FRAMEINTERVALS);
1889 + if (pad_id == METADATA_PAD ||
1890 + !v4l2_subdev_has_op(unicam->sensor, video,
1891 + g_frame_interval))
1892 + v4l2_disable_ioctl(&node->video_dev, VIDIOC_G_PARM);
1893 + if (pad_id == METADATA_PAD ||
1894 + !v4l2_subdev_has_op(unicam->sensor, video,
1895 + s_frame_interval))
1896 + v4l2_disable_ioctl(&node->video_dev, VIDIOC_S_PARM);
1897 +
1898 + if (pad_id == METADATA_PAD ||
1899 + !v4l2_subdev_has_op(unicam->sensor, pad,
1900 + enum_frame_size))
1901 + v4l2_disable_ioctl(&node->video_dev,
1902 + VIDIOC_ENUM_FRAMESIZES);
1903 +
1904 + if (node->pad_id == METADATA_PAD ||
1905 + !v4l2_subdev_has_op(unicam->sensor, pad, set_selection))
1906 + v4l2_disable_ioctl(&node->video_dev,
1907 + VIDIOC_S_SELECTION);
1908 +
1909 + if (node->pad_id == METADATA_PAD ||
1910 + !v4l2_subdev_has_op(unicam->sensor, pad, get_selection))
1911 + v4l2_disable_ioctl(&node->video_dev,
1912 + VIDIOC_G_SELECTION);
1913 + }
1914
1915 ret = video_register_device(vdev, VFL_TYPE_VIDEO, -1);
1916 if (ret) {
1917 @@ -2619,7 +3109,7 @@ static int unicam_async_complete(struct
1918 if (unicam->sensor->entity.pads[i].flags & MEDIA_PAD_FL_SOURCE) {
1919 if (source_pads < MAX_NODES) {
1920 unicam->node[source_pads].src_pad_id = i;
1921 - unicam_err(unicam, "source pad %u is index %u\n",
1922 + unicam_dbg(3, unicam, "source pad %u is index %u\n",
1923 source_pads, i);
1924 }
1925 source_pads++;
1926 @@ -2648,7 +3138,10 @@ static int unicam_async_complete(struct
1927 }
1928 }
1929
1930 - ret = v4l2_device_register_ro_subdev_nodes(&unicam->v4l2_dev);
1931 + if (unicam->mc_api)
1932 + ret = v4l2_device_register_subdev_nodes(&unicam->v4l2_dev);
1933 + else
1934 + ret = v4l2_device_register_ro_subdev_nodes(&unicam->v4l2_dev);
1935 if (ret) {
1936 unicam_err(unicam, "Unable to register subdev nodes.\n");
1937 goto unregister;
1938 @@ -2808,6 +3301,14 @@ static int unicam_probe(struct platform_
1939 kref_init(&unicam->kref);
1940 unicam->pdev = pdev;
1941
1942 + /*
1943 + * Adopt the current setting of the module parameter, and check if
1944 + * device tree requests it.
1945 + */
1946 + unicam->mc_api = media_controller;
1947 + if (of_property_read_bool(pdev->dev.of_node, "brcm,media-controller"))
1948 + unicam->mc_api = true;
1949 +
1950 unicam->base = devm_platform_ioremap_resource(pdev, 0);
1951 if (IS_ERR(unicam->base)) {
1952 unicam_err(unicam, "Failed to get main io block\n");