#ifndef ISP2401
/*
 * Support for Intel Camera Imaging ISP subsystem.
 * Copyright (c) 2015, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 */
#else
/*
Support for Intel Camera Imaging ISP subsystem.
Copyright (c) 2010 - 2015, Intel Corporation.

This program is free software; you can redistribute it and/or modify it
under the terms and conditions of the GNU General Public License,
version 2, as published by the Free Software Foundation.

This program is distributed in the hope it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
*/
#endif

#include "ia_css_debug.h"
#include "sw_event_global.h"   /* encode_sw_event */
#include "sp.h"                /* cnd_sp_irq_enable() */
#include "assert_support.h"
#include "memory_access.h"
#include "sh_css_sp.h"
#include "ia_css_pipeline.h"
#include "ia_css_isp_param.h"
#include "ia_css_bufq.h"

#define PIPELINE_NUM_UNMAPPED                 (~0U)
#define PIPELINE_SP_THREAD_EMPTY_TOKEN        (0x0)
#define PIPELINE_SP_THREAD_RESERVED_TOKEN     (0x1)


/*******************************************************
*** Static variables
********************************************************/
static unsigned int pipeline_num_to_sp_thread_map[IA_CSS_PIPELINE_NUM_MAX];
static unsigned int pipeline_sp_thread_list[SH_CSS_MAX_SP_THREADS];

/*******************************************************
*** Static functions
********************************************************/
static void pipeline_init_sp_thread_map(void);
static void pipeline_map_num_to_sp_thread(unsigned int pipe_num);
static void pipeline_unmap_num_to_sp_thread(unsigned int pipe_num);
static void pipeline_init_defaults(
        struct ia_css_pipeline *pipeline,
        enum ia_css_pipe_id pipe_id,
        unsigned int pipe_num,
        unsigned int dvs_frame_delay);

static void pipeline_stage_destroy(struct ia_css_pipeline_stage *stage);
static enum ia_css_err pipeline_stage_create(
        struct ia_css_pipeline_stage_desc *stage_desc,
        struct ia_css_pipeline_stage **new_stage);
static void ia_css_pipeline_set_zoom_stage(struct ia_css_pipeline *pipeline);
static void ia_css_pipeline_configure_inout_port(struct ia_css_pipeline *me,
        bool continuous);

/*******************************************************
*** Public functions
********************************************************/
void ia_css_pipeline_init(void)
{
        pipeline_init_sp_thread_map();
}

enum ia_css_err ia_css_pipeline_create(
        struct ia_css_pipeline *pipeline,
        enum ia_css_pipe_id pipe_id,
        unsigned int pipe_num,
        unsigned int dvs_frame_delay)
{
        assert(pipeline != NULL);
        IA_CSS_ENTER_PRIVATE("pipeline = %p, pipe_id = %d, pipe_num = %d, dvs_frame_delay = %d",
                             pipeline, pipe_id, pipe_num, dvs_frame_delay);
        if (pipeline == NULL) {
                IA_CSS_LEAVE_ERR_PRIVATE(IA_CSS_ERR_INVALID_ARGUMENTS);
                return IA_CSS_ERR_INVALID_ARGUMENTS;
        }

        pipeline_init_defaults(pipeline, pipe_id, pipe_num, dvs_frame_delay);

        IA_CSS_LEAVE_ERR_PRIVATE(IA_CSS_SUCCESS);
        return IA_CSS_SUCCESS;
}

void ia_css_pipeline_map(unsigned int pipe_num, bool map)
{
        assert(pipe_num < IA_CSS_PIPELINE_NUM_MAX);
        IA_CSS_ENTER_PRIVATE("pipe_num = %d, map = %d", pipe_num, map);

        if (pipe_num >= IA_CSS_PIPELINE_NUM_MAX) {
                IA_CSS_ERROR("Invalid pipe number");
                IA_CSS_LEAVE_PRIVATE("void");
                return;
        }
        if (map)
                pipeline_map_num_to_sp_thread(pipe_num);
        else
                pipeline_unmap_num_to_sp_thread(pipe_num);
        IA_CSS_LEAVE_PRIVATE("void");
}

/* @brief destroy a pipeline
 *
 * @param[in] pipeline
 * @return None
 *
 */
void ia_css_pipeline_destroy(struct ia_css_pipeline *pipeline)
{
        assert(pipeline != NULL);
        IA_CSS_ENTER_PRIVATE("pipeline = %p", pipeline);

        if (pipeline == NULL) {
                IA_CSS_ERROR("NULL input parameter");
                IA_CSS_LEAVE_PRIVATE("void");
                return;
        }

        IA_CSS_LOG("pipe_num = %d", pipeline->pipe_num);

        /* Free the pipeline number */
        ia_css_pipeline_clean(pipeline);

        IA_CSS_LEAVE_PRIVATE("void");
}

/* Start a pipeline: queue a start event to the SP. This call returns
 * immediately; it does not wait for the pipeline to complete.
 */
void ia_css_pipeline_start(enum ia_css_pipe_id pipe_id,
                           struct ia_css_pipeline *pipeline)
{
        uint8_t pipe_num = 0;
        unsigned int thread_id;

        assert(pipeline != NULL);
        ia_css_debug_dtrace(IA_CSS_DEBUG_TRACE,
                            "ia_css_pipeline_start() enter: pipe_id=%d, pipeline=%p\n",
                            pipe_id, pipeline);
        pipeline->pipe_id = pipe_id;
        sh_css_sp_init_pipeline(pipeline, pipe_id, pipe_num,
                                false, false, false, true, SH_CSS_BDS_FACTOR_1_00,
                                SH_CSS_PIPE_CONFIG_OVRD_NO_OVRD,
#ifndef ISP2401
                                IA_CSS_INPUT_MODE_MEMORY, NULL, NULL
#else
                                IA_CSS_INPUT_MODE_MEMORY, NULL, NULL,
#endif
#if !defined(HAS_NO_INPUT_SYSTEM)
#ifndef ISP2401
                                , (mipi_port_ID_t) 0
#else
                                (mipi_port_ID_t) 0,
#endif
#endif
#ifndef ISP2401
                                );
#else
                                NULL, NULL);
#endif
        ia_css_pipeline_get_sp_thread_id(pipe_num, &thread_id);
        if (!sh_css_sp_is_running()) {
                ia_css_debug_dtrace(IA_CSS_DEBUG_TRACE,
                                    "ia_css_pipeline_start() error, leaving\n");
                /* queues are invalid */
                return;
        }
        ia_css_bufq_enqueue_psys_event(IA_CSS_PSYS_SW_EVENT_START_STREAM,
                                       (uint8_t)thread_id,
                                       0,
                                       0);

        ia_css_debug_dtrace(IA_CSS_DEBUG_TRACE,
                            "ia_css_pipeline_start() leave: return_void\n");
}

/*
 * @brief Query the SP thread ID.
 * Refer to "sh_css_internal.h" for details.
 */
bool ia_css_pipeline_get_sp_thread_id(unsigned int key, unsigned int *val)
{

        IA_CSS_ENTER("key=%d, val=%p", key, val);

        if ((val == NULL) || (key >= IA_CSS_PIPELINE_NUM_MAX) || (key >= IA_CSS_PIPE_ID_NUM)) {
                IA_CSS_LEAVE("return value = false");
                return false;
        }

        *val = pipeline_num_to_sp_thread_map[key];

        if (*val == (unsigned)PIPELINE_NUM_UNMAPPED) {
                IA_CSS_LOG("unmapped pipeline number");
                IA_CSS_LEAVE("return value = false");
                return false;
        }
        IA_CSS_LEAVE("return value = true");
        return true;
}
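
/* Illustrative sketch only (not part of the driver): how a caller is expected
 * to pair the pipe-number mapping with the SP thread ID lookup above. The
 * pipe_num value and the error handling are hypothetical; only the two
 * functions shown are taken from this file.
 *
 *      unsigned int pipe_num = 0;      // assumed: a valid pipeline number
 *      unsigned int thread_id;
 *
 *      ia_css_pipeline_map(pipe_num, true);    // reserve an SP thread
 *      if (!ia_css_pipeline_get_sp_thread_id(pipe_num, &thread_id))
 *              return;                         // pipe_num was never mapped
 *      // ... use thread_id when queueing SP events ...
 *      ia_css_pipeline_map(pipe_num, false);   // release the SP thread
 */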

void ia_css_pipeline_dump_thread_map_info(void)
{
        unsigned int i;
        ia_css_debug_dtrace(IA_CSS_DEBUG_TRACE,
                            "pipeline_num_to_sp_thread_map:\n");
        for (i = 0; i < IA_CSS_PIPELINE_NUM_MAX; i++) {
                ia_css_debug_dtrace(IA_CSS_DEBUG_TRACE,
                                    "pipe_num: %u, tid: 0x%x\n", i, pipeline_num_to_sp_thread_map[i]);
        }
}

enum ia_css_err ia_css_pipeline_request_stop(struct ia_css_pipeline *pipeline)
{
        enum ia_css_err err = IA_CSS_SUCCESS;
        unsigned int thread_id;

        assert(pipeline != NULL);

        if (pipeline == NULL)
                return IA_CSS_ERR_INVALID_ARGUMENTS;

        ia_css_debug_dtrace(IA_CSS_DEBUG_TRACE,
                            "ia_css_pipeline_request_stop() enter: pipeline=%p\n",
                            pipeline);
        pipeline->stop_requested = true;

        /* Send stop event to the sp */
        /* This needs improvement, stop on all the pipes available
         * in the stream */
        ia_css_pipeline_get_sp_thread_id(pipeline->pipe_num, &thread_id);
        if (!sh_css_sp_is_running()) {
                ia_css_debug_dtrace(IA_CSS_DEBUG_TRACE,
                                    "ia_css_pipeline_request_stop() leaving\n");
                /* queues are invalid */
                return IA_CSS_ERR_RESOURCE_NOT_AVAILABLE;
        }
        ia_css_bufq_enqueue_psys_event(IA_CSS_PSYS_SW_EVENT_STOP_STREAM,
                                       (uint8_t)thread_id,
                                       0,
                                       0);
        sh_css_sp_uninit_pipeline(pipeline->pipe_num);

        ia_css_debug_dtrace(IA_CSS_DEBUG_TRACE,
                            "ia_css_pipeline_request_stop() leave: return_err=%d\n",
                            err);
        return err;
}

void ia_css_pipeline_clean(struct ia_css_pipeline *pipeline)
{
        struct ia_css_pipeline_stage *s;

        assert(pipeline != NULL);
        IA_CSS_ENTER_PRIVATE("pipeline = %p", pipeline);

        if (pipeline == NULL) {
                IA_CSS_ERROR("NULL input parameter");
                IA_CSS_LEAVE_PRIVATE("void");
                return;
        }
        s = pipeline->stages;

        while (s) {
                struct ia_css_pipeline_stage *next = s->next;
                pipeline_stage_destroy(s);
                s = next;
        }
        pipeline_init_defaults(pipeline, pipeline->pipe_id, pipeline->pipe_num, pipeline->dvs_frame_delay);

        IA_CSS_LEAVE_PRIVATE("void");
}

/* @brief Add a stage to a pipeline.
 *
 * @param       pipeline    Pointer to the pipeline the stage is added to.
 * @param[in]   stage_desc  The description of the stage.
 * @param[out]  stage       The newly created stage (optional; may be NULL).
 * @return      IA_CSS_SUCCESS or an error code on failure.
 *
 * Add a new stage to a non-NULL pipeline.
 * The stage consists of an ISP binary or firmware and its input and
 * output arguments.
 */
enum ia_css_err ia_css_pipeline_create_and_add_stage(
        struct ia_css_pipeline *pipeline,
        struct ia_css_pipeline_stage_desc *stage_desc,
        struct ia_css_pipeline_stage **stage)
{
        struct ia_css_pipeline_stage *last, *new_stage = NULL;
        enum ia_css_err err;

        /* other arguments can be NULL */
        assert(pipeline != NULL);
        assert(stage_desc != NULL);
        last = pipeline->stages;

        ia_css_debug_dtrace(IA_CSS_DEBUG_TRACE,
                            "ia_css_pipeline_create_and_add_stage() enter:\n");
        if (!stage_desc->binary && !stage_desc->firmware
            && (stage_desc->sp_func == IA_CSS_PIPELINE_NO_FUNC)) {
                ia_css_debug_dtrace(IA_CSS_DEBUG_TRACE,
                                    "ia_css_pipeline_create_and_add_stage() done: Invalid args\n");

                return IA_CSS_ERR_INTERNAL_ERROR;
        }

        /* Find the last stage */
        while (last && last->next)
                last = last->next;

        /* if in_frame is not set, we use the out_frame from the previous
         * stage, if no previous stage, it's an error.
         */
        if ((stage_desc->sp_func == IA_CSS_PIPELINE_NO_FUNC)
            && (!stage_desc->in_frame)
            && (!stage_desc->firmware)
            && (!stage_desc->binary->online)) {

                /* Do this only for ISP stages */
                if (last && last->args.out_frame[0])
                        stage_desc->in_frame = last->args.out_frame[0];

                if (!stage_desc->in_frame)
                        return IA_CSS_ERR_INTERNAL_ERROR;
        }

        /* Create the new stage */
        err = pipeline_stage_create(stage_desc, &new_stage);
        if (err != IA_CSS_SUCCESS) {
                ia_css_debug_dtrace(IA_CSS_DEBUG_TRACE,
                                    "ia_css_pipeline_create_and_add_stage() done: stage_create_failed\n");
                return err;
        }

        if (last)
                last->next = new_stage;
        else
                pipeline->stages = new_stage;

        /* Output the new stage */
        if (stage)
                *stage = new_stage;

        ia_css_debug_dtrace(IA_CSS_DEBUG_TRACE,
                            "ia_css_pipeline_create_and_add_stage() done:\n");
        return IA_CSS_SUCCESS;
}
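
/* Illustrative sketch only (not part of the driver): a minimal way to add two
 * ISP stages with the function above. `video_binary`, `vf_pp_binary` and
 * `input_frame` are hypothetical, already-configured objects; only the fields
 * and constants shown are ones referenced in this file. Because the second
 * descriptor leaves in_frame NULL, the code above chains it to the first
 * stage's out_frame[0] automatically.
 *
 *      struct ia_css_pipeline_stage_desc desc = { 0 };
 *      struct ia_css_pipeline_stage *stage;
 *
 *      desc.binary = &video_binary;            // hypothetical binary
 *      desc.sp_func = IA_CSS_PIPELINE_NO_FUNC;
 *      desc.mode = IA_CSS_BINARY_MODE_VIDEO;
 *      desc.in_frame = input_frame;            // hypothetical input frame
 *      if (ia_css_pipeline_create_and_add_stage(pipeline, &desc, &stage) != IA_CSS_SUCCESS)
 *              return;
 *
 *      memset(&desc, 0, sizeof(desc));         // second stage: in_frame left NULL,
 *      desc.binary = &vf_pp_binary;            // so it is chained to the previous
 *      desc.sp_func = IA_CSS_PIPELINE_NO_FUNC; // stage's out_frame[0]
 *      desc.mode = IA_CSS_BINARY_MODE_VF_PP;
 *      ia_css_pipeline_create_and_add_stage(pipeline, &desc, NULL);
 */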

void ia_css_pipeline_finalize_stages(struct ia_css_pipeline *pipeline,
                                     bool continuous)
{
        unsigned i = 0;
        struct ia_css_pipeline_stage *stage;

        assert(pipeline != NULL);
        for (stage = pipeline->stages; stage; stage = stage->next) {
                stage->stage_num = i;
                i++;
        }
        pipeline->num_stages = i;

        ia_css_pipeline_set_zoom_stage(pipeline);
        ia_css_pipeline_configure_inout_port(pipeline, continuous);
}

enum ia_css_err ia_css_pipeline_get_stage(struct ia_css_pipeline *pipeline,
                                          int mode,
                                          struct ia_css_pipeline_stage **stage)
{
        struct ia_css_pipeline_stage *s;
        assert(pipeline != NULL);
        assert(stage != NULL);
        ia_css_debug_dtrace(IA_CSS_DEBUG_TRACE,
                            "ia_css_pipeline_get_stage() enter:\n");
        for (s = pipeline->stages; s; s = s->next) {
                if (s->mode == mode) {
                        *stage = s;
                        return IA_CSS_SUCCESS;
                }
        }
        return IA_CSS_ERR_INTERNAL_ERROR;
}

enum ia_css_err ia_css_pipeline_get_stage_from_fw(struct ia_css_pipeline *pipeline,
                                                  uint32_t fw_handle,
                                                  struct ia_css_pipeline_stage **stage)
{
        struct ia_css_pipeline_stage *s;
        assert(pipeline != NULL);
        assert(stage != NULL);
        ia_css_debug_dtrace(IA_CSS_DEBUG_TRACE, "%s() \n", __func__);
        for (s = pipeline->stages; s; s = s->next) {
                if ((s->firmware) && (s->firmware->handle == fw_handle)) {
                        *stage = s;
                        return IA_CSS_SUCCESS;
                }
        }
        return IA_CSS_ERR_INTERNAL_ERROR;
}

enum ia_css_err ia_css_pipeline_get_fw_from_stage(struct ia_css_pipeline *pipeline,
                                                  uint32_t stage_num,
                                                  uint32_t *fw_handle)
{
        struct ia_css_pipeline_stage *s;

        ia_css_debug_dtrace(IA_CSS_DEBUG_TRACE, "%s() \n", __func__);
        if ((pipeline == NULL) || (fw_handle == NULL))
                return IA_CSS_ERR_INVALID_ARGUMENTS;

        for (s = pipeline->stages; s; s = s->next) {
                if ((s->stage_num == stage_num) && (s->firmware)) {
                        *fw_handle = s->firmware->handle;
                        return IA_CSS_SUCCESS;
                }
        }
        return IA_CSS_ERR_INTERNAL_ERROR;
}

enum ia_css_err ia_css_pipeline_get_output_stage(
        struct ia_css_pipeline *pipeline,
        int mode,
        struct ia_css_pipeline_stage **stage)
{
        struct ia_css_pipeline_stage *s;
        assert(pipeline != NULL);
        assert(stage != NULL);
        ia_css_debug_dtrace(IA_CSS_DEBUG_TRACE,
                            "ia_css_pipeline_get_output_stage() enter:\n");

        *stage = NULL;
        /* First find acceleration firmware at end of pipe */
        for (s = pipeline->stages; s; s = s->next) {
                if (s->firmware && s->mode == mode &&
                    s->firmware->info.isp.sp.enable.output)
                        *stage = s;
        }
        if (*stage)
                return IA_CSS_SUCCESS;
        /* If no firmware, find binary in pipe */
        return ia_css_pipeline_get_stage(pipeline, mode, stage);
}

bool ia_css_pipeline_has_stopped(struct ia_css_pipeline *pipeline)
{
        /* Keep this as a static variable: the Android build fails if it is
         * made a local variable, because the stack size on Android is limited
         * to 2k and this structure is around 2.5K. A dynamic allocation could
         * be used instead of the static one, but if this call is made too
         * often it will fragment memory, versus a fixed allocation. */
        static struct sh_css_sp_group sp_group;
        unsigned int thread_id;
        const struct ia_css_fw_info *fw;
        unsigned int HIVE_ADDR_sp_group;

        fw = &sh_css_sp_fw;
        HIVE_ADDR_sp_group = fw->info.sp.group;

        ia_css_pipeline_get_sp_thread_id(pipeline->pipe_num, &thread_id);
        sp_dmem_load(SP0_ID,
                     (unsigned int)sp_address_of(sp_group),
                     &sp_group, sizeof(struct sh_css_sp_group));
        return sp_group.pipe[thread_id].num_stages == 0;
}

#if defined(USE_INPUT_SYSTEM_VERSION_2401)
struct sh_css_sp_pipeline_io_status *ia_css_pipeline_get_pipe_io_status(void)
{
        return &sh_css_sp_group.pipe_io_status;
}
#endif

bool ia_css_pipeline_is_mapped(unsigned int key)
{
        bool ret = false;

        IA_CSS_ENTER_PRIVATE("key = %d", key);

        if ((key >= IA_CSS_PIPELINE_NUM_MAX) || (key >= IA_CSS_PIPE_ID_NUM)) {
                IA_CSS_ERROR("Invalid key!!");
                IA_CSS_LEAVE_PRIVATE("return = %d", false);
                return false;
        }

        ret = (bool)(pipeline_num_to_sp_thread_map[key] != (unsigned)PIPELINE_NUM_UNMAPPED);

        IA_CSS_LEAVE_PRIVATE("return = %d", ret);
        return ret;
}

/*******************************************************
*** Static functions
********************************************************/

/* Pipeline:
 * To organize the several different binaries for each type of mode,
 * we use a pipeline. A pipeline contains a number of stages, each with
 * their own binary and frame pointers.
 * When stages are added to a pipeline, output frames that are not passed
 * from outside are automatically allocated.
 * When input frames are not passed from outside, each stage will use the
 * output frame of the previous stage as input (the full resolution output,
 * not the viewfinder output).
 * Pipelines must be cleaned and re-created when settings of the binaries
 * change.
 */
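/* Illustrative sketch only (not part of the driver): the typical lifecycle
 * implied by the description above, using only functions defined in this
 * file. `pipe`, `pipe_num`, `continuous` and the dvs delay value are
 * hypothetical, and the exact ordering in the real driver may differ; stage
 * descriptors are filled as sketched after
 * ia_css_pipeline_create_and_add_stage() above.
 *
 *      ia_css_pipeline_init();                         // once, at driver init
 *      ia_css_pipeline_create(&pipe->pipeline, IA_CSS_PIPE_ID_VIDEO, pipe_num, 1);
 *      ia_css_pipeline_map(pipe_num, true);            // reserve an SP thread
 *      // ... ia_css_pipeline_create_and_add_stage() for each stage ...
 *      ia_css_pipeline_finalize_stages(&pipe->pipeline, continuous);
 *      ia_css_pipeline_start(IA_CSS_PIPE_ID_VIDEO, &pipe->pipeline);
 *      // ... streaming ...
 *      ia_css_pipeline_request_stop(&pipe->pipeline);
 *      while (!ia_css_pipeline_has_stopped(&pipe->pipeline))
 *              ;                                       // real code waits on SP events instead
 *      ia_css_pipeline_map(pipe_num, false);           // release the SP thread
 *      ia_css_pipeline_destroy(&pipe->pipeline);
 */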
static void pipeline_stage_destroy(struct ia_css_pipeline_stage *stage)
{
        unsigned int i;
        for (i = 0; i < IA_CSS_BINARY_MAX_OUTPUT_PORTS; i++) {
                if (stage->out_frame_allocated[i]) {
                        ia_css_frame_free(stage->args.out_frame[i]);
                        stage->args.out_frame[i] = NULL;
                }
        }
        if (stage->vf_frame_allocated) {
                ia_css_frame_free(stage->args.out_vf_frame);
                stage->args.out_vf_frame = NULL;
        }
        sh_css_free(stage);
}

static void pipeline_init_sp_thread_map(void)
{
        unsigned int i;

        for (i = 1; i < SH_CSS_MAX_SP_THREADS; i++)
                pipeline_sp_thread_list[i] = PIPELINE_SP_THREAD_EMPTY_TOKEN;

        for (i = 0; i < IA_CSS_PIPELINE_NUM_MAX; i++)
                pipeline_num_to_sp_thread_map[i] = PIPELINE_NUM_UNMAPPED;
}

static void pipeline_map_num_to_sp_thread(unsigned int pipe_num)
{
        unsigned int i;
        bool found_sp_thread = false;

        /* pipe is not mapped to any thread */
        assert(pipeline_num_to_sp_thread_map[pipe_num]
               == (unsigned)PIPELINE_NUM_UNMAPPED);

        for (i = 0; i < SH_CSS_MAX_SP_THREADS; i++) {
                if (pipeline_sp_thread_list[i] ==
                    PIPELINE_SP_THREAD_EMPTY_TOKEN) {
                        pipeline_sp_thread_list[i] =
                            PIPELINE_SP_THREAD_RESERVED_TOKEN;
                        pipeline_num_to_sp_thread_map[pipe_num] = i;
                        found_sp_thread = true;
                        break;
                }
        }

        /* Make sure a mapping is found */
        /* I could do:
                assert(i < SH_CSS_MAX_SP_THREADS);

           But the below is more descriptive.
        */
        assert(found_sp_thread != false);
}

static void pipeline_unmap_num_to_sp_thread(unsigned int pipe_num)
{
        unsigned int thread_id;
        assert(pipeline_num_to_sp_thread_map[pipe_num]
               != (unsigned)PIPELINE_NUM_UNMAPPED);

        thread_id = pipeline_num_to_sp_thread_map[pipe_num];
        pipeline_num_to_sp_thread_map[pipe_num] = PIPELINE_NUM_UNMAPPED;
        pipeline_sp_thread_list[thread_id] = PIPELINE_SP_THREAD_EMPTY_TOKEN;
}

static enum ia_css_err pipeline_stage_create(
        struct ia_css_pipeline_stage_desc *stage_desc,
        struct ia_css_pipeline_stage **new_stage)
{
        enum ia_css_err err = IA_CSS_SUCCESS;
        struct ia_css_pipeline_stage *stage = NULL;
        struct ia_css_binary *binary;
        struct ia_css_frame *vf_frame;
        struct ia_css_frame *out_frame[IA_CSS_BINARY_MAX_OUTPUT_PORTS];
        const struct ia_css_fw_info *firmware;
        unsigned int i;

        /* Verify input parameters */
        if (!(stage_desc->in_frame) && !(stage_desc->firmware)
            && (stage_desc->binary) && !(stage_desc->binary->online)) {
                err = IA_CSS_ERR_INTERNAL_ERROR;
                goto ERR;
        }

        binary = stage_desc->binary;
        firmware = stage_desc->firmware;
        vf_frame = stage_desc->vf_frame;
        for (i = 0; i < IA_CSS_BINARY_MAX_OUTPUT_PORTS; i++) {
                out_frame[i] = stage_desc->out_frame[i];
        }

        stage = sh_css_malloc(sizeof(*stage));
        if (stage == NULL) {
                err = IA_CSS_ERR_CANNOT_ALLOCATE_MEMORY;
                goto ERR;
        }
        memset(stage, 0, sizeof(*stage));

        if (firmware) {
                stage->binary = NULL;
                stage->binary_info =
                    (struct ia_css_binary_info *)&firmware->info.isp;
        } else {
                stage->binary = binary;
                if (binary)
                        stage->binary_info =
                            (struct ia_css_binary_info *)binary->info;
                else
                        stage->binary_info = NULL;
        }

        stage->firmware = firmware;
        stage->sp_func = stage_desc->sp_func;
        stage->max_input_width = stage_desc->max_input_width;
        stage->mode = stage_desc->mode;
        for (i = 0; i < IA_CSS_BINARY_MAX_OUTPUT_PORTS; i++)
                stage->out_frame_allocated[i] = false;
        stage->vf_frame_allocated = false;
        stage->next = NULL;
        sh_css_binary_args_reset(&stage->args);

        for (i = 0; i < IA_CSS_BINARY_MAX_OUTPUT_PORTS; i++) {
                if (!(out_frame[i]) && (binary)
                    && (binary->out_frame_info[i].res.width)) {
                        err = ia_css_frame_allocate_from_info(&out_frame[i],
                                                              &binary->out_frame_info[i]);
                        if (err != IA_CSS_SUCCESS)
                                goto ERR;
                        stage->out_frame_allocated[i] = true;
                }
        }
        /* VF frame is not needed in case of need_pp
           However, the capture binary needs a vf frame to write to.
         */
        if (!vf_frame) {
                if ((binary && binary->vf_frame_info.res.width) ||
                    (firmware && firmware->info.isp.sp.enable.vf_veceven)
                   ) {
                        err = ia_css_frame_allocate_from_info(&vf_frame,
                                                              &binary->vf_frame_info);
                        if (err != IA_CSS_SUCCESS)
                                goto ERR;
                        stage->vf_frame_allocated = true;
                }
        } else if (vf_frame && binary && binary->vf_frame_info.res.width
                   && !firmware) {
                /* only mark as allocated if buffer pointer available */
                if (vf_frame->data != mmgr_NULL)
                        stage->vf_frame_allocated = true;
        }

        stage->args.in_frame = stage_desc->in_frame;
        for (i = 0; i < IA_CSS_BINARY_MAX_OUTPUT_PORTS; i++)
                stage->args.out_frame[i] = out_frame[i];
        stage->args.out_vf_frame = vf_frame;
        *new_stage = stage;
        return err;
ERR:
        if (stage != NULL)
                pipeline_stage_destroy(stage);
        return err;
}

static void pipeline_init_defaults(
        struct ia_css_pipeline *pipeline,
        enum ia_css_pipe_id pipe_id,
        unsigned int pipe_num,
        unsigned int dvs_frame_delay)
{
        struct ia_css_frame init_frame = DEFAULT_FRAME;
        unsigned int i;

        pipeline->pipe_id = pipe_id;
        pipeline->stages = NULL;
        pipeline->stop_requested = false;
        pipeline->current_stage = NULL;
        pipeline->in_frame = init_frame;
        for (i = 0; i < IA_CSS_PIPE_MAX_OUTPUT_STAGE; i++) {
                pipeline->out_frame[i] = init_frame;
                pipeline->vf_frame[i] = init_frame;
        }
        pipeline->num_execs = -1;
        pipeline->acquire_isp_each_stage = true;
        pipeline->pipe_num = (uint8_t)pipe_num;
        pipeline->dvs_frame_delay = dvs_frame_delay;
}

static void ia_css_pipeline_set_zoom_stage(struct ia_css_pipeline *pipeline)
{
        struct ia_css_pipeline_stage *stage = NULL;
        enum ia_css_err err = IA_CSS_SUCCESS;

        assert(pipeline != NULL);
        if (pipeline->pipe_id == IA_CSS_PIPE_ID_PREVIEW) {
                /* in preview pipeline, vf_pp stage should do zoom */
                err = ia_css_pipeline_get_stage(pipeline, IA_CSS_BINARY_MODE_VF_PP, &stage);
                if (err == IA_CSS_SUCCESS)
                        stage->enable_zoom = true;
        } else if (pipeline->pipe_id == IA_CSS_PIPE_ID_CAPTURE) {
                /* in capture pipeline, capture_pp stage should do zoom */
                err = ia_css_pipeline_get_stage(pipeline, IA_CSS_BINARY_MODE_CAPTURE_PP, &stage);
                if (err == IA_CSS_SUCCESS)
                        stage->enable_zoom = true;
        } else if (pipeline->pipe_id == IA_CSS_PIPE_ID_VIDEO) {
                /* in video pipeline, video stage should do zoom */
                err = ia_css_pipeline_get_stage(pipeline, IA_CSS_BINARY_MODE_VIDEO, &stage);
                if (err == IA_CSS_SUCCESS)
                        stage->enable_zoom = true;
        } else if (pipeline->pipe_id == IA_CSS_PIPE_ID_YUVPP) {
                /* in yuvpp pipeline, first yuv_scaler stage should do zoom */
                err = ia_css_pipeline_get_stage(pipeline, IA_CSS_BINARY_MODE_CAPTURE_PP, &stage);
                if (err == IA_CSS_SUCCESS)
                        stage->enable_zoom = true;
        }
}

static void
ia_css_pipeline_configure_inout_port(struct ia_css_pipeline *me, bool continuous)
{
        ia_css_debug_dtrace(IA_CSS_DEBUG_TRACE_PRIVATE,
                            "ia_css_pipeline_configure_inout_port() enter: pipe_id(%d) continuous(%d)\n",
                            me->pipe_id, continuous);
        switch (me->pipe_id) {
        case IA_CSS_PIPE_ID_PREVIEW:
        case IA_CSS_PIPE_ID_VIDEO:
                SH_CSS_PIPE_PORT_CONFIG_SET(me->inout_port_config,
                                            (uint8_t)SH_CSS_PORT_INPUT,
                                            (uint8_t)(continuous ? SH_CSS_COPYSINK_TYPE : SH_CSS_HOST_TYPE), 1);
                SH_CSS_PIPE_PORT_CONFIG_SET(me->inout_port_config,
                                            (uint8_t)SH_CSS_PORT_OUTPUT,
                                            (uint8_t)SH_CSS_HOST_TYPE, 1);
                break;
        case IA_CSS_PIPE_ID_COPY: /*Copy pipe ports configured to "offline" mode*/
                SH_CSS_PIPE_PORT_CONFIG_SET(me->inout_port_config,
                                            (uint8_t)SH_CSS_PORT_INPUT,
                                            (uint8_t)SH_CSS_HOST_TYPE, 1);
                if (continuous) {
                        SH_CSS_PIPE_PORT_CONFIG_SET(me->inout_port_config,
                                                    (uint8_t)SH_CSS_PORT_OUTPUT,
                                                    (uint8_t)SH_CSS_COPYSINK_TYPE, 1);
                        SH_CSS_PIPE_PORT_CONFIG_SET(me->inout_port_config,
                                                    (uint8_t)SH_CSS_PORT_OUTPUT,
                                                    (uint8_t)SH_CSS_TAGGERSINK_TYPE, 1);
                } else {
                        SH_CSS_PIPE_PORT_CONFIG_SET(me->inout_port_config,
                                                    (uint8_t)SH_CSS_PORT_OUTPUT,
                                                    (uint8_t)SH_CSS_HOST_TYPE, 1);
                }
                break;
        case IA_CSS_PIPE_ID_CAPTURE:
                SH_CSS_PIPE_PORT_CONFIG_SET(me->inout_port_config,
                                            (uint8_t)SH_CSS_PORT_INPUT,
                                            (uint8_t)(continuous ? SH_CSS_TAGGERSINK_TYPE : SH_CSS_HOST_TYPE),
                                            1);
                SH_CSS_PIPE_PORT_CONFIG_SET(me->inout_port_config,
                                            (uint8_t)SH_CSS_PORT_OUTPUT,
                                            (uint8_t)SH_CSS_HOST_TYPE, 1);
                break;
        case IA_CSS_PIPE_ID_YUVPP:
                SH_CSS_PIPE_PORT_CONFIG_SET(me->inout_port_config,
                                            (uint8_t)SH_CSS_PORT_INPUT,
                                            (uint8_t)(SH_CSS_HOST_TYPE), 1);
                SH_CSS_PIPE_PORT_CONFIG_SET(me->inout_port_config,
                                            (uint8_t)SH_CSS_PORT_OUTPUT,
                                            (uint8_t)SH_CSS_HOST_TYPE, 1);
                break;
        case IA_CSS_PIPE_ID_ACC:
                SH_CSS_PIPE_PORT_CONFIG_SET(me->inout_port_config,
                                            (uint8_t)SH_CSS_PORT_INPUT,
                                            (uint8_t)SH_CSS_HOST_TYPE, 1);
                SH_CSS_PIPE_PORT_CONFIG_SET(me->inout_port_config,
                                            (uint8_t)SH_CSS_PORT_OUTPUT,
                                            (uint8_t)SH_CSS_HOST_TYPE, 1);
                break;
        default:
                break;
        }
        ia_css_debug_dtrace(IA_CSS_DEBUG_TRACE_PRIVATE,
                            "ia_css_pipeline_configure_inout_port() leave: inout_port_config(%x)\n",
                            me->inout_port_config);
}