/*
 * Copyright 2020 - Stettbacher Signal Processing AG
 *
 * Author: Jonas Schmid
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice, this
 *    list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright notice, this
 *    list of conditions and the following disclaimer in the documentation and/or
 *    other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include <Python.h>

#include <stdio.h>
#include <string.h>
#include <errno.h>
#include <stdint.h>
#include <unistd.h>
#include <pthread.h>

/* O-3000 camera driver and color processing pipeline (libo3000) */
#include <o3000/o3000.h>
#include <o3000/color_pipe.h>

#include "helpers.h"

/* On Debian Linux and its derivatives (such as Ubuntu), Python libraries
 * installed through the package manager are kept in a non-standard directory
 * 'dist-packages' instead of 'site-packages'. Why? Who knows.
 *
 * https://wiki.debian.org/Python#Deviations_from_upstream
 */
#ifdef DEBIAN_LINUX
#include <numpy/arrayobject.h>      /* resolved via the Debian 'dist-packages' include path */
#else
#include <numpy/arrayobject.h>
#endif


#define DEFAULT_FRAME_RATE 10       ///< default frame rate
#define MAX_FRAME_WIDTH 1280        ///< O-3000 image width in pixels
#define MAX_FRAME_HEIGHT 960        ///< O-3000 image height in pixels

/**
 * default video cache size in bytes (1280 * 960 * 5 = 6144000 bytes)
 * use multiple of maximum image size for better performance
 */
#define DEFAULT_VIDEO_CACHE_SIZE (MAX_FRAME_WIDTH*MAX_FRAME_HEIGHT*5)

#define MAX_REC_THREAD 8            ///< maximum possible threads


/**
 * thread data definition structure
 */
struct recpthread_data_t {
    int is_running;                     ///< flag indicating whether thread is running or not
    int id;                             ///< thread data ID
    unsigned long long frame_cnt;       ///< frame counter of the processed image
    pthread_cond_t cond;                ///< thread condition variable used for synchronization
    pthread_mutex_t mutex;              ///< mutex used by pthread_cond_wait()
    struct color_pipe_t *color_pipe;    ///< allocated color pipeline for this thread
    unsigned char *img_raw;             ///< pointer to raw input image (from sensor)
    struct img_header_t img_header;     ///< image header corresponding to raw input image
    char filename[256];                 ///< image filename without extensions
};

static struct recpthread_data_t recthread_data[MAX_REC_THREAD];    ///< recording thread array
static struct recpthread_data_t *recthread_data_recent;            ///< thread data of the most recently processed frame
static pthread_t pthread_video;         ///< thread running video_main(), where o3000_connect() is called
static int cam_session;                 ///< camera session ID
static unsigned long long frame_cnt;    ///< image counter used to generate the file number if enabled
static int is_color;                    ///< not 0 if color camera is connected
static int log_level = O3000_LOG_ERROR;
static int video_cache_size = DEFAULT_VIDEO_CACHE_SIZE;
static int num_recthread = MAX_REC_THREAD-1;
static int prio_rec = PRIO_TIME_SLICED;
static int prio_video = PRIO_TIME_SLICED;
static float fps = DEFAULT_FRAME_RATE;
static int awb_enable = 0;
static int ccm_enable = 0;
static int lense_enable = 1;
static int lense_nr = 2;                // C-Mount 6mm 1/2.7" IR MP
static int sharp_enable = 0;
static int gamma_enable = 0;
static float gamma_fact = 1.0;
static int img_width = MAX_FRAME_WIDTH;
static int img_height = MAX_FRAME_HEIGHT;


/**
 * O-3000 log handler
 *
 * @param id session ID
 * @param msg logging message from O-3000 driver
 */
static void log_handling(int id, char* msg)
{
    printf("%s: %s", __func__, msg);
}


/**
 * XML handler
 * This function is called when the host receives XML messages from the camera.
 *
 * @param id session ID
 * @param msg message string
 * @param len string length
 */
static void xml_handling(int id, char* msg, int len)
{
    printf("%s: %s", __func__, msg);
}


/**
 * Thread used for image processing
 *
 * @param ptr Pointer to thread ID
 */
static void *rec_thread(void *ptr)
{
    int ret;
    int id = *((int*)ptr);
    struct recpthread_data_t *data = &recthread_data[id];

    // pthread_cond_wait() requires the associated mutex to be held by the caller
    pthread_mutex_lock(&data->mutex);

    while(1) {
        data->is_running = 0;

        ret = pthread_cond_wait(&(data->cond), &(data->mutex));
        if(ret) {
            printf("pthread_cond_wait: %s\n", strerror(ret));
            // TODO do something
            continue;
        }

        recthread_data_recent = data;   // TODO TODO replace by looking for the oldest frame

        // TODO copy data to cache
        color_pipe_process(data->color_pipe, data->img_raw, &(data->img_header));
        is_color = data->color_pipe->is_color;
    }

    pthread_mutex_unlock(&data->mutex);
    pthread_exit(NULL);
}


/**
 * This callback is called from the underlying O-3000 driver after receiving a complete image frame.
 * This function should finish quickly before the next image is received. Therefore, CPU intensive work
 * should be done in another thread.
 *
 * @param id session ID
 * @param buf image frame data
 * @param img_header image header
 */
static void video_handling(int id, unsigned char *buf, struct img_header_t *img_header)
{
    int i, thdrec_index;

    /*
     * Find next thread ready for processing image data.
     */
    for(i = 0; i < num_recthread; i++) {
        if(!recthread_data[i].is_running) {
            // at least one thread is not running
            break;
        }
    }
    // TODO TODO look for a recthread with an older frame
    if(i >= num_recthread) {
        printf("%s: All image processing threads are busy --> skip image frame.\n", __func__);
        return;
    }

    // save next free thread ID
    thdrec_index = i;

    recthread_data[thdrec_index].is_running = 1;
    recthread_data[thdrec_index].img_raw = buf;
    memcpy(&recthread_data[thdrec_index].img_header, img_header, sizeof(struct img_header_t));
    recthread_data[thdrec_index].frame_cnt = frame_cnt;
    frame_cnt++;

    pthread_cond_signal(&recthread_data[thdrec_index].cond);
}
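
/*
 * Illustration only (not part of the module): video_handling() and rec_thread()
 * form a producer/consumer pair synchronized with a condition variable. The
 * sketch below shows the canonical locking protocol for that pattern: the
 * consumer holds the mutex while testing a predicate and waiting, the producer
 * signals after updating the shared state under the same mutex. Names such as
 * frame_ready, consumer() and producer_signal() are made up for the example.
 */
#if 0
static int frame_ready;     /* example predicate, protected by the mutex */

static void *consumer(void *arg)
{
    struct recpthread_data_t *d = arg;

    pthread_mutex_lock(&d->mutex);
    for (;;) {
        while (!frame_ready) {
            /* atomically releases the mutex while waiting, re-acquires it on wakeup */
            pthread_cond_wait(&d->cond, &d->mutex);
        }
        frame_ready = 0;
        pthread_mutex_unlock(&d->mutex);

        /* CPU intensive work is done without holding the mutex */
        color_pipe_process(d->color_pipe, d->img_raw, &d->img_header);

        pthread_mutex_lock(&d->mutex);
    }
    return NULL;
}

static void producer_signal(struct recpthread_data_t *d)
{
    pthread_mutex_lock(&d->mutex);
    frame_ready = 1;
    pthread_cond_signal(&d->cond);
    pthread_mutex_unlock(&d->mutex);
}
#endif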
/**
 * Video thread handling images received by USB.
 *
 * @param ptr not used
 * @return not used
 */
static void *video_main(void *ptr)
{
    int ret, num_camera, i, msg_len, num_recthread = 2;     // TODO TODO
    char msg[2048];
    pthread_t pthread_imgproc[MAX_REC_THREAD];

    // create bunch of image recording threads
    // printf("%s: create %d threads for processing and recording\n", __func__, num_recthread);
    for(i = 0; i < num_recthread; i++) {
        recthread_data[i].id = i;

        if(pthread_cond_init(&recthread_data[i].cond, NULL)) {
            printf("pthread_cond_init: %s\n", strerror(errno));
            goto _abort_video_main1;
        }
        if(pthread_mutex_init(&recthread_data[i].mutex, NULL)) {
            printf("pthread_mutex_init: %s\n", strerror(errno));
            goto _abort_video_main1;
        }

        // setup color image processing pipeline with 12 bits per channel
        if(color_pipe_open(&recthread_data[i].color_pipe, MAX_FRAME_HEIGHT, MAX_FRAME_WIDTH, 12)) {
            printf("%s: Initializing pipeline for thread %d failed\n", __func__, i);
            goto _abort_video_main1;
        }
        color_pipe_stageconf_awb(recthread_data[i].color_pipe, awb_enable, 0.3, 0.01);
        color_pipe_stageconf_cam_calib(recthread_data[i].color_pipe, lense_enable, (enum o3000_lenses_t)lense_nr);
        color_pipe_stageconf_color_calib(recthread_data[i].color_pipe, ccm_enable, CCM_PRESET_O3020);
        color_pipe_stageconf_sharp(recthread_data[i].color_pipe, sharp_enable, 5, SHARP_ALG_LOCAL, 94.0);
        color_pipe_stageconf_gamma(recthread_data[i].color_pipe, gamma_enable, gamma_fact);

        // start image recording thread
        ret = generic_start_thread(&pthread_imgproc[i], rec_thread, prio_rec, (void*)&recthread_data[i].id);
        if(ret) {
            printf("%s: starting recording thread %d failed\n", __func__, i);
            goto _abort_video_main1;
        }
    }

    /*
     * Setup camera session
     */
    cam_session = o3000_init(O3000_VID, O3000_PID, video_cache_size, xml_handling, video_handling, log_handling, log_level);
    if(cam_session < 0) {
        printf("%s: Error opening new camera session (code %d)\n", __func__, cam_session);
        goto _abort_video_main2;
    }
    printf("%s: new session ID %d\n", __func__, cam_session);

    num_camera = o3000_device_discovery(cam_session);
    if(num_camera < 0) {
        printf("%s: device discovery error (code %d)\n", __func__, num_camera);
        goto _abort_video_main2;
    }
    if(num_camera == 0) {
        printf("%s: no camera connected to the system\n", __func__);
        goto _abort_video_main2;
    }
    printf("%s: %d cameras connected to the system\n", __func__, num_camera);

    // prepare XML configuration message: region of interest and frame rate
    sprintf(msg, ""
                 ""
                 "(0 %d 0 %d)"
                 ""
                 ""
                 ""
                 ""
                 "%f"
                 ""
                 ""
                 ""
                 ""
                 "",
                 img_width-1, img_height-1, fps);
    msg_len = strlen(msg);

    // establish connection to first camera which is not in use
    for(i = 0; i < num_camera; i++) {
        printf("%s: establish connection to camera %d\n", __func__, i);
        ret = o3000_connect(cam_session, i, msg, msg_len);
        if(ret == O3000_ERROR_BUSY) {
            printf("%s: device %d is already in use\n", __func__, i);
        }
        else {
            printf("%s: connection failure %d\n", __func__, ret);
            break;
        }
    }

    // cleanup
    for(i = 0; i < num_recthread; i++) {
        pthread_cancel(pthread_imgproc[i]);
        pthread_join(pthread_imgproc[i], NULL);
        printf("%s: recording thread %d joined\n", __func__, i);
    }
    for(i = 0; i < num_recthread; i++) {
        pthread_cond_destroy(&recthread_data[i].cond);
        pthread_mutex_destroy(&recthread_data[i].mutex);
    }

_abort_video_main2:
    o3000_exit(cam_session);

_abort_video_main1:
    for(i = 0; i < num_recthread; i++) {
        if(recthread_data[i].color_pipe != NULL) {
            color_pipe_close(recthread_data[i].color_pipe);
            recthread_data[i].color_pipe = NULL;
        }
    }

    // TODO TODO do_exit = 1;
    //kill(0, SIGTERM);

    printf("video thread exits...\n");
    pthread_exit(NULL);
}
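
/*
 * Illustration only (not part of the module): the O-3000 session lifecycle as
 * used by video_main(), reduced to the essential driver calls with error
 * handling omitted. All functions and globals are the ones used above; the
 * configuration string is left empty here because the actual XML payload is
 * camera specific.
 */
#if 0
static void session_lifecycle_sketch(void)
{
    char cfg[2048] = "";    /* XML configuration (ROI, frame rate), see video_main() */

    int session = o3000_init(O3000_VID, O3000_PID, video_cache_size,
                             xml_handling, video_handling, log_handling, log_level);

    if (o3000_device_discovery(session) > 0) {
        /* blocks while streaming; frames are delivered through video_handling() */
        o3000_connect(session, 0, cfg, strlen(cfg));
    }

    o3000_exit(session);
}
#endif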
/**
 * Get the most recently processed images as numpy arrays.
 *
 * Returns a list of three arrays: raw sensor image, debayered RGB image and
 * the color pipeline output.
 */
static PyObject* video_images_get(PyObject* self, PyObject* args)
{
    npy_intp dims[3];
    struct color_pipe_t *cp = recthread_data_recent->color_pipe;

    if (!is_color) {
        fprintf(stderr, "%s: not implemented for gray images\n", __func__);
        PyErr_SetString(PyExc_NotImplementedError, "not implemented for gray images");
        return NULL;
    }

    dims[0] = cp->height;
    dims[1] = cp->width;
    dims[2] = 3;

    PyObject *raw = PyArray_SimpleNewFromData(2, dims, NPY_UINT8, recthread_data_recent->img_raw);
    PyObject *rgb = PyArray_SimpleNewFromData(3, dims, NPY_UINT8, cp->debayer_data.img_rgb);
    PyObject *dist = PyArray_SimpleNewFromData(3, dims, NPY_UINT8, cp->img_out);

    PyObject *list = PyList_New(3);
    PyList_SetItem(list, 0, raw);
    PyList_SetItem(list, 1, rgb);
    PyList_SetItem(list, 2, dist);
    return list;
    //return PyArray_SimpleNewFromData(3, dims, NPY_UINT8, cp->img_out);
}


/**
 * Initialize and start the O-3000 pipeline.
 *
 * Starts the video handling thread which connects to the camera and
 * dispatches incoming frames to the processing threads.
 */
static PyObject* video_init(PyObject* self, PyObject* args)
{
    int ret;

    // create video handling thread
    ret = generic_start_thread(&pthread_video, video_main, prio_video, NULL);
    if(ret) {
        printf("%s: starting video handling thread failed\n", __func__);
        sync();
        PyErr_SetString(PyExc_RuntimeError, "starting video handling thread failed");
        return NULL;
    }
    Py_RETURN_NONE;
}


/**
 * Deinitialize the O-3000 pipeline: disconnect from the camera and join the video thread.
 */
static PyObject* video_deinit(PyObject* self, PyObject* args)
{
    o3000_disconnect(cam_session);
    pthread_join(pthread_video, NULL);
    o3000_exit(cam_session);
    cam_session = -1;
    sync();
    Py_RETURN_NONE;
}


/**
 * Send an XML message to the camera, e.g. for configuration changes.
 */
static PyObject* video_xml_send(PyObject* self, PyObject* args)
{
    Py_ssize_t count;
    const char* str;

    if (!PyArg_ParseTuple(args, "s#", &str, &count)) {
        return NULL;
    }
    printf("video send xml: %s\n", str);
    o3000_send_xml(cam_session, str, count);
    Py_RETURN_NONE;
}


/** module's function definition struct */
static PyMethodDef myMethods[] = {
    { "video_init", video_init, METH_NOARGS, "Initialize and start the O-3000 pipeline" },
    { "video_deinit", video_deinit, METH_NOARGS, "Deinitialize the O-3000 pipeline" },
    { "video_images_get", video_images_get, METH_NOARGS, "Get the most recent captured images: raw, color (debayered), distorted (lens)" },
    { "video_xml_send", video_xml_send, METH_VARARGS, "Send an XML message to the camera, e.g. for some configurations" },
    { NULL, NULL, 0, NULL }     // to signal the end of our method list
};

/** module definition struct */
static struct PyModuleDef o3000 = {
    PyModuleDef_HEAD_INIT,
    "o3000",
    "O-3000 Python API",
    -1,
    myMethods
};

/* module initialization */
PyMODINIT_FUNC PyInit_o3000(void)
{
    import_array();     // initialize the numpy C API
    return PyModule_Create(&o3000);
}
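
/*
 * Example usage from Python (illustration only; assumes the extension has been
 * built and is importable as 'o3000', and that the camera has had time to
 * deliver at least one frame before video_images_get() is called):
 *
 *   import time
 *   import o3000
 *
 *   o3000.video_init()                        # start video thread and connect to the camera
 *   time.sleep(1.0)                           # wait for the first processed frame
 *   raw, rgb, out = o3000.video_images_get()  # three numpy uint8 arrays
 *   print(rgb.shape)                          # (height, width, 3)
 *   o3000.video_deinit()
 */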