aboutsummaryrefslogtreecommitdiff
path: root/camera.c
diff options
context:
space:
mode:
authorAlexander Kavon <me+git@alexkavon.com>2026-03-16 02:45:43 -0400
committerAlexander Kavon <me+git@alexkavon.com>2026-03-16 02:45:43 -0400
commit8bf80674ae5150d6c718f6d6ef4907b85dd8b083 (patch)
treeba7a99e9326b54b53ef61fac5c07d9e21dc25c03 /camera.c
working webcam demo
Diffstat (limited to 'camera.c')
-rw-r--r--camera.c236
1 files changed, 236 insertions, 0 deletions
diff --git a/camera.c b/camera.c
new file mode 100644
index 0000000..fefa140
--- /dev/null
+++ b/camera.c
@@ -0,0 +1,236 @@
+#include "libuvc/libuvc.h"
+#include <stdio.h>
+#include <unistd.h>
+
/* Bundles all libuvc state for one camera device, plus the most recently
 * captured frame. Zero-initialize before use (latest_frame must start NULL). */
typedef struct camera {
  uvc_context_t *ctx;          /* UVC service context; owns the libusb context */
  uvc_device_t *dev;           /* device descriptor found by uvc_find_device() */
  uvc_device_handle_t *devh;   /* open handle; needed for streaming and controls */
  uvc_stream_ctrl_t ctrl;      /* negotiated stream profile (format/size/fps) */
  uvc_error_t res;             /* result of the most recent libuvc call */
  uvc_frame_t * latest_frame;  /* written by the stream callback cb() */
} camera_t;
+
+/* This callback function runs once per frame. Use it to perform any
+ * quick processing you need, or have it put the frame into your application's
+ * input queue. If this function takes too long, you'll start losing frames. */
+void cb(uvc_frame_t *frame, void *ptr) {
+ uvc_frame_t *bgr;
+ uvc_error_t ret;
+ // enum uvc_frame_format *frame_format = (enum uvc_frame_format *)ptr;
+ /* FILE *fp;
+ * static int jpeg_count = 0;
+ * static const char *H264_FILE = "iOSDevLog.h264";
+ * static const char *MJPEG_FILE = ".jpeg";
+ * char filename[16]; */
+
+ /* We'll convert the image from YUV/JPEG to BGR, so allocate space */
+ bgr = uvc_allocate_frame(frame->width * frame->height * 3);
+ if (!bgr) {
+ printf("unable to allocate bgr frame!\n");
+ return;
+ }
+
+ // printf("callback! frame_format = %d, width = %d, height = %d, length = %lu, ptr = %p\n",
+ // frame->frame_format, frame->width, frame->height, frame->data_bytes, ptr);
+
+ switch (frame->frame_format) {
+ case UVC_FRAME_FORMAT_H264:
+ /* use `ffplay H264_FILE` to play */
+ /* fp = fopen(H264_FILE, "a");
+ * fwrite(frame->data, 1, frame->data_bytes, fp);
+ * fclose(fp); */
+ break;
+ case UVC_COLOR_FORMAT_MJPEG:
+ /* sprintf(filename, "%d%s", jpeg_count++, MJPEG_FILE);
+ * fp = fopen(filename, "w");
+ * fwrite(frame->data, 1, frame->data_bytes, fp);
+ * fclose(fp); */
+ break;
+ case UVC_COLOR_FORMAT_YUYV:
+ /* Do the BGR conversion */
+ ret = uvc_any2bgr(frame, bgr);
+ if (ret) {
+ uvc_perror(ret, "uvc_any2bgr");
+ uvc_free_frame(bgr);
+ return;
+ }
+ break;
+ default:
+ break;
+ }
+
+ if (frame->sequence % 30 == 0) {
+ printf(" * got image %u, %dx%d\n", frame->sequence, frame->width, frame->height);
+ }
+
+ /* Call a user function:
+ *
+ * my_type *my_obj = (*my_type) ptr;
+ * my_user_function(ptr, bgr);
+ * my_other_function(ptr, bgr->data, bgr->width, bgr->height);
+ */
+
+ /* Call a C++ method:
+ *
+ * my_type *my_obj = (*my_type) ptr;
+ * my_obj->my_func(bgr);
+ */
+ // bgr->width = frame->width;
+ // bgr->height = frame->height;
+ // bgr->step = frame->step;
+ camera_t * camera = (camera_t *) ptr;
+ camera->latest_frame = frame;
+
+ /* Use opencv.highgui to display the image:
+ *
+ * cvImg = cvCreateImageHeader(
+ * cvSize(bgr->width, bgr->height),
+ * IPL_DEPTH_8U,
+ * 3);
+ *
+ * cvSetData(cvImg, bgr->data, bgr->width * 3);
+ *
+ * cvNamedWindow("Test", CV_WINDOW_AUTOSIZE);
+ * cvShowImage("Test", cvImg);
+ * cvWaitKey(10);
+ *
+ * cvReleaseImageHeader(&cvImg);
+ */
+
+ uvc_free_frame(bgr);
+}
+
+int get_camera_devices(camera_t * camera) {
+ /* Initialize a UVC service context. Libuvc will set up its own libusb
+ * context. Replace NULL with a libusb_context pointer to run libuvc
+ * from an existing libusb context. */
+ camera->res = uvc_init(&camera->ctx, NULL);
+
+ if (camera->res < 0) {
+ uvc_perror(camera->res, "uvc_init");
+ return camera->res;
+ }
+
+ puts("UVC initialized");
+
+ /* Locates the first attached UVC device, stores in dev */
+ camera->res = uvc_find_device(
+ camera->ctx, &camera->dev,
+ 0, 0, NULL); /* filter devices: vendor_id, product_id, "serial_num" */
+
+ if (camera->res < 0) {
+ uvc_perror(camera->res, "uvc_find_device"); /* no devices found */
+ } else {
+ puts("Device found");
+
+ /* Try to open the device: requires exclusive access */
+ camera->res = uvc_open(camera->dev, &camera->devh);
+
+ if (camera->res < 0) {
+ uvc_perror(camera->res, "uvc_open"); /* unable to open device */
+ } else {
+ puts("Device opened");
+
+ /* Print out a message containing all the information that libuvc
+ * knows about the device */
+ uvc_print_diag(camera->devh, stderr);
+
+ const uvc_format_desc_t *format_desc = uvc_get_format_descs(camera->devh);
+ const uvc_frame_desc_t *frame_desc = format_desc->frame_descs;
+ enum uvc_frame_format frame_format;
+ int width = 640;
+ int height = 480;
+ int fps = 30;
+
+ switch (format_desc->bDescriptorSubtype) {
+ case UVC_VS_FORMAT_MJPEG:
+ frame_format = UVC_COLOR_FORMAT_MJPEG;
+ break;
+ case UVC_VS_FORMAT_FRAME_BASED:
+ frame_format = UVC_FRAME_FORMAT_H264;
+ break;
+ default:
+ frame_format = UVC_FRAME_FORMAT_YUYV;
+ break;
+ }
+
+ if (frame_desc) {
+ width = frame_desc->wWidth;
+ height = frame_desc->wHeight;
+ fps = 10000000 / frame_desc->dwDefaultFrameInterval;
+ }
+
+ printf("\nFirst format: (%4s) %dx%d %dfps\n", format_desc->fourccFormat, width, height, fps);
+
+ /* Try to negotiate first stream profile */
+ camera->res = uvc_get_stream_ctrl_format_size(
+ camera->devh, &camera->ctrl, /* result stored in ctrl */
+ frame_format,
+ width, height, fps /* width, height, fps */
+ );
+
+ /* Print out the result */
+ uvc_print_stream_ctrl(&camera->ctrl, stderr);
+
+ if (camera->res < 0) {
+ uvc_perror(camera->res, "get_mode"); /* device doesn't provide a matching stream */
+ }
+ }
+ }
+ return 0;
+}
+
+void start_camera_stream(camera_t * camera) {
+ /* Start the video stream. The library will call user function cb:
+ * cb(frame, (void *) 12345)
+ */
+ camera->res = uvc_start_streaming(camera->devh, &camera->ctrl, cb, camera, 0);
+
+ if (camera->res < 0) {
+ uvc_perror(camera->res, "start_streaming"); /* unable to start stream */
+ } else {
+ puts("Streaming...");
+
+ /* enable auto exposure - see uvc_set_ae_mode documentation */
+ puts("Enabling auto exposure ...");
+ const uint8_t UVC_AUTO_EXPOSURE_MODE_AUTO = 2;
+ camera->res = uvc_set_ae_mode(camera->devh, UVC_AUTO_EXPOSURE_MODE_AUTO);
+ if (camera->res == UVC_SUCCESS) {
+ puts(" ... enabled auto exposure");
+ } else if (camera->res == UVC_ERROR_PIPE) {
+ /* this error indicates that the camera does not support the full AE mode;
+ * try again, using aperture priority mode (fixed aperture, variable exposure time) */
+ puts(" ... full AE not supported, trying aperture priority mode");
+ const uint8_t UVC_AUTO_EXPOSURE_MODE_APERTURE_PRIORITY = 8;
+ camera->res = uvc_set_ae_mode(camera->devh, UVC_AUTO_EXPOSURE_MODE_APERTURE_PRIORITY);
+ if (camera->res < 0) {
+ uvc_perror(camera->res, " ... uvc_set_ae_mode failed to enable aperture priority mode");
+ } else {
+ puts(" ... enabled aperture priority auto exposure mode");
+ }
+ } else {
+ uvc_perror(camera->res, " ... uvc_set_ae_mode failed to enable auto exposure mode");
+ }
+ }
+}
+
+void stop_camera_stream(camera_t * camera) {
+ /* End the stream. Blocks until last callback is serviced */
+ uvc_stop_streaming(camera->devh);
+ puts("Done streaming.");
+}
+
+void close_camera(camera_t * camera) {
+ /* Release our handle on the device */
+ uvc_close(camera->devh);
+ puts("Device closed");
+
+ /* Release the device descriptor */
+ uvc_unref_device(camera->dev);
+
+ /* Close the UVC context. This closes and cleans up any existing device handles,
+ * and it closes the libusb context if one was not provided. */
+ uvc_exit(camera->ctx);
+ puts("UVC exited");
+}