Camera Application Programming (4): Real-Time Preview of a UVC Camera on an LCD under ARM Linux


Table of Contents

  • 1. Foreword
  • 2. Environment
  • 3. Steps
  • 4. Writing the Application
    • 4.1. LCD Initialization
    • 4.2. Camera Initialization
    • 4.3. JPEG Decoding
    • 4.4. Starting the Camera
    • 4.5. The Complete Program
  • 5. Testing
    • 5.1. Compiling the Application
    • 5.2. Running the Application
  • 6. Summary

1. Foreword

This application targets UVC cameras that can output frames in MJPEG format.

2. Environment

RK3566 + 7-inch MIPI LCD + UVC camera
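
Before writing any code, it is worth confirming that the camera actually exposes an MJPEG format. A quick way to check (assuming the v4l-utils package is installed and the camera enumerates as /dev/video10, as on the test board used later) is:

v4l2-ctl -d /dev/video10 --list-formats-ext

The output should list MJPG (Motion-JPEG) together with the supported frame sizes; if only YUYV appears, this program will not work as written.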

3. Steps

Writing the application involves the following main steps:

1. LCD initialization.

2. Camera initialization.

3. Frame capture from the camera.

4. JPEG decoding.

5. Display on the LCD.

4. Writing the Application

lcd_21">4.1、lcd初始化

typedef struct lcd_mes {
    int fd;
    unsigned char *fb_base;
    int lcd_width;
    int lcd_height;
    unsigned int bpp;
    unsigned int line_width;
} lcd_mes;

int lcd_init(const char *fb_dev, lcd_mes *lcd)
{
    int screen_size;
    struct fb_var_screeninfo var;

    if (fb_dev == NULL)
        goto _err;

    /* 1. open /dev/fb* */
    lcd->fd = open(fb_dev, O_RDWR);
    if (lcd->fd < 0)
    {
        printf("can not open %s\n", fb_dev);
        goto _err;
    }

    /* 2. get lcd message */
    if (ioctl(lcd->fd, FBIOGET_VSCREENINFO, &var))
    {
        printf("can not get var\n");
        goto _err;
    }

    screen_size = var.xres * var.yres * var.bits_per_pixel / 8;
    lcd->line_width = var.xres * var.bits_per_pixel / 8;
    lcd->lcd_width = var.xres;
    lcd->lcd_height = var.yres;
    lcd->bpp = var.bits_per_pixel;

    lcd->fb_base = mmap(NULL, screen_size, PROT_READ | PROT_WRITE, MAP_SHARED, lcd->fd, 0);
    if (lcd->fb_base == (unsigned char *)-1)
    {
        printf("can not mmap\n");
        goto _err;
    }
    memset(lcd->fb_base, 0x00, screen_size);

    return 0;

_err:
    return -1;
}

4.2. Camera Initialization

Initialize the UVC camera through the V4L2 interface:

typedef struct camera_mes {
    int fd;
    void *bufs[32];
    int bufs_index;
    int buf_length;
    char fmt[20];
    int frame_x_size;
    int frame_y_size;
} camera_mes;

int camera_init(const char *video, camera_mes *camera)
{
    struct v4l2_fmtdesc fmtdesc;
    struct v4l2_frmsizeenum fsenum;
    int fmt_index = 0;
    int frame_index = 0;
    int buf_cnt;
    int i;

    if (video == NULL)
        goto _err;

    /* 1. open /dev/video* */
    camera->fd = open(video, O_RDWR);
    if (camera->fd < 0)
    {
        printf("can not open %s\n", video);
        goto _err;
    }

    /* 2. query capability */
    struct v4l2_capability cap;
    memset(&cap, 0, sizeof(struct v4l2_capability));
    if (0 == ioctl(camera->fd, VIDIOC_QUERYCAP, &cap))
    {
        if ((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0)
        {
            fprintf(stderr, "Error opening device %s: video capture not supported.\n", video);
            goto _ioc_querycap_err;
        }
        if (!(cap.capabilities & V4L2_CAP_STREAMING))
        {
            fprintf(stderr, "%s does not support streaming i/o\n", video);
            goto _ioc_querycap_err;
        }
    }
    else
    {
        printf("can not get capability\n");
        goto _ioc_querycap_err;
    }

    /* 3. enum format */
    while (1)
    {
        fmtdesc.index = fmt_index;
        fmtdesc.type  = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        if (0 != ioctl(camera->fd, VIDIOC_ENUM_FMT, &fmtdesc))
            break;

        frame_index = 0;
        // printf("format %s,%d:\n", fmtdesc.description, fmtdesc.pixelformat);
        while (1)
        {
            memset(&fsenum, 0, sizeof(struct v4l2_frmsizeenum));
            fsenum.pixel_format = fmtdesc.pixelformat;
            fsenum.index = frame_index;
            /* get framesize */
            if (ioctl(camera->fd, VIDIOC_ENUM_FRAMESIZES, &fsenum) == 0)
            {
                // printf("\t%d: %d x %d\n", frame_index, fsenum.discrete.width, fsenum.discrete.height);
            }
            else
            {
                break;
            }
            frame_index++;
        }
        fmt_index++;
    }

    /* 4. set format */
    struct v4l2_format fmt;
    memset(&fmt, 0, sizeof(struct v4l2_format));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = camera->frame_x_size;
    fmt.fmt.pix.height = camera->frame_y_size;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG;
    fmt.fmt.pix.field = V4L2_FIELD_ANY;
    if (0 == ioctl(camera->fd, VIDIOC_S_FMT, &fmt))
    {
        // printf("the final frame-size has been set : %d x %d\n", fmt.fmt.pix.width, fmt.fmt.pix.height);
        camera->frame_x_size = fmt.fmt.pix.width;
        camera->frame_y_size = fmt.fmt.pix.height;
        strncpy(camera->fmt, "Motion-JPEG", sizeof(camera->fmt) - 1);   /* source is shorter than fmt[], so the string stays null-terminated */
    }
    else
    {
        printf("can not set format\n");
        goto _ioc_sfmt_err;
    }

    /* 5. require buffer */
    struct v4l2_requestbuffers rb;
    memset(&rb, 0, sizeof(struct v4l2_requestbuffers));
    rb.count = 32;
    rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    rb.memory = V4L2_MEMORY_MMAP;
    if (0 == ioctl(camera->fd, VIDIOC_REQBUFS, &rb))
    {
        buf_cnt = rb.count;
        for (i = 0; i < rb.count; i++)
        {
            struct v4l2_buffer buf;
            memset(&buf, 0, sizeof(struct v4l2_buffer));
            buf.index = i;
            buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            buf.memory = V4L2_MEMORY_MMAP;
            if (0 == ioctl(camera->fd, VIDIOC_QUERYBUF, &buf))
            {
                /* mmap */
                camera->bufs[i] = mmap(0, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, camera->fd, buf.m.offset);
                if (camera->bufs[i] == MAP_FAILED)
                {
                    printf("Unable to map buffer");
                    goto _err;
                }
            }
            else
            {
                printf("can not query buffer\n");
                goto _err;
            }
        }
    }
    else
    {
        printf("can not request buffers\n");
        goto _ioc_reqbufs_err;
    }

    /* 6. queue buffer */
    for (i = 0; i < buf_cnt; ++i)
    {
        struct v4l2_buffer buf;
        memset(&buf, 0, sizeof(struct v4l2_buffer));
        buf.index = i;
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        if (0 != ioctl(camera->fd, VIDIOC_QBUF, &buf))
        {
            perror("Unable to queue buffer");
            goto _ioc_qbuf_err;
        }
    }

    camera->bufs_index = 0;     // init camera struct
    camera->buf_length = 0;

    return 0;

_ioc_qbuf_err:
_ioc_reqbufs_err:
_ioc_sfmt_err:
_ioc_querycap_err:
_err:
    return -1;
}

4.3. JPEG Decoding

int jpeg_show_on_lcd(lcd_mes *lcd, camera_mes *camera)
{
    int min_width, min_height;
    int valid_bytes;
    int offset_x, offset_y;

    struct jpeg_decompress_struct cinfo;
    struct jpeg_error_mgr jerr;

    cinfo.err = jpeg_std_error(&jerr);      // bind the error handler to the decompress object
    jpeg_create_decompress(&cinfo);         // initialize the decoder
    jpeg_mem_src(&cinfo, camera->bufs[camera->bufs_index], camera->buf_length);   // point the decoder at the in-memory JPEG data
    jpeg_read_header(&cinfo, TRUE);         // read the image header
    cinfo.out_color_space = JCS_RGB;        // decode to RGB
    jpeg_start_decompress(&cinfo);          // start decompression

    unsigned char *jpeg_line_buf = malloc(cinfo.output_components * cinfo.output_width);    // holds one decoded scanline
    unsigned int *fb_line_buf = malloc(lcd->line_width);                                    // holds one converted line ready for the framebuffer

    min_width = (cinfo.output_width < lcd->lcd_width) ? cinfo.output_width : lcd->lcd_width;
    min_height = (cinfo.output_height < lcd->lcd_height) ? cinfo.output_height : lcd->lcd_height;
    valid_bytes = min_width * lcd->bpp / 8;             // valid bytes per line

    unsigned char *ptr = lcd->fb_base;
    offset_x = ((lcd->lcd_width - min_width) / 2) * lcd->bpp / 8;   // center horizontally
    offset_y = (lcd->lcd_height - min_height) / 2;                  // center vertically
    for (int i = 0; i < offset_y; i++)
        ptr += lcd->lcd_width * lcd->bpp / 8;

    unsigned int red, green, blue;
    unsigned int color;
    while (cinfo.output_scanline < min_height)
    {
        jpeg_read_scanlines(&cinfo, &jpeg_line_buf, 1); // read one scanline at a time
        for (int i = 0; i < min_width; i++)
        {
            red = jpeg_line_buf[i * 3];
            green = jpeg_line_buf[i * 3 + 1];
            blue = jpeg_line_buf[i * 3 + 2];
            color = red << 16 | green << 8 | blue;      // pack RGB888 into a 32-bit XRGB8888 pixel
            fb_line_buf[i] = color;
        }
        memcpy(ptr + offset_x, fb_line_buf, valid_bytes);   // write one line to the framebuffer
        ptr += lcd->lcd_width * lcd->bpp / 8;               // move to the next framebuffer line
    }

    jpeg_finish_decompress(&cinfo);                     // finish decompression
    jpeg_destroy_decompress(&cinfo);                    // destroy the decompress object
    free(jpeg_line_buf);                                // free the line buffers
    free(fb_line_buf);

    return 0;
}
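
Note that the inner loop packs each decoded RGB888 pixel into a 32-bit word, so it assumes the framebuffer runs at 32 bpp (XRGB8888), which matches the test setup here. If your panel's framebuffer reports 16 bpp instead, the per-pixel packing would have to produce RGB565. A minimal sketch of that conversion (a hypothetical helper, not part of the original program):

/* Hypothetical helper for 16 bpp framebuffers: pack RGB888 into RGB565. */
static unsigned short rgb888_to_rgb565(unsigned char r, unsigned char g, unsigned char b)
{
    return (unsigned short)(((r >> 3) << 11) | ((g >> 2) << 5) | (b >> 3));
}

The line buffer would then be an array of unsigned short and valid_bytes shrinks accordingly; the centering logic stays the same.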

4.4. Starting the Camera

int main(int argc, char **argv)
{
    ...
    /* start camera */
    if (0 != ioctl(camera.fd, VIDIOC_STREAMON, &type))
    {
        printf("Unable to start capture\n");
        goto _err;
    }
    printf("\nstart camera ...\n");

    while (1)
    {
        /* poll */
        memset(fds, 0, sizeof(fds));
        fds[0].fd = camera.fd;
        fds[0].events = POLLIN;
        if (1 == poll(fds, 1, -1))
        {
            /* dequeue buffer */
            struct v4l2_buffer buf;
            memset(&buf, 0, sizeof(struct v4l2_buffer));
            buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            buf.memory = V4L2_MEMORY_MMAP;
            if (0 != ioctl(camera.fd, VIDIOC_DQBUF, &buf))
            {
                printf("Unable to dequeue buffer\n");
                goto _ioc_dqbuf_err;
            }

            /* jpeg show on lcd */
            camera.bufs_index = buf.index;
            camera.buf_length = buf.length;
            jpeg_show_on_lcd(&lcd, &camera);

            /* queue buffer */
            if (0 != ioctl(camera.fd, VIDIOC_QBUF, &buf))
            {
                printf("Unable to queue buffer");
                goto _ioc_qbuf_err;
            }
        }
    }
    ...
}
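
One detail worth calling out: for a compressed format such as MJPEG, the driver reports the number of valid bytes in the dequeued frame in buf.bytesused, which is usually smaller than buf.length (the size of the whole mmap'ed buffer). The loop above passes buf.length to the decoder; this still works because libjpeg stops at the JPEG end-of-image marker, but passing the exact payload size is slightly cleaner. A sketch of that variant:

            /* jpeg show on lcd (variant using the exact payload size) */
            camera.bufs_index = buf.index;
            camera.buf_length = buf.bytesused;   /* valid bytes in this frame, not the full buffer size */
            jpeg_show_on_lcd(&lcd, &camera);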

4.5. The Complete Program


#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <linux/types.h>          
#include <linux/videodev2.h>
#include <poll.h>
#include <sys/mman.h>
#include <jpeglib.h>
#include <linux/fb.h>

typedef struct lcd_mes {
    int fd;
    unsigned char *fb_base;
    int lcd_width;
    int lcd_height;
    unsigned int bpp;
    unsigned int line_width;
} lcd_mes;

typedef struct camera_mes {
    int fd;
    void *bufs[32];
    int bufs_index;
    int buf_length;
    char fmt[20];
    int frame_x_size;
    int frame_y_size;
} camera_mes;

int jpeg_show_on_lcd(lcd_mes *lcd, camera_mes *camera)
{
    int min_width, min_height;
    int valid_bytes;
    int offset_x, offset_y;

    struct jpeg_decompress_struct cinfo;
    struct jpeg_error_mgr jerr;

    cinfo.err = jpeg_std_error(&jerr);      // bind the error handler to the decompress object
    jpeg_create_decompress(&cinfo);         // initialize the decoder
    jpeg_mem_src(&cinfo, camera->bufs[camera->bufs_index], camera->buf_length);   // point the decoder at the in-memory JPEG data
    jpeg_read_header(&cinfo, TRUE);         // read the image header
    cinfo.out_color_space = JCS_RGB;        // decode to RGB
    jpeg_start_decompress(&cinfo);          // start decompression

    unsigned char *jpeg_line_buf = malloc(cinfo.output_components * cinfo.output_width);    // holds one decoded scanline
    unsigned int *fb_line_buf = malloc(lcd->line_width);                                    // holds one converted line ready for the framebuffer

    min_width = (cinfo.output_width < lcd->lcd_width) ? cinfo.output_width : lcd->lcd_width;
    min_height = (cinfo.output_height < lcd->lcd_height) ? cinfo.output_height : lcd->lcd_height;
    valid_bytes = min_width * lcd->bpp / 8;             // valid bytes per line

    unsigned char *ptr = lcd->fb_base;
    offset_x = ((lcd->lcd_width - min_width) / 2) * lcd->bpp / 8;   // center horizontally
    offset_y = (lcd->lcd_height - min_height) / 2;                  // center vertically
    for (int i = 0; i < offset_y; i++)
        ptr += lcd->lcd_width * lcd->bpp / 8;

    unsigned int red, green, blue;
    unsigned int color;
    while (cinfo.output_scanline < min_height)
    {
        jpeg_read_scanlines(&cinfo, &jpeg_line_buf, 1); // read one scanline at a time
        for (int i = 0; i < min_width; i++)
        {
            red = jpeg_line_buf[i * 3];
            green = jpeg_line_buf[i * 3 + 1];
            blue = jpeg_line_buf[i * 3 + 2];
            color = red << 16 | green << 8 | blue;      // pack RGB888 into a 32-bit XRGB8888 pixel
            fb_line_buf[i] = color;
        }
        memcpy(ptr + offset_x, fb_line_buf, valid_bytes);   // write one line to the framebuffer
        ptr += lcd->lcd_width * lcd->bpp / 8;               // move to the next framebuffer line
    }

    jpeg_finish_decompress(&cinfo);                     // finish decompression
    jpeg_destroy_decompress(&cinfo);                    // destroy the decompress object
    free(jpeg_line_buf);                                // free the line buffers
    free(fb_line_buf);

    return 0;
}

int lcd_init(const char *fb_dev, lcd_mes *lcd)
{
    int screen_size;
    struct fb_var_screeninfo var;

    if (fb_dev == NULL)
        goto _err;

    /* 1. open /dev/fb* */
    lcd->fd = open(fb_dev, O_RDWR);
    if (lcd->fd < 0)
    {
        printf("can not open %s\n", fb_dev);
        goto _err;
    }

    /* 2. get lcd message */
    if (ioctl(lcd->fd, FBIOGET_VSCREENINFO, &var))
    {
        printf("can not get var\n");
        goto _err;
    }

    screen_size = var.xres * var.yres * var.bits_per_pixel / 8;
    lcd->line_width = var.xres * var.bits_per_pixel / 8;
    lcd->lcd_width = var.xres;
    lcd->lcd_height = var.yres;
    lcd->bpp = var.bits_per_pixel;

    lcd->fb_base = mmap(NULL, screen_size, PROT_READ | PROT_WRITE, MAP_SHARED, lcd->fd, 0);
    if (lcd->fb_base == (unsigned char *)-1)
    {
        printf("can not mmap\n");
        goto _err;
    }
    memset(lcd->fb_base, 0x00, screen_size);

    return 0;

_err:
    return -1;
}

int camera_init(const char *video, camera_mes *camera)
{
    struct v4l2_fmtdesc fmtdesc;
    struct v4l2_frmsizeenum fsenum;
    int fmt_index = 0;
    int frame_index = 0;
    int buf_cnt;
    int i;

    if (video == NULL)
        goto _err;

    /* 1. open /dev/video* */
    camera->fd = open(video, O_RDWR);
    if (camera->fd < 0)
    {
        printf("can not open %s\n", video);
        goto _err;
    }

    /* 2. query capability */
    struct v4l2_capability cap;
    memset(&cap, 0, sizeof(struct v4l2_capability));
    if (0 == ioctl(camera->fd, VIDIOC_QUERYCAP, &cap))
    {
        if ((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0)
        {
            fprintf(stderr, "Error opening device %s: video capture not supported.\n", video);
            goto _ioc_querycap_err;
        }
        if (!(cap.capabilities & V4L2_CAP_STREAMING))
        {
            fprintf(stderr, "%s does not support streaming i/o\n", video);
            goto _ioc_querycap_err;
        }
    }
    else
    {
        printf("can not get capability\n");
        goto _ioc_querycap_err;
    }

    /* 3. enum format */
    while (1)
    {
        fmtdesc.index = fmt_index;
        fmtdesc.type  = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        if (0 != ioctl(camera->fd, VIDIOC_ENUM_FMT, &fmtdesc))
            break;

        frame_index = 0;
        // printf("format %s,%d:\n", fmtdesc.description, fmtdesc.pixelformat);
        while (1)
        {
            memset(&fsenum, 0, sizeof(struct v4l2_frmsizeenum));
            fsenum.pixel_format = fmtdesc.pixelformat;
            fsenum.index = frame_index;
            /* get framesize */
            if (ioctl(camera->fd, VIDIOC_ENUM_FRAMESIZES, &fsenum) == 0)
            {
                // printf("\t%d: %d x %d\n", frame_index, fsenum.discrete.width, fsenum.discrete.height);
            }
            else
            {
                break;
            }
            frame_index++;
        }
        fmt_index++;
    }

    /* 4. set format */
    struct v4l2_format fmt;
    memset(&fmt, 0, sizeof(struct v4l2_format));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = camera->frame_x_size;
    fmt.fmt.pix.height = camera->frame_y_size;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG;
    fmt.fmt.pix.field = V4L2_FIELD_ANY;
    if (0 == ioctl(camera->fd, VIDIOC_S_FMT, &fmt))
    {
        // printf("the final frame-size has been set : %d x %d\n", fmt.fmt.pix.width, fmt.fmt.pix.height);
        camera->frame_x_size = fmt.fmt.pix.width;
        camera->frame_y_size = fmt.fmt.pix.height;
        strncpy(camera->fmt, "Motion-JPEG", sizeof(camera->fmt) - 1);   /* source is shorter than fmt[], so the string stays null-terminated */
    }
    else
    {
        printf("can not set format\n");
        goto _ioc_sfmt_err;
    }

    /* 5. require buffer */
    struct v4l2_requestbuffers rb;
    memset(&rb, 0, sizeof(struct v4l2_requestbuffers));
    rb.count = 32;
    rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    rb.memory = V4L2_MEMORY_MMAP;
    if (0 == ioctl(camera->fd, VIDIOC_REQBUFS, &rb))
    {
        buf_cnt = rb.count;
        for (i = 0; i < rb.count; i++)
        {
            struct v4l2_buffer buf;
            memset(&buf, 0, sizeof(struct v4l2_buffer));
            buf.index = i;
            buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            buf.memory = V4L2_MEMORY_MMAP;
            if (0 == ioctl(camera->fd, VIDIOC_QUERYBUF, &buf))
            {
                /* mmap */
                camera->bufs[i] = mmap(0, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, camera->fd, buf.m.offset);
                if (camera->bufs[i] == MAP_FAILED)
                {
                    printf("Unable to map buffer");
                    goto _err;
                }
            }
            else
            {
                printf("can not query buffer\n");
                goto _err;
            }
        }
    }
    else
    {
        printf("can not request buffers\n");
        goto _ioc_reqbufs_err;
    }

    /* 6. queue buffer */
    for (i = 0; i < buf_cnt; ++i)
    {
        struct v4l2_buffer buf;
        memset(&buf, 0, sizeof(struct v4l2_buffer));
        buf.index = i;
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        if (0 != ioctl(camera->fd, VIDIOC_QBUF, &buf))
        {
            perror("Unable to queue buffer");
            goto _ioc_qbuf_err;
        }
    }

    camera->bufs_index = 0;     // init camera struct
    camera->buf_length = 0;

    return 0;

_ioc_qbuf_err:
_ioc_reqbufs_err:
_ioc_sfmt_err:
_ioc_querycap_err:
_err:
    return -1;
}

int main(int argc, char **argv)
{
    int ret;
    lcd_mes lcd;
    camera_mes camera;
    int type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    struct pollfd fds[1];

    if (argc != 3)
    {
        printf("Usage: %s </dev/videoX> </dev/fbX>\n", argv[0]);
        return -1;
    }

    /* lcd init */
    ret = lcd_init(argv[2], &lcd);
    if (ret == -1)
    {
        printf("lcd init err !\n");
        goto _err;
    }
    printf("\n-------------- lcd message --------------\n");
    printf("screen pixel: %d x %d\n", lcd.lcd_width, lcd.lcd_height);
    printf("line width: %d (byte)\n", lcd.line_width);
    printf("bpp: %d\n", lcd.bpp);
    printf("-----------------------------------------\n");

    /* camera init */
    camera.frame_x_size = lcd.lcd_width;
    camera.frame_y_size = lcd.lcd_height;
    ret = camera_init(argv[1], &camera);
    if (ret == -1)
    {
        printf("camera init err !\n");
        goto _err;
    }
    printf("\n------------ camera message -------------\n");
    printf("frame size: %d x %d\n", camera.frame_x_size, camera.frame_y_size);
    printf("format: %s\n", camera.fmt);
    printf("-----------------------------------------\n");

    /* start camera */
    if (0 != ioctl(camera.fd, VIDIOC_STREAMON, &type))
    {
        printf("Unable to start capture\n");
        goto _err;
    }
    printf("\nstart camera ...\n");

    while (1)
    {
        /* poll */
        memset(fds, 0, sizeof(fds));
        fds[0].fd = camera.fd;
        fds[0].events = POLLIN;
        if (1 == poll(fds, 1, -1))
        {
            /* dequeue buffer */
            struct v4l2_buffer buf;
            memset(&buf, 0, sizeof(struct v4l2_buffer));
            buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            buf.memory = V4L2_MEMORY_MMAP;
            if (0 != ioctl(camera.fd, VIDIOC_DQBUF, &buf))
            {
                printf("Unable to dequeue buffer\n");
                goto _ioc_dqbuf_err;
            }

            /* jpeg show on lcd */
            camera.bufs_index = buf.index;
            camera.buf_length = buf.length;
            jpeg_show_on_lcd(&lcd, &camera);

            /* queue buffer */
            if (0 != ioctl(camera.fd, VIDIOC_QBUF, &buf))
            {
                printf("Unable to queue buffer");
                goto _ioc_qbuf_err;
            }
        }
    }

    /* close camera */
    if (0 != ioctl(camera.fd, VIDIOC_STREAMOFF, &type))
    {
        printf("Unable to stop capture\n");
        goto _ioc_streamoff_err;
    }
    close(camera.fd);

    return 0;

_ioc_streamoff_err:
_ioc_qbuf_err:
_ioc_dqbuf_err:
_err:
    return -1;
}

5. Testing

5.1. Compiling the Application

If your board runs a Buildroot system, you need to cross-compile.

The board used for this test runs Ubuntu, so the program can be compiled directly with the following command:

sudo gcc -o uvctolcd uvctolcd.c -ljpeg
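
If you do cross-compile (for example for a Buildroot rootfs), the same single-file build applies, just with the cross compiler. A sketch, assuming an aarch64 Buildroot toolchain whose sysroot already provides libjpeg (the toolchain prefix below is only an example):

aarch64-buildroot-linux-gnu-gcc -o uvctolcd uvctolcd.c -ljpeg

The exact compiler prefix and the way libjpeg is made available in the sysroot depend on your Buildroot configuration, so adjust the command to match your toolchain.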

5.2. Running the Application

sudo ./uvctolcd /dev/video10 /dev/fb0

6. Summary

Reference article: Linux摄像头(v4l2应用)——在LCD上实时显示摄像头采集JPEG数据 (CSDN blog)
Source code repository on Gitee:

Repository home page:
https://gitee.com/cattle_l/v4l2_app.git
Clone directly:
git clone https://gitee.com/cattle_l/v4l2_app.git
