RK Camera hal 图像处理

soc:RK3568

system:Android12

今天发现外接的 USB Camera 用 Camera2 API 打开后图像显示颠倒;如果在 APP 里用 Camera1 API 来处理,可用的接口较少,调整起来比较麻烦,因此选择在 Camera HAL 层解决。

RK Camera hal位置:hardware/interfaces/camera

核心的文件在:

开机会启动:android.hardware.camera.provider@2.4-external-service服务

遍历 /dev/videoX 节点,通过 V4L2 接口获取摄像头驱动支持的宽、高、数据格式与帧率,判断当前的摄像头节点是否有效;有效就会通知 CameraServer 注册为 CameraId,主要代码如下

ExternalCameraDevice.cpp

std::vector<SupportedV4L2Format> ExternalCameraDevice::getCandidateSupportedFormatsLocked(int fd, CroppingType cropType,const std::vector<ExternalCameraConfig::FpsLimitation>& fpsLimits,const std::vector<ExternalCameraConfig::FpsLimitation>& depthFpsLimits,const Size& minStreamSize,bool depthEnabled) {std::vector<SupportedV4L2Format> outFmts;
if (!mSubDevice){// VIDIOC_QUERYCAP get Capabilitystruct v4l2_capability capability;int ret_query = ioctl(fd, VIDIOC_QUERYCAP, &capability);if (ret_query < 0) {ALOGE("%s v4l2 QUERYCAP %s failed: %s", __FUNCTION__, strerror(errno));}struct v4l2_fmtdesc fmtdesc{};fmtdesc.index = 0;if (capability.device_caps & V4L2_CAP_VIDEO_CAPTURE_MPLANE)fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;elsefmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;int ret = 0;while (ret == 0) {//获取摄像头格式ret = TEMP_FAILURE_RETRY(ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc));ALOGV("index:%d,ret:%d, format:%c%c%c%c", fmtdesc.index, ret,fmtdesc.pixelformat & 0xFF,(fmtdesc.pixelformat >> 8) & 0xFF,(fmtdesc.pixelformat >> 16) & 0xFF,(fmtdesc.pixelformat >> 24) & 0xFF);if (ret == 0 && !(fmtdesc.flags & V4L2_FMT_FLAG_EMULATED)) {auto it = std::find (kSupportedFourCCs.begin(), kSupportedFourCCs.end(), fmtdesc.pixelformat);if (it != kSupportedFourCCs.end()) {// Found supported formatv4l2_frmsizeenum frameSize {.index = 0,.pixel_format = fmtdesc.pixelformat};//获取摄像头SIZEfor (; TEMP_FAILURE_RETRY(ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frameSize)) == 0;++frameSize.index) {if (frameSize.type == V4L2_FRMSIZE_TYPE_DISCRETE) {ALOGV("index:%d, format:%c%c%c%c, w %d, h %d", frameSize.index,fmtdesc.pixelformat & 0xFF,(fmtdesc.pixelformat >> 8) & 0xFF,(fmtdesc.pixelformat >> 16) & 0xFF,(fmtdesc.pixelformat >> 24) & 0xFF,frameSize.discrete.width, frameSize.discrete.height);// Disregard h > w formats so all aspect ratio (h/w) <= 1.0// This will simplify the crop/scaling logic down the roadif (frameSize.discrete.height > frameSize.discrete.width) {continue;}// Discard all formats which is smaller than minStreamSizeif (frameSize.discrete.width < minStreamSize.width|| frameSize.discrete.height < minStreamSize.height) {continue;}SupportedV4L2Format format {.width = frameSize.discrete.width,.height = frameSize.discrete.height,.fourcc = fmtdesc.pixelformat};//获取对于的摄像头参数if (format.fourcc == V4L2_PIX_FMT_Z16 && depthEnabled) 
{updateFpsBounds(fd, cropType, depthFpsLimits, format, outFmts);} else {updateFpsBounds(fd, cropType, fpsLimits, format, outFmts);}}}
#ifdef HDMI_ENABLEif(strstr((const char*)capability.driver,"hdmi")){ALOGE("driver.find :%s",capability.driver);struct v4l2_dv_timings timings;if(TEMP_FAILURE_RETRY(ioctl(fd, VIDIOC_SUBDEV_QUERY_DV_TIMINGS, &timings)) == 0){char fmtDesc[5]{0};sprintf(fmtDesc,"%c%c%c%c",fmtdesc.pixelformat & 0xFF,(fmtdesc.pixelformat >> 8) & 0xFF,(fmtdesc.pixelformat >> 16) & 0xFF,(fmtdesc.pixelformat >> 24) & 0xFF);ALOGV("hdmi index:%d,ret:%d, format:%s", fmtdesc.index, ret,fmtDesc);ALOGE("%s, hdmi I:%d, wxh:%dx%d", __func__,timings.bt.interlaced, timings.bt.width, timings.bt.height);ALOGV("add hdmi index:%d,ret:%d, format:%c%c%c%c", fmtdesc.index, ret,fmtdesc.pixelformat & 0xFF,(fmtdesc.pixelformat >> 8) & 0xFF,(fmtdesc.pixelformat >> 16) & 0xFF,(fmtdesc.pixelformat >> 24) & 0xFF);SupportedV4L2Format formatGet {.width = timings.bt.width,.height = timings.bt.height,.fourcc = fmtdesc.pixelformat};updateFpsBounds(fd, cropType, fpsLimits, formatGet, outFmts);SupportedV4L2Format format_640x360 {.width = 640,.height = 360,.fourcc = fmtdesc.pixelformat};updateFpsBounds(fd, cropType, fpsLimits, format_640x360, outFmts);SupportedV4L2Format format_1920x1080 {.width = 1920,.height = 1080,.fourcc = fmtdesc.pixelformat};updateFpsBounds(fd, cropType, fpsLimits, format_1920x1080, outFmts);}}
#endif}}fmtdesc.index++;}trimSupportedFormats(cropType, &outFmts);}

上面流程正常执行后,就可以通过 dumpsys media.camera | grep map 查看已注册的摄像头

rk3588_s:/ $ dumpsys media.camera | grep map
Device 0 maps to "0"
Device 1 maps to "1"
Device 2 maps to "112"
Device 3 maps to "201"

之后Camera 2 API 通过open 会调到CameraServer最终进到ExternalCameraDevice::open

1.openCamera

// Open the camera device node and create a capture session.
//
// @param callback device-level notification callback from the framework;
//                 must be non-null.
// @param _hidl_cb delivers the resulting status plus the session interface
//                 (nullptr on failure).
//
// Fix: the pasted excerpt had the preprocessor directives fused with the code
// (`#ifdef SUBDEVICE_ENABLEif(...)`, `#elseif` — not a valid directive); they
// are restored to their own lines here.
Return<void> ExternalCameraDevice::open(const sp<ICameraDeviceCallback>& callback,
                                        ICameraDevice::open_cb _hidl_cb) {
    Status status = Status::OK;
    sp<ExternalCameraDeviceSession> session = nullptr;
    if (callback == nullptr) {
        ALOGE("%s: cannot open camera %s. callback is null!", __FUNCTION__, mCameraId.c_str());
        _hidl_cb(Status::ILLEGAL_ARGUMENT, nullptr);
        return Void();
    }
    // Static characteristics must have been read successfully before opening.
    if (isInitFailed()) {
        ALOGE("%s: cannot open camera %s. camera init failed!", __FUNCTION__, mCameraId.c_str());
        _hidl_cb(Status::INTERNAL_ERROR, nullptr);
        return Void();
    }
    // NOTE: mLock is managed manually because every early-exit path must
    // unlock before invoking _hidl_cb; kept as-is to preserve behavior.
    mLock.lock();
    ALOGV("%s: Initializing device for camera %s", __FUNCTION__, mCameraId.c_str());
    session = mSession.promote();
    if (session != nullptr && !session->isClosed()) {
        ALOGE("%s: cannot open an already opened camera!", __FUNCTION__);
        mLock.unlock();
        _hidl_cb(Status::CAMERA_IN_USE, nullptr);
        return Void();
    }
    // Open the V4L2 node; a freshly plugged UVC node may transiently fail, so
    // retry with a short sleep in between.
    unique_fd fd(::open(mDevicePath.c_str(), O_RDWR));
#ifdef SUBDEVICE_ENABLE
    // Sub-devices share the primary device's node, so only the primary device
    // retries/validates the open here.
    if (!mSubDevice) {
        if (fd.get() < 0) {
            int numAttempt = 0;
            do {
                ALOGW("%s: v4l2 device %s open failed, wait 33ms and try again",
                      __FUNCTION__, mDevicePath.c_str());
                usleep(OPEN_RETRY_SLEEP_US);  // sleep and try again
                fd.reset(::open(mDevicePath.c_str(), O_RDWR));
                numAttempt++;
            } while (fd.get() < 0 && numAttempt <= MAX_RETRY);
            if (fd.get() < 0) {
                ALOGE("%s: v4l2 device open %s failed: %s", __FUNCTION__,
                      mDevicePath.c_str(), strerror(errno));
                mLock.unlock();
                _hidl_cb(Status::INTERNAL_ERROR, nullptr);
                return Void();
            }
        }
    }
#else
    if (fd.get() < 0) {
        int numAttempt = 0;
        do {
            ALOGW("%s: v4l2 device %s open failed, wait 33ms and try again",
                  __FUNCTION__, mDevicePath.c_str());
            usleep(OPEN_RETRY_SLEEP_US);  // sleep and try again
            fd.reset(::open(mDevicePath.c_str(), O_RDWR));
            numAttempt++;
        } while (fd.get() < 0 && numAttempt <= MAX_RETRY);
        if (fd.get() < 0) {
            ALOGE("%s: v4l2 device open %s failed: %s", __FUNCTION__,
                  mDevicePath.c_str(), strerror(errno));
            mLock.unlock();
            _hidl_cb(Status::INTERNAL_ERROR, nullptr);
            return Void();
        }
    }
#endif
    // Create the device session; it takes ownership of the fd from here on.
    session = createSession(callback, mCfg, mSupportedFormats, mCroppingType,
                            mCameraCharacteristics, mCameraId, std::move(fd));
    if (session == nullptr) {
        ALOGE("%s: camera device session allocation failed", __FUNCTION__);
        mLock.unlock();
        _hidl_cb(Status::INTERNAL_ERROR, nullptr);
        return Void();
    }
    if (session->isInitFailed()) {
        ALOGE("%s: camera device session init failed", __FUNCTION__);
        session = nullptr;
        mLock.unlock();
        _hidl_cb(Status::INTERNAL_ERROR, nullptr);
        return Void();
    }
    mSession = session;
    mLock.unlock();
    _hidl_cb(status, session->getInterface());
    return Void();
}

Camera framework 每处理完一帧会调用 ExternalCameraDeviceSession::processCaptureResult(std::shared_ptr&lt;HalRequest&gt;&amp; req) 上报结果,其中通过 enqueueV4l2Frame 把用完的 buffer 归还给 V4L2 队列。


void ExternalCameraDeviceSession::enqueueV4l2Frame(const sp<V4L2Frame>& frame) {ATRACE_CALL();frame->unmap();ATRACE_BEGIN("VIDIOC_QBUF");v4l2_buffer buffer{};if (mCapability.device_caps & V4L2_CAP_VIDEO_CAPTURE_MPLANE)buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;elsebuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;buffer.memory = V4L2_MEMORY_MMAP;if (V4L2_TYPE_IS_MULTIPLANAR(buffer.type)) {buffer.m.planes = planes;buffer.length = PLANES_NUM;}buffer.index = frame->mBufferIndex;
#ifdef SUBDEVICE_ENABLEif(!isSubDevice()){if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_QBUF, &buffer)) < 0) {ALOGE("%s: QBUF index %d fails: %s", __FUNCTION__,frame->mBufferIndex, strerror(errno));return;}}
#elseif (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_QBUF, &buffer)) < 0) {ALOGE("%s: QBUF index %d fails: %s", __FUNCTION__,frame->mBufferIndex, strerror(errno));return;}
#endifATRACE_END();{std::lock_guard<std::mutex> lk(mV4l2BufferLock);mNumDequeuedV4l2Buffers--;}mV4L2BufferReturned.notify_one();
}Status ExternalCameraDeviceSession::processCaptureResult(std::shared_ptr<HalRequest>& req) {ATRACE_CALL();// Return V4L2 buffer to V4L2 buffer queuesp<V3_4::implementation::V4L2Frame> v4l2Frame =static_cast<V3_4::implementation::V4L2Frame*>(req->frameIn.get());enqueueV4l2Frame(v4l2Frame);// NotifyShutternotifyShutter(req->frameNumber, req->shutterTs);// Fill output buffershidl_vec<CaptureResult> results;results.resize(1);CaptureResult& result = results[0];result.frameNumber = req->frameNumber;result.partialResult = 1;result.inputBuffer.streamId = -1;result.outputBuffers.resize(req->buffers.size());for (size_t i = 0; i < req->buffers.size(); i++) {result.outputBuffers[i].streamId = req->buffers[i].streamId;result.outputBuffers[i].bufferId = req->buffers[i].bufferId;if (req->buffers[i].fenceTimeout) {result.outputBuffers[i].status = BufferStatus::ERROR;if (req->buffers[i].acquireFence >= 0) {native_handle_t* handle = native_handle_create(/*numFds*/1, /*numInts*/0);handle->data[0] = req->buffers[i].acquireFence;result.outputBuffers[i].releaseFence.setTo(handle, /*shouldOwn*/false);}notifyError(req->frameNumber, req->buffers[i].streamId, ErrorCode::ERROR_BUFFER);} else {result.outputBuffers[i].status = BufferStatus::OK;// TODO: refactorif (req->buffers[i].acquireFence >= 0) {native_handle_t* handle = native_handle_create(/*numFds*/1, /*numInts*/0);handle->data[0] = req->buffers[i].acquireFence;result.outputBuffers[i].releaseFence.setTo(handle, /*shouldOwn*/false);}}}// Fill capture result metadatafillCaptureResult(req->setting, req->shutterTs);const camera_metadata_t *rawResult = req->setting.getAndLock();V3_2::implementation::convertToHidl(rawResult, &result.result);req->setting.unlock(rawResult);// update inflight records{std::lock_guard<std::mutex> lk(mInflightFramesLock);mInflightFrames.erase(req->frameNumber);}// Callback into frameworkinvokeProcessCaptureResultCallback(results, /* tryWriteFmq */true);freeReleaseFences(results);return Status::OK;
}

接下来主要是initialize,通过开启一些OutputThread ,图像处理线程FormatConvertThread


bool ExternalCameraDeviceSession::initialize() {
#ifdef SUBDEVICE_ENABLEif(!isSubDevice()){if (mV4l2Fd.get() < 0) {ALOGE("%s: invalid v4l2 device fd %d!", __FUNCTION__, mV4l2Fd.get());return true;}}
#elseif (mV4l2Fd.get() < 0) {ALOGE("%s: invalid v4l2 device fd %d!", __FUNCTION__, mV4l2Fd.get());return true;}
#endifstruct v4l2_capability capability;
#ifdef SUBDEVICE_ENABLEint ret = -1;if(!isSubDevice()){ioctl(mV4l2Fd.get(), VIDIOC_QUERYCAP, &capability);}
#elseint ret = ioctl(mV4l2Fd.get(), VIDIOC_QUERYCAP, &capability);
#endifstd::string make, model;if (ret < 0) {ALOGW("%s v4l2 QUERYCAP failed", __FUNCTION__);mExifMake = "Generic UVC webcam";mExifModel = "Generic UVC webcam";} else {// capability.card is UTF-8 encodedchar card[32];int j = 0;for (int i = 0; i < 32; i++) {if (capability.card[i] < 128) {card[j++] = capability.card[i];}if (capability.card[i] == '\0') {break;}}if (j == 0 || card[j - 1] != '\0') {mExifMake = "Generic UVC webcam";mExifModel = "Generic UVC webcam";} else {mExifMake = card;mExifModel = card;}}initOutputThread();if (mOutputThread == nullptr) {ALOGE("%s: init OutputThread failed!", __FUNCTION__);return true;}mOutputThread->setExifMakeModel(mExifMake, mExifModel);mFormatConvertThread->createJpegDecoder();status_t status = initDefaultRequests();if (status != OK) {ALOGE("%s: init default requests failed!", __FUNCTION__);return true;}mRequestMetadataQueue = std::make_unique<RequestMetadataQueue>(kMetadataMsgQueueSize, false /* non blocking */);if (!mRequestMetadataQueue->isValid()) {ALOGE("%s: invalid request fmq", __FUNCTION__);return true;}mResultMetadataQueue = std::make_shared<ResultMetadataQueue>(kMetadataMsgQueueSize, false /* non blocking */);if (!mResultMetadataQueue->isValid()) {ALOGE("%s: invalid result fmq", __FUNCTION__);return true;}// TODO: check is PRIORITY_DISPLAY enough?mOutputThread->run("ExtCamOut", PRIORITY_DISPLAY);mFormatConvertThread->run("ExtFmtCvt", PRIORITY_DISPLAY);#ifdef HDMI_ENABLE
#ifdef HDMI_SUBVIDEO_ENABLEsp<rockchip::hardware::hdmi::V1_0::IHdmi> client = rockchip::hardware::hdmi::V1_0::IHdmi::getService();if(client.get()!= nullptr){::android::hardware::hidl_string deviceId;client->getHdmiDeviceId( [&](const ::android::hardware::hidl_string &id){deviceId = id.c_str();});ALOGE("getHdmiDeviceId:%s",deviceId.c_str());if(strstr(deviceId.c_str(), mCameraId.c_str())){ALOGE("HDMI attach SubVideo %s",mCameraId.c_str());if(strlen(ExternalCameraDevice::kSubDevName)>0){sprintf(main_ctx.dev_name,"%s",ExternalCameraDevice::kSubDevName);ALOGE("main_ctx.dev_name:%s",main_ctx.dev_name);}mSubVideoThread = new SubVideoThread(0);mSubVideoThread->run("SubVideo", PRIORITY_DISPLAY);}}
#endif
#endifreturn false;
}

每一帧都会在bool ExternalCameraDeviceSession::OutputThread::threadLoop() 里做格式转换和裁剪

//通过RGA处理每一帧图像,图像显示有问题可以在里面改

// Per-frame worker of the output thread. This is an abridged excerpt ("..."
// marks elided code): the NV12 branch shown converts/crops the captured frame
// into the client buffer via RGA.
bool ExternalCameraDeviceSession::OutputThread::threadLoop() {
    std::shared_ptr<HalRequest> req;
    auto parent = mParent.promote();
    if (parent == nullptr) {
        ALOGE("%s: session has been disconnected!", __FUNCTION__);
        return false;
    }
    ...
    } else if (req->frameIn->mFourcc == V4L2_PIX_FMT_NV12){
        int handle_fd = -1, ret;
        // Resolve the gralloc buffer handle to a dma-buf fd RGA can write to.
        const native_handle_t* tmp_hand = (const native_handle_t*)(*(halBuf.bufPtr));
        ret = ExCamGralloc4::get_share_fd(tmp_hand, &handle_fd);
        if (handle_fd == -1) {
            LOGE("convert tmp_hand to dst_fd error");
            // NOTE(review): this function returns bool, so -EINVAL converts to
            // true (keep looping); presumably intended — confirm upstream.
            return -EINVAL;
        }
        ALOGV("%s(%d): halBuf handle_fd(%d)", __FUNCTION__, __LINE__, handle_fd);
        ALOGV("%s(%d) halbuf_wxh(%dx%d) frameNumber(%d)", __FUNCTION__, __LINE__,
              halBuf.width, halBuf.height, req->frameNumber);
        unsigned long vir_addr =  reinterpret_cast<unsigned long>(req->inData);
        // Each frame is processed through RGA here; display problems (flip,
        // rotation, mirroring) can be fixed inside rga_scale_crop.
        camera2::RgaCropScale::rga_scale_crop(tempFrameWidth, tempFrameHeight, vir_addr,
                                              HAL_PIXEL_FORMAT_YCrCb_NV12, handle_fd,
                                              halBuf.width, halBuf.height, 100, false, false,
                                              (halBuf.format == PixelFormat::YCRCB_420_SP),
                                              is16Align, true);
    } else if (req->frameIn->mFourcc == V4L2_PIX_FMT_NV16){
    ...
}

// Scale/crop one frame from (src_width x src_height, src_format, at src_fd /
// virtual address) into the destination buffer dst_fd at dst_width x
// dst_height using the Rockchip RGA hardware. Abridged excerpt ("..." marks
// elided code) — only the rotation/mirror selection is shown.
//
// @param zoom_val     zoom percentage (100 = no zoom)
// @param mirror       request horizontal mirroring of the source
// @param isDstNV21    destination is NV21 rather than NV12
// @param is16Align    destination stride is 16-aligned
// @param isYuyvFormat source is YUYV-packed
int RgaCropScale::rga_scale_crop(int src_width, int src_height,
                                 unsigned long src_fd, int src_format,
                                 unsigned long dst_fd,
                                 int dst_width, int dst_height,
                                 int zoom_val, bool mirror, bool isNeedCrop,
                                 bool isDstNV21, bool is16Align, bool isYuyvFormat)
{
    int ret = 0;
    rga_info_t src,dst;
    int zoom_cropW,zoom_cropH;
    int ratio = 0;
    ...
    // Author's note (translated): my image needed mirroring — the transform
    // applied to each frame can be changed here.
    // NOTE(review): ROT_90/ROT_180 are the author's local patch for the
    // upside-down display; pick the transform matching your panel/sensor.
    if (mirror)
        src.rotation = HAL_TRANSFORM_ROT_90; //HAL_TRANSFORM_ROT_
    else
        src.rotation = HAL_TRANSFORM_ROT_180;
    ...
}

本文来自互联网用户投稿,该文观点仅代表作者本人,不代表本站立场。本站仅提供信息存储空间服务,不拥有所有权,不承担相关法律责任。如若转载,请注明出处:http://www.mzph.cn/news/666344.shtml

如若内容造成侵权/违法违规/事实不符,请联系多彩编程网进行投诉反馈email:809451989@qq.com,一经查实,立即删除!

相关文章

c语言大小写转换

⭐个人主页&#xff1a;黑菜钟-CSDN博客 ❀专栏&#xff1a;c/c_黑菜钟的博客-CSDN博客 前言&#xff1a; 这篇博客主要介绍3种有关大小写转换的方法&#xff0c;以及如何判断大小写的扩展c语言库函数 1.方法 1.1.ASCII编码法 在ASCII编码表中&#xff0c;小写和大写总是差一…

深入理解Istio服务网格(一)数据平面Envoy

一、服务网格概述(service mesh) 在传统的微服务架构中&#xff0c;服务间的调用&#xff0c;业务代码需要考虑认证、熔断、服务发现等非业务能力&#xff0c;在某种程度上&#xff0c;表现出了一定的耦合性 服务网格追求高级别的服务流量治理能力&#xff0c;认证、熔断、服…

文档更新记录

vue-cli3搭建项目_vite cli3搭建项目-CSDN博客 1.8 eslint_"plugins: [\"import\"], // 解决动态导入import语法报错问题 --> -CSDN博客 1.8

N-142基于springboot,vue停车场管理系统

开发工具&#xff1a;IDEA 服务器&#xff1a;Tomcat9.0&#xff0c; jdk1.8 项目构建&#xff1a;maven 数据库&#xff1a;mysql5.7 项目采用前后端分离 前端技术&#xff1a;vueelementUI 服务端技术&#xff1a;springbootmybatis-plus 本项目分为普通用户和管理员…

基于若依的ruoyi-nbcio流程管理系统自定义业务回写状态的一种新方法(一)

更多ruoyi-nbcio功能请看演示系统 gitee源代码地址 前后端代码&#xff1a; https://gitee.com/nbacheng/ruoyi-nbcio 演示地址&#xff1a;RuoYi-Nbcio后台管理系统 更多nbcio-boot功能请看演示系统 gitee源代码地址 后端代码&#xff1a; https://gitee.com/nbacheng/n…

深度学习驱动下的自然语言处理进展及其应用前景

文章目录 每日一句正能量前言技术进步应用场景挑战与前景自然语言处理技术当前面临的挑战未来的发展趋势和前景 伦理和社会影响实践经验后记 每日一句正能量 一个人若想拥有聪明才智&#xff0c;便需要不断地学习积累。 前言 自然语言处理&#xff08;NLP&#xff09;是一项正…

FreeRTOS动态 / 静态创建和删除任务

本篇文章记录我学习FreeRTOS的动态 / 静态创建和删除任务的知识。希望我的分享能给你带来不一样的收获&#xff01;文中涉及FreeRTOS创建和删除任务的API函数&#xff0c;建议读者参考以下文章&#xff1a; FreeRTOS任务相关的API函数-CSDN博客 目录 ​编辑 一、FreeRTOS动态创…

“超越摩尔定律”,存内计算走在爆发的边缘

目录 ​编辑 前言 在后摩尔时代提高计算机性能 六类存内计算技术 1&#xff09;XYZ-CIM 2&#xff09;XZ-CIM 3&#xff09;Z-CIM 4&#xff09;XY-CIM 5&#xff09;X-CIM 6&#xff09;O-CIM 各种CIM技术的原理 1&#xff09;XYZ-CIM&#xff1a;NVM有状态逻辑 2…

ES6-数组的解构赋值

一、数组的解构赋值的规律 - 只要等号两边的模式相同&#xff0c;左边的变量就会被赋予对应的值二、数组的解构赋值的例子讲解 1&#xff09;简单的示例&#xff08;完整的解构赋值&#xff09; 示例 //基本的模式匹配 // a&#xff0c;b,c依次和1&#xff0c;2&#xff0c…

libevent源码解析--event,event_callback,event_base

1.概述 实现一个基础tcp网络库&#xff0c;以基于tcp网络库构建服务端应用&#xff0c;客户端应用为起点&#xff0c;我们的核心诉求有&#xff1a; a. tcp网络库管理工作线程。 b. tcp网络库产生服务端对象&#xff0c;通过启动接口&#xff0c;开启服务端监听。进一步&…

解决gitee文件大小超过100MB——分片上传(每片<100MB)

Gitee 上传文件大小限制为 100MB。如果需要上传大于 100MB 的文件&#xff0c;可以按照以下步骤操作&#xff1a; 1. 将大文件分割成多个小于 100MB 的子文件。 2. 使用 Gitee 的命令行工具 git 分别将这些子文件添加到仓库中。 3. 在仓库中创建一个新文件&#xff08;例如&am…

记录学习--java abstract与interface使用区别

1.abstract使用场景 abstract提供了一套功能代码&#xff0c;这套功能代码可以直接用&#xff0c;也可以细微的改变&#xff0c;但是abstract不希望这套功能都改变了&#xff0c;这可能是一套标准功能。 2.interface使用场景 interface不提供任何功能&#xff0c;提供协议解…

【C生万物】初始C语言

&#x1f4da;博客主页&#xff1a;爱敲代码的小杨. ✨专栏&#xff1a;《Java SE语法》 | 《数据结构与算法》 | 《C生万物》 ❤️感谢大家点赞&#x1f44d;&#x1f3fb;收藏⭐评论✍&#x1f3fb;&#xff0c;您的三连就是我持续更新的动力❤️ &#x1f64f;小杨水平有…

【微服务】skywalking自定义链路追踪与日志采集

目录 一、前言 二、自定义链路追踪简介 2.1 自定义链路追踪应用场景 2.2 链路追踪几个关键概念 三、skywalking 自定义链路追踪实现 3.1 环境准备 3.2 集成过程 3.2.1 导入核心依赖 3.2.2 几个常用注解 3.2.3 方法集成 3.2.4 上报追踪信息 四、skywalking 自定义日志…

【国产MCU】-CH32V307-通用同步/异步收发器(USART)

通用同步/异步收发器(USART) 文章目录 通用同步/异步收发器(USART)1、USART介绍2、USART驱动API介绍3、USART使用示例3.1 轮询方式数据发送与接收3.2 中断方式数据发送与接收3.3 DMA方式数据发送与接收CH32V307提供了3组通用同步/异步收发器(USART1、USART2、USART3),以…

如何从 iPhone 上恢复永久删除的照片

您的 iPhone 上缺少照片吗&#xff1f;讽刺的是&#xff0c;iPhone 的许多高级功能可能正是这个问题如此普遍的原因。幸运的是&#xff0c;还有很多方法可以从 iPhone 恢复已删除的照片&#xff0c;具体取决于您设备的设置方式。 本文涵盖了所有这些内容。该过程根据您的具体情…

微信公众号迁移公证书怎么办?

公众号迁移有什么作用&#xff1f;只能变更主体吗&#xff1f;公众号迁移的作用可不止是变更主体哦&#xff01;还可以把原公众号的粉丝、文章素材、违规记录、留言功能、名称等迁移到新的公众号上。这样一来&#xff0c;你就可以实现公众号的公司主体变更、粉丝转移、开通留言…

MySQL数据库备份方法

一、数据库备份的分类 1.按不同维度分类 从物理与逻辑的角度&#xff0c;备份可分为 物理备份&#xff1a;对数据库操作系统的物理文件&#xff08;如数据文件、日志文件等&#xff09;的备份 物理备份方法 冷备份&#xff08;脱机备份&#xff09;&#xff1a;是在关闭数…

使用maven对springboot项目进行瘦身分离jar的多种处理方案

文章目录 前言一、方案一&#xff08;修改自带的spring-boot-maven-plugin插件&#xff09;二、方案二&#xff08;使用spring-boot-thin-maven-plugin插件&#xff09;总结 前言 springboot项目打包一般我们都使用它自带的spring-boot-maven-plugin插件&#xff0c;这个插件默…