RK Camera HAL image processing

SoC: RK3568

System: Android 12

Today I found that an external USB camera opened through the Camera2 API shows its image upside down. Handling this in the app with the Camera1 API is awkward because it exposes very few hooks for this, so it is easier to fix it in the camera HAL.

The RK camera HAL lives under hardware/interfaces/camera.

The core files are the external camera provider sources (ExternalCameraDevice.cpp, ExternalCameraDeviceSession.cpp).

At boot the android.hardware.camera.provider@2.4-external-service service is started.

It iterates over the /dev/videoX nodes and queries each camera driver over V4L2 for its supported resolutions (width/height), pixel formats and frame rates. If the node turns out to be a valid camera, it is reported to CameraServer and registered under a camera ID. The main code is below.

ExternalCameraDevice.cpp

std::vector<SupportedV4L2Format> ExternalCameraDevice::getCandidateSupportedFormatsLocked(
        int fd, CroppingType cropType,
        const std::vector<ExternalCameraConfig::FpsLimitation>& fpsLimits,
        const std::vector<ExternalCameraConfig::FpsLimitation>& depthFpsLimits,
        const Size& minStreamSize, bool depthEnabled) {
    std::vector<SupportedV4L2Format> outFmts;
    if (!mSubDevice) {
        // VIDIOC_QUERYCAP: get capability
        struct v4l2_capability capability;
        int ret_query = ioctl(fd, VIDIOC_QUERYCAP, &capability);
        if (ret_query < 0) {
            ALOGE("%s v4l2 QUERYCAP failed: %s", __FUNCTION__, strerror(errno));
        }
        struct v4l2_fmtdesc fmtdesc{};
        fmtdesc.index = 0;
        if (capability.device_caps & V4L2_CAP_VIDEO_CAPTURE_MPLANE)
            fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
        else
            fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        int ret = 0;
        while (ret == 0) {
            // Enumerate the pixel formats the camera supports
            ret = TEMP_FAILURE_RETRY(ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc));
            ALOGV("index:%d,ret:%d, format:%c%c%c%c", fmtdesc.index, ret,
                  fmtdesc.pixelformat & 0xFF, (fmtdesc.pixelformat >> 8) & 0xFF,
                  (fmtdesc.pixelformat >> 16) & 0xFF, (fmtdesc.pixelformat >> 24) & 0xFF);
            if (ret == 0 && !(fmtdesc.flags & V4L2_FMT_FLAG_EMULATED)) {
                auto it = std::find(kSupportedFourCCs.begin(), kSupportedFourCCs.end(),
                                    fmtdesc.pixelformat);
                if (it != kSupportedFourCCs.end()) {
                    // Found supported format
                    v4l2_frmsizeenum frameSize{.index = 0, .pixel_format = fmtdesc.pixelformat};
                    // Enumerate the frame sizes for this format
                    for (; TEMP_FAILURE_RETRY(ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frameSize)) == 0;
                         ++frameSize.index) {
                        if (frameSize.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
                            ALOGV("index:%d, format:%c%c%c%c, w %d, h %d", frameSize.index,
                                  fmtdesc.pixelformat & 0xFF, (fmtdesc.pixelformat >> 8) & 0xFF,
                                  (fmtdesc.pixelformat >> 16) & 0xFF,
                                  (fmtdesc.pixelformat >> 24) & 0xFF,
                                  frameSize.discrete.width, frameSize.discrete.height);
                            // Disregard h > w formats so all aspect ratio (h/w) <= 1.0
                            // This will simplify the crop/scaling logic down the road
                            if (frameSize.discrete.height > frameSize.discrete.width) {
                                continue;
                            }
                            // Discard all formats which is smaller than minStreamSize
                            if (frameSize.discrete.width < minStreamSize.width
                                    || frameSize.discrete.height < minStreamSize.height) {
                                continue;
                            }
                            SupportedV4L2Format format{.width = frameSize.discrete.width,
                                                       .height = frameSize.discrete.height,
                                                       .fourcc = fmtdesc.pixelformat};
                            // Query the frame rates supported for this size
                            if (format.fourcc == V4L2_PIX_FMT_Z16 && depthEnabled) {
                                updateFpsBounds(fd, cropType, depthFpsLimits, format, outFmts);
                            } else {
                                updateFpsBounds(fd, cropType, fpsLimits, format, outFmts);
                            }
                        }
                    }
#ifdef HDMI_ENABLE
                    if (strstr((const char*)capability.driver, "hdmi")) {
                        ALOGE("driver.find :%s", capability.driver);
                        struct v4l2_dv_timings timings;
                        if (TEMP_FAILURE_RETRY(ioctl(fd, VIDIOC_SUBDEV_QUERY_DV_TIMINGS, &timings)) == 0) {
                            char fmtDesc[5]{0};
                            sprintf(fmtDesc, "%c%c%c%c",
                                    fmtdesc.pixelformat & 0xFF, (fmtdesc.pixelformat >> 8) & 0xFF,
                                    (fmtdesc.pixelformat >> 16) & 0xFF,
                                    (fmtdesc.pixelformat >> 24) & 0xFF);
                            ALOGV("hdmi index:%d,ret:%d, format:%s", fmtdesc.index, ret, fmtDesc);
                            ALOGE("%s, hdmi I:%d, wxh:%dx%d", __func__,
                                  timings.bt.interlaced, timings.bt.width, timings.bt.height);
                            ALOGV("add hdmi index:%d,ret:%d, format:%c%c%c%c", fmtdesc.index, ret,
                                  fmtdesc.pixelformat & 0xFF, (fmtdesc.pixelformat >> 8) & 0xFF,
                                  (fmtdesc.pixelformat >> 16) & 0xFF,
                                  (fmtdesc.pixelformat >> 24) & 0xFF);
                            // Use the detected HDMI timing, plus two fixed sizes
                            SupportedV4L2Format formatGet{.width = timings.bt.width,
                                                          .height = timings.bt.height,
                                                          .fourcc = fmtdesc.pixelformat};
                            updateFpsBounds(fd, cropType, fpsLimits, formatGet, outFmts);
                            SupportedV4L2Format format_640x360{.width = 640, .height = 360,
                                                               .fourcc = fmtdesc.pixelformat};
                            updateFpsBounds(fd, cropType, fpsLimits, format_640x360, outFmts);
                            SupportedV4L2Format format_1920x1080{.width = 1920, .height = 1080,
                                                                 .fourcc = fmtdesc.pixelformat};
                            updateFpsBounds(fd, cropType, fpsLimits, format_1920x1080, outFmts);
                        }
                    }
#endif
                }
            }
            fmtdesc.index++;
        }
        trimSupportedFormats(cropType, &outFmts);
    }
    ...
    return outFmts;
}
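For debugging, the same enumeration can be reproduced outside the HAL. The following is a small standalone sketch (illustrative only, not HAL code) that walks the same VIDIOC_ENUM_FMT / VIDIOC_ENUM_FRAMESIZES / VIDIOC_ENUM_FRAMEINTERVALS ioctls against a /dev/videoX node, so you can see what the provider will find before it registers the camera:

#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>
#include <cstdio>

// Standalone probe: list the pixel formats, frame sizes and frame rates of a
// V4L2 capture node, roughly what getCandidateSupportedFormatsLocked() walks.
int main(int argc, char** argv) {
    const char* node = (argc > 1) ? argv[1] : "/dev/video0";
    int fd = open(node, O_RDWR);
    if (fd < 0) { perror("open"); return 1; }

    v4l2_capability cap{};
    if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == 0)
        printf("driver=%s card=%s\n", (const char*)cap.driver, (const char*)cap.card);

    v4l2_fmtdesc fmt{};
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    for (fmt.index = 0; ioctl(fd, VIDIOC_ENUM_FMT, &fmt) == 0; fmt.index++) {
        printf("format %c%c%c%c (%s)\n",
               fmt.pixelformat & 0xFF, (fmt.pixelformat >> 8) & 0xFF,
               (fmt.pixelformat >> 16) & 0xFF, (fmt.pixelformat >> 24) & 0xFF,
               (const char*)fmt.description);
        v4l2_frmsizeenum size{};
        size.pixel_format = fmt.pixelformat;
        for (size.index = 0; ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &size) == 0; size.index++) {
            if (size.type != V4L2_FRMSIZE_TYPE_DISCRETE) continue;
            v4l2_frmivalenum ival{};
            ival.pixel_format = fmt.pixelformat;
            ival.width = size.discrete.width;
            ival.height = size.discrete.height;
            for (ival.index = 0; ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &ival) == 0; ival.index++) {
                if (ival.type != V4L2_FRMIVAL_TYPE_DISCRETE) continue;
                printf("  %ux%u @ %.1f fps\n", size.discrete.width, size.discrete.height,
                       (double)ival.discrete.denominator / ival.discrete.numerator);
            }
        }
    }
    close(fd);
    return 0;
}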

Once the enumeration above succeeds, the registered cameras can be listed with dumpsys media.camera | grep map:

rk3588_s:/ $ dumpsys media.camera | grep map
Device 0 maps to "0"
Device 1 maps to "1"
Device 2 maps to "112"
Device 3 maps to "201"

When an app then opens the camera through the Camera2 API, the call goes through CameraServer and finally lands in ExternalCameraDevice::open.

1. openCamera

Return<void> ExternalCameraDevice::open(
        const sp<ICameraDeviceCallback>& callback, ICameraDevice::open_cb _hidl_cb) {
    Status status = Status::OK;
    sp<ExternalCameraDeviceSession> session = nullptr;
    if (callback == nullptr) {
        ALOGE("%s: cannot open camera %s. callback is null!", __FUNCTION__, mCameraId.c_str());
        _hidl_cb(Status::ILLEGAL_ARGUMENT, nullptr);
        return Void();
    }
    // Make sure the camera characteristics were read successfully
    if (isInitFailed()) {
        ALOGE("%s: cannot open camera %s. camera init failed!", __FUNCTION__, mCameraId.c_str());
        _hidl_cb(Status::INTERNAL_ERROR, nullptr);
        return Void();
    }
    mLock.lock();
    ALOGV("%s: Initializing device for camera %s", __FUNCTION__, mCameraId.c_str());
    session = mSession.promote();
    if (session != nullptr && !session->isClosed()) {
        ALOGE("%s: cannot open an already opened camera!", __FUNCTION__);
        mLock.unlock();
        _hidl_cb(Status::CAMERA_IN_USE, nullptr);
        return Void();
    }
    // Open the V4L2 device node
    unique_fd fd(::open(mDevicePath.c_str(), O_RDWR));
#ifdef SUBDEVICE_ENABLE
    if (!mSubDevice) {
        if (fd.get() < 0) {
            int numAttempt = 0;
            do {
                ALOGW("%s: v4l2 device %s open failed, wait 33ms and try again",
                      __FUNCTION__, mDevicePath.c_str());
                usleep(OPEN_RETRY_SLEEP_US); // sleep and try again
                fd.reset(::open(mDevicePath.c_str(), O_RDWR));
                numAttempt++;
            } while (fd.get() < 0 && numAttempt <= MAX_RETRY);
            if (fd.get() < 0) {
                ALOGE("%s: v4l2 device open %s failed: %s",
                      __FUNCTION__, mDevicePath.c_str(), strerror(errno));
                mLock.unlock();
                _hidl_cb(Status::INTERNAL_ERROR, nullptr);
                return Void();
            }
        }
    }
#else
    if (fd.get() < 0) {
        int numAttempt = 0;
        do {
            ALOGW("%s: v4l2 device %s open failed, wait 33ms and try again",
                  __FUNCTION__, mDevicePath.c_str());
            usleep(OPEN_RETRY_SLEEP_US); // sleep and try again
            fd.reset(::open(mDevicePath.c_str(), O_RDWR));
            numAttempt++;
        } while (fd.get() < 0 && numAttempt <= MAX_RETRY);
        if (fd.get() < 0) {
            ALOGE("%s: v4l2 device open %s failed: %s",
                  __FUNCTION__, mDevicePath.c_str(), strerror(errno));
            mLock.unlock();
            _hidl_cb(Status::INTERNAL_ERROR, nullptr);
            return Void();
        }
    }
#endif
    // Create the device session
    session = createSession(callback, mCfg, mSupportedFormats, mCroppingType,
                            mCameraCharacteristics, mCameraId, std::move(fd));
    if (session == nullptr) {
        ALOGE("%s: camera device session allocation failed", __FUNCTION__);
        mLock.unlock();
        _hidl_cb(Status::INTERNAL_ERROR, nullptr);
        return Void();
    }
    if (session->isInitFailed()) {
        ALOGE("%s: camera device session init failed", __FUNCTION__);
        session = nullptr;
        mLock.unlock();
        _hidl_cb(Status::INTERNAL_ERROR, nullptr);
        return Void();
    }
    mSession = session;
    mLock.unlock();
    _hidl_cb(status, session->getInterface());
    return Void();
}
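For reference, this is roughly the app-side call that ends up here. A minimal Camera2 NDK sketch (assuming libcamera2ndk is available and the CAMERA permission is granted; openFirstCamera is just an illustration, not part of the HAL):

#include <camera/NdkCameraManager.h>
#include <camera/NdkCameraDevice.h>
#include <cstdio>

// Minimal Camera2 NDK open: this is the call that eventually reaches
// ExternalCameraDevice::open() when the id belongs to an external camera.
static void onDisconnected(void* /*ctx*/, ACameraDevice* /*dev*/) {}
static void onError(void* /*ctx*/, ACameraDevice* /*dev*/, int /*err*/) {}

int openFirstCamera() {
    ACameraManager* mgr = ACameraManager_create();
    ACameraIdList* ids = nullptr;
    if (ACameraManager_getCameraIdList(mgr, &ids) != ACAMERA_OK || ids->numCameras < 1) {
        ACameraManager_delete(mgr);
        return -1;
    }
    ACameraDevice_StateCallbacks cbs = {nullptr, onDisconnected, onError};
    ACameraDevice* dev = nullptr;
    // ids->cameraIds[i] would be e.g. "112" or "201" for the external cameras above
    camera_status_t st = ACameraManager_openCamera(mgr, ids->cameraIds[0], &cbs, &dev);
    printf("open camera %s -> %d\n", ids->cameraIds[0], (int)st);
    if (dev) ACameraDevice_close(dev);
    ACameraManager_deleteCameraIdList(ids);
    ACameraManager_delete(mgr);
    return st == ACAMERA_OK ? 0 : -1;
}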

For each capture request the camera framework ends up in ExternalCameraDeviceSession::processCaptureResult(std::shared_ptr<HalRequest>& req), which returns the processed V4L2 buffer to the driver queue through enqueueV4l2Frame (VIDIOC_QBUF), sends the shutter notification, and fills in the capture result for that frame.


void ExternalCameraDeviceSession::enqueueV4l2Frame(const sp<V4L2Frame>& frame) {
    ATRACE_CALL();
    frame->unmap();
    ATRACE_BEGIN("VIDIOC_QBUF");
    v4l2_buffer buffer{};
    if (mCapability.device_caps & V4L2_CAP_VIDEO_CAPTURE_MPLANE)
        buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    else
        buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buffer.memory = V4L2_MEMORY_MMAP;
    if (V4L2_TYPE_IS_MULTIPLANAR(buffer.type)) {
        buffer.m.planes = planes;
        buffer.length = PLANES_NUM;
    }
    buffer.index = frame->mBufferIndex;
#ifdef SUBDEVICE_ENABLE
    if (!isSubDevice()) {
        if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_QBUF, &buffer)) < 0) {
            ALOGE("%s: QBUF index %d fails: %s", __FUNCTION__,
                  frame->mBufferIndex, strerror(errno));
            return;
        }
    }
#else
    if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_QBUF, &buffer)) < 0) {
        ALOGE("%s: QBUF index %d fails: %s", __FUNCTION__,
              frame->mBufferIndex, strerror(errno));
        return;
    }
#endif
    ATRACE_END();
    {
        std::lock_guard<std::mutex> lk(mV4l2BufferLock);
        mNumDequeuedV4l2Buffers--;
    }
    mV4L2BufferReturned.notify_one();
}

Status ExternalCameraDeviceSession::processCaptureResult(std::shared_ptr<HalRequest>& req) {
    ATRACE_CALL();
    // Return V4L2 buffer to V4L2 buffer queue
    sp<V3_4::implementation::V4L2Frame> v4l2Frame =
            static_cast<V3_4::implementation::V4L2Frame*>(req->frameIn.get());
    enqueueV4l2Frame(v4l2Frame);

    // NotifyShutter
    notifyShutter(req->frameNumber, req->shutterTs);

    // Fill output buffers
    hidl_vec<CaptureResult> results;
    results.resize(1);
    CaptureResult& result = results[0];
    result.frameNumber = req->frameNumber;
    result.partialResult = 1;
    result.inputBuffer.streamId = -1;
    result.outputBuffers.resize(req->buffers.size());
    for (size_t i = 0; i < req->buffers.size(); i++) {
        result.outputBuffers[i].streamId = req->buffers[i].streamId;
        result.outputBuffers[i].bufferId = req->buffers[i].bufferId;
        if (req->buffers[i].fenceTimeout) {
            result.outputBuffers[i].status = BufferStatus::ERROR;
            if (req->buffers[i].acquireFence >= 0) {
                native_handle_t* handle = native_handle_create(/*numFds*/1, /*numInts*/0);
                handle->data[0] = req->buffers[i].acquireFence;
                result.outputBuffers[i].releaseFence.setTo(handle, /*shouldOwn*/false);
            }
            notifyError(req->frameNumber, req->buffers[i].streamId, ErrorCode::ERROR_BUFFER);
        } else {
            result.outputBuffers[i].status = BufferStatus::OK;
            // TODO: refactor
            if (req->buffers[i].acquireFence >= 0) {
                native_handle_t* handle = native_handle_create(/*numFds*/1, /*numInts*/0);
                handle->data[0] = req->buffers[i].acquireFence;
                result.outputBuffers[i].releaseFence.setTo(handle, /*shouldOwn*/false);
            }
        }
    }

    // Fill capture result metadata
    fillCaptureResult(req->setting, req->shutterTs);
    const camera_metadata_t* rawResult = req->setting.getAndLock();
    V3_2::implementation::convertToHidl(rawResult, &result.result);
    req->setting.unlock(rawResult);

    // update inflight records
    {
        std::lock_guard<std::mutex> lk(mInflightFramesLock);
        mInflightFrames.erase(req->frameNumber);
    }

    // Callback into framework
    invokeProcessCaptureResultCallback(results, /* tryWriteFmq */true);
    freeReleaseFences(results);
    return Status::OK;
}
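enqueueV4l2Frame() is the "give the buffer back" half of the standard V4L2 streaming-I/O cycle. A condensed standalone sketch of that cycle (illustrative, with error handling mostly omitted) shows where the QBUF issued above fits:

#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <linux/videodev2.h>
#include <cstdio>

// Condensed V4L2 MMAP capture cycle: REQBUFS -> QBUF all -> STREAMON ->
// DQBUF (a filled frame) -> use the data -> QBUF (what enqueueV4l2Frame does).
int captureOneFrame(const char* node) {
    int fd = open(node, O_RDWR);
    if (fd < 0) { perror("open"); return -1; }

    v4l2_requestbuffers req{};
    req.count = 4;
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
    if (ioctl(fd, VIDIOC_REQBUFS, &req) < 0) { perror("REQBUFS"); close(fd); return -1; }
    if (req.count > 4) req.count = 4;

    // Map and queue every buffer the driver handed back.
    void* maps[4] = {};
    size_t lens[4] = {};
    for (unsigned i = 0; i < req.count; i++) {
        v4l2_buffer buf{};
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = i;
        ioctl(fd, VIDIOC_QUERYBUF, &buf);
        maps[i] = mmap(nullptr, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, buf.m.offset);
        lens[i] = buf.length;
        ioctl(fd, VIDIOC_QBUF, &buf);
    }

    int type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    ioctl(fd, VIDIOC_STREAMON, &type);

    v4l2_buffer buf{};
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    if (ioctl(fd, VIDIOC_DQBUF, &buf) == 0) {     // a filled frame
        printf("got frame: index=%u bytes=%u\n", buf.index, buf.bytesused);
        // ... consume maps[buf.index] here ...
        ioctl(fd, VIDIOC_QBUF, &buf);             // hand it back, like enqueueV4l2Frame()
    }

    ioctl(fd, VIDIOC_STREAMOFF, &type);
    for (unsigned i = 0; i < req.count; i++)
        if (maps[i]) munmap(maps[i], lens[i]);
    close(fd);
    return 0;
}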

The next important step is initialize(), which starts the worker threads: OutputThread and the image-processing thread FormatConvertThread.


bool ExternalCameraDeviceSession::initialize() {
#ifdef SUBDEVICE_ENABLE
    if (!isSubDevice()) {
        if (mV4l2Fd.get() < 0) {
            ALOGE("%s: invalid v4l2 device fd %d!", __FUNCTION__, mV4l2Fd.get());
            return true;
        }
    }
#else
    if (mV4l2Fd.get() < 0) {
        ALOGE("%s: invalid v4l2 device fd %d!", __FUNCTION__, mV4l2Fd.get());
        return true;
    }
#endif

    struct v4l2_capability capability;
#ifdef SUBDEVICE_ENABLE
    int ret = -1;
    if (!isSubDevice()) {
        ret = ioctl(mV4l2Fd.get(), VIDIOC_QUERYCAP, &capability);
    }
#else
    int ret = ioctl(mV4l2Fd.get(), VIDIOC_QUERYCAP, &capability);
#endif
    std::string make, model;
    if (ret < 0) {
        ALOGW("%s v4l2 QUERYCAP failed", __FUNCTION__);
        mExifMake = "Generic UVC webcam";
        mExifModel = "Generic UVC webcam";
    } else {
        // capability.card is UTF-8 encoded
        char card[32];
        int j = 0;
        for (int i = 0; i < 32; i++) {
            if (capability.card[i] < 128) {
                card[j++] = capability.card[i];
            }
            if (capability.card[i] == '\0') {
                break;
            }
        }
        if (j == 0 || card[j - 1] != '\0') {
            mExifMake = "Generic UVC webcam";
            mExifModel = "Generic UVC webcam";
        } else {
            mExifMake = card;
            mExifModel = card;
        }
    }

    initOutputThread();
    if (mOutputThread == nullptr) {
        ALOGE("%s: init OutputThread failed!", __FUNCTION__);
        return true;
    }
    mOutputThread->setExifMakeModel(mExifMake, mExifModel);
    mFormatConvertThread->createJpegDecoder();

    status_t status = initDefaultRequests();
    if (status != OK) {
        ALOGE("%s: init default requests failed!", __FUNCTION__);
        return true;
    }

    mRequestMetadataQueue = std::make_unique<RequestMetadataQueue>(
            kMetadataMsgQueueSize, false /* non blocking */);
    if (!mRequestMetadataQueue->isValid()) {
        ALOGE("%s: invalid request fmq", __FUNCTION__);
        return true;
    }
    mResultMetadataQueue = std::make_shared<ResultMetadataQueue>(
            kMetadataMsgQueueSize, false /* non blocking */);
    if (!mResultMetadataQueue->isValid()) {
        ALOGE("%s: invalid result fmq", __FUNCTION__);
        return true;
    }

    // TODO: check is PRIORITY_DISPLAY enough?
    mOutputThread->run("ExtCamOut", PRIORITY_DISPLAY);
    mFormatConvertThread->run("ExtFmtCvt", PRIORITY_DISPLAY);

#ifdef HDMI_ENABLE
#ifdef HDMI_SUBVIDEO_ENABLE
    sp<rockchip::hardware::hdmi::V1_0::IHdmi> client =
            rockchip::hardware::hdmi::V1_0::IHdmi::getService();
    if (client.get() != nullptr) {
        ::android::hardware::hidl_string deviceId;
        client->getHdmiDeviceId([&](const ::android::hardware::hidl_string& id) {
            deviceId = id.c_str();
        });
        ALOGE("getHdmiDeviceId:%s", deviceId.c_str());
        if (strstr(deviceId.c_str(), mCameraId.c_str())) {
            ALOGE("HDMI attach SubVideo %s", mCameraId.c_str());
            if (strlen(ExternalCameraDevice::kSubDevName) > 0) {
                sprintf(main_ctx.dev_name, "%s", ExternalCameraDevice::kSubDevName);
                ALOGE("main_ctx.dev_name:%s", main_ctx.dev_name);
            }
            mSubVideoThread = new SubVideoThread(0);
            mSubVideoThread->run("SubVideo", PRIORITY_DISPLAY);
        }
    }
#endif
#endif
    return false;
}
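OutputThread and FormatConvertThread are android::Thread workers: run() spawns the thread and threadLoop() is called repeatedly until it returns false. A minimal sketch of that pattern (DemoWorker is just an illustration, not one of the HAL classes):

#include <utils/Thread.h>
#include <utils/StrongPointer.h>

// Illustrative worker modeled on OutputThread / FormatConvertThread.
// run() spawns the thread; threadLoop() is invoked in a loop until it
// returns false or requestExit() is called.
class DemoWorker : public android::Thread {
    bool threadLoop() override {
        // dequeue one request / one frame and process it here ...
        return true;   // true = call threadLoop() again, false = exit the thread
    }
};

void startDemoWorker() {
    android::sp<DemoWorker> worker = new DemoWorker();
    worker->run("DemoWorker", android::PRIORITY_DISPLAY);
    // ... later, when shutting down:
    worker->requestExitAndWait();
}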

Every frame then goes through bool ExternalCameraDeviceSession::OutputThread::threadLoop(), which does the format conversion and cropping. Each frame is processed by RGA here, so if the displayed image is wrong (rotated, flipped, mirrored), this is the place to change it.

bool ExternalCameraDeviceSession::OutputThread::threadLoop() {
    std::shared_ptr<HalRequest> req;
    auto parent = mParent.promote();
    if (parent == nullptr) {
        ALOGE("%s: session has been disconnected!", __FUNCTION__);
        return false;
    }
    ...
    } else if (req->frameIn->mFourcc == V4L2_PIX_FMT_NV12) {
        int handle_fd = -1, ret;
        const native_handle_t* tmp_hand = (const native_handle_t*)(*(halBuf.bufPtr));
        ret = ExCamGralloc4::get_share_fd(tmp_hand, &handle_fd);
        if (handle_fd == -1) {
            LOGE("convert tmp_hand to dst_fd error");
            return -EINVAL;
        }
        ALOGV("%s(%d): halBuf handle_fd(%d)", __FUNCTION__, __LINE__, handle_fd);
        ALOGV("%s(%d) halbuf_wxh(%dx%d) frameNumber(%d)", __FUNCTION__, __LINE__,
              halBuf.width, halBuf.height, req->frameNumber);
        unsigned long vir_addr = reinterpret_cast<unsigned long>(req->inData);
        // Every frame is processed by RGA here; if the displayed image is
        // wrong (rotated/mirrored), change the call below.
        camera2::RgaCropScale::rga_scale_crop(
                tempFrameWidth, tempFrameHeight, vir_addr,
                HAL_PIXEL_FORMAT_YCrCb_NV12, handle_fd,
                halBuf.width, halBuf.height, 100, false, false,
                (halBuf.format == PixelFormat::YCRCB_420_SP), is16Align, true);
    } else if (req->frameIn->mFourcc == V4L2_PIX_FMT_NV16) {
        ...
}

int RgaCropScale::rga_scale_crop(
        int src_width, int src_height, unsigned long src_fd, int src_format,
        unsigned long dst_fd, int dst_width, int dst_height,
        int zoom_val, bool mirror, bool isNeedCrop,
        bool isDstNV21, bool is16Align, bool isYuyvFormat)
{
    int ret = 0;
    rga_info_t src, dst;
    int zoom_cropW, zoom_cropH;
    int ratio = 0;
    ...
    // In my case the image needed to be mirrored; change the transform here
    // (HAL_TRANSFORM_ROT_*, HAL_TRANSFORM_FLIP_*).
    if (mirror)
        src.rotation = HAL_TRANSFORM_ROT_90;
    else
        src.rotation = HAL_TRANSFORM_ROT_180;
    ...
}
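If the preview comes out rotated or flipped, the fix is to change the transform written into src.rotation above. The flags are the standard Android HAL_TRANSFORM_* values; which one is correct depends on how your source is mounted, and whether the flip flags are honored through the rotation field depends on the librga version in your BSP, so the mapping below is only a sketch (applyCorrection, needsRotate180 and needsHFlip are illustrative names, not variables from the HAL):

#include <system/graphics.h>   // HAL_TRANSFORM_ROT_90/180/270, HAL_TRANSFORM_FLIP_H/V
#include <RgaApi.h>            // rga_info_t (header name/path may differ per BSP)

// Illustrative helper: choose the RGA transform that corrects the picture.
// src is the rga_info_t that rga_scale_crop() fills in before calling RGA.
static void applyCorrection(rga_info_t* src, bool needsRotate180, bool needsHFlip) {
    if (needsRotate180) {
        src->rotation = HAL_TRANSFORM_ROT_180;   // image is upside down
    } else if (needsHFlip) {
        src->rotation = HAL_TRANSFORM_FLIP_H;    // image is mirrored left/right
    } else {
        src->rotation = 0;                       // leave the frame as-is
    }
    // Other values to try: HAL_TRANSFORM_ROT_90 / HAL_TRANSFORM_ROT_270 for
    // sideways sources, HAL_TRANSFORM_FLIP_V for a vertical flip.
}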
