video_topic

news/2024/7/10 21:59:32 标签: ubuntu, ffmpeg, YOLO, 人工智能, linux

使用qt5,ffmpeg6.0,opencv,ros2来实现。qt并非必要,只是用惯了。

步骤是:

1.读取rtsp码流,转换成mat图像

2.将cv::Mat图像通过ROS2话题发布出去

项目结构如下:

videoplayer.h

#ifndef VIDEOPLAYER_H
#define VIDEOPLAYER_H

#include <QThread>
#include <QImage>

#include <string>  // fixed: std::string was used below without including <string>

class VlcInstance;
class VlcMedia;
class VlcMediaPlayer;

/// Decodes an RTSP video stream on a worker thread (FFmpeg in the .cpp)
/// and emits every decoded frame as a QImage.
class VideoPlayer : public QThread
{
    Q_OBJECT

public:
    explicit VideoPlayer();
    ~VideoPlayer();

    void startPlay();   ///< starts the QThread; run() executes on the new thread
    void stopPlay();    ///< asks the decode loop to exit (sets mStopFlag)

signals:
    void sig_GetOneFrame(QImage); ///< emitted once per decoded frame
    void sig_GetRFrame(QImage);   ///< declared but never emitted in the .cpp
    // fixed: merged the duplicate second `signals:` section that held only
    // this commented-out signal.
    //void SigFinished(void);

protected:
    void run() override;  // decode loop; fixed: marked as QThread::run override

private:
    QString mFileName;

    // NOTE(review): the VLC members below are never used in the visible
    // implementation — candidates for removal.
    VlcInstance *_instance;
    VlcMedia *_media;
    VlcMediaPlayer *_player;

    std::string rtspaddr; // RTSP URL consumed by run()
    bool mStopFlag;       // exit flag polled by the decode loop
public slots:
    void setrtsp(std::string addr);
};

#endif // VIDEOPLAYER_H

videoplayer.cpp

#include "videoplayer.h"
extern "C"
{

    #include "libavcodec/avcodec.h"
    #include "libavformat/avformat.h"
    #include "libavutil/pixfmt.h"
    #include "libswscale/swscale.h"
    #include "libavutil/imgutils.h"

}

#include <stdio.h>
#include<iostream>
using namespace std;
VideoPlayer::VideoPlayer()
    : _instance(nullptr)  // fixed: the VLC pointers and the stop flag were
    , _media(nullptr)     // previously left uninitialized
    , _player(nullptr)
    , rtspaddr("rtsp://admin:123456@192.168.123.104:554/stream1") // default camera URL
    , mStopFlag(false)
{
}

VideoPlayer::~VideoPlayer()
{
    // Fixed: make sure the worker thread has finished before the object is
    // destroyed — destroying a QThread that is still running is undefined
    // behaviour. Request the decode loop to exit, then join.
    mStopFlag = true;
    wait();
}

void VideoPlayer::setrtsp(std::string addr){
    // Slot: replace the RTSP source URL; takes effect on the next startPlay().
    rtspaddr = addr;
}

void VideoPlayer::startPlay()
{
    ///调用 QThread 的start函数 将会自动执行下面的run函数 run函数是一个新的线程
    this->start();
}

void VideoPlayer::stopPlay(){
    // Request the decode loop in run() to exit; the thread ends shortly after.
    mStopFlag = true;
}

void VideoPlayer::run()
{
    AVFormatContext *pFormatCtx;
    AVCodecContext *pCodecCtx;
    const AVCodec *pCodec;
    AVFrame *pFrame, *pFrameRGB;
    AVPacket *packet;
    uint8_t *out_buffer;

    static struct SwsContext *img_convert_ctx;

    int videoStream, i, numBytes;
    int ret, got_picture;

    avformat_network_init();

    //Allocate an AVFormatContext.
    pFormatCtx = avformat_alloc_context();


    AVDictionary *avdic=NULL;
    /*
    char option_key[]="rtsp_transport";
    char option_value[]="tcp";
    av_dict_set(&avdic,option_key,option_value,0);
    char option_key2[]="max_delay";
    char option_value2[]="100";
    av_dict_set(&avdic,option_key2,option_value2,0);*/

    av_dict_set(&avdic, "buffer_size", "1024000", 0); //设置最大缓存,1080可调到最大
    av_dict_set(&avdic, "rtsp_transport", "udp", 0); //以tcp的方式传送
    av_dict_set(&avdic, "stimeout", "5000000", 0); //设置超时断开链接时间,单位us
    av_dict_set(&avdic, "max_delay", "500000", 0); //设置最大时延
    av_dict_set(&avdic, "framerate", "5", 0);
    //av_dict_set(&avdic, "video_size","640x40",0);

    /*
    AVDictionary* options = NULL;
    av_dict_set(&options, "buffer_size", "1024000", 0); //设置最大缓存,1080可调到最大
    av_dict_set(&options, "rtsp_transport", "udp", 0); //以tcp的方式传送
    av_dict_set(&options, "stimeout", "5000000", 0); //设置超时断开链接时间,单位us
    av_dict_set(&options, "max_delay", "500000", 0); //设置最大时延
    av_dict_set(&options, "framerate", "20", 0);*/



    ///rtsp地址,可根据实际情况修改
    /// rtsp://127.0.0.1:8554/stream
    /// rtsp://admin:123456@192.168.123.104:554/stream1
    //char * tmp=(char*)rtspaddr.data();
    //char url[50];
    //strcpy(url, tmp);
    //char url[] ="rtsp://admin:123456@192.168.123.104:554/stream1";
    char url[100];
    for(int i=0;i<rtspaddr.length();i++){
        url[i] = rtspaddr[i];
    }
    url[rtspaddr.length()]='\0';


    if (avformat_open_input(&pFormatCtx, url, NULL, &avdic) != 0) {
        printf("can't open the file. \n");
        return;
    }

    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
        printf("Could't find stream infomation.\n");
        return;
    }



    videoStream = -1;

    ///循环查找视频中包含的流信息,直到找到视频类型的流
    ///便将其记录下来 保存到videoStream变量中
    ///这里我们现在只处理视频流  音频流先不管他
    for (i = 0; i < pFormatCtx->nb_streams; i++) {
        if (pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoStream = i;
            break;
        }
    }

    ///如果videoStream为-1 说明没有找到视频流
    if (videoStream == -1) {
        printf("Didn't find a video stream.\n");
        return;
    }
    printf("nb_stream:%d videoStream:%d\n",pFormatCtx->nb_streams,videoStream);

    pCodec = avcodec_find_decoder(pFormatCtx->streams[videoStream]->codecpar->codec_id);

    pCodecCtx = avcodec_alloc_context3(pCodec);

    avcodec_parameters_to_context(pCodecCtx, pFormatCtx->streams[videoStream]->codecpar);

    //printf("pCodecCtx->frame_number:%d\n", pCodecCtx->frame_number);
    //printf("pCodecCtx->time_base.num:%d\n", pCodecCtx->time_base.num);
    //printf("pCodecCtx->time_base.den:%d\n", pCodecCtx->time_base.den);
    //printf("pCodecCtx->bit_rate:%d\n", pCodecCtx->bit_rate);
    //printf("pCodecCtx->framerate:%d\n", pCodecCtx->framerate);

    
    // pCodecCtx->bit_rate =0;   //初始化为0
    // pCodecCtx->time_base.num=1;  //下面两行:一秒钟25帧
    // pCodecCtx->time_base.den=10;
    // pCodecCtx->frame_number=1;  //每包一个视频帧


    if (pCodec == NULL) {
        printf("Codec not found.\n");
        return;
    }

    ///打开解码器
    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
        printf("Could not open codec.\n");
        return;
    }

    pFrame = av_frame_alloc();
    pFrameRGB = av_frame_alloc();

    ///这里我们改成了 将解码后的YUV数据转换成RGB32
    img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
            pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height,
            AV_PIX_FMT_RGBA, SWS_BICUBIC, NULL, NULL, NULL);

    numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGBA, pCodecCtx->width,pCodecCtx->height,1);


    out_buffer = (uint8_t *) av_malloc(numBytes * sizeof(uint8_t));

        av_image_fill_arrays(
                pFrameRGB->data,
                pFrameRGB->linesize,
                out_buffer,
                AV_PIX_FMT_RGBA,
                pCodecCtx->width,
                pCodecCtx->height,
                1
        );
    int y_size = pCodecCtx->width * pCodecCtx->height;

    packet = (AVPacket *) malloc(sizeof(AVPacket)); //分配一个packet
    av_new_packet(packet, y_size); //分配packet的数据
    mStopFlag = false;
    while (!mStopFlag)
    {
        if (av_read_frame(pFormatCtx, packet) < 0)
        {
            continue; //这里认为视频读取完了
        }

        if (packet->stream_index == videoStream) {
            ret = avcodec_send_packet(pCodecCtx,packet);
            if( 0 != ret){
                continue;
            }
            while (avcodec_receive_frame(pCodecCtx,pFrame) == 0){
                sws_scale(img_convert_ctx,
                        (uint8_t const * const *) pFrame->data,
                        pFrame->linesize, 0, pCodecCtx->height, pFrameRGB->data,
                        pFrameRGB->linesize);

                //把这个RGB数据 用QImage加载
                QImage tmpImg((uchar *)out_buffer,pCodecCtx->width,pCodecCtx->height,QImage::Format_RGBA8888);

                //QImage tmpImg((uchar *)out_buffer,pCodecCtx->width,pCodecCtx->height,QImage::Format_RGB888);
                QImage image = tmpImg.copy(); //把图像复制一份 传递给界面显示
                emit sig_GetOneFrame(image);  //发送信号

                /*
                printf("pCodecCtx->width:%d\n", pCodecCtx->width);
                printf("pCodecCtx->height:%d\n", pCodecCtx->height);
                printf("pCodecCtx->frame_number:%d\n", pCodecCtx->frame_number);
                printf("pCodecCtx->time_base.num:%d\n", pCodecCtx->time_base.num);
                printf("pCodecCtx->time_base.den:%d\n", pCodecCtx->time_base.den);
                printf("pCodecCtx->bit_rate:%d\n", pCodecCtx->bit_rate);
                printf("pCodecCtx->framerate:%d\n", pCodecCtx->framerate);
                printf("pCodecCtx->frame_size:%d\n", pCodecCtx->frame_size);*/
            }
        }
        av_packet_unref(packet); //释放资源,否则内存会一直上升

        msleep(0.02); //停一停  不然放的太快了
    }
    av_free(out_buffer);
    av_free(pFrameRGB);
    avcodec_close(pCodecCtx);
    avformat_close_input(&pFormatCtx);
    //emit SigFinished();
}

widget.h

#ifndef WIDGET_H
#define WIDGET_H

#include <QWidget>
#include <QLineEdit>
#include <QDir>
#include <QSettings>
#include <QDebug>
#include <QPushButton>
#include <QPainter>
#include <QInputDialog>
#include <QtMath>
#include <iostream>

#include "videoplayer.h"
#include <iostream>
#include <csignal>
#include <opencv4/opencv2/opencv.hpp>

#include <iostream>
#include <iomanip>
#include <ctime>
#include <opencv2/opencv.hpp>

#include<algorithm>
#include<vector>
#include<iostream>

#include "rclcpp/rclcpp.hpp"
#include "std_msgs/msg/string.hpp"
 
#include <QTimer>


#include <cv_bridge/cv_bridge.h>
#include <sensor_msgs/msg/image.hpp>
#include <std_msgs/msg/string.hpp>
using namespace std::chrono_literals;

QT_BEGIN_NAMESPACE
namespace Ui { class Widget; }
QT_END_NAMESPACE

class Widget : public QWidget
{
    Q_OBJECT

public:
    Widget(QWidget *parent = nullptr);
    ~Widget();
signals:
    // NOTE(review): name looks like a typo for sig_frame; kept for compatibility.
    void sig_fame(QImage img);

private:
    Ui::Widget *ui;
private:
    void readconfig();   // load the camera RTSP address from config.ini
    QString rtspaddr;    // camera RTSP URL read from the config file
    void initWidget();   // show the address in the UI
    void initconnect();  // wire up signal/slot connections
private slots:
    void slot_open_or_close();  // toggles playback via the open/close button

protected:
    //void paintEvent(QPaintEvent *event);

private:
    VideoPlayer *mPlayer; // decoding/playback thread
    QImage mImage;        // most recent frame shown in the UI
    QString url;
    //QImage initimage;

    // QImage <-> cv::Mat conversion helpers used by slotGetOneFrame().
    cv::Mat QImage2Mat(QImage image);
    QImage Mat2QImage(const cv::Mat &mat);


    //void readvideo();

    //std::vector<cv::Vec3b> colors(32);
    //cv::VideoWriter outputVideo;
    //int encode_type ;//= VideoWriter::fourcc('M', 'J', 'P', 'G');
    //std::vector<cv::Vec3b> colors;

    //cv::VideoWriter outputVideo;

private slots:
    void slotGetOneFrame(QImage img);  // receives each decoded frame from mPlayer

private:

    cv::Mat tempmat;  // last received frame; published by timer_callback()
    // -------------------------------------
    // ros
    // -------------------------------------
    // node
    rclcpp::Node::SharedPtr node_;
    // pub
    // NOTE(review): sensor_msgs::msg::CompressedImage is used here but
    // <sensor_msgs/msg/compressed_image.hpp> is not included directly in this
    // header — presumably pulled in transitively via cv_bridge; confirm.
    rclcpp::Publisher<sensor_msgs::msg::CompressedImage>::SharedPtr publisher_;
    // sub
    //rclcpp::Subscription<std_msgs::msg::String>::SharedPtr subscriber_;
    // spin
    rclcpp::TimerBase::SharedPtr timer_;
    void initSpin(void);   // starts the QTimer that pumps rclcpp::spin_some()
    QTimer spin_timer_;    // Qt-side timer driving the ROS executor

    void timer_callback(); // ROS wall timer: publishes tempmat as CompressedImage

};
#endif // WIDGET_H

widget.cpp

#include "widget.h"
#include "ui_widget.h"

Widget::Widget(QWidget *parent)
    : QWidget(parent)
    , ui(new Ui::Widget)
{

    // Create the decoder thread first: readconfig() below hands it the RTSP URL.
    mPlayer = new VideoPlayer;
    ui->setupUi(this);
    readconfig();
    initWidget();
    initconnect();
    // -------------------------------------
   
    /*
    // create topic pub
    this->publisher_ = this->node_->create_publisher<std_msgs::msg::String>("pub_topic", 10);
    
    // create topic sub
    this->subscriber_ = node_->create_subscription<std_msgs::msg::String>(
        "sub_topic", 10,
        [&](const std_msgs::msg::String::SharedPtr msg)
        {
            // handle the received message
            QString receivedMsg = QString::fromStdString(msg->data);
            std::cout << msg->data << std::endl;
            //ui->textBrowser->append(receivedMsg);
        });
    this->initSpin();*/

    // ROS bring-up: init, create the node, the compressed-image publisher and
    // a 33 ms (~30 fps) wall timer that publishes the latest frame.
    rclcpp::init(0, nullptr);
    // create node
    this->node_ = rclcpp::Node::make_shared("video");

    this->publisher_ = this->node_->create_publisher<sensor_msgs::msg::CompressedImage>("pubImageTopic", 10);

    this->timer_ = this->node_->create_wall_timer(33ms, std::bind(&Widget::timer_callback, this));

    // Pump ROS callbacks from the Qt event loop (see initSpin()).
    this->initSpin();

}

Widget::~Widget()
{
    // Fixed: the decoder thread was left running (and mPlayer leaked) when
    // the widget was destroyed; stop and join it before tearing down.
    mPlayer->stopPlay();
    mPlayer->wait();
    delete mPlayer;

    delete ui;
    // -------------------------------------
    // ROS teardown
    // -------------------------------------
    this->spin_timer_.stop();
    rclcpp::shutdown();
    // -------------------------------------

}

void Widget::initSpin(void)
{
    this->spin_timer_.setInterval(1); // 1 ms
    QObject::connect(&this->spin_timer_, &QTimer::timeout, [&]()
                     { rclcpp::spin_some(node_); });
    this->spin_timer_.start();
}



void Widget::timer_callback(){

     try
      {
        //ros_img_ = cv_bridge::CvImage(std_msgs::msg::Header(), "bgr8", tempmat).toImageMsg();
        //sensor_msgs::msg::Image::SharedPtr ros_img_ = cv_bridge::CvImage(std_msgs::msg::Header(), "bgr8", tempmat).toImageMsg();
    
        if(!tempmat.empty())
        {
          //rclcpp::Publisher<sensor_msgs::msg::CompressedImage>::SharedPtr video_compressed_publisher_;
          cv::Mat des1080;
          cv::resize(tempmat, des1080, cv::Size(1080, 720), 0, 0, cv::INTER_NEAREST);
          sensor_msgs::msg::CompressedImage::SharedPtr ros_img_compressed_ = cv_bridge::CvImage(std_msgs::msg::Header(), "bgr8", des1080).toCompressedImageMsg();
          //video_compressed_publisher_->publish(*ros_img_compressed_);
          this->publisher_ ->publish(*ros_img_compressed_);
          qDebug()<<"publisher";
        }
        else{
            qDebug()<<"empty image";
        }
          //RCLCPP_WARN(this->get_logger(), "empty image");
        // video_publisher_->publish(*ros_img_);
      }
      catch (cv_bridge::Exception &e)
      {
        //RCLCPP_ERROR(this->get_logger(),ros_img_->encoding.c_str());
        qDebug()<<"Exception";
      }

}

void Widget::readconfig(){
    QSettings settingsread("./src/video_topic/conf/config.ini",QSettings::IniFormat);
    rtspaddr = settingsread.value("SetUpOption/camerartsp").toString();
    mPlayer->setrtsp(rtspaddr.toStdString());
}

void Widget::initWidget(){
    // Show the configured RTSP address in the read-only line edit.
    qDebug()<<rtspaddr;
    ui->le_rtstspaddr->setText(rtspaddr);
}

void Widget::slot_open_or_close(){
    // Toggle playback: the button's label doubles as the state flag.
    const bool opening = (ui->btn_openorclose->text() == "open");
    if (opening) {
        ui->btn_openorclose->setText("close");
        mPlayer->startPlay();
    } else {
        ui->btn_openorclose->setText("open");
        mPlayer->stopPlay();
    }
}

void Widget::initconnect(){
    connect(ui->btn_openorclose,&QPushButton::clicked,this,&Widget::slot_open_or_close);
    connect(mPlayer,SIGNAL(sig_GetOneFrame(QImage)),this,SLOT(slotGetOneFrame(QImage)));
    connect(this,&Widget::sig_fame,this,&Widget::slotGetOneFrame);
    //connect(mPlayer,&VideoPlayer::SigFinished, mPlayer,&VideoPlayer::deleteLater);//自动释放
}


void Widget::slotGetOneFrame(QImage img)
{
    // Cache the frame for the ROS publisher, then display it scaled to the label.
    tempmat = QImage2Mat(img);
    if (tempmat.empty()) {
        printf("null img\n");
        return;
    }

    mImage = Mat2QImage(tempmat);
    const QImage scaledImage = mImage.scaled(QSize(ui->label->width(), ui->label->height()));
    ui->label->setPixmap(QPixmap::fromImage(scaledImage));
}


cv::Mat Widget::QImage2Mat(QImage image)
{
    // Convert a QImage into a deep-copied BGR cv::Mat (CV_8UC3), ready for
    // cv_bridge's "bgr8" encoding and for Mat2QImage().
    // Fixes over the original:
    //  (1) cv::Mat::zeros was called with image.format() as the OpenCV type
    //      code — QImage::Format values are not valid cv types;
    //  (2) the returned Mat aliased the QImage's buffer, which dangles after
    //      the caller's QImage is destroyed (tempmat is read later by
    //      timer_callback) — every path now returns freshly allocated data;
    //  (3) cvtColor(BGR2RGB) was applied regardless of channel count, which
    //      throws on grayscale input and mishandles 4-channel images.
    const int rows = image.height();
    const int cols = image.width();
    cv::Mat bgr;

    switch (image.format())
    {
        case QImage::Format_Grayscale8: {
            cv::Mat view(rows, cols, CV_8UC1,
                         (void*)image.constBits(), image.bytesPerLine());
            cv::cvtColor(view, bgr, cv::COLOR_GRAY2BGR);
            break;
        }
        case QImage::Format_RGB888: {
            cv::Mat view(rows, cols, CV_8UC3,
                         (void*)image.constBits(), image.bytesPerLine());
            cv::cvtColor(view, bgr, cv::COLOR_RGB2BGR);
            break;
        }
        case QImage::Format_ARGB32: {
            // ARGB32 is stored as B,G,R,A bytes on little-endian hosts.
            cv::Mat view(rows, cols, CV_8UC4,
                         (void*)image.constBits(), image.bytesPerLine());
            cv::cvtColor(view, bgr, cv::COLOR_BGRA2BGR);
            break;
        }
        case QImage::Format_RGBA8888: {
            // RGBA8888 is stored as R,G,B,A bytes regardless of endianness.
            cv::Mat view(rows, cols, CV_8UC4,
                         (void*)image.constBits(), image.bytesPerLine());
            cv::cvtColor(view, bgr, cv::COLOR_RGBA2BGR);
            break;
        }
        default:
            // Unsupported format: return an empty Mat so the caller's
            // empty() check reports "null img".
            return cv::Mat();
    }

    // cvtColor wrote into freshly allocated storage, so `bgr` stays valid
    // after `image` goes out of scope.
    return bgr;
}

QImage Widget::Mat2QImage(const cv::Mat &mat)
{
    // Convert a cv::Mat back into a QImage for display.
    // Fixes over the original:
    //  (1) control fell off the end of this non-void function for any other
    //      Mat type — undefined behaviour; it now returns a null QImage;
    //  (2) the grayscale path returned a QImage aliasing the Mat's buffer;
    //      it now deep-copies so the result outlives the Mat.
    if (mat.type() == CV_8UC1) // note: CV_8U is the same type code as CV_8UC1
    {
        QImage image((const uchar *)mat.data, mat.cols, mat.rows,
                     static_cast<int>(mat.step), QImage::Format_Grayscale8);
        return image.copy();
    }
    if (mat.type() == CV_8UC3)
    {
        QImage image((const uchar *)mat.data, mat.cols, mat.rows,
                     static_cast<int>(mat.step), QImage::Format_RGB888);
        return image.rgbSwapped(); // BGR -> RGB; rgbSwapped() returns a detached copy
    }
    return QImage(); // unsupported Mat type
}

widget.ui

<?xml version="1.0" encoding="UTF-8"?>
<!-- Qt Designer form: a video label above an RTSP-address line edit and an
     open/close toggle button, inside a QStackedWidget. -->
<ui version="4.0">
 <class>Widget</class>
 <widget class="QWidget" name="Widget">
  <property name="geometry">
   <rect>
    <x>0</x>
    <y>0</y>
    <width>645</width>
    <height>461</height>
   </rect>
  </property>
  <property name="windowTitle">
   <string>Widget</string>
  </property>
  <layout class="QVBoxLayout" name="verticalLayout">
   <item>
    <widget class="QStackedWidget" name="stackedWidget">
     <widget class="QWidget" name="page">
      <layout class="QVBoxLayout" name="verticalLayout_2">
       <item>
        <widget class="QLabel" name="label">
         <property name="text">
          <string>TextLabel</string>
         </property>
        </widget>
       </item>
       <item>
        <layout class="QHBoxLayout" name="horizontalLayout">
         <item>
          <widget class="QLineEdit" name="le_rtstspaddr">
           <property name="readOnly">
            <bool>true</bool>
           </property>
          </widget>
         </item>
         <item>
          <widget class="QPushButton" name="btn_openorclose">
           <property name="text">
            <string>open</string>
           </property>
          </widget>
         </item>
        </layout>
       </item>
      </layout>
     </widget>
     <widget class="QWidget" name="page_2"/>
    </widget>
   </item>
  </layout>
 </widget>
 <resources/>
 <connections/>
</ui>

main.cpp

#include "widget.h"

#include <QApplication>

int main(int argc, char *argv[])
{
    QApplication a(argc, argv);
    Widget w;
    w.show();
    //w.showMaximized();
    return a.exec();
}

package.xml

<?xml version="1.0"?>
<?xml-model href="http://download.ros.org/schema/package_format3.xsd" schematypens="http://www.w3.org/2001/XMLSchema"?>
<package format="3">
  <name>video_topic</name>
  <version>0.0.0</version>
  <description>TODO: Package description</description>
  <maintainer email="cl@todo.todo">cl</maintainer>
  <license>TODO: License declaration</license>

  <buildtool_depend>ament_cmake</buildtool_depend>
  <!-- Fixed: cv_bridge was wrongly declared as a buildtool_depend; it is a
       regular library dependency and is listed below. -->

  <depend>rclcpp</depend>
  <depend>std_msgs</depend>
  <!-- Fixed: sensor_msgs and image_transport are used by CMakeLists.txt and
       the sources but were not declared. -->
  <depend>sensor_msgs</depend>
  <depend>cv_bridge</depend>
  <depend>image_transport</depend>

  <test_depend>ament_lint_auto</test_depend>
  <test_depend>ament_lint_common</test_depend>

  <export>
    <build_type>ament_cmake</build_type>
  </export>
</package>

CMakeLists.txt

cmake_minimum_required(VERSION 3.5)
project(video_topic)


# Default to C++14
if(NOT CMAKE_CXX_STANDARD)
  set(CMAKE_CXX_STANDARD 14)
endif()


if(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_CXX_COMPILER_ID MATCHES "Clang")
  add_compile_options(-Wall -Wextra -Wpedantic)
endif()
 


# find dependencies
find_package(ament_cmake REQUIRED)
find_package(rclcpp REQUIRED)
find_package(std_msgs REQUIRED)
# qt
find_package(Qt5 REQUIRED COMPONENTS Core Gui Widgets)
find_package(cv_bridge REQUIRED)
# NOTE(review): image_transport is not marked REQUIRED but is listed in
# ament_target_dependencies below — the build fails late if it is missing.
find_package(image_transport)
find_package(sensor_msgs REQUIRED)



# Headers and the .ui file are listed so AUTOMOC/AUTOUIC (enabled below) see them.
add_executable(video
  src/main.cpp
  src/videoplayer.h
  src/videoplayer.cpp
  src/widget.h
  src/widget.cpp
  src/widget.ui
)

#set(FFMPEG_LIBS_DIR /usr/local/ffmpeg/lib)
#set(FFMPEG_HEADERS_DIR /usr/local/ffmpeg/include)

# NOTE(review): hard-coded aarch64 multiarch paths — this will not build on
# x86_64 hosts; consider find_package/pkg-config for FFmpeg instead.
set(FFMPEG_LIBS_DIR /usr/lib/aarch64-linux-gnu)
set(FFMPEG_HEADERS_DIR /usr/include/aarch64-linux-gnu)
include_directories(${FFMPEG_HEADERS_DIR})
#link_directories(${FFMPEG_LIBS_DIR})
#set(FFMPEG_LIBS libavutil.so libavcodec.so libavdevice.so libavformat.so libavfilter.so libswresample.so libswscale.so  libavutil.so)
set(FFMPEG_LIBS libavcodec.so libavformat.so libswscale.so libavutil.so)


find_package(OpenCV REQUIRED)
include_directories( ${OpenCV_INCLUDE_DIRS})
target_link_libraries(video ${OpenCV_LIBS})

# ros
target_link_libraries(video
  ${rclcpp_LIBRARIES} 
)
# qt
target_link_libraries(video 
  Qt5::Core 
  Qt5::Gui
  Qt5::Widgets
)
#ffmpeg

target_link_libraries(video ${FFMPEG_LIBS})

ament_target_dependencies(video
  rclcpp
  std_msgs
  sensor_msgs 
  cv_bridge 
  OpenCV 
  image_transport
)
 
# ------------------------------------------
# Enable automatic MOC, UIC and RCC (Qt-related)
# ------------------------------------------
set_target_properties(video PROPERTIES AUTOMOC ON)
set_target_properties(video PROPERTIES AUTOUIC ON)
set_target_properties(video PROPERTIES AUTORCC ON)
# ------------------------------------------

# Install the executable
install(TARGETS video
  DESTINATION lib/${PROJECT_NAME}
)


ament_package()


http://www.niftyadmin.cn/n/5082611.html

相关文章

uni-app集成使用SQLite

一、打开uni-app中SQLite 二、封装sqlite.js module.exports {dbName: chat, // 数据库名称dbPath: _doc/chat.db, // 数据库地址,推荐以下划线为开头 _doc/xxx.db/*** Description: 创建数据库 或 有该数据库就打开* author: ZXL* createTime: 2023-10-12 09:23:10* Copyr…

Vue项目为页面添加水印效果

最近在做项目，有这样要求，需要在指定容器中添加水印，也可不设置容器，如果没有容器，则添加在整个页面中，即body，当接到这个需求的时候我第一想的方法就是用canvas来实现，话不多说搞起…

华为认证 | HCIP-Datacom,这门认证正式发布新版本!

华为认证数通高级工程师HCIP-Datacom-Campus Network Planning and Deployment V1.5（中文版）自2023年9月28日起，正式在中国区发布。 01 发布概述 基于“平台生态”战略，围绕“云-管-端”协同的新ICT技术架构，华为公司…

【产品】智能结构仿真软件AIFEM 2023R2新版本功能介绍

AIFEM是由天洑自主研发的一款通用的智能结构仿真软件，助力用户解决固体结构相关的静力学、动力学、振动、热力学等实际工程问题，软件提供高效的前后处理工具和高精度的有限元求解器，帮助用户快速、深入地评估结构的力学性能，加速产…

深拷贝和浅拷贝的主要区别

在JavaScript中，深拷贝和浅拷贝的主要区别在于它们处理对象属性的方式。 浅拷贝（Shallow Copy）只复制对象的引用，而不是实际的对象。因此，如果你修改了复制的对象，原始对象也会受到影响。 深拷贝…

RK3588平台产测之ArmSoM-W3 DDR带宽监控

1. 简介 专栏总目录 ArmSoM团队在产品量产之前都会对产品做几次专业化的功能测试以及性能压力测试，以此来保证产品的质量以及稳定性 优秀的产品都要进行多次全方位的功能测试以及性能压力测试才能够经得起市场的检验 2. 环境介绍 硬件环境： ArmSoM-W…

【SQL】新建库表时,报错attempt to write a readonly database

目录 1.问题背景 2.问题原因 3.解决方式 4.结果 windows64位 Navicat sql vscode c 1.问题背景 需求是这样： 希望在调用初始化数据库方法时，查看是否有名为【POCT_PROCESS_CONFIG】的数据库表，如果没有就新建 我的数据库格式是这样 …

应用商店优化的好处有哪些?

应用程序优化优势包括应用在商店的可见性和曝光度，高质量和被相关用户的更好发现，增加的应用下载量，降低用户获取成本和持续增长，增加应用收入和转化率以及全球受众范围。 1、提高知名度并在应用商店中脱颖而出。 如果用户找不到…