MyScreenRecord.cpp

//#define LOG_NDEBUG 0
#define LOG_TAG "myrecord"

#include <signal.h>
#include <string.h>
#include <assert.h>
#include <errno.h>
#include <fcntl.h>
#include <unistd.h>
#include <optional>

#include <utils/Errors.h>
#include <ui/DisplayState.h>
#include <ui/DisplayConfig.h>
#include <ui/PhysicalDisplayId.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/MediaCodecConstants.h>
#include <gui/SurfaceComposerClient.h>
#include <media/openmax/OMX_IVCommon.h>
#include <media/NdkMediaMuxer.h>
#include <media/NdkMediaCodec.h>
#include <media/MediaCodecBuffer.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <gui/Surface.h>
#include <binder/ProcessState.h>
#include <mediadrm/ICrypto.h>
#include <media/NdkMediaFormatPriv.h>

using namespace android;
namespace ui = android::ui;

static struct sigaction gOrigSigactionINT;
static struct sigaction gOrigSigactionHUP;
static PhysicalDisplayId gPhysicalDisplayId;
static uint32_t gVideoWidth = 0;
static uint32_t gVideoHeight = 0;
static uint32_t gBitRate = 20000000;
static uint32_t gBframes = 0;

static const char* kMimeTypeAvc = "video/avc";
static const char* kMimeTypeApplicationOctetstream = "application/octet-stream";

static volatile bool gStopRequested = false;

static void signalCatcher(int signum)
{
    gStopRequested = true;
    switch (signum) {
    case SIGINT:
    case SIGHUP:
        sigaction(SIGINT, &gOrigSigactionINT, NULL);
        sigaction(SIGHUP, &gOrigSigactionHUP, NULL);
        break;
    default:
        abort();
        break;
    }
}

static status_t configureSignals()
{
    struct sigaction act;
    memset(&act, 0, sizeof(act));
    act.sa_handler = signalCatcher;
    sigaction(SIGINT, &act, &gOrigSigactionINT);
    sigaction(SIGHUP, &act, &gOrigSigactionHUP);
    signal(SIGPIPE, SIG_IGN);
    return NO_ERROR;
}

static status_t prepareEncoder(float displayFps, sp<MediaCodec> *pCodec,
        sp<IGraphicBufferProducer> *pBufferProducer)
{
    // Assemble the encoder parameters: width, height, mime, color format,
    // bitrate, framerate, I-frame interval, and B-frame count.
    sp<AMessage> format = new AMessage;
    format->setInt32(KEY_WIDTH, gVideoWidth);
    format->setInt32(KEY_HEIGHT, gVideoHeight);
    format->setString(KEY_MIME, kMimeTypeAvc);
    format->setInt32(KEY_COLOR_FORMAT, OMX_COLOR_FormatAndroidOpaque);
    format->setInt32(KEY_BIT_RATE, gBitRate);
    format->setInt32(KEY_FRAME_RATE, displayFps);
    format->setInt32(KEY_I_FRAME_INTERVAL, 10);
    format->setInt32(KEY_MAX_B_FRAMES, gBframes);
    if (gBframes > 0) {
        // B-frames need at least Main profile.
        format->setInt32(KEY_PROFILE, AVCProfileMain);
        format->setInt32(KEY_LEVEL, AVCLevel41);
    }

    sp<android::ALooper> looper = new android::ALooper;
    looper->setName("screenrecord_looper");
    looper->start();

    // The last argument (true) asks for an encoder component.
    sp<MediaCodec> codec = MediaCodec::CreateByType(looper, kMimeTypeAvc, true);
    if (codec == NULL) {
        fprintf(stderr, "ERROR: unable to create %s codec instance\n", kMimeTypeAvc);
        return UNKNOWN_ERROR;
    }

    // Passing CONFIGURE_FLAG_ENCODE as the last flag configures the component as an encoder.
    status_t err = codec->configure(format, NULL, NULL, MediaCodec::CONFIGURE_FLAG_ENCODE);
    CHECK_EQ(err, OK);

    // createInputSurface() reaches into OMX and creates an OmxGraphicBufferSource.
    sp<IGraphicBufferProducer> bufferProducer;
    err = codec->createInputSurface(&bufferProducer);
    CHECK_EQ(err, OK);

    err = codec->start();
    CHECK_EQ(err, OK);

    *pCodec = codec;
    *pBufferProducer = bufferProducer;
    return OK;
}

static status_t setDisplayProjection(SurfaceComposerClient::Transaction& t,
        const sp<IBinder>& dpy, const ui::DisplayState& displayState)
{
    const ui::Size& viewport = displayState.viewport;

    // Source rect: the full layer stack of the display.
    Rect layerStackRect(viewport);

    float displayAspect = viewport.getHeight() / static_cast<float>(viewport.getWidth());

    // Letterbox/pillarbox the display into the video frame, preserving aspect ratio.
    uint32_t videoWidth, videoHeight;
    uint32_t outWidth, outHeight;
    videoWidth = gVideoWidth;
    videoHeight = gVideoHeight;
    if (videoHeight > (uint32_t)(videoWidth * displayAspect)) {
        outWidth = videoWidth;
        outHeight = (uint32_t)(videoWidth * displayAspect);
    } else {
        outHeight = videoHeight;
        outWidth = (uint32_t)(videoHeight / displayAspect);
    }

    uint32_t offX, offY;
    offX = (videoWidth - outWidth) / 2;
    offY = (videoHeight - outHeight) / 2;

    // Destination rect inside the video frame. Hardcoded here for testing;
    // the commented-out line is the centered, aspect-correct version.
    //Rect displayRect(offX, offY, offX + outWidth, offY + outHeight);
    Rect displayRect(50, 50, 500, 500);

    // Hand the projection (rotation, source rect, dest rect) to the display.
    t.setDisplayProjection(dpy, ui::ROTATION_0, layerStackRect, displayRect);
    return NO_ERROR;
}

static status_t prepareVirtualDisplay(const ui::DisplayState& displayState,
        const sp<IGraphicBufferProducer>& bufferProducer, sp<IBinder> *pDisplayHandle)
{
    // Create a (non-secure) virtual display.
    sp<IBinder> dpy = SurfaceComposerClient::createDisplay(String8("ScreenRecorder"), false);

    SurfaceComposerClient::Transaction t;
    // Attach the encoder's input surface to the virtual display.
    t.setDisplaySurface(dpy, bufferProducer);
    setDisplayProjection(t, dpy, displayState);
    // Mirror the physical display's layer stack onto the virtual display;
    // this is what selects which layers get captured.
    t.setDisplayLayerStack(dpy, displayState.layerStack);
    // Apply the transaction.
    t.apply();

    *pDisplayHandle = dpy;
    return NO_ERROR;
}

static status_t runEncoder(const sp<MediaCodec>& encoder, AMediaMuxer *muxer, FILE* rawFp,
        const sp<IBinder> &display, const sp<IBinder>& virtualDpy, ui::Rotation orientation)
{
    static const int kTimeout = 250000;   // dequeue timeout in microseconds

    // Fetch the encoder's complete set of output buffers up front.
    Vector<sp<MediaCodecBuffer>> buffers;
    status_t err = encoder->getOutputBuffers(&buffers);
    CHECK_EQ(err, OK);

    ssize_t trackIdx = -1;
    ssize_t metaTrackIdx = -1;
    //Vector<int64_t> timestamps;

    while (!gStopRequested) {
        size_t bufIndex, offset, size;
        int64_t ptsUsec;
        uint32_t flags;

        // Wait for an output buffer from the encoder.
        err = encoder->dequeueOutputBuffer(&bufIndex, &offset, &size, &ptsUsec, &flags, kTimeout);
        switch (err) {
        case NO_ERROR:
        {
            // If the buffer is flagged CODEC_CONFIG, set its size to 0 so it
            // is not written as a sample (the muxer gets csd from the track format).
            if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) != 0) {
                ALOGD("get codec config buffer %zu bytes", size);
                if (muxer != NULL)
                    size = 0;
            }
            if (size != 0) {
                ALOGD("get codec output buffer %zu bytes", size);
                {
                    // This (disabled) block would detect screen rotation and
                    // re-apply the display projection:
                    /*
                    ui::DisplayState displayState;
                    err = SurfaceComposerClient::getDisplayState(display, &displayState);
                    SurfaceComposerClient::Transaction t;
                    setDisplayProjection(t, virtualDpy, displayState);
                    t.apply();
                    orientation = displayState.orientation;
                    */
                }
                if (ptsUsec == 0)
                    ptsUsec = systemTime(SYSTEM_TIME_MONOTONIC) / 1000;

                // Hand the encoded sample to the MediaMuxer.
                sp<ABuffer> buffer = new ABuffer(buffers[bufIndex]->data(), buffers[bufIndex]->size());
                AMediaCodecBufferInfo bufferInfo = {0, static_cast<int32_t>(buffer->size()), ptsUsec, flags};
                err = AMediaMuxer_writeSampleData(muxer, trackIdx, buffer->data(), &bufferInfo);
                if (err != NO_ERROR) {
                    ALOGD("Failed writing data to muxer (err=%d)", err);
                    return err;
                }
                //timestamps.add(ptsUsec);
            }
            err = encoder->releaseOutputBuffer(bufIndex);
            // Stop recording once EOS arrives.
            if ((flags & MediaCodec::BUFFER_FLAG_EOS) != 0) {
                gStopRequested = true;
            }
            break;
        }
        case -EAGAIN:
            ALOGD("Got -EAGAIN, looping");
            break;
        case INFO_FORMAT_CHANGED:
        {
            // After the encoder reports a format change, fetch its output
            // format and use it to add a new track to the MediaMuxer.
            ALOGD("Encoder format changed");
            sp<AMessage> newFormat;
            encoder->getOutputFormat(&newFormat);
            AMediaFormat *ndkFormat = AMediaFormat_fromMsg(&newFormat);
            trackIdx = AMediaMuxer_addTrack(muxer, ndkFormat);
            ALOGD("trackIdx = %zd", trackIdx);
            AMediaFormat *metaFormat = AMediaFormat_new();
            AMediaFormat_setString(metaFormat, AMEDIAFORMAT_KEY_MIME, kMimeTypeApplicationOctetstream);
            metaTrackIdx = AMediaMuxer_addTrack(muxer, metaFormat);
            AMediaFormat_delete(metaFormat);

            // The MediaMuxer can only be started once its tracks exist.
            ALOGD("starting muxer");
            err = AMediaMuxer_start(muxer);
            break;
        }
        case android::INFO_OUTPUT_BUFFERS_CHANGED:
            ALOGD("dequeueOutputBuffer returned INFO_OUTPUT_BUFFERS_CHANGED");
            err = encoder->getOutputBuffers(&buffers);
            break;
        case INVALID_OPERATION:
            ALOGD("dequeueOutputBuffer returned INVALID_OPERATION");
            return err;
        default:
            ALOGD("Got other result");
            return err;
        }
    }

    ALOGD("Encoder stopping (req=%d)", gStopRequested);
    return OK;
}

status_t recordScreen(const char *fileName)
{
    // Install signal handlers: sigaction catches SIGINT/SIGHUP and sets the
    // stop flag that ends the encoder loop.
    configureSignals();

    sp<ProcessState> self = ProcessState::self();
    self->startThreadPool();

    sp<IBinder> display = SurfaceComposerClient::getPhysicalDisplayToken(gPhysicalDisplayId);
    if (display == NULL) {
        fprintf(stderr, "ERROR: no display\n");
        return NAME_NOT_FOUND;
    }

    ui::DisplayState displayState;
    status_t err = SurfaceComposerClient::getDisplayState(display, &displayState);
    if (err != NO_ERROR) {
        fprintf(stderr, "ERROR: unable to get display state\n");
        return err;
    }

    DisplayConfig displayConfig;
    err = SurfaceComposerClient::getActiveDisplayConfig(display, &displayConfig);
    if (err != NO_ERROR) {
        fprintf(stderr, "ERROR: unable to get display config\n");
        return err;
    }

    // Read the current display viewport.
    const ui::Size& viewport = displayState.viewport;
    // Note: the width/height taken from the viewport must not be odd,
    // or the encoder may reject the configuration.
    gVideoWidth = viewport.getWidth();
    gVideoHeight = viewport.getHeight();
    ALOGD("gVideoWidth = %d, gVideoHeight = %d", gVideoWidth, gVideoHeight);

    // Prepare the encoder.
    sp<MediaCodec> encoder;
    sp<IGraphicBufferProducer> encoderInputSurface;
    prepareEncoder(displayConfig.refreshRate, &encoder, &encoderInputSurface);

    // Configure the virtual display.
    sp<IGraphicBufferProducer> bufferProducer = encoderInputSurface;
    sp<IBinder> dpy;
    err = prepareVirtualDisplay(displayState, bufferProducer, &dpy);
    CHECK_EQ(err, OK);

    // AMediaMuxer is the NDK wrapper around MediaMuxer.
    AMediaMuxer *muxer = NULL;

    // Remove any existing output file.
    err = unlink(fileName);
    if (err != 0 && errno != ENOENT) {
        fprintf(stderr, "ERROR: couldn't remove existing file\n");
        abort();
    }
    int fd = open(fileName, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
    if (fd < 0) {
        fprintf(stderr, "ERROR: couldn't open file\n");
        abort();
    }

    // Create the MediaMuxer; the equivalent framework call would be:
    // new MediaMuxer(fd, (android::MediaMuxer::OutputFormat)format);
    muxer = AMediaMuxer_new(fd, AMEDIAMUXER_OUTPUT_FORMAT_MPEG_4);
    // MPEG4Writer dup()s the fd, so our copy can be closed here.
    close(fd);

    // Start recording.
    err = runEncoder(encoder, muxer, NULL, display, dpy, displayState.orientation);

    // Tear everything down.
    encoderInputSurface = NULL;
    SurfaceComposerClient::destroyDisplay(dpy);
    encoder->stop();
    err = AMediaMuxer_stop(muxer);
    AMediaMuxer_delete(muxer);
    encoder->release();
    return OK;
}

int main(int argc, char** argv)
{
    if (argc < 2) {
        fprintf(stderr, "Usage: %s <output-file>\n", argv[0]);
        return 1;
    }

    std::optional<PhysicalDisplayId> displayId = SurfaceComposerClient::getInternalDisplayId();
    if (!displayId) {
        fprintf(stderr, "Failed to get token for internal display\n");
        return 1;
    }
    gPhysicalDisplayId = *displayId;

    const char* fileName = argv[1];
    status_t err = recordScreen(fileName);
    return err == NO_ERROR ? 0 : 1;
}

Android.bp

cc_binary {
    name: "myscreenrecord",

    srcs: [
        "myscreenrecord.cpp",
    ],

    shared_libs: [
        "libstagefright",
        "libmedia",
        "libmediandk",
        "libmedia_omx",
        "libutils",
        "libbinder",
        "libstagefright_foundation",
        "libui",
        "libgui",
        "libcutils",
        "liblog",
    ],

    header_libs: [
        "libmediadrm_headers",
        "libmediametrics_headers",
    ],

    include_dirs: [
        "frameworks/av/media/libstagefright",
        "frameworks/av/media/libstagefright/include",
        "frameworks/native/include/media/openmax",
    ],
}

MediaMuxer

The demo above gives a rough picture of how MediaMuxer is used. Now let's look at the MediaMuxer code itself.
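
As a reference point before reading the sources, here is a minimal sketch of the NDK call sequence the demo builds on: tracks are added first, then the muxer is started, then samples are written. The fd and the encoded H.264 sample (data/size/ptsUs) are assumed inputs here, and a real track format must also carry the codec config (csd) data, which is why the demo waits for INFO_FORMAT_CHANGED before calling addTrack:

#include <media/NdkMediaMuxer.h>
#include <media/NdkMediaFormat.h>

// Illustrative only: mux a single pre-encoded H.264 sample into an MP4.
void muxOneSample(int fd, const uint8_t *data, size_t size, int64_t ptsUs) {
    AMediaMuxer *muxer = AMediaMuxer_new(fd, AMEDIAMUXER_OUTPUT_FORMAT_MPEG_4);

    AMediaFormat *fmt = AMediaFormat_new();
    AMediaFormat_setString(fmt, AMEDIAFORMAT_KEY_MIME, "video/avc");
    AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_WIDTH, 1280);   // assumed frame size
    AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_HEIGHT, 720);

    ssize_t track = AMediaMuxer_addTrack(muxer, fmt);  // add all tracks first...
    AMediaMuxer_start(muxer);                          // ...then start the muxer

    AMediaCodecBufferInfo info = {0, static_cast<int32_t>(size), ptsUs, 0};
    AMediaMuxer_writeSampleData(muxer, track, data, &info);

    AMediaMuxer_stop(muxer);
    AMediaMuxer_delete(muxer);
    AMediaFormat_delete(fmt);
}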

Constructor

The constructor takes an fd and an OutputFormat. The fd is an already-open file descriptor to write into; OutputFormat is the container type, an enum defined in MediaMuxer.h. The supported output formats are:

enum OutputFormat {
    OUTPUT_FORMAT_MPEG_4 = 0,      // MPEG4
    OUTPUT_FORMAT_WEBM = 1,        // WebM (similar to MKV)
    OUTPUT_FORMAT_THREE_GPP = 2,   // 3GPP (audio/video)
    OUTPUT_FORMAT_HEIF = 3,        // HEIF (HEVC-coded images)
    OUTPUT_FORMAT_OGG = 4,         // OGG audio
    OUTPUT_FORMAT_LIST_END // must be last - used to validate format type
};

static bool isMp4Format(MediaMuxer::OutputFormat format) {
    return format == MediaMuxer::OUTPUT_FORMAT_MPEG_4 ||
           format == MediaMuxer::OUTPUT_FORMAT_THREE_GPP ||
           format == MediaMuxer::OUTPUT_FORMAT_HEIF;
}

MPEG4, 3GPP, and HEIF are all packaged by the MP4 writer (MPEG4Writer):

MediaMuxer::MediaMuxer(int fd, OutputFormat format)
    : mFormat(format),
      mState(UNINITIALIZED) {
    if (isMp4Format(format)) {
        mWriter = new MPEG4Writer(fd);
    } else if (format == OUTPUT_FORMAT_WEBM) {
        mWriter = new WebmWriter(fd);
    } else if (format == OUTPUT_FORMAT_OGG) {
        mWriter = new OggWriter(fd);
    }

    if (mWriter != NULL) {
        mFileMeta = new MetaData;
        if (format == OUTPUT_FORMAT_HEIF) {
            // Note that the key uses recorder file types.
            mFileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_HEIF);
        } else if (format == OUTPUT_FORMAT_OGG) {
            mFileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_OGG);
        }
        mState = INITIALIZED;
    }
}
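
So at the framework level, creating a muxer is just a matter of opening an fd and picking a format (a minimal sketch; the output path is made up, and as the demo noted, MPEG4Writer dup()s the fd so the caller may close its own copy):

#include <fcntl.h>
#include <unistd.h>
#include <media/stagefright/MediaMuxer.h>

// Illustrative only; the output path is arbitrary.
android::sp<android::MediaMuxer> makeMp4Muxer() {
    int fd = open("/data/local/tmp/out.mp4", O_CREAT | O_TRUNC | O_RDWR, 0644);
    android::sp<android::MediaMuxer> muxer =
            new android::MediaMuxer(fd, android::MediaMuxer::OUTPUT_FORMAT_MPEG_4);
    close(fd); // safe: MPEG4Writer dup()s the fd in its constructor
    return muxer;
}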

The framework also natively provides AACWriter, AMRWriter, and MPEG2TSWriter.

addTrack

Adds a track to the output file: it first builds a MediaAdapter from the output media format, then adds it to the MediaWriter via addSource():

ssize_t MediaMuxer::addTrack(const sp<AMessage> &format) {
    Mutex::Autolock autoLock(mMuxerLock);

    if (format.get() == NULL) {
        ALOGE("addTrack() get a null format");
        return -EINVAL;
    }
    if (mState != INITIALIZED) {
        ALOGE("addTrack() must be called after constructor and before start().");
        return INVALID_OPERATION;
    }

    sp<MetaData> trackMeta = new MetaData;
    convertMessageToMetaData(format, trackMeta);

    sp<MediaAdapter> newTrack = new MediaAdapter(trackMeta);
    status_t result = mWriter->addSource(newTrack);
    if (result != OK) {
        return -1;
    }
    float captureFps = -1.0;
    if (format->findAsFloat("time-lapse-fps", &captureFps)) {
        ALOGV("addTrack() time-lapse-fps: %f", captureFps);
        result = mWriter->setCaptureRate(captureFps);
        if (result != OK) {
            ALOGW("addTrack() setCaptureRate failed :%d", result);
        }
    }
    return mTrackList.add(newTrack);
}

start

Under the hood this starts the writer's own thread, which performs the actual file writing:

status_t MediaMuxer::start() {
Mutex::Autolock autoLock(mMuxerLock);
if (mState == INITIALIZED) {
mState = STARTED;
mFileMeta->setInt32(kKeyRealTimeRecording, false);
return mWriter->start(mFileMeta.get());
} else {
ALOGE("start() is called in invalid state %d", mState);
return INVALID_OPERATION;
}
}

writeSampleData

Looks up the MediaAdapter for trackIndex and pushes the sample into it; the thread inside MediaWriter then reads the data back out of the MediaAdapter and writes it to the file. MediaAdapter implements a producer-consumer model:

status_t MediaMuxer::writeSampleData(const sp<ABuffer> &buffer, size_t trackIndex,
        int64_t timeUs, uint32_t flags) {
    Mutex::Autolock autoLock(mMuxerLock);

    if (buffer.get() == NULL) {
        ALOGE("WriteSampleData() get an NULL buffer.");
        return -EINVAL;
    }
    if (mState != STARTED) {
        ALOGE("WriteSampleData() is called in invalid state %d", mState);
        return INVALID_OPERATION;
    }
    if (trackIndex >= mTrackList.size()) {
        ALOGE("WriteSampleData() get an invalid index %zu", trackIndex);
        return -EINVAL;
    }

    MediaBuffer* mediaBuffer = new MediaBuffer(buffer);

    mediaBuffer->add_ref(); // Released in MediaAdapter::signalBufferReturned().
    mediaBuffer->set_range(buffer->offset(), buffer->size());

    MetaDataBase &sampleMetaData = mediaBuffer->meta_data();
    sampleMetaData.setInt64(kKeyTime, timeUs);
    // Just set the kKeyDecodingTime as the presentation time for now.
    sampleMetaData.setInt64(kKeyDecodingTime, timeUs);

    if (flags & MediaCodec::BUFFER_FLAG_SYNCFRAME) {
        sampleMetaData.setInt32(kKeyIsSyncFrame, true);
    }
    if (flags & MediaCodec::BUFFER_FLAG_MUXER_DATA) {
        sampleMetaData.setInt32(kKeyIsMuxerData, 1);
    }
    if (flags & MediaCodec::BUFFER_FLAG_EOS) {
        sampleMetaData.setInt32(kKeyIsEndOfStream, 1);
        ALOGV("BUFFER_FLAG_EOS");
    }

    sp<MediaAdapter> currentTrack = mTrackList[trackIndex];
    // This pushBuffer will wait until the mediaBuffer is consumed.
    return currentTrack->pushBuffer(mediaBuffer);
}
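
The key behavior is that pushBuffer() blocks until the writer thread has consumed the sample. As an illustration of that hand-off only (a standalone sketch, not the AOSP MediaAdapter code), the single-slot producer-consumer core looks roughly like this:

#include <condition_variable>
#include <mutex>

// Illustrative single-slot hand-off: the muxer thread pushes a buffer and
// blocks until the writer thread has consumed it, which is why
// writeSampleData() can block the caller.
class SingleSlot {
    std::mutex mLock;
    std::condition_variable mCond;
    const void *mBuffer = nullptr;
public:
    void push(const void *buf) {              // producer: writeSampleData path
        std::unique_lock<std::mutex> lock(mLock);
        mBuffer = buf;
        mCond.notify_all();
        mCond.wait(lock, [this] { return mBuffer == nullptr; });
    }
    const void *read() {                      // consumer: MediaWriter thread
        std::unique_lock<std::mutex> lock(mLock);
        mCond.wait(lock, [this] { return mBuffer != nullptr; });
        const void *buf = mBuffer;
        mBuffer = nullptr;
        mCond.notify_all();
        return buf;
    }
};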

stop

Stops reads and writes on every track's MediaAdapter and shuts down the thread inside MediaWriter:

status_t MediaMuxer::stop() {
Mutex::Autolock autoLock(mMuxerLock);
if (mState == STARTED) {
mState = STOPPED;
for (size_t i = 0; i < mTrackList.size(); i++) {
if (mTrackList[i]->stop() != OK) {
return INVALID_OPERATION;
}
}
status_t err = mWriter->stop();
if (err != OK) {
ALOGE("stop() err: %d", err);
}
return err;
} else {
ALOGE("stop() is called in invalid state %d", mState);
return INVALID_OPERATION;
}
}
