Alright, time for another source-code build-and-run log ← ←


I. Basic environment

According to the authors' Readme, Python 2.7 is required; tensorflow and everything else below was installed inside a virtual environment created with Anaconda. This post targets the OpenCV 3.x version! The OpenCV 3.x-specific modifications are in Section IV; everything before that applies to both versions.

II. Prerequisites for building ORB-SLAM2

Since this project is built on top of ORB-SLAM2, its prerequisites have to be met first: a C++11 or C++0x compiler, Pangolin, OpenCV and Eigen3. If you are not sure how to install them, search for instructions separately. When DynaSLAM was first released it only supported OpenCV 2.4.11, but in 2019 a kind contributor submitted code that supports OpenCV 3.x; more on that below. I installed OpenCV 3.4.5.
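For the pieces that can come from apt, something like the following should cover the common build tools and Eigen3 (Pangolin is usually built from source following its own README). The package names are the usual Ubuntu ones and may differ on other distributions:

sudo apt-get install build-essential cmake git libglew-dev libeigen3-dev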

III. Other libraries to install before building DynaSLAM

Following the Readme of the open-source project:

1. Install the boost library

sudo apt-get install libboost-all-dev

2. Download the DynaSLAM source and add the h5 file

git clone https://github.com/BertaBescos/DynaSLAM.git
Then download the h5 weights file (mask_rcnn_coco.h5) from https://github.com/matterport/Mask_RCNN/releases and save it to DynaSLAM/src/python/.
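For example, assuming the weights are still published as the mask_rcnn_coco.h5 asset of the v2.0 release (check the releases page if the asset has moved), a direct download could look like:

wget https://github.com/matterport/Mask_RCNN/releases/download/v2.0/mask_rcnn_coco.h5 -P DynaSLAM/src/python/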

3. Python environment

First create and activate a new virtual environment in Anaconda, then install tensorflow and keras inside it:

conda create -n MaskRCNN python=2.7
conda activate MaskRCNN
pip install tensorflow==1.14.0  # or: pip install tensorflow-gpu==1.14.0
pip install keras==2.0.9

Once the steps above are done, the Python environment is more or less ready. Test it with:

cd DynaSLAM
python src/python/Check.py

If the output is "Mask R-CNN is correctly working", you can move on to the next step. Of course, things are rarely that smooth, so let's fix the problems one by one. I ran into two issues here:

3.1 scikit-image is not installed

sudo pip install scikit-image

3.2 Errors related to pycocotools

Note: this must be built under Python 2.7 (this cocoapi only supports Python 2)! Otherwise Check.py will fail with an error about _mask not being found, because a Python 3 build does not produce the _mask.so file.

git clone https://github.com/waleedka/coco
cd coco
python PythonAPI/setup.py build_ext install

After running the commands above, copy the entire pycocotools folder into DynaSLAM/src/python/.
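For example, assuming the coco repository was cloned next to DynaSLAM and the built pycocotools folder (containing _mask.so) ended up under coco/PythonAPI/:

cp -r coco/PythonAPI/pycocotools DynaSLAM/src/python/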

IV. Modify parts of the DynaSLAM source

Huge thanks to this contributor; the original changes are here: Pushyami_dev. Follow the link if you want to see exactly which lines were added and removed.
The submitted code mainly adapts the project to OpenCV 3. On top of those changes, remove -march=native from CMakeLists.txt (otherwise you get a segmentation fault), and remember to remove it from the one in Thirdparty/DBoW2 as well. The modifications touch the files listed below; just copy each file whole into the corresponding folder, and adjust the OpenCV 3.x version to match your own install.

  • CMakeLists.txt
  • Thirdparty/DBoW2/CMakeLists.txt
  • include/Conversion.h
  • src/Conversion.cc

1. CMakeLists.txt

cmake_minimum_required(VERSION 2.8)
project(DynaSLAM)

IF(NOT CMAKE_BUILD_TYPE)
  SET(CMAKE_BUILD_TYPE Release)
# SET(CMAKE_BUILD_TYPE Debug)
ENDIF()

MESSAGE("Build type: " ${CMAKE_BUILD_TYPE})

#set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS}  -Wall  -O3 -march=native ")
#set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall   -O3 -march=native")
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS}  -Wall  -O3  ")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall   -O3 ")

# This is required if opencv is built from source locally
#SET(OpenCV_DIR "~/opencv/build")

# set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS}  -Wall  -O0 -march=native ")
# set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall   -O0 -march=native")

# Check C++11 or C++0x support
include(CheckCXXCompilerFlag)
CHECK_CXX_COMPILER_FLAG("-std=c++11" COMPILER_SUPPORTS_CXX11)
CHECK_CXX_COMPILER_FLAG("-std=c++0x" COMPILER_SUPPORTS_CXX0X)
if(COMPILER_SUPPORTS_CXX11)
  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11")
  add_definitions(-DCOMPILEDWITHC11)
  message(STATUS "Using flag -std=c++11.")
elseif(COMPILER_SUPPORTS_CXX0X)
  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++0x")
  add_definitions(-DCOMPILEDWITHC0X)
  message(STATUS "Using flag -std=c++0x.")
else()
  message(FATAL_ERROR "The compiler ${CMAKE_CXX_COMPILER} has no C++11 support. Please use a different C++ compiler.")
endif()

LIST(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/cmake_modules)

set(Python_ADDITIONAL_VERSIONS "2.7")
#This is to avoid detecting python 3
find_package(PythonLibs 2.7 EXACT REQUIRED)
if (NOT PythonLibs_FOUND)
  message(FATAL_ERROR "PYTHON LIBS not found.")
else()
  message("PYTHON LIBS were found!")
  message("PYTHON LIBS DIRECTORY: " ${PYTHON_LIBRARY} ${PYTHON_INCLUDE_DIRS})
endif()

message("PROJECT_SOURCE_DIR: " ${OpenCV_DIR})
find_package(OpenCV 3.4 QUIET)
if(NOT OpenCV_FOUND)
  find_package(OpenCV 2.4 QUIET)
  if(NOT OpenCV_FOUND)
    message(FATAL_ERROR "OpenCV > 2.4.x not found.")
  endif()
endif()

find_package(Qt5Widgets REQUIRED)
find_package(Qt5Concurrent REQUIRED)
find_package(Qt5OpenGL REQUIRED)
find_package(Qt5Test REQUIRED)

find_package(Boost REQUIRED COMPONENTS thread)
if(Boost_FOUND)
  message("Boost was found!")
  message("Boost Headers DIRECTORY: " ${Boost_INCLUDE_DIRS})
  message("Boost LIBS DIRECTORY: " ${Boost_LIBRARY_DIRS})
  message("Found Libraries: " ${Boost_LIBRARIES})
endif()

find_package(Eigen3 3.1.0 REQUIRED)
find_package(Pangolin REQUIRED)

set(PYTHON_INCLUDE_DIRS ${PYTHON_INCLUDE_DIRS} /usr/local/lib/python2.7/dist-packages/numpy/core/include/numpy)

include_directories(
${PROJECT_SOURCE_DIR}
${PROJECT_SOURCE_DIR}/include
${EIGEN3_INCLUDE_DIR}
${Pangolin_INCLUDE_DIRS}
${PYTHON_INCLUDE_DIRS}
/usr/include/python2.7/
#/usr/lib/python2.7/dist-packages/numpy/core/include/numpy/
${Boost_INCLUDE_DIRS}
)

message("PROJECT_SOURCE_DIR: " ${PROJECT_SOURCE_DIR})

set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${PROJECT_SOURCE_DIR}/lib)

add_library(${PROJECT_NAME} SHARED
src/System.cc
src/Tracking.cc
src/LocalMapping.cc
src/LoopClosing.cc
src/ORBextractor.cc
src/ORBmatcher.cc
src/FrameDrawer.cc
src/Converter.cc
src/MapPoint.cc
src/KeyFrame.cc
src/Map.cc
src/MapDrawer.cc
src/Optimizer.cc
src/PnPsolver.cc
src/Frame.cc
src/KeyFrameDatabase.cc
src/Sim3Solver.cc
src/Initializer.cc
src/Viewer.cc
src/Conversion.cc
src/MaskNet.cc
src/Geometry.cc
)

target_link_libraries(${PROJECT_NAME}
${OpenCV_LIBS}
${EIGEN3_LIBS}
${Pangolin_LIBRARIES}
${PROJECT_SOURCE_DIR}/Thirdparty/DBoW2/lib/libDBoW2.so
${PROJECT_SOURCE_DIR}/Thirdparty/g2o/lib/libg2o.so
/usr/lib/x86_64-linux-gnu/libpython2.7.so
${Boost_LIBRARIES}
)

# Build examples

set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${PROJECT_SOURCE_DIR}/Examples/RGB-D)
add_executable(rgbd_tum
Examples/RGB-D/rgbd_tum.cc)
target_link_libraries(rgbd_tum ${PROJECT_NAME})

set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${PROJECT_SOURCE_DIR}/Examples/Stereo)
add_executable(stereo_kitti
Examples/Stereo/stereo_kitti.cc)
target_link_libraries(stereo_kitti ${PROJECT_NAME})

set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${PROJECT_SOURCE_DIR}/Examples/Monocular)
add_executable(mono_tum
Examples/Monocular/mono_tum.cc)
target_link_libraries(mono_tum ${PROJECT_NAME})

2. Thirdparty/DBoW2/CMakeLists.txt

cmake_minimum_required(VERSION 2.8)
project(DBoW2)

#set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS}  -Wall  -O3 -march=native ")
#set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall  -O3 -march=native")
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS}  -Wall  -O3 ")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall  -O3 ")

set(HDRS_DBOW2
  DBoW2/BowVector.h
  DBoW2/FORB.h
  DBoW2/FClass.h
  DBoW2/FeatureVector.h
  DBoW2/ScoringObject.h
  DBoW2/TemplatedVocabulary.h)
set(SRCS_DBOW2
  DBoW2/BowVector.cpp
  DBoW2/FORB.cpp
  DBoW2/FeatureVector.cpp
  DBoW2/ScoringObject.cpp)

set(HDRS_DUTILS
  DUtils/Random.h
  DUtils/Timestamp.h)
set(SRCS_DUTILS
  DUtils/Random.cpp
  DUtils/Timestamp.cpp)

find_package(OpenCV 3.4 QUIET)
if(NOT OpenCV_FOUND)
  find_package(OpenCV 2.4.3 QUIET)
  if(NOT OpenCV_FOUND)
    message(FATAL_ERROR "OpenCV > 2.4.3 not found.")
  endif()
endif()

set(LIBRARY_OUTPUT_PATH ${PROJECT_SOURCE_DIR}/lib)

include_directories(${OpenCV_INCLUDE_DIRS})
add_library(DBoW2 SHARED ${SRCS_DBOW2} ${SRCS_DUTILS})
target_link_libraries(DBoW2 ${OpenCV_LIBS})

3. include/Conversion.h

/**
* This file is part of DynaSLAM.
* Copyright (C) 2018 Berta Bescos <bbescos at unizar dot es> (University of Zaragoza)
* For more information see <https://github.com/bertabescos/DynaSLAM>.
*
*/

#ifndef CONVERSION_H_
#define CONVERSION_H_

#include <Python.h>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/core/core.hpp>
#include "numpy/ndarrayobject.h"
// #include "__multiarray_api.h"

#define NUMPY_IMPORT_ARRAY_RETVAL

namespace DynaSLAM
{

static PyObject* opencv_error = 0;
static int failmsg(const char *fmt, ...);

class PyAllowThreads;
class PyEnsureGIL;

#define ERRWRAP2(expr) \
try \
{ \
    PyAllowThreads allowThreads; \
    expr; \
} \
catch (const cv::Exception &e) \
{ \
    PyErr_SetString(opencv_error, e.what()); \
    return 0; \
}

static PyObject* failmsgp(const char *fmt, ...);

static size_t REFCOUNT_OFFSET = (size_t)&(((PyObject*)0)->ob_refcnt) +
    (0x12345678 != *(const size_t*)"\x78\x56\x34\x12\0\0\0\0\0")*sizeof(int);

static inline PyObject* pyObjectFromRefcount(const int* refcount)
{
    return (PyObject*)((size_t)refcount - REFCOUNT_OFFSET);
}

static inline int* refcountFromPyObject(const PyObject* obj)
{
    return (int*)((size_t)obj + REFCOUNT_OFFSET);
}

class NumpyAllocator;

enum { ARG_NONE = 0, ARG_MAT = 1, ARG_SCALAR = 2 };

class NDArrayConverter
{
private:
    void init();
public:
    NDArrayConverter();
    //cv::Mat toMat(const PyObject* o);   //issue bug
    cv::Mat toMat(PyObject* o);
    PyObject* toNDArray(const cv::Mat& mat);
};

}

#endif /* CONVERSION_H_ */

4. src/Conversion.cc

/**
* This file is part of DynaSLAM.
* Copyright (C) 2018 Berta Bescos <bbescos at unizar dot es> (University of Zaragoza)
* For more information see <https://github.com/bertabescos/DynaSLAM>.
*
*/

#include "Conversion.h"
#include <iostream>

namespace DynaSLAM
{

static void init()
{
    import_array();
}

static int failmsg(const char *fmt, ...)
{
    char str[1000];
    va_list ap;
    va_start(ap, fmt);
    vsnprintf(str, sizeof(str), fmt, ap);
    va_end(ap);
    PyErr_SetString(PyExc_TypeError, str);
    return 0;
}

class PyAllowThreads
{
public:
    PyAllowThreads() : _state(PyEval_SaveThread()) {}
    ~PyAllowThreads()
    {
        PyEval_RestoreThread(_state);
    }
private:
    PyThreadState* _state;
};

class PyEnsureGIL
{
public:
    PyEnsureGIL() : _state(PyGILState_Ensure()) {}
    ~PyEnsureGIL()
    {
        //std::cout << "releasing"<< std::endl;
        PyGILState_Release(_state);
    }
private:
    PyGILState_STATE _state;
};

using namespace cv;

static PyObject* failmsgp(const char *fmt, ...)
{
    char str[1000];
    va_list ap;
    va_start(ap, fmt);
    vsnprintf(str, sizeof(str), fmt, ap);
    va_end(ap);
    PyErr_SetString(PyExc_TypeError, str);
    return 0;
}

class NumpyAllocator : public MatAllocator
{
public:
#if ( CV_MAJOR_VERSION < 3)
    NumpyAllocator() {}
    ~NumpyAllocator() {}

    void allocate(int dims, const int* sizes, int type, int*& refcount,
                  uchar*& datastart, uchar*& data, size_t* step)
    {
        //PyEnsureGIL gil;
        int depth = CV_MAT_DEPTH(type);
        int cn = CV_MAT_CN(type);
        const int f = (int)(sizeof(size_t)/8);
        int typenum = depth == CV_8U ? NPY_UBYTE : depth == CV_8S ? NPY_BYTE :
                      depth == CV_16U ? NPY_USHORT : depth == CV_16S ? NPY_SHORT :
                      depth == CV_32S ? NPY_INT : depth == CV_32F ? NPY_FLOAT :
                      depth == CV_64F ? NPY_DOUBLE : f*NPY_ULONGLONG + (f^1)*NPY_UINT;
        int i;
        npy_intp _sizes[CV_MAX_DIM+1];
        for( i = 0; i < dims; i++ )
        {
            _sizes[i] = sizes[i];
        }
        if( cn > 1 )
        {
            _sizes[dims++] = cn;
        }
        PyObject* o = PyArray_SimpleNew(dims, _sizes, typenum);
        if(!o)
        {
            CV_Error_(CV_StsError, ("The numpy array of typenum=%d, ndims=%d can not be created", typenum, dims));
        }
        refcount = refcountFromPyObject(o);
        npy_intp* _strides = PyArray_STRIDES(o);
        for( i = 0; i < dims - (cn > 1); i++ )
            step[i] = (size_t)_strides[i];
        datastart = data = (uchar*)PyArray_DATA(o);
    }

    void deallocate(int* refcount, uchar*, uchar*)
    {
        //PyEnsureGIL gil;
        if( !refcount )
            return;
        PyObject* o = pyObjectFromRefcount(refcount);
        Py_INCREF(o);
        Py_DECREF(o);
    }
#else
    NumpyAllocator() {stdAllocator = Mat::getStdAllocator();}
    ~NumpyAllocator() {}

    UMatData* allocate(PyObject* o, int dims, const int* sizes, int type,
                       size_t* step) const {
        UMatData* u = new UMatData(this);
        u->data = u->origdata = (uchar*) PyArray_DATA((PyArrayObject*) o);
        npy_intp* _strides = PyArray_STRIDES((PyArrayObject*) o);
        for (int i = 0; i < dims - 1; i++)
            step[i] = (size_t) _strides[i];
        step[dims - 1] = CV_ELEM_SIZE(type);
        u->size = sizes[0] * step[0];
        u->userdata = o;
        return u;
    }

    UMatData* allocate(int dims0, const int* sizes, int type, void* data,
                       size_t* step, int flags, UMatUsageFlags usageFlags) const {
        if (data != 0) {
            CV_Error(Error::StsAssert, "The data should normally be NULL!");
            // probably this is safe to do in such extreme case
            return stdAllocator->allocate(dims0, sizes, type, data, step, flags, usageFlags);
        }
        PyEnsureGIL gil;
        int depth = CV_MAT_DEPTH(type);
        int cn = CV_MAT_CN(type);
        const int f = (int) (sizeof(size_t) / 8);
        int typenum =
                depth == CV_8U ? NPY_UBYTE :
                depth == CV_8S ? NPY_BYTE :
                depth == CV_16U ? NPY_USHORT :
                depth == CV_16S ? NPY_SHORT :
                depth == CV_32S ? NPY_INT :
                depth == CV_32F ? NPY_FLOAT :
                depth == CV_64F ? NPY_DOUBLE :
                f * NPY_ULONGLONG + (f ^ 1) * NPY_UINT;
        int i, dims = dims0;
        cv::AutoBuffer<npy_intp> _sizes(dims + 1);
        for (i = 0; i < dims; i++)
            _sizes[i] = sizes[i];
        if (cn > 1)
            _sizes[dims++] = cn;
        PyObject* o = PyArray_SimpleNew(dims, _sizes, typenum);
        if (!o)
            CV_Error_(Error::StsError,
                      ("The numpy array of typenum=%d, ndims=%d can not be created", typenum, dims));
        return allocate(o, dims0, sizes, type, step);
    }

    bool allocate(UMatData* u, int accessFlags, UMatUsageFlags usageFlags) const {
        return stdAllocator->allocate(u, accessFlags, usageFlags);
    }

    void deallocate(UMatData* u) const {
        if (u) {
            PyEnsureGIL gil;
            PyObject* o = (PyObject*) u->userdata;
            Py_XDECREF(o);
            delete u;
        }
    }

    const MatAllocator* stdAllocator;
#endif
};

NumpyAllocator g_numpyAllocator;

NDArrayConverter::NDArrayConverter() { init(); }

void NDArrayConverter::init()
{
    import_array();
}

cv::Mat NDArrayConverter::toMat( PyObject *o)
{
    cv::Mat m;
    if(!o || o == Py_None)
    {
        if( !m.data )
            m.allocator = &g_numpyAllocator;
    }
    if( !PyArray_Check(o) )
    {
        failmsg("toMat: Object is not a numpy array");
    }

    int typenum = PyArray_TYPE(o);
    int type = typenum == NPY_UBYTE ? CV_8U : typenum == NPY_BYTE ? CV_8S :
               typenum == NPY_USHORT ? CV_16U : typenum == NPY_SHORT ? CV_16S :
               typenum == NPY_INT || typenum == NPY_LONG ? CV_32S :
               typenum == NPY_FLOAT ? CV_32F :
               typenum == NPY_DOUBLE ? CV_64F : -1;
    if( type < 0 )
    {
        failmsg("toMat: Data type = %d is not supported", typenum);
    }

    int ndims = PyArray_NDIM(o);
    if(ndims >= CV_MAX_DIM)
    {
        failmsg("toMat: Dimensionality (=%d) is too high", ndims);
    }

    int size[CV_MAX_DIM+1];
    size_t step[CV_MAX_DIM+1], elemsize = CV_ELEM_SIZE1(type);
    const npy_intp* _sizes = PyArray_DIMS(o);
    const npy_intp* _strides = PyArray_STRIDES(o);
    bool transposed = false;
    for(int i = 0; i < ndims; i++)
    {
        size[i] = (int)_sizes[i];
        step[i] = (size_t)_strides[i];
    }
    if( ndims == 0 || step[ndims-1] > elemsize ) {
        size[ndims] = 1;
        step[ndims] = elemsize;
        ndims++;
    }
    if( ndims >= 2 && step[0] < step[1] )
    {
        std::swap(size[0], size[1]);
        std::swap(step[0], step[1]);
        transposed = true;
    }
    if( ndims == 3 && size[2] <= CV_CN_MAX && step[1] == elemsize*size[2] )
    {
        ndims--;
        type |= CV_MAKETYPE(0, size[2]);
    }
    if( ndims > 2)
    {
        failmsg("toMat: Object has more than 2 dimensions");
    }

    m = Mat(ndims, size, type, PyArray_DATA(o), step);
    if( m.data )
    {
#if ( CV_MAJOR_VERSION < 3)
        m.refcount = refcountFromPyObject(o);
        m.addref(); // protect the original numpy array from deallocation
                    // (since Mat destructor will decrement the reference counter)
#else
        m.u = g_numpyAllocator.allocate(o, ndims, size, type, step);
        m.addref();
        Py_INCREF(o);
        //m.u->refcount = *refcountFromPyObject(o);
#endif
    };
    m.allocator = &g_numpyAllocator;

    if( transposed )
    {
        Mat tmp;
        tmp.allocator = &g_numpyAllocator;
        transpose(m, tmp);
        m = tmp;
    }
    return m;
}

PyObject* NDArrayConverter::toNDArray(const cv::Mat& m)
{
    if( !m.data )
        Py_RETURN_NONE;
    Mat temp;
    Mat *p = (Mat*)&m;
#if ( CV_MAJOR_VERSION < 3)
    if(!p->refcount || p->allocator != &g_numpyAllocator)
    {
        temp.allocator = &g_numpyAllocator;
        m.copyTo(temp);
        p = &temp;
    }
    p->addref();
    return pyObjectFromRefcount(p->refcount);
#else
    if(!p->u || p->allocator != &g_numpyAllocator)
    {
        temp.allocator = &g_numpyAllocator;
        m.copyTo(temp);
        p = &temp;
    }
    //p->addref();
    //return pyObjectFromRefcount(&p->u->refcount);
    PyObject* o = (PyObject*) p->u->userdata;
    Py_INCREF(o);
    return o;
#endif
}

}

V. Building and running

Build the DynaSLAM source:

cd DynaSLAM
chmod +x build.sh
./build.sh

If you leave out the last two arguments at runtime, it behaves just like plain ORB-SLAM2.
If you want the Mask R-CNN functionality but do not want to save the masks, pass no_save as PATH_MASK (see the example after the command below); otherwise pass the path of a folder where the masks should be stored.

./Examples/RGB-D/rgbd_tum Vocabulary/ORBvoc.txt Examples/RGB-D/TUM3.yaml /XXX/tum_dataset/ /XXX/tum_dataset/associations.txt masks/ output/
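For reference, a run that uses Mask R-CNN but skips saving the masks would just swap the mask folder for no_save and drop the output folder; a sketch with the same placeholder dataset paths as above:

./Examples/RGB-D/rgbd_tum Vocabulary/ORBvoc.txt Examples/RGB-D/TUM3.yaml /XXX/tum_dataset/ /XXX/tum_dataset/associations.txt no_save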

If Light Track keeps failing at runtime and the system cannot initialize, increase the number of ORB features in the settings file; on GitHub people usually set it to 3000.
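A minimal sketch of that change, assuming the standard ORB-SLAM2 parameter name inside Examples/RGB-D/TUM3.yaml:

# ORB Extractor: number of features per image (raise from the default 1000 if Light Track keeps failing)
ORBextractor.nFeatures: 3000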


That's a wrap, confetti~
