ESP8266代码

/**
 * Smart voice control of an LED via the Bemfa cloud (TCP), usable from
 * Tmall Genie, Xiaomi XiaoAI, Baidu XiaoDu and Google Assistant.
 * 2021-08-12 — QQ group: 566565915 — https://bemfa.com
 */
#include <ESP8266WiFi.h>
#include <WiFiManager.h>          // https://github.com/tzapu/WiFiManager

// FIX: these two #defines had been fused into the comments above them by a
// formatting accident, so server_ip / wifi_name were never defined.
#define server_ip   "bemfa.com"   // Bemfa cloud server address (default is fine)
#define server_port "8344"        // server port; 8344 is the TCP "maker cloud" port

//******************** user settings ********************//
#define wifi_name     "yuxuan"    // Wi-Fi SSID, case sensitive
#define wifi_password "yuxuan@+"  // Wi-Fi password
String UID   = "5e8480057c38c6294784d50620d34247";  // private user key from the Bemfa console
String TOPIC = "led002";                            // topic name created in the console
const int LED_Pin = D2;  // LED pin (NodeMCU naming; adjust for other ESP8266 boards)
//*******************************************************//

// Maximum receive buffer size in bytes
#define MAX_PACKETSIZE 512
// Heartbeat interval: 60 s
#define KEEPALIVEATIME 60*1000

// TCP client state (defaults are fine)
WiFiClient TCPclient;
String TcpClient_Buff = "";             // accumulates data received from the server
unsigned int  TcpClient_BuffIndex = 0;  // number of buffered bytes
unsigned long TcpClient_preTick   = 0;  // time of the last received byte
unsigned long preHeartTick        = 0;  // time of the last heartbeat
unsigned long preTCPStartTick     = 0;  // time of the last connection attempt
bool preTCPConnected = false;

// Forward declarations
void doWiFiTick();        // Wi-Fi connection upkeep
void startSTA();          // start STA-mode Wi-Fi connection
void doTCPClientTick();   // TCP upkeep: reconnect, receive, heartbeat
void startTCPClient();    // connect and subscribe to the topic
void sendtoTCPServer(String p);
void turnOnLed();         // LED control, definitions below
void turnOffLed();
/**
 * Send a string to the TCP server and restart the heartbeat timer.
 * If the client is not connected the message is dropped with a log line.
 */
void sendtoTCPServer(String p) {
  if (!TCPclient.connected()) {
    Serial.println("Client is not ready");  // FIX: message read "readly" (typo)
    return;
  }
  TCPclient.print(p);
  preHeartTick = millis();  // a heartbeat must go out at least every 60 s
}
void startTCPClient(){if(TCPclient.connect(server_ip, atoi(server_port))){Serial.print("\nConnected to server:");Serial.printf("%s:%d\r\n",server_ip,atoi(server_port));String tcpTemp="";  //初始化字符串tcpTemp = "cmd=1&uid="+UID+"&topic="+TOPIC+"\r\n"; //构建订阅指令sendtoTCPServer(tcpTemp); //发送订阅指令tcpTemp="";//清空/*//如果需要订阅多个主题,可发送  cmd=1&uid=xxxxxxxxxxxxxxxxxxxxxxx&topic=xxx1,xxx2,xxx3,xxx4\r\n教程:https://bbs.bemfa.com/64*/preTCPConnected = true;TCPclient.setNoDelay(true);}else{Serial.print("Failed connected to server:");Serial.println(server_ip);TCPclient.stop();preTCPConnected = false;}preTCPStartTick = millis();
}/**检查数据,发送心跳
*/
void doTCPClientTick(){//检查是否断开,断开后重连if(WiFi.status() != WL_CONNECTED) return;if (!TCPclient.connected()) {//断开重连if(preTCPConnected == true){preTCPConnected = false;preTCPStartTick = millis();Serial.println();Serial.println("TCP Client disconnected.");TCPclient.stop();}else if(millis() - preTCPStartTick > 1*1000)//重新连接startTCPClient();}else{if (TCPclient.available()) {//收数据char c =TCPclient.read();TcpClient_Buff +=c;TcpClient_BuffIndex++;TcpClient_preTick = millis();if(TcpClient_BuffIndex>=MAX_PACKETSIZE - 1){TcpClient_BuffIndex = MAX_PACKETSIZE-2;TcpClient_preTick = TcpClient_preTick - 200;}}if(millis() - preHeartTick >= KEEPALIVEATIME){//保持心跳preHeartTick = millis();Serial.println("--Keep alive:");sendtoTCPServer("ping\r\n"); //发送心跳,指令需\r\n结尾,详见接入文档介绍}}if((TcpClient_Buff.length() >= 1) && (millis() - TcpClient_preTick>=200)){TCPclient.flush();Serial.print("Rev string: ");TcpClient_Buff.trim(); //去掉首位空格Serial.println(TcpClient_Buff); //打印接收到的消息String getTopic = "";String getMsg = "";if(TcpClient_Buff.length() > 15){//注意TcpClient_Buff只是个字符串,在上面开头做了初始化 String TcpClient_Buff = "";//此时会收到推送的指令,指令大概为 cmd=2&uid=xxx&topic=light002&msg=offint topicIndex = TcpClient_Buff.indexOf("&topic=")+7; //c语言字符串查找,查找&topic=位置,并移动7位,不懂的可百度c语言字符串查找int msgIndex = TcpClient_Buff.indexOf("&msg=");//c语言字符串查找,查找&msg=位置getTopic = TcpClient_Buff.substring(topicIndex,msgIndex);//c语言字符串截取,截取到topic,不懂的可百度c语言字符串截取getMsg = TcpClient_Buff.substring(msgIndex+5);//c语言字符串截取,截取到消息Serial.print("topic:------");Serial.println(getTopic); //打印截取到的主题值Serial.print("msg:--------");Serial.println(getMsg);   //打印截取到的消息值}if(getMsg  == "on"){       //如果是消息==打开turnOnLed();}else if(getMsg == "off"){ //如果是消息==关闭turnOffLed();}TcpClient_Buff="";TcpClient_BuffIndex = 0;}
}
/** Start a station-mode Wi-Fi connection using the hard-coded credentials. */
void startSTA() {
  WiFi.disconnect();
  WiFi.mode(WIFI_STA);
  WiFi.begin(wifi_name, wifi_password);
}
/*WiFiTick检查是否需要初始化WiFi检查WiFi是否连接上,若连接成功启动TCP Client控制指示灯
*/
void doWiFiTick(){static bool startSTAFlag = false;static bool taskStarted = false;static uint32_t lastWiFiCheckTick = 0;if (!startSTAFlag) {startSTAFlag = true;startSTA();}//未连接1s重连if ( WiFi.status() != WL_CONNECTED ) {if (millis() - lastWiFiCheckTick > 1000) {lastWiFiCheckTick = millis();}}//连接成功建立else {if (taskStarted == false) {taskStarted = true;Serial.print("\r\nGet IP Address: ");Serial.println(WiFi.localIP());startTCPClient();}}
}
// Turn the lamp on by driving GPIO 2 and GPIO 0 LOW (active-low wiring).
// NOTE(review): writes raw pins 0/2 rather than the declared LED_Pin (D2);
// confirm which pin the LED is actually attached to.
void turnOnLed() {
  Serial.println("Turn ON...");
  digitalWrite(2, LOW);
  digitalWrite(0, LOW);
  Serial.println("Turn ON OVER");
}
// Turn the lamp off by driving GPIO 0 and GPIO 2 HIGH (active-low wiring).
void turnOffLed() {
  Serial.println("Turn OFF...");
  digitalWrite(0, HIGH);
  digitalWrite(2, HIGH);
  Serial.println("Turn OFF over");
}
void setup() {Serial.begin(115200);WiFi.mode(WIFI_STA);WiFiManager wm;bool res;res = wm.autoConnect("yuxuanLED", "password");if (!res) {Serial.println("Failed to connect");ESP.restart();}else {   Serial.println("connected...yeey :");//Blinker.begin(auth, wm.getWiFiSSID().c_str(), wm.getWiFiPass().c_str());pinMode(0, OUTPUT);pinMode(2, OUTPUT);digitalWrite(0, LOW);digitalWrite(2, LOW);startTCPClient();Serial.println("Beginning...");}
}//循环
void loop() {//doWiFiTick();doTCPClientTick();
}

手机手势识别控制灯的开关代码

import cv2
import math
import socket
import android
import threading
# import tensorflow as tf
import sys  # FIX: this import had been fused into the comment above and never ran
import numpy as np
from blazeface import *
from cvs import *
import aidlite_gpu

# AidLite inference runtime and the Android scripting bridge; announce
# startup via text-to-speech.
aidlite = aidlite_gpu.aidlite()
droid = android.Android()
droid.ttsSpeak('欢迎使用雨轩手势控制系统')
def preprocess_image_for_tflite32(image, model_image_size=300):
    """Resize a BGR frame and normalize it to [-1, 1] float32, shape (1, s, s, 3)."""
    print(type(image))
    print(image.shape)
    image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
    image = cv2.resize(image, (model_image_size, model_image_size))
    image = np.expand_dims(image, axis=0)
    image = (2.0 / 255.0) * image - 1.0
    return image.astype('float32')


def plot_detections(img, detections, with_keypoints=True):
    """Turn up to two normalized palm detections into square pixel boxes.

    Each detection row is (ymin, xmin, ymax, xmax) in 0..1. The box is made
    square, scaled by 224/128 for the landmark model, and shifted up by
    0.18*h. Returns four 2-element lists: x_min, y_min, x_max, y_max.
    """
    print(img.shape)
    x_min, x_max = [0, 0], [0, 0]
    y_min, y_max = [0, 0], [0, 0]
    hand_nums = len(detections)
    print("Found %d hands" % hand_nums)
    if hand_nums > 2:
        hand_nums = 2
    for i in range(hand_nums):
        ymin = detections[i][0] * img.shape[0]
        xmin = detections[i][1] * img.shape[1]
        ymax = detections[i][2] * img.shape[0]
        xmax = detections[i][3] * img.shape[1]
        # Square side: the larger of width/height, grown for the 224-px crop.
        side = max(int(ymax - ymin), int(xmax - xmin)) * 224. / 128.
        cx = (xmin + xmax) / 2.
        cy = (ymin + ymax) / 2.
        xmin = cx - side / 2.
        xmax = cx + side / 2.
        ymin = cy - side / 2. - 0.18 * side  # shift box upward toward the fingers
        ymax = cy + side / 2. - 0.18 * side
        x_min[i], y_min[i] = int(xmin), int(ymin)
        x_max[i], y_max[i] = int(xmax), int(ymax)
    return x_min, y_min, x_max, y_max


def draw_mesh(image, mesh, mark_size=4, line_width=1):
    """Draw the mesh on an image (mesh is normalized; scaled by image height)."""
    image_size = image.shape[0]
    mesh = mesh * image_size
    for point in mesh:
        cv2.circle(image, (point[0], point[1]), mark_size, (255, 0, 0), 4)
def calc_palm_moment(image, landmarks):
    """Centroid (cx, cy) of the palm in pixels, from the wrist points and the
    four finger-base joints via image moments.

    landmarks: iterable of normalized (x, y, ...) points in 0..1.
    Returns (0, 0) when the moment area is zero.
    """
    image_width, image_height = image.shape[1], image.shape[0]
    palm_array = np.empty((0, 2), int)
    for index, landmark in enumerate(landmarks):
        landmark_x = min(int(landmark[0] * image_width), image_width - 1)
        landmark_y = min(int(landmark[1] * image_height), image_height - 1)
        # Wrist (0, 1) and the base joints of index/middle/ring/little fingers.
        if index in (0, 1, 5, 9, 13, 17):
            palm_array = np.append(palm_array, [np.array((landmark_x, landmark_y))], axis=0)
    M = cv2.moments(palm_array)
    cx, cy = 0, 0
    if M['m00'] != 0:
        cx = int(M['m10'] / M['m00'])
        cy = int(M['m01'] / M['m00'])
    return cx, cy


def calc_bounding_rect(image, landmarks):
    """Axis-aligned pixel bounding box [x1, y1, x2, y2] of normalized landmarks."""
    image_width, image_height = image.shape[1], image.shape[0]
    landmark_array = np.empty((0, 2), int)
    for _, landmark in enumerate(landmarks):
        landmark_x = min(int(landmark[0] * image_width), image_width - 1)
        # FIX: the original used landmark[0] (the x coordinate) here, so every
        # box collapsed onto the diagonal; y must come from landmark[1].
        landmark_y = min(int(landmark[1] * image_height), image_height - 1)
        landmark_array = np.append(landmark_array, [np.array((landmark_x, landmark_y))], axis=0)
    x, y, w, h = cv2.boundingRect(landmark_array)
    return [x, y, x + w, y + h]
def draw_landmarks(image, cx, cy, landmarks):
    """Draw the 21 hand landmarks and skeleton, count raised fingers, and
    send an on/off command when the count changes state.

    Side effects: mutates the globals `num` (finger count) and `switchState`
    ('on'/'off'), and calls sendCmd() on a state transition.
    1 raised finger -> 'off'; 4 raised fingers -> 'on'.
    """
    global num, switchState
    image_width, image_height = image.shape[1], image.shape[0]
    landmark_point = []
    handNums = {}
    zj = [8, 12, 16, 20]  # fingertip indices: index/middle/ring/little

    for index, landmark in enumerate(landmarks):
        landmark_x = min(int(landmark[0] * image_width), image_width - 1)
        landmark_y = min(int(landmark[1] * image_height), image_height - 1)
        landmark_point.append((landmark_x, landmark_y))
        # Label every keypoint with its index.
        cv2.putText(image, str(index), (landmark_x, landmark_y - 5),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 1)
        handNums[index] = {"x": landmark_x, "y": landmark_y}

    if len(landmark_point) > 0:
        green = (0, 255, 0)
        # Thumb
        cv2.line(image, landmark_point[2], landmark_point[3], green, 2)
        cv2.line(image, landmark_point[3], landmark_point[4], green, 2)
        # Index finger
        cv2.line(image, landmark_point[5], landmark_point[6], green, 2)
        cv2.line(image, landmark_point[6], landmark_point[7], green, 2)
        cv2.line(image, landmark_point[7], landmark_point[8], green, 2)
        # Middle finger
        cv2.line(image, landmark_point[9], landmark_point[10], green, 2)
        cv2.line(image, landmark_point[10], landmark_point[11], green, 2)
        cv2.line(image, landmark_point[11], landmark_point[12], green, 2)
        # Ring finger
        cv2.line(image, landmark_point[13], landmark_point[14], green, 2)
        cv2.line(image, landmark_point[14], landmark_point[15], green, 2)
        cv2.line(image, landmark_point[15], landmark_point[16], green, 2)
        # Little finger
        cv2.line(image, landmark_point[17], landmark_point[18], green, 2)
        cv2.line(image, landmark_point[18], landmark_point[19], green, 2)
        cv2.line(image, landmark_point[19], landmark_point[20], green, 2)
        # Palm outline
        cv2.line(image, landmark_point[0], landmark_point[1], green, 2)
        cv2.line(image, landmark_point[1], landmark_point[2], green, 2)
        cv2.line(image, landmark_point[2], landmark_point[5], green, 2)
        cv2.line(image, landmark_point[5], landmark_point[9], green, 2)
        cv2.line(image, landmark_point[9], landmark_point[13], green, 2)
        cv2.line(image, landmark_point[13], landmark_point[17], green, 2)
        cv2.line(image, landmark_point[17], landmark_point[0], green, 2)

        # A finger counts as raised when its tip is above the joint below it.
        num = 0
        for z in zj:
            if handNums[z]["y"] < handNums[z - 1]["y"]:
                num += 1
        cv2.putText(image, str(num), (50, 30),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 255, 255), 1)
        if num == 1 and switchState != 'off':
            switchState = 'off'
            cv2.putText(image, 'off', (30, 20),
                        cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 255, 255), 1)
            sendCmd('off')
        elif num == 4 and switchState != 'on':
            switchState = 'on'
            cv2.putText(image, 'on', (30, 20),
                        cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 255, 255), 1)
            sendCmd('on')

    # Mark the palm centroid.
    if len(landmark_point) > 0:
        cv2.circle(image, (cx, cy), 12, (0, 255, 0), 2)
    return image
def sendCmd(cmd):
    """Publish an on/off command for topic led002 to the Bemfa cloud over TCP.

    cmd: message payload, e.g. 'on' or 'off'.
    Best-effort: failures are logged and swallowed.
    """
    server_ip = 'bemfa.com'
    server_port = 8344
    tcp_client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        tcp_client_socket.connect((server_ip, server_port))
        # cmd=2 publishes a message to the topic; commands must end in \r\n.
        substr = 'cmd=2&uid=5e8480057c38c6294784d50620d34247&topic=led002&msg=' + cmd + '\r\n'
        tcp_client_socket.send(substr.encode("utf-8"))
    except Exception:  # was a bare except; keep best-effort but don't trap SystemExit
        print("连接失败")
    finally:
        tcp_client_socket.close()  # FIX: the original never closed the socket


# Input size of the palm-detection model
input_shape = [128, 128]
# Model I/O buffer sizes in bytes (float32 = 4 bytes each).
inShape = [1 * 128 * 128 * 3 * 4, ]
outShape = [1 * 896 * 18 * 4, 1 * 896 * 1 * 4]

# Slot 0: palm detector; slot 1: hand landmark model.
model_path = "models/palm_detection.tflite"
print('gpu:', aidlite.FAST_ANNModel(model_path, inShape, outShape, 4, 0))

model_path = "models/hand_landmark.tflite"
aidlite.set_g_index(1)
inShape1 = [1 * 224 * 224 * 3 * 4, ]
outShape1 = [1 * 63 * 4, 1 * 4, 1 * 4]
print('cpu:', aidlite.FAST_ANNModel(model_path, inShape1, outShape1, 4, 0))

# SSD anchor boxes for decoding the palm detector output.
anchors = np.load('models/anchors.npy').astype(np.float32)

camid = 0
cap = cvs.VideoCapture(camid)
bHand = False
x_min = [0, 0]
x_max = [0, 0]
y_min = [0, 0]
y_max = [0, 0]
fface = 0.0
use_brect = True
num = 0          # current raised-finger count (updated by draw_landmarks)
switchState = '' # last command sent: '', 'on' or 'off'

while True:
    frame = cvs.read()
    if frame is None:
        continue
    if camid == 1:
        # Mirror the front camera.
        frame = cv2.flip(frame, 1)
    start_time = time.time()
    img = preprocess_image_for_tflite32(frame, 128)

    # Stage 1: palm detection (only when no hand is being tracked).
    if bHand == False:
        aidlite.set_g_index(0)
        aidlite.setTensor_Fp32(img, input_shape[1], input_shape[1])
        aidlite.invoke()
        raw_boxes = aidlite.getTensor_Fp32(0)
        classificators = aidlite.getTensor_Fp32(1)
        detections = blazeface(raw_boxes, classificators, anchors)
        x_min, y_min, x_max, y_max = plot_detections(frame, detections[0])
        if len(detections[0]) > 0:
            bHand = True

    # Stage 2: landmark regression on each detected hand crop (max two).
    if bHand:
        hand_nums = len(detections[0])
        if hand_nums > 2:
            hand_nums = 2
        for i in range(hand_nums):
            print(x_min, y_min, x_max, y_max)
            xmin = max(0, x_min[i])
            ymin = max(0, y_min[i])
            xmax = min(frame.shape[1], x_max[i])
            ymax = min(frame.shape[0], y_max[i])
            roi_ori = frame[ymin:ymax, xmin:xmax]
            roi = preprocess_image_for_tflite32(roi_ori, 224)
            aidlite.set_g_index(1)
            aidlite.setTensor_Fp32(roi, 224, 224)
            aidlite.invoke()
            mesh = aidlite.getTensor_Fp32(0)
            bHand = False  # re-run detection on the next frame
            mesh = mesh.reshape(21, 3) / 224
            cx, cy = calc_palm_moment(roi_ori, mesh)
            draw_landmarks(roi_ori, cx, cy, mesh)
            frame[ymin:ymax, xmin:xmax] = roi_ori

    t = (time.time() - start_time)
    lbs = 'Fps: ' + str(int(100 / t) / 100.) + " ~~ Time:" + str(t * 1000) + "ms"
    cvs.setLbs(lbs)
    cvs.imshow(frame)
    sleep(1)

import apkneed  # NOTE(review): unreachable after the infinite loop; presumably an AidLux packaging hook

天猫精灵配置巴法云

打开天猫精灵app,在底部找到“内容”->“精灵技能”->搜索“巴法云”,找到巴法云技能,点击“巴法云”进入技能(直接点这三个字,不要点“尝试”),绑定账号。如果巴法云控制台有创建设备,在天猫精灵“我家”中就可以看到设备了。
在技能中心搜索:巴法云。找到巴法云技能,点击“巴法云”进入技能(直接点这三个字,不要点“尝试”),绑定账号,登录上一步注册的巴法云账号。

ESP8266 连接巴法云相关推荐

  1. 【基于Arduino IDE平台开发ESP8266连接巴法云】

    Arduino教程传送门

  2. STM32F103C8T6+ESP8266WIFI+DHT11模块连接巴法云

    STM32F103C8T6通过ESP8266模块连接巴法云 一.硬件介绍 二.准备工作 三.STM32主要代码介绍 一.硬件介绍 1.MCU:STM32F103C8T6 2.ESP8266:正点原子W ...

  3. NBIOT模块连接巴法云实践(SIM7020)

    使用NBIOTSIM7020模块连接巴法云并进行数据通信 一.前期准备 1.巴法云新建主题(这里使用TCP协议,因此创建TCP设备云) 2.模块 使用前焊接好模块天线 3.SIM卡(物联专用卡) 4. ...

  4. lua语言开发esp8266接入巴法云,mqtt和tcp协议

    第一步,lua语言开发环境配置 下载安装java环境,下载地址:点击下载 选择windows版本下载安装即可,如图所示: 下载开发环境包: 下载地址:点击下载 esp8266串口驱动:点击下载 下载后 ...

  5. 【ESP 保姆级教程】疯狂毕设篇 —— 案例:基于ESP8266和巴法云的教室灯光人工控制、定时控制系统(完整设计思路与细节)

    忘记过去,超越自己 ❤️ 博客主页 单片机菜鸟哥,一个野生非专业硬件IOT爱好者 ❤️ ❤️ 本篇创建记录 2023-03-10 ❤️ ❤️ 本篇更新记录 2022-03-11 ❤️

  6. esp8266 TCP接入巴法云物联网云平台

    文章目录 前言 (一)注册添加TCP设备 (1)注册巴法云账号 (2)创建主题 (二)订阅发布主题 (1)TCP设备相关指令 (2)调试 (三)ESP8266接入巴法云 前言 巴法云物联网云平台,就如 ...

  7. 巴法云 使用 esp01s wifi模块 做一个网络智能开关

    巴法云 使用 esp01s wifi模块 做一个网络智能开关 前言 一:配置arduino IDE,使其支持ESP8266编程开发 二:esp01s接线问题 1 下载模式 2 运行模式 三:开发环境测 ...

  8. 【巴法云】开源安卓App控制ESP8266,通过MQTT协议,APP Inventor 开发

    [巴法云]APP Inventor 开发安卓app,通过MQTT控制ESP8266 第一 下载ESP8266示例(arduino ide 编程开发) 第二 修改demo例程 第三 app invent ...

  9. uniapp 开发移动端对接巴法云物联网平台控制ESP8266开关灯

    巴法云物联网平台的MQTT接入只有说明文档,没有移动端实例.经过多次连接测试,使用uniapp开发的移动端终于成功连接服务器. 手机端效果图 uniapp 代码(app, 小程序): <temp ...

最新文章

  1. sftp进入指定目录_CentOS7服务搭建----搭建SFTP(安全文件传送协议)服务器
  2. facade 提供一个接口,通过这个接口,可以使一个子系统更容易使用。
  3. hadoop-1.2.0集群安装与配置
  4. 开关电源雷击浪涌整改_大佬多年经验总结,开关电源EMI整改策略
  5. unity hub添加找不到文件夹_教师资格证报名网站,IE浏览器“兼容性站点”添加方法。找不到按钮怎么办?...
  6. Sql语句里的递归查询
  7. 机器学习里面的树形模型
  8. 子进程 已安装 post-installation 脚本 返回错误状态 4
  9. BugkuCTF-MISC题猫片
  10. 微服务教程--什么是 Nacos
  11. asp.net 初步入门使用正则抓取网页信息
  12. 一文带你认识队列数据结构
  13. php include_once 路径,php使用include加密路径的方法介绍
  14. android 获取微信二维码 DiffDevOAuth.auth
  15. C#实现Zip文件解压
  16. Tensorflow XLA
  17. JQuery Validate(1)---电话号码与邮箱验证
  18. scanf(%*s)
  19. 如何用Python获取基金行情并分析年度表现优异基金,解锁赚钱秘密?
  20. excel操作word,替换文本

热门文章

  1. 在linux关闭的命令,Linux系统关闭或重新启动主机的命令详解
  2. Unity项目中不显示手柄
  3. 工业机器人专项检测技术——电磁兼容检测
  4. UVA - 11825 ——Hackers' Crackdown (状压DP)
  5. 7-3 马会飞 (15分)
  6. 欠驱动Or全驱动?——关于机械手驱动方式的介绍与更优选择(以德国Schunk Hand为例)
  7. python自动化办公、Excel数据处理方法总结
  8. OSG显示状态时字体模糊问题
  9. 远程控制手机时,解决隐私屏黑屏的问题
  10. 北京师范大学珠海分校课件资源库