百度翻译

  • baidufanyi.py
import requests
import re
import json
import execjs

# Page that serves the anti-bot token, plus the two translation endpoints.
URL = 'https://fanyi.baidu.com/?aldtype=16047#zh/en/'
TRANSLATE_API = 'https://fanyi.baidu.com/v2transapi'
REALTRANSLATE_API = 'https://fanyi.baidu.com/transapi'

# Browser-like headers without a cookie (kept for the /transapi endpoint).
HEADERS = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36 SE 2.X MetaSr 1.0',
    'Accept': '*/*',
    'Accept-Language': 'zh-CN,zh;q=0.9',
    'Connection': 'keep-alive',
    'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
    'Host': 'fanyi.baidu.com',
    'Origin': 'https://fanyi.baidu.com',
    'Referer': 'https://fanyi.baidu.com/',
    'X-Requested-With': 'XMLHttpRequest',
}

# Same headers plus a captured session cookie; presumably /v2transapi needs
# the BAIDUID cookie — the cookie-less variant was abandoned (see history).
HEADERS2 = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36 SE 2.X MetaSr 1.0',
    'Accept': '*/*',
    'Accept-Language': 'zh-CN,zh;q=0.9',
    'Connection': 'keep-alive',
    'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
    'Host': 'fanyi.baidu.com',
    'Origin': 'https://fanyi.baidu.com',
    'Referer': 'https://fanyi.baidu.com/',
    'X-Requested-With': 'XMLHttpRequest',
    'Cookie': 'BAIDUID=BEA2658FC962DF6CA0C053E5690C1934:FG=1; locale=zh; REALTIME_TRANS_SWITCH=1; FANYI_WORD_SWITCH=1; HISTORY_SWITCH=1; SOUND_SPD_SWITCH=1; SOUND_PREFER_SWITCH=1; Hm_lvt_64ecd82404c51e03dc91cb9e8c025574=1540531940; Hm_lpvt_64ecd82404c51e03dc91cb9e8c025574=1540531984; from_lang_often=%5B%7B%22value%22%3A%22en%22%2C%22text%22%3A%22%u82F1%u8BED%22%7D%2C%7B%22value%22%3A%22zh%22%2C%22text%22%3A%22%u4E2D%u6587%22%7D%5D; to_lang_often=%5B%7B%22value%22%3A%22zh%22%2C%22text%22%3A%22%u4E2D%u6587%22%7D%2C%7B%22value%22%3A%22en%22%2C%22text%22%3A%22%u82F1%u8BED%22%7D%5D',
}

# Cookie tail without BAIDUID — kept so the module interface is unchanged;
# it was used by an abandoned code path that merged session cookies by hand.
Cookie = 'REALTIME_TRANS_SWITCH=1; FANYI_WORD_SWITCH=1; HISTORY_SWITCH=1; SOUND_SPD_SWITCH=1; SOUND_PREFER_SWITCH=1; Hm_lvt_64ecd82404c51e03dc91cb9e8c025574=1540531940; Hm_lpvt_64ecd82404c51e03dc91cb9e8c025574=1540531984; from_lang_often=%5B%7B%22value%22%3A%22en%22%2C%22text%22%3A%22%u82F1%u8BED%22%7D%2C%7B%22value%22%3A%22zh%22%2C%22text%22%3A%22%u4E2D%u6587%22%7D%5D; to_lang_often=%5B%7B%22value%22%3A%22zh%22%2C%22text%22%3A%22%u4E2D%u6587%22%7D%2C%7B%22value%22%3A%22en%22%2C%22text%22%3A%22%u82F1%u8BED%22%7D%5D'


class fanYi:
    """Minimal Baidu Translate scraper.

    Usage: call _set_words() with the text, then _translate() to print the
    first translation returned by /v2transapi.
    """

    def __init__(self):
        # One shared session so cookies set by the token page are reused
        # by the translation POST.
        self._session = requests.session()
        # Form template for /v2transapi; token and sign are filled per request.
        self._data = {
            'from': 'en',
            'to': 'zh',
            'query': '',
            'transtype': 'realtime',
            'simple_means_flag': '3',
            'sign': '',
            'token': '',
        }

    def _set_words(self, words):
        """Remember the text to translate."""
        self._words = words

    def _get_token(self):
        """Scrape the anti-CSRF token from the translate page into the form.

        Raises AttributeError if Baidu changed the page layout and the
        regexes no longer match.
        """
        response = self._session.get(URL, headers=HEADERS2)
        html = response.text
        li = re.search(r"<script>\s*window\[\'common\'\] = ([\s\S]*?)</script>", html)
        # FIX: this pattern was broken across two lines in the original
        # (a syntax error); rejoined into a single literal.
        token = re.search(r"token: \'([a-zA-Z0-9]+)\',", li.group(1))
        self._data['token'] = token.group(1)

    def _get_sign(self):
        """Compute the request signature by running Baidu's own JS helper."""
        # FIX: explicit encoding so the JS source reads identically on
        # every platform (default encoding varies, notably on Windows).
        with open('baidufanyi.js', encoding='utf-8') as f:
            js = f.read()
        sign = execjs.compile(js).call('e', self._words)
        self._data['sign'] = sign

    def _translate(self):
        """POST the filled form to /v2transapi and print the translation."""
        self._get_token()
        self._get_sign()
        self._data['query'] = self._words
        response = self._session.post(TRANSLATE_API, data=self._data, headers=HEADERS2)
        result = json.loads(response.content.decode('utf-8'))
        print(result['trans_result']['data'][0]['dst'])


if __name__ == "__main__":
    fanyi = fanYi()
    while True:
        fanyi._set_words(input())
        fanyi._translate()
  • baidufanyi.js
// Sign generator lifted from Baidu Translate's obfuscated front-end JS.
// The Python client loads this file via execjs and calls e(query) to get
// the `sign` form field for /v2transapi.

// Hard-coded gtk value (window.gtk as captured from the translate page).
var i = "320305.131321201"

// Minified Babel helper: copy an array-like into a real Array.
function a(r) {
    if (Array.isArray(r)) {
        for (var o = 0, t = Array(r.length); o < r.length; o++) t[o] = r[o];
        return t
    }
    return Array.from(r)
}

// Core mixing step: `o` is a string of 3-char opcodes; each opcode shifts
// `r` left/right and then adds (masked to 32 bits) or xors the result in.
function n(r, o) {
    for (var t = 0; t < o.length - 2; t += 3) {
        var a = o.charAt(t + 2);
        a = a >= "a" ? a.charCodeAt(0) - 87 : Number(a),
        a = "+" === o.charAt(t + 1) ? r >>> a : r << a,
        r = "+" === o.charAt(t) ? r + a & 4294967295 : r ^ a
    }
    return r
}

// Compute the signature "<p>.<p^m>" for the query string `r`.
function e(r) {
    // Long inputs are truncated to head + middle + tail, counting surrogate
    // pairs (non-BMP chars) as single units.
    var o = r.match(/[\uD800-\uDBFF][\uDC00-\uDFFF]/g);
    if (null === o) {
        var t = r.length;
        t > 30 && (r = "" + r.substr(0, 10) + r.substr(Math.floor(t / 2) - 5, 10) + r.substr( - 10, 10))
    } else {
        for (var e = r.split(/[\uD800-\uDBFF][\uDC00-\uDFFF]/), C = 0, h = e.length, f = []; h > C; C++)
            "" !== e[C] && f.push.apply(f, a(e[C].split(""))),
            C !== h - 1 && f.push(o[C]);
        var g = f.length;
        g > 30 && (r = f.slice(0, 10).join("") + f.slice(Math.floor(g / 2) - 5, Math.floor(g / 2) + 5).join("") + f.slice( - 10).join(""))
    }
    // l spells "gtk"; since i is non-null above, window is never touched.
    var u = void 0,
        l = "" + String.fromCharCode(103) + String.fromCharCode(116) + String.fromCharCode(107);
    u = null !== i ? i: (i = window[l] || "") || "";
    // m/s are the two halves of gtk; S becomes the UTF-8 bytes of r.
    for (var d = u.split("."), m = Number(d[0]) || 0, s = Number(d[1]) || 0, S = [], c = 0, v = 0; v < r.length; v++) {
        var A = r.charCodeAt(v);
        128 > A ? S[c++] = A: (2048 > A ? S[c++] = A >> 6 | 192 : (55296 === (64512 & A) && v + 1 < r.length && 56320 === (64512 & r.charCodeAt(v + 1)) ? (A = 65536 + ((1023 & A) << 10) + (1023 & r.charCodeAt(++v)), S[c++] = A >> 18 | 240, S[c++] = A >> 12 & 63 | 128) : S[c++] = A >> 12 | 224, S[c++] = A >> 6 & 63 | 128), S[c++] = 63 & A | 128)
    }
    // F spells "+-a^+6", D spells "+-3^+b+-f": the opcode strings for n().
    for (var p = m, F = "" + String.fromCharCode(43) + String.fromCharCode(45) + String.fromCharCode(97) + ("" + String.fromCharCode(94) + String.fromCharCode(43) + String.fromCharCode(54)), D = "" + String.fromCharCode(43) + String.fromCharCode(45) + String.fromCharCode(51) + ("" + String.fromCharCode(94) + String.fromCharCode(43) + String.fromCharCode(98)) + ("" + String.fromCharCode(43) + String.fromCharCode(45) + String.fromCharCode(102)), b = 0; b < S.length; b++)
        p += S[b],
        p = n(p, F);
    // Final mix, xor with the gtk low half, force non-negative, mod 1e6.
    return p = n(p, D),
    p ^= s,
    0 > p && (p = (2147483647 & p) + 2147483648),
    p %= 1e6,
    p.toString() + "." + (p ^ m)
}

有道翻译

单纯翻译:

__author__ = 'hugowen'
# -*- coding:utf-8 -*-
'''
[33]python Web 框架:Tornado
https://blog.csdn.net/xc_zhou/article/details/80637714
https://pypi.org/project/tornado/
'''
from urllib.parse import quote

from bs4 import BeautifulSoup
import tornado.httpclient


def is_chinese(uchar):
    """Return True if *uchar* lies in the CJK Unified Ideographs range.

    NOTE(review): the caller passes the whole query string, not a single
    character; lexicographic string comparison then effectively tests only
    the first character — confirm that is the intent.
    """
    return u'\u4e00' <= uchar <= u'\u9fa5'


if __name__ == "__main__":
    cli = tornado.httpclient.HTTPClient()
    search = input('search: ')
    # FIX: percent-encode the query so non-ASCII (Chinese) searches form a
    # valid URL instead of raw bytes in the request line.
    link = 'http://dict.youdao.com/search?q=' + quote(search)
    data = cli.fetch(link)
    body = data.body
    # FIX: name the parser explicitly (as the logging variant of this script
    # already does) instead of letting bs4 guess and emit a warning.
    soup = BeautifulSoup(body, "html.parser")
    group = soup.find_all(class_='trans-container')
    if is_chinese(search):
        # Chinese -> English: translations live in the first <p> of the pane.
        content = group[0].find('ul').find('p')
        print(content.find_all('span')[0].get_text())
        for ele in content.find_all(class_='contentTitle'):
            print(ele.find('a').get_text())
    else:
        # English -> Chinese: each <li> is one sense.
        content = group[0].find('ul').find_all('li')
        for ele in content:
            print(ele.get_text())

翻译并记录翻译日志: 输出到 words.md 文件中

__author__ = 'hugowen'
# -*- coding:utf-8 -*-
'''
[33]python Web 框架:Tornado
https://blog.csdn.net/xc_zhou/article/details/80637714
https://pypi.org/project/tornado/
'''
from urllib.parse import quote

from bs4 import BeautifulSoup
import tornado.httpclient


def is_chinese(uchar):
    """Return True if *uchar* lies in the CJK Unified Ideographs range.

    NOTE(review): translate() passes the whole query string, not a single
    character; lexicographic comparison then effectively tests only the
    first character — confirm that is the intent.
    """
    return u'\u4e00' <= uchar <= u'\u9fa5'


def translate(search):
    """Scrape dict.youdao.com for *search* and return a list of translations.

    For a Chinese query the English renderings are collected (and the
    pronunciation span is printed); for an English query each <li> sense
    is collected. Returns an empty list when the page has no result pane.
    """
    cli = tornado.httpclient.HTTPClient()
    # FIX: percent-encode the query so non-ASCII (Chinese) searches form a
    # valid URL instead of raw bytes in the request line.
    link = 'http://dict.youdao.com/search?q=' + quote(search)
    data = cli.fetch(link)
    body = data.body
    soup = BeautifulSoup(body, "html.parser")
    group = soup.find_all(class_='trans-container')
    result = []
    # FIX: guard against a missing result pane instead of IndexError.
    if not group:
        return result
    if is_chinese(search):
        content = group[0].find('ul').find('p')
        print(content.find_all('span')[0].get_text())
        for ele in content.find_all(class_='contentTitle'):
            result.append(ele.find('a').get_text())
    else:
        content = group[0].find('ul').find_all('li')
        for ele in content:
            result.append(ele.get_text())
    return result


if __name__ == "__main__":
    while True:
        search = input('search: ')
        if search in ['Q', 'q']:
            break
        result = translate(search)
        # FIX: explicit encoding so the markdown log is UTF-8 everywhere,
        # not whatever the platform default happens to be.
        with open('words.md', 'a', encoding='utf-8') as f:
            f.write('###  ▌' + search + '\n')
            for r in result:
                f.write('- [ ] ' + r + '\n')
                print('▌> ' + r)

python 百度翻译 有道翻译相关推荐

  1. Python 打造基于有道翻译的命令行翻译工具(命令行爱好者必备)

    Python 打造基于有道翻译的命令行翻译工具(命令行爱好者必备) 之前基于百度写了一篇博文 Python 打造基于百度翻译的命令行翻译工具,本来这工具用得挺好的.但是没想到,近期出于不知道啥原因,不 ...

  2. python实战爬虫有道翻译与解决{“errorCode”:50}反爬虫问题

    title: python实战爬虫有道翻译与解决有道翻译反爬虫问题 date: 2020-03-22 20:21:12 description: 最近在学Python,一直没有尝试过实战.于是昨天试了 ...

  3. python有道翻译接口-Python通过调用有道翻译api实现翻译功能示例

    本文实例讲述了Python通过调用有道翻译api实现翻译功能.分享给大家供大家参考,具体如下: 通过调用有道翻译的api,实现中译英.其他语言译中文 Python代码: # coding=utf-8 ...

  4. python爬虫模拟有道翻译

    python爬虫模拟有道翻译 案例目的: 通过模拟有道翻译,介绍携带form表单发送post请求以及破解form表单中的加密数据. 案例实现功能: 模拟有道翻译,实现中英互译. 爬取过程分析: 找到目 ...

  5. 基于python爬取有道翻译,并在线翻译

    基于python爬取有道翻译,并在线翻译 由于我也是爬虫新学者,有什么做的不对的请多加包涵 我们需要使用的库如下 from urllib import request import urllib im ...

  6. Python爬虫破解有道翻译

    有道翻译是以异步方式实现数据加载的,要实现对此类网站的数据抓取,其过程相对繁琐,本节我以有道翻译为例进行详细讲解. 通过控制台抓包,我们得知了 POST 请求的参数以及相应的参数值,如下所示: 图1: ...

  7. Python 编写一个有道翻译的 workflow 教程

    最近使用有道翻译的 workflow 总是翻译不了,可能是 appKey 失效了或者超过调用上限,所以打算自己实现一个. 创建 workflow 打开 Alfred3 的 Preferences,选择 ...

  8. 教你用Python爬虫自制有道翻译词典

    Python爬虫能够实现的功能有很多,就看你如何去使用它了.今天小千就来教大家如何去利用Python爬虫自制一个有道翻译词典. 首先打开有道翻译页面,尝试输入hello,就出现翻译了,可以自动翻译.有 ...

  9. python如何爬有道翻译_Python网络爬虫(八) - 利用有道词典实现一个简单翻译程序...

    目录: 1.爬虫前的分析 因为要实现有道翻译的翻译功能,就需要找到它的接口,打开审查元素,来到网络监听窗口(Network),查看API接口. 我们可以找到有道翻译的API接口,同时是以Post方式提 ...

最新文章

  1. centos yum mysql-devel 5.5_CentOS 6.5下yum安装 MySQL-5.5全过程图文教程
  2. 他们提出了一个大胆的猜想:GWT(深度学习)→通用人工智能
  3. 地表车神争霸赛,且看第16届大学生智能汽车竞赛
  4. b区计算机考研招不满的大学,b区考研招不满的大学,适合调剂的学校有哪些
  5. CTF-RSA共模攻击 和 非共模攻击解密脚本
  6. php mvc cms企业站,Phpcms V9程序目录结构及MVC简析
  7. 扩展WCF的消息分发行为
  8. SAP Commerce Cloud Storefront 框架选型:Accelerator 还是 Spartacus?
  9. Typescript学习笔记(五) 模块机制
  10. 利用微查询和数据锐化进行大数据探索
  11. Study From DevOps 学习交流会议
  12. C++学生信息管理系统1.0
  13. 写一手好字:硬笔书法轻松自学指南(知乎周刊 Plus)-读书笔记
  14. 通过Git Hook关联Tapd和Commit
  15. 茶馆预定APP开发功能需求
  16. 可可英语奇文老师 中高级词汇记忆方法(免费下载)
  17. Arduino与Proteus仿真实例-MPX4250压力传感器驱动仿真
  18. 免费实时汇率查询Api接口
  19. 关于python异常处理、以下说法正确的是_以下关于Python高级特性说法正确的是(_____)。...
  20. 疲惫的 618,很难再激起年轻人的多巴胺?

热门文章

  1. 构建您的第一个Web爬网程序,第2部分
  2. Ubuntu16.04LTS ibus pinyin 无法输入中文的处理办法
  3. fullcalendar.js添加农历、节日展示
  4. 三国谋士智商前20名
  5. Mac如何设置多个桌面
  6. stylus样式编译问题./node_modules/css-loader/dist/cjs.js??ref--6-oneOf-6-1!
  7. 大乌龙:系统出错,被接受的加拿大移民申请超出限额7307份
  8. 如何将替换一款蓝牙/wifi 芯片到android
  9. 前端vue利用flexpaper实现在线预览
  10. 普通网民追求怎样的用户体验?