An Analysis of the Web-Based Xunlei (Thunder) Offline Download Process
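The script below is a complete Python 3 client for Xunlei's lixian (offline download) web service, put together by replaying the HTTP requests the official web page makes. It walks through the whole process: fetching the pre-login cookies from login.xunlei.com/check, RSA-encrypting the MD5 hex digest of the password (concatenated with the upper-cased captcha code) and posting it to sec2login, scraping the user_task page for existing tasks, polling task_process and showtask_unfresh for progress, adding plain URL tasks through task_check/task_commit, and adding BT tasks by uploading a .torrent file to torrent_upload and committing it through bt_task_commit. The only non-standard dependency is the third-party rsa package.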
#!/usr/bin/env python3
# -*- encoding: utf-8 -*-

import os
import re
import sys
import ast
import time
import json
import random
import base64
import pickle
import hashlib
import threading
import urllib.parse
import urllib.request

import rsa


class NotImplemented(Exception):
    # custom exception; note that this name shadows Python's built-in NotImplemented
    pass


class MultiPart(object):
    # HTTP multipart/form-data wrapper

    def __init__(self, file):
        if isinstance(file, str):
            self.pathname = file
            with open(file, "rb") as f:
                self.content = f.read()
            self.content = self.content.decode('latin-1')
        elif hasattr(file, 'read'):
            self.pathname = 'file'
            self.content = file.read()
        else:
            raise TypeError("str or objects that have ``read`` attribute needed, but got a {}.".format(type(file)))

    def _gen_boundary(self):
        if not self.content:
            return  ### FIXME should we raise an exception?
        nrand = int(random.random() * 50 + 50)
        brand = os.urandom(nrand)
        boundary = hashlib.md5(brand).hexdigest()
        return boundary

    def post(self, url, extra={}):
        boundary = self._gen_boundary()
        boundary = '--' + boundary
        headers = {'Content-Type': 'multipart/form-data; boundary={}'.format(boundary)}
        if 'Cookie' in extra:
            headers['Cookie'] = extra.pop('Cookie')
        multiparts = []
        # first the extra data
        for k, v in extra.items():
            multiparts.append('--' + boundary)
            multiparts.append('Content-Disposition: form-data; name="{}"'.format(k))
            multiparts.append('')
            multiparts.append(v)
        # last the file itself
        multiparts.append('--' + boundary)
        multiparts.append('Content-Disposition: form-data; name="filepath"; filename="{}"'.format(self.pathname))
        multiparts.append('Content-Type: application/octet-stream')
        multiparts.append('')
        multiparts.append(self.content)
        multiparts.append('--' + boundary + '--')  # end
        multiparts.append('')
        data = '\r\n'.join(multiparts).encode('latin-1')  # multipart lines are CRLF separated
        req = urllib.request.Request(url, data=data, headers=headers)
        resp = urllib.request.urlopen(req)
        return resp


class SimpleCookie(object):
    ### FIXME: we should subclass http.cookiejar.Cookie instead

    def __init__(self, name='', value='', path='', domain=''):
        self.name = name
        self.value = value
        self.path = path
        self.domain = domain
        self.extra = []

    @staticmethod
    def fromstring(cookie_str):
        cookie = SimpleCookie()
        cookie._parse(cookie_str)
        return cookie

    def _parse(self, cookie_str):
        for what in cookie_str.split(';'):
            what = what.strip()
            try:
                what.index('=')
            except ValueError:
                self.extra.append(what)
                continue
            k, v = what.split('=', 1)
            if k.lower() == 'path':
                self.path = v
            elif k.lower() == 'domain':
                self.domain = v
            elif not self.name:
                # the first name=value pair is the cookie itself;
                # later attributes (expires, max-age, ...) go to extra
                self.name = k
                self.value = v
            else:
                self.extra.append(what)

    def __str__(self):
        return '{}={};'.format(self.name, self.value)


class XLTask(object):
    ### TODO: fulfill all task attributes as listed in the documentation

    def __init__(self, tid, name, dl_url, fsize, loadnum):
        self.tid = tid
        self.name = name
        self.dlurl = dl_url
        self.fsize = fsize
        self.loadn = loadnum


class Thunder(object):
    LIXIAN_HOME = 'http://lixian.xunlei.com'
    INTERFACE_URL = 'http://dynamic.cloud.vip.xunlei.com/interface/'
    # All the actions below hang off INTERFACE_URL
    TASK_PROCESS = INTERFACE_URL + 'task_process'
    SHOW_UNFRESH = INTERFACE_URL + 'showtask_unfresh'
    TASK_CHECK = INTERFACE_URL + 'task_check'
    TASK_COMMIT = INTERFACE_URL + 'task_commit'
    TASK_DELETE = INTERFACE_URL + 'task_delete'
    URL_QUERY = INTERFACE_URL + 'url_query'  # for bt
    TORRENT_UPLOAD = INTERFACE_URL + 'torrent_upload'
    BT_TASK_COMMIT = INTERFACE_URL + 'bt_task_commit'

    def __init__(self, uname, password):
        self.uname = uname
        self.password = password
        self.unfinished = []
        self.eyeon = []

    def _getcookie(self):
        # get cookies from the "check user" action
        business_type = 108
        url = 'https://login.xunlei.com/check/?u={}&business_type={}&cachetime={:.0f}&'
        resp = urllib.request.urlopen(url.format(urllib.parse.quote(self.uname),
                                                 business_type, time.time() * 1000))
        cookies = resp.headers.get_all('Set-Cookie')
        cookies_dict = {}  # FIXME: should we use OrderedDict?
        for cookie in cookies:
            cook_obj = SimpleCookie.fromstring(urllib.parse.unquote(cookie))
            cookies_dict[cook_obj.name] = cook_obj
        self.cookies = cookies_dict
        self._save_cookies()
        return self.cookies

    def _build_cookie_str(self):
        cookies_all = []
        for _, cook in self.cookies.items():
            cookies_all.append('{}={};'.format(urllib.parse.quote_plus(cook.name),
                                               urllib.parse.quote_plus(cook.value)))
        return ' '.join(cookies_all)

    def _refresh_captcha(self, verify_type, cookie=None):
        captcha_url = "http://verify1.xunlei.com/image?t={}&cachetime={:.0f}".format(verify_type, time.time() * 1000)
        headers = {}
        if cookie:
            headers['Cookie'] = cookie
        req = urllib.request.Request(captcha_url, headers=headers)
        resp = urllib.request.urlopen(req)
        self._update_cookies(resp.headers.get_all('Set-Cookie'))
        with open("captcha.jpg", 'wb') as f:
            f.write(resp.read())

    def login(self):
        # fetch the pre-login cookies first
        self._getcookie()
        # then post the real login request, retrying on captcha errors
        while True:
            lr, result = self._login_internal()
            if result != 'Captcha error':
                break
        return lr, result

    def _get_verifycode(self):
        # check_result looks like "R:V", where R indicates whether a captcha
        # is required and V carries the verify code
        cr = self.cookies['check_result'].value
        try:
            if_captcha, verify_code = cr.split(':')
        except ValueError:  # need captcha
            if_captcha = cr
            verify_code = ''
        if_captcha = int(if_captcha)
        # verify type
        vt = self.cookies.get('verify_type')
        vt = vt.value if vt else 'SEA'
        if if_captcha:
            # fetch a captcha image
            self._refresh_captcha(vt)
            while True:
                ucode = input("Please open captcha.jpg and input what you see. Or\n"
                              "PRESS ENTER ONLY to refresh captcha image:\n")
                ucode = ucode.strip()
                if len(ucode) == 0:
                    # refresh captcha
                    self._refresh_captcha(vt)
                    continue
                # user has input a code
                break
            verify_code = ucode
        return verify_code

    def _login_internal(self):
        # self._d_print("User:", self.uname)
        # get verify code
        verify_code = self._get_verifycode()
        # prepare cookie string for further requests
        cookies_all_str = self._build_cookie_str()
        headers = {'Referer': 'http://i.xunlei.com/login/2.5/?r_d=1',
                   'Cookie': cookies_all_str}
        # the user can log in now
        kn = self.cookies['check_n'].value
        ke = self.cookies['check_e'].value
        N = int.from_bytes(base64.b64decode(kn.encode()), byteorder='big')
        E = int.from_bytes(base64.b64decode(ke.encode()), byteorder='big')
        pubkey = rsa.key.PublicKey(N, E)
        message = hashlib.md5(self.password.encode()).hexdigest() + verify_code.upper()
        # NOTE: the stock python-rsa encrypt() takes no ``padding`` argument;
        # this call apparently relies on a locally patched copy of the library.
        rsapwd = rsa.encrypt(message.encode(), pubkey, padding='oaep')
        newpwd = base64.b64encode(rsapwd)
        newpwd = newpwd.decode()
        # Thunder randomly selects a login server from these:
        # "https://login"
        # "https://login2"
        # "https://login3"
        login_url = 'https://login.{}/sec2login'.format(self.cookies['check_n'].domain)
        login_params = {
            'p': newpwd,
            'u': self.uname,
            'n': kn,
            'e': ke,
            'verifycode': verify_code,
            'login_enable': 0,     ### TODO: what does this parameter mean?
            'business_type': 108,  ### TODO: what does this parameter mean?
            'v': 100,
            'cachetime': int(time.time() * 1000)
        }
        data = urllib.parse.urlencode(login_params).encode()
        # self._d_print("Data", data.decode())
        req = urllib.request.Request(login_url, data, headers)
        resp = urllib.request.urlopen(req)
        self._update_cookies(resp.headers.get_all('Set-Cookie'))
        self._d_write_response(resp)
        lr, reason = self._check_login_response()
        self._d_print("Login Result:", lr, reason)
        return lr, reason

    def _update_cookies(self, new_cookies, log=False):
        for cookie in new_cookies:
            if log:
                self._d_print("Add cookie:", cookie)
            cook_obj = SimpleCookie.fromstring(cookie)
            self.cookies[cook_obj.name] = cook_obj
        self._save_cookies()
        return self.cookies

    def _save_cookies(self):
        with open('cookies.cache', 'wb') as f:
            pickle.dump(self.cookies, f)

    def _check_login_response(self):
        # The login page's JS maps ``blogresult`` like this:
        # case -1:code=1;msg="连接超时,请重试";break;
        # case 0:msg="登录成功";break;
        # case 1:case 9:case 10:case 11:code=2;msg="验证码错误,请重新输入验证码";break;
        # case 2:case 4:code=3;msg="帐号或密码错误,请重新输入";break;
        # case 3:case 7:case 8:case 16:code=4;msg="服务器内部错误,请重试";break;
        # case 12:case 13:case 14:case 15:code=5;msg="登录页面失效";break;
        # case 6:msg="帐号被锁定,请换帐号登录";break;
        # default:code=-1;msg="内部错误,请重试";break
        result_str = {
            -1: 'Connection Timeout',
            0: 'Succeed',
            1: 'Captcha error',
            2: 'Account or Password error',
            3: 'Server Internal error',
            4: 'Account or Password error',
            6: 'Account is locked',
            7: 'Server Internal error',
            8: 'Server Internal error',
            9: 'Captcha error',
            10: 'Captcha error',
            11: 'Captcha error',
            12: 'Login page failure',
            13: 'Login page failure',
            14: 'Login page failure',
            15: 'Login page failure',
            16: 'Server Internal error',
        }
        lr = self.cookies.get('blogresult')
        lr = int(lr.value) if lr else -1
        if lr not in result_str:
            return lr, 'Internal Error'
        return lr, result_str[lr]

    def _d_write_response(self, resp):
        pr = urllib.parse.urlparse(resp.url)
        name = '{}-{}'.format(pr.path.strip('/'), pr.query.replace('&', '_'))
        with open(name, 'wb') as f:
            f.write(resp.read())

    def _d_print(self, *args):
        print(*args)

    def gettasks(self, page=1):
        # get user tasks
        # this function simply requests
        # http://dynamic.cloud.vip.xunlei.com/user_task?userid={}&st=4
        # where userid comes from the login cookies
        if page < 1:
            page = 1
        try:
            userid = self.cookies['userid'].value
        except KeyError:
            raise ValueError("Cookies do not contain 'userid'")
        cookies_all_str = self._build_cookie_str()
        task_url = 'http://dynamic.cloud.vip.xunlei.com/user_task?userid={}&st=4&p={}&stype='.format(userid, page)
        headers = {'Referer': 'http://lixian.xunlei.com/login.html',
                   'Cookie': cookies_all_str}
        req = urllib.request.Request(task_url, headers=headers)
        resp = urllib.request.urlopen(req)
        # update cookies
        self._update_cookies(resp.headers.get_all('Set-Cookie'))
        # the page is utf-8 encoded, so just decode it
        content = resp.read().decode()
        with open("tasks.resp.html", "w", encoding="utf-8") as f:
            f.write(content)
        self.tasklist = self._parse_tasks(content)
        self._d_print_tasks()

    def _parse_tasks(self, content, tasklist=None):
        # only dl_url, taskname, ysfilesize and loadnum of each task are of interest
        pat_attr = r'<input id="(dl_url|taskname|ysfilesize)(\d+)\".*?value=\"(.*?)\".*?>'
        pat_load = r'<em class="loadnum".*?>(\d+)%</em>'
        L1 = re.findall(pat_attr, content)
        L2 = re.findall(pat_load, content)  # len(L2) == len(L1)//3 + 1
        # self._d_print("len(tasks):", len(L1))
        # self._d_print("len(loadnum):", len(L2))
        # self._d_print("Loadnum:", L2)
        # dl_url, name, fsize
        assert len(L1) % 3 == 0
        # assert len(L2) == len(L1)//3 + 1
        if not tasklist:
            tasklist = list()
        # build XLTask objects
        for i in range(0, len(L1), 3):
            # the three inputs of one task share the same numeric id
            # assert L1[i][1] == L1[i+1][1] == L1[i+2][1]
            tid = L1[i][1]
            # usually the order is dl_url, taskname, ysfilesize,
            # but that is not guaranteed, so match on the field name instead
            dl_url = tname = fsize = ''
            for key, _, value in L1[i:i + 3]:
                if key == "dl_url":
                    dl_url = value
                elif key == "taskname":
                    tname = value
                elif key == "ysfilesize":
                    fsize = value
            try:
                loadnum = L2[i // 3]
            except IndexError:
                loadnum = '--'
            # self._d_print(ntype, tname, fsize)
            tasklist.append(XLTask(tid, tname, dl_url, fsize, loadnum))
        return tasklist

    def _d_print_tasks(self, count=-1):
        if count < 0:
            count = len(self.tasklist)
        if count == 0:
            return
        print("loadnum size(M) name")
        for i in range(count):
            print("{} {} {}".format(self.tasklist[i].loadn,
                                    int(self.tasklist[i].fsize) // (2 ** 20),
                                    self.tasklist[i].name))

    def addurl(self, url):
        self._d_print("add url: {}".format(url))
        args = self._check_task(url)
        if args is None:
            return -1
        verify_code = ''
        while True:
            taskid = self._commit_task(url, *args, verify_code=verify_code)
            if taskid == -11 or taskid == -12:  # need verify code
                cookie_str = self._build_cookie_str()
                while True:
                    self._refresh_captcha('MVA', cookie_str)
                    ucode = input("Please open captcha.jpg and input what you see. Or\n"
                                  "PRESS ENTER ONLY to refresh captcha image:\n")
                    ucode = ucode.strip()
                    if len(ucode) != 0:
                        break
                verify_code = ucode
            else:
                break
        return taskid

    def process(self, tasks, nm_tasks=None, bt_tasks=None):
        # get the progress of the specified tasks
        # this function simply requests
        # http://dynamic.cloud.vip.xunlei.com/interface/task_process
        # with a query string such as:
        # callback=jsonp1438754682737&t=Wed%20Aug%2005%202015%2014:05:43%20GMT+0800%20(%E4%B8%AD%E5%9B%BD%E6%A0%87%E5%87%86%E6%97%B6%E9%97%B4)
        try:
            userid = self.cookies['userid'].value
        except KeyError:
            raise ValueError("Cookies do not contain 'userid'")
        process_url = self.TASK_PROCESS
        cookies_all_str = self._build_cookie_str()
        headers = {'Referer': 'http://dynamic.cloud.vip.xunlei.com/user_task?userid={}&st=4'.format(userid),
                   'Cookie': cookies_all_str}
        _params = {
            'uid': userid,
            'callback': 'jsonp{:.0f}'.format(time.time()),
            't': time.strftime('%a %b %d %Y %H:%M:%S GMT%z (%Z)'),
            'list': ','.join(tasks),
            'nm_list': ','.join(nm_tasks) if nm_tasks else '',
            'bt_list': ','.join(bt_tasks) if bt_tasks else '',
            'interfrom': 'task'
        }
        data = urllib.parse.urlencode(_params).encode()
        req = urllib.request.Request(process_url, data=data, headers=headers)
        resp = urllib.request.urlopen(req)
        con = resp.read().decode()
        # cache it
        with open("process.resp", "w", encoding="utf-8") as f:
            f.write(con)
        # strip the JSONP wrapper: callback( ... )
        lcolon = con.find('(') + 1
        rcolon = con.rfind(')')
        json_str = con[lcolon:rcolon]
        # self._d_print(json_str)
        results = []
        try:
            tasks_resp = json.loads(json_str)
        except ValueError:
            return results
        for task in tasks_resp["Process"]["Record"]:
            should_print = False
            tid = task["tid"]
            if tid in tasks:
                should_print = True
            if nm_tasks and tid in nm_tasks:
                should_print = True
            if bt_tasks and tid in bt_tasks:
                should_print = True
            if not should_print:
                continue
            results.append(task)
        return results

    def task_unfresh(self, page=1):
        # get the user's unfinished tasks
        # this function simply requests
        # http://dynamic.cloud.vip.xunlei.com/interface/showtask_unfresh
        # with a query string such as:
        # callback=jsonp1438756475786&t=Wed%20Aug%2005%202015%2014:36:40%20GMT+0800%20(%E4%B8%AD%E5%9B%BD%E6%A0%87%E5%87%86%E6%97%B6%E9%97%B4)&type_id=4&page=1&tasknum=30&p=1&interfrom=task
        if page < 1:
            page = 1
        try:
            userid = self.cookies['userid'].value
        except KeyError:
            raise ValueError("Cookies do not contain 'userid'")
        task_url = self.SHOW_UNFRESH + "?"
        cookies_all_str = self._build_cookie_str()
        headers = {'Referer': 'http://dynamic.cloud.vip.xunlei.com/user_task?userid={}&st=4'.format(userid),
                   'Cookie': cookies_all_str}
        _params = {
            'callback': 'jsonp{:.0f}'.format(time.time()),
            't': time.strftime('%a %b %d %Y %H:%M:%S GMT+0800 (%Z)'),
            'type_id': 4,  ### TODO: what does it mean?
            'page': page,
            'p': page,
            'tasknum': 30,
            'interfrom': 'task'
        }
        query = urllib.parse.urlencode(_params)
        req = urllib.request.Request(task_url + query, headers=headers)
        resp = urllib.request.urlopen(req)
        con = resp.read().decode()
        # cache it
        with open("unfresh.resp", "w", encoding="utf-8") as f:
            f.write(con)
        # strip the JSONP wrapper
        lcolon = con.find('(') + 1
        rcolon = con.rfind(')')
        json_str = con[lcolon:rcolon]
        # self._d_print(json_str)
        unfinished = json.loads(json_str)
        if unfinished["rtcode"] == -11 or unfinished["rtcode"] == -1:
            print("System busy. Please retry later.")
            return None, None, None
        elif unfinished["rtcode"] != 0:
            print("Unknown error. Please retry later.")
            return None, None, None
        ta_ids = unfinished["global_new"]["download_task_ids"].split(',')
        nm_ids = unfinished["global_new"]["download_nm_task_ids"].split(',')
        bt_ids = unfinished["global_new"]["download_bt_task_ids"].split(',')
        self.unfinished.append(unfinished)  # keep the raw response for later inspection
        return ta_ids, nm_ids, bt_ids

    def showtasks(self):
        while True:
            ta_ids, nm_ids, bt_ids = self.task_unfresh()
            if ta_ids is None:
                time.sleep(10)  # sleep 10 seconds and try again
                continue
            # tasks are grabbed
            break
        tasks = self.process(ta_ids, nm_ids, bt_ids)
        self._d_print_process(tasks)

    def _d_print_process(self, tasks):
        msgfmt = "{0[fsize]:<8s}{0[fpercent]:>6.2f}% {0[leave_time]:<11s}{1}"
        self._d_print("{:<8s}{:<8s}{:<11s}{}".format("size", "percent", "leave-time", "tid"))
        for t in tasks:
            name = t['taskname'] if 'taskname' in t else t['tid']
            self._d_print(msgfmt.format(t, name))

    def _gen_rand(self):
        _randf = '{:.0f}{}'.format(time.time() * 1000, random.random() * (2000000 - 10) + 10)
        return _randf

    def _check_task(self, url):
        # check a url before adding it as a task
        check_url = self.TASK_CHECK + '?'
        # userid
        try:
            userid = self.cookies['userid'].value
        except KeyError:
            raise ValueError("Cookies do not contain 'userid'")
        cookies_all_str = self._build_cookie_str()
        headers = {'Referer': 'http://dynamic.cloud.vip.xunlei.com/user_task?userid={}&st=4'.format(userid),
                   'Cookie': cookies_all_str}
        # random
        _randf = self._gen_rand()
        _params = {
            'callback': 'queryCid',
            'url': url,
            'interfrom': 'task',
            'random': _randf,
            'tcache': int(time.time() * 1000)
        }
        query = urllib.parse.urlencode(_params)
        req = urllib.request.Request(check_url + query, headers=headers)
        resp = urllib.request.urlopen(req)
        con = resp.read().decode()
        # cache it
        with open("check.resp", "w", encoding="utf-8") as f:
            f.write(con)
        # strip the JSONP wrapper
        lcolon = con.find('(') + 1
        rcolon = con.rfind(')')
        parameters_str = con[lcolon:rcolon]
        # function queryCid(cid,gcid,file_size,avail_space,tname,goldbean_need,silverbean_need,is_full,random,type,rtcode)
        l = parameters_str.split(',')
        if len(l) < 11:
            return None
        try:
            # literal_eval instead of eval: never execute server-supplied text
            cid = ast.literal_eval(l[0])
            gcid = ast.literal_eval(l[1])
            fsize = ast.literal_eval(l[2])
            fname = ast.literal_eval(l[4])
            goldbean = ast.literal_eval(l[5])
            silverbean = ast.literal_eval(l[6])
            ttype = ast.literal_eval(l[9])
        except (IndexError, ValueError, SyntaxError):
            return None
        # task type
        # magnet:    : 4
        # thunder:// : 3
        # ed2k://    : 2
        # .torrent   : 1
        # all other  : 0
        ### FIXME shall we check random? Xunlei's queryCid does.
        # check if it is a bit-torrent
        if fname.endswith('.torrent'):
            # go to
            # INTERFACE_URL+"/url_query?callback=queryUrl&u="+encodeURIComponent(u)+"&random="+$('#query_random').val()+"&interfrom="+G_PAGE;
            # to check the bt task
            return self.checkbturltask(url)
        ### TODO we SHOULD check whether golden or silver beans are needed
        return [cid, gcid, fsize, fname, goldbean, silverbean, ttype]

    def checkbturltask(self, url):
        # check a url that points at a torrent before adding it as a task
        check_url = self.URL_QUERY + '?'
        # userid
        try:
            userid = self.cookies['userid'].value
        except KeyError:
            raise ValueError("Cookies do not contain 'userid'")
        cookies_all_str = self._build_cookie_str()
        headers = {'Referer': 'http://dynamic.cloud.vip.xunlei.com/user_task?userid={}&st=4'.format(userid),
                   'Cookie': cookies_all_str}
        # random
        _randf = self._gen_rand()
        _params = {
            'callback': 'queryUrl',
            'u': url,
            'interfrom': 'task',
            'random': _randf,
            'tcache': int(time.time() * 1000)
        }
        query = urllib.parse.urlencode(_params)
        req = urllib.request.Request(check_url + query, headers=headers)
        resp = urllib.request.urlopen(req)
        con = resp.read().decode()
        # cache it
        with open("check.resp", "w", encoding="utf-8") as f:
            f.write(con)
        # strip the JSONP wrapper
        lcolon = con.find('(') + 1
        rcolon = con.rfind(')')
        parameters_str = con[lcolon:rcolon]
        # queryUrl(flag,infohash,fsize,bt_title,is_full,subtitle,subformatsize,size_list,valid_list,file_icon,findex,is_blocked,random,rtcode)
        l = parameters_str.split(',')
        if len(l) < 13:
            return None
        try:
            flag = l[0].strip()
            infohash = l[1].strip()
            fsize = l[2].strip()
            bt_title = l[3].strip()
            subtitle = l[5].strip()
            subformatsize = l[6].strip()
            size_list = l[7].strip()
            rtcode = l[-1].strip()
        except IndexError:
            return None
        if not bt_title:
            return None
        flag = int(flag)
        if flag == 0:
            self._d_print("Get bit-torrent failed. Please check if the url is correct.")
            return None
        elif flag == -1:
            self._d_print("You have uploaded this torrent before.")
            ### TODO we should let the user select files OR just begin downloading
            # go to
            # INTERFACE_URL+"/fill_bt_list?callback=edit_bt_list&tid="+fsize+"&infoid="+infohash+"&uid="+G_USERID+"&ed=1&random="+random+"&interfrom="+G_PAGE;
            # to get the bt file list?
            return None
        ### TODO find a url that points at a BT file and see what happens
        raise NotImplemented('BT download through a url is not implemented yet.\n'
                             'If you see this message, please send the url to roy.zhang@browan.com')

    def _commit_task(self, url, *args, verify_code=''):
        # commit a task that passed _check_task
        commit_url = self.TASK_COMMIT + '?'
        # self._d_print("commit task args:", *args)
        try:
            cid, gcid, fsize, fname, goldbean, silverbean, ttype = args
        except (TypeError, ValueError):
            return -1
        if cid is None:
            return -1
        # userid
        try:
            userid = self.cookies['userid'].value
        except KeyError:
            raise ValueError("Cookies do not contain 'userid'")
        # cookie
        cookies_all_str = self._build_cookie_str()
        headers = {'Referer': 'http://dynamic.cloud.vip.xunlei.com/user_task?userid={}&st=4'.format(userid),
                   'Cookie': cookies_all_str}
        _params = {
            'callback': 'ret_task',
            'uid': userid,
            'cid': cid,
            'gcid': gcid,
            'size': fsize,
            'goldbean': goldbean,
            'silverbean': silverbean,
            't': fname,           # file name
            'url': url,           # source url
            'type': ttype,        # task type
            'o_page': 'history',
            'o_taskid': 0,        ### FIXME
            'class_id': 0,        # classify/group
            'database': 'undefined',
            'interfrom': 'task',
            'verify_code': verify_code,
            'time': time.strftime('%a %b %d %Y %H:%M:%S GMT+0800 (%Z)'),
            'noCacheIE': int(time.time() * 1000)
        }
        query = urllib.parse.urlencode(_params)
        req = urllib.request.Request(commit_url + query, headers=headers)
        resp = urllib.request.urlopen(req)
        con = resp.read().decode()
        # cache it
        with open("commit.resp", "w", encoding="utf-8") as f:
            f.write(con)
        # strip the JSONP wrapper
        lcolon = con.find('(') + 1
        rcolon = con.rfind(')')
        parameters_str = con[lcolon:rcolon]
        # function ret_task(ret_num, taskid, time)
        l = parameters_str.split(',')
        retnum = int(ast.literal_eval(l[0].strip()))
        taskid = ast.literal_eval(l[1].strip())
        # tatime = l[2].strip()
        self._d_print("retnum:", retnum)
        if retnum != 1:  # failed: -1, 75, 76
            return retnum
        return taskid

    def deletetask(self, tasks):
        # delete tasks
        delete_url = self.TASK_DELETE + '?'
        # userid
        try:
            userid = self.cookies['userid'].value
        except KeyError:
            raise ValueError("Cookies do not contain 'userid'")
        # cookie
        cookies_all_str = self._build_cookie_str()
        headers = {'Referer': 'http://dynamic.cloud.vip.xunlei.com/user_task?userid={}&st=4'.format(userid),
                   'Cookie': cookies_all_str}
        if isinstance(tasks, str) or not hasattr(tasks, '__iter__'):
            tasks = [tasks, ]
        tasks = [str(t) for t in tasks]
        tasks_str = ','.join(tasks) + ','
        databases = ','.join('0' * len(tasks)) + ','
        self._d_print("delete task: ", ",".join(tasks))
        _params1 = {  # for the url
            'callback': 'jsonp{:.0f}'.format(time.time() * 1000),
            'type': 0,
            't': time.strftime('%a %b %d %Y %H:%M:%S GMT+0800 (%Z)'),
        }
        _params2 = {  # for the POST body
            'taskids': tasks_str,
            'databases': databases,
            'old_idlist': '',
            'old_databaselist': '',
            'interfrom': 'task',
        }
        query = urllib.parse.urlencode(_params1)
        data = urllib.parse.urlencode(_params2)
        data = data.encode()
        req = urllib.request.Request(delete_url + query, data=data, headers=headers)
        resp = urllib.request.urlopen(req)
        con = resp.read().decode()
        # cache it
        with open("delete.resp", "w", encoding="utf-8") as f:
            f.write(con)
        # strip the JSONP wrapper
        lcolon = con.find('(') + 1
        rcolon = con.rfind(')')
        json_str = con[lcolon:rcolon]
        try:
            dr = json.loads(json_str)
            return dr['result'] == 1
        except (ValueError, KeyError):
            return False

    def addbt(self, btfile):
        # upload a .torrent file and commit the bt task
        try:
            title, btcid, fsize, files = self._upload_btfile(btfile)
        except ValueError as e:
            print("Error:", e)
            return -1
        # commit bt task
        verify_code = ''
        while True:
            tid = self._commit_bttask(title, btcid, fsize, files, verify_code=verify_code)
            if tid == -11 or tid == -12:
                # need verify code
                cookie_str = self._build_cookie_str()
                while True:
                    self._refresh_captcha('MVA', cookie_str)
                    ucode = input("Please open captcha.jpg and input what you see. Or\n"
                                  "PRESS ENTER ONLY to refresh captcha image:\n")
                    ucode = ucode.strip()
                    if len(ucode) != 0:
                        break
                verify_code = ucode
            else:
                break
        return tid

    def _upload_btfile(self, btfile):
        ### TODO
        upload_url = self.TORRENT_UPLOAD
        # cookie
        cookies_all_str = self._build_cookie_str()
        headers = {'Cookie': cookies_all_str,
                   'random': self._gen_rand(),
                   'interfrom': 'task'}
        mp = MultiPart(btfile)
        resp = mp.post(upload_url, headers)
        con = resp.read()
        con = con.decode()
        with open("upload.resp", "w", encoding="utf-8") as f:
            f.write(con)
        # find the value of btResult
        try:
            _start = con.index('btResult')
            _start1 = con.index('{', _start)
            _end = con.rindex('}')
            json_str = con[_start1:_end + 1]
        except ValueError:
            raise ValueError('upload response is not expected.')
        try:
            br = json.loads(json_str)
        except ValueError:
            raise ValueError('btResult is not a valid JS object.')
        if br["ret_value"] != 1:
            raise ValueError('file upload returned {}.'.format(br["ret_value"]))
        title = br["ftitle"]
        btcid = br["infoid"]
        fsize = br["btsize"]
        files = br["filelist"]
        self._d_print("upload result: title: {}, cid: {}, fsize: {}, files: {}".format(title, btcid, fsize, files))
        return title, btcid, fsize, files

    def _commit_bttask(self, title, btcid, fsize, files, verify_code=''):
        commit_url = self.BT_TASK_COMMIT + '?'
        # userid
        try:
            userid = self.cookies['userid'].value
        except KeyError:
            raise ValueError("Cookies do not contain 'userid'")
        # cookie
        cookies_all_str = self._build_cookie_str()
        headers = {'Referer': 'http://dynamic.cloud.vip.xunlei.com/user_task?userid={}&st=4'.format(userid),
                   'Cookie': cookies_all_str}
        # for now we simply select everything for download
        ### TODO select the files the user is interested in automatically, like QQ-Download
        findex = '_'.join(str(f['id']) for f in files) + "_"
        ssize = '_'.join(str(f['subsize']) for f in files) + "_"
        _params1 = {
            'callback': 'jsonp{:.0f}'.format(time.time() * 1000),
            't': time.strftime('%a %b %d %Y %H:%M:%S GMT+0800 (%Z)'),
        }
        _params2 = {
            'uid': userid,
            'btname': title,
            'cid': btcid,
            'goldbean': 0,
            'silverbean': 0,
            'tsize': fsize,
            'findex': findex,
            'size': ssize,
            'o_taskid': 0,
            'o_page': 'task',
            'class_id': 0,  # category: all files
            'interfrom': 'task',
            'verify_code': verify_code
        }
        query = urllib.parse.urlencode(_params1)
        data = urllib.parse.urlencode(_params2)
        data = data.encode()
        # self._d_print("commit bt task query: ", query)
        # self._d_print("commit bt task data: ", data.decode())
        req = urllib.request.Request(commit_url + query, data=data, headers=headers)
        resp = urllib.request.urlopen(req)
        con = resp.read().decode()
        # self._d_print("commit bt task response: ", con)
        # cache it
        with open("commit_bttask.resp", "w", encoding="utf-8") as f:
            f.write(con)
        # strip the JSONP wrapper
        lcolon = con.find('(') + 1
        rcolon = con.rfind(')')
        json_str = con[lcolon:rcolon]
        try:
            dr = json.loads(json_str)
        except (ValueError, KeyError):
            return False
        progress = {
            1: 'Success',
            2: 'Fail',
            -11: 'Need verify code (-11)',
            -12: 'Need verify code (-12)'
        }
        tid = dr.get('id')
        if tid is None:
            # dr['progress'] must exist
            return dr['progress']
        # self._d_print('commit bt task result:', progress[dr['progress']])
        return dr['id']

    def openeye(self, tasks):
        if not hasattr(tasks, '__iter__'):
            tasks = [tasks, ]
        self.eyeon.extend(tasks)
        ### TODO Threads in python will slow down your execution sharply,
        ### but do we have another way to take a glance now and then without
        ### interrupting other tasks?
        raise NotImplemented("Eyes are closed currently....")


if __name__ == '__main__':
    # get a thunder vip account: http://521xunlei.com/portal.php
    url = 'http://mirrors.163.com/archlinux/iso/2015.08.01/archlinux-2015.08.01-dual.iso'
    my = Thunder('793040110:1', '1575933')
    print("************************ login ************************")
    lr, reason = my.login()
    if lr != 0:
        sys.exit(0)
    print("************************ tasks (page 1) ************************")
    my.gettasks()
    print("************************ process ************************")
    my.showtasks()
    # print("************************ add url ************************")
    # tid = my.addurl(url)
    # if isinstance(tid, int):
    #     print("Add task failed.")
    # else:
    #     print("Add task succeed, task id:", tid)
    # # print("************************ eyeon ************************")
    # # my.start_eyeon(tid)
    # print("************************ delete ************************")
    # r = my.deletetask(tid)
    # print("delete result:", r)
    print("************************ add bt file ************************")
    tid = my.addbt('archlinux-2015.08.01-dual.iso.torrent')
    if isinstance(tid, int):
        print("Add bt task failed.")
    else:
        print("Add task succeed, task id:", tid)
    # print("************************ eyeon ************************")
    # my.start_eyeon(tid)
    print("************************ delete ************************")
    my.deletetask(tid)