@@ -9,6 +9,7 @@ from Crawler import Information_storage
import requests
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.common.keys import Keys
+
keys_name_dict = {
    'ctrl': Keys.CONTROL,
    'shift': Keys.SHIFT,
@@ -35,58 +36,58 @@ keys_name_dict = {
for i in range(1, 13):  # F1-F12 keys
    keys_name_dict[f'f{i}'] = eval(f'Keys.F{i}')

-data_base = Information_storage.DataBase_Home()
+data_base = Information_storage.DatabaseController()


-class PAGE:
+class Page:
    def __init__(self, time_out):
        self.url = ''
-        self.UA = ''
-        self.func = 'PAGE'
+        self.user_agent = ''
+        self.mode = 'PAGE'
        self.time_out = time_out

    def __str__(self):
-        return f'[{self.time_out}s]{self.func}-{self.url}:UA>{self.UA}'
+        return f'[{self.time_out}s]{self.mode}-{self.url}:UA>{self.user_agent}'


-class REQUESTS_Base(PAGE):
-    def init(self, UA, url, cookies):
-        if UA == '':
-            UA = f'--user-agent ="Mozilla/5.0 (Windows NT 10.0; Win64; x64) ' \
+class RequestsBase(Page):
+    def init(self, user_agent, url, cookies):
+        if user_agent == '':
+            user_agent = f'--user-agent ="Mozilla/5.0 (Windows NT 10.0; Win64; x64) ' \
                f'AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36 Edg/80.0.361.66"'
-        self.UA = UA
+        self.user_agent = user_agent
        self.headers = {
            'Accept': 'text/html, application/xhtml+xml, image/jxr, */*',
            'Accept - Encoding': 'gzip, deflate',
            'Accept-Language': 'zh-Hans-CN, zh-Hans; q=0.5',
            'Connection': 'Keep-Alive',
-            'User-Agent': UA}
+            'User-Agent': user_agent}
        self.url = url
        self.cookies = cookies
        self.new = True


-class URL_POST(REQUESTS_Base):  # POST request via requests
-    def __init__(self, url, data, time_out, UA='', cookies=None, **kwargs):
-        super(URL_POST, self).__init__(time_out)
-        self.func = 'post'
+class UrlPost(RequestsBase):  # POST request via requests
+    def __init__(self, url, data, time_out, user_agent='', cookies=None, **kwargs):
+        super(UrlPost, self).__init__(time_out)
+        self.mode = 'post'
        self.data = data
        self.requests = requests.post
-        self.init(UA, url, cookies)
+        self.init(user_agent, url, cookies)

    def __str__(self):
-        return super(URL_POST, self).__str__() + f';data>{self.data}'
+        return super(UrlPost, self).__str__() + f';data>{self.data}'


-class URL_GET(REQUESTS_Base):  # GET request via requests
-    def __init__(self, url, time_out, UA='', cookies=None, **kwargs):
-        super(URL_GET, self).__init__(time_out)
-        self.func = 'simplify_get'
+class UrlGet(RequestsBase):  # GET request via requests
+    def __init__(self, url, time_out, user_agent='', cookies=None, **kwargs):
+        super(UrlGet, self).__init__(time_out)
+        self.mode = 'simplify_get'
        self.requests = requests.get
-        self.init(UA, url, cookies)
+        self.init(user_agent, url, cookies)


-class URL_PAGE(PAGE):
+class UrlPage(Page):
    def __init__(
            self,
            url,
@@ -97,21 +98,21 @@ class URL_PAGE(PAGE):
            no_js=False,
            no_java=False,
            no_img=False,
-            UA='',
+            user_agent='',
            cookies=None,
            new=False,
            down_load_dir='',
            **kwargs):
-        super(URL_PAGE, self).__init__(time_out)
+        super(UrlPage, self).__init__(time_out)
        self.url = url
-        self.func = 'get'
+        self.mode = 'get'
        self.options = webdriver.ChromeOptions()
        self.cookies = cookies  # where cookies are stored
        self.new = new  # new tab or new browser
        self.down_load_dir = down_load_dir
-        self.init(first_run, head, no_plugins, no_js, no_java, no_img, UA)
+        self.init(first_run, head, no_plugins, no_js, no_java, no_img, user_agent)

-    def init(self, first_run, head, no_plugins, no_js, no_java, no_img, UA):
+    def init(self, first_run, head, no_plugins, no_js, no_java, no_img, user_agent):
        self.options.add_argument('disable-infobars')  # hide the infobar
        prefs = {'profile.default_content_settings.popups': 0,
                 'download.default_directory': self.down_load_dir}
@@ -119,7 +120,6 @@ class URL_PAGE(PAGE):
        if first_run:
            self.options.add_argument('-first run')
        if head:  # headless setup
-            print('FFF')
            self.options.add_argument('--headless')
            self.options.add_argument('--disable-gpu')
        if no_plugins:
@@ -130,25 +130,25 @@ class URL_PAGE(PAGE):
            self.options.add_argument('--disable-java')
        if no_img:
            self.options.add_argument('blink-settings=imagesEnabled=false')
-        if UA == '':
-            UA = (
+        if user_agent == '':
+            user_agent = (
                f'user-agent ="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) '
                f'Chrome/80.0.3987.132 Safari/537.36"')
        # self.options.add_argument(f'--user-agent ="{UA}"')
-        self.UA = UA
+        self.user_agent = user_agent

    def __str__(self):
-        return f'{self.func}-{self.url}:UA>{self.UA}'
+        return f'{self.mode}-{self.url}:UA>{self.user_agent}'


class url:  # url manager
-    num = 0  # number of url manager instances
+    url_count = 0  # number of url manager instances

    def __init__(self, dic=f'', dic_run=f''):
-        url.num += 1
+        url.url_count += 1
        self.save_dir = dic
-        dic += f'/url[{url.num}].cot_url'
-        dic_run += f'/url_run[{url.num}].cot_url'
+        dic += f'/url[{url.url_count}].cot_url'
+        dic_run += f'/url_run[{url.url_count}].cot_url'
        self.dir = dic
        self.dir_run = dic_run
        self.file = open(dic, 'a')  # file that records url_history
@@ -167,13 +167,13 @@ class url:  # url manager
                return False
        return True

-    def Add_func(self, func, name):  # add a filter function
+    def add_filter_func(self, func, name):  # add a filter function
        self.filter[name] = func

-    def Del_func(self, index):  # remove a filter function
+    def del_filter_func(self, index):  # remove a filter function
        del self.filter[list(self.filter.keys())[index]]

-    def return_func(self):
+    def return_filter_func(self):
        return list(self.filter.keys())

    def add_url(self, url, func, data=None, **kwargs):  # add a url
@@ -187,25 +187,25 @@ class url:  # url manager
                url, func=func):  # 1. url not already in history, 2. url passes the filters
            if func == 'get':
                self.url_list.append(
-                    URL_PAGE(
+                    UrlPage(
                        url=url,
                        **kwargs,
                        down_load_dir=self.dir))  # add to the pending url list
            elif func == 'simplify_get':
                self.url_list.append(
-                    URL_GET(
+                    UrlGet(
                        url=url,
                        **kwargs,
                        down_load_dir=self.dir))  # add to the pending url list
            else:
                self.url_list.append(
-                    URL_POST(
+                    UrlPost(
                        url=url,
                        data=data,
                        **kwargs))  # add to the pending url list

            self.url_history.append(url_)  # add to url history
-            self.__out_url(url_)  # write out the url history
+            self.__out_url_history(url_)  # write out the url history
            return True  # write succeeded
        return False  # write failed

@@ -213,13 +213,13 @@ class url:  # url manager
        self.__out_url_run(f'DELETE {self.url_list[index]}')
        del self.url_list[index]

-    def get_url(self) -> (URL_PAGE, URL_POST):  # fetch a url
+    def get_url(self) -> (UrlPage, UrlPost):  # fetch a url
        url_page = self.url_list[0]
        self.__out_url_run(url_page.url)
        del self.url_list[0]
        return url_page

-    def __out_url(self, url):  # write out the url history
+    def __out_url_history(self, url):  # write out the url history
        self.file.write(f'{url}\n')
        self.file.flush()

@@ -227,7 +227,7 @@ class url:  # url manager
        self.file_run.write(f'{url}\n')
        self.file_run.flush()

-    def finish(self):
+    def is_finish(self):
        return len(self.url_list) == 0

    def return_url(self):
@@ -237,20 +237,20 @@ class url:  # url manager
        return self.url_history.copy()


-class Page_Downloader:
-    num = 0
+class PageDownloader:
+    downloader_count = 0

    def __init__(self, url: url, dic=''):
        self.url = url
        self.dir = dic
        self.log = Information_storage.log(dic)
-        Page_Downloader.num += 1
+        PageDownloader.downloader_count += 1
        self.page_source_dict = {}  # saved page info
        self.cookie_Thread = None  # cookie worker thread
        self.browser = None
        self.cookie_dict = {}
        self.cookie_dict_list = {}  # selenium cookies
-        self.lase_func = ''
+        self.last_mode = ''

    def close(self):
        self.log.close()
@@ -259,69 +259,69 @@ class Page_Downloader:
        try:
            self.break_ = False
            self.browser.quit()
-            self.lase_func = ''
+            self.last_mode = ''
        except BaseException:
            pass

-    def strat_urlGet(self, *args, func_cookie):  # request the url -> get one page's content
+    def start_to_run(self, *args, func_cookie):  # request the url -> get one page's content
        self.break_ = False
        self.page_source_dict = {}
-        self.nowurl = self.url.get_url()  # fetch one url
-        url = self.nowurl.url
-        if self.nowurl.func == 'get':
-            if self.nowurl.new and self.lase_func == 'get':  # restart the browser
+        self.url_text = self.url.get_url()  # fetch one url
+        url = self.url_text.url
+        if self.url_text.mode == 'get':
+            if self.url_text.new and self.last_mode == 'get':  # restart the browser
                self.browser.quit()
                self.browser = webdriver.Chrome(
-                    chrome_options=self.nowurl.options)
+                    chrome_options=self.url_text.options)
            try:
                self.browser.set_page_load_timeout(
-                    self.nowurl.time_out)  # set the page-load timeout
+                    self.url_text.time_out)  # set the page-load timeout
                self.browser.set_script_timeout(
-                    self.nowurl.time_out)  # set the async JS execution timeout
+                    self.url_text.time_out)  # set the async JS execution timeout
                self.browser.get(url)
            except BaseException:
                self.browser = webdriver.Chrome(
-                    chrome_options=self.nowurl.options)
+                    chrome_options=self.url_text.options)
                self.browser.set_page_load_timeout(
-                    self.nowurl.time_out)  # set the page-load timeout
+                    self.url_text.time_out)  # set the page-load timeout
                self.browser.set_script_timeout(
-                    self.nowurl.time_out)  # set the async JS execution timeout
+                    self.url_text.time_out)  # set the async JS execution timeout
                self.browser.get(url)
            try:
-                if not self.nowurl.new:
+                if not self.url_text.new:
                    raise Exception
-                list_ = self.cookie_dict_list[self.nowurl.cookies]
-                self.Tra_cookies()
+                list_ = self.cookie_dict_list[self.url_text.cookies]
+                self.monitoring_clear_cookier()
                try:
                    for i in list_:
-                        self.Add_cookies(i)
+                        self.monitoring_add_cookies(i)
                except BaseException:
                    pass
            except BaseException:
                pass
            self.start_cookies(func_cookie, url)
        else:  # requests mode
-            if self.lase_func == 'get':
+            if self.last_mode == 'get':
                try:
                    self.browser.quit()
                except BaseException:
                    pass
            try:
-                args = {'cookies': self.cookie_dict[self.nowurl.cookies]}
+                args = {'cookies': self.cookie_dict[self.url_text.cookies]}
                func_cookie([args['cookies']])
            except BaseException:
                args = {}
                func_cookie([])
-            if self.nowurl.func == 'post':
-                args['data'] = self.nowurl.data
-            self.browser = self.nowurl.requests(
-                url, headers=self.nowurl.headers, **args, timeout=self.nowurl.time_out)
+            if self.url_text.mode == 'post':
+                args['data'] = self.url_text.data
+            self.browser = self.url_text.requests(
+                url, headers=self.url_text.headers, **args, timeout=self.url_text.time_out)
            self.cookie_dict[url] = requests.utils.dict_from_cookiejar(
                self.browser.cookies)  # save the cookies
            func_cookie([self.cookie_dict[url]])
-        self.lase_func = self.nowurl.func
-        self.Parser.browser = self.browser
-        self.Parser.init(url)
+        self.last_mode = self.url_text.mode
+        self.parser.browser = self.browser
+        self.parser.init(url)
        return self.browser

    def start_cookies(self, func_cookie, url):
@@ -341,19 +341,19 @@ class Page_Downloader:
        self.cookie_Thread = threading.Thread(target=update_cookie)
        self.cookie_Thread.start()

-    def Del_cookies(self, name):  # delete the named cookie
+    def monitoring_del_cookies(self, name):  # delete the named cookie
        browser = self.browser
        browser.delete_cookie(name)

-    def Tra_cookies(self):  # clear all cookies
+    def monitoring_clear_cookier(self):  # clear all cookies
        browser = self.browser
        browser.delete_all_cookies()

-    def Add_cookies(self, cookies: dict):  # clear all cookies
+    def monitoring_add_cookies(self, cookies: dict):  # add cookies
        browser = self.browser
        browser.add_cookie(cookies)

-    def update_cookies(self, name, cookies: dict):
+    def monitoring_update_cookies(self, name, cookies: dict):
        browser = self.browser
        cookies_list = browser.get_cookies()
        for i in cookies_list:
@@ -364,18 +364,18 @@ class Page_Downloader:
                return
        raise Exception

-    def set_Page_Parser(self, Parser):
-        self.Parser = Parser
-        self.Parser.browser = self.browser
-        self.Parser.url = self.url
-        self.Parser.dir = self.dir
-        self.Parser.log = self.log
+    def set_Page_Parser(self, parser):
+        self.parser = parser
+        self.parser.browser = self.browser
+        self.parser.url = self.url
+        self.parser.dir = self.dir
+        self.parser.log = self.log


-class Page_Parser:
-    def __init__(self, Downloader: Page_Downloader):
-        self.Downloader = Downloader
-        self.Downloader.set_Page_Parser(self)
+class PageParser:
+    def __init__(self, downloader: PageDownloader):
+        self.downloader = downloader
+        self.downloader.set_Page_Parser(self)
        self.func_list = []
        self.func_dict = {}
        self.n = 0
@@ -383,7 +383,7 @@ class Page_Parser:

    def init(self, url=''):
        self.element_dict = {}  # records named elements
-        self.now_url = url
+        self.url_text = url

    def add_base(self, func):  # decorator
        def wrap(browser=None, num=None, name=None, *args, **kwargs) -> bool:
@@ -420,7 +420,7 @@ class Page_Parser:
                i in enumerate(
                    self.func_list.copy())]

-    def find_ID(self, id, not_all=False, **kwargs):
+    def find_id(self, id, not_all=False, **kwargs):
        @self.add_base
        def find(browser, num, name, *args, **kwargs):
            nonlocal self, id
@@ -582,13 +582,13 @@ class Page_Parser:
            f'sent_text:{text}>{element_value}[{index}]',
            action)  # add func

-    def User_Passwd(self, User, Passwd, element_value, index=0, **kwargs):  # credential prompt (user & password)
+    def authentication(self, User, Passwd, element_value, index=0, **kwargs):  # credential prompt (user & password)
        @self.add_base
        def action(*args, **kwargs):
            nonlocal self
            self.element_dict[element_value][index].authenticate(User, Passwd)
        self.add_func(
-            f'User:Passwd:{User};{Passwd}>{element_value}[{index}]',
+            f'Authentication:{User};{Passwd}>{element_value}[{index}]',
            action)  # add func

    def clear(self, element_value, index=0, **kwargs):  # clear text
@@ -834,7 +834,7 @@ class Page_Parser:
                        new.append(str(bs.string).replace('\n', ''))
                    else:
                        new.append(bs.attrs.get(i, ''))
-                data_base.add_DataBase(dataBase_name, new)
+                data_base.add_database(dataBase_name, new)
        self.add_func(
            f'DataBase:{data}<{element_value}[{index}]>{dataBase_name}',
            action)  # add func
@@ -848,7 +848,7 @@ class Page_Parser:
            iter_list = self.listSlicing(index, element_value)
            for bs in iter_list:
                new = regular.findall(data, str(bs))
-                data_base.add_DataBase(dataBase_name, new)
+                data_base.add_database(dataBase_name, new)
        self.add_func(
            f'DataBase:{data}<{element_value}[{index}]>{dataBase_name}',
            action)  # add func
@@ -1017,14 +1017,14 @@ class Page_Parser:
            self.browser.json()]  # parse the requests response as json
        self.add_func(f'to_json', action)  # add func

-    def make_ActionChains(self, **kwargs):  # create an action chain
+    def make_action_chains(self, **kwargs):  # create an action chain
        @self.add_base
        def action(num, name, *args, **kwargs):
            nonlocal self
            self.element_dict[f'{name}[{num}]'] = [ActionChains(self.browser)]
        self.add_func(f'make_ActionChains', action)  # add func

-    def ActionChains_click(self, Chains, element_value, index, **kwargs):  # left click
+    def action_click(self, Chains, element_value, index, **kwargs):  # left click
        @self.add_base
        def action(*args, **kwargs):
            nonlocal self
@@ -1034,7 +1034,7 @@ class Page_Parser:
            f'[{Chains}]click>[{element_value}][{index}]',
            action)  # add func

-    def ActionChains_double_click(self, Chains, element_value, index, **kwargs):  # left double click
+    def action_double_click(self, Chains, element_value, index, **kwargs):  # left double click
        @self.add_base
        def action(*args, **kwargs):
            nonlocal self
@@ -1044,7 +1044,7 @@ class Page_Parser:
            f'[{Chains}]double_click>[{element_value}][{index}]',
            action)  # add func

-    def ActionChains_click_right(self, Chains, element_value, index, **kwargs):  # right click
+    def action_click_right(self, Chains, element_value, index, **kwargs):  # right click
        @self.add_base
        def action(*args, **kwargs):
            nonlocal self
@@ -1054,7 +1054,7 @@ class Page_Parser:
            f'[{Chains}]right_click>[{element_value}][{index}]',
            action)  # add func

-    def ActionChains_click_and_hold(self, Chains, element_value, index, **kwargs):  # hold the left button
+    def action_click_and_hold(self, Chains, element_value, index, **kwargs):  # hold the left button
        @self.add_base
        def action(*args, **kwargs):
            nonlocal self
@@ -1064,7 +1064,7 @@ class Page_Parser:
            f'[{Chains}]click_and_hold>[{element_value}][{index}]',
            action)  # add func

-    def ActionChains_release(self, Chains, element_value, index, **kwargs):  # release the left button
+    def action_release(self, Chains, element_value, index, **kwargs):  # release the left button
        @self.add_base
        def action(*args, **kwargs):
            nonlocal self
@@ -1074,7 +1074,7 @@ class Page_Parser:
            f'[{Chains}]release>[{element_value}][{index}]',
            action)  # add func

-    def ActionChains_drag_and_drop(self, Chains, element_value, index, element_value2, index2, **kwargs):  # drag and drop
+    def action_drag_and_drop(self, Chains, element_value, index, element_value2, index2, **kwargs):  # drag and drop
        @self.add_base
        def action(*args, **kwargs):
            nonlocal self
@@ -1085,7 +1085,7 @@ class Page_Parser:
            f'[{Chains}]drag_and_drop>[{element_value}][{index}]',
            action)  # add func

-    def ActionChains_move(self, Chains, element_value, index, **kwargs):  # move the mouse
+    def action_move(self, Chains, element_value, index, **kwargs):  # move the mouse
        @self.add_base
        def action(*args, **kwargs):
            nonlocal self
@@ -1095,14 +1095,14 @@ class Page_Parser:
            f'[{Chains}]drag_and_drop>[{element_value}][{index}]',
            action)  # add func

-    def Special_keys(self, key: str, is_special_keys):  # decorator
+    def special_keys(self, key: str, is_special_keys):
        if is_special_keys:
            return keys_name_dict.get(key.lower(), key), f'[{key.upper()}]'
        else:
            return key, key

-    def ActionChains_key_down(self, Chains, key, element_value, index, is_special_keys, **kwargs):  # down
-        new_key, key = self.Special_keys(key, is_special_keys)
+    def action_key_down(self, Chains, key, element_value, index, is_special_keys, **kwargs):  # down
+        new_key, key = self.special_keys(key, is_special_keys)
        @self.add_base
        def action(*args, **kwargs):
            nonlocal self
@@ -1112,8 +1112,8 @@ class Page_Parser:
            f'[{Chains}]key_down>{key}:[{element_value}][{index}]',
            action)  # add func

-    def ActionChains_key_up(self, Chains, key, element_value, index, is_special_keys, **kwargs):  # down
-        new_key, key = self.Special_keys(key, is_special_keys)
+    def action_key_up(self, Chains, key, element_value, index, is_special_keys, **kwargs):  # up
+        new_key, key = self.special_keys(key, is_special_keys)
        @self.add_base
        def action(*args, **kwargs):
            nonlocal self
@@ -1124,7 +1124,7 @@ class Page_Parser:
            action)  # add func

    # send keys to a specified element
-    def ActionChains_send_keys_to_element(
+    def action_send_keys_to_element(
            self,
            Chains,
            key,
@@ -1132,7 +1132,7 @@ class Page_Parser:
            index,
            is_special_keys,
            **kwargs):
-        new_key, key = self.Special_keys(key, is_special_keys)
+        new_key, key = self.special_keys(key, is_special_keys)
        @self.add_base
        def action(*args, **kwargs):
            nonlocal self
@@ -1142,15 +1142,15 @@ class Page_Parser:
            f'[{Chains}]sent>{key}:[{element_value}][{index}]',
            action)  # add func

-    def ActionChains_send_keys(self, Chains, key, is_special_keys, **kwargs):  # send keys to the focused element
-        new_key, key = self.Special_keys(key, is_special_keys)
+    def action_send_keys(self, Chains, key, is_special_keys, **kwargs):  # send keys to the focused element
+        new_key, key = self.special_keys(key, is_special_keys)
        @self.add_base
        def action(*args, **kwargs):
            nonlocal self
            self.element_dict[Chains][0].send_keys(new_key)
        self.add_func(f'[{Chains}].sent>{key}', action)  # add func

-    def ActionChains_run(self, Chains, run_time=1, **kwargs):  # perform the chain
+    def action_run(self, Chains, run_time=1, **kwargs):  # perform the chain
        @self.add_base
        def action(*args, **kwargs):
            nonlocal self
@@ -1188,12 +1188,12 @@ class Page_Parser:
            f'switch_to_window>{element_value}[{index}]',
            action)  # add func

-    def Element_interaction(self, update_func=lambda *args: None):  # element interaction
+    def element_interaction(self, update_func=lambda *args: None):  # element interaction
        func_list = self.func_list
        status = None
-        self.log.write(f'{"*"*5}url:{self.now_url}{"*"*5}')
+        self.log.write(f'{"*"*5}url:{self.url_text}{"*" * 5}')

-        def update(func_name):
+        def update_log(func_name):
            nonlocal status, self
            if status:
                success_code = 'Success to run'
@@ -1210,9 +1210,9 @@ class Page_Parser:
            except BaseException:
                value_box.append(f'{i} = {self.element_dict[i]}')
            update_func(func_name, success_code, value_box)  # information update system
-        update('start')
+        update_log('start')
        for func_num in range(len(func_list)):
            func_name = func_list[func_num]
-            update(func_name)
+            update_log(func_name)
            status = self.func_dict[func_name](num=f'{func_num}', name='var')
-        update('Finish')
+        update_log('Finish')
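
For orientation only, not part of the patch: a minimal usage sketch of the renamed API (names follow the + side of the diff). It assumes the working directory 'url_history' already exists, a local chromedriver is installed, and the example URL and time_out value are placeholders.

# Hypothetical wiring of the renamed classes; error handling omitted.
manager = url(dic='url_history', dic_run='url_history')   # url manager
manager.add_url('https://example.com', func='simplify_get', time_out=10)

downloader = PageDownloader(manager, dic='url_history')
parser = PageParser(downloader)                            # registers itself via set_Page_Parser

while not manager.is_finish():
    downloader.start_to_run(func_cookie=lambda cookies: None)  # fetch the next queued url
    parser.element_interaction()                                # replay the queued parse actions
downloader.close()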