Commit

Add a 更新wiki command; no need to manually download and replace data.db anymore
Cappuccilo committed Sep 8, 2020
1 parent 0d818ed commit 6f2de03
Showing 3 changed files with 51 additions and 16 deletions.
9 changes: 5 additions & 4 deletions README.md
@@ -10,6 +10,7 @@
- **[@bot技能ue] Character skills**: look up a character's skills
- **[@bot专武ue] Character unique equipment**: look up a character's unique equipment
- **[@bot羁绊ue] Character bonds**: look up a character's bond stories
- **更新wiki**: update the wiki data
- **启用wiki**: enable the wiki service
- **禁用wiki**: disable the wiki service

@@ -31,9 +32,9 @@
With that, you can start using the plugin.

The plugin's data comes from the `data.db` file in this folder and has to be refreshed whenever the wiki is updated (I tried pulling data live, but it was far too slow, so the plugin stores it in a database instead). `data.db` is updated from time to time (download it from Releases; I usually refresh it after the Library wiki adds new characters or new unique equipment). If you want to update it yourself, see the next section.
The plugin's data comes from the `data.db` file in this folder. `data.db` is updated from time to time (download it manually from Releases or use the update command; I usually refresh it after the Library wiki adds new characters or new unique equipment). If you want to rebuild it yourself, see the next section.
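In short, the 更新wiki command compares the MD5 of the local `data.db` against the hash published in a remote `version.json`, and downloads a fresh database only when the two differ. The handler added to `wiki/__init__.py` in this commit implements exactly that; the standalone sketch below condenses the flow (the `VERSION_URL`, `local_md5` and `update_wiki_data` names are illustrative stand-ins, not part of the plugin):

```python
import hashlib
import json
import os

import requests

PLUGIN_DIR = os.path.dirname(os.path.abspath(__file__))
VERSION_URL = 'https://example.com/version.json'  # stand-in for the hard-coded download link in wiki/__init__.py


def local_md5(path):
    # Hash data.db in chunks so the whole file never has to sit in memory.
    h = hashlib.md5()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(8192), b''):
            h.update(chunk)
    return h.hexdigest()


def update_wiki_data():
    # version.json carries the three fields the handler relies on: hash, url, content.
    version = json.loads(requests.get(VERSION_URL).content)
    db_path = os.path.join(PLUGIN_DIR, 'data.db')
    if os.path.exists(db_path) and version['hash'] == local_md5(db_path):
        return '已是最新版本,无需更新'
    with open(db_path, 'wb') as db:
        db.write(requests.get(version['url']).content)
    return version['content']  # changelog text published alongside the hash
```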

## Updating the data
## Updating the data manually

> Updating the data on a Windows machine is strongly recommended; it is much faster and easier. The `spider` folder is used only for updating the data. **Do not** drop this folder into any `hoshino` directory; it is standalone.
@@ -49,7 +50,7 @@

> **Do not** double-click the extracted exe; see the next step!
5. Open `run.py` and edit the marked places as the comments instruct (lines 24, 27, and 35), then open `data.py` and edit line 3 as its comment instructs
5. Open `run.py` and edit the marked places as the comments instruct (lines 24, 27, and 38 or 39), then open `data.py` and edit line 3 as its comment instructs (a hypothetical sketch of this kind of edit follows this list)

6. Copy your latest `_pcr_data.py` into the `spider` folder to replace the existing one (so that `spider/_pcr_data.py` contains the id information you want to update)
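
For orientation, the `run.py` edits in step 5 usually amount to pointing the spider at the chromedriver you downloaded in step 4 and at the paths it should write to. `run.py` itself is not part of this commit, so the snippet below is only a hypothetical sketch of that kind of change, not the file's real contents:

```python
# Hypothetical illustration only; the real variable names and line numbers are in the comments of spider/run.py.
from selenium import webdriver

CHROMEDRIVER_PATH = r'C:\tools\chromedriver.exe'  # wherever you unpacked the driver from step 4

driver = webdriver.Chrome(executable_path=CHROMEDRIVER_PATH)  # selenium 3.x style driver path
driver.get('https://example.com/')  # placeholder; run.py points the crawl at the actual wiki pages
driver.quit()
```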

@@ -78,7 +79,7 @@

4. From http://npm.taobao.org/mirrors/chromedriver/ download the driver that most closely matches your Chrome version

5. Open `run.py` and edit the marked places as the comments instruct (lines 24, 27, and 35), then open `data.py` and edit line 3 as its comment instructs
5. Open `run.py` and edit the marked places as the comments instruct (lines 24, 27, and 38 or 39), then open `data.py` and edit line 3 as its comment instructs

6. Copy your latest `_pcr_data.py` into the `spider` folder to replace the existing one (so that `spider/_pcr_data.py` contains the id information you want to update)

46 changes: 34 additions & 12 deletions wiki/__init__.py
@@ -1,16 +1,15 @@
import json
from hoshino import config, Service, priv
from hoshino.typing import CQEvent
from .. import chara
from .data import *

sv_help = '''
sv = Service('wiki', help_='''
[@bot简介ue] 角色简介
[@bot技能ue] 角色技能
[@bot专武ue] 角色专武
[@bot羁绊ue] 角色羁绊
'''.strip()

sv = Service('wiki', help_=sv_help, bundle='pcr查询')
'''.strip(), bundle='pcr查询')

def get_chara(name, types):
    id_ = chara.name2id(name)
@@ -26,14 +25,17 @@ def get_chara(name, types):
        msg = f'没有查询到{name}的wiki数据'
    else:
        msg = f'{c.icon.cqcode}'
        if types == 'introduce':
            msg = msg + get_info(id_)
        elif types == 'skill':
            msg = msg + get_skill(id_)
        elif types == 'uniquei':
            msg = msg + get_uniquei(id_)
        elif types == 'kizuna':
            msg = msg + get_kizuna(id_)
        try:
            if types == 'introduce':
                msg += get_info(id_)
            elif types == 'skill':
                msg += get_skill(id_)
            elif types == 'uniquei':
                msg += get_uniquei(id_)
            elif types == 'kizuna':
                msg += get_kizuna(id_)
        except:
            msg += f'\n暂时没有更新{name}的数据'
    return msg

@sv.on_prefix(('简介','介绍'), only_to_me=True)
@@ -71,3 +73,23 @@ async def kizuna(bot, ev: CQEvent):
        return
    result = get_chara(name,'kizuna')
    await bot.send(ev, result)

@sv.on_fullmatch(('更新wiki'))
async def update_wiki(bot, ev: CQEvent):
    if priv.get_user_priv(ev) < priv.SUPERUSER:
        return
    local_version = get_file_md5()
download_addres = 'https://alphaone-my.sharepoint.cn/personal/yu_vip_tg/_layouts/15/download.aspx?UniqueId=90bc6f10-7493-45fd-90e9-9d3083fcc8c1&Translate=false&tempauth=eyJ0eXAiOiJKV1QiLCJhbGciOiJub25lIn0.eyJhdWQiOiIwMDAwMDAwMy0wMDAwLTBmZjEtY2UwMC0wMDAwMDAwMDAwMDAvYWxwaGFvbmUtbXkuc2hhcmVwb2ludC5jbkAzYjFjODFiMS1kMTU2LTRhZjktYjE2OS1hZTA4MTI4YzAzOTYiLCJpc3MiOiIwMDAwMDAwMy0wMDAwLTBmZjEtY2UwMC0wMDAwMDAwMDAwMDAiLCJuYmYiOiIxNTk5NTMxMzU3IiwiZXhwIjoiMTU5OTUzNDk1NyIsImVuZHBvaW50dXJsIjoiK1M1LzlHRWJwYnFMV1pPN1M4TGcvc09QanRxVWJpZFE3R2tvb3hINnZIOD0iLCJlbmRwb2ludHVybExlbmd0aCI6IjE0MCIsImlzbG9vcGJhY2siOiJUcnVlIiwiY2lkIjoiWkRObFl6WXdNVEF0WTJJMFppMDBNMlUxTFRsbE1qVXRZV0k1TlRBek1XWmpaR1JsIiwidmVyIjoiaGFzaGVkcHJvb2Z0b2tlbiIsInNpdGVpZCI6Ik0ySTRNbU16WkRrdFpXWmlaUzAwWWpnekxUaGxaV1F0Tmprd09HVmxZbVJpTXpsbCIsImFwcF9kaXNwbGF5bmFtZSI6ImRyaXZlLm1pY2hpa2F3YWNoaW4uYXJ0Iiwic2lnbmluX3N0YXRlIjoiW1wia21zaVwiXSIsImFwcGlkIjoiODZkNzEzOGItMmFhYS00NzZlLTk4MDgtZDRkZGQ5NWIzZjNjIiwidGlkIjoiM2IxYzgxYjEtZDE1Ni00YWY5LWIxNjktYWUwODEyOGMwMzk2IiwidXBuIjoieXVAdmlwLnRnIiwicHVpZCI6IjEwMDMzMjMwQzU3N0FDOTgiLCJjYWNoZWtleSI6IjBoLmZ8bWVtYmVyc2hpcHwxMDAzMzIzMGM1NzdhYzk4QGxpdmUuY29tIiwic2NwIjoiYWxsZmlsZXMud3JpdGUiLCJ0dCI6IjIiLCJ1c2VQZXJzaXN0ZW50Q29va2llIjpudWxsfQ.WG5Oc3oyYkdOSVQ1TzI3T2hENW9remhGS1E0bWRtUFpWajhyUURjTXBRQT0&ApiVersion=2.0'
    f = requests.get(download_addres)
    with open(os.path.join(os.path.dirname(__file__), 'version.json'), "wb") as code:
        code.write(f.content)
    with open(os.path.join(os.path.dirname(__file__), 'version.json'), "rb") as file:
        version = json.load(file)
    if version['hash'] == local_version:
        await bot.send(ev, f'已是最新版本,无需更新')
    else:
        db_request = requests.get(version['url'])
        with open(os.path.join(os.path.dirname(__file__), 'data.db'), "wb") as db:
            db.write(db_request.content)
        result = version['content']
        await bot.send(ev, f'{result}')
12 changes: 12 additions & 0 deletions wiki/data.py
@@ -2,6 +2,7 @@
import peewee as pw
import requests
import functools
import hashlib
from PIL import Image
from io import BytesIO
from zhconv import convert
@@ -13,6 +14,17 @@

UNKNOWN = 1000

def get_file_md5():
    myhash = hashlib.md5()
    f = open(os.path.join(os.path.dirname(__file__), 'data.db'), "rb")
    while True:
        b = f.read(8096)
        if not b:
            break
        myhash.update(b)
    f.close()
    return myhash.hexdigest()

def custom_sorted(x,y):
    order = ['必殺技','必殺技+','技能1','專武強化技能1','技能2','EX技能','EX技能+']
    if order.index(x['type']) < order.index(y['type']):
