Remove invalid sources

qist
2025-08-21 18:53:35 +08:00
parent a4479a4e0f
commit cfdae44950
3 changed files with 38 additions and 10 deletions

View File

@@ -34,7 +34,6 @@
{"key":"drpy_js_优酷","name":"官源 | 优酷[js]","type":3,"api":"./lib/drpy2.min.js","ext":"./js/优酷.js"},
{"key":"drpy_js_厂长资源","name":"影视 | 厂长资源[js]","type":3,"api":"./lib/drpy2.min.js","ext":"./js/厂长资源.js","header":{"User-Agent":"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36 Edg/136.0.0.0"}},
{"key":"drpy_js_农民","name":"影视 | 农民影视[js]","type":3,"api":"./lib/drpy2.min.js","ext":"./js/农民影视.js"},
{"key":"drpy_js_58动漫","name":"动漫 | 58动漫[js]","type":3,"api":"./lib/drpy2.min.js","ext":"./js/58动漫.js"},
{"key":"drpy_js_AGE动漫","name":"动漫 | AGE动漫[js]","type":3,"api":"./lib/drpy2.min.js","ext":"./js/AGE动漫.js"},

View File

@@ -37,8 +37,6 @@
{"key":"drpy_js_优酷","name":"官源 | 优酷[js]","type":3,"api":"./lib/drpy2.min.js","ext":"./js/优酷.js"},
{"key":"drpy_js_厂长资源","name":"影视 | 厂长资源[js]","type":3,"api":"./lib/drpy2.min.js","ext":"./js/厂长资源.js","header":{"User-Agent":"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36 Edg/136.0.0.0"}},
{"key":"drpy_js_农民","name":"影视 | 农民影视[js]","type":3,"api":"./lib/drpy2.min.js","ext":"./js/农民影视.js"},
{"key":"drpy_js_58动漫","name":"动漫 | 58动漫[js]","type":3,"api":"./lib/drpy2.min.js","ext":"./js/58动漫.js"},
{"key":"drpy_js_AGE动漫","name":"动漫 | AGE动漫[js]","type":3,"api":"./lib/drpy2.min.js","ext":"./js/AGE动漫.js"},

View File

@@ -1,15 +1,16 @@
import json
import requests
import re
import sys
import hashlib
import os
# Default jar path and download URL (if a download is needed)
default_jar = "./xiaosa/spider.jar"
# Replace with a real link if the jar should be downloaded automatically; otherwise leave it empty
default_jar_url = "../xiaosa/spider.jar"
# Site keys that should be removed
remove_keys = {"巴士动漫"} # more than one key can be listed, e.g. {"巴士动漫", "电影牛"}
# Save the JSON file (collapse dict arrays onto single lines; empty and scalar arrays stay on one line)
class CompactJSONEncoder(json.JSONEncoder):
    def iterencode(self, o, _one_shot=False):
@@ -32,13 +33,12 @@ class CompactJSONEncoder(json.JSONEncoder):
        return iter([_encode(o)])
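The diff only shows the tail of CompactJSONEncoder, so most of its body is elided here. A minimal sketch of what such an encoder can look like, following the intent stated in the comment above (dicts inside lists collapse to one line, empty and scalar lists stay on one line); this reconstruction is illustrative, not the committed implementation:

import json

class CompactJSONEncoderSketch(json.JSONEncoder):
    """Indent the outer object, but keep each dict inside a list on a single line."""
    def iterencode(self, o, _one_shot=False):
        step = self.indent if isinstance(self.indent, int) else 2

        def _encode(obj, level=0):
            pad = " " * step * level
            child = " " * step * (level + 1)
            if isinstance(obj, dict):
                if not obj:
                    return "{}"
                rows = [f"{child}{json.dumps(k, ensure_ascii=False)}: {_encode(v, level + 1)}"
                        for k, v in obj.items()]
                return "{\n" + ",\n".join(rows) + "\n" + pad + "}"
            if isinstance(obj, list):
                # empty lists and lists of plain scalars stay on one line
                if not obj or all(not isinstance(x, (dict, list)) for x in obj):
                    return json.dumps(obj, ensure_ascii=False)
                # lists that contain dicts: one element per line, each dict collapsed
                rows = [child + (json.dumps(x, ensure_ascii=False) if isinstance(x, dict)
                                 else _encode(x, level + 1))
                        for x in obj]
                return "[\n" + ",\n".join(rows) + "\n" + pad + "]"
            return json.dumps(obj, ensure_ascii=False)

        return iter([_encode(o)])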
def fetch_json(path_or_url):
    if os.path.exists(path_or_url):
        # local file
        with open(path_or_url, "r", encoding="utf-8") as f:
            return json.load(f)
    elif path_or_url.startswith("http://") or path_or_url.startswith("https://"):
        # remote URL
        resp = requests.get(path_or_url)
        resp.raise_for_status()
        return resp.json()
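A short usage sketch of fetch_json (the URL is a placeholder): it returns parsed JSON from an existing local path or from an http(s) URL.

remote_cfg = fetch_json("https://example.com/remote.json")  # placeholder URL
local_cfg = fetch_json("./dianshi.json")                    # read from disk when the path exists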
@@ -53,6 +53,7 @@ def get_md5(filepath):
            md5.update(chunk)
    return md5.hexdigest()
def ensure_jar_with_md5(site):
    if not isinstance(site, dict):
        return
@@ -65,6 +66,18 @@ def ensure_jar_with_md5(site):
        md5_val = get_md5(default_jar_url)
        site["jar"] = f"{default_jar};md5;{md5_val}"
def ensure_xyqhiker_ext_and_jar(site):
    """Fix the ext path of every csp_XYQHiker site and attach the jar md5."""
    if not isinstance(site, dict):
        return
    if site.get("api") == "csp_XYQHiker":
        ext_val = site.get("ext", "")
        if ext_val.startswith("./XYQHiker/"):
            site["ext"] = ext_val.replace("./XYQHiker/", "./xiaosa/XYQHiker/")
        ensure_jar_with_md5(site)
def insert_sites_at_key(base_sites, insert_sites, key_marker):
    for i, item in enumerate(base_sites):
        if item.get("key") == key_marker:
@@ -72,6 +85,12 @@ def insert_sites_at_key(base_sites, insert_sites, key_marker):
print(f"⚠️ 未找到 key 为 {key_marker} 的插入点,追加到末尾")
return base_sites + insert_sites
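The hunk elides the line that actually splices the new sites into the list; a sketch of how such an insert-at-key helper is typically written (the slice expression is an assumption, not taken from this diff):

def insert_sites_at_key_sketch(base_sites, insert_sites, key_marker):
    for i, item in enumerate(base_sites):
        if item.get("key") == key_marker:
            # splice the new sites in right behind the marker entry
            return base_sites[:i + 1] + insert_sites + base_sites[i + 1:]
    print(f"⚠️ Insertion point with key {key_marker} not found; appending at the end")
    return base_sites + insert_sites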
def remove_sites(sites, keys_to_remove):
    """Remove sites whose key is in keys_to_remove from the site list."""
    return [s for s in sites if s.get("key") not in keys_to_remove]
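A small illustration of remove_sites together with the remove_keys set defined at the top (the entries below are hypothetical apart from their keys):

demo = [{"key": "巴士动漫", "name": "动漫 | 巴士动漫"}, {"key": "玩偶", "name": "影视 | 玩偶"}]
kept = remove_sites(demo, remove_keys)
# only the "玩偶" entry survives; anything whose key is in remove_keys is dropped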
if __name__ == "__main__":
    if len(sys.argv) < 3:
        print("Usage: python script.py <remote_json_url> <local_dianshi.json_path>")
@@ -88,10 +107,16 @@ if __name__ == "__main__":
    sites = data.get("sites", [])
    filtered_sites = [s for s in sites if isinstance(s, dict) and "name" in s and "APP" in s["name"]]
    # 3. Add a jar field with md5 for every filtered site
    # 3. Add a jar field with md5 for every APP site
    for site in filtered_sites:
        ensure_jar_with_md5(site)
    print(f"✅ Filtered and updated {len(filtered_sites)} sites containing APP (including the md5 jar field)")
    # 3.1 Process every csp_XYQHiker site
    for site in sites:
        if isinstance(site, dict) and site.get("api") == "csp_XYQHiker":
            ensure_xyqhiker_ext_and_jar(site)
            filtered_sites.append(site)
    print(f"✅ Filtered and updated {len(filtered_sites)} sites (APP + XYQHiker, with jar+md5)")
    # 4. Read the local file
    with open(local_file, "r", encoding="utf-8") as f:
@@ -101,7 +126,13 @@ if __name__ == "__main__":
    dianshi_sites = dianshi.get("sites", [])
    dianshi["sites"] = insert_sites_at_key(dianshi_sites, filtered_sites, "玩偶")
    # 6. Save the merged result
    # 6. Remove the specified sites
    before_count = len(dianshi["sites"])
    dianshi["sites"] = remove_sites(dianshi["sites"], remove_keys)
    after_count = len(dianshi["sites"])
    print(f"✅ Removed {before_count - after_count} specified sites: {', '.join(remove_keys)}")
    # 7. Save the merged result
    output_file = f"{local_file.rsplit('.',1)[0]}_with_app_sites.json"
    with open(output_file, "w", encoding="utf-8") as f:
        json.dump(dianshi, f, ensure_ascii=False, indent=2, cls=CompactJSONEncoder)
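As a follow-up, a hedged sanity check on the output (the file name assumes the script was run with dianshi.json as the local file): it confirms that none of the removed keys survived the merge.

import json

removed = {"巴士动漫"}  # mirrors remove_keys in the script
with open("dianshi_with_app_sites.json", encoding="utf-8") as f:  # assumed output name
    merged = json.load(f)
assert all(s.get("key") not in removed for s in merged.get("sites", []))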