diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 0b4059a..32dbd96 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -1,5 +1,5 @@ name: GitHub CI -on: +on: push: branches: [ master ] tags: @@ -7,7 +7,7 @@ on: workflow_dispatch: pull_request: -concurrency: +concurrency: group: ${{ github.ref }}-${{ github.workflow }} cancel-in-progress: true @@ -24,36 +24,34 @@ jobs: os: ubuntu-latest - arch: armv7 artifact: subconverter_armv7 - os: ubuntu-latest + os: [self-hosted, linux, ARM] - arch: aarch64 artifact: subconverter_aarch64 - os: ubuntu-latest + os: [self-hosted, linux, ARM64] runs-on: ${{ matrix.os }} name: Linux ${{ matrix.arch }} Build steps: - - name: Checkout base - uses: actions/checkout@v4 - - name: Set up QEMU - uses: docker/setup-qemu-action@v3 - - name: Add commit id into version - if: ${{ !startsWith(github.ref, 'refs/tags/') }} - run: SHA=$(git rev-parse --short HEAD) && sed -i 's/\(v[0-9]\.[0-9]\.[0-9]\)/\1-'"$SHA"'/' src/version.h - - name: Build - run: docker run --rm -v $GITHUB_WORKSPACE:/root/workdir multiarch/alpine:${{ matrix.arch }}-latest-stable /bin/sh -c "apk add bash git nodejs npm && cd /root/workdir && chmod +x scripts/build.alpine.release.sh && bash scripts/build.alpine.release.sh" - - name: Upload - uses: actions/upload-artifact@v4 - with: - name: ${{ matrix.artifact }} - path: subconverter/ - - name: Package Release - if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }} - run: tar czf ${{ matrix.artifact }}.tar.gz subconverter - - name: Draft Release - if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }} - uses: softprops/action-gh-release@v2 - with: - files: ${{ matrix.artifact }}.tar.gz - draft: true + - name: Checkout base + uses: actions/checkout@v4 + - name: Add commit id into version + if: ${{ !startsWith(github.ref, 'refs/tags/') }} + run: SHA=$(git rev-parse --short HEAD) && sed -i 's/\(v[0-9]\.[0-9]\.[0-9]\)/\1-'"$SHA"'/' src/version.h + - name: Build + run: docker run --rm -v $GITHUB_WORKSPACE:/root/workdir multiarch/alpine:${{ matrix.arch }}-latest-stable /bin/sh -c "apk add bash git nodejs npm && cd /root/workdir && chmod +x scripts/build.alpine.release.sh && bash scripts/build.alpine.release.sh" + - name: Upload + uses: actions/upload-artifact@v4 + with: + name: ${{ matrix.artifact }} + path: subconverter/ + - name: Package Release + if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }} + run: tar czf ${{ matrix.artifact }}.tar.gz subconverter + - name: Draft Release + if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }} + uses: softprops/action-gh-release@v2 + with: + files: ${{ matrix.artifact }}.tar.gz + draft: true macos_build: strategy: @@ -68,31 +66,31 @@ jobs: runs-on: ${{ matrix.os }} name: macOS ${{ matrix.arch }} Build steps: - - name: Checkout base - uses: actions/checkout@v4 - - name: Setup Python - uses: actions/setup-python@v5 - with: - python-version: '3.11' - - name: Add commit id into version - if: ${{ !startsWith(github.ref, 'refs/tags/') }} - run: SHA=$(git rev-parse --short HEAD) && sed -i -e 's/\(v[0-9]\.[0-9]\.[0-9]\)/\1-'"$SHA"'/' src/version.h - - name: Build - run: bash scripts/build.macos.release.sh - - name: Upload - uses: actions/upload-artifact@v4 - with: - name: ${{ matrix.artifact }} - path: subconverter/ - - name: Package Release - if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }} - run: tar czf 
${{ matrix.artifact }}.tar.gz subconverter - - name: Draft Release - if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }} - uses: softprops/action-gh-release@v2 - with: - files: ${{ matrix.artifact }}.tar.gz - draft: true + - name: Checkout base + uses: actions/checkout@v4 + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: '3.11' + - name: Add commit id into version + if: ${{ !startsWith(github.ref, 'refs/tags/') }} + run: SHA=$(git rev-parse --short HEAD) && sed -i -e 's/\(v[0-9]\.[0-9]\.[0-9]\)/\1-'"$SHA"'/' src/version.h + - name: Build + run: bash scripts/build.macos.release.sh + - name: Upload + uses: actions/upload-artifact@v4 + with: + name: ${{ matrix.artifact }} + path: subconverter/ + - name: Package Release + if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }} + run: tar czf ${{ matrix.artifact }}.tar.gz subconverter + - name: Draft Release + if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }} + uses: softprops/action-gh-release@v2 + with: + files: ${{ matrix.artifact }}.tar.gz + draft: true windows_build: strategy: @@ -112,39 +110,39 @@ jobs: run: shell: msys2 {0} steps: - - name: Checkout base - uses: actions/checkout@v4 - - name: Setup Python - uses: actions/setup-python@v5 - with: - python-version: '3.11' - - name: Setup Node.js - uses: actions/setup-node@v4 - with: - node-version: '16' - - name: Setup MSYS2 - uses: msys2/setup-msys2@v2 - with: - update: true - install: base-devel git mingw-w64-${{ matrix.env }}-gcc mingw-w64-${{ matrix.env }}-cmake mingw-w64-${{ matrix.env }}-pcre2 patch - msystem: ${{ matrix.msystem }} - path-type: inherit - - name: Add commit id into version - if: ${{ !startsWith(github.ref, 'refs/tags/') }} - run: SHA=$(git rev-parse --short HEAD) && sed -i 's/\(v[0-9]\.[0-9]\.[0-9]\)/\1-'"$SHA"'/' src/version.h - - name: Build - run: bash scripts/build.windows.release.sh - - name: Upload - uses: actions/upload-artifact@v4 - with: - name: ${{ matrix.artifact }} - path: subconverter/ - - name: Package Release - if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }} - run: 7z a ${{ matrix.artifact }}.7z subconverter/ - - name: Draft Release - if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }} - uses: softprops/action-gh-release@v2 - with: - files: ${{ matrix.artifact }}.7z - draft: true \ No newline at end of file + - name: Checkout base + uses: actions/checkout@v4 + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: '3.11' + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: '16' + - name: Setup MSYS2 + uses: msys2/setup-msys2@v2 + with: + update: true + install: base-devel git mingw-w64-${{ matrix.env }}-gcc mingw-w64-${{ matrix.env }}-cmake mingw-w64-${{ matrix.env }}-pcre2 patch + msystem: ${{ matrix.msystem }} + path-type: inherit + - name: Add commit id into version + if: ${{ !startsWith(github.ref, 'refs/tags/') }} + run: SHA=$(git rev-parse --short HEAD) && sed -i 's/\(v[0-9]\.[0-9]\.[0-9]\)/\1-'"$SHA"'/' src/version.h + - name: Build + run: bash scripts/build.windows.release.sh + - name: Upload + uses: actions/upload-artifact@v4 + with: + name: ${{ matrix.artifact }} + path: subconverter/ + - name: Package Release + if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }} + run: 7z a ${{ matrix.artifact }}.7z subconverter/ + - name: Draft Release + if: ${{ 
github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }} + uses: softprops/action-gh-release@v2 + with: + files: ${{ matrix.artifact }}.7z + draft: true diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index caa0646..d45cf09 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -1,16 +1,16 @@ name: Publish Docker Image -on: +on: push: branches: [ master ] tags: - '**' -concurrency: +concurrency: group: ${{ github.ref }}-${{ github.workflow }} cancel-in-progress: true env: - REGISTRY_IMAGE: asdlokj1qpi23/subconverter + REGISTRY_IMAGE: tindy2013/subconverter jobs: build: @@ -22,9 +22,9 @@ jobs: - platform: linux/386 os: ubuntu-latest - platform: linux/arm/v7 - os: ubuntu-latest + os: [self-hosted, linux, ARM] - platform: linux/arm64 - os: ubuntu-latest + os: [self-hosted, linux, ARM64] runs-on: ${{ matrix.os }} name: Build ${{ matrix.platform }} Image steps: @@ -37,8 +37,6 @@ jobs: uses: actions/checkout@v4 with: fetch-depth: 0 - - name: Set up QEMU - uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 diff --git a/.gitignore b/.gitignore index 1331b57..79a0963 100644 --- a/.gitignore +++ b/.gitignore @@ -7,3 +7,5 @@ scripts/quickjspp scripts/yaml-cpp .DS_Store src/.DS_Store + +build \ No newline at end of file diff --git a/base/base/all_base.tpl b/base/base/all_base.tpl index 933e8ca..e67e350 100644 --- a/base/base/all_base.tpl +++ b/base/base/all_base.tpl @@ -5,7 +5,7 @@ socks-port: {{ default(global.clash.socks_port, "7891") }} allow-lan: {{ default(global.clash.allow_lan, "true") }} mode: Rule log-level: {{ default(global.clash.log_level, "info") }} -external-controller: :9090 +external-controller: {{ default(global.clash.external_controller, "127.0.0.1:9090") }} {% if default(request.clash.dns, "") == "1" %} dns: enable: true @@ -378,7 +378,16 @@ enhanced-mode-by-rule = true "rules": [], "auto_detect_interface": true }, - "experimental": {} + "experimental": { + "cache_file": { + "enabled": true, + "store_fakeip": true + }, + "clash_api": { + "external_controller": "{{ default(global.clash.external_controller, "127.0.0.1:9090") }}", + "external_ui": "dashboard" + } + } } {% endif %} diff --git a/base/base/singbox.json b/base/base/singbox.json index 4263bd5..252bf5e 100644 --- a/base/base/singbox.json +++ b/base/base/singbox.json @@ -100,5 +100,14 @@ "rules": [], "auto_detect_interface": true }, - "experimental": {} + "experimental": { + "cache_file": { + "enabled": true, + "store_fakeip": true + }, + "clash_api": { + "external_controller": "127.0.0.1:9090", + "external_ui": "dashboard" + } + } } diff --git a/base/pref.example.ini b/base/pref.example.ini index fe1688f..1b1b55c 100644 --- a/base/pref.example.ini +++ b/base/pref.example.ini @@ -109,13 +109,14 @@ filter_deprecated_nodes=false append_sub_userinfo=true clash_use_new_field_name=true -;Generate style of the proxies section of Clash subscriptions. +;Generate style of the proxies and proxy groups section of Clash subscriptions. 
;Supported styles: block, flow, compact ;Block: - name: name1 Flow: - {name: name1, key: value} Compact: [{name: name1, key: value},{name: name2, key: value}] ; key: value - {name: name2, key: value} ; - name: name2 ; key: value clash_proxies_style=flow +clash_proxy_groups_style=block ;add Clash mode to sing-box rules, and add a GLOBAL group to end of outbounds singbox_add_clash_modes=true @@ -232,6 +233,7 @@ clash.http_port=7890 clash.socks_port=7891 clash.allow_lan=true clash.log_level=info +clash.external_controller=127.0.0.1:9090 singbox.allow_lan=true singbox.mixed_port=2080 diff --git a/base/pref.example.toml b/base/pref.example.toml index a35a687..6448eb9 100644 --- a/base/pref.example.toml +++ b/base/pref.example.toml @@ -117,9 +117,9 @@ match = '^Smart Access expire: (\d+)/(\d+)/(\d+)$' replace = '$1:$2:$3:0:0:0' [node_pref] -#udp_flag = true +#udp_flag = false #tcp_fast_open_flag = false -#skip_cert_verify_flag = true +#skip_cert_verify_flag = false #tls13_flag = false sort_flag = false @@ -135,13 +135,14 @@ filter_deprecated_nodes = false append_sub_userinfo = true clash_use_new_field_name = true -# Generate style of the proxies section of Clash subscriptions. +# Generate style of the proxies and proxy groups section of Clash subscriptions. # Supported styles: block, flow, compact # Block: - name: name1 Flow: - {name: name1, key: value} Compact: [{name: name1, key: value},{name: name2, key: value}] # key: value - {name: name2, key: value} # - name: name2 # key: value clash_proxies_style = "flow" +clash_proxy_groups_style = "block" # add Clash mode to sing-box rules, and add a GLOBAL group to end of outbounds singbox_add_clash_modes = true @@ -243,6 +244,10 @@ value = "true" key = "clash.log_level" value = "info" +[[template.globals]] +key = "clash.external_controller" +value = "127.0.0.1:9090" + [[template.globals]] key = "singbox.allow_lan" value = "true" diff --git a/base/pref.example.yml b/base/pref.example.yml index f532994..5e27275 100644 --- a/base/pref.example.yml +++ b/base/pref.example.yml @@ -50,6 +50,7 @@ node_pref: append_sub_userinfo: true clash_use_new_field_name: true clash_proxies_style: flow + clash_proxy_groups_style: block singbox_add_clash_modes: true rename_node: # - {match: "\\(?((x|X)?(\\d+)(\\.?\\d+)?)((\\s?倍率?)|(x|X))\\)?", replace: "$1x"} @@ -108,9 +109,10 @@ template: - {key: clash.socks_port, value: 7891} - {key: clash.allow_lan, value: true} - {key: clash.log_level, value: info} + - {key: clash.external_controller, value: '127.0.0.1:9090'} - {key: singbox.allow_lan, value: true} - {key: singbox.mixed_port, value: 2080} - + aliases: - {uri: /v, target: /version} - {uri: /clash, target: "/sub?target=clash"} diff --git a/base/snippets/emoji.toml b/base/snippets/emoji.toml index f04cd88..eeb62d5 100644 --- a/base/snippets/emoji.toml +++ b/base/snippets/emoji.toml @@ -23,7 +23,7 @@ match = "(?i:\\bJP[N]?\\d*\\b|Japan|Tokyo|Osaka|Saitama|日本|东京|大阪|埼 emoji = "🇯🇵" [[emoji]] -match = "(?i:\\bK[O]?R\\d*\\b|Korea|(? 
/etc/timezone + # set entry WORKDIR /base CMD subconverter diff --git a/scripts/build.alpine.release.sh b/scripts/build.alpine.release.sh index c159737..304492d 100644 --- a/scripts/build.alpine.release.sh +++ b/scripts/build.alpine.release.sh @@ -4,7 +4,7 @@ set -xe apk add gcc g++ build-base linux-headers cmake make autoconf automake libtool python2 python3 apk add mbedtls-dev mbedtls-static zlib-dev rapidjson-dev zlib-static pcre2-dev -git clone https://github.com/curl/curl --depth=1 --branch curl-8_4_0 +git clone https://github.com/curl/curl --depth=1 --branch curl-8_6_0 cd curl cmake -DCURL_USE_MBEDTLS=ON -DHTTP_ONLY=ON -DBUILD_TESTING=OFF -DBUILD_SHARED_LIBS=OFF -DCMAKE_USE_LIBSSH2=OFF -DBUILD_CURL_EXE=OFF . > /dev/null make install -j2 > /dev/null @@ -34,7 +34,7 @@ cmake -DCMAKE_BUILD_TYPE=Release . make libcron install -j3 cd .. -git clone https://github.com/ToruNiina/toml11 --branch="v3.7.1" --depth=1 +git clone https://github.com/ToruNiina/toml11 --branch="v4.3.0" --depth=1 cd toml11 cmake -DCMAKE_CXX_STANDARD=11 . make install -j4 diff --git a/scripts/build.macos.release.sh b/scripts/build.macos.release.sh index f97b6cc..96ef522 100644 --- a/scripts/build.macos.release.sh +++ b/scripts/build.macos.release.sh @@ -41,7 +41,7 @@ sudo install -d /usr/local/include/date/ sudo install -m644 libcron/externals/date/include/date/* /usr/local/include/date/ cd .. -git clone https://github.com/ToruNiina/toml11 --branch="v3.7.1" --depth=1 +git clone https://github.com/ToruNiina/toml11 --branch="v4.3.0" --depth=1 cd toml11 cmake -DCMAKE_CXX_STANDARD=11 . sudo make install -j6 > /dev/null @@ -63,4 +63,4 @@ chmod +r ./* cd .. mv base subconverter -set +xe \ No newline at end of file +set +xe diff --git a/scripts/build.windows.release.sh b/scripts/build.windows.release.sh index a8ef840..c085989 100644 --- a/scripts/build.windows.release.sh +++ b/scripts/build.windows.release.sh @@ -1,33 +1,9 @@ #!/bin/bash set -xe -# 获取系统架构 -ARCH=$(uname -m) - -if [ "$ARCH" == "x86_64" ]; then - TOOLCHAIN="mingw-w64-x86_64" -else - TOOLCHAIN="mingw-w64-i686" -fi - -pacman -S --needed --noconfirm base-devel ${TOOLCHAIN}-toolchain ${TOOLCHAIN}-cmake ${TOOLCHAIN}-nghttp2 ${TOOLCHAIN}-openssl - -git clone https://github.com/curl/curl --depth=1 --branch curl-8_8_0 +git clone https://github.com/curl/curl --depth=1 --branch curl-8_6_0 cd curl -cmake -DCMAKE_BUILD_TYPE=Release \ - -DCURL_USE_LIBSSH2=OFF \ - -DHTTP_ONLY=ON \ - -DCURL_USE_SCHANNEL=ON \ - -DBUILD_SHARED_LIBS=OFF \ - -DBUILD_CURL_EXE=OFF \ - -DCMAKE_INSTALL_PREFIX="$MINGW_PREFIX" \ - -G "Unix Makefiles" \ - -DHAVE_LIBIDN2=OFF \ - -DCURL_USE_LIBPSL=OFF \ - -DCURL_STATICLIB=ON \ - -DCURL_DISABLE_SOCKETPAIR=ON \ - -DCURL_DISABLE_NONBLOCKING=ON . - +cmake -DCMAKE_BUILD_TYPE=Release -DCURL_USE_LIBSSH2=OFF -DHTTP_ONLY=ON -DCURL_USE_SCHANNEL=ON -DBUILD_SHARED_LIBS=OFF -DBUILD_CURL_EXE=OFF -DCMAKE_INSTALL_PREFIX="$MINGW_PREFIX" -G "Unix Makefiles" -DHAVE_LIBIDN2=OFF -DCURL_USE_LIBPSL=OFF . make install -j4 cd .. @@ -62,7 +38,7 @@ cmake -DRAPIDJSON_BUILD_DOC=OFF -DRAPIDJSON_BUILD_EXAMPLES=OFF -DRAPIDJSON_BUILD make install -j4 cd .. -git clone https://github.com/ToruNiina/toml11 --branch="v3.7.1" --depth=1 +git clone https://github.com/ToruNiina/toml11 --branch "v4.3.0" --depth=1 cd toml11 cmake -DCMAKE_INSTALL_PREFIX="$MINGW_PREFIX" -G "Unix Makefiles" -DCMAKE_CXX_STANDARD=11 . 
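# The toml11 pin moves from v3.7.1 to v4.3.0 here and in the alpine/macOS
# scripts. toml11 v4 reworks its error-reporting API, which is presumably why
# src/config/binding.h in this change switches from toml::syntax_error(msg, loc)
# to toml::serialization_error(toml::format_error(msg, loc, hint), loc).
# Optional sanity check before building (assumes the shallow clone above
# fetched the tag): running `git describe --tags` inside the toml11 checkout
# should print v4.3.0.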
make install -j4 diff --git a/scripts/rules_config.conf b/scripts/rules_config.conf index feb5760..65a3b0d 100644 --- a/scripts/rules_config.conf +++ b/scripts/rules_config.conf @@ -1,23 +1,23 @@ [ACL4SSR] name=ACL4SSR url=https://github.com/ACL4SSR/ACL4SSR -checkout=1dc5c92b0c8ceaaecbc66530c309961f53e52c8c +branch=master match=Clash/*.list|Clash/Ruleset/** [ACL4SSR_config] name=ACL4SSR url=https://github.com/ACL4SSR/ACL4SSR -checkout=1dc5c92b0c8ceaaecbc66530c309961f53e52c8c +branch=master match=Clash/config/** dest=base/config/ keep_tree=false -[DivineEngine] -url=https://github.com/asdlokj1qpi233/Profiles.git -checkout=f6302d855192bd8d0be08319dff3e58ae7c2bd4e -match=Surge/Ruleset/** - [NobyDa] url=https://github.com/NobyDa/Script -checkout=ae4c12f23de8078e02c373c9969b19af28257fcb +branch=master match=Surge/*.list + +[lhie1] +url=https://github.com/dler-io/Rules +branch=main +match=Surge/Surge 3/Provider/** diff --git a/scripts/update_rules.py b/scripts/update_rules.py index 7633c76..f6b4a99 100644 --- a/scripts/update_rules.py +++ b/scripts/update_rules.py @@ -22,10 +22,13 @@ def open_repo(path: str): return None -def update_rules(repo_path, save_path, commit, matches, keep_tree): +def update_rules(repo_path: str, save_path: str, matches: list[str], keep_tree: bool): os.makedirs(save_path, exist_ok=True) for pattern in matches: files = glob.glob(os.path.join(repo_path, pattern), recursive=True) + if len(files) == 0: + logging.warn(f"no files found for pattern {pattern}") + continue for file in files: if os.path.isdir(file): continue @@ -51,12 +54,13 @@ def main(): for section in config.sections(): repo = config.get(section, "name", fallback=section) url = config.get(section, "url") - commit = config.get(section, "checkout") + commit = config.get(section, "commit", fallback=None) + branch = config.get(section, "branch", fallback=None) matches = config.get(section, "match").split("|") save_path = config.get(section, "dest", fallback=f"base/rules/{repo}") keep_tree = config.getboolean(section, "keep_tree", fallback=True) - logging.info(f"reading files from url {url} with commit {commit} and matches {matches}, save to {save_path} keep_tree {keep_tree}") + logging.info(f"reading files from url {url}, matches {matches}, save to {save_path} keep_tree {keep_tree}") repo_path = os.path.join("./tmp/repo/", repo) @@ -67,8 +71,21 @@ def main(): else: logging.info(f"repo {repo_path} exists") - r.git.checkout(commit) - update_rules(repo_path, save_path, commit, matches, keep_tree) + try: + if commit is not None: + logging.info(f"checking out to commit {commit}") + r.git.checkout(commit) + elif branch is not None: + logging.info(f"checking out to branch {branch}") + r.git.checkout(branch) + else: + logging.info(f"checking out to default branch") + r.active_branch.checkout() + except Exception as e: + logging.error(f"checkout failed {e}") + continue + + update_rules(repo_path, save_path, matches, keep_tree) shutil.rmtree("./tmp", ignore_errors=True) diff --git a/src/config/binding.h b/src/config/binding.h index 7741108..e069869 100644 --- a/src/config/binding.h +++ b/src/config/binding.h @@ -17,9 +17,9 @@ namespace toml static ProxyGroupConfig from_toml(const value& v) { ProxyGroupConfig conf; - conf.Name = toml::find(v, "name"); - String type = toml::find(v, "type"); - String strategy = toml::find_or(v, "strategy", ""); + conf.Name = find(v, "name"); + String type = find(v, "type"); + String strategy = find_or(v, "strategy", ""); switch(hash_(type)) { case "select"_hash: @@ -27,18 +27,18 @@ namespace 
toml break; case "url-test"_hash: conf.Type = ProxyGroupType::URLTest; - conf.Url = toml::find(v, "url"); - conf.Interval = toml::find(v, "interval"); - conf.Tolerance = toml::find_or(v, "tolerance", 0); + conf.Url = find(v, "url"); + conf.Interval = find(v, "interval"); + conf.Tolerance = find_or(v, "tolerance", 0); if(v.contains("lazy")) - conf.Lazy = toml::find_or(v, "lazy", false); + conf.Lazy = find_or(v, "lazy", false); if(v.contains("evaluate-before-use")) - conf.EvaluateBeforeUse = toml::find_or(v, "evaluate-before-use", conf.EvaluateBeforeUse.get()); + conf.EvaluateBeforeUse = find_or(v, "evaluate-before-use", conf.EvaluateBeforeUse.get()); break; case "load-balance"_hash: conf.Type = ProxyGroupType::LoadBalance; - conf.Url = toml::find(v, "url"); - conf.Interval = toml::find(v, "interval"); + conf.Url = find(v, "url"); + conf.Interval = find(v, "interval"); switch(hash_(strategy)) { case "consistent-hashing"_hash: @@ -49,14 +49,14 @@ namespace toml break; } if(v.contains("persistent")) - conf.Persistent = toml::find_or(v, "persistent", conf.Persistent.get()); + conf.Persistent = find_or(v, "persistent", conf.Persistent.get()); break; case "fallback"_hash: conf.Type = ProxyGroupType::Fallback; - conf.Url = toml::find(v, "url"); - conf.Interval = toml::find(v, "interval"); + conf.Url = find(v, "url"); + conf.Interval = find(v, "interval"); if(v.contains("evaluate-before-use")) - conf.EvaluateBeforeUse = toml::find_or(v, "evaluate-before-use", conf.EvaluateBeforeUse.get()); + conf.EvaluateBeforeUse = find_or(v, "evaluate-before-use", conf.EvaluateBeforeUse.get()); break; case "relay"_hash: conf.Type = ProxyGroupType::Relay; @@ -64,16 +64,26 @@ namespace toml case "ssid"_hash: conf.Type = ProxyGroupType::SSID; break; + case "smart"_hash: + conf.Type = ProxyGroupType::Smart; + conf.Url = find(v, "url"); + conf.Interval = find(v, "interval"); + conf.Tolerance = find_or(v, "tolerance", 0); + if(v.contains("lazy")) + conf.Lazy = find_or(v, "lazy", false); + if(v.contains("evaluate-before-use")) + conf.EvaluateBeforeUse = find_or(v, "evaluate-before-use", conf.EvaluateBeforeUse.get()); + break; default: - throw toml::syntax_error("Proxy Group has incorrect type, should be one of following:\n select, url-test, load-balance, fallback, relay, ssid", v.at("type").location()); + throw serialization_error(format_error("Proxy Group has unsupported type!", v.at("type").location(), "should be one of following: select, url-test, load-balance, fallback, relay, ssid"), v.at("type").location()); } - conf.Timeout = toml::find_or(v, "timeout", 5); - conf.Proxies = toml::find_or(v, "rule", {}); - conf.UsingProvider = toml::find_or(v, "use", {}); + conf.Timeout = find_or(v, "timeout", 5); + conf.Proxies = find_or(v, "rule", {}); + conf.UsingProvider = find_or(v, "use", {}); if(conf.Proxies.empty() && conf.UsingProvider.empty()) - throw toml::syntax_error("Proxy Group must contains at least one of proxy match rule or provider", v.location()); + throw serialization_error(format_error("Proxy Group must contains at least one of proxy match rule or provider!", v.location(), "here"), v.location()); if(v.contains("disable-udp")) - conf.DisableUdp = toml::find_or(v, "disable-udp", conf.DisableUdp.get()); + conf.DisableUdp = find_or(v, "disable-udp", conf.DisableUdp.get()); return conf; } }; @@ -84,8 +94,8 @@ namespace toml static RulesetConfig from_toml(const value& v) { RulesetConfig conf; - conf.Group = toml::find(v, "group"); - String type = toml::find_or(v, "type", "surge-ruleset"); + conf.Group = find(v, 
"group"); + String type = find_or(v, "type", "surge-ruleset"); switch(hash_(type)) { /* @@ -122,10 +132,10 @@ namespace toml conf.Url = type + ":"; break; default: - throw toml::syntax_error("Ruleset has incorrect type, should be one of following:\n surge-ruleset, quantumultx, clash-domain, clash-ipcidr, clash-classic", v.at("type").location()); + throw serialization_error(format_error("Ruleset has unsupported type!", v.at("type").location(), "should be one of following: surge-ruleset, quantumultx, clash-domain, clash-ipcidr, clash-classic"), v.at("type").location()); } - conf.Url += toml::find(v, "ruleset"); - conf.Interval = toml::find_or(v, "interval", 86400); + conf.Url += find(v, "ruleset"); + conf.Interval = find_or(v, "interval", 86400); return conf; } }; @@ -138,14 +148,14 @@ namespace toml RegexMatchConfig conf; if(v.contains("script")) { - conf.Script = toml::find(v, "script"); + conf.Script = find(v, "script"); return conf; } - conf.Match = toml::find(v, "match"); + conf.Match = find(v, "match"); if(v.contains("emoji")) - conf.Replace = toml::find(v, "emoji"); + conf.Replace = find(v, "emoji"); else - conf.Replace = toml::find(v, "replace"); + conf.Replace = find(v, "replace"); return conf; } }; @@ -156,10 +166,10 @@ namespace toml static CronTaskConfig from_toml(const value& v) { CronTaskConfig conf; - conf.Name = toml::find(v, "name"); - conf.CronExp = toml::find(v, "cronexp"); - conf.Path = toml::find(v, "path"); - conf.Timeout = toml::find_or(v, "timeout", 0); + conf.Name = find(v, "name"); + conf.CronExp = find(v, "cronexp"); + conf.Path = find(v, "path"); + conf.Timeout = find_or(v, "timeout", 0); return conf; } }; @@ -220,6 +230,9 @@ namespace INIBinding case "ssid"_hash: conf.Type = ProxyGroupType::SSID; break; + case "smart"_hash: + conf.Type = ProxyGroupType::Smart; + break; default: continue; } diff --git a/src/config/proxygroup.h b/src/config/proxygroup.h index 4300e29..07dbddc 100644 --- a/src/config/proxygroup.h +++ b/src/config/proxygroup.h @@ -3,17 +3,18 @@ #include "def.h" -enum ProxyGroupType +enum class ProxyGroupType { Select, URLTest, Fallback, LoadBalance, Relay, - SSID + SSID, + Smart }; -enum BalanceStrategy +enum class BalanceStrategy { ConsistentHashing, RoundRobin @@ -45,6 +46,7 @@ struct ProxyGroupConfig case ProxyGroupType::Fallback: return "fallback"; case ProxyGroupType::Relay: return "relay"; case ProxyGroupType::SSID: return "ssid"; + case ProxyGroupType::Smart: return "smart"; } return ""; } diff --git a/src/config/ruleset.h b/src/config/ruleset.h index 4a29a54..c9551eb 100644 --- a/src/config/ruleset.h +++ b/src/config/ruleset.h @@ -3,7 +3,7 @@ #include "def.h" -enum RulesetType +enum class RulesetType { SurgeRuleset, QuantumultX, diff --git a/src/generator/config/subexport.cpp b/src/generator/config/subexport.cpp index 27910b8..f9a8a9e 100644 --- a/src/generator/config/subexport.cpp +++ b/src/generator/config/subexport.cpp @@ -161,7 +161,8 @@ void processRemark(std::string &remark, const string_array &remarks_list, bool p } std::string tempRemark = remark; int cnt = 2; - while (std::find(remarks_list.cbegin(), remarks_list.cend(), tempRemark) != remarks_list.cend()) { + while(std::find(remarks_list.cbegin(), remarks_list.cend(), tempRemark) != remarks_list.cend()) + { tempRemark = remark + " " + std::to_string(cnt); cnt++; } @@ -218,6 +219,30 @@ proxyToClash(std::vector &nodes, YAML::Node &yamlnode, const ProxyGroupCo case "compact"_hash: compact = true; break; + bool proxy_block = false, proxy_compact = false, group_block = false, 
group_compact = false; + switch(hash_(ext.clash_proxies_style)) + { + case "block"_hash: + proxy_block = true; + break; + default: + case "flow"_hash: + break; + case "compact"_hash: + proxy_compact = true; + break; + } + switch(hash_(ext.clash_proxy_groups_style)) + { + case "block"_hash: + group_block = true; + break; + default: + case "flow"_hash: + break; + case "compact"_hash: + group_compact = true; + break; } for (Proxy &x: nodes) { @@ -644,7 +669,10 @@ proxyToClash(std::vector &nodes, YAML::Node &yamlnode, const ProxyGroupCo string_array filtered_nodelist; singlegroup["name"] = x.Name; - singlegroup["type"] = x.TypeStr(); + if (x.Type == ProxyGroupType::Smart) + singlegroup["type"] = "url-test"; + else + singlegroup["type"] = x.TypeStr(); switch (x.Type) { case ProxyGroupType::Select: @@ -666,6 +694,29 @@ proxyToClash(std::vector &nodes, YAML::Node &yamlnode, const ProxyGroupCo break; default: continue; + switch(x.Type) + { + case ProxyGroupType::Select: + case ProxyGroupType::Relay: + break; + case ProxyGroupType::LoadBalance: + singlegroup["strategy"] = x.StrategyStr(); + [[fallthrough]]; + case ProxyGroupType::Smart: + [[fallthrough]]; + case ProxyGroupType::URLTest: + if(!x.Lazy.is_undef()) + singlegroup["lazy"] = x.Lazy.get(); + [[fallthrough]]; + case ProxyGroupType::Fallback: + singlegroup["url"] = x.Url; + if(x.Interval > 0) + singlegroup["interval"] = x.Interval; + if(x.Tolerance > 0) + singlegroup["tolerance"] = x.Tolerance; + break; + default: + continue; } if (!x.DisableUdp.is_undef()) singlegroup["disable-udp"] = x.DisableUdp.get(); @@ -681,7 +732,10 @@ proxyToClash(std::vector &nodes, YAML::Node &yamlnode, const ProxyGroupCo } if (!filtered_nodelist.empty()) singlegroup["proxies"] = filtered_nodelist; - //singlegroup.SetStyle(YAML::EmitterStyle::Flow); + if(group_block) + singlegroup.SetStyle(YAML::EmitterStyle::Block); + else + singlegroup.SetStyle(YAML::EmitterStyle::Flow); bool replace_flag = false; for (auto &&original_group: original_groups) { @@ -694,6 +748,8 @@ proxyToClash(std::vector &nodes, YAML::Node &yamlnode, const ProxyGroupCo if (!replace_flag) original_groups.push_back(singlegroup); } + if(group_compact) + original_groups.SetStyle(YAML::EmitterStyle::Flow); if (ext.clash_new_field_name) yamlnode["proxy-groups"] = original_groups; @@ -975,11 +1031,18 @@ std::string proxyToSurge(std::vector &nodes, const std::string &base_conf proxy += ", version=" + std::to_string(x.SnellVersion); break; case ProxyType::Hysteria2: - if (surge_ver < 4 && surge_ver != -3) + if(surge_ver < 4) continue; - proxy = "hysteria2, " + hostname + ", " + port + ", password=" + password; - if (!scv.is_undef()) - proxy += ", skip-cert-verify=" + scv.get_str(); + proxy = "hysteria, " + hostname + ", " + port + ", password=" + password; + if(x.DownSpeed) + proxy += ", download-bandwidth=" + x.DownSpeed; + + if(!scv.is_undef()) + proxy += ",skip-cert-verify=" + std::string(scv.get() ? 
"true" : "false"); + if(!x.Fingerprint.empty()) + proxy += ",server-cert-fingerprint-sha256=" + x.Fingerprint; + if(!x.SNI.empty()) + proxy += ",sni=" + x.SNI; break; case ProxyType::WireGuard: if (surge_ver < 4 && surge_ver != -3) @@ -1011,7 +1074,8 @@ std::string proxyToSurge(std::vector &nodes, const std::string &base_conf proxy += ", tfo=" + tfo.get_str(); if (!udp.is_undef()) proxy += ", udp-relay=" + udp.get_str(); - + if (underlying_proxy != "") + proxy += ", underlying-proxy=" + underlying_proxy; if (ext.nodelist) output_nodelist += x.Remark + " = " + proxy + "\n"; else { @@ -1030,22 +1094,24 @@ std::string proxyToSurge(std::vector &nodes, const std::string &base_conf string_array filtered_nodelist; std::string group; - switch (x.Type) { - case ProxyGroupType::Select: - case ProxyGroupType::URLTest: - case ProxyGroupType::Fallback: - break; - case ProxyGroupType::LoadBalance: - if (surge_ver < 1 && surge_ver != -3) - continue; - break; - case ProxyGroupType::SSID: - group = x.TypeStr() + ",default=" + x.Proxies[0] + ","; + switch(x.Type) + { + case ProxyGroupType::Select: + case ProxyGroupType::Smart: + case ProxyGroupType::URLTest: + case ProxyGroupType::Fallback: + break; + case ProxyGroupType::LoadBalance: + if(surge_ver < 1 && surge_ver != -3) + continue; + break; + case ProxyGroupType::SSID: + group = x.TypeStr() + ",default=" + x.Proxies[0] + ","; group += join(x.Proxies.begin() + 1, x.Proxies.end(), ","); ini.set("{NONAME}", x.Name + " = " + group); //insert order - continue; - default: - continue; + continue; + default: + continue; } for (const auto &y: x.Proxies) @@ -1693,7 +1759,8 @@ void proxyToQuanX(std::vector &nodes, INIReader &ini, std::vector 0) singlegroup += ", tolerance=" + std::to_string(x.Tolerance); diff --git a/src/generator/config/subexport.h b/src/generator/config/subexport.h index 7d1833e..f4ae6e7 100644 --- a/src/generator/config/subexport.h +++ b/src/generator/config/subexport.h @@ -40,6 +40,7 @@ struct extra_settings bool clash_classical_ruleset = false; std::string sort_script; std::string clash_proxies_style = "flow"; + std::string clash_proxy_groups_style = "flow"; bool authorized = false; extra_settings() = default; diff --git a/src/generator/template/templates.cpp b/src/generator/template/templates.cpp index 7bbea8d..f9917e3 100644 --- a/src/generator/template/templates.cpp +++ b/src/generator/template/templates.cpp @@ -357,10 +357,10 @@ int renderClashScript(YAML::Node &base_rule, std::vector &rulese if(x.rule_type == RULESET_CLASH_IPCIDR || x.rule_type == RULESET_CLASH_DOMAIN || x.rule_type == RULESET_CLASH_CLASSICAL) { //rule_name = std::to_string(hash_(rule_group + rule_path)); - rule_name = old_rule_name = findFileName(rule_path); + rule_name = old_rule_name = urlDecode(findFileName(rule_path)); int idx = 2; while(std::find(groups.begin(), groups.end(), rule_name) != groups.end()) - rule_name = old_rule_name + "_" + std::to_string(idx++); + rule_name = old_rule_name + " " + std::to_string(idx++); names[rule_name] = rule_group; urls[rule_name] = "*" + rule_path; rule_type[rule_name] = x.rule_type; @@ -386,10 +386,10 @@ int renderClashScript(YAML::Node &base_rule, std::vector &rulese if(fileExist(rule_path, true) || isLink(rule_path)) { //rule_name = std::to_string(hash_(rule_group + rule_path)); - rule_name = old_rule_name = findFileName(rule_path); + rule_name = old_rule_name = urlDecode(findFileName(rule_path)); int idx = 2; while(std::find(groups.begin(), groups.end(), rule_name) != groups.end()) - rule_name = old_rule_name + "_" + 
std::to_string(idx++); + rule_name = old_rule_name + " " + std::to_string(idx++); names[rule_name] = rule_group; urls[rule_name] = rule_path_typed; rule_type[rule_name] = x.rule_type; @@ -436,9 +436,9 @@ int renderClashScript(YAML::Node &base_rule, std::vector &rulese if(vArray.size() < 2) continue; if(keywords.find(rule_name) == keywords.end()) - keywords[rule_name] = "\"" + vArray[1] + "\""; + keywords[rule_name] = "\"" + trim(vArray[1]) + "\""; else - keywords[rule_name] += ",\"" + vArray[1] + "\""; + keywords[rule_name] += ",\"" + trim(vArray[1]) + "\""; } else { @@ -449,7 +449,7 @@ int renderClashScript(YAML::Node &base_rule, std::vector &rulese } else { - strLine = vArray[0] + "," + vArray[1] + "," + rule_group; + strLine = vArray[0] + "," + trim(vArray[1]) + "," + rule_group; if(vArray.size() > 2) strLine += "," + vArray[2]; } @@ -466,14 +466,16 @@ int renderClashScript(YAML::Node &base_rule, std::vector &rulese } } if(has_domain[rule_name] && !script) - rules.emplace_back("RULE-SET," + rule_name + "_domain," + rule_group); + rules.emplace_back("RULE-SET," + rule_name + " (Domain)," + rule_group); if(has_ipcidr[rule_name] && !script) { if(has_no_resolve) - rules.emplace_back("RULE-SET," + rule_name + "_ipcidr," + rule_group + ",no-resolve"); + rules.emplace_back("RULE-SET," + rule_name + " (IP-CIDR)," + rule_group + ",no-resolve"); else - rules.emplace_back("RULE-SET," + rule_name + "_ipcidr," + rule_group); + rules.emplace_back("RULE-SET," + rule_name + " (IP-CIDR)," + rule_group); } + if(!has_domain[rule_name] && !has_ipcidr[rule_name] && !script) + rules.emplace_back("RULE-SET," + rule_name + "," + rule_group); if(std::find(groups.begin(), groups.end(), rule_name) == groups.end()) groups.emplace_back(rule_name); } @@ -488,14 +490,14 @@ int renderClashScript(YAML::Node &base_rule, std::vector &rulese { std::string yaml_key = x; if(rule_type[x] != RULESET_CLASH_DOMAIN) - yaml_key += "_domain"; + yaml_key += " (Domain)"; base_rule["rule-providers"][yaml_key]["type"] = "http"; base_rule["rule-providers"][yaml_key]["behavior"] = "domain"; if(url[0] == '*') base_rule["rule-providers"][yaml_key]["url"] = url.substr(1); else base_rule["rule-providers"][yaml_key]["url"] = remote_path_prefix + "/getruleset?type=3&url=" + urlSafeBase64Encode(url); - base_rule["rule-providers"][yaml_key]["path"] = "./providers/rule-provider_" + yaml_key + ".yaml"; + base_rule["rule-providers"][yaml_key]["path"] = "./providers/" + std::to_string(hash_(url)) + "_domain.yaml"; if(interval) base_rule["rule-providers"][yaml_key]["interval"] = interval; } @@ -503,14 +505,14 @@ int renderClashScript(YAML::Node &base_rule, std::vector &rulese { std::string yaml_key = x; if(rule_type[x] != RULESET_CLASH_IPCIDR) - yaml_key += "_ipcidr"; + yaml_key += " (IP-CIDR)"; base_rule["rule-providers"][yaml_key]["type"] = "http"; base_rule["rule-providers"][yaml_key]["behavior"] = "ipcidr"; if(url[0] == '*') base_rule["rule-providers"][yaml_key]["url"] = url.substr(1); else base_rule["rule-providers"][yaml_key]["url"] = remote_path_prefix + "/getruleset?type=4&url=" + urlSafeBase64Encode(url); - base_rule["rule-providers"][yaml_key]["path"] = "./providers/rule-provider_" + yaml_key + ".yaml"; + base_rule["rule-providers"][yaml_key]["path"] = "./providers/" + std::to_string(hash_(url)) + "_ipcidr.yaml"; if(interval) base_rule["rule-providers"][yaml_key]["interval"] = interval; } @@ -523,7 +525,7 @@ int renderClashScript(YAML::Node &base_rule, std::vector &rulese base_rule["rule-providers"][yaml_key]["url"] = url.substr(1); else 
base_rule["rule-providers"][yaml_key]["url"] = remote_path_prefix + "/getruleset?type=6&url=" + urlSafeBase64Encode(url); - base_rule["rule-providers"][yaml_key]["path"] = "./providers/rule-provider_" + yaml_key + ".yaml"; + base_rule["rule-providers"][yaml_key]["path"] = "./providers/" + std::to_string(hash_(url)) + ".yaml"; if(interval) base_rule["rule-providers"][yaml_key]["interval"] = interval; } diff --git a/src/handler/interfaces.cpp b/src/handler/interfaces.cpp index 19b02c3..8771a81 100644 --- a/src/handler/interfaces.cpp +++ b/src/handler/interfaces.cpp @@ -58,31 +58,32 @@ struct UAProfile { }; const std::vector UAMatchList = { - {"ClashForAndroid", "\\/([0-9.]+)", "2.0", "clash", true}, - {"ClashForAndroid", "\\/([0-9.]+)R", "", "clashr", false}, - {"ClashForAndroid", "", "", "clash", false}, - {"ClashforWindows", "\\/([0-9.]+)", "0.11", "clash", true}, - {"ClashforWindows", "", "", "clash", false}, - {"ClashX Pro", "", "", "clash", true}, - {"ClashX", "\\/([0-9.]+)", "0.13", "clash", true}, - {"Clash", "", "", "clash", true}, - {"Kitsunebi", "", "", "v2ray"}, - {"Loon", "", "", "loon"}, - {"Pharos", "", "", "mixed"}, - {"Potatso", "", "", "mixed"}, - {"Quantumult%20X", "", "", "quanx"}, - {"Quantumult", "", "", "quan"}, - {"Qv2ray", "", "", "v2ray"}, - {"Shadowrocket", "", "", "mixed"}, - {"Surfboard", "", "", "surfboard"}, - {"Surge", "\\/([0-9.]+).*x86", "906", "surge", false, 4}, /// Surge for Mac (supports VMess) - {"Surge", "\\/([0-9.]+).*x86", "368", "surge", false, 3}, /// Surge for Mac (supports new rule types and Shadowsocks without plugin) - {"Surge", "\\/([0-9.]+)", "1419", "surge", false, 4}, /// Surge iOS 4 (first version) - {"Surge", "\\/([0-9.]+)", "900", "surge", false, 3}, /// Surge iOS 3 (approx) - {"Surge", "", "", "surge", false, 2}, /// any version of Surge as fallback - {"Trojan-Qt5", "", "", "trojan"}, - {"V2rayU", "", "", "v2ray"}, - {"V2RayX", "", "", "v2ray"} + {"ClashForAndroid","\\/([0-9.]+)","2.0","clash",true}, + {"ClashForAndroid","\\/([0-9.]+)R","","clashr",false}, + {"ClashForAndroid","","","clash",false}, + {"ClashforWindows","\\/([0-9.]+)","0.11","clash",true}, + {"ClashforWindows","","","clash",false}, + {"clash-verge","","","clash",true}, + {"ClashX Pro","","","clash",true}, + {"ClashX","\\/([0-9.]+)","0.13","clash",true}, + {"Clash","","","clash",true}, + {"Kitsunebi","","","v2ray"}, + {"Loon","","","loon"}, + {"Pharos","","","mixed"}, + {"Potatso","","","mixed"}, + {"Quantumult%20X","","","quanx"}, + {"Quantumult","","","quan"}, + {"Qv2ray","","","v2ray"}, + {"Shadowrocket","","","mixed"}, + {"Surfboard","","","surfboard"}, + {"Surge","\\/([0-9.]+).*x86","906","surge",false,4}, /// Surge for Mac (supports VMess) + {"Surge","\\/([0-9.]+).*x86","368","surge",false,3}, /// Surge for Mac (supports new rule types and Shadowsocks without plugin) + {"Surge","\\/([0-9.]+)","1419","surge",false,4}, /// Surge iOS 4 (first version) + {"Surge","\\/([0-9.]+)","900","surge",false,3}, /// Surge iOS 3 (approx) + {"Surge","","","surge",false,2}, /// any version of Surge as fallback + {"Trojan-Qt5","","","trojan"}, + {"V2rayU","","","v2ray"}, + {"V2RayX","","","v2ray"} }; bool verGreaterEqual(const std::string &src_ver, const std::string &target_ver) { @@ -424,6 +425,7 @@ std::string subconverter(RESPONSE_CALLBACK_ARGS) { argExpandRulesets.define(true); ext.clash_proxies_style = global.clashProxiesStyle; + ext.clash_proxy_groups_style = global.clashProxyGroupsStyle; /// read preference from argument, assign global var if not in argument 
ext.tfo.define(argTFO).define(global.TFOFlag); @@ -433,13 +435,13 @@ std::string subconverter(RESPONSE_CALLBACK_ARGS) { ext.sort_flag = argSort.get(global.enableSort); argUseSortScript.define(!global.sortScript.empty()); - if (ext.sort_flag && argUseSortScript) + if(ext.sort_flag && argUseSortScript) ext.sort_script = global.sortScript; ext.filter_deprecated = argFilterDeprecated.get(global.filterDeprecated); ext.clash_new_field_name = argClashNewField.get(global.clashUseNewField); ext.clash_script = argGenClashScript.get(); ext.clash_classical_ruleset = argGenClassicalRuleProvider.get(); - if (!argExpandRulesets) + if(!argExpandRulesets) ext.clash_new_field_name = true; else ext.clash_script = false; @@ -449,21 +451,25 @@ std::string subconverter(RESPONSE_CALLBACK_ARGS) { ext.quanx_dev_id = !argDeviceID.empty() ? argDeviceID : global.quanXDevID; ext.enable_rule_generator = global.enableRuleGen; ext.overwrite_original_rules = global.overwriteOriginalRules; - if (!argExpandRulesets) + if(!argExpandRulesets) ext.managed_config_prefix = global.managedConfigPrefix; /// load external configuration - if (argExternalConfig.empty()) + if(argExternalConfig.empty()) argExternalConfig = global.defaultExtConfig; - if (!argExternalConfig.empty()) { + if(!argExternalConfig.empty()) + { //std::cerr<<"External configuration file provided. Loading...\n"; writeLog(0, "External configuration file provided. Loading...", LOG_LEVEL_INFO); ExternalConfig extconf; extconf.tpl_args = &tpl_args; - if (loadExternalConfig(argExternalConfig, extconf) == 0) { - if (!ext.nodelist) { + if(loadExternalConfig(argExternalConfig, extconf) == 0) + { + if(!ext.nodelist) + { checkExternalBase(extconf.sssub_rule_base, lSSSubBase); - if (!lSimpleSubscription) { + if(!lSimpleSubscription) + { checkExternalBase(extconf.clash_rule_base, lClashBase); checkExternalBase(extconf.surge_rule_base, lSurgeBase); checkExternalBase(extconf.surfboard_rule_base, lSurfboardBase); @@ -473,71 +479,80 @@ std::string subconverter(RESPONSE_CALLBACK_ARGS) { checkExternalBase(extconf.loon_rule_base, lLoonBase); checkExternalBase(extconf.singbox_rule_base, lSingBoxBase); - if (!extconf.surge_ruleset.empty()) + if(!extconf.surge_ruleset.empty()) lCustomRulesets = extconf.surge_ruleset; - if (!extconf.custom_proxy_group.empty()) + if(!extconf.custom_proxy_group.empty()) lCustomProxyGroups = extconf.custom_proxy_group; ext.enable_rule_generator = extconf.enable_rule_generator; ext.overwrite_original_rules = extconf.overwrite_original_rules; } } - if (!extconf.rename.empty()) + if(!extconf.rename.empty()) ext.rename_array = extconf.rename; - if (!extconf.emoji.empty()) + if(!extconf.emoji.empty()) ext.emoji_array = extconf.emoji; - if (!extconf.include.empty()) + if(!extconf.include.empty()) lIncludeRemarks = extconf.include; - if (!extconf.exclude.empty()) + if(!extconf.exclude.empty()) lExcludeRemarks = extconf.exclude; argAddEmoji.define(extconf.add_emoji); argRemoveEmoji.define(extconf.remove_old_emoji); } - } else { - if (!lSimpleSubscription) { + } + else + { + if(!lSimpleSubscription) + { /// loading custom groups - if (!argCustomGroups.empty() && !ext.nodelist) { + if(!argCustomGroups.empty() && !ext.nodelist) + { string_array vArray = split(argCustomGroups, "@"); lCustomProxyGroups = INIBinding::from::from_ini(vArray); } /// loading custom rulesets - if (!argCustomRulesets.empty() && !ext.nodelist) { + if(!argCustomRulesets.empty() && !ext.nodelist) + { string_array vArray = split(argCustomRulesets, "@"); lCustomRulesets = 
INIBinding::from::from_ini(vArray); } } } - if (ext.enable_rule_generator && !ext.nodelist && !lSimpleSubscription) { - if (lCustomRulesets != global.customRulesets) + if(ext.enable_rule_generator && !ext.nodelist && !lSimpleSubscription) + { + if(lCustomRulesets != global.customRulesets) refreshRulesets(lCustomRulesets, lRulesetContent); - else { - if (global.updateRulesetOnRequest) + else + { + if(global.updateRulesetOnRequest) refreshRulesets(global.customRulesets, global.rulesetsContent); lRulesetContent = global.rulesetsContent; } } - if (!argEmoji.is_undef()) { + if(!argEmoji.is_undef()) + { argAddEmoji.set(argEmoji); argRemoveEmoji.set(true); } ext.add_emoji = argAddEmoji.get(global.addEmoji); ext.remove_emoji = argRemoveEmoji.get(global.removeEmoji); - if (ext.add_emoji && ext.emoji_array.empty()) + if(ext.add_emoji && ext.emoji_array.empty()) ext.emoji_array = safe_get_emojis(); - if (!argRenames.empty()) + if(!argRenames.empty()) ext.rename_array = INIBinding::from::from_ini(split(argRenames, "`"), "@"); - else if (ext.rename_array.empty()) + else if(ext.rename_array.empty()) ext.rename_array = safe_get_renames(); /// check custom include/exclude settings - if (!argIncludeRemark.empty() && regValid(argIncludeRemark)) + if(!argIncludeRemark.empty() && regValid(argIncludeRemark)) lIncludeRemarks = string_array{argIncludeRemark}; - if (!argExcludeRemark.empty() && regValid(argExcludeRemark)) + if(!argExcludeRemark.empty() && regValid(argExcludeRemark)) lExcludeRemarks = string_array{argExcludeRemark}; /// initialize script runtime - if (authorized && !global.scriptCleanContext) { + if(authorized && !global.scriptCleanContext) + { ext.js_runtime = new qjs::Runtime(); script_runtime_init(*ext.js_runtime); ext.js_context = new qjs::Context(*ext.js_runtime); @@ -564,17 +579,21 @@ std::string subconverter(RESPONSE_CALLBACK_ARGS) { parse_set.js_runtime = ext.js_runtime; parse_set.js_context = ext.js_context; - if (!global.insertUrls.empty() && argEnableInsert) { + if(!global.insertUrls.empty() && argEnableInsert) + { groupID = -1; urls = split(global.insertUrls, "|"); importItems(urls, true); - for (std::string &x: urls) { + for(std::string &x : urls) + { x = regTrim(x); writeLog(0, "Fetching node data from url '" + x + "'.", LOG_LEVEL_INFO); - if (addNodes(x, insert_nodes, groupID, parse_set) == -1) { - if (global.skipFailedLinks) + if(addNodes(x, insert_nodes, groupID, parse_set) == -1) + { + if(global.skipFailedLinks) writeLog(0, "The following link doesn't contain any valid node info: " + x, LOG_LEVEL_WARNING); - else { + else + { *status_code = 400; return "The following link doesn't contain any valid node info: " + x; } @@ -585,14 +604,17 @@ std::string subconverter(RESPONSE_CALLBACK_ARGS) { urls = split(argUrl, "|"); importItems(urls, true); groupID = 0; - for (std::string &x: urls) { + for(std::string &x : urls) + { x = regTrim(x); //std::cerr<<"Fetching node data from url '"<) ctx.eval("filter"); + auto filter = (std::function) ctx.eval("filter"); nodes.erase(std::remove_if(nodes.begin(), nodes.end(), filter), nodes.end()); } - catch (qjs::exception) { + catch(qjs::exception) + { script_print_stack(ctx); } }, global.scriptCleanContext); } //check custom group name - if (!argGroupName.empty()) - for (Proxy &x: nodes) + if(!argGroupName.empty()) + for(Proxy &x : nodes) x.Group = argGroupName; //do pre-process now @@ -684,228 +714,229 @@ std::string subconverter(RESPONSE_CALLBACK_ARGS) { ProxyGroupConfigs dummy_group; std::vector dummy_ruleset; std::string managed_url = 
base64Decode(getUrlArg(argument, "profile_data")); - if (managed_url.empty()) + if(managed_url.empty()) managed_url = global.managedConfigPrefix + "/sub?" + joinArguments(argument); - size_t found; + //std::cerr<<"Generate target: "; proxy = parseProxy(global.proxyConfig); - switch (hash_(argTarget)) { - case "clash"_hash: - case "clashr"_hash: - writeLog(0, argTarget == "clashr" ? "Generate target: ClashR" : "Generate target: Clash", LOG_LEVEL_INFO); - tpl_args.local_vars["clash.new_field_name"] = ext.clash_new_field_name ? "true" : "false"; - response.headers["profile-update-interval"] = std::to_string(interval / 3600); - if (ext.nodelist) { - YAML::Node yamlnode; - proxyToClash(nodes, yamlnode, dummy_group, argTarget == "clashr", ext); - output_content = YAML::Dump(yamlnode); - } else { - if (render_template(fetchFile(lClashBase, proxy, global.cacheConfig), tpl_args, base_content, - global.templatePath) != 0) { - *status_code = 400; - return base_content; - } - output_content = proxyToClash(nodes, base_content, lRulesetContent, lCustomProxyGroups, - argTarget == "clashr", ext); - } - - if (argUpload) - uploadGist(argTarget, argUploadPath, output_content, false); - break; - case "surge"_hash: - - writeLog(0, "Generate target: Surge " + std::to_string(intSurgeVer), LOG_LEVEL_INFO); - - if (ext.nodelist) { - output_content = proxyToSurge(nodes, base_content, dummy_ruleset, dummy_group, intSurgeVer, ext); - - if (argUpload) - uploadGist("surge" + argSurgeVer + "list", argUploadPath, output_content, true); - } else { - if (render_template(fetchFile(lSurgeBase, proxy, global.cacheConfig), tpl_args, base_content, - global.templatePath) != 0) { - *status_code = 400; - return base_content; - } - output_content = proxyToSurge(nodes, base_content, lRulesetContent, lCustomProxyGroups, intSurgeVer, - ext); - - if (argUpload) - uploadGist("surge" + argSurgeVer, argUploadPath, output_content, true); - - if (global.writeManagedConfig && !global.managedConfigPrefix.empty()) - output_content = "#!MANAGED-CONFIG " + managed_url + - (interval ? " interval=" + std::to_string(interval) : "") \ - + " strict=" + std::string(strict ? "true" : "false") + "\n\n" + output_content; - } - break; - case "surfboard"_hash: - writeLog(0, "Generate target: Surfboard", LOG_LEVEL_INFO); - - if (render_template(fetchFile(lSurfboardBase, proxy, global.cacheConfig), tpl_args, base_content, - global.templatePath) != 0) { + switch(hash_(argTarget)) + { + case "clash"_hash: case "clashr"_hash: + writeLog(0, argTarget == "clashr" ? "Generate target: ClashR" : "Generate target: Clash", LOG_LEVEL_INFO); + tpl_args.local_vars["clash.new_field_name"] = ext.clash_new_field_name ? 
"true" : "false"; + response.headers["profile-update-interval"] = std::to_string(interval / 3600); + if(ext.nodelist) + { + YAML::Node yamlnode; + proxyToClash(nodes, yamlnode, dummy_group, argTarget == "clashr", ext); + output_content = YAML::Dump(yamlnode); + } + else + { + if(render_template(fetchFile(lClashBase, proxy, global.cacheConfig), tpl_args, base_content, global.templatePath) != 0) + { *status_code = 400; return base_content; } - output_content = proxyToSurge(nodes, base_content, lRulesetContent, lCustomProxyGroups, -3, ext); - if (argUpload) - uploadGist("surfboard", argUploadPath, output_content, true); + output_content = proxyToClash(nodes, base_content, lRulesetContent, lCustomProxyGroups, argTarget == "clashr", ext); + } - if (global.writeManagedConfig && !global.managedConfigPrefix.empty()) - output_content = - "#!MANAGED-CONFIG " + managed_url + (interval ? " interval=" + std::to_string(interval) : "") \ - + " strict=" + std::string(strict ? "true" : "false") + "\n\n" + output_content; - break; - case "mellow"_hash: - writeLog(0, "Generate target: Mellow", LOG_LEVEL_INFO); + if(argUpload) + uploadGist(argTarget, argUploadPath, output_content, false); + break; + case "surge"_hash: - if (render_template(fetchFile(lMellowBase, proxy, global.cacheConfig), tpl_args, base_content, - global.templatePath) != 0) { + writeLog(0, "Generate target: Surge " + std::to_string(intSurgeVer), LOG_LEVEL_INFO); + + if(ext.nodelist) + { + output_content = proxyToSurge(nodes, base_content, dummy_ruleset, dummy_group, intSurgeVer, ext); + + if(argUpload) + uploadGist("surge" + argSurgeVer + "list", argUploadPath, output_content, true); + } + else + { + if(render_template(fetchFile(lSurgeBase, proxy, global.cacheConfig), tpl_args, base_content, global.templatePath) != 0) + { *status_code = 400; return base_content; } - output_content = proxyToMellow(nodes, base_content, lRulesetContent, lCustomProxyGroups, ext); + output_content = proxyToSurge(nodes, base_content, lRulesetContent, lCustomProxyGroups, intSurgeVer, ext); - if (argUpload) - uploadGist("mellow", argUploadPath, output_content, true); - break; - case "sssub"_hash: - writeLog(0, "Generate target: SS Subscription", LOG_LEVEL_INFO); + if(argUpload) + uploadGist("surge" + argSurgeVer, argUploadPath, output_content, true); - if (render_template(fetchFile(lSSSubBase, proxy, global.cacheConfig), tpl_args, base_content, - global.templatePath) != 0) { + if(global.writeManagedConfig && !global.managedConfigPrefix.empty()) + output_content = "#!MANAGED-CONFIG " + managed_url + (interval ? " interval=" + std::to_string(interval) : "") \ + + " strict=" + std::string(strict ? "true" : "false") + "\n\n" + output_content; + } + break; + case "surfboard"_hash: + writeLog(0, "Generate target: Surfboard", LOG_LEVEL_INFO); + + if(render_template(fetchFile(lSurfboardBase, proxy, global.cacheConfig), tpl_args, base_content, global.templatePath) != 0) + { + *status_code = 400; + return base_content; + } + output_content = proxyToSurge(nodes, base_content, lRulesetContent, lCustomProxyGroups, -3, ext); + if(argUpload) + uploadGist("surfboard", argUploadPath, output_content, true); + + if(global.writeManagedConfig && !global.managedConfigPrefix.empty()) + output_content = "#!MANAGED-CONFIG " + managed_url + (interval ? " interval=" + std::to_string(interval) : "") \ + + " strict=" + std::string(strict ? 
"true" : "false") + "\n\n" + output_content; + break; + case "mellow"_hash: + writeLog(0, "Generate target: Mellow", LOG_LEVEL_INFO); + + if(render_template(fetchFile(lMellowBase, proxy, global.cacheConfig), tpl_args, base_content, global.templatePath) != 0) + { + *status_code = 400; + return base_content; + } + output_content = proxyToMellow(nodes, base_content, lRulesetContent, lCustomProxyGroups, ext); + + if(argUpload) + uploadGist("mellow", argUploadPath, output_content, true); + break; + case "sssub"_hash: + writeLog(0, "Generate target: SS Subscription", LOG_LEVEL_INFO); + + if(render_template(fetchFile(lSSSubBase, proxy, global.cacheConfig), tpl_args, base_content, global.templatePath) != 0) + { + *status_code = 400; + return base_content; + } + output_content = proxyToSSSub(base_content, nodes, ext); + if(argUpload) + uploadGist("sssub", argUploadPath, output_content, false); + break; + case "ss"_hash: + writeLog(0, "Generate target: SS", LOG_LEVEL_INFO); + output_content = proxyToSingle(nodes, 1, ext); + if(argUpload) + uploadGist("ss", argUploadPath, output_content, false); + break; + case "ssr"_hash: + writeLog(0, "Generate target: SSR", LOG_LEVEL_INFO); + output_content = proxyToSingle(nodes, 2, ext); + if(argUpload) + uploadGist("ssr", argUploadPath, output_content, false); + break; + case "v2ray"_hash: + writeLog(0, "Generate target: v2rayN", LOG_LEVEL_INFO); + output_content = proxyToSingle(nodes, 4, ext); + if(argUpload) + uploadGist("v2ray", argUploadPath, output_content, false); + break; + case "trojan"_hash: + writeLog(0, "Generate target: Trojan", LOG_LEVEL_INFO); + output_content = proxyToSingle(nodes, 8, ext); + if(argUpload) + uploadGist("trojan", argUploadPath, output_content, false); + break; + case "mixed"_hash: + writeLog(0, "Generate target: Standard Subscription", LOG_LEVEL_INFO); + output_content = proxyToSingle(nodes, 15, ext); + if(argUpload) + uploadGist("sub", argUploadPath, output_content, false); + break; + case "quan"_hash: + writeLog(0, "Generate target: Quantumult", LOG_LEVEL_INFO); + if(!ext.nodelist) + { + if(render_template(fetchFile(lQuanBase, proxy, global.cacheConfig), tpl_args, base_content, global.templatePath) != 0) + { *status_code = 400; return base_content; } - output_content = proxyToSSSub(base_content, nodes, ext); - if (argUpload) - uploadGist("sssub", argUploadPath, output_content, false); - break; - case "ss"_hash: - writeLog(0, "Generate target: SS", LOG_LEVEL_INFO); - output_content = proxyToSingle(nodes, 1, ext); - if (argUpload) - uploadGist("ss", argUploadPath, output_content, false); - break; - case "ssr"_hash: - writeLog(0, "Generate target: SSR", LOG_LEVEL_INFO); - output_content = proxyToSingle(nodes, 2, ext); - if (argUpload) - uploadGist("ssr", argUploadPath, output_content, false); - break; - case "v2ray"_hash: - writeLog(0, "Generate target: v2rayN", LOG_LEVEL_INFO); - output_content = proxyToSingle(nodes, 4, ext); - if (argUpload) - uploadGist("v2ray", argUploadPath, output_content, false); - break; - case "trojan"_hash: - writeLog(0, "Generate target: Trojan", LOG_LEVEL_INFO); - output_content = proxyToSingle(nodes, 8, ext); - if (argUpload) - uploadGist("trojan", argUploadPath, output_content, false); - break; - case "mixed"_hash: - writeLog(0, "Generate target: Standard Subscription", LOG_LEVEL_INFO); - output_content = proxyToSingle(nodes, 15, ext); - if (argUpload) - uploadGist("sub", argUploadPath, output_content, false); - break; - case "quan"_hash: - writeLog(0, "Generate target: Quantumult", LOG_LEVEL_INFO); - 
if (!ext.nodelist) { - if (render_template(fetchFile(lQuanBase, proxy, global.cacheConfig), tpl_args, base_content, - global.templatePath) != 0) { - *status_code = 400; - return base_content; - } + } + + output_content = proxyToQuan(nodes, base_content, lRulesetContent, lCustomProxyGroups, ext); + + if(argUpload) + uploadGist("quan", argUploadPath, output_content, false); + break; + case "quanx"_hash: + writeLog(0, "Generate target: Quantumult X", LOG_LEVEL_INFO); + if(!ext.nodelist) + { + if(render_template(fetchFile(lQuanXBase, proxy, global.cacheConfig), tpl_args, base_content, global.templatePath) != 0) + { + *status_code = 400; + return base_content; } + } - output_content = proxyToQuan(nodes, base_content, lRulesetContent, lCustomProxyGroups, ext); + output_content = proxyToQuanX(nodes, base_content, lRulesetContent, lCustomProxyGroups, ext); - if (argUpload) - uploadGist("quan", argUploadPath, output_content, false); - break; - case "quanx"_hash: - writeLog(0, "Generate target: Quantumult X", LOG_LEVEL_INFO); - if (!ext.nodelist) { - if (render_template(fetchFile(lQuanXBase, proxy, global.cacheConfig), tpl_args, base_content, - global.templatePath) != 0) { - *status_code = 400; - return base_content; - } + if(argUpload) + uploadGist("quanx", argUploadPath, output_content, false); + break; + case "loon"_hash: + writeLog(0, "Generate target: Loon", LOG_LEVEL_INFO); + if(!ext.nodelist) + { + if(render_template(fetchFile(lLoonBase, proxy, global.cacheConfig), tpl_args, base_content, global.templatePath) != 0) + { + *status_code = 400; + return base_content; } + } - output_content = proxyToQuanX(nodes, base_content, lRulesetContent, lCustomProxyGroups, ext); + output_content = proxyToLoon(nodes, base_content, lRulesetContent, lCustomProxyGroups, ext); - if (argUpload) - uploadGist("quanx", argUploadPath, output_content, false); - break; - case "loon"_hash: - writeLog(0, "Generate target: Loon", LOG_LEVEL_INFO); - if (!ext.nodelist) { - if (render_template(fetchFile(lLoonBase, proxy, global.cacheConfig), tpl_args, base_content, - global.templatePath) != 0) { - *status_code = 400; - return base_content; - } + if(argUpload) + uploadGist("loon", argUploadPath, output_content, false); + break; + case "ssd"_hash: + writeLog(0, "Generate target: SSD", LOG_LEVEL_INFO); + output_content = proxyToSSD(nodes, argGroupName, subInfo, ext); + if(argUpload) + uploadGist("ssd", argUploadPath, output_content, false); + break; + case "singbox"_hash: + writeLog(0, "Generate target: sing-box", LOG_LEVEL_INFO); + if(!ext.nodelist) + { + if(render_template(fetchFile(lSingBoxBase, proxy, global.cacheConfig), tpl_args, base_content, global.templatePath) != 0) + { + *status_code = 400; + return base_content; } + } - output_content = proxyToLoon(nodes, base_content, lRulesetContent, lCustomProxyGroups, ext); + output_content = proxyToSingBox(nodes, base_content, lRulesetContent, lCustomProxyGroups, ext); - if (argUpload) - uploadGist("loon", argUploadPath, output_content, false); - break; - case "ssd"_hash: - writeLog(0, "Generate target: SSD", LOG_LEVEL_INFO); - output_content = proxyToSSD(nodes, argGroupName, subInfo, ext); - if (argUpload) - uploadGist("ssd", argUploadPath, output_content, false); - break; - case "singbox"_hash: - writeLog(0, "Generate target: sing-box", LOG_LEVEL_INFO); - if (!ext.nodelist) { - if (render_template(fetchFile(lSingBoxBase, proxy, global.cacheConfig), tpl_args, base_content, - global.templatePath) != 0) { - *status_code = 400; - return base_content; - } - } - - output_content = 
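Every non-nodelist branch above repeats the same steps: fetch the base template, render it, and bail out with HTTP 400 if rendering fails. Purely as a refactoring sketch (not part of the patch), that repetition could be folded into a helper inside interfaces.cpp that reuses the functions already visible in these cases:

// Hypothetical helper, not in the patch; it assumes the render_template/fetchFile
// signatures and globals used in the switch cases above.
static bool prepareBase(const std::string &base_url, const std::string &proxy,
                        template_args &tpl_args, std::string &base_content,
                        int *status_code)
{
    if(render_template(fetchFile(base_url, proxy, global.cacheConfig), tpl_args,
                       base_content, global.templatePath) != 0)
    {
        *status_code = 400;   // rendering failed; caller returns base_content as the error body
        return false;
    }
    return true;
}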
proxyToSingBox(nodes, base_content, lRulesetContent, lCustomProxyGroups, ext); - found = output_content.find('\u0000'); - while (found != std::string::npos) { - output_content.erase(found, 1); - found = output_content.find('\u0000', found); - } - - if (argUpload) - uploadGist("singbox", argUploadPath, output_content, false); - break; - default: - writeLog(0, "Generate target: Unspecified", LOG_LEVEL_INFO); - *status_code = 500; - return "Unrecognized target"; + if(argUpload) + uploadGist("singbox", argUploadPath, output_content, false); + break; + default: + writeLog(0, "Generate target: Unspecified", LOG_LEVEL_INFO); + *status_code = 500; + return "Unrecognized target"; } writeLog(0, "Generate completed.", LOG_LEVEL_INFO); - if (!argFilename.empty()) - response.headers.emplace("Content-Disposition", - "attachment; filename=\"" + argFilename + "\"; filename*=utf-8''" + - urlEncode(argFilename)); + if(!argFilename.empty()) + response.headers.emplace("Content-Disposition", "attachment; filename=\"" + argFilename + "\"; filename*=utf-8''" + urlEncode(argFilename)); return output_content; } - -std::string simpleToClashR(RESPONSE_CALLBACK_ARGS) { +std::string simpleToClashR(RESPONSE_CALLBACK_ARGS) +{ auto argument = joinArguments(request.argument); int *status_code = &response.status_code; std::string url = argument.size() <= 8 ? "" : argument.substr(8); - if (url.empty() || argument.substr(0, 8) != "sublink=") { + if(url.empty() || argument.substr(0, 8) != "sublink=") + { *status_code = 400; return "Invalid request!"; } - if (url == "sublink") { + if(url == "sublink") + { *status_code = 400; return "Please insert your subscription link instead of clicking the default link."; } @@ -914,7 +945,8 @@ std::string simpleToClashR(RESPONSE_CALLBACK_ARGS) { return subconverter(request, response); } -std::string surgeConfToClash(RESPONSE_CALLBACK_ARGS) { +std::string surgeConfToClash(RESPONSE_CALLBACK_ARGS) +{ auto argument = joinArguments(request.argument); int *status_code = &response.status_code; @@ -922,19 +954,19 @@ std::string surgeConfToClash(RESPONSE_CALLBACK_ARGS) { string_array dummy_str_array; std::vector nodes; std::string base_content, url = argument.size() <= 5 ? "" : argument.substr(5); - const std::string proxygroup_name = global.clashUseNewField ? "proxy-groups" - : "Proxy Group", rule_name = global.clashUseNewField - ? "rules" : "Rule"; + const std::string proxygroup_name = global.clashUseNewField ? "proxy-groups" : "Proxy Group", rule_name = global.clashUseNewField ? 
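The removed lines above were an explicit find()/erase() loop that stripped embedded NUL characters from the generated sing-box output. For reference, a self-contained single-pass equivalent of that loop using the erase-remove idiom (names are illustrative):

#include <algorithm>
#include <cassert>
#include <string>

// Strip embedded NUL bytes from a generated document in one linear pass.
void stripNul(std::string &text)
{
    text.erase(std::remove(text.begin(), text.end(), '\0'), text.end());
}

int main()
{
    std::string s("a\0b\0c", 5);
    stripNul(s);
    assert(s == "abc");
}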
"rules" : "Rule"; ini.store_any_line = true; - if (url.empty()) + if(url.empty()) url = global.defaultUrls; - if (url.empty() || argument.substr(0, 5) != "link=") { + if(url.empty() || argument.substr(0, 5) != "link=") + { *status_code = 400; return "Invalid request!"; } - if (url == "link") { + if(url == "link") + { *status_code = 400; return "Please insert your subscription link instead of clicking the default link."; } @@ -948,8 +980,8 @@ std::string surgeConfToClash(RESPONSE_CALLBACK_ARGS) { tpl_args.request_params["target"] = "clash"; tpl_args.request_params["url"] = url; - if (render_template(fetchFile(global.clashBase, proxy, global.cacheConfig), tpl_args, base_content, - global.templatePath) != 0) { + if(render_template(fetchFile(global.clashBase, proxy, global.cacheConfig), tpl_args, base_content, global.templatePath) != 0) + { *status_code = 400; return base_content; } @@ -957,14 +989,16 @@ std::string surgeConfToClash(RESPONSE_CALLBACK_ARGS) { base_content = fetchFile(url, proxy, global.cacheConfig); - if (ini.parse(base_content) != INIREADER_EXCEPTION_NONE) { + if(ini.parse(base_content) != INIREADER_EXCEPTION_NONE) + { std::string errmsg = "Parsing Surge config failed! Reason: " + ini.get_last_error(); //std::cerr<= 2 && strLine[0] == '/' && strLine[1] == '/')) //empty lines and comments are ignored + if(!lineSize || strLine[0] == ';' || strLine[0] == '#' || (lineSize >= 2 && strLine[0] == '/' && strLine[1] == '/')) //empty lines and comments are ignored continue; - else if (!std::any_of(ClashRuleTypes.begin(), ClashRuleTypes.end(), - [&strLine](const std::string &type) { - return startsWith(strLine, type); - })) //remove unsupported types + else if(!std::any_of(ClashRuleTypes.begin(), ClashRuleTypes.end(), [&strLine](const std::string& type){return startsWith(strLine, type);})) //remove unsupported types continue; strLine += strArray[2]; - if (count_least(strLine, ',', 3)) + if(count_least(strLine, ',', 3)) strLine = regReplace(strLine, "^(.*?,.*?)(,.*)(,.*)$", "$1$3$2"); rule.push_back(strLine); } ss.clear(); continue; - } else if (!std::any_of(ClashRuleTypes.begin(), ClashRuleTypes.end(), - [&strLine](const std::string &type) { return startsWith(strLine, type); })) + } + else if(!std::any_of(ClashRuleTypes.begin(), ClashRuleTypes.end(), [&strLine](const std::string& type){return startsWith(strLine, type);})) continue; rule.push_back(x); } @@ -1127,13 +1167,15 @@ std::string surgeConfToClash(RESPONSE_CALLBACK_ARGS) { return YAML::Dump(clash); } -std::string getProfile(RESPONSE_CALLBACK_ARGS) { +std::string getProfile(RESPONSE_CALLBACK_ARGS) +{ auto &argument = request.argument; int *status_code = &response.status_code; std::string name = getUrlArg(argument, "name"), token = getUrlArg(argument, "token"); string_array profiles = split(name, "|"); - if (token.empty() || profiles.empty()) { + if(token.empty() || profiles.empty()) + { *status_code = 403; return "Forbidden"; } @@ -1144,16 +1186,20 @@ std::string getProfile(RESPONSE_CALLBACK_ARGS) { profile_content = vfs::vfs_get(name); } else */ - if (fileExist(name)) { + if(fileExist(name)) + { profile_content = fileGet(name, true); - } else { + } + else + { *status_code = 404; return "Profile not found"; } //std::cerr<<"Trying to load profile '" + name + "'.\n"; writeLog(0, "Trying to load profile '" + name + "'.", LOG_LEVEL_INFO); INIReader ini; - if (ini.parse(profile_content) != INIREADER_EXCEPTION_NONE && !ini.section_exist("Profile")) { + if(ini.parse(profile_content) != INIREADER_EXCEPTION_NONE && 
!ini.section_exist("Profile")) + { //std::cerr<<"Load profile failed! Reason: "<second) { + if(profiles.size() == 1 && profile_token != contents.end()) + { + if(token != profile_token->second) + { *status_code = 403; return "Forbidden"; } token = global.accessToken; - } else { - if (token != global.accessToken) { + } + else + { + if(token != global.accessToken) + { *status_code = 403; return "Forbidden"; } } /// check if more than one profile is provided - if (profiles.size() > 1) { + if(profiles.size() > 1) + { writeLog(0, "Multiple profiles are provided. Trying to combine profiles...", LOG_TYPE_INFO); std::string all_urls, url; auto iter = contents.find("url"); - if (iter != contents.end()) + if(iter != contents.end()) all_urls = iter->second; - for (size_t i = 1; i < profiles.size(); i++) { + for(size_t i = 1; i < profiles.size(); i++) + { name = profiles[i]; - if (!fileExist(name)) { + if(!fileExist(name)) + { writeLog(0, "Ignoring non-exist profile '" + name + "'...", LOG_LEVEL_WARNING); continue; } - if (ini.parse_file(name) != INIREADER_EXCEPTION_NONE && !ini.section_exist("Profile")) { + if(ini.parse_file(name) != INIREADER_EXCEPTION_NONE && !ini.section_exist("Profile")) + { writeLog(0, "Ignoring broken profile '" + name + "'...", LOG_LEVEL_WARNING); continue; } url = ini.get("Profile", "url"); - if (!url.empty()) { + if(!url.empty()) + { all_urls += "|" + url; writeLog(0, "Profile url from '" + name + "' added.", LOG_LEVEL_INFO); - } else { + } + else + { writeLog(0, "Profile '" + name + "' does not have url key. Skipping...", LOG_LEVEL_INFO); } } @@ -1211,8 +1270,7 @@ std::string getProfile(RESPONSE_CALLBACK_ARGS) { } contents.emplace("token", token); - contents.emplace("profile_data", - base64Encode(global.managedConfigPrefix + "/getprofile?" + joinArguments(argument))); + contents.emplace("profile_data", base64Encode(global.managedConfigPrefix + "/getprofile?" 
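The token handling above boils down to: a single requested profile may carry its own profile_token, which then takes precedence; in every other case the caller must present the global access token. A condensed, hypothetical rendition of just that decision (names are illustrative, not the project's API):

#include <map>
#include <string>

bool profileAccessAllowed(const std::map<std::string, std::string> &profile,
                          const std::string &provided_token,
                          const std::string &global_token,
                          std::size_t profile_count)
{
    auto it = profile.find("profile_token");
    if(profile_count == 1 && it != profile.end())
        return provided_token == it->second;   // per-profile token wins for a single profile
    return provided_token == global_token;     // otherwise the global access token is required
}

int main()
{
    std::map<std::string, std::string> profile{{"profile_token", "abc123"}};
    return profileAccessAllowed(profile, "abc123", "global-token", 1) ? 0 : 1;
}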
+ joinArguments(argument))); std::copy(argument.cbegin(), argument.cend(), std::inserter(contents, contents.end())); request.argument = contents; return subconverter(request, response); @@ -1226,12 +1284,14 @@ std::string jinja2_webGet(const std::string &url) return webGet(url, proxy, global.cacheConfig); }*/ -inline std::string intToStream(unsigned long long stream) { +inline std::string intToStream(unsigned long long stream) +{ char chrs[16] = {}, units[6] = {' ', 'K', 'M', 'G', 'T', 'P'}; double streamval = stream; unsigned int level = 0; - while (streamval > 1024.0) { - if (level >= 5) + while(streamval > 1024.0) + { + if(level >= 5) break; level++; streamval /= 1024.0; @@ -1240,43 +1300,47 @@ inline std::string intToStream(unsigned long long stream) { return {chrs}; } -std::string subInfoToMessage(std::string subinfo) { +std::string subInfoToMessage(std::string subinfo) +{ using ull = unsigned long long; subinfo = replaceAllDistinct(subinfo, "; ", "&"); std::string retdata, useddata = "N/A", totaldata = "N/A", expirydata = "N/A"; - std::string upload = getUrlArg(subinfo, "upload"), download = getUrlArg(subinfo, "download"), total = getUrlArg( - subinfo, "total"), expire = getUrlArg(subinfo, "expire"); + std::string upload = getUrlArg(subinfo, "upload"), download = getUrlArg(subinfo, "download"), total = getUrlArg(subinfo, "total"), expire = getUrlArg(subinfo, "expire"); ull used = to_number(upload, 0) + to_number(download, 0), tot = to_number(total, 0); auto expiry = to_number(expire, 0); - if (used != 0) + if(used != 0) useddata = intToStream(used); - if (tot != 0) + if(tot != 0) totaldata = intToStream(tot); - if (expiry != 0) { + if(expiry != 0) + { char buffer[30]; struct tm *dt = localtime(&expiry); strftime(buffer, sizeof(buffer), "%Y-%m-%d %H:%M", dt); expirydata.assign(buffer); } - if (useddata == "N/A" && totaldata == "N/A" && expirydata == "N/A") + if(useddata == "N/A" && totaldata == "N/A" && expirydata == "N/A") retdata = "Not Available"; else retdata += "Stream Used: " + useddata + " Stream Total: " + totaldata + " Expiry Time: " + expirydata; return retdata; } -int simpleGenerator() { +int simpleGenerator() +{ //std::cerr<<"\nReading generator configuration...\n"; writeLog(0, "Reading generator configuration...", LOG_LEVEL_INFO); std::string config = fileGet("generate.ini"), path, profile, content; - if (config.empty()) { + if(config.empty()) + { //std::cerr<<"Generator configuration not found or empty!\n"; writeLog(0, "Generator configuration not found or empty!", LOG_LEVEL_ERROR); return -1; } INIReader ini; - if (ini.parse(config) != INIREADER_EXCEPTION_NONE) { + if(ini.parse(config) != INIREADER_EXCEPTION_NONE) + { //std::cerr<<"Generator configuration broken! Reason:"<second), LOG_LEVEL_INFO); //std::cerr<<"Artifact '"< -//void find_if_exist(const toml::value &v, const toml::key &k, T& target, U&&... args) -//{ -// if(v.contains(k)) target = toml::find(v, k); -// if constexpr (sizeof...(args) > 0) find_if_exist(v, std::forward(args)...); -//} +template +void find_if_exist(const toml::value &v, const toml::value::key_type &k, T& target, U&&... 
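intToStream above repeatedly divides the raw byte counter by 1024 until it fits the next unit (K, M, G, T, P) and then formats it. The format string itself lies outside this hunk, so the "%.2f %cB" below is an assumption; the rest of this standalone sketch mirrors the loop shown:

#include <cstdio>
#include <string>

// Human-readable traffic formatter in the spirit of intToStream.
std::string formatBytes(unsigned long long bytes)
{
    const char units[] = {' ', 'K', 'M', 'G', 'T', 'P'};
    double value = static_cast<double>(bytes);
    unsigned int level = 0;
    while(value > 1024.0 && level < 5)
    {
        level++;
        value /= 1024.0;
    }
    char buffer[32];
    std::snprintf(buffer, sizeof(buffer), "%.2f %cB", value, units[level]);
    return buffer;
}

int main()
{
    std::printf("%s\n", formatBytes(1610612736ull).c_str());   // 1.50 GB
}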
args) +{ + if(v.contains(k)) target = toml::find(v, k); + if constexpr (sizeof...(args) > 0) find_if_exist(v, std::forward(args)...); +} -void operate_toml_kv_table(const std::vector &arr, const toml::key &key_name, const toml::key &value_name, std::function binary_op) +void operate_toml_kv_table(const std::vector &arr, const toml::value::key_type &key_name, const toml::value::key_type &value_name, std::function binary_op) { for(const toml::table &table : arr) { diff --git a/src/handler/settings.h b/src/handler/settings.h index cc2d698..0665dbb 100644 --- a/src/handler/settings.h +++ b/src/handler/settings.h @@ -49,7 +49,7 @@ struct Settings tribool UDPFlag, TFOFlag, skipCertVerify, TLS13Flag, enableInsert; bool enableSort = false, updateStrict = false; bool clashUseNewField = false, singBoxAddClashModes = true; - std::string clashProxiesStyle = "flow"; + std::string clashProxiesStyle = "flow", clashProxyGroupsStyle = "block"; std::string proxyConfig, proxyRuleset, proxySubscription; int updateInterval = 0; std::string sortScript, filterScript; diff --git a/src/handler/webget.cpp b/src/handler/webget.cpp index 736aaa5..4433536 100644 --- a/src/handler/webget.cpp +++ b/src/handler/webget.cpp @@ -87,11 +87,13 @@ static int logger(CURL *handle, curl_infotype type, char *data, size_t size, voi switch(type) { case CURLINFO_TEXT: - prefix = "CURL_INFO"; + prefix = "CURL_INFO: "; break; case CURLINFO_HEADER_IN: + prefix = "CURL_HEADER: < "; + break; case CURLINFO_HEADER_OUT: - prefix = "CURL_HEADER"; + prefix = "CURL_HEADER: > "; break; case CURLINFO_DATA_IN: case CURLINFO_DATA_OUT: @@ -105,7 +107,6 @@ static int logger(CURL *handle, curl_infotype type, char *data, size_t size, voi for(auto &x : lines) { std::string log_content = prefix; - log_content += ": "; log_content += x; writeLog(0, log_content, LOG_LEVEL_VERBOSE); } @@ -113,7 +114,6 @@ static int logger(CURL *handle, curl_infotype type, char *data, size_t size, voi else { std::string log_content = prefix; - log_content += ": "; log_content += trimWhitespace(content); writeLog(0, log_content, LOG_LEVEL_VERBOSE); } @@ -172,7 +172,8 @@ static int curlGet(const FetchArgument &argument, FetchResult &result) { for(auto &x : *argument.request_headers) { - header_list = curl_slist_append(header_list, (x.first + ": " + x.second).data()); + auto header = x.first + ": " + x.second; + header_list = curl_slist_append(header_list, header.data()); } if(!argument.request_headers->contains("User-Agent")) curl_easy_setopt(curl_handle, CURLOPT_USERAGENT, user_agent_str); @@ -233,7 +234,7 @@ static int curlGet(const FetchArgument &argument, FetchResult &result) while(true) { retVal = curl_easy_perform(curl_handle); - if(retVal == CURLE_OK || max_fails <= fail_count) + if(retVal == CURLE_OK || max_fails <= fail_count || global.APIMode) break; else fail_count++; diff --git a/src/main.cpp b/src/main.cpp index e8bb4d6..9284ee4 100644 --- a/src/main.cpp +++ b/src/main.cpp @@ -233,10 +233,10 @@ int main(int argc, char *argv[]) } } std::string type = getUrlArg(request.argument, "type"); - if(type == "form") - fileWrite(global.prefPath, getFormData(request.postdata), true); - else if(type == "direct") + if(type == "form" || type == "direct") + { fileWrite(global.prefPath, request.postdata, true); + } else { response.status_code = 501; diff --git a/src/parser/config/proxy.h b/src/parser/config/proxy.h index 1e0ab2c..8e28e31 100644 --- a/src/parser/config/proxy.h +++ b/src/parser/config/proxy.h @@ -9,7 +9,8 @@ using String = std::string; using StringArray = 
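find_if_exist above combines a recursive variadic template with if constexpr so a single call can populate any number of optional settings keys. The same pattern, detached from toml11 and applied to a plain std::map purely for illustration:

#include <iostream>
#include <map>
#include <string>
#include <utility>

using Dict = std::map<std::string, std::string>;

// Assign target from dict[key] if the key exists, then recurse over the
// remaining (key, target) pairs; recursion stops via `if constexpr`.
template <typename T, typename... U>
void find_if_exist(const Dict &dict, const std::string &key, T &target, U &&...args)
{
    auto it = dict.find(key);
    if(it != dict.end())
        target = it->second;
    if constexpr (sizeof...(args) > 0)
        find_if_exist(dict, std::forward<U>(args)...);
}

int main()
{
    Dict d{{"listen", "0.0.0.0"}, {"port", "25500"}};
    std::string listen, port, token;
    find_if_exist(d, "listen", listen, "port", port, "api_access_token", token);
    std::cout << listen << ':' << port << " token='" << token << "'\n";
}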
std::vector; -enum class ProxyType { +enum class ProxyType +{ Unknown, Shadowsocks, ShadowsocksR, diff --git a/src/script/script_quickjs.cpp b/src/script/script_quickjs.cpp index b745b8f..ebf230b 100644 --- a/src/script/script_quickjs.cpp +++ b/src/script/script_quickjs.cpp @@ -2,6 +2,7 @@ #include #include #include +#include #include #ifdef _WIN32 @@ -226,7 +227,7 @@ public: qjs_fetch_Headers headers; std::string cookies; std::string postdata; - explicit qjs_fetch_Request(const std::string &url) : url(url) {} + explicit qjs_fetch_Request(std::string url) : url(std::move(url)) {} }; class qjs_fetch_Response @@ -389,7 +390,7 @@ void script_runtime_init(qjs::Runtime &runtime) js_std_init_handlers(runtime.rt); } -int ShowMsgbox(const std::string &title, std::string content, uint16_t type = 0) +int ShowMsgbox(const std::string &title, const std::string &content, uint16_t type = 0) { #ifdef _WIN32 if(!type) @@ -424,7 +425,7 @@ struct Lambda { uint32_t currentTime() { - return time(NULL); + return time(nullptr); } int script_context_init(qjs::Context &context) @@ -525,7 +526,7 @@ int script_context_init(qjs::Context &context) )", "", JS_EVAL_TYPE_MODULE); return 0; } - catch(qjs::exception) + catch(qjs::exception&) { script_print_stack(context); return 1; diff --git a/src/server/webserver_httplib.cpp b/src/server/webserver_httplib.cpp index 16dd9cc..627f510 100644 --- a/src/server/webserver_httplib.cpp +++ b/src/server/webserver_httplib.cpp @@ -47,16 +47,23 @@ static httplib::Server::Handler makeHandler(const responseRoute &rr) { continue; } - req.headers[h.first] = h.second; + req.headers.emplace(h.first.data(), h.second.data()); } req.argument = request.params; - if (request.get_header_value("Content-Type") == "application/x-www-form-urlencoded") + if (request.method == "POST" || request.method == "PUT" || request.method == "PATCH") { - req.postdata = urlDecode(request.body); - } - else - { - req.postdata = request.body; + if (request.is_multipart_form_data() && !request.files.empty()) + { + req.postdata = request.files.begin()->second.content; + } + else if (request.get_header_value("Content-Type") == "application/x-www-form-urlencoded") + { + req.postdata = urlDecode(request.body); + } + else + { + req.postdata = request.body; + } } auto result = rr.rc(req, resp); response.status = resp.status_code; @@ -163,6 +170,7 @@ int WebServer::start_web_server_multi(listener_args *args) { res.set_header("Access-Control-Allow-Headers", req.get_header_value("Access-Control-Request-Headers")); } + res.set_header("Access-Control-Allow-Origin", "*"); return httplib::Server::HandlerResponse::Unhandled; }); for (auto &x : redirect_map) @@ -187,7 +195,7 @@ int WebServer::start_web_server_multi(listener_args *args) { try { - std::rethrow_exception(e); + if (e) std::rethrow_exception(e); } catch (const httplib::Error &err) { @@ -212,6 +220,9 @@ int WebServer::start_web_server_multi(listener_args *args) { server.set_mount_point("/", serve_file_root); } + server.new_task_queue = [args] { + return new httplib::ThreadPool(args->max_workers); + }; server.bind_to_port(args->listen_address, args->port, 0); std::thread thread([&]() diff --git a/src/utils/logger.cpp b/src/utils/logger.cpp index a7694d5..ec6353b 100644 --- a/src/utils/logger.cpp +++ b/src/utils/logger.cpp @@ -26,7 +26,8 @@ std::string getTime(int type) format = "%Y%m%d-%H%M%S"; break; case 2: - format = "%Y/%m/%d %a %H:%M:%S." 
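The qjs_fetch_Request change above replaces a const-reference parameter with pass-by-value plus std::move, the usual sink-parameter idiom: callers holding an lvalue pay one copy and one move, callers passing an rvalue pay only moves. A minimal illustration:

#include <string>
#include <utility>

struct Request
{
    std::string url;
    // Sink parameter: take by value, then move into the member.
    explicit Request(std::string url) : url(std::move(url)) {}
};

int main()
{
    std::string u = "https://example.com/sub";
    Request a(u);                              // copy into the parameter, move into the member
    Request b("https://example.com/other");    // construct in place, move into the member
    return (a.url.empty() || b.url.empty()) ? 1 : 0;
}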
+ std::string(cMillis); + format = "%Y/%m/%d %a %H:%M:%S."; + format += cMillis; break; case 3: default: diff --git a/src/utils/map_extra.h b/src/utils/map_extra.h index 583fb24..29c2549 100644 --- a/src/utils/map_extra.h +++ b/src/utils/map_extra.h @@ -5,9 +5,16 @@ #include #include -struct strICaseComp { - bool operator()(const std::string &lhs, const std::string &rhs) const { - return strcasecmp(lhs.c_str(), rhs.c_str()) > 0; +struct strICaseComp +{ + bool operator() (const std::string &lhs, const std::string &rhs) const + { + return std::lexicographical_compare(lhs.begin(), lhs.end(), rhs.begin(), + rhs.end(), + [](unsigned char c1, unsigned char c2) + { + return ::tolower(c1) < ::tolower(c2); + }); } }; diff --git a/src/utils/string.cpp b/src/utils/string.cpp index f270d5c..35cf3bf 100644 --- a/src/utils/string.cpp +++ b/src/utils/string.cpp @@ -3,88 +3,38 @@ #include #include #include -#include -#include +#include +#include +#include #include "string.h" #include "map_extra.h" std::vector split(const std::string &s, const std::string &separator) { + string_size bpos = 0, epos = s.find(separator); std::vector result; - string_size i = 0; - - while(i != s.size()) + while(bpos < s.size()) { - int flag = 0; - while(i != s.size() && flag == 0) - { - flag = 1; - for(char x : separator) - if(s[i] == x) - { - ++i; - flag = 0; - break; - } - } - - flag = 0; - string_size j = i; - while(j != s.size() && flag == 0) - { - for(char x : separator) - if(s[j] == x) - { - flag = 1; - break; - } - if(flag == 0) - ++j; - } - if(i != j) - { - result.push_back(s.substr(i, j-i)); - i = j; - } + if(epos == std::string::npos) + epos = s.size(); + result.push_back(s.substr(bpos, epos - bpos)); + bpos = epos + separator.size(); + epos = s.find(separator, bpos); } return result; } void split(std::vector &result, std::string_view s, char separator) { - string_size i = 0; - - while (i != s.size()) + string_size bpos = 0, epos = s.find(separator); + while(bpos < s.size()) { - int flag = 0; - while(i != s.size() && flag == 0) - { - flag = 1; - if(s[i] == separator) - { - ++i; - flag = 0; - break; - } - } - - flag = 0; - string_size j = i; - while(j != s.size() && flag == 0) - { - if(s[j] == separator) - { - flag = 1; - break; - } - ++j; - } - if (i != j) - { - result.push_back(s.substr(i, j-i)); - i = j; - } + if(epos == std::string_view::npos) + epos = s.size(); + result.push_back(s.substr(bpos, epos - bpos)); + bpos = epos + 1; + epos = s.find(separator, bpos); } } @@ -141,7 +91,7 @@ std::string toUpper(const std::string &str) void processEscapeChar(std::string &str) { string_size pos = str.find('\\'); - while(pos != str.npos) + while(pos != std::string::npos) { if(pos == str.size()) break; @@ -191,7 +141,7 @@ void processEscapeCharReverse(std::string &str) int parseCommaKeyValue(const std::string &input, const std::string &separator, string_pair_array &result) { - string_size bpos = 0, epos = input.find(','); + string_size bpos = 0, epos = input.find(separator); std::string kv; while(bpos < input.size()) { @@ -200,9 +150,9 @@ int parseCommaKeyValue(const std::string &input, const std::string &separator, s else if(epos && input[epos - 1] == '\\') { kv += input.substr(bpos, epos - bpos - 1); - kv += ','; + kv += separator; bpos = epos + 1; - epos = input.find(',', bpos); + epos = input.find(separator, bpos); continue; } kv += input.substr(bpos, epos - bpos); @@ -213,9 +163,9 @@ int parseCommaKeyValue(const std::string &input, const std::string &separator, s result.emplace_back(kv.substr(0, eqpos), kv.substr(eqpos 
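The rewritten strICaseComp above swaps the POSIX-only strcasecmp for a portable std::lexicographical_compare over lower-cased characters. A small usage sketch (header names are just examples) showing why a case-insensitive comparator is useful for header maps:

#include <algorithm>
#include <cctype>
#include <iostream>
#include <map>
#include <string>

// Same shape as the comparator in map_extra.h: case-insensitive ordering.
struct strICaseComp
{
    bool operator()(const std::string &lhs, const std::string &rhs) const
    {
        return std::lexicographical_compare(
            lhs.begin(), lhs.end(), rhs.begin(), rhs.end(),
            [](unsigned char a, unsigned char b) { return std::tolower(a) < std::tolower(b); });
    }
};

int main()
{
    // HTTP header names compare case-insensitively, so one entry serves all spellings.
    std::map<std::string, std::string, strICaseComp> headers;
    headers["Content-Type"] = "application/json";
    std::cout << headers.count("content-type") << '\n';   // prints 1
}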
+ 1)); kv.clear(); bpos = epos + 1; - epos = input.find(',', bpos); + epos = input.find(separator, bpos); } - if(kv.size()) + if(!kv.empty()) { string_size eqpos = kv.find('='); if(eqpos == std::string::npos) @@ -328,12 +278,12 @@ std::string getUrlArg(const std::string &url, const std::string &request) while(pos) { pos = url.rfind(pattern, pos); - if(pos != url.npos) + if(pos != std::string::npos) { if(pos == 0 || url[pos - 1] == '&' || url[pos - 1] == '?') { pos += pattern.size(); - return url.substr(pos, url.find("&", pos) - pos); + return url.substr(pos, url.find('&', pos) - pos); } } else @@ -410,23 +360,24 @@ bool isStrUTF8(const std::string &data) std::string randomStr(int len) { std::string retData; - srand(time(NULL)); - int cnt = 0; - while(cnt < len) + std::random_device rd; + std::mt19937 gen(rd()); + std::uniform_int_distribution<> dis(0, 61); + for(int i = 0; i < len; i++) { - switch((rand() % 3)) + int r = dis(gen); + if (r < 26) { - case 1: - retData += ('A' + rand() % 26); - break; - case 2: - retData += ('a' + rand() % 26); - break; - default: - retData += ('0' + rand() % 10); - break; + retData.push_back('a' + r); + } + else if (r < 52) + { + retData.push_back('A' + r - 26); + } + else + { + retData.push_back('0' + r - 52); } - cnt++; } return retData; } @@ -451,7 +402,7 @@ int to_int(const std::string &str, int def_value) std::string join(const string_array &arr, const std::string &delimiter) { - if(arr.size() == 0) + if(arr.empty()) return ""; if(arr.size() == 1) return arr[0];
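parseCommaKeyValue above now honours its separator parameter (the old body hard-coded ',') and lets a backslash escape the separator inside a value. A standalone sketch of that escaped-separator parsing, specialised to a single-character separator for brevity and skipping entries without '=':

#include <iostream>
#include <string>
#include <utility>
#include <vector>

using string_pair_array = std::vector<std::pair<std::string, std::string>>;

// Illustrative re-implementation of the escaped-separator key=value scan.
string_pair_array parseKeyValues(const std::string &input, char separator)
{
    string_pair_array result;
    std::string kv;
    std::string::size_type bpos = 0, epos = input.find(separator);
    while(bpos < input.size())
    {
        if(epos == std::string::npos)
            epos = input.size();
        else if(epos > 0 && input[epos - 1] == '\\')
        {
            // Separator is escaped: keep it as literal text and continue scanning.
            kv += input.substr(bpos, epos - bpos - 1);
            kv += separator;
            bpos = epos + 1;
            epos = input.find(separator, bpos);
            continue;
        }
        kv += input.substr(bpos, epos - bpos);
        auto eqpos = kv.find('=');
        if(eqpos != std::string::npos)
            result.emplace_back(kv.substr(0, eqpos), kv.substr(eqpos + 1));
        kv.clear();
        bpos = epos + 1;
        epos = input.find(separator, bpos);
    }
    return result;
}

int main()
{
    for(const auto &entry : parseKeyValues("a=1,b=2\\,3,c=4", ','))
        std::cout << entry.first << " -> " << entry.second << '\n';
    // a -> 1
    // b -> 2,3
    // c -> 4
}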