mirror of
https://github.com/asdlokj1qpi233/subconverter.git
synced 2025-10-26 02:42:25 +00:00
Merge remote-tracking branch 'fork/master' into dev
# Conflicts:
#	.github/workflows/build.yml
#	.github/workflows/docker.yml
#	.gitignore
#	base/pref.example.toml
#	base/snippets/emoji.toml
#	base/snippets/emoji.txt
#	scripts/build.macos.release.sh
#	scripts/build.windows.release.sh
#	scripts/rules_config.conf
#	src/generator/config/subexport.cpp
#	src/handler/interfaces.cpp
#	src/handler/settings.cpp
#	src/parser/config/proxy.h
#	src/parser/subparser.cpp
#	src/parser/subparser.h
#	src/utils/map_extra.h
#	src/version.h
.github/workflows/build.yml (vendored), 174 lines changed
@@ -1,5 +1,5 @@
 name: GitHub CI
 on:
 push:
 branches: [ master ]
 tags:
@@ -7,7 +7,7 @@ on:
 workflow_dispatch:
 pull_request:
 
 concurrency:
 group: ${{ github.ref }}-${{ github.workflow }}
 cancel-in-progress: true
 
@@ -24,36 +24,34 @@ jobs:
 os: ubuntu-latest
 - arch: armv7
 artifact: subconverter_armv7
-os: ubuntu-latest
+os: [self-hosted, linux, ARM]
 - arch: aarch64
 artifact: subconverter_aarch64
-os: ubuntu-latest
+os: [self-hosted, linux, ARM64]
 runs-on: ${{ matrix.os }}
 name: Linux ${{ matrix.arch }} Build
 steps:
 - name: Checkout base
 uses: actions/checkout@v4
-- name: Set up QEMU
-  uses: docker/setup-qemu-action@v3
 - name: Add commit id into version
 if: ${{ !startsWith(github.ref, 'refs/tags/') }}
 run: SHA=$(git rev-parse --short HEAD) && sed -i 's/\(v[0-9]\.[0-9]\.[0-9]\)/\1-'"$SHA"'/' src/version.h
 - name: Build
 run: docker run --rm -v $GITHUB_WORKSPACE:/root/workdir multiarch/alpine:${{ matrix.arch }}-latest-stable /bin/sh -c "apk add bash git nodejs npm && cd /root/workdir && chmod +x scripts/build.alpine.release.sh && bash scripts/build.alpine.release.sh"
 - name: Upload
 uses: actions/upload-artifact@v4
 with:
 name: ${{ matrix.artifact }}
 path: subconverter/
 - name: Package Release
 if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
 run: tar czf ${{ matrix.artifact }}.tar.gz subconverter
 - name: Draft Release
 if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
 uses: softprops/action-gh-release@v2
 with:
 files: ${{ matrix.artifact }}.tar.gz
 draft: true
 
 macos_build:
 strategy:
@@ -68,31 +66,31 @@ jobs:
 runs-on: ${{ matrix.os }}
 name: macOS ${{ matrix.arch }} Build
 steps:
 - name: Checkout base
 uses: actions/checkout@v4
 - name: Setup Python
 uses: actions/setup-python@v5
 with:
 python-version: '3.11'
 - name: Add commit id into version
 if: ${{ !startsWith(github.ref, 'refs/tags/') }}
 run: SHA=$(git rev-parse --short HEAD) && sed -i -e 's/\(v[0-9]\.[0-9]\.[0-9]\)/\1-'"$SHA"'/' src/version.h
 - name: Build
 run: bash scripts/build.macos.release.sh
 - name: Upload
 uses: actions/upload-artifact@v4
 with:
 name: ${{ matrix.artifact }}
 path: subconverter/
 - name: Package Release
 if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
 run: tar czf ${{ matrix.artifact }}.tar.gz subconverter
 - name: Draft Release
 if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
 uses: softprops/action-gh-release@v2
 with:
 files: ${{ matrix.artifact }}.tar.gz
 draft: true
 
 windows_build:
 strategy:
@@ -112,39 +110,39 @@ jobs:
 run:
 shell: msys2 {0}
 steps:
 - name: Checkout base
 uses: actions/checkout@v4
 - name: Setup Python
 uses: actions/setup-python@v5
 with:
 python-version: '3.11'
 - name: Setup Node.js
 uses: actions/setup-node@v4
 with:
 node-version: '16'
 - name: Setup MSYS2
 uses: msys2/setup-msys2@v2
 with:
 update: true
 install: base-devel git mingw-w64-${{ matrix.env }}-gcc mingw-w64-${{ matrix.env }}-cmake mingw-w64-${{ matrix.env }}-pcre2 patch
 msystem: ${{ matrix.msystem }}
 path-type: inherit
 - name: Add commit id into version
 if: ${{ !startsWith(github.ref, 'refs/tags/') }}
 run: SHA=$(git rev-parse --short HEAD) && sed -i 's/\(v[0-9]\.[0-9]\.[0-9]\)/\1-'"$SHA"'/' src/version.h
 - name: Build
 run: bash scripts/build.windows.release.sh
 - name: Upload
 uses: actions/upload-artifact@v4
 with:
 name: ${{ matrix.artifact }}
 path: subconverter/
 - name: Package Release
 if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
 run: 7z a ${{ matrix.artifact }}.7z subconverter/
 - name: Draft Release
 if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
 uses: softprops/action-gh-release@v2
 with:
 files: ${{ matrix.artifact }}.7z
 draft: true
.github/workflows/docker.yml (vendored), 12 lines changed
@@ -1,16 +1,16 @@
 name: Publish Docker Image
 on:
 push:
 branches: [ master ]
 tags:
 - '**'
 
 concurrency:
 group: ${{ github.ref }}-${{ github.workflow }}
 cancel-in-progress: true
 
 env:
-REGISTRY_IMAGE: asdlokj1qpi23/subconverter
+REGISTRY_IMAGE: tindy2013/subconverter
 
 jobs:
 build:
@@ -22,9 +22,9 @@ jobs:
 - platform: linux/386
 os: ubuntu-latest
 - platform: linux/arm/v7
-os: ubuntu-latest
+os: [self-hosted, linux, ARM]
 - platform: linux/arm64
-os: ubuntu-latest
+os: [self-hosted, linux, ARM64]
 runs-on: ${{ matrix.os }}
 name: Build ${{ matrix.platform }} Image
 steps:
@@ -37,8 +37,6 @@ jobs:
 uses: actions/checkout@v4
 with:
 fetch-depth: 0
-- name: Set up QEMU
-  uses: docker/setup-qemu-action@v3
 
 - name: Set up Docker Buildx
 uses: docker/setup-buildx-action@v3
.gitignore (vendored), 2 lines changed
@@ -7,3 +7,5 @@ scripts/quickjspp
 scripts/yaml-cpp
 .DS_Store
 src/.DS_Store
+
+build
@@ -5,7 +5,7 @@ socks-port: {{ default(global.clash.socks_port, "7891") }}
 allow-lan: {{ default(global.clash.allow_lan, "true") }}
 mode: Rule
 log-level: {{ default(global.clash.log_level, "info") }}
-external-controller: :9090
+external-controller: {{ default(global.clash.external_controller, "127.0.0.1:9090") }}
 {% if default(request.clash.dns, "") == "1" %}
 dns:
 enable: true
@@ -378,7 +378,16 @@ enhanced-mode-by-rule = true
 "rules": [],
 "auto_detect_interface": true
 },
-"experimental": {}
+"experimental": {
+  "cache_file": {
+    "enabled": true,
+    "store_fakeip": true
+  },
+  "clash_api": {
+    "external_controller": "{{ default(global.clash.external_controller, "127.0.0.1:9090") }}",
+    "external_ui": "dashboard"
+  }
+}
 }
 
 {% endif %}
@@ -100,5 +100,14 @@
 "rules": [],
 "auto_detect_interface": true
 },
-"experimental": {}
+"experimental": {
+  "cache_file": {
+    "enabled": true,
+    "store_fakeip": true
+  },
+  "clash_api": {
+    "external_controller": "127.0.0.1:9090",
+    "external_ui": "dashboard"
+  }
+}
 }
@@ -109,13 +109,14 @@ filter_deprecated_nodes=false
 append_sub_userinfo=true
 clash_use_new_field_name=true
 
-;Generate style of the proxies section of Clash subscriptions.
+;Generate style of the proxies and proxy groups section of Clash subscriptions.
 ;Supported styles: block, flow, compact
 ;Block: - name: name1 Flow: - {name: name1, key: value} Compact: [{name: name1, key: value},{name: name2, key: value}]
 ; key: value - {name: name2, key: value}
 ; - name: name2
 ; key: value
 clash_proxies_style=flow
+clash_proxy_groups_style=block
 
 ;add Clash mode to sing-box rules, and add a GLOBAL group to end of outbounds
 singbox_add_clash_modes=true
@@ -232,6 +233,7 @@ clash.http_port=7890
 clash.socks_port=7891
 clash.allow_lan=true
 clash.log_level=info
+clash.external_controller=127.0.0.1:9090
 singbox.allow_lan=true
 singbox.mixed_port=2080
 
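The style names above are easiest to see side by side. A rough illustration, with PyYAML standing in for the yaml-cpp emitter the project actually uses and with invented group entries; the per-entry "flow" style, where the list stays block-formatted but each mapping is inlined, sits between the two shown:

# Illustration only - PyYAML is a stand-in for the C++ yaml-cpp emitter.
import yaml
groups = [{"name": "name1", "type": "select"}, {"name": "name2", "type": "select"}]
print(yaml.dump(groups, default_flow_style=False))  # "block": one key per line
print(yaml.dump(groups, default_flow_style=True))   # "compact": the whole list inlined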
@@ -117,9 +117,9 @@ match = '^Smart Access expire: (\d+)/(\d+)/(\d+)$'
 replace = '$1:$2:$3:0:0:0'
 
 [node_pref]
-#udp_flag = true
+#udp_flag = false
 #tcp_fast_open_flag = false
-#skip_cert_verify_flag = true
+#skip_cert_verify_flag = false
 #tls13_flag = false
 
 sort_flag = false
@@ -135,13 +135,14 @@ filter_deprecated_nodes = false
 append_sub_userinfo = true
 clash_use_new_field_name = true
 
-# Generate style of the proxies section of Clash subscriptions.
+# Generate style of the proxies and proxy groups section of Clash subscriptions.
 # Supported styles: block, flow, compact
 # Block: - name: name1 Flow: - {name: name1, key: value} Compact: [{name: name1, key: value},{name: name2, key: value}]
 # key: value - {name: name2, key: value}
 # - name: name2
 # key: value
 clash_proxies_style = "flow"
+clash_proxy_groups_style = "block"
 
 # add Clash mode to sing-box rules, and add a GLOBAL group to end of outbounds
 singbox_add_clash_modes = true
@@ -243,6 +244,10 @@ value = "true"
 key = "clash.log_level"
 value = "info"
 
+[[template.globals]]
+key = "clash.external_controller"
+value = "127.0.0.1:9090"
+
 [[template.globals]]
 key = "singbox.allow_lan"
 value = "true"
@@ -50,6 +50,7 @@ node_pref:
 append_sub_userinfo: true
 clash_use_new_field_name: true
 clash_proxies_style: flow
+clash_proxy_groups_style: block
 singbox_add_clash_modes: true
 rename_node:
 # - {match: "\\(?((x|X)?(\\d+)(\\.?\\d+)?)((\\s?倍率?)|(x|X))\\)?", replace: "$1x"}
@@ -108,9 +109,10 @@ template:
 - {key: clash.socks_port, value: 7891}
 - {key: clash.allow_lan, value: true}
 - {key: clash.log_level, value: info}
+- {key: clash.external_controller, value: '127.0.0.1:9090'}
 - {key: singbox.allow_lan, value: true}
 - {key: singbox.mixed_port, value: 2080}
 
 aliases:
 - {uri: /v, target: /version}
 - {uri: /clash, target: "/sub?target=clash"}
|
|||||||
emoji = "🇯🇵"
|
emoji = "🇯🇵"
|
||||||
|
|
||||||
[[emoji]]
|
[[emoji]]
|
||||||
match = "(?i:\\bK[O]?R\\d*\\b|Korea|(?<!North)Korea|首尔|韩|韓)"
|
match = "(?i:(?<!North\\s)(\\bK[O]?R\\d*\\b|Korea|首尔|韩|韓))"
|
||||||
emoji = "🇰🇷"
|
emoji = "🇰🇷"
|
||||||
|
|
||||||
[[emoji]]
|
[[emoji]]
|
||||||
@@ -334,10 +334,6 @@ emoji = "🇹🇷"
|
|||||||
match = "(乌拉圭|Uruguay)"
|
match = "(乌拉圭|Uruguay)"
|
||||||
emoji = "🇺🇾"
|
emoji = "🇺🇾"
|
||||||
|
|
||||||
[[emoji]]
|
|
||||||
match = "(梵蒂冈|Vatican)"
|
|
||||||
emoji = "🇻🇦"
|
|
||||||
|
|
||||||
[[emoji]]
|
[[emoji]]
|
||||||
match = "(Vietnam|越南)"
|
match = "(Vietnam|越南)"
|
||||||
emoji = "🇻🇳"
|
emoji = "🇻🇳"
|
||||||
|
|||||||
@@ -4,7 +4,7 @@
 (?i:\bSG[P]?\d*\b|Singapore|新加坡|狮城|[^-]新),🇸🇬
 (尼日利亚|Nigeria),🇳🇬
 (?i:\bJP[N]?\d*\b|Japan|Tokyo|Osaka|Saitama|日本|东京|大阪|埼玉|[^-]日),🇯🇵
-(?i:\bK[O]?R\d*\b|Korea|(?<!North)Korea|首尔|韩|韓),🇰🇷
+(?i:(?<!North\s)(\bK[O]?R\d*\b|Korea|首尔|韩|韓)),🇰🇷
 (?i:\bUS[A]?\d*\b|America|United.*?States|美国|[^-]美|波特兰|达拉斯|俄勒冈|凤凰城|费利蒙|硅谷|拉斯维加斯|洛杉矶|圣何塞|圣克拉拉|西雅图|芝加哥),🇺🇸
 (Ascension|阿森松),🇦🇨
 (?i:\bUAE\b|Dubai|阿联酋|迪拜),🇦🇪
@@ -92,4 +92,4 @@
 (Morocco|摩洛哥),🇲🇦
 (Nepal|尼泊尔),🇳🇵
 (Bengal|孟加拉),🇧🇩
 (?i:\bC[H]?N\b|China|back|回国|中国[^-]|江苏[^-]|北京[^-]|上海[^-]|广州[^-]|深圳[^-]|杭州[^-]|常州[^-]|徐州[^-]|青岛[^-]|宁波[^-]|镇江[^-]|成都[^-]|河北[^-]|山西[^-]|辽宁[^-]|吉林[^-]|黑龙江[^-]|江苏[^-]|浙江[^-]|安徽[^-]|福建[^-]|江西[^-]|山东[^-]|河南[^-]|湖北[^-]|湖南[^-]|广东[^-]|海南[^-]|四川[^-]|贵州[^-]|云南[^-]|陕西[^-]|甘肃[^-]|青海[^-]|内蒙古[^-]|广西[^-]|西藏[^-]|宁夏[^-]|新疆[^-]),🇨🇳
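A quick sanity check of the tightened Korea pattern, using Python's re module rather than the PCRE2 engine the project links against:

import re
# New pattern from emoji.txt; the lookbehind now guards every alternative.
pattern = r"(?i:(?<!North\s)(\bK[O]?R\d*\b|Korea|首尔|韩|韓))"
assert re.search(pattern, "Korea 01") is not None
assert re.search(pattern, "KR-Seoul") is not None
assert re.search(pattern, "North Korea") is None  # previously this still matched via the bare "Korea" alternative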
@@ -30,7 +30,7 @@ RUN set -xe && \
 install -d /usr/include/date/ && \
 install -m644 libcron/externals/date/include/date/* /usr/include/date/ && \
 cd .. && \
-git clone https://github.com/ToruNiina/toml11 --branch="v3.7.1" --depth=1 && \
+git clone https://github.com/ToruNiina/toml11 --branch="v4.3.0" --depth=1 && \
 cd toml11 && \
 cmake -DCMAKE_CXX_STANDARD=11 . && \
 make install -j $THREADS && \
@@ -53,6 +53,10 @@ RUN apk add --no-cache --virtual subconverter-deps pcre2 libcurl yaml-cpp
 COPY --from=builder /subconverter/subconverter /usr/bin/
 COPY --from=builder /subconverter/base /base/
 
+ENV TZ=Africa/Abidjan
+RUN ln -sf /usr/share/zoneinfo/$TZ /etc/localtime
+RUN echo $TZ > /etc/timezone
+
 # set entry
 WORKDIR /base
 CMD subconverter
@@ -4,7 +4,7 @@ set -xe
 apk add gcc g++ build-base linux-headers cmake make autoconf automake libtool python2 python3
 apk add mbedtls-dev mbedtls-static zlib-dev rapidjson-dev zlib-static pcre2-dev
 
-git clone https://github.com/curl/curl --depth=1 --branch curl-8_4_0
+git clone https://github.com/curl/curl --depth=1 --branch curl-8_6_0
 cd curl
 cmake -DCURL_USE_MBEDTLS=ON -DHTTP_ONLY=ON -DBUILD_TESTING=OFF -DBUILD_SHARED_LIBS=OFF -DCMAKE_USE_LIBSSH2=OFF -DBUILD_CURL_EXE=OFF . > /dev/null
 make install -j2 > /dev/null
@@ -34,7 +34,7 @@ cmake -DCMAKE_BUILD_TYPE=Release .
 make libcron install -j3
 cd ..
 
-git clone https://github.com/ToruNiina/toml11 --branch="v3.7.1" --depth=1
+git clone https://github.com/ToruNiina/toml11 --branch="v4.3.0" --depth=1
 cd toml11
 cmake -DCMAKE_CXX_STANDARD=11 .
 make install -j4
@@ -41,7 +41,7 @@ sudo install -d /usr/local/include/date/
 sudo install -m644 libcron/externals/date/include/date/* /usr/local/include/date/
 cd ..
 
-git clone https://github.com/ToruNiina/toml11 --branch="v3.7.1" --depth=1
+git clone https://github.com/ToruNiina/toml11 --branch="v4.3.0" --depth=1
 cd toml11
 cmake -DCMAKE_CXX_STANDARD=11 .
 sudo make install -j6 > /dev/null
@@ -63,4 +63,4 @@ chmod +r ./*
 cd ..
 mv base subconverter
 
 set +xe
@@ -1,33 +1,9 @@
 #!/bin/bash
 set -xe
 
-# Get the system architecture
-ARCH=$(uname -m)
-
-if [ "$ARCH" == "x86_64" ]; then
-    TOOLCHAIN="mingw-w64-x86_64"
-else
-    TOOLCHAIN="mingw-w64-i686"
-fi
-
-pacman -S --needed --noconfirm base-devel ${TOOLCHAIN}-toolchain ${TOOLCHAIN}-cmake ${TOOLCHAIN}-nghttp2 ${TOOLCHAIN}-openssl
-
-git clone https://github.com/curl/curl --depth=1 --branch curl-8_8_0
+git clone https://github.com/curl/curl --depth=1 --branch curl-8_6_0
 cd curl
-cmake -DCMAKE_BUILD_TYPE=Release \
-    -DCURL_USE_LIBSSH2=OFF \
-    -DHTTP_ONLY=ON \
-    -DCURL_USE_SCHANNEL=ON \
-    -DBUILD_SHARED_LIBS=OFF \
-    -DBUILD_CURL_EXE=OFF \
-    -DCMAKE_INSTALL_PREFIX="$MINGW_PREFIX" \
-    -G "Unix Makefiles" \
-    -DHAVE_LIBIDN2=OFF \
-    -DCURL_USE_LIBPSL=OFF \
-    -DCURL_STATICLIB=ON \
-    -DCURL_DISABLE_SOCKETPAIR=ON \
-    -DCURL_DISABLE_NONBLOCKING=ON .
+cmake -DCMAKE_BUILD_TYPE=Release -DCURL_USE_LIBSSH2=OFF -DHTTP_ONLY=ON -DCURL_USE_SCHANNEL=ON -DBUILD_SHARED_LIBS=OFF -DBUILD_CURL_EXE=OFF -DCMAKE_INSTALL_PREFIX="$MINGW_PREFIX" -G "Unix Makefiles" -DHAVE_LIBIDN2=OFF -DCURL_USE_LIBPSL=OFF .
 
 make install -j4
 cd ..
 
@@ -62,7 +38,7 @@ cmake -DRAPIDJSON_BUILD_DOC=OFF -DRAPIDJSON_BUILD_EXAMPLES=OFF -DRAPIDJSON_BUILD
 make install -j4
 cd ..
 
-git clone https://github.com/ToruNiina/toml11 --branch="v3.7.1" --depth=1
+git clone https://github.com/ToruNiina/toml11 --branch "v4.3.0" --depth=1
 cd toml11
 cmake -DCMAKE_INSTALL_PREFIX="$MINGW_PREFIX" -G "Unix Makefiles" -DCMAKE_CXX_STANDARD=11 .
 make install -j4
@@ -1,23 +1,23 @@
 [ACL4SSR]
 name=ACL4SSR
 url=https://github.com/ACL4SSR/ACL4SSR
-checkout=1dc5c92b0c8ceaaecbc66530c309961f53e52c8c
+branch=master
 match=Clash/*.list|Clash/Ruleset/**
 
 [ACL4SSR_config]
 name=ACL4SSR
 url=https://github.com/ACL4SSR/ACL4SSR
-checkout=1dc5c92b0c8ceaaecbc66530c309961f53e52c8c
+branch=master
 match=Clash/config/**
 dest=base/config/
 keep_tree=false
 
-[DivineEngine]
-url=https://github.com/asdlokj1qpi233/Profiles.git
-checkout=f6302d855192bd8d0be08319dff3e58ae7c2bd4e
-match=Surge/Ruleset/**
-
 [NobyDa]
 url=https://github.com/NobyDa/Script
-checkout=ae4c12f23de8078e02c373c9969b19af28257fcb
+branch=master
 match=Surge/*.list
 
+[lhie1]
+url=https://github.com/dler-io/Rules
+branch=main
+match=Surge/Surge 3/Provider/**
@@ -22,10 +22,13 @@ def open_repo(path: str):
 return None
 
 
-def update_rules(repo_path, save_path, commit, matches, keep_tree):
+def update_rules(repo_path: str, save_path: str, matches: list[str], keep_tree: bool):
 os.makedirs(save_path, exist_ok=True)
 for pattern in matches:
 files = glob.glob(os.path.join(repo_path, pattern), recursive=True)
+if len(files) == 0:
+    logging.warn(f"no files found for pattern {pattern}")
+    continue
 for file in files:
 if os.path.isdir(file):
 continue
@@ -51,12 +54,13 @@ def main():
 for section in config.sections():
 repo = config.get(section, "name", fallback=section)
 url = config.get(section, "url")
-commit = config.get(section, "checkout")
+commit = config.get(section, "commit", fallback=None)
+branch = config.get(section, "branch", fallback=None)
 matches = config.get(section, "match").split("|")
 save_path = config.get(section, "dest", fallback=f"base/rules/{repo}")
 keep_tree = config.getboolean(section, "keep_tree", fallback=True)
 
-logging.info(f"reading files from url {url} with commit {commit} and matches {matches}, save to {save_path} keep_tree {keep_tree}")
+logging.info(f"reading files from url {url}, matches {matches}, save to {save_path} keep_tree {keep_tree}")
 
 repo_path = os.path.join("./tmp/repo/", repo)
 
@@ -67,8 +71,21 @@ def main():
 else:
 logging.info(f"repo {repo_path} exists")
 
-r.git.checkout(commit)
-update_rules(repo_path, save_path, commit, matches, keep_tree)
+try:
+    if commit is not None:
+        logging.info(f"checking out to commit {commit}")
+        r.git.checkout(commit)
+    elif branch is not None:
+        logging.info(f"checking out to branch {branch}")
+        r.git.checkout(branch)
+    else:
+        logging.info(f"checking out to default branch")
+        r.active_branch.checkout()
+except Exception as e:
+    logging.error(f"checkout failed {e}")
+    continue
+
+update_rules(repo_path, save_path, matches, keep_tree)
 
 shutil.rmtree("./tmp", ignore_errors=True)
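For context, a minimal sketch of how a rules_config.conf section is resolved after this change; the section below is invented, but the key names match the updated script:

import configparser, textwrap

config = configparser.ConfigParser()
config.read_string(textwrap.dedent("""\
    [Example]
    url=https://github.com/ACL4SSR/ACL4SSR
    branch=master
    match=Clash/*.list|Clash/Ruleset/**
"""))

commit = config.get("Example", "commit", fallback=None)   # None -> not pinned to a commit
branch = config.get("Example", "branch", fallback=None)   # "master"
matches = config.get("Example", "match").split("|")
# The updated script prefers a pinned commit, then a branch, then the repository's default branch.
print(commit or branch or "<repository default branch>", matches)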
@@ -17,9 +17,9 @@ namespace toml
 static ProxyGroupConfig from_toml(const value& v)
 {
 ProxyGroupConfig conf;
-conf.Name = toml::find<String>(v, "name");
-String type = toml::find<String>(v, "type");
-String strategy = toml::find_or<String>(v, "strategy", "");
+conf.Name = find<String>(v, "name");
+String type = find<String>(v, "type");
+String strategy = find_or<String>(v, "strategy", "");
 switch(hash_(type))
 {
 case "select"_hash:
@@ -27,18 +27,18 @@ namespace toml
 break;
 case "url-test"_hash:
 conf.Type = ProxyGroupType::URLTest;
-conf.Url = toml::find<String>(v, "url");
-conf.Interval = toml::find<Integer>(v, "interval");
-conf.Tolerance = toml::find_or<Integer>(v, "tolerance", 0);
+conf.Url = find<String>(v, "url");
+conf.Interval = find<Integer>(v, "interval");
+conf.Tolerance = find_or<Integer>(v, "tolerance", 0);
 if(v.contains("lazy"))
-conf.Lazy = toml::find_or<bool>(v, "lazy", false);
+conf.Lazy = find_or<bool>(v, "lazy", false);
 if(v.contains("evaluate-before-use"))
-conf.EvaluateBeforeUse = toml::find_or(v, "evaluate-before-use", conf.EvaluateBeforeUse.get());
+conf.EvaluateBeforeUse = find_or(v, "evaluate-before-use", conf.EvaluateBeforeUse.get());
 break;
 case "load-balance"_hash:
 conf.Type = ProxyGroupType::LoadBalance;
-conf.Url = toml::find<String>(v, "url");
-conf.Interval = toml::find<Integer>(v, "interval");
+conf.Url = find<String>(v, "url");
+conf.Interval = find<Integer>(v, "interval");
 switch(hash_(strategy))
 {
 case "consistent-hashing"_hash:
@@ -49,14 +49,14 @@ namespace toml
 break;
 }
 if(v.contains("persistent"))
-conf.Persistent = toml::find_or(v, "persistent", conf.Persistent.get());
+conf.Persistent = find_or(v, "persistent", conf.Persistent.get());
 break;
 case "fallback"_hash:
 conf.Type = ProxyGroupType::Fallback;
-conf.Url = toml::find<String>(v, "url");
-conf.Interval = toml::find<Integer>(v, "interval");
+conf.Url = find<String>(v, "url");
+conf.Interval = find<Integer>(v, "interval");
 if(v.contains("evaluate-before-use"))
-conf.EvaluateBeforeUse = toml::find_or(v, "evaluate-before-use", conf.EvaluateBeforeUse.get());
+conf.EvaluateBeforeUse = find_or(v, "evaluate-before-use", conf.EvaluateBeforeUse.get());
 break;
 case "relay"_hash:
 conf.Type = ProxyGroupType::Relay;
@@ -64,16 +64,26 @@ namespace toml
 case "ssid"_hash:
 conf.Type = ProxyGroupType::SSID;
 break;
+case "smart"_hash:
+conf.Type = ProxyGroupType::Smart;
+conf.Url = find<String>(v, "url");
+conf.Interval = find<Integer>(v, "interval");
+conf.Tolerance = find_or<Integer>(v, "tolerance", 0);
+if(v.contains("lazy"))
+conf.Lazy = find_or<bool>(v, "lazy", false);
+if(v.contains("evaluate-before-use"))
+conf.EvaluateBeforeUse = find_or(v, "evaluate-before-use", conf.EvaluateBeforeUse.get());
+break;
 default:
-throw toml::syntax_error("Proxy Group has incorrect type, should be one of following:\n select, url-test, load-balance, fallback, relay, ssid", v.at("type").location());
+throw serialization_error(format_error("Proxy Group has unsupported type!", v.at("type").location(), "should be one of following: select, url-test, load-balance, fallback, relay, ssid"), v.at("type").location());
 }
-conf.Timeout = toml::find_or(v, "timeout", 5);
-conf.Proxies = toml::find_or<StrArray>(v, "rule", {});
-conf.UsingProvider = toml::find_or<StrArray>(v, "use", {});
+conf.Timeout = find_or(v, "timeout", 5);
+conf.Proxies = find_or<StrArray>(v, "rule", {});
+conf.UsingProvider = find_or<StrArray>(v, "use", {});
 if(conf.Proxies.empty() && conf.UsingProvider.empty())
-throw toml::syntax_error("Proxy Group must contains at least one of proxy match rule or provider", v.location());
+throw serialization_error(format_error("Proxy Group must contains at least one of proxy match rule or provider!", v.location(), "here"), v.location());
 if(v.contains("disable-udp"))
-conf.DisableUdp = toml::find_or(v, "disable-udp", conf.DisableUdp.get());
+conf.DisableUdp = find_or(v, "disable-udp", conf.DisableUdp.get());
 return conf;
 }
 };
@@ -84,8 +94,8 @@ namespace toml
 static RulesetConfig from_toml(const value& v)
 {
 RulesetConfig conf;
-conf.Group = toml::find<String>(v, "group");
-String type = toml::find_or<String>(v, "type", "surge-ruleset");
+conf.Group = find<String>(v, "group");
+String type = find_or<String>(v, "type", "surge-ruleset");
 switch(hash_(type))
 {
 /*
@@ -122,10 +132,10 @@ namespace toml
 conf.Url = type + ":";
 break;
 default:
-throw toml::syntax_error("Ruleset has incorrect type, should be one of following:\n surge-ruleset, quantumultx, clash-domain, clash-ipcidr, clash-classic", v.at("type").location());
+throw serialization_error(format_error("Ruleset has unsupported type!", v.at("type").location(), "should be one of following: surge-ruleset, quantumultx, clash-domain, clash-ipcidr, clash-classic"), v.at("type").location());
 }
-conf.Url += toml::find<String>(v, "ruleset");
-conf.Interval = toml::find_or<Integer>(v, "interval", 86400);
+conf.Url += find<String>(v, "ruleset");
+conf.Interval = find_or<Integer>(v, "interval", 86400);
 return conf;
 }
 };
@@ -138,14 +148,14 @@ namespace toml
 RegexMatchConfig conf;
 if(v.contains("script"))
 {
-conf.Script = toml::find<String>(v, "script");
+conf.Script = find<String>(v, "script");
 return conf;
 }
-conf.Match = toml::find<String>(v, "match");
+conf.Match = find<String>(v, "match");
 if(v.contains("emoji"))
-conf.Replace = toml::find<String>(v, "emoji");
+conf.Replace = find<String>(v, "emoji");
 else
-conf.Replace = toml::find<String>(v, "replace");
+conf.Replace = find<String>(v, "replace");
 return conf;
 }
 };
@@ -156,10 +166,10 @@ namespace toml
 static CronTaskConfig from_toml(const value& v)
 {
 CronTaskConfig conf;
-conf.Name = toml::find<String>(v, "name");
-conf.CronExp = toml::find<String>(v, "cronexp");
-conf.Path = toml::find<String>(v, "path");
-conf.Timeout = toml::find_or<Integer>(v, "timeout", 0);
+conf.Name = find<String>(v, "name");
+conf.CronExp = find<String>(v, "cronexp");
+conf.Path = find<String>(v, "path");
+conf.Timeout = find_or<Integer>(v, "timeout", 0);
 return conf;
 }
 };
@@ -220,6 +230,9 @@ namespace INIBinding
 case "ssid"_hash:
 conf.Type = ProxyGroupType::SSID;
 break;
+case "smart"_hash:
+conf.Type = ProxyGroupType::Smart;
+break;
 default:
 continue;
 }
|
|||||||
|
|
||||||
#include "def.h"
|
#include "def.h"
|
||||||
|
|
||||||
enum ProxyGroupType
|
enum class ProxyGroupType
|
||||||
{
|
{
|
||||||
Select,
|
Select,
|
||||||
URLTest,
|
URLTest,
|
||||||
Fallback,
|
Fallback,
|
||||||
LoadBalance,
|
LoadBalance,
|
||||||
Relay,
|
Relay,
|
||||||
SSID
|
SSID,
|
||||||
|
Smart
|
||||||
};
|
};
|
||||||
|
|
||||||
enum BalanceStrategy
|
enum class BalanceStrategy
|
||||||
{
|
{
|
||||||
ConsistentHashing,
|
ConsistentHashing,
|
||||||
RoundRobin
|
RoundRobin
|
||||||
@@ -45,6 +46,7 @@ struct ProxyGroupConfig
|
|||||||
case ProxyGroupType::Fallback: return "fallback";
|
case ProxyGroupType::Fallback: return "fallback";
|
||||||
case ProxyGroupType::Relay: return "relay";
|
case ProxyGroupType::Relay: return "relay";
|
||||||
case ProxyGroupType::SSID: return "ssid";
|
case ProxyGroupType::SSID: return "ssid";
|
||||||
|
case ProxyGroupType::Smart: return "smart";
|
||||||
}
|
}
|
||||||
return "";
|
return "";
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -3,7 +3,7 @@
|
|||||||
|
|
||||||
#include "def.h"
|
#include "def.h"
|
||||||
|
|
||||||
enum RulesetType
|
enum class RulesetType
|
||||||
{
|
{
|
||||||
SurgeRuleset,
|
SurgeRuleset,
|
||||||
QuantumultX,
|
QuantumultX,
|
||||||
|
|||||||
@@ -161,7 +161,8 @@ void processRemark(std::string &remark, const string_array &remarks_list, bool p
 }
 std::string tempRemark = remark;
 int cnt = 2;
-while (std::find(remarks_list.cbegin(), remarks_list.cend(), tempRemark) != remarks_list.cend()) {
+while(std::find(remarks_list.cbegin(), remarks_list.cend(), tempRemark) != remarks_list.cend())
+{
 tempRemark = remark + " " + std::to_string(cnt);
 cnt++;
 }
@@ -218,6 +219,30 @@ proxyToClash(std::vector<Proxy> &nodes, YAML::Node &yamlnode, const ProxyGroupCo
 case "compact"_hash:
 compact = true;
 break;
+bool proxy_block = false, proxy_compact = false, group_block = false, group_compact = false;
+switch(hash_(ext.clash_proxies_style))
+{
+case "block"_hash:
+proxy_block = true;
+break;
+default:
+case "flow"_hash:
+break;
+case "compact"_hash:
+proxy_compact = true;
+break;
+}
+switch(hash_(ext.clash_proxy_groups_style))
+{
+case "block"_hash:
+group_block = true;
+break;
+default:
+case "flow"_hash:
+break;
+case "compact"_hash:
+group_compact = true;
+break;
 }
 
 for (Proxy &x: nodes) {
@@ -644,7 +669,10 @@ proxyToClash(std::vector<Proxy> &nodes, YAML::Node &yamlnode, const ProxyGroupCo
 string_array filtered_nodelist;
 
 singlegroup["name"] = x.Name;
-singlegroup["type"] = x.TypeStr();
+if (x.Type == ProxyGroupType::Smart)
+singlegroup["type"] = "url-test";
+else
+singlegroup["type"] = x.TypeStr();
 
 switch (x.Type) {
 case ProxyGroupType::Select:
@@ -666,6 +694,29 @@ proxyToClash(std::vector<Proxy> &nodes, YAML::Node &yamlnode, const ProxyGroupCo
 break;
 default:
 continue;
+switch(x.Type)
+{
+case ProxyGroupType::Select:
+case ProxyGroupType::Relay:
+break;
+case ProxyGroupType::LoadBalance:
+singlegroup["strategy"] = x.StrategyStr();
+[[fallthrough]];
+case ProxyGroupType::Smart:
+[[fallthrough]];
+case ProxyGroupType::URLTest:
+if(!x.Lazy.is_undef())
+singlegroup["lazy"] = x.Lazy.get();
+[[fallthrough]];
+case ProxyGroupType::Fallback:
+singlegroup["url"] = x.Url;
+if(x.Interval > 0)
+singlegroup["interval"] = x.Interval;
+if(x.Tolerance > 0)
+singlegroup["tolerance"] = x.Tolerance;
+break;
+default:
+continue;
 }
 if (!x.DisableUdp.is_undef())
 singlegroup["disable-udp"] = x.DisableUdp.get();
@@ -681,7 +732,10 @@ proxyToClash(std::vector<Proxy> &nodes, YAML::Node &yamlnode, const ProxyGroupCo
 }
 if (!filtered_nodelist.empty())
 singlegroup["proxies"] = filtered_nodelist;
-//singlegroup.SetStyle(YAML::EmitterStyle::Flow);
+if(group_block)
+singlegroup.SetStyle(YAML::EmitterStyle::Block);
+else
+singlegroup.SetStyle(YAML::EmitterStyle::Flow);
 
 bool replace_flag = false;
 for (auto &&original_group: original_groups) {
@@ -694,6 +748,8 @@ proxyToClash(std::vector<Proxy> &nodes, YAML::Node &yamlnode, const ProxyGroupCo
 if (!replace_flag)
 original_groups.push_back(singlegroup);
 }
+if(group_compact)
+original_groups.SetStyle(YAML::EmitterStyle::Flow);
 
 if (ext.clash_new_field_name)
 yamlnode["proxy-groups"] = original_groups;
@@ -975,11 +1031,18 @@ std::string proxyToSurge(std::vector<Proxy> &nodes, const std::string &base_conf
 proxy += ", version=" + std::to_string(x.SnellVersion);
 break;
 case ProxyType::Hysteria2:
-if (surge_ver < 4 && surge_ver != -3)
+if(surge_ver < 4)
 continue;
-proxy = "hysteria2, " + hostname + ", " + port + ", password=" + password;
-if (!scv.is_undef())
-proxy += ", skip-cert-verify=" + scv.get_str();
+proxy = "hysteria, " + hostname + ", " + port + ", password=" + password;
+if(x.DownSpeed)
+proxy += ", download-bandwidth=" + x.DownSpeed;
+
+if(!scv.is_undef())
+proxy += ",skip-cert-verify=" + std::string(scv.get() ? "true" : "false");
+if(!x.Fingerprint.empty())
+proxy += ",server-cert-fingerprint-sha256=" + x.Fingerprint;
+if(!x.SNI.empty())
+proxy += ",sni=" + x.SNI;
 break;
 case ProxyType::WireGuard:
 if (surge_ver < 4 && surge_ver != -3)
@@ -1011,7 +1074,8 @@ std::string proxyToSurge(std::vector<Proxy> &nodes, const std::string &base_conf
 proxy += ", tfo=" + tfo.get_str();
 if (!udp.is_undef())
 proxy += ", udp-relay=" + udp.get_str();
+if (underlying_proxy != "")
+proxy += ", underlying-proxy=" + underlying_proxy;
 if (ext.nodelist)
 output_nodelist += x.Remark + " = " + proxy + "\n";
 else {
@@ -1030,22 +1094,24 @@ std::string proxyToSurge(std::vector<Proxy> &nodes, const std::string &base_conf
 string_array filtered_nodelist;
 std::string group;
 
-switch (x.Type) {
-case ProxyGroupType::Select:
-case ProxyGroupType::URLTest:
-case ProxyGroupType::Fallback:
-break;
-case ProxyGroupType::LoadBalance:
-if (surge_ver < 1 && surge_ver != -3)
-continue;
-break;
-case ProxyGroupType::SSID:
-group = x.TypeStr() + ",default=" + x.Proxies[0] + ",";
+switch(x.Type)
+{
+case ProxyGroupType::Select:
+case ProxyGroupType::Smart:
+case ProxyGroupType::URLTest:
+case ProxyGroupType::Fallback:
+break;
+case ProxyGroupType::LoadBalance:
+if(surge_ver < 1 && surge_ver != -3)
+continue;
+break;
+case ProxyGroupType::SSID:
+group = x.TypeStr() + ",default=" + x.Proxies[0] + ",";
 group += join(x.Proxies.begin() + 1, x.Proxies.end(), ",");
 ini.set("{NONAME}", x.Name + " = " + group); //insert order
 continue;
 default:
 continue;
 }
 
 for (const auto &y: x.Proxies)
@@ -1693,7 +1759,8 @@ void proxyToQuanX(std::vector<Proxy> &nodes, INIReader &ini, std::vector<Ruleset
 std::string proxies = join(filtered_nodelist, ", ");
 
 std::string singlegroup = type + "=" + x.Name + ", " + proxies;
-if (type != "static") {
+if(x.Type != ProxyGroupType::Select && x.Type != ProxyGroupType::SSID)
+{
 singlegroup += ", check-interval=" + std::to_string(x.Interval);
 if (x.Tolerance > 0)
 singlegroup += ", tolerance=" + std::to_string(x.Tolerance);
|
|||||||
bool clash_classical_ruleset = false;
|
bool clash_classical_ruleset = false;
|
||||||
std::string sort_script;
|
std::string sort_script;
|
||||||
std::string clash_proxies_style = "flow";
|
std::string clash_proxies_style = "flow";
|
||||||
|
std::string clash_proxy_groups_style = "flow";
|
||||||
bool authorized = false;
|
bool authorized = false;
|
||||||
|
|
||||||
extra_settings() = default;
|
extra_settings() = default;
|
||||||
|
|||||||
@@ -357,10 +357,10 @@ int renderClashScript(YAML::Node &base_rule, std::vector<RulesetContent> &rulese
|
|||||||
if(x.rule_type == RULESET_CLASH_IPCIDR || x.rule_type == RULESET_CLASH_DOMAIN || x.rule_type == RULESET_CLASH_CLASSICAL)
|
if(x.rule_type == RULESET_CLASH_IPCIDR || x.rule_type == RULESET_CLASH_DOMAIN || x.rule_type == RULESET_CLASH_CLASSICAL)
|
||||||
{
|
{
|
||||||
//rule_name = std::to_string(hash_(rule_group + rule_path));
|
//rule_name = std::to_string(hash_(rule_group + rule_path));
|
||||||
rule_name = old_rule_name = findFileName(rule_path);
|
rule_name = old_rule_name = urlDecode(findFileName(rule_path));
|
||||||
int idx = 2;
|
int idx = 2;
|
||||||
while(std::find(groups.begin(), groups.end(), rule_name) != groups.end())
|
while(std::find(groups.begin(), groups.end(), rule_name) != groups.end())
|
||||||
rule_name = old_rule_name + "_" + std::to_string(idx++);
|
rule_name = old_rule_name + " " + std::to_string(idx++);
|
||||||
names[rule_name] = rule_group;
|
names[rule_name] = rule_group;
|
||||||
urls[rule_name] = "*" + rule_path;
|
urls[rule_name] = "*" + rule_path;
|
||||||
rule_type[rule_name] = x.rule_type;
|
rule_type[rule_name] = x.rule_type;
|
||||||
@@ -386,10 +386,10 @@ int renderClashScript(YAML::Node &base_rule, std::vector<RulesetContent> &rulese
|
|||||||
if(fileExist(rule_path, true) || isLink(rule_path))
|
if(fileExist(rule_path, true) || isLink(rule_path))
|
||||||
{
|
{
|
||||||
//rule_name = std::to_string(hash_(rule_group + rule_path));
|
//rule_name = std::to_string(hash_(rule_group + rule_path));
|
||||||
rule_name = old_rule_name = findFileName(rule_path);
|
rule_name = old_rule_name = urlDecode(findFileName(rule_path));
|
||||||
int idx = 2;
|
int idx = 2;
|
||||||
while(std::find(groups.begin(), groups.end(), rule_name) != groups.end())
|
while(std::find(groups.begin(), groups.end(), rule_name) != groups.end())
|
||||||
rule_name = old_rule_name + "_" + std::to_string(idx++);
|
rule_name = old_rule_name + " " + std::to_string(idx++);
|
||||||
names[rule_name] = rule_group;
|
names[rule_name] = rule_group;
|
||||||
urls[rule_name] = rule_path_typed;
|
urls[rule_name] = rule_path_typed;
|
||||||
rule_type[rule_name] = x.rule_type;
|
rule_type[rule_name] = x.rule_type;
|
||||||
@@ -436,9 +436,9 @@ int renderClashScript(YAML::Node &base_rule, std::vector<RulesetContent> &rulese
|
|||||||
if(vArray.size() < 2)
|
if(vArray.size() < 2)
|
||||||
continue;
|
continue;
|
||||||
if(keywords.find(rule_name) == keywords.end())
|
if(keywords.find(rule_name) == keywords.end())
|
||||||
keywords[rule_name] = "\"" + vArray[1] + "\"";
|
keywords[rule_name] = "\"" + trim(vArray[1]) + "\"";
|
||||||
else
|
else
|
||||||
keywords[rule_name] += ",\"" + vArray[1] + "\"";
|
keywords[rule_name] += ",\"" + trim(vArray[1]) + "\"";
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
@@ -449,7 +449,7 @@ int renderClashScript(YAML::Node &base_rule, std::vector<RulesetContent> &rulese
|
|||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
strLine = vArray[0] + "," + vArray[1] + "," + rule_group;
|
strLine = vArray[0] + "," + trim(vArray[1]) + "," + rule_group;
|
||||||
if(vArray.size() > 2)
|
if(vArray.size() > 2)
|
||||||
strLine += "," + vArray[2];
|
strLine += "," + vArray[2];
|
||||||
}
|
}
|
||||||
@@ -466,14 +466,16 @@ int renderClashScript(YAML::Node &base_rule, std::vector<RulesetContent> &rulese
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
if(has_domain[rule_name] && !script)
|
if(has_domain[rule_name] && !script)
|
||||||
rules.emplace_back("RULE-SET," + rule_name + "_domain," + rule_group);
|
rules.emplace_back("RULE-SET," + rule_name + " (Domain)," + rule_group);
|
||||||
if(has_ipcidr[rule_name] && !script)
|
if(has_ipcidr[rule_name] && !script)
|
||||||
{
|
{
|
||||||
if(has_no_resolve)
|
if(has_no_resolve)
|
||||||
rules.emplace_back("RULE-SET," + rule_name + "_ipcidr," + rule_group + ",no-resolve");
|
rules.emplace_back("RULE-SET," + rule_name + " (IP-CIDR)," + rule_group + ",no-resolve");
|
||||||
else
|
else
|
||||||
rules.emplace_back("RULE-SET," + rule_name + "_ipcidr," + rule_group);
|
rules.emplace_back("RULE-SET," + rule_name + " (IP-CIDR)," + rule_group);
|
||||||
}
|
}
|
||||||
|
if(!has_domain[rule_name] && !has_ipcidr[rule_name] && !script)
|
||||||
|
rules.emplace_back("RULE-SET," + rule_name + "," + rule_group);
|
||||||
if(std::find(groups.begin(), groups.end(), rule_name) == groups.end())
|
if(std::find(groups.begin(), groups.end(), rule_name) == groups.end())
|
||||||
groups.emplace_back(rule_name);
|
groups.emplace_back(rule_name);
|
||||||
}
|
}
|
||||||
@@ -488,14 +490,14 @@ int renderClashScript(YAML::Node &base_rule, std::vector<RulesetContent> &rulese
|
|||||||
{
|
{
|
||||||
std::string yaml_key = x;
|
std::string yaml_key = x;
|
||||||
if(rule_type[x] != RULESET_CLASH_DOMAIN)
|
if(rule_type[x] != RULESET_CLASH_DOMAIN)
|
||||||
yaml_key += "_domain";
|
yaml_key += " (Domain)";
|
||||||
base_rule["rule-providers"][yaml_key]["type"] = "http";
|
base_rule["rule-providers"][yaml_key]["type"] = "http";
|
||||||
base_rule["rule-providers"][yaml_key]["behavior"] = "domain";
|
base_rule["rule-providers"][yaml_key]["behavior"] = "domain";
|
||||||
if(url[0] == '*')
|
if(url[0] == '*')
|
||||||
base_rule["rule-providers"][yaml_key]["url"] = url.substr(1);
|
base_rule["rule-providers"][yaml_key]["url"] = url.substr(1);
|
||||||
else
|
else
|
||||||
base_rule["rule-providers"][yaml_key]["url"] = remote_path_prefix + "/getruleset?type=3&url=" + urlSafeBase64Encode(url);
|
base_rule["rule-providers"][yaml_key]["url"] = remote_path_prefix + "/getruleset?type=3&url=" + urlSafeBase64Encode(url);
|
||||||
base_rule["rule-providers"][yaml_key]["path"] = "./providers/rule-provider_" + yaml_key + ".yaml";
|
base_rule["rule-providers"][yaml_key]["path"] = "./providers/" + std::to_string(hash_(url)) + "_domain.yaml";
|
||||||
if(interval)
|
if(interval)
|
||||||
base_rule["rule-providers"][yaml_key]["interval"] = interval;
|
base_rule["rule-providers"][yaml_key]["interval"] = interval;
|
||||||
}
|
}
|
||||||
@@ -503,14 +505,14 @@ int renderClashScript(YAML::Node &base_rule, std::vector<RulesetContent> &rulese
         {
             std::string yaml_key = x;
             if(rule_type[x] != RULESET_CLASH_IPCIDR)
-                yaml_key += "_ipcidr";
+                yaml_key += " (IP-CIDR)";
             base_rule["rule-providers"][yaml_key]["type"] = "http";
             base_rule["rule-providers"][yaml_key]["behavior"] = "ipcidr";
             if(url[0] == '*')
                 base_rule["rule-providers"][yaml_key]["url"] = url.substr(1);
             else
                 base_rule["rule-providers"][yaml_key]["url"] = remote_path_prefix + "/getruleset?type=4&url=" + urlSafeBase64Encode(url);
-            base_rule["rule-providers"][yaml_key]["path"] = "./providers/rule-provider_" + yaml_key + ".yaml";
+            base_rule["rule-providers"][yaml_key]["path"] = "./providers/" + std::to_string(hash_(url)) + "_ipcidr.yaml";
             if(interval)
                 base_rule["rule-providers"][yaml_key]["interval"] = interval;
         }
@@ -523,7 +525,7 @@ int renderClashScript(YAML::Node &base_rule, std::vector<RulesetContent> &rulese
                 base_rule["rule-providers"][yaml_key]["url"] = url.substr(1);
             else
                 base_rule["rule-providers"][yaml_key]["url"] = remote_path_prefix + "/getruleset?type=6&url=" + urlSafeBase64Encode(url);
-            base_rule["rule-providers"][yaml_key]["path"] = "./providers/rule-provider_" + yaml_key + ".yaml";
+            base_rule["rule-providers"][yaml_key]["path"] = "./providers/" + std::to_string(hash_(url)) + ".yaml";
             if(interval)
                 base_rule["rule-providers"][yaml_key]["interval"] = interval;
         }
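The hunks above rename Clash rule-provider files from the human-readable "rule-provider_<key>.yaml" form to a hash of the ruleset URL, so the cached file name no longer depends on how the group is labelled. A minimal sketch of that naming scheme, using std::hash<std::string> as a stand-in for the project's hash_ functor (an assumption; the real hash may differ):

#include <functional>
#include <iostream>
#include <string>

// Sketch only: derive the on-disk provider path from the ruleset URL,
// mirroring the "./providers/<hash>(_domain|_ipcidr).yaml" pattern above.
std::string providerPath(const std::string &url, const std::string &suffix)
{
    std::hash<std::string> hash_;               // stand-in for subconverter's hash_
    return "./providers/" + std::to_string(hash_(url)) + suffix + ".yaml";
}

int main()
{
    std::string url = "https://example.com/rulesets/telegram.list";  // hypothetical URL
    std::cout << providerPath(url, "_domain") << '\n';
    std::cout << providerPath(url, "_ipcidr") << '\n';
}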
File diff suppressed because it is too large
@@ -564,14 +564,14 @@ void readYAMLConf(YAML::Node &node)
     writeLog(0, "Load preference settings in YAML format completed.", LOG_LEVEL_INFO);
 }
 
-//template <class T, class... U>
-//void find_if_exist(const toml::value &v, const toml::key &k, T& target, U&&... args)
-//{
-//    if(v.contains(k)) target = toml::find<T>(v, k);
-//    if constexpr (sizeof...(args) > 0) find_if_exist(v, std::forward<U>(args)...);
-//}
+template <class T, class... U>
+void find_if_exist(const toml::value &v, const toml::value::key_type &k, T& target, U&&... args)
+{
+    if(v.contains(k)) target = toml::find<T>(v, k);
+    if constexpr (sizeof...(args) > 0) find_if_exist(v, std::forward<U>(args)...);
+}
 
-void operate_toml_kv_table(const std::vector<toml::table> &arr, const toml::key &key_name, const toml::key &value_name, std::function<void (const toml::value&, const toml::value&)> binary_op)
+void operate_toml_kv_table(const std::vector<toml::table> &arr, const toml::value::key_type &key_name, const toml::value::key_type &value_name, std::function<void (const toml::value&, const toml::value&)> binary_op)
 {
     for(const toml::table &table : arr)
     {
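The find_if_exist helper enabled above reads any number of optional keys out of a TOML table in one variadic call: each key/target pair is assigned only if the key exists, then the remaining pairs are handled recursively. A hedged usage sketch against toml11 (the toml::value / toml::find API already visible in the diff); the keys "port", "listen" and "api_token" are illustrative only:

#include <iostream>
#include <string>
#include <toml.hpp>  // toml11

// Same shape as the helper in the hunk above: assign target only when the
// key exists, then recurse over the remaining key/target pairs.
template <class T, class... U>
void find_if_exist(const toml::value &v, const toml::value::key_type &k, T &target, U&&... args)
{
    if(v.contains(k)) target = toml::find<T>(v, k);
    if constexpr (sizeof...(args) > 0) find_if_exist(v, std::forward<U>(args)...);
}

int main()
{
    toml::value section(toml::table{{"port", 25500}, {"listen", "0.0.0.0"}});
    int port = 0;
    std::string listen = "127.0.0.1", api_token;   // "api_token" is absent, keeps its default
    find_if_exist(section, "port", port, "listen", listen, "api_token", api_token);
    std::cout << listen << ":" << port << '\n';    // prints 0.0.0.0:25500
}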
@@ -49,7 +49,7 @@ struct Settings
     tribool UDPFlag, TFOFlag, skipCertVerify, TLS13Flag, enableInsert;
     bool enableSort = false, updateStrict = false;
     bool clashUseNewField = false, singBoxAddClashModes = true;
-    std::string clashProxiesStyle = "flow";
+    std::string clashProxiesStyle = "flow", clashProxyGroupsStyle = "block";
     std::string proxyConfig, proxyRuleset, proxySubscription;
     int updateInterval = 0;
     std::string sortScript, filterScript;
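clashProxyGroupsStyle joins the existing clashProxiesStyle switch, presumably selecting flow or block YAML output for proxy groups. A hedged sketch of how such a flag is commonly applied with yaml-cpp (this wiring is an assumption, not necessarily how subconverter uses the setting):

#include <yaml-cpp/yaml.h>
#include <iostream>
#include <string>

// Sketch: map a style string ("flow" / "block") onto a yaml-cpp node,
// the kind of switch a proxy-groups style option would drive.
void applyStyle(YAML::Node node, const std::string &style)
{
    if(style == "flow")
        node.SetStyle(YAML::EmitterStyle::Flow);
    else if(style == "block")
        node.SetStyle(YAML::EmitterStyle::Block);
    // any other value: leave yaml-cpp's default
}

int main()
{
    YAML::Node group;
    group["name"] = "Proxy";
    group["proxies"].push_back("node-a");
    group["proxies"].push_back("node-b");
    applyStyle(group, "block");
    std::cout << YAML::Dump(group) << '\n';
}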
@@ -87,11 +87,13 @@ static int logger(CURL *handle, curl_infotype type, char *data, size_t size, voi
     switch(type)
     {
     case CURLINFO_TEXT:
-        prefix = "CURL_INFO";
+        prefix = "CURL_INFO: ";
         break;
     case CURLINFO_HEADER_IN:
+        prefix = "CURL_HEADER: < ";
+        break;
     case CURLINFO_HEADER_OUT:
-        prefix = "CURL_HEADER";
+        prefix = "CURL_HEADER: > ";
         break;
     case CURLINFO_DATA_IN:
     case CURLINFO_DATA_OUT:
@@ -105,7 +107,6 @@ static int logger(CURL *handle, curl_infotype type, char *data, size_t size, voi
         for(auto &x : lines)
         {
             std::string log_content = prefix;
-            log_content += ": ";
             log_content += x;
             writeLog(0, log_content, LOG_LEVEL_VERBOSE);
         }
@@ -113,7 +114,6 @@ static int logger(CURL *handle, curl_infotype type, char *data, size_t size, voi
     else
     {
         std::string log_content = prefix;
-        log_content += ": ";
         log_content += trimWhitespace(content);
         writeLog(0, log_content, LOG_LEVEL_VERBOSE);
     }
@@ -172,7 +172,8 @@ static int curlGet(const FetchArgument &argument, FetchResult &result)
     {
         for(auto &x : *argument.request_headers)
         {
-            header_list = curl_slist_append(header_list, (x.first + ": " + x.second).data());
+            auto header = x.first + ": " + x.second;
+            header_list = curl_slist_append(header_list, header.data());
         }
         if(!argument.request_headers->contains("User-Agent"))
             curl_easy_setopt(curl_handle, CURLOPT_USERAGENT, user_agent_str);
@@ -233,7 +234,7 @@ static int curlGet(const FetchArgument &argument, FetchResult &result)
     while(true)
     {
         retVal = curl_easy_perform(curl_handle);
-        if(retVal == CURLE_OK || max_fails <= fail_count)
+        if(retVal == CURLE_OK || max_fails <= fail_count || global.APIMode)
             break;
         else
             fail_count++;
@@ -233,10 +233,10 @@ int main(int argc, char *argv[])
         }
     }
     std::string type = getUrlArg(request.argument, "type");
-    if(type == "form")
-        fileWrite(global.prefPath, getFormData(request.postdata), true);
-    else if(type == "direct")
+    if(type == "form" || type == "direct")
+    {
         fileWrite(global.prefPath, request.postdata, true);
+    }
     else
     {
         response.status_code = 501;
@@ -9,7 +9,8 @@
 using String = std::string;
 using StringArray = std::vector<String>;
 
-enum class ProxyType {
+enum class ProxyType
+{
     Unknown,
     Shadowsocks,
     ShadowsocksR,
@@ -2,6 +2,7 @@
 #include <map>
 #include <iostream>
 #include <quickjspp.hpp>
+#include <utility>
 #include <quickjs/quickjs-libc.h>
 
 #ifdef _WIN32
@@ -226,7 +227,7 @@ public:
     qjs_fetch_Headers headers;
     std::string cookies;
     std::string postdata;
-    explicit qjs_fetch_Request(const std::string &url) : url(url) {}
+    explicit qjs_fetch_Request(std::string url) : url(std::move(url)) {}
 };
 
 class qjs_fetch_Response
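The constructor change above is the usual pass-by-value-then-move "sink argument" idiom: lvalue callers pay one copy plus one move, while callers passing a temporary pay only moves (hence the added <utility> include for std::move). A minimal self-contained illustration, deliberately unrelated to the quickjs types themselves:

#include <string>
#include <utility>

struct Request
{
    std::string url;
    // Take the string by value and move it into the member: one copy for
    // lvalue callers, no copy for rvalue/temporary callers.
    explicit Request(std::string url) : url(std::move(url)) {}
};

int main()
{
    std::string saved = "https://example.com/sub";   // hypothetical URL
    Request a(saved);                  // copies once into the parameter, then moves
    Request b("https://example.com");  // builds the temporary, then moves it
    return (a.url.size() + b.url.size()) > 0 ? 0 : 1;
}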
@@ -389,7 +390,7 @@ void script_runtime_init(qjs::Runtime &runtime)
     js_std_init_handlers(runtime.rt);
 }
 
-int ShowMsgbox(const std::string &title, std::string content, uint16_t type = 0)
+int ShowMsgbox(const std::string &title, const std::string &content, uint16_t type = 0)
 {
 #ifdef _WIN32
     if(!type)
@@ -424,7 +425,7 @@ struct Lambda {
 
 uint32_t currentTime()
 {
-    return time(NULL);
+    return time(nullptr);
 }
 
 int script_context_init(qjs::Context &context)
@@ -525,7 +526,7 @@ int script_context_init(qjs::Context &context)
     )", "<import>", JS_EVAL_TYPE_MODULE);
         return 0;
     }
-    catch(qjs::exception)
+    catch(qjs::exception&)
     {
         script_print_stack(context);
         return 1;
@@ -47,16 +47,23 @@ static httplib::Server::Handler makeHandler(const responseRoute &rr)
             {
                 continue;
             }
-            req.headers[h.first] = h.second;
+            req.headers.emplace(h.first.data(), h.second.data());
         }
         req.argument = request.params;
-        if (request.get_header_value("Content-Type") == "application/x-www-form-urlencoded")
-        {
-            req.postdata = urlDecode(request.body);
-        }
-        else
-        {
-            req.postdata = request.body;
-        }
+        if (request.method == "POST" || request.method == "PUT" || request.method == "PATCH")
+        {
+            if (request.is_multipart_form_data() && !request.files.empty())
+            {
+                req.postdata = request.files.begin()->second.content;
+            }
+            else if (request.get_header_value("Content-Type") == "application/x-www-form-urlencoded")
+            {
+                req.postdata = urlDecode(request.body);
+            }
+            else
+            {
+                req.postdata = request.body;
+            }
+        }
         auto result = rr.rc(req, resp);
         response.status = resp.status_code;
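The rewritten handler above only consumes a request body for POST/PUT/PATCH and then picks the payload in a fixed order: multipart form uploads first, then URL-encoded forms, then the raw body. A hedged standalone sketch of that precedence, with a plain struct standing in for the httplib request (the real cpp-httplib types carry more state):

#include <map>
#include <string>

// Minimal stand-ins for illustration; not the real cpp-httplib types.
struct FakeRequest
{
    std::string method, body, content_type;
    std::map<std::string, std::string> files;   // multipart parts: name -> content
};

std::string urlDecodeStub(const std::string &s) { return s; }  // placeholder decoder

// Mirrors the precedence introduced in the hunk above.
std::string extractPostData(const FakeRequest &req)
{
    if(req.method != "POST" && req.method != "PUT" && req.method != "PATCH")
        return {};
    if(!req.files.empty())                                       // multipart upload wins
        return req.files.begin()->second;
    if(req.content_type == "application/x-www-form-urlencoded")  // HTML form post
        return urlDecodeStub(req.body);
    return req.body;                                             // anything else: raw body
}

int main()
{
    FakeRequest r{"POST", "a=1&b=2", "application/x-www-form-urlencoded", {}};
    return extractPostData(r).empty() ? 1 : 0;
}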
@@ -163,6 +170,7 @@ int WebServer::start_web_server_multi(listener_args *args)
         {
             res.set_header("Access-Control-Allow-Headers", req.get_header_value("Access-Control-Request-Headers"));
         }
+        res.set_header("Access-Control-Allow-Origin", "*");
         return httplib::Server::HandlerResponse::Unhandled;
     });
     for (auto &x : redirect_map)
@@ -187,7 +195,7 @@ int WebServer::start_web_server_multi(listener_args *args)
     {
         try
         {
-            std::rethrow_exception(e);
+            if (e) std::rethrow_exception(e);
         }
         catch (const httplib::Error &err)
         {
@@ -212,6 +220,9 @@ int WebServer::start_web_server_multi(listener_args *args)
     {
         server.set_mount_point("/", serve_file_root);
     }
+    server.new_task_queue = [args] {
+        return new httplib::ThreadPool(args->max_workers);
+    };
     server.bind_to_port(args->listen_address, args->port, 0);
 
     std::thread thread([&]()
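Overriding new_task_queue is cpp-httplib's hook for replacing its default worker pool; returning an httplib::ThreadPool sized from max_workers makes the thread count a listener option rather than a compile-time default. A minimal hedged sketch of the same hook (route, port and pool size are illustrative only):

#include <httplib.h>

int main()
{
    httplib::Server server;
    size_t max_workers = 8;  // would come from listener_args in subconverter

    // Replace the default task queue with an explicitly sized thread pool.
    server.new_task_queue = [max_workers] {
        return new httplib::ThreadPool(max_workers);
    };

    server.Get("/version", [](const httplib::Request &, httplib::Response &res) {
        res.set_content("subconverter sketch", "text/plain");
    });
    server.listen("127.0.0.1", 25500);
}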
@@ -26,7 +26,8 @@ std::string getTime(int type)
         format = "%Y%m%d-%H%M%S";
         break;
     case 2:
-        format = "%Y/%m/%d %a %H:%M:%S." + std::string(cMillis);
+        format = "%Y/%m/%d %a %H:%M:%S.";
+        format += cMillis;
         break;
     case 3:
     default:
@@ -5,9 +5,16 @@
 #include <map>
 #include <string.h>
 
-struct strICaseComp {
-    bool operator()(const std::string &lhs, const std::string &rhs) const {
-        return strcasecmp(lhs.c_str(), rhs.c_str()) > 0;
+struct strICaseComp
+{
+    bool operator() (const std::string &lhs, const std::string &rhs) const
+    {
+        return std::lexicographical_compare(lhs.begin(), lhs.end(), rhs.begin(),
+                                            rhs.end(),
+                                            [](unsigned char c1, unsigned char c2)
+                                            {
+                                                return ::tolower(c1) < ::tolower(c2);
+                                            });
     }
 };
 
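The rewritten comparator drops the POSIX-only strcasecmp (and its reversed "> 0" ordering) in favour of a portable lowercase lexicographical less-than, so the struct keeps working as a case-insensitive ordering for standard maps. A small usage sketch; the header names are examples only:

#include <algorithm>
#include <cctype>
#include <iostream>
#include <map>
#include <string>

// Same comparator shape as in the hunk above: case-insensitive "less than".
struct strICaseComp
{
    bool operator()(const std::string &lhs, const std::string &rhs) const
    {
        return std::lexicographical_compare(lhs.begin(), lhs.end(),
                                            rhs.begin(), rhs.end(),
                                            [](unsigned char c1, unsigned char c2)
                                            { return ::tolower(c1) < ::tolower(c2); });
    }
};

int main()
{
    // With the comparator, lookups ignore case, e.g. for HTTP header maps.
    std::map<std::string, std::string, strICaseComp> headers;
    headers["Content-Type"] = "text/plain";
    std::cout << (headers.count("content-type") ? "found" : "missing") << '\n';  // found
}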
@@ -3,88 +3,38 @@
 #include <sstream>
 #include <string>
 #include <vector>
-#include <stdlib.h>
-#include <time.h>
+#include <cstdlib>
+#include <ctime>
+#include <random>
 
 #include "string.h"
 #include "map_extra.h"
 
 std::vector<std::string> split(const std::string &s, const std::string &separator)
 {
+    string_size bpos = 0, epos = s.find(separator);
     std::vector<std::string> result;
-    string_size i = 0;
-
-    while(i != s.size())
+    while(bpos < s.size())
     {
-        int flag = 0;
-        while(i != s.size() && flag == 0)
-        {
-            flag = 1;
-            for(char x : separator)
-                if(s[i] == x)
-                {
-                    ++i;
-                    flag = 0;
-                    break;
-                }
-        }
-
-        flag = 0;
-        string_size j = i;
-        while(j != s.size() && flag == 0)
-        {
-            for(char x : separator)
-                if(s[j] == x)
-                {
-                    flag = 1;
-                    break;
-                }
-            if(flag == 0)
-                ++j;
-        }
-        if(i != j)
-        {
-            result.push_back(s.substr(i, j-i));
-            i = j;
-        }
+        if(epos == std::string::npos)
+            epos = s.size();
+        result.push_back(s.substr(bpos, epos - bpos));
+        bpos = epos + separator.size();
+        epos = s.find(separator, bpos);
     }
     return result;
 }
 
 void split(std::vector<std::string_view> &result, std::string_view s, char separator)
 {
-    string_size i = 0;
-
-    while (i != s.size())
+    string_size bpos = 0, epos = s.find(separator);
+    while(bpos < s.size())
     {
-        int flag = 0;
-        while(i != s.size() && flag == 0)
-        {
-            flag = 1;
-            if(s[i] == separator)
-            {
-                ++i;
-                flag = 0;
-                break;
-            }
-        }
-
-        flag = 0;
-        string_size j = i;
-        while(j != s.size() && flag == 0)
-        {
-            if(s[j] == separator)
-            {
-                flag = 1;
-                break;
-            }
-            ++j;
-        }
-        if (i != j)
-        {
-            result.push_back(s.substr(i, j-i));
-            i = j;
-        }
+        if(epos == std::string_view::npos)
+            epos = s.size();
+        result.push_back(s.substr(bpos, epos - bpos));
+        bpos = epos + 1;
+        epos = s.find(separator, bpos);
     }
 }
 
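The rewritten split switches from per-character delimiter scanning to std::string::find over the whole separator, which also changes behaviour slightly: the separator is now matched as a complete substring and empty fields between consecutive separators are preserved. A hedged standalone sketch of the new logic (string_size is assumed to be std::string::size_type):

#include <iostream>
#include <string>
#include <vector>

using string_size = std::string::size_type;  // assumption: matches subconverter's alias

// Mirrors the new find()-based loop from the hunk above.
std::vector<std::string> split(const std::string &s, const std::string &separator)
{
    string_size bpos = 0, epos = s.find(separator);
    std::vector<std::string> result;
    while(bpos < s.size())
    {
        if(epos == std::string::npos)
            epos = s.size();
        result.push_back(s.substr(bpos, epos - bpos));
        bpos = epos + separator.size();
        epos = s.find(separator, bpos);
    }
    return result;
}

int main()
{
    // Prints [a] [b] [] [c] -- the old scanner would have skipped the empty field.
    for(const auto &token : split("a,b,,c", ","))
        std::cout << '[' << token << "] ";
    std::cout << '\n';
}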
@@ -141,7 +91,7 @@ std::string toUpper(const std::string &str)
 void processEscapeChar(std::string &str)
 {
     string_size pos = str.find('\\');
-    while(pos != str.npos)
+    while(pos != std::string::npos)
     {
         if(pos == str.size())
             break;
@@ -191,7 +141,7 @@ void processEscapeCharReverse(std::string &str)
 
 int parseCommaKeyValue(const std::string &input, const std::string &separator, string_pair_array &result)
 {
-    string_size bpos = 0, epos = input.find(',');
+    string_size bpos = 0, epos = input.find(separator);
     std::string kv;
     while(bpos < input.size())
     {
@@ -200,9 +150,9 @@ int parseCommaKeyValue(const std::string &input, const std::string &separator, s
         else if(epos && input[epos - 1] == '\\')
         {
             kv += input.substr(bpos, epos - bpos - 1);
-            kv += ',';
+            kv += separator;
             bpos = epos + 1;
-            epos = input.find(',', bpos);
+            epos = input.find(separator, bpos);
             continue;
         }
         kv += input.substr(bpos, epos - bpos);
@@ -213,9 +163,9 @@ int parseCommaKeyValue(const std::string &input, const std::string &separator, s
         result.emplace_back(kv.substr(0, eqpos), kv.substr(eqpos + 1));
         kv.clear();
         bpos = epos + 1;
-        epos = input.find(',', bpos);
+        epos = input.find(separator, bpos);
     }
-    if(kv.size())
+    if(!kv.empty())
     {
         string_size eqpos = kv.find('=');
         if(eqpos == std::string::npos)
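These two hunks finish wiring the separator parameter through parseCommaKeyValue, which previously still hard-coded ',' in several places. A hedged toy sketch of how the fixed function is meant to be called with a non-comma separator; string_pair_array is assumed to be a vector of string pairs, and this simplified version omits the real function's escaped-separator handling:

#include <iostream>
#include <string>
#include <utility>
#include <vector>

using string_pair_array = std::vector<std::pair<std::string, std::string>>;  // assumption

// Toy stand-in: split on `separator`, then on '=' within each field.
int parseCommaKeyValue(const std::string &input, const std::string &separator, string_pair_array &result)
{
    std::string::size_type bpos = 0, epos = input.find(separator);
    while(bpos < input.size())
    {
        if(epos == std::string::npos)
            epos = input.size();
        std::string kv = input.substr(bpos, epos - bpos);
        auto eqpos = kv.find('=');
        if(eqpos != std::string::npos)
            result.emplace_back(kv.substr(0, eqpos), kv.substr(eqpos + 1));
        bpos = epos + separator.size();
        epos = input.find(separator, bpos);
    }
    return 0;
}

int main()
{
    string_pair_array kvs;
    parseCommaKeyValue("mode=rule|dns=1.1.1.1", "|", kvs);   // '|' works now, not just ','
    for(const auto &kv : kvs)
        std::cout << kv.first << " -> " << kv.second << '\n';
}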
@@ -328,12 +278,12 @@ std::string getUrlArg(const std::string &url, const std::string &request)
     while(pos)
     {
         pos = url.rfind(pattern, pos);
-        if(pos != url.npos)
+        if(pos != std::string::npos)
         {
             if(pos == 0 || url[pos - 1] == '&' || url[pos - 1] == '?')
             {
                 pos += pattern.size();
-                return url.substr(pos, url.find("&", pos) - pos);
+                return url.substr(pos, url.find('&', pos) - pos);
             }
         }
         else
@@ -410,23 +360,24 @@ bool isStrUTF8(const std::string &data)
 std::string randomStr(int len)
 {
     std::string retData;
-    srand(time(NULL));
-    int cnt = 0;
-    while(cnt < len)
+    std::random_device rd;
+    std::mt19937 gen(rd());
+    std::uniform_int_distribution<> dis(0, 61);
+    for(int i = 0; i < len; i++)
     {
-        switch((rand() % 3))
+        int r = dis(gen);
+        if (r < 26)
         {
-        case 1:
-            retData += ('A' + rand() % 26);
-            break;
-        case 2:
-            retData += ('a' + rand() % 26);
-            break;
-        default:
-            retData += ('0' + rand() % 10);
-            break;
+            retData.push_back('a' + r);
+        }
+        else if (r < 52)
+        {
+            retData.push_back('A' + r - 26);
+        }
+        else
+        {
+            retData.push_back('0' + r - 52);
         }
-        cnt++;
     }
     return retData;
 }
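randomStr now draws every character from a single uniform 0-61 distribution over a Mersenne Twister engine instead of reseeding rand() with the current second on each call, which previously made tokens generated within the same second identical. A hedged sketch of the same mapping; the engine is made static here purely to avoid re-creating it, whereas the diff constructs it per call:

#include <iostream>
#include <random>
#include <string>

// Sketch of the new generator: one engine, uniform over a-z, A-Z, 0-9 (62 symbols).
std::string randomStr(int len)
{
    static std::mt19937 gen{std::random_device{}()};   // seeded once, not per call
    std::uniform_int_distribution<> dis(0, 61);
    std::string retData;
    for(int i = 0; i < len; i++)
    {
        int r = dis(gen);
        if(r < 26)       retData.push_back('a' + r);
        else if(r < 52)  retData.push_back('A' + r - 26);
        else             retData.push_back('0' + r - 52);
    }
    return retData;
}

int main()
{
    // Two tokens generated back to back are still independent.
    std::cout << randomStr(16) << '\n' << randomStr(16) << '\n';
}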
@@ -451,7 +402,7 @@ int to_int(const std::string &str, int def_value)
 
 std::string join(const string_array &arr, const std::string &delimiter)
 {
-    if(arr.size() == 0)
+    if(arr.empty())
         return "";
     if(arr.size() == 1)
         return arr[0];