Mirror of https://github.com/asdlokj1qpi233/subconverter.git (synced 2025-10-25 10:22:22 +00:00)
Merge remote-tracking branch 'fork/master' into dev
# Conflicts:
#	.github/workflows/build.yml
#	.github/workflows/docker.yml
#	.gitignore
#	base/pref.example.toml
#	base/snippets/emoji.toml
#	base/snippets/emoji.txt
#	scripts/build.macos.release.sh
#	scripts/build.windows.release.sh
#	scripts/rules_config.conf
#	src/generator/config/subexport.cpp
#	src/handler/interfaces.cpp
#	src/handler/settings.cpp
#	src/parser/config/proxy.h
#	src/parser/subparser.cpp
#	src/parser/subparser.h
#	src/utils/map_extra.h
#	src/version.h
.github/workflows/build.yml (vendored, 174 changed lines)
@@ -1,5 +1,5 @@
|
||||
name: GitHub CI
|
||||
on:
|
||||
on:
|
||||
push:
|
||||
branches: [ master ]
|
||||
tags:
|
||||
@@ -7,7 +7,7 @@ on:
|
||||
workflow_dispatch:
|
||||
pull_request:
|
||||
|
||||
concurrency:
|
||||
concurrency:
|
||||
group: ${{ github.ref }}-${{ github.workflow }}
|
||||
cancel-in-progress: true
|
||||
|
||||
@@ -24,36 +24,34 @@ jobs:
|
||||
os: ubuntu-latest
|
||||
- arch: armv7
|
||||
artifact: subconverter_armv7
|
||||
os: ubuntu-latest
|
||||
os: [self-hosted, linux, ARM]
|
||||
- arch: aarch64
|
||||
artifact: subconverter_aarch64
|
||||
os: ubuntu-latest
|
||||
os: [self-hosted, linux, ARM64]
|
||||
runs-on: ${{ matrix.os }}
|
||||
name: Linux ${{ matrix.arch }} Build
|
||||
steps:
|
||||
- name: Checkout base
|
||||
uses: actions/checkout@v4
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
- name: Add commit id into version
if: ${{ !startsWith(github.ref, 'refs/tags/') }}
run: SHA=$(git rev-parse --short HEAD) && sed -i 's/\(v[0-9]\.[0-9]\.[0-9]\)/\1-'"$SHA"'/' src/version.h
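This step stamps the short commit hash onto the version string for untagged builds. A minimal Python sketch of the same substitution, assuming src/version.h carries a v<major>.<minor>.<patch> literal (the exact line below is hypothetical, not taken from the repo):

    import re

    sha = "abc1234"  # in the workflow this comes from `git rev-parse --short HEAD`
    line = '#define VERSION "v0.9.0"'  # hypothetical content of src/version.h
    print(re.sub(r"(v[0-9]\.[0-9]\.[0-9])", r"\1-" + sha, line))
    # -> #define VERSION "v0.9.0-abc1234"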
- name: Build
|
||||
run: docker run --rm -v $GITHUB_WORKSPACE:/root/workdir multiarch/alpine:${{ matrix.arch }}-latest-stable /bin/sh -c "apk add bash git nodejs npm && cd /root/workdir && chmod +x scripts/build.alpine.release.sh && bash scripts/build.alpine.release.sh"
|
||||
- name: Upload
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ matrix.artifact }}
|
||||
path: subconverter/
|
||||
- name: Package Release
|
||||
if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
|
||||
run: tar czf ${{ matrix.artifact }}.tar.gz subconverter
|
||||
- name: Draft Release
|
||||
if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
files: ${{ matrix.artifact }}.tar.gz
|
||||
draft: true
|
||||
- name: Checkout base
|
||||
uses: actions/checkout@v4
|
||||
- name: Add commit id into version
|
||||
if: ${{ !startsWith(github.ref, 'refs/tags/') }}
|
||||
run: SHA=$(git rev-parse --short HEAD) && sed -i 's/\(v[0-9]\.[0-9]\.[0-9]\)/\1-'"$SHA"'/' src/version.h
|
||||
- name: Build
|
||||
run: docker run --rm -v $GITHUB_WORKSPACE:/root/workdir multiarch/alpine:${{ matrix.arch }}-latest-stable /bin/sh -c "apk add bash git nodejs npm && cd /root/workdir && chmod +x scripts/build.alpine.release.sh && bash scripts/build.alpine.release.sh"
|
||||
- name: Upload
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ matrix.artifact }}
|
||||
path: subconverter/
|
||||
- name: Package Release
|
||||
if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
|
||||
run: tar czf ${{ matrix.artifact }}.tar.gz subconverter
|
||||
- name: Draft Release
|
||||
if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
files: ${{ matrix.artifact }}.tar.gz
|
||||
draft: true
|
||||
|
||||
macos_build:
|
||||
strategy:
|
||||
@@ -68,31 +66,31 @@ jobs:
|
||||
runs-on: ${{ matrix.os }}
|
||||
name: macOS ${{ matrix.arch }} Build
|
||||
steps:
|
||||
- name: Checkout base
|
||||
uses: actions/checkout@v4
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.11'
|
||||
- name: Add commit id into version
|
||||
if: ${{ !startsWith(github.ref, 'refs/tags/') }}
|
||||
run: SHA=$(git rev-parse --short HEAD) && sed -i -e 's/\(v[0-9]\.[0-9]\.[0-9]\)/\1-'"$SHA"'/' src/version.h
|
||||
- name: Build
|
||||
run: bash scripts/build.macos.release.sh
|
||||
- name: Upload
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ matrix.artifact }}
|
||||
path: subconverter/
|
||||
- name: Package Release
|
||||
if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
|
||||
run: tar czf ${{ matrix.artifact }}.tar.gz subconverter
|
||||
- name: Draft Release
|
||||
if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
files: ${{ matrix.artifact }}.tar.gz
|
||||
draft: true
|
||||
- name: Checkout base
|
||||
uses: actions/checkout@v4
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.11'
|
||||
- name: Add commit id into version
|
||||
if: ${{ !startsWith(github.ref, 'refs/tags/') }}
|
||||
run: SHA=$(git rev-parse --short HEAD) && sed -i -e 's/\(v[0-9]\.[0-9]\.[0-9]\)/\1-'"$SHA"'/' src/version.h
|
||||
- name: Build
|
||||
run: bash scripts/build.macos.release.sh
|
||||
- name: Upload
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ matrix.artifact }}
|
||||
path: subconverter/
|
||||
- name: Package Release
|
||||
if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
|
||||
run: tar czf ${{ matrix.artifact }}.tar.gz subconverter
|
||||
- name: Draft Release
|
||||
if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
files: ${{ matrix.artifact }}.tar.gz
|
||||
draft: true
|
||||
|
||||
windows_build:
|
||||
strategy:
|
||||
@@ -112,39 +110,39 @@ jobs:
|
||||
run:
|
||||
shell: msys2 {0}
|
||||
steps:
|
||||
- name: Checkout base
|
||||
uses: actions/checkout@v4
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.11'
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '16'
|
||||
- name: Setup MSYS2
|
||||
uses: msys2/setup-msys2@v2
|
||||
with:
|
||||
update: true
|
||||
install: base-devel git mingw-w64-${{ matrix.env }}-gcc mingw-w64-${{ matrix.env }}-cmake mingw-w64-${{ matrix.env }}-pcre2 patch
|
||||
msystem: ${{ matrix.msystem }}
|
||||
path-type: inherit
|
||||
- name: Add commit id into version
|
||||
if: ${{ !startsWith(github.ref, 'refs/tags/') }}
|
||||
run: SHA=$(git rev-parse --short HEAD) && sed -i 's/\(v[0-9]\.[0-9]\.[0-9]\)/\1-'"$SHA"'/' src/version.h
|
||||
- name: Build
|
||||
run: bash scripts/build.windows.release.sh
|
||||
- name: Upload
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ matrix.artifact }}
|
||||
path: subconverter/
|
||||
- name: Package Release
|
||||
if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
|
||||
run: 7z a ${{ matrix.artifact }}.7z subconverter/
|
||||
- name: Draft Release
|
||||
if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
files: ${{ matrix.artifact }}.7z
|
||||
draft: true
|
||||
- name: Checkout base
|
||||
uses: actions/checkout@v4
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.11'
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '16'
|
||||
- name: Setup MSYS2
|
||||
uses: msys2/setup-msys2@v2
|
||||
with:
|
||||
update: true
|
||||
install: base-devel git mingw-w64-${{ matrix.env }}-gcc mingw-w64-${{ matrix.env }}-cmake mingw-w64-${{ matrix.env }}-pcre2 patch
|
||||
msystem: ${{ matrix.msystem }}
|
||||
path-type: inherit
|
||||
- name: Add commit id into version
|
||||
if: ${{ !startsWith(github.ref, 'refs/tags/') }}
|
||||
run: SHA=$(git rev-parse --short HEAD) && sed -i 's/\(v[0-9]\.[0-9]\.[0-9]\)/\1-'"$SHA"'/' src/version.h
|
||||
- name: Build
|
||||
run: bash scripts/build.windows.release.sh
|
||||
- name: Upload
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ matrix.artifact }}
|
||||
path: subconverter/
|
||||
- name: Package Release
|
||||
if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
|
||||
run: 7z a ${{ matrix.artifact }}.7z subconverter/
|
||||
- name: Draft Release
|
||||
if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
files: ${{ matrix.artifact }}.7z
|
||||
draft: true
|
||||
|
||||
.github/workflows/docker.yml (vendored, 12 changed lines)
@@ -1,16 +1,16 @@
|
||||
name: Publish Docker Image
|
||||
on:
|
||||
on:
|
||||
push:
|
||||
branches: [ master ]
|
||||
tags:
|
||||
- '**'
|
||||
|
||||
concurrency:
|
||||
concurrency:
|
||||
group: ${{ github.ref }}-${{ github.workflow }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
REGISTRY_IMAGE: asdlokj1qpi23/subconverter
|
||||
REGISTRY_IMAGE: tindy2013/subconverter
|
||||
|
||||
jobs:
|
||||
build:
|
||||
@@ -22,9 +22,9 @@ jobs:
|
||||
- platform: linux/386
|
||||
os: ubuntu-latest
|
||||
- platform: linux/arm/v7
|
||||
os: ubuntu-latest
|
||||
os: [self-hosted, linux, ARM]
|
||||
- platform: linux/arm64
|
||||
os: ubuntu-latest
|
||||
os: [self-hosted, linux, ARM64]
|
||||
runs-on: ${{ matrix.os }}
|
||||
name: Build ${{ matrix.platform }} Image
|
||||
steps:
|
||||
@@ -37,8 +37,6 @@ jobs:
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
.gitignore (vendored, 2 changed lines)
@@ -7,3 +7,5 @@ scripts/quickjspp
|
||||
scripts/yaml-cpp
|
||||
.DS_Store
|
||||
src/.DS_Store
|
||||
|
||||
build
|
||||
@@ -5,7 +5,7 @@ socks-port: {{ default(global.clash.socks_port, "7891") }}
|
||||
allow-lan: {{ default(global.clash.allow_lan, "true") }}
|
||||
mode: Rule
|
||||
log-level: {{ default(global.clash.log_level, "info") }}
|
||||
external-controller: :9090
|
||||
external-controller: {{ default(global.clash.external_controller, "127.0.0.1:9090") }}
|
||||
{% if default(request.clash.dns, "") == "1" %}
|
||||
dns:
|
||||
enable: true
|
||||
@@ -378,7 +378,16 @@ enhanced-mode-by-rule = true
|
||||
"rules": [],
|
||||
"auto_detect_interface": true
|
||||
},
|
||||
"experimental": {}
|
||||
"experimental": {
|
||||
"cache_file": {
|
||||
"enabled": true,
|
||||
"store_fakeip": true
|
||||
},
|
||||
"clash_api": {
|
||||
"external_controller": "{{ default(global.clash.external_controller, "127.0.0.1:9090") }}",
|
||||
"external_ui": "dashboard"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
{% endif %}
|
||||
|
||||
@@ -100,5 +100,14 @@
|
||||
"rules": [],
|
||||
"auto_detect_interface": true
|
||||
},
|
||||
"experimental": {}
|
||||
"experimental": {
|
||||
"cache_file": {
|
||||
"enabled": true,
|
||||
"store_fakeip": true
|
||||
},
|
||||
"clash_api": {
|
||||
"external_controller": "127.0.0.1:9090",
|
||||
"external_ui": "dashboard"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -109,13 +109,14 @@ filter_deprecated_nodes=false
|
||||
append_sub_userinfo=true
|
||||
clash_use_new_field_name=true
|
||||
|
||||
;Generate style of the proxies section of Clash subscriptions.
;Generate style of the proxies and proxy groups section of Clash subscriptions.
;Supported styles: block, flow, compact
;Block: - name: name1 Flow: - {name: name1, key: value} Compact: [{name: name1, key: value},{name: name2, key: value}]
; key: value - {name: name2, key: value}
; - name: name2
; key: value
clash_proxies_style=flow
clash_proxy_groups_style=block
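As a point of reference, a small PyYAML sketch (assuming PyYAML is available; it is not part of this project) of how the block and compact emitter styles above differ; the flow style sits in between, keeping the list in block form while writing each proxy mapping inline:

    import yaml

    proxies = {"proxies": [{"name": "name1", "type": "ss"}, {"name": "name2", "type": "ss"}]}

    # block: one "key: value" per line
    print(yaml.dump(proxies, default_flow_style=False, sort_keys=False))
    # compact-like: everything emitted in flow style on a single line
    print(yaml.dump(proxies, default_flow_style=True, sort_keys=False))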
|
||||
;add Clash mode to sing-box rules, and add a GLOBAL group to end of outbounds
|
||||
singbox_add_clash_modes=true
|
||||
@@ -232,6 +233,7 @@ clash.http_port=7890
|
||||
clash.socks_port=7891
|
||||
clash.allow_lan=true
|
||||
clash.log_level=info
|
||||
clash.external_controller=127.0.0.1:9090
|
||||
singbox.allow_lan=true
|
||||
singbox.mixed_port=2080
|
||||
|
||||
|
||||
@@ -117,9 +117,9 @@ match = '^Smart Access expire: (\d+)/(\d+)/(\d+)$'
|
||||
replace = '$1:$2:$3:0:0:0'
|
||||
|
||||
[node_pref]
|
||||
#udp_flag = true
|
||||
#udp_flag = false
|
||||
#tcp_fast_open_flag = false
|
||||
#skip_cert_verify_flag = true
|
||||
#skip_cert_verify_flag = false
|
||||
#tls13_flag = false
|
||||
|
||||
sort_flag = false
|
||||
@@ -135,13 +135,14 @@ filter_deprecated_nodes = false
|
||||
append_sub_userinfo = true
|
||||
clash_use_new_field_name = true
|
||||
|
||||
# Generate style of the proxies section of Clash subscriptions.
|
||||
# Generate style of the proxies and proxy groups section of Clash subscriptions.
|
||||
# Supported styles: block, flow, compact
|
||||
# Block: - name: name1 Flow: - {name: name1, key: value} Compact: [{name: name1, key: value},{name: name2, key: value}]
|
||||
# key: value - {name: name2, key: value}
|
||||
# - name: name2
|
||||
# key: value
|
||||
clash_proxies_style = "flow"
|
||||
clash_proxy_groups_style = "block"
|
||||
|
||||
# add Clash mode to sing-box rules, and add a GLOBAL group to end of outbounds
|
||||
singbox_add_clash_modes = true
|
||||
@@ -243,6 +244,10 @@ value = "true"
|
||||
key = "clash.log_level"
|
||||
value = "info"
|
||||
|
||||
[[template.globals]]
|
||||
key = "clash.external_controller"
|
||||
value = "127.0.0.1:9090"
|
||||
|
||||
[[template.globals]]
|
||||
key = "singbox.allow_lan"
|
||||
value = "true"
|
||||
|
||||
@@ -50,6 +50,7 @@ node_pref:
|
||||
append_sub_userinfo: true
|
||||
clash_use_new_field_name: true
|
||||
clash_proxies_style: flow
|
||||
clash_proxy_groups_style: block
|
||||
singbox_add_clash_modes: true
|
||||
rename_node:
|
||||
# - {match: "\\(?((x|X)?(\\d+)(\\.?\\d+)?)((\\s?倍率?)|(x|X))\\)?", replace: "$1x"}
|
||||
@@ -108,9 +109,10 @@ template:
|
||||
- {key: clash.socks_port, value: 7891}
|
||||
- {key: clash.allow_lan, value: true}
|
||||
- {key: clash.log_level, value: info}
|
||||
- {key: clash.external_controller, value: '127.0.0.1:9090'}
|
||||
- {key: singbox.allow_lan, value: true}
|
||||
- {key: singbox.mixed_port, value: 2080}
|
||||
|
||||
|
||||
aliases:
|
||||
- {uri: /v, target: /version}
|
||||
- {uri: /clash, target: "/sub?target=clash"}
|
||||
|
||||
@@ -23,7 +23,7 @@ match = "(?i:\\bJP[N]?\\d*\\b|Japan|Tokyo|Osaka|Saitama|日本|东京|大阪|埼
emoji = "🇯🇵"

[[emoji]]
match = "(?i:\\bK[O]?R\\d*\\b|Korea|(?<!North)Korea|首尔|韩|韓)"
match = "(?i:(?<!North\\s)(\\bK[O]?R\\d*\\b|Korea|首尔|韩|韓))"
emoji = "🇰🇷"

[[emoji]]
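The second match line above is the replacement: the negative lookbehind now guards the whole alternation, so remarks like "North Korea ..." no longer pick up the 🇰🇷 flag. A quick sanity check of that pattern, sketched with Python's re module (subconverter evaluates these with PCRE2, so this is only an approximation):

    import re

    # (?<!North\s) is a fixed-width negative lookbehind: it rejects a match that
    # directly follows "North ", while plain "Korea" / "KR01" remarks still match.
    pattern = re.compile(r"(?<!North\s)(\bK[O]?R\d*\b|Korea|首尔|韩|韓)", re.IGNORECASE)

    for remark in ["Seoul Korea 01", "KR01 Premium", "North Korea Test"]:
        print(remark, "->", bool(pattern.search(remark)))
    # Seoul Korea 01 -> True, KR01 Premium -> True, North Korea Test -> False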
@@ -334,10 +334,6 @@ emoji = "🇹🇷"
|
||||
match = "(乌拉圭|Uruguay)"
|
||||
emoji = "🇺🇾"
|
||||
|
||||
[[emoji]]
|
||||
match = "(梵蒂冈|Vatican)"
|
||||
emoji = "🇻🇦"
|
||||
|
||||
[[emoji]]
|
||||
match = "(Vietnam|越南)"
|
||||
emoji = "🇻🇳"
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
(?i:\bSG[P]?\d*\b|Singapore|新加坡|狮城|[^-]新),🇸🇬
|
||||
(尼日利亚|Nigeria),🇳🇬
|
||||
(?i:\bJP[N]?\d*\b|Japan|Tokyo|Osaka|Saitama|日本|东京|大阪|埼玉|[^-]日),🇯🇵
|
||||
(?i:\bK[O]?R\d*\b|Korea|(?<!North)Korea|首尔|韩|韓),🇰🇷
|
||||
(?i:(?<!North\s)(\bK[O]?R\d*\b|Korea|首尔|韩|韓)),🇰🇷
|
||||
(?i:\bUS[A]?\d*\b|America|United.*?States|美国|[^-]美|波特兰|达拉斯|俄勒冈|凤凰城|费利蒙|硅谷|拉斯维加斯|洛杉矶|圣何塞|圣克拉拉|西雅图|芝加哥),🇺🇸
|
||||
(Ascension|阿森松),🇦🇨
|
||||
(?i:\bUAE\b|Dubai|阿联酋|迪拜),🇦🇪
|
||||
@@ -92,4 +92,4 @@
|
||||
(Morocco|摩洛哥),🇲🇦
|
||||
(Nepal|尼泊尔),🇳🇵
|
||||
(Bengal|孟加拉),🇧🇩
|
||||
(?i:\bC[H]?N\b|China|back|回国|中国[^-]|江苏[^-]|北京[^-]|上海[^-]|广州[^-]|深圳[^-]|杭州[^-]|常州[^-]|徐州[^-]|青岛[^-]|宁波[^-]|镇江[^-]|成都[^-]|河北[^-]|山西[^-]|辽宁[^-]|吉林[^-]|黑龙江[^-]|江苏[^-]|浙江[^-]|安徽[^-]|福建[^-]|江西[^-]|山东[^-]|河南[^-]|湖北[^-]|湖南[^-]|广东[^-]|海南[^-]|四川[^-]|贵州[^-]|云南[^-]|陕西[^-]|甘肃[^-]|青海[^-]|内蒙古[^-]|广西[^-]|西藏[^-]|宁夏[^-]|新疆[^-]),🇨🇳
|
||||
(?i:\bC[H]?N\b|China|back|回国|中国[^-]|江苏[^-]|北京[^-]|上海[^-]|广州[^-]|深圳[^-]|杭州[^-]|常州[^-]|徐州[^-]|青岛[^-]|宁波[^-]|镇江[^-]|成都[^-]|河北[^-]|山西[^-]|辽宁[^-]|吉林[^-]|黑龙江[^-]|江苏[^-]|浙江[^-]|安徽[^-]|福建[^-]|江西[^-]|山东[^-]|河南[^-]|湖北[^-]|湖南[^-]|广东[^-]|海南[^-]|四川[^-]|贵州[^-]|云南[^-]|陕西[^-]|甘肃[^-]|青海[^-]|内蒙古[^-]|广西[^-]|西藏[^-]|宁夏[^-]|新疆[^-]),🇨🇳
|
||||
|
||||
@@ -30,7 +30,7 @@ RUN set -xe && \
|
||||
install -d /usr/include/date/ && \
|
||||
install -m644 libcron/externals/date/include/date/* /usr/include/date/ && \
|
||||
cd .. && \
|
||||
git clone https://github.com/ToruNiina/toml11 --branch="v3.7.1" --depth=1 && \
|
||||
git clone https://github.com/ToruNiina/toml11 --branch="v4.3.0" --depth=1 && \
|
||||
cd toml11 && \
|
||||
cmake -DCMAKE_CXX_STANDARD=11 . && \
|
||||
make install -j $THREADS && \
|
||||
@@ -53,6 +53,10 @@ RUN apk add --no-cache --virtual subconverter-deps pcre2 libcurl yaml-cpp
|
||||
COPY --from=builder /subconverter/subconverter /usr/bin/
|
||||
COPY --from=builder /subconverter/base /base/
|
||||
|
||||
ENV TZ=Africa/Abidjan
|
||||
RUN ln -sf /usr/share/zoneinfo/$TZ /etc/localtime
|
||||
RUN echo $TZ > /etc/timezone
|
||||
|
||||
# set entry
|
||||
WORKDIR /base
|
||||
CMD subconverter
|
||||
|
||||
@@ -4,7 +4,7 @@ set -xe
|
||||
apk add gcc g++ build-base linux-headers cmake make autoconf automake libtool python2 python3
|
||||
apk add mbedtls-dev mbedtls-static zlib-dev rapidjson-dev zlib-static pcre2-dev
|
||||
|
||||
git clone https://github.com/curl/curl --depth=1 --branch curl-8_4_0
|
||||
git clone https://github.com/curl/curl --depth=1 --branch curl-8_6_0
|
||||
cd curl
|
||||
cmake -DCURL_USE_MBEDTLS=ON -DHTTP_ONLY=ON -DBUILD_TESTING=OFF -DBUILD_SHARED_LIBS=OFF -DCMAKE_USE_LIBSSH2=OFF -DBUILD_CURL_EXE=OFF . > /dev/null
|
||||
make install -j2 > /dev/null
|
||||
@@ -34,7 +34,7 @@ cmake -DCMAKE_BUILD_TYPE=Release .
|
||||
make libcron install -j3
|
||||
cd ..
|
||||
|
||||
git clone https://github.com/ToruNiina/toml11 --branch="v3.7.1" --depth=1
|
||||
git clone https://github.com/ToruNiina/toml11 --branch="v4.3.0" --depth=1
|
||||
cd toml11
|
||||
cmake -DCMAKE_CXX_STANDARD=11 .
|
||||
make install -j4
|
||||
|
||||
@@ -41,7 +41,7 @@ sudo install -d /usr/local/include/date/
|
||||
sudo install -m644 libcron/externals/date/include/date/* /usr/local/include/date/
|
||||
cd ..
|
||||
|
||||
git clone https://github.com/ToruNiina/toml11 --branch="v3.7.1" --depth=1
|
||||
git clone https://github.com/ToruNiina/toml11 --branch="v4.3.0" --depth=1
|
||||
cd toml11
|
||||
cmake -DCMAKE_CXX_STANDARD=11 .
|
||||
sudo make install -j6 > /dev/null
|
||||
@@ -63,4 +63,4 @@ chmod +r ./*
|
||||
cd ..
|
||||
mv base subconverter
|
||||
|
||||
set +xe
|
||||
set +xe
|
||||
|
||||
@@ -1,33 +1,9 @@
|
||||
#!/bin/bash
|
||||
set -xe
|
||||
|
||||
# 获取系统架构
|
||||
ARCH=$(uname -m)
|
||||
|
||||
if [ "$ARCH" == "x86_64" ]; then
|
||||
TOOLCHAIN="mingw-w64-x86_64"
|
||||
else
|
||||
TOOLCHAIN="mingw-w64-i686"
|
||||
fi
|
||||
|
||||
pacman -S --needed --noconfirm base-devel ${TOOLCHAIN}-toolchain ${TOOLCHAIN}-cmake ${TOOLCHAIN}-nghttp2 ${TOOLCHAIN}-openssl
|
||||
|
||||
git clone https://github.com/curl/curl --depth=1 --branch curl-8_8_0
|
||||
git clone https://github.com/curl/curl --depth=1 --branch curl-8_6_0
|
||||
cd curl
|
||||
cmake -DCMAKE_BUILD_TYPE=Release \
|
||||
-DCURL_USE_LIBSSH2=OFF \
|
||||
-DHTTP_ONLY=ON \
|
||||
-DCURL_USE_SCHANNEL=ON \
|
||||
-DBUILD_SHARED_LIBS=OFF \
|
||||
-DBUILD_CURL_EXE=OFF \
|
||||
-DCMAKE_INSTALL_PREFIX="$MINGW_PREFIX" \
|
||||
-G "Unix Makefiles" \
|
||||
-DHAVE_LIBIDN2=OFF \
|
||||
-DCURL_USE_LIBPSL=OFF \
|
||||
-DCURL_STATICLIB=ON \
|
||||
-DCURL_DISABLE_SOCKETPAIR=ON \
|
||||
-DCURL_DISABLE_NONBLOCKING=ON .
|
||||
|
||||
cmake -DCMAKE_BUILD_TYPE=Release -DCURL_USE_LIBSSH2=OFF -DHTTP_ONLY=ON -DCURL_USE_SCHANNEL=ON -DBUILD_SHARED_LIBS=OFF -DBUILD_CURL_EXE=OFF -DCMAKE_INSTALL_PREFIX="$MINGW_PREFIX" -G "Unix Makefiles" -DHAVE_LIBIDN2=OFF -DCURL_USE_LIBPSL=OFF .
|
||||
make install -j4
|
||||
cd ..
|
||||
|
||||
@@ -62,7 +38,7 @@ cmake -DRAPIDJSON_BUILD_DOC=OFF -DRAPIDJSON_BUILD_EXAMPLES=OFF -DRAPIDJSON_BUILD
|
||||
make install -j4
|
||||
cd ..
|
||||
|
||||
git clone https://github.com/ToruNiina/toml11 --branch="v3.7.1" --depth=1
|
||||
git clone https://github.com/ToruNiina/toml11 --branch "v4.3.0" --depth=1
|
||||
cd toml11
|
||||
cmake -DCMAKE_INSTALL_PREFIX="$MINGW_PREFIX" -G "Unix Makefiles" -DCMAKE_CXX_STANDARD=11 .
|
||||
make install -j4
|
||||
|
||||
@@ -1,23 +1,23 @@
|
||||
[ACL4SSR]
|
||||
name=ACL4SSR
|
||||
url=https://github.com/ACL4SSR/ACL4SSR
|
||||
checkout=1dc5c92b0c8ceaaecbc66530c309961f53e52c8c
|
||||
branch=master
|
||||
match=Clash/*.list|Clash/Ruleset/**
|
||||
|
||||
[ACL4SSR_config]
|
||||
name=ACL4SSR
|
||||
url=https://github.com/ACL4SSR/ACL4SSR
|
||||
checkout=1dc5c92b0c8ceaaecbc66530c309961f53e52c8c
|
||||
branch=master
|
||||
match=Clash/config/**
|
||||
dest=base/config/
|
||||
keep_tree=false
|
||||
|
||||
[DivineEngine]
|
||||
url=https://github.com/asdlokj1qpi233/Profiles.git
|
||||
checkout=f6302d855192bd8d0be08319dff3e58ae7c2bd4e
|
||||
match=Surge/Ruleset/**
|
||||
|
||||
[NobyDa]
|
||||
url=https://github.com/NobyDa/Script
|
||||
checkout=ae4c12f23de8078e02c373c9969b19af28257fcb
|
||||
branch=master
|
||||
match=Surge/*.list
|
||||
|
||||
[lhie1]
|
||||
url=https://github.com/dler-io/Rules
|
||||
branch=main
|
||||
match=Surge/Surge 3/Provider/**
|
||||
|
||||
@@ -22,10 +22,13 @@ def open_repo(path: str):
|
||||
return None
|
||||
|
||||
|
||||
def update_rules(repo_path, save_path, commit, matches, keep_tree):
|
||||
def update_rules(repo_path: str, save_path: str, matches: list[str], keep_tree: bool):
|
||||
os.makedirs(save_path, exist_ok=True)
|
||||
for pattern in matches:
|
||||
files = glob.glob(os.path.join(repo_path, pattern), recursive=True)
|
||||
if len(files) == 0:
|
||||
logging.warn(f"no files found for pattern {pattern}")
|
||||
continue
|
||||
for file in files:
|
||||
if os.path.isdir(file):
|
||||
continue
|
||||
@@ -51,12 +54,13 @@ def main():
|
||||
for section in config.sections():
|
||||
repo = config.get(section, "name", fallback=section)
|
||||
url = config.get(section, "url")
|
||||
commit = config.get(section, "checkout")
|
||||
commit = config.get(section, "commit", fallback=None)
|
||||
branch = config.get(section, "branch", fallback=None)
|
||||
matches = config.get(section, "match").split("|")
|
||||
save_path = config.get(section, "dest", fallback=f"base/rules/{repo}")
|
||||
keep_tree = config.getboolean(section, "keep_tree", fallback=True)
|
||||
|
||||
logging.info(f"reading files from url {url} with commit {commit} and matches {matches}, save to {save_path} keep_tree {keep_tree}")
|
||||
logging.info(f"reading files from url {url}, matches {matches}, save to {save_path} keep_tree {keep_tree}")
|
||||
|
||||
repo_path = os.path.join("./tmp/repo/", repo)
|
||||
|
||||
@@ -67,8 +71,21 @@ def main():
|
||||
else:
|
||||
logging.info(f"repo {repo_path} exists")
|
||||
|
||||
r.git.checkout(commit)
|
||||
update_rules(repo_path, save_path, commit, matches, keep_tree)
|
||||
try:
|
||||
if commit is not None:
|
||||
logging.info(f"checking out to commit {commit}")
|
||||
r.git.checkout(commit)
|
||||
elif branch is not None:
|
||||
logging.info(f"checking out to branch {branch}")
|
||||
r.git.checkout(branch)
|
||||
else:
|
||||
logging.info(f"checking out to default branch")
|
||||
r.active_branch.checkout()
|
||||
except Exception as e:
|
||||
logging.error(f"checkout failed {e}")
|
||||
continue
|
||||
|
||||
update_rules(repo_path, save_path, matches, keep_tree)
|
||||
|
||||
shutil.rmtree("./tmp", ignore_errors=True)
|
||||
|
||||
|
||||
@@ -17,9 +17,9 @@ namespace toml
|
||||
static ProxyGroupConfig from_toml(const value& v)
|
||||
{
|
||||
ProxyGroupConfig conf;
|
||||
conf.Name = toml::find<String>(v, "name");
|
||||
String type = toml::find<String>(v, "type");
|
||||
String strategy = toml::find_or<String>(v, "strategy", "");
|
||||
conf.Name = find<String>(v, "name");
|
||||
String type = find<String>(v, "type");
|
||||
String strategy = find_or<String>(v, "strategy", "");
|
||||
switch(hash_(type))
|
||||
{
|
||||
case "select"_hash:
|
||||
@@ -27,18 +27,18 @@ namespace toml
|
||||
break;
|
||||
case "url-test"_hash:
|
||||
conf.Type = ProxyGroupType::URLTest;
|
||||
conf.Url = toml::find<String>(v, "url");
|
||||
conf.Interval = toml::find<Integer>(v, "interval");
|
||||
conf.Tolerance = toml::find_or<Integer>(v, "tolerance", 0);
|
||||
conf.Url = find<String>(v, "url");
|
||||
conf.Interval = find<Integer>(v, "interval");
|
||||
conf.Tolerance = find_or<Integer>(v, "tolerance", 0);
|
||||
if(v.contains("lazy"))
|
||||
conf.Lazy = toml::find_or<bool>(v, "lazy", false);
|
||||
conf.Lazy = find_or<bool>(v, "lazy", false);
|
||||
if(v.contains("evaluate-before-use"))
|
||||
conf.EvaluateBeforeUse = toml::find_or(v, "evaluate-before-use", conf.EvaluateBeforeUse.get());
|
||||
conf.EvaluateBeforeUse = find_or(v, "evaluate-before-use", conf.EvaluateBeforeUse.get());
|
||||
break;
|
||||
case "load-balance"_hash:
|
||||
conf.Type = ProxyGroupType::LoadBalance;
|
||||
conf.Url = toml::find<String>(v, "url");
|
||||
conf.Interval = toml::find<Integer>(v, "interval");
|
||||
conf.Url = find<String>(v, "url");
|
||||
conf.Interval = find<Integer>(v, "interval");
|
||||
switch(hash_(strategy))
|
||||
{
|
||||
case "consistent-hashing"_hash:
|
||||
@@ -49,14 +49,14 @@ namespace toml
|
||||
break;
|
||||
}
|
||||
if(v.contains("persistent"))
|
||||
conf.Persistent = toml::find_or(v, "persistent", conf.Persistent.get());
|
||||
conf.Persistent = find_or(v, "persistent", conf.Persistent.get());
|
||||
break;
|
||||
case "fallback"_hash:
|
||||
conf.Type = ProxyGroupType::Fallback;
|
||||
conf.Url = toml::find<String>(v, "url");
|
||||
conf.Interval = toml::find<Integer>(v, "interval");
|
||||
conf.Url = find<String>(v, "url");
|
||||
conf.Interval = find<Integer>(v, "interval");
|
||||
if(v.contains("evaluate-before-use"))
|
||||
conf.EvaluateBeforeUse = toml::find_or(v, "evaluate-before-use", conf.EvaluateBeforeUse.get());
|
||||
conf.EvaluateBeforeUse = find_or(v, "evaluate-before-use", conf.EvaluateBeforeUse.get());
|
||||
break;
|
||||
case "relay"_hash:
|
||||
conf.Type = ProxyGroupType::Relay;
|
||||
@@ -64,16 +64,26 @@ namespace toml
|
||||
case "ssid"_hash:
|
||||
conf.Type = ProxyGroupType::SSID;
|
||||
break;
|
||||
case "smart"_hash:
|
||||
conf.Type = ProxyGroupType::Smart;
|
||||
conf.Url = find<String>(v, "url");
|
||||
conf.Interval = find<Integer>(v, "interval");
|
||||
conf.Tolerance = find_or<Integer>(v, "tolerance", 0);
|
||||
if(v.contains("lazy"))
|
||||
conf.Lazy = find_or<bool>(v, "lazy", false);
|
||||
if(v.contains("evaluate-before-use"))
|
||||
conf.EvaluateBeforeUse = find_or(v, "evaluate-before-use", conf.EvaluateBeforeUse.get());
|
||||
break;
|
||||
default:
|
||||
throw toml::syntax_error("Proxy Group has incorrect type, should be one of following:\n select, url-test, load-balance, fallback, relay, ssid", v.at("type").location());
|
||||
throw serialization_error(format_error("Proxy Group has unsupported type!", v.at("type").location(), "should be one of following: select, url-test, load-balance, fallback, relay, ssid"), v.at("type").location());
|
||||
}
|
||||
conf.Timeout = toml::find_or(v, "timeout", 5);
|
||||
conf.Proxies = toml::find_or<StrArray>(v, "rule", {});
|
||||
conf.UsingProvider = toml::find_or<StrArray>(v, "use", {});
|
||||
conf.Timeout = find_or(v, "timeout", 5);
|
||||
conf.Proxies = find_or<StrArray>(v, "rule", {});
|
||||
conf.UsingProvider = find_or<StrArray>(v, "use", {});
|
||||
if(conf.Proxies.empty() && conf.UsingProvider.empty())
|
||||
throw toml::syntax_error("Proxy Group must contains at least one of proxy match rule or provider", v.location());
|
||||
throw serialization_error(format_error("Proxy Group must contains at least one of proxy match rule or provider!", v.location(), "here"), v.location());
|
||||
if(v.contains("disable-udp"))
|
||||
conf.DisableUdp = toml::find_or(v, "disable-udp", conf.DisableUdp.get());
|
||||
conf.DisableUdp = find_or(v, "disable-udp", conf.DisableUdp.get());
|
||||
return conf;
|
||||
}
|
||||
};
|
||||
@@ -84,8 +94,8 @@ namespace toml
|
||||
static RulesetConfig from_toml(const value& v)
|
||||
{
|
||||
RulesetConfig conf;
|
||||
conf.Group = toml::find<String>(v, "group");
|
||||
String type = toml::find_or<String>(v, "type", "surge-ruleset");
|
||||
conf.Group = find<String>(v, "group");
|
||||
String type = find_or<String>(v, "type", "surge-ruleset");
|
||||
switch(hash_(type))
|
||||
{
|
||||
/*
|
||||
@@ -122,10 +132,10 @@ namespace toml
|
||||
conf.Url = type + ":";
|
||||
break;
|
||||
default:
|
||||
throw toml::syntax_error("Ruleset has incorrect type, should be one of following:\n surge-ruleset, quantumultx, clash-domain, clash-ipcidr, clash-classic", v.at("type").location());
|
||||
throw serialization_error(format_error("Ruleset has unsupported type!", v.at("type").location(), "should be one of following: surge-ruleset, quantumultx, clash-domain, clash-ipcidr, clash-classic"), v.at("type").location());
|
||||
}
|
||||
conf.Url += toml::find<String>(v, "ruleset");
|
||||
conf.Interval = toml::find_or<Integer>(v, "interval", 86400);
|
||||
conf.Url += find<String>(v, "ruleset");
|
||||
conf.Interval = find_or<Integer>(v, "interval", 86400);
|
||||
return conf;
|
||||
}
|
||||
};
|
||||
@@ -138,14 +148,14 @@ namespace toml
|
||||
RegexMatchConfig conf;
|
||||
if(v.contains("script"))
|
||||
{
|
||||
conf.Script = toml::find<String>(v, "script");
|
||||
conf.Script = find<String>(v, "script");
|
||||
return conf;
|
||||
}
|
||||
conf.Match = toml::find<String>(v, "match");
|
||||
conf.Match = find<String>(v, "match");
|
||||
if(v.contains("emoji"))
|
||||
conf.Replace = toml::find<String>(v, "emoji");
|
||||
conf.Replace = find<String>(v, "emoji");
|
||||
else
|
||||
conf.Replace = toml::find<String>(v, "replace");
|
||||
conf.Replace = find<String>(v, "replace");
|
||||
return conf;
|
||||
}
|
||||
};
|
||||
@@ -156,10 +166,10 @@ namespace toml
|
||||
static CronTaskConfig from_toml(const value& v)
|
||||
{
|
||||
CronTaskConfig conf;
|
||||
conf.Name = toml::find<String>(v, "name");
|
||||
conf.CronExp = toml::find<String>(v, "cronexp");
|
||||
conf.Path = toml::find<String>(v, "path");
|
||||
conf.Timeout = toml::find_or<Integer>(v, "timeout", 0);
|
||||
conf.Name = find<String>(v, "name");
|
||||
conf.CronExp = find<String>(v, "cronexp");
|
||||
conf.Path = find<String>(v, "path");
|
||||
conf.Timeout = find_or<Integer>(v, "timeout", 0);
|
||||
return conf;
|
||||
}
|
||||
};
|
||||
@@ -220,6 +230,9 @@ namespace INIBinding
|
||||
case "ssid"_hash:
|
||||
conf.Type = ProxyGroupType::SSID;
|
||||
break;
|
||||
case "smart"_hash:
|
||||
conf.Type = ProxyGroupType::Smart;
|
||||
break;
|
||||
default:
|
||||
continue;
|
||||
}
|
||||
|
||||
@@ -3,17 +3,18 @@
|
||||
|
||||
#include "def.h"
|
||||
|
||||
enum ProxyGroupType
|
||||
enum class ProxyGroupType
|
||||
{
|
||||
Select,
|
||||
URLTest,
|
||||
Fallback,
|
||||
LoadBalance,
|
||||
Relay,
|
||||
SSID
|
||||
SSID,
|
||||
Smart
|
||||
};
|
||||
|
||||
enum BalanceStrategy
|
||||
enum class BalanceStrategy
|
||||
{
|
||||
ConsistentHashing,
|
||||
RoundRobin
|
||||
@@ -45,6 +46,7 @@ struct ProxyGroupConfig
|
||||
case ProxyGroupType::Fallback: return "fallback";
|
||||
case ProxyGroupType::Relay: return "relay";
|
||||
case ProxyGroupType::SSID: return "ssid";
|
||||
case ProxyGroupType::Smart: return "smart";
|
||||
}
|
||||
return "";
|
||||
}
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
|
||||
#include "def.h"
|
||||
|
||||
enum RulesetType
|
||||
enum class RulesetType
|
||||
{
|
||||
SurgeRuleset,
|
||||
QuantumultX,
|
||||
|
||||
@@ -161,7 +161,8 @@ void processRemark(std::string &remark, const string_array &remarks_list, bool p
|
||||
}
|
||||
std::string tempRemark = remark;
|
||||
int cnt = 2;
|
||||
while (std::find(remarks_list.cbegin(), remarks_list.cend(), tempRemark) != remarks_list.cend()) {
|
||||
while(std::find(remarks_list.cbegin(), remarks_list.cend(), tempRemark) != remarks_list.cend())
|
||||
{
|
||||
tempRemark = remark + " " + std::to_string(cnt);
|
||||
cnt++;
|
||||
}
|
||||
@@ -218,6 +219,30 @@ proxyToClash(std::vector<Proxy> &nodes, YAML::Node &yamlnode, const ProxyGroupCo
|
||||
case "compact"_hash:
|
||||
compact = true;
|
||||
break;
|
||||
bool proxy_block = false, proxy_compact = false, group_block = false, group_compact = false;
|
||||
switch(hash_(ext.clash_proxies_style))
|
||||
{
|
||||
case "block"_hash:
|
||||
proxy_block = true;
|
||||
break;
|
||||
default:
|
||||
case "flow"_hash:
|
||||
break;
|
||||
case "compact"_hash:
|
||||
proxy_compact = true;
|
||||
break;
|
||||
}
|
||||
switch(hash_(ext.clash_proxy_groups_style))
|
||||
{
|
||||
case "block"_hash:
|
||||
group_block = true;
|
||||
break;
|
||||
default:
|
||||
case "flow"_hash:
|
||||
break;
|
||||
case "compact"_hash:
|
||||
group_compact = true;
|
||||
break;
|
||||
}
|
||||
|
||||
for (Proxy &x: nodes) {
|
||||
@@ -644,7 +669,10 @@ proxyToClash(std::vector<Proxy> &nodes, YAML::Node &yamlnode, const ProxyGroupCo
|
||||
string_array filtered_nodelist;
|
||||
|
||||
singlegroup["name"] = x.Name;
|
||||
singlegroup["type"] = x.TypeStr();
|
||||
if (x.Type == ProxyGroupType::Smart)
|
||||
singlegroup["type"] = "url-test";
|
||||
else
|
||||
singlegroup["type"] = x.TypeStr();
|
||||
|
||||
switch (x.Type) {
|
||||
case ProxyGroupType::Select:
|
||||
@@ -666,6 +694,29 @@ proxyToClash(std::vector<Proxy> &nodes, YAML::Node &yamlnode, const ProxyGroupCo
|
||||
break;
|
||||
default:
|
||||
continue;
|
||||
switch(x.Type)
|
||||
{
|
||||
case ProxyGroupType::Select:
|
||||
case ProxyGroupType::Relay:
|
||||
break;
|
||||
case ProxyGroupType::LoadBalance:
|
||||
singlegroup["strategy"] = x.StrategyStr();
|
||||
[[fallthrough]];
|
||||
case ProxyGroupType::Smart:
|
||||
[[fallthrough]];
|
||||
case ProxyGroupType::URLTest:
|
||||
if(!x.Lazy.is_undef())
|
||||
singlegroup["lazy"] = x.Lazy.get();
|
||||
[[fallthrough]];
|
||||
case ProxyGroupType::Fallback:
|
||||
singlegroup["url"] = x.Url;
|
||||
if(x.Interval > 0)
|
||||
singlegroup["interval"] = x.Interval;
|
||||
if(x.Tolerance > 0)
|
||||
singlegroup["tolerance"] = x.Tolerance;
|
||||
break;
|
||||
default:
|
||||
continue;
|
||||
}
|
||||
if (!x.DisableUdp.is_undef())
|
||||
singlegroup["disable-udp"] = x.DisableUdp.get();
|
||||
@@ -681,7 +732,10 @@ proxyToClash(std::vector<Proxy> &nodes, YAML::Node &yamlnode, const ProxyGroupCo
|
||||
}
|
||||
if (!filtered_nodelist.empty())
|
||||
singlegroup["proxies"] = filtered_nodelist;
|
||||
//singlegroup.SetStyle(YAML::EmitterStyle::Flow);
|
||||
if(group_block)
|
||||
singlegroup.SetStyle(YAML::EmitterStyle::Block);
|
||||
else
|
||||
singlegroup.SetStyle(YAML::EmitterStyle::Flow);
|
||||
|
||||
bool replace_flag = false;
|
||||
for (auto &&original_group: original_groups) {
|
||||
@@ -694,6 +748,8 @@ proxyToClash(std::vector<Proxy> &nodes, YAML::Node &yamlnode, const ProxyGroupCo
|
||||
if (!replace_flag)
|
||||
original_groups.push_back(singlegroup);
|
||||
}
|
||||
if(group_compact)
|
||||
original_groups.SetStyle(YAML::EmitterStyle::Flow);
|
||||
|
||||
if (ext.clash_new_field_name)
|
||||
yamlnode["proxy-groups"] = original_groups;
|
||||
@@ -975,11 +1031,18 @@ std::string proxyToSurge(std::vector<Proxy> &nodes, const std::string &base_conf
|
||||
proxy += ", version=" + std::to_string(x.SnellVersion);
|
||||
break;
|
||||
case ProxyType::Hysteria2:
|
||||
if (surge_ver < 4 && surge_ver != -3)
|
||||
if(surge_ver < 4)
|
||||
continue;
|
||||
proxy = "hysteria2, " + hostname + ", " + port + ", password=" + password;
|
||||
if (!scv.is_undef())
|
||||
proxy += ", skip-cert-verify=" + scv.get_str();
|
||||
proxy = "hysteria, " + hostname + ", " + port + ", password=" + password;
|
||||
if(x.DownSpeed)
|
||||
proxy += ", download-bandwidth=" + x.DownSpeed;
|
||||
|
||||
if(!scv.is_undef())
|
||||
proxy += ",skip-cert-verify=" + std::string(scv.get() ? "true" : "false");
|
||||
if(!x.Fingerprint.empty())
|
||||
proxy += ",server-cert-fingerprint-sha256=" + x.Fingerprint;
|
||||
if(!x.SNI.empty())
|
||||
proxy += ",sni=" + x.SNI;
|
||||
break;
|
||||
case ProxyType::WireGuard:
|
||||
if (surge_ver < 4 && surge_ver != -3)
|
||||
@@ -1011,7 +1074,8 @@ std::string proxyToSurge(std::vector<Proxy> &nodes, const std::string &base_conf
|
||||
proxy += ", tfo=" + tfo.get_str();
|
||||
if (!udp.is_undef())
|
||||
proxy += ", udp-relay=" + udp.get_str();
|
||||
|
||||
if (underlying_proxy != "")
|
||||
proxy += ", underlying-proxy=" + underlying_proxy;
|
||||
if (ext.nodelist)
|
||||
output_nodelist += x.Remark + " = " + proxy + "\n";
|
||||
else {
|
||||
@@ -1030,22 +1094,24 @@ std::string proxyToSurge(std::vector<Proxy> &nodes, const std::string &base_conf
|
||||
string_array filtered_nodelist;
|
||||
std::string group;
|
||||
|
||||
switch (x.Type) {
|
||||
case ProxyGroupType::Select:
|
||||
case ProxyGroupType::URLTest:
|
||||
case ProxyGroupType::Fallback:
|
||||
break;
|
||||
case ProxyGroupType::LoadBalance:
|
||||
if (surge_ver < 1 && surge_ver != -3)
|
||||
continue;
|
||||
break;
|
||||
case ProxyGroupType::SSID:
|
||||
group = x.TypeStr() + ",default=" + x.Proxies[0] + ",";
|
||||
switch(x.Type)
|
||||
{
|
||||
case ProxyGroupType::Select:
|
||||
case ProxyGroupType::Smart:
|
||||
case ProxyGroupType::URLTest:
|
||||
case ProxyGroupType::Fallback:
|
||||
break;
|
||||
case ProxyGroupType::LoadBalance:
|
||||
if(surge_ver < 1 && surge_ver != -3)
|
||||
continue;
|
||||
break;
|
||||
case ProxyGroupType::SSID:
|
||||
group = x.TypeStr() + ",default=" + x.Proxies[0] + ",";
|
||||
group += join(x.Proxies.begin() + 1, x.Proxies.end(), ",");
|
||||
ini.set("{NONAME}", x.Name + " = " + group); //insert order
|
||||
continue;
|
||||
default:
|
||||
continue;
|
||||
continue;
|
||||
default:
|
||||
continue;
|
||||
}
|
||||
|
||||
for (const auto &y: x.Proxies)
|
||||
@@ -1693,7 +1759,8 @@ void proxyToQuanX(std::vector<Proxy> &nodes, INIReader &ini, std::vector<Ruleset
|
||||
std::string proxies = join(filtered_nodelist, ", ");
|
||||
|
||||
std::string singlegroup = type + "=" + x.Name + ", " + proxies;
|
||||
if (type != "static") {
|
||||
if(x.Type != ProxyGroupType::Select && x.Type != ProxyGroupType::SSID)
|
||||
{
|
||||
singlegroup += ", check-interval=" + std::to_string(x.Interval);
|
||||
if (x.Tolerance > 0)
|
||||
singlegroup += ", tolerance=" + std::to_string(x.Tolerance);
|
||||
|
||||
@@ -40,6 +40,7 @@ struct extra_settings
|
||||
bool clash_classical_ruleset = false;
|
||||
std::string sort_script;
|
||||
std::string clash_proxies_style = "flow";
|
||||
std::string clash_proxy_groups_style = "flow";
|
||||
bool authorized = false;
|
||||
|
||||
extra_settings() = default;
|
||||
|
||||
@@ -357,10 +357,10 @@ int renderClashScript(YAML::Node &base_rule, std::vector<RulesetContent> &rulese
|
||||
if(x.rule_type == RULESET_CLASH_IPCIDR || x.rule_type == RULESET_CLASH_DOMAIN || x.rule_type == RULESET_CLASH_CLASSICAL)
|
||||
{
|
||||
//rule_name = std::to_string(hash_(rule_group + rule_path));
|
||||
rule_name = old_rule_name = findFileName(rule_path);
|
||||
rule_name = old_rule_name = urlDecode(findFileName(rule_path));
|
||||
int idx = 2;
|
||||
while(std::find(groups.begin(), groups.end(), rule_name) != groups.end())
|
||||
rule_name = old_rule_name + "_" + std::to_string(idx++);
|
||||
rule_name = old_rule_name + " " + std::to_string(idx++);
|
||||
names[rule_name] = rule_group;
|
||||
urls[rule_name] = "*" + rule_path;
|
||||
rule_type[rule_name] = x.rule_type;
|
||||
@@ -386,10 +386,10 @@ int renderClashScript(YAML::Node &base_rule, std::vector<RulesetContent> &rulese
|
||||
if(fileExist(rule_path, true) || isLink(rule_path))
|
||||
{
|
||||
//rule_name = std::to_string(hash_(rule_group + rule_path));
|
||||
rule_name = old_rule_name = findFileName(rule_path);
|
||||
rule_name = old_rule_name = urlDecode(findFileName(rule_path));
|
||||
int idx = 2;
|
||||
while(std::find(groups.begin(), groups.end(), rule_name) != groups.end())
|
||||
rule_name = old_rule_name + "_" + std::to_string(idx++);
|
||||
rule_name = old_rule_name + " " + std::to_string(idx++);
|
||||
names[rule_name] = rule_group;
|
||||
urls[rule_name] = rule_path_typed;
|
||||
rule_type[rule_name] = x.rule_type;
|
||||
@@ -436,9 +436,9 @@ int renderClashScript(YAML::Node &base_rule, std::vector<RulesetContent> &rulese
|
||||
if(vArray.size() < 2)
|
||||
continue;
|
||||
if(keywords.find(rule_name) == keywords.end())
|
||||
keywords[rule_name] = "\"" + vArray[1] + "\"";
|
||||
keywords[rule_name] = "\"" + trim(vArray[1]) + "\"";
|
||||
else
|
||||
keywords[rule_name] += ",\"" + vArray[1] + "\"";
|
||||
keywords[rule_name] += ",\"" + trim(vArray[1]) + "\"";
|
||||
}
|
||||
else
|
||||
{
|
||||
@@ -449,7 +449,7 @@ int renderClashScript(YAML::Node &base_rule, std::vector<RulesetContent> &rulese
|
||||
}
|
||||
else
|
||||
{
|
||||
strLine = vArray[0] + "," + vArray[1] + "," + rule_group;
|
||||
strLine = vArray[0] + "," + trim(vArray[1]) + "," + rule_group;
|
||||
if(vArray.size() > 2)
|
||||
strLine += "," + vArray[2];
|
||||
}
|
||||
@@ -466,14 +466,16 @@ int renderClashScript(YAML::Node &base_rule, std::vector<RulesetContent> &rulese
|
||||
}
|
||||
}
|
||||
if(has_domain[rule_name] && !script)
|
||||
rules.emplace_back("RULE-SET," + rule_name + "_domain," + rule_group);
|
||||
rules.emplace_back("RULE-SET," + rule_name + " (Domain)," + rule_group);
|
||||
if(has_ipcidr[rule_name] && !script)
|
||||
{
|
||||
if(has_no_resolve)
|
||||
rules.emplace_back("RULE-SET," + rule_name + "_ipcidr," + rule_group + ",no-resolve");
|
||||
rules.emplace_back("RULE-SET," + rule_name + " (IP-CIDR)," + rule_group + ",no-resolve");
|
||||
else
|
||||
rules.emplace_back("RULE-SET," + rule_name + "_ipcidr," + rule_group);
|
||||
rules.emplace_back("RULE-SET," + rule_name + " (IP-CIDR)," + rule_group);
|
||||
}
|
||||
if(!has_domain[rule_name] && !has_ipcidr[rule_name] && !script)
|
||||
rules.emplace_back("RULE-SET," + rule_name + "," + rule_group);
|
||||
if(std::find(groups.begin(), groups.end(), rule_name) == groups.end())
|
||||
groups.emplace_back(rule_name);
|
||||
}
|
||||
@@ -488,14 +490,14 @@ int renderClashScript(YAML::Node &base_rule, std::vector<RulesetContent> &rulese
|
||||
{
|
||||
std::string yaml_key = x;
|
||||
if(rule_type[x] != RULESET_CLASH_DOMAIN)
|
||||
yaml_key += "_domain";
|
||||
yaml_key += " (Domain)";
|
||||
base_rule["rule-providers"][yaml_key]["type"] = "http";
|
||||
base_rule["rule-providers"][yaml_key]["behavior"] = "domain";
|
||||
if(url[0] == '*')
|
||||
base_rule["rule-providers"][yaml_key]["url"] = url.substr(1);
|
||||
else
|
||||
base_rule["rule-providers"][yaml_key]["url"] = remote_path_prefix + "/getruleset?type=3&url=" + urlSafeBase64Encode(url);
|
||||
base_rule["rule-providers"][yaml_key]["path"] = "./providers/rule-provider_" + yaml_key + ".yaml";
|
||||
base_rule["rule-providers"][yaml_key]["path"] = "./providers/" + std::to_string(hash_(url)) + "_domain.yaml";
|
||||
if(interval)
|
||||
base_rule["rule-providers"][yaml_key]["interval"] = interval;
|
||||
}
|
||||
@@ -503,14 +505,14 @@ int renderClashScript(YAML::Node &base_rule, std::vector<RulesetContent> &rulese
|
||||
{
|
||||
std::string yaml_key = x;
|
||||
if(rule_type[x] != RULESET_CLASH_IPCIDR)
|
||||
yaml_key += "_ipcidr";
|
||||
yaml_key += " (IP-CIDR)";
|
||||
base_rule["rule-providers"][yaml_key]["type"] = "http";
|
||||
base_rule["rule-providers"][yaml_key]["behavior"] = "ipcidr";
|
||||
if(url[0] == '*')
|
||||
base_rule["rule-providers"][yaml_key]["url"] = url.substr(1);
|
||||
else
|
||||
base_rule["rule-providers"][yaml_key]["url"] = remote_path_prefix + "/getruleset?type=4&url=" + urlSafeBase64Encode(url);
|
||||
base_rule["rule-providers"][yaml_key]["path"] = "./providers/rule-provider_" + yaml_key + ".yaml";
|
||||
base_rule["rule-providers"][yaml_key]["path"] = "./providers/" + std::to_string(hash_(url)) + "_ipcidr.yaml";
|
||||
if(interval)
|
||||
base_rule["rule-providers"][yaml_key]["interval"] = interval;
|
||||
}
|
||||
@@ -523,7 +525,7 @@ int renderClashScript(YAML::Node &base_rule, std::vector<RulesetContent> &rulese
|
||||
base_rule["rule-providers"][yaml_key]["url"] = url.substr(1);
|
||||
else
|
||||
base_rule["rule-providers"][yaml_key]["url"] = remote_path_prefix + "/getruleset?type=6&url=" + urlSafeBase64Encode(url);
|
||||
base_rule["rule-providers"][yaml_key]["path"] = "./providers/rule-provider_" + yaml_key + ".yaml";
|
||||
base_rule["rule-providers"][yaml_key]["path"] = "./providers/" + std::to_string(hash_(url)) + ".yaml";
|
||||
if(interval)
|
||||
base_rule["rule-providers"][yaml_key]["interval"] = interval;
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
@@ -564,14 +564,14 @@ void readYAMLConf(YAML::Node &node)
|
||||
writeLog(0, "Load preference settings in YAML format completed.", LOG_LEVEL_INFO);
|
||||
}
|
||||
|
||||
//template <class T, class... U>
|
||||
//void find_if_exist(const toml::value &v, const toml::key &k, T& target, U&&... args)
|
||||
//{
|
||||
// if(v.contains(k)) target = toml::find<T>(v, k);
|
||||
// if constexpr (sizeof...(args) > 0) find_if_exist(v, std::forward<U>(args)...);
|
||||
//}
|
||||
template <class T, class... U>
|
||||
void find_if_exist(const toml::value &v, const toml::value::key_type &k, T& target, U&&... args)
|
||||
{
|
||||
if(v.contains(k)) target = toml::find<T>(v, k);
|
||||
if constexpr (sizeof...(args) > 0) find_if_exist(v, std::forward<U>(args)...);
|
||||
}
|
||||
|
||||
void operate_toml_kv_table(const std::vector<toml::table> &arr, const toml::key &key_name, const toml::key &value_name, std::function<void (const toml::value&, const toml::value&)> binary_op)
|
||||
void operate_toml_kv_table(const std::vector<toml::table> &arr, const toml::value::key_type &key_name, const toml::value::key_type &value_name, std::function<void (const toml::value&, const toml::value&)> binary_op)
|
||||
{
|
||||
for(const toml::table &table : arr)
|
||||
{
|
||||
|
||||
@@ -49,7 +49,7 @@ struct Settings
|
||||
tribool UDPFlag, TFOFlag, skipCertVerify, TLS13Flag, enableInsert;
|
||||
bool enableSort = false, updateStrict = false;
|
||||
bool clashUseNewField = false, singBoxAddClashModes = true;
|
||||
std::string clashProxiesStyle = "flow";
|
||||
std::string clashProxiesStyle = "flow", clashProxyGroupsStyle = "block";
|
||||
std::string proxyConfig, proxyRuleset, proxySubscription;
|
||||
int updateInterval = 0;
|
||||
std::string sortScript, filterScript;
|
||||
|
||||
@@ -87,11 +87,13 @@ static int logger(CURL *handle, curl_infotype type, char *data, size_t size, voi
|
||||
switch(type)
|
||||
{
|
||||
case CURLINFO_TEXT:
|
||||
prefix = "CURL_INFO";
|
||||
prefix = "CURL_INFO: ";
|
||||
break;
|
||||
case CURLINFO_HEADER_IN:
|
||||
prefix = "CURL_HEADER: < ";
|
||||
break;
|
||||
case CURLINFO_HEADER_OUT:
|
||||
prefix = "CURL_HEADER";
|
||||
prefix = "CURL_HEADER: > ";
|
||||
break;
|
||||
case CURLINFO_DATA_IN:
|
||||
case CURLINFO_DATA_OUT:
|
||||
@@ -105,7 +107,6 @@ static int logger(CURL *handle, curl_infotype type, char *data, size_t size, voi
|
||||
for(auto &x : lines)
|
||||
{
|
||||
std::string log_content = prefix;
|
||||
log_content += ": ";
|
||||
log_content += x;
|
||||
writeLog(0, log_content, LOG_LEVEL_VERBOSE);
|
||||
}
|
||||
@@ -113,7 +114,6 @@ static int logger(CURL *handle, curl_infotype type, char *data, size_t size, voi
|
||||
else
|
||||
{
|
||||
std::string log_content = prefix;
|
||||
log_content += ": ";
|
||||
log_content += trimWhitespace(content);
|
||||
writeLog(0, log_content, LOG_LEVEL_VERBOSE);
|
||||
}
|
||||
@@ -172,7 +172,8 @@ static int curlGet(const FetchArgument &argument, FetchResult &result)
|
||||
{
|
||||
for(auto &x : *argument.request_headers)
|
||||
{
|
||||
header_list = curl_slist_append(header_list, (x.first + ": " + x.second).data());
|
||||
auto header = x.first + ": " + x.second;
|
||||
header_list = curl_slist_append(header_list, header.data());
|
||||
}
|
||||
if(!argument.request_headers->contains("User-Agent"))
|
||||
curl_easy_setopt(curl_handle, CURLOPT_USERAGENT, user_agent_str);
|
||||
@@ -233,7 +234,7 @@ static int curlGet(const FetchArgument &argument, FetchResult &result)
|
||||
while(true)
|
||||
{
|
||||
retVal = curl_easy_perform(curl_handle);
|
||||
if(retVal == CURLE_OK || max_fails <= fail_count)
|
||||
if(retVal == CURLE_OK || max_fails <= fail_count || global.APIMode)
|
||||
break;
|
||||
else
|
||||
fail_count++;
|
||||
|
||||
@@ -233,10 +233,10 @@ int main(int argc, char *argv[])
|
||||
}
|
||||
}
|
||||
std::string type = getUrlArg(request.argument, "type");
|
||||
if(type == "form")
|
||||
fileWrite(global.prefPath, getFormData(request.postdata), true);
|
||||
else if(type == "direct")
|
||||
if(type == "form" || type == "direct")
|
||||
{
|
||||
fileWrite(global.prefPath, request.postdata, true);
|
||||
}
|
||||
else
|
||||
{
|
||||
response.status_code = 501;
|
||||
|
||||
@@ -9,7 +9,8 @@
|
||||
using String = std::string;
|
||||
using StringArray = std::vector<String>;
|
||||
|
||||
enum class ProxyType {
|
||||
enum class ProxyType
|
||||
{
|
||||
Unknown,
|
||||
Shadowsocks,
|
||||
ShadowsocksR,
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
#include <map>
|
||||
#include <iostream>
|
||||
#include <quickjspp.hpp>
|
||||
#include <utility>
|
||||
#include <quickjs/quickjs-libc.h>
|
||||
|
||||
#ifdef _WIN32
|
||||
@@ -226,7 +227,7 @@ public:
|
||||
qjs_fetch_Headers headers;
|
||||
std::string cookies;
|
||||
std::string postdata;
|
||||
explicit qjs_fetch_Request(const std::string &url) : url(url) {}
|
||||
explicit qjs_fetch_Request(std::string url) : url(std::move(url)) {}
|
||||
};
|
||||
|
||||
class qjs_fetch_Response
|
||||
@@ -389,7 +390,7 @@ void script_runtime_init(qjs::Runtime &runtime)
|
||||
js_std_init_handlers(runtime.rt);
|
||||
}
|
||||
|
||||
int ShowMsgbox(const std::string &title, std::string content, uint16_t type = 0)
|
||||
int ShowMsgbox(const std::string &title, const std::string &content, uint16_t type = 0)
|
||||
{
|
||||
#ifdef _WIN32
|
||||
if(!type)
|
||||
@@ -424,7 +425,7 @@ struct Lambda {
|
||||
|
||||
uint32_t currentTime()
|
||||
{
|
||||
return time(NULL);
|
||||
return time(nullptr);
|
||||
}
|
||||
|
||||
int script_context_init(qjs::Context &context)
|
||||
@@ -525,7 +526,7 @@ int script_context_init(qjs::Context &context)
|
||||
)", "<import>", JS_EVAL_TYPE_MODULE);
|
||||
return 0;
|
||||
}
|
||||
catch(qjs::exception)
|
||||
catch(qjs::exception&)
|
||||
{
|
||||
script_print_stack(context);
|
||||
return 1;
|
||||
|
||||
@@ -47,16 +47,23 @@ static httplib::Server::Handler makeHandler(const responseRoute &rr)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
req.headers[h.first] = h.second;
|
||||
req.headers.emplace(h.first.data(), h.second.data());
|
||||
}
|
||||
req.argument = request.params;
|
||||
if (request.get_header_value("Content-Type") == "application/x-www-form-urlencoded")
|
||||
if (request.method == "POST" || request.method == "PUT" || request.method == "PATCH")
|
||||
{
|
||||
req.postdata = urlDecode(request.body);
|
||||
}
|
||||
else
|
||||
{
|
||||
req.postdata = request.body;
|
||||
if (request.is_multipart_form_data() && !request.files.empty())
|
||||
{
|
||||
req.postdata = request.files.begin()->second.content;
|
||||
}
|
||||
else if (request.get_header_value("Content-Type") == "application/x-www-form-urlencoded")
|
||||
{
|
||||
req.postdata = urlDecode(request.body);
|
||||
}
|
||||
else
|
||||
{
|
||||
req.postdata = request.body;
|
||||
}
|
||||
}
|
||||
auto result = rr.rc(req, resp);
|
||||
response.status = resp.status_code;
|
||||
@@ -163,6 +170,7 @@ int WebServer::start_web_server_multi(listener_args *args)
|
||||
{
|
||||
res.set_header("Access-Control-Allow-Headers", req.get_header_value("Access-Control-Request-Headers"));
|
||||
}
|
||||
res.set_header("Access-Control-Allow-Origin", "*");
|
||||
return httplib::Server::HandlerResponse::Unhandled;
|
||||
});
|
||||
for (auto &x : redirect_map)
|
||||
@@ -187,7 +195,7 @@ int WebServer::start_web_server_multi(listener_args *args)
|
||||
{
|
||||
try
|
||||
{
|
||||
std::rethrow_exception(e);
|
||||
if (e) std::rethrow_exception(e);
|
||||
}
|
||||
catch (const httplib::Error &err)
|
||||
{
|
||||
@@ -212,6 +220,9 @@ int WebServer::start_web_server_multi(listener_args *args)
|
||||
{
|
||||
server.set_mount_point("/", serve_file_root);
|
||||
}
|
||||
server.new_task_queue = [args] {
|
||||
return new httplib::ThreadPool(args->max_workers);
|
||||
};
|
||||
server.bind_to_port(args->listen_address, args->port, 0);
|
||||
|
||||
std::thread thread([&]()
|
||||
|
||||
@@ -26,7 +26,8 @@ std::string getTime(int type)
|
||||
format = "%Y%m%d-%H%M%S";
|
||||
break;
|
||||
case 2:
|
||||
format = "%Y/%m/%d %a %H:%M:%S." + std::string(cMillis);
|
||||
format = "%Y/%m/%d %a %H:%M:%S.";
|
||||
format += cMillis;
|
||||
break;
|
||||
case 3:
|
||||
default:
|
||||
|
||||
@@ -5,9 +5,16 @@
|
||||
#include <map>
|
||||
#include <string.h>
|
||||
|
||||
struct strICaseComp {
|
||||
bool operator()(const std::string &lhs, const std::string &rhs) const {
|
||||
return strcasecmp(lhs.c_str(), rhs.c_str()) > 0;
|
||||
struct strICaseComp
|
||||
{
|
||||
bool operator() (const std::string &lhs, const std::string &rhs) const
|
||||
{
|
||||
return std::lexicographical_compare(lhs.begin(), lhs.end(), rhs.begin(),
|
||||
rhs.end(),
|
||||
[](unsigned char c1, unsigned char c2)
|
||||
{
|
||||
return ::tolower(c1) < ::tolower(c2);
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
@@ -3,88 +3,38 @@
|
||||
#include <sstream>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
#include <stdlib.h>
|
||||
#include <time.h>
|
||||
#include <cstdlib>
|
||||
#include <ctime>
|
||||
#include <random>
|
||||
|
||||
#include "string.h"
|
||||
#include "map_extra.h"
|
||||
|
||||
std::vector<std::string> split(const std::string &s, const std::string &separator)
|
||||
{
|
||||
string_size bpos = 0, epos = s.find(separator);
|
||||
std::vector<std::string> result;
|
||||
string_size i = 0;
|
||||
|
||||
while(i != s.size())
|
||||
while(bpos < s.size())
|
||||
{
|
||||
int flag = 0;
|
||||
while(i != s.size() && flag == 0)
|
||||
{
|
||||
flag = 1;
|
||||
for(char x : separator)
|
||||
if(s[i] == x)
|
||||
{
|
||||
++i;
|
||||
flag = 0;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
flag = 0;
|
||||
string_size j = i;
|
||||
while(j != s.size() && flag == 0)
|
||||
{
|
||||
for(char x : separator)
|
||||
if(s[j] == x)
|
||||
{
|
||||
flag = 1;
|
||||
break;
|
||||
}
|
||||
if(flag == 0)
|
||||
++j;
|
||||
}
|
||||
if(i != j)
|
||||
{
|
||||
result.push_back(s.substr(i, j-i));
|
||||
i = j;
|
||||
}
|
||||
if(epos == std::string::npos)
epos = s.size();
result.push_back(s.substr(bpos, epos - bpos));
bpos = epos + separator.size();
epos = s.find(separator, bpos);
}
return result;
}

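To make the behavioral change easier to see, here is a small Python sketch (not project code) of the new find()-based loop shown above; unlike the old character-by-character scan it treats the separator as a whole substring and keeps empty fields between adjacent separators:

    def split(s: str, separator: str) -> list[str]:
        # Walk the string with find(), emitting every slice between separators,
        # including empty slices, just like the reworked C++ split().
        result = []
        bpos, epos = 0, s.find(separator)
        while bpos < len(s):
            if epos == -1:
                epos = len(s)
            result.append(s[bpos:epos])
            bpos = epos + len(separator)
            epos = s.find(separator, bpos)
        return result

    print(split("a,,b,c", ","))  # ['a', '', 'b', 'c'] -- the old loop silently dropped the empty field
    print(split("a:b", "::"))    # ['a:b'] -- the old loop treated the separator as a character set and split at ':'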
void split(std::vector<std::string_view> &result, std::string_view s, char separator)
|
||||
{
|
||||
string_size i = 0;
|
||||
|
||||
while (i != s.size())
|
||||
string_size bpos = 0, epos = s.find(separator);
|
||||
while(bpos < s.size())
|
||||
{
|
||||
int flag = 0;
|
||||
while(i != s.size() && flag == 0)
|
||||
{
|
||||
flag = 1;
|
||||
if(s[i] == separator)
|
||||
{
|
||||
++i;
|
||||
flag = 0;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
flag = 0;
|
||||
string_size j = i;
|
||||
while(j != s.size() && flag == 0)
|
||||
{
|
||||
if(s[j] == separator)
|
||||
{
|
||||
flag = 1;
|
||||
break;
|
||||
}
|
||||
++j;
|
||||
}
|
||||
if (i != j)
|
||||
{
|
||||
result.push_back(s.substr(i, j-i));
|
||||
i = j;
|
||||
}
|
||||
if(epos == std::string_view::npos)
|
||||
epos = s.size();
|
||||
result.push_back(s.substr(bpos, epos - bpos));
|
||||
bpos = epos + 1;
|
||||
epos = s.find(separator, bpos);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -141,7 +91,7 @@ std::string toUpper(const std::string &str)
|
||||
void processEscapeChar(std::string &str)
|
||||
{
|
||||
string_size pos = str.find('\\');
|
||||
while(pos != str.npos)
|
||||
while(pos != std::string::npos)
|
||||
{
|
||||
if(pos == str.size())
|
||||
break;
|
||||
@@ -191,7 +141,7 @@ void processEscapeCharReverse(std::string &str)
|
||||
|
||||
int parseCommaKeyValue(const std::string &input, const std::string &separator, string_pair_array &result)
|
||||
{
|
||||
string_size bpos = 0, epos = input.find(',');
|
||||
string_size bpos = 0, epos = input.find(separator);
|
||||
std::string kv;
|
||||
while(bpos < input.size())
|
||||
{
|
||||
@@ -200,9 +150,9 @@ int parseCommaKeyValue(const std::string &input, const std::string &separator, s
|
||||
else if(epos && input[epos - 1] == '\\')
|
||||
{
|
||||
kv += input.substr(bpos, epos - bpos - 1);
|
||||
kv += ',';
|
||||
kv += separator;
|
||||
bpos = epos + 1;
|
||||
epos = input.find(',', bpos);
|
||||
epos = input.find(separator, bpos);
|
||||
continue;
|
||||
}
|
||||
kv += input.substr(bpos, epos - bpos);
|
||||
@@ -213,9 +163,9 @@ int parseCommaKeyValue(const std::string &input, const std::string &separator, s
|
||||
result.emplace_back(kv.substr(0, eqpos), kv.substr(eqpos + 1));
|
||||
kv.clear();
|
||||
bpos = epos + 1;
|
||||
epos = input.find(',', bpos);
|
||||
epos = input.find(separator, bpos);
|
||||
}
|
||||
if(kv.size())
|
||||
if(!kv.empty())
|
||||
{
|
||||
string_size eqpos = kv.find('=');
|
||||
if(eqpos == std::string::npos)
|
||||
@@ -328,12 +278,12 @@ std::string getUrlArg(const std::string &url, const std::string &request)
|
||||
while(pos)
|
||||
{
|
||||
pos = url.rfind(pattern, pos);
|
||||
if(pos != url.npos)
|
||||
if(pos != std::string::npos)
|
||||
{
|
||||
if(pos == 0 || url[pos - 1] == '&' || url[pos - 1] == '?')
|
||||
{
|
||||
pos += pattern.size();
|
||||
return url.substr(pos, url.find("&", pos) - pos);
|
||||
return url.substr(pos, url.find('&', pos) - pos);
|
||||
}
|
||||
}
|
||||
else
|
||||
@@ -410,23 +360,24 @@ bool isStrUTF8(const std::string &data)
|
||||
std::string randomStr(int len)
|
||||
{
|
||||
std::string retData;
|
||||
srand(time(NULL));
|
||||
int cnt = 0;
|
||||
while(cnt < len)
|
||||
std::random_device rd;
|
||||
std::mt19937 gen(rd());
|
||||
std::uniform_int_distribution<> dis(0, 61);
|
||||
for(int i = 0; i < len; i++)
|
||||
{
|
||||
switch((rand() % 3))
|
||||
int r = dis(gen);
|
||||
if (r < 26)
|
||||
{
|
||||
case 1:
|
||||
retData += ('A' + rand() % 26);
|
||||
break;
|
||||
case 2:
|
||||
retData += ('a' + rand() % 26);
|
||||
break;
|
||||
default:
|
||||
retData += ('0' + rand() % 10);
|
||||
break;
|
||||
retData.push_back('a' + r);
|
||||
}
|
||||
else if (r < 52)
|
||||
{
|
||||
retData.push_back('A' + r - 26);
|
||||
}
|
||||
else
|
||||
{
|
||||
retData.push_back('0' + r - 52);
|
||||
}
|
||||
cnt++;
|
||||
}
|
||||
return retData;
|
||||
}
|
||||
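A compact Python sketch (not project code) of what the reworked randomStr() above now does: index uniformly into the 62-character alphanumeric alphabet with a properly seeded generator, instead of srand(time(NULL)) plus rand() % 3:

    import random
    import string

    def random_str(length: int) -> str:
        # 0-25 -> 'a'-'z', 26-51 -> 'A'-'Z', 52-61 -> '0'-'9', mirroring the C++ mapping
        alphabet = string.ascii_lowercase + string.ascii_uppercase + string.digits
        rng = random.SystemRandom()  # stands in for the std::random_device-seeded std::mt19937
        return "".join(alphabet[rng.randint(0, 61)] for _ in range(length))

    print(random_str(16))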
@@ -451,7 +402,7 @@ int to_int(const std::string &str, int def_value)
|
||||
|
||||
std::string join(const string_array &arr, const std::string &delimiter)
|
||||
{
|
||||
if(arr.size() == 0)
|
||||
if(arr.empty())
|
||||
return "";
|
||||
if(arr.size() == 1)
|
||||
return arr[0];
|
||||
|
||||