Compare commits

..

12 Commits

Author   SHA1        Message                    Date
Tindy X  6a3e287563  Fix build scripts          2023-12-11 15:16:24 +08:00
Tindy X  18fea93d89  Fix build scripts          2023-12-11 13:07:11 +08:00
Tindy X  a4a273d645  Fix build scripts          2023-12-11 04:16:24 +08:00
Tindy X  8485c2c778  Fix build scripts          2023-12-11 03:55:36 +08:00
Tindy X  2cb7fc7e19  Fix Dockerfile             2023-12-11 03:24:31 +08:00
Tindy X  6381fbbcbd  Fix build scripts          2023-12-11 03:23:15 +08:00
Tindy X  104df33eae  Fix build scripts          2023-12-11 03:17:21 +08:00
Tindy X  49d72f446c  Fix build scripts          2023-12-11 02:25:11 +08:00
Tindy X  275f77a4f6  Fix build scripts          2023-12-11 02:13:12 +08:00
Tindy X  844989d772  Fix build scripts          2023-12-11 01:55:16 +08:00
Tindy X  1757090b25  Fix build script           2023-12-11 01:53:24 +08:00
Tindy X  547c771127  Add xmake as build system  2023-12-11 01:51:08 +08:00
47 changed files with 4049 additions and 5032 deletions

View File

@@ -1,7 +1,8 @@
name: GitHub CI
on:
push:
branches: [ master ]
branches:
- '**'
tags:
- '**'
workflow_dispatch:
@@ -12,66 +13,112 @@ concurrency:
cancel-in-progress: true
jobs:
linux_build:
strategy:
matrix:
include:
- arch: x86
artifact: subconverter_linux32
os: ubuntu-latest
- arch: amd64
artifact: subconverter_linux64
os: ubuntu-latest
- arch: armv7
artifact: subconverter_armv7
os: ubuntu-latest
- arch: aarch64
artifact: subconverter_aarch64
os: ubuntu-latest
runs-on: ${{ matrix.os }}
name: Linux ${{ matrix.arch }} Build
linux32_build:
name: Linux x86 Build
runs-on: ubuntu-latest
steps:
- name: Checkout base
uses: actions/checkout@v4
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- uses: actions/checkout@v3
- name: Add commit id into version
if: ${{ !startsWith(github.ref, 'refs/tags/') }}
run: SHA=$(git rev-parse --short HEAD) && sed -i 's/\(v[0-9]\.[0-9]\.[0-9]\)/\1-'"$SHA"'/' src/version.h
- name: Build
run: docker run --rm -v $GITHUB_WORKSPACE:/root/workdir multiarch/alpine:${{ matrix.arch }}-latest-stable /bin/sh -c "apk add bash git nodejs npm && cd /root/workdir && chmod +x scripts/build.alpine.release.sh && bash scripts/build.alpine.release.sh"
run: docker run --rm -v $GITHUB_WORKSPACE:/root/workdir multiarch/alpine:x86-latest-stable /bin/sh -c "apk add bash git nodejs npm && cd /root/workdir && chmod +x scripts/build.alpine.release.sh && bash scripts/build.alpine.release.sh"
- name: Upload
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.artifact }}
name: subconverter_linux32
path: subconverter/
- name: Package Release
if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
run: tar czf ${{ matrix.artifact }}.tar.gz subconverter
run: tar czf subconverter_linux32.tar.gz subconverter
- name: Draft Release
uses: softprops/action-gh-release@v1
if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
uses: softprops/action-gh-release@v2
with:
files: ${{ matrix.artifact }}.tar.gz
files: subconverter_linux32.tar.gz
draft: true
linux64_build:
name: Linux x86_64 Build
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Add commit id into version
if: ${{ !startsWith(github.ref, 'refs/tags/') }}
run: SHA=$(git rev-parse --short HEAD) && sed -i 's/\(v[0-9]\.[0-9]\.[0-9]\)/\1-'"$SHA"'/' src/version.h
- name: Build
run: docker run --rm -v $GITHUB_WORKSPACE:/root/workdir multiarch/alpine:amd64-latest-stable /bin/sh -c "apk add bash git nodejs npm && cd /root/workdir && chmod +x scripts/build.alpine.release.sh && bash scripts/build.alpine.release.sh"
- name: Upload
uses: actions/upload-artifact@v3
with:
name: subconverter_linux64
path: subconverter/
- name: Package Release
if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
run: tar czf subconverter_linux64.tar.gz subconverter
- name: Draft Release
uses: softprops/action-gh-release@v1
if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
with:
files: subconverter_linux64.tar.gz
draft: true
armv7_build:
name: Linux armv7 Build
runs-on: [self-hosted, linux, ARM]
steps:
- uses: actions/checkout@v3
- name: Add commit id into version
if: ${{ !startsWith(github.ref, 'refs/tags/') }}
run: SHA=$(git rev-parse --short HEAD) && sed -i 's/\(v[0-9]\.[0-9]\.[0-9]\)/\1-'"$SHA"'/' src/version.h
- name: Build
run: docker run --rm -v $GITHUB_WORKSPACE:/root/workdir multiarch/alpine:armv7-latest-stable /bin/sh -c "apk add bash git nodejs npm && cd /root/workdir && chmod +x scripts/build.alpine.release.sh && bash scripts/build.alpine.release.sh"
- name: Upload
uses: actions/upload-artifact@v3
with:
name: subconverter_armv7
path: subconverter/
- name: Package Release
if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
run: tar czf subconverter_armv7.tar.gz subconverter
- name: Draft Release
uses: softprops/action-gh-release@v1
if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
with:
files: subconverter_armv7.tar.gz
draft: true
aarch64_build:
name: Linux aarch64 Build
runs-on: [self-hosted, linux, ARM64]
steps:
- uses: actions/checkout@v3
- name: Add commit id into version
if: ${{ !startsWith(github.ref, 'refs/tags/') }}
run: SHA=$(git rev-parse --short HEAD) && sed -i 's/\(v[0-9]\.[0-9]\.[0-9]\)/\1-'"$SHA"'/' src/version.h
- name: Build
run: docker run --rm -v $GITHUB_WORKSPACE:/root/workdir multiarch/alpine:aarch64-latest-stable /bin/sh -c "apk add bash git nodejs npm && cd /root/workdir && chmod +x scripts/build.alpine.release.sh && bash scripts/build.alpine.release.sh"
- name: Upload
uses: actions/upload-artifact@v3
with:
name: subconverter_aarch64
path: subconverter/
- name: Package Release
if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
run: tar czf subconverter_aarch64.tar.gz subconverter
- name: Draft Release
uses: softprops/action-gh-release@v1
if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
with:
files: subconverter_aarch64.tar.gz
draft: true
macos_build:
strategy:
matrix:
include:
- arch: x86
artifact: subconverter_darwin64
os: macos-13
- arch: arm
artifact: subconverter_darwinarm
os: macos-14
runs-on: ${{ matrix.os }}
name: macOS ${{ matrix.arch }} Build
name: macOS Build
runs-on: macos-latest
steps:
- name: Checkout base
uses: actions/checkout@v4
- name: Setup Python
uses: actions/setup-python@v5
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: '3.11'
- name: Add commit id into version
@@ -80,54 +127,39 @@ jobs:
- name: Build
run: bash scripts/build.macos.release.sh
- name: Upload
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.artifact }}
name: subconverter_darwin64
path: subconverter/
- name: Package Release
if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
run: tar czf ${{ matrix.artifact }}.tar.gz subconverter
run: tar czf subconverter_darwin64.tar.gz subconverter
- name: Draft Release
uses: softprops/action-gh-release@v1
if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
uses: softprops/action-gh-release@v2
with:
files: ${{ matrix.artifact }}.tar.gz
files: subconverter_darwin64.tar.gz
draft: true
windows_build:
strategy:
matrix:
include:
- arch: x86
artifact: subconverter_win32
env: i686
msystem: MINGW32
- arch: amd64
artifact: subconverter_win64
env: x86_64
msystem: MINGW64
windows64_build:
name: Windows x86_64 Build
runs-on: windows-latest
name: Windows ${{ matrix.arch }} Build
defaults:
run:
shell: msys2 {0}
steps:
- name: Checkout base
uses: actions/checkout@v4
- name: Setup Python
uses: actions/setup-python@v5
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: '3.11'
- name: Setup Node.js
uses: actions/setup-node@v4
- uses: actions/setup-node@v3
with:
node-version: '16'
- name: Setup MSYS2
uses: msys2/setup-msys2@v2
- uses: msys2/setup-msys2@v2
with:
update: true
install: base-devel git mingw-w64-${{ matrix.env }}-gcc mingw-w64-${{ matrix.env }}-cmake mingw-w64-${{ matrix.env }}-pcre2 patch
msystem: ${{ matrix.msystem }}
install: base-devel git mingw-w64-x86_64-gcc mingw-w64-x86_64-cmake mingw-w64-x86_64-pcre2 patch mingw-w64-x86_64-xmake
msystem: MINGW64
path-type: inherit
- name: Add commit id into version
if: ${{ !startsWith(github.ref, 'refs/tags/') }}
@@ -135,16 +167,56 @@ jobs:
- name: Build
run: bash scripts/build.windows.release.sh
- name: Upload
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.artifact }}
name: subconverter_win64
path: subconverter/
- name: Package Release
if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
run: 7z a ${{ matrix.artifact }}.7z subconverter/
run: 7z a subconverter_win64.7z subconverter/
- name: Draft Release
uses: softprops/action-gh-release@v1
if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
uses: softprops/action-gh-release@v2
with:
files: ${{ matrix.artifact }}.7z
files: subconverter_win64.7z
draft: true
windows32_build:
name: Windows x86 Build
runs-on: windows-latest
defaults:
run:
shell: msys2 {0}
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: '3.11'
- uses: actions/setup-node@v3
with:
node-version: '16'
- uses: msys2/setup-msys2@v2
with:
update: true
install: base-devel git mingw-w64-i686-gcc mingw-w64-i686-cmake mingw-w64-i686-pcre2 patch mingw-w64-i686-xmake
msystem: MINGW32
path-type: inherit
- name: Add commit id into version
if: ${{ !startsWith(github.ref, 'refs/tags/') }}
run: SHA=$(git rev-parse --short HEAD) && sed -i 's/\(v[0-9]\.[0-9]\.[0-9]\)/\1-'"$SHA"'/' src/version.h
- name: Build
run: bash scripts/build.windows.release.sh
- name: Upload
uses: actions/upload-artifact@v3
with:
name: subconverter_win32
path: subconverter/
- name: Package Release
if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
run: 7z a subconverter_win32.7z subconverter/
- name: Draft Release
uses: softprops/action-gh-release@v1
if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
with:
files: subconverter_win32.7z
draft: true

View File

@@ -1,7 +1,8 @@
name: Publish Docker Image
on:
push:
branches: [ master ]
branches:
- '**'
tags:
- '**'
@@ -9,122 +10,335 @@ concurrency:
group: ${{ github.ref }}-${{ github.workflow }}
cancel-in-progress: true
env:
REGISTRY_IMAGE: asdlokj1qpi23/subconverter
jobs:
build:
strategy:
matrix:
include:
- platform: linux/amd64
os: ubuntu-latest
- platform: linux/386
os: ubuntu-latest
- platform: linux/arm/v7
os: ubuntu-latest
- platform: linux/arm64
os: ubuntu-latest
runs-on: ${{ matrix.os }}
name: Build ${{ matrix.platform }} Image
amd64_build:
name: Build AMD64 Image
runs-on: ubuntu-latest
steps:
- name: Prepare
run: |
platform=${{ matrix.platform }}
echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
- name: Checkout base
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ env.REGISTRY_IMAGE }}
tags: |
type=semver,pattern={{version}}
type=raw,value=latest,enable={{is_default_branch}}
uses: docker/setup-buildx-action@v2
- name: Docker login
uses: docker/login-action@v3
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Get commit SHA
if: github.ref == 'refs/heads/master'
id: vars
run: echo "sha_short=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT
- name: Build and export
id: build
uses: docker/build-push-action@v5
if: startsWith(github.ref, 'refs/heads/')
uses: docker/build-push-action@v3
with:
platforms: ${{ matrix.platform }}
context: scripts/
labels: ${{ steps.meta.outputs.labels }}
platforms: linux/amd64
context: .
file: scripts/Dockerfile
tags: tindy2013/subconverter:latest
build-args: |
SHA=${{ steps.vars.outputs.sha_short }}
outputs: type=image,name=${{ env.REGISTRY_IMAGE }},push-by-digest=true,name-canonical=true,push=true
outputs: type=image,push=true
- name: Export digest
run: |
rm -rf /tmp/digests
mkdir -p /tmp/digests
digest="${{ steps.build.outputs.digest }}"
touch "/tmp/digests/${digest#sha256:}"
- name: Replace tag without `v`
if: startsWith(github.ref, 'refs/tags/')
uses: actions/github-script@v6
id: version
with:
script: |
return context.payload.ref.replace(/\/?refs\/tags\/v/, '')
result-encoding: string
- name: Build release and export
id: build_rel
if: startsWith(github.ref, 'refs/tags/')
uses: docker/build-push-action@v3
with:
platforms: linux/amd64
context: .
file: scripts/Dockerfile
tags: tindy2013/subconverter:${{steps.version.outputs.result}}
outputs: type=image,push=true
- name: Save digest
if: startsWith(github.ref, 'refs/heads/')
run: echo ${{ steps.build.outputs.digest }} > /tmp/digest.txt
- name: Save release digest
if: startsWith(github.ref, 'refs/tags/')
run: echo ${{ steps.build_rel.outputs.digest }} > /tmp/digest.txt
- name: Upload artifact
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: digest-${{ env.PLATFORM_PAIR }}
path: /tmp/digests/*
if-no-files-found: error
retention-days: 1
name: digest_amd64
path: /tmp/digest.txt
merge:
name: Merge
needs: build
x86_build:
name: Build x86 Image
runs-on: ubuntu-latest
steps:
- name: Download digests
uses: actions/download-artifact@v4
- name: Checkout base
uses: actions/checkout@v3
with:
path: /tmp/digests
pattern: digest-*
merge-multiple: true
fetch-depth: 0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ env.REGISTRY_IMAGE }}
tags: |
type=semver,pattern={{version}}
type=raw,value=latest,enable={{is_default_branch}}
uses: docker/setup-buildx-action@v2
- name: Docker login
uses: docker/login-action@v3
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Create manifest list and push
working-directory: /tmp/digests
run: |
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf '${{ env.REGISTRY_IMAGE }}@sha256:%s ' *)
- name: Get commit SHA
id: vars
run: echo "sha_short=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT
- name: Inspect image
run: |
docker buildx imagetools inspect ${{ env.REGISTRY_IMAGE }}:${{ steps.meta.outputs.version }}
- name: Build and export
id: build
if: startsWith(github.ref, 'refs/heads/')
uses: docker/build-push-action@v3
with:
platforms: linux/386
context: .
file: scripts/Dockerfile
tags: tindy2013/subconverter:latest
build-args: |
SHA=${{ steps.vars.outputs.sha_short }}
outputs: type=image,push=true
- name: Replace tag without `v`
if: startsWith(github.ref, 'refs/tags/')
uses: actions/github-script@v6
id: version
with:
script: |
return context.payload.ref.replace(/\/?refs\/tags\/v/, '')
result-encoding: string
- name: Build release and export
id: build_rel
if: startsWith(github.ref, 'refs/tags/')
uses: docker/build-push-action@v3
with:
platforms: linux/386
context: .
file: scripts/Dockerfile
tags: tindy2013/subconverter:${{steps.version.outputs.result}}
outputs: type=image,push=true
- name: Save digest
if: startsWith(github.ref, 'refs/heads/')
run: echo ${{ steps.build.outputs.digest }} > /tmp/digest.txt
- name: Save release digest
if: startsWith(github.ref, 'refs/tags/')
run: echo ${{ steps.build_rel.outputs.digest }} > /tmp/digest.txt
- name: Upload artifact
uses: actions/upload-artifact@v3
with:
name: digest_386
path: /tmp/digest.txt
armv7_build:
name: Build ARMv7 Image
runs-on: [self-hosted, linux, ARM]
steps:
- name: Checkout base
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Docker login
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Get commit SHA
id: vars
run: echo "sha_short=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT
- name: Build and export
id: build
if: startsWith(github.ref, 'refs/heads/')
uses: docker/build-push-action@v3
with:
platforms: linux/arm/v7
context: .
file: scripts/Dockerfile
tags: tindy2013/subconverter:latest
build-args: |
SHA=${{ steps.vars.outputs.sha_short }}
THREADS=4
outputs: type=image,push=true
- name: Replace tag without `v`
if: startsWith(github.ref, 'refs/tags/')
uses: actions/github-script@v6
id: version
with:
script: |
return context.payload.ref.replace(/\/?refs\/tags\/v/, '')
result-encoding: string
- name: Build release and export
id: build_rel
if: startsWith(github.ref, 'refs/tags/')
uses: docker/build-push-action@v3
with:
platforms: linux/arm/v7
context: .
file: scripts/Dockerfile
tags: tindy2013/subconverter:${{steps.version.outputs.result}}
build-args: |
THREADS=4
outputs: type=image,push=true
- name: Save digest
if: startsWith(github.ref, 'refs/heads/')
run: echo ${{ steps.build.outputs.digest }} > /tmp/digest.txt
- name: Save release digest
if: startsWith(github.ref, 'refs/tags/')
run: echo ${{ steps.build_rel.outputs.digest }} > /tmp/digest.txt
- name: Upload artifact
uses: actions/upload-artifact@v3
with:
name: digest_armv7
path: /tmp/digest.txt
arm64_build:
name: Build ARM64 Image
runs-on: [self-hosted, linux, ARM64]
steps:
- name: Checkout base
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Docker login
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Get commit SHA
id: vars
run: echo "sha_short=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT
- name: Build and export
id: build
if: startsWith(github.ref, 'refs/heads/')
uses: docker/build-push-action@v3
with:
platforms: linux/arm64
context: .
file: scripts/Dockerfile
tags: tindy2013/subconverter:latest
build-args: |
SHA=${{ steps.vars.outputs.sha_short }}
THREADS=4
outputs: type=image,push=true
- name: Replace tag without `v`
if: startsWith(github.ref, 'refs/tags/')
uses: actions/github-script@v6
id: version
with:
script: |
return context.payload.ref.replace(/\/?refs\/tags\/v/, '')
result-encoding: string
- name: Build release and export
id: build_rel
if: startsWith(github.ref, 'refs/tags/')
uses: docker/build-push-action@v3
with:
platforms: linux/arm64
context: .
file: scripts/Dockerfile
tags: tindy2013/subconverter:${{steps.version.outputs.result}}
build-args: |
THREADS=4
outputs: type=image,push=true
- name: Save digest
if: startsWith(github.ref, 'refs/heads/')
run: echo ${{ steps.build.outputs.digest }} > /tmp/digest.txt
- name: Save release digest
if: startsWith(github.ref, 'refs/tags/')
run: echo ${{ steps.build_rel.outputs.digest }} > /tmp/digest.txt
- name: Upload artifact
uses: actions/upload-artifact@v3
with:
name: digest_arm64
path: /tmp/digest.txt
build:
name: Build
needs: [amd64_build, x86_build, armv7_build, arm64_build]
runs-on: ubuntu-latest
steps:
- name: Checkout base
uses: actions/checkout@v3
with:
fetch-depth: 0
# https://github.com/docker/setup-qemu-action
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
# https://github.com/docker/setup-buildx-action
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
with:
config-inline: |
[worker.oci]
max-parallelism = 1
- name: Download artifact
uses: actions/download-artifact@v3
with:
path: /tmp/images/
- name: Docker login
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Replace tag without `v`
if: startsWith(github.ref, 'refs/tags/')
uses: actions/github-script@v6
id: version
with:
script: |
return context.payload.ref.replace(/\/?refs\/tags\/v/, '')
result-encoding: string
- name: Merge and push manifest on push
if: startsWith(github.ref, 'refs/heads/')
run: python scripts/merge_manifest.py
- name: Merge and push manifest on release
if: startsWith(github.ref, 'refs/tags/')
run: python scripts/merge_manifest.py ${{steps.version.outputs.result}}

.gitignore (vendored): 10 changed lines
View File

@@ -1,11 +1,7 @@
subconverter.exe
.vscode
cmake-build-debug
cmake-build-*
.idea
base/cache
scripts/quickjspp
scripts/yaml-cpp
.DS_Store
src/.DS_Store
build
build
.xmake

View File

@@ -1,9 +1,11 @@
# subconverter-docker
This is a minimized image to run https://github.com/tindy2013/subconverter.
For running this docker, simply use the following commands:
```bash
# run the container detached, forward internal port 25500 to host port 25500
docker run -d --restart=always -p 25500:25500 asdlokj1qpi23/subconverter:latest
docker run -d --restart=always -p 25500:25500 tindy2013/subconverter:latest
# then check its status
curl http://localhost:25500/version
# if you see `subconverter vx.x.x backend` then the container is up and running
@@ -14,7 +16,7 @@ Or run in docker-compose:
version: '3'
services:
subconverter:
image: asdlokj1qpi23/subconverter:latest
image: tindy2013/subconverter:latest
container_name: subconverter
ports:
- "15051:25500"

View File

@@ -2,69 +2,42 @@
Utility to convert between various proxy subscription formats.
original git: https://github.com/asdlokj1qpi23/subconverter
[![Build Status](https://github.com/tindy2013/subconverter/actions/workflows/build.yml/badge.svg)](https://github.com/tindy2013/subconverter/actions)
[![GitHub tag (latest SemVer)](https://img.shields.io/github/tag/tindy2013/subconverter.svg)](https://github.com/tindy2013/subconverter/tags)
[![GitHub release](https://img.shields.io/github/release/tindy2013/subconverter.svg)](https://github.com/tindy2013/subconverter/releases)
[![GitHub license](https://img.shields.io/github/license/tindy2013/subconverter.svg)](https://github.com/tindy2013/subconverter/blob/master/LICENSE)
[![Build Status](https://github.com/asdlokj1qpi233/subconverter/actions/workflows/docker.yml/badge.svg)](https://github.com/asdlokj1qpi233/subconverter/actions)
[![GitHub tag (latest SemVer)](https://img.shields.io/github/tag/asdlokj1qpi233/subconverter.svg)](https://github.com/asdlokj1qpi23/subconverter/tags)
[![GitHub release](https://img.shields.io/github/release/asdlokj1qpi233/subconverter.svg)](https://github.com/asdlokj1qpi233/subconverter/releases)
[![GitHub license](https://img.shields.io/github/license/asdlokj1qpi233/subconverter.svg)](https://github.com/tindy2013/subconverter/blob/master/LICENSE)
[Docker README](https://github.com/tindy2013/subconverter/blob/master/README-docker.md)
[Docker README](https://github.com/asdlokj1qpi23/subconverter/blob/master/README-docker.md)
[中文文档](https://github.com/asdlokj1qpi23/subconverter/blob/master/README-cn.md)
[中文文档](https://github.com/tindy2013/subconverter/blob/master/README-cn.md)
- [subconverter](#subconverter)
- [Docker](#docker)
- [Supported Types](#supported-types)
- [Quick Usage](#quick-usage)
- [Access Interface](#access-interface)
- [Description](#description)
- [Advanced Usage](#advanced-usage)
- [Auto Upload](#auto-upload)
## Docker
For running this docker, simply use the following commands:
```bash
# run the container detached, forward internal port 25500 to host port 25500
docker run -d --restart=always -p 25500:25500 asdlokj1qpi23/subconverter:latest
# then check its status
curl http://localhost:25500/version
# if you see `subconverter vx.x.x backend` then the container is up and running
```
Or run in docker-compose:
```yaml
---
version: '3'
services:
subconverter:
image: asdlokj1qpi23/subconverter:latest
container_name: subconverter
ports:
- "15051:25500"
restart: always
```
## Supported Types
| Type | As Source | As Target | Target Name |
|-----------------------------------|:---------:| :----------: |----------------|
| Clash | ✓ | ✓ | clash |
| ClashR | ✓ | ✓ | clashr |
| Quantumult | ✓ | ✓ | quan |
| Quantumult X | ✓ | ✓ | quanx |
| Loon | ✓ | ✓ | loon |
| SS (SIP002) | ✓ | ✓ | ss |
| SS Android | ✓ | ✓ | sssub |
| SSD | ✓ | ✓ | ssd |
| SSR | ✓ | ✓ | ssr |
| Surfboard | ✓ | ✓ | surfboard |
| Surge 2 | ✓ | ✓ | surge&ver=2 |
| Surge 3 | ✓ | ✓ | surge&ver=3 |
| Surge 4 | ✓ | ✓ | surge&ver=4 |
| Surge 5 | ✓ | ✓ | surge&ver=5 |
| V2Ray | ✓ | | v2ray |
| Telegram-liked HTTP/Socks 5 links | ✓ | × | Only as source |
| Singbox | ✓ | ✓ | singbox |
| Type | As Source | As Target | Target Name |
| ------------ | :--------: | :----------: | ----------- |
| Clash | ✓ | ✓ | clash |
| ClashR | ✓ | ✓ | clashr |
| Quantumult | ✓ | ✓ | quan |
| Quantumult X | ✓ | ✓ | quanx |
| Loon | ✓ | ✓ | loon |
| SS (SIP002) | ✓ | ✓ | ss |
| SS Android | ✓ | ✓ | sssub |
| SSD | ✓ | ✓ | ssd |
| SSR | ✓ | ✓ | ssr |
| Surfboard | ✓ | ✓ | surfboard |
| Surge 2 | ✓ | ✓ | surge&ver=2 |
| Surge 3 | ✓ | ✓ | surge&ver=3 |
| Surge 4 | ✓ | ✓ | surge&ver=4 |
| V2Ray | ✓ | ✓ | v2ray |
| Telegram-liked HTTP/Socks 5 links | ✓ | × | Only as source |
Notice:
@@ -122,7 +95,7 @@ Finally subscribe this link in Clash and you are done!
## Advanced Usage
Please refer to [中文文档](https://github.com/asdlokj1qpi23/subconverter/blob/master/README-cn.md#%E8%BF%9B%E9%98%B6%E7%94%A8%E6%B3%95).
Please refer to [中文文档](https://github.com/tindy2013/subconverter/blob/master/README-cn.md#%E8%BF%9B%E9%98%B6%E7%94%A8%E6%B3%95).
## Auto Upload
@@ -137,6 +110,3 @@ Example:
;uncomment the following line and enter your token to enable upload function
token = xxxxxxxxxxxxxxxxxxxxxxxx(Your Personal Access Token)
```
## Thanks
[tindy2013](https://github.com/tindy2013)
[https://github.com/tindy2013/subconverter](https://github.com/tindy2013/subconverter)

View File

@@ -5,7 +5,7 @@ socks-port: {{ default(global.clash.socks_port, "7891") }}
allow-lan: {{ default(global.clash.allow_lan, "true") }}
mode: Rule
log-level: {{ default(global.clash.log_level, "info") }}
external-controller: {{ default(global.clash.external_controller, "127.0.0.1:9090") }}
external-controller: :9090
{% if default(request.clash.dns, "") == "1" %}
dns:
enable: true
@@ -378,16 +378,7 @@ enhanced-mode-by-rule = true
"rules": [],
"auto_detect_interface": true
},
"experimental": {
"cache_file": {
"enabled": true,
"store_fakeip": true
},
"clash_api": {
"external_controller": "{{ default(global.clash.external_controller, "127.0.0.1:9090") }}",
"external_ui": "dashboard"
}
}
"experimental": {}
}
{% endif %}

View File

@@ -100,14 +100,5 @@
"rules": [],
"auto_detect_interface": true
},
"experimental": {
"cache_file": {
"enabled": true,
"store_fakeip": true
},
"clash_api": {
"external_controller": "127.0.0.1:9090",
"external_ui": "dashboard"
}
}
"experimental": {}
}

View File

@@ -109,14 +109,13 @@ filter_deprecated_nodes=false
append_sub_userinfo=true
clash_use_new_field_name=true
;Generate style of the proxies and proxy groups section of Clash subscriptions.
;Generate style of the proxies section of Clash subscriptions.
;Supported styles: block, flow, compact
;Block: - name: name1 Flow: - {name: name1, key: value} Compact: [{name: name1, key: value},{name: name2, key: value}]
; key: value - {name: name2, key: value}
; - name: name2
; key: value
clash_proxies_style=flow
clash_proxy_groups_style=block
;add Clash mode to sing-box rules, and add a GLOBAL group to end of outbounds
singbox_add_clash_modes=true
@@ -233,7 +232,6 @@ clash.http_port=7890
clash.socks_port=7891
clash.allow_lan=true
clash.log_level=info
clash.external_controller=127.0.0.1:9090
singbox.allow_lan=true
singbox.mixed_port=2080

View File

@@ -117,10 +117,10 @@ match = '^Smart Access expire: (\d+)/(\d+)/(\d+)$'
replace = '$1:$2:$3:0:0:0'
[node_pref]
#udp_flag = false
#tcp_fast_open_flag = false
#skip_cert_verify_flag = false
#tls13_flag = false
udp_flag = true
tcp_fast_open_flag = false
skip_cert_verify_flag = true
tls13_flag = false
sort_flag = false
# Script used for sorting nodes. A "compare" function with 2 arguments which are the 2 nodes to be compared should be defined in the script. Supports inline script and script path.
@@ -135,14 +135,13 @@ filter_deprecated_nodes = false
append_sub_userinfo = true
clash_use_new_field_name = true
# Generate style of the proxies and proxy groups section of Clash subscriptions.
# Generate style of the proxies section of Clash subscriptions.
# Supported styles: block, flow, compact
# Block: - name: name1 Flow: - {name: name1, key: value} Compact: [{name: name1, key: value},{name: name2, key: value}]
# key: value - {name: name2, key: value}
# - name: name2
# key: value
clash_proxies_style = "flow"
clash_proxy_groups_style = "block"
# add Clash mode to sing-box rules, and add a GLOBAL group to end of outbounds
singbox_add_clash_modes = true
@@ -244,10 +243,6 @@ value = "true"
key = "clash.log_level"
value = "info"
[[template.globals]]
key = "clash.external_controller"
value = "127.0.0.1:9090"
[[template.globals]]
key = "singbox.allow_lan"
value = "true"

View File

@@ -50,7 +50,6 @@ node_pref:
append_sub_userinfo: true
clash_use_new_field_name: true
clash_proxies_style: flow
clash_proxy_groups_style: block
singbox_add_clash_modes: true
rename_node:
# - {match: "\\(?((x|X)?(\\d+)(\\.?\\d+)?)((\\s?倍率?)|(x|X))\\)?", replace: "$1x"}
@@ -109,10 +108,9 @@ template:
- {key: clash.socks_port, value: 7891}
- {key: clash.allow_lan, value: true}
- {key: clash.log_level, value: info}
- {key: clash.external_controller, value: '127.0.0.1:9090'}
- {key: singbox.allow_lan, value: true}
- {key: singbox.mixed_port, value: 2080}
aliases:
- {uri: /v, target: /version}
- {uri: /clash, target: "/sub?target=clash"}

View File

@@ -5,7 +5,7 @@
;For more available options, please check the readme section
target=clash
url=trojan://65474277@sqcu.hostmsu.ru:55551?allowinsecure=0&peer=mza.hkfq.xyz&mux=1&ws=0&wspath=&wshost=&ss=0&ssmethod=aes-128-gcm&sspasswd=&group=#%E9%A6%99%E6%B8%AFCN2-MZA%E8%8A%82%E7%82%B9-%E5%AE%BF%E8%BF%81%E8%81%94%E9%80%9A%E4%B8%AD%E8%BD%AC
url=ss://Y2hhY2hhMjAtaWV0Zi1wb2x5MTMwNTpwYXNzd29yZA@www.example.com:1080#Example
;config=config/example_external_config.ini
;ver=3
;udp=true

View File

@@ -3,15 +3,15 @@ match = "(?i:Bandwidth|expire|流量|时间|应急|过期)"
emoji = "🏳️‍🌈"
[[emoji]]
match = "(?i:\\bHK[G]?\\d*\\b|Hong.*?Kong|\\bHKT\\b|\\bHKBN\\b|\\bHGC\\b|\\bWTT\\b|\\bCMI\\b|[^-]港)"
match = "(?i:\\bHK[G]?\\b|Hong.*?Kong|\\bHKT\\b|\\bHKBN\\b|\\bHGC\\b|\\bWTT\\b|\\bCMI\\b|[^-]港)"
emoji = "🇭🇰"
[[emoji]]
match = "(?i:\\bTW[N]?\\d*\\b|Taiwan|新北|彰化|\\bCHT\\b|台湾|[^-]台|\\bHINET\\b)"
match = "(?i:\\bTW[N]?\\b|Taiwan|新北|彰化|\\bCHT\\b|台湾|[^-]台|\\bHINET\\b)"
emoji = "🇨🇳"
[[emoji]]
match = "(?i:\\bSG[P]?\\d*\\b|Singapore|新加坡|狮城|[^-]新)"
match = "(?i:\\bSG[P]?\\b|Singapore|新加坡|狮城|[^-]新)"
emoji = "🇸🇬"
[[emoji]]
@@ -19,15 +19,15 @@ match = "(尼日利亚|Nigeria)"
emoji = "🇳🇬"
[[emoji]]
match = "(?i:\\bJP[N]?\\d*\\b|Japan|Tokyo|Osaka|Saitama|日本|东京|大阪|埼玉|[^-]日)"
match = "(?i:\\bJP[N]?\\b|Japan|Tokyo|Osaka|Saitama|日本|东京|大阪|埼玉|[^-]日)"
emoji = "🇯🇵"
[[emoji]]
match = "(?i:(?<!North\\s)(\\bK[O]?R\\d*\\b|Korea|首尔|韩|韓))"
match = "(?i:\\bK[O]?R\\b|Korea|首尔|韩|韓)"
emoji = "🇰🇷"
[[emoji]]
match = "(?i:\\bUS[A]?\\d*\\b|America|United.*?States|美国|[^-]美|波特兰|达拉斯|俄勒冈|凤凰城|费利蒙|硅谷|拉斯维加斯|洛杉矶|圣何塞|圣克拉拉|西雅图|芝加哥)"
match = "(?i:\\bUS[A]?\\b|America|United.*?States|美国|[^-]美|波特兰|达拉斯|俄勒冈|凤凰城|费利蒙|硅谷|拉斯维加斯|洛杉矶|圣何塞|圣克拉拉|西雅图|芝加哥)"
emoji = "🇺🇸"
[[emoji]]
@@ -42,10 +42,6 @@ emoji = "🇦🇪"
match = "(阿尔巴尼亚|Albania)"
emoji = "🇦🇱"
[[emoji]]
match = "(南极|Antarctica)"
emoji = "🇦🇶"
[[emoji]]
match = "(Argentina|阿根廷)"
emoji = "🇦🇷"
@@ -55,7 +51,7 @@ match = "(Austria|Vienna|奥地利|维也纳)"
emoji = "🇦🇹"
[[emoji]]
match = "(?i:\\bAU[S]?\\d*\\b|Australia|Sydney|澳大利亚|澳洲|悉尼)"
match = "(?i:\\bAU[S]?\\b|Australia|Sydney|澳大利亚|澳洲|悉尼)"
emoji = "🇦🇺"
[[emoji]]
@@ -79,7 +75,7 @@ match = "(Brazil|Paulo|巴西|圣保罗)"
emoji = "🇧🇷"
[[emoji]]
match = "(?i:\\bCA[N]?\\d*\\b|Canada|Toronto|Montreal|Vancouver|加拿大|蒙特利尔|温哥华|楓葉|枫叶)"
match = "(?i:\\bCA[N]?\\b|Canada|Toronto|Montreal|Vancouver|加拿大|蒙特利尔|温哥华|楓葉|枫叶)"
emoji = "🇨🇦"
[[emoji]]
@@ -107,7 +103,7 @@ match = "(Czech|捷克)"
emoji = "🇨🇿"
[[emoji]]
match = "(?i:\\bDE[U]?\\d*\\b|Germany|法兰克福|德(国|意志)|中德|^德$)"
match = "(?i:\\bDE[U]?\\b|Germany|法兰克福|德(国|意志)|中德|^德$)"
emoji = "🇩🇪"
[[emoji]]
@@ -123,7 +119,7 @@ match = "(埃及|Egypt)"
emoji = "🇪🇬"
[[emoji]]
match = "(?i:\\bES[P]?\\d*\\b|Spain|西班牙)"
match = "(?i:\\bES[P]?\\b|Spain|西班牙)"
emoji = "🇪🇸"
[[emoji]]
@@ -135,11 +131,11 @@ match = "(Finland|Helsinki|芬兰|赫尔辛基)"
emoji = "🇫🇮"
[[emoji]]
match = "(?i:\\bFR[A]?\\d*\\b|France|Paris|法国|巴黎)"
match = "(?i:\\bFR[A]?\\b|France|Paris|法国|巴黎)"
emoji = "🇫🇷"
[[emoji]]
match = "(?i:\\bUK\\d*\\b|\\bGB[R]?\\d*\\b|England|United.*?Kingdom|London|英国|[^-]英|伦敦)"
match = "(?i:\\bUK\\b|\\bGB[R]?\\b|England|United.*?Kingdom|London|英国|[^-]英|伦敦)"
emoji = "🇬🇧"
[[emoji]]
@@ -175,7 +171,7 @@ match = "(马恩岛|Mann)"
emoji = "🇮🇲"
[[emoji]]
match = "(?i:\\bIN[D]?\\d*\\b|India|Mumbai|印度|孟买|加尔各答|贾坎德|泰米尔纳德)"
match = "(?i:\\bIN[D]?\\b|India|Mumbai|印度|孟买|加尔各答|贾坎德|泰米尔纳德)"
emoji = "🇮🇳"
[[emoji]]
@@ -183,11 +179,11 @@ match = "(伊朗|Iran)"
emoji = "🇮🇷"
[[emoji]]
match = "(?i:\\bIS[L]?\\d*\\b|Iceland|冰岛)"
match = "(?i:\\bIS[L]?\\b|Iceland|冰岛)"
emoji = "🇮🇸"
[[emoji]]
match = "(Italy|Italia|Milan|意大利|米兰)"
match = "(Italy|Milan|意大利|米兰)"
emoji = "🇮🇹"
[[emoji]]
@@ -206,6 +202,7 @@ emoji = "🇰🇬"
match = "(柬埔寨|Cambodia)"
emoji = "🇰🇭"
[[emoji]]
match = "(North.*?Korea|朝鲜)"
emoji = "🇰🇵"
@@ -234,10 +231,6 @@ emoji = "🇲🇩"
match = "(北马其顿|Macedonia)"
emoji = "🇲🇰"
[[emoji]]
match = "(缅甸|Myanmar)"
emoji = "🇲🇲"
[[emoji]]
match = "(蒙古|Монголулс|Mongolia)"
emoji = "🇲🇳"
@@ -255,7 +248,7 @@ match = "(Malaysia|马来|MY)"
emoji = "🇲🇾"
[[emoji]]
match = "(?i:\\bNL[D]?\\d*\\b|Netherlands|荷兰|阿姆斯特丹)"
match = "(?i:\\bNL[D]?\\b|Netherlands|荷兰|阿姆斯特丹)"
emoji = "🇳🇱"
[[emoji]]
@@ -267,7 +260,7 @@ match = "(新西兰|纽西兰|New Zealand)"
emoji = "🇳🇿"
[[emoji]]
match = "(?i:\\bP[O]?H\\d*\\b|Philippines|菲律宾)"
match = "(Philippines|菲律宾)"
emoji = "🇵🇭"
[[emoji]]
@@ -299,7 +292,7 @@ match = "(塞尔维亚|Serbia)"
emoji = "🇷🇸"
[[emoji]]
match = "(?i:\\bRU[S]?\\d*\\b|Russia|Moscow|Petersburg|Siberia|伯力|莫斯科|圣彼得堡|西伯利亚|新西伯利亚|哈巴罗夫斯克|俄罗斯|[^-]俄)"
match = "(?i:\\bRU[S]?\\b|Russia|Moscow|Petersburg|Siberia|伯力|莫斯科|圣彼得堡|西伯利亚|新西伯利亚|哈巴罗夫斯克|俄罗斯|[^-]俄)"
emoji = "🇷🇺"
[[emoji]]
@@ -327,7 +320,7 @@ match = "(突尼斯|Tunisia)"
emoji = "🇹🇳"
[[emoji]]
match = "(Turkey|Türkiye|土耳其|伊斯坦布尔)"
match = "(Turkey|土耳其|伊斯坦布尔)"
emoji = "🇹🇷"
[[emoji]]
@@ -371,4 +364,4 @@ emoji = "🇧🇩"
[[emoji]]
match = "(?i:\\bC[H]?N\\b|China|back|回国|中国[^-]|江苏[^-]|北京[^-]|上海[^-]|广州[^-]|深圳[^-]|杭州[^-]|常州[^-]|徐州[^-]|青岛[^-]|宁波[^-]|镇江[^-]|成都[^-]|河北[^-]|山西[^-]|辽宁[^-]|吉林[^-]|黑龙江[^-]|江苏[^-]|浙江[^-]|安徽[^-]|福建[^-]|江西[^-]|山东[^-]|河南[^-]|湖北[^-]|湖南[^-]|广东[^-]|海南[^-]|四川[^-]|贵州[^-]|云南[^-]|陕西[^-]|甘肃[^-]|青海[^-]|内蒙古[^-]|广西[^-]|西藏[^-]|宁夏[^-]|新疆[^-])"
emoji = "🇨🇳"
emoji = "🇨🇳"

View File

@@ -1,39 +1,38 @@
(?i:Bandwidth|expire|流量|时间|应急|过期),🏳️‍🌈
(?i:\bHK[G]?\d*\b|Hong.*?Kong|\bHKT\b|\bHKBN\b|\bHGC\b|\bWTT\b|\bCMI\b|[^-]港),🇭🇰
(?i:\bTW[N]?\d*\b|Taiwan|新北|彰化|\bCHT\b|台湾|[^-]台|\bHINET\b),🇨🇳
(?i:\bSG[P]?\d*\b|Singapore|新加坡|狮城|[^-]新),🇸🇬
(?i:\bHK[G]?\b|Hong.*?Kong|\bHKT\b|\bHKBN\b|\bHGC\b|\bWTT\b|\bCMI\b|[^-]港),🇭🇰
(?i:\bTW[N]?\b|Taiwan|新北|彰化|\bCHT\b|台湾|[^-]台|\bHINET\b),🇨🇳
(?i:\bSG[P]?\b|Singapore|新加坡|狮城|[^-]新),🇸🇬
(尼日利亚|Nigeria),🇳🇬
(?i:\bJP[N]?\d*\b|Japan|Tokyo|Osaka|Saitama|日本|东京|大阪|埼玉|[^-]日),🇯🇵
(?i:(?<!North\s)(\bK[O]?R\d*\b|Korea|首尔|韩|韓)),🇰🇷
(?i:\bUS[A]?\d*\b|America|United.*?States|美国|[^-]美|波特兰|达拉斯|俄勒冈|凤凰城|费利蒙|硅谷|拉斯维加斯|洛杉矶|圣何塞|圣克拉拉|西雅图|芝加哥),🇺🇸
(?i:\bJP[N]?\b|Japan|Tokyo|Osaka|Saitama|日本|东京|大阪|埼玉|[^-]日),🇯🇵
(?i:\bK[O]?R\b|Korea|首尔|韩|韓),🇰🇷
(?i:\bUS[A]?\b|America|United.*?States|美国|[^-]美|波特兰|达拉斯|俄勒冈|凤凰城|费利蒙|硅谷|拉斯维加斯|洛杉矶|圣何塞|圣克拉拉|西雅图|芝加哥),🇺🇸
(Ascension|阿森松),🇦🇨
(?i:\bUAE\b|Dubai|阿联酋|迪拜),🇦🇪
(阿尔巴尼亚|Albania),🇦🇱
(南极|Antarctica),🇦🇶
(Argentina|阿根廷),🇦🇷
(Austria|Vienna|奥地利|维也纳),🇦🇹
(?i:\bAU[S]?\d*\b|Australia|Sydney|澳大利亚|澳洲|悉尼),🇦🇺
(?i:\bAU[S]?\b|Australia|Sydney|澳大利亚|澳洲|悉尼),🇦🇺
(阿塞拜疆|Azerbaijan),🇦🇿
(波黑共和国|波士尼亚与赫塞哥维纳|Bosnia|Herzegovina),🇧🇦
(Belgium|比利时),🇧🇪
(保加利亚|Bulgaria),🇧🇬
(Brazil|Paulo|巴西|圣保罗),🇧🇷
(?i:\bCA[N]?\d*\b|Canada|Toronto|Montreal|Vancouver|加拿大|蒙特利尔|温哥华|楓葉|枫叶),🇨🇦
(?i:\bCA[N]?\b|Canada|Toronto|Montreal|Vancouver|加拿大|蒙特利尔|温哥华|楓葉|枫叶),🇨🇦
(Switzerland|Zurich|瑞士|苏黎世),🇨🇭
(智利|Chile),🇨🇱
(Colombia|哥伦比亚),🇨🇴
(Costa Rica|哥斯达黎加),🇨🇷
(塞浦路斯|Cyprus),🇨🇾
(Czech|捷克),🇨🇿
(?i:\bDE[U]?\d*\b|Germany|法兰克福|德(国|意志)|中德|^德$),🇩🇪
(?i:\bDE[U]?\b|Germany|法兰克福|德(国|意志)|中德|^德$),🇩🇪
(?i:\bD[N]?K\b|Denmark|丹麦),🇩🇰
(爱沙尼亚|Estonia),🇪🇪
(埃及|Egypt),🇪🇬
(?i:\bES[P]?\d*\b|Spain|西班牙),🇪🇸
(?i:\bES[P]?\b|Spain|西班牙),🇪🇸
(Europe|欧洲),🇪🇺
(Finland|Helsinki|芬兰|赫尔辛基),🇫🇮
(?i:\bFR[A]?\d*\b|France|Paris|法国|巴黎),🇫🇷
(?i:\bUK\d*\b|\bGB[R]?\d*\b|England|United.*?Kingdom|London|英国|[^-]英|伦敦),🇬🇧
(?i:\bFR[A]?\b|France|Paris|法国|巴黎),🇫🇷
(?i:\bUK\b|\bGB[R]?\b|England|United.*?Kingdom|London|英国|[^-]英|伦敦),🇬🇧
(希腊|Greece),🇬🇷
(格鲁吉亚|Georgia),🇬🇪
(克罗地亚|Croatia),🇭🇷
@@ -42,10 +41,10 @@
(Ireland|Dublin|爱尔兰|都柏林),🇮🇪
(Israel|以色列),🇮🇱
(马恩岛|Mann),🇮🇲
(?i:\bIN[D]?\d*\b|India|Mumbai|印度|孟买|加尔各答|贾坎德|泰米尔纳德),🇮🇳
(?i:\bIN[D]?\b|India|Mumbai|印度|孟买|加尔各答|贾坎德|泰米尔纳德),🇮🇳
(伊朗|Iran),🇮🇷
(?i:\bIS[L]?\d*\b|Iceland|冰岛),🇮🇸
(Italy|Italia|Milan|意大利|米兰),🇮🇹
(?i:\bIS[L]?\b|Iceland|冰岛),🇮🇸
(Italy|Milan|意大利|米兰),🇮🇹
(约旦|Jordan),🇯🇴
(肯尼亚|Kenya),🇰🇪
(吉尔吉斯斯坦|Kyrgyzstan),🇰🇬
@@ -57,15 +56,14 @@
(拉脱维亚|Latvia),🇱🇻
(Moldova|摩尔多瓦),🇲🇩
(北马其顿|Macedonia),🇲🇰
(缅甸|Myanmar),🇲🇲
(蒙古|Монголулс|Mongolia),🇲🇳
(Macao|澳门|\bCTM\b),🇲🇴
(墨西哥|Mexico),🇲🇽
(Malaysia|马来|MY),🇲🇾
(?i:\bNL[D]?\d*\b|Netherlands|荷兰|阿姆斯特丹),🇳🇱
(?i:\bNL[D]?\b|Netherlands|荷兰|阿姆斯特丹),🇳🇱
(挪威|Norway),🇳🇴
(新西兰|纽西兰|New Zealand),🇳🇿
(?i:\bP[O]?H\d*\b|Philippines|菲律宾),🇵🇭
(Philippines|菲律宾),🇵🇭
(Pakistan|巴基斯坦),🇵🇰
(?i:\bP[O]?L\b|Poland|波兰),🇵🇱
(巴拿马|Panama),🇵🇦
@@ -73,16 +71,15 @@
(葡萄牙|Portugal),🇵🇹
(Romania|罗马尼亚),🇷🇴
(塞尔维亚|Serbia),🇷🇸
(?i:\bRU[S]?\d*\b|Russia|Moscow|Petersburg|Siberia|伯力|莫斯科|圣彼得堡|西伯利亚|新西伯利亚|哈巴罗夫斯克|俄罗斯|[^-]俄),🇷🇺
(?i:\bRU[S]?\b|Russia|Moscow|Petersburg|Siberia|伯力|莫斯科|圣彼得堡|西伯利亚|新西伯利亚|哈巴罗夫斯克|俄罗斯|[^-]俄),🇷🇺
(Arabia|沙特),🇸🇦
(Sweden|瑞典),🇸🇪
(斯洛文尼亚|Slovenia),🇸🇮
(斯洛伐克|Slovensko),🇸🇰
(Thailand|泰国|曼谷),🇹🇭
(突尼斯|Tunisia),🇹🇳
(Turkey|Türkiye|土耳其|伊斯坦布尔),🇹🇷
(Turkey|土耳其|伊斯坦布尔),🇹🇷
(乌拉圭|Uruguay),🇺🇾
(梵蒂冈|Vatican),🇻🇦
(Vietnam|越南),🇻🇳
(Africa|南非),🇿🇦
(Ukraine|UA|乌克兰),🇺🇦

View File

@@ -4,45 +4,52 @@ ARG THREADS="4"
ARG SHA=""
# build minimized
WORKDIR /
WORKDIR /subconverter
COPY . /subconverter/
RUN set -xe && \
apk add --no-cache --virtual .build-tools git g++ build-base linux-headers cmake python3 && \
apk add --no-cache --virtual .build-deps curl-dev rapidjson-dev pcre2-dev yaml-cpp-dev && \
git clone https://github.com/ftk/quickjspp --depth=1 && \
cd quickjspp && \
git submodule update --init && \
cmake -DCMAKE_BUILD_TYPE=Release . && \
make quickjs -j $THREADS && \
install -d /usr/lib/quickjs/ && \
install -m644 quickjs/libquickjs.a /usr/lib/quickjs/ && \
install -d /usr/include/quickjs/ && \
install -m644 quickjs/quickjs.h quickjs/quickjs-libc.h /usr/include/quickjs/ && \
install -m644 quickjspp.hpp /usr/include && \
cd .. && \
git clone https://github.com/PerMalmberg/libcron --depth=1 && \
cd libcron && \
git submodule update --init && \
cmake -DCMAKE_BUILD_TYPE=Release . && \
make libcron -j $THREADS && \
install -m644 libcron/out/Release/liblibcron.a /usr/lib/ && \
install -d /usr/include/libcron/ && \
install -m644 libcron/include/libcron/* /usr/include/libcron/ && \
install -d /usr/include/date/ && \
install -m644 libcron/externals/date/include/date/* /usr/include/date/ && \
cd .. && \
git clone https://github.com/ToruNiina/toml11 --branch="v4.3.0" --depth=1 && \
cd toml11 && \
cmake -DCMAKE_CXX_STANDARD=11 . && \
make install -j $THREADS && \
cd .. && \
git clone https://github.com/asdlokj1qpi233/subconverter --depth=1 && \
cd subconverter && \
[ -n "$SHA" ] && sed -i 's/\(v[0-9]\.[0-9]\.[0-9]\)/\1-'"$SHA"'/' src/version.h;\
apk add --no-cache --virtual .build-tools git g++ build-base linux-headers cmake python3 curl unzip p7zip bash && \
apk add --no-cache --virtual .build-deps curl-dev rapidjson-dev pcre2-dev yaml-cpp-dev lua5.4-dev luajit-dev && \
curl -fsSL https://xmake.io/shget.text | bash && \
# git clone https://github.com/ftk/quickjspp --depth=1 && \
# cd quickjspp && \
# git submodule update --init && \
# cmake -DCMAKE_BUILD_TYPE=Release . && \
# make quickjs -j $THREADS && \
# install -d /usr/lib/quickjs/ && \
# install -m644 quickjs/libquickjs.a /usr/lib/quickjs/ && \
# install -d /usr/include/quickjs/ && \
# install -m644 quickjs/quickjs.h quickjs/quickjs-libc.h /usr/include/quickjs/ && \
# install -m644 quickjspp.hpp /usr/include && \
# cd .. && \
# git clone https://github.com/PerMalmberg/libcron --depth=1 && \
# cd libcron && \
# git submodule update --init && \
# cmake -DCMAKE_BUILD_TYPE=Release . && \
# make libcron -j $THREADS && \
# install -m644 libcron/out/Release/liblibcron.a /usr/lib/ && \
# install -d /usr/include/libcron/ && \
# install -m644 libcron/include/libcron/* /usr/include/libcron/ && \
# install -d /usr/include/date/ && \
# install -m644 libcron/externals/date/include/date/* /usr/include/date/ && \
# cd .. && \
# git clone https://github.com/ToruNiina/toml11 --branch="v3.7.1" --depth=1 && \
# cd toml11 && \
# cmake -DCMAKE_CXX_STANDARD=11 . && \
# make install -j $THREADS && \
# cd .. && \
# git clone https://github.com/tindy2013/subconverter --depth=1 && \
# cd subconverter && \
# [ -n "$SHA" ] && sed -i 's/\(v[0-9]\.[0-9]\.[0-9]\)/\1-'"$SHA"'/' src/version.h;\
# cmake -DCMAKE_BUILD_TYPE=Release -DUSE_EXTERNAL_CURL=ON -DUSE_EXTERNAL_RAPIDJSON=ON -DUSE_EXTERNAL_YAML_CPP=ON . && \
# make -j $THREADS && \
ls -alh && \
source "$HOME/.xmake/profile" && \
xmake f --root --static=false -m release -y -v && \
xmake --root -v subconverter && \
cp "$(find build -type f -name subconverter)" . && \
python3 -m ensurepip && \
python3 -m pip install gitpython && \
python3 scripts/update_rules.py -c scripts/rules_config.conf && \
cmake -DCMAKE_BUILD_TYPE=Release . && \
make -j $THREADS
python3 scripts/update_rules.py -c scripts/rules_config.conf
# build final image
FROM alpine:3.16
@@ -53,10 +60,6 @@ RUN apk add --no-cache --virtual subconverter-deps pcre2 libcurl yaml-cpp
COPY --from=builder /subconverter/subconverter /usr/bin/
COPY --from=builder /subconverter/base /base/
ENV TZ=Africa/Abidjan
RUN ln -sf /usr/share/zoneinfo/$TZ /etc/localtime
RUN echo $TZ > /etc/timezone
# set entry
WORKDIR /base
CMD subconverter

View File

@@ -1,51 +1,60 @@
#!/bin/bash
set -xe
apk add gcc g++ build-base linux-headers cmake make autoconf automake libtool python2 python3
apk add mbedtls-dev mbedtls-static zlib-dev rapidjson-dev zlib-static pcre2-dev
#apk add gcc g++ build-base linux-headers cmake make autoconf automake libtool python2 python3
#apk add mbedtls-dev mbedtls-static zlib-dev rapidjson-dev zlib-static pcre2-dev
#
#git clone https://github.com/curl/curl --depth=1 --branch curl-8_4_0
#cd curl
#cmake -DCURL_USE_MBEDTLS=ON -DHTTP_ONLY=ON -DBUILD_TESTING=OFF -DBUILD_SHARED_LIBS=OFF -DCMAKE_USE_LIBSSH2=OFF -DBUILD_CURL_EXE=OFF . > /dev/null
#make install -j2 > /dev/null
#cd ..
#
#git clone https://github.com/jbeder/yaml-cpp --depth=1
#cd yaml-cpp
#cmake -DCMAKE_BUILD_TYPE=Release -DYAML_CPP_BUILD_TESTS=OFF -DYAML_CPP_BUILD_TOOLS=OFF . > /dev/null
#make install -j3 > /dev/null
#cd ..
#
#git clone https://github.com/ftk/quickjspp --depth=1
#cd quickjspp
#cmake -DCMAKE_BUILD_TYPE=Release .
#make quickjs -j3 > /dev/null
#install -d /usr/lib/quickjs/
#install -m644 quickjs/libquickjs.a /usr/lib/quickjs/
#install -d /usr/include/quickjs/
#install -m644 quickjs/quickjs.h quickjs/quickjs-libc.h /usr/include/quickjs/
#install -m644 quickjspp.hpp /usr/include/
#cd ..
#
#git clone https://github.com/PerMalmberg/libcron --depth=1
#cd libcron
#git submodule update --init
#cmake -DCMAKE_BUILD_TYPE=Release .
#make libcron install -j3
#cd ..
#
#git clone https://github.com/ToruNiina/toml11 --branch="v3.7.1" --depth=1
#cd toml11
#cmake -DCMAKE_CXX_STANDARD=11 .
#make install -j4
#cd ..
#
#export PKG_CONFIG_PATH=/usr/lib64/pkgconfig
#cmake -DCMAKE_BUILD_TYPE=Release .
#make -j3
#rm subconverter
## shellcheck disable=SC2046
#g++ -o base/subconverter $(find CMakeFiles/subconverter.dir/src/ -name "*.o") -static -lpcre2-8 -lyaml-cpp -L/usr/lib64 -lcurl -lmbedtls -lmbedcrypto -lmbedx509 -lz -l:quickjs/libquickjs.a -llibcron -O3 -s
git clone https://github.com/curl/curl --depth=1 --branch curl-8_6_0
cd curl
cmake -DCURL_USE_MBEDTLS=ON -DHTTP_ONLY=ON -DBUILD_TESTING=OFF -DBUILD_SHARED_LIBS=OFF -DCMAKE_USE_LIBSSH2=OFF -DBUILD_CURL_EXE=OFF . > /dev/null
make install -j2 > /dev/null
cd ..
apk add git g++ build-base linux-headers cmake python3 curl unzip p7zip perl pkgconfig
apk add lua5.4-dev luajit-dev zlib-dev zlib-static mbedtls-dev mbedtls-static
curl -fsSL https://xmake.io/shget.text | bash
source "$HOME/.xmake/profile"
git clone https://github.com/jbeder/yaml-cpp --depth=1
cd yaml-cpp
cmake -DCMAKE_BUILD_TYPE=Release -DYAML_CPP_BUILD_TESTS=OFF -DYAML_CPP_BUILD_TOOLS=OFF . > /dev/null
make install -j3 > /dev/null
cd ..
git clone https://github.com/ftk/quickjspp --depth=1
cd quickjspp
cmake -DCMAKE_BUILD_TYPE=Release .
make quickjs -j3 > /dev/null
install -d /usr/lib/quickjs/
install -m644 quickjs/libquickjs.a /usr/lib/quickjs/
install -d /usr/include/quickjs/
install -m644 quickjs/quickjs.h quickjs/quickjs-libc.h /usr/include/quickjs/
install -m644 quickjspp.hpp /usr/include/
cd ..
git clone https://github.com/PerMalmberg/libcron --depth=1
cd libcron
git submodule update --init
cmake -DCMAKE_BUILD_TYPE=Release .
make libcron install -j3
cd ..
git clone https://github.com/ToruNiina/toml11 --branch="v4.3.0" --depth=1
cd toml11
cmake -DCMAKE_CXX_STANDARD=11 .
make install -j4
cd ..
export PKG_CONFIG_PATH=/usr/lib64/pkgconfig
cmake -DCMAKE_BUILD_TYPE=Release .
make -j3
rm subconverter
# shellcheck disable=SC2046
g++ -o base/subconverter $(find CMakeFiles/subconverter.dir/src/ -name "*.o") -static -lpcre2-8 -lyaml-cpp -L/usr/lib64 -lcurl -lmbedtls -lmbedcrypto -lmbedx509 -lz -l:quickjs/libquickjs.a -llibcron -O3 -s
xmake f --root --static=true -m release -y -v
xmake --root -v subconverter
cp "$(find build -name subconverter -type f)" base/subconverter
python3 -m ensurepip
python3 -m pip install gitpython

View File

@@ -1,60 +1,68 @@
#!/bin/bash
set -xe
brew reinstall rapidjson zlib pcre2 pkgconfig
#git clone https://github.com/curl/curl --depth=1 --branch curl-7_88_1
#cd curl
#./buildconf > /dev/null
#./configure --with-ssl=/usr/local/opt/openssl@1.1 --without-mbedtls --disable-ldap --disable-ldaps --disable-rtsp --without-libidn2 > /dev/null
#cmake -DCMAKE_USE_SECTRANSP=ON -DHTTP_ONLY=ON -DBUILD_TESTING=OFF -DBUILD_SHARED_LIBS=OFF -DCMAKE_USE_LIBSSH2=OFF . > /dev/null
#make -j8 > /dev/null
#brew reinstall rapidjson zlib pcre2 pkgconfig
#
##git clone https://github.com/curl/curl --depth=1 --branch curl-7_88_1
##cd curl
##./buildconf > /dev/null
##./configure --with-ssl=/usr/local/opt/openssl@1.1 --without-mbedtls --disable-ldap --disable-ldaps --disable-rtsp --without-libidn2 > /dev/null
##cmake -DCMAKE_USE_SECTRANSP=ON -DHTTP_ONLY=ON -DBUILD_TESTING=OFF -DBUILD_SHARED_LIBS=OFF -DCMAKE_USE_LIBSSH2=OFF . > /dev/null
##make -j8 > /dev/null
##cd ..
#
#git clone https://github.com/jbeder/yaml-cpp --depth=1
#cd yaml-cpp
#cmake -DCMAKE_BUILD_TYPE=Release -DYAML_CPP_BUILD_TESTS=OFF -DYAML_CPP_BUILD_TOOLS=OFF . > /dev/null
#make install -j8 > /dev/null
#cd ..
#
#git clone https://github.com/ftk/quickjspp --depth=1
#cd quickjspp
#cmake -DCMAKE_BUILD_TYPE=Release .
#make quickjs -j8
#install -d /usr/local/lib/quickjs/
#install -m644 quickjs/libquickjs.a /usr/local/lib/quickjs/
#install -d /usr/local/include/quickjs/
#install -m644 quickjs/quickjs.h quickjs/quickjs-libc.h /usr/local/include/quickjs/
#install -m644 quickjspp.hpp /usr/local/include/
#cd ..
#
#git clone https://github.com/PerMalmberg/libcron --depth=1
#cd libcron
#git submodule update --init
#cmake -DCMAKE_BUILD_TYPE=Release .
#make libcron install -j8
#install -m644 libcron/out/Release/liblibcron.a /usr/local/lib/
#install -d /usr/local/include/libcron/
#install -m644 libcron/include/libcron/* /usr/local/include/libcron/
#install -d /usr/local/include/date/
#install -m644 libcron/externals/date/include/date/* /usr/local/include/date/
#cd ..
#
#git clone https://github.com/ToruNiina/toml11 --depth=1
#cd toml11
#cmake -DCMAKE_CXX_STANDARD=11 .
#make install -j4
#cd ..
#
#cp /usr/local/opt/zlib/lib/libz.a .
#cp /usr/local/lib/libpcre2-8.a .
#
#cmake -DCMAKE_BUILD_TYPE=Release .
#make -j8
#rm subconverter
## shellcheck disable=SC2046
#c++ -Xlinker -unexported_symbol -Xlinker "*" -o base/subconverter -framework CoreFoundation -framework Security $(find CMakeFiles/subconverter.dir/src/ -name "*.o") $(find . -name "*.a") -lcurl -O3
git clone https://github.com/jbeder/yaml-cpp --depth=1
cd yaml-cpp
cmake -DCMAKE_BUILD_TYPE=Release -DYAML_CPP_BUILD_TESTS=OFF -DYAML_CPP_BUILD_TOOLS=OFF . > /dev/null
make -j6 > /dev/null
sudo make install > /dev/null
cd ..
brew reinstall xmake rapidjson pkgconfig
git clone https://github.com/ftk/quickjspp --depth=1
cd quickjspp
cmake -DCMAKE_BUILD_TYPE=Release .
make quickjs -j6 > /dev/null
sudo install -d /usr/local/lib/quickjs/
sudo install -m644 quickjs/libquickjs.a /usr/local/lib/quickjs/
sudo install -d /usr/local/include/quickjs/
sudo install -m644 quickjs/quickjs.h quickjs/quickjs-libc.h /usr/local/include/quickjs/
sudo install -m644 quickjspp.hpp /usr/local/include/
cd ..
git clone https://github.com/PerMalmberg/libcron --depth=1
cd libcron
git submodule update --init
cmake -DCMAKE_BUILD_TYPE=Release .
make libcron -j6
sudo install -m644 libcron/out/Release/liblibcron.a /usr/local/lib/
sudo install -d /usr/local/include/libcron/
sudo install -m644 libcron/include/libcron/* /usr/local/include/libcron/
sudo install -d /usr/local/include/date/
sudo install -m644 libcron/externals/date/include/date/* /usr/local/include/date/
cd ..
git clone https://github.com/ToruNiina/toml11 --branch="v4.3.0" --depth=1
cd toml11
cmake -DCMAKE_CXX_STANDARD=11 .
sudo make install -j6 > /dev/null
cd ..
cmake -DCMAKE_BUILD_TYPE=Release .
make -j6
rm subconverter
# shellcheck disable=SC2046
c++ -Xlinker -unexported_symbol -Xlinker "*" -o base/subconverter -framework CoreFoundation -framework Security $(find CMakeFiles/subconverter.dir/src/ -name "*.o") "$(brew --prefix zlib)/lib/libz.a" "$(brew --prefix pcre2)/lib/libpcre2-8.a" $(find . -name "*.a") -lcurl -O3
xmake f --root -m release -y -v
xmake --root -v subconverter
cp "$(find build -name subconverter -type f)" base/subconverter
python -m ensurepip
sudo python -m pip install gitpython
python -m pip install gitpython
python scripts/update_rules.py -c scripts/rules_config.conf
cd base

View File

@@ -1,57 +1,63 @@
#!/bin/bash
set -xe
#
#git clone https://github.com/curl/curl --depth=1 --branch curl-8_4_0
#cd curl
#cmake -DCMAKE_BUILD_TYPE=Release -DCURL_USE_LIBSSH2=OFF -DHTTP_ONLY=ON -DCURL_USE_SCHANNEL=ON -DBUILD_SHARED_LIBS=OFF -DBUILD_CURL_EXE=OFF -DCMAKE_INSTALL_PREFIX="$MINGW_PREFIX" -G "Unix Makefiles" -DHAVE_LIBIDN2=OFF -DCURL_USE_LIBPSL=OFF .
#make install -j4
#cd ..
#
#git clone https://github.com/jbeder/yaml-cpp --depth=1
#cd yaml-cpp
#cmake -DCMAKE_BUILD_TYPE=Release -DYAML_CPP_BUILD_TESTS=OFF -DYAML_CPP_BUILD_TOOLS=OFF -DCMAKE_INSTALL_PREFIX="$MINGW_PREFIX" -G "Unix Makefiles" .
#make install -j4
#cd ..
#
#git clone https://github.com/ftk/quickjspp --depth=1
#cd quickjspp
#patch quickjs/quickjs-libc.c -i ../scripts/patches/0001-quickjs-libc-add-realpath-for-Windows.patch
#cmake -G "Unix Makefiles" -DCMAKE_BUILD_TYPE=Release .
#make quickjs -j4
#install -d "$MINGW_PREFIX/lib/quickjs/"
#install -m644 quickjs/libquickjs.a "$MINGW_PREFIX/lib/quickjs/"
#install -d "$MINGW_PREFIX/include/quickjs"
#install -m644 quickjs/quickjs.h quickjs/quickjs-libc.h "$MINGW_PREFIX/include/quickjs/"
#install -m644 quickjspp.hpp "$MINGW_PREFIX/include/"
#cd ..
#
#git clone https://github.com/PerMalmberg/libcron --depth=1
#cd libcron
#git submodule update --init
#cmake -G "Unix Makefiles" -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX="$MINGW_PREFIX" .
#make libcron install -j4
#cd ..
#
#git clone https://github.com/Tencent/rapidjson --depth=1
#cd rapidjson
#cmake -DRAPIDJSON_BUILD_DOC=OFF -DRAPIDJSON_BUILD_EXAMPLES=OFF -DRAPIDJSON_BUILD_TESTS=OFF -DCMAKE_INSTALL_PREFIX="$MINGW_PREFIX" -G "Unix Makefiles" .
#make install -j4
#cd ..
#
#git clone https://github.com/ToruNiina/toml11 --depth=1
#cd toml11
#cmake -DCMAKE_INSTALL_PREFIX="$MINGW_PREFIX" -G "Unix Makefiles" -DCMAKE_CXX_STANDARD=11 .
#make install -j4
#cd ..
#
#rm -f C:/Strawberry/perl/bin/pkg-config C:/Strawberry/perl/bin/pkg-config.bat
#cmake -DCMAKE_BUILD_TYPE=Release -G "Unix Makefiles" .
#make -j4
#rm subconverter.exe
## shellcheck disable=SC2046
#g++ $(find CMakeFiles/subconverter.dir/src -name "*.obj") curl/lib/libcurl.a -o base/subconverter.exe -static -lbcrypt -lpcre2-8 -l:quickjs/libquickjs.a -llibcron -lyaml-cpp -liphlpapi -lcrypt32 -lws2_32 -lwsock32 -lz -s
git clone https://github.com/curl/curl --depth=1 --branch curl-8_6_0
cd curl
cmake -DCMAKE_BUILD_TYPE=Release -DCURL_USE_LIBSSH2=OFF -DHTTP_ONLY=ON -DCURL_USE_SCHANNEL=ON -DBUILD_SHARED_LIBS=OFF -DBUILD_CURL_EXE=OFF -DCMAKE_INSTALL_PREFIX="$MINGW_PREFIX" -G "Unix Makefiles" -DHAVE_LIBIDN2=OFF -DCURL_USE_LIBPSL=OFF .
make install -j4
cd ..
git clone https://github.com/jbeder/yaml-cpp --depth=1
cd yaml-cpp
cmake -DCMAKE_BUILD_TYPE=Release -DYAML_CPP_BUILD_TESTS=OFF -DYAML_CPP_BUILD_TOOLS=OFF -DCMAKE_INSTALL_PREFIX="$MINGW_PREFIX" -G "Unix Makefiles" .
make install -j4
cd ..
git clone https://github.com/ftk/quickjspp --depth=1
cd quickjspp
patch quickjs/quickjs-libc.c -i ../scripts/patches/0001-quickjs-libc-add-realpath-for-Windows.patch
cmake -G "Unix Makefiles" -DCMAKE_BUILD_TYPE=Release .
make quickjs -j4
install -d "$MINGW_PREFIX/lib/quickjs/"
install -m644 quickjs/libquickjs.a "$MINGW_PREFIX/lib/quickjs/"
install -d "$MINGW_PREFIX/include/quickjs"
install -m644 quickjs/quickjs.h quickjs/quickjs-libc.h "$MINGW_PREFIX/include/quickjs/"
install -m644 quickjspp.hpp "$MINGW_PREFIX/include/"
cd ..
git clone https://github.com/PerMalmberg/libcron --depth=1
cd libcron
git submodule update --init
cmake -G "Unix Makefiles" -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX="$MINGW_PREFIX" .
make libcron install -j4
cd ..
git clone https://github.com/Tencent/rapidjson --depth=1
cd rapidjson
cmake -DRAPIDJSON_BUILD_DOC=OFF -DRAPIDJSON_BUILD_EXAMPLES=OFF -DRAPIDJSON_BUILD_TESTS=OFF -DCMAKE_INSTALL_PREFIX="$MINGW_PREFIX" -G "Unix Makefiles" .
make install -j4
cd ..
git clone https://github.com/ToruNiina/toml11 --branch "v4.3.0" --depth=1
cd toml11
cmake -DCMAKE_INSTALL_PREFIX="$MINGW_PREFIX" -G "Unix Makefiles" -DCMAKE_CXX_STANDARD=11 .
make install -j4
cd ..
xmake f --root --static=true -m release -y -v
xmake --root -v subconverter
cp "$(find build -name subconverter.exe -type f)" base/subconverter.exe
python -m ensurepip
python -m pip install gitpython
python scripts/update_rules.py -c scripts/rules_config.conf
rm -f C:/Strawberry/perl/bin/pkg-config C:/Strawberry/perl/bin/pkg-config.bat
cmake -DCMAKE_BUILD_TYPE=Release -G "Unix Makefiles" .
make -j4
rm subconverter.exe
# shellcheck disable=SC2046
g++ $(find CMakeFiles/subconverter.dir/src -name "*.obj") curl/lib/libcurl.a -o base/subconverter.exe -static -lbcrypt -lpcre2-8 -l:quickjs/libquickjs.a -llibcron -lyaml-cpp -liphlpapi -lcrypt32 -lws2_32 -lwsock32 -lz -s
mv base subconverter
set +xe

View File

@@ -1,7 +1,7 @@
import glob
import os, sys
MAIN_IMAGE_NAME="asdlokj1qpi23/subconverter"
MAIN_IMAGE_NAME="tindy2013/subconverter"
TARGET_TAG="latest" if len(sys.argv) < 2 else sys.argv[1]
args=["docker manifest create {}:{}".format(MAIN_IMAGE_NAME, TARGET_TAG)]

View File

@@ -1,23 +1,23 @@
[ACL4SSR]
name=ACL4SSR
url=https://github.com/ACL4SSR/ACL4SSR
branch=master
checkout=1dc5c92b0c8ceaaecbc66530c309961f53e52c8c
match=Clash/*.list|Clash/Ruleset/**
[ACL4SSR_config]
name=ACL4SSR
url=https://github.com/ACL4SSR/ACL4SSR
branch=master
checkout=1dc5c92b0c8ceaaecbc66530c309961f53e52c8c
match=Clash/config/**
dest=base/config/
keep_tree=false
[DivineEngine]
url=https://github.com/DivineEngine/Profiles
checkout=f4d75f7d48a3f42129e030bef751d4d22bca02da
match=Surge/Ruleset/**
[NobyDa]
url=https://github.com/NobyDa/Script
branch=master
checkout=ae4c12f23de8078e02c373c9969b19af28257fcb
match=Surge/*.list
[lhie1]
url=https://github.com/dler-io/Rules
branch=main
match=Surge/Surge 3/Provider/**

View File

@@ -1,23 +0,0 @@
[ACL4SSR]
name=ACL4SSR
url=https://github.com/ACL4SSR/ACL4SSR
checkout=1dc5c92b0c8ceaaecbc66530c309961f53e52c8c
match=Clash/*.list|Clash/Ruleset/**
[ACL4SSR_config]
name=ACL4SSR
url=https://github.com/ACL4SSR/ACL4SSR
checkout=1dc5c92b0c8ceaaecbc66530c309961f53e52c8c
match=Clash/config/**
dest=base/config/
keep_tree=false
[DivineEngine]
url=https://github.com/DivineEngine/Profiles
checkout=f4d75f7d48a3f42129e030bef751d4d22bca02da
match=Surge/Ruleset/**
[NobyDa]
url=https://github.com/NobyDa/Script
checkout=ae4c12f23de8078e02c373c9969b19af28257fcb
match=Surge/*.list

View File

@@ -22,13 +22,10 @@ def open_repo(path: str):
return None
def update_rules(repo_path: str, save_path: str, matches: list[str], keep_tree: bool):
def update_rules(repo_path, save_path, commit, matches, keep_tree):
os.makedirs(save_path, exist_ok=True)
for pattern in matches:
files = glob.glob(os.path.join(repo_path, pattern), recursive=True)
if len(files) == 0:
logging.warn(f"no files found for pattern {pattern}")
continue
for file in files:
if os.path.isdir(file):
continue
@@ -54,13 +51,12 @@ def main():
for section in config.sections():
repo = config.get(section, "name", fallback=section)
url = config.get(section, "url")
commit = config.get(section, "commit", fallback=None)
branch = config.get(section, "branch", fallback=None)
commit = config.get(section, "checkout")
matches = config.get(section, "match").split("|")
save_path = config.get(section, "dest", fallback=f"base/rules/{repo}")
keep_tree = config.getboolean(section, "keep_tree", fallback=True)
logging.info(f"reading files from url {url}, matches {matches}, save to {save_path} keep_tree {keep_tree}")
logging.info(f"reading files from url {url} with commit {commit} and matches {matches}, save to {save_path} keep_tree {keep_tree}")
repo_path = os.path.join("./tmp/repo/", repo)
@@ -71,21 +67,8 @@ def main():
else:
logging.info(f"repo {repo_path} exists")
try:
if commit is not None:
logging.info(f"checking out to commit {commit}")
r.git.checkout(commit)
elif branch is not None:
logging.info(f"checking out to branch {branch}")
r.git.checkout(branch)
else:
logging.info(f"checking out to default branch")
r.active_branch.checkout()
except Exception as e:
logging.error(f"checkout failed {e}")
continue
update_rules(repo_path, save_path, matches, keep_tree)
r.git.checkout(commit)
update_rules(repo_path, save_path, commit, matches, keep_tree)
shutil.rmtree("./tmp", ignore_errors=True)

View File

@@ -17,9 +17,9 @@ namespace toml
static ProxyGroupConfig from_toml(const value& v)
{
ProxyGroupConfig conf;
conf.Name = find<String>(v, "name");
String type = find<String>(v, "type");
String strategy = find_or<String>(v, "strategy", "");
conf.Name = toml::find<String>(v, "name");
String type = toml::find<String>(v, "type");
String strategy = toml::find_or<String>(v, "strategy", "");
switch(hash_(type))
{
case "select"_hash:
@@ -27,18 +27,18 @@ namespace toml
break;
case "url-test"_hash:
conf.Type = ProxyGroupType::URLTest;
conf.Url = find<String>(v, "url");
conf.Interval = find<Integer>(v, "interval");
conf.Tolerance = find_or<Integer>(v, "tolerance", 0);
conf.Url = toml::find<String>(v, "url");
conf.Interval = toml::find<Integer>(v, "interval");
conf.Tolerance = toml::find_or<Integer>(v, "tolerance", 0);
if(v.contains("lazy"))
conf.Lazy = find_or<bool>(v, "lazy", false);
conf.Lazy = toml::find_or<bool>(v, "lazy", false);
if(v.contains("evaluate-before-use"))
conf.EvaluateBeforeUse = find_or(v, "evaluate-before-use", conf.EvaluateBeforeUse.get());
conf.EvaluateBeforeUse = toml::find_or(v, "evaluate-before-use", conf.EvaluateBeforeUse.get());
break;
case "load-balance"_hash:
conf.Type = ProxyGroupType::LoadBalance;
conf.Url = find<String>(v, "url");
conf.Interval = find<Integer>(v, "interval");
conf.Url = toml::find<String>(v, "url");
conf.Interval = toml::find<Integer>(v, "interval");
switch(hash_(strategy))
{
case "consistent-hashing"_hash:
@@ -49,14 +49,14 @@ namespace toml
break;
}
if(v.contains("persistent"))
conf.Persistent = find_or(v, "persistent", conf.Persistent.get());
conf.Persistent = toml::find_or(v, "persistent", conf.Persistent.get());
break;
case "fallback"_hash:
conf.Type = ProxyGroupType::Fallback;
conf.Url = find<String>(v, "url");
conf.Interval = find<Integer>(v, "interval");
conf.Url = toml::find<String>(v, "url");
conf.Interval = toml::find<Integer>(v, "interval");
if(v.contains("evaluate-before-use"))
conf.EvaluateBeforeUse = find_or(v, "evaluate-before-use", conf.EvaluateBeforeUse.get());
conf.EvaluateBeforeUse = toml::find_or(v, "evaluate-before-use", conf.EvaluateBeforeUse.get());
break;
case "relay"_hash:
conf.Type = ProxyGroupType::Relay;
@@ -64,26 +64,16 @@ namespace toml
case "ssid"_hash:
conf.Type = ProxyGroupType::SSID;
break;
case "smart"_hash:
conf.Type = ProxyGroupType::Smart;
conf.Url = find<String>(v, "url");
conf.Interval = find<Integer>(v, "interval");
conf.Tolerance = find_or<Integer>(v, "tolerance", 0);
if(v.contains("lazy"))
conf.Lazy = find_or<bool>(v, "lazy", false);
if(v.contains("evaluate-before-use"))
conf.EvaluateBeforeUse = find_or(v, "evaluate-before-use", conf.EvaluateBeforeUse.get());
break;
default:
throw serialization_error(format_error("Proxy Group has unsupported type!", v.at("type").location(), "should be one of following: select, url-test, load-balance, fallback, relay, ssid"), v.at("type").location());
throw toml::syntax_error("Proxy Group has incorrect type, should be one of following:\n select, url-test, load-balance, fallback, relay, ssid", v.at("type").location());
}
conf.Timeout = find_or(v, "timeout", 5);
conf.Proxies = find_or<StrArray>(v, "rule", {});
conf.UsingProvider = find_or<StrArray>(v, "use", {});
conf.Timeout = toml::find_or(v, "timeout", 5);
conf.Proxies = toml::find_or<StrArray>(v, "rule", {});
conf.UsingProvider = toml::find_or<StrArray>(v, "use", {});
if(conf.Proxies.empty() && conf.UsingProvider.empty())
throw serialization_error(format_error("Proxy Group must contains at least one of proxy match rule or provider!", v.location(), "here"), v.location());
throw toml::syntax_error("Proxy Group must contains at least one of proxy match rule or provider", v.location());
if(v.contains("disable-udp"))
conf.DisableUdp = find_or(v, "disable-udp", conf.DisableUdp.get());
conf.DisableUdp = toml::find_or(v, "disable-udp", conf.DisableUdp.get());
return conf;
}
};
@@ -94,8 +84,8 @@ namespace toml
static RulesetConfig from_toml(const value& v)
{
RulesetConfig conf;
conf.Group = find<String>(v, "group");
String type = find_or<String>(v, "type", "surge-ruleset");
conf.Group = toml::find<String>(v, "group");
String type = toml::find_or<String>(v, "type", "surge-ruleset");
switch(hash_(type))
{
/*
@@ -132,10 +122,10 @@ namespace toml
conf.Url = type + ":";
break;
default:
throw serialization_error(format_error("Ruleset has unsupported type!", v.at("type").location(), "should be one of following: surge-ruleset, quantumultx, clash-domain, clash-ipcidr, clash-classic"), v.at("type").location());
throw toml::syntax_error("Ruleset has incorrect type, should be one of following:\n surge-ruleset, quantumultx, clash-domain, clash-ipcidr, clash-classic", v.at("type").location());
}
conf.Url += find<String>(v, "ruleset");
conf.Interval = find_or<Integer>(v, "interval", 86400);
conf.Url += toml::find<String>(v, "ruleset");
conf.Interval = toml::find_or<Integer>(v, "interval", 86400);
return conf;
}
};
@@ -148,14 +138,14 @@ namespace toml
RegexMatchConfig conf;
if(v.contains("script"))
{
conf.Script = find<String>(v, "script");
conf.Script = toml::find<String>(v, "script");
return conf;
}
conf.Match = find<String>(v, "match");
conf.Match = toml::find<String>(v, "match");
if(v.contains("emoji"))
conf.Replace = find<String>(v, "emoji");
conf.Replace = toml::find<String>(v, "emoji");
else
conf.Replace = find<String>(v, "replace");
conf.Replace = toml::find<String>(v, "replace");
return conf;
}
};
@@ -166,10 +156,10 @@ namespace toml
static CronTaskConfig from_toml(const value& v)
{
CronTaskConfig conf;
conf.Name = find<String>(v, "name");
conf.CronExp = find<String>(v, "cronexp");
conf.Path = find<String>(v, "path");
conf.Timeout = find_or<Integer>(v, "timeout", 0);
conf.Name = toml::find<String>(v, "name");
conf.CronExp = toml::find<String>(v, "cronexp");
conf.Path = toml::find<String>(v, "path");
conf.Timeout = toml::find_or<Integer>(v, "timeout", 0);
return conf;
}
};
@@ -230,9 +220,6 @@ namespace INIBinding
case "ssid"_hash:
conf.Type = ProxyGroupType::SSID;
break;
case "smart"_hash:
conf.Type = ProxyGroupType::Smart;
break;
default:
continue;
}
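
The hunks above toggle between unqualified find/find_or calls and the explicitly qualified toml::find/toml::find_or forms, and swap serialization_error/format_error for toml::syntax_error. A minimal sketch of the toml11 binding pattern involved, using a hypothetical GroupSketch struct and only the documented toml::find/toml::find_or API:

#include <string>
#include <toml.hpp>

// Hypothetical struct; only the find/find_or usage mirrors the binding code above.
struct GroupSketch
{
    std::string name;
    std::string url;
    int interval = 300;

    static GroupSketch from_toml(const toml::value &v)
    {
        GroupSketch g;
        g.name     = toml::find<std::string>(v, "name");         // required key, throws if absent
        g.url      = toml::find<std::string>(v, "url");
        g.interval = toml::find_or<int>(v, "interval", 300);     // optional key with a default
        return g;
    }
};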

View File

@@ -3,18 +3,17 @@
#include "def.h"
enum class ProxyGroupType
enum ProxyGroupType
{
Select,
URLTest,
Fallback,
LoadBalance,
Relay,
SSID,
Smart
SSID
};
enum class BalanceStrategy
enum BalanceStrategy
{
ConsistentHashing,
RoundRobin
@@ -46,7 +45,6 @@ struct ProxyGroupConfig
case ProxyGroupType::Fallback: return "fallback";
case ProxyGroupType::Relay: return "relay";
case ProxyGroupType::SSID: return "ssid";
case ProxyGroupType::Smart: return "smart";
}
return "";
}
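
The header above switches between plain enum and enum class for ProxyGroupType and BalanceStrategy. A small sketch of the scoped form, assuming only the enumerator names shown: scoped enumerators must be qualified and do not convert implicitly to int, which is why the switch below spells out the type name for every case.

enum class ProxyGroupTypeSketch   // illustrative copy; the real enum lives in the header above
{
    Select,
    URLTest,
    Fallback
};

inline const char* typeName(ProxyGroupTypeSketch t)
{
    switch (t)
    {
    case ProxyGroupTypeSketch::Select:   return "select";    // qualified names are mandatory for enum class
    case ProxyGroupTypeSketch::URLTest:  return "url-test";
    case ProxyGroupTypeSketch::Fallback: return "fallback";
    }
    return "";
}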

View File

@@ -3,7 +3,7 @@
#include "def.h"
enum class RulesetType
enum RulesetType
{
SurgeRuleset,
QuantumultX,

View File

@@ -1,6 +1,5 @@
#include <string>
#include <vector>
#include <iostream>
#include <algorithm>
#include "handler/settings.h"
@@ -18,10 +17,6 @@
#include "nodemanip.h"
#include "subexport.h"
extern Settings global;
bool applyMatcher(const std::string &rule, std::string &real_rule, const Proxy &node);
int explodeConf(const std::string &filepath, std::vector<Proxy> &nodes)
{
return explodeConfContent(fileGet(filepath), nodes);
@@ -51,13 +46,14 @@ int addNodes(std::string link, std::vector<Proxy> &allNodes, int groupID, parse_
link = replaceAllDistinct(link, "\"", "");
/// script:filepath,arg1,arg2,...
#ifndef NO_JS_RUNTIME
if(authorized) script_safe_runner(parse_set.js_runtime, parse_set.js_context, [&](qjs::Context &ctx)
{
if(startsWith(link, "script:")) /// process subscription with script
{
writeLog(0, "Found script link. Start running...", LOG_LEVEL_INFO);
string_array args = split(link.substr(7), ",");
if(args.size() >= 1)
if(!args.empty())
{
std::string script = fileGet(args[0], false);
try
@@ -82,7 +78,7 @@ int addNodes(std::string link, std::vector<Proxy> &allNodes, int groupID, parse_
}
}
}
catch(qjs::exception)
catch(qjs::exception&)
{
script_print_stack(ctx);
}
@@ -104,12 +100,13 @@ int addNodes(std::string link, std::vector<Proxy> &allNodes, int groupID, parse_
duk_pop(ctx); /// pop err
}
*/
#endif
/// tag:group_name,link
if(startsWith(link, "tag:"))
{
string_size pos = link.find(",");
if(pos != link.npos)
string_size pos = link.find(',');
if(pos != std::string::npos)
{
custom_group = link.substr(4, pos - 4);
link.erase(0, pos + 1);
@@ -178,7 +175,7 @@ int addNodes(std::string link, std::vector<Proxy> &allNodes, int groupID, parse_
for(Proxy &x : nodes)
{
x.GroupId = groupID;
if(custom_group.size())
if(!custom_group.empty())
x.Group = custom_group;
}
copyNodes(nodes, allNodes);
@@ -232,7 +229,7 @@ int addNodes(std::string link, std::vector<Proxy> &allNodes, int groupID, parse_
bool chkIgnore(const Proxy &node, string_array &exclude_remarks, string_array &include_remarks)
{
bool excluded = false, included = false;
bool excluded, included;
//std::string remarks = UTF8ToACP(node.remarks);
//std::string remarks = node.remarks;
//writeLog(LOG_TYPE_INFO, "Comparing exclude remarks...");
@@ -247,7 +244,7 @@ bool chkIgnore(const Proxy &node, string_array &exclude_remarks, string_array &i
else
return false;
});
if(include_remarks.size() != 0)
if(!include_remarks.empty())
{
//writeLog(LOG_TYPE_INFO, "Comparing include remarks...");
included = std::any_of(include_remarks.cbegin(), include_remarks.cend(), [&node](const auto &x)
@@ -273,7 +270,7 @@ bool chkIgnore(const Proxy &node, string_array &exclude_remarks, string_array &i
void filterNodes(std::vector<Proxy> &nodes, string_array &exclude_remarks, string_array &include_remarks, int groupID)
{
int node_index = 0;
std::vector<Proxy>::iterator iter = nodes.begin();
auto iter = nodes.begin();
while(iter != nodes.end())
{
if(chkIgnore(*iter, exclude_remarks, include_remarks))
@@ -380,6 +377,7 @@ void nodeRename(Proxy &node, const RegexMatchConfigs &rename_array, extra_settin
for(const RegexMatchConfig &x : rename_array)
{
#ifndef NO_JS_RUNTIME
if(!x.Script.empty() && ext.authorized)
{
script_safe_runner(ext.js_runtime, ext.js_context, [&](qjs::Context &ctx)
@@ -395,19 +393,19 @@ void nodeRename(Proxy &node, const RegexMatchConfigs &rename_array, extra_settin
if(!returned_remark.empty())
remark = returned_remark;
}
catch (qjs::exception)
catch (qjs::exception&)
{
script_print_stack(ctx);
}
}, global.scriptCleanContext);
continue;
}
if(applyMatcher(x.Match, real_rule, node) && real_rule.size())
#endif
if(applyMatcher(x.Match, real_rule, node) && !real_rule.empty())
remark = regReplace(remark, real_rule, x.Replace);
}
if(remark.empty())
remark = original_remark;
return;
}
std::string removeEmoji(const std::string &orig_remark)
@@ -432,6 +430,7 @@ std::string addEmoji(const Proxy &node, const RegexMatchConfigs &emoji_array, ex
for(const RegexMatchConfig &x : emoji_array)
{
#ifndef NO_JS_RUNTIME
if(!x.Script.empty() && ext.authorized)
{
std::string result;
@@ -448,7 +447,7 @@ std::string addEmoji(const Proxy &node, const RegexMatchConfigs &emoji_array, ex
if(!ret.empty())
result = ret + " " + node.Remark;
}
catch (qjs::exception)
catch (qjs::exception&)
{
script_print_stack(ctx);
}
@@ -457,9 +456,10 @@ std::string addEmoji(const Proxy &node, const RegexMatchConfigs &emoji_array, ex
return result;
continue;
}
#endif
if(x.Replace.empty())
continue;
if(applyMatcher(x.Match, real_rule, node) && real_rule.size() && regFind(node.Remark, real_rule))
if(applyMatcher(x.Match, real_rule, node) && !real_rule.empty() && regFind(node.Remark, real_rule))
return x.Replace + " " + node.Remark;
}
return node.Remark;
@@ -481,7 +481,8 @@ void preprocessNodes(std::vector<Proxy> &nodes, extra_settings &ext)
if(ext.sort_flag)
{
bool failed = true;
if(ext.sort_script.size() && ext.authorized)
#ifndef NO_JS_RUNTIME
if(!ext.sort_script.empty() && ext.authorized)
{
std::string script = ext.sort_script;
if(startsWith(script, "path:"))
@@ -503,12 +504,13 @@ void preprocessNodes(std::vector<Proxy> &nodes, extra_settings &ext)
std::stable_sort(nodes.begin(), nodes.end(), comparer);
failed = false;
}
catch(qjs::exception)
catch(qjs::exception&)
{
script_print_stack(ctx);
}
}, global.scriptCleanContext);
}
#endif
if(failed) std::stable_sort(nodes.begin(), nodes.end(), [](const Proxy &a, const Proxy &b)
{
return a.Remark < b.Remark;

File diff suppressed because it is too large

View File

@@ -34,13 +34,11 @@ struct extra_settings
std::string quanx_dev_id;
tribool udp = tribool();
tribool tfo = tribool();
tribool xudp = tribool();
tribool skip_cert_verify = tribool();
tribool tls13 = tribool();
bool clash_classical_ruleset = false;
std::string sort_script;
std::string clash_proxies_style = "flow";
std::string clash_proxy_groups_style = "flow";
bool authorized = false;
extra_settings() = default;
@@ -73,5 +71,5 @@ std::string proxyToQuan(std::vector<Proxy> &nodes, const std::string &base_conf,
void proxyToQuan(std::vector<Proxy> &nodes, INIReader &ini, std::vector<RulesetContent> &ruleset_content_array, const ProxyGroupConfigs &extra_proxy_group, extra_settings &ext);
std::string proxyToSSD(std::vector<Proxy> &nodes, std::string &group, std::string &userinfo, extra_settings &ext);
std::string proxyToSingBox(std::vector<Proxy> &nodes, const std::string &base_conf, std::vector<RulesetContent> &ruleset_content_array, const ProxyGroupConfigs &extra_proxy_group, extra_settings &ext);
void replaceAll(std::string& input, const std::string& search, const std::string& replace);
#endif // SUBEXPORT_H_INCLUDED
#endif // SUBEXPORT_H_INCLUDED

View File

@@ -357,10 +357,10 @@ int renderClashScript(YAML::Node &base_rule, std::vector<RulesetContent> &rulese
if(x.rule_type == RULESET_CLASH_IPCIDR || x.rule_type == RULESET_CLASH_DOMAIN || x.rule_type == RULESET_CLASH_CLASSICAL)
{
//rule_name = std::to_string(hash_(rule_group + rule_path));
rule_name = old_rule_name = urlDecode(findFileName(rule_path));
rule_name = old_rule_name = findFileName(rule_path);
int idx = 2;
while(std::find(groups.begin(), groups.end(), rule_name) != groups.end())
rule_name = old_rule_name + " " + std::to_string(idx++);
rule_name = old_rule_name + "_" + std::to_string(idx++);
names[rule_name] = rule_group;
urls[rule_name] = "*" + rule_path;
rule_type[rule_name] = x.rule_type;
@@ -386,10 +386,10 @@ int renderClashScript(YAML::Node &base_rule, std::vector<RulesetContent> &rulese
if(fileExist(rule_path, true) || isLink(rule_path))
{
//rule_name = std::to_string(hash_(rule_group + rule_path));
rule_name = old_rule_name = urlDecode(findFileName(rule_path));
rule_name = old_rule_name = findFileName(rule_path);
int idx = 2;
while(std::find(groups.begin(), groups.end(), rule_name) != groups.end())
rule_name = old_rule_name + " " + std::to_string(idx++);
rule_name = old_rule_name + "_" + std::to_string(idx++);
names[rule_name] = rule_group;
urls[rule_name] = rule_path_typed;
rule_type[rule_name] = x.rule_type;
@@ -436,9 +436,9 @@ int renderClashScript(YAML::Node &base_rule, std::vector<RulesetContent> &rulese
if(vArray.size() < 2)
continue;
if(keywords.find(rule_name) == keywords.end())
keywords[rule_name] = "\"" + trim(vArray[1]) + "\"";
keywords[rule_name] = "\"" + vArray[1] + "\"";
else
keywords[rule_name] += ",\"" + trim(vArray[1]) + "\"";
keywords[rule_name] += ",\"" + vArray[1] + "\"";
}
else
{
@@ -449,7 +449,7 @@ int renderClashScript(YAML::Node &base_rule, std::vector<RulesetContent> &rulese
}
else
{
strLine = vArray[0] + "," + trim(vArray[1]) + "," + rule_group;
strLine = vArray[0] + "," + vArray[1] + "," + rule_group;
if(vArray.size() > 2)
strLine += "," + vArray[2];
}
@@ -466,16 +466,14 @@ int renderClashScript(YAML::Node &base_rule, std::vector<RulesetContent> &rulese
}
}
if(has_domain[rule_name] && !script)
rules.emplace_back("RULE-SET," + rule_name + " (Domain)," + rule_group);
rules.emplace_back("RULE-SET," + rule_name + "_domain," + rule_group);
if(has_ipcidr[rule_name] && !script)
{
if(has_no_resolve)
rules.emplace_back("RULE-SET," + rule_name + " (IP-CIDR)," + rule_group + ",no-resolve");
rules.emplace_back("RULE-SET," + rule_name + "_ipcidr," + rule_group + ",no-resolve");
else
rules.emplace_back("RULE-SET," + rule_name + " (IP-CIDR)," + rule_group);
rules.emplace_back("RULE-SET," + rule_name + "_ipcidr," + rule_group);
}
if(!has_domain[rule_name] && !has_ipcidr[rule_name] && !script)
rules.emplace_back("RULE-SET," + rule_name + "," + rule_group);
if(std::find(groups.begin(), groups.end(), rule_name) == groups.end())
groups.emplace_back(rule_name);
}
@@ -490,14 +488,14 @@ int renderClashScript(YAML::Node &base_rule, std::vector<RulesetContent> &rulese
{
std::string yaml_key = x;
if(rule_type[x] != RULESET_CLASH_DOMAIN)
yaml_key += " (Domain)";
yaml_key += "_domain";
base_rule["rule-providers"][yaml_key]["type"] = "http";
base_rule["rule-providers"][yaml_key]["behavior"] = "domain";
if(url[0] == '*')
base_rule["rule-providers"][yaml_key]["url"] = url.substr(1);
else
base_rule["rule-providers"][yaml_key]["url"] = remote_path_prefix + "/getruleset?type=3&url=" + urlSafeBase64Encode(url);
base_rule["rule-providers"][yaml_key]["path"] = "./providers/" + std::to_string(hash_(url)) + "_domain.yaml";
base_rule["rule-providers"][yaml_key]["path"] = "./providers/rule-provider_" + yaml_key + ".yaml";
if(interval)
base_rule["rule-providers"][yaml_key]["interval"] = interval;
}
@@ -505,14 +503,14 @@ int renderClashScript(YAML::Node &base_rule, std::vector<RulesetContent> &rulese
{
std::string yaml_key = x;
if(rule_type[x] != RULESET_CLASH_IPCIDR)
yaml_key += " (IP-CIDR)";
yaml_key += "_ipcidr";
base_rule["rule-providers"][yaml_key]["type"] = "http";
base_rule["rule-providers"][yaml_key]["behavior"] = "ipcidr";
if(url[0] == '*')
base_rule["rule-providers"][yaml_key]["url"] = url.substr(1);
else
base_rule["rule-providers"][yaml_key]["url"] = remote_path_prefix + "/getruleset?type=4&url=" + urlSafeBase64Encode(url);
base_rule["rule-providers"][yaml_key]["path"] = "./providers/" + std::to_string(hash_(url)) + "_ipcidr.yaml";
base_rule["rule-providers"][yaml_key]["path"] = "./providers/rule-provider_" + yaml_key + ".yaml";
if(interval)
base_rule["rule-providers"][yaml_key]["interval"] = interval;
}
@@ -525,7 +523,7 @@ int renderClashScript(YAML::Node &base_rule, std::vector<RulesetContent> &rulese
base_rule["rule-providers"][yaml_key]["url"] = url.substr(1);
else
base_rule["rule-providers"][yaml_key]["url"] = remote_path_prefix + "/getruleset?type=6&url=" + urlSafeBase64Encode(url);
base_rule["rule-providers"][yaml_key]["path"] = "./providers/" + std::to_string(hash_(url)) + ".yaml";
base_rule["rule-providers"][yaml_key]["path"] = "./providers/rule-provider_" + yaml_key + ".yaml";
if(interval)
base_rule["rule-providers"][yaml_key]["interval"] = interval;
}
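
The hunks above only disagree on how the rule-provider key and on-disk path are derived (the "_domain"/"_ipcidr" suffix versus " (Domain)"/" (IP-CIDR)", and a hash-based versus key-based provider path). A minimal yaml-cpp sketch of the emission pattern itself, with an illustrative key and a hypothetical URL:

#include <yaml-cpp/yaml.h>
#include <iostream>
#include <string>

int main()
{
    YAML::Node base_rule;
    std::string yaml_key = "ACL4SSR_domain";                       // illustrative provider key
    std::string url = "https://example.com/ruleset.list";          // hypothetical ruleset URL

    base_rule["rule-providers"][yaml_key]["type"] = "http";
    base_rule["rule-providers"][yaml_key]["behavior"] = "domain";
    base_rule["rule-providers"][yaml_key]["url"] = url;
    base_rule["rule-providers"][yaml_key]["path"] = "./providers/rule-provider_" + yaml_key + ".yaml";
    base_rule["rule-providers"][yaml_key]["interval"] = 86400;

    std::cout << base_rule << "\n";                                // emits the rule-providers block as YAML
    return 0;
}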

View File

@@ -37,18 +37,20 @@ extern WebServer webServer;
string_array gRegexBlacklist = {"(.*)*"};
std::string parseProxy(const std::string &source) {
std::string parseProxy(const std::string &source)
{
std::string proxy = source;
if (source == "SYSTEM")
if(source == "SYSTEM")
proxy = getSystemProxy();
else if (source == "NONE")
else if(source == "NONE")
proxy = "";
return proxy;
}
extern string_array ClashRuleTypes, SurgeRuleTypes, QuanXRuleTypes;
struct UAProfile {
struct UAProfile
{
std::string head;
std::string version_match;
std::string version_target;
@@ -63,7 +65,6 @@ const std::vector<UAProfile> UAMatchList = {
{"ClashForAndroid","","","clash",false},
{"ClashforWindows","\\/([0-9.]+)","0.11","clash",true},
{"ClashforWindows","","","clash",false},
{"clash-verge","","","clash",true},
{"ClashX Pro","","","clash",true},
{"ClashX","\\/([0-9.]+)","0.13","clash",true},
{"Clash","","","clash",true},
@@ -86,7 +87,8 @@ const std::vector<UAProfile> UAMatchList = {
{"V2RayX","","","v2ray"}
};
bool verGreaterEqual(const std::string &src_ver, const std::string &target_ver) {
bool verGreaterEqual(const std::string& src_ver, const std::string& target_ver)
{
std::istringstream src_stream(src_ver), target_stream(target_ver);
int src_part, target_part;
char dot;
@@ -109,55 +111,61 @@ bool verGreaterEqual(const std::string &src_ver, const std::string &target_ver)
return !bool(target_stream >> target_part);
}
void matchUserAgent(const std::string &user_agent, std::string &target, tribool &clash_new_name, int &surge_ver) {
if (user_agent.empty())
void matchUserAgent(const std::string &user_agent, std::string &target, tribool &clash_new_name, int &surge_ver)
{
if(user_agent.empty())
return;
for (const UAProfile &x: UAMatchList) {
if (startsWith(user_agent, x.head)) {
if (!x.version_match.empty()) {
for(const UAProfile &x : UAMatchList)
{
if(startsWith(user_agent, x.head))
{
if(!x.version_match.empty())
{
std::string version;
if (regGetMatch(user_agent, x.version_match, 2, 0, &version))
if(regGetMatch(user_agent, x.version_match, 2, 0, &version))
continue;
if (!x.version_target.empty() && !verGreaterEqual(version, x.version_target))
if(!x.version_target.empty() && !verGreaterEqual(version, x.version_target))
continue;
}
target = x.target;
clash_new_name = x.clash_new_name;
if (x.surge_ver != -1)
if(x.surge_ver != -1)
surge_ver = x.surge_ver;
return;
}
}
}
std::string getRuleset(RESPONSE_CALLBACK_ARGS) {
std::string getRuleset(RESPONSE_CALLBACK_ARGS)
{
auto &argument = request.argument;
int *status_code = &response.status_code;
/// type: 1 for Surge, 2 for Quantumult X, 3 for Clash domain rule-provider, 4 for Clash ipcidr rule-provider, 5 for Surge DOMAIN-SET, 6 for Clash classical ruleset
std::string url = urlSafeBase64Decode(getUrlArg(argument, "url")), type = getUrlArg(argument,
"type"), group = urlSafeBase64Decode(
getUrlArg(argument, "group"));
std::string url = urlSafeBase64Decode(getUrlArg(argument, "url")), type = getUrlArg(argument, "type"), group = urlSafeBase64Decode(getUrlArg(argument, "group"));
std::string output_content, dummy;
int type_int = to_int(type, 0);
if (url.empty() || type.empty() || (type_int == 2 && group.empty()) || (type_int < 1 || type_int > 6)) {
if(url.empty() || type.empty() || (type_int == 2 && group.empty()) || (type_int < 1 || type_int > 6))
{
*status_code = 400;
return "Invalid request!";
}
std::string proxy = parseProxy(global.proxyRuleset);
string_array vArray = split(url, "|");
for (std::string &x: vArray)
for(std::string &x : vArray)
x.insert(0, "ruleset,");
std::vector<RulesetContent> rca;
RulesetConfigs confs = INIBinding::from<RulesetConfig>::from_ini(vArray);
refreshRulesets(confs, rca);
for (RulesetContent &x: rca) {
for(RulesetContent &x : rca)
{
std::string content = x.rule_content.get();
output_content += convertRuleset(content, x.rule_type);
}
if (output_content.empty()) {
if(output_content.empty())
{
*status_code = 400;
return "Invalid request!";
}
@@ -169,16 +177,18 @@ std::string getRuleset(RESPONSE_CALLBACK_ARGS) {
ss << output_content;
char delimiter = getLineBreak(output_content);
std::string::size_type lineSize, posb, pose;
auto filterLine = [&]() {
auto filterLine = [&]()
{
posb = 0;
pose = strLine.find(',');
if (pose == std::string::npos)
if(pose == std::string::npos)
return 1;
posb = pose + 1;
pose = strLine.find(',', posb);
if (pose == std::string::npos) {
if(pose == std::string::npos)
{
pose = strLine.size();
if (strLine[pose - 1] == '\r')
if(strLine[pose - 1] == '\r')
pose--;
}
pose -= posb;
@@ -189,75 +199,76 @@ std::string getRuleset(RESPONSE_CALLBACK_ARGS) {
output_content.clear();
output_content.reserve(lineSize);
if (type_int == 3 || type_int == 4 || type_int == 6)
if(type_int == 3 || type_int == 4 || type_int == 6)
output_content = "payload:\n";
while (getline(ss, strLine, delimiter)) {
if (strFind(strLine, "//")) {
while(getline(ss, strLine, delimiter))
{
if(strFind(strLine, "//"))
{
strLine.erase(strLine.find("//"));
strLine = trimWhitespace(strLine);
}
switch (type_int) {
case 2:
if (!std::any_of(QuanXRuleTypes.begin(), QuanXRuleTypes.end(),
[&strLine](const std::string &type) { return startsWith(strLine, type); }))
continue;
break;
case 1:
if (!std::any_of(SurgeRuleTypes.begin(), SurgeRuleTypes.end(),
[&strLine](const std::string &type) { return startsWith(strLine, type); }))
continue;
break;
case 3:
if (!startsWith(strLine, "DOMAIN-SUFFIX,") && !startsWith(strLine, "DOMAIN,"))
continue;
if (filterLine())
continue;
output_content += " - '";
if (strLine[posb - 2] == 'X')
output_content += "+.";
output_content += strLine.substr(posb, pose);
output_content += "'\n";
switch(type_int)
{
case 2:
if(!std::any_of(QuanXRuleTypes.begin(), QuanXRuleTypes.end(), [&strLine](const std::string& type){return startsWith(strLine, type);}))
continue;
case 4:
if (!startsWith(strLine, "IP-CIDR,") && !startsWith(strLine, "IP-CIDR6,"))
continue;
if (filterLine())
continue;
output_content += " - '";
output_content += strLine.substr(posb, pose);
output_content += "'\n";
break;
case 1:
if(!std::any_of(SurgeRuleTypes.begin(), SurgeRuleTypes.end(), [&strLine](const std::string& type){return startsWith(strLine, type);}))
continue;
case 5:
if (!startsWith(strLine, "DOMAIN-SUFFIX,") && !startsWith(strLine, "DOMAIN,"))
continue;
if (filterLine())
continue;
if (strLine[posb - 2] == 'X')
output_content += '.';
output_content += strLine.substr(posb, pose);
output_content += '\n';
break;
case 3:
if(!startsWith(strLine, "DOMAIN-SUFFIX,") && !startsWith(strLine, "DOMAIN,"))
continue;
case 6:
if (!std::any_of(ClashRuleTypes.begin(), ClashRuleTypes.end(),
[&strLine](const std::string &type) { return startsWith(strLine, type); }))
continue;
output_content += " - ";
default:
break;
if(filterLine())
continue;
output_content += " - '";
if(strLine[posb - 2] == 'X')
output_content += "+.";
output_content += strLine.substr(posb, pose);
output_content += "'\n";
continue;
case 4:
if(!startsWith(strLine, "IP-CIDR,") && !startsWith(strLine, "IP-CIDR6,"))
continue;
if(filterLine())
continue;
output_content += " - '";
output_content += strLine.substr(posb, pose);
output_content += "'\n";
continue;
case 5:
if(!startsWith(strLine, "DOMAIN-SUFFIX,") && !startsWith(strLine, "DOMAIN,"))
continue;
if(filterLine())
continue;
if(strLine[posb - 2] == 'X')
output_content += '.';
output_content += strLine.substr(posb, pose);
output_content += '\n';
continue;
case 6:
if(!std::any_of(ClashRuleTypes.begin(), ClashRuleTypes.end(), [&strLine](const std::string& type){return startsWith(strLine, type);}))
continue;
output_content += " - ";
default:
break;
}
lineSize = strLine.size();
if (lineSize && strLine[lineSize - 1] == '\r') //remove line break
if(lineSize && strLine[lineSize - 1] == '\r') //remove line break
strLine.erase(--lineSize);
if (!strLine.empty() &&
(strLine[0] != ';' && strLine[0] != '#' && !(lineSize >= 2 && strLine[0] == '/' && strLine[1] == '/'))) {
if (type_int == 2) {
if (startsWith(strLine, "IP-CIDR6"))
if(!strLine.empty() && (strLine[0] != ';' && strLine[0] != '#' && !(lineSize >= 2 && strLine[0] == '/' && strLine[1] == '/')))
{
if(type_int == 2)
{
if(startsWith(strLine, "IP-CIDR6"))
strLine.replace(0, 8, "IP6-CIDR");
strLine += "," + group;
if (count_least(strLine, ',', 3) && regReplace(strLine, rule_match_regex, "$2") == ",no-resolve")
if(count_least(strLine, ',', 3) && regReplace(strLine, rule_match_regex, "$2") == ",no-resolve")
strLine = regReplace(strLine, rule_match_regex, "$1$3$2");
else
strLine = regReplace(strLine, rule_match_regex, "$1$3");
@@ -267,92 +278,73 @@ std::string getRuleset(RESPONSE_CALLBACK_ARGS) {
output_content += '\n';
}
if (output_content == "payload:\n") {
switch (type_int) {
case 3:
output_content += " - '--placeholder--'";
break;
case 4:
output_content += " - '0.0.0.0/32'";
break;
case 6:
output_content += " - 'DOMAIN,--placeholder--'";
break;
if(output_content == "payload:\n")
{
switch(type_int)
{
case 3:
output_content += " - '--placeholder--'";
break;
case 4:
output_content += " - '0.0.0.0/32'";
break;
case 6:
output_content += " - 'DOMAIN,--placeholder--'";
break;
}
}
return output_content;
}
void checkExternalBase(const std::string &path, std::string &dest) {
if (isLink(path) || (startsWith(path, global.basePath) && fileExist(path)))
void checkExternalBase(const std::string &path, std::string &dest)
{
if(isLink(path) || (startsWith(path, global.basePath) && fileExist(path)))
dest = path;
}
std::string subconverter(RESPONSE_CALLBACK_ARGS) {
std::string subconverter(RESPONSE_CALLBACK_ARGS)
{
auto &argument = request.argument;
int *status_code = &response.status_code;
std::string argTarget = getUrlArg(argument, "target"), argSurgeVer = getUrlArg(argument, "ver");
tribool argClashNewField = getUrlArg(argument, "new_name");
int intSurgeVer = !argSurgeVer.empty() ? to_int(argSurgeVer, 3) : 3;
if (argTarget == "auto")
if(argTarget == "auto")
matchUserAgent(request.headers["User-Agent"], argTarget, argClashNewField, intSurgeVer);
/// don't try to load groups or rulesets when generating simple subscriptions
bool lSimpleSubscription = false;
switch (hash_(argTarget)) {
case "ss"_hash:
case "ssd"_hash:
case "ssr"_hash:
case "sssub"_hash:
case "v2ray"_hash:
case "trojan"_hash:
case "mixed"_hash:
lSimpleSubscription = true;
break;
case "clash"_hash:
case "clashr"_hash:
case "surge"_hash:
case "quan"_hash:
case "quanx"_hash:
case "loon"_hash:
case "surfboard"_hash:
case "mellow"_hash:
case "singbox"_hash:
break;
default:
*status_code = 400;
return "Invalid target!";
switch(hash_(argTarget))
{
case "ss"_hash: case "ssd"_hash: case "ssr"_hash: case "sssub"_hash: case "v2ray"_hash: case "trojan"_hash: case "mixed"_hash:
lSimpleSubscription = true;
break;
case "clash"_hash: case "clashr"_hash: case "surge"_hash: case "quan"_hash: case "quanx"_hash: case "loon"_hash: case "surfboard"_hash: case "mellow"_hash: case "singbox"_hash:
break;
default:
*status_code = 400;
return "Invalid target!";
}
//check if we need to read configuration
if (global.reloadConfOnRequest && (!global.APIMode || global.CFWChildProcess) && !global.generatorMode)
if(global.reloadConfOnRequest && (!global.APIMode || global.CFWChildProcess) && !global.generatorMode)
readConf();
/// string values
std::string argUrl = getUrlArg(argument, "url");
std::string argGroupName = getUrlArg(argument, "group"), argUploadPath = getUrlArg(argument, "upload_path");
std::string argIncludeRemark = getUrlArg(argument, "include"), argExcludeRemark = getUrlArg(argument, "exclude");
std::string argCustomGroups = urlSafeBase64Decode(
getUrlArg(argument, "groups")), argCustomRulesets = urlSafeBase64Decode(
getUrlArg(argument, "ruleset")), argExternalConfig = getUrlArg(argument, "config");
std::string argDeviceID = getUrlArg(argument, "dev_id"), argFilename = getUrlArg(argument,
"filename"), argUpdateInterval = getUrlArg(
argument, "interval"), argUpdateStrict = getUrlArg(argument, "strict");
std::string argCustomGroups = urlSafeBase64Decode(getUrlArg(argument, "groups")), argCustomRulesets = urlSafeBase64Decode(getUrlArg(argument, "ruleset")), argExternalConfig = getUrlArg(argument, "config");
std::string argDeviceID = getUrlArg(argument, "dev_id"), argFilename = getUrlArg(argument, "filename"), argUpdateInterval = getUrlArg(argument, "interval"), argUpdateStrict = getUrlArg(argument, "strict");
std::string argRenames = getUrlArg(argument, "rename"), argFilterScript = getUrlArg(argument, "filter_script");
/// switches with default value
tribool argUpload = getUrlArg(argument, "upload"), argEmoji = getUrlArg(argument, "emoji"), argAddEmoji = getUrlArg(
argument, "add_emoji"), argRemoveEmoji = getUrlArg(argument, "remove_emoji");
tribool argAppendType = getUrlArg(argument, "append_type"), argTFO = getUrlArg(argument, "tfo"), argUDP = getUrlArg(
argument, "udp"), argGenNodeList = getUrlArg(argument, "list");
tribool argUpload = getUrlArg(argument, "upload"), argEmoji = getUrlArg(argument, "emoji"), argAddEmoji = getUrlArg(argument, "add_emoji"), argRemoveEmoji = getUrlArg(argument, "remove_emoji");
tribool argAppendType = getUrlArg(argument, "append_type"), argTFO = getUrlArg(argument, "tfo"), argUDP = getUrlArg(argument, "udp"), argGenNodeList = getUrlArg(argument, "list");
tribool argSort = getUrlArg(argument, "sort"), argUseSortScript = getUrlArg(argument, "sort_script");
tribool argGenClashScript = getUrlArg(argument, "script"), argEnableInsert = getUrlArg(argument, "insert");
tribool argSkipCertVerify = getUrlArg(argument, "scv"), argFilterDeprecated = getUrlArg(argument,
"fdn"), argExpandRulesets = getUrlArg(
argument, "expand"), argAppendUserinfo = getUrlArg(argument, "append_info");
tribool argPrependInsert = getUrlArg(argument, "prepend"), argGenClassicalRuleProvider = getUrlArg(argument,
"classic"), argTLS13 = getUrlArg(
argument, "tls13");
tribool argSkipCertVerify = getUrlArg(argument, "scv"), argFilterDeprecated = getUrlArg(argument, "fdn"), argExpandRulesets = getUrlArg(argument, "expand"), argAppendUserinfo = getUrlArg(argument, "append_info");
tribool argPrependInsert = getUrlArg(argument, "prepend"), argGenClassicalRuleProvider = getUrlArg(argument, "classic"), argTLS13 = getUrlArg(argument, "tls13");
std::string base_content, output_content;
ProxyGroupConfigs lCustomProxyGroups = global.customProxyGroups;
@@ -361,15 +353,10 @@ std::string subconverter(RESPONSE_CALLBACK_ARGS) {
std::vector<RulesetContent> lRulesetContent;
extra_settings ext;
std::string subInfo, dummy;
int interval = !argUpdateInterval.empty() ? to_int(argUpdateInterval, global.updateInterval)
: global.updateInterval;
bool authorized =
!global.APIMode || getUrlArg(argument, "token") == global.accessToken, strict = !argUpdateStrict.empty() ?
argUpdateStrict == "true"
: global.updateStrict;
int interval = !argUpdateInterval.empty() ? to_int(argUpdateInterval, global.updateInterval) : global.updateInterval;
bool authorized = !global.APIMode || getUrlArg(argument, "token") == global.accessToken, strict = !argUpdateStrict.empty() ? argUpdateStrict == "true" : global.updateStrict;
if (std::find(gRegexBlacklist.cbegin(), gRegexBlacklist.cend(), argIncludeRemark) != gRegexBlacklist.cend() ||
std::find(gRegexBlacklist.cbegin(), gRegexBlacklist.cend(), argExcludeRemark) != gRegexBlacklist.cend())
if(std::find(gRegexBlacklist.cbegin(), gRegexBlacklist.cend(), argIncludeRemark) != gRegexBlacklist.cend() || std::find(gRegexBlacklist.cbegin(), gRegexBlacklist.cend(), argExcludeRemark) != gRegexBlacklist.cend())
return "Invalid request!";
/// for external configuration
@@ -379,9 +366,10 @@ std::string subconverter(RESPONSE_CALLBACK_ARGS) {
/// validate urls
argEnableInsert.define(global.enableInsert);
if (argUrl.empty() && (!global.APIMode || authorized))
if(argUrl.empty() && (!global.APIMode || authorized))
argUrl = global.defaultUrls;
if ((argUrl.empty() && !(!global.insertUrls.empty() && argEnableInsert)) || argTarget.empty()) {
if((argUrl.empty() && !(!global.insertUrls.empty() && argEnableInsert)) || argTarget.empty())
{
*status_code = 400;
return "Invalid request!";
}
@@ -402,8 +390,9 @@ std::string subconverter(RESPONSE_CALLBACK_ARGS) {
// req_arg_map[x.substr(0, pos)] = x.substr(pos + 1);
// }
string_map req_arg_map;
for (auto &x: argument) {
if (x.first == "token")
for (auto &x : argument)
{
if(x.first == "token")
continue;
req_arg_map[x.first] = x.second;
}
@@ -421,11 +410,10 @@ std::string subconverter(RESPONSE_CALLBACK_ARGS) {
/// check other flags
ext.authorized = authorized;
ext.append_proxy_type = argAppendType.get(global.appendType);
if ((argTarget == "clash" || argTarget == "clashr") && argGenClashScript.is_undef())
if((argTarget == "clash" || argTarget == "clashr") && argGenClashScript.is_undef())
argExpandRulesets.define(true);
ext.clash_proxies_style = global.clashProxiesStyle;
ext.clash_proxy_groups_style = global.clashProxyGroupsStyle;
/// read preference from argument, assign global var if not in argument
ext.tfo.define(argTFO).define(global.TFOFlag);
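
Much of the getRuleset() rework above is re-indentation, but the conversion it performs is easy to lose in the diff: for a Clash domain rule-provider (type 3) each Surge DOMAIN/DOMAIN-SUFFIX rule becomes a quoted payload entry, with DOMAIN-SUFFIX gaining a "+." prefix. A simplified, self-contained sketch of that one branch:

#include <iostream>
#include <string>

// Simplified sketch of the type==3 branch of getRuleset() above.
std::string toClashDomainPayloadLine(const std::string &rule)
{
    std::string::size_type comma = rule.find(',');
    if (comma == std::string::npos)
        return {};
    std::string type  = rule.substr(0, comma);
    std::string value = rule.substr(comma + 1);
    if (type == "DOMAIN-SUFFIX")
        return " - '+." + value + "'\n";   // suffix rules match the domain and all subdomains
    if (type == "DOMAIN")
        return " - '" + value + "'\n";
    return {};
}

int main()
{
    std::cout << "payload:\n"
              << toClashDomainPayloadLine("DOMAIN-SUFFIX,example.com")
              << toClashDomainPayloadLine("DOMAIN,api.example.org");
    return 0;
}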

View File

@@ -1,5 +1,6 @@
#include <string>
#include <mutex>
#include <toml.hpp>
#include "config/binding.h"
#include "handler/webget.h"
@@ -565,13 +566,13 @@ void readYAMLConf(YAML::Node &node)
}
template <class T, class... U>
void find_if_exist(const toml::value &v, const toml::value::key_type &k, T& target, U&&... args)
void find_if_exist(const toml::value &v, const toml::key &k, T& target, U&&... args)
{
if(v.contains(k)) target = toml::find<T>(v, k);
if constexpr (sizeof...(args) > 0) find_if_exist(v, std::forward<U>(args)...);
}
void operate_toml_kv_table(const std::vector<toml::table> &arr, const toml::value::key_type &key_name, const toml::value::key_type &value_name, std::function<void (const toml::value&, const toml::value&)> binary_op)
void operate_toml_kv_table(const std::vector<toml::table> &arr, const toml::key &key_name, const toml::key &value_name, std::function<void (const toml::value&, const toml::value&)> binary_op)
{
for(const toml::table &table : arr)
{
@@ -800,7 +801,7 @@ void readConf()
return readYAMLConf(yaml);
}
toml::value conf = parseToml(prefdata, global.prefPath);
if(!conf.is_empty() && toml::find_or<int>(conf, "version", 0))
if(!conf.is_uninitialized() && toml::find_or<int>(conf, "version", 0))
return readTOMLConf(conf);
}
catch (YAML::Exception &e)
@@ -1209,7 +1210,7 @@ int loadExternalConfig(std::string &path, ExternalConfig &ext)
if(yaml.size() && yaml["custom"].IsDefined())
return loadExternalYAML(yaml, ext);
toml::value conf = parseToml(base_content, path);
if(!conf.is_empty() && toml::find_or<int>(conf, "version", 0))
if(!conf.is_uninitialized() && toml::find_or<int>(conf, "version", 0))
return loadExternalTOML(conf, ext);
}
catch (YAML::Exception &e)
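
The find_if_exist template above changes only the key parameter's type (toml::value::key_type versus toml::key, both std::string in practice). A runnable sketch of how the variadic helper is used, with std::string standing in for the key type and an in-memory table instead of a parsed preference file:

#include <iostream>
#include <string>
#include <toml.hpp>

template <class T, class... U>
void find_if_exist(const toml::value &v, const std::string &k, T &target, U&&... args)
{
    if (v.contains(k)) target = toml::find<T>(v, k);                     // assign only when the key exists
    if constexpr (sizeof...(args) > 0) find_if_exist(v, std::forward<U>(args)...);
}

int main()
{
    toml::value conf = toml::table{{"port", 25500}, {"listen", "0.0.0.0"}};  // illustrative settings
    int port = 0;
    std::string listen = "127.0.0.1", api_mode = "unset";                    // "api_mode" is absent, so it keeps its value
    find_if_exist(conf, "port", port, "listen", listen, "api_mode", api_mode);
    std::cout << listen << ":" << port << " api_mode=" << api_mode << "\n";
    return 0;
}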

View File

@@ -13,7 +13,6 @@
#include "utils/string.h"
#include "utils/stl_extra.h"
#include "utils/tribool.h"
#include <toml.hpp>
struct Settings
{
@@ -49,7 +48,7 @@ struct Settings
tribool UDPFlag, TFOFlag, skipCertVerify, TLS13Flag, enableInsert;
bool enableSort = false, updateStrict = false;
bool clashUseNewField = false, singBoxAddClashModes = true;
std::string clashProxiesStyle = "flow", clashProxyGroupsStyle = "block";
std::string clashProxiesStyle = "flow";
std::string proxyConfig, proxyRuleset, proxySubscription;
int updateInterval = 0;
std::string sortScript, filterScript;
@@ -101,12 +100,7 @@ extern Settings global;
int importItems(string_array &target, bool scope_limit = true);
int loadExternalConfig(std::string &path, ExternalConfig &ext);
//template <class T, class... U>
//void find_if_exist(const toml::value &v, const toml::key &k, T& target, U&&... args)
//{
// if(v.contains(k)) target = toml::find<T>(v, k);
// if constexpr (sizeof...(args) > 0) find_if_exist(v, std::forward<U>(args)...);
//}
template <class... Args>
void parseGroupTimes(const std::string &src, Args... args)
{

View File

@@ -87,13 +87,11 @@ static int logger(CURL *handle, curl_infotype type, char *data, size_t size, voi
switch(type)
{
case CURLINFO_TEXT:
prefix = "CURL_INFO: ";
prefix = "CURL_INFO";
break;
case CURLINFO_HEADER_IN:
prefix = "CURL_HEADER: < ";
break;
case CURLINFO_HEADER_OUT:
prefix = "CURL_HEADER: > ";
prefix = "CURL_HEADER";
break;
case CURLINFO_DATA_IN:
case CURLINFO_DATA_OUT:
@@ -107,6 +105,7 @@ static int logger(CURL *handle, curl_infotype type, char *data, size_t size, voi
for(auto &x : lines)
{
std::string log_content = prefix;
log_content += ": ";
log_content += x;
writeLog(0, log_content, LOG_LEVEL_VERBOSE);
}
@@ -114,6 +113,7 @@ static int logger(CURL *handle, curl_infotype type, char *data, size_t size, voi
else
{
std::string log_content = prefix;
log_content += ": ";
log_content += trimWhitespace(content);
writeLog(0, log_content, LOG_LEVEL_VERBOSE);
}
@@ -172,8 +172,7 @@ static int curlGet(const FetchArgument &argument, FetchResult &result)
{
for(auto &x : *argument.request_headers)
{
auto header = x.first + ": " + x.second;
header_list = curl_slist_append(header_list, header.data());
header_list = curl_slist_append(header_list, (x.first + ": " + x.second).data());
}
if(!argument.request_headers->contains("User-Agent"))
curl_easy_setopt(curl_handle, CURLOPT_USERAGENT, user_agent_str);
@@ -234,7 +233,7 @@ static int curlGet(const FetchArgument &argument, FetchResult &result)
while(true)
{
retVal = curl_easy_perform(curl_handle);
if(retVal == CURLE_OK || max_fails <= fail_count || global.APIMode)
if(retVal == CURLE_OK || max_fails <= fail_count)
break;
else
fail_count++;
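
The header hunk above only changes whether the "Name: value" string is built in a named variable or inline; either is safe because curl_slist_append copies the C string it is given. A small sketch of the pattern, assuming nothing beyond the libcurl slist API:

#include <curl/curl.h>
#include <map>
#include <string>

// Build a libcurl header list from a name -> value map; the caller frees it
// with curl_slist_free_all() after curl_easy_perform().
curl_slist *buildHeaderList(const std::map<std::string, std::string> &headers)
{
    curl_slist *list = nullptr;
    for (const auto &h : headers)
    {
        std::string line = h.first + ": " + h.second;
        list = curl_slist_append(list, line.c_str());   // curl copies the string, so the temporary may die here
    }
    return list;
}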

View File

@@ -233,10 +233,10 @@ int main(int argc, char *argv[])
}
}
std::string type = getUrlArg(request.argument, "type");
if(type == "form" || type == "direct")
{
if(type == "form")
fileWrite(global.prefPath, getFormData(request.postdata), true);
else if(type == "direct")
fileWrite(global.prefPath, request.postdata, true);
}
else
{
response.status_code = 501;

View File

@@ -20,47 +20,36 @@ enum class ProxyType
HTTP,
HTTPS,
SOCKS5,
WireGuard,
VLESS,
Hysteria,
Hysteria2,
TUIC
WireGuard
};
inline String getProxyTypeName(ProxyType type) {
switch (type) {
case ProxyType::Shadowsocks:
return "SS";
case ProxyType::ShadowsocksR:
return "SSR";
case ProxyType::VMess:
return "VMess";
case ProxyType::Trojan:
return "Trojan";
case ProxyType::Snell:
return "Snell";
case ProxyType::HTTP:
return "HTTP";
case ProxyType::HTTPS:
return "HTTPS";
case ProxyType::SOCKS5:
return "SOCKS5";
case ProxyType::WireGuard:
return "WireGuard";
case ProxyType::VLESS:
return "Vless";
case ProxyType::Hysteria:
return "Hysteria";
case ProxyType::Hysteria2:
return "Hysteria2";
case ProxyType::TUIC:
return "Tuic";
default:
return "Unknown";
inline String getProxyTypeName(ProxyType type)
{
switch(type)
{
case ProxyType::Shadowsocks:
return "SS";
case ProxyType::ShadowsocksR:
return "SSR";
case ProxyType::VMess:
return "VMess";
case ProxyType::Trojan:
return "Trojan";
case ProxyType::Snell:
return "Snell";
case ProxyType::HTTP:
return "HTTP";
case ProxyType::HTTPS:
return "HTTPS";
case ProxyType::SOCKS5:
return "SOCKS5";
default:
return "Unknown";
}
}
struct Proxy {
struct Proxy
{
ProxyType Type = ProxyType::Unknown;
uint32_t Id = 0;
uint32_t GroupId = 0;
@@ -68,7 +57,7 @@ struct Proxy {
String Remark;
String Hostname;
uint16_t Port = 0;
String CongestionControl;
String Username;
String Password;
String EncryptMethod;
@@ -82,8 +71,6 @@ struct Proxy {
uint16_t AlterId = 0;
String TransferProtocol;
String FakeType;
String AuthStr;
bool TLSSecure = false;
String Host;
@@ -94,7 +81,6 @@ struct Proxy {
String QUICSecret;
tribool UDP;
tribool XUDP;
tribool TCPFastOpen;
tribool AllowInsecure;
tribool TLS13;
@@ -113,30 +99,6 @@ struct Proxy {
uint16_t KeepAlive = 0;
String TestUrl;
String ClientId;
String Ports;
String Auth;
String Alpn;
String UpMbps;
String DownMbps;
String Insecure;
String Fingerprint;
String OBFSPassword;
String GRPCServiceName;
String GRPCMode;
String ShortId;
String Flow;
bool FlowShow = false;
tribool DisableSni;
uint32_t UpSpeed;
uint32_t DownSpeed;
String SNI;
tribool ReduceRtt;
String UdpRelayMode = "native";
uint16_t RequestTimeout = 15000;
String token;
String UnderlyingProxy;
std::vector<String> AlpnList;
String PacketEncoding;
};
#define SS_DEFAULT_GROUP "SSProvider"
@@ -147,9 +109,5 @@ struct Proxy {
#define TROJAN_DEFAULT_GROUP "TrojanProvider"
#define SNELL_DEFAULT_GROUP "SnellProvider"
#define WG_DEFAULT_GROUP "WireGuardProvider"
#define XRAY_DEFAULT_GROUP "XRayProvider"
#define HYSTERIA_DEFAULT_GROUP "HysteriaProvider"
#define HYSTERIA2_DEFAULT_GROUP "Hysteria2Provider"
#define TUIC_DEFAULT_GROUP "TuicProvider"
#endif // PROXY_H_INCLUDED

File diff suppressed because it is too large

View File

@@ -5,7 +5,8 @@
#include "config/proxy.h"
enum class ConfType {
enum class ConfType
{
Unknow,
SS,
SSR,
@@ -19,115 +20,26 @@ enum class ConfType {
Local
};
void hysteriaConstruct(Proxy &node, const std::string &group, const std::string &remarks, const std::string &add,
const std::string &port, const std::string &type, const std::string &auth,
const std::string &auth_str, const std::string &host, const std::string &up,
const std::string &down, const std::string &alpn, const std::string &obfsParam,
const std::string &insecure, const std::string &ports, const std::string &sni,
tribool udp = tribool(), tribool tfo = tribool(), tribool scv = tribool(),
tribool tls13 = tribool(),const std::string& underlying_proxy="");
void hysteria2Construct(Proxy &node, const std::string &group, const std::string &remarks, const std::string &add,
const std::string &port, const std::string &password, const std::string &host,
const std::string &up, const std::string &down, const std::string &alpn,
const std::string &obfsParam, const std::string &obfsPassword, const std::string &sni,
const std::string &publicKey, const std::string &ports,
tribool udp, tribool tfo,
tribool scv,const std::string& underlying_proxy="");
void vlessConstruct(Proxy &node, const std::string &group, const std::string &remarks, const std::string &add,
const std::string &port, const std::string &type, const std::string &id, const std::string &aid,
const std::string &net, const std::string &cipher, const std::string &flow, const std::string &mode,
const std::string &path, const std::string &host, const std::string &edge, const std::string &tls,
const std::string &pkd, const std::string &sid, const std::string &fp, const std::string &sni,
const std::vector<std::string> &alpnList,const std::string &packet_encoding,
tribool udp = tribool(), tribool tfo = tribool(), tribool scv = tribool(),
tribool tls13 = tribool(),const std::string& underlying_proxy="");
void vmessConstruct(Proxy &node, const std::string &group, const std::string &remarks, const std::string &add,
const std::string &port, const std::string &type, const std::string &id, const std::string &aid,
const std::string &net, const std::string &cipher, const std::string &path, const std::string &host,
const std::string &edge, const std::string &tls, const std::string &sni,
const std::vector<std::string> &alpnList, tribool udp = tribool(),
tribool tfo = tribool(), tribool scv = tribool(), tribool tls13 = tribool(),const std::string& underlying_proxy="");
void ssrConstruct(Proxy &node, const std::string &group, const std::string &remarks, const std::string &server,
const std::string &port, const std::string &protocol, const std::string &method,
const std::string &obfs, const std::string &password, const std::string &obfsparam,
const std::string &protoparam, tribool udp = tribool(), tribool tfo = tribool(),
tribool scv = tribool(),const std::string& underlying_proxy="");
void ssConstruct(Proxy &node, const std::string &group, const std::string &remarks, const std::string &server,
const std::string &port, const std::string &password, const std::string &method,
const std::string &plugin, const std::string &pluginopts, tribool udp = tribool(),
tribool tfo = tribool(), tribool scv = tribool(), tribool tls13 = tribool(),const std::string& underlying_proxy="");
void socksConstruct(Proxy &node, const std::string &group, const std::string &remarks, const std::string &server,
const std::string &port, const std::string &username, const std::string &password,
tribool udp = tribool(), tribool tfo = tribool(), tribool scv = tribool(),const std::string& underlying_proxy="");
void httpConstruct(Proxy &node, const std::string &group, const std::string &remarks, const std::string &server,
const std::string &port, const std::string &username, const std::string &password, bool tls,
tribool tfo = tribool(), tribool scv = tribool(), tribool tls13 = tribool(),const std::string& underlying_proxy="");
void trojanConstruct(Proxy &node, const std::string &group, const std::string &remarks, const std::string &server,
const std::string &port, const std::string &password, const std::string &network,
const std::string &host, const std::string &path, const std::string &fp, const std::string &sni,
const std::vector<std::string> &alpnList,
bool tlssecure, tribool udp = tribool(), tribool tfo = tribool(), tribool scv = tribool(),
tribool tls13 = tribool(),const std::string& underlying_proxy="");
void snellConstruct(Proxy &node, const std::string &group, const std::string &remarks, const std::string &server,
const std::string &port, const std::string &password, const std::string &obfs,
const std::string &host, uint16_t version = 0, tribool udp = tribool(), tribool tfo = tribool(),
tribool scv = tribool(),const std::string& underlying_proxy="");
void tuicConstruct(Proxy &node, const std::string &group, const std::string &remarks, const std::string &add,
const std::string &port, const std::string &password, const std::string &congestion_control,
const std::string &alpn,
const std::string &sni, const std::string &uuid, const std::string &udpRelayMode,
const std::string &token,
tribool udp = tribool(), tribool tfo = tribool(),
tribool scv = tribool(), tribool reduceRtt = tribool(), tribool disableSni = tribool(),
uint16_t request_timeout = 15000,const std::string& underlying_proxy="");
void vmessConstruct(Proxy &node, const std::string &group, const std::string &remarks, const std::string &add, const std::string &port, const std::string &type, const std::string &id, const std::string &aid, const std::string &net, const std::string &cipher, const std::string &path, const std::string &host, const std::string &edge, const std::string &tls, const std::string &sni, tribool udp = tribool(), tribool tfo = tribool(), tribool scv = tribool(), tribool tls13 = tribool());
void ssrConstruct(Proxy &node, const std::string &group, const std::string &remarks, const std::string &server, const std::string &port, const std::string &protocol, const std::string &method, const std::string &obfs, const std::string &password, const std::string &obfsparam, const std::string &protoparam, tribool udp = tribool(), tribool tfo = tribool(), tribool scv = tribool());
void ssConstruct(Proxy &node, const std::string &group, const std::string &remarks, const std::string &server, const std::string &port, const std::string &password, const std::string &method, const std::string &plugin, const std::string &pluginopts, tribool udp = tribool(), tribool tfo = tribool(), tribool scv = tribool(), tribool tls13 = tribool());
void socksConstruct(Proxy &node, const std::string &group, const std::string &remarks, const std::string &server, const std::string &port, const std::string &username, const std::string &password, tribool udp = tribool(), tribool tfo = tribool(), tribool scv = tribool());
void httpConstruct(Proxy &node, const std::string &group, const std::string &remarks, const std::string &server, const std::string &port, const std::string &username, const std::string &password, bool tls, tribool tfo = tribool(), tribool scv = tribool(), tribool tls13 = tribool());
void trojanConstruct(Proxy &node, const std::string &group, const std::string &remarks, const std::string &server, const std::string &port, const std::string &password, const std::string &network, const std::string &host, const std::string &path, bool tlssecure, tribool udp = tribool(), tribool tfo = tribool(), tribool scv = tribool(), tribool tls13 = tribool());
void snellConstruct(Proxy &node, const std::string &group, const std::string &remarks, const std::string &server, const std::string &port, const std::string &password, const std::string &obfs, const std::string &host, uint16_t version = 0, tribool udp = tribool(), tribool tfo = tribool(), tribool scv = tribool());
void explodeVmess(std::string vmess, Proxy &node);
void explodeSSR(std::string ssr, Proxy &node);
void explodeSS(std::string ss, Proxy &node);
void explodeTrojan(std::string trojan, Proxy &node);
void explodeQuan(const std::string &quan, Proxy &node);
void explodeStdVMess(std::string vmess, Proxy &node);
void explodeStdVless(std::string vless, Proxy &node);
void explodeStdHysteria(std::string hysteria, Proxy &node);
void explodeStdHysteria2(std::string hysteria2, Proxy &node);
void explodeShadowrocket(std::string kit, Proxy &node);
void explodeKitsunebi(std::string kit, Proxy &node);
void explodeVless(std::string vless, Proxy &node);
void explodeHysteria(std::string hysteria, Proxy &node);
void explodeHysteria2(std::string hysteria2, Proxy &node);
/// Parse a link
void explode(const std::string &link, Proxy &node);
void explodeSSD(std::string link, std::vector<Proxy> &nodes);
void explodeSub(std::string sub, std::vector<Proxy> &nodes);
int explodeConf(const std::string &filepath, std::vector<Proxy> &nodes);
int explodeConfContent(const std::string &content, std::vector<Proxy> &nodes);
#endif // SUBPARSER_H_INCLUDED

View File

@@ -2,7 +2,6 @@
#include <map>
#include <iostream>
#include <quickjspp.hpp>
#include <utility>
#include <quickjs/quickjs-libc.h>
#ifdef _WIN32
@@ -227,7 +226,7 @@ public:
qjs_fetch_Headers headers;
std::string cookies;
std::string postdata;
explicit qjs_fetch_Request(std::string url) : url(std::move(url)) {}
explicit qjs_fetch_Request(const std::string &url) : url(url) {}
};
class qjs_fetch_Response
@@ -390,7 +389,7 @@ void script_runtime_init(qjs::Runtime &runtime)
js_std_init_handlers(runtime.rt);
}
int ShowMsgbox(const std::string &title, const std::string &content, uint16_t type = 0)
int ShowMsgbox(const std::string &title, std::string content, uint16_t type = 0)
{
#ifdef _WIN32
if(!type)
@@ -425,7 +424,7 @@ struct Lambda {
uint32_t currentTime()
{
return time(nullptr);
return time(NULL);
}
int script_context_init(qjs::Context &context)
@@ -526,7 +525,7 @@ int script_context_init(qjs::Context &context)
)", "<import>", JS_EVAL_TYPE_MODULE);
return 0;
}
catch(qjs::exception&)
catch(qjs::exception)
{
script_print_stack(context);
return 1;
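
The qjs_fetch_Request hunk above toggles between taking the URL by const reference and taking it by value and moving it into the member. A two-line sketch of the by-value-then-move form, which costs one move for rvalue arguments and one copy plus one move for lvalues:

#include <string>
#include <utility>

struct RequestSketch                       // illustrative stand-in for qjs_fetch_Request
{
    std::string url;
    explicit RequestSketch(std::string u) : url(std::move(u)) {}
};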

View File

@@ -47,23 +47,16 @@ static httplib::Server::Handler makeHandler(const responseRoute &rr)
{
continue;
}
req.headers.emplace(h.first.data(), h.second.data());
req.headers[h.first] = h.second;
}
req.argument = request.params;
if (request.method == "POST" || request.method == "PUT" || request.method == "PATCH")
if (request.get_header_value("Content-Type") == "application/x-www-form-urlencoded")
{
if (request.is_multipart_form_data() && !request.files.empty())
{
req.postdata = request.files.begin()->second.content;
}
else if (request.get_header_value("Content-Type") == "application/x-www-form-urlencoded")
{
req.postdata = urlDecode(request.body);
}
else
{
req.postdata = request.body;
}
req.postdata = urlDecode(request.body);
}
else
{
req.postdata = request.body;
}
auto result = rr.rc(req, resp);
response.status = resp.status_code;
@@ -170,7 +163,6 @@ int WebServer::start_web_server_multi(listener_args *args)
{
res.set_header("Access-Control-Allow-Headers", req.get_header_value("Access-Control-Request-Headers"));
}
res.set_header("Access-Control-Allow-Origin", "*");
return httplib::Server::HandlerResponse::Unhandled;
});
for (auto &x : redirect_map)
@@ -195,7 +187,7 @@ int WebServer::start_web_server_multi(listener_args *args)
{
try
{
if (e) std::rethrow_exception(e);
std::rethrow_exception(e);
}
catch (const httplib::Error &err)
{
@@ -220,9 +212,6 @@ int WebServer::start_web_server_multi(listener_args *args)
{
server.set_mount_point("/", serve_file_root);
}
server.new_task_queue = [args] {
return new httplib::ThreadPool(args->max_workers);
};
server.bind_to_port(args->listen_address, args->port, 0);
std::thread thread([&]()

View File

@@ -26,8 +26,7 @@ std::string getTime(int type)
format = "%Y%m%d-%H%M%S";
break;
case 2:
format = "%Y/%m/%d %a %H:%M:%S.";
format += cMillis;
format = "%Y/%m/%d %a %H:%M:%S." + std::string(cMillis);
break;
case 3:
default:

View File

@@ -9,12 +9,7 @@ struct strICaseComp
{
bool operator() (const std::string &lhs, const std::string &rhs) const
{
return std::lexicographical_compare(lhs.begin(), lhs.end(), rhs.begin(),
rhs.end(),
[](unsigned char c1, unsigned char c2)
{
return ::tolower(c1) < ::tolower(c2);
});
return strcasecmp(lhs.c_str(), rhs.c_str());
}
};
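
Editor's note: both forms of strICaseComp aim at case-insensitive std::map key lookup. One detail worth noting: strcasecmp returns a negative/zero/positive three-way result, so it needs a "< 0" (or the per-character tolower comparison shown above) to yield the strict weak ordering std::map requires; converting the raw return value to bool is true for any unequal pair. A portable, self-contained sketch of the tolower-based form:

#include <algorithm>
#include <cctype>
#include <iostream>
#include <map>
#include <string>

struct strICaseComp {
    bool operator()(const std::string &lhs, const std::string &rhs) const {
        // Compare character by character, folding both sides to lower case.
        return std::lexicographical_compare(
            lhs.begin(), lhs.end(), rhs.begin(), rhs.end(),
            [](unsigned char a, unsigned char b) { return std::tolower(a) < std::tolower(b); });
    }
};

int main() {
    std::map<std::string, std::string, strICaseComp> headers;
    headers["Content-Type"] = "text/plain";
    std::cout << headers.count("content-type") << '\n';  // prints 1: lookup ignores case
}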

View File

@@ -3,38 +3,88 @@
#include <sstream>
#include <string>
#include <vector>
#include <cstdlib>
#include <ctime>
#include <random>
#include <stdlib.h>
#include <time.h>
#include "string.h"
#include "map_extra.h"
std::vector<std::string> split(const std::string &s, const std::string &separator)
{
string_size bpos = 0, epos = s.find(separator);
std::vector<std::string> result;
while(bpos < s.size())
string_size i = 0;
while(i != s.size())
{
if(epos == std::string::npos)
epos = s.size();
result.push_back(s.substr(bpos, epos - bpos));
bpos = epos + separator.size();
epos = s.find(separator, bpos);
int flag = 0;
while(i != s.size() && flag == 0)
{
flag = 1;
for(char x : separator)
if(s[i] == x)
{
++i;
flag = 0;
break;
}
}
flag = 0;
string_size j = i;
while(j != s.size() && flag == 0)
{
for(char x : separator)
if(s[j] == x)
{
flag = 1;
break;
}
if(flag == 0)
++j;
}
if(i != j)
{
result.push_back(s.substr(i, j-i));
i = j;
}
}
return result;
}
void split(std::vector<std::string_view> &result, std::string_view s, char separator)
{
string_size bpos = 0, epos = s.find(separator);
while(bpos < s.size())
string_size i = 0;
while (i != s.size())
{
if(epos == std::string_view::npos)
epos = s.size();
result.push_back(s.substr(bpos, epos - bpos));
bpos = epos + 1;
epos = s.find(separator, bpos);
int flag = 0;
while(i != s.size() && flag == 0)
{
flag = 1;
if(s[i] == separator)
{
++i;
flag = 0;
break;
}
}
flag = 0;
string_size j = i;
while(j != s.size() && flag == 0)
{
if(s[j] == separator)
{
flag = 1;
break;
}
++j;
}
if (i != j)
{
result.push_back(s.substr(i, j-i));
i = j;
}
}
}
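
Editor's note: the two split() bodies above are not drop-in equivalents. The find()-based form treats the separator as a whole substring and preserves empty fields (except a trailing one); the flag-and-loop form treats every character of the separator as a delimiter, collapses runs of delimiters, and drops empty tokens. A self-contained sketch of the find()-based behaviour, with the differing outputs noted in comments:

#include <iostream>
#include <string>
#include <vector>

// Whole-substring separator, empty fields preserved (mirrors the find()-based variant above).
std::vector<std::string> splitWhole(const std::string &s, const std::string &sep) {
    std::vector<std::string> out;
    std::string::size_type bpos = 0, epos = s.find(sep);
    while (bpos < s.size()) {
        if (epos == std::string::npos)
            epos = s.size();
        out.push_back(s.substr(bpos, epos - bpos));
        bpos = epos + sep.size();
        epos = s.find(sep, bpos);
    }
    return out;
}

int main() {
    for (const auto &tok : splitWhole("a,,b", ","))
        std::cout << '[' << tok << ']';
    std::cout << '\n';  // prints [a][][b]; the character-class variant would yield [a][b]
    // With separator ":=", the character-class variant would also split "host:port=name"
    // on both ':' and '=', while the find()-based form only matches the literal ":=".
}
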
@@ -91,7 +141,7 @@ std::string toUpper(const std::string &str)
void processEscapeChar(std::string &str)
{
string_size pos = str.find('\\');
while(pos != std::string::npos)
while(pos != str.npos)
{
if(pos == str.size())
break;
@@ -141,7 +191,7 @@ void processEscapeCharReverse(std::string &str)
int parseCommaKeyValue(const std::string &input, const std::string &separator, string_pair_array &result)
{
string_size bpos = 0, epos = input.find(separator);
string_size bpos = 0, epos = input.find(',');
std::string kv;
while(bpos < input.size())
{
@@ -150,9 +200,9 @@ int parseCommaKeyValue(const std::string &input, const std::string &separator, s
else if(epos && input[epos - 1] == '\\')
{
kv += input.substr(bpos, epos - bpos - 1);
kv += separator;
kv += ',';
bpos = epos + 1;
epos = input.find(separator, bpos);
epos = input.find(',', bpos);
continue;
}
kv += input.substr(bpos, epos - bpos);
@@ -163,9 +213,9 @@ int parseCommaKeyValue(const std::string &input, const std::string &separator, s
result.emplace_back(kv.substr(0, eqpos), kv.substr(eqpos + 1));
kv.clear();
bpos = epos + 1;
epos = input.find(separator, bpos);
epos = input.find(',', bpos);
}
if(!kv.empty())
if(kv.size())
{
string_size eqpos = kv.find('=');
if(eqpos == std::string::npos)
@@ -278,12 +328,12 @@ std::string getUrlArg(const std::string &url, const std::string &request)
while(pos)
{
pos = url.rfind(pattern, pos);
if(pos != std::string::npos)
if(pos != url.npos)
{
if(pos == 0 || url[pos - 1] == '&' || url[pos - 1] == '?')
{
pos += pattern.size();
return url.substr(pos, url.find('&', pos) - pos);
return url.substr(pos, url.find("&", pos) - pos);
}
}
else
@@ -360,24 +410,23 @@ bool isStrUTF8(const std::string &data)
std::string randomStr(int len)
{
std::string retData;
std::random_device rd;
std::mt19937 gen(rd());
std::uniform_int_distribution<> dis(0, 61);
for(int i = 0; i < len; i++)
srand(time(NULL));
int cnt = 0;
while(cnt < len)
{
int r = dis(gen);
if (r < 26)
switch((rand() % 3))
{
retData.push_back('a' + r);
}
else if (r < 52)
{
retData.push_back('A' + r - 26);
}
else
{
retData.push_back('0' + r - 52);
case 1:
retData += ('A' + rand() % 26);
break;
case 2:
retData += ('a' + rand() % 26);
break;
default:
retData += ('0' + rand() % 10);
break;
}
cnt++;
}
return retData;
}
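
Editor's note: the two randomStr bodies above differ in their random source. The rand() variant reseeds with srand(time(NULL)) on every call, so calls made within the same second repeat the same string, and choosing a character class via rand() % 3 makes any individual digit more likely than any individual letter; the <random> variant draws one uniform value in [0, 61] and maps it onto a-z, A-Z and 0-9. A self-contained sketch of that uniform mapping (seeded once here, which deviates from the per-call construction in the hunk above):

#include <iostream>
#include <random>
#include <string>

// Uniformly pick characters from the 62-character alphanumeric alphabet.
std::string randomStr(int len) {
    static std::mt19937 gen{std::random_device{}()};  // seeded once, not per call
    std::uniform_int_distribution<int> dis(0, 61);
    std::string out;
    out.reserve(len);
    for (int i = 0; i < len; ++i) {
        int r = dis(gen);
        if (r < 26)
            out.push_back('a' + r);          // 0..25  -> a-z
        else if (r < 52)
            out.push_back('A' + (r - 26));   // 26..51 -> A-Z
        else
            out.push_back('0' + (r - 52));   // 52..61 -> 0-9
    }
    return out;
}

int main() {
    std::cout << randomStr(16) << '\n';
}
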
@@ -402,7 +451,7 @@ int to_int(const std::string &str, int def_value)
std::string join(const string_array &arr, const std::string &delimiter)
{
if(arr.empty())
if(arr.size() == 0)
return "";
if(arr.size() == 1)
return arr[0];

View File

@@ -1,6 +1,6 @@
#ifndef VERSION_H_INCLUDED
#define VERSION_H_INCLUDED
#define VERSION "v0.9.8"
#define VERSION "v0.8.1"
#endif // VERSION_H_INCLUDED

63
xmake.lua Normal file
View File

@@ -0,0 +1,63 @@
add_rules("mode.debug", "mode.release")
option("static")
set_default(false)
set_showmenu(true)
set_category("option")
set_description("Build static binary.")
option_end()
add_requires("pcre2", "yaml-cpp", "rapidjson", "toml11")
includes("xmake/libcron.lua")
includes("xmake/yaml-cpp-static.lua")
includes("xmake/quickjspp.lua")
includes("xmake/curl-static.lua")
add_requires("libcron", {system = false})
add_requires("yaml-cpp-static", {system = false})
add_requires("quickjspp", {system = false})
if not is_plat("macosx") and get_config("static") == true then
add_requires("curl-static", {system = false})
else
add_requires("libcurl")
end
target("subconverter")
set_kind("binary")
if is_os("windows") then
add_syslinks("ws2_32", "wsock32")
end
if not is_os("macosx") and has_config("static") then
add_ldflags("-static")
end
add_files("src/**.cpp|lib/wrapper.cpp|server/webserver_libevent.cpp|script/script.cpp|generator/template/template_jinja2.cpp")
add_includedirs("src")
add_includedirs("include")
add_packages("pcre2", "rapidjson", "toml11", "libcron", "quickjspp")
if not is_plat("macosx") and get_config("static") == true then
add_packages("curl-static")
else
add_packages("libcurl")
end
if has_config("static") then
add_packages("yaml-cpp-static")
else
add_packages("yaml-cpp")
end
add_defines("CURL_STATICLIB")
add_defines("PCRE2_STATIC")
add_defines("YAML_CPP_STATIC_DEFINE")
add_cxxflags("-std=c++20")
target("subconverter_lib")
set_basename("subconverter")
set_kind("static")
add_files("src/**.cpp|handler/**.cpp|server/**.cpp|script/**.cpp|generator/template/template_jinja2.cpp")
add_includedirs("src")
add_includedirs("include")
add_packages("pcre2", "yaml-cpp", "rapidjson", "toml11")
add_defines("CURL_STATICLIB")
add_defines("PCRE2_STATIC")
add_defines("YAML_CPP_STATIC_DEFINE")
add_defines("NO_JS_RUNTIME")
add_defines("NO_WEBGET")
add_cxxflags("-std=c++20")

31
xmake/curl-static.lua Normal file
View File

@@ -0,0 +1,31 @@
package("curl-static")
add_deps("cmake")
add_versions("8.5.0", "7161cb17c01dcff1dc5bf89a18437d9d729f1ecd")
set_urls("https://github.com/curl/curl.git")
if is_plat("macosx", "iphoneos") then
add_frameworks("Security", "CoreFoundation", "SystemConfiguration")
elseif is_plat("linux") then
add_deps("mbedtls")
add_syslinks("pthread")
elseif is_plat("windows", "mingw") then
add_syslinks("advapi32", "crypt32", "wldap32", "winmm", "ws2_32", "user32", "bcrypt")
end
add_deps("zlib")
on_install(function (package)
local configs = {}
table.insert(configs, "-DCURL_USE_LIBSSH2=OFF")
table.insert(configs, "-DHAVE_LIBIDN2=OFF")
table.insert(configs, "-DCURL_USE_LIBPSL=OFF")
table.insert(configs, "-DBUILD_CURL_EXE=OFF")
table.insert(configs, "-DBUILD_TESTING=OFF")
table.insert(configs, "-DCURL_USE_MBEDTLS=" .. (package:is_plat("linux") and "ON" or "OFF"))
table.insert(configs, "-DCURL_USE_SCHANNEL=" .. (package:is_plat("windows") and "ON" or "OFF"))
table.insert(configs, "-DHTTP_ONLY=ON")
table.insert(configs, "-DCURL_USE_LIBSSH2=OFF")
table.insert(configs, "-DCMAKE_BUILD_TYPE=" .. (package:debug() and "Debug" or "Release"))
table.insert(configs, "-DBUILD_SHARED_LIBS=OFF")
import("package.tools.cmake").install(package, configs)
end)
package_end()

13
xmake/libcron.lua Normal file
View File

@@ -0,0 +1,13 @@
package("libcron")
add_deps("cmake")
add_versions("1.3.1", "41f238ceb09d4179e7346d78584a0c978e5d0059")
set_urls("https://github.com/PerMalmberg/libcron.git")
on_install(function (package)
local configs = {}
io.replace("CMakeLists.txt", "add_subdirectory(test)", "", {plain = true})
io.replace("CMakeLists.txt", "add_dependencies(cron_test libcron)", "", {plain = true})
table.insert(configs, "-DCMAKE_BUILD_TYPE=" .. (package:debug() and "Debug" or "Release"))
table.insert(configs, "-DBUILD_SHARED_LIBS=" .. (package:config("shared") and "ON" or "OFF"))
import("package.tools.cmake").install(package, configs)
end)
package_end()

54
xmake/quickjspp.lua Normal file
View File

@@ -0,0 +1,54 @@
package("quickjspp")
set_homepage("https://github.com/ftk/quickjspp")
set_description("QuickJS C++ wrapper")
add_urls("https://github.com/ftk/quickjspp.git")
add_versions("2022.7.22", "9cee4b4d27271d54b95f6f42bfdc534ebeaaeb72")
add_configs("shared", {description = "Build shared library.", default = false, type = "boolean", readonly = true})
add_includedirs("include", "include/quickjs")
add_linkdirs("lib/quickjs")
add_links("quickjs")
add_deps("cmake")
if is_plat("linux") then
add_syslinks("pthread", "dl", "m")
end
on_install("linux", "macosx", "mingw", function (package)
local configs = {"-DBUILD_TESTING=OFF"}
-- TODO: disable LTO; maybe we need to do this better
io.replace("CMakeLists.txt", "set(CMAKE_INTERPROCEDURAL_OPTIMIZATION TRUE)", "", {plain = true})
table.insert(configs, "-DCMAKE_BUILD_TYPE=" .. (package:debug() and "Debug" or "Release"))
import("package.tools.cmake").install(package, configs, {})
end)
on_test(function (package)
assert(package:check_cxxsnippets({test = [[
#include <iostream>
void test() {
using namespace qjs;
Runtime runtime;
Context context(runtime);
auto rt = runtime.rt;
auto ctx = context.ctx;
js_std_init_handlers(rt);
js_init_module_std(ctx, "std");
js_init_module_os(ctx, "os");
context.eval(R"xxx(
import * as std from 'std';
import * as os from 'os';
globalThis.std = std;
globalThis.os = os;
)xxx", "<input>", JS_EVAL_TYPE_MODULE);
js_std_loop(ctx);
js_std_free_handlers(rt);
}
]]}, {configs = {languages = "c++17"},
includes = {"quickjspp.hpp","quickjs/quickjs-libc.h"}}))
end)

11
xmake/yaml-cpp-static.lua Normal file
View File

@@ -0,0 +1,11 @@
package("yaml-cpp-static")
add_deps("cmake")
add_versions("0.8.0-SNAPSHOT", "eaf72053724814be3b99d38e292fca5797a57b7b")
set_urls("https://github.com/jbeder/yaml-cpp.git")
on_install(function (package)
local configs = {}
table.insert(configs, "-DCMAKE_BUILD_TYPE=" .. (package:debug() and "Debug" or "Release"))
table.insert(configs, "-DBUILD_SHARED_LIBS=" .. (package:config("shared") and "ON" or "OFF"))
import("package.tools.cmake").install(package, configs)
end)
package_end()