与上一个脚本配套使用的脚本。
注意:此脚本必须与上一个脚本配套使用,单独运行没有意义!
import os
import pickle
import sys
if __name__ == "__main__":
    # One data-file argument is required: the pickled worklist produced by
    # the companion upgrade script.
    if len(sys.argv) < 2:
        print(f"Usage: {os.path.basename(__file__)} <human_patch.dat>")
        exit(0)

    # Load the (work_file, reject_file) pairs saved by the upgrade script.
    with open(sys.argv[1], "rb") as data_file:
        data_list = pickle.load(data_file)

    print("Start review auto patch failed files!")
    # Open each half-patched file in VS Code and its .rej companion in the
    # "view" pager so a human can merge the rejected hunks.
    for work_file, rej_file in data_list:
        print(work_file)
        os.system("code %s" % work_file)
        os.system("view %s" % rej_file)
与chromium相关:
# coding: utf-8
import filecmp
import os
import pickle
import shutil
import subprocess
import sys
# Sub-directory names inside the output directory. The trailing os.sep lets
# the helpers below build paths by plain string concatenation.
__src_dir_name = "src" + os.sep
__patch_dir_name = "patch" + os.sep
__reject_dir_name = "rejects" + os.sep
# Log / data file names written directly under the output directory.
__copy_log_name = "copy_file.log"
__miss_log_name = "miss_file.log"
__patch_success_log_name = "patch_success.log"
__patch_failed_log_name = "patch_failed.log"
__human_patch_data_name = "human_patch.dat"
# Binary file extensions that are always copied rather than diffed/patched.
__force_copy_type = (".png", ".dll", ".exe", ".ico", ".icns")
__chromium_version_file = "chrome/VERSION"
# NOTE(review): "servion" looks like a typo of "version"; kept as-is because
# the identifier and the path string are part of the script's behavior.
__self_servion_file = "third_party/superBrowser/version"
# (output_file, reject_file) pairs that failed auto-patching; pickled by
# __save_human_patch for the companion manual-review script.
__human_patch_list = []
def __reset_self_servion(src_dir):
    """Bump or reset the superBrowser version from the chromium PATCH number.

    Reads PATCH from chrome/VERSION under *src_dir*. When PATCH > 50 the
    stored self version advances one minor step (1.0 -> 1.1 -> ... ->
    1.10 -> 1.11 ...); otherwise it is reset to "0.1".
    ("servion" keeps the original, externally referenced name.)
    """
    chromium_version_path = os.path.join(src_dir, __chromium_version_file)
    self_servion_path = os.path.join(src_dir, __self_servion_file)
    patch = 0
    with open(chromium_version_path) as chromium_file:
        for it in chromium_file.readlines():
            idx = it.find('=')
            if -1 != idx and "PATCH" == it[:idx]:
                patch = int(it[idx + 1:])
                break
    with open(self_servion_path, "r+") as self_file:
        ver = float(self_file.read())
        self_file.seek(0)
        # Truncate so a shorter new value cannot leave stale trailing bytes
        # from the previous content (e.g. "1.15" overwritten by "0.1" used
        # to leave "0.15" on disk).
        self_file.truncate()
        if patch > 50:
            if ver >= 1.0:
                # round() instead of int(): int() truncates, and float noise
                # (1.2 - 1.0 == 0.19999...) made some versions never advance.
                minor = round((ver - 1.0) * 10) + 1
                if minor > 9:
                    # NOTE(review): two-decimal versions written here (e.g.
                    # "1.10") are not parsed back correctly by the *10 step
                    # above — confirm versions stay <= 1.9 in practice.
                    self_file.write("{:.2f}".format(minor / 100 + 1.0))
                else:
                    self_file.write("{:.1f}".format(minor / 10 + 1.0))
            else:
                self_file.write("1.0")
        else:
            self_file.write("0.1")
def __check_base_version():
    """Sanity check: the chromium VERSION file of the local git tree must be
    identical to the one in the pristine base tree, otherwise the generated
    diffs would be against the wrong baseline.

    Reads the module-level base_git_path / base_version_path set in main.
    Raises Exception when the two VERSION files differ.
    """
    base_git_version_file = os.path.join(base_git_path,
                                         __chromium_version_file)
    base_version_file = os.path.join(base_version_path,
                                     __chromium_version_file)
    # filecmp.cmp compares the two files' contents (shallow stat first).
    if not filecmp.cmp(base_version_file, base_git_version_file):
        raise Exception(
            "Git file version does not match the base file version.")
def __save_human_patch():
    """Pickle the manual-patch worklist into the output directory for the
    companion review script."""
    data_path = os.path.join(output_path, __human_patch_data_name)
    with open(data_path, "wb") as data_file:
        pickle.dump(__human_patch_list, data_file)
def __make_dirs(file_path):
    """Create the parent directory of *file_path* if it does not exist yet.

    A bare filename has an empty dirname, and os.makedirs("") raises
    FileNotFoundError, so skip that case. exist_ok=True removes the
    check-then-create race of the original exists()/makedirs pair.
    """
    dest_dir = os.path.dirname(file_path)
    if dest_dir:
        os.makedirs(dest_dir, exist_ok=True)
def save_miss_file(file_list, log_path):
    """Echo every missing-file path to stdout and record it in *log_path*."""
    with open(log_path, "w") as log_file:
        for entry in file_list:
            # Mirror each entry to the console and to the log file.
            print(entry)
            print(entry, file=log_file)
def safe_copy(src, dest):
    """Copy *src* to *dest*, creating dest's parent directories first so the
    copy cannot fail on a missing directory."""
    __make_dirs(dest)
    # copyfile (not copy): dest is always a full file path, and permission
    # bits from src are intentionally not preserved.
    shutil.copyfile(src, dest)
def generate_patch(old_file_path, new_file_path, work_file_path):
    """Write a context diff of the base vs. modified file under patch/.

    Returns True when diff exits with status 1 — encoded by os.system on
    POSIX as 256 — meaning the files differ and a usable patch was written.
    Status 0 (identical files) or >1 (error) returns False.
    """
    patch_file_path = output_path + __patch_dir_name + work_file_path + ".patch"
    __make_dirs(patch_file_path)
    cmd_line = f"diff -c {old_file_path} {new_file_path} > {patch_file_path}"
    return os.system(cmd_line) == 256
def apply_patch(base_file_path, work_file_path):
    """Apply the previously generated context patch onto *base_file_path*.

    The patched result goes under src/, rejected hunks under rejects/.
    A non-zero exit queues the pair for manual review. Returns the
    subprocess.CompletedProcess.
    """
    output_file_path = output_path + __src_dir_name + work_file_path
    patch_file_path = output_path + __patch_dir_name + work_file_path + ".patch"
    reject_file_path = output_path + __reject_dir_name + work_file_path + ".rej"
    # Both destination trees must exist before patch writes into them.
    for destination in (output_file_path, reject_file_path):
        __make_dirs(destination)
    cmd_line = [
        "patch", "-c", base_file_path, patch_file_path, "-o", output_file_path,
        "-r", reject_file_path
    ]
    rv = subprocess.run(cmd_line, capture_output=True, text=True)
    if rv.returncode != 0:
        __human_patch_list.append((output_file_path, reject_file_path))
    return rv
def delete_empty_directories(path):
    """Remove every empty directory below *path*, deepest first.

    Walking bottom-up means a parent that becomes empty after its children
    are removed is itself removed when its own parent is visited. *path*
    itself is never removed.
    """
    for parent, subdirs, _ in os.walk(path, topdown=False):
        for name in subdirs:
            candidate = os.path.join(parent, name)
            if not os.listdir(candidate):
                os.rmdir(candidate)
if __name__ == "__main__":
    # POSIX-only: the patch pipeline shells out to diff/patch.
    # NOTE(review): message typos ("scripts", back-tick) kept — runtime string.
    if "nt" == os.name:
        print("This scripts can`t run in windows platform!")
        exit(1)
    try:
        if len(sys.argv) < 5:
            print(
                "Usage: upgrade_version_file.py <base_git_path> <base_version_path> <new_version_path> <output_path>."
            )
            exit(0)
        # output_path keeps a trailing separator because the helpers build
        # paths by plain string concatenation.
        base_git_path = os.path.abspath(sys.argv[1])
        base_version_path = os.path.abspath(sys.argv[2])
        new_version_path = os.path.abspath(sys.argv[3])
        output_path = os.path.abspath(sys.argv[4]) + os.sep
        __check_base_version()
        copy_list = []
        patch_list = []
        miss_list = []
        # NOTE(review): "Strat" typo kept — runtime string.
        print("Strat check file ...", end="", flush=True)
        # Classify every file of the locally modified git tree against the
        # pristine base tree and the new upstream tree.
        for root, _, files in os.walk(base_git_path):
            for file in files:
                git_file = os.path.join(root, file)
                work_file = os.path.relpath(git_file, base_git_path)
                base_version_file = os.path.join(base_version_path, work_file)
                new_version_file = os.path.join(new_version_path, work_file)
                # find copy file
                if not os.path.exists(base_version_file):
                    # File only exists in our tree: copy it over unchanged.
                    copy_list.append((git_file, work_file))
                elif not os.path.exists(
                        new_version_file):  # new version miss file.
                    miss_list.append(work_file)
                elif filecmp.cmp(
                        base_version_file,
                        new_version_file) or os.path.splitext(
                            base_version_file)[1] in __force_copy_type:
                    # Upstream did not change this file (or it is a binary
                    # type): keep our copy only when we modified it.
                    if not filecmp.cmp(git_file, base_version_file):
                        copy_list.append((git_file, work_file))
                # make patch file list.
                else:
                    patch_list.append((base_version_file, git_file,
                                       new_version_file, work_file))
        # add version file
        copy_list.append(
            (os.path.join(new_version_path,
                          __chromium_version_file), __chromium_version_file))
        print(" do!")
        # create output dir
        if not os.path.exists(output_path):
            os.makedirs(output_path)
        # generate patch file list
        print("----------------------patch file:----------------------")
        with open(output_path + __patch_success_log_name,
                  "w") as success_log, open(
                      output_path + __patch_failed_log_name,
                      "w") as failed_log:
            for base_version_file, git_file, new_version_file, work_file in patch_list:
                print(f"{work_file} ... ", end="")
                if generate_patch(base_version_file, git_file, work_file):
                    result = apply_patch(new_version_file, work_file)
                    if 0 == result.returncode:
                        print("OK!")
                        print(work_file, file=success_log)
                    else:
                        # Patch rejected; details go to the failed log, and
                        # apply_patch queued the pair for manual review.
                        print("Failed!")
                        print(work_file, file=failed_log)
                        print(result.stdout, file=failed_log)
                        print(
                            "------------------------------------------------------------\n",
                            file=failed_log)
                else:
                    # diff reported no difference (or an error): nothing to
                    # apply for this file.
                    print("Skip!")
                    print(f"{work_file} ... Skip!", file=failed_log)
                    print(
                        "\n------------------------------------------------------------\n",
                        file=failed_log)
        # patch pre-creates reject dirs; drop the ones that stayed empty.
        delete_empty_directories(output_path + __reject_dir_name)
        __save_human_patch()
        # copy file
        print("----------------------copy file:----------------------")
        src_dir = os.path.join(output_path, __src_dir_name)
        with open(output_path + __copy_log_name, "w") as copy_log:
            for src, work_file in copy_list:
                print(work_file)
                safe_copy(src, src_dir + work_file)
                print(work_file, file=copy_log)
        __reset_self_servion(src_dir)
        # Overwrite the branded icons with the chromium ones from the new
        # upstream tree.
        icon_dir = os.path.join(new_version_path, "chrome", "app", "theme",
                                "chromium")
        dest_dir = os.path.join(src_dir, "third_party", "superBrowser",
                                "resource", "product", "other")
        shutil.copy(os.path.join(icon_dir, "win", "chromium.ico"), dest_dir)
        shutil.copy(os.path.join(icon_dir, "mac", "app.icns"), dest_dir)
        shutil.copy(os.path.join(icon_dir, "linux", "product_logo_48.png"),
                    dest_dir)
        # save miss file list
        print("----------------------miss file:----------------------")
        save_miss_file(miss_list, output_path + __miss_log_name)
    except Exception as error:
        print(error)
        exit(1)
之前整理过如何快速的检出指定tag的chromium源码。
这次整理成python脚本,可以在当前目录直接运行。自动完成代码检出,并有出错重试的功能。
import argparse
import os
import time
import shutil
# gclient "solutions" template written to .gclient before syncing.
# managed=False keeps gclient from touching the git checkout itself;
# checkout_pgo_profiles pulls the PGO profile data used by release builds.
_gcl_context = """
solutions = [
{
"name": "src",
"url": "https://chromium.googlesource.com/chromium/src.git",
"managed": False,
"custom_deps": {},
"custom_vars": {"checkout_pgo_profiles": True,},
},
]
"""
def delete_error_git_dir(path):
    """Delete leftover ".git" directories from failed / partial clones.

    A clone that died early leaves a directory whose ONLY entry is ".git";
    such a tree makes the retried git command fail again, so remove it.
    Exits the process when deletion itself fails.
    """
    for dirpath, dirnames, _ in os.walk(path):
        for dirname in dirnames:
            full_dir_path = os.path.join(dirpath, dirname)
            if dirname == ".git" and len(
                    os.listdir(os.path.dirname(full_dir_path))) == 1:
                print(
                    f"Deleting error .git directory: {full_dir_path} and its contents..."
                )
                try:
                    if os.name == "nt":
                        # Quote the path so names containing spaces work,
                        # and surface rmdir failures (os.system does not
                        # raise OSError on a non-zero exit).
                        if os.system(f'rmdir /s /q "{full_dir_path}"') != 0:
                            raise OSError(f"rmdir failed: {full_dir_path}")
                    else:
                        shutil.rmtree(full_dir_path)
                except OSError as e:
                    print(f"Error deleting {full_dir_path}: {str(e)}")
                    exit(1)
def _run_command_with_success(cmd):
    """Run *cmd* through the shell, retrying until it exits with 0.

    Before each retry, half-cloned .git directories under "src" are cleaned
    up, since they are the usual reason a git fetch/clone keeps failing.
    """
    while True:
        if os.system(cmd) == 0:
            break
        print(f"Command:'{cmd}' failed! Start retry!")
        delete_error_git_dir("src")
    print(f"Command:'{cmd}' success!")
if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Checkout chromium code with tag.")
    parser.add_argument("tag", type=str, help="Tag to checkout.")
    parser.add_argument("-u",
                        "--upgrade",
                        action="store_true",
                        help="Upgrade existing code to a specified tag.")
    parser.add_argument(
        "--os",
        type=str,
        help="Specify the target platform for Chromium. For example: --os mac."
    )
    args = parser.parse_args()
    if not args.tag:
        parser.error('the "tag" argument is necessary')
    start = time.time()
    # (Re)write the gclient solution file every run so target_os stays in
    # sync with the --os flag.
    with open(".gclient", "w") as gcl:
        gcl.write(_gcl_context)
        if args.os:
            gcl.write(f'target_os = ["{args.os}"]\n')
    # checkout chromium
    if args.upgrade and os.path.exists("src"):
        # Existing checkout: fetch only the requested tag, then switch to a
        # local branch for it.
        print(f"Start upgrade chromium code to tag {args.tag} !")
        _run_command_with_success(
            f"git -C \"src\" fetch origin refs/tags/{args.tag}")
        os.system(f"git -C \"src\" checkout -b local_{args.tag} {args.tag}")
    elif os.path.exists("src"):
        print("Have 'src' directory, so skip checkout tag!")
    else:
        # Fresh shallow clone (--depth 2) of just the requested tag; retried
        # until it succeeds.
        print(f"Start checkout chromium tag {args.tag} !")
        _run_command_with_success(
            f"git clone --depth 2 -b {args.tag} https://chromium.googlesource.com/chromium/src.git"
        )
    # run hook
    print("Start run hook!")
    _run_command_with_success(
        "gclient sync -D --with_branch_heads --with_tags")
    print(
        f"checkout chromium {args.tag} Ok! Use time:{time.strftime('%H:%M:%S', time.gmtime(time.time() - start))} !"
    )
在上一篇文章中,介绍了如何集成加强版的geodata文件到openwrt。
但是这个加强版的geodata每日更新。按之前的办法,要去github查看新版本的信息,手动更新。太麻烦了……
这里面提供一个python脚本,可以方便的自动更新Makefile文件到最新版本。
使用方法:将脚本的内容保存成文件,比如 up_geodata.sh。
然后运行脚本,指定要更新的Makefile文件路径:
python3 up_geodata.sh -b Makefile
参数-b
表示备份旧的文件,不想备份可以不加。
mac或者linux,还可以直接添加可执行权限,当成应用来运行:
chmod +x up_geodata.sh
以下是脚本内容:
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import requests
import re
import argparse
import shutil
import os
from datetime import datetime
# GitHub API endpoint for the latest Loyalsoldier/v2ray-rules-dat release.
GITHUB_API_URL = "https://api.github.com/repos/Loyalsoldier/v2ray-rules-dat/releases/latest"
# SHA256 checksum files to download, keyed by geodata asset name.
FILES = {"geoip": "geoip.dat.sha256sum", "geosite": "geosite.dat.sha256sum"}
def get_latest_version():
    """Return the tag name of the latest GitHub release.

    Raises Exception when the API request does not return HTTP 200.
    """
    response = requests.get(GITHUB_API_URL)
    if response.status_code != 200:
        raise Exception("Failed to fetch release data from GitHub API")
    return response.json()['tag_name']
def download_sha256sum_file(version, file_key):
    """Download the SHA256 checksum file for *file_key* ("geoip"/"geosite")
    of the given release *version* and return its stripped text content.

    Raises Exception when the download does not return HTTP 200.
    """
    file_name = FILES[file_key]
    url = f"https://github.com/Loyalsoldier/v2ray-rules-dat/releases/download/{version}/{file_name}"
    response = requests.get(url)
    if response.status_code != 200:
        raise Exception(
            f"Failed to download {file_name} for version {version}")
    return response.text.strip()
def extract_hash(file_content):
    """Return the hash field of a sha256sum file's content, i.e. the first
    whitespace-separated token ("<hash>  <filename>")."""
    hash_value, *_ = file_content.split()
    return hash_value
def backup_makefile(makefile_path):
    """Copy *makefile_path* to a timestamped ".bak_YYYYmmdd_HHMMSS" sibling.

    Raises FileNotFoundError when the Makefile does not exist.
    """
    if not os.path.exists(makefile_path):
        raise FileNotFoundError(f"❌ 未找到 Makefile: {makefile_path}")
    stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    backup_path = f"{makefile_path}.bak_{stamp}"
    shutil.copy(makefile_path, backup_path)
    print(f"✅ 备份已创建: {backup_path}")
def update_makefile(makefile_path, version, geoip_hash, geosite_hash):
    """Rewrite GEODATA_VER and both download HASH values in the Makefile.

    Each HASH is located inside its own "define Download/<name>" block via a
    non-greedy DOTALL match, so the geoip and geosite hashes cannot clobber
    each other.
    """
    with open(makefile_path, "r") as file:
        content = file.read()
    # (pattern, new value, regex flags) — \g<1> keeps the matched prefix.
    substitutions = (
        (r"(GEODATA_VER:=)\d+", version, 0),
        (r"(define Download/geoip\s+.*?\n\s+HASH:=)[a-f0-9]+",
         geoip_hash, re.DOTALL),
        (r"(define Download/geosite\s+.*?\n\s+HASH:=)[a-f0-9]+",
         geosite_hash, re.DOTALL),
    )
    for pattern, value, flags in substitutions:
        content = re.sub(pattern, r"\g<1>" + value, content, flags=flags)
    with open(makefile_path, "w") as file:
        file.write(content)
    print(
        f"✅ Makefile 已成功更新: 版本 {version}, GeoIP Hash: {geoip_hash}, Geosite Hash: {geosite_hash}"
    )
def main():
    """CLI entry point: fetch the latest release info and patch the Makefile."""
    parser = argparse.ArgumentParser(
        description="更新 Makefile 中的 v2ray-geodata 版本号和 HASH")
    parser.add_argument("makefile", help="需要更新的 Makefile 路径")
    parser.add_argument("-b",
                        "--backup",
                        action="store_true",
                        help="是否备份 Makefile")
    args = parser.parse_args()
    makefile_path = args.makefile
    try:
        # Only back up when -b was given.
        if args.backup:
            backup_makefile(makefile_path)
        # Latest release tag, then the two checksum files for that tag.
        version = get_latest_version()
        print(f"📌 最新版本号: {version}")
        geoip_hash = extract_hash(download_sha256sum_file(version, "geoip"))
        geosite_hash = extract_hash(
            download_sha256sum_file(version, "geosite"))
        print(f"📌 GeoIP Hash: {geoip_hash}")
        print(f"📌 Geosite Hash: {geosite_hash}")
        update_makefile(makefile_path, version, geoip_hash, geosite_hash)
    except Exception as e:
        # Any failure (network, missing file, regex miss) is reported, not
        # re-raised — the script is meant for interactive use.
        print(f"❌ 发生错误: {e}")
# Script entry point.
if __name__ == "__main__":
    main()
早年写过一篇旧的同名文章。介绍过如何在openwrt编译时,直接集成一个自己想要的geodata文件。
只是当年才疏学浅,用了最土的方式,直接修改openwrt源文件的方式来实现。
如今研究过openwrt的项目结构后,发现通过添加外部feeds
来覆盖源目标的方式才是最正确的方法。
这个方法也适用于要添加其它的自定义项目。
首先,将要添加的项目的文件,这里geodata只有一个Makefile
文件。保存到适当的位置。
以我保存的路径为例:
/home/ubuntu/openwrt/feeds/v2ray-geodata/Makefile
然后,在openwrt项目,根目录下的feeds.conf.default
的第一行添加以下一行:
src-link custom /home/ubuntu/openwrt/feeds
为什么要在第一行?因为feeds的优先级是根据自上而下的加载顺序决定的。最先加载的优先级最高,可以覆盖后面原生自带的相同项目。
这里也可以看出来,在/home/ubuntu/openwrt/feeds
的路径下,还可以放入其它多个自己想要添加的项目。
最后就是正常的编译流程:
cd lede
git pull
./scripts/feeds update -a
./scripts/feeds install -a
make menuconfig
make download -j8
make V=s -j$(nproc)
下面是我的加强版geodata项目Makefile文件内容,可以直接保存使用。
# SPDX-License-Identifier: GPL-3.0-only
#
# Copyright (C) 2021-2022 ImmortalWrt.org

include $(TOPDIR)/rules.mk

PKG_NAME:=v2ray-geodata
PKG_RELEASE:=1

PKG_LICENSE_FILES:=LICENSE
PKG_MAINTAINER:=winger zhang <winger.zhang@gmail.com>

include $(INCLUDE_DIR)/package.mk

# Release tag of Loyalsoldier/v2ray-rules-dat; doubles as the download
# file suffix and the package VERSION. Updated by the companion script.
GEODATA_VER:=202503192212

# geoip.dat download spec (versioned local file name avoids stale caches).
GEOIP_FILE:=geoip.dat.$(GEODATA_VER)
define Download/geoip
  URL:=https://github.com/Loyalsoldier/v2ray-rules-dat/releases/download/$(GEODATA_VER)/
  URL_FILE:=geoip.dat
  FILE:=$(GEOIP_FILE)
  HASH:=e32b80017d1dea91bc36c4e7ed07a5e7f200356415e678a8a6f825fb521f7b68
endef

# geosite.dat download spec.
GEOSITE_FILE:=geosite.dat.$(GEODATA_VER)
define Download/geosite
  URL:=https://github.com/Loyalsoldier/v2ray-rules-dat/releases/download/$(GEODATA_VER)/
  URL_FILE:=geosite.dat
  FILE:=$(GEOSITE_FILE)
  HASH:=57c476f6e50737c5fdf68778ec1615fb2017f75886a6bb0252927c314e5904b7
endef

# Metadata shared by both packages below.
define Package/v2ray-geodata/template
  SECTION:=net
  CATEGORY:=Network
  SUBMENU:=IP Addresses and Names
  URL:=https://www.v2fly.org
  PKGARCH:=all
endef

define Package/v2ray-geoip
  $(call Package/v2ray-geodata/template)
  TITLE:=GeoIP List for V2Ray
  VERSION:=$(GEODATA_VER)-$(PKG_RELEASE)
  LICENSE:=CC-BY-SA-4.0
endef

define Package/v2ray-geosite
  $(call Package/v2ray-geodata/template)
  TITLE:=Geosite List for V2Ray
  VERSION:=$(GEODATA_VER)-$(PKG_RELEASE)
  LICENSE:=MIT
endef

# Download only the data files whose package is actually selected.
define Build/Prepare
	$(call Build/Prepare/Default)
ifneq ($(CONFIG_PACKAGE_v2ray-geoip),)
	$(call Download,geoip)
endif
ifneq ($(CONFIG_PACKAGE_v2ray-geosite),)
	$(call Download,geosite)
endif
endef

# Data-only package: nothing to compile.
define Build/Compile
endef

define Package/v2ray-geoip/install
	$(INSTALL_DIR) $(1)/usr/share/v2ray
	$(INSTALL_DATA) $(DL_DIR)/$(GEOIP_FILE) $(1)/usr/share/v2ray/geoip.dat
endef

define Package/v2ray-geosite/install
	$(INSTALL_DIR) $(1)/usr/share/v2ray
	$(INSTALL_DATA) $(DL_DIR)/$(GEOSITE_FILE) $(1)/usr/share/v2ray/geosite.dat
endef

$(eval $(call BuildPackage,v2ray-geoip))
$(eval $(call BuildPackage,v2ray-geosite))
最后,如何快速的更新Makefile文件,以编译打包最新的geodata文件。可以看我的另外一篇文章。