diff --git a/README.md b/README.md
index d86cfc1..87ecf33 100644
--- a/README.md
+++ b/README.md
@@ -15,6 +15,7 @@ To improve this situation, we have launched this app store. We have broadly incl
 - **amd64 architecture:** deepin 20 / deepin 23 / Ubuntu 20.04 / Ubuntu 22.04 / UOS Home Edition 20
 - **arm64 architecture:** UOS Professional Edition 1060 / Ubuntu 22.04 / deepin 23
+- **loong64 architecture:** deepin 23
 
 > Special Note: We also support all versions of Ubuntu operating systems higher than Ubuntu 22.04, such as Ubuntu 22.10, 23.04, 23.10, etc.
 
@@ -177,6 +178,6 @@ You can also check the [Chinese version](https://gitee.com/spark-store-project/s
 - If you want to follow our development progress, you can go to the [Spark App Store Board](https://gitee.com/spark-store-project/spark-store/board) for more information.
 - Our [Forum](https://www.deepinos.org/)
 - Our [QQ Group](https://blog.shenmo.tech/post/%E6%95%85%E9%9A%9C%E5%85%AC%E5%91%8A/)
-- For commercial support, please visit [Flamescion Inc.](http://flamescion.cn/)
+- For commercial support, please leave a message to inquire.
 
 ---
\ No newline at end of file
diff --git a/README.zh.md b/README.zh.md
index fe1ea4c..fb6361c 100644
--- a/README.zh.md
+++ b/README.zh.md
@@ -11,12 +11,13 @@ Linux 应用的数量相对有限,Wine 软件的可获得性也颇为困难。
 
 生态系统的构建并非依赖个体的孤立努力,而需要全社区共同参与。只有当大家的“星火”聚集一处,方可引发“燎原之势”。
 
-为了改善这一现状,我们推出了这个应用商店。我们广泛地收录了各种用户需求的软件包,汇集了高质量的小工具,并主动对 Wine 应用进行了适配,一切都储存在我们的软件库中,以便用户方便地获取。
+为了改善这一现状,我们推出了这个应用商店。我们广泛地收录了各种用户需求的软件包,汇集了高质量的小工具,并主动对 Wine 应用进行了适配,一切都储存在我们的软件库中,使得用户可以方便地获取这些应用。
 
 **当前支持的Linux发行版包括:**
 
 - **amd64架构:** deepin 20 / deepin 23 / Ubuntu 20.04 / Ubuntu 22.04 / UOS家庭版20
 - **arm64架构:** UOS专业版1060 / Ubuntu 22.04 / deepin 23
+- **loong64架构:** deepin 23
 
 > 特别说明:我们还支持所有版本高于Ubuntu 22.04的Ubuntu操作系统,例如Ubuntu 22.10、23.04、23.10等。
 
@@ -188,7 +189,6 @@ GPLV3许可证的完整文本可以在以下链接找到:https://www.gnu.org/l
 - 如果你想关注我们的开发进度,可以跳转[星火应用商店Board](https://gitee.com/spark-store-project/spark-store/board)获取更多信息。
 - 我们的[论坛](https://www.deepinos.org/)
 - 我们的[QQ群](https://blog.shenmo.tech/post/%E6%95%85%E9%9A%9C%E5%85%AC%E5%91%8A/)
-- 商业支持请访问[火穗科技 Flamescion Inc.](http://flamescion.cn/)
-
+- 商业支持请留言咨询~
 
 ---
diff --git a/debian/changelog b/debian/changelog
index e8a0d35..8eca23a 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,13 @@
+spark-store (4.2.11) stable; urgency=medium
+
+  * 修复:判断是否安装状态错误
+  * 修改:部分组件重构提升清晰度
+  * 修复:v23下部分应用图标失效
+
+
+ -- shenmo  Sun, 5 Mar 2023 11:45:14 +0800
+
+
 spark-store (4.2.10) stable; urgency=medium
 
   * 修复:部分发行版上无法启动自动创建的桌面图标
diff --git a/debian/rules b/debian/rules
index 6fb5c06..fc38dd1 100755
--- a/debian/rules
+++ b/debian/rules
@@ -3,10 +3,13 @@ export QT_SELECT = qt5
 export DEB_BUILD_MAINT_OPTIONS = hardening=+all
 include /usr/share/dpkg/default.mk
 
-export DEB_VERSION = $(shell dpkg-parsechangelog -S Version)
+
 DEB_BUILD_ARCH ?= $(shell dpkg-architecture -qDEB_BUILD_ARCH)
 DEB_HOST_MULTIARCH ?= $(shell dpkg-architecture -qDEB_HOST_MULTIARCH)
 
+# Use realtime timestamp instead of the latest entry in debian/changelog
+SOURCE_DATE_EPOCH := $(shell date +%s)
+
 # Uncomment this to turn on verbose mode.
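# ---------------------------------------------------------------------------
# A minimal illustration (not part of the original rules file) of what the
# dpkg-architecture variables above typically expand to, assuming an amd64
# build host:
#
#   dpkg-architecture -qDEB_BUILD_ARCH       # -> amd64
#   dpkg-architecture -qDEB_HOST_MULTIARCH   # -> x86_64-linux-gnu
#
# so the per-architecture build directory used by the override targets below
# becomes build-x86_64-linux-gnu, keeping objects for different hosts apart.
# SOURCE_DATE_EPOCH is the reproducible-builds timestamp that dpkg normally
# derives from the newest debian/changelog entry; assigning `date +%s` stamps
# the build with the actual build time instead, as the comment above notes.
# ---------------------------------------------------------------------------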
#export DH_VERBOSE = 1 @@ -14,25 +17,23 @@ DEB_HOST_MULTIARCH ?= $(shell dpkg-architecture -qDEB_HOST_MULTIARCH) dh $@ --parallel override_dh_auto_clean: - rm -rf $(CURDIR)/build + rm -rf $(CURDIR)/build-$(DEB_HOST_MULTIARCH) override_dh_auto_configure: - mkdir -p $(CURDIR)/build + mkdir -p $(CURDIR)/build-$(DEB_HOST_MULTIARCH) qmake BUILD_VERSION=$(DEB_VERSION_UPSTREAM) spark-store-project.pro \ - -spec linux-g++ CONFIG+=qtquickcompiler \ - -o $(CURDIR)/build/ + -spec linux-g++ CONFIG+=force_debug_info \ + -o $(CURDIR)/build-$(DEB_HOST_MULTIARCH)/ override_dh_auto_build: - make MAKEFLAGS="$(MAKEFLAGS)" -C $(CURDIR)/build + make -C $(CURDIR)/build-$(DEB_HOST_MULTIARCH) -j$(JOBS) override_dh_auto_install: - make -C $(CURDIR)/build install \ + make -C $(CURDIR)/build-$(DEB_HOST_MULTIARCH) install \ INSTALL_ROOT=$(CURDIR)/debian/spark-store - # Ignore the dpkg-shlibdeps: warning (it uses none of the library's symbols) # Qt Mutidedia lib will ref to network libraray. override_dh_shlibdeps: dh_shlibdeps --dpkg-shlibdeps-params=--warnings=0 - diff --git a/debian/spark-store.postinst b/debian/spark-store.postinst index dae15a1..0e22c2d 100755 --- a/debian/spark-store.postinst +++ b/debian/spark-store.postinst @@ -12,6 +12,10 @@ case "$1" in aarch64) echo "Will not enable armhf since 4271" ;; + loongarch64) + echo "Enabling ABI1(OldWorld) arch..." + dpkg --add-architecture loongarch64 + ;; *) echo "Unknown architecture, skip enable 32-bit arch" @@ -25,12 +29,15 @@ case "$1" in # Check if /usr/local/bin existed mkdir -p /usr/local/bin - + ## I hate /usr/local/bin. We will abandon them later # Create symbol links for binary files ln -s -f /opt/durapps/spark-store/bin/spark-store /usr/local/bin/spark-store ln -s -f /opt/durapps/spark-store/bin/ssinstall /usr/local/bin/ssinstall ln -s -f /opt/durapps/spark-store/bin/ssaudit /usr/local/bin/ssaudit + ln -s -f /opt/durapps/spark-store/bin/ssinstall /usr/bin/ssinstall + ln -s -f /opt/durapps/spark-store/bin/ssaudit /usr/bin/ssaudit ln -s -f /opt/durapps/spark-store/bin/spark-dstore-patch /usr/local/bin/spark-dstore-patch + ln -s -f /opt/durapps/spark-store/bin/spark-dstore-patch /usr/bin/spark-dstore-patch ln -s -f /opt/durapps/spark-store/bin/aptss /usr/local/bin/ss-apt-fast ln -s -f /opt/durapps/spark-store/bin/aptss /usr/bin/aptss @@ -66,18 +73,7 @@ case "$1" in ;; triggered) - # Quit if deepin-app-store-tool existed - if [ -x "/usr/bin/deepin-app-store-tool" ] ; then - exit 0 - fi + spark-dstore-patch - # Trigger for UOS debs installation - echo '--------检测到Uniontech标准软件包,运行补丁以修正安装--------' - if [ -x "/usr/local/bin/spark-dstore-patch" ] ; then - /usr/local/bin/spark-dstore-patch - echo '-----------spark-dstore-patch补丁工具已运行完毕-----------' - else - echo '------------spark-dstore-patch补丁工具运行失败------------' - fi ;; esac diff --git a/debian/spark-store.postrm b/debian/spark-store.postrm old mode 100644 new mode 100755 diff --git a/debian/spark-store.prerm b/debian/spark-store.prerm index c9aa676..f266bb6 100755 --- a/debian/spark-store.prerm +++ b/debian/spark-store.prerm @@ -16,12 +16,15 @@ if [ "$1" = "remove" -o "$1" = "purge" ] ; then echo "卸载操作,进行配置清理" # Remove residual symbol links - rm -f /usr/local/bin/spark-store - rm -f /usr/local/bin/ssinstall - rm -f /usr/local/bin/ssaudit - rm -f /usr/local/bin/spark-dstore-patch - rm -f /usr/local/bin/ss-apt-fast - rm -f /usr/bin/aptss + unlink /usr/local/bin/spark-store + unlink /usr/local/bin/ssinstall + unlink /usr/local/bin/ssaudit + unlink /usr/bin/ssinstall + unlink /usr/bin/ssaudit + unlink 
/usr/local/bin/spark-dstore-patch + unlink /usr/bin/spark-dstore-patch + unlink /usr/local/bin/ss-apt-fast + unlink /usr/bin/aptss rm -rf /etc/aptss/ rm -rf /var/lib/aptss/ diff --git a/debian/spark-store.triggers b/debian/spark-store.triggers index 96a1351..739b708 100644 --- a/debian/spark-store.triggers +++ b/debian/spark-store.triggers @@ -1 +1,2 @@ interest-noawait /opt/apps +interest-noawait /usr/share/icons diff --git a/patchs/zinface-community-cmake-build-system.patch b/patchs/zinface-community-cmake-build-system.patch index 6bc736c..d0eed58 100644 --- a/patchs/zinface-community-cmake-build-system.patch +++ b/patchs/zinface-community-cmake-build-system.patch @@ -1,7 +1,7 @@ -From 2d49950909e0327b0ad72e008cb82ac97b7bae0e Mon Sep 17 00:00:00 2001 +From 2b0f5447a8c13fa63aed5286cf4b3bdbf2f04e46 Mon Sep 17 00:00:00 2001 From: zinface Date: Sun, 11 Dec 2022 22:27:23 +0800 -Subject: [PATCH 01/11] =?UTF-8?q?repo:=20=E4=B8=80=E6=AC=A1=E6=80=A7?= +Subject: [PATCH 01/12] =?UTF-8?q?repo:=20=E4=B8=80=E6=AC=A1=E6=80=A7?= =?UTF-8?q?=E5=AF=BC=E5=85=A5=20spark=20=E9=AA=A8=E6=9E=B6=E4=BB=A5?= =?UTF-8?q?=E5=8F=98=E4=B8=BA=20cmake=20=E6=9E=84=E5=BB=BA?= MIME-Version: 1.0 @@ -11,12 +11,12 @@ Content-Transfer-Encoding: 8bit TODO: 需处理 deb 安装脚本的问题 Signed-off-by: zinface --- - .gitignore | 34 +++ + .gitignore | 35 +++ CMakeLists.txt | 93 ++++++++ Makefile | 60 +++++ assets/spark.png | Bin 0 -> 4959 bytes cmake/DebPackageConfig.cmake | 327 +++++++++++++++++++++++++++ - cmake/SparkAppimageConfig.cmake | 133 +++++++++++ + cmake/SparkAppimageConfig.cmake | 132 +++++++++++ cmake/SparkBuildGraphviz.cmake | 8 + cmake/SparkDesktopMacros.cmake | 35 +++ cmake/SparkEnvConfig.cmake | 8 + @@ -26,13 +26,13 @@ Signed-off-by: zinface cmake/SparkFindQt6Config.cmake | 24 ++ cmake/SparkInstallMacrosConfig.cmake | 132 +++++++++++ cmake/SparkMacrosConfig.cmake | 129 +++++++++++ - cmake/SparkMacrosExtendConfig.cmake | 197 ++++++++++++++++ + cmake/SparkMacrosExtendConfig.cmake | 196 ++++++++++++++++ cmake/SparkTranslatorConfig.cmake | 27 +++ cmake/linuxdeployqt-help | 48 ++++ - cmake/package-deb.descript | 45 ++++ + cmake/package-deb.descript | 44 ++++ cmake/spark-appimage.desktop.in | 9 + cmake/spark-desktop.desktop.in | 11 + - 21 files changed, 1492 insertions(+) + 21 files changed, 1490 insertions(+) create mode 100644 CMakeLists.txt create mode 100644 Makefile create mode 100644 assets/spark.png @@ -55,13 +55,14 @@ Signed-off-by: zinface create mode 100644 cmake/spark-desktop.desktop.in diff --git a/.gitignore b/.gitignore -index e77dab8..5571870 100644 +index 21d239c..b55ce0c 100644 --- a/.gitignore +++ b/.gitignore -@@ -52,3 +52,37 @@ debian/files - debian/*.substvars - debian/spark-store +@@ -54,3 +54,38 @@ debian/spark-store + .vscode/* + src/spark-store ++ +# Ignore the build directory generated by the vsocde cmake extension +build/ +# Ignore the build directory generated by the vsocde clangd extension @@ -94,12 +95,11 @@ index e77dab8..5571870 100644 +build/ +# Ignore the build directory generated by the vsocde clangd extension +.cache -+# Ignore the make package/copytosource ++# Ignore the make package/copytosource +*.deb -\ No newline at end of file diff --git a/CMakeLists.txt b/CMakeLists.txt new file mode 100644 -index 0000000..5864b54 +index 0000000..51cc090 --- /dev/null +++ b/CMakeLists.txt @@ -0,0 +1,93 @@ @@ -151,9 +151,9 @@ index 0000000..5864b54 +target_link_qt5(spark-dstore-patch) + + -+# 安装主程序 spark-store 与 spark-dstore-patch -+spark_install_target(/opt/durapps/${PROJECT_NAME}/bin -+ ${PROJECT_NAME} ++# 
安装主程序 spark-store 与 spark-dstore-patch ++spark_install_target(/opt/durapps/${PROJECT_NAME}/bin ++ ${PROJECT_NAME} + spark-dstore-patch) + +# 安装 systemd 服务(Spark Store更新通知程序) @@ -175,7 +175,7 @@ index 0000000..5864b54 +# 安装 desktop 文件 +spark_install_file(/usr/share/applications + pkg/usr/share/applications/spark-store.desktop) -+ ++ +# 安装 icon 文件 +spark_install_file(/usr/share/icons/hicolor/scalable/apps + pkg/usr/share/icons/hicolor/scalable/apps/spark-store.svg) @@ -199,7 +199,7 @@ index 0000000..5864b54 \ No newline at end of file diff --git a/Makefile b/Makefile new file mode 100644 -index 0000000..2df9883 +index 0000000..334ead1 --- /dev/null +++ b/Makefile @@ -0,0 +1,60 @@ @@ -222,7 +222,7 @@ index 0000000..2df9883 +run: all + exec $(shell find build/ -maxdepth 1 -type f -executable | grep $(PROJECT_NAME)) + -+debug: ++debug: + mkdir -p build + cd build && cmake -DCMAKE_BUILD_TYPE=Debug .. + cd build && make -j$(CPUS) @@ -370,14 +370,14 @@ HcmV?d00001 diff --git a/cmake/DebPackageConfig.cmake b/cmake/DebPackageConfig.cmake new file mode 100644 -index 0000000..2ab24e7 +index 0000000..d0351ec --- /dev/null +++ b/cmake/DebPackageConfig.cmake @@ -0,0 +1,327 @@ +cmake_minimum_required(VERSION 3.0.0) + +# function(add_deb_package PACKAGE_NAME PACKAGE_VERSION PACKAGE_MAINTAINER PACKAGE_EMAIL PACKAGE_SHORT_DESCRIPTION PACKAGE_LONG_DESCRIPTION) -+ ++ +# endfunction(add_deb_package PACKAGE_NAME PACKAGE_VERSION PACKAGE_MAINTAINER PACKAGE_EMAIL PACKAGE_SHORT_DESCRIPTION PACKAGE_LONG_DESCRIPTION) + +# if(add_deb_package VALUE) set(Package ${VALUE} PARENT_SCOPE) endif(add_deb_package VALUE) @@ -495,7 +495,7 @@ index 0000000..2ab24e7 + else() + set(CPACK_DEBIAN_PACKAGE_VERSION "${_IN_VAL}" PARENT_SCOPE) + endif(_IN_VAL STREQUAL "auto") -+ ++ + message("--> 软件版本: ${_IN_VAL}") + endif(_Version EQUAL "0") + @@ -507,7 +507,7 @@ index 0000000..2ab24e7 + + find_str("${_IN_KEY}" "Architecture" _Architecture) + if(_Architecture EQUAL "0") -+ set(CPACK_DEBIAN_PACKAGE_ARCHITECTURE "${_IN_VAL}" PARENT_SCOPE) ++ set(CPACK_DEBIAN_PACKAGE_ARCHITECTURE "${_IN_VAL}" PARENT_SCOPE) + if(_IN_VAL STREQUAL "auto") + execute_process( + COMMAND dpkg --print-architecture @@ -518,7 +518,7 @@ index 0000000..2ab24e7 + endif(_IN_VAL STREQUAL "auto") + message("--> 软件架构: ${_IN_VAL}") + endif(_Architecture EQUAL "0") -+ ++ + find_str("${_IN_KEY}" "Priority" _Priority) + if(_Priority EQUAL "0") + set(CPACK_DEBIAN_PACKAGE_PRIORITY "${_IN_VAL}" PARENT_SCOPE) @@ -548,7 +548,7 @@ index 0000000..2ab24e7 + set(CPACK_DEBIAN_PACKAGE_RECOMMENDS "${_IN_VAL}" PARENT_SCOPE) + message("--> 软件建议: ${_IN_VAL}") + endif(_Recommends EQUAL "0") -+ ++ +endfunction(set_package_vars _IN_KEY _IN_VAL) + +# 定义一个自定义(add_package_descript)函数 @@ -564,7 +564,7 @@ index 0000000..2ab24e7 + message(FATAL_ERROR "!! 
Not Found Path: ${PACKAGE_DES_PATH}") + return() + endif(EXISTS ${IN_DES}) -+ ++ + file(READ ${PACKAGE_DES_PATH} DES_CONTENT) + trim_str("${DES_CONTENT}" DES_CONTENT) + @@ -638,7 +638,7 @@ index 0000000..2ab24e7 + string(TIMESTAMP BUILD_TIME "%Y%m%d") + set(CPACK_DEBIAN_PACKAGE_VERSION "${CPACK_DEBIAN_PACKAGE_VERSION}-${BUILD_TIME}") + endif("${CalVer}" STREQUAL "true") -+ ++ + + + ##################### deb file name ##################### @@ -646,7 +646,7 @@ index 0000000..2ab24e7 + set(_Version "${CPACK_DEBIAN_PACKAGE_VERSION}") + set(_Architecture "${CPACK_DEBIAN_PACKAGE_ARCHITECTURE}") + -+ set(_DebFileName ++ set(_DebFileName + "${_Package}_${_Version}_${_Architecture}${PACKAGE_SUFFIX}.deb" + ) + set(CPACK_DEBIAN_FILE_NAME ${_DebFileName}) @@ -703,10 +703,10 @@ index 0000000..2ab24e7 +# string(TIMESTAMP BUILD_TIME "%Y%m%d") diff --git a/cmake/SparkAppimageConfig.cmake b/cmake/SparkAppimageConfig.cmake new file mode 100644 -index 0000000..d80279c +index 0000000..45f4e25 --- /dev/null +++ b/cmake/SparkAppimageConfig.cmake -@@ -0,0 +1,133 @@ +@@ -0,0 +1,132 @@ +# export PATH=/usr/lib/x86_64-linux-gnu/qt5/bin:$PATH +# export LD_LIBRARY_PATH=/usr/lib/x86_64-linux-gnu:$LD_LIBRARY_PATH +# export QT_PLUGIN_PATH=/usr/lib/x86_64-linux-gnu/qt5/plugins:$QT_PLUGIN_PATH @@ -781,7 +781,7 @@ index 0000000..d80279c + file(REAL_PATH ${APPIMAGETOOL} APPIMAGETOOL_REAL_PATH) + endif(CMAKE_VERSION VERSION_LESS 3.19 AND NOT EXISTS ${LINUXDEPLOYQT}) + message("AppImage> Found APPIMAGETOOL Variable: ${LINUXDEPLOYQT_REAL_PATH}") -+ ++ + # do add_custome_target + make_directory(${APPIMAGE_OUTPUT}) + target_linuxdeploy() @@ -789,8 +789,8 @@ index 0000000..d80279c +endfunction(add_appimage) + +function(add_appimage_desktop) -+ configure_file(cmake/spark-appimage.desktop.in -+ ${CMAKE_BINARY_DIR}/spark-appimage.desktop @ONLY) ++ configure_file(cmake/spark-appimage.desktop.in ++ ${CMAKE_BINARY_DIR}/spark-appimage.desktop @ONLY) +endfunction(add_appimage_desktop) + +function(add_appimage_icon _ICON_PATH) @@ -800,11 +800,11 @@ index 0000000..d80279c + else() + file(COPY_FILE ${_ICON_PATH} ${CMAKE_BINARY_DIR}/spark-appimage.png) + endif(CMAKE_VERSION VERSION_LESS 3.21) -+endfunction(add_appimage_icon _ICON_PATH) ++endfunction(add_appimage_icon _ICON_PATH) + + + -+# 如果glic>=2.27,你就需要加上参数 -unsupported-allow-new-glibc (意思就是不再低版本发行版使用了) ++# 如果glic>=2.27,你就需要加上参数 -unsupported-allow-new-glibc (意思就是不再低版本发行版使用了) +# 或 -unsupported-bundle-everything(大概的意思是尝试兼容,实际测试,到其他发行版直接用不了了,有可能是发行版的原因,还是建议用前者,虽然放弃了低版本) + +# -unsupported-bundle-everything @@ -824,7 +824,7 @@ index 0000000..d80279c +# add_appimage_desktop() # 添加到 Appimage 中的默认desktop(使用来自 Spark 构建的 Desktop 构建中配置的信息(必须要求 spark-desktop)) +# add_appimage() # 应用对 Appimage 的构建 + -+# 2. 在 Makefile 进行构建目标构建 Appimage ++# 2. 在 Makefile 进行构建目标构建 Appimage +# Appimage 的构建流 -- +# 在 Makefile 进行构建目标构建 Appimage (要求提供工具的绝对路径,然后可依次进行linuxdeployqt, genrate-appimage) +# 来自于 https://github.com/probonopd/linuxdeployqt 的 linuxdeployqt @@ -839,7 +839,6 @@ index 0000000..d80279c +# genrate-appimage: +# cd build && cmake .. 
-DLINUXDEPLOYQT=$(LINUXDEPLOYQT) -DAPPIMAGETOOL=$(APPIMAGETOOL) +# cd build && make appimage -+ diff --git a/cmake/SparkBuildGraphviz.cmake b/cmake/SparkBuildGraphviz.cmake new file mode 100644 index 0000000..ce9dbc3 @@ -914,7 +913,7 @@ index 0000000..797faf4 \ No newline at end of file diff --git a/cmake/SparkFindDtkConfig.cmake b/cmake/SparkFindDtkConfig.cmake new file mode 100644 -index 0000000..278d0d1 +index 0000000..d1b2dfc --- /dev/null +++ b/cmake/SparkFindDtkConfig.cmake @@ -0,0 +1,11 @@ @@ -924,7 +923,7 @@ index 0000000..278d0d1 +find_package(Dtk COMPONENTS Core Widget Gui) + +function(target_link_dtk NAME) -+ target_link_libraries(${NAME} ++ target_link_libraries(${NAME} + ${DtkCore_LIBRARIES} + ${DtkWidget_LIBRARIES} + ${DtkGui_LIBRARIES}) @@ -946,7 +945,7 @@ index 0000000..a1b936c \ No newline at end of file diff --git a/cmake/SparkFindQt5Config.cmake b/cmake/SparkFindQt5Config.cmake new file mode 100644 -index 0000000..b56399f +index 0000000..6efade6 --- /dev/null +++ b/cmake/SparkFindQt5Config.cmake @@ -0,0 +1,154 @@ @@ -955,17 +954,17 @@ index 0000000..b56399f +find_package(Qt5 COMPONENTS Core Widgets Network Concurrent WebEngineWidgets Sql WebSockets REQUIRED) + +# function(target_link_qt5 NAME) -+# target_link_libraries(${NAME} ++# target_link_libraries(${NAME} +# Qt5::Core +# Qt5::Widgets +# Qt5::Network) +# endfunction(target_link_qt5 NAME) + -+# 使用 spark_add_link 生成 target_link_qt5 以替代上面内容 ++# 使用 spark_add_link 生成 target_link_qt5 以替代上面内容 +spark_add_link(qt5 Qt5::Core Qt5::Widgets Qt5::Network) + + -+# spark_add_link_qt5 ++# spark_add_link_qt5 +# 自定义宏 spark_add_link_qt5 以扩展 target_link_qt5_ 结构 + # _IN_NAME: 此宏使用嵌套宏 spark_add_link 时追加 名称 + # 同等于 spark_add_link(qt_ ${ARGN}) @@ -985,18 +984,18 @@ index 0000000..b56399f + # 特点: 任意长度参数 + # qt5_item: 为进行遍历后的单项,类似于 python3 中的 (for item in items:) + # 例如: qt5_item 为 Core: -+ # spark_add_link_qt5(${qt5_item} Qt5::${qt5_item}) -+ # 展开为 spark_add_link_qt5(Core Qt5::Core) ++ # spark_add_link_qt5(${qt5_item} Qt5::${qt5_item}) ++ # 展开为 spark_add_link_qt5(Core Qt5::Core) + # 展开为 spark_add_link(qt5_Core Qt5::Core) + # 展开为 spark_add_link(qt5_Core Qt5::Core) -+ # 特性: 增加 qt5_Core 转 qt5_core ++ # 特性: 增加 qt5_Core 转 qt5_core + # string(TOLOWER ) +macro(spark_add_links_qt5) + set(qt5_items ${ARGN}) + foreach(qt5_item IN LISTS qt5_items) + find_package(Qt5${qt5_item}) + spark_add_link_qt5(${qt5_item} Qt5::${qt5_item}) -+ ++ + string(TOLOWER "${qt5_item}" qt5_lower_item) + spark_add_link_qt5(${qt5_lower_item} Qt5::${qt5_item}) + message("add_target_link_qt5_${qt5_item} or add_target_link_qt5_${qt5_lower_item}") @@ -1107,7 +1106,7 @@ index 0000000..b56399f \ No newline at end of file diff --git a/cmake/SparkFindQt6Config.cmake b/cmake/SparkFindQt6Config.cmake new file mode 100644 -index 0000000..c69a1ca +index 0000000..dfd8917 --- /dev/null +++ b/cmake/SparkFindQt6Config.cmake @@ -0,0 +1,24 @@ @@ -1116,17 +1115,17 @@ index 0000000..c69a1ca +find_package(Qt6 COMPONENTS Core Widgets Network Concurrent) + +# function(target_link_qt6 NAME) -+# target_link_libraries(${NAME} ++# target_link_libraries(${NAME} +# Qt6::Core +# Qt6::Widgets +# Qt6::Network) +# endfunction(target_link_qt6 NAME) + -+# 使用 spark_add_link 生成 target_link_qt6 以替代上面内容 ++# 使用 spark_add_link 生成 target_link_qt6 以替代上面内容 +spark_add_link(qt6 Qt6::Core Qt6::Widgets Qt6::Network) + + -+# spark_add_link_qt6 ++# spark_add_link_qt6 +# 自定义宏 target_link_qt6 以扩展 target_link_qt6_ 结构 + # _IN_NAME: 此宏使用嵌套宏 spark_add_link 时追加 名称 + # 同等于 spark_add_link(qt_ ${ARGN}) @@ -1137,7 +1136,7 @@ index 
0000000..c69a1ca +# 使用 spark_add_link_qt6 生成 target_link_qt6_ 的宏 diff --git a/cmake/SparkInstallMacrosConfig.cmake b/cmake/SparkInstallMacrosConfig.cmake new file mode 100644 -index 0000000..bbb958a +index 0000000..bf906bf --- /dev/null +++ b/cmake/SparkInstallMacrosConfig.cmake @@ -0,0 +1,132 @@ @@ -1147,7 +1146,7 @@ index 0000000..bbb958a +# 可接受的值为: 安装路径 目标A +# 可接受的值为: 安装路径 目标A 目标B 目标C... +macro(spark_install_target INSTALL_TARGET_DIR INSTALL_TARGETS) -+ install(TARGETS ++ install(TARGETS + ${INSTALL_TARGETS} ${ARGN} + DESTINATION ${INSTALL_TARGET_DIR}) +endmacro(spark_install_target INSTALL_TARGET_DIR INSTALL_TARGETS) @@ -1157,7 +1156,7 @@ index 0000000..bbb958a +# 可接受的值为: 安装路径 文件A +# 可接受的值为: 安装路径 文件A 文件B 文件C... +macro(spark_install_file INSTALL_FILE_DIR INSTALL_FILE) -+ install(FILES ++ install(FILES + ${INSTALL_FILE} ${ARGN} + DESTINATION ${INSTALL_FILE_DIR}) +endmacro(spark_install_file INSTALL_FILE_DIR INSTALL_FILE) @@ -1167,7 +1166,7 @@ index 0000000..bbb958a +# 可接受的值为: 安装路径 文件A +# 可接受的值为: 安装路径 文件A 文件B 文件C... +macro(spark_install_program INSTALL_PROGRAM_DIR INSTALL_PROGRAM) -+ install(PROGRAMS ++ install(PROGRAMS + ${INSTALL_PROGRAM} ${ARGN} + DESTINATION ${INSTALL_PROGRAM_DIR}) +endmacro(spark_install_program INSTALL_PROGRAM_DIR INSTALL_PROGRAM) @@ -1217,7 +1216,7 @@ index 0000000..bbb958a + # spark_install_file(${INSTALL_DIRECTORY_DIR} ${item}) + endif(IS_DIRECTORY ${item}) + endforeach(item IN LISTS INSTALL_DIRECTORY_FIND_INDEX_SUBSTRING_FILE_GLOB_LIST) -+ ++ + # message(FATAL_ERROR " directory: ${INSTALL_DIRECTORY_FIND_INDEX_SUBSTRING_FILE_GLOB_LIST}") + # message(FATAL_ERROR " directory: ${INSTALL_DIRECTORY_FIND_INDEX_SUBSTRING_FILE_GLOB_LIST_LENGTH}") + @@ -1264,7 +1263,7 @@ index 0000000..bbb958a + + # include(GNUInstallDirs) + set(SPARK_INSTALL_CHANGE_LOG_DIR "/usr/share/doc/${PROJECT_NAME}/") -+ install(FILES ++ install(FILES + ${CMAKE_BINARY_DIR}/changelog.gz + debian/copyright + @@ -1275,7 +1274,7 @@ index 0000000..bbb958a +endmacro(spark_install_changelog CHANGE_LOG_FILE) diff --git a/cmake/SparkMacrosConfig.cmake b/cmake/SparkMacrosConfig.cmake new file mode 100644 -index 0000000..1f53882 +index 0000000..67d84e1 --- /dev/null +++ b/cmake/SparkMacrosConfig.cmake @@ -0,0 +1,129 @@ @@ -1289,12 +1288,12 @@ index 0000000..1f53882 +macro(spark_add_library _lib_name) + message("================ ${_lib_name} Library ================") + add_library(${_lib_name} ${ARGN}) -+ ++ + set(SRCS ${ARGN}) + foreach(item IN LISTS SRCS) + message(" -> ${item}") + endforeach(item IN LISTS SRCS) -+ ++ + function(target_link_${_lib_name} TARGET) + message("${_lib_name}") + target_link_libraries(${TARGET} ${_lib_name}) @@ -1315,7 +1314,7 @@ index 0000000..1f53882 + foreach(item IN LISTS ${_lib_name}_SOURCES) + message(" -> ${item}") + endforeach(item IN LISTS ${_lib_name}_SOURCES) -+ ++ + function(target_link_${_lib_name} TARGET) + # message("target_link_${_lib_name}") + message(" -> (include): ${_lib_path}") @@ -1351,7 +1350,7 @@ index 0000000..1f53882 + foreach(item IN LISTS ${_exec_name}_SOURCES) + message(" -> ${item}") + endforeach(item IN LISTS ${_exec_name}_SOURCES) -+ ++ + # function(target_link_${_exec_name} TARGET) + # message("target_link_${_lib_name}") + message(" -> (include): ${_exec_path}") @@ -1367,16 +1366,16 @@ index 0000000..1f53882 + # 并根据库名生成一个 target_link_ 函数 +macro(spark_find_library _prefix) + find_package(PkgConfig REQUIRED) -+ ++ + # libnotify + pkg_check_modules(${_prefix} ${ARGN}) + function(target_link_${_prefix} TARGET) -+ target_include_directories(${TARGET} 
PUBLIC ++ target_include_directories(${TARGET} PUBLIC + ${${_prefix}_INCLUDE_DIRS}) -+ target_link_libraries(${TARGET} ++ target_link_libraries(${TARGET} + ${${_prefix}_LIBRARIES}) + endfunction(target_link_${_prefix} TARGET) -+ ++ +endmacro(spark_find_library _prefix) + + @@ -1404,17 +1403,17 @@ index 0000000..1f53882 +macro(spark_add_link _IN_NAME) + function(target_link_${_IN_NAME} _NAME) + message("LINK ${_NAME} ${ARGN}") -+ target_link_libraries(${_NAME} ++ target_link_libraries(${_NAME} + ${ARGN}) + endfunction(target_link_${_IN_NAME} _NAME) +endmacro(spark_add_link _IN_NAME) \ No newline at end of file diff --git a/cmake/SparkMacrosExtendConfig.cmake b/cmake/SparkMacrosExtendConfig.cmake new file mode 100644 -index 0000000..985aaad +index 0000000..bad0620 --- /dev/null +++ b/cmake/SparkMacrosExtendConfig.cmake -@@ -0,0 +1,197 @@ +@@ -0,0 +1,196 @@ + +# find_plus +# 寻找 INVAl 传入的字符串,如果存在 + 字符将写入位置到 OUTVAL @@ -1458,7 +1457,7 @@ index 0000000..985aaad + # set(${OUTVALLEN} 0 PARENT_SCOPE) + + set(_tmps "") # 设置为空的 -+ ++ + # 寻找下一个 + 位置 + find_plus(${INVAL} RIGHT_PLUS) + @@ -1480,9 +1479,9 @@ index 0000000..985aaad + message("--> 传入的 INVAL: --> 需计算+位置 --> 右移: ${_PLUSINDEX} -> 内容: ${NewVal}") + # string(REPLACE "+" ";" _tmps "${NewVal}") + # list(LENGTH FFFFL FFFFLEN) -+ ++ + # message("--> 计算新的 + 位置: ${_PLUSINDEX} --> 后面的 NewVal: ${NewVal}") -+ ++ + # find_plus(${NewVal} _NextPlus) + # if(_NextPlus LESS 0) + # list(APPEND _tmps ${NewVal}) @@ -1496,7 +1495,7 @@ index 0000000..985aaad + # # endforeach(item) + # endif(_NextPlus LESS 0) + endif(RIGHT_PLUS LESS 0) -+ ++ + set(${OUTVAL} ${_tmps} PARENT_SCOPE) + list(LENGTH _tmps _tmps_len) + set(${OUTVALLEN} ${_tmps_len} PARENT_SCOPE) @@ -1530,8 +1529,8 @@ index 0000000..985aaad + string(SUBSTRING "${REALPATH}" ${LASTINDEX} ${REALPATH_LENGTH} REALNAME_Dependency) + + # 找 + 号下标,这是找+号的函数 -+ find_plus(${REALPATH} RIGHT_PLUS) -+ ++ find_plus(${REALPATH} RIGHT_PLUS) ++ + # 判断是否有找到 + 号下标,值为 -1 或 正整数 + if(RIGHT_PLUS LESS 0) # 小于0: 不存在 + 号 + set(REALNAME "${REALNAME_Dependency}") @@ -1543,22 +1542,22 @@ index 0000000..985aaad + target_link_qt5(${REALNAME}) + else() + message("---> 传入路径末尾/右移部分: --> ${REALNAME_Dependency} <-- 依赖+") -+ ++ + # 存在+号,将截取从 / 到 + 号之间的内容作为目标名称 + # 例如 src/unclassified/widgets/DocTypeListView+JsonDeploy + # ^(LASTINDEX) ^(RIGHT_PLUS) + # 将 RIGHT_PLUS - LASTINDEX 计算出 DocTypeListView 字符长度 + math(EXPR REALNAME_LENGTH "${RIGHT_PLUS}-${LASTINDEX}") -+ ++ + message("---> 计算传入路径末尾/右移部分: --> 位置: ${RIGHT_PLUS}") + # message("---> 计算传入路径末尾/右移部分: --> 长度: ${REALNAME_Dependency}") -+ ++ + # 目标名称为 DocTypeListView + # 依赖为 JsonDeploy + # set(REALNAME "") + string(SUBSTRING "${REALPATH}" 0 ${RIGHT_PLUS} _REALPATH_DIR) + string(SUBSTRING "${REALPATH}" ${LASTINDEX} ${REALNAME_LENGTH} REALNAME) -+ ++ + message("---> 计算传入路径末尾/右移部分: --> 库名: ${REALNAME}") + + string(SUBSTRING "${REALPATH}" ${RIGHT_PLUS} ${REALPATH_LENGTH} Dependency) @@ -1570,10 +1569,10 @@ index 0000000..985aaad + + + message("---> 构建 ${REALNAME} -> ${REALNAME} ${_REALPATH_DIR}") -+ ++ + spark_add_library_path(${REALNAME} ${_REALPATH_DIR}) + # target_link_qt5(${REALNAME}) # 使用依赖的依赖或许也不错 -+ ++ + target_include_directories(${REALNAME} PUBLIC ${_REALPATH_DIR}) + target_link_libraries(${REALNAME} ${dependencies}) + @@ -1585,7 +1584,7 @@ index 0000000..985aaad + +# spark_add_source_paths +# 将指定路径中的文件变成可用的源文件列表 -+# ++# +macro(spark_add_source_paths SOURCE_VARIABLE_NAME) + set(SOURCE_PATHS ${ARGN}) + set(${SOURCE_VARIABLE_NAME}_PATHS "") @@ -1603,7 +1602,7 @@ index 0000000..985aaad + # # 
message(" -> ${item}") + # list(APPEND ${SOURCE_VARIABLE_NAME} ${item}) + # endforeach(item IN LISTS HEADER_LIST) -+ ++ + file(GLOB UI_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${SOURCE_PATH}/*.ui) + foreach(item IN LISTS UI_LIST) + # message(" -> ${item}") @@ -1611,10 +1610,9 @@ index 0000000..985aaad + endforeach(item IN LISTS UI_LIST) + endforeach(SOURCE_PATH IN LISTS SOURCE_PATHS) +endmacro(spark_add_source_paths SOURCE_VARIABLE_NAME) -+ diff --git a/cmake/SparkTranslatorConfig.cmake b/cmake/SparkTranslatorConfig.cmake new file mode 100644 -index 0000000..272dd9d +index 0000000..5375fe3 --- /dev/null +++ b/cmake/SparkTranslatorConfig.cmake @@ -0,0 +1,27 @@ @@ -1629,7 +1627,7 @@ index 0000000..272dd9d + message("-> ${item}") +endforeach(item IN LISTS SPARK_TRANSLATIONS) + -+qt5_add_translation(SPARK_QM_TRANSLATIONS ++qt5_add_translation(SPARK_QM_TRANSLATIONS + ${SPARK_TRANSLATIONS}) + +message("translator(ts -> qm):") @@ -1647,7 +1645,7 @@ index 0000000..272dd9d + # 此宏比较稳定 diff --git a/cmake/linuxdeployqt-help b/cmake/linuxdeployqt-help new file mode 100644 -index 0000000..1b72fda +index 0000000..12ac506 --- /dev/null +++ b/cmake/linuxdeployqt-help @@ -0,0 +1,48 @@ @@ -1698,14 +1696,14 @@ index 0000000..1b72fda + +See the "Deploying Applications on Linux" topic in the +documentation for more information about deployment on Linux. -+zinface@zinface-PC:/tmp/tmp.5gmZKUqn9s$ ++zinface@zinface-PC:/tmp/tmp.5gmZKUqn9s$ \ No newline at end of file diff --git a/cmake/package-deb.descript b/cmake/package-deb.descript new file mode 100644 -index 0000000..c1c275e +index 0000000..2b485d1 --- /dev/null +++ b/cmake/package-deb.descript -@@ -0,0 +1,45 @@ +@@ -0,0 +1,44 @@ +# 注释行(使用方式) +# find_package(DebPackage PATHS ${CMAKE_SOURCE_DIR}) +# add_package_descript(cmake/package-deb.descript) @@ -1746,11 +1744,10 @@ index 0000000..c1c275e +# 软件包主页 +Homepage: https://www.spark-app.store/ +# 软件包建议 -+Recommends: ++Recommends: +# 软件包描述信息 +Descrition: Spark Store + A community powered app store, based on DTK. 
-+ diff --git a/cmake/spark-appimage.desktop.in b/cmake/spark-appimage.desktop.in new file mode 100644 index 0000000..228a84a @@ -1786,13 +1783,13 @@ index 0000000..0fa070b +# Generated from the DesktopGenerater component of the z-Tools toolkit \ No newline at end of file -- -2.20.1 +2.33.1 -From d2524f967848e6ba0d2c8a097183264d0d197d68 Mon Sep 17 00:00:00 2001 +From 4926bf79dce4b057c16338c21b2f50eb517a0e4f Mon Sep 17 00:00:00 2001 From: zinface Date: Sun, 11 Dec 2022 22:37:26 +0800 -Subject: [PATCH 02/11] =?UTF-8?q?repo:=20=E5=A4=84=E7=90=86=20deb=20?= +Subject: [PATCH 02/12] =?UTF-8?q?repo:=20=E5=A4=84=E7=90=86=20deb=20?= =?UTF-8?q?=E5=AE=89=E8=A3=85=E8=84=9A=E6=9C=AC=E7=9A=84=E9=97=AE=E9=A2=98?= =?UTF-8?q?=EF=BC=8C=E5=AE=8C=E6=88=90=20cmake=20=E5=8C=96=E6=9E=84?= =?UTF-8?q?=E5=BB=BA?= @@ -1807,7 +1804,7 @@ Signed-off-by: zinface 2 files changed, 19 insertions(+), 5 deletions(-) diff --git a/Makefile b/Makefile -index 2df9883..c673c76 100644 +index 334ead1..6fdc69b 100644 --- a/Makefile +++ b/Makefile @@ -27,7 +27,21 @@ release: @@ -1834,7 +1831,7 @@ index 2df9883..c673c76 100644 tree build/_CPack_Packages/Linux/DEB/$(PROJECT_NAME)-* dpkg-deb --contents build/$(PROJECT_NAME)_*$(CALENDAR)*$(SUFFIX).deb diff --git a/cmake/DebPackageConfig.cmake b/cmake/DebPackageConfig.cmake -index 2ab24e7..38ad2d7 100644 +index d0351ec..d88fd51 100644 --- a/cmake/DebPackageConfig.cmake +++ b/cmake/DebPackageConfig.cmake @@ -290,10 +290,10 @@ function(add_package_descript IN_DES) @@ -1853,13 +1850,13 @@ index 2ab24e7..38ad2d7 100644 # 设置为ON,以便使用 dpkg-shlibdeps 生成更好的包依赖列表。 -- -2.20.1 +2.33.1 -From 7cd97a939b63143921936b9db3978d14f18a4f33 Mon Sep 17 00:00:00 2001 +From 054db0da4a5b55db3bef8baec70538a10f6612bb Mon Sep 17 00:00:00 2001 From: zinface Date: Mon, 12 Dec 2022 00:50:01 +0800 -Subject: [PATCH 03/11] =?UTF-8?q?docs:=20=E6=B7=BB=E5=8A=A0=20Spark=20?= +Subject: [PATCH 03/12] =?UTF-8?q?docs:=20=E6=B7=BB=E5=8A=A0=20Spark=20?= =?UTF-8?q?=E6=9E=84=E5=BB=BA=20=E4=B8=8E=20CMake=20=E6=9E=84=E5=BB=BA?= =?UTF-8?q?=E7=B3=BB=E7=BB=9F=E9=A2=84=E8=A7=88=E6=96=87=E6=A1=A3?= MIME-Version: 1.0 @@ -1874,7 +1871,7 @@ Signed-off-by: zinface diff --git a/DOCS/spark-cmake-build-system.md b/DOCS/spark-cmake-build-system.md new file mode 100644 -index 0000000..a807009 +index 0000000..b72805e --- /dev/null +++ b/DOCS/spark-cmake-build-system.md @@ -0,0 +1,301 @@ @@ -1893,7 +1890,7 @@ index 0000000..a807009 +- 有关 `CMake` 与 `Spark` 之间的关系 + + 在进行 `CMake` 化构建时,我们摒弃了传统 `CMake` 语法,使用以 `Spark` 为代号进行一种可扩展的 `CMake` 构建模块设计。 -+ ++ + 以下是使用传统 `CMake` 进行构建一个简单的 `Qt` 应用程序: + + ```cmake @@ -1915,7 +1912,7 @@ index 0000000..a807009 + set(QRC_SOURCES "") + + add_executable(${PROJECT_NAME} "main.cpp" -+ "mainwindow.cpp" "mainwindow.h" ++ "mainwindow.cpp" "mainwindow.h" + ${QRC_SOURCES} + ) + target_link_libraries(${PROJECT_NAME} Qt5::Core Qt5::Widgets Qt5::Network) @@ -1959,7 +1956,7 @@ index 0000000..a807009 + ``` + + 这样一写,我们觉得这是一种非常独特的构建工作,旨在为一些 Linux Qt 项目进行构建时,苦于没有一个较好的构建模板设计,每次在编写一个新的项目时,只能从头开始写构建脚本的一种解决方式。 -+ ++ + 我们并不打算发明构建工具,只不过在研究打破 `CMake` 传统构建风格时,我发现了 `XMake`,当时这是我当时安装一个 `XMake` 版本。 + + ``` @@ -1967,27 +1964,27 @@ index 0000000..a807009 + xmake v2.6.2+202201121245, A cross-platform build utility based on Lua + Copyright (C) 2015-present Ruki Wang, tboox.org, xmake.io + _ -+ __ ___ __ __ __ _| | ______ -+ \ \/ / | \/ |/ _ | |/ / __ \ -+ > < | \__/ | /_| | < ___/ -+ /_/\_\_|_| |_|\__ \|_|\_\____| -+ by ruki, xmake.io -+ ++ __ ___ __ __ __ _| | ______ ++ \ \/ / | \/ |/ _ | |/ / __ \ ++ > < | \__/ | /_| | < ___/ ++ /_/\_\_|_| |_|\__ 
\|_|\_\____| ++ by ruki, xmake.io ++ + 👉 Manual: https://xmake.io/#/getting_started + 🙏 Donate: https://xmake.io/#/sponsor + ``` -+ ++ + 在准备尝试使用最适用于 `Linux Qt` 项目的构建方式,也为更快构建一个 `Linux` 应用项目来进行扩展构建。 -+ ++ + 我们最开始完成了简单的封装一个 `spark_` 开头的函数来定义简单的构建库目标、构建可执行目标。 + + 当时使用的是 `function`,并没有使用宏 `macro`,起初认为是无太大区别,后来都转用 `macro` 来定义了。 + + ```cmake + # SparkMacrosConfig.cmake -+ ++ + cmake_minimum_required(VERSION 3.5.1) -+ ++ + # 定义一些 macro 用于自动生成构建结构 + + # spark_add_library [files]... @@ -1996,12 +1993,12 @@ index 0000000..a807009 + macro(spark_add_library _lib_name) + message("================ ${_lib_name} Library ================") + add_library(${_lib_name} ${ARGN}) -+ ++ + set(SRCS ${ARGN}) + foreach(item IN LISTS SRCS) + message(" -> ${item}") + endforeach(item IN LISTS SRCS) -+ ++ + function(target_link_${_lib_name} TARGET) + message("${_lib_name}") + target_link_libraries(${TARGET} ${_lib_name}) @@ -2022,7 +2019,7 @@ index 0000000..a807009 + ``` + + 这样,我们就完成了一个简单的构建目标的方式,通过包装一个 `add_library` 我们可以达到相同的目的。 -+ ++ + 并为其创建一个 `target_link_` 开头的`function`来明确声明这个库目标被使用者给依赖。 + + ```cmake @@ -2033,7 +2030,7 @@ index 0000000..a807009 + + # target_link_qt5 中只定义了有限的几个核心组件: Qt5::Core Qt5::Widgets Qt5::Network + -+ spark_add_executable(helloworld ++ spark_add_executable(helloworld + main.cpp) + + target_link_qt5(helloworld) # 表示 helloworld 可执行目标依赖于 Qt5 @@ -2061,12 +2058,12 @@ index 0000000..a807009 + 其中,从基于指定的源代码构建库与可执行文件,发展到使用指定的路径来构建为一个模块。 + + ```cmake -+ # 构建一个 bigimage 库,它将依赖于 qt5 ++ # 构建一个 bigimage 库,它将依赖于 qt5 + spark_add_libraries_path(bigimage src/spark-widgets/bigimage) + target_link_qt5(bigimage) + -+ -+ # 构建一个 imageshow 库,它将依赖于 bigimage ++ ++ # 构建一个 imageshow 库,它将依赖于 bigimage + spark_add_libraries_path(imageshow src/spark-widgets/imageshow) + target_link_bigimage(imageshow) + @@ -2087,7 +2084,7 @@ index 0000000..a807009 + ``` + +- `Spark` 构建与 `DTK` -+ ++ + 我们在为基于 Deepin Tool Kit(DTK) 的应用程序添加了简单的扩展,使用以下内容即可使你的程序依赖于 `DTK` + + ```cmake @@ -2107,7 +2104,7 @@ index 0000000..a807009 + 我们在为基于 `CMakeLists.txt` 中使用的 `install` 指令进行了 `CPack` 打包扩展,因为我们不喜欢类似 `Makefile` 这种 `make install` 安装的方式。 + + 所以我们也增加了一个扩展模块 `DebPackageConfig.cmake`,因为它是早于 `Spark` 构建出现,所以并不为它进行 `Spark` 命名,它拥有一个模板配置,可以通过简单的填充包描述信息即可实现打包。 -+ ++ + 注意,它的最开始三行即是使用方式说明,通过(cv)复制粘贴到您的顶层构建脚本中,即可完成打包功能,更多的软件包打包设定功能仍在 `DebPackageConfig.cmake` 中预留被注释的部分。 + + 例如您想生成软件包依赖列表等,在其中 `SHLIBDEPS` 字样的部分已预留注释。 @@ -2115,7 +2112,7 @@ index 0000000..a807009 + 例如您想为软件包增加 `pre[inst|rm]、post[inst|rm]` 等脚本,在其中 `CONTROL` 字样的部分已预留注释。 + + 描述文件还为您专门提供了可选的自动化填充软件包名称、软件包版本、软件包架构等,而无需要每次更新描述文件。 -+ ++ + ```ini + # 注释行(使用方式) + # find_package(DebPackage PATHS ${CMAKE_SOURCE_DIR}) @@ -2157,7 +2154,7 @@ index 0000000..a807009 + # 软件包主页 + Homepage: https://www.spark-app.store/ + # 软件包建议 -+ Recommends: ++ Recommends: + # 软件包描述信息 + Descrition: Spark Store + A community powered app store, based on DTK. 
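# (An illustrative note, not part of the original example: with the automatic
# fields above, `Version: auto` takes the version from the CMake project,
# and `CalVer: true` appends a %Y%m%d build date to it, so a hypothetical
# 4.2.11 built on 2023-01-30 would be packaged as version 4.2.11-20230130.)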
@@ -2167,13 +2164,13 @@ index 0000000..a807009 +- 写在后面,有关 `Spark` 构建的起源与未来 + + `Spark` 构建真正意义上只是一个有趣的想法,并且为它付诸一定的实现。 -+ ++ + 我们拥抱过 qmake,我们也拥抱过 cmake。我们是混乱的 IDE 或是代码编辑器的忠实用户,就像是在 IDE 与 编辑器之间的战争从未停止过。 + + 在着手 `Spark` 构建之前,它就是一个想法,目的是为了尝试将星火应用商店从 `qmake` 构建转为 `cmake` 构建,它就像星星之火中的野火,它有自己的想法。而这个想法就是打破传统的构建方式,或尝试改造现有的构建模式。 + + 而这并没有为星火商店付出什么,甚至没有提交过任何 `bug fix`,只是一个因为喜欢安份但又不守已的试图破坏(改变)星火应用商店传统构建的疯狂的 `VSCode` 用户,事实上是一个 `CMake` 用户,因为他无法在 `VSCode` 中使用 `qmake` 增强 `VSCode` 的代码能力。 -+ ++ + 只能试图在一个已经发展了多年了项目上开始进行破坏(改造),将其转化为以 `cmake` 为主的构建,并在其它开源项目中寻找 `Spark` 的构建瓶颈以及拓展它疯狂的可扩展模块。 + + 在很久之后,这个想法终于在星火商店的 `4.0` 计划下开始正式实施,此时 `Spark` 构建已经为很多 `Linux Qt` 项目进行构建,包括非常复杂的构建探索,打破了一个又一个构建方式,最终完善了基本的构建模板。 @@ -2181,13 +2178,13 @@ index 0000000..a807009 + 现在,`Spark` 构建在强大的 `CMake` 扩展下增强了 `VSCode` 的代码编写能力,在绕了一大圈之后,终于回到了起源的地方,并开始了它的构建使命,为星火应用商店构建 `4.0` 以及未来的版本。 \ No newline at end of file -- -2.20.1 +2.33.1 -From bb128568ab7d1463c0002ffff470ad5c93364c99 Mon Sep 17 00:00:00 2001 +From 75aab8e3b58a250fce670f8e434bf76ad7866275 Mon Sep 17 00:00:00 2001 From: zinface Date: Thu, 15 Dec 2022 19:59:46 +0800 -Subject: [PATCH 04/11] =?UTF-8?q?repo:=20=E6=9B=B4=E6=96=B0=E7=94=A8?= +Subject: [PATCH 04/12] =?UTF-8?q?repo:=20=E6=9B=B4=E6=96=B0=E7=94=A8?= =?UTF-8?q?=E4=BA=8E=20Qt5=20Svg=20=E4=BE=9D=E8=B5=96=E7=9A=84=E6=9E=84?= =?UTF-8?q?=E5=BB=BA=E5=86=85=E5=AE=B9?= MIME-Version: 1.0 @@ -2200,7 +2197,7 @@ Content-Transfer-Encoding: 8bit 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt -index 5864b54..14530e1 100644 +index 51cc090..f3edc6c 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -32,6 +32,7 @@ spark_add_library_realpaths( @@ -2212,7 +2209,7 @@ index 5864b54..14530e1 100644 spark_add_executable_path(${PROJECT_NAME} src diff --git a/cmake/SparkFindQt5Config.cmake b/cmake/SparkFindQt5Config.cmake -index b56399f..67e29b6 100644 +index 6efade6..0300b3d 100644 --- a/cmake/SparkFindQt5Config.cmake +++ b/cmake/SparkFindQt5Config.cmake @@ -1,6 +1,6 @@ @@ -2222,7 +2219,7 @@ index b56399f..67e29b6 100644 +find_package(Qt5 COMPONENTS Core Widgets Network Concurrent WebEngineWidgets REQUIRED) # function(target_link_qt5 NAME) - # target_link_libraries(${NAME} + # target_link_libraries(${NAME} @@ -132,7 +132,7 @@ spark_add_links_qt5( # SerialPort # ServiceSupport @@ -2233,13 +2230,13 @@ index b56399f..67e29b6 100644 # ThemeSupport # UiPlugin -- -2.20.1 +2.33.1 -From 877a52f4b753f55e087a6c58794d690fabb6ddbb Mon Sep 17 00:00:00 2001 +From 0ae64f99cd269a06009818d0c553661e2fd5981b Mon Sep 17 00:00:00 2001 From: zinface Date: Mon, 19 Dec 2022 02:58:17 +0800 -Subject: [PATCH 05/11] =?UTF-8?q?repo:=20=E6=9B=B4=E6=96=B0=E7=94=A8?= +Subject: [PATCH 05/12] =?UTF-8?q?repo:=20=E6=9B=B4=E6=96=B0=E7=94=A8?= =?UTF-8?q?=E4=BA=8E=E6=94=AF=E6=8C=81=20BaseWidgetOpacity=20=E5=9F=BA?= =?UTF-8?q?=E7=A1=80=E7=B1=BB=E7=9A=84=E6=9E=84=E5=BB=BA?= MIME-Version: 1.0 @@ -2251,7 +2248,7 @@ Content-Transfer-Encoding: 8bit 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/CMakeLists.txt b/CMakeLists.txt -index 14530e1..b5e32d8 100644 +index f3edc6c..d469796 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -24,11 +24,13 @@ spark_add_library_realpaths( @@ -2280,13 +2277,13 @@ index 14530e1..b5e32d8 100644 spark_add_executable_path(spark-dstore-patch src/spark-dstore-patch) -- -2.20.1 +2.33.1 -From f89bb6a145f2a77fa0e1b55b3ad17ade69c53a69 Mon Sep 17 00:00:00 2001 +From 3815f39926a5761769b55922cb3471ffa6e60baf Mon Sep 17 00:00:00 2001 From: zinface Date: Mon, 30 Jan 2023 20:48:25 +0800 -Subject: [PATCH 06/11] 
=?UTF-8?q?spark:=20=E5=88=9B=E5=BB=BA=E6=96=B0?= +Subject: [PATCH 06/12] =?UTF-8?q?spark:=20=E5=88=9B=E5=BB=BA=E6=96=B0?= =?UTF-8?q?=E7=9A=84=E6=A8=A1=E5=9D=97=E7=94=A8=E4=BA=8E=20debian/changelo?= =?UTF-8?q?g=20=E7=9A=84=E7=89=88=E6=9C=AC=E5=8F=B7=E9=87=8D=E5=86=99?= =?UTF-8?q?=E8=A7=84=E5=88=99?= @@ -2306,7 +2303,7 @@ Content-Transfer-Encoding: 8bit create mode 100644 cmake/SparkDebianChangelogVersion.cmake diff --git a/CMakeLists.txt b/CMakeLists.txt -index b5e32d8..73221bc 100644 +index d469796..990c7be 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -11,6 +11,10 @@ include(cmake/SparkFindDtkConfig.cmake) # 提供了 target_link_dtk 用 @@ -2316,13 +2313,13 @@ index b5e32d8..73221bc 100644 +include(cmake/SparkDebianChangelogVersion.cmake)# 提供了 spark_debian_ 开头的宏进行进行覆盖 PROJECT_VERSION + +# 在开始之前,使用项目中提供的 debian/changelog 进行重写本构建系统的 PROJECT_VERSION -+spark_debian_changelog_override_version(debian/changelog) ++spark_debian_changelog_override_version(debian/changelog) # 资源文件路径 set(QRC_SOURCES "src/assets/assets.qrc") diff --git a/cmake/SparkDebianChangelogVersion.cmake b/cmake/SparkDebianChangelogVersion.cmake new file mode 100644 -index 0000000..9d8bca2 +index 0000000..3b3add4 --- /dev/null +++ b/cmake/SparkDebianChangelogVersion.cmake @@ -0,0 +1,58 @@ @@ -2339,7 +2336,7 @@ index 0000000..9d8bca2 + + # 首次判断,如果判断文件不存在,将尽可能的判断文件是存在的 + if(NOT EXISTS ${CHANGELOG_FILE_PATH}) -+ ++ + # 在 CMake v3.19 起,可以使用 file(REAL_PATH ) 进行获取 path 的绝对路径 + if(CMAKE_VERSION GREATER_EQUAL 3.19) + file(REAL_PATH ${CHANGELOG_FILE_PATH} CHANGELOG_FILE_ABSOLUTE_PATH) @@ -2385,13 +2382,13 @@ index 0000000..9d8bca2 + endif(CHANGELOG_FILE_EXISTS) +endmacro(spark_debian_changelog_override_version _CHANGELOG_FILE_PATH) -- -2.20.1 +2.33.1 -From b26bac7572ab7477ab4b94174fbdece759be8bc7 Mon Sep 17 00:00:00 2001 +From 0f8ec4935071280518a8466ce36fbbd84ed1cbb0 Mon Sep 17 00:00:00 2001 From: zinface Date: Mon, 30 Jan 2023 21:23:15 +0800 -Subject: [PATCH 07/11] =?UTF-8?q?spark:=20=E5=AF=B9=20SparkDebianChangelog?= +Subject: [PATCH 07/12] =?UTF-8?q?spark:=20=E5=AF=B9=20SparkDebianChangelog?= =?UTF-8?q?Version.cmake=20=E6=A8=A1=E5=9D=97=E7=9A=84=E6=89=A9=E5=B1=95?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 @@ -2405,7 +2402,7 @@ Content-Transfer-Encoding: 8bit 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile -index c673c76..81ca078 100644 +index 6fdc69b..a68d7d1 100644 --- a/Makefile +++ b/Makefile @@ -8,6 +8,17 @@ SUFFIX=_$(OSID)$(OSRELEASE) @@ -2445,7 +2442,7 @@ index c673c76..81ca078 100644 # 进入 qdebug 模式,在 deepin 中默认被禁用,可 env | grep QT 查看,并在 /etc/X11/Xsession.d/00deepin-dde-env 配置中已定义 # 1. 
禁止 qt 的 debug 打印: qt.*.debug=false diff --git a/cmake/SparkDebianChangelogVersion.cmake b/cmake/SparkDebianChangelogVersion.cmake -index 9d8bca2..65e1c16 100644 +index 3b3add4..05f9e52 100644 --- a/cmake/SparkDebianChangelogVersion.cmake +++ b/cmake/SparkDebianChangelogVersion.cmake @@ -54,5 +54,6 @@ macro(spark_debian_changelog_override_version _CHANGELOG_FILE_PATH) @@ -2456,13 +2453,13 @@ index 9d8bca2..65e1c16 100644 endif(CHANGELOG_FILE_EXISTS) endmacro(spark_debian_changelog_override_version _CHANGELOG_FILE_PATH) -- -2.20.1 +2.33.1 -From 26f00f79a2b8613477059beb6c2f492bc24a7bb5 Mon Sep 17 00:00:00 2001 +From 5455594cf1134a29dee32257e6d8d3d8b78c8fd6 Mon Sep 17 00:00:00 2001 From: zinface Date: Mon, 30 Jan 2023 23:17:24 +0800 -Subject: [PATCH 08/11] =?UTF-8?q?repo:=20=E5=A4=84=E7=90=86=20v4.2.2=20?= +Subject: [PATCH 08/12] =?UTF-8?q?repo:=20=E5=A4=84=E7=90=86=20v4.2.2=20?= =?UTF-8?q?=E7=89=88=E6=9C=AC=E6=97=B6=E6=9C=9F=E6=96=B0=E5=A2=9E=E7=9A=84?= =?UTF-8?q?=E5=AE=89=E8=A3=85=E6=96=87=E4=BB=B6?= MIME-Version: 1.0 @@ -2477,7 +2474,7 @@ pkg/usr/share/ssinstall/transhell 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/CMakeLists.txt b/CMakeLists.txt -index 73221bc..22a788d 100644 +index 990c7be..4e22685 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -86,6 +86,11 @@ spark_install_file(/usr/share/icons/hicolor/scalable/apps @@ -2493,7 +2490,7 @@ index 73221bc..22a788d 100644 spark_install_file(/usr/share/spark-store/translations ${SPARK_QM_TRANSLATIONS}) diff --git a/cmake/SparkInstallMacrosConfig.cmake b/cmake/SparkInstallMacrosConfig.cmake -index bbb958a..c7ea53b 100644 +index bf906bf..e1fd7a0 100644 --- a/cmake/SparkInstallMacrosConfig.cmake +++ b/cmake/SparkInstallMacrosConfig.cmake @@ -79,7 +79,7 @@ macro(spark_install_directory INSTALL_DIRECTORY_DIR INSTALL_DIRECOTRY) @@ -2506,13 +2503,13 @@ index bbb958a..c7ea53b 100644 install(DIRECTORY ${INSTALL_DIRECOTRY} ${ARGN} -- -2.20.1 +2.33.1 -From e4401b16037cb3b1b10c59f4c7a0fa3c2cd563b3 Mon Sep 17 00:00:00 2001 +From e4e6ca8b83f86e50ab32582a00b85bbe7057f454 Mon Sep 17 00:00:00 2001 From: zinface Date: Mon, 30 Jan 2023 23:18:44 +0800 -Subject: [PATCH 09/11] =?UTF-8?q?spark:=20=E5=AF=B9=20SparkTranslatorConfi?= +Subject: [PATCH 09/12] =?UTF-8?q?spark:=20=E5=AF=B9=20SparkTranslatorConfi?= =?UTF-8?q?g.cmake=20=E6=A8=A1=E5=9D=97=E7=9A=84=E6=89=A9=E5=B1=95?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 @@ -2525,11 +2522,11 @@ Content-Transfer-Encoding: 8bit 1 file changed, 5 insertions(+) diff --git a/cmake/SparkTranslatorConfig.cmake b/cmake/SparkTranslatorConfig.cmake -index 272dd9d..689e70d 100644 +index 5375fe3..46de519 100644 --- a/cmake/SparkTranslatorConfig.cmake +++ b/cmake/SparkTranslatorConfig.cmake @@ -12,6 +12,11 @@ endforeach(item IN LISTS SPARK_TRANSLATIONS) - qt5_add_translation(SPARK_QM_TRANSLATIONS + qt5_add_translation(SPARK_QM_TRANSLATIONS ${SPARK_TRANSLATIONS}) +file(WRITE ${CMAKE_BINARY_DIR}/SPARK_QM_TRANSLATIONS "") @@ -2541,13 +2538,13 @@ index 272dd9d..689e70d 100644 foreach(item IN LISTS SPARK_QM_TRANSLATIONS) message("-> ${item}") -- -2.20.1 +2.33.1 -From 2d8046099776e3909359ce49818ae28378af930c Mon Sep 17 00:00:00 2001 +From a0eb885925c78b8966c46540a993f4cc6983c4c6 Mon Sep 17 00:00:00 2001 From: zinface Date: Mon, 30 Jan 2023 23:20:12 +0800 -Subject: [PATCH 10/11] =?UTF-8?q?make:=20=E5=A4=84=E7=90=86=E9=BB=98?= +Subject: [PATCH 10/12] =?UTF-8?q?make:=20=E5=A4=84=E7=90=86=E9=BB=98?= =?UTF-8?q?=E8=AE=A4=20make=20=E7=9B=AE=E6=A0=87=E4=B8=BA=20override-versi?= 
=?UTF-8?q?on=20=E7=9A=84=E9=97=AE=E9=A2=98?= MIME-Version: 1.0 @@ -2559,7 +2556,7 @@ Content-Transfer-Encoding: 8bit 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/Makefile b/Makefile -index 81ca078..076502c 100644 +index a68d7d1..ba45029 100644 --- a/Makefile +++ b/Makefile @@ -12,11 +12,6 @@ PROJECT_VERSION=* @@ -2592,13 +2589,13 @@ index 81ca078..076502c 100644 +endif \ No newline at end of file -- -2.20.1 +2.33.1 -From 043e6fb78e2bcb1a6fecefe2ba9aceb9df84e96e Mon Sep 17 00:00:00 2001 +From a22611edeb5706aa6b574b508f29b25f599346f5 Mon Sep 17 00:00:00 2001 From: zinface Date: Sun, 5 Feb 2023 16:09:40 +0800 -Subject: [PATCH 11/11] =?UTF-8?q?spark:=20=E5=A4=84=E7=90=86=E7=89=88?= +Subject: [PATCH 11/12] =?UTF-8?q?spark:=20=E5=A4=84=E7=90=86=E7=89=88?= =?UTF-8?q?=E6=9C=AC=E5=8F=B7=204.2.3~test1=20=E6=97=B6=E9=BB=98=E8=AE=A4?= =?UTF-8?q?=E7=9A=84=E6=88=AA=E5=8F=96=E5=AD=97=E7=AC=A6=E6=95=B0=E8=BF=87?= =?UTF-8?q?=E7=9F=AD=E9=97=AE=E9=A2=98?= @@ -2611,7 +2608,7 @@ Content-Transfer-Encoding: 8bit 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/cmake/SparkDebianChangelogVersion.cmake b/cmake/SparkDebianChangelogVersion.cmake -index 65e1c16..3ed5fa2 100644 +index 05f9e52..ee2f339 100644 --- a/cmake/SparkDebianChangelogVersion.cmake +++ b/cmake/SparkDebianChangelogVersion.cmake @@ -40,7 +40,8 @@ macro(spark_debian_changelog_override_version _CHANGELOG_FILE_PATH) @@ -2625,5 +2622,2298 @@ index 65e1c16..3ed5fa2 100644 string(FIND ${CHANGELOG_CONTENT} "(" V_PRE) # +1 to V_BEGIN string(FIND ${CHANGELOG_CONTENT} ")" V_END) -- -2.20.1 +2.33.1 + + +From 64d92ae46637c672842a7734327cbe0cb4f89ae1 Mon Sep 17 00:00:00 2001 +From: zinface +Date: Sat, 3 Feb 2024 02:38:06 +0800 +Subject: [PATCH 12/12] =?UTF-8?q?repo:=20=E4=B8=80=E6=AC=A1=E6=80=A7?= + =?UTF-8?q?=E5=A4=84=E7=90=86=204.2.3=20-4.2.10=20=E6=97=B6=E6=9C=9F?= + =?UTF-8?q?=E7=9A=84=E8=B7=A8=E8=B6=8A=E5=BC=8F=E6=9E=84=E5=BB=BA=E4=B8=8E?= + =?UTF-8?q?=E5=85=A8=E9=87=8F=E7=BA=A7=E6=9B=B4=E6=96=B0?= +MIME-Version: 1.0 +Content-Type: text/plain; charset=UTF-8 +Content-Transfer-Encoding: 8bit + +--- + CMakeLists.txt | 72 ++- + assets/spark.png | Bin 4959 -> 8822 bytes + cmake/SparkAppimageConfig.cmake | 210 ++++--- + ...nfig.cmake => SparkDebPackageConfig.cmake} | 41 +- + cmake/SparkDebianChangelogVersion.cmake | 6 +- + cmake/SparkDesktopMacros.cmake | 43 +- + cmake/SparkEnvConfig.cmake | 18 +- + cmake/SparkFindDtkConfig.cmake | 2 +- + cmake/SparkFindQt5Config.cmake | 13 +- + cmake/SparkFindQt6Config.cmake | 110 +++- + cmake/SparkMacrosConfig.cmake | 348 +++++++++-- + cmake/SparkMacrosExtendConfig.cmake | 547 +++++++++++++----- + cmake/SparkTranslatorConfig.cmake | 77 ++- + cmake/linuxdeployqt-help | 2 +- + ...sktop.in => spark-appimage.desktop.in.txt} | 2 +- + ...eb.descript => spark-deb-package.descript} | 8 +- + ...esktop.in => spark-desktop.desktop.in.txt} | 4 +- + 17 files changed, 1127 insertions(+), 376 deletions(-) + rename cmake/{DebPackageConfig.cmake => SparkDebPackageConfig.cmake} (90%) + rename cmake/{spark-appimage.desktop.in => spark-appimage.desktop.in.txt} (83%) + rename cmake/{package-deb.descript => spark-deb-package.descript} (91%) + rename cmake/{spark-desktop.desktop.in => spark-desktop.desktop.in.txt} (88%) + +diff --git a/CMakeLists.txt b/CMakeLists.txt +index 4e22685..90fc62d 100644 +--- a/CMakeLists.txt ++++ b/CMakeLists.txt +@@ -15,48 +15,53 @@ include(cmake/SparkDebianChangelogVersion.cmake)# 提供了 spark_debian_ 开头 + + # 在开始之前,使用项目中提供的 debian/changelog 进行重写本构建系统的 PROJECT_VERSION + 
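# (Illustrative sketch of the override, assuming the 4.2.11 entry shown in
# this repository's debian/changelog: the macro reads the first changelog
# line, e.g.
#   spark-store (4.2.11) stable; urgency=medium
# extracts the text between "(" and ")" as the version string, and overrides
# PROJECT_VERSION with it, so binaries and packaging share one version.)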
spark_debian_changelog_override_version(debian/changelog) ++# 使用 git 获取当前分支名称 ++execute_process(COMMAND ++ git symbolic-ref --short -q HEAD ++ WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} ++ OUTPUT_STRIP_TRAILING_WHITESPACE ++ OUTPUT_VARIABLE PROJECT_BRANCH) ++ ++spark_debug_message("APP_BRANCH: ${PROJECT_BRANCH}") ++spark_debug_message("APP_VERSION: ${PROJECT_VERSION}") ++add_compile_definitions(APP_BRANCH="${PROJECT_BRANCH}") ++add_compile_definitions(APP_VERSION="${PROJECT_VERSION}") + + # 资源文件路径 + set(QRC_SOURCES "src/assets/assets.qrc") + +-include_directories(src) ++translator_qt(SPARK_QM_TRANSLATIONS ${CMAKE_SOURCE_DIR}/translations/*.ts) ++spark_debug_message("SPARK_QM_TRANSLATIONS: ${SPARK_QM_TRANSLATIONS}") + +-# 基于传入的项进行构建 +-# 可接受的值为: 路径列表 +-# 可接受的值为: 路径列表+依赖库A+依赖库B +-spark_add_library_realpaths( ++# spark_add_executable_path [files ... paths] ++# 构建一个可执行程序,基于指定的路径 ++spark_add_executable_path(${PROJECT_NAME} src + src/dbus +- src/utils+dbus +- src/backend+utils ++ src/utils ++ src/backend + src/widgets/base +- src/widgets/common+backend +- src/widgets+common +- src/pages+widgets +-) +- +-target_link_dtk(base) +-target_link_qt5_dbus(dbus) +-target_link_qt5_Concurrent(common) +-target_link_qt5_Concurrent(backend) +-target_link_qt5_Svg(common) +-target_link_qt5_WebEngineWidgets(common) +- +-spark_add_executable_path(${PROJECT_NAME} src ++ src/widgets/common ++ src/widgets ++ src/pages + ${QRC_SOURCES} ${SPARK_QM_TRANSLATIONS} + ) +-target_link_base(${PROJECT_NAME}) +-target_link_dbus(${PROJECT_NAME}) +-target_link_pages(${PROJECT_NAME}) ++target_link_qt5(${PROJECT_NAME}) ++target_link_dtk(${PROJECT_NAME}) ++target_link_qt5_svg(${PROJECT_NAME}) ++target_link_qt5_dbus(${PROJECT_NAME}) ++target_link_qt5_concurrent(${PROJECT_NAME}) ++target_link_qt5_WebEngineWidgets(${PROJECT_NAME}) + + +-spark_add_executable_path(spark-dstore-patch src/spark-dstore-patch) +-target_link_qt5(spark-dstore-patch) ++# spark_add_executable_path(spark-dstore-patch src/spark-dstore-patch) ++# target_link_qt5(spark-dstore-patch) + + + # 安装主程序 spark-store 与 spark-dstore-patch + spark_install_target(/opt/durapps/${PROJECT_NAME}/bin + ${PROJECT_NAME} +- spark-dstore-patch) ++ # spark-dstore-patch ++) + + # 安装 systemd 服务(Spark Store更新通知程序) + spark_install_file(/usr/lib/systemd/system/ +@@ -76,16 +81,23 @@ spark_install_file(/usr/share/bash-completion/completions + + # 安装 desktop 文件 + spark_install_file(/usr/share/applications +- pkg/usr/share/applications/spark-store.desktop) ++ pkg/usr/share/applications/spark-store.desktop ++ pkg/usr/share/applications/open-me-in-terminal.desktop) + + # 安装 icon 文件 + spark_install_file(/usr/share/icons/hicolor/scalable/apps +- pkg/usr/share/icons/hicolor/scalable/apps/spark-store.svg) ++ pkg/usr/share/icons/hicolor/scalable/apps/spark-store.svg ++ pkg/usr/share/icons/hicolor/scalable/apps/open-me-in-terminal.png) + + # 安装什么脚本? + spark_install_program(/tmp/spark-store-install + pkg/tmp/spark-store-install/feedback.sh) + ++# 安装什么翻译? Since: v4.3.10 ++spark_install_file(/usr/share/aptss/transhell ++ pkg/usr/share/aptss/transhell/aptss_en_US.transhell ++ pkg/usr/share/aptss/transhell/aptss_zh_CN.transhell) ++ + # 安装什么翻译? 
Since: v4.2.2 + spark_install_directory(/usr/share/ssinstall + pkg/usr/share/ssinstall/transhell +@@ -101,5 +113,5 @@ spark_install_changelog(${CMAKE_SOURCE_DIR}/debian/changelog) + include(cmake/SparkBuildGraphviz.cmake) + + # 注释行(使用方式) +-find_package(DebPackage PATHS ${CMAKE_SOURCE_DIR}) +-add_package_descript(cmake/package-deb.descript) +\ No newline at end of file ++find_package(SparkDebPackage PATHS ${CMAKE_SOURCE_DIR}) ++add_package_descript(cmake/spark-deb-package.descript) +\ No newline at end of file +diff --git a/assets/spark.png b/assets/spark.png +index 544e2c7cff5f70894e27a7a717d4a62120630b7a..bac324f1a8a025c65ea260274ebf078f6f52ce39 100644 +GIT binary patch +literal 8822 +zcmX9^2RK{r7dB(mUZq0SrdladTkTaNwivagVzfqVwJBO^6QV|iwu(ya+M9~nwOc!4 +z)TXrl@%uk{ZtnBk``vTT_nvd!bH4N41c-qaE%hyGA|fJM9c^`E!ny4Fpac;9{n+%j +z2`7N#V=Z+eVj?dhBP4(|o^XZgxi-|7i0D4w^+6n1`pBPfk-|?$UxQ+UhJu9rhJX5} +zNWvu!KMhMiwP&85E?#~_YCbLwelE^D0d9Uzd9-x&Ar_HzH;IUNh;-CdOao_kP;PJd +z*TUA{o(@-hOu^xB1splt>-I8IJeo`-acVsFZpU0)0YzO*j=LOxDq2(I4ud5R7wDh4nwcXk%*+WtSHl)Y +zGGeuHvs}Z!2UiMvS66IMtC8Y1q#YR%MY}9TDH*c&cSqy7)9HNE>B_H2e&Nz;PtJVb +z4GQh6VsPVXhjVc_s}&2nnLlB00j_Lzz!N@GJ6~EN@|N&s*KY@bni6s?mX)Fv+R+SqyAFZ+O%*Jke0xX +zePXNd{Li;b_Y2K*OHsB5@b_mr4%@GRk9X5v(#|oW#7z1#RE-dNrPki+2wztOmF|<;Y>|UGajWUV!F_6*AlR=->qZAN2bxs;~r!qF_3(C{#lye +zF`I^AW0}!FL`6)nt420!N +zu63Zd%5Zeq$U8gK55~Txwbg8(y%nP%g}=$qmdi?MAF&6mtcub%%qSHEhj_>=N7x9` +zxHl@l*?YMiBc&i=-cwLnjp$-d)*$|SCWqIM9UDWkjKo|WpvsBQZS2RspLd>d9&bXE +z&^3B@hFB84?1Nu$z{GEmW)855(y}bIO6?^1CiOK8%hr&S%Q>-jR_N$RCD7#|~P2LSs#IJE^ezRP3PbLE7s+uPJ&J$kvXSsq& +zP%?JkA*K$^cyjA8d +zP*$z3oLcLNz&BC&i@C?cX*|NjX+ANNH(}ke?>FUYG(H`T-Q(Hnbr?J?QPTmxXi26`ez6EZcy=63h*C&TY&qDWeidm&1X%S4H +zRKTd)nc57N1GVfBA{f&pm^jyl7%1Fhq@e0O0lsU%n|6DZ6_L(Yr8ZpeZZs|+ltcX3 +zLRWP|c+?m4GOj8z7Q8C)b<%1w5zDfNz%@~%)q`vZ&P8Wl6-FQtP +z_FljGtxH`8LbW9_RN3Y=-(R#?AI%>e7k(vmL9(q|OnL7TfDP{`{$qi-9Mt9Rqe7mG((Wkic +zOM;*;qhR4vfKQ_R+uL4Xwc!JLqfK4n{5jUPcZmg8mDWcd&V^D7?c1Hy1cW;>aX}ud +z2R{we{Hkatz&@nIdSU>ljr6~m(wkU8|HvShSAPG +z*}g4Jx|>Rs% +zw+=`I;m#=>ieQSQ8dNF|?Yh_8K9hnm)$`X+m4Lu~sm;S3!xf_N)-f7z>{)(=$vsnN +z6nOJoyi0TB4?s7{3-s-N<#PJac!L6}H`pH&xs`;`d<`{4tgTex{qB?$odlm6QiM9bT{bv+y|;| +z;W2`zF-g^WaAIzJgqYDrzbA9_2@1)ub6*xq!GEyVmpcGVgf@cVO<TBu}34dP~O1yIpvvKgI$2hl;JDF6>;dEuO~8Odhy#R}oTMauxksv(e36LvWAtWGQ%o`kI;aPYC +zW9jRe@#!0?67}j?qSx?k*#cAq1xW{FmLD{(9yH#?RtKmzuQcUGk-V;jh05sH`Kwg)2}FxoN{df`=3v7!K7rfXs8H!QO37H8G|Vbh-(LmxfIvSHHh0 +zF{^eDbdZzR4U;~iYWQF$$pFuKt|H=BOXM}z%0pm`D+aS*CRO`6V4Oo;c=)B#Rmi>| +z>#%aAF!0%TAyW&Z!=9hzGd5Gu=z_Lvr-X#^v2iO9A|0u);YqAF$wzi|n5#T$E?sRhHXsK^9N6q-px!l;(3C+{)+yde}_ +z*uMUCWodYv72ZTeovA(rEvXt8QoM4S*qnh+ag@sp0d%{K-Vh!Oa+4KhPrcOfZ=#*=BFk=_>%dgyRQ4ZYX5YTTtj) +zBvHIF$=KvDotpXFa{OsoIQw0x_X6E@I9!^;-b1l+V03gNMF?2ujcD+0idB7RGgN4& +zx2*qlNgN<;SvVt#w-@1ds{P#z9=s?ItGJdJ$`veFtUcn7!B+YSG`{seeT;>pI1LOGN?MhHJ*FblFQR8t{D9QQ}g0!9>!4EgP3o) +zb3FYf297_kM9|E^{^5GQZ?B2?%cZ{EEoGb08P97UE0G35?L^I^1Ig!6dfD188XWiV +z)I%EY(aOvn;RO<_a49_fas5(HheFqL(Hq`%d5EdPjQ}4pZ<&fC4!H%2D>&^V^<4wr%D@IsM5Zf){WjqjP#D3X +z4W@9``R+3$Q@%al6C@@k#9AIwgUqRygX}x$!*>X`bvWgG@|@fhlWdG=Gxrlnz%A00 +zXhQu#`ZoRG`(b^T*2qbXfz?Q=q=}4=hNP-X=oaJl2Y9@vo6QM};!uPQ^ne*}9oj`Y_Au=+sFl*)Bb%IY?x) +z(pro_E>TK?8+}U=f`8-~?XQWf>i=RUK>7p^D!e5&k^Geo9L@lTXFaLPLx@JY67u2s +zU&8z{E7I>*x^(E>V7ikkt6!XmEK{|05B7qT)nMgnH-=lvVD8Wj%~r~-WV|NsO?kP| +zc&d>7oMS~v4c=*a2>;zdi_ycZC*v5S +zsR6~x7X)KOt%UKO3P8Ex|BU>~an2|MJaF%KyokqRAabGD;BwC +z$QE0?F26j%DRWvdj(IZm9u8oeuE_)N@d;qdjWIT%Mj-u2&(F!vo|}$h%0hOJYp|h4 +zJ!6_|4il?ev)yMg$Aw;J3&y7aHY;fCdG83_@AND)nZC8IcT>9AUk3!(=> +z$L+3|7!v@lRjvOr4A^_Xp(ByNM(f`AEw9$*O4JXQa*XydMIRzt1y 
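
A minimal sketch of how the reworked macros in the following file are meant to be driven
from a consuming CMakeLists.txt. The target name, icon path, and desktop fields are
illustrative assumptions, not part of this patch; the call order follows the usage notes
kept at the end of the file:

    include(cmake/SparkDesktopMacros.cmake)
    include(cmake/SparkAppimageConfig.cmake)

    add_executable(demo-app main.cpp)

    # Desktop metadata; add_appimage_desktop() renders it through the
    # cmake/spark-appimage.desktop.in.txt template.
    spark_desktop_macros("demo-app" "演示程序" "A demo application" "Application"
        "/opt/apps/demo-app/demo-app" "/opt/apps/demo-app/demo-app.png" "Utility")

    add_appimage_icon(assets/spark.png)  # staged as appimage/default.png
    add_appimage_desktop()               # generates appimage/default.desktop
    add_appimage_target(demo-app)        # defines the linuxdeploy / appimage targets

    # From the build directory, with both tools fetched beforehand:
    #   cmake .. -DLINUXDEPLOYQT=/path/to/linuxdeployqt-continuous-x86_64.AppImage \
    #            -DAPPIMAGETOOL=/path/to/appimagetool-x86_64.AppImage
    #   make linuxdeploy && make appimage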
+
+diff --git a/cmake/SparkAppimageConfig.cmake b/cmake/SparkAppimageConfig.cmake
+index 45f4e25..7384b47 100644
+--- a/cmake/SparkAppimageConfig.cmake
++++ b/cmake/SparkAppimageConfig.cmake
+@@ -1,62 +1,99 @@
+-# export PATH=/usr/lib/x86_64-linux-gnu/qt5/bin:$PATH
+-# export LD_LIBRARY_PATH=/usr/lib/x86_64-linux-gnu:$LD_LIBRARY_PATH
+-# export QT_PLUGIN_PATH=/usr/lib/x86_64-linux-gnu/qt5/plugins:$QT_PLUGIN_PATH
+-# export QML2_IMPORT_PATH=/usr/lib/x86_64-linux-gnu/qt5/qml:$QML2_IMPORT_PATH
+-
+-# export PATH=/usr/lib/x86_64-linux-gnu/qt5/bin:$PATH
+-# ~/linuxdeployqt-continuous-x86_64.AppImage spark-store-submitter -appimage
+-# cd ..
+-# ~/appimagetool-x86_64.AppImage appimage/
+-
+-# LINUXDEPLOYQT=/home/zinface/linuxdeployqt-continuous-x86_64.AppImage
+-# APPIMAGETOOL=/home/zinface/appimagetool-x86_64.AppImage
+-
+-# if ()
+-set(APPIMAGE_OUTPUT "${CMAKE_BINARY_DIR}/appimage")
+-set(APPIMAGE_ICON "${APPIMAGE_OUTPUT}/default.png")
+-set(APPIMAGE_DESTKOP "${APPIMAGE_OUTPUT}/default.desktop")
+-# set(LINUXDEPLOYQT)
+-# set(APPIMAGETOOL)
+-
+-function(execute_linuxdeploy _PATH)
+- execute_process(COMMAND ${LINUXDEPLOYQT}
+- WORKING_DIRECTORY "${APPIMAGE_OUTPUT}"
+- )
+-endfunction(execute_linuxdeploy _PATH)
+-
+-function(target_linuxdeploy)
+- add_custom_target(linuxdeploy pwd
+- BYPRODUCTS appimage
+- COMMAND cp ../${PROJECT_NAME} .
+- COMMAND "${LINUXDEPLOYQT}" ${PROJECT_NAME} -appimage -unsupported-allow-new-glibc -verbose=3 -no-strip|| true
+- COMMAND cp ../spark-appimage.desktop default.desktop
+- COMMAND cp ../spark-appimage.png default.png
+- WORKING_DIRECTORY "${APPIMAGE_OUTPUT}")
+-endfunction(target_linuxdeploy)
++# SparkAppimageConfig.cmake
++
++# 1. 在使用时通过准备默认的 icon 与 SparkDesktopConfig.cmake 在生成 desktop 文件
++ # 时进行借用 desktop 文件中的基本描述完成 spark-appimage.desktop 文件的生成,
++ # 并在使用 add_appimage 时完成所有逻辑判断与目标的定义
++ # add_appimage_icon
++ # add_appimage_desktop
++ # add_appimage
++
++# 2. 在 add_appimage 中,我们判断了是否为 cmake 提供了 LINUXDEPLOYQT 宏,
++ # 并获取此工具的真实路径。并继续判断了 APPIMAGETOOL 宏与该工具的真实路径。
++ # 然后,创建一个目录,用于即将进行的 Appimage 打包。
++
++ # 通过调用 target_linuxdeploy() 来完成 linuxdeploy 的目标创建
++ # 通过调用 target_appimage() 来完成 appimage 的目标创建
++
++# 3. 对于 linuxdeploy 目标,大致上就是通过执行 linuxdeployqt 命令与 -appimage
++ # 参数来创建可用于 Appimage 打包的内容结构,并且使用一些参数来处理相关库的依赖。
++ # 其次就是,将 spark-appimage.desktop 复制为 default.desktop
++ # 另一个就是 spark-appimage.png 复制为 default.png
++
++# 4. 对于 appimage 目标,大致上就是通过执行 appimagetool 命令将准备好打包的目录
++ # 结构进行打包为 Appimage 可执行文件,其中为了避免可能缺失的文件,重复了对
++ # default.desktop 文件与 default.png 文件的生成。
++ # 这是一个依赖的 copy-desktop-appimage 目标,并先行执行
++
++# 要求:
++ # LINUXDEPLOYQT 提供的外部参数,一般指 linuxdeployqt 程序路径
++ # APPIMAGETOOL 提供的外部参数,一般指 appimagetool 程序路径
++
++option(USE_APPIMAGE_NEW_GLIBC "允许在打包过程中使用较新版本的 glibc 库" ON)
++
++set(APPIMAGE_OUTPUT "${CMAKE_BINARY_DIR}/appimage")
++set(APPIMAGE_OUTPUT_ICON "${APPIMAGE_OUTPUT}/default.png")
++set(APPIMAGE_OUTPUT_DESTKOP "${APPIMAGE_OUTPUT}/default.desktop")
++
++# 1. 添加一个可以用于 Appimage 使用的图标文件
++function(add_appimage_icon _icon)
++ if(CMAKE_VERSION VERSION_LESS 3.21)
++ message("> cmake version is less than 3.21")
++ configure_file(${_icon} ${APPIMAGE_OUTPUT_ICON} COPYONLY)
++ else()
++ file(MAKE_DIRECTORY ${APPIMAGE_OUTPUT})
++ file(COPY_FILE ${_icon} ${APPIMAGE_OUTPUT_ICON})
++ endif(CMAKE_VERSION VERSION_LESS 3.21)
++endfunction(add_appimage_icon _icon)
++
++# 2. 基于 SparkDesktopMacros.cmake 提供的宏来定义 desktop 内容说明
++ # 使用与自身的 desktop.in 模板进行生成
++function(add_appimage_desktop)
++ configure_file(cmake/spark-appimage.desktop.in.txt
++ ${APPIMAGE_OUTPUT_DESTKOP} @ONLY)
++endfunction(add_appimage_desktop)
++
++function(target_linuxdeploy _target)
++
++ if(USE_APPIMAGE_NEW_GLIBC)
++ message("Using new glibc")
++ add_custom_target(linuxdeploy pwd
++ BYPRODUCTS appimage
++ COMMAND "${LINUXDEPLOYQT}" $<TARGET_FILE:${_target}> -appimage -unsupported-allow-new-glibc -verbose=3 -no-strip || true
++ WORKING_DIRECTORY "${APPIMAGE_OUTPUT}")
++ else()
++ message("Not using new glibc")
++ add_custom_target(linuxdeploy pwd
++ BYPRODUCTS appimage
++ COMMAND "${LINUXDEPLOYQT}" $<TARGET_FILE:${_target}> -appimage -verbose=3 -no-strip || true
++ WORKING_DIRECTORY "${APPIMAGE_OUTPUT}")
++ endif(USE_APPIMAGE_NEW_GLIBC)
++
++endfunction(target_linuxdeploy _target)
+
+ function(target_appimage)
+- add_custom_target(copy-desktop-appimage
+- COMMAND cp ../spark-appimage.desktop default.desktop
+- COMMAND cp ../spark-appimage.png default.png
+- WORKING_DIRECTORY "${APPIMAGE_OUTPUT}")
+ add_custom_target(appimage pwd
+ COMMAND ${APPIMAGETOOL} ${APPIMAGE_OUTPUT}
+- WORKING_DIRECTORY "${CMAKE_BINARY_DIR}"
+- DEPENDS copy-desktop-appimage)
++ WORKING_DIRECTORY "${CMAKE_BINARY_DIR}")
+ endfunction(target_appimage)
+
+-function(add_appimage)
++# 3. 添加对目标的 Appimage 构建,Appimage 在一个项目中只能构建一个目标
++function(add_appimage_target _target)
+ # check linuxdeploy
+ if(NOT DEFINED LINUXDEPLOYQT)
+ message("AppImage> Not Found LINUXDEPLOYQT Variable!")
+ return()
+ endif(NOT DEFINED LINUXDEPLOYQT)
+- if(CMAKE_VERSION VERSION_LESS 3.19 AND NOT EXISTS ${LINUXDEPLOYQT})
++ if(CMAKE_VERSION VERSION_LESS 3.19)
+ message("> cmake version is less than 3.19")
+- message(WARNING "!Relative paths are not supported!")
++ if(CMAKE_VERSION VERSION_GREATER 3.4)
++ get_filename_component(LINUXDEPLOYQT_REAL_PATH ${LINUXDEPLOYQT} REALPATH)
++ else()
++ message("> cmake version is less than 3.4")
++ message(WARNING "!Relative paths are not supported!")
++ endif(CMAKE_VERSION VERSION_GREATER 3.4)
+ else()
+ file(REAL_PATH ${LINUXDEPLOYQT} LINUXDEPLOYQT_REAL_PATH)
+- endif(CMAKE_VERSION VERSION_LESS 3.19 AND NOT EXISTS ${LINUXDEPLOYQT})
++ endif(CMAKE_VERSION VERSION_LESS 3.19)
+ message("AppImage> Found LINUXDEPLOYQT Variable: ${LINUXDEPLOYQT_REAL_PATH}")
+
+ # check appimagetool
+@@ -64,47 +101,58 @@ function(add_appimage)
+ message("AppImage> Not Found APPIMAGETOOL Variable!")
+ return()
+ endif(NOT DEFINED APPIMAGETOOL)
+- if(CMAKE_VERSION VERSION_LESS 3.19 AND NOT EXISTS ${LINUXDEPLOYQT})
++ if(CMAKE_VERSION VERSION_LESS 3.19)
+ # execute_process(COMMAND realpath ${APPIMAGETOOL} OUTPUT_VARIABLE APPIMAGETOOL_REAL_PATH)
+ message("> cmake version is less than 3.19")
+- message(WARNING "!Relative paths are not supported!")
++ if(CMAKE_VERSION VERSION_GREATER 3.4)
++ get_filename_component(APPIMAGETOOL_REAL_PATH ${APPIMAGETOOL} REALPATH)
++ else()
++ message("> cmake version is less than 3.4")
++ message(WARNING "!Relative paths are not supported!")
++ endif(CMAKE_VERSION VERSION_GREATER 3.4)
+ else()
+ file(REAL_PATH ${APPIMAGETOOL} APPIMAGETOOL_REAL_PATH)
+- endif(CMAKE_VERSION VERSION_LESS 3.19 AND NOT EXISTS ${LINUXDEPLOYQT})
+- message("AppImage> Found APPIMAGETOOL Variable: ${LINUXDEPLOYQT_REAL_PATH}")
++ endif(CMAKE_VERSION VERSION_LESS 3.19)
++ message("AppImage> Found APPIMAGETOOL Variable: ${APPIMAGETOOL}")
+
+ # do add_custom_target
+ make_directory(${APPIMAGE_OUTPUT})
+- target_linuxdeploy()
++ target_linuxdeploy(${_target})
+ target_appimage()
+-endfunction(add_appimage) + +-function(add_appimage_desktop) +- configure_file(cmake/spark-appimage.desktop.in +- ${CMAKE_BINARY_DIR}/spark-appimage.desktop @ONLY) +-endfunction(add_appimage_desktop) ++ # 重设目标输出的目录 ++ set_target_properties(${_target} ++ PROPERTIES ++ RUNTIME_OUTPUT_DIRECTORY "${APPIMAGE_OUTPUT}") + +-function(add_appimage_icon _ICON_PATH) +- if(CMAKE_VERSION VERSION_LESS 3.21) +- message("> cmake version is less than 3.21") +- configure_file(${_ICON_PATH} ${CMAKE_BINARY_DIR}/spark-appimage.png COPYONLY) +- else() +- file(COPY_FILE ${_ICON_PATH} ${CMAKE_BINARY_DIR}/spark-appimage.png) +- endif(CMAKE_VERSION VERSION_LESS 3.21) +-endfunction(add_appimage_icon _ICON_PATH) ++ # 为解决在不使用 -unsupported-allow-new-glibc 参数时, ++ # 可能不会生成 AppRun 软链接的问题 ++ if(NOT USE_APPIMAGE_NEW_GLIBC) ++ set_target_properties(${_target} ++ PROPERTIES ++ RUNTIME_OUTPUT_NAME "AppRun") ++ endif(NOT USE_APPIMAGE_NEW_GLIBC) + ++endfunction(add_appimage_target _target) + + +-# 如果glic>=2.27,你就需要加上参数 -unsupported-allow-new-glibc (意思就是不再低版本发行版使用了) +-# 或 -unsupported-bundle-everything(大概的意思是尝试兼容,实际测试,到其他发行版直接用不了了,有可能是发行版的原因,还是建议用前者,虽然放弃了低版本) ++# 如果 glic>=2.27, 你就需要加上参数 -unsupported-allow-new-glibc 意思就是不再低版本发行版使用了 ++# 或 -unsupported-bundle-everything ++# 大概的意思是尝试兼容,实际测试,到其他发行版直接用不了了,有可能是发行版的原因,还是建议用前者,虽然放弃了低版本 + + # -unsupported-bundle-everything +- # 捆绑所有依赖库,包括 ld-linux.so 加载器和 glibc。这将允许构建在较新系统上的应用程序在较旧的目标系统上运行,但不建议这样做,因为它会导致捆绑包超出所需的大小(并且可能到其他发行版无法使用) ++ # 捆绑所有依赖库,包括 ld-linux.so 加载器和 glibc。 ++ # 这将允许构建在较新系统上的应用程序在较旧的目标系统上运行, ++ # 但不建议这样做,因为它会导致捆绑包超出所需的大小(并且可能到其他发行版无法使用) + # -unsupported-allow-new-glibc +- # 允许 linuxdeployqt 在比仍受支持的最旧 Ubuntu LTS 版本更新的发行版上运行。这将导致 AppImage无法在所有仍受支持的发行版上运行,既不推荐也不测试或支持 ++ # 允许 linuxdeployqt 在比仍受支持的最旧 Ubuntu LTS 版本更新的发行版上运行。 ++ # 这将导致 AppImage无法在所有仍受支持的发行版上运行,既不推荐也不测试或支持 + +-# ./linuxdeployqt-7-x86_64.AppImage 程序目录/程序 -appimage -unsupported-allow-new-glibc +-# ./linuxdeployqt-7-x86_64.AppImage 程序目录/程序 -appimage -unsupported-bundle-everything ++# 对 linuxdeployqt 的使用 ++# ./linuxdeployqt-7-x86_64.AppImage ++ # 程序目录/程序 -appimage -unsupported-allow-new-glibc ++# ./linuxdeployqt-7-x86_64.AppImage ++ # 程序目录/程序 -appimage -unsupported-bundle-everything + + + +@@ -113,20 +161,32 @@ endfunction(add_appimage_icon _ICON_PATH) + # include(cmake/SparkAppimageConfig.cmake) # 导入来自 Spark 构建的 Appimage 构建 + # add_appimage_icon(assets/spark.png) # 添加到 Appimage 中的默认的图标 + # add_appimage_desktop() # 添加到 Appimage 中的默认desktop(使用来自 Spark 构建的 Desktop 构建中配置的信息(必须要求 spark-desktop)) +-# add_appimage() # 应用对 Appimage 的构建 ++# add_appimage_target(${PROJECT_NAME}) # 添加到 Appimage 中的默认目标,应用对 Appimage 的构建 + +-# 2. 在 Makefile 进行构建目标构建 Appimage +-# Appimage 的构建流 -- ++# 2. 
在 Makefile 进行构建目标构建 Appimage 的构建流 -- + # 在 Makefile 进行构建目标构建 Appimage (要求提供工具的绝对路径,然后可依次进行linuxdeployqt, genrate-appimage) + # 来自于 https://github.com/probonopd/linuxdeployqt 的 linuxdeployqt + # 来自于 https://github.com/AppImage/AppImageKit 的 appimagetool +-# LINUXDEPLOYQT := "/home/zinface/Downloads/linuxdeployqt-continuous-x86_64.AppImage" +-# APPIMAGETOOL := "/home/zinface/Downloads/appimagetool-x86_64.AppImage" ++# 来自于 https://gitlink.org.cn/zinface/bundle-linuxdeployqt.git 托管存储的工具 ++ ++# 或指定你所想存放克隆项目的位置 ++# BUNDLE_LINUXDEPLOYQT := $(shell pwd)/build/bundle-linuxdeployqt ++ ++# download-bundle-linuxdeploytools: ++# -git clone https://gitlink.org.cn/zinface/bundle-linuxdeployqt.git $(BUNDLE_LINUXDEPLOYQT) + +-# linuxdeploy: all ++# LINUXDEPLOYQT := "$(BUNDLE_LINUXDEPLOYQT)/linuxdeployqt-continuous-x86_64.AppImage" ++# APPIMAGETOOL := "$(BUNDLE_LINUXDEPLOYQT)/appimagetool-x86_64.AppImage" ++ ++# linuxdeploy: release download-bundle-linuxdeploytools + # cd build && cmake .. -DLINUXDEPLOYQT=$(LINUXDEPLOYQT) -DAPPIMAGETOOL=$(APPIMAGETOOL) + # cd build && make linuxdeploy + + # genrate-appimage: + # cd build && cmake .. -DLINUXDEPLOYQT=$(LINUXDEPLOYQT) -DAPPIMAGETOOL=$(APPIMAGETOOL) + # cd build && make appimage ++ ++ ++ ++# NOTE: ++# 如果使用的库不存在于系统路径,则需要配置 export LD_LIBRARY_PATH=<路径> 以便 linuxdeployqt 可搜索到库的位置 +diff --git a/cmake/DebPackageConfig.cmake b/cmake/SparkDebPackageConfig.cmake +similarity index 90% +rename from cmake/DebPackageConfig.cmake +rename to cmake/SparkDebPackageConfig.cmake +index d88fd51..7ad5b33 100644 +--- a/cmake/DebPackageConfig.cmake ++++ b/cmake/SparkDebPackageConfig.cmake +@@ -1,7 +1,7 @@ + cmake_minimum_required(VERSION 3.0.0) + + # function(add_deb_package PACKAGE_NAME PACKAGE_VERSION PACKAGE_MAINTAINER PACKAGE_EMAIL PACKAGE_SHORT_DESCRIPTION PACKAGE_LONG_DESCRIPTION) +- ++ + # endfunction(add_deb_package PACKAGE_NAME PACKAGE_VERSION PACKAGE_MAINTAINER PACKAGE_EMAIL PACKAGE_SHORT_DESCRIPTION PACKAGE_LONG_DESCRIPTION) + + # if(add_deb_package VALUE) set(Package ${VALUE} PARENT_SCOPE) endif(add_deb_package VALUE) +@@ -119,7 +119,7 @@ function(set_package_vars _IN_KEY _IN_VAL) + else() + set(CPACK_DEBIAN_PACKAGE_VERSION "${_IN_VAL}" PARENT_SCOPE) + endif(_IN_VAL STREQUAL "auto") +- ++ + message("--> 软件版本: ${_IN_VAL}") + endif(_Version EQUAL "0") + +@@ -131,7 +131,7 @@ function(set_package_vars _IN_KEY _IN_VAL) + + find_str("${_IN_KEY}" "Architecture" _Architecture) + if(_Architecture EQUAL "0") +- set(CPACK_DEBIAN_PACKAGE_ARCHITECTURE "${_IN_VAL}" PARENT_SCOPE) ++ set(CPACK_DEBIAN_PACKAGE_ARCHITECTURE "${_IN_VAL}" PARENT_SCOPE) + if(_IN_VAL STREQUAL "auto") + execute_process( + COMMAND dpkg --print-architecture +@@ -142,7 +142,7 @@ function(set_package_vars _IN_KEY _IN_VAL) + endif(_IN_VAL STREQUAL "auto") + message("--> 软件架构: ${_IN_VAL}") + endif(_Architecture EQUAL "0") +- ++ + find_str("${_IN_KEY}" "Priority" _Priority) + if(_Priority EQUAL "0") + set(CPACK_DEBIAN_PACKAGE_PRIORITY "${_IN_VAL}" PARENT_SCOPE) +@@ -173,6 +173,12 @@ function(set_package_vars _IN_KEY _IN_VAL) + message("--> 软件建议: ${_IN_VAL}") + endif(_Recommends EQUAL "0") + ++ find_str("${_IN_KEY}" "Conflicts" _Conflicts) ++ if(_Conflicts EQUAL "0") ++ set(CPACK_DEBIAN_PACKAGE_CONFLICTS "${_IN_VAL}" PARENT_SCOPE) ++ message("--> 软件冲突: ${_IN_VAL}") ++ endif(_Conflicts EQUAL "0") ++ + endfunction(set_package_vars _IN_KEY _IN_VAL) + + # 定义一个自定义(add_package_descript)函数 +@@ -188,7 +194,7 @@ function(add_package_descript IN_DES) + message(FATAL_ERROR "!! 
Not Found Path: ${PACKAGE_DES_PATH}") + return() + endif(EXISTS ${IN_DES}) +- ++ + file(READ ${PACKAGE_DES_PATH} DES_CONTENT) + trim_str("${DES_CONTENT}" DES_CONTENT) + +@@ -238,7 +244,12 @@ function(add_package_descript IN_DES) + set(PREV_DES_LINE "") + while(NOT PREV_DES_LINE STREQUAL DES_LINE) + if(NOT PREV_DES_LINE STREQUAL "") +- set(Descrition "${Descrition}\n${DES_LINE}") ++ if ("${CMAKE_VERSION}" VERSION_LESS "3.15") ++ set(Descrition "${Descrition}\n${DES_LINE}") ++ else() ++ string(STRIP "${DES_LINE}" STRIP_DES_LINE) ++ set(Descrition "${Descrition}\n${STRIP_DES_LINE}") ++ endif("${CMAKE_VERSION}" VERSION_LESS "3.15") + endif(NOT PREV_DES_LINE STREQUAL "") + set(PREV_DES_LINE "${DES_LINE}") + sub_next(${DES_CONTENT} NEXT_INDEX DES_LINE DES_CONTENT) +@@ -262,7 +273,7 @@ function(add_package_descript IN_DES) + string(TIMESTAMP BUILD_TIME "%Y%m%d") + set(CPACK_DEBIAN_PACKAGE_VERSION "${CPACK_DEBIAN_PACKAGE_VERSION}-${BUILD_TIME}") + endif("${CalVer}" STREQUAL "true") +- ++ + + + ##################### deb file name ##################### +@@ -270,11 +281,22 @@ function(add_package_descript IN_DES) + set(_Version "${CPACK_DEBIAN_PACKAGE_VERSION}") + set(_Architecture "${CPACK_DEBIAN_PACKAGE_ARCHITECTURE}") + +- set(_DebFileName ++ set(_DebFileName + "${_Package}_${_Version}_${_Architecture}${PACKAGE_SUFFIX}.deb" + ) + set(CPACK_DEBIAN_FILE_NAME ${_DebFileName}) + ++ # 标识: spark-deb-package ++ if(NOT "${PACKAGE_SUFFIX}" STREQUAL "") ++ # eg: remove '_' of '_Debian' ++ string(SUBSTRING "${PACKAGE_SUFFIX}" 1 -1 DISTRIBUTION) ++ if ("${CMAKE_VERSION}" VERSION_LESS "3.15") ++ set(CPACK_DEBIAN_PACKAGE_DESCRIPTION "${Descrition}\n .\n Build for ${DISTRIBUTION} through spark-deb-build.") ++ else() ++ set(CPACK_DEBIAN_PACKAGE_DESCRIPTION ${Descrition} "\n.\nBuild for ${DISTRIBUTION} through spark-deb-build.") ++ endif("${CMAKE_VERSION}" VERSION_LESS "3.15") ++ ++ endif(NOT "${PACKAGE_SUFFIX}" STREQUAL "") + + # set(CPACK_DEBIAN_PACKAGE_NAME "${Package}") + # set(CPACK_DEBIAN_PACKAGE_VERSION "${Version}") +@@ -311,7 +333,7 @@ endfunction(add_package_descript IN_DES) + # CPACK_DEBIAN_FILE_NAME - n + # CPACK_DEBIAN_PACKAGE_NAME - y + # CPACK_DEBIAN_PACKAGE_VERSION - y +-# CPACK_DEBIAN_PACKAGE_ARCHITECTURE - y(auto) ++# CPACK_DEBIAN_PACKAGE_ARCHITECTURE - y(auto) -> dpkg --print-architecture + # CPACK_DEBIAN_PACKAGE_DEPENDS - y + # CPACK_DEBIAN_PACKAGE_PRIORITY - y + # CPACK_DEBIAN_PACKAGE_MAINTAINER - y +@@ -324,4 +346,5 @@ endfunction(add_package_descript IN_DES) + # set(ARCHITECTURE "arm64") + # endif() + ++ + # string(TIMESTAMP BUILD_TIME "%Y%m%d") +diff --git a/cmake/SparkDebianChangelogVersion.cmake b/cmake/SparkDebianChangelogVersion.cmake +index ee2f339..e439d37 100644 +--- a/cmake/SparkDebianChangelogVersion.cmake ++++ b/cmake/SparkDebianChangelogVersion.cmake +@@ -43,13 +43,13 @@ macro(spark_debian_changelog_override_version _CHANGELOG_FILE_PATH) + file(READ ${CHANGELOG_FILE_PATH} CHANGELOG_CONTENT LIMIT 30) + # fix: spark-store (4.2.3~test1) 已经超过 20 字符位,所以使用 30 进行保守计算 + +- string(FIND ${CHANGELOG_CONTENT} "(" V_PRE) # +1 to V_BEGIN +- string(FIND ${CHANGELOG_CONTENT} ")" V_END) ++ string(FIND "${CHANGELOG_CONTENT}" "(" V_PRE) # +1 to V_BEGIN ++ string(FIND "${CHANGELOG_CONTENT}" ")" V_END) + + math(EXPR V_BEGIN "${V_PRE}+1") + math(EXPR V_LENGTH "${V_END}-${V_BEGIN}") + +- string(SUBSTRING ${CHANGELOG_CONTENT} ${V_BEGIN} ${V_LENGTH} V) ++ string(SUBSTRING "${CHANGELOG_CONTENT}" ${V_BEGIN} ${V_LENGTH} V) + + message("> V = ${CHANGELOG_CONTENT}") + message("> V = [${V}]") +diff --git 
a/cmake/SparkDesktopMacros.cmake b/cmake/SparkDesktopMacros.cmake +index 223ac6b..bea9da8 100644 +--- a/cmake/SparkDesktopMacros.cmake ++++ b/cmake/SparkDesktopMacros.cmake +@@ -1,16 +1,19 @@ ++# SparkDesktopMacros.cmake + +-macro(spark_desktop_macros _APP_NAME _APP_NAME_ZH_CN _APP_COMMENT _APP_TYPE _APP_EXECUTE_PATH _APP_EXECUTE_ICON_PATH _APP_CATEGORIES) +- set(APP_NAME ${_APP_NAME}) +- set(APP_NAME_ZH_CN ${_APP_NAME_ZH_CN}) +- set(APP_COMMENT ${_APP_COMMENT}) +- set(APP_TYPE ${_APP_TYPE}) +- set(APP_EXECUTE_PATH ${_APP_EXECUTE_PATH}) +- set(APP_EXECUTE_ICON_PATH ${_APP_EXECUTE_ICON_PATH}) +- set(APP_CATEGORIES ${_APP_CATEGORIES}) +- configure_file(cmake/spark-desktop.desktop.in +- ${CMAKE_BINARY_DIR}/${_APP_NAME}.desktop ++macro(spark_desktop_macros) ++ set(APP_NAME ${ARGV0}) ++ set(APP_NAME_ZH_CN ${ARGV1}) ++ set(APP_COMMENT ${ARGV2}) ++ set(APP_TYPE ${ARGV3}) ++ set(APP_EXECUTE_PATH ${ARGV4}) ++ set(APP_EXECUTE_ICON_PATH ${ARGV5}) ++ set(APP_CATEGORIES ${ARGV6}) ++ set(APP_MIME_TYPE ${ARGV7}) ++ configure_file(cmake/spark-desktop.desktop.in.txt ++ ${CMAKE_BINARY_DIR}/${ARGV0}.desktop + ) +-endmacro(spark_desktop_macros _APP_NAME _APP_NAME_ZH_CN _APP_COMMENT _APP_TYPE _APP_EXECUTE_PATH _APP_EXECUTE_ICON_PATH _APP_CATEGORIES) ++ set(SPARK_DESKTOP_FILE ${CMAKE_BINARY_DIR}/${ARGV0}.desktop) ++endmacro(spark_desktop_macros) + + # include(cmake/SparkDesktopMacros.cmake) + # 内容默认应用名称: Name= 应与项目名称相同 +@@ -21,15 +24,27 @@ endmacro(spark_desktop_macros _APP_NAME _APP_NAME_ZH_CN _APP_COMMENT _APP_TYPE _ + # 应用类型: Type= + # 执行程序: Exec= + # 图标路径: Icon= +- # 应用分类: Category= ++ # 应用分类: Categories= ++ # MIME类型: MimeType= + # ) + ++# TODO 安装位置:INSTALL(将自动实现 install 文件,如 /usr/share/applications) ++ ++# install(FILES ${APP_NAME}.desktop ++# DESTINATION /usr/share/applications ++# ) ++ # 或者 ++# install(FILES ${SPARK_DESKTOP_FILE} ++# DESTINATION /usr/share/applications ++# ) ++ ++# 基于 configure_file 填充内容配置 + # configure_file( + # [NO_SOURCE_PERMISSIONS | USE_SOURCE_PERMISSIONS | + # FILE_PERMISSIONS ...] + # [COPYONLY] [ESCAPE_QUOTES] [@ONLY] + # [NEWLINE_STYLE [UNIX|DOS|WIN32|LF|CRLF] ]) + +-# install(FILES ${APP_NAME}.desktop ++# install(FILES ${SPARK_DESKTOP_FILE}.desktop + # DESTINATION /usr/share/applications +-# ) +\ No newline at end of file ++# ) +diff --git a/cmake/SparkEnvConfig.cmake b/cmake/SparkEnvConfig.cmake +index 797faf4..f9b4d55 100644 +--- a/cmake/SparkEnvConfig.cmake ++++ b/cmake/SparkEnvConfig.cmake +@@ -5,4 +5,20 @@ set(CMAKE_INCLUDE_CURRENT_DIR ON) + set(CMAKE_AUTOMOC ON) + set(CMAKE_AUTOUIC ON) + set(CMAKE_AUTORCC ON) +-# set(CMAKE_BUILD_TYPE "Debug") +\ No newline at end of file ++# set(CMAKE_BUILD_TYPE "Debug") ++ ++option(SPARK_DEBUG_MESSAGE "CMake Spark Module Debug Message." OFF) ++set(SPAKK_DEBUG_LOGFILE "${CMAKE_BINARY_DIR}/spark_debug.log" CACHE STRING "Spark Build Debug logfile." 
FORCE) ++file(WRITE ${SPAKK_DEBUG_LOGFILE}) ++ ++macro(spark_debug_message) ++ if(SPARK_DEBUG_MESSAGE) ++ set(SPARK_ONECE_LOG ${ARGN}) ++ if(NOT "${SPARK_ONECE_LOG}" STREQUAL "") ++ message("[SPARK_MESSAGE]: " ${SPARK_ONECE_LOG}) ++ endif(NOT "${SPARK_ONECE_LOG}" STREQUAL "") ++ file(APPEND ${SPAKK_DEBUG_LOGFILE} ${SPARK_ONECE_LOG} "\n") ++ unset(SPARK_ONECE_LOG) ++ endif(SPARK_DEBUG_MESSAGE) ++endmacro(spark_debug_message) ++ +diff --git a/cmake/SparkFindDtkConfig.cmake b/cmake/SparkFindDtkConfig.cmake +index d1b2dfc..278d0d1 100644 +--- a/cmake/SparkFindDtkConfig.cmake ++++ b/cmake/SparkFindDtkConfig.cmake +@@ -4,7 +4,7 @@ cmake_minimum_required(VERSION 3.5.1) + find_package(Dtk COMPONENTS Core Widget Gui) + + function(target_link_dtk NAME) +- target_link_libraries(${NAME} ++ target_link_libraries(${NAME} + ${DtkCore_LIBRARIES} + ${DtkWidget_LIBRARIES} + ${DtkGui_LIBRARIES}) +diff --git a/cmake/SparkFindQt5Config.cmake b/cmake/SparkFindQt5Config.cmake +index 0300b3d..cb095b6 100644 +--- a/cmake/SparkFindQt5Config.cmake ++++ b/cmake/SparkFindQt5Config.cmake +@@ -1,6 +1,8 @@ + cmake_minimum_required(VERSION 3.5.1) + +-find_package(Qt5 COMPONENTS Core Widgets Network Concurrent WebEngineWidgets REQUIRED) ++set(SPARK_FIND_QT5 TRUE) ++ ++find_package(Qt5 COMPONENTS Core Widgets Network REQUIRED) + + # function(target_link_qt5 NAME) + # target_link_libraries(${NAME} +@@ -22,10 +24,7 @@ macro(spark_add_link_qt5 _IN_NAME) + endmacro(spark_add_link_qt5 _IN_NAME) + + # 使用 spark_add_link_qt5 生成 target_link_qt5_ 的宏 +-spark_add_link_qt5(Concurrent Qt5::Concurrent) +-spark_add_link_qt5(Sql Qt5::Sql) +-spark_add_link_qt5(WebEngineWidgets Qt5::WebEngineWidgets) +-spark_add_link_qt5(WebSockets Qt5::WebSockets) ++# spark_add_link_qt5(Concurrent Qt5::Concurrent) + + # 高级自定义 + # spark_add_links_qt5 +@@ -47,7 +46,7 @@ macro(spark_add_links_qt5) + + string(TOLOWER "${qt5_item}" qt5_lower_item) + spark_add_link_qt5(${qt5_lower_item} Qt5::${qt5_item}) +- message("add_target_link_qt5_${qt5_item} or add_target_link_qt5_${qt5_lower_item}") ++ spark_debug_message("add_target_link_qt5_${qt5_item} or add_target_link_qt5_${qt5_lower_item}") + endforeach(qt5_item IN LISTS qt5_items) + endmacro(spark_add_links_qt5) + +@@ -151,4 +150,4 @@ spark_add_links_qt5( + # XkbCommonSupport + # Xml + # XmlPatterns +-) +\ No newline at end of file ++) +diff --git a/cmake/SparkFindQt6Config.cmake b/cmake/SparkFindQt6Config.cmake +index dfd8917..2c9d8cc 100644 +--- a/cmake/SparkFindQt6Config.cmake ++++ b/cmake/SparkFindQt6Config.cmake +@@ -1,6 +1,8 @@ + cmake_minimum_required(VERSION 3.5.1) + +-find_package(Qt6 COMPONENTS Core Widgets Network Concurrent) ++set(SPARK_FIND_QT6 TRUE) ++ ++find_package(Qt6 COMPONENTS Core Widgets Network REQUIRED) + + # function(target_link_qt6 NAME) + # target_link_libraries(${NAME} +@@ -14,7 +16,7 @@ spark_add_link(qt6 Qt6::Core Qt6::Widgets Qt6::Network) + + + # spark_add_link_qt6 +-# 自定义宏 target_link_qt6 以扩展 target_link_qt6_ 结构 ++# 自定义宏 spark_add_link_qt6 以扩展 target_link_qt6_ 结构 + # _IN_NAME: 此宏使用嵌套宏 spark_add_link 时追加 名称 + # 同等于 spark_add_link(qt_ ${ARGN}) + macro(spark_add_link_qt6 _IN_NAME) +@@ -22,3 +24,107 @@ macro(spark_add_link_qt6 _IN_NAME) + endmacro(spark_add_link_qt6 _IN_NAME) + + # 使用 spark_add_link_qt6 生成 target_link_qt6_ 的宏 ++# spark_add_link_qt5(Concurrent Qt6::Concurrent) ++ ++# 高级自定义 ++# spark_add_links_qt6 ++# 自定义宏 spark_add_links_qt6 以扩展 spark_add_link_qt6 宏配置组 ++ # 特点: 任意长度参数 ++ # qt6_item: 为进行遍历后的单项,类似于 python3 中的 (for item in items:) ++ # 例如: qt6_item 为 Core: ++ # 
spark_add_link_qt6(${qt6_item} Qt6::${qt6_item}) ++ # 展开为 spark_add_link_qt6(Core Qt6::Core) ++ # 展开为 spark_add_link(qt6_Core Qt6::Core) ++ # 展开为 spark_add_link(qt6_Core Qt6::Core) ++ # 特性: 增加 qt6_Core 转 qt6_core ++ # string(TOLOWER ) ++macro(spark_add_links_qt6) ++ set(qt6_items ${ARGN}) ++ foreach(qt6_item IN LISTS qt6_items) ++ find_package(Qt6${qt6_item}) ++ spark_add_link_qt6(${qt6_item} Qt6::${qt6_item}) ++ ++ string(TOLOWER "${qt6_item}" qt6_lower_item) ++ spark_add_link_qt6(${qt6_lower_item} Qt6::${qt6_item}) ++ spark_debug_message("add_target_link_qt6_${qt6_item} or add_target_link_qt6_${qt6_lower_item}") ++ endforeach(qt6_item IN LISTS qt6_items) ++endmacro(spark_add_links_qt6) ++ ++# 找出所有 Qt6 模板 ++# find /usr/lib/x86_64-linux-gnu/cmake/ -name "*Config.cmake" | sed 's@^.*/Qt6@Qt6@;' | grep ^Qt6 ++ ++# 掐头去尾,洗一次 ++# find /usr/lib/x86_64-linux-gnu/cmake/ -name "*Config.cmake" | sed 's@^.*/Qt5@Qt5@;' | grep ^Qt5 | sed 's@^Qt5@@; s@Config.cmake$@@; /^\s*$/d' ++ ++# 排序 ++# find /usr/lib/x86_64-linux-gnu/cmake/ -name "*Config.cmake" | sed 's@^.*/Qt5@Qt5@;' | grep ^Qt5 | sed 's@^Qt5@@; s@Config.cmake$@@; /^\s*$/d' | sort | pr -t -3 ++# find /usr/lib/x86_64-linux-gnu/cmake/ -name "*Config.cmake" | sed 's@^.*/Qt6@Qt6@;' | grep ^Qt6 | sed 's@^Qt6@@; s@Config.cmake$@@; /^\s*$/d' | sort | pr -t -3 ++ ++spark_add_links_qt6( ++ # BuildInternals ++ # BuildInternals/StandaloneTests/Qt5CompatTests ++ # BuildInternals/StandaloneTests/QtBaseTests ++ # Concurrent ++ # Core ++ # Core5Compat ++ # CoreTools ++ # DBus ++ # DBusTools ++ # DeviceDiscoverySupportPrivate ++ # EglFSDeviceIntegrationPrivate ++ # EglFsKmsGbmSupportPrivate ++ # EglFsKmsSupportPrivate ++ # FbSupportPrivate ++ # Gui ++ # GuiTools ++ # HostInfo ++ # InputSupportPrivate ++ # KmsSupportPrivate ++ # Network ++ # OpenGL ++ # OpenGLWidgets ++ # PrintSupport ++ # QComposePlatformInputContextPlugin ++ # QCupsPrinterSupportPlugin ++ # QEglFSEmulatorIntegrationPlugin ++ # QEglFSIntegrationPlugin ++ # QEglFSKmsEglDeviceIntegrationPlugin ++ # QEglFSKmsGbmIntegrationPlugin ++ # QEglFSX11IntegrationPlugin ++ # QEvdevKeyboardPlugin ++ # QEvdevMousePlugin ++ # QEvdevTabletPlugin ++ # QEvdevTouchScreenPlugin ++ # QGifPlugin ++ # QGtk3ThemePlugin ++ # QIBaseDriverPlugin ++ # QIbusPlatformInputContextPlugin ++ # QICOPlugin ++ # QJpegPlugin ++ # QLibInputPlugin ++ # QLinuxFbIntegrationPlugin ++ # QMinimalEglIntegrationPlugin ++ # QMinimalIntegrationPlugin ++ # QMYSQLDriverPlugin ++ # QNetworkManagerNetworkInformationPlugin ++ # QODBCDriverPlugin ++ # QOffscreenIntegrationPlugin ++ # QPSQLDriverPlugin ++ # QSQLiteDriverPlugin ++ # QTlsBackendCertOnlyPlugin ++ # QTlsBackendOpenSSLPlugin ++ # QTsLibPlugin ++ # QTuioTouchPlugin ++ # QVkKhrDisplayIntegrationPlugin ++ # QVncIntegrationPlugin ++ # QXcbEglIntegrationPlugin ++ # QXcbGlxIntegrationPlugin ++ # QXcbIntegrationPlugin ++ # QXdgDesktopPortalThemePlugin ++ # Sql ++ # Test ++ # Widgets ++ # WidgetsTools ++ # XcbQpaPrivate ++ # Xml ++) +diff --git a/cmake/SparkMacrosConfig.cmake b/cmake/SparkMacrosConfig.cmake +index 67d84e1..fd515be 100644 +--- a/cmake/SparkMacrosConfig.cmake ++++ b/cmake/SparkMacrosConfig.cmake +@@ -2,20 +2,62 @@ cmake_minimum_required(VERSION 3.5.1) + + # 定义一些 macro 用于自动生成构建结构 + ++# spark_aux_source_directory outvar invar [skip] ++# 获取目录下的所有源代码 ++macro(spark_aux_source_directory OUTVAR INVAR) ++ # iv: internal_variable ++ set(iv_args ${ARGN}) ++ list(LENGTH iv_args iv_arglen) ++ ++ file(GLOB iv_SOURCE_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${INVAR}/*.c ${INVAR}/*.cpp) ++ 
file(GLOB iv_HEADER_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${INVAR}/*.h ${INVAR}/*.hpp) ++ file(GLOB iv_QT_UI_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${INVAR}/*.ui ${INVAR}/*.qrc) ++ ++ if(iv_arglen EQUAL 1) ++ list(APPEND ${OUTVAR} ${iv_SOURCE_LIST} ${iv_HEADER_LIST} ${iv_QT_UI_LIST}) ++ else() ++ set(${OUTVAR} ${iv_SOURCE_LIST} ${iv_HEADER_LIST} ${iv_QT_UI_LIST}) ++ endif(iv_arglen EQUAL 1) ++ ++ unset(iv_args) ++ unset(iv_arglen) ++ unset(iv_SOURCE_LIST) ++ unset(iv_HEADER_LIST) ++ unset(iv_QT_UI_LIST) ++ ++endmacro(spark_aux_source_directory OUTVAR INVAR) ++ ++# spark_aux_source_directories outvar invar [...] ++# 获取目录列表下的所有源代码 ++ # spark_aux_source_directory 的扩展,支持多个 invar 与追加参数 ++macro(spark_aux_source_directories OUTVAR INVAR) ++ set(iv_aux_directories ${ARGN}) ++ ++ spark_aux_source_directory(${OUTVAR} ${INVAR}) ++ ++ foreach(iv_directory IN LISTS iv_aux_directories) ++ spark_aux_source_directory(${OUTVAR} ${iv_directory} SKIP) ++ endforeach(iv_directory IN LISTS iv_aux_directories) ++ ++ unset(iv_aux_directories) ++ ++endmacro(spark_aux_source_directories OUTVAR INVAR) ++ ++ + # spark_add_library [files]... + # 构建一个库,基于指定的源文件 + # 并根据库名生成 target_link_ 函数 + macro(spark_add_library _lib_name) +- message("================ ${_lib_name} Library ================") ++ spark_debug_message("================ ${_lib_name} Library ================") + add_library(${_lib_name} ${ARGN}) + + set(SRCS ${ARGN}) + foreach(item IN LISTS SRCS) +- message(" -> ${item}") ++ spark_debug_message(" -> ${item}") + endforeach(item IN LISTS SRCS) + + function(target_link_${_lib_name} TARGET) +- message("${_lib_name}") ++ spark_debug_message("${_lib_name}") + target_link_libraries(${TARGET} ${_lib_name}) + endfunction(target_link_${_lib_name} TARGET) + +@@ -26,59 +68,271 @@ endmacro(spark_add_library _lib_name) + # 并根据库名生成 target_link_ 函数 + # 函数内增加以 头文件搜索路径 + macro(spark_add_library_path _lib_name _lib_path) +- aux_source_directory(${_lib_path} ${_lib_name}_SOURCES) + +- message("================ spark_add_library_path: ${_lib_name} ================") +- file(GLOB UI_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${_lib_path}/*.ui) +- add_library(${_lib_name} ${${_lib_name}_SOURCES} ${UI_LIST}) +- foreach(item IN LISTS ${_lib_name}_SOURCES) +- message(" -> ${item}") +- endforeach(item IN LISTS ${_lib_name}_SOURCES) ++ # 0. 建立初始变量体系 ++ set(${_lib_name}_TYPE) ++ set(${_lib_name}_TYPE_MESSAGE "STATIC(Default)") ++ set(${_lib_name}_ARGN ${ARGN}) ++ ++ # 1. 判断 _lib_path 是否是 SHARED 或 STATIC ++ if(${_lib_path} STREQUAL SHARED OR ${_lib_path} STREQUAL STATIC) ++ set(${_lib_name}_TYPE ${_lib_path}) ++ set(${_lib_name}_TYPE_MESSAGE ${${_lib_name}_TYPE}) ++ ++ if(${ARGC} LESS 3) ++ message(FATAL_ERROR "Missing parameter, library path not specified.") ++ endif(${ARGC} LESS 3) ++ else() ++ # 如没有则将 _lib_path 加入到 ARGN ++ list(APPEND ${_lib_name}_ARGN ${_lib_path}) ++ endif(${_lib_path} STREQUAL SHARED OR ${_lib_path} STREQUAL STATIC) ++ ++ # 1. 
处理由 spark_add_library_realpaths 构建转本构建时的清洗机制 ++ spark_debug_message("> Building: ${_lib_name}, type: ${${_lib_name}_TYPE_MESSAGE}") ++ set(${_lib_name}_ARGN_REF ${${_lib_name}_ARGN}) ++ unset(${_lib_name}_ARGN) ++ foreach(_old IN LISTS ${_lib_name}_ARGN_REF) ++ set(_new ${_old}) ++ string(FIND "${_old}" "+" _plus_index) ++ if(${_plus_index} GREATER 0) ++ string(SUBSTRING "${_old}" 0 ${_plus_index} _new) ++ spark_debug_message(" [CONVERT] ${_new} <- ${_old}") ++ endif(${_plus_index} GREATER 0) ++ list(APPEND ${_lib_name}_ARGN ${_new}) ++ endforeach(_old IN LISTS ${_lib_name}_ARGN_REF) ++ ++ ++ # 2.目标参数项分析出子项 ++ # 拆分出源代码、路径、未知项等 ++ set(${_lib_name}_ARGN_SOURCES) ++ set(${_lib_name}_ARGN_APPEND_PATHS) ++ set(${_lib_name}_ARGN_UNKNOW) ++ foreach(item IN LISTS ${_lib_name}_ARGN) ++ spark_debug_message(" [ARGN] check:" ${item}) ++ if(NOT EXISTS ${item}) ++ set(item ${CMAKE_CURRENT_LIST_DIR}/${item}) ++ endif() ++ if(EXISTS ${item}) ++ # spark_debug_message(" exists: true") ++ file(REAL_PATH ${item} ${_lib_name}_ARGN_item) ++ if(IS_DIRECTORY ${${_lib_name}_ARGN_item}) ++ list(APPEND ${_lib_name}_ARGN_APPEND_PATHS ${item}) ++ else() ++ list(APPEND ${_lib_name}_ARGN_SOURCES ${item}) ++ endif(IS_DIRECTORY ${${_lib_name}_ARGN_item}) ++ else() ++ list(APPEND ${_lib_name}_ARGN_UNKNOW ${item}) ++ spark_debug_message(" exists: false") ++ endif() ++ endforeach() ++ ++ list(LENGTH ${_lib_name}_ARGN_SOURCES ${_lib_name}_ARGN_SOURCES_LENGTH) ++ list(LENGTH ${_lib_name}_ARGN_APPEND_PATHS ${_lib_name}_ARGN_APPEND_PATHS_LENGTH) ++ list(LENGTH ${_lib_name}_ARGN_UNKNOW ${_lib_name}_ARGN_UNKNOW_LENGTH) ++ spark_debug_message(" result: files(${${_lib_name}_ARGN_SOURCES_LENGTH}), paths(${${_lib_name}_ARGN_APPEND_PATHS_LENGTH}), unknow(${${_lib_name}_ARGN_UNKNOW_LENGTH})" ${item}) ++ ++ # 3. 获取所以源代码为 any_files ++ spark_debug_message(" files:") ++ set(any_files ${${_lib_name}_ARGN_SOURCES}) ++ foreach(item IN LISTS ${_lib_name}_ARGN_APPEND_PATHS) ++ spark_aux_source_directory(item_files ${item}) ++ list(APPEND any_files ${item_files}) ++ foreach(item_file IN LISTS item_files) ++ spark_debug_message(" ${item_file}") ++ endforeach(item_file IN LISTS item_files) ++ endforeach(item IN LISTS ${_lib_name}_ARGN_APPEND_PATHS) ++ ++ # 4. 构建目标库 ++ add_library(${_lib_name} ${${_lib_name}_TYPE} ++ ${${_lib_name}_ARGN_SOURCES} ++ ${any_files}) ++ ++ # 5. 
建立引用点 ++ # target_link_<_lib_name> 函数 ++ # target_include_<_lib_name> 函数 ++ ++ # target_<_lib_name>_include 函数 ++ # target_<_lib_name>_link 函数 + ++ function(target_${_lib_name}_include _include) ++ spark_debug_message("添加引用: ${_lib_name} <- ${_include} ${${_lib_name}_INCLUDE_ARGN}") ++ target_include_directories(${_lib_name} PRIVATE ${_include}) ++ endfunction(target_${_lib_name}_include _include) ++ ++ function(target_${_lib_name}_link _library) ++ spark_debug_message("添加链接: ${_lib_name} <- ${_library} ${${_lib_name}_LINK_ARGN}") ++ target_link_libraries(${_lib_name} ${_library}) ++ endfunction(target_${_lib_name}_link _library) ++ + function(target_link_${_lib_name} TARGET) +- # message("target_link_${_lib_name}") +- message(" -> (include): ${_lib_path}") +- target_include_directories(${TARGET} PUBLIC "${_lib_path}") ++ spark_debug_message("链接引用: ${TARGET} <- ${_lib_name}") ++ target_include_directories(${TARGET} PRIVATE ++ "${${_lib_name}_SOURCE_PATH}" ${${_lib_name}_ARGN_APPEND_PATHS}) + target_link_libraries(${TARGET} ${_lib_name}) + endfunction(target_link_${_lib_name} TARGET) + + function(target_include_${_lib_name} TARGET) +- # message("target_link_${_lib_name}") +- message(" -> (include): ${_lib_path}") +- target_include_directories(${TARGET} PUBLIC "${_lib_path}") +- # target_link_libraries(${TARGET} ${_lib_name}) ++ spark_debug_message("引入引用: ${TARGET} <- ${_lib_name}") ++ target_include_directories(${TARGET} PUBLIC ++ "${${_lib_name}_SOURCE_PATH}" ${${_lib_name}_ARGN_APPEND_PATHS}) + endfunction(target_include_${_lib_name} TARGET) + ++ ++ target_include_directories(${_lib_name} PRIVATE ++ "${${_lib_name}_ARGN_APPEND_PATHS}") ++ ++ # 输出 includes ++ spark_debug_message(" ${_lib_name}_ARGN_APPEND_PATHS: ") ++ foreach(item IN LISTS ${_lib_name}_ARGN_APPEND_PATHS) ++ string(REPLACE "${CMAKE_SOURCE_DIR}/" "" item_var "${item}") ++ spark_debug_message(" ${item_var}") ++ endforeach(item IN LISTS ${_lib_name}_ARGN_APPEND_PATHS) ++ ++ # 如果想用以下操作手动实现 target_link_include_directories ++ # 请注意对 LIST 类型使用 "" 进行包围 ++ # target_link_include_directories 的 PUBLIC 将会填充(追加)目标的 INCLUDE_DIRECTORIES 属性 ++ # target_link_include_directories 支持 cmake 生成大表达式,更容易操作,手动将无法实现此类能力 ++ # target_link_include_directories 支持相对路径和绝对路径参数 ++ # 手动操作将必须使用绝对路径,这是不好的地方 ++ # get_target_property(_lib_include_directories ${_lib_name} INCLUDE_DIRECTORIES) ++ # list(APPEND _lib_include_directories "${CMAKE_CURRENT_LIST_DIR}/${${_lib_name}_SOURCE_PATH}") ++ # spark_debug_message("----> ${CMAKE_CURRENT_LIST_DIR}/${${_lib_name}_SOURCE_PATH}") ++ # spark_debug_message("----> ${_lib_include_directories}") ++ # set_target_properties(${_lib_name} PROPERTIES ++ # INCLUDE_DIRECTORIES "${_lib_include_directories}" ++ # INTERFACE_INCLUDE_DIRECTORIES "${_lib_include_directories}" ++ # ) ++ + endmacro(spark_add_library_path _lib_name _lib_path) + ++# spark_add_shared_library [files ...] ++# 构建一个共享库,基于指定的源代码 ++ # 并根据库名生成 target_link_ 函数 ++macro(spark_add_shared_library _lib_name) ++ spark_add_library(${_lib_name} SHARED ${ARGN}) ++endmacro(spark_add_shared_library _lib_name) ++ ++# spark_add_shared_library_path [files ... paths] ++# 构建一个共享库,基于指定的路径 ++ # 并根据库名生成 target_link_ 函数 ++macro(spark_add_shared_library_path _lib_name) ++ spark_add_library_path(${_lib_name} SHARED ${ARGN}) ++endmacro(spark_add_shared_library_path _lib_name) ++ + # spark_add_executable [files]... + # 构建一个可执行文件,基于指定的源文件 + # Qt编译时源文件包括很多类型,需要指定 *.h/*.cpp/*.qrc/*.qm/... 
等 + macro(spark_add_executable _exec_name) + +- message("================ ${_exec_name} Executable ================") ++ set(${_exec_name}_TYPE_MESSAGE "可执行程序") ++ spark_debug_message("> Building: ${_exec_name}, type: ${${_exec_name}_TYPE_MESSAGE}") ++ + add_executable(${_exec_name} ${ARGN}) + + endmacro(spark_add_executable _exec_name) + ++# spark_add_executable_path [files ... paths] ++# 构建一个可执行程序,基于指定的路径 + macro(spark_add_executable_path _exec_name _exec_path) +- aux_source_directory(${_exec_path} ${_exec_name}_SOURCES) +- +- message("================ ${_exec_name} Executable ================") +- file(GLOB UI_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${_exec_path}/*.ui) +- add_executable(${_exec_name} ${${_exec_name}_SOURCES} ${ARGN} ${UI_LIST}) +- foreach(item IN LISTS ${_exec_name}_SOURCES) +- message(" -> ${item}") +- endforeach(item IN LISTS ${_exec_name}_SOURCES) +- +- # function(target_link_${_exec_name} TARGET) +- # message("target_link_${_lib_name}") +- message(" -> (include): ${_exec_path}") +- target_include_directories(${_exec_name} PUBLIC "${_exec_path}") +- # target_link_libraries(${TARGET} ${_lib_name}) +- # endfunction(target_link_${_exec_name} TARGET) +- # target_link_${_exec_name}(${_exec_name}) ++ spark_add_executable(${_exec_name}) ++ ++ # 0. 建立初始变量体系 ++ # set(${_exec_name}_TYPE) ++ # set(${_exec_name}_TYPE_MESSAGE "可执行程序") ++ set(${_exec_name}_ARGN ${ARGN}) + ++ # 1. 处理由 spark_add_executable_realpaths 构建转本构建时的清洗机制 ++ # spark_debug_message("> Building: ${_exec_name}, type: ${${_exec_name}_TYPE_MESSAGE}") ++ set(${_exec_name}_ARGN_REF ${${_exec_name}_ARGN}) ++ unset(${_exec_name}_ARGN) ++ foreach(_old IN LISTS ${_exec_name}_ARGN_REF) ++ set(_new ${_old}) ++ string(FIND "${_old}" "+" _plus_index) ++ if(${_plus_index} GREATER 0) ++ string(SUBSTRING "${_old}" 0 ${_plus_index} _new) ++ spark_debug_message(" [CONVERT] ${_new} <- ${_old}") ++ endif(${_plus_index} GREATER 0) ++ list(APPEND ${_exec_name}_ARGN ${_new}) ++ endforeach(_old IN LISTS ${_exec_name}_ARGN_REF) ++ ++ # 1.目标参数项分析出子项 ++ # 拆分出源代码、路径、未知项等 ++ # spark_debug_message("> Building: ${_exec_name}, type: ${${_exec_name}_TYPE_MESSAGE}") ++ set(${_exec_name}_ARGN_SOURCES) ++ set(${_exec_name}_ARGN_APPEND_PATHS ${_exec_path}) ++ set(${_exec_name}_ARGN_UNKNOW) ++ foreach(item IN LISTS ${_exec_name}_ARGN) ++ spark_debug_message(" [ARGN] check:" ${item}) ++ if(CMAKE_VERSION VERSION_LESS 3.14) ++ string(REGEX MATCH "\.qm$" fext "${item}") ++ else() ++ get_filename_component(fext "${item}" LAST_EXT) ++ endif(CMAKE_VERSION VERSION_LESS 3.14) ++ ++ if(NOT EXISTS ${item} AND NOT "${fext}" STREQUAL ".qm") ++ set(item ${CMAKE_CURRENT_LIST_DIR}/${item}) ++ endif() ++ if(EXISTS ${item}) ++ # spark_debug_message(" exists: true") ++ file(REAL_PATH ${item} ${_exec_name}_ARGN_item) ++ if(IS_DIRECTORY ${${_exec_name}_ARGN_item}) ++ list(APPEND ${_exec_name}_ARGN_APPEND_PATHS ${item}) ++ else() ++ list(APPEND ${_exec_name}_ARGN_SOURCES ${item}) ++ endif(IS_DIRECTORY ${${_exec_name}_ARGN_item}) ++ else() ++ if("${fext}" STREQUAL ".qm") ++ list(APPEND ${_exec_name}_ARGN_SOURCES ${item}) ++ else() ++ list(APPEND ${_exec_name}_ARGN_UNKNOW ${item}) ++ spark_debug_message(" exists: false") ++ endif("${fext}" STREQUAL ".qm") ++ endif() ++ endforeach() ++ ++ list(LENGTH ${_exec_name}_ARGN_SOURCES ${_exec_name}_ARGN_SOURCES_LENGTH) ++ list(LENGTH ${_exec_name}_ARGN_APPEND_PATHS ${_exec_name}_ARGN_APPEND_PATHS_LENGTH) ++ list(LENGTH ${_exec_name}_ARGN_UNKNOW ${_exec_name}_ARGN_UNKNOW_LENGTH) ++ spark_debug_message(" result: 
files(${${_exec_name}_ARGN_SOURCES_LENGTH}), paths(${${_exec_name}_ARGN_APPEND_PATHS_LENGTH}), unknow(${${_exec_name}_ARGN_UNKNOW_LENGTH})" ${item}) ++ ++ ++ # 2. 获取所以源代码为 any_files ++ spark_debug_message(" files:") ++ set(any_files ${${_exec_name}_ARGN_SOURCES}) ++ foreach(item IN LISTS ${_exec_name}_ARGN_APPEND_PATHS) ++ spark_aux_source_directory(item_files ${item}) ++ list(APPEND any_files ${item_files}) ++ foreach(item_file IN LISTS item_files) ++ spark_debug_message(" ${item_file}") ++ endforeach(item_file IN LISTS item_files) ++ endforeach(item IN LISTS ${_exec_name}_ARGN_APPEND_PATHS) ++ ++ # 3. 构建可执行目标所需要的文件 ++ # add_executable(${_exec_name} ++ # ${${_exec_name}_ARGN_SOURCES} ++ # ${any_files}) ++ ++ target_sources(${_exec_name} PRIVATE ++ ${${_exec_name}_ARGN_SOURCES} ++ ${any_files}) ++ ++ # 4. 建立引用点 ++ # target_<_exec_name>_include 函数 ++ # target_<_exec_name>_link 函数 ++ function(target_${_exec_name}_include _include) ++ spark_debug_message("添加引用: ${_exec_name} <- ${_include} ${${_exec_name}_INCLUDE_ARGN}") ++ target_include_directories(${_exec_name} PRIVATE ${_include}) ++ endfunction(target_${_exec_name}_include _include) ++ ++ function(target_${_exec_name}_link _library) ++ spark_debug_message("添加链接: ${_exec_name} <- ${_library} ${${_exec_name}_LINK_ARGN}") ++ target_link_libraries(${_exec_name} ${_library}) ++ endfunction(target_${_exec_name}_link _library) ++ ++ target_include_directories(${_exec_name} PRIVATE ++ ${_exec_path}) ++ spark_debug_message(" include: ${_exec_path}\n") ++ + endmacro(spark_add_executable_path _exec_name _exec_path) + + # spark_find_library +@@ -98,6 +352,15 @@ macro(spark_find_library _prefix) + + endmacro(spark_find_library _prefix) + ++macro(target_link_qt) ++ ++ if(SPARK_FIND_QT6) ++ target_link_qt6(${ARGN}) ++ elseif(SPARK_FIND_QT5) ++ target_link_qt5(${ARGN}) ++ endif(SPARK_FIND_QT6) ++ ++endmacro(target_link_qt) + + # spark_add_executable_paths + # 自定义构建宏,基于指定的前缀名称,处理后续参数为子目录 +@@ -108,9 +371,9 @@ macro(spark_add_executable_paths _prefix_path) + set(PATHS ${ARGN}) + foreach(item IN LISTS PATHS) + file(GLOB QRCS "${item}/*.qrc") +- message(">>> add_executable: " "${_prefix_path}-${item} ${item} + ${QRCS}") ++ spark_debug_message(">>> add_executable: " "${_prefix_path}-${item} ${item} + ${QRCS}") + spark_add_executable_path(${_prefix_path}-${item} ${item} ${QRCS}) +- target_link_qt5(${_prefix_path}-${item}) ++ target_link_qt(${_prefix_path}-${item}) + endforeach(item IN LISTS PATHS) + endmacro(spark_add_executable_paths _prefix_path) + +@@ -120,10 +383,11 @@ endmacro(spark_add_executable_paths _prefix_path) + # ARGN: 此宏剩余的参数列表 + # 在使用 target_link_ 时 + # _NAME: 用于此 fucntion 中的要求参数: <_NAME>目标将要连接此库 +-macro(spark_add_link _IN_NAME) +- function(target_link_${_IN_NAME} _NAME) +- message("LINK ${_NAME} ${ARGN}") +- target_link_libraries(${_NAME} ++macro(spark_add_link _name) ++ function(target_link_${_name} _link) ++ spark_debug_message("> Linking: ${_link}") ++ spark_debug_message(" <- ${ARGN}\n") ++ target_link_libraries(${_link} + ${ARGN}) +- endfunction(target_link_${_IN_NAME} _NAME) +-endmacro(spark_add_link _IN_NAME) +\ No newline at end of file ++ endfunction(target_link_${_name} _link) ++endmacro(spark_add_link _name) +diff --git a/cmake/SparkMacrosExtendConfig.cmake b/cmake/SparkMacrosExtendConfig.cmake +index bad0620..0a4dcb2 100644 +--- a/cmake/SparkMacrosExtendConfig.cmake ++++ b/cmake/SparkMacrosExtendConfig.cmake +@@ -4,193 +4,428 @@ + function(find_plus INVAL OUTVAL) + string(FIND "${INVAL}" "+" plus_index) + set(${OUTVAL} ${plus_index} 
PARENT_SCOPE) +- # if(plus_index LESS 0) +- # set(${OUTVAL} -1 PARENT_SCOPE) +- # else() +- # set(${OUTVAL} ${plus_index} PARENT_SCOPE) +- # endif(plus_index LESS 0) + endfunction(find_plus INVAL OUTVAL) + +-# find_plus("FF" FFFF) +-# message("--> FFFF ${FFFF}") # --> FFFF -1 +-# find_plus("F+F" FFFF) +-# message("--> FFFF ${FFFF}") # --> FFFF 1 +-# find_plus("+F+F" FFFF) +-# message("--> FFFF ${FFFF}") # --> FFFF 0 +- +-# set(FFF) +-# list(APPEND FFFF ) +-# list(APPEND FFFF "F") +-# list(APPEND FFFF "FA") +-# message("--> FFFF: ${FFFF}") # --> FFFF: F;FA +- +-# set(FFFFS "") +-# list(APPEND FFFFS ${FFFF}) +-# message("--> FFFFS: ${FFFFS}") # --> FFFFS: F;FA +- +-# set(FFFF "+AA+BB+CC+DD") +-# string(REPLACE "+" ";" FFFFL "${FFFF}") +-# list(LENGTH FFFFL FFFFLEN) +-# message("--> FFFFL: ${FFFFL} --> ${FFFFLEN}") # --> FFFFL: F; +- +-# plus_list +-# 将传入的 "+AAA+BBB+CCC" 类型数据变成一个 列表(list) +-# 适用于不使用 string 进行替换 + 为 ";" 的情况下使用直接变成 list +-function(plus_list INVAL OUTVAL OUTVALLEN) +- # set(${OUTVAL} "..." PARENT_SCOPE) +- # set(${OUTVALLEN} 0 PARENT_SCOPE) +- +- set(_tmps "") # 设置为空的 +- +- # 寻找下一个 + 位置 +- find_plus(${INVAL} RIGHT_PLUS) +- +- string(LENGTH "${INVAL}" INVALLEN) +- message("--> 传入的 INVAL: --> 内容: ${INVAL}") +- message("--> 传入的 INVAL: --> 长度: ${INVALLEN}") +- message("--> 传入的 INVAL: --> +位置: ${RIGHT_PLUS}") +- +- # 判断是否有右侧 + 号 +- if(RIGHT_PLUS LESS 0) +- message("--> 传入的 INVAL: --> 无需计算新的+位置") +- # message("--> 计算新的 + 位置: ${_PLUSINDEX}") +- list(APPEND _tmps ${INVAL}) +- else() +- math(EXPR _PLUSINDEX "${RIGHT_PLUS}+1") +- message("--> 传入的 INVAL: --> 需计算+位置 --> 右移: ${_PLUSINDEX}") +- +- string(SUBSTRING "${INVAL}" ${_PLUSINDEX} ${INVALLEN} NewVal) +- message("--> 传入的 INVAL: --> 需计算+位置 --> 右移: ${_PLUSINDEX} -> 内容: ${NewVal}") +- # string(REPLACE "+" ";" _tmps "${NewVal}") +- # list(LENGTH FFFFL FFFFLEN) +- +- # message("--> 计算新的 + 位置: ${_PLUSINDEX} --> 后面的 NewVal: ${NewVal}") +- +- # find_plus(${NewVal} _NextPlus) +- # if(_NextPlus LESS 0) +- # list(APPEND _tmps ${NewVal}) +- # message("--> 追加新的 + 位置: ${_PLUSINDEX} --> 后面的") +- # else() +- # message("--> 追加新的 + 位置: ${_PLUSINDEX} --> 后面的") +- # # 重新 +- # # plus_list(${NewVal} NewValS ) +- # # foreach(item) +- # # list(APPEND _tmps ${item}) +- # # endforeach(item) +- # endif(_NextPlus LESS 0) +- endif(RIGHT_PLUS LESS 0) +- +- set(${OUTVAL} ${_tmps} PARENT_SCOPE) +- list(LENGTH _tmps _tmps_len) +- set(${OUTVALLEN} ${_tmps_len} PARENT_SCOPE) +- +-endfunction(plus_list INVAL OUTVAL OUTVALLEN) +- +-# plus_list("+AAA+BBB+CCC+DDD" FFF FFLEN) +-# message("--------> ${FFF}: -> ${FFLEN}") +- +-# spark_add_library_realpaths ++function(find_plus_v INVAL OUTVAL) ++ string(FIND "${${INVAL}}" "+" plus_index) ++ set(${OUTVAL} ${plus_index} PARENT_SCOPE) ++endfunction(find_plus_v INVAL OUTVAL) ++ ++function(find_colon INVAL OUTVAL) ++ string(FIND "${INVAL}" ":" colon_index) ++ set(${OUTVAL} ${colon_index} PARENT_SCOPE) ++endfunction(find_colon INVAL OUTVAL) ++ ++function(find_colon_v INVAL OUTVAL) ++ string(FIND "${${INVAL}}" ":" colon_index) ++ set(${OUTVAL} ${colon_index} PARENT_SCOPE) ++endfunction(find_colon_v INVAL OUTVAL) ++ ++function(find_dir INVAL OUTVAL) ++ string(FIND "${INVAL}" "/" _STR ${ARGN}) ++ set(${OUTVAL} ${_STR} PARENT_SCOPE) ++endfunction(find_dir INVAL OUTVAL) ++ ++function(find_dir_v INVAL OUTVAL) ++ string(FIND "${${INVAL}}" "/" _STR ${ARGN}) ++ set(${OUTVAL} ${_STR} PARENT_SCOPE) ++endfunction(find_dir_v INVAL OUTVAL) ++ ++# ++function(str_left INVAL INDEX OUTVAL) ++ set(LEFT_INDEX ${INDEX}) ++ string(SUBSTRING "${INVAL}" 0 
${LEFT_INDEX} _LEFT_V) ++ set(${OUTVAL} ${_LEFT_V} PARENT_SCOPE) ++endfunction(str_left INVAL INDEX OUTVAL) ++ ++function(str_right INVAL INDEX OUTVAL) ++ math(EXPR RIGHT_INDEX ${INDEX}+1) ++ string(SUBSTRING "${INVAL}" ${RIGHT_INDEX} -1 _RIGHT_V) ++ set(${OUTVAL} ${_RIGHT_V} PARENT_SCOPE) ++endfunction(str_right INVAL INDEX OUTVAL) ++ ++function(str_left_v INVAL INDEX OUTVAL) ++ set(LEFT_INDEX ${${INDEX}}) ++ string(SUBSTRING "${${INVAL}}" 0 ${LEFT_INDEX} _LEFT_V) ++ set(${OUTVAL} ${_LEFT_V} PARENT_SCOPE) ++endfunction(str_left_v INVAL INDEX OUTVAL) ++ ++function(str_right_v INVAL INDEX OUTVAL) ++ math(EXPR RIGHT_INDEX ${${INDEX}}+1) ++ string(SUBSTRING "${${INVAL}}" ${RIGHT_INDEX} -1 _RIGHT_V) ++ set(${OUTVAL} ${_RIGHT_V} PARENT_SCOPE) ++endfunction(str_right_v INVAL INDEX OUTVAL) ++ ++# ++function(find_colon_plus INVAL OUTVAL) ++ find_colon(${INVAL} COLON_INDEX) ++ str_right(${INVAL} ${COLON_INDEX} COLON_RIGHT) ++ find_plus_v(COLON_RIGHT PLUS_INDEX) ++ str_left_v(COLON_RIGHT PLUS_INDEX COLON_RIGHT_LEFT_PLUS) ++ ++ set(${OUTVAL} ${COLON_RIGHT_LEFT_PLUS} PARENT_SCOPE) ++endfunction(find_colon_plus INVAL OUTVAL) ++ ++function(find_colon_plus_v INVAL OUTVAL) ++ find_colon_v(${INVAL} COLON_INDEX) ++ str_right_v(${INVAL} COLON_INDEX COLON_RIGHT) ++ find_plus_v(COLON_RIGHT PLUS_INDEX) ++ str_left_v(COLON_RIGHT PLUS_INDEX COLON_RIGHT_LEFT_PLUS) ++ ++ set(${OUTVAL} ${COLON_RIGHT_LEFT_PLUS} PARENT_SCOPE) ++endfunction(find_colon_plus_v INVAL OUTVAL) ++ ++function(find_dir_plus INVAL OUTVAL) ++ # t:*/*+d ++ # ^ ++ find_dir("${INVAL}" SLASH_INDEX REVERSE) ++ str_right("${INVAL}" ${SLASH_INDEX} SLASH_RIGHT) ++ find_plus_v(SLASH_RIGHT PLUS_INDEX) ++ str_left_v(SLASH_RIGHT PLUS_INDEX SLASH_RIGHT_LEFT_PLUS) ++ ++ set(${OUTVAL} ${SLASH_RIGHT_LEFT_PLUS} PARENT_SCOPE) ++endfunction(find_dir_plus INVAL OUTVAL) ++ ++function(find_dir_plus_v INVAL OUTVAL) ++ # t:*/*+d ++ # ^ ++ find_dir("${${INVAL}}" SLASH_INDEX REVERSE) ++ str_right("${${INVAL}}" ${SLASH_INDEX} SLASH_RIGHT) ++ find_plus_v(SLASH_RIGHT PLUS_INDEX) ++ str_left_v(SLASH_RIGHT PLUS_INDEX SLASH_RIGHT_LEFT_PLUS) ++ ++ set(${OUTVAL} ${SLASH_RIGHT_LEFT_PLUS} PARENT_SCOPE) ++endfunction(find_dir_plus_v INVAL OUTVAL) ++ ++ ++# spark_add_library_source ... ++# 扩展 一行一可执行目标 的构建的扩展宏 ++# 在构建时将会另外加入这些资源 ++macro(spark_add_library_source target) ++ set(${target}_ADD_SOURCE ${ARGN}) ++endmacro(spark_add_library_source target) ++ ++# 冗余的 target_link_qt5 或 qt6 的处理逻辑 ++macro(_handle_spark_target_link_qt_macro _target) ++ target_link_qt(${_target}) ++endmacro(_handle_spark_target_link_qt_macro _target) ++ ++# spark_add_library_realpaths [dirs ...] 
+ # 基于传入的项进行构建 +-# 可接受的值为: 路径列表 +-# 可接受的值为: 路径列表+依赖库A+依赖库B ++ # 可接受的值为: 路径列表 ++ # 可接受的值为: 路径列表+依赖库A+依赖库B + macro(spark_add_library_realpaths) +- message("---> 基于传入的项进行构建 <---") +- # message("--> src/unclassified/ItemDelegates/NdStyledItemDelegate") +- # string(FIND [REVERSE]) +- # string(SUBSTRING ) +- # math(EXPR value "100 * 0xA" OUTPUT_FORMAT DECIMAL) # value is set to "1000" + + set(REALPATHS ${ARGN}) + foreach(REALPATH IN LISTS REALPATHS) +- message("---> 传入路径: ${REALPATH} <--- ") +- string(LENGTH "${REALPATH}" REALPATH_LENGTH) +- message("---> 计算传入路径长度: --> 长度: ${REALPATH_LENGTH}") +- +- string(FIND "${REALPATH}" "/" LASTINDEX REVERSE) +- message("---> 计算传入路径末尾/位置: --> 长度: ${LASTINDEX}") +- math(EXPR LASTINDEX "${LASTINDEX}+1") +- message("---> 计算传入路径末尾/右移: --> 长度: ${LASTINDEX}") +- string(SUBSTRING "${REALPATH}" ${LASTINDEX} ${REALPATH_LENGTH} REALNAME_Dependency) + ++ # # 找 : 号下标,这是找:号的函数 ++ # find_colon(${REALPATH} COLON_INDEX) ++ # 找 / 号下标,这是找/号的函数 ++ find_dir_v(REALPATH SLASH_INDEX REVERSE) + # 找 + 号下标,这是找+号的函数 +- find_plus(${REALPATH} RIGHT_PLUS) ++ find_plus_v(REALPATH PLUS_INDEX) ++ ++ # + ++ if(PLUS_INDEX LESS 0) ++ # 完全没有 + 的情况下,它就是一个基于目录的构建 ++ set(dir ${REALPATH}) ++ str_right_v(REALPATH SLASH_INDEX target) ++ ++ spark_add_library_path(${target} ++ ${dir} ++ ${${target}_ADD_SOURCE} ++ ) ++ # 使用 spark_add_library_realpaths 构建的依赖将允许直接引用库头文件 ++ target_include_directories(${target} PUBLIC ${dir}) ++ _handle_spark_target_link_qt_macro(${target}) ++ else() ++ # 有 + 的情况下,获取 + 号下标右侧所有内容为 target_depends_str 并转为列表 ++ str_right_v(REALPATH PLUS_INDEX target_depends_str) ++ string(REPLACE "+" ";" target_depends "${target_depends_str}") ++ ++ find_dir_plus_v(REALPATH target) ++ str_left_v(REALPATH PLUS_INDEX dir) ++ ++ spark_add_library_path(${target} ++ ${dir} ++ ${${target}_ADD_SOURCE} ++ ) ++ spark_debug_message(" [INCLUDE_DIRS]: ${dir} ${dir}/.. \n") ++ target_include_directories(${target} PUBLIC ${dir} ${dir}/..) ++ target_link_libraries(${target} ${target_depends}) ++ endif(PLUS_INDEX LESS 0) + +- # 判断是否有找到 + 号下标,值为 -1 或 正整数 +- if(RIGHT_PLUS LESS 0) # 小于0: 不存在 + 号 +- set(REALNAME "${REALNAME_Dependency}") +- message("---> 传入路径末尾/右移部分: --> ${REALNAME} <-- 无依赖+") ++ endforeach(REALPATH IN LISTS REALPATHS) + +- message("---> 构建 ${REALNAME} -> ${REALNAME} ${REALPATH} ") ++endmacro(spark_add_library_realpaths) + +- spark_add_library_path(${REALNAME} ${REALPATH}) +- target_link_qt5(${REALNAME}) +- else() +- message("---> 传入路径末尾/右移部分: --> ${REALNAME_Dependency} <-- 依赖+") + +- # 存在+号,将截取从 / 到 + 号之间的内容作为目标名称 +- # 例如 src/unclassified/widgets/DocTypeListView+JsonDeploy +- # ^(LASTINDEX) ^(RIGHT_PLUS) +- # 将 RIGHT_PLUS - LASTINDEX 计算出 DocTypeListView 字符长度 +- math(EXPR REALNAME_LENGTH "${RIGHT_PLUS}-${LASTINDEX}") ++# spark_add_shared_library_realpaths [dirs ...] 
++# 基于传入的项进行构建 ++ # 可接受的值为: 路径列表 ++ # 可接受的值为: 路径列表+依赖库A+依赖库B ++macro(spark_add_shared_library_realpaths) ++ ++ set(REALPATHS ${ARGN}) ++ foreach(REALPATH IN LISTS REALPATHS) + +- message("---> 计算传入路径末尾/右移部分: --> 位置: ${RIGHT_PLUS}") +- # message("---> 计算传入路径末尾/右移部分: --> 长度: ${REALNAME_Dependency}") ++ # # 找 : 号下标,这是找:号的函数 ++ # find_colon(${REALPATH} COLON_INDEX) ++ # 找 / 号下标,这是找/号的函数 ++ find_dir_v(REALPATH SLASH_INDEX REVERSE) ++ # 找 + 号下标,这是找+号的函数 ++ find_plus_v(REALPATH PLUS_INDEX) ++ ++ # + ++ if(PLUS_INDEX LESS 0) ++ # 完全没有 + 的情况下,它就是一个基于目录的构建 ++ set(dir ${REALPATH}) ++ str_right_v(REALPATH SLASH_INDEX target) ++ ++ spark_add_library_path(${target} SHARED ++ ${dir} ++ ${${target}_ADD_SOURCE} ++ ) ++ # 使用 spark_add_library_realpaths 构建的依赖将允许直接引用库头文件 ++ target_include_directories(${target} PUBLIC ${dir}) ++ _handle_spark_target_link_qt_macro(${target}) ++ else() ++ # 有 + 的情况下,获取 + 号下标右侧所有内容为 target_depends_str 并转为列表 ++ str_right_v(REALPATH PLUS_INDEX target_depends_str) ++ string(REPLACE "+" ";" target_depends "${target_depends_str}") ++ ++ find_dir_plus_v(REALPATH target) ++ str_left_v(REALPATH PLUS_INDEX dir) ++ ++ spark_add_library_path(${target} SHARED ++ ${dir} ++ ${${target}_ADD_SOURCE} ++ ) ++ spark_debug_message(" [INCLUDE_DIRS]: ${dir} ${dir}/.. \n") ++ target_include_directories(${target} PUBLIC ${dir} ${dir}/..) ++ target_link_libraries(${target} ${target_depends}) ++ endif(PLUS_INDEX LESS 0) + +- # 目标名称为 DocTypeListView +- # 依赖为 JsonDeploy +- # set(REALNAME "") +- string(SUBSTRING "${REALPATH}" 0 ${RIGHT_PLUS} _REALPATH_DIR) +- string(SUBSTRING "${REALPATH}" ${LASTINDEX} ${REALNAME_LENGTH} REALNAME) ++ endforeach(REALPATH IN LISTS REALPATHS) + +- message("---> 计算传入路径末尾/右移部分: --> 库名: ${REALNAME}") ++endmacro(spark_add_shared_library_realpaths) + +- string(SUBSTRING "${REALPATH}" ${RIGHT_PLUS} ${REALPATH_LENGTH} Dependency) +- message("---> 计算传入路径末尾/右移部分: --> 库名: ${REALNAME} --> +部分: ${Dependency}") ++# spark_aux_source_paths ++# 将指定路径中的文件变成可用的AUX源文件列表 ++macro(spark_aux_source_paths AUX_VAR) ++ set(${AUX_VAR} "") ++ set(${AUX_VAR}_PATHS ${ARGN}) + +- # plus_list(${Dependency} dependencies dependencies_len) +- string(REPLACE "+" ";" dependencies "${Dependency}") +- message("---> 计算传入路径末尾/右移部分: --> 库名: ${REALNAME} --> +部分: ${Dependency} --> 列表: ${dependencies} <-- ") ++ foreach(aux_path IN LISTS ${AUX_VAR}_PATHS) ++ # spark_debug_message("aux_path: ${aux_path}") ++ aux_source_directory(${aux_path} ${AUX_VAR}) ++ endforeach(aux_path IN LISTS ${AUX_VAR}_PATHS) + ++endmacro(spark_aux_source_paths AUX_VAR) + +- message("---> 构建 ${REALNAME} -> ${REALNAME} ${_REALPATH_DIR}") ++# spark_file_glob ++# 使用用 file(GLOB) 的匹配规则,并一次可匹配多个规则 ++# ++macro(spark_file_glob FGLOB_VAR) ++ set(${FGLOB_VAR} "") ++ set(${FGLOB_VAR}_PATHS ${ARGN}) + +- spark_add_library_path(${REALNAME} ${_REALPATH_DIR}) +- # target_link_qt5(${REALNAME}) # 使用依赖的依赖或许也不错 ++ foreach(fglob_path IN LISTS ${FGLOB_VAR}_PATHS) + +- target_include_directories(${REALNAME} PUBLIC ${_REALPATH_DIR}) +- target_link_libraries(${REALNAME} ${dependencies}) ++ file(GLOB FGLOB_PATH_SRCS ${fglob_path}) ++ foreach(fglob_path_src IN LISTS FGLOB_PATH_SRCS) ++ # spark_debug_message(" -> ${item}") ++ list(APPEND ${FGLOB_VAR} ${fglob_path_src}) ++ endforeach(fglob_path_src IN LISTS FGLOB_PATH_SRCS) + +- endif(RIGHT_PLUS LESS 0) +- endforeach(REALPATH IN LISTS REALPATHS) ++ endforeach(fglob_path IN LISTS ${FGLOB_VAR}_PATHS) + +-endmacro(spark_add_library_realpaths) ++endmacro(spark_file_glob FGLOB_VAR) + + + # spark_add_source_paths + # 
+ # spark_add_source_paths
+ # Turn the files in the given paths into a usable source list
+ #
+-macro(spark_add_source_paths SOURCE_VARIABLE_NAME)
+-    set(SOURCE_PATHS ${ARGN})
+-    set(${SOURCE_VARIABLE_NAME}_PATHS "")
+-    set(${SOURCE_VARIABLE_NAME} "")
+-    foreach(SOURCE_PATH IN LISTS SOURCE_PATHS)
+-        list(APPEND ${SOURCE_VARIABLE_NAME}_PATHS ${CMAKE_CURRENT_SOURCE_DIR}/${SOURCE_PATH})
+-        aux_source_directory(${SOURCE_PATH} _SOURCES)
+-        foreach(item IN LISTS _SOURCES)
+-            # message(" -> ${item}")
+-            list(APPEND ${SOURCE_VARIABLE_NAME} ${item})
+-        endforeach(item IN LISTS _SOURCES)
++macro(spark_add_source_paths SOURCE_VAR)
++    set(${SOURCE_VAR} "")
++    set(${SOURCE_VAR}_PATHS ${ARGN})
++
++    spark_aux_source_paths(${SOURCE_VAR} ${ARGN})
++    foreach(source_path IN LISTS ${SOURCE_VAR}_PATHS)
++        # list(APPEND ${SOURCE_VAR}_PATHS ${CMAKE_CURRENT_SOURCE_DIR}/${SOURCE_PATH})
++        # aux_source_directory(${SOURCE_PATH} _SOURCES)
++        # foreach(item IN LISTS _SOURCES)
++        #     # spark_debug_message(" -> ${item}")
++        #     list(APPEND ${SOURCE_VAR} ${item})
++        # endforeach(item IN LISTS _SOURCES)
+
+         # file(GLOB HEADER_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${SOURCE_PATH}/*.h)
+         # foreach(item IN LISTS HEADER_LIST)
+-        #     # message(" -> ${item}")
+-        #     list(APPEND ${SOURCE_VARIABLE_NAME} ${item})
++        #     # spark_debug_message(" -> ${item}")
++        #     list(APPEND ${SOURCE_VAR} ${item})
+         # endforeach(item IN LISTS HEADER_LIST)
+
+-        file(GLOB UI_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${SOURCE_PATH}/*.ui)
+-        foreach(item IN LISTS UI_LIST)
+-            # message(" -> ${item}")
+-            list(APPEND ${SOURCE_VARIABLE_NAME} ${item})
+-        endforeach(item IN LISTS UI_LIST)
+-    endforeach(SOURCE_PATH IN LISTS SOURCE_PATHS)
+-endmacro(spark_add_source_paths SOURCE_VARIABLE_NAME)
++        file(GLOB UI_SRCS RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${source_path}/*.ui)
++        foreach(ui_src IN LISTS UI_SRCS)
++            # spark_debug_message(" -> ${item}")
++            list(APPEND ${SOURCE_VAR} ${ui_src})
++        endforeach(ui_src IN LISTS UI_SRCS)
++    endforeach(source_path IN LISTS ${SOURCE_VAR}_PATHS)
++endmacro(spark_add_source_paths SOURCE_VAR)
++
++
++# spark_add_library_file_glob
++    #
++macro(spark_add_library_file_glob _lib_name)
++    spark_file_glob(${_lib_name}_SOURCES ${ARGN})
++    spark_add_library(${_lib_name} ${${_lib_name}_SOURCES})
++endmacro(spark_add_library_file_glob _lib_name)
++
++
++
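A hedged sketch of the source-collection helpers above; spark_file_glob accepts several file(GLOB) patterns in one call, spark_add_source_paths collects *.c/*.cpp via aux_source_directory plus *.ui forms, and spark_add_library_file_glob feeds a glob result straight into spark_add_library (which is assumed to exist in the base Spark macro set; all paths below are illustrative):

    # Gather resource files from two glob patterns into QRC_SOURCES.
    spark_file_glob(QRC_SOURCES assets/*.qrc assets/qml/*.qrc)

    # Collect sources and .ui forms from the listed directories into DIALOG_SOURCES.
    spark_add_source_paths(DIALOG_SOURCES src/dialogs src/dialogs/widgets)

    # One-step library built directly from glob patterns.
    spark_add_library_file_glob(dialog-widgets src/dialogs/widgets/*.cpp src/dialogs/widgets/*.h)
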
++# spark_add_executable_source <target> [...]
++# Extension macro for the one-line-one-executable build
++# the given sources will additionally be included at build time
++macro(spark_add_executable_source target)
++    set(${target}_ADD_SOURCE ${ARGN})
++endmacro(spark_add_executable_source target)
++
++# Factored-out dir handling logic of spark_add_executable_realpaths
++macro(_handle_spark_add_executable_realpaths_if_dir_empty_macro)
++    if("${dir}" STREQUAL "")
++        spark_add_executable(${target}
++            ${${target}_ADD_SOURCE}
++        )
++    else()
++        spark_add_executable_path(${target}
++            ${dir}
++            ${${target}_ADD_SOURCE}
++        )
++    endif("${dir}" STREQUAL "")
++endmacro(_handle_spark_add_executable_realpaths_if_dir_empty_macro)
++
++# spark_add_executable_realpaths
++# Build from the given items
++# Accepted values: executable-target:path-list
++# Accepted values: executable-target:path-list+depLibA+depLibB
++macro(spark_add_executable_realpaths)
++
++    set(REALPATHS ${ARGN})
++    foreach(REALPATH IN LISTS REALPATHS)
++
++        # Find the index of ':' (the colon-finding helper)
++        find_colon(${REALPATH} COLON_INDEX)
++
++        if(COLON_INDEX LESS 0)
++            # do nothing
++        else()
++            # A ':' was found; extract the target name
++            # string(SUBSTRING "${REALPATH}" 0 ${COLON_INDEX} REALTARGET)
++            find_colon_v(REALPATH COLON_INDEX)
++            str_left_v(REALPATH COLON_INDEX target)
++            str_right_v(REALPATH COLON_INDEX COLON_REMAIN)
++            # message(FATAL_ERROR "构建一个: ${target}") # verified
++
++        endif(COLON_INDEX LESS 0)
++
++        # Find the index of '+' (the plus-finding helper)
++        find_plus_v(REALPATH PLUS_INDEX)
++
++        if(PLUS_INDEX LESS 0)
++            # Without any '+', this is simply a directory-based build
++            set(dir ${COLON_REMAIN})
++            # spark_add_executable_path(${target}
++            #     ${dir}
++            #     ${${target}_ADD_SOURCE}
++            # )
++            _handle_spark_add_executable_realpaths_if_dir_empty_macro()
++            _handle_spark_target_link_qt_macro(${target})
++        else()
++            # With a '+', everything right of the '+' becomes target_depends_str and is converted to a list
++            str_right_v(REALPATH PLUS_INDEX target_depends_str)
++            string(REPLACE "+" ";" target_depends "${target_depends_str}")
++
++            # Then take dir from the main part, and
++            find_colon_plus_v(REALPATH dir)
++            # spark_add_executable_path(${target}
++            #     ${dir}
++            #     ${${target}_ADD_SOURCE}
++            # )
++            _handle_spark_add_executable_realpaths_if_dir_empty_macro()
++            target_include_directories(${target} PUBLIC ${dir} ${dir}/..)
++            target_link_libraries(${target} ${target_depends})
++        endif(PLUS_INDEX LESS 0)
++    endforeach(REALPATH IN LISTS REALPATHS)
++
++endmacro(spark_add_executable_realpaths)
++
++
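The executable variant uses the 'target:dir' grammar documented above; a small usage sketch, assuming libraries hello and say were declared earlier with the library macros (all names here are illustrative):

    # Extra sources (e.g. a .qrc) can be registered for the target beforehand.
    spark_add_executable_source(spark-store assets/spark.qrc)

    # Executable 'spark-store' built from src/, linking the hello and say libraries.
    spark_add_executable_realpaths(spark-store:src+hello+say)
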
++# One-line-one-library builds
++# 1. Build a library from the given directory path
++#        src/widgets/DocTypeListView
++#                    ^ the directory name becomes the target name
++#        all files under the directory become the library's source files
++#
++# 2. Build a library from the given directory path, depending on the libraries listed after it
++#        src/widgets/MaintainerInfoView+DocTypeListView+...
++#                    ^ this library becomes a dependency of MaintainerInfoView
++#                                      ^ the '+' sign is treated as the separator of the dependency list
++
++# One-line-one-executable targets
++# 1. Build an executable from a given directory path (unworkable; an executable is rarely a whole directory)
++# 2. Build an executable from a given file path (might work)
++# 3. Build an executable from a given file name ()
++# 4. Build an executable from a naming rule (target:dir:dir+depend+depend...)
++
++# One-line-one-target: the integration of (one-line-one-library + one-line-one-executable)
++# 1. Build a target from a given directory (works for both library and executable)
++# 2. Build a target with the same naming rule; the type is written as a one-letter prefix:
++#        s[tatic], d[ynamic], t[executable]
++#    static library   s:dir+depend+depend...
++#    shared library   d:dir+depend+depend...
++#    executable       t::dir+depend+depend...
++#                       ^ executable target name
++
++# One line, one target
++# spark_add_target_realpaths <tag> [realpaths]
++# realpaths:
++    # s: static (s:src/libs/hello)
++    # d: shared (d:src/libs/say)
++    # t: target (t::src+hello+say)
++# See also:
++    # spark_add_executable_realpaths
++    # spark_add_shared_library_realpaths
++    # spark_add_library_realpaths
++macro(spark_add_target_realpaths tag)
++    set(${tag}_ARGN ${ARGN})
++
++    foreach(item IN LISTS ${tag}_ARGN)
++        str_left(${item} 1 item_type)
++        str_right(${item} 1 item_val)
++
++        if("${item_type}" STREQUAL "t")
++            set(item_message "可执行文件")
++        elseif("${item_type}" STREQUAL "d")
++            set(item_message "动态库")
++        elseif("${item_type}" STREQUAL "s")
++            set(item_message "静态库")
++        endif("${item_type}" STREQUAL "t")
++
++        spark_debug_message("代号: [${tag}] 构建 ${item_val}, 类型: ${item_message}")
++        spark_debug_message("  * ${item_val}")
++
++        if("${item_type}" STREQUAL "t")
++            spark_add_executable_realpaths(${item_val})
++        elseif("${item_type}" STREQUAL "d")
++            spark_add_shared_library_realpaths(${item_val})
++        elseif("${item_type}" STREQUAL "s")
++            spark_add_library_realpaths(${item_val})
++        endif("${item_type}" STREQUAL "t")
++
++        spark_debug_message("")
++
++    endforeach(item IN LISTS ${tag}_ARGN)
++
++endmacro(spark_add_target_realpaths tag)
+diff --git a/cmake/SparkTranslatorConfig.cmake b/cmake/SparkTranslatorConfig.cmake
+index 46de519..8714e12 100644
+--- a/cmake/SparkTranslatorConfig.cmake
++++ b/cmake/SparkTranslatorConfig.cmake
+@@ -1,32 +1,49 @@
+ cmake_minimum_required(VERSION 3.5.1)
+
+-find_package(Qt5LinguistTools)
+-
+-file(GLOB SPARK_TRANSLATIONS ${CMAKE_SOURCE_DIR}/translations/*.ts)
+-
+-message("================ Translations ================")
+-foreach(item IN LISTS SPARK_TRANSLATIONS)
+-    message("-> ${item}")
+-endforeach(item IN LISTS SPARK_TRANSLATIONS)
+-
+-qt5_add_translation(SPARK_QM_TRANSLATIONS
+-    ${SPARK_TRANSLATIONS})
+-
+-file(WRITE ${CMAKE_BINARY_DIR}/SPARK_QM_TRANSLATIONS "")
+-foreach(item IN LISTS SPARK_QM_TRANSLATIONS)
+-    file(APPEND ${CMAKE_BINARY_DIR}/SPARK_QM_TRANSLATIONS "${item}\n")
+-endforeach(item IN LISTS SPARK_QM_TRANSLATIONS)
+-
+-message("translator(ts -> qm):")
+-foreach(item IN LISTS SPARK_QM_TRANSLATIONS)
+-    message("-> ${item}")
+-endforeach(item IN LISTS SPARK_QM_TRANSLATIONS)
+-
+-
+-# Note: SPARK_QM_TRANSLATIONS must be added to the add_executable arguments, otherwise the source-only .ts files will not be generated at compile time
+-
+-# qt5_create_translation
+-    # the .ts files are deleted on make clean or full rebuild, and brand-new .ts files are generated on the next build (existing translations are lost -- be very careful!!)
+-
+-# qt5_add_translation
+-    # this macro is comparatively stable
++# translator_qt5 _qmvar [... *.ts]
++macro(translator_qt5 _qmvar)
++
++    set(${_qmvar}_ARGN ${ARGN})
++    file(GLOB ${_qmvar}_TS_FILES ${${_qmvar}_ARGN})
++
++    find_package(Qt5LinguistTools)
++    qt5_add_translation(${_qmvar}
++        ${${_qmvar}_TS_FILES})
++
++    spark_debug_message("> QT Translation: ${_qmvar}")
++    file(WRITE ${CMAKE_BINARY_DIR}/${_qmvar} "")
++    foreach(item IN LISTS ${_qmvar})
++        file(APPEND ${CMAKE_BINARY_DIR}/${_qmvar} "${item}\n")
++        spark_debug_message("  ${item}")
++    endforeach(item IN LISTS ${_qmvar})
++
++    # Note: SPARK_QM_TRANSLATIONS (or ${_qmvar}) must be added to the add_executable arguments, otherwise the source-only .ts files will not be generated at compile time
++
++    # qt5_create_translation
++        # the .ts files are deleted on make clean or full rebuild, and brand-new .ts files are generated on the next build (existing translations are lost -- be very careful!!)
++
++    # qt5_add_translation
++        # this macro is comparatively stable
++endmacro(translator_qt5 _qmvar)
++
++
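A minimal sketch of driving the new macro from a project file; the variable name and glob pattern mirror the old behaviour that the removed lines hard-coded, and, as the comment above warns, the resulting .qm list must still be passed to add_executable:

    # Convert every .ts under translations/ into .qm files at build time.
    translator_qt5(SPARK_QM_TRANSLATIONS ${CMAKE_SOURCE_DIR}/translations/*.ts)

    # The generated list participates in the build via the executable's sources, e.g.:
    # add_executable(spark-store ${APP_SOURCES} ${SPARK_QM_TRANSLATIONS})
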
++# translator_qt6 _qmvar [... *.ts]
++macro(translator_qt6 _qmvar)
++    # todo
++endmacro(translator_qt6 _qmvar)
++
++# Factored-out dispatch logic for translator_qt5 / qt6
++macro(_handle_spark_translator_qt_macro _outvar)
++    if(SPARK_FIND_QT5)
++        translator_qt5(${_outvar} ${ARGN})
++    endif(SPARK_FIND_QT5)
++
++    if(SPARK_FIND_QT6)
++        translator_qt6(${_outvar} ${ARGN})
++    endif(SPARK_FIND_QT6)
++endmacro(_handle_spark_translator_qt_macro _outvar)
++
++# translator_qt _qmvar [... *.ts | match]
++macro(translator_qt)
++    _handle_spark_translator_qt_macro(${ARGN})
++endmacro(translator_qt)
+diff --git a/cmake/linuxdeployqt-help b/cmake/linuxdeployqt-help
+index 12ac506..1b72fda 100644
+--- a/cmake/linuxdeployqt-help
++++ b/cmake/linuxdeployqt-help
+@@ -45,4 +45,4 @@ Plugins related to a Qt library are copied in with the library.
+
+ See the "Deploying Applications on Linux" topic in the
+ documentation for more information about deployment on Linux.
+-zinface@zinface-PC:/tmp/tmp.5gmZKUqn9s$
+\ No newline at end of file
++zinface@zinface-PC:/tmp/tmp.5gmZKUqn9s$
+\ No newline at end of file
+diff --git a/cmake/spark-appimage.desktop.in b/cmake/spark-appimage.desktop.in.txt
+similarity index 83%
+rename from cmake/spark-appimage.desktop.in
+rename to cmake/spark-appimage.desktop.in.txt
+index 228a84a..491716d 100644
+--- a/cmake/spark-appimage.desktop.in
++++ b/cmake/spark-appimage.desktop.in.txt
+@@ -6,4 +6,4 @@ Icon=default
+ Comment=@APP_COMMENT@
+ Terminal=true
+ Type=Application
+-Categories=@APP_CATEGORIES@
+\ No newline at end of file
++Categories=@APP_CATEGORIES@;
+\ No newline at end of file
+diff --git a/cmake/package-deb.descript b/cmake/spark-deb-package.descript
+similarity index 91%
+rename from cmake/package-deb.descript
+rename to cmake/spark-deb-package.descript
+index 2b485d1..f352d6c 100644
+--- a/cmake/package-deb.descript
++++ b/cmake/spark-deb-package.descript
+@@ -1,6 +1,6 @@
+ # Comment lines (usage)
+-# find_package(DebPackage PATHS ${CMAKE_SOURCE_DIR})
+-# add_package_descript(cmake/package-deb.descript)
++# find_package(SparkDebPackage PATHS ${CMAKE_SOURCE_DIR})
++# add_package_descript(cmake/spark-deb-package.descript)
+
+ # Name of the packaged file
+ # FileName: TBD
+@@ -38,7 +38,9 @@ Maintainer: shenmo
+ # Package homepage
+ Homepage: https://www.spark-app.store/
+ # Package recommends
+-Recommends:
++Recommends:
++# Package conflicts
++Conflicts:
+ # Package description
+ Descrition: Spark Store
+  A community powered app store, based on DTK.
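The renamed descript file keeps the usage contract stated in its own header comment; wiring it into the top-level CMakeLists therefore looks like this (taken directly from the usage lines above):

    # Locate the in-tree packaging module, then generate the deb descriptor from the descript file.
    find_package(SparkDebPackage PATHS ${CMAKE_SOURCE_DIR})
    add_package_descript(cmake/spark-deb-package.descript)
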
+diff --git a/cmake/spark-desktop.desktop.in b/cmake/spark-desktop.desktop.in.txt
+similarity index 88%
+rename from cmake/spark-desktop.desktop.in
+rename to cmake/spark-desktop.desktop.in.txt
+index 0fa070b..75663a2 100644
+--- a/cmake/spark-desktop.desktop.in
++++ b/cmake/spark-desktop.desktop.in.txt
+@@ -7,5 +7,7 @@ Type=@APP_TYPE@
+ Exec=@APP_EXECUTE_PATH@
+ Icon=@APP_EXECUTE_ICON_PATH@
+ Categories=@APP_CATEGORIES@
++MimeType=@APP_MIME_TYPE@
++
++# Generated from the DesktopGenerater component of the z-Tools toolkit
+
+-# Generated from the DesktopGenerater component of the z-Tools toolkit
+\ No newline at end of file
+--
+2.33.1

diff --git a/src/backend/sparkapi.cpp b/src/backend/sparkapi.cpp
index 8535528..8c64d1c 100644
--- a/src/backend/sparkapi.cpp
+++ b/src/backend/sparkapi.cpp
@@ -10,6 +10,8 @@ QString SparkAPI::serverUrl = "";
 #elif __aarch64__
     QString SparkAPI::serverUrlDir = "aarch64-store";
+#elif __loongarch__
+    QString SparkAPI::serverUrlDir = "loong64-store";
 #endif
 
 SparkAPI::SparkAPI(QObject *parent) : QObject(parent)
diff --git a/src/mainwindow-dtk.cpp b/src/mainwindow-dtk.cpp
index 2a88726..b278b1d 100755
--- a/src/mainwindow-dtk.cpp
+++ b/src/mainwindow-dtk.cpp
@@ -25,7 +25,7 @@
 #define WaylandSearchCenter 1
 #define OtherSearchCenter 2
 #define RightSearchSpace 1
-#define UploadServerUrl "https://upload.deepinos.org/"
+#define UploadServerUrl "https://upload.deepinos.org.cn/"
 
 MainWindow::MainWindow(QWidget *parent)
     : BaseWidgetOpacity(parent)
diff --git a/src/pages/appintopage.cpp b/src/pages/appintopage.cpp
index 55db732..e7ea1ec 100644
--- a/src/pages/appintopage.cpp
+++ b/src/pages/appintopage.cpp
@@ -55,7 +55,7 @@ void AppIntoPage::openUrl(const QUrl &url)
     ui->label_2->setText(info["More"].toString());
 
     // show tags
-    #if (DTK_VERSION >= DTK_VERSION_CHECK(5, 15, 0, 0))
+    #if QT_VERSION >= QT_VERSION_CHECK(5, 15, 0)
     QStringList taglist = info["Tags"].toString().split(";", Qt::SkipEmptyParts);
     #else
     QStringList taglist = info["Tags"].toString().split(";", QString::SkipEmptyParts);
@@ -133,11 +133,15 @@ void AppIntoPage::openUrl(const QUrl &url)
     bool isInstalled;
     bool isUpdated;
     QString packagename = info["Pkgname"].toString();
-    isInstall.start("dpkg", QStringList() << "-s" << info["Pkgname"].toString());
+    isInstall.start("/opt/durapps/spark-store/bin/store-helper/check-is-installed", QStringList() << info["Pkgname"].toString());
     qDebug() << info["Pkgname"].toString();
     isInstall.waitForFinished(180 * 1000); // default timeout: 3 minutes
-    int error = QString::fromStdString(isInstall.readAllStandardError().toStdString()).length();
-    if (error == 0)
+
+    int exitCode = isInstall.exitCode();
+    QProcess::ExitStatus exitStatus = isInstall.exitStatus();
+    isInstall.close();
+
+    if (exitCode == 0 && exitStatus == QProcess::NormalExit)
     {
         isInstalled = true;
 
@@ -150,7 +154,7 @@ void AppIntoPage::openUrl(const QUrl &url)
         isUpdate.start("dpkg", QStringList() << "--compare-versions" << localVersion << "ge" << info["Version"].toString());
         isUpdate.waitForFinished(180 * 1000); // default timeout: 3 minutes
 
-        if (!isUpdate.exitCode())
+        if (isUpdate.exitCode() == 0 && isUpdate.exitStatus() == QProcess::NormalExit)
         {
             isUpdated = true;
         }
@@ -158,6 +162,7 @@ void AppIntoPage::openUrl(const QUrl &url)
         {
             isUpdated = false;
         }
+        isUpdate.close();
     }
     else
     {
@@ -337,10 +342,27 @@ void AppIntoPage::isDownloading(const QUrl &url)
     }
     if (item->download == 3)
     {
-        ui->downloadButton->setEnabled(true);
-        ui->downloadButton->setText(tr("Reinstall"));
-        ui->downloadButton->show();
-        ui->pushButton_3->show();
info["Pkgname"].toString(); + QProcess process; + process.start("/opt/durapps/spark-store/bin/store-helper/check-is-installed", {packageName}); + process.waitForFinished(-1); + + int exitCode = process.exitCode(); + QProcess::ExitStatus exitStatus = process.exitStatus(); + process.close(); + + if (exitCode == 0 && exitStatus == QProcess::NormalExit) + { + ui->downloadButton->setEnabled(true); + ui->downloadButton->setText(tr("Reinstall")); + ui->downloadButton->show(); + ui->pushButton_3->show(); + } + else + { + ui->downloadButton->setEnabled(true); + ui->downloadButton->setText(tr("Download and Install")); + } } } @@ -485,22 +507,24 @@ void AppIntoPage::on_pushButton_3_clicked() QProcess uninstall; uninstall.start("pkexec", QStringList() << "apt" << "autopurge" << "-y" << info["Pkgname"].toString().toLower()); uninstall.waitForFinished(-1); + uninstall.close(); QProcess check; check.start("dpkg", QStringList() << "-s" << info["Pkgname"].toString().toLower()); - check.waitForFinished(10*1000); + check.waitForFinished(-1); - if (check.readAllStandardOutput().isEmpty()) + if (check.exitCode() != 0 || check.exitStatus() != QProcess::NormalExit) { ui->downloadButton->setText(tr("Download and Install")); ui->pushButton_3->hide(); - updatesEnabled(); Utils::sendNotification("spark-store",tr("Spark Store"),tr("Uninstall succeeded")); } ui->downloadButton->setEnabled(true); ui->pushButton_3->setEnabled(true); + + check.close(); }); } diff --git a/src/pages/applistpage.cpp b/src/pages/applistpage.cpp index 38f5f56..12168f5 100644 --- a/src/pages/applistpage.cpp +++ b/src/pages/applistpage.cpp @@ -1,108 +1,105 @@ -#include "applistpage.h" -#include "ui_applistpage.h" - - - -AppListPage::AppListPage(QWidget *parent) : QWidget(parent), - ui(new Ui::AppListPage) -{ - ui->setupUi(this); - ui->webEngineView->page()->setBackgroundColor(Qt::transparent); -} -void AppListPage::setTheme(bool dark) -{ - isDark = dark; - if (dark) - { - this->setStyleSheet("#frame{background-color: #252525;border-radius:14px;border:1px solid rgb(64, 64, 64);}"); - } - else - { - // 亮色模式 - this->setStyleSheet("#frame{background-color: #ffffff;border-radius:14px;border:1px solid rgb(229,229,229);}"); - } - if (isSearch) - { - getSearchList(nowType); - } - else - { - getAppList(nowType); - } -} -void AppListPage::getAppList(QString type) -{ - isSearch = false; - nowType = type; - SparkAPI *api = new SparkAPI(this); - QString url; - QString theme; - if (isDark) - { - theme = "theme=dark"; - } - else - { - theme = "theme=light"; - } - if (type == "") - { - url = api->getServerUrl() + SparkAPI::getArchDir() + "/#/flamescion/?" + theme + "&" + "arch=x86"; - #ifdef __aarch64__ - url = api->getServerUrl() + SparkAPI::getArchDir() + "/#/flamescion/?" 
+ theme + "&" + "arch=aarch64"; - #endif - } - else - { - url = api->getServerUrl() + SparkAPI::getArchDir() + "/#/flamescion/applist?type=" + type + "&" + theme + "&" + "arch=x86"; - #ifdef __aarch64__ - url = api->getServerUrl() + SparkAPI::getArchDir() + "/#/flamescion/applist?type=" + type + "&" + theme + "&" + "arch=aarch64"; - #endif - } - - ui->webEngineView->setUrl(url); - delete api; -} - -void AppListPage::getSearchList(const QString &keyword) -{ - isSearch = true; - nowType = keyword; - SparkAPI *api = new SparkAPI(this); - QString url; - QString theme; - if (isDark) - { - theme = "theme=dark"; - } - else - { - theme = "theme=light"; - } - - url = api->getServerUrl() + SparkAPI::getArchDir() + "/#/flamescion/search?keywords=" + QUrl::toPercentEncoding(keyword) + "&" + theme + "&" + "arch=x86"; - #ifdef __aarch64__ - url = api->getServerUrl() + SparkAPI::getArchDir() + "/#/flamescion/search?keywords=" + QUrl::toPercentEncoding(keyword) + "&" + theme + "&" + "arch=aarch64"; - #endif - ui->webEngineView->setUrl(url); - delete api; -} - -AppListPage::~AppListPage() -{ - delete ui; -} - -void AppListPage::on_webEngineView_urlChanged(const QUrl &arg1) -{ - if (arg1.path().right(8) == "app.json") - { - QString url = arg1.toString(); - url = url.mid(url.indexOf("/" + SparkAPI::getArchDir() + "/")); - url = "spk:/" + url; - url = url.mid(0, url.indexOf("/app.json")); - qDebug() << "程序跳转链接地址:" << url; - ui->webEngineView->back(); - emit clicked(url); - } -} +#include "applistpage.h" +#include "ui_applistpage.h" + +#define BUILD_URL(theme, arch) \ + api->getServerUrl() + SparkAPI::getArchDir() + "/#/flamescion/" + (type.isEmpty() ? "?" : "applist?type=" + type + "&") + theme + "&arch=" + arch + +AppListPage::AppListPage(QWidget *parent) : QWidget(parent), + ui(new Ui::AppListPage) +{ + ui->setupUi(this); + ui->webEngineView->page()->setBackgroundColor(Qt::transparent); +} +void AppListPage::setTheme(bool dark) +{ + isDark = dark; + if (dark) + { + this->setStyleSheet("#frame{background-color: #252525;border-radius:14px;border:1px solid rgb(64, 64, 64);}"); + } + else + { + // 亮色模式 + this->setStyleSheet("#frame{background-color: #ffffff;border-radius:14px;border:1px solid rgb(229,229,229);}"); + } + if (isSearch) + { + getSearchList(nowType); + } + else + { + getAppList(nowType); + } +} +void AppListPage::getAppList(QString type) +{ + isSearch = false; + nowType = type; + SparkAPI *api = new SparkAPI(this); + QString url; + QString theme; + if (isDark) + { + theme = "theme=dark"; + } + else + { + theme = "theme=light"; + } + + #ifdef __aarch64__ + url = BUILD_URL(theme, "aarch64"); + #elif __loongarch__ + url = BUILD_URL(theme, "loong64"); + #else + url = BUILD_URL(theme, "x86"); + #endif + + ui->webEngineView->setUrl(url); + delete api; +} + +void AppListPage::getSearchList(const QString &keyword) +{ + isSearch = true; + nowType = keyword; + SparkAPI *api = new SparkAPI(this); + QString url; + QString theme; + if (isDark) + { + theme = "theme=dark"; + } + else + { + theme = "theme=light"; + } + + url = api->getServerUrl() + SparkAPI::getArchDir() + "/#/flamescion/search?keywords=" + QUrl::toPercentEncoding(keyword) + "&" + theme + "&" + "arch=x86"; + #ifdef __aarch64__ + url = api->getServerUrl() + SparkAPI::getArchDir() + "/#/flamescion/search?keywords=" + QUrl::toPercentEncoding(keyword) + "&" + theme + "&" + "arch=aarch64"; + #elif __loongarch__ + url = api->getServerUrl() + SparkAPI::getArchDir() + "/#/flamescion/search?keywords=" + QUrl::toPercentEncoding(keyword) + "&" + theme + "&" 
+ "arch=loong64"; + #endif + ui->webEngineView->setUrl(url); + delete api; +} + +AppListPage::~AppListPage() +{ + delete ui; +} + +void AppListPage::on_webEngineView_urlChanged(const QUrl &arg1) +{ + if (arg1.path().right(8) == "app.json") + { + QString url = arg1.toString(); + url = url.mid(url.indexOf("/" + SparkAPI::getArchDir() + "/")); + url = "spk:/" + url; + url = url.mid(0, url.indexOf("/app.json")); + qDebug() << "程序跳转链接地址:" << url; + ui->webEngineView->back(); + emit clicked(url); + } +} diff --git a/src/pages/settingspage.cpp b/src/pages/settingspage.cpp index 87e90c6..16cf2c9 100644 --- a/src/pages/settingspage.cpp +++ b/src/pages/settingspage.cpp @@ -9,7 +9,7 @@ #define TMP_PATH "/tmp/spark-store" #define DEFAULT_SERVER_URL "https://cdn.d.store.deepinos.org.cn/" -#define DEFAULT_CHECK_DOMAIN "deepinos" + bool SettingsPage::needUncompatibleNotification = true; bool SettingsPage::isdownload = false; @@ -57,10 +57,7 @@ void SettingsPage::readServerList() // 创建 QTextStream 对象 QTextStream textStream(&file); - if (!textStream.readAll().contains(DEFAULT_CHECK_DOMAIN)) // 校验配置文件有效性 - { - return; - } + textStream.seek(0); // 回到开头 QString lineData = textStream.readLine(); // 读取文件的第一行 ui->comboBox_server->addItem(lineData); diff --git a/src/utils/utils.cpp b/src/utils/utils.cpp index 9a1440b..a3aa560 100644 --- a/src/utils/utils.cpp +++ b/src/utils/utils.cpp @@ -80,8 +80,7 @@ bool Utils::isWayland() bool Utils::isTreeLand() { bool isTreeLand = false; - if (qgetenv("DDE_CURRENT_COMPOSITER").toLower() == "treeland" - || qgetenv("DESKTOP_SESSION").toLower() == "treeland") { + if (qgetenv("DDE_CURRENT_COMPOSITOR").toLower() == "treeland") { isTreeLand = true; } @@ -185,14 +184,18 @@ void Utils::setQPAPlatform() qDebug() << "System Wayland enabled:" << isWayland << "Spark Wayland enabled:" << useWayland; - if (isWayland && useWayland && !(Dtk::Core::DSysInfo::isDeepin() || isDDE)) + /** + * NOTE: https://github.com/linuxdeepin/developer-center/issues/7217#issuecomment-1922653903 + * DDE Wayland has been deprecated, so using wayland plugin only + */ + if (isWayland && useWayland /*&& !(Dtk::Core::DSysInfo::isDeepin() || isDDE)*/) { qputenv("QT_QPA_PLATFORM", "wayland"); } - else if (isWayland && useWayland && (Dtk::Core::DSysInfo::isDeepin() && isDDE)) - { - qputenv("QT_QPA_PLATFORM", "dwayland"); - } + // else if (isWayland && useWayland && (Dtk::Core::DSysInfo::isDeepin() && isDDE)) + // { + // qputenv("QT_QPA_PLATFORM", "dwayland"); + // } else { qputenv("QT_QPA_PLATFORM", "dxcb"); diff --git a/tool/aptss b/tool/aptss index 88650e2..ae54b55 100755 --- a/tool/aptss +++ b/tool/aptss @@ -1,5 +1,7 @@ #!/bin/bash +SPARK_DOWNLOAD_SERVER_URL="https://d.store.deepinos.org.cn/" +SPARK_DOWNLOAD_SERVER_URL_NO_PROTOCOL="d.store.deepinos.org.cn" source /opt/durapps/spark-store/bin/bashimport/transhell.amber load_transhell @@ -12,6 +14,10 @@ case `arch` in STORE_URL="aarch64-store" STORE_LIST_URL="-aarch64" ;; + loongarch64) + STORE_URL="loong64-store" + STORE_LIST_URL="-loong64" + ;; esac SS_APT_FAST="/opt/durapps/spark-store/bin/apt-fast/ss-apt-fast" @@ -40,22 +46,22 @@ if [ ! -e "/tmp/aptss-conf/apt-fast.conf" ];then mkdir -p /tmp/aptss-conf/ echo -e "\e[1;32m${TRANSHELL_CONTENT_GETTING_SERVER_CONFIG_AND_MIRROR_LIST}\e[0m" echo -curl --progress-bar -o /tmp/aptss-conf/apt-fast.conf "https://d.store.deepinos.org.cn/apt-fast.conf" +curl --progress-bar -o /tmp/aptss-conf/apt-fast.conf "${SPARK_DOWNLOAD_SERVER_URL}/apt-fast.conf" chmod -R 755 /tmp/aptss-conf fi -if [ ! 
-e "/var/lib/aptss/lists/d.spark-app.store_${STORE_URL}_Packages" ] && [ ! -e "/var/lib/aptss/lists/d.store.deepinos.org.cn_${STORE_URL}_Packages" ] && [ ! -e "/var/lib/aptss/lists/mirrors.sdu.edu.cn_spark-store-repository_${STORE_URL}_Packages" ];then +if [ ! -e "/var/lib/aptss/lists/${SPARK_DOWNLOAD_SERVER_URL_NO_PROTOCOL}_${STORE_URL}_Packages" ] && [ ! -e "/var/lib/aptss/lists/d.store.deepinos.org.cn_${STORE_URL}_Packages" ] && [ ! -e "/var/lib/aptss/lists/mirrors.sdu.edu.cn_spark-store-repository_${STORE_URL}_Packages" ];then mkdir -p /tmp/aptss-conf/ echo -e "\e[1;32m${TRANSHELL_CONTENT_GETTING_SERVER_CONFIG_AND_MIRROR_LIST}\e[0m" echo -curl --silent -o /tmp/aptss-conf/apt-fast.conf "https://d.store.deepinos.org.cn/apt-fast.conf" +curl --silent -o /tmp/aptss-conf/apt-fast.conf "${SPARK_DOWNLOAD_SERVER_URL}/apt-fast.conf" chmod -R 755 /tmp/aptss-conf -curl --silent -o /opt/durapps/spark-store/bin/apt-fast-conf/sources.list.d/sparkstore.list "https://d.store.deepinos.org.cn/sparkstore${STORE_LIST_URL}.list" -apt update -c /opt/durapps/spark-store/bin/apt-fast-conf/aptss-apt.conf +curl --silent -o /opt/durapps/spark-store/bin/apt-fast-conf/sources.list.d/sparkstore.list "${SPARK_DOWNLOAD_SERVER_URL}/sparkstore${STORE_LIST_URL}.list" +/usr/bin/apt update -c /opt/durapps/spark-store/bin/apt-fast-conf/aptss-apt.conf #只更新星火源 @@ -92,35 +98,34 @@ elif [ "$1" = "policy" ] || [ "$1" = "search" ];then ###执行 - apt "$@" -c /opt/durapps/spark-store/bin/apt-fast-conf/aptss-apt.conf + /usr/bin/apt "$@" -c /opt/durapps/spark-store/bin/apt-fast-conf/aptss-apt.conf elif [ "$1" = "ssupdate" ];then mkdir -p /tmp/aptss-conf/ echo -e "\e[1;32m${TRANSHELL_CONTENT_GETTING_SERVER_CONFIG_AND_MIRROR_LIST}\e[0m" echo -curl --silent -o /tmp/aptss-conf/apt-fast.conf "https://d.store.deepinos.org.cn/apt-fast.conf" +curl --silent -o /tmp/aptss-conf/apt-fast.conf "${SPARK_DOWNLOAD_SERVER_URL}/apt-fast.conf" chmod -R 755 /tmp/aptss-conf -curl --silent -o /opt/durapps/spark-store/bin/apt-fast-conf/sources.list.d/sparkstore.list "https://d.store.deepinos.org.cn/sparkstore${STORE_LIST_URL}.list" +curl --silent -o /opt/durapps/spark-store/bin/apt-fast-conf/sources.list.d/sparkstore.list "${SPARK_DOWNLOAD_SERVER_URL}/sparkstore${STORE_LIST_URL}.list" -apt update -c /opt/durapps/spark-store/bin/apt-fast-conf/aptss-apt.conf -o Dir::Etc::sourceparts="-" -o APT::Get::List-Cleanup="0" -o Dir::Etc::sourcelist="/opt/durapps/spark-store/bin/apt-fast-conf/sources.list.d/sparkstore.list" +/usr/bin/apt update -c /opt/durapps/spark-store/bin/apt-fast-conf/aptss-apt.conf -o Dir::Etc::sourceparts="-" -o APT::Get::List-Cleanup="0" -o Dir::Etc::sourcelist="/opt/durapps/spark-store/bin/apt-fast-conf/sources.list.d/sparkstore.list" #只更新星火源 elif [ "$1" = "update" ];then echo -e "\e[1;32m${TRANSHELL_CONTENT_GETTING_SERVER_CONFIG_AND_MIRROR_LIST}\e[0m" echo -curl --progress-bar -o /opt/durapps/spark-store/bin/apt-fast-conf/sources.list.d/sparkstore.list "https://d.store.deepinos.org.cn/sparkstore${STORE_LIST_URL}.list" +curl --progress-bar -o /opt/durapps/spark-store/bin/apt-fast-conf/sources.list.d/sparkstore.list "${SPARK_DOWNLOAD_SERVER_URL}/sparkstore${STORE_LIST_URL}.list" mkdir -p /tmp/aptss-conf/ -curl --progress-bar -o /tmp/aptss-conf/apt-fast.conf "https://d.store.deepinos.org.cn/apt-fast.conf" +curl --progress-bar -o /tmp/aptss-conf/apt-fast.conf "${SPARK_DOWNLOAD_SERVER_URL}/apt-fast.conf" chmod -R 755 /tmp/aptss-conf ### 额外一份拿来给aptss自动补全用 - apt "$@" -c /opt/durapps/spark-store/bin/apt-fast-conf/aptss-apt.conf + /usr/bin/apt "$@" -c 
 else
-	apt "$@" -c /opt/durapps/spark-store/bin/apt-fast-conf/aptss-apt.conf
+	/usr/bin/apt "$@" -c /opt/durapps/spark-store/bin/apt-fast-conf/aptss-apt.conf
 fi
-
diff --git a/tool/spark-dstore-patch b/tool/spark-dstore-patch
index bd09396..c955fcd 100755
--- a/tool/spark-dstore-patch
+++ b/tool/spark-dstore-patch
@@ -1,6 +1,6 @@
 #!/bin/bash
-echo "----------------Running Spark DStore Patch----------------"
+
@@ -26,15 +26,13 @@ linkDir() {
     targetDir=$(dirname "$target")
     find "$source" -type f | while read sourceFile; do
         targetFile="$targetDir/${sourceFile#$sourceDir/}"
-        if [ -L "$targetFile" ] && [ "$(readlink "$targetFile")" = "$sourceFile" ]; then
-            continue
-        else
-            rm -f "$targetFile"
-        fi
+        ensureTargetDir "$targetFile"
         sourceFile=$(realpath --relative-to="$(dirname $targetFile)" "$sourceFile" )
-        ln -s "$sourceFile" "$targetFile"
+        if [ ! -e ${targetFile} ];then
+            ln -sv "$sourceFile" "$targetFile"
+        fi
     done
 }
@@ -65,61 +63,60 @@ linkApp() {
     done
 }
 
+function exec_uos_package_link(){
-# execute linkApp function for each app and print output
 for app in $(enumAppInfoList); do
     linkApp "$app" &
-    if [ "$1" = "--debug" ]; then
-        echo "Linking for $app"
-    fi
-# remove broken links in /usr/share
 done
 wait
+}
 
-if [ "$1" = "--debug" ]; then
-    echo "Cleaning links and updating databases and caches..."
+function exec_v23_icon_link(){
+# Fix v23 broken icon
+if [ ! -d "/usr/share/icons/hicolor/scalable/apps" ];then
+mkdir -p /usr/share/icons/hicolor/scalable/apps
 fi
+for icon_root_icon_path in $(ls /usr/share/icons/*.png /usr/share/icons/*.svg)
+do
+target_icon_path=/usr/share/icons/hicolor/scalable/apps/$(basename ${icon_root_icon_path})
+if [ ! -e ${target_icon_path} ];then
+ln -sv $(realpath --relative-to=/usr/share/icons/hicolor/scalable/apps ${icon_root_icon_path}) /usr/share/icons/hicolor/scalable/apps
+fi
+done
+}
 
-if [ "$1" = "--debug" ]; then
-    find /usr/share/applications -xtype l -delete &
-    find /usr/share/icons -xtype l -delete &
-    find /usr/share/mime/packages -xtype l -delete &
-    find /usr/share/glib-2.0 -xtype l -delete &
-    find /usr/share/dbus-1/services -xtype l -delete &
-    find /usr/share/fcitx -xtype l -delete &
-    find /usr/share/help -xtype l -delete &
-    find /usr/share/locale -xtype l -delete &
-    find /usr/lib/`dpkg-architecture -qDEB_HOST_MULTIARCH`/fcitx -xtype l -delete &
-    find /usr/lib/mozilla/plugins -xtype l -delete &
-    find /usr/share/polkit-1/actions -xtype l -delete &
-    find /usr/share/fonts -xtype l -delete &
-    find /etc/fonts/conf.d -xtype l -delete &
-    update-icon-caches /usr/share/icons/* &
-    update-desktop-database -q &
-    update-mime-database -V /usr/share/mime &
-    glib-compile-schemas /usr/share/glib-2.0/schemas/ &
-    wait
-else
-    find /usr/share/applications -xtype l -delete > /dev/null 2>&1 &
-    find /usr/share/icons -xtype l -delete > /dev/null 2>&1 &
-    find /usr/share/mime/packages -xtype l -delete > /dev/null 2>&1 &
-    find /usr/share/glib-2.0 -xtype l -delete > /dev/null 2>&1 &
-    find /usr/share/dbus-1/services -xtype l -delete > /dev/null 2>&1 &
-    find /usr/share/fcitx -xtype l -delete > /dev/null 2>&1 &
-    find /usr/share/help -xtype l -delete > /dev/null 2>&1 &
-    find /usr/share/locale -xtype l -delete > /dev/null 2>&1 &
-    find /usr/lib/`dpkg-architecture -qDEB_HOST_MULTIARCH`/fcitx -xtype l -delete > /dev/null 2>&1 &
-    find /usr/lib/mozilla/plugins -xtype l -delete > /dev/null 2>&1 &
-    find /usr/share/polkit-1/actions -xtype l -delete > /dev/null 2>&1 &
-    find /usr/share/fonts -xtype l -delete > /dev/null 2>&1 &
-    find /etc/fonts/conf.d -xtype l -delete > /dev/null 2>&1 &
+function exec_link_clean(){
+# remove broken links in /usr/share
+
+    find /usr/share/applications -xtype l -exec echo '{} is invalid now and going to be cleaned' \; -exec unlink {} \; &
+    find /usr/share/icons -xtype l -exec echo '{} is invalid now and going to be cleaned' \; -exec unlink {} \; &
+    find /usr/share/mime/packages -xtype l -exec echo '{} is invalid now and going to be cleaned' \; -exec unlink {} \; &
+    find /usr/share/glib-2.0 -xtype l -exec echo '{} is invalid now and going to be cleaned' \; -exec unlink {} \; &
+    find /usr/share/dbus-1/services -xtype l -exec echo '{} is invalid now and going to be cleaned' \; -exec unlink {} \; &
+    find /usr/share/fcitx -xtype l -exec echo '{} is invalid now and going to be cleaned' \; -exec unlink {} \; &
+    find /usr/share/help -xtype l -exec echo '{} is invalid now and going to be cleaned' \; -exec unlink {} \; &
+    find /usr/share/locale -xtype l -exec echo '{} is invalid now and going to be cleaned' \; -exec unlink {} \; &
+    find /usr/lib/`dpkg-architecture -qDEB_HOST_MULTIARCH`/fcitx -xtype l -exec echo '{} is invalid now and going to be cleaned' \; -exec unlink {} \; &
+    find /usr/lib/mozilla/plugins -xtype l -exec echo '{} is invalid now and going to be cleaned' \; -exec unlink {} \; &
+    find /usr/share/polkit-1/actions -xtype l -exec echo '{} is invalid now and going to be cleaned' \; -exec unlink {} \; &
+    find /usr/share/fonts -xtype l -exec echo '{} is invalid now and going to be cleaned' \; -exec unlink {} \; &
+    find /etc/fonts/conf.d -xtype l -exec echo '{} is invalid now and going to be cleaned' \; -exec unlink {} \; &
     update-icon-caches /usr/share/icons/* > /dev/null 2>&1 &
     update-desktop-database -q > /dev/null 2>&1 &
     update-mime-database -V /usr/share/mime > /dev/null 2>&1 &
     glib-compile-schemas /usr/share/glib-2.0/schemas/ > /dev/null 2>&1 &
+
+}
+
+
+#########################################################################################
+echo "----------------Running Spark DStore Patch----------------"
+if [ ! -e /usr/bin/deepin-app-store-tool ];then
+# execute linkApp function for each app and print output
+exec_uos_package_link
 fi
-
-
-echo "----------------Finished----------------"
\ No newline at end of file
+exec_v23_icon_link
+exec_link_clean
+echo "----------------Finished----------------"
diff --git a/tool/ss-feedback/sender-d b/tool/ss-feedback/sender-d
index bb51bdc..935b606 100755
--- a/tool/ss-feedback/sender-d
+++ b/tool/ss-feedback/sender-d
@@ -9,6 +9,9 @@ case `arch` in
 	aarch64)
 	STORE_URL="aarch64-store"
 	;;
+	loongarch64)
+	STORE_URL="loong64-store"
+	;;
 esac
 
 if [ -z $1 ] || [ "$2" != "HD70642" ];then
diff --git a/tool/ssinstall b/tool/ssinstall
index 5490660..35f6b24 100755
--- a/tool/ssinstall
+++ b/tool/ssinstall
@@ -10,6 +10,10 @@ case $(arch) in
 	aarch64)
 	STORE_URL="aarch64-store"
 	;;
+	loongarch64)
+	STORE_URL="loong64-store"
+	STORE_LIST_URL="-loong64"
+	;;
 esac
 
 echo "Spark Store Install script. 星火商店安装脚本"
@@ -75,6 +79,7 @@ function hash_check() {
     echo "Running Spark Package Verify..."
 
     DEB_SHA512SUM=$(sha512sum "$1" | cut -d ' ' -f 1)
+    unset IS_SHA512SUM_CHECKED
     IS_SHA512SUM_CHECKED=$(cat "$PACKAGES_DATA_PATH" | grep "$DEB_SHA512SUM")
 }
@@ -162,4 +167,3 @@ if [ ! -z "$IS_SHA512SUM_CHECKED" ]; then
-z "$IS_SHA512SUM_CHECKED" ]; then fi fi fi - diff --git a/tool/store-helper/check-is-installed b/tool/store-helper/check-is-installed new file mode 100755 index 0000000..4ce5f0f --- /dev/null +++ b/tool/store-helper/check-is-installed @@ -0,0 +1,2 @@ +#!/bin/bash +dpkg -l | grep "^ii" | grep -w "$1" > /dev/null diff --git a/translations/spark-store_en.ts b/translations/spark-store_en.ts index 5fe4de5..5f6f932 100644 --- a/translations/spark-store_en.ts +++ b/translations/spark-store_en.ts @@ -126,9 +126,9 @@ - - - + + + Download and Install @@ -175,79 +175,79 @@ - + Click Open - + Developer Mode Disabled - - - + + + Reinstall - + Upgrade - - + + Install - + Installing - - - - + + + + Warning - + The current application does not support deepin, there may be problems - + The current application does not support UOS, there may be problems - + The current application does not support Ubuntu, there may be problems - + The current application does not support current platform, there may be problems - - + + Spark Store - + Uninstall succeeded - + The URL has been copied to the clipboard diff --git a/translations/spark-store_es.ts b/translations/spark-store_es.ts index c2b1c62..ed8bc2c 100644 --- a/translations/spark-store_es.ts +++ b/translations/spark-store_es.ts @@ -126,9 +126,9 @@ - - - + + + Download and Install Descargar e instalar @@ -175,79 +175,79 @@ Sitio web - + Click Open Haga clic en "abrir" - + Developer Mode Disabled Se ha desactivado el modo desarrollador - - - + + + Reinstall Reinstalación - + Upgrade Actualización - - + + Install Instalación - + Installing Se está instalando - - - - + + + + Warning Aviso - + The current application does not support deepin, there may be problems La aplicación actual no admite deepin, puede haber problemas - + The current application does not support UOS, there may be problems La aplicación actual no admite uos, puede haber problemas - + The current application does not support Ubuntu, there may be problems La aplicación actual no admite ubuntu, puede haber problemas - + The current application does not support current platform, there may be problems La aplicación actual no admite la Plataforma actual, puede haber problemas - - + + Spark Store SPARK Store - + Uninstall succeeded Desinstalación exitosa - + The URL has been copied to the clipboard La URL ha sido copiada al portapapeles diff --git a/translations/spark-store_fr.ts b/translations/spark-store_fr.ts index 35d4a9e..824bff5 100644 --- a/translations/spark-store_fr.ts +++ b/translations/spark-store_fr.ts @@ -126,9 +126,9 @@ - - - + + + Download and Install Télécharger et installer @@ -175,79 +175,79 @@ Site Web - + Click Open Cliquez sur Ouvrir - + Developer Mode Disabled Mode développeur désactivé - - - + + + Reinstall Réinstaller - + Upgrade Mise à niveau - - + + Install Installation - + Installing Installation en cours - - - - + + + + Warning Avertissement - + The current application does not support deepin, there may be problems L'application actuelle ne supporte pas deepin, il peut y avoir un problème - + The current application does not support UOS, there may be problems L'application actuelle ne prend pas en charge uos, il peut y avoir un problème - + The current application does not support Ubuntu, there may be problems L'application actuelle ne supporte pas Ubuntu, il peut y avoir un problème - + The current application does not support current platform, there may be problems L'application actuelle ne prend pas en charge la plate - forme actuelle, il peut y avoir un problème - - + 
diff --git a/translations/spark-store_zh_CN.ts b/translations/spark-store_zh_CN.ts
index 3751fc4..8636071 100644
--- a/translations/spark-store_zh_CN.ts
+++ b/translations/spark-store_zh_CN.ts
[hunks garbled in extraction: same <location>-only refresh as spark-store_en.ts; the Simplified Chinese translations visible in the residue are unchanged]
diff --git a/translations/spark-store_zh_TW.ts b/translations/spark-store_zh_TW.ts
index 2965e18..be12399 100644
--- a/translations/spark-store_zh_TW.ts
+++ b/translations/spark-store_zh_TW.ts
[hunks garbled in extraction: same <location>-only refresh as spark-store_en.ts; the Traditional Chinese translations visible in the residue, including the entries still left untranslated, are unchanged]
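The .ts catalogs touched above are the inputs to the translator macros introduced in cmake/SparkTranslatorConfig.cmake; under the Qt5 build this reduces to a single call (a sketch, assuming SPARK_FIND_QT5 is set by the project's Qt detection logic):

    # Dispatches to translator_qt5 (or translator_qt6 once implemented) via SPARK_FIND_QT5/6.
    translator_qt(APP_QM_FILES ${CMAKE_SOURCE_DIR}/translations/spark-store_*.ts)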