spark-store/patchs/zinface-community-cmake-build-system.patch
From 2b0f5447a8c13fa63aed5286cf4b3bdbf2f04e46 Mon Sep 17 00:00:00 2001
From: zinface <zinface@163.com>
Date: Sun, 11 Dec 2022 22:27:23 +0800
Subject: [PATCH 01/12] repo: import the spark skeleton in one pass to switch to a cmake build
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
TODO: the deb maintainer-script issue still needs to be handled
Signed-off-by: zinface <zinface@163.com>
---
.gitignore | 35 +++
CMakeLists.txt | 93 ++++++++
Makefile | 60 +++++
assets/spark.png | Bin 0 -> 4959 bytes
cmake/DebPackageConfig.cmake | 327 +++++++++++++++++++++++++++
cmake/SparkAppimageConfig.cmake | 132 +++++++++++
cmake/SparkBuildGraphviz.cmake | 8 +
cmake/SparkDesktopMacros.cmake | 35 +++
cmake/SparkEnvConfig.cmake | 8 +
cmake/SparkFindDtkConfig.cmake | 11 +
cmake/SparkFindLibraries.cmake | 7 +
cmake/SparkFindQt5Config.cmake | 154 +++++++++++++
cmake/SparkFindQt6Config.cmake | 24 ++
cmake/SparkInstallMacrosConfig.cmake | 132 +++++++++++
cmake/SparkMacrosConfig.cmake | 129 +++++++++++
cmake/SparkMacrosExtendConfig.cmake | 196 ++++++++++++++++
cmake/SparkTranslatorConfig.cmake | 27 +++
cmake/linuxdeployqt-help | 48 ++++
cmake/package-deb.descript | 44 ++++
cmake/spark-appimage.desktop.in | 9 +
cmake/spark-desktop.desktop.in | 11 +
21 files changed, 1490 insertions(+)
create mode 100644 CMakeLists.txt
create mode 100644 Makefile
create mode 100644 assets/spark.png
create mode 100644 cmake/DebPackageConfig.cmake
create mode 100644 cmake/SparkAppimageConfig.cmake
create mode 100644 cmake/SparkBuildGraphviz.cmake
create mode 100644 cmake/SparkDesktopMacros.cmake
create mode 100644 cmake/SparkEnvConfig.cmake
create mode 100644 cmake/SparkFindDtkConfig.cmake
create mode 100644 cmake/SparkFindLibraries.cmake
create mode 100644 cmake/SparkFindQt5Config.cmake
create mode 100644 cmake/SparkFindQt6Config.cmake
create mode 100644 cmake/SparkInstallMacrosConfig.cmake
create mode 100644 cmake/SparkMacrosConfig.cmake
create mode 100644 cmake/SparkMacrosExtendConfig.cmake
create mode 100644 cmake/SparkTranslatorConfig.cmake
create mode 100644 cmake/linuxdeployqt-help
create mode 100644 cmake/package-deb.descript
create mode 100644 cmake/spark-appimage.desktop.in
create mode 100644 cmake/spark-desktop.desktop.in
diff --git a/.gitignore b/.gitignore
index 21d239c..b55ce0c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -54,3 +54,38 @@ debian/spark-store
.vscode/*
src/spark-store
+
+# Ignore the build directory generated by the vscode cmake extension
+build/
+# Ignore the cache directory generated by the vscode clangd extension
+.cache
+
+# Created by https://www.toptal.com/developers/gitignore/api/cmake
+# Edit at https://www.toptal.com/developers/gitignore?templates=cmake
+
+### CMake ###
+CMakeLists.txt.user
+CMakeCache.txt
+CMakeFiles
+CMakeScripts
+Testing
+Makefile
+cmake_install.cmake
+install_manifest.txt
+compile_commands.json
+CTestTestfile.cmake
+_deps
+
+### CMake Patch ###
+# External projects
+*-prefix/
+
+# End of https://www.toptal.com/developers/gitignore/api/cmake
+
+!/Makefile
+# Ignore the build directory generated by the vscode cmake extension
+build/
+# Ignore the cache directory generated by the vscode clangd extension
+.cache
+# Ignore .deb artifacts from make package / make copytosource
+*.deb
diff --git a/CMakeLists.txt b/CMakeLists.txt
new file mode 100644
index 0000000..51cc090
--- /dev/null
+++ b/CMakeLists.txt
@@ -0,0 +1,93 @@
+
+cmake_minimum_required(VERSION 3.5.1)
+
+project(spark-store LANGUAGES CXX VERSION 4.0.0)
+
+include(cmake/SparkEnvConfig.cmake) # Sets a few switches for the Qt build
+include(cmake/SparkMacrosConfig.cmake) # Declares the spark_-prefixed macros
+include(cmake/SparkFindLibraries.cmake) # Provides target_link_<lib> helpers, generated from the spark_ macros, for linking targets against <lib>
+include(cmake/SparkFindQt5Config.cmake) # Provides target_link_qt5 for linking targets against Qt5 libraries
+include(cmake/SparkFindDtkConfig.cmake) # Provides target_link_dtk for linking targets against DTK libraries
+include(cmake/SparkTranslatorConfig.cmake) # Converts Qt5 .ts files to .qm and exposes SPARK_QM_TRANSLATIONS for use when compiling executables
+include(cmake/SparkMacrosExtendConfig.cmake) # spark_ macros that auto-expand the given arguments into executable build targets
+include(cmake/SparkInstallMacrosConfig.cmake) # Provides spark_install_-prefixed macros for installing targets, files, programs, directories, changelogs, etc.
+
+# Resource file path
+set(QRC_SOURCES "src/assets/assets.qrc")
+
+include_directories(src)
+
+# Build from the items passed in
+# Accepted values: a list of paths
+# Accepted values: a list of paths in the form path+libA+libB (trailing +deps are linked in)
+spark_add_library_realpaths(
+ src/dbus
+ src/utils+dbus
+ src/backend+utils
+ src/widgets/common+backend
+ src/widgets+common
+ src/pages+widgets
+)
+
+target_link_qt5_dbus(dbus)
+target_link_qt5_Concurrent(common)
+target_link_qt5_Concurrent(backend)
+target_link_qt5_WebEngineWidgets(common)
+
+spark_add_executable_path(${PROJECT_NAME} src
+ ${QRC_SOURCES} ${SPARK_QM_TRANSLATIONS}
+)
+target_link_dbus(${PROJECT_NAME})
+target_link_pages(${PROJECT_NAME})
+target_link_dtk(${PROJECT_NAME})
+
+
+spark_add_executable_path(spark-dstore-patch src/spark-dstore-patch)
+target_link_qt5(spark-dstore-patch)
+
+
+# Install the main programs spark-store and spark-dstore-patch
+spark_install_target(/opt/durapps/${PROJECT_NAME}/bin
+ ${PROJECT_NAME}
+ spark-dstore-patch)
+
+# Install the systemd service (the Spark Store update notifier)
+spark_install_file(/usr/lib/systemd/system/
+ pkg/usr/lib/systemd/system/spark-update-notifier.service)
+
+# Install the polkit action (running ss-do-upgrade-worker requires elevated privileges)
+spark_install_file(/usr/share/polkit-1/actions/
+ pkg/usr/share/polkit-1/actions/store.spark-app.ss-do-upgrade-worker.policy)
+
+# Install the tool scripts required by spark-store
+spark_install_directory(/opt/durapps/${PROJECT_NAME}/bin
+ tool/*)
+
+# Install the bash completion
+spark_install_file(/usr/share/bash-completion/completions
+ pkg/usr/share/bash-completion/completions/aptss)
+
+# Install the desktop file
+spark_install_file(/usr/share/applications
+ pkg/usr/share/applications/spark-store.desktop)
+
+# Install the icon file
+spark_install_file(/usr/share/icons/hicolor/scalable/apps
+ pkg/usr/share/icons/hicolor/scalable/apps/spark-store.svg)
+
+# Install the install-time feedback script (feedback.sh)
+spark_install_program(/tmp/spark-store-install
+ pkg/tmp/spark-store-install/feedback.sh)
+
+# Install the compiled qm translation files
+spark_install_file(/usr/share/spark-store/translations
+ ${SPARK_QM_TRANSLATIONS})
+
+# Install the changelog file; it is compressed with gzip automatically
+spark_install_changelog(${CMAKE_SOURCE_DIR}/debian/changelog)
+
+include(cmake/SparkBuildGraphviz.cmake)
+
+# Usage (deb packaging; see the header comments in cmake/package-deb.descript)
+find_package(DebPackage PATHS ${CMAKE_SOURCE_DIR})
+add_package_descript(cmake/package-deb.descript)
\ No newline at end of file
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..334ead1
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,60 @@
+CPUS=$(shell nproc)
+CALENDAR=$(shell date '+%Y%m%d')
+OSID=$(shell lsb_release -si)
+OSRELEASE=$(shell lsb_release -sr)
+SUFFIX=
+ifneq ("$(OSID)", "")
+SUFFIX=_$(OSID)$(OSRELEASE)
+endif
+
+PROJECT_NAME=spark-store
+
+all:
+ mkdir -p build
+ cd build && cmake ..
+ cd build && make -j$(CPUS)
+
+run: all
+ exec $(shell find build/ -maxdepth 1 -type f -executable | grep $(PROJECT_NAME))
+
+debug:
+ mkdir -p build
+ cd build && cmake -DCMAKE_BUILD_TYPE=Debug ..
+ cd build && make -j$(CPUS)
+
+release:
+ mkdir -p build
+ cd build && cmake -DCMAKE_BUILD_TYPE=Release -DPACKAGE_SUFFIX="$(SUFFIX)" ..
+ cd build && make -j$(CPUS)
+
+package: release
+ cd build && make package
+ tree build/_CPack_Packages/Linux/DEB/$(PROJECT_NAME)-*
+ dpkg-deb --contents build/$(PROJECT_NAME)_*$(CALENDAR)*$(SUFFIX).deb
+ # cd build/_CPack_Packages/Linux/DEB/$(PROJECT_NAME)_*$(CALENDAR)*$(SUFFIX).deb && find .
+
+builddeps:
+ cd build && make builddeps
+
+cpus:
+ @echo "CPU count: $(CPUS)"
+
+copytosource: package
+ cp build/$(PROJECT_NAME)_*$(CALENDAR)*.deb .
+
+# Enter qdebug mode. Debug output is disabled by default in deepin; check with env | grep QT
+# (the defaults are defined in the /etc/X11/Xsession.d/00deepin-dde-env configuration)
+# 1. Suppress Qt debug output: qt.*.debug=false
+# qt.qpa.input.events
+# qt.qpa.events
+# 2. Suppress dtk debug output: dtk.*.debug=false
+# dtk.dpluginloader
+# 3. Suppress Qt Creator's own debug output
+# qtc.autotest.testcodeparser
+# qtc.clangbackend.server
+# ...
+# 4. Turning the rest off is too tedious; just enable local debug only
+# .debug=true
+enter-qdebug-mode:
+ # Enter a new bash environment
+ @# export QT_LOGGING_RULES=".debug=true; qt.*.debug=false; dtk.*.debug=false; dde.*.debug=false; qtc*=false; " && bash
+ export QT_LOGGING_RULES=".debug=true" && bash
\ No newline at end of file
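
A worked example of the SUFFIX logic above (values are illustrative): on a system where "lsb_release -si" prints "Deepin" and "lsb_release -sr" prints "20.9", SUFFIX becomes "_Deepin20.9", so "make package" produces a file such as build/spark-store_4.0.0-<date>_amd64_Deepin20.9.deb; when lsb_release prints nothing, SUFFIX stays empty.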
diff --git a/assets/spark.png b/assets/spark.png
new file mode 100644
index 0000000000000000000000000000000000000000..544e2c7cff5f70894e27a7a717d4a62120630b7a
GIT binary patch
literal 4959
zcmZ9Qc|26n`^T?!hOvx&4Pz%{EZLPYvTq^E5@pRcGGvX$AW~VT?0YC8CHu}WL)I*j
zCA+d^OJw_v@Avn|@AZ4V?z!iAo%_7*bMCpH^W6J>;>=8R>1nxX0RW)a*F)YU^_YtW
zrY4=^DWIpMj{2UVE)p04)_~)Bx;9qQ2n|Z_wm$&K2wgO?prUJmq(Kl`-&hB<LJMYK
z;H4PWY#<GBp>=G~+P>c2E<R{L`=N^y+QnJmkq6pcKv&<`%=!rf2LLb=^pRSYLEks+
zJuEHVG9I`5p0%~o`xwP(T(A&BWF})#9Om3~bQ_b(#@9SAhwGar%cpFq%$%?GJzQBD
z^t%0dF=*wD8muRw+g(o51m|ccnE8Hz9D~pmFQqMqR8NPt`aZ@95GBoY%)<s5UuG3-
zH8&0V|Ja-Yuky}6rHyUK_RAqC-|kS+r+)ehbx%+Lhha7VgMsr_00aWHDT62$d9(aF
z!U1cV`EuMgw}GD4t)>7|<?qk1QBcY33S(8F5ArwUXi7fPCW(YRo7<tLi0uxDVj0(x
z_<)l*SC*HNybE?BH(Rz$LC4rr3wa955G0X;EJy0xN_ATM3~{@N^o9Ne<uYS1%xvjV
zouES0hFd~~oOYam0PHA>m)F7F$6%61XMsolUg0zXycp@v@GZ1K>rt9xd?C!>%*+W7
z%C&esu?1QvJQKjm(&8q^?oTJ)%8V0?)|OjY&d?Gr6jy6+xCM5i6!*6NB4P1KRCNm^
zm+NR*M%4c-!-n9r0lIJ!D)W+&T9oD)Dem^7SRik@V(WfPQ@fFKyaf2lH?CLvA5uka
za&warZ&7cj6r);vpGc6CZ?~=95<VC}D!vy*f{QjEl&9OWL;iubp?H5yvEdB%@e6NX
z57kp^2wVcQ3u~hI>k;2K+aF*1m@^PTnvH2@U9bunlhI3nmsfK^BZU;4=_*3}V}PoZ
zEP*COH$^QdyIwzO=Shp{b@@LAC7u=@nYJ8)oEoIduWITqMn>MScBnM|V;V8ajW%>c
z2|9_!;}u5SRyWpkQzR8giy<WH+QY*7;#%0KMPjz2J^$`S;Aj2Q$(O|;?s2!}W-s;l
zu^~Jf@3^eIMr&D_;mxvB-21`xyjo8Mh`|)KZ&GW@tY9Ko+xhEH9q-}Ic$pF6Rb{$J
z6WRQGL}`*GY6-rGR-l>|l$Ivq`@U%TZM4}hv^OUI<i-$GP!{(iq3D;wT5100{_<z8
z=1;Ad?c^U8>k_s0z#=s!u~04W3Iv&C;FbL%51jwmUPHQ@0l~qZwrDUlHbTaRh}I7O
zg75zlU9YVkytJ~+#_*>+av3b*ZLbM`=lrm(GyKlhzDKd&-~YS-XuB{i6aEdZrmT8V
z5=&CIeIGmv+apvfRY7`h1Zf4_L_-7KYf+zDaL#{K)Hw61>q|2q>%TNiMk|sXtmY*1
z`E77tq7vBO#3uo(t!jj^QMa-dh_<S@?yNd3zMLp*QM?3}j{(IjCNs>__m=cxM&AL^
zdT&14OSgK$%!-|9_M)?`i4B)w7eegd!IoH)mWyyhiqc1~EPAqoCCYEgl(hFM{^Ftj
z%GS_$^uT<GuMO-c^$e_!ZI<)tqNempDT6iTHz|9|_cjckvM6YmeEHw;h;Vg`YvL(_
z(jqSectWzGVyL@+N;(xwEU<0OHRyt^OcZ<Qbm(M^U%g>6K)$jtUK69tc1oS-cV3H(
zyzVwJW(p>4KWuO@dx-z65M|t#j~xmYkY<&V$cV9IcL@+9-%Akb(9C^=$km21|8lq_
za=b^e+n~SA!s?z86LD4&0RU2Vl|bwCrvOB*uG>-oaP+AaCy?IW;MZ7A&oS_=puC#x
zTSjKS2X}HZv)}oKicKX7<~q>8hy|~*HpzV*Y^DRSBNNv-=<Mz7m2X=<O(`+?bKF-{
z>R$KtX-5a5FE<rK_;&5d64zhwYmB)DihD|kGMY$s$ypA4DYTWSd;03~Stbic2f`sB
zAwh%dMJa#xYuO@4+Y^@mQ7demfUh*~%iSD#4K60n5j(6;z0A87Nb@>!_Wj#!o0njA
z8JkG4+{e@({dOMVP51|1y`CGI?{rMiLdMQTV)8ojeNwqrgP)*5q}hq9`jG=rE*1L0
z=0gY)xu5I$L0nYIwuM<@k7MqNbid7Ko1mz?Wtyzjo`jUhJJU|J`Jq_(fZ+l%ogp5Y
zIDI`mBjycCE3h-oAO06y%KHv_U0fWu7`0F)$u5yL6u~KnhuEC++z(})gQ{w9X}O1^
ziig+EPJfUA4&ecpZ?0Sc06XsoNMjeO3Wcj3%MW32I2nYaNKiwF#jknm8fO-R8aEHO
zS;P_Zcdx7H>7UoVjHFijGh;WVUGy??)C=6c|6BJ?%amgTP(}HCU2Z0Y^Sx|AO%6>B
z7k8KD-1)Kga0b7Xt>)Jmz><_Svi*-IB6_0ky0@X$d%1Z$EAcD*>w~VW$*SRrQOa6E
z)cKJdzv;DO-USxsZnV8sfR>g0;TF*eXKlHEv~kBDQlVHocet}SvAsd<?82yC)LQ;W
z)r39#Wqj$`6kE-tmwVMDs-}*UN680yV|?4qFL>I1E^G1doNa$er}pksd?U1pF|_rB
zSIJIEOQLI~-<DjQJQ8&;nMSY8T27<NNl5c#E}S#wNzCQvhkqlEIn<jTityd}$UF=$
z<XI5Ea=B|>J9cO}P)Oz~yJ4z~jwPCIW7GR>tKG}oJGSkdoz};#7?(Sg>_x?Y_Q?4k
zZ$BO!ta2Sdt}R&N@%WDQoxFGNn8p;VW$7qF|8D7og^|0?JUW*}Y|jx!#LUqPlwg=m
zRt9aEBD1%*_tO_~T=|(R%DbCN?p_VFK+vzERN1}RWAZ6OAYYD(J}CcnVj9+as%G)o
z;NJXAE1<2%q6D=&D&c&^K7J$1uCL+uS>u|xgNGNU%c~o5r72Q`D?M*NaI@;bFQ#CT
zV0IV|1Ll4vb*8mCG70}W_>J!pbL`q(Mk#Luq5Ho-?sljN6JfW)-Tyt?3`DZ%L<hO-
zm1%2QcpEFqC@DA&Y)noZo<L3wmXhWO7T5CLyr%;aQ&VF{Kezq9Z=e-t6lGcYBlO&>
z>1cfFaA%b9aDM4sjzPiuCSI52<vO<;a(uRp40{~Kn6LBMmPVEeHJcOgypIw^HdR}0
zm6LbOEkgM=13_yKJS&9@eZ|7<X9r<lqI?mrld4&}+y)ocsy*K}qL^g986$Oc82=ro
zuC2p`f>j;PmRFq03dvd{@)=@Z9{wG$dz~4@#t3rj;1m%CZ{=~k9~XcBC6v7Nc<RUf
za0fm?Azz;LlJ)Y#jjF*)x4$yVmk#DrhOl)`Kk7jI4dJu{T@8Dun*0WoGkW%6p%IA#
zwzFR1?;{r8naAakq}Ot?>kqV@1WVYQ<43f3{9(XPWS>EN{EO~*-CK*bt;ZS;!OLuY
z87ft)RVyp(Cw{BC?#*W-X}?E8n+mG`{Ikbd@Mf3BkFQ_T3aIyS+g0*qIBMqV83`?o
zX*3SoyLQT=V65w9M3)n><3cpp4wMiSNQ6I0WTSfL@yq6O5RJ^;rpPEzOSf?<#OEal
z#JE8?_%;i?y7A-hXB(+R7p{hi!m)9NPT7A;G|icpHm~w<e;6$!Y4Fb<`kxOQK~ik7
z2qb>S^k`I({`l+|qO9g~*i~G*9imYv^HH~-3PeB-S_xwv+Y2l=g6>lXZk|B1v+dn|
zeA>r~Z}f3>@r<u`RNC~JSzXHVKWuV9PJevXSM)4ETvGjoXTP+@Sm!4fMnM%7F8Iff
zb>Byy3Q<u#`SlY{K^5Dg)A{NK6p<$`7p6%c+oJK*xb-{t=b^TC*q$w1kQI7~<=I#n
zUsrx-EC2+C9;adF_CoSYvo)?|_N_Pw%krKb00Zo)kx)$aOO2R5(5K_Eb(0c#$B5sg
z{0z_oksNnVsOYo_E#b$gov$vI);T=pzwd=%0b+vx5R`k((5OLUU}$(4UhG+Kr*w?}
zJ?oUew851nEwl58vi2;*E5|K?7<%F-)kCDbPq*w+RQGiP>&w80&#K>pvR%5geJnqq
z#YL_Lw5jl$vkg7ZRPvcNku1Nz{`lM2`2I<R--ltN0hN~4vHK*U?_%TU@<IG~k|O@%
znoTwT&;f1hd-Fe&&gpyQWo1oNlp#yTCg)~Y#%qU+b`54q1>+BH-`3Ba?R1ny-~VYe
z9l%0>oH`pOV?m#)LN)yxXMS#M>?$?Ja6PLFE);UCNl#M06nrh>lc`K1PMyM&Ka>tI
zyKVLSSwJ-z2RX<M$Fc<(rS~fTW{%nyS1=}&<eqVbw)PSJ<ep)k90YElR8ezf%^Lc)
zK7m(V2eK4@R)4svw76Xh8k(GJt$8#twX<q^25G1`_B?J<$Kz*ToHDUP1f|Y;lv8Ey
z@>NRh*UcPO%t2{i@X_0uuwJ6@h;-=Qef3g6X8cFUHPoCZIv{}R78rZ%99agCe;SpR
z2&R5q?E=vp9E`14e_L9iWfefrys(&*EXOenhi}(uR8D%;1^v32tF*i$meYY6!3~@Q
zv5OSB5c`O2eYdLw^yThU*z33iu!U)sm(UUi!Yh5@S`weCs{BaFFDP7dWAap2{nG=s
zg+-P;PwqQ+?wHv<WGCsCsCO4rx|Wd_14)@oaLWm2&kft9v8-l^{=qia<93z$CT*z^
z*q2JT%@xykw-Ow2s?^%W_=BG?$=o_stHxWYMy+|vajaCg^4_v!TByq*Mc@g$G@in+
zYNuZb6#9Mik)Vb&y{T`IPuoggq35^!q9Us2#>S{X^xRx~)ampA>1zW`P2@zwfa|>{
z(Zt?9q>hUSNyY-w8WjF3)S{^{Y;7-zeNdEWXCYNlYE#WdCdLmAQQa{ib}eB{46!Vm
zo13!fMtVj@*A05r-xRqe1O+nR=OyKWG>u1mlD&rJ7WUEOHCORSf`H4G9m&D*U>eu{
zLp6o#gU{59h79h}@mqyQxAYnwjZ3|e)+cm~c9C*PmcN-nJ13-pb9}j+aMZB3eWbuU
z(aP`J@@Js(3eo*K%?H@(M#W~b(~+qW`F;+iobQ&M*W>{=WjBNNZqtpbh4N5N(I2dG
z-RX`fI|JPp?}OI)XaR2iVs;j=E!yAobeUouDw>}0b0z1W+MTAGY0eJ{GDB$rxn+Jx
zijgtNgG}Ip-xgzR(6Y<B6j5&7I?EKz2e+k2gn|!#VFBU$FP~`ekpaTJkZ6yKe@hcz
z0&P<dte_M3j=c?L(KKla=JLCh$&q23=Ki1!(x1)bR{e(s(~5Hx!^RaGOirY(Ya+~q
zTax0op$Cq0B+JhX=8V5lAHJ;;{jP&Y^DwLVk?U_UN+40B#x_l|2@WMt@X^?PUm5R`
zka5Zoe%Mb;=NtBYJRMnP9OukRlM*dPy%;#8-SK6r$;j&qkHb)1d#Z^N<KZr2!&#Pj
zIU-G_uQVE_d}&k&{6RLLsXdWz{x!xRnx?axhpuNik+1!(RuG-vy%XmTtmxPBGjh&s
z%2Mr?Ut8IWv!*#|Vmm`90TQDH+;(IJMHnN3{zDQxfmd>w>ce#I{RXF)m?YpDnSx1P
z-qxP|)1Pe80-2Yo{|kjzD-b|ra*a%GbQ-JEf<BbF&h$&RrZ-+pVa7@^kGNqEe<rct
z-kwj~m}xv3un)%C!WKTK+rEbQ$D;L=|3ipou){Jid{G5mhMkg6p^%}xPG=SSMvT+>
zY4Ef^R`Uo`;5%GzqsAjSR8OWeT$^xkT*!`awX@U|_Abd2Kni%MHCjtQr!HimpSd78
zqrPOZv^3?zw<Q83PJS%)jk5~sW5wL%>eIu9Gt!GTOD19I)$#R&XHcKG{N6t4Uzm)%
z_&ik-;lla8ao5f-XCXafQiDpVG*V0{N!aCZPn=1CN`%)rVO5b3-l1<&5Rm>dgqG6&
zi6I?9NDN#D1uh~vl;mU=49d2IlV^tnzNl6O2YpihPema^^jse;K;WdUa}|$oaghqg
z(6Awt@Duo-@b4d^62bJ31eGM@W)0Qd@X!Ndd;7ddj(j^*YY2<F9B0=q{CkRTYlO1D
zGl*<!ByB1D*e5nwTT@}02EOS{{EEVld=V|ut?N{K!<D?M$QX97EGNgVZS|_~peG9q
zL$e2NzJNfu_N=TG$8b>nz}q(w%?j=RPLP@eEF|B$PQ2KtCtcE0TG0n}qx$Q0g;>#Q
zXb4R~mYm3CJ1RdzfK4TCyeNO)4km{6`QK7Rtf74G7sV*O8|HzS0B>>4yF}W2o(lp*
zM{UWrv+Ba@vnVNI88u6!KF%=Wbx&cqT*am6q30wD#F98KVc5!5oJkm|LweHam10~r
zX@~3#%zVK@yDeBv6!qOETx37pSa`UBTxI#cHI-Sl3=?)E1K4yNsZ5YEKwM8qGV1Vn
zk8qYSbHYB+UTkQmS<k~+_u?XWiR}U~EWCgOKyF#9aFf?0NFlo?l9dJq7v*7@BT&B>
t;Jjx^&~6n@&egfT2m_h_UkqA5Co_+SJESY3=}2`iKwrlMS%GlG{15vgE&>1m
literal 0
HcmV?d00001
diff --git a/cmake/DebPackageConfig.cmake b/cmake/DebPackageConfig.cmake
new file mode 100644
index 0000000..d0351ec
--- /dev/null
+++ b/cmake/DebPackageConfig.cmake
@@ -0,0 +1,327 @@
+cmake_minimum_required(VERSION 3.0.0)
+
+# function(add_deb_package PACKAGE_NAME PACKAGE_VERSION PACKAGE_MAINTAINER PACKAGE_EMAIL PACKAGE_SHORT_DESCRIPTION PACKAGE_LONG_DESCRIPTION)
+
+# endfunction(add_deb_package PACKAGE_NAME PACKAGE_VERSION PACKAGE_MAINTAINER PACKAGE_EMAIL PACKAGE_SHORT_DESCRIPTION PACKAGE_LONG_DESCRIPTION)
+
+# if(add_deb_package VALUE) set(Package ${VALUE} PARENT_SCOPE) endif(add_deb_package VALUE)
+# if(add_deb_version VALUE) set(Version ${VALUE} PARENT_SCOPE) endif(add_deb_version VALUE)
+# if(add_deb_maintainer VALUE) set(Maintainer ${VALUE} PARENT_SCOPE) endif(add_deb_maintainer VALUE)
+# if(add_deb_email VALUE) set(Email ${VALUE} PARENT_SCOPE) endif(add_deb_email VALUE)
+# if(add_deb_descrition VALUE) set(Descrition ${VALUE} PARENT_SCOPE) endif(add_deb_descrition VALUE)
+# if(add_deb_detail VALUE) set(Detail ${VALUE} PARENT_SCOPE) endif(add_deb_detail VALUE)
+
+
+# set(Package "")
+# set(Version "")
+# set(Architecture "")
+# set(Maintainer "")
+# set(Email "")
+# set(Descrition "")
+
+function(find_str _IN _SEP _OUT)
+ string(FIND "${_IN}" "${_SEP}" _TMP)
+ set(${_OUT} ${_TMP} PARENT_SCOPE)
+endfunction(find_str _IN _SEP _OUT)
+
+
+function(find_next _IN _OUT)
+ find_str("${_IN}" "\n" _TMP)
+ set(${_OUT} ${_TMP} PARENT_SCOPE)
+endfunction(find_next _IN _OUT)
+
+function(sub_next _IN _INDEX _OUT __OUT)
+ find_next(${_IN} _NEXTINDEX)
+ string(SUBSTRING "${_IN}" ${_INDEX} ${_NEXTINDEX} _TMP)
+ math(EXPR _NEXTINDEX ${_NEXTINDEX}+1)
+ string(SUBSTRING "${_IN}" ${_NEXTINDEX} -1 __TMP)
+ set(${_OUT} ${_TMP} PARENT_SCOPE)
+ set(${__OUT} ${__TMP} PARENT_SCOPE)
+endfunction(sub_next _IN _INDEX _OUT)
+
+function(trim_str _IN _OUT)
+ string(STRIP "${_IN}" _TMP)
+ set(${_OUT} ${_TMP} PARENT_SCOPE)
+endfunction(trim_str _IN _OUT)
+
+function(split_str _IN _SEP _OUT)
+ string(FIND "${_IN}" "${_SEP}" _TMP_INDEX)
+ if(NOT _TMP_INDEX EQUAL -1)
+ string(SUBSTRING "${_IN}" 0 ${_TMP_INDEX} _TMP)
+ math(EXPR _TMP_INDEX ${_TMP_INDEX}+1)
+ string(SUBSTRING "${_IN}" ${_TMP_INDEX} -1 __TMP)
+ set(${_OUT} "${_TMP};${__TMP}" PARENT_SCOPE)
+ else()
+ set(${_OUT} ${_IN} PARENT_SCOPE)
+ endif(NOT _TMP_INDEX EQUAL -1)
+endfunction(split_str _IN _SEP _OUT)
+
+function(split_str_p _IN _SEP _OUT __OUT)
+ split_str("${_IN}" "${_SEP}" _TMP)
+ list(GET _TMP 0 __TMP)
+ list(GET _TMP 1 ___TMP)
+ set(${_OUT} ${__TMP} PARENT_SCOPE)
+ set(${__OUT} ${___TMP} PARENT_SCOPE)
+endfunction(split_str_p _IN _SEP _OUT __OUT)
+
+function(split_str_n _IN _SEP _OUT _N)
+ if(_N GREATER 1)
+ set(_C ${_N})
+ set(_RET "")
+ set(_NEXT ${_IN})
+ while(NOT _C EQUAL 0)
+ split_str("${_NEXT}" "${_SEP}" _TMP)
+ list(LENGTH _TMP _TMP_LEN)
+ if(_TMP_LEN EQUAL 2)
+ list(GET _TMP 0 __TMP)
+ list(GET _TMP 1 _NEXT)
+ list(APPEND _RET ${__TMP})
+ else()
+ break()
+ endif(_TMP_LEN EQUAL 2)
+ math(EXPR _C "${_C}-1")
+ endwhile(NOT _C EQUAL 0)
+ list(APPEND _RET ${_NEXT})
+ set(${_OUT} ${_RET} PARENT_SCOPE)
+ else()
+ split_str("${_IN}" "${_SEP}" _TMP)
+ set(${_OUT} ${_TMP} PARENT_SCOPE)
+ endif(_N GREATER 1)
+endfunction(split_str_n _IN _SEP _OUT _N)
+
+
+function(set_package_vars _IN_KEY _IN_VAL)
+
+ # trim_str("${_IN_KEY}" _IN_KEY)
+
+ find_str("${_IN_KEY}" "Type" _Type)
+ if(_Type EQUAL "0")
+ string(TOUPPER "${_IN_VAL}" _IN_VAL_UPPER)
+ string(TOLOWER "${_IN_VAL}" _IN_VAL_LOWER)
+ set(CPACK_GENERATOR "${_IN_VAL_UPPER}" PARENT_SCOPE)
+ message("--> 软件包类型: ${_IN_VAL_LOWER}")
+ endif(_Type EQUAL "0")
+
+ find_str("${_IN_KEY}" "Package" _Package)
+ if(_Package EQUAL "0")
+ if(_IN_VAL STREQUAL "auto")
+ set(CPACK_DEBIAN_PACKAGE_NAME "${PROJECT_NAME}" PARENT_SCOPE)
+ else()
+ set(CPACK_DEBIAN_PACKAGE_NAME "${_IN_VAL}" PARENT_SCOPE)
+ endif(_IN_VAL STREQUAL "auto")
+ message("--> 软件包名: ${_IN_VAL}")
+ endif(_Package EQUAL "0")
+
+ find_str("${_IN_KEY}" "Version" _Version)
+ if(_Version EQUAL "0")
+ if(_IN_VAL STREQUAL "auto")
+ set(CPACK_DEBIAN_PACKAGE_VERSION "${PROJECT_VERSION}" PARENT_SCOPE)
+ else()
+ set(CPACK_DEBIAN_PACKAGE_VERSION "${_IN_VAL}" PARENT_SCOPE)
+ endif(_IN_VAL STREQUAL "auto")
+
+ message("--> 软件版本: ${_IN_VAL}")
+ endif(_Version EQUAL "0")
+
+ find_str("${_IN_KEY}" "CalVer" _CalVer)
+ if(_CalVer EQUAL "0")
+ set(CalVer "${_IN_VAL}" PARENT_SCOPE)
+ message("--> 日历化版本: ${_IN_VAL}")
+ endif(_CalVer EQUAL "0")
+
+ find_str("${_IN_KEY}" "Architecture" _Architecture)
+ if(_Architecture EQUAL "0")
+ set(CPACK_DEBIAN_PACKAGE_ARCHITECTURE "${_IN_VAL}" PARENT_SCOPE)
+ if(_IN_VAL STREQUAL "auto")
+ execute_process(
+ COMMAND dpkg --print-architecture
+ OUTPUT_VARIABLE _RETV
+ OUTPUT_STRIP_TRAILING_WHITESPACE
+ )
+ set(CPACK_DEBIAN_PACKAGE_ARCHITECTURE "${_RETV}" PARENT_SCOPE)
+ endif(_IN_VAL STREQUAL "auto")
+ message("--> 软件架构: ${_IN_VAL}")
+ endif(_Architecture EQUAL "0")
+
+ find_str("${_IN_KEY}" "Priority" _Priority)
+ if(_Priority EQUAL "0")
+ set(CPACK_DEBIAN_PACKAGE_PRIORITY "${_IN_VAL}" PARENT_SCOPE)
+ message("--> 优先级: ${_IN_VAL}")
+ endif(_Priority EQUAL "0")
+
+ find_str("${_IN_KEY}" "Depends" _Depends)
+ if(_Depends EQUAL "0")
+ set(CPACK_DEBIAN_PACKAGE_DEPENDS "${_IN_VAL}" PARENT_SCOPE)
+ message("--> 软件依赖: ${_IN_VAL}")
+ endif(_Depends EQUAL "0")
+
+ find_str("${_IN_KEY}" "Maintainer" _Maintainer)
+ if(_Maintainer EQUAL "0")
+ set(CPACK_DEBIAN_PACKAGE_MAINTAINER "${_IN_VAL}" PARENT_SCOPE)
+ message("--> 软件维护者: ${_IN_VAL}")
+ endif(_Maintainer EQUAL "0")
+
+ find_str("${_IN_KEY}" "Homepage" _Homepage)
+ if(_Homepage EQUAL "0")
+ set(CPACK_DEBIAN_PACKAGE_HOMEPAGE "${_IN_VAL}" PARENT_SCOPE)
+ message("--> 软件主页: ${_IN_VAL}")
+ endif(_Homepage EQUAL "0")
+
+ find_str("${_IN_KEY}" "Recommends" _Recommends)
+ if(_Recommends EQUAL "0")
+ set(CPACK_DEBIAN_PACKAGE_RECOMMENDS "${_IN_VAL}" PARENT_SCOPE)
+ message("--> 软件建议: ${_IN_VAL}")
+ endif(_Recommends EQUAL "0")
+
+endfunction(set_package_vars _IN_KEY _IN_VAL)
+
+# Defines a custom function (add_package_descript)
+# that automates the packaging configuration from a descript file following a fixed convention
+function(add_package_descript IN_DES)
+ set(PACKAGE_DES_PATH "${IN_DES}")
+
+ if(EXISTS ${IN_DES})
+
+ elseif(EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/${IN_DES}")
+ set(PACKAGE_DES_PATH "${CMAKE_CURRENT_SOURCE_DIR}/${IN_DES}")
+ else()
+ message(FATAL_ERROR "!! Not Found Path: ${PACKAGE_DES_PATH}")
+ return()
+ endif(EXISTS ${IN_DES})
+
+ file(READ ${PACKAGE_DES_PATH} DES_CONTENT)
+ trim_str("${DES_CONTENT}" DES_CONTENT)
+
+ ################## Parse ##################
+
+ sub_next(${DES_CONTENT} NEXT_INDEX DES_LINE DES_CONTENT)
+ set(PREV_DES "")
+ while(NOT DES_LINE STREQUAL "${PREV_DES}")
+ # Skip this descript line if it starts with a '#' comment
+ find_str("${DES_LINE}" "#" _COMMENT)
+ if(_COMMENT EQUAL "0")
+ message("--> !!!!!!! ${DES_LINE}")
+ sub_next(${DES_CONTENT} NEXT_INDEX DES_LINE DES_CONTENT)
+ continue()
+ endif(_COMMENT EQUAL "0")
+
+ # If this line starts with "Descrition", the key/value section has ended
+ find_str("${DES_LINE}" "Descrition" _DESCRIPTION)
+ if(_DESCRIPTION EQUAL "0")
+ break()
+ endif(_DESCRIPTION EQUAL "0")
+
+ split_str_n("${DES_LINE}" ":" _TMP 1)
+ list(LENGTH _TMP _TMP_LEN)
+
+ if(_TMP_LEN EQUAL 2)
+ split_str_p("${DES_LINE}" ":" _TMP __TMP)
+ trim_str("${__TMP}" __TMP)
+ string(LENGTH "${__TMP}" __TMP_LENGTH)
+ if(NOT __TMP_LENGTH EQUAL "0")
+ set_package_vars("${_TMP}" "${__TMP}")
+ endif(NOT __TMP_LENGTH EQUAL "0")
+ endif(_TMP_LEN EQUAL 2)
+
+ # Record the current line and fetch the next; input may have ended (the last line then repeats)
+ set(PREV_DES "${DES_LINE}")
+ sub_next(${DES_CONTENT} NEXT_INDEX DES_LINE DES_CONTENT)
+ endwhile(NOT DES_LINE STREQUAL "${PREV_DES}")
+
+
+ # Check once more whether this line starts with "Descrition"; if so, parse the description body
+ find_str("${DES_LINE}" "Descrition" _DESCRIPTION)
+ if(_DESCRIPTION EQUAL "0")
+ split_str_p("${DES_LINE}" ":" _TMP __TMP)
+ trim_str("${__TMP}" __TMP)
+ set(Descrition ${__TMP})
+ set(PREV_DES_LINE "")
+ while(NOT PREV_DES_LINE STREQUAL DES_LINE)
+ if(NOT PREV_DES_LINE STREQUAL "")
+ set(Descrition "${Descrition}\n${DES_LINE}")
+ endif(NOT PREV_DES_LINE STREQUAL "")
+ set(PREV_DES_LINE "${DES_LINE}")
+ sub_next(${DES_CONTENT} NEXT_INDEX DES_LINE DES_CONTENT)
+ endwhile(NOT PREV_DES_LINE STREQUAL DES_LINE)
+ # set(Descrition "${Descrition}")
+ message("--> 软件说明: ${Descrition}")
+
+ set(CPACK_DEBIAN_PACKAGE_DESCRIPTION ${Descrition})
+ endif(_DESCRIPTION EQUAL "0")
+
+ ##################### deb #####################
+ # ARCHITECTURE
+ if(${CMAKE_HOST_SYSTEM_PROCESSOR} STREQUAL "x86_64")
+ set(ARCHITECTURE "amd64")
+ elseif(${CMAKE_HOST_SYSTEM_PROCESSOR} STREQUAL "aarch64")
+ set(ARCHITECTURE "arm64")
+ endif()
+
+ #################### Calendar Version ###################
+ if("${CalVer}" STREQUAL "true")
+ string(TIMESTAMP BUILD_TIME "%Y%m%d")
+ set(CPACK_DEBIAN_PACKAGE_VERSION "${CPACK_DEBIAN_PACKAGE_VERSION}-${BUILD_TIME}")
+ endif("${CalVer}" STREQUAL "true")
+
+
+
+ ##################### deb file name #####################
+ set(_Package "${CPACK_DEBIAN_PACKAGE_NAME}")
+ set(_Version "${CPACK_DEBIAN_PACKAGE_VERSION}")
+ set(_Architecture "${CPACK_DEBIAN_PACKAGE_ARCHITECTURE}")
+
+ set(_DebFileName
+ "${_Package}_${_Version}_${_Architecture}${PACKAGE_SUFFIX}.deb"
+ )
+ set(CPACK_DEBIAN_FILE_NAME ${_DebFileName})
+
+
+ # set(CPACK_DEBIAN_PACKAGE_NAME "${Package}")
+ # set(CPACK_DEBIAN_PACKAGE_VERSION "${Version}")
+ # set(CPACK_DEBIAN_PACKAGE_ARCHITECTURE "${Architecture}")
+ # set(CPACK_DEBIAN_PACKAGE_DEPENDS "${Depends}")
+ # set(CPACK_DEBIAN_PACKAGE_PRIORITY "${Priority}")
+ # set(CPACK_DEBIAN_PACKAGE_MAINTAINER "${Maintainer}")
+ # set(CPACK_DEBIAN_PACKAGE_DESCRIPTION "${Descrition}")
+
+ # Set the standard maintainer scripts to be used
+ set(CPACK_DEBIAN_PACKAGE_CONTROL_EXTRA
+ # "${CMAKE_SOURCE_DIR}/config/DEBIAN/preinst"
+ # "${CMAKE_SOURCE_DIR}/config/DEBIAN/postinst"
+ # "${CMAKE_SOURCE_DIR}/config/DEBIAN/prerm"
+ # "${CMAKE_SOURCE_DIR}/config/DEBIAN/postrm"
+ "${CMAKE_SOURCE_DIR}/debian/spark-store.postinst"
+ "${CMAKE_SOURCE_DIR}/debian/spark-store.postrm"
+ "${CMAKE_SOURCE_DIR}/debian/spark-store.preinst"
+ "${CMAKE_SOURCE_DIR}/debian/spark-store.prerm"
+ )
+
+ # Set to ON so dpkg-shlibdeps generates a better dependency list for the package.
+ set(CPACK_DEBIAN_PACKAGE_SHLIBDEPS ON)
+ # set(CPACK_DEBIAN_PACKAGE_GENERATE_SHLIBS ON)
+ # set(CPACK_DEBIAN_PACKAGE_GENERATE_SHLIBS_POLICY "=")
+
+ include(CPack)
+
+endfunction(add_package_descript IN_DES)
+
+
+# TODO:
+# CPACK_GENERATOR
+# CPACK_DEBIAN_FILE_NAME - n
+# CPACK_DEBIAN_PACKAGE_NAME - y
+# CPACK_DEBIAN_PACKAGE_VERSION - y
+# CPACK_DEBIAN_PACKAGE_ARCHITECTURE - y(auto)
+# CPACK_DEBIAN_PACKAGE_DEPENDS - y
+# CPACK_DEBIAN_PACKAGE_PRIORITY - y
+# CPACK_DEBIAN_PACKAGE_MAINTAINER - y
+# CPACK_DEBIAN_PACKAGE_DESCRIPTION - y
+
+# ARCHITECTURE
+# if(${CMAKE_HOST_SYSTEM_PROCESSOR} STREQUAL "x86_64")
+# set(ARCHITECTURE "amd64")
+# elseif(${CMAKE_HOST_SYSTEM_PROCESSOR} STREQUAL "aarch64")
+# set(ARCHITECTURE "arm64")
+# endif()
+
+# string(TIMESTAMP BUILD_TIME "%Y%m%d")
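
A minimal sketch of the descript format this parser consumes (field values are illustrative; cmake/package-deb.descript further below is the real file). Note that the parser matches the literal key "Descrition":

    Type: deb
    Package: auto
    Version: auto
    CalVer: true
    Architecture: auto
    Priority: optional
    Depends: curl
    Maintainer: someone <someone@example.com>
    Homepage: https://example.com
    Descrition: A short summary
     followed by long-description lines until the end of the file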
diff --git a/cmake/SparkAppimageConfig.cmake b/cmake/SparkAppimageConfig.cmake
new file mode 100644
index 0000000..45f4e25
--- /dev/null
+++ b/cmake/SparkAppimageConfig.cmake
@@ -0,0 +1,132 @@
+# export PATH=/usr/lib/x86_64-linux-gnu/qt5/bin:$PATH
+# export LD_LIBRARY_PATH=/usr/lib/x86_64-linux-gnu:$LD_LIBRARY_PATH
+# export QT_PLUGIN_PATH=/usr/lib/x86_64-linux-gnu/qt5/plugins:$QT_PLUGIN_PATH
+# export QML2_IMPORT_PATH=/usr/lib/x86_64-linux-gnu/qt5/qml:$QML2_IMPORT_PATH
+
+# export PATH=/usr/lib/x86_64-linux-gnu/qt5/bin:$PATH
+# ~/linuxdeployqt-continuous-x86_64.AppImage spark-store-submitter -appimage
+# cd ..
+# ~/appimagetool-x86_64.AppImage appimage/
+
+# LINUXDEPLOYQT=/home/zinface/linuxdeployqt-continuous-x86_64.AppImage
+# APPIMAGETOOL=/home/zinface/appimagetool-x86_64.AppImage
+
+# if ()
+set(APPIMAGE_OUTPUT "${CMAKE_BINARY_DIR}/appimage")
+set(APPIMAGE_ICON "${APPIMAGE_OUTPUT}/default.png")
+set(APPIMAGE_DESKTOP "${APPIMAGE_OUTPUT}/default.desktop")
+# set(LINUXDEPLOYQT)
+# set(APPIMAGETOOL)
+
+function(execute_linuxdeploy _PATH)
+ execute_process(COMMAND ${LINUXDEPLOYQT}
+ WORKING_DIRECTORY "${APPIMAGE_OUTPUT}"
+ )
+endfunction(execute_linuxdeploy _PATH)
+
+function(target_linuxdeploy)
+ add_custom_target(linuxdeploy pwd
+ BYPRODUCTS appimage
+ COMMAND cp ../${PROJECT_NAME} .
+ COMMAND "${LINUXDEPLOYQT}" ${PROJECT_NAME} -appimage -unsupported-allow-new-glibc -verbose=3 -no-strip|| true
+ COMMAND cp ../spark-appimage.desktop default.desktop
+ COMMAND cp ../spark-appimage.png default.png
+ WORKING_DIRECTORY "${APPIMAGE_OUTPUT}")
+endfunction(target_linuxdeploy)
+
+function(target_appimage)
+ add_custom_target(copy-desktop-appimage
+ COMMAND cp ../spark-appimage.desktop default.desktop
+ COMMAND cp ../spark-appimage.png default.png
+ WORKING_DIRECTORY "${APPIMAGE_OUTPUT}")
+ add_custom_target(appimage pwd
+ COMMAND ${APPIMAGETOOL} ${APPIMAGE_OUTPUT}
+ WORKING_DIRECTORY "${CMAKE_BINARY_DIR}"
+ DEPENDS copy-desktop-appimage)
+endfunction(target_appimage)
+
+function(add_appimage)
+ # check linuxdeploy
+ if(NOT DEFINED LINUXDEPLOYQT)
+ message("AppImage> Not Found LINUXDEPLOYQT Variable!")
+ return()
+ endif(NOT DEFINED LINUXDEPLOYQT)
+ if(CMAKE_VERSION VERSION_LESS 3.19 AND NOT EXISTS ${LINUXDEPLOYQT})
+ message("> cmake version is less than 3.19")
+ message(WARNING "!Relative paths are not supported!")
+ else()
+ file(REAL_PATH ${LINUXDEPLOYQT} LINUXDEPLOYQT_REAL_PATH)
+ endif(CMAKE_VERSION VERSION_LESS 3.19 AND NOT EXISTS ${LINUXDEPLOYQT})
+ message("AppImage> Found LINUXDEPLOYQT Variable: ${LINUXDEPLOYQT_REAL_PATH}")
+
+ # check appimagetool
+ if(NOT DEFINED APPIMAGETOOL)
+ message("AppImage> Not Found APPIMAGETOOL Variable!")
+ return()
+ endif(NOT DEFINED APPIMAGETOOL)
+ if(CMAKE_VERSION VERSION_LESS 3.19 AND NOT EXISTS ${APPIMAGETOOL})
+ # execute_process(COMMAND realpath ${APPIMAGETOOL} OUTPUT_VARIABLE APPIMAGETOOL_REAL_PATH)
+ message("> cmake version is less than 3.19")
+ message(WARNING "!Relative paths are not supported!")
+ else()
+ file(REAL_PATH ${APPIMAGETOOL} APPIMAGETOOL_REAL_PATH)
+ endif(CMAKE_VERSION VERSION_LESS 3.19 AND NOT EXISTS ${APPIMAGETOOL})
+ message("AppImage> Found APPIMAGETOOL Variable: ${APPIMAGETOOL_REAL_PATH}")
+
+ # do add_custome_target
+ make_directory(${APPIMAGE_OUTPUT})
+ target_linuxdeploy()
+ target_appimage()
+endfunction(add_appimage)
+
+function(add_appimage_desktop)
+ configure_file(cmake/spark-appimage.desktop.in
+ ${CMAKE_BINARY_DIR}/spark-appimage.desktop @ONLY)
+endfunction(add_appimage_desktop)
+
+function(add_appimage_icon _ICON_PATH)
+ if(CMAKE_VERSION VERSION_LESS 3.21)
+ message("> cmake version is less than 3.21")
+ configure_file(${_ICON_PATH} ${CMAKE_BINARY_DIR}/spark-appimage.png COPYONLY)
+ else()
+ file(COPY_FILE ${_ICON_PATH} ${CMAKE_BINARY_DIR}/spark-appimage.png)
+ endif(CMAKE_VERSION VERSION_LESS 3.21)
+endfunction(add_appimage_icon _ICON_PATH)
+
+
+
+# If glibc >= 2.27 you need to add the -unsupported-allow-new-glibc flag (meaning the AppImage can no longer be used on older distributions),
+# or -unsupported-bundle-everything, which roughly means bundling for compatibility; in practice it simply did not work on other distributions (possibly a distro issue), so the former is recommended even though it gives up older releases
+
+# -unsupported-bundle-everything
+ # Bundle all dependent libraries, including the ld-linux.so loader and glibc. This allows applications built on newer systems to run on older target systems, but it is not recommended as it bloats the bundle beyond the required size, and it may still be unusable on other distributions
+# -unsupported-allow-new-glibc
+ # Allow linuxdeployqt to run on distributions newer than the oldest still-supported Ubuntu LTS release. The resulting AppImage will not run on all still-supported distributions and is neither recommended, tested, nor supported
+
+# ./linuxdeployqt-7-x86_64.AppImage <app-dir>/<app> -appimage -unsupported-allow-new-glibc
+# ./linuxdeployqt-7-x86_64.AppImage <app-dir>/<app> -appimage -unsupported-bundle-everything
+
+
+
+
+# 1. Import the AppImage build in the top-level build
+# include(cmake/SparkAppimageConfig.cmake) # Import the AppImage build provided by the Spark build system
+# add_appimage_icon(assets/spark.png) # Default icon to ship in the AppImage
+# add_appimage_desktop() # Default desktop file to ship in the AppImage (uses the information configured by the Spark desktop build; requires spark-desktop)
+# add_appimage() # Apply the AppImage build
+
+# 2. Build the AppImage via Makefile targets
+# The AppImage build flow --
+# Build the AppImage from Makefile targets (requires absolute paths to the tools; then run linuxdeploy and generate-appimage in turn)
+# linuxdeployqt from https://github.com/probonopd/linuxdeployqt
+# appimagetool from https://github.com/AppImage/AppImageKit
+# LINUXDEPLOYQT := "/home/zinface/Downloads/linuxdeployqt-continuous-x86_64.AppImage"
+# APPIMAGETOOL := "/home/zinface/Downloads/appimagetool-x86_64.AppImage"
+
+# linuxdeploy: all
+# cd build && cmake .. -DLINUXDEPLOYQT=$(LINUXDEPLOYQT) -DAPPIMAGETOOL=$(APPIMAGETOOL)
+# cd build && make linuxdeploy
+
+# generate-appimage:
+# cd build && cmake .. -DLINUXDEPLOYQT=$(LINUXDEPLOYQT) -DAPPIMAGETOOL=$(APPIMAGETOOL)
+# cd build && make appimage
diff --git a/cmake/SparkBuildGraphviz.cmake b/cmake/SparkBuildGraphviz.cmake
new file mode 100644
index 0000000..ce9dbc3
--- /dev/null
+++ b/cmake/SparkBuildGraphviz.cmake
@@ -0,0 +1,8 @@
+cmake_minimum_required(VERSION 3.5.1)
+
+# Add a target that builds the project dependency graph: make builddeps
+add_custom_target(builddeps
+ COMMAND "${CMAKE_COMMAND}" "--graphviz=graphviz/builddeps.dot" .
+ COMMAND dot -Tpng graphviz/builddeps.dot -o builddeps.png
+ WORKING_DIRECTORY "${CMAKE_BINARY_DIR}"
+)
\ No newline at end of file
diff --git a/cmake/SparkDesktopMacros.cmake b/cmake/SparkDesktopMacros.cmake
new file mode 100644
index 0000000..223ac6b
--- /dev/null
+++ b/cmake/SparkDesktopMacros.cmake
@@ -0,0 +1,35 @@
+
+macro(spark_desktop_macros _APP_NAME _APP_NAME_ZH_CN _APP_COMMENT _APP_TYPE _APP_EXECUTE_PATH _APP_EXECUTE_ICON_PATH _APP_CATEGORIES)
+ set(APP_NAME ${_APP_NAME})
+ set(APP_NAME_ZH_CN ${_APP_NAME_ZH_CN})
+ set(APP_COMMENT ${_APP_COMMENT})
+ set(APP_TYPE ${_APP_TYPE})
+ set(APP_EXECUTE_PATH ${_APP_EXECUTE_PATH})
+ set(APP_EXECUTE_ICON_PATH ${_APP_EXECUTE_ICON_PATH})
+ set(APP_CATEGORIES ${_APP_CATEGORIES})
+ configure_file(cmake/spark-desktop.desktop.in
+ ${CMAKE_BINARY_DIR}/${_APP_NAME}.desktop
+ )
+endmacro(spark_desktop_macros _APP_NAME _APP_NAME_ZH_CN _APP_COMMENT _APP_TYPE _APP_EXECUTE_PATH _APP_EXECUTE_ICON_PATH _APP_CATEGORIES)
+
+# include(cmake/SparkDesktopMacros.cmake)
+# By default the application name (Name=) should match the project name
+# spark_desktop_macros(
+ # Application name: Name=
+ # Localized name: Name[zh_CN]=
+ # Comment: Comment=
+ # Type: Type=
+ # Executable: Exec=
+ # Icon path: Icon=
+ # Categories: Categories=
+# )
+
+# configure_file(<input> <output>
+# [NO_SOURCE_PERMISSIONS | USE_SOURCE_PERMISSIONS |
+# FILE_PERMISSIONS <permissions>...]
+# [COPYONLY] [ESCAPE_QUOTES] [@ONLY]
+# [NEWLINE_STYLE [UNIX|DOS|WIN32|LF|CRLF] ])
+
+# install(FILES ${APP_NAME}.desktop
+# DESTINATION /usr/share/applications
+# )
\ No newline at end of file
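
A hypothetical invocation of the macro above (argument values are illustrative only); it renders cmake/spark-desktop.desktop.in into ${CMAKE_BINARY_DIR}/spark-store.desktop:

    spark_desktop_macros(
        spark-store                               # Name=
        "Spark Store"                             # Name[zh_CN]= (localized name)
        "A community app store"                   # Comment=
        Application                               # Type=
        /opt/durapps/spark-store/bin/spark-store  # Exec=
        spark-store                               # Icon=
        Utility                                   # Categories=
    )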
diff --git a/cmake/SparkEnvConfig.cmake b/cmake/SparkEnvConfig.cmake
new file mode 100644
index 0000000..797faf4
--- /dev/null
+++ b/cmake/SparkEnvConfig.cmake
@@ -0,0 +1,8 @@
+cmake_minimum_required(VERSION 3.5.1)
+
+set(CMAKE_EXPORT_COMPILE_COMMANDS ON)
+set(CMAKE_INCLUDE_CURRENT_DIR ON)
+set(CMAKE_AUTOMOC ON)
+set(CMAKE_AUTOUIC ON)
+set(CMAKE_AUTORCC ON)
+# set(CMAKE_BUILD_TYPE "Debug")
\ No newline at end of file
diff --git a/cmake/SparkFindDtkConfig.cmake b/cmake/SparkFindDtkConfig.cmake
new file mode 100644
index 0000000..d1b2dfc
--- /dev/null
+++ b/cmake/SparkFindDtkConfig.cmake
@@ -0,0 +1,11 @@
+cmake_minimum_required(VERSION 3.5.1)
+
+# include(SparkFindQt5Config.cmake)
+find_package(Dtk COMPONENTS Core Widget Gui)
+
+function(target_link_dtk NAME)
+ target_link_libraries(${NAME}
+ ${DtkCore_LIBRARIES}
+ ${DtkWidget_LIBRARIES}
+ ${DtkGui_LIBRARIES})
+endfunction(target_link_dtk NAME)
\ No newline at end of file
diff --git a/cmake/SparkFindLibraries.cmake b/cmake/SparkFindLibraries.cmake
new file mode 100644
index 0000000..a1b936c
--- /dev/null
+++ b/cmake/SparkFindLibraries.cmake
@@ -0,0 +1,7 @@
+cmake_minimum_required(VERSION 3.5.1)
+
+# spark_find_library(notify libnotify)
+
+# function(target_link_${_prefix} TARGET)
+# target_link_libraries(${TARGET} ${_prefix})
+# endfunction(target_link_${_prefix} TARGET)
\ No newline at end of file
diff --git a/cmake/SparkFindQt5Config.cmake b/cmake/SparkFindQt5Config.cmake
new file mode 100644
index 0000000..6efade6
--- /dev/null
+++ b/cmake/SparkFindQt5Config.cmake
@@ -0,0 +1,154 @@
+cmake_minimum_required(VERSION 3.5.1)
+
+find_package(Qt5 COMPONENTS Core Widgets Network Concurrent WebEngineWidgets Sql WebSockets REQUIRED)
+
+# function(target_link_qt5 NAME)
+# target_link_libraries(${NAME}
+# Qt5::Core
+# Qt5::Widgets
+# Qt5::Network)
+# endfunction(target_link_qt5 NAME)
+
+# Use spark_add_link to generate target_link_qt5, replacing the function above
+spark_add_link(qt5 Qt5::Core Qt5::Widgets Qt5::Network)
+
+
+# spark_add_link_qt5
+# Custom macro spark_add_link_qt5 extending the target_link_qt5_<name> structure
+ # _IN_NAME: the <name> appended when this macro invokes the nested spark_add_link macro
+ # equivalent to spark_add_link(qt5_<name> ${ARGN})
+macro(spark_add_link_qt5 _IN_NAME)
+ spark_add_link(qt5_${_IN_NAME} ${ARGN})
+endmacro(spark_add_link_qt5 _IN_NAME)
+
+# Use spark_add_link_qt5 to generate the target_link_qt5_<name> macros
+spark_add_link_qt5(Concurrent Qt5::Concurrent)
+spark_add_link_qt5(Sql Qt5::Sql)
+spark_add_link_qt5(WebEngineWidgets Qt5::WebEngineWidgets)
+spark_add_link_qt5(WebSockets Qt5::WebSockets)
+
+# Advanced customization
+# spark_add_links_qt5
+# Custom macro spark_add_links_qt5 extending spark_add_link_qt5 into a configurable group
+ # Feature: takes any number of arguments
+ # qt5_item: the current item of the iteration, similar to python3's (for item in items:)
+ # Example: with qt5_item set to Core,
+ # spark_add_link_qt5(${qt5_item} Qt5::${qt5_item})
+ # expands to spark_add_link_qt5(Core Qt5::Core)
+ # which expands to spark_add_link(qt5_Core Qt5::Core)
+ # Feature: also generates a lowercase qt5_core variant of qt5_Core
+ # string(TOLOWER <string> <output_variable>)
+macro(spark_add_links_qt5)
+ set(qt5_items ${ARGN})
+ foreach(qt5_item IN LISTS qt5_items)
+ find_package(Qt5${qt5_item})
+ spark_add_link_qt5(${qt5_item} Qt5::${qt5_item})
+
+ string(TOLOWER "${qt5_item}" qt5_lower_item)
+ spark_add_link_qt5(${qt5_lower_item} Qt5::${qt5_item})
+ message("add_target_link_qt5_${qt5_item} or add_target_link_qt5_${qt5_lower_item}")
+ endforeach(qt5_item IN LISTS qt5_items)
+endmacro(spark_add_links_qt5)
+
+
+# Core: core non-graphical classes used by other modules.
+# GUI: base classes for graphical user interface (GUI) components. Includes OpenGL.
+# Multimedia: classes for audio, video, radio and camera functionality.
+# Multimedia Widgets: widget-based classes for implementing multimedia functionality.
+# Network: classes to make network programming easier and more portable.
+
+# QML: classes for the QML and JavaScript languages.
+# Quick: a declarative framework for building highly dynamic applications with custom user interfaces.
+# Quick Controls: lightweight QML types for creating performant user interfaces for desktop, embedded and mobile devices. These types employ a simple styling architecture and are very efficient.
+# Quick Dialogs: types for creating and interacting with system dialogs from a Qt Quick application.
+# Quick Layouts: layouts are items that are used to arrange Qt Quick 2 based items in the user interface.
+# Quick Test: a unit test framework for QML applications, where test cases are written as JavaScript functions.
+ # Note: there is no binary compatibility guarantee for Qt Quick Test, but it remains source compatible.
+
+# Qt SQL: classes for database integration using SQL.
+# Qt Test: classes for unit testing Qt applications and libraries.
+ # Note: there is no binary compatibility guarantee for Qt Test, but it remains source compatible.
+# Qt Widgets: classes to extend Qt GUI with C++ widgets.
+
+
+
+# List all Qt5 module config files
+# find /usr/lib/x86_64-linux-gnu/cmake/ -name "*Config.cmake" | sed 's@^.*/Qt5@Qt5@;' | grep ^Qt5
+
+# Strip the head and tail; one cleaning pass
+# find /usr/lib/x86_64-linux-gnu/cmake/ -name "*Config.cmake" | sed 's@^.*/Qt5@Qt5@;' | grep ^Qt5 | sed 's@^Qt5@@; s@Config.cmake$@@; /^\s*$/d'
+
+# Sort
+# find /usr/lib/x86_64-linux-gnu/cmake/ -name "*Config.cmake" | sed 's@^.*/Qt5@Qt5@;' | grep ^Qt5 | sed 's@^Qt5@@; s@Config.cmake$@@; /^\s*$/d' | sort | pr -t -3
+
+spark_add_links_qt5(
+ # AccessibilitySupport
+ # AttributionsScannerTools
+ Concurrent
+ # Core
+ DBus
+ # Designer
+ # DesignerComponents
+ # DeviceDiscoverySupport
+ # DocTools
+ # EdidSupport
+ # EglFSDeviceIntegration
+ # EglFsKmsSupport
+ # EglSupport
+ # EventDispatcherSupport
+ # FbSupport
+ # FontDatabaseSupport
+ # GlxSupport
+ # Gui
+ # Help
+ # InputSupport
+ # KmsSupport
+ # LinguistTools
+ # LinuxAccessibilitySupport
+ # Network
+ # OpenGL
+ # OpenGLExtensions
+ # PacketProtocol
+ # PlatformCompositorSupport
+ # Positioning
+ # PositioningQuick
+ # PrintSupport
+ # Qml
+ # QmlDebug
+ # QmlDevTools
+ # QmlImportScanner
+ # QmlModels
+ # QmlWorkerScript
+ # Quick
+ # QuickCompiler
+ # QuickControls2
+ # QuickParticles
+ # QuickShapes
+ # QuickTemplates2
+ # QuickTest
+ # QuickWidgets
+ # SerialBus
+ # SerialPort
+ # ServiceSupport
+ # Sql
+ # Svg
+ # Test
+ # ThemeSupport
+ # UiPlugin
+ # UiTools
+ # VulkanSupport
+ # WebChannel
+ # WebEngine
+ # WebEngineCore
+ WebEngineWidgets
+ # WebKit
+ # WebKitWidgets
+ # WebSockets
+ # Widgets
+ # X11Extras
+ # XcbQpa
+ # XkbCommonSupport
+ # Xml
+ # XmlPatterns
+)
\ No newline at end of file
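
A usage sketch (the target name is hypothetical): every module enabled above yields a pair of helpers, e.g. DBus generates both target_link_qt5_DBus and target_link_qt5_dbus, used like:

    spark_add_executable(demo main.cpp)  # hypothetical target
    target_link_qt5(demo)                # links Qt5::Core/Widgets/Network
    target_link_qt5_DBus(demo)           # links Qt5::DBus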
diff --git a/cmake/SparkFindQt6Config.cmake b/cmake/SparkFindQt6Config.cmake
new file mode 100644
index 0000000..dfd8917
--- /dev/null
+++ b/cmake/SparkFindQt6Config.cmake
@@ -0,0 +1,24 @@
+cmake_minimum_required(VERSION 3.5.1)
+
+find_package(Qt6 COMPONENTS Core Widgets Network Concurrent)
+
+# function(target_link_qt6 NAME)
+# target_link_libraries(${NAME}
+# Qt6::Core
+# Qt6::Widgets
+# Qt6::Network)
+# endfunction(target_link_qt6 NAME)
+
+# Use spark_add_link to generate target_link_qt6, replacing the function above
+spark_add_link(qt6 Qt6::Core Qt6::Widgets Qt6::Network)
+
+
+# spark_add_link_qt6
+# Custom macro spark_add_link_qt6 extending the target_link_qt6_<name> structure
+ # _IN_NAME: the <name> appended when this macro invokes the nested spark_add_link macro
+ # equivalent to spark_add_link(qt6_<name> ${ARGN})
+macro(spark_add_link_qt6 _IN_NAME)
+ spark_add_link(qt6_${_IN_NAME} ${ARGN})
+endmacro(spark_add_link_qt6 _IN_NAME)
+
+# Use spark_add_link_qt6 to generate the target_link_qt6_<name> macros
diff --git a/cmake/SparkInstallMacrosConfig.cmake b/cmake/SparkInstallMacrosConfig.cmake
new file mode 100644
index 0000000..bf906bf
--- /dev/null
+++ b/cmake/SparkInstallMacrosConfig.cmake
@@ -0,0 +1,132 @@
+
+# spark_install_target
+# Install the given targets to the given path
+# Accepted values: <install-dir> <targetA>
+# Accepted values: <install-dir> <targetA> <targetB> <targetC>...
+macro(spark_install_target INSTALL_TARGET_DIR INSTALL_TARGETS)
+ install(TARGETS
+ ${INSTALL_TARGETS} ${ARGN}
+ DESTINATION ${INSTALL_TARGET_DIR})
+endmacro(spark_install_target INSTALL_TARGET_DIR INSTALL_TARGETS)
+
+# spark_install_file
+# Install the given files to the given path
+# Accepted values: <install-dir> <fileA>
+# Accepted values: <install-dir> <fileA> <fileB> <fileC>...
+macro(spark_install_file INSTALL_FILE_DIR INSTALL_FILE)
+ install(FILES
+ ${INSTALL_FILE} ${ARGN}
+ DESTINATION ${INSTALL_FILE_DIR})
+endmacro(spark_install_file INSTALL_FILE_DIR INSTALL_FILE)
+
+# spark_install_program
+# Install the given files to the given path, marking them executable automatically
+# Accepted values: <install-dir> <fileA>
+# Accepted values: <install-dir> <fileA> <fileB> <fileC>...
+macro(spark_install_program INSTALL_PROGRAM_DIR INSTALL_PROGRAM)
+ install(PROGRAMS
+ ${INSTALL_PROGRAM} ${ARGN}
+ DESTINATION ${INSTALL_PROGRAM_DIR})
+endmacro(spark_install_program INSTALL_PROGRAM_DIR INSTALL_PROGRAM)
+
+
+# spark_install_directory
+# Install the given directory to the given path
+# Accepted values: <install-dir> <pathA>
+# Accepted values: <install-dir> <pathA>/* to install everything under pathA
+macro(spark_install_directory INSTALL_DIRECTORY_DIR INSTALL_DIRECTORY)
+ # INSTALL_DIRECTORY may contain a '*'
+ # 1. Find the '*', truncate, list all files under the directory, and install them
+ # 2. Plain files are installed directly with spark_install_file
+ # 3. Directories are installed directly with spark_install_directory
+ # message(FATAL_ERROR "${INSTALL_DIRECTORY_DIR}")
+ # string(FIND <string> <substring> <output_variable> [REVERSE])
+ string(FIND "${INSTALL_DIRECTORY}" "*" INSTALL_DIRECTORY_FIND_INDEX)
+ # message(FATAL_ERROR "${INSTALL_DIRECTORY_FIND_INDEX}: ${INSTALL_DIRECTORY_DIR}")
+
+ # file(GLOB <variable>
+ # [LIST_DIRECTORIES true|false] [RELATIVE <path>] [CONFIGURE_DEPENDS]
+ # [<globbing-expressions>...])
+
+ if (NOT INSTALL_DIRECTORY_FIND_INDEX EQUAL -1)
+ # string(SUBSTRING <string> <begin> <length> <output_variable>)
+ string(SUBSTRING "${INSTALL_DIRECOTRY}" 0 ${INSTALL_DIRECTORY_FIND_INDEX} INSTALL_DIRECTORY_FIND_INDEX_SUBSTRING)
+ # message(FATAL_ERROR "directory: ${INSTALL_DIRECTORY_FIND_INDEX_SUBSTRING}")
+
+ # file(GLOB <variable>
+ # [LIST_DIRECTORIES true|false] [RELATIVE <path>] [CONFIGURE_DEPENDS]
+ # [<globbing-expressions>...])
+
+ file(GLOB INSTALL_DIRECTORY_FIND_INDEX_SUBSTRING_FILE_GLOB_LIST ${INSTALL_DIRECTORY_FIND_INDEX_SUBSTRING}/*)
+ list(LENGTH INSTALL_DIRECTORY_FIND_INDEX_SUBSTRING_FILE_GLOB_LIST INSTALL_DIRECTORY_FIND_INDEX_SUBSTRING_FILE_GLOB_LIST_LENGTH)
+ foreach(item IN LISTS INSTALL_DIRECTORY_FIND_INDEX_SUBSTRING_FILE_GLOB_LIST)
+ # message("-> ${item}")
+ if(IS_DIRECTORY ${item})
+ message("-> ${item} IS_DIRECTORY")
+ # spark_install_directory(${INSTALL_DIRECTORY_DIR} ${item})
+ install(DIRECTORY
+ ${item}
+ DESTINATION ${INSTALL_DIRECTORY_DIR}
+ USE_SOURCE_PERMISSIONS)
+ else()
+ message("-> ${item} NOT IS_DIRECTORY")
+ spark_install_program(${INSTALL_DIRECTORY_DIR} ${item})
+ # spark_install_file(${INSTALL_DIRECTORY_DIR} ${item})
+ endif(IS_DIRECTORY ${item})
+ endforeach(item IN LISTS INSTALL_DIRECTORY_FIND_INDEX_SUBSTRING_FILE_GLOB_LIST)
+
+ # message(FATAL_ERROR " directory: ${INSTALL_DIRECTORY_FIND_INDEX_SUBSTRING_FILE_GLOB_LIST}")
+ # message(FATAL_ERROR " directory: ${INSTALL_DIRECTORY_FIND_INDEX_SUBSTRING_FILE_GLOB_LIST_LENGTH}")
+
+ else()
+ message(FATAL_ERROR "install ${INSTALL_DIRECTORY_DIR}")
+
+ install(DIRECTORY
+ ${INSTALL_DIRECTORY} ${ARGN}
+ DESTINATION ${INSTALL_DIRECTORY_DIR})
+ endif(NOT INSTALL_DIRECTORY_FIND_INDEX EQUAL -1)
+
+endmacro(spark_install_directory INSTALL_DIRECTORY_DIR INSTALL_DIRECTORY)
+
+
+# spark_install_changelog
+# Install the given changelog file; after a series of checks it is compressed with gzip and installed
+# Accepted values: <changelog-file-path>
+macro(spark_install_changelog CHANGE_LOG_FILE)
+ set(SOURCE_CHANGE_LOG_FILE ${CHANGE_LOG_FILE})
+ if (EXISTS ${SOURCE_CHANGE_LOG_FILE})
+
+ execute_process(COMMAND test -f ${SOURCE_CHANGE_LOG_FILE}
+ RESULT_VARIABLE changelog_test
+ )
+ execute_process(COMMAND which gzip
+ RESULT_VARIABLE gzip_test
+ )
+ if (NOT changelog_test EQUAL 0)
+ message(FATAL_ERROR "NOTE: 不是常规文件: ${SOURCE_CHANGE_LOG_FILE}")
+ endif(NOT changelog_test EQUAL 0)
+
+ if (NOT gzip_test EQUAL 0)
+ message(FATAL_ERROR "NOTE: 未安装 gzip, 无法压缩 changelog")
+ endif(NOT gzip_test EQUAL 0)
+
+ # Compress and install the changelog file
+ add_custom_command(
+ OUTPUT "${CMAKE_BINARY_DIR}/changelog.gz"
+ COMMAND gzip -cn9 "${SOURCE_CHANGE_LOG_FILE}" > "${CMAKE_BINARY_DIR}/changelog.gz"
+ WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}"
+ COMMENT "Compressing changelog"
+ )
+ add_custom_target(changelog ALL DEPENDS "${CMAKE_BINARY_DIR}/changelog.gz")
+
+ # include(GNUInstallDirs)
+ set(SPARK_INSTALL_CHANGE_LOG_DIR "/usr/share/doc/${PROJECT_NAME}/")
+ install(FILES
+ ${CMAKE_BINARY_DIR}/changelog.gz
+ debian/copyright
+
+ DESTINATION ${SPARK_INSTALL_CHANGE_LOG_DIR})
+ else()
+ message(FATAL_ERROR "未找到: ${SOURCE_CHANGE_LOG_FILE}")
+ endif(EXISTS ${SOURCE_CHANGE_LOG_FILE})
+endmacro(spark_install_changelog CHANGE_LOG_FILE)
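
A usage sketch (paths and names are illustrative); each macro is a thin wrapper over the matching install() signature:

    spark_install_target(/opt/durapps/demo/bin demo)
    # expands to: install(TARGETS demo DESTINATION /opt/durapps/demo/bin)

    spark_install_file(/usr/share/applications pkg/demo.desktop)
    # expands to: install(FILES pkg/demo.desktop DESTINATION /usr/share/applications)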
diff --git a/cmake/SparkMacrosConfig.cmake b/cmake/SparkMacrosConfig.cmake
new file mode 100644
index 0000000..67d84e1
--- /dev/null
+++ b/cmake/SparkMacrosConfig.cmake
@@ -0,0 +1,129 @@
+cmake_minimum_required(VERSION 3.5.1)
+
+# Define macros that auto-generate the build structure
+
+# spark_add_library <lib_name> [files]...
+# Build a library from the given source files
+ # and generate a target_link_<lib_name> function named after the library
+macro(spark_add_library _lib_name)
+ message("================ ${_lib_name} Library ================")
+ add_library(${_lib_name} ${ARGN})
+
+ set(SRCS ${ARGN})
+ foreach(item IN LISTS SRCS)
+ message(" -> ${item}")
+ endforeach(item IN LISTS SRCS)
+
+ function(target_link_${_lib_name} TARGET)
+ message("${_lib_name}")
+ target_link_libraries(${TARGET} ${_lib_name})
+ endfunction(target_link_${_lib_name} TARGET)
+
+endmacro(spark_add_library _lib_name)
+
+# spark_add_library_path <lib_name> <lib_path>
+# Build a library from the given path
+ # and generate a target_link_<lib_name> function named after the library;
+ # the function also adds <lib_path> to the header search path
+macro(spark_add_library_path _lib_name _lib_path)
+ aux_source_directory(${_lib_path} ${_lib_name}_SOURCES)
+
+ message("================ spark_add_library_path: ${_lib_name} ================")
+ file(GLOB UI_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${_lib_path}/*.ui)
+ add_library(${_lib_name} ${${_lib_name}_SOURCES} ${UI_LIST})
+ foreach(item IN LISTS ${_lib_name}_SOURCES)
+ message(" -> ${item}")
+ endforeach(item IN LISTS ${_lib_name}_SOURCES)
+
+ function(target_link_${_lib_name} TARGET)
+ # message("target_link_${_lib_name}")
+ message(" -> (include): ${_lib_path}")
+ target_include_directories(${TARGET} PUBLIC "${_lib_path}")
+ target_link_libraries(${TARGET} ${_lib_name})
+ endfunction(target_link_${_lib_name} TARGET)
+
+ function(target_include_${_lib_name} TARGET)
+ # message("target_link_${_lib_name}")
+ message(" -> (include): ${_lib_path}")
+ target_include_directories(${TARGET} PUBLIC "${_lib_path}")
+ # target_link_libraries(${TARGET} ${_lib_name})
+ endfunction(target_include_${_lib_name} TARGET)
+
+endmacro(spark_add_library_path _lib_name _lib_path)
+
+# spark_add_executable <exec_name> [files]...
+# Build an executable from the given source files
+ # A Qt build involves many source types; specify *.h/*.cpp/*.qrc/*.qm/... as needed
+macro(spark_add_executable _exec_name)
+
+ message("================ ${_exec_name} Executable ================")
+ add_executable(${_exec_name} ${ARGN})
+
+endmacro(spark_add_executable _exec_name)
+
+macro(spark_add_executable_path _exec_name _exec_path)
+ aux_source_directory(${_exec_path} ${_exec_name}_SOURCES)
+
+ message("================ ${_exec_name} Executable ================")
+ file(GLOB UI_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${_exec_path}/*.ui)
+ add_executable(${_exec_name} ${${_exec_name}_SOURCES} ${ARGN} ${UI_LIST})
+ foreach(item IN LISTS ${_exec_name}_SOURCES)
+ message(" -> ${item}")
+ endforeach(item IN LISTS ${_exec_name}_SOURCES)
+
+ # function(target_link_${_exec_name} TARGET)
+ # message("target_link_${_lib_name}")
+ message(" -> (include): ${_exec_path}")
+ target_include_directories(${_exec_name} PUBLIC "${_exec_path}")
+ # target_link_libraries(${TARGET} ${_lib_name})
+ # endfunction(target_link_${_exec_name} TARGET)
+ # target_link_${_exec_name}(${_exec_name})
+
+endmacro(spark_add_executable_path _exec_name _exec_path)
+
+# spark_find_library
+# Look up a library by the given name via pkg-config
+ # and generate a target_link_<prefix> function named after it
+macro(spark_find_library _prefix)
+ find_package(PkgConfig REQUIRED)
+
+ # libnotify
+ pkg_check_modules(${_prefix} ${ARGN})
+ function(target_link_${_prefix} TARGET)
+ target_include_directories(${TARGET} PUBLIC
+ ${${_prefix}_INCLUDE_DIRS})
+ target_link_libraries(${TARGET}
+ ${${_prefix}_LIBRARIES})
+ endfunction(target_link_${_prefix} TARGET)
+
+endmacro(spark_find_library _prefix)
+
+
+# spark_add_executable_paths
+# Custom build macro: takes a prefix name and treats the remaining arguments as subdirectories
+ # item: the current item of the iteration, similar to python3's (for item in items:)
+ # file: finds qrc files in the directory non-recursively (no GLOB_RECURSE); they must be compiled in for rcc to process them
+ # and generates a build target named prefix-<item>
+macro(spark_add_executable_paths _prefix_path)
+ set(PATHS ${ARGN})
+ foreach(item IN LISTS PATHS)
+ file(GLOB QRCS "${item}/*.qrc")
+ message(">>> add_executable: " "${_prefix_path}-${item} ${item} + ${QRCS}")
+ spark_add_executable_path(${_prefix_path}-${item} ${item} ${QRCS})
+ target_link_qt5(${_prefix_path}-${item})
+ endforeach(item IN LISTS PATHS)
+endmacro(spark_add_executable_paths _prefix_path)
+
+# spark_add_link
+# Custom macro replacing the current pattern of defining target_link_<name> with a function
+ # _IN_NAME: required parameter; this macro generates target_link_<name>
+ # ARGN: the remaining argument list of this macro
+ # When using target_link_<name>:
+ # _NAME: required parameter of the generated function: the <_NAME> target will be linked against these libraries
+macro(spark_add_link _IN_NAME)
+ function(target_link_${_IN_NAME} _NAME)
+ message("LINK ${_NAME} ${ARGN}")
+ target_link_libraries(${_NAME}
+ ${ARGN})
+ endfunction(target_link_${_IN_NAME} _NAME)
+endmacro(spark_add_link _IN_NAME)
\ No newline at end of file
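
A sketch of what spark_add_link generates (names are illustrative):

    spark_add_link(notify ${notify_LIBRARIES})
    # defines function target_link_notify(_NAME), equivalent to
    #   target_link_libraries(${_NAME} ${notify_LIBRARIES})
    target_link_notify(my-app)  # hypothetical target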
diff --git a/cmake/SparkMacrosExtendConfig.cmake b/cmake/SparkMacrosExtendConfig.cmake
new file mode 100644
index 0000000..bad0620
--- /dev/null
+++ b/cmake/SparkMacrosExtendConfig.cmake
@@ -0,0 +1,196 @@
+
+# find_plus
+# Search the string given in INVAL; if a '+' character is present, write its position to OUTVAL
+function(find_plus INVAL OUTVAL)
+ string(FIND "${INVAL}" "+" plus_index)
+ set(${OUTVAL} ${plus_index} PARENT_SCOPE)
+ # if(plus_index LESS 0)
+ # set(${OUTVAL} -1 PARENT_SCOPE)
+ # else()
+ # set(${OUTVAL} ${plus_index} PARENT_SCOPE)
+ # endif(plus_index LESS 0)
+endfunction(find_plus INVAL OUTVAL)
+
+# find_plus("FF" FFFF)
+# message("--> FFFF ${FFFF}") # --> FFFF -1
+# find_plus("F+F" FFFF)
+# message("--> FFFF ${FFFF}") # --> FFFF 1
+# find_plus("+F+F" FFFF)
+# message("--> FFFF ${FFFF}") # --> FFFF 0
+
+# set(FFF)
+# list(APPEND FFFF )
+# list(APPEND FFFF "F")
+# list(APPEND FFFF "FA")
+# message("--> FFFF: ${FFFF}") # --> FFFF: F;FA
+
+# set(FFFFS "")
+# list(APPEND FFFFS ${FFFF})
+# message("--> FFFFS: ${FFFFS}") # --> FFFFS: F;FA
+
+# set(FFFF "+AA+BB+CC+DD")
+# string(REPLACE "+" ";" FFFFL "${FFFF}")
+# list(LENGTH FFFFL FFFFLEN)
+# message("--> FFFFL: ${FFFFL} --> ${FFFFLEN}") # --> FFFFL: F;
+
+# plus_list
+# Turn data of the form "+AAA+BBB+CCC" into a list
+# Useful for producing a list directly without using string() to replace '+' with ';'
+function(plus_list INVAL OUTVAL OUTVALLEN)
+ # set(${OUTVAL} "..." PARENT_SCOPE)
+ # set(${OUTVALLEN} 0 PARENT_SCOPE)
+
+ set(_tmps "") # 设置为空的
+
+ # Find the next '+' position
+ find_plus(${INVAL} RIGHT_PLUS)
+
+ string(LENGTH "${INVAL}" INVALLEN)
+ message("--> 传入的 INVAL: --> 内容: ${INVAL}")
+ message("--> 传入的 INVAL: --> 长度: ${INVALLEN}")
+ message("--> 传入的 INVAL: --> +位置: ${RIGHT_PLUS}")
+
+ # Check whether a '+' exists to the right
+ if(RIGHT_PLUS LESS 0)
+ message("--> 传入的 INVAL: --> 无需计算新的+位置")
+ # message("--> 计算新的 + 位置: ${_PLUSINDEX}")
+ list(APPEND _tmps ${INVAL})
+ else()
+ math(EXPR _PLUSINDEX "${RIGHT_PLUS}+1")
+ message("--> 传入的 INVAL: --> 需计算+位置 --> 右移: ${_PLUSINDEX}")
+
+ string(SUBSTRING "${INVAL}" ${_PLUSINDEX} ${INVALLEN} NewVal)
+ message("--> 传入的 INVAL: --> 需计算+位置 --> 右移: ${_PLUSINDEX} -> 内容: ${NewVal}")
+ # string(REPLACE "+" ";" _tmps "${NewVal}")
+ # list(LENGTH FFFFL FFFFLEN)
+
+ # message("--> 计算新的 + 位置: ${_PLUSINDEX} --> 后面的 NewVal: ${NewVal}")
+
+ # find_plus(${NewVal} _NextPlus)
+ # if(_NextPlus LESS 0)
+ # list(APPEND _tmps ${NewVal})
+ # message("--> 追加新的 + 位置: ${_PLUSINDEX} --> 后面的")
+ # else()
+ # message("--> 追加新的 + 位置: ${_PLUSINDEX} --> 后面的")
+ # # 重新
+ # # plus_list(${NewVal} NewValS )
+ # # foreach(item)
+ # # list(APPEND _tmps ${item})
+ # # endforeach(item)
+ # endif(_NextPlus LESS 0)
+ endif(RIGHT_PLUS LESS 0)
+
+ set(${OUTVAL} ${_tmps} PARENT_SCOPE)
+ list(LENGTH _tmps _tmps_len)
+ set(${OUTVALLEN} ${_tmps_len} PARENT_SCOPE)
+
+endfunction(plus_list INVAL OUTVAL OUTVALLEN)
+
+# plus_list("+AAA+BBB+CCC+DDD" FFF FFLEN)
+# message("--------> ${FFF}: -> ${FFLEN}")
+
+# spark_add_library_realpaths
+# Build from the items passed in
+# Accepted values: a list of paths
+# Accepted values: a list of paths in the form path+libA+libB
+macro(spark_add_library_realpaths)
+ message("---> 基于传入的项进行构建 <---")
+ # message("--> src/unclassified/ItemDelegates/NdStyledItemDelegate")
+ # string(FIND <string> <substring> <output_variable> [REVERSE])
+ # string(SUBSTRING <string> <begin> <length> <output_variable>)
+ # math(EXPR value "100 * 0xA" OUTPUT_FORMAT DECIMAL) # value is set to "1000"
+
+ set(REALPATHS ${ARGN})
+ foreach(REALPATH IN LISTS REALPATHS)
+ message("---> 传入路径: ${REALPATH} <--- ")
+ string(LENGTH "${REALPATH}" REALPATH_LENGTH)
+ message("---> 计算传入路径长度: --> 长度: ${REALPATH_LENGTH}")
+
+ string(FIND "${REALPATH}" "/" LASTINDEX REVERSE)
+ message("---> 计算传入路径末尾/位置: --> 长度: ${LASTINDEX}")
+ math(EXPR LASTINDEX "${LASTINDEX}+1")
+ message("---> 计算传入路径末尾/右移: --> 长度: ${LASTINDEX}")
+ string(SUBSTRING "${REALPATH}" ${LASTINDEX} ${REALPATH_LENGTH} REALNAME_Dependency)
+
+ # Find the '+' index, using the find_plus function
+ find_plus(${REALPATH} RIGHT_PLUS)
+
+ # Check whether a '+' index was found; the value is -1 or a non-negative integer
+ if(RIGHT_PLUS LESS 0) # less than 0: no '+' present
+ set(REALNAME "${REALNAME_Dependency}")
+ message("---> part after last '/': --> ${REALNAME} <-- no '+' dependencies")
+
+ message("---> building ${REALNAME} -> ${REALNAME} ${REALPATH} ")
+
+ spark_add_library_path(${REALNAME} ${REALPATH})
+ target_link_qt5(${REALNAME})
+ else()
+ message("---> 传入路径末尾/右移部分: --> ${REALNAME_Dependency} <-- 依赖+")
+
+ # 存在+号,将截取从 / 到 + 号之间的内容作为目标名称
+ # 例如 src/unclassified/widgets/DocTypeListView+JsonDeploy
+ # ^(LASTINDEX) ^(RIGHT_PLUS)
+ # 将 RIGHT_PLUS - LASTINDEX 计算出 DocTypeListView 字符长度
+ math(EXPR REALNAME_LENGTH "${RIGHT_PLUS}-${LASTINDEX}")
+
+ message("---> 计算传入路径末尾/右移部分: --> 位置: ${RIGHT_PLUS}")
+ # message("---> 计算传入路径末尾/右移部分: --> 长度: ${REALNAME_Dependency}")
+
+ # 目标名称为 DocTypeListView
+ # 依赖为 JsonDeploy
+ # set(REALNAME "")
+ string(SUBSTRING "${REALPATH}" 0 ${RIGHT_PLUS} _REALPATH_DIR)
+ string(SUBSTRING "${REALPATH}" ${LASTINDEX} ${REALNAME_LENGTH} REALNAME)
+
+ message("---> 计算传入路径末尾/右移部分: --> 库名: ${REALNAME}")
+
+ string(SUBSTRING "${REALPATH}" ${RIGHT_PLUS} ${REALPATH_LENGTH} Dependency)
+ message("---> 计算传入路径末尾/右移部分: --> 库名: ${REALNAME} --> +部分: ${Dependency}")
+
+ # plus_list(${Dependency} dependencies dependencies_len)
+ string(REPLACE "+" ";" dependencies "${Dependency}")
+ message("---> 计算传入路径末尾/右移部分: --> 库名: ${REALNAME} --> +部分: ${Dependency} --> 列表: ${dependencies} <-- ")
+
+
+ message("---> 构建 ${REALNAME} -> ${REALNAME} ${_REALPATH_DIR}")
+
+ spark_add_library_path(${REALNAME} ${_REALPATH_DIR})
+ # target_link_qt5(${REALNAME}) # using the dependency's own dependencies might work well too
+
+ target_include_directories(${REALNAME} PUBLIC ${_REALPATH_DIR})
+ target_link_libraries(${REALNAME} ${dependencies})
+
+ endif(RIGHT_PLUS LESS 0)
+ endforeach(REALPATH IN LISTS REALPATHS)
+
+endmacro(spark_add_library_realpaths)
+
+
+# spark_add_source_paths
+# Turn the files in the given paths into a usable source file list
+#
+macro(spark_add_source_paths SOURCE_VARIABLE_NAME)
+ set(SOURCE_PATHS ${ARGN})
+ set(${SOURCE_VARIABLE_NAME}_PATHS "")
+ set(${SOURCE_VARIABLE_NAME} "")
+ foreach(SOURCE_PATH IN LISTS SOURCE_PATHS)
+ list(APPEND ${SOURCE_VARIABLE_NAME}_PATHS ${CMAKE_CURRENT_SOURCE_DIR}/${SOURCE_PATH})
+ aux_source_directory(${SOURCE_PATH} _SOURCES)
+ foreach(item IN LISTS _SOURCES)
+ # message(" -> ${item}")
+ list(APPEND ${SOURCE_VARIABLE_NAME} ${item})
+ endforeach(item IN LISTS _SOURCES)
+
+ # file(GLOB HEADER_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${SOURCE_PATH}/*.h)
+ # foreach(item IN LISTS HEADER_LIST)
+ # # message(" -> ${item}")
+ # list(APPEND ${SOURCE_VARIABLE_NAME} ${item})
+ # endforeach(item IN LISTS HEADER_LIST)
+
+ file(GLOB UI_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${SOURCE_PATH}/*.ui)
+ foreach(item IN LISTS UI_LIST)
+ # message(" -> ${item}")
+ list(APPEND ${SOURCE_VARIABLE_NAME} ${item})
+ endforeach(item IN LISTS UI_LIST)
+ endforeach(SOURCE_PATH IN LISTS SOURCE_PATHS)
+endmacro(spark_add_source_paths SOURCE_VARIABLE_NAME)
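
A worked example of spark_add_library_realpaths (the path mirrors the call in this patch's CMakeLists.txt):

    spark_add_library_realpaths(src/widgets/common+backend)
    # REALNAME resolves to "common", _REALPATH_DIR to "src/widgets/common":
    #   spark_add_library_path(common src/widgets/common)
    #   target_include_directories(common PUBLIC src/widgets/common)
    #   target_link_libraries(common backend)
    # and a target_link_common() helper becomes available to later targets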
diff --git a/cmake/SparkTranslatorConfig.cmake b/cmake/SparkTranslatorConfig.cmake
new file mode 100644
index 0000000..5375fe3
--- /dev/null
+++ b/cmake/SparkTranslatorConfig.cmake
@@ -0,0 +1,27 @@
+cmake_minimum_required(VERSION 3.5.1)
+
+find_package(Qt5LinguistTools)
+
+file(GLOB SPARK_TRANSLATIONS ${CMAKE_SOURCE_DIR}/translations/*.ts)
+
+message("================ Translations ================")
+foreach(item IN LISTS SPARK_TRANSLATIONS)
+ message("-> ${item}")
+endforeach(item IN LISTS SPARK_TRANSLATIONS)
+
+qt5_add_translation(SPARK_QM_TRANSLATIONS
+ ${SPARK_TRANSLATIONS})
+
+message("translator(ts -> qm):")
+foreach(item IN LISTS SPARK_QM_TRANSLATIONS)
+ message("-> ${item}")
+endforeach(item IN LISTS SPARK_QM_TRANSLATIONS)
+
+
+# Note: SPARK_QM_TRANSLATIONS must be added to the add_executable arguments,
+# otherwise the translation files will not be generated at build time
+
+# qt5_create_translation
+    # .ts files are deleted on "make clean" or a rebuild and regenerated from
+    # scratch on the next build, so existing translations are lost -- beware!
+
+# qt5_add_translation
+    # this macro is the more stable choice
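+
+# Usage sketch (target name is hypothetical): pass the generated .qm list to the
+# executable target so the translation steps actually run at build time:
+#   add_executable(app main.cpp ${SPARK_QM_TRANSLATIONS})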
diff --git a/cmake/linuxdeployqt-help b/cmake/linuxdeployqt-help
new file mode 100644
index 0000000..12ac506
--- /dev/null
+++ b/cmake/linuxdeployqt-help
@@ -0,0 +1,48 @@
+linuxdeployqt (commit 5fa79fa), build 36 built on 2022-08-21 12:36:03 UTC
+WARNING: Not checking glibc on the host system.
+ The resulting AppDir or AppImage may not run on older systems.
+ This mode is unsupported and discouraged.
+ For more information, please see
+ https://github.com/probonopd/linuxdeployqt/issues/340
+
+Usage: linuxdeployqt <app-binary|desktop file> [options]
+
+Options:
+ -always-overwrite : Copy files even if the target file exists.
+ -appimage : Create an AppImage (implies -bundle-non-qt-libs).
+ -bundle-non-qt-libs : Also bundle non-core, non-Qt libraries.
+ -exclude-libs=<list> : List of libraries which should be excluded,
+ separated by comma.
+ -ignore-glob=<glob> : Glob pattern relative to appdir to ignore when
+ searching for libraries.
+ -executable=<path> : Let the given executable use the deployed libraries
+ too
+ -extra-plugins=<list> : List of extra plugins which should be deployed,
+ separated by comma.
+ -no-copy-copyright-files : Skip deployment of copyright files.
+ -no-plugins : Skip plugin deployment.
+ -no-strip : Don't run 'strip' on the binaries.
+ -no-translations : Skip deployment of translations.
+ -qmake=<path> : The qmake executable to use.
+ -qmldir=<path> : Scan for QML imports in the given path.
+ -qmlimport=<path> : Add the given path to QML module search locations.
+ -show-exclude-libs : Print exclude libraries list.
+ -verbose=<0-3> : 0 = no output, 1 = error/warning (default),
+ 2 = normal, 3 = debug.
+ -updateinformation=<update string> : Embed update information STRING; if zsyncmake is installed, generate zsync file
+ -qtlibinfix=<infix> : Adapt the .so search if your Qt distribution has infix.
+ -version : Print version statement and exit.
+
+linuxdeployqt takes an application as input and makes it
+self-contained by copying in the Qt libraries and plugins that
+the application uses.
+
+By default it deploys the Qt instance that qmake on the $PATH points to.
+The '-qmake' option can be used to point to the qmake executable
+to be used instead.
+
+Plugins related to a Qt library are copied in with the library.
+
+See the "Deploying Applications on Linux" topic in the
+documentation for more information about deployment on Linux.
\ No newline at end of file
diff --git a/cmake/package-deb.descript b/cmake/package-deb.descript
new file mode 100644
index 0000000..2b485d1
--- /dev/null
+++ b/cmake/package-deb.descript
@@ -0,0 +1,44 @@
+# Comment lines (usage)
+# find_package(DebPackage PATHS ${CMAKE_SOURCE_DIR})
+# add_package_descript(cmake/package-deb.descript)
+
+# Name of the generated package file
+# FileName: to be determined
+# Setting the PACKAGE_SUFFIX variable appends a suffix to the name,
+# e.g. hard-coded in a Makefile:
+# OSID=$(shell lsb_release -si)
+# OSRELEASE=$(shell lsb_release -sr)
+# -DPACKAGE_SUFFIX="_$(OSID)$(OSRELEASE)"
+
+# Maintainer scripts run when the deb package is installed or removed:
+# 1. pre/post installation scripts (preinst, postinst)
+# 2. pre/post removal scripts (prerm, postrm)
+# ControlExtra: undefined (not supported yet)
+# To specify them, edit the DebPackageConfig.cmake template (line 252),
+# the CPACK_DEBIAN_PACKAGE_CONTROL_EXTRA variable
+
+# Package type; deb supported for now, tgz (tar.gz) planned
+Type: deb
+# Package name (auto: uses the PROJECT_NAME variable)
+Package: auto
+# Package version (auto: uses the PROJECT_VERSION variable)
+Version: auto
+# Calendar-style version suffix
+CalVer: true
+# Package architecture (auto)
+Architecture: auto
+# Section of the system the package belongs to [admin|cli-mono|comm|database|debug|devel|doc|editors|education|electronics|embedded|fonts|games|gnome|gnu-r|gnustep|graphics|hamradio|haskell|httpd|interpreters|introspection|java|javascript|kde|kernel|libdevel|libs|lisp|localization|mail|math|metapackages|misc|net|news|ocaml|oldlibs|otherosfs|perl|php|python|ruby|rust|science|shells|sound|tasks|tex|text|utils|vcs|video|web|x11|xfce|zope]
+Section: utils
+# Package priority [required|important|standard|optional|extra]
+Priority: optional
+# Package dependencies
+Depends: curl, aria2
+# Package maintainer (organization or individual)
+Maintainer: shenmo <shenmo@spark-app.store>
+# Package homepage
+Homepage: https://www.spark-app.store/
+# Package recommendations
+Recommends:
+# Package description
+Descrition: Spark Store
+ A community powered app store, based on DTK.
diff --git a/cmake/spark-appimage.desktop.in b/cmake/spark-appimage.desktop.in
new file mode 100644
index 0000000..228a84a
--- /dev/null
+++ b/cmake/spark-appimage.desktop.in
@@ -0,0 +1,9 @@
+[Desktop Entry]
+Name=@APP_NAME@
+Name[zh_CN]=@APP_NAME_ZH_CN@
+Exec=AppRun %F
+Icon=default
+Comment=@APP_COMMENT@
+Terminal=true
+Type=Application
+Categories=@APP_CATEGORIES@
\ No newline at end of file
diff --git a/cmake/spark-desktop.desktop.in b/cmake/spark-desktop.desktop.in
new file mode 100644
index 0000000..0fa070b
--- /dev/null
+++ b/cmake/spark-desktop.desktop.in
@@ -0,0 +1,11 @@
+[Desktop Entry]
+Version=1.0
+Name=@APP_NAME@
+Name[zh_CN]=@APP_NAME_ZH_CN@
+Comment=@APP_COMMENT@
+Type=@APP_TYPE@
+Exec=@APP_EXECUTE_PATH@
+Icon=@APP_EXECUTE_ICON_PATH@
+Categories=@APP_CATEGORIES@
+
+# Generated from the DesktopGenerater component of the z-Tools toolkit
\ No newline at end of file
--
2.33.1
From 4926bf79dce4b057c16338c21b2f50eb517a0e4f Mon Sep 17 00:00:00 2001
From: zinface <zinface@163.com>
Date: Sun, 11 Dec 2022 22:37:26 +0800
Subject: [PATCH 02/12] =?UTF-8?q?repo:=20=E5=A4=84=E7=90=86=20deb=20?=
=?UTF-8?q?=E5=AE=89=E8=A3=85=E8=84=9A=E6=9C=AC=E7=9A=84=E9=97=AE=E9=A2=98?=
=?UTF-8?q?=EF=BC=8C=E5=AE=8C=E6=88=90=20cmake=20=E5=8C=96=E6=9E=84?=
=?UTF-8?q?=E5=BB=BA?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Signed-off-by: zinface <zinface@163.com>
---
Makefile | 16 +++++++++++++++-
cmake/DebPackageConfig.cmake | 8 ++++----
2 files changed, 19 insertions(+), 5 deletions(-)
diff --git a/Makefile b/Makefile
index 334ead1..6fdc69b 100644
--- a/Makefile
+++ b/Makefile
@@ -27,7 +27,21 @@ release:
cd build && cmake -DCMAKE_BUILD_TYPE=Release -DPACKAGE_SUFFIX="$(SUFFIX)" ..
cd build && make -j$(CPUS)
-package: release
+# Handled automatically by "make package".
+# CPack packaging requires the plain maintainer-script names (postinst etc.).
+debian-build-scripts:
+ mkdir -p build/debian
+ cp debian/spark-store.postinst build/debian/postinst
+ cp debian/spark-store.postrm build/debian/postrm
+ cp debian/spark-store.preinst build/debian/preinst
+ cp debian/spark-store.prerm build/debian/prerm
+
+ chmod +x build/debian/postinst
+ chmod +x build/debian/postrm
+ chmod +x build/debian/preinst
+ chmod +x build/debian/prerm
+
+package: release debian-build-scripts
cd build && make package
tree build/_CPack_Packages/Linux/DEB/$(PROJECT_NAME)-*
dpkg-deb --contents build/$(PROJECT_NAME)_*$(CALENDAR)*$(SUFFIX).deb
diff --git a/cmake/DebPackageConfig.cmake b/cmake/DebPackageConfig.cmake
index d0351ec..d88fd51 100644
--- a/cmake/DebPackageConfig.cmake
+++ b/cmake/DebPackageConfig.cmake
@@ -290,10 +290,10 @@ function(add_package_descript IN_DES)
# "${CMAKE_SOURCE_DIR}/config/DEBIAN/postinst"
# "${CMAKE_SOURCE_DIR}/config/DEBIAN/prerm"
# "${CMAKE_SOURCE_DIR}/config/DEBIAN/postrm"
- "${CMAKE_SOURCE_DIR}/debian/spark-store.postinst"
- "${CMAKE_SOURCE_DIR}/debian/spark-store.postrm"
- "${CMAKE_SOURCE_DIR}/debian/spark-store.preinst"
- "${CMAKE_SOURCE_DIR}/debian/spark-store.prerm"
+ "${CMAKE_BINARY_DIR}/debian/postinst"
+ "${CMAKE_BINARY_DIR}/debian/postrm"
+ "${CMAKE_BINARY_DIR}/debian/preinst"
+ "${CMAKE_BINARY_DIR}/debian/prerm"
)
 # Set to ON to let dpkg-shlibdeps generate a better package dependency list.
--
2.33.1
From 054db0da4a5b55db3bef8baec70538a10f6612bb Mon Sep 17 00:00:00 2001
From: zinface <zinface@163.com>
Date: Mon, 12 Dec 2022 00:50:01 +0800
Subject: [PATCH 03/12] =?UTF-8?q?docs:=20=E6=B7=BB=E5=8A=A0=20Spark=20?=
=?UTF-8?q?=E6=9E=84=E5=BB=BA=20=E4=B8=8E=20CMake=20=E6=9E=84=E5=BB=BA?=
=?UTF-8?q?=E7=B3=BB=E7=BB=9F=E9=A2=84=E8=A7=88=E6=96=87=E6=A1=A3?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Signed-off-by: zinface <zinface@163.com>
---
DOCS/spark-cmake-build-system.md | 301 +++++++++++++++++++++++++++++++
1 file changed, 301 insertions(+)
create mode 100644 DOCS/spark-cmake-build-system.md
diff --git a/DOCS/spark-cmake-build-system.md b/DOCS/spark-cmake-build-system.md
new file mode 100644
index 0000000..b72805e
--- /dev/null
+++ b/DOCS/spark-cmake-build-system.md
@@ -0,0 +1,301 @@
+# Spark build and a preview of the CMake build system
+
+- Foreword
+
+  ```cmake
+  # Before v4.0, the Spark app store was always built with `qmake`.
+
+  # Since v4.0 we have added a cmake build, using the spark build as an
+  # extension to drive the project's build process.
+
+  # The store's traditional qmake build style can, of course, still be kept.
+  ```
+
+- On the relationship between `CMake` and `Spark`
+
+  While moving the build to `CMake`, we set the traditional `CMake` syntax aside and designed an extensible set of `CMake` build modules under the code name `Spark`.
+
+  Here is how a simple `Qt` application is built with traditional `CMake`:
+
+  ```cmake
+  cmake_minimum_required(VERSION 3.5.1)
+
+  project(template LANGUAGES CXX VERSION 0.0.1)
+
+  set(CMAKE_INCLUDE_CURRENT_DIR ON)
+  set(CMAKE_AUTOMOC ON)
+  set(CMAKE_AUTOUIC ON)
+  set(CMAKE_AUTORCC ON)
+
+  find_package(Qt5 COMPONENTS Core Widgets Network)
+
+  # Include directories
+  include_directories()
+
+  # Resource file paths
+  set(QRC_SOURCES "")
+
+  add_executable(${PROJECT_NAME} "main.cpp"
+      "mainwindow.cpp" "mainwindow.h"
+      ${QRC_SOURCES}
+  )
+  target_link_libraries(${PROJECT_NAME} Qt5::Core Qt5::Widgets Qt5::Network)
+  ```
+
+  A traditional `CMake` project keeps the `Makefile`-style design of having a `CMakeLists.txt` at every build point; it is, in effect, a superset of the `Makefile` build.
+
+  Eventually, after writing a great many `CMakeLists.txt` files, we felt the need for a faster way to build -- at the very least dropping the C-like boilerplate and wrapping each build purpose behind a clear interface, instead of piling traditional `CMake` syntax into the build scripts.
+
+  In the initial design we kept only the top-level `CMakeLists.txt` as the single build point.
+
+  ```cmake
+  cmake_minimum_required(VERSION 3.5.1)
+
+  project(spark-store LANGUAGES CXX VERSION 0.0.1)
+
+  # Configuration flow for building spark-store
+  include(cmake/SparkEnvConfig.cmake)          # sets a few Qt-related build switches
+  include(cmake/SparkMacrosConfig.cmake)       # declares the spark_-prefixed macros
+  include(cmake/SparkFindQt5Config.cmake)      # provides target_link_qt5 for linking targets against the qt5 libraries
+  include(cmake/SparkFindDtkConfig.cmake)      # provides target_link_dtk for linking targets against the dtk libraries
+  include(cmake/SparkThirdLibraryConfig.cmake) # provides target_link_<lib> for the libraries under third-party
+  include(cmake/SparkFindLibraries.cmake)      # provides the target_link_<lib> functions generated from the spark_ macros
+  include(cmake/SparkTranslatorConfig.cmake)   # converts qt5 .ts to .qm, producing SPARK_QM_TRANSLATIONS for compiling into the executable
+  include(cmake/SparkBuild.cmake)              # expands the spark_ macros with the provided arguments to build the executable targets
+
+  # Building the spark-store executable (spells out the content of SparkBuild.cmake; comment these when SparkBuild.cmake is used)
+  # spark_add_executable_path(${PROJECT_NAME} src ${SPARK_SOURCES} ${SPARK_QM_TRANSLATIONS})
+  # target_link_qt5(${PROJECT_NAME})              # the target needs the qt5 libraries
+  # target_link_dtk(${PROJECT_NAME})              # the target needs the dtk libraries
+  # target_link_notify(${PROJECT_NAME})           # the target needs the notify library
+  # target_link_QtNetworkService(${PROJECT_NAME}) # the target needs the third-party library
+
+  # Sub-build: spark-dstore-patch
+  # add_subdirectory(src/spark-dstore-patch) # the traditional way; the spark_ macros can build this target now
+  spark_add_executable_path(spark-dstore-patch src/spark-dstore-patch)
+  target_link_qt5(spark-dstore-patch) # the target needs the qt5 libraries
+
+  include(cmake/SparkInstall.cmake)       # uses the install mode provided by DebPackage
+  include(cmake/SparkBuildGraphviz.cmake) # adds the builddeps target; "make builddeps" generates a dependency graph
+  ```
+
+  Written this way, it struck us as a very distinctive build workflow -- a remedy for `Linux Qt` projects that, lacking a good build template, had to write their build scripts from scratch for every new project.
+
+  We never intended to invent a build tool. It was while researching how to break with the traditional `CMake` build style that I came across `XMake`; this was the `XMake` version I had installed at the time:
+
+ ```
+ $ xmake --version
+ xmake v2.6.2+202201121245, A cross-platform build utility based on Lua
+ Copyright (C) 2015-present Ruki Wang, tboox.org, xmake.io
+ _
+ __ ___ __ __ __ _| | ______
+ \ \/ / | \/ |/ _ | |/ / __ \
+ > < | \__/ | /_| | < ___/
+ /_/\_\_|_| |_|\__ \|_|\_\____|
+ by ruki, xmake.io
+
+ 👉 Manual: https://xmake.io/#/getting_started
+ 🙏 Donate: https://xmake.io/#/sponsor
+ ```
+
+  Aiming at the build approach best suited to `Linux Qt` projects, and at getting a `Linux` application project building faster, we began extending the build.
+
+  We started with simple `spark_`-prefixed wrapper functions for defining library targets and executable targets.
+
+  At the time these were `function`s rather than `macro`s; we assumed the difference hardly mattered, but later switched everything to `macro`.
+
+  ```cmake
+  # SparkMacrosConfig.cmake
+
+  cmake_minimum_required(VERSION 3.5.1)
+
+  # Define some macros that generate the build structure automatically
+
+  # spark_add_library <lib_name> [files]...
+  # Build a library from the given source files
+  # and generate a target_link_<lib_name> function for it
+  macro(spark_add_library _lib_name)
+      message("================ ${_lib_name} Library ================")
+      add_library(${_lib_name} ${ARGN})
+
+      set(SRCS ${ARGN})
+      foreach(item IN LISTS SRCS)
+          message("  -> ${item}")
+      endforeach(item IN LISTS SRCS)
+
+      function(target_link_${_lib_name} TARGET)
+          message("${_lib_name}")
+          target_link_libraries(${TARGET} ${_lib_name})
+      endfunction(target_link_${_lib_name} TARGET)
+
+  endmacro(spark_add_library _lib_name)
+
+
+  # spark_add_executable <exec_name> [files]...
+  # Build an executable from the given source files.
+  # A Qt build involves many source types that must be listed: *.h/*.cpp/*.qrc/*.qm/... etc.
+  macro(spark_add_executable _exec_name)
+
+      message("================ ${_exec_name} Executable ================")
+      add_executable(${_exec_name} ${ARGN})
+
+  endmacro(spark_add_executable _exec_name)
+  ```
+
+  With that, we had a simple way to declare build targets: wrapping `add_library` achieves the same result as before.
+
+  Each library target also gets a `target_link_`-prefixed `function`, so that consumers declare explicitly that they depend on the library.
+
+  ```cmake
+  # e.g. build a helloworld target and link it against the `qt5` base libraries.
+  # target_link_qt5 is not generated by spark_add_library;
+  # it is just a small convenience definition for using the Qt libraries,
+  # while the target_link_ functions are generated when the macros are used.
+
+  # target_link_qt5 links only a few core components: Qt5::Core Qt5::Widgets Qt5::Network
+
+  spark_add_executable(helloworld
+      main.cpp)
+
+  target_link_qt5(helloworld) # the helloworld executable target depends on Qt5
+  ```
+
+  Of course, it can also look like this:
+
+  ```cmake
+  # Build a library target Say, which builds against the qt5 core libraries
+  spark_add_library(Say say.h say.cpp)
+  target_link_qt5(Say)
+
+  # Build an executable target helloworld, which depends on the Say library
+  spark_add_executable(helloworld main.cpp)
+  target_link_Say(helloworld)
+  ```
+
+
+- Entering the world of the `Spark` build
+
+  The main direction of the `Spark` build is to extend to and be adopted by existing `Linux Qt` projects, replacing their traditional `CMake` builds.
+
+  `Spark` keeps pursuing new build styles and new extension modules: from the first simple wrappers for building libraries and executables, to a module that generates `desktop` files, one that builds `deb` packages, and a new `install` scheme.
+
+  Along the way, building libraries and executables from listed source files evolved into building a whole module from a given path.
+
+  ```cmake
+  # Build a bigimage library, which depends on qt5
+  spark_add_libraries_path(bigimage src/spark-widgets/bigimage)
+  target_link_qt5(bigimage)
+
+
+  # Build an imageshow library, which depends on bigimage
+  spark_add_libraries_path(imageshow src/spark-widgets/imageshow)
+  target_link_bigimage(imageshow)
+
+  ...
+  ```
+
+  Later even this came to be seen as the most tedious way to build, and the "one line, one library" era began: the build above boils down to just two lines of building.
+
+  One builds the bigimage library, the second builds the imageshow library, and the third fact -- that imageshow depends on bigimage -- is expressed in the dependency list with a '+'.
+
+  ```cmake
+  # Build from the given items
+  # Accepted values: a list of paths
+  # Accepted values: a list of paths of the form path+depA+depB
+  spark_add_library_realpaths(
+      src/spark-widgets/bigimage
+      src/spark-widgets/imageshow+bigimage)
+  ```
+
+- The `Spark` build and `DTK`
+
+  We added a simple extension for applications based on the Deepin Tool Kit (DTK); the following is all it takes to make your program depend on `DTK`:
+
+  ```cmake
+  # Include the SparkFindDtk module
+  include(cmake/SparkFindDtkConfig.cmake)
+
+  # Build a bigimage library, which automatically depends on qt5
+  spark_add_library_realpaths(
+      src/spark-widgets/bigimage)
+
+  # Link the bigimage library target against DTK
+  target_link_dtk(bigimage)
+  ```
+
+- The `Spark` build and `deb` packaging
+
+  We extended the `install` commands used in `CMakeLists.txt` with `CPack`-based packaging, because we dislike the `Makefile`-style `make install` way of installing.
+
+  So we added another extension module, `DebPackageConfig.cmake`. It predates the `Spark` build, which is why it does not carry the `Spark` name. It ships a template configuration: packaging works as soon as the simple package description is filled in.
+
+  Note that its first three lines are the usage instructions: copy and paste them into your top-level build script and packaging is done. Further packaging options are kept in `DebPackageConfig.cmake` as commented-out blocks.
+
+  For example, to generate the package dependency list, the part marked `SHLIBDEPS` is already prepared in the comments.
+
+  To add `pre[inst|rm]` and `post[inst|rm]` scripts to the package, the part marked `CONTROL` is already prepared in the comments.
+
+  The description file can also fill in the package name, version, architecture and so on automatically, so it does not need updating for every release.
+
+  ```ini
+  # Comment lines (usage)
+  # find_package(DebPackage PATHS ${CMAKE_SOURCE_DIR})
+  # add_package_descript(cmake/package-deb.descript)
+
+  # Name of the generated package file
+  # FileName: to be determined
+  # Setting the PACKAGE_SUFFIX variable appends a suffix to the name,
+  # e.g. hard-coded in a Makefile:
+  # OSID=$(shell lsb_release -si)
+  # OSRELEASE=$(shell lsb_release -sr)
+  # -DPACKAGE_SUFFIX="_$(OSID)$(OSRELEASE)"
+
+  # Maintainer scripts run when the deb package is installed or removed:
+  # 1. pre/post installation scripts (preinst, postinst)
+  # 2. pre/post removal scripts (prerm, postrm)
+  # ControlExtra: undefined (not supported yet)
+  # To specify them, edit the DebPackageConfig.cmake template (line 252),
+  # the CPACK_DEBIAN_PACKAGE_CONTROL_EXTRA variable
+
+  # Package type; deb supported for now, tgz (tar.gz) planned
+  Type: deb
+  # Package name (auto: uses the PROJECT_NAME variable)
+  Package: auto
+  # Package version (auto: uses the PROJECT_VERSION variable)
+  Version: auto
+  # Calendar-style version suffix
+  CalVer: true
+  # Package architecture (auto)
+  Architecture: auto
+  # Section of the system the package belongs to [admin|cli-mono|comm|database|debug|devel|doc|editors|education|electronics|embedded|fonts|games|gnome|gnu-r|gnustep|graphics|hamradio|haskell|httpd|interpreters|introspection|java|javascript|kde|kernel|libdevel|libs|lisp|localization|mail|math|metapackages|misc|net|news|ocaml|oldlibs|otherosfs|perl|php|python|ruby|rust|science|shells|sound|tasks|tex|text|utils|vcs|video|web|x11|xfce|zope]
+  Section: utils
+  # Package priority [required|important|standard|optional|extra]
+  Priority: optional
+  # Package dependencies
+  Depends: curl, aria2
+  # Package maintainer (organization or individual)
+  Maintainer: shenmo <shenmo@spark-app.store>
+  # Package homepage
+  Homepage: https://www.spark-app.store/
+  # Package recommendations
+  Recommends:
+  # Package description
+  Descrition: Spark Store
+   A community powered app store, based on DTK.
+  ```
+
+
+- Afterword: the origin and future of the `Spark` build
+
+  In a real sense the `Spark` build is just a fun idea that received some implementation.
+
+  We have embraced qmake, and we have embraced cmake. We are loyal users of a chaotic mix of IDEs and code editors, as if the war between IDEs and editors had never stopped.
+
+  Before any work started, `Spark` was only an idea: try to move the Spark app store from a `qmake` build to a `cmake` build. Like wildfire among the sparks, it had a mind of its own -- to break the traditional way of building, or at least to rework the existing build setup.
+
+  It never contributed anything to the store, not even a `bug fix`. It was merely a quietly restless `VSCode` user -- in truth a `CMake` user -- intent on breaking (changing) the store's traditional build, because `qmake` could not power `VSCode`'s code assistance for him.
+
+  All he could do was start breaking (reworking) a project that had been growing for years, converting it to a `cmake`-first build, while probing the `Spark` build's bottlenecks in other open-source projects and expanding its wildly extensible modules.
+
+  Much later, the idea was finally put into practice under the store's `4.0` plan. By then the `Spark` build had already served many `Linux Qt` projects, including some very complex build explorations; it broke through one build pattern after another and finally settled the basic build template.
+
+  Now, powered by the excellent `CMake` extension, the `Spark` build strengthens code editing in `VSCode`. After a long detour it has returned to where it all began, taking up its build mission: the Spark app store `4.0` and the versions to come.
\ No newline at end of file
--
2.33.1
From 75aab8e3b58a250fce670f8e434bf76ad7866275 Mon Sep 17 00:00:00 2001
From: zinface <zinface@163.com>
Date: Thu, 15 Dec 2022 19:59:46 +0800
Subject: [PATCH 04/12] =?UTF-8?q?repo:=20=E6=9B=B4=E6=96=B0=E7=94=A8?=
=?UTF-8?q?=E4=BA=8E=20Qt5=20Svg=20=E4=BE=9D=E8=B5=96=E7=9A=84=E6=9E=84?=
=?UTF-8?q?=E5=BB=BA=E5=86=85=E5=AE=B9?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
CMakeLists.txt | 1 +
cmake/SparkFindQt5Config.cmake | 4 ++--
2 files changed, 3 insertions(+), 2 deletions(-)
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 51cc090..f3edc6c 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -32,6 +32,7 @@ spark_add_library_realpaths(
target_link_qt5_dbus(dbus)
target_link_qt5_Concurrent(common)
target_link_qt5_Concurrent(backend)
+target_link_qt5_Svg(common)
target_link_qt5_WebEngineWidgets(common)
spark_add_executable_path(${PROJECT_NAME} src
diff --git a/cmake/SparkFindQt5Config.cmake b/cmake/SparkFindQt5Config.cmake
index 6efade6..0300b3d 100644
--- a/cmake/SparkFindQt5Config.cmake
+++ b/cmake/SparkFindQt5Config.cmake
@@ -1,6 +1,6 @@
cmake_minimum_required(VERSION 3.5.1)
-find_package(Qt5 COMPONENTS Core Widgets Network Concurrent WebEngineWidgets Sql WebSockets REQUIRED)
+find_package(Qt5 COMPONENTS Core Widgets Network Concurrent WebEngineWidgets REQUIRED)
# function(target_link_qt5 NAME)
# target_link_libraries(${NAME}
@@ -132,7 +132,7 @@ spark_add_links_qt5(
# SerialPort
# ServiceSupport
# Sql
- # Svg
+ Svg
# Test
# ThemeSupport
# UiPlugin
--
2.33.1
From 0ae64f99cd269a06009818d0c553661e2fd5981b Mon Sep 17 00:00:00 2001
From: zinface <zinface@163.com>
Date: Mon, 19 Dec 2022 02:58:17 +0800
Subject: [PATCH 05/12] =?UTF-8?q?repo:=20=E6=9B=B4=E6=96=B0=E7=94=A8?=
=?UTF-8?q?=E4=BA=8E=E6=94=AF=E6=8C=81=20BaseWidgetOpacity=20=E5=9F=BA?=
=?UTF-8?q?=E7=A1=80=E7=B1=BB=E7=9A=84=E6=9E=84=E5=BB=BA?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
CMakeLists.txt | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/CMakeLists.txt b/CMakeLists.txt
index f3edc6c..d469796 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -24,11 +24,13 @@ spark_add_library_realpaths(
src/dbus
src/utils+dbus
src/backend+utils
+ src/widgets/base
src/widgets/common+backend
src/widgets+common
src/pages+widgets
)
+target_link_dtk(base)
target_link_qt5_dbus(dbus)
target_link_qt5_Concurrent(common)
target_link_qt5_Concurrent(backend)
@@ -38,9 +40,9 @@ target_link_qt5_WebEngineWidgets(common)
spark_add_executable_path(${PROJECT_NAME} src
${QRC_SOURCES} ${SPARK_QM_TRANSLATIONS}
)
+target_link_base(${PROJECT_NAME})
target_link_dbus(${PROJECT_NAME})
target_link_pages(${PROJECT_NAME})
-target_link_dtk(${PROJECT_NAME})
spark_add_executable_path(spark-dstore-patch src/spark-dstore-patch)
--
2.33.1
From 3815f39926a5761769b55922cb3471ffa6e60baf Mon Sep 17 00:00:00 2001
From: zinface <zinface@163.com>
Date: Mon, 30 Jan 2023 20:48:25 +0800
Subject: [PATCH 06/12] =?UTF-8?q?spark:=20=E5=88=9B=E5=BB=BA=E6=96=B0?=
=?UTF-8?q?=E7=9A=84=E6=A8=A1=E5=9D=97=E7=94=A8=E4=BA=8E=20debian/changelo?=
=?UTF-8?q?g=20=E7=9A=84=E7=89=88=E6=9C=AC=E5=8F=B7=E9=87=8D=E5=86=99?=
=?UTF-8?q?=E8=A7=84=E5=88=99?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
1. SparkDebianChangelogVersion.cmake
   This file provides the spark_debian_changelog_override_version macro:
   it reads the latest version number from the given debian/changelog file and overrides the project version with it.
@Since: v4.0.0
---
CMakeLists.txt | 4 ++
cmake/SparkDebianChangelogVersion.cmake | 58 +++++++++++++++++++++++++
2 files changed, 62 insertions(+)
create mode 100644 cmake/SparkDebianChangelogVersion.cmake
diff --git a/CMakeLists.txt b/CMakeLists.txt
index d469796..990c7be 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -11,6 +11,10 @@ include(cmake/SparkFindDtkConfig.cmake) # 提供了 target_link_dtk 用
 include(cmake/SparkTranslatorConfig.cmake)      # converts qt5 .ts to .qm, producing the SPARK_QM_TRANSLATIONS variable consumed when building the executable
 include(cmake/SparkMacrosExtendConfig.cmake)    # expands the spark_ macros from the provided arguments into executable targets
 include(cmake/SparkInstallMacrosConfig.cmake)   # provides the spark_install macros for installing targets, files, programs, directories, changelogs, etc.
+include(cmake/SparkDebianChangelogVersion.cmake)# provides the spark_debian_ macro for overriding PROJECT_VERSION
+
+# Before anything else, rewrite this build system's PROJECT_VERSION from the project's debian/changelog
+spark_debian_changelog_override_version(debian/changelog)
 # Resource file paths
set(QRC_SOURCES "src/assets/assets.qrc")
diff --git a/cmake/SparkDebianChangelogVersion.cmake b/cmake/SparkDebianChangelogVersion.cmake
new file mode 100644
index 0000000..3b3add4
--- /dev/null
+++ b/cmake/SparkDebianChangelogVersion.cmake
@@ -0,0 +1,58 @@
+# SparkDebianChangelogVersion
+
+# Try to read the first line of the given debian/changelog file, e.g.
+# spark-store (4.2.2) stable; urgency=medium
+# and apply the (version) part to PROJECT_VERSION
+
+
+macro(spark_debian_changelog_override_version _CHANGELOG_FILE_PATH)
+    set(CHANGELOG_FILE_PATH ${_CHANGELOG_FILE_PATH})
+    set(CHANGELOG_FILE_EXISTS FALSE)
+
+    # First check: if the file does not seem to exist, try our best to prove that it does
+    if(NOT EXISTS ${CHANGELOG_FILE_PATH})
+
+        # Since CMake v3.19, file(REAL_PATH <path> <out-var>) resolves a path to its absolute form
+        if(CMAKE_VERSION GREATER_EQUAL 3.19)
+            file(REAL_PATH ${CHANGELOG_FILE_PATH} CHANGELOG_FILE_ABSOLUTE_PATH)
+            if(EXISTS ${CHANGELOG_FILE_ABSOLUTE_PATH})
+                set(CHANGELOG_FILE_EXISTS TRUE)
+                set(CHANGELOG_FILE_PATH ${CHANGELOG_FILE_ABSOLUTE_PATH})
+            endif(EXISTS ${CHANGELOG_FILE_ABSOLUTE_PATH})
+        endif(CMAKE_VERSION GREATER_EQUAL 3.19)
+
+        # Second check: use file(SIZE)
+        if(NOT CHANGELOG_FILE_EXISTS)
+            file(SIZE ${CHANGELOG_FILE_PATH} CHANGELOG_FILE_SIZE)
+            if(CHANGELOG_FILE_SIZE GREATER 0)
+                set(CHANGELOG_FILE_EXISTS TRUE)
+            endif(CHANGELOG_FILE_SIZE GREATER 0)
+        endif(NOT CHANGELOG_FILE_EXISTS)
+
+        # Third check: join the path onto CMAKE_SOURCE_DIR
+        if(NOT CHANGELOG_FILE_EXISTS)
+            if(EXISTS ${CMAKE_SOURCE_DIR}/${CHANGELOG_FILE_PATH})
+                set(CHANGELOG_FILE_PATH ${CMAKE_SOURCE_DIR}/${CHANGELOG_FILE_PATH})
+                set(CHANGELOG_FILE_EXISTS TRUE)
+            endif(EXISTS ${CMAKE_SOURCE_DIR}/${CHANGELOG_FILE_PATH})
+        endif(NOT CHANGELOG_FILE_EXISTS)
+    endif(NOT EXISTS ${CHANGELOG_FILE_PATH})
+
+ message("> V = ${CHANGELOG_FILE_PATH}")
+ if(CHANGELOG_FILE_EXISTS)
+ file(READ ${CHANGELOG_FILE_PATH} CHANGELOG_CONTENT LIMIT 20)
+
+ string(FIND ${CHANGELOG_CONTENT} "(" V_PRE) # +1 to V_BEGIN
+ string(FIND ${CHANGELOG_CONTENT} ")" V_END)
+
+ math(EXPR V_BEGIN "${V_PRE}+1")
+ math(EXPR V_LENGTH "${V_END}-${V_BEGIN}")
+
+ string(SUBSTRING ${CHANGELOG_CONTENT} ${V_BEGIN} ${V_LENGTH} V)
+
+ message("> V = ${CHANGELOG_CONTENT}")
+ message("> V = [${V}]")
+
+ set(PROJECT_VERSION ${V})
+ endif(CHANGELOG_FILE_EXISTS)
+endmacro(spark_debian_changelog_override_version _CHANGELOG_FILE_PATH)
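+
+# Worked example: with a first line of "spark-store (4.2.2) stable; urgency=medium",
+# string(FIND) gives V_PRE=12 and V_END=18, so V_BEGIN=13 and V_LENGTH=5, and the
+# SUBSTRING call yields V="4.2.2", which then overrides PROJECT_VERSION.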
--
2.33.1
From 0f8ec4935071280518a8466ce36fbbd84ed1cbb0 Mon Sep 17 00:00:00 2001
From: zinface <zinface@163.com>
Date: Mon, 30 Jan 2023 21:23:15 +0800
Subject: [PATCH 07/12] =?UTF-8?q?spark:=20=E5=AF=B9=20SparkDebianChangelog?=
=?UTF-8?q?Version.cmake=20=E6=A8=A1=E5=9D=97=E7=9A=84=E6=89=A9=E5=B1=95?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
1. Add a file dump of SPARK_OVERRIDE_VERSION
   to resolve the case where the Makefile cannot determine the latest version when multiple deb packages are present
---
Makefile | 15 +++++++++++++--
cmake/SparkDebianChangelogVersion.cmake | 1 +
2 files changed, 14 insertions(+), 2 deletions(-)
diff --git a/Makefile b/Makefile
index 6fdc69b..a68d7d1 100644
--- a/Makefile
+++ b/Makefile
@@ -8,6 +8,17 @@ SUFFIX=_$(OSID)$(OSRELEASE)
endif
PROJECT_NAME=spark-store
+PROJECT_VERSION=*
+
+ifneq ($(wildcard build/SPARK_OVERRIDE_VERSION),)
+SPARK_OVERRIDE_VERSION=`cat build/SPARK_OVERRIDE_VERSION`
+.PHONY: override-version
+override-version:
+ @echo $(SPARK_OVERRIDE_VERSION)
+ @echo "wildcard - good: $(wildcard build/SPARK_OVERRIDE_VERSION)"
+ @echo "wildcard - bad.: $(wildcard build/SPARK_OVERRIDE_VERSIONS)"
+PROJECT_VERSION=$(SPARK_OVERRIDE_VERSION)-
+endif
all:
mkdir -p build
@@ -44,7 +55,7 @@ debian-build-scripts:
package: release debian-build-scripts
cd build && make package
tree build/_CPack_Packages/Linux/DEB/$(PROJECT_NAME)-*
- dpkg-deb --contents build/$(PROJECT_NAME)_*$(CALENDAR)*$(SUFFIX).deb
+ dpkg-deb --contents build/$(PROJECT_NAME)_$(PROJECT_VERSION)$(CALENDAR)*$(SUFFIX).deb
# cd build/_CPack_Packages/Linux/DEB/$(PROJECT_NAME)_*$(CALENDAR)*$(SUFFIX).deb && find .
builddeps:
@@ -54,7 +65,7 @@ cpus:
@echo "CPU数量: $(CPUS)"
copytosource:package
- cp build/$(PROJECT_NAME)_*$(CALENDAR)*.deb .
+ cp build/$(PROJECT_NAME)_$(PROJECT_VERSION)$(CALENDAR)*.deb .
 # Enter qdebug mode; it is disabled by default in deepin (check with env | grep QT), as configured in /etc/X11/Xsession.d/00deepin-dde-env
 # 1. Suppress qt debug output: qt.*.debug=false
diff --git a/cmake/SparkDebianChangelogVersion.cmake b/cmake/SparkDebianChangelogVersion.cmake
index 3b3add4..05f9e52 100644
--- a/cmake/SparkDebianChangelogVersion.cmake
+++ b/cmake/SparkDebianChangelogVersion.cmake
@@ -54,5 +54,6 @@ macro(spark_debian_changelog_override_version _CHANGELOG_FILE_PATH)
message("> V = [${V}]")
set(PROJECT_VERSION ${V})
+ file(WRITE ${CMAKE_BINARY_DIR}/SPARK_OVERRIDE_VERSION ${V})
endif(CHANGELOG_FILE_EXISTS)
endmacro(spark_debian_changelog_override_version _CHANGELOG_FILE_PATH)
--
2.33.1
From 5455594cf1134a29dee32257e6d8d3d8b78c8fd6 Mon Sep 17 00:00:00 2001
From: zinface <zinface@163.com>
Date: Mon, 30 Jan 2023 23:17:24 +0800
Subject: [PATCH 08/12] =?UTF-8?q?repo:=20=E5=A4=84=E7=90=86=20v4.2.2=20?=
=?UTF-8?q?=E7=89=88=E6=9C=AC=E6=97=B6=E6=9C=9F=E6=96=B0=E5=A2=9E=E7=9A=84?=
=?UTF-8?q?=E5=AE=89=E8=A3=85=E6=96=87=E4=BB=B6?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
pkg/usr/share/ssinstall/transhell
-> /usr/share/ssinstall
---
CMakeLists.txt | 5 +++++
cmake/SparkInstallMacrosConfig.cmake | 2 +-
2 files changed, 6 insertions(+), 1 deletion(-)
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 990c7be..4e22685 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -86,6 +86,11 @@ spark_install_file(/usr/share/icons/hicolor/scalable/apps
spark_install_program(/tmp/spark-store-install
pkg/tmp/spark-store-install/feedback.sh)
+# Which translations to install? Since: v4.2.2
+spark_install_directory(/usr/share/ssinstall
+ pkg/usr/share/ssinstall/transhell
+)
+
 # Install the qm files?
spark_install_file(/usr/share/spark-store/translations
${SPARK_QM_TRANSLATIONS})
diff --git a/cmake/SparkInstallMacrosConfig.cmake b/cmake/SparkInstallMacrosConfig.cmake
index bf906bf..e1fd7a0 100644
--- a/cmake/SparkInstallMacrosConfig.cmake
+++ b/cmake/SparkInstallMacrosConfig.cmake
@@ -79,7 +79,7 @@ macro(spark_install_directory INSTALL_DIRECTORY_DIR INSTALL_DIRECOTRY)
# message(FATAL_ERROR " directory: ${INSTALL_DIRECTORY_FIND_INDEX_SUBSTRING_FILE_GLOB_LIST_LENGTH}")
else()
- message(FATAL_ERROR "install ${INSTALL_DIRECTORY_DIR}")
+ # message(FATAL_ERROR "install ${INSTALL_DIRECTORY_DIR}")
install(DIRECTORY
${INSTALL_DIRECOTRY} ${ARGN}
--
2.33.1
From e4e6ca8b83f86e50ab32582a00b85bbe7057f454 Mon Sep 17 00:00:00 2001
From: zinface <zinface@163.com>
Date: Mon, 30 Jan 2023 23:18:44 +0800
Subject: [PATCH 09/12] =?UTF-8?q?spark:=20=E5=AF=B9=20SparkTranslatorConfi?=
=?UTF-8?q?g.cmake=20=E6=A8=A1=E5=9D=97=E7=9A=84=E6=89=A9=E5=B1=95?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
1. Add a file dump of SPARK_QM_TRANSLATIONS
No other changes
---
cmake/SparkTranslatorConfig.cmake | 5 +++++
1 file changed, 5 insertions(+)
diff --git a/cmake/SparkTranslatorConfig.cmake b/cmake/SparkTranslatorConfig.cmake
index 5375fe3..46de519 100644
--- a/cmake/SparkTranslatorConfig.cmake
+++ b/cmake/SparkTranslatorConfig.cmake
@@ -12,6 +12,11 @@ endforeach(item IN LISTS SPARK_TRANSLATIONS)
qt5_add_translation(SPARK_QM_TRANSLATIONS
${SPARK_TRANSLATIONS})
+file(WRITE ${CMAKE_BINARY_DIR}/SPARK_QM_TRANSLATIONS "")
+foreach(item IN LISTS SPARK_QM_TRANSLATIONS)
+ file(APPEND ${CMAKE_BINARY_DIR}/SPARK_QM_TRANSLATIONS "${item}\n")
+endforeach(item IN LISTS SPARK_QM_TRANSLATIONS)
+
message("translator(ts -> qm):")
foreach(item IN LISTS SPARK_QM_TRANSLATIONS)
message("-> ${item}")
--
2.33.1
From a0eb885925c78b8966c46540a993f4cc6983c4c6 Mon Sep 17 00:00:00 2001
From: zinface <zinface@163.com>
Date: Mon, 30 Jan 2023 23:20:12 +0800
Subject: [PATCH 10/12] =?UTF-8?q?make:=20=E5=A4=84=E7=90=86=E9=BB=98?=
=?UTF-8?q?=E8=AE=A4=20make=20=E7=9B=AE=E6=A0=87=E4=B8=BA=20override-versi?=
=?UTF-8?q?on=20=E7=9A=84=E9=97=AE=E9=A2=98?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
Makefile | 16 ++++++++++------
1 file changed, 10 insertions(+), 6 deletions(-)
diff --git a/Makefile b/Makefile
index a68d7d1..ba45029 100644
--- a/Makefile
+++ b/Makefile
@@ -12,11 +12,6 @@ PROJECT_VERSION=*
ifneq ($(wildcard build/SPARK_OVERRIDE_VERSION),)
SPARK_OVERRIDE_VERSION=`cat build/SPARK_OVERRIDE_VERSION`
-.PHONY: override-version
-override-version:
- @echo $(SPARK_OVERRIDE_VERSION)
- @echo "wildcard - good: $(wildcard build/SPARK_OVERRIDE_VERSION)"
- @echo "wildcard - bad.: $(wildcard build/SPARK_OVERRIDE_VERSIONS)"
PROJECT_VERSION=$(SPARK_OVERRIDE_VERSION)-
endif
@@ -82,4 +77,13 @@ copytosource:package
enter-qdebug-mode:
 	# Enter a fresh bash environment
@# export QT_LOGGING_RULES=".debug=true; qt.*.debug=false; dtk.*.debug=false; dde.*.debug=false; qtc*=false; " && bash
- export QT_LOGGING_RULES=".debug=true" && bash
\ No newline at end of file
+ export QT_LOGGING_RULES=".debug=true" && bash
+
+
+ifneq ($(wildcard build/SPARK_OVERRIDE_VERSION),)
+.PHONY: override-version
+override-version:
+ @echo $(SPARK_OVERRIDE_VERSION)
+ @echo "wildcard - good: $(wildcard build/SPARK_OVERRIDE_VERSION)"
+ @echo "wildcard - bad.: $(wildcard build/SPARK_OVERRIDE_VERSIONS)"
+endif
\ No newline at end of file
--
2.33.1
From a22611edeb5706aa6b574b508f29b25f599346f5 Mon Sep 17 00:00:00 2001
From: zinface <zinface@163.com>
Date: Sun, 5 Feb 2023 16:09:40 +0800
Subject: [PATCH 11/12] =?UTF-8?q?spark:=20=E5=A4=84=E7=90=86=E7=89=88?=
=?UTF-8?q?=E6=9C=AC=E5=8F=B7=204.2.3~test1=20=E6=97=B6=E9=BB=98=E8=AE=A4?=
=?UTF-8?q?=E7=9A=84=E6=88=AA=E5=8F=96=E5=AD=97=E7=AC=A6=E6=95=B0=E8=BF=87?=
=?UTF-8?q?=E7=9F=AD=E9=97=AE=E9=A2=98?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
cmake/SparkDebianChangelogVersion.cmake | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/cmake/SparkDebianChangelogVersion.cmake b/cmake/SparkDebianChangelogVersion.cmake
index 05f9e52..ee2f339 100644
--- a/cmake/SparkDebianChangelogVersion.cmake
+++ b/cmake/SparkDebianChangelogVersion.cmake
@@ -40,7 +40,8 @@ macro(spark_debian_changelog_override_version _CHANGELOG_FILE_PATH)
message("> V = ${CHANGELOG_FILE_PATH}")
if(CHANGELOG_FILE_EXISTS)
- file(READ ${CHANGELOG_FILE_PATH} CHANGELOG_CONTENT LIMIT 20)
+ file(READ ${CHANGELOG_FILE_PATH} CHANGELOG_CONTENT LIMIT 30)
+    # fix: "spark-store (4.2.3~test1)" already exceeds 20 characters, so read 30 to be safe
string(FIND ${CHANGELOG_CONTENT} "(" V_PRE) # +1 to V_BEGIN
string(FIND ${CHANGELOG_CONTENT} ")" V_END)
--
2.33.1
From 64d92ae46637c672842a7734327cbe0cb4f89ae1 Mon Sep 17 00:00:00 2001
From: zinface <zinface@163.com>
Date: Sat, 3 Feb 2024 02:38:06 +0800
Subject: [PATCH 12/12] =?UTF-8?q?repo:=20=E4=B8=80=E6=AC=A1=E6=80=A7?=
=?UTF-8?q?=E5=A4=84=E7=90=86=204.2.3=20-4.2.10=20=E6=97=B6=E6=9C=9F?=
=?UTF-8?q?=E7=9A=84=E8=B7=A8=E8=B6=8A=E5=BC=8F=E6=9E=84=E5=BB=BA=E4=B8=8E?=
=?UTF-8?q?=E5=85=A8=E9=87=8F=E7=BA=A7=E6=9B=B4=E6=96=B0?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
CMakeLists.txt | 72 ++-
assets/spark.png | Bin 4959 -> 8822 bytes
cmake/SparkAppimageConfig.cmake | 210 ++++---
...nfig.cmake => SparkDebPackageConfig.cmake} | 41 +-
cmake/SparkDebianChangelogVersion.cmake | 6 +-
cmake/SparkDesktopMacros.cmake | 43 +-
cmake/SparkEnvConfig.cmake | 18 +-
cmake/SparkFindDtkConfig.cmake | 2 +-
cmake/SparkFindQt5Config.cmake | 13 +-
cmake/SparkFindQt6Config.cmake | 110 +++-
cmake/SparkMacrosConfig.cmake | 348 +++++++++--
cmake/SparkMacrosExtendConfig.cmake | 547 +++++++++++++-----
cmake/SparkTranslatorConfig.cmake | 77 ++-
cmake/linuxdeployqt-help | 2 +-
...sktop.in => spark-appimage.desktop.in.txt} | 2 +-
...eb.descript => spark-deb-package.descript} | 8 +-
...esktop.in => spark-desktop.desktop.in.txt} | 4 +-
17 files changed, 1127 insertions(+), 376 deletions(-)
rename cmake/{DebPackageConfig.cmake => SparkDebPackageConfig.cmake} (90%)
rename cmake/{spark-appimage.desktop.in => spark-appimage.desktop.in.txt} (83%)
rename cmake/{package-deb.descript => spark-deb-package.descript} (91%)
rename cmake/{spark-desktop.desktop.in => spark-desktop.desktop.in.txt} (88%)
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 4e22685..90fc62d 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -15,48 +15,53 @@ include(cmake/SparkDebianChangelogVersion.cmake)# 提供了 spark_debian_ 开头
 # Before anything else, rewrite this build system's PROJECT_VERSION from the project's debian/changelog
spark_debian_changelog_override_version(debian/changelog)
+# Use git to get the current branch name
+execute_process(COMMAND
+ git symbolic-ref --short -q HEAD
+ WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
+ OUTPUT_STRIP_TRAILING_WHITESPACE
+ OUTPUT_VARIABLE PROJECT_BRANCH)
+
+spark_debug_message("APP_BRANCH: ${PROJECT_BRANCH}")
+spark_debug_message("APP_VERSION: ${PROJECT_VERSION}")
+add_compile_definitions(APP_BRANCH="${PROJECT_BRANCH}")
+add_compile_definitions(APP_VERSION="${PROJECT_VERSION}")
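+
+# The definitions are then visible to C++ sources, e.g. (illustrative only):
+#   qDebug() << "version:" << APP_VERSION << "branch:" << APP_BRANCH;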
 # Resource file paths
set(QRC_SOURCES "src/assets/assets.qrc")
-include_directories(src)
+translator_qt(SPARK_QM_TRANSLATIONS ${CMAKE_SOURCE_DIR}/translations/*.ts)
+spark_debug_message("SPARK_QM_TRANSLATIONS: ${SPARK_QM_TRANSLATIONS}")
-# Build from the given items
-# Accepted values: a list of paths
-# Accepted values: a list of paths of the form path+depA+depB
-spark_add_library_realpaths(
+# spark_add_executable_path <target> <path> [files ... paths]
+# Build an executable from the given path
+spark_add_executable_path(${PROJECT_NAME} src
src/dbus
- src/utils+dbus
- src/backend+utils
+ src/utils
+ src/backend
src/widgets/base
- src/widgets/common+backend
- src/widgets+common
- src/pages+widgets
-)
-
-target_link_dtk(base)
-target_link_qt5_dbus(dbus)
-target_link_qt5_Concurrent(common)
-target_link_qt5_Concurrent(backend)
-target_link_qt5_Svg(common)
-target_link_qt5_WebEngineWidgets(common)
-
-spark_add_executable_path(${PROJECT_NAME} src
+ src/widgets/common
+ src/widgets
+ src/pages
${QRC_SOURCES} ${SPARK_QM_TRANSLATIONS}
)
-target_link_base(${PROJECT_NAME})
-target_link_dbus(${PROJECT_NAME})
-target_link_pages(${PROJECT_NAME})
+target_link_qt5(${PROJECT_NAME})
+target_link_dtk(${PROJECT_NAME})
+target_link_qt5_svg(${PROJECT_NAME})
+target_link_qt5_dbus(${PROJECT_NAME})
+target_link_qt5_concurrent(${PROJECT_NAME})
+target_link_qt5_WebEngineWidgets(${PROJECT_NAME})
-spark_add_executable_path(spark-dstore-patch src/spark-dstore-patch)
-target_link_qt5(spark-dstore-patch)
+# spark_add_executable_path(spark-dstore-patch src/spark-dstore-patch)
+# target_link_qt5(spark-dstore-patch)
 # Install the main programs spark-store and spark-dstore-patch
spark_install_target(/opt/durapps/${PROJECT_NAME}/bin
${PROJECT_NAME}
- spark-dstore-patch)
+ # spark-dstore-patch
+)
 # Install the systemd service (Spark Store update notifier)
spark_install_file(/usr/lib/systemd/system/
@@ -76,16 +81,23 @@ spark_install_file(/usr/share/bash-completion/completions
 # Install the desktop files
spark_install_file(/usr/share/applications
- pkg/usr/share/applications/spark-store.desktop)
+ pkg/usr/share/applications/spark-store.desktop
+ pkg/usr/share/applications/open-me-in-terminal.desktop)
 # Install the icon files
spark_install_file(/usr/share/icons/hicolor/scalable/apps
- pkg/usr/share/icons/hicolor/scalable/apps/spark-store.svg)
+ pkg/usr/share/icons/hicolor/scalable/apps/spark-store.svg
+ pkg/usr/share/icons/hicolor/scalable/apps/open-me-in-terminal.png)
 # Which scripts to install?
spark_install_program(/tmp/spark-store-install
pkg/tmp/spark-store-install/feedback.sh)
+# Which translations to install? Since: v4.3.10
+spark_install_file(/usr/share/aptss/transhell
+ pkg/usr/share/aptss/transhell/aptss_en_US.transhell
+ pkg/usr/share/aptss/transhell/aptss_zh_CN.transhell)
+
 # Which translations to install? Since: v4.2.2
spark_install_directory(/usr/share/ssinstall
pkg/usr/share/ssinstall/transhell
@@ -101,5 +113,5 @@ spark_install_changelog(${CMAKE_SOURCE_DIR}/debian/changelog)
include(cmake/SparkBuildGraphviz.cmake)
 # Comment lines (usage)
-find_package(DebPackage PATHS ${CMAKE_SOURCE_DIR})
-add_package_descript(cmake/package-deb.descript)
\ No newline at end of file
+find_package(SparkDebPackage PATHS ${CMAKE_SOURCE_DIR})
+add_package_descript(cmake/spark-deb-package.descript)
\ No newline at end of file
diff --git a/assets/spark.png b/assets/spark.png
index 544e2c7cff5f70894e27a7a717d4a62120630b7a..bac324f1a8a025c65ea260274ebf078f6f52ce39 100644
GIT binary patch
literal 8822
zcmX9^2RK{r7dB(mUZq0SrdladTkTaNwivagVzfqVwJBO^6QV|iwu(ya+M9~nwOc!4
z)TXrl@%uk{ZtnBk``vTT_nvd!bH4N41c-qaE%hyGA|fJM9c^`E!ny4Fpac;9{n+%j
z2`7N#V=Z+eVj?dhBP4(|o^XZgxi-|7i0D4w^+6n1`pBPfk-|?$UxQ+UhJu9rhJX5}
zNWvu!KMhMiwP&85E?#~_YCbLwelE^D0d9Uzd9-x&Ar_HzH;IUNh;-CdOao_kP;PJd
z*TUA{o(@-hOu^xB1splt>-I8IJeo`-acVsFZ<K+gw<4YCh^MUF_e8$pTg2#~{@uF)
z;RixmfnOOp{kt`{y}AcE>pU0)0YzO*j=LOxDq2(I4ud5R7wDh4nwcXk%*+WtSHl)Y
zGGeuHvs}Z!2UiMvS66IMtC8Y1q#YR%MY}9TDH*c&cSqy7)9HNE>B_H2e&Nz;PtJVb
z4GQh6VsPVXhjVc_s}&2nnLlB<B&CWwa9{Cw@842#hjl=i1gBH8=4anPhW$XAeBU1r
z173U(H(pv63#+*pI;~OUo#aq6AKicS@89-mYO}-X%yvJg-52yCbl?NKb~or@6X}Y4
z{DU{TQEzmSX-fL|`?GCipHkDA3d^^?=O$n*vgt0@a(kwQo-*#aYA!;~epZ^kY6t1R
z%Qp8u8Kk+dU;4RVpkp0aBNl1wjk|cdnI!o;(?Y5|ju-_!!=K*&nh*mt+=S*!u5xW0
ztSpsuliXo>00j_Lzz!N@GJ6~EN@|N&s*KY@bni6s?mX)Fv+R+SqyAFZ+O%*Jke0xX
zePXNd{Li;b_Y2K*OHsB5@b_mr4%@GRk9X5v(#|oW#7z1#RE-dNrP<F<2Bj*o-}v&x
z<-+erGK>ki+2wztOmF|<;Y>|UGajWUV!F_6*AlR=->qZAN2bxs;~r!qF_3(C{#lye
zF`I^AW0}!FL`6)nt420!N<t<a!M5=T)ibsQ$Z=CGX2~FcbFq9Rt{0u#&&Fa|M@z!P
zmd*u7+vTw*+;JiWahU4&G?0DGzWw(}vVgx%_oBKQw(%^yUqV=*|0at~F0@@JyX=8t
z+pDPJNCpyUTBTd+Ne@mf=YtoIwW&ITIwj?u?=;Ne$BRAn3_{+S^4K{k`&+k+S!6~W
z<*_$=!<QZ#dfYK4D$L*&d~E;i0VEst+|7sXW3HITT%4GW0Bq!9d0Tus^QIXsx!K)>
zu63Zd%5Zeq$U8gK55~Txwbg8(y%nP%g}=$qmdi?MAF&6mtcub%%qSHEhj_>=N7x9`
zxHl@l*?YMiBc&i=-cwLnjp$-d)*$|SCWqIM9UDWkjKo|WpvsBQZS2RspLd>d9&bXE
z&^3B@hFB84?1Nu$z{GEmW)855(y}bIO6?^1CiOK8%hr&S%Q>-jR_N<TM3WMXb@20H
zJZ;nhS)h0uK14R_$4-2~B>$RCD7#|~P2LSs#IJE^ezRP3PbLE7s+uPJ&J$kvXSsq&
zP%?JkA*<GnBHnx&nfH%^NeWE=obw@Z+PyIhn|^F$u`r974a$}UAlee6>K$^cyjA8d
zP*$z3oLcLNz&BC&i@C?cX*|Nj<KUccM8x*Q_Z6=^?;ce=D|7~ps|N?s!m9LEmREu1
z4+jB>X+ANNH(}ke?>FUYG(H`T<lQ&rgOl*YL_Hvlvmz=|W#<n5_d@<?UbozZRHHie
z?$5Ca1?-dD-tA>-Q(Hnbr?J?QPTmxXi26`ez6EZcy=63h*C&TY&qDWeidm&1X%S4H
zRKTd)nc57N1GVfBA{f&pm^jyl7%1Fhq@e0O0lsU%n|6DZ6_L(Yr8ZpeZZs|+ltcX3
zLRWP|c+?m4G<Zr9lJ6^41$+K$TsFT=B04q`!Tw66c=Q3}XqH7V(xowZUThMkRDP~|
z51s{-C-(b9nU^hKsWGCcGLET>Oj8z7Q8C)b<%1w5zDfNz%@~%)q`vZ&P8Wl6-FQtP
z_FljGtxH`8LbW9_RN3Y=-(R#?AI%>e7k(vmL9(q|OnL7Tf<IK0Cc|x}I95Cckod&C
zbAt9)zvu6`&LhYngb$v@y=<y##0o772--Y<)}%7>DP{`{$qi-9Mt9Rqe7mG((Wkic
zOM;*;qhR4vfKQ_R+uL4Xwc!JLqfK4n{5jUPcZmg8mDWcd&V^D7?c1Hy1cW;>aX}ud
z2R{we{Hkatz<k)R#YKf~ty+8v6AH#7MbDb{V>&@nIdSU>ljr6~m(wkU8|HvShSAPG
z*}g4Jx|>R<uEvj&Id;1(k+W0Feo23AjAG%{O}%Aw;I<T*RI{bF^ma!8O|5YEfcx0t
z(rID=*iAI4H?6qXVeA1U9do0f)pTRz+PJW}<5IVoeik!!^h^fGx(I!GY7Rqz;UnRb
z?!kW^KyG~GlnFopZVGNxf6a<?n|&iN%Sx(I{}Dth2y9&IN}VoR&l`rATzXOG(D>s%
zw+=`I;m#=>ieQSQ8dNF|?Yh_8K9hnm)$`X+m4Lu~sm;S3!xf_N)-f7z>{)(=$vsnN
z6nOJoyi0TB4?s7{3-s-N<#PJac!L6}H`pH&xs`;`d<`{4tg<pn)OVKnmqUZU=}2Ta
zR`TLs@hZCC5{4D~dGe(srTaP-`?=9J-Jvms9G=bT=F4iR8BJTa%Ip|p_!UKq%2yw{
zpb(pl(%+6eo)*q0j17&S`S}wABvG1AUw9^A6e>Tex{qB?$odlm6QiM9bT{bv+y|;|
z;W2`zF-g^WaAIzJgqYDrzbA9_2@1)ub6*xq!GEyVmpcGVgf@cVO<<weCw%ydmHAVR
zgaYf)3<5!-L3+$YZ}hq!ywT&~NU6=p9234VZm%bEa1R+BqN+dQj?o9$-t!ogbN?<m
zIb>TBu}34dP~O1yIp<ZRGs$~(;OCre>vvKgI$2hl;JDF6>;dEuO~8Odhy#R}<QhCO
z3DGPlAi8<m2jzKQmZqNist-hX6=O<d(_Bet-ph1`QDDBsm!CEc5fgro6hQhv;%47O
za|a7{eQ}?F({5=7`EyAa>oTMauxksv(e36LvWAtWGQ%o`k<kl$*0Z)|6QS>I;aPYC
zW9jRe@#!0?67}j?qSx?k*#cAq1xW{FmLD{(9yH#?RtKmzuQcUGk-V;jh05s<rzK@N
zvn0tr|LDD7q%rd7rGz5YooWwGWCvu9+DpSEQC%S}(iu3Q4wNV@Bo)~>H`Kwg<EmSr
zu2!Y%KY3zjsNh+v>)2}FxoN{df`=3v7!K7rfXs8H!QO37H8G|Vbh-(LmxfIvSHHh0
zF{^eDbdZzR4U;~iYWQF$$pFuKt|H=BOXM}z%0pm`D+aS*CRO`6V4Oo;c=)B#Rmi>|
z>#%aAF!0%TAyW&Z!=9hzGd5Gu=z_Lvr-X#^v2<TL)Mxu1=-&B~CvrV=kn~H`6Rr|;
zHV=9fgr85Y6}q!lfk9np{Z+a$5kM9@Ng?_WzKPUeWvwUiNNEcF;hM9a9ME`X9CYKP
z81}b966776ZCdPxpEIJhEeNp5_D^O<p<Qw5uDL&zYOp(|A%BMDNLgXz`Ym%tHS=}a
zANvq)6m2A>iO9A|0u);YqAF$wzi|n5#T$E?sRhHXsK^9N6q-px!l;(3C+{)+yde}_
z*uMUCWodYv72ZTeovA(rEvXt8QoM4S*qnh+ag@sp0d%{K-V<EQd1t5m6IWw$ez9C1
zG~lEdk%SaT$i(8m<o$D_SG)>h!Oa+4KhPrcOfZ=#*=BFk=_>%dgyRQ4ZYX5YTTtj)
zBvHIF$=KvDotpXFa{OsoIQw0x_X6E@I9!^;-b1l+V03gNMF?2ujcD+0idB7RGgN4&
zx2*qlNgN<;SvVt#w-@1ds{P#z9=s?ItGJdJ$`veFtUcn7!B+YSG`{seeT<HJN=GLN
z9=c8WK=2z)?fVK0K(URJD;SWD@Yl#Ei}=84X*m>;>pI<bZ(42mwqFvmly}~-^^aUV
zvtsn1Tr)3$TpisM=Q7KM#U{iiQKnH-s#6?@U_dIu-zJ~4k=GnU&d7TFlh@41#JE|H
zL7ITTH#z+G!$(??1uD*{FTT=oPA+`$dMY?8Q`2~4y<Z+u0II{3*dz8`bqX3^KvAMh
z(AVJB?@tWkgEgNUvKTo8cNT=cW>1LOGN?MhHJ*FblFQR8t{D9QQ}g0!9>!4EgP3o)
zb3FYf297_<L288%(uG(moE`MV@i7!e8~o28S%5AURj@;_aH7empID`f?l5P|*XL#!
z71Nzbe4(S>kM9|E^{^5GQZ?B2?%cZ{EEoGb08P97UE0G3<Ldd-I9Oa*$S8*3b`0K?
z8`PFV$JlFbn`?RJDq`}vn<fO}QlU0tnd3XcQ)4i@MeajY`?Wx=uJGhrs&wJHbcoKd
z@q$k2Sn5j^8v%klKyz?2UQ;^OjDp1WR%~q362kBn#A*>5?L^I^1Ig!6dfD188XWiV
z)I%EY(aOvn;RO<_a49_fas5(HheFqL(Hq`%d5EdPjQ}4pZ<&fC4<WpVRlbH<-*PZp
zmdojUYCH<LrbQe3o^=*0S7IJQ82nHG>!H%2D>&^V^<4wr%D@IsM5Zf){WjqjP#D3X
z4W@9``R+3$Q@%al6C@@k#9AIwgUqRygX}x$!*>X`bvWgG@|@fhlWdG=Gxrlnz%A00
zXhQu#`ZoRG`(b^T*2qbXfz?Q=q=}4=hNP-X=oaJ<Rc(ZE=JULfRRp-7FB<$W8#Ibp
zs7RmkSw&*28?r=Ro5UdK>l2Y9@vo6<O(s#s-x6D~*wMq|>QM};!uPQ^n<?#MPN?NW
zde7G!T=3u;tmBh!)f9Sl1X&I_*Gf0)<P++LySbq{#|}@p;fvhn@T{sYXWS<|80@X1
z(0hGM(s2m|pIlKHAzRUtdfc2r0#eqgbCJlk<;O>e*}9oj`Y_Au=+sFl*)Bb%IY?x)
z(pro_E>TK?8+}U=f`8-~?XQWf>i=RUK>7p^D!e5&k^Geo9L@lTXFaLPLx@JY67u2s
zU&8z{E7I>*x^(E>V7ikkt6!XmEK{|05B7qT)nMgnH-=lvVD8Wj%~r~-WV|NsO?kP|
zc&d>7oMS<Gmq3h~p$JX0CV`CdeJh*~`d3-Mmve0aHQ>~v4c=*a2>;zdi_ycZC*v5S
zsR6~x7X)KOt%UKO3P8Ex|BU>~an2|MJaF%Kyokq<M+PDbz-Mu{#qeD>RAabGD;BwC
z$QE0?F26j%DRWvdj(IZm9u8oeuE_)N@d;qdjWIT%Mj-u2&(F!vo|}$h%0hOJYp|h4
zJ!6_|4<GJnYHH>il?ev)yMg$Aw;J3&y7aHY;fCdG83_@AND)nZC8IcT>9AUk3!(=>
z$L+3|7!<Fnr;kj*CHxa>v@lRjvOr4A^_Xp(ByNM(f`AEw9$*O4JXQa*XydMIRzt1y
z5lf0k^aIdP=du->#%(Ru(Q*c%np)Rd@!ESi$T4taOk_cRl6WmdiZsccWAoMdtRQ`h
zO#KaTEJTK9z;=L;_z%~fjHWY@MCXEX1d=!R4<-LvVVco})=(I`yCIf%?qErR9qw&z
zbT}hyf(h+u&PK$&F36t)-5WaX<DFQZH<&nLv9m{#3`sT3ezVmW**is_tX7NNc;1Q=
zuUVc?o>(QKECLbgjNt$Qe1*zLwvX6;uGiRj*vEDMz>1La^S-$#BsL_CDiU?KQ)0N%
z8Rbw^c4kk4c8m1g#lL6N^$spSKi^RuX1`v}SCs#HpPu5d)Qa!0JT)}f|9;rxHO?H<
z{zYtQ38dep%Rb`4kRS(UW+IoQX<t5@N)qz!+jr9LCCUp}k|emou-906p&nK{aa?U!
z+=Bv+=BEl)(SQfCF9rQ22Py@#*9|acqqKtQ{h1YyE<aU{rN7+epIdKT^xy1mM3u`5
zKZan)=l!0NRJHj~hn(&K#(RFK^2RNK9Zbg#KY5L&M?}m4v)1;U6CeqgM5Itjb37f5
zHcQVWaJT93p~CSn?16+FjcdkSr!&<X%3w5vUa>9HDWPPc7$ymYm3KFCm5VTva75HR
zX?IF_6cPV62sq$OgBFR=dHK@RQG#dGJ*8mS8CAr%;a}*QZ)}-GZZ7qETbjG?W?QJZ
zFtOOjn$#@PmqStciaQ2X?>1oa`i7^`IB7i1YnhLNqI_Dz@6;VO0m}Sej|P2Zf~R+?
z@7+(l+WM22)Yqif+?oVUD)_<~vg^n+9zMlEeQovJawACro$u75np#@F<*Od0kk$hu
z++IfW*AG5eP?ozdFD<5U^tEKA7RC&P33qe2!0B{Z;OGeJWMF%e_}k81e4qWT*M=J<
zWZJ#o4-ZM2^haE4umwqyA13%?cm#}CrVkoU7o7qR&ePesIVzs79<!*F^OOyb9x^*A
z99_Eh;dHuhp8gq-3%QT|aQ{>rVuB`<kePVR+j>wd{;NaCfT!S%eeOT3B;q)Sl32FB
z5T<oHPxW%;2PX`r$XXVU3@A`ae<4Rb@ydjH{7XAaoXJu!Q-G2mkSxLz9RA&_>Oa;i
z8b3P9L!6lA8+`<)GsPC+B7j}>cJi^-$qUqZ`(r`M!<wQkSVCULl!WZQz+ej!B;gal
zHv&(C$U_45Is@#&xG$dNM-1e9#C#Eq1343@EvSO|$U_jnohwsf>P|PvGHAse091^+
z`9v=Eb4!VR)54QxYwa2W<?^Hcv%TRp>6CTq>Tk&%Oe$~({x(oJnKxkv8gw`VW5()Y
z8M3ZxkGFd)iho^EUo4~*w<j`0h<|mahVQKMHC%n9=VL2t<!WrhGbC4F=oIB^Hg=`)
z+f6bh%EL0Hpj@h<>ffD@M}GbY3yAtX1q@PTjS;yH$7idS%f6B0=|ovB;BRux1dtO3
zzY|Pu#QuSfpDFNo(nRK9CeO-ewUrxBX*e}AJUjkuYR2q3D0{|XV}^0Bk)$<b;4=Fq
z#LC&i^pp1PB76^bP*U=|#W_Q91t#xkHD8(_SQHDIMn*Hm)?{_vOkPNQ_Vo<$Dmf(r
zhq?eXgIrMy6^fo(`2_l`OpvUYco)N?LzU<rq;S-)7<jbG%+9VSmz?isL*y`Y%~+;y
zj`*?Zo$xn^F|R}(k6PDPOh4*q@0I;}zhUrmk(pjKtslMq)8K=o2NcE?+{*PIWVVbj
zR&W~|04^v%?V96gbGnw^uyVH1;pwpeobSlH-TPl@$Nh8%$A{lel<Cscua~vFrTt63
zsx5*ug`3Sfb@SgH3WxuSGgRRwP6)r<Ko~Vl46`}yFQN)Po!sbYs2nY++NC4(eWD9S
z)-0DRl{X2}-l2rldUCZCOVIL_A1e~P7s}es#r%_ZHc=qP-D_Ks;!ckU^`vP&VYyRm
zF>#cPS33CdH}K32W|Mtvkh91-^`Gf!6ANC~V3QLiX(q&8<3#}m1%Ie_U#u!Y5_e?}
z08;e6Q!{SC4+rJs1r6xIdt0pF$16>Rfw;Ae=W`1I5!OWYT`GE~=y+K?q4a8=D=K?i
zfpJ>i27{e4REIwP`0T4S*>ImoRW<e<2!=uxU*G0it|(q#SwN%JMRJON*5!G%4~e5X
z&yH7Gdl4b7iK+z%|Nras5|d3ZTKgEwwrr>*_lsSQ?O8>#xz%KOnhjd|kZU-e);qm7
zn&N06<g>5!&9&$T<vKT_kVnzm*tz|Zw^ctaV3NY+Y~^JAKIf@S=w`0i>nR)7X^a4%
zW%DZVe?;C<m5_9X_BvuO;_2I@y>x2z_w*{?=h&D>4sj?{KV#53p?5m1<ad;4_iX>v
z6LS6~MfbWse#UiM|LvECc|W%GVL2D*tom!^0YaedP9S55>Zl-8GS7kR@-#oP0(~DO
zMzWAUkX33xcG-P>lyY_0p>W?|-DpCr-zV_tt^f3S>lR-?@KNP~83de(#FFMEJ;_BI
z<vUH{yv*DCKnTzxNBiZ-uTnGC>@2f)agGh)R}6FpG>+5SB%j&sp(O1K@7yv7Ma@y@
z#o5z8x#pYDRK#p%r7c|=av+eVP3BR+5+T#~3H35-2c!}7L(|dh5TAr`zoe%unv$8+
z4|tNuQu2}%f?$56)JA7Bu$<f?GVkBTaI(wMt*lCxoMoFV-{f7gP%*xoDlGpIcYZWl
zG#LVLIZ5qF*qdGw#%J*9=v(BJ#7E%0D9ULp(c%J7AmmO{3X=#&-SB?iIqwTei|(_h
zz<{0GogHCn>@$qfdO)OH>>XU896_p77i-r;osqJ<B3Ma9IzU6eZ0hUYQSOn2yoI}5
z)#5~?;#=YqF6LUndSq(}Fv8WBooH1?LsaLpmHk=XQpL*1BC6}rr3Z&(l4uTXx8ri^
zjm6wqDfsoGLy3?CLy(n<cxRN9#00;(rH2<Wa{#rs&;5V4dG+I~5rf2{jPHzDm?GZl
zZ|pB!c@tk9{*?{&kxdF6CEF>COf;T?5;Eg@0WV&vo}~;(23D+Hw~#k>XX53aGp6|w
zUSBThIUeM$LT5{r%0#qJMotbwJpi>dQPX(*EK0ZooP89tP@>cA#LGg-pS(<z`k}^v
zTR)ha%Sus681wc|r6Nl5&@)dhOS@+dO>BIaBB!daN<2=))YV>e5n0rtT8>g^21axc
zza}*mWY;!oUp*Vh6#KU<ba*zJ8)Sq0rZ(!ST~mH@uE>oAgDu<?sWAu{YBi8mhQAPa
zPVl9iTcA*`>OEg&F|+a10C><!c>}W+%G%u9OQq*~GJEpg?ap5bcea5Ee1cSIb+@!M
z?p>3&P+n<S9zu}uV<6rq@I6lEz&1Xzr5?Q2FRwu%ot);x@|hh1(Z4*)XV%)$`E?TA
z^+R4M!ZeMj?n*}V!++f98!Gkpo!!QhyZ8*!!u7u$#^bbYH1>Nt?PvPQ`=3?QKj#2J
z#0;X#!sD+?#x%BPIQnd*X;$LGqS-krUzKdNT$a0D_s?oe)IQSF3~onj9#v!`#vn%N
z3^EJt?f>K>7o_FAzJ@G$%;^GMh+p=I7*i##k#t@C4P*^J@w&D5*O3F0B&tW({~%&2
zsf2f88kb(sDwnn8g8E_xmup=AOX$AezQI4uZ#t1~43r5JuRa9yrII>qP*SuLw4%!J
zz4VhvX}jzrzpOePmkAJvk8+%RRZ^UPz1Cxv%{oh~n#X8PiNr@CV-o*JfT{IDiM4k(
zoAz<eKm<aqZ<3XkeZIZxg2FrS3e@YL`1f*4MwEk8g7M#nRd_J>ZT&~1<tIn3sKvPF
zv%c`A<j}t3_KpyBXBH!esl*a~>10Q?#g?3&w{6+IOZm|Dwf7_}3v3HUX_U)O#(sa~
z!oDAR+Z`P_quz1(n4=a)0brEwkBP|7e4VMyJI@$zuA}qJ5=$tVbVxYcg&tEY<ywGb
zmi@GLI6KcOWdJ7Jh-dV{tq6jR0u_Uspk>g>X&p41xs}i6$Yp)oRiR7#u9wDa*7H>-
zX}MmZ<^HFF<NV4r5peI9w4qd|dNlSPe(W4ss>W<i0{L2p9~jE$Zl~Y7Ts+R^zLha^
zahxx|0i539==!eARL6%!bK@fk3|q#LLv*o6(widF*$w8?lTJ2nd|6^F!tsDQRLZ11
z<wZ+`onhI7dSt9?4o|n6oXQuQPz*NgBZvRe^3ep6K3^$aY1H@=?jgmH&S<TY#4qpF
z>GfqI)4XU|Z&O}_qD{bk@yg|{0a=>@I~8<c2Y^Rs^h2OR?S_S~*uMRzSxg23nF%d>
z=3S?+U&Z$*P#h#97!yo8S~Kc6KNHJKmZqm7qZP~-ACAoJ>l;6`obT@fc|t_Lnf!3G
zvJIIkhKb<6h++)vt}|DOzR5UZmG6T5!Ai8^LgK5$gk_>~Jrybyb0cr1mRFMLzByJl
zYELAImcL8gI=a+N7U2GXo3HPe3roNt;7!9F0MH9eKS)5<)H>C{?qR@5pKvU{$@B5<
z)h)maCBnwrj7EiPlKsP~cWz|3Ml(Ah&rolxcj)O<yKuc%CPa}^6m}nL->)egycdfk
zu^hO$pVz2y=^^^MF_VU+i$$vRSMUyZu#$OtfweP=c1d#*$MvrqI(S#)dDjBA!Aq)+
zmruhafv|f+)78FK3cmF7p|e=Gcq39?edpw$>vzP#_#R<7xo}fIuW+5OwH&vb+6Rc)
zpYd)fb8l%QX~E4uHc5#@K52iXU-r4WAQ~(7hJjs}UeR!(xa`7gy0w2s3T(R~2@;Hv
z1<eMm+P-reP&SH+D*n}ZNv)LcLz4j~`psT`Kj}-;lJDJMLSKG@0^GKmgxqb|#YVSB
zHj~R<BrA{s20JI<fJbk2SGKo#&1*D3bAZgIXN3w!p;CQJb|+()e1z3&+SG7(a4#ZY
zi4cAUexFXcf?m?Fkl$&C7riuGXWJh&y@eU#6yB;BQ{Kn6p{;6RviL8Lp+d(h!}0FM
zm|&ZLz0vkw#Kqk+Eu|vUd|Nok2KuB+%f!t_Puz*Gs&jprl4#5UIaLBf5Sm12LYal<
zE(8Y``bC)A<=jEP_ub=%0)7FLaur^H`Di9Ct>|t^syy8|ImAyEOhSd$2pxB+N_k4)
z*AdCeIXfdL+eRipchcM2f&8`sdLP#*T9ZhZjNMiy?7?UgeB}OUI)i1F?Vy|+xQ(zn
zPrvsbdTV;&ZcOg28Mfh$Fj9e&LkDOMbZ9x-#4SJ1wp|y^Lt`2t4>3EsWX>)i+s&!&
zmwRtv!VOpQ`J?w|DDORv()XxZ#s^|>!gc--49Q@qU}FiDsE>JTQ2M~crEvCj2?5Ub
zQXxn#V(ivLXjvLE`E1^MA*`?aTqI2Eg?_gYy5l(_h*0TZe|63?P5DymVdo?u)}!gs
zgzb$lcfiuG@rz7b-V6V>?1AL=A{Omcu$#Y=a6#XyTnOXLjzZOW>wX;j5V}$6NMI(#
zQ?7=aKY|L|v|fDbGh!kyW%+<!S?&~7%GX=jB7mWh63P3oXRm1E_G17Y?gbGM@!0jZ
z0NsE)?=ld^s+1nh>cd)(Csx&E4p!;<Qd3Urc`t6vhpjSCE6?QE5?~p3%SYw*jcF2p
z`x3UdVex%jPeGKCZrvHhLL_|8=+M*1y=kRsnwVnOd%gdnx2&x$cr}}|csfxOTblj*
z*96ctYf4&L{Sa{UE#(S=eyF$pGTk?@T=r@$A(d>3R{s@#x9!Fo<}im73lqSgqo9&4
zBkkzT^{ncwyHjAueZ}y!rDNqqy#gX>hdH2WG1-R>^-mn6rs>4fVdj_Ql!xOtUBhdc
zw1_PS*?#X=j5F*QWSDaZiz|+>_J(UaMCLwxDkk`Ad$N?d>`_kGQ%C;f<FEiZp=Mia
z-_^g9caVdQkL;?D!ozG|s7YOj+4<Tp&TUU&&}FiE{r(RQzp9oLFCNAj+fyX-6&x67
zoW#!95&vX~P^aS8b&#hW#p!Xckm_u6ar9z~RQMfP)QBpPOiBa0e}1L5Y^AK(q29`Q
z|H&Ry3rSXdx%Pv>{)8(i9HINpK#hV`Xtk2fv|`=tO(0FYCg5{OA4oIZ`_lHs_P_Yo
zrG5R=SNNrWdu_FJVIr)L?R%b|^k2+wS0#%Z|H~Whfvz$26cn*!#rrQCJ~LH1ySluR
Wz1WyJE+u>pLZqW%pkAqJAN4<3s|e5l
literal 4959
zcmZ9Qc|26n`^T?!hOvx&4Pz%{EZLPYvTq^E5@pRcGGvX$AW~VT?0YC8CHu}WL)I*j
zCA+d^OJw_v@Avn|@AZ4V?z!iAo%_7*bMCpH^W6J>;>=8R>1nxX0RW)a*F)YU^_YtW
zrY4=^DWIpMj{2UVE)p04)_~)Bx;9qQ2n|Z_wm$&K2wgO?prUJmq(Kl`-&hB<LJMYK
z;H4PWY#<GBp>=G~+P>c2E<R{L`=N^y+QnJmkq6pcKv&<`%=!rf2LLb=^pRSYLEks+
zJuEHVG9I`5p0%~o`xwP(T(A&BWF})#9Om3~bQ_b(#@9SAhwGar%cpFq%$%?GJzQBD
z^t%0dF=*wD8muRw+g(o51m|ccnE8Hz9D~pmFQqMqR8NPt`aZ@95GBoY%)<s5UuG3-
zH8&0V|Ja-Yuky}6rHyUK_RAqC-|kS+r+)ehbx%+Lhha7VgMsr_00aWHDT62$d9(aF
z!U1cV`EuMgw}GD4t)>7|<?qk1QBcY33S(8F5ArwUXi7fPCW(YRo7<tLi0uxDVj0(x
z_<)l*SC*HNybE?BH(Rz$LC4rr3wa955G0X;EJy0xN_ATM3~{@N^o9Ne<uYS1%xvjV
zouES0hFd~~oOYam0PHA>m)F7F$6%61XMsolUg0zXycp@v@GZ1K>rt9xd?C!>%*+W7
z%C&esu?1QvJQKjm(&8q^?oTJ)%8V0?)|OjY&d?Gr6jy6+xCM5i6!*6NB4P1KRCNm^
zm+NR*M%4c-!-n9r0lIJ!D)W+&T9oD)Dem^7SRik@V(WfPQ@fFKyaf2lH?CLvA5uka
za&warZ&7cj6r);vpGc6CZ?~=95<VC}D!vy*f{QjEl&9OWL;iubp?H5yvEdB%@e6NX
z57kp^2wVcQ3u~hI>k;2K+aF*1m@^PTnvH2@U9bunlhI3nmsfK^BZU;4=_*3}V}PoZ
zEP*COH$^QdyIwzO=Shp{b@@LAC7u=@nYJ8)oEoIduWITqMn>MScBnM|V;V8ajW%>c
z2|9_!;}u5SRyWpkQzR8giy<WH+QY*7;#%0KMPjz2J^$`S;Aj2Q$(O|;?s2!}W-s;l
zu^~Jf@3^eIMr&D_;mxvB-21`xyjo8Mh`|)KZ&GW@tY9Ko+xhEH9q-}Ic$pF6Rb{$J
z6WRQGL}`*GY6-rGR-l>|l$Ivq`@U%TZM4}hv^OUI<i-$GP!{(iq3D;wT5100{_<z8
z=1;Ad?c^U8>k_s0z#=s!u~04W3Iv&C;FbL%51jwmUPHQ@0l~qZwrDUlHbTaRh}I7O
zg75zlU9YVkytJ~+#_*>+av3b*ZLbM`=lrm(GyKlhzDKd&-~YS-XuB{i6aEdZrmT8V
z5=&CIeIGmv+apvfRY7`h1Zf4_L_-7KYf+zDaL#{K)Hw61>q|2q>%TNiMk|sXtmY*1
z`E77tq7vBO#3uo(t!jj^QMa-dh_<S@?yNd3zMLp*QM?3}j{(IjCNs>__m=cxM&AL^
zdT&14OSgK$%!-|9_M)?`i4B)w7eegd!IoH)mWyyhiqc1~EPAqoCCYEgl(hFM{^Ftj
z%GS_$^uT<GuMO-c^$e_!ZI<)tqNempDT6iTHz|9|_cjckvM6YmeEHw;h;Vg`YvL(_
z(jqSectWzGVyL@+N;(xwEU<0OHRyt^OcZ<Qbm(M^U%g>6K)$jtUK69tc1oS-cV3H(
zyzVwJW(p>4KWuO@dx-z65M|t#j~xmYkY<&V$cV9IcL@+9-%Akb(9C^=$km21|8lq_
za=b^e+n~SA!s?z86LD4&0RU2Vl|bwCrvOB*uG>-oaP+AaCy?IW;MZ7A&oS_=puC#x
zTSjKS2X}HZv)}oKicKX7<~q>8hy|~*HpzV*Y^DRSBNNv-=<Mz7m2X=<O(`+?bKF-{
z>R$KtX-5a5FE<rK_;&5d64zhwYmB)DihD|kGMY$s$ypA4DYTWSd;03~Stbic2f`sB
zAwh%dMJa#xYuO@4+Y^@mQ7demfUh*~%iSD#4K60n5j(6;z0A87Nb@>!_Wj#!o0njA
z8JkG4+{e@({dOMVP51|1y`CGI?{rMiLdMQTV)8ojeNwqrgP)*5q}hq9`jG=rE*1L0
z=0gY)xu5I$L0nYIwuM<@k7MqNbid7Ko1mz?Wtyzjo`jUhJJU|J`Jq_(fZ+l%ogp5Y
zIDI`mBjycCE3h-oAO06y%KHv_U0fWu7`0F)$u5yL6u~KnhuEC++z(})gQ{w9X}O1^
ziig+EPJfUA4&ecpZ?0Sc06XsoNMjeO3Wcj3%MW32I2nYaNKiwF#jknm8fO-R8aEHO
zS;P_Zcdx7H>7UoVjHFijGh;WVUGy??)C=6c|6BJ?%amgTP(}HCU2Z0Y^Sx|AO%6>B
z7k8KD-1)Kga0b7Xt>)Jmz><_Svi*-IB6_0ky0@X$d%1Z$EAcD*>w~VW$*SRrQOa6E
z)cKJdzv;DO-USxsZnV8sfR>g0;TF*eXKlHEv~kBDQlVHocet}SvAsd<?82yC)LQ;W
z)r39#Wqj$`6kE-tmwVMDs-}*UN680yV|?4qFL>I1E^G1doNa$er}pksd?U1pF|_rB
zSIJIEOQLI~-<DjQJQ8&;nMSY8T27<NNl5c#E}S#wNzCQvhkqlEIn<jTityd}$UF=$
z<XI5Ea=B|>J9cO}P)Oz~yJ4z~jwPCIW7GR>tKG}oJGSkdoz};#7?(Sg>_x?Y_Q?4k
zZ$BO!ta2Sdt}R&N@%WDQoxFGNn8p;VW$7qF|8D7og^|0?JUW*}Y|jx!#LUqPlwg=m
zRt9aEBD1%*_tO_~T=|(R%DbCN?p_VFK+vzERN1}RWAZ6OAYYD(J}CcnVj9+as%G)o
z;NJXAE1<2%q6D=&D&c&^K7J$1uCL+uS>u|xgNGNU%c~o5r72Q`D?M*NaI@;bFQ#CT
zV0IV|1Ll4vb*8mCG70}W_>J!pbL`q(Mk#Luq5Ho-?sljN6JfW)-Tyt?3`DZ%L<hO-
zm1%2QcpEFqC@DA&Y)noZo<L3wmXhWO7T5CLyr%;aQ&VF{Kezq9Z=e-t6lGcYBlO&>
z>1cfFaA%b9aDM4sjzPiuCSI52<vO<;a(uRp40{~Kn6LBMmPVEeHJcOgypIw^HdR}0
zm6LbOEkgM=13_yKJS&9@eZ|7<X9r<lqI?mrld4&}+y)ocsy*K}qL^g986$Oc82=ro
zuC2p`f>j;PmRFq03dvd{@)=@Z9{wG$dz~4@#t3rj;1m%CZ{=~k9~XcBC6v7Nc<RUf
za0fm?Azz;LlJ)Y#jjF*)x4$yVmk#DrhOl)`Kk7jI4dJu{T@8Dun*0WoGkW%6p%IA#
zwzFR1?;{r8naAakq}Ot?>kqV@1WVYQ<43f3{9(XPWS>EN{EO~*-CK*bt;ZS;!OLuY
z87ft)RVyp(Cw{BC?#*W-X}?E8n+mG`{Ikbd@Mf3BkFQ_T3aIyS+g0*qIBMqV83`?o
zX*3SoyLQT=V65w9M3)n><3cpp4wMiSNQ6I0WTSfL@yq6O5RJ^;rpPEzOSf?<#OEal
z#JE8?_%;i?y7A-hXB(+R7p{hi!m)9NPT7A;G|icpHm~w<e;6$!Y4Fb<`kxOQK~ik7
z2qb>S^k`I({`l+|qO9g~*i~G*9imYv^HH~-3PeB-S_xwv+Y2l=g6>lXZk|B1v+dn|
zeA>r~Z}f3>@r<u`RNC~JSzXHVKWuV9PJevXSM)4ETvGjoXTP+@Sm!4fMnM%7F8Iff
zb>Byy3Q<u#`SlY{K^5Dg)A{NK6p<$`7p6%c+oJK*xb-{t=b^TC*q$w1kQI7~<=I#n
zUsrx-EC2+C9;adF_CoSYvo)?|_N_Pw%krKb00Zo)kx)$aOO2R5(5K_Eb(0c#$B5sg
z{0z_oksNnVsOYo_E#b$gov$vI);T=pzwd=%0b+vx5R`k((5OLUU}$(4UhG+Kr*w?}
zJ?oUew851nEwl58vi2;*E5|K?7<%F-)kCDbPq*w+RQGiP>&w80&#K>pvR%5geJnqq
z#YL_Lw5jl$vkg7ZRPvcNku1Nz{`lM2`2I<R--ltN0hN~4vHK*U?_%TU@<IG~k|O@%
znoTwT&;f1hd-Fe&&gpyQWo1oNlp#yTCg)~Y#%qU+b`54q1>+BH-`3Ba?R1ny-~VYe
z9l%0>oH`pOV?m#)LN)yxXMS#M>?$?Ja6PLFE);UCNl#M06nrh>lc`K1PMyM&Ka>tI
zyKVLSSwJ-z2RX<M$Fc<(rS~fTW{%nyS1=}&<eqVbw)PSJ<ep)k90YElR8ezf%^Lc)
zK7m(V2eK4@R)4svw76Xh8k(GJt$8#twX<q^25G1`_B?J<$Kz*ToHDUP1f|Y;lv8Ey
z@>NRh*UcPO%t2{i@X_0uuwJ6@h;-=Qef3g6X8cFUHPoCZIv{}R78rZ%99agCe;SpR
z2&R5q?E=vp9E`14e_L9iWfefrys(&*EXOenhi}(uR8D%;1^v32tF*i$meYY6!3~@Q
zv5OSB5c`O2eYdLw^yThU*z33iu!U)sm(UUi!Yh5@S`weCs{BaFFDP7dWAap2{nG=s
zg+-P;PwqQ+?wHv<WGCsCsCO4rx|Wd_14)@oaLWm2&kft9v8-l^{=qia<93z$CT*z^
z*q2JT%@xykw-Ow2s?^%W_=BG?$=o_stHxWYMy+|vajaCg^4_v!TByq*Mc@g$G@in+
zYNuZb6#9Mik)Vb&y{T`IPuoggq35^!q9Us2#>S{X^xRx~)ampA>1zW`P2@zwfa|>{
z(Zt?9q>hUSNyY-w8WjF3)S{^{Y;7-zeNdEWXCYNlYE#WdCdLmAQQa{ib}eB{46!Vm
zo13!fMtVj@*A05r-xRqe1O+nR=OyKWG>u1mlD&rJ7WUEOHCORSf`H4G9m&D*U>eu{
zLp6o#gU{59h79h}@mqyQxAYnwjZ3|e)+cm~c9C*PmcN-nJ13-pb9}j+aMZB3eWbuU
z(aP`J@@Js(3eo*K%?H@(M#W~b(~+qW`F;+iobQ&M*W>{=WjBNNZqtpbh4N5N(I2dG
z-RX`fI|JPp?}OI)XaR2iVs;j=E!yAobeUouDw>}0b0z1W+MTAGY0eJ{GDB$rxn+Jx
zijgtNgG}Ip-xgzR(6Y<B6j5&7I?EKz2e+k2gn|!#VFBU$FP~`ekpaTJkZ6yKe@hcz
z0&P<dte_M3j=c?L(KKla=JLCh$&q23=Ki1!(x1)bR{e(s(~5Hx!^RaGOirY(Ya+~q
zTax0op$Cq0B+JhX=8V5lAHJ;;{jP&Y^DwLVk?U_UN+40B#x_l|2@WMt@X^?PUm5R`
zka5Zoe%Mb;=NtBYJRMnP9OukRlM*dPy%;#8-SK6r$;j&qkHb)1d#Z^N<KZr2!&#Pj
zIU-G_uQVE_d}&k&{6RLLsXdWz{x!xRnx?axhpuNik+1!(RuG-vy%XmTtmxPBGjh&s
z%2Mr?Ut8IWv!*#|Vmm`90TQDH+;(IJMHnN3{zDQxfmd>w>ce#I{RXF)m?YpDnSx1P
z-qxP|)1Pe80-2Yo{|kjzD-b|ra*a%GbQ-JEf<BbF&h$&RrZ-+pVa7@^kGNqEe<rct
z-kwj~m}xv3un)%C!WKTK+rEbQ$D;L=|3ipou){Jid{G5mhMkg6p^%}xPG=SSMvT+>
zY4Ef^R`Uo`;5%GzqsAjSR8OWeT$^xkT*!`awX@U|_Abd2Kni%MHCjtQr!HimpSd78
zqrPOZv^3?zw<Q83PJS%)jk5~sW5wL%>eIu9Gt!GTOD19I)$#R&XHcKG{N6t4Uzm)%
z_&ik-;lla8ao5f-XCXafQiDpVG*V0{N!aCZPn=1CN`%)rVO5b3-l1<&5Rm>dgqG6&
zi6I?9NDN#D1uh~vl;mU=49d2IlV^tnzNl6O2YpihPema^^jse;K;WdUa}|$oaghqg
z(6Awt@Duo-@b4d^62bJ31eGM@W)0Qd@X!Ndd;7ddj(j^*YY2<F9B0=q{CkRTYlO1D
zGl*<!ByB1D*e5nwTT@}02EOS{{EEVld=V|ut?N{K!<D?M$QX97EGNgVZS|_~peG9q
zL$e2NzJNfu_N=TG$8b>nz}q(w%?j=RPLP@eEF|B$PQ2KtCtcE0TG0n}qx$Q0g;>#Q
zXb4R~mYm3CJ1RdzfK4TCyeNO)4km{6`QK7Rtf74G7sV*O8|HzS0B>>4yF}W2o(lp*
zM{UWrv+Ba@vnVNI88u6!KF%=Wbx&cqT*am6q30wD#F98KVc5!5oJkm|LweHam10~r
zX@~3#%zVK@yDeBv6!qOETx37pSa`UBTxI#cHI-Sl3=?)E1K4yNsZ5YEKwM8qGV1Vn
zk8qYSbHYB+UTkQmS<k~+_u?XWiR}U~EWCgOKyF#9aFf?0NFlo?l9dJq7v*7@BT&B>
t;Jjx^&~6n@&egfT2m_h_UkqA5Co_+SJESY3=}2`iKwrlMS%GlG{15vgE&>1m
diff --git a/cmake/SparkAppimageConfig.cmake b/cmake/SparkAppimageConfig.cmake
index 45f4e25..7384b47 100644
--- a/cmake/SparkAppimageConfig.cmake
+++ b/cmake/SparkAppimageConfig.cmake
@@ -1,62 +1,99 @@
-# export PATH=/usr/lib/x86_64-linux-gnu/qt5/bin:$PATH
-# export LD_LIBRARY_PATH=/usr/lib/x86_64-linux-gnu:$LD_LIBRARY_PATH
-# export QT_PLUGIN_PATH=/usr/lib/x86_64-linux-gnu/qt5/plugins:$QT_PLUGIN_PATH
-# export QML2_IMPORT_PATH=/usr/lib/x86_64-linux-gnu/qt5/qml:$QML2_IMPORT_PATH
-
-# export PATH=/usr/lib/x86_64-linux-gnu/qt5/bin:$PATH
-# ~/linuxdeployqt-continuous-x86_64.AppImage spark-store-submitter -appimage
-# cd ..
-# ~/appimagetool-x86_64.AppImage appimage/
-
-# LINUXDEPLOYQT=/home/zinface/linuxdeployqt-continuous-x86_64.AppImage
-# APPIMAGETOOL=/home/zinface/appimagetool-x86_64.AppImage
-
-# if ()
-set(APPIMAGE_OUTPUT "${CMAKE_BINARY_DIR}/appimage")
-set(APPIMAGE_ICON "${APPIMAGE_OUTPUT}/default.png")
-set(APPIMAGE_DESTKOP "${APPIMAGE_OUTPUT}/default.desktop")
-# set(LINUXDEPLOYQT)
-# set(APPIMAGETOOL)
-
-function(execute_linuxdeploy _PATH)
- execute_process(COMMAND ${LINUXDEPLOYQT}
- WORKING_DIRECTORY "${APPIMAGE_OUTPUT}"
- )
-endfunction(execute_linuxdeploy _PATH)
-
-function(target_linuxdeploy)
- add_custom_target(linuxdeploy pwd
- BYPRODUCTS appimage
- COMMAND cp ../${PROJECT_NAME} .
- COMMAND "${LINUXDEPLOYQT}" ${PROJECT_NAME} -appimage -unsupported-allow-new-glibc -verbose=3 -no-strip|| true
- COMMAND cp ../spark-appimage.desktop default.desktop
- COMMAND cp ../spark-appimage.png default.png
- WORKING_DIRECTORY "${APPIMAGE_OUTPUT}")
-endfunction(target_linuxdeploy)
+# SparkAppimageConfig.cmake
+
+# 1. At usage time, prepare a default icon and, together with the desktop file
	# generated via SparkDesktopConfig.cmake, borrow the basic description in that
	# desktop file to produce the spark-appimage.desktop file;
	# add_appimage then performs all the conditional logic and target definitions
	# add_appimage_icon
	# add_appimage_desktop
	# add_appimage
+
+# 2. In add_appimage, we check whether the LINUXDEPLOYQT variable was provided to
	# cmake and resolve the tool's real path, then check the APPIMAGETOOL variable
	# and that tool's real path in the same way.
	# After that, a directory is created for the upcoming AppImage packaging.
+
	# target_linuxdeploy() is called to create the linuxdeploy target
	# target_appimage() is called to create the appimage target
+
+# 3. The linuxdeploy target essentially runs the linuxdeployqt command with the
	# -appimage flag to produce a content layout usable for AppImage packaging,
	# plus a few flags that handle the related library dependencies.
	# Besides that, spark-appimage.desktop is copied to default.desktop
	# and spark-appimage.png is copied to default.png.
+
+# 4. The appimage target essentially runs the appimagetool command to pack the
	# prepared directory layout into an AppImage executable; to avoid possibly
	# missing files, the generation of default.desktop and default.png is
	# repeated here.
	# This used to be a dependent copy-desktop-appimage target, executed first.
+
+# Requirements:
	# LINUXDEPLOYQT  external parameter, normally the path to the linuxdeployqt program
	# APPIMAGETOOL   external parameter, normally the path to the appimagetool program
+
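+# A minimal invocation sketch (the tool paths are illustrative placeholders):
+#   cmake -B build -DLINUXDEPLOYQT=~/linuxdeployqt-continuous-x86_64.AppImage \
+#                  -DAPPIMAGETOOL=~/appimagetool-x86_64.AppImage
+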
+option(USE_APPIMAGE_NEW_GLIBC "Allow a newer glibc version to be used during packaging" ON)
+
+set(APPIMAGE_OUTPUT "${CMAKE_BINARY_DIR}/appimage")
+set(APPIMAGE_OUTPUT_ICON "${APPIMAGE_OUTPUT}/default.png")
+set(APPIMAGE_OUTPUT_DESTKOP "${APPIMAGE_OUTPUT}/default.desktop")
+
+# 1. Add an icon file that the AppImage can use
+function(add_appimage_icon _icon)
+ if(CMAKE_VERSION VERSION_LESS 3.21)
+ message("> cmake version is less than 3.21")
+ configure_file(${_icon} ${APPIMAGE_OUTPUT_ICON} COPYONLY)
+ else()
+ file(MAKE_DIRECTORY ${APPIMAGE_OUTPUT})
+ file(COPY_FILE ${_icon} ${APPIMAGE_OUTPUT_ICON})
+ endif(CMAKE_VERSION VERSION_LESS 3.21)
+endfunction(add_appimage_icon _icon)
+
+# 2. Define the desktop entry content via the macro provided by SparkDesktopMacros.cmake,
	# generated together with its own desktop.in template
+function(add_appimage_desktop)
+ configure_file(cmake/spark-appimage.desktop.in.txt
+ ${APPIMAGE_OUTPUT_DESTKOP} @ONLY)
+endfunction(add_appimage_desktop)
+
+function(target_linuxdeploy _target)
+
+ if(USE_APPIMAGE_NEW_GLIBC)
+        message("Using new glibc")
+ add_custom_target(linuxdeploy pwd
+ BYPRODUCTS appimage
+ COMMAND "${LINUXDEPLOYQT}" $<TARGET_FILE:${_target}> -appimage -unsupported-allow-new-glibc -verbose=3 -no-strip || true
+ WORKING_DIRECTORY "${APPIMAGE_OUTPUT}")
+ else()
+        message("Not using new glibc")
+ add_custom_target(linuxdeploy pwd
+ BYPRODUCTS appimage
+ COMMAND "${LINUXDEPLOYQT}" $<TARGET_FILE:${_target}> -appimage -verbose=3 -no-strip || true
+ WORKING_DIRECTORY "${APPIMAGE_OUTPUT}")
+ endif(USE_APPIMAGE_NEW_GLIBC)
+
+endfunction(target_linuxdeploy _target)
function(target_appimage)
- add_custom_target(copy-desktop-appimage
- COMMAND cp ../spark-appimage.desktop default.desktop
- COMMAND cp ../spark-appimage.png default.png
- WORKING_DIRECTORY "${APPIMAGE_OUTPUT}")
add_custom_target(appimage pwd
COMMAND ${APPIMAGETOOL} ${APPIMAGE_OUTPUT}
- WORKING_DIRECTORY "${CMAKE_BINARY_DIR}"
- DEPENDS copy-desktop-appimage)
+ WORKING_DIRECTORY "${CMAKE_BINARY_DIR}")
endfunction(target_appimage)
-function(add_appimage)
+# 3. Add the AppImage build for a target; only one AppImage target can be built per project
+function(add_appimage_target _target)
# check linuxdeploy
if(NOT DEFINED LINUXDEPLOYQT)
message("AppImage> Not Found LINUXDEPLOYQT Variable!")
return()
endif(NOT DEFINED LINUXDEPLOYQT)
- if(CMAKE_VERSION VERSION_LESS 3.19 AND NOT EXISTS ${LINUXDEPLOYQT})
+ if(CMAKE_VERSION VERSION_LESS 3.19)
message("> cmake version is less than 3.19")
- message(WARNING "!Relative paths are not supported!")
+ if(CMAKE_VERSION VERSION_GREATER 3.4)
+ get_filename_component(LINUXDEPLOYQT_REAL_PATH ${LINUXDEPLOYQT} REALPATH)
+ else()
+ message("> cmake version is less than 3.4")
+ message(WARNING "!Relative paths are not supported!")
+ endif(CMAKE_VERSION VERSION_GREATER 3.4)
else()
file(REAL_PATH ${LINUXDEPLOYQT} LINUXDEPLOYQT_REAL_PATH)
- endif(CMAKE_VERSION VERSION_LESS 3.19 AND NOT EXISTS ${LINUXDEPLOYQT})
+ endif(CMAKE_VERSION VERSION_LESS 3.19)
message("AppImage> Found LINUXDEPLOYQT Variable: ${LINUXDEPLOYQT_REAL_PATH}")
# check appimagetool
@@ -64,47 +101,58 @@ function(add_appimage)
message("AppImage> Not Found APPIMAGETOOL Variable!")
return()
endif(NOT DEFINED APPIMAGETOOL)
- if(CMAKE_VERSION VERSION_LESS 3.19 AND NOT EXISTS ${LINUXDEPLOYQT})
+ if(CMAKE_VERSION VERSION_LESS 3.19)
# execute_process(COMMAND realpath ${APPIMAGETOOL} OUTPUT_VARIABLE APPIMAGETOOL_REAL_PATH)
message("> cmake version is less than 3.19")
- message(WARNING "!Relative paths are not supported!")
+ if(CMAKE_VERSION VERSION_GREATER 3.4)
+ get_filename_component(APPIMAGETOOL_REAL_PATH ${APPIMAGETOOL} REALPATH)
+ else()
+ message("> cmake version is less than 3.4")
+ message(WARNING "!Relative paths are not supported!")
+ endif(CMAKE_VERSION VERSION_GREATER 3.4)
else()
file(REAL_PATH ${APPIMAGETOOL} APPIMAGETOOL_REAL_PATH)
- endif(CMAKE_VERSION VERSION_LESS 3.19 AND NOT EXISTS ${LINUXDEPLOYQT})
- message("AppImage> Found APPIMAGETOOL Variable: ${LINUXDEPLOYQT_REAL_PATH}")
+ endif(CMAKE_VERSION VERSION_LESS 3.19)
+ message("AppImage> Found APPIMAGETOOL Variable: ${APPIMAGETOOL}")
# do add_custome_target
make_directory(${APPIMAGE_OUTPUT})
- target_linuxdeploy()
+ target_linuxdeploy(${_target})
target_appimage()
-endfunction(add_appimage)
-function(add_appimage_desktop)
- configure_file(cmake/spark-appimage.desktop.in
- ${CMAKE_BINARY_DIR}/spark-appimage.desktop @ONLY)
-endfunction(add_appimage_desktop)
+    # Reset the target's output directory
+ set_target_properties(${_target}
+ PROPERTIES
+ RUNTIME_OUTPUT_DIRECTORY "${APPIMAGE_OUTPUT}")
-function(add_appimage_icon _ICON_PATH)
- if(CMAKE_VERSION VERSION_LESS 3.21)
- message("> cmake version is less than 3.21")
- configure_file(${_ICON_PATH} ${CMAKE_BINARY_DIR}/spark-appimage.png COPYONLY)
- else()
- file(COPY_FILE ${_ICON_PATH} ${CMAKE_BINARY_DIR}/spark-appimage.png)
- endif(CMAKE_VERSION VERSION_LESS 3.21)
-endfunction(add_appimage_icon _ICON_PATH)
+    # Work around the issue that the AppRun symlink may not be generated
+    # when the -unsupported-allow-new-glibc flag is not used
+ if(NOT USE_APPIMAGE_NEW_GLIBC)
+ set_target_properties(${_target}
+ PROPERTIES
+ RUNTIME_OUTPUT_NAME "AppRun")
+ endif(NOT USE_APPIMAGE_NEW_GLIBC)
+endfunction(add_appimage_target _target)
-# If glibc>=2.27 you need to add the flag -unsupported-allow-new-glibc (meaning the result is no longer usable on older distributions)
-# or -unsupported-bundle-everything, which roughly means "try to be compatible"; in practice it simply failed on other distributions, possibly for distro-specific reasons, so the former is still recommended even though it gives up old releases
+# If glibc>=2.27, you need to add the flag -unsupported-allow-new-glibc (meaning the result is no longer usable on older distributions)
+# or -unsupported-bundle-everything
+# which roughly means "try to be compatible"; in practice it simply failed on other distributions, possibly for distro-specific reasons, so the former is still recommended even though it gives up old releases
 # -unsupported-bundle-everything
-	# Bundles all dependent libraries, including the ld-linux.so loader and glibc. This allows applications built on newer systems to run on older target systems, but it is not recommended, since it inflates the bundle beyond the required size (and it may still fail on other distributions)
+	# Bundles all dependent libraries, including the ld-linux.so loader and glibc.
+	# This allows applications built on newer systems to run on older target systems,
+	# but it is not recommended, since it inflates the bundle beyond the required size (and it may still fail on other distributions)
 # -unsupported-allow-new-glibc
-	# Allows linuxdeployqt to run on distributions newer than the oldest still-supported Ubuntu LTS. This leaves the AppImage unable to run on all still-supported distributions (neither recommended, nor tested, nor supported)
+	# Allows linuxdeployqt to run on distributions newer than the oldest still-supported Ubuntu LTS.
+	# This leaves the AppImage unable to run on all still-supported distributions (neither recommended, nor tested, nor supported)
-# ./linuxdeployqt-7-x86_64.AppImage <app-dir>/<app> -appimage -unsupported-allow-new-glibc
-# ./linuxdeployqt-7-x86_64.AppImage <app-dir>/<app> -appimage -unsupported-bundle-everything
+# Using linuxdeployqt
+# ./linuxdeployqt-7-x86_64.AppImage
	# <app-dir>/<app> -appimage -unsupported-allow-new-glibc
+# ./linuxdeployqt-7-x86_64.AppImage
	# <app-dir>/<app> -appimage -unsupported-bundle-everything
@@ -113,20 +161,32 @@ endfunction(add_appimage_icon _ICON_PATH)
 # include(cmake/SparkAppimageConfig.cmake) # import the AppImage build from the Spark build system
 # add_appimage_icon(assets/spark.png) # the default icon added into the AppImage
 # add_appimage_desktop() # the default desktop entry added into the AppImage (uses the information configured by the Spark desktop build; spark-desktop is required)
-# add_appimage() # apply the AppImage build
+# add_appimage_target(${PROJECT_NAME}) # the default target added into the AppImage; applies the AppImage build
-# 2. Build the AppImage via Makefile build targets
-# -- the AppImage build flow --
+# 2. Build the AppImage via Makefile build targets -- the AppImage build flow --
 # Building the AppImage from Makefile targets (requires absolute paths to the tools; then the linuxdeployqt and genrate-appimage targets can be run in order)
 # linuxdeployqt, from https://github.com/probonopd/linuxdeployqt
 # appimagetool, from https://github.com/AppImage/AppImageKit
-# LINUXDEPLOYQT := "/home/zinface/Downloads/linuxdeployqt-continuous-x86_64.AppImage"
-# APPIMAGETOOL := "/home/zinface/Downloads/appimagetool-x86_64.AppImage"
+# the tools stored and hosted at https://gitlink.org.cn/zinface/bundle-linuxdeployqt.git
+
+# or specify where you want the cloned project to be stored
+# BUNDLE_LINUXDEPLOYQT := $(shell pwd)/build/bundle-linuxdeployqt
+
+# download-bundle-linuxdeploytools:
+# -git clone https://gitlink.org.cn/zinface/bundle-linuxdeployqt.git $(BUNDLE_LINUXDEPLOYQT)
-# linuxdeploy: all
+# LINUXDEPLOYQT := "$(BUNDLE_LINUXDEPLOYQT)/linuxdeployqt-continuous-x86_64.AppImage"
+# APPIMAGETOOL := "$(BUNDLE_LINUXDEPLOYQT)/appimagetool-x86_64.AppImage"
+
+# linuxdeploy: release download-bundle-linuxdeploytools
# cd build && cmake .. -DLINUXDEPLOYQT=$(LINUXDEPLOYQT) -DAPPIMAGETOOL=$(APPIMAGETOOL)
# cd build && make linuxdeploy
# genrate-appimage:
# cd build && cmake .. -DLINUXDEPLOYQT=$(LINUXDEPLOYQT) -DAPPIMAGETOOL=$(APPIMAGETOOL)
# cd build && make appimage
+
+
+
+# NOTE:
+# If a library in use is not on a system path, set export LD_LIBRARY_PATH=<path> so that linuxdeployqt can locate the library
diff --git a/cmake/DebPackageConfig.cmake b/cmake/SparkDebPackageConfig.cmake
similarity index 90%
rename from cmake/DebPackageConfig.cmake
rename to cmake/SparkDebPackageConfig.cmake
index d88fd51..7ad5b33 100644
--- a/cmake/DebPackageConfig.cmake
+++ b/cmake/SparkDebPackageConfig.cmake
@@ -1,7 +1,7 @@
cmake_minimum_required(VERSION 3.0.0)
# function(add_deb_package PACKAGE_NAME PACKAGE_VERSION PACKAGE_MAINTAINER PACKAGE_EMAIL PACKAGE_SHORT_DESCRIPTION PACKAGE_LONG_DESCRIPTION)
-
+
# endfunction(add_deb_package PACKAGE_NAME PACKAGE_VERSION PACKAGE_MAINTAINER PACKAGE_EMAIL PACKAGE_SHORT_DESCRIPTION PACKAGE_LONG_DESCRIPTION)
# if(add_deb_package VALUE) set(Package ${VALUE} PARENT_SCOPE) endif(add_deb_package VALUE)
@@ -119,7 +119,7 @@ function(set_package_vars _IN_KEY _IN_VAL)
else()
set(CPACK_DEBIAN_PACKAGE_VERSION "${_IN_VAL}" PARENT_SCOPE)
endif(_IN_VAL STREQUAL "auto")
-
+
 	message("--> Package version: ${_IN_VAL}")
endif(_Version EQUAL "0")
@@ -131,7 +131,7 @@ function(set_package_vars _IN_KEY _IN_VAL)
find_str("${_IN_KEY}" "Architecture" _Architecture)
if(_Architecture EQUAL "0")
- set(CPACK_DEBIAN_PACKAGE_ARCHITECTURE "${_IN_VAL}" PARENT_SCOPE)
+ set(CPACK_DEBIAN_PACKAGE_ARCHITECTURE "${_IN_VAL}" PARENT_SCOPE)
if(_IN_VAL STREQUAL "auto")
execute_process(
COMMAND dpkg --print-architecture
@@ -142,7 +142,7 @@ function(set_package_vars _IN_KEY _IN_VAL)
endif(_IN_VAL STREQUAL "auto")
 	message("--> Package architecture: ${_IN_VAL}")
endif(_Architecture EQUAL "0")
-
+
find_str("${_IN_KEY}" "Priority" _Priority)
if(_Priority EQUAL "0")
set(CPACK_DEBIAN_PACKAGE_PRIORITY "${_IN_VAL}" PARENT_SCOPE)
@@ -173,6 +173,12 @@ function(set_package_vars _IN_KEY _IN_VAL)
 	message("--> Package recommends: ${_IN_VAL}")
endif(_Recommends EQUAL "0")
+    find_str("${_IN_KEY}" "Conflicts" _Conflicts)
+    if(_Conflicts EQUAL "0")
+        set(CPACK_DEBIAN_PACKAGE_CONFLICTS "${_IN_VAL}" PARENT_SCOPE)
+        message("--> Package conflicts: ${_IN_VAL}")
+    endif(_Conflicts EQUAL "0")
+
endfunction(set_package_vars _IN_KEY _IN_VAL)
 # Define a custom function (add_package_descript)
@@ -188,7 +194,7 @@ function(add_package_descript IN_DES)
message(FATAL_ERROR "!! Not Found Path: ${PACKAGE_DES_PATH}")
return()
endif(EXISTS ${IN_DES})
-
+
file(READ ${PACKAGE_DES_PATH} DES_CONTENT)
trim_str("${DES_CONTENT}" DES_CONTENT)
@@ -238,7 +244,12 @@ function(add_package_descript IN_DES)
set(PREV_DES_LINE "")
while(NOT PREV_DES_LINE STREQUAL DES_LINE)
if(NOT PREV_DES_LINE STREQUAL "")
- set(Descrition "${Descrition}\n${DES_LINE}")
+ if ("${CMAKE_VERSION}" VERSION_LESS "3.15")
+ set(Descrition "${Descrition}\n${DES_LINE}")
+ else()
+ string(STRIP "${DES_LINE}" STRIP_DES_LINE)
+ set(Descrition "${Descrition}\n${STRIP_DES_LINE}")
+ endif("${CMAKE_VERSION}" VERSION_LESS "3.15")
endif(NOT PREV_DES_LINE STREQUAL "")
set(PREV_DES_LINE "${DES_LINE}")
sub_next(${DES_CONTENT} NEXT_INDEX DES_LINE DES_CONTENT)
@@ -262,7 +273,7 @@ function(add_package_descript IN_DES)
string(TIMESTAMP BUILD_TIME "%Y%m%d")
set(CPACK_DEBIAN_PACKAGE_VERSION "${CPACK_DEBIAN_PACKAGE_VERSION}-${BUILD_TIME}")
endif("${CalVer}" STREQUAL "true")
-
+
##################### deb file name #####################
@@ -270,11 +281,22 @@ function(add_package_descript IN_DES)
set(_Version "${CPACK_DEBIAN_PACKAGE_VERSION}")
set(_Architecture "${CPACK_DEBIAN_PACKAGE_ARCHITECTURE}")
- set(_DebFileName
+ set(_DebFileName
"${_Package}_${_Version}_${_Architecture}${PACKAGE_SUFFIX}.deb"
)
set(CPACK_DEBIAN_FILE_NAME ${_DebFileName})
+    # marker: spark-deb-package
+    if(NOT "${PACKAGE_SUFFIX}" STREQUAL "")
+        # e.g. strip the '_' from '_Debian'
+ string(SUBSTRING "${PACKAGE_SUFFIX}" 1 -1 DISTRIBUTION)
+ if ("${CMAKE_VERSION}" VERSION_LESS "3.15")
+ set(CPACK_DEBIAN_PACKAGE_DESCRIPTION "${Descrition}\n .\n Build for ${DISTRIBUTION} through spark-deb-build.")
+ else()
+ set(CPACK_DEBIAN_PACKAGE_DESCRIPTION ${Descrition} "\n.\nBuild for ${DISTRIBUTION} through spark-deb-build.")
+ endif("${CMAKE_VERSION}" VERSION_LESS "3.15")
+
+ endif(NOT "${PACKAGE_SUFFIX}" STREQUAL "")
# set(CPACK_DEBIAN_PACKAGE_NAME "${Package}")
# set(CPACK_DEBIAN_PACKAGE_VERSION "${Version}")
@@ -311,7 +333,7 @@ endfunction(add_package_descript IN_DES)
# CPACK_DEBIAN_FILE_NAME - n
# CPACK_DEBIAN_PACKAGE_NAME - y
# CPACK_DEBIAN_PACKAGE_VERSION - y
-# CPACK_DEBIAN_PACKAGE_ARCHITECTURE - y(auto)
+# CPACK_DEBIAN_PACKAGE_ARCHITECTURE - y(auto) -> dpkg --print-architecture
# CPACK_DEBIAN_PACKAGE_DEPENDS - y
# CPACK_DEBIAN_PACKAGE_PRIORITY - y
# CPACK_DEBIAN_PACKAGE_MAINTAINER - y
@@ -324,4 +346,5 @@ endfunction(add_package_descript IN_DES)
# set(ARCHITECTURE "arm64")
# endif()
+
# string(TIMESTAMP BUILD_TIME "%Y%m%d")
diff --git a/cmake/SparkDebianChangelogVersion.cmake b/cmake/SparkDebianChangelogVersion.cmake
index ee2f339..e439d37 100644
--- a/cmake/SparkDebianChangelogVersion.cmake
+++ b/cmake/SparkDebianChangelogVersion.cmake
@@ -43,13 +43,13 @@ macro(spark_debian_changelog_override_version _CHANGELOG_FILE_PATH)
file(READ ${CHANGELOG_FILE_PATH} CHANGELOG_CONTENT LIMIT 30)
 # fix: "spark-store (4.2.3~test1)" already exceeds 20 characters, so 30 is used as a conservative read limit
- string(FIND ${CHANGELOG_CONTENT} "(" V_PRE) # +1 to V_BEGIN
- string(FIND ${CHANGELOG_CONTENT} ")" V_END)
+ string(FIND "${CHANGELOG_CONTENT}" "(" V_PRE) # +1 to V_BEGIN
+ string(FIND "${CHANGELOG_CONTENT}" ")" V_END)
math(EXPR V_BEGIN "${V_PRE}+1")
math(EXPR V_LENGTH "${V_END}-${V_BEGIN}")
- string(SUBSTRING ${CHANGELOG_CONTENT} ${V_BEGIN} ${V_LENGTH} V)
+ string(SUBSTRING "${CHANGELOG_CONTENT}" ${V_BEGIN} ${V_LENGTH} V)
message("> V = ${CHANGELOG_CONTENT}")
message("> V = [${V}]")
diff --git a/cmake/SparkDesktopMacros.cmake b/cmake/SparkDesktopMacros.cmake
index 223ac6b..bea9da8 100644
--- a/cmake/SparkDesktopMacros.cmake
+++ b/cmake/SparkDesktopMacros.cmake
@@ -1,16 +1,19 @@
+# SparkDesktopMacros.cmake
-macro(spark_desktop_macros _APP_NAME _APP_NAME_ZH_CN _APP_COMMENT _APP_TYPE _APP_EXECUTE_PATH _APP_EXECUTE_ICON_PATH _APP_CATEGORIES)
- set(APP_NAME ${_APP_NAME})
- set(APP_NAME_ZH_CN ${_APP_NAME_ZH_CN})
- set(APP_COMMENT ${_APP_COMMENT})
- set(APP_TYPE ${_APP_TYPE})
- set(APP_EXECUTE_PATH ${_APP_EXECUTE_PATH})
- set(APP_EXECUTE_ICON_PATH ${_APP_EXECUTE_ICON_PATH})
- set(APP_CATEGORIES ${_APP_CATEGORIES})
- configure_file(cmake/spark-desktop.desktop.in
- ${CMAKE_BINARY_DIR}/${_APP_NAME}.desktop
+macro(spark_desktop_macros)
+ set(APP_NAME ${ARGV0})
+ set(APP_NAME_ZH_CN ${ARGV1})
+ set(APP_COMMENT ${ARGV2})
+ set(APP_TYPE ${ARGV3})
+ set(APP_EXECUTE_PATH ${ARGV4})
+ set(APP_EXECUTE_ICON_PATH ${ARGV5})
+ set(APP_CATEGORIES ${ARGV6})
+ set(APP_MIME_TYPE ${ARGV7})
+ configure_file(cmake/spark-desktop.desktop.in.txt
+ ${CMAKE_BINARY_DIR}/${ARGV0}.desktop
)
-endmacro(spark_desktop_macros _APP_NAME _APP_NAME_ZH_CN _APP_COMMENT _APP_TYPE _APP_EXECUTE_PATH _APP_EXECUTE_ICON_PATH _APP_CATEGORIES)
+ set(SPARK_DESKTOP_FILE ${CMAKE_BINARY_DIR}/${ARGV0}.desktop)
+endmacro(spark_desktop_macros)
# include(cmake/SparkDesktopMacros.cmake)
 # Content: default application name: Name= should be the same as the project name
@@ -21,15 +24,27 @@ endmacro(spark_desktop_macros _APP_NAME _APP_NAME_ZH_CN _APP_COMMENT _APP_TYPE _
 # Application type: Type=
 # Executable: Exec=
 # Icon path: Icon=
-	# Application category: Category=
+	# Application categories: Categories=
+	# MIME types: MimeType=
# )
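+# An illustrative call (every value below is a placeholder, not the project's real desktop entry):
+# spark_desktop_macros(spark-store "星火应用商店" "Sample comment" Application
+#     /usr/bin/spark-store spark-store Utility "x-scheme-handler/spark")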
+# TODO install location: INSTALL (will automatically implement installing the file, e.g. to /usr/share/applications)
+
+# install(FILES ${APP_NAME}.desktop
+# DESTINATION /usr/share/applications
+# )
	# or
+# install(FILES ${SPARK_DESKTOP_FILE}
+# DESTINATION /usr/share/applications
+# )
+
+# Content is filled in via configure_file
# configure_file(<input> <output>
# [NO_SOURCE_PERMISSIONS | USE_SOURCE_PERMISSIONS |
# FILE_PERMISSIONS <permissions>...]
# [COPYONLY] [ESCAPE_QUOTES] [@ONLY]
# [NEWLINE_STYLE [UNIX|DOS|WIN32|LF|CRLF] ])
-# install(FILES ${APP_NAME}.desktop
+# install(FILES ${SPARK_DESKTOP_FILE}.desktop
# DESTINATION /usr/share/applications
-# )
\ No newline at end of file
+# )
diff --git a/cmake/SparkEnvConfig.cmake b/cmake/SparkEnvConfig.cmake
index 797faf4..f9b4d55 100644
--- a/cmake/SparkEnvConfig.cmake
+++ b/cmake/SparkEnvConfig.cmake
@@ -5,4 +5,20 @@ set(CMAKE_INCLUDE_CURRENT_DIR ON)
set(CMAKE_AUTOMOC ON)
set(CMAKE_AUTOUIC ON)
set(CMAKE_AUTORCC ON)
-# set(CMAKE_BUILD_TYPE "Debug")
\ No newline at end of file
+# set(CMAKE_BUILD_TYPE "Debug")
+
+option(SPARK_DEBUG_MESSAGE "CMake Spark Module Debug Message." OFF)
+set(SPARK_DEBUG_LOGFILE "${CMAKE_BINARY_DIR}/spark_debug.log" CACHE STRING "Spark build debug logfile." FORCE)
+file(WRITE ${SPARK_DEBUG_LOGFILE})
+
+macro(spark_debug_message)
+    if(SPARK_DEBUG_MESSAGE)
+        set(SPARK_ONCE_LOG ${ARGN})
+        if(NOT "${SPARK_ONCE_LOG}" STREQUAL "")
+            message("[SPARK_MESSAGE]: " ${SPARK_ONCE_LOG})
+        endif(NOT "${SPARK_ONCE_LOG}" STREQUAL "")
+        file(APPEND ${SPARK_DEBUG_LOGFILE} ${SPARK_ONCE_LOG} "\n")
+        unset(SPARK_ONCE_LOG)
+    endif(SPARK_DEBUG_MESSAGE)
+endmacro(spark_debug_message)
+
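+# Usage sketch (illustrative):
+#   cmake -B build -DSPARK_DEBUG_MESSAGE=ON    # enable the debug output
+#   spark_debug_message("hello" " world")      # echoed and appended to ${CMAKE_BINARY_DIR}/spark_debug.log
+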
diff --git a/cmake/SparkFindDtkConfig.cmake b/cmake/SparkFindDtkConfig.cmake
index d1b2dfc..278d0d1 100644
--- a/cmake/SparkFindDtkConfig.cmake
+++ b/cmake/SparkFindDtkConfig.cmake
@@ -4,7 +4,7 @@ cmake_minimum_required(VERSION 3.5.1)
find_package(Dtk COMPONENTS Core Widget Gui)
function(target_link_dtk NAME)
- target_link_libraries(${NAME}
+ target_link_libraries(${NAME}
${DtkCore_LIBRARIES}
${DtkWidget_LIBRARIES}
${DtkGui_LIBRARIES})
diff --git a/cmake/SparkFindQt5Config.cmake b/cmake/SparkFindQt5Config.cmake
index 0300b3d..cb095b6 100644
--- a/cmake/SparkFindQt5Config.cmake
+++ b/cmake/SparkFindQt5Config.cmake
@@ -1,6 +1,8 @@
cmake_minimum_required(VERSION 3.5.1)
-find_package(Qt5 COMPONENTS Core Widgets Network Concurrent WebEngineWidgets REQUIRED)
+set(SPARK_FIND_QT5 TRUE)
+
+find_package(Qt5 COMPONENTS Core Widgets Network REQUIRED)
# function(target_link_qt5 NAME)
# target_link_libraries(${NAME}
@@ -22,10 +24,7 @@ macro(spark_add_link_qt5 _IN_NAME)
endmacro(spark_add_link_qt5 _IN_NAME)
 # Use spark_add_link_qt5 to generate target_link_qt5_<name> macros
-spark_add_link_qt5(Concurrent Qt5::Concurrent)
-spark_add_link_qt5(Sql Qt5::Sql)
-spark_add_link_qt5(WebEngineWidgets Qt5::WebEngineWidgets)
-spark_add_link_qt5(WebSockets Qt5::WebSockets)
+# spark_add_link_qt5(Concurrent Qt5::Concurrent)
 # Advanced customization
 # spark_add_links_qt5
@@ -47,7 +46,7 @@ macro(spark_add_links_qt5)
string(TOLOWER "${qt5_item}" qt5_lower_item)
spark_add_link_qt5(${qt5_lower_item} Qt5::${qt5_item})
- message("add_target_link_qt5_${qt5_item} or add_target_link_qt5_${qt5_lower_item}")
+ spark_debug_message("add_target_link_qt5_${qt5_item} or add_target_link_qt5_${qt5_lower_item}")
endforeach(qt5_item IN LISTS qt5_items)
endmacro(spark_add_links_qt5)
@@ -151,4 +150,4 @@ spark_add_links_qt5(
# XkbCommonSupport
# Xml
# XmlPatterns
-)
\ No newline at end of file
+)
diff --git a/cmake/SparkFindQt6Config.cmake b/cmake/SparkFindQt6Config.cmake
index dfd8917..2c9d8cc 100644
--- a/cmake/SparkFindQt6Config.cmake
+++ b/cmake/SparkFindQt6Config.cmake
@@ -1,6 +1,8 @@
cmake_minimum_required(VERSION 3.5.1)
-find_package(Qt6 COMPONENTS Core Widgets Network Concurrent)
+set(SPARK_FIND_QT6 TRUE)
+
+find_package(Qt6 COMPONENTS Core Widgets Network REQUIRED)
# function(target_link_qt6 NAME)
# target_link_libraries(${NAME}
@@ -14,7 +16,7 @@ spark_add_link(qt6 Qt6::Core Qt6::Widgets Qt6::Network)
# spark_add_link_qt6
-# Custom macro target_link_qt6 extending the target_link_qt6_<name> structure
+# Custom macro spark_add_link_qt6 extending the target_link_qt6_<name> structure
 # _IN_NAME: the <name> appended when this macro invokes the nested macro spark_add_link
 # equivalent to spark_add_link(qt_<name> ${ARGN})
macro(spark_add_link_qt6 _IN_NAME)
@@ -22,3 +24,107 @@ macro(spark_add_link_qt6 _IN_NAME)
endmacro(spark_add_link_qt6 _IN_NAME)
 # Use spark_add_link_qt6 to generate target_link_qt6_<name> macros
+# spark_add_link_qt6(Concurrent Qt6::Concurrent)
+
+# Advanced customization
+# spark_add_links_qt6
+# Custom macro spark_add_links_qt6 extending spark_add_link_qt6 into grouped configuration
	# Feature: takes any number of arguments
	# qt6_item: the single item of the iteration, like (for item in items:) in python3
	# e.g. qt6_item is Core
	# spark_add_link_qt6(${qt6_item} Qt6::${qt6_item})
	# expands to spark_add_link_qt6(Core Qt6::Core)
	# expands to spark_add_link(qt6_Core Qt6::Core)
	# Feature: qt6_Core is additionally registered as qt6_core
	# string(TOLOWER <string> <output_variable>)
+macro(spark_add_links_qt6)
+ set(qt6_items ${ARGN})
+ foreach(qt6_item IN LISTS qt6_items)
+ find_package(Qt6${qt6_item})
+ spark_add_link_qt6(${qt6_item} Qt6::${qt6_item})
+
+ string(TOLOWER "${qt6_item}" qt6_lower_item)
+ spark_add_link_qt6(${qt6_lower_item} Qt6::${qt6_item})
+ spark_debug_message("add_target_link_qt6_${qt6_item} or add_target_link_qt6_${qt6_lower_item}")
+ endforeach(qt6_item IN LISTS qt6_items)
+endmacro(spark_add_links_qt6)
+
+# List all Qt6 modules
+# find /usr/lib/x86_64-linux-gnu/cmake/ -name "*Config.cmake" | sed 's@^.*/Qt6@Qt6@;' | grep ^Qt6
+
+# Strip the head and tail, one cleanup pass
+# find /usr/lib/x86_64-linux-gnu/cmake/ -name "*Config.cmake" | sed 's@^.*/Qt6@Qt6@;' | grep ^Qt6 | sed 's@^Qt6@@; s@Config.cmake$@@; /^\s*$/d'
+
+# Sorted
+# find /usr/lib/x86_64-linux-gnu/cmake/ -name "*Config.cmake" | sed 's@^.*/Qt5@Qt5@;' | grep ^Qt5 | sed 's@^Qt5@@; s@Config.cmake$@@; /^\s*$/d' | sort | pr -t -3
+# find /usr/lib/x86_64-linux-gnu/cmake/ -name "*Config.cmake" | sed 's@^.*/Qt6@Qt6@;' | grep ^Qt6 | sed 's@^Qt6@@; s@Config.cmake$@@; /^\s*$/d' | sort | pr -t -3
+
+spark_add_links_qt6(
+ # BuildInternals
+ # BuildInternals/StandaloneTests/Qt5CompatTests
+ # BuildInternals/StandaloneTests/QtBaseTests
+ # Concurrent
+ # Core
+ # Core5Compat
+ # CoreTools
+ # DBus
+ # DBusTools
+ # DeviceDiscoverySupportPrivate
+ # EglFSDeviceIntegrationPrivate
+ # EglFsKmsGbmSupportPrivate
+ # EglFsKmsSupportPrivate
+ # FbSupportPrivate
+ # Gui
+ # GuiTools
+ # HostInfo
+ # InputSupportPrivate
+ # KmsSupportPrivate
+ # Network
+ # OpenGL
+ # OpenGLWidgets
+ # PrintSupport
+ # QComposePlatformInputContextPlugin
+ # QCupsPrinterSupportPlugin
+ # QEglFSEmulatorIntegrationPlugin
+ # QEglFSIntegrationPlugin
+ # QEglFSKmsEglDeviceIntegrationPlugin
+ # QEglFSKmsGbmIntegrationPlugin
+ # QEglFSX11IntegrationPlugin
+ # QEvdevKeyboardPlugin
+ # QEvdevMousePlugin
+ # QEvdevTabletPlugin
+ # QEvdevTouchScreenPlugin
+ # QGifPlugin
+ # QGtk3ThemePlugin
+ # QIBaseDriverPlugin
+ # QIbusPlatformInputContextPlugin
+ # QICOPlugin
+ # QJpegPlugin
+ # QLibInputPlugin
+ # QLinuxFbIntegrationPlugin
+ # QMinimalEglIntegrationPlugin
+ # QMinimalIntegrationPlugin
+ # QMYSQLDriverPlugin
+ # QNetworkManagerNetworkInformationPlugin
+ # QODBCDriverPlugin
+ # QOffscreenIntegrationPlugin
+ # QPSQLDriverPlugin
+ # QSQLiteDriverPlugin
+ # QTlsBackendCertOnlyPlugin
+ # QTlsBackendOpenSSLPlugin
+ # QTsLibPlugin
+ # QTuioTouchPlugin
+ # QVkKhrDisplayIntegrationPlugin
+ # QVncIntegrationPlugin
+ # QXcbEglIntegrationPlugin
+ # QXcbGlxIntegrationPlugin
+ # QXcbIntegrationPlugin
+ # QXdgDesktopPortalThemePlugin
+ # Sql
+ # Test
+ # Widgets
+ # WidgetsTools
+ # XcbQpaPrivate
+ # Xml
+)
diff --git a/cmake/SparkMacrosConfig.cmake b/cmake/SparkMacrosConfig.cmake
index 67d84e1..fd515be 100644
--- a/cmake/SparkMacrosConfig.cmake
+++ b/cmake/SparkMacrosConfig.cmake
@@ -2,20 +2,62 @@ cmake_minimum_required(VERSION 3.5.1)
 # Define some macros used to auto-generate the build structure
+# spark_aux_source_directory outvar invar [skip]
+# Collect all source files under the given directory
+macro(spark_aux_source_directory OUTVAR INVAR)
+ # iv: internal_variable
+ set(iv_args ${ARGN})
+ list(LENGTH iv_args iv_arglen)
+
+ file(GLOB iv_SOURCE_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${INVAR}/*.c ${INVAR}/*.cpp)
+ file(GLOB iv_HEADER_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${INVAR}/*.h ${INVAR}/*.hpp)
+ file(GLOB iv_QT_UI_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${INVAR}/*.ui ${INVAR}/*.qrc)
+
+ if(iv_arglen EQUAL 1)
+ list(APPEND ${OUTVAR} ${iv_SOURCE_LIST} ${iv_HEADER_LIST} ${iv_QT_UI_LIST})
+ else()
+ set(${OUTVAR} ${iv_SOURCE_LIST} ${iv_HEADER_LIST} ${iv_QT_UI_LIST})
+ endif(iv_arglen EQUAL 1)
+
+ unset(iv_args)
+ unset(iv_arglen)
+ unset(iv_SOURCE_LIST)
+ unset(iv_HEADER_LIST)
+ unset(iv_QT_UI_LIST)
+
+endmacro(spark_aux_source_directory OUTVAR INVAR)
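+
+# Usage sketch (paths are illustrative):
+#   spark_aux_source_directory(APP_SOURCES src/app)         # overwrites APP_SOURCES
+#   spark_aux_source_directory(APP_SOURCES src/extra SKIP)  # any extra argument switches to appending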
+
+# spark_aux_source_directories outvar invar [...]
+# Collect all source files under the given list of directories
	# an extension of spark_aux_source_directory supporting multiple invar values and appended arguments
+macro(spark_aux_source_directories OUTVAR INVAR)
+ set(iv_aux_directories ${ARGN})
+
+ spark_aux_source_directory(${OUTVAR} ${INVAR})
+
+ foreach(iv_directory IN LISTS iv_aux_directories)
+ spark_aux_source_directory(${OUTVAR} ${iv_directory} SKIP)
+ endforeach(iv_directory IN LISTS iv_aux_directories)
+
+ unset(iv_aux_directories)
+
+endmacro(spark_aux_source_directories OUTVAR INVAR)
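+# Usage sketch (paths are illustrative):
+#   spark_aux_source_directories(ALL_SOURCES src/app src/utils src/widgets)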
+
+
# spark_add_library <lib_name> [files]...
 # Build a library from the given source files
 # and generate a target_link_<lib_name> function based on the library name
macro(spark_add_library _lib_name)
- message("================ ${_lib_name} Library ================")
+ spark_debug_message("================ ${_lib_name} Library ================")
add_library(${_lib_name} ${ARGN})
set(SRCS ${ARGN})
foreach(item IN LISTS SRCS)
- message(" -> ${item}")
+ spark_debug_message(" -> ${item}")
endforeach(item IN LISTS SRCS)
function(target_link_${_lib_name} TARGET)
- message("${_lib_name}")
+ spark_debug_message("${_lib_name}")
target_link_libraries(${TARGET} ${_lib_name})
endfunction(target_link_${_lib_name} TARGET)
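+
+# Usage sketch (names are illustrative):
+#   spark_add_library(hello hello.h hello.cpp)
+#   target_link_hello(app)   # generated helper: links hello into the app target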
@@ -26,59 +68,271 @@ endmacro(spark_add_library _lib_name)
 # and generate a target_link_<lib_name> function based on the library name
 # the function adds <lib_path> as a header search path
macro(spark_add_library_path _lib_name _lib_path)
- aux_source_directory(${_lib_path} ${_lib_name}_SOURCES)
- message("================ spark_add_library_path: ${_lib_name} ================")
- file(GLOB UI_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${_lib_path}/*.ui)
- add_library(${_lib_name} ${${_lib_name}_SOURCES} ${UI_LIST})
- foreach(item IN LISTS ${_lib_name}_SOURCES)
- message(" -> ${item}")
- endforeach(item IN LISTS ${_lib_name}_SOURCES)
+    # 0. Set up the initial variables
+ set(${_lib_name}_TYPE)
+ set(${_lib_name}_TYPE_MESSAGE "STATIC(Default)")
+ set(${_lib_name}_ARGN ${ARGN})
+
+    # 1. Check whether _lib_path is SHARED or STATIC
+ if(${_lib_path} STREQUAL SHARED OR ${_lib_path} STREQUAL STATIC)
+ set(${_lib_name}_TYPE ${_lib_path})
+ set(${_lib_name}_TYPE_MESSAGE ${${_lib_name}_TYPE})
+
+ if(${ARGC} LESS 3)
+ message(FATAL_ERROR "Missing parameter, library path not specified.")
+ endif(${ARGC} LESS 3)
+ else()
+        # if not, append _lib_path to ARGN
+ list(APPEND ${_lib_name}_ARGN ${_lib_path})
+ endif(${_lib_path} STREQUAL SHARED OR ${_lib_path} STREQUAL STATIC)
+
+    # 2. Clean up arguments handed over when a spark_add_library_realpaths build is converted to this build
+ spark_debug_message("> Building: ${_lib_name}, type: ${${_lib_name}_TYPE_MESSAGE}")
+ set(${_lib_name}_ARGN_REF ${${_lib_name}_ARGN})
+ unset(${_lib_name}_ARGN)
+ foreach(_old IN LISTS ${_lib_name}_ARGN_REF)
+ set(_new ${_old})
+ string(FIND "${_old}" "+" _plus_index)
+ if(${_plus_index} GREATER 0)
+ string(SUBSTRING "${_old}" 0 ${_plus_index} _new)
+ spark_debug_message(" [CONVERT] ${_new} <- ${_old}")
+ endif(${_plus_index} GREATER 0)
+ list(APPEND ${_lib_name}_ARGN ${_new})
+ endforeach(_old IN LISTS ${_lib_name}_ARGN_REF)
+
+
+    # 3. Analyze the target argument items into sub-items:
+    # split out sources, paths, unknown items, etc.
+ set(${_lib_name}_ARGN_SOURCES)
+ set(${_lib_name}_ARGN_APPEND_PATHS)
+ set(${_lib_name}_ARGN_UNKNOW)
+ foreach(item IN LISTS ${_lib_name}_ARGN)
+ spark_debug_message(" [ARGN] check:" ${item})
+ if(NOT EXISTS ${item})
+ set(item ${CMAKE_CURRENT_LIST_DIR}/${item})
+ endif()
+ if(EXISTS ${item})
+ # spark_debug_message(" exists: true")
+ file(REAL_PATH ${item} ${_lib_name}_ARGN_item)
+ if(IS_DIRECTORY ${${_lib_name}_ARGN_item})
+ list(APPEND ${_lib_name}_ARGN_APPEND_PATHS ${item})
+ else()
+ list(APPEND ${_lib_name}_ARGN_SOURCES ${item})
+ endif(IS_DIRECTORY ${${_lib_name}_ARGN_item})
+ else()
+ list(APPEND ${_lib_name}_ARGN_UNKNOW ${item})
+ spark_debug_message(" exists: false")
+ endif()
+ endforeach()
+
+ list(LENGTH ${_lib_name}_ARGN_SOURCES ${_lib_name}_ARGN_SOURCES_LENGTH)
+ list(LENGTH ${_lib_name}_ARGN_APPEND_PATHS ${_lib_name}_ARGN_APPEND_PATHS_LENGTH)
+ list(LENGTH ${_lib_name}_ARGN_UNKNOW ${_lib_name}_ARGN_UNKNOW_LENGTH)
+    spark_debug_message("  result: files(${${_lib_name}_ARGN_SOURCES_LENGTH}), paths(${${_lib_name}_ARGN_APPEND_PATHS_LENGTH}), unknown(${${_lib_name}_ARGN_UNKNOW_LENGTH})")
+
+    # 4. Gather all sources as any_files
+ spark_debug_message(" files:")
+ set(any_files ${${_lib_name}_ARGN_SOURCES})
+ foreach(item IN LISTS ${_lib_name}_ARGN_APPEND_PATHS)
+ spark_aux_source_directory(item_files ${item})
+ list(APPEND any_files ${item_files})
+ foreach(item_file IN LISTS item_files)
+ spark_debug_message(" ${item_file}")
+ endforeach(item_file IN LISTS item_files)
+ endforeach(item IN LISTS ${_lib_name}_ARGN_APPEND_PATHS)
+
+    # 5. Build the target library
+ add_library(${_lib_name} ${${_lib_name}_TYPE}
+ ${${_lib_name}_ARGN_SOURCES}
+ ${any_files})
+
+    # 6. Establish the reference points
+    # the target_link_<_lib_name> function
+    # the target_include_<_lib_name> function
+
+    # the target_<_lib_name>_include function
+    # the target_<_lib_name>_link function
+ function(target_${_lib_name}_include _include)
+        spark_debug_message("Add include: ${_lib_name} <- ${_include} ${${_lib_name}_INCLUDE_ARGN}")
+ target_include_directories(${_lib_name} PRIVATE ${_include})
+ endfunction(target_${_lib_name}_include _include)
+
+ function(target_${_lib_name}_link _library)
+        spark_debug_message("Add link: ${_lib_name} <- ${_library} ${${_lib_name}_LINK_ARGN}")
+ target_link_libraries(${_lib_name} ${_library})
+ endfunction(target_${_lib_name}_link _library)
+
function(target_link_${_lib_name} TARGET)
- # message("target_link_${_lib_name}")
- message(" -> (include): ${_lib_path}")
- target_include_directories(${TARGET} PUBLIC "${_lib_path}")
+        spark_debug_message("Link: ${TARGET} <- ${_lib_name}")
+ target_include_directories(${TARGET} PRIVATE
+ "${${_lib_name}_SOURCE_PATH}" ${${_lib_name}_ARGN_APPEND_PATHS})
target_link_libraries(${TARGET} ${_lib_name})
endfunction(target_link_${_lib_name} TARGET)
function(target_include_${_lib_name} TARGET)
- # message("target_link_${_lib_name}")
- message(" -> (include): ${_lib_path}")
- target_include_directories(${TARGET} PUBLIC "${_lib_path}")
- # target_link_libraries(${TARGET} ${_lib_name})
+        spark_debug_message("Include: ${TARGET} <- ${_lib_name}")
+ target_include_directories(${TARGET} PUBLIC
+ "${${_lib_name}_SOURCE_PATH}" ${${_lib_name}_ARGN_APPEND_PATHS})
endfunction(target_include_${_lib_name} TARGET)
+
+ target_include_directories(${_lib_name} PRIVATE
+ "${${_lib_name}_ARGN_APPEND_PATHS}")
+
+    # print the collected include paths
+ spark_debug_message(" ${_lib_name}_ARGN_APPEND_PATHS: ")
+ foreach(item IN LISTS ${_lib_name}_ARGN_APPEND_PATHS)
+ string(REPLACE "${CMAKE_SOURCE_DIR}/" "" item_var "${item}")
+ spark_debug_message(" ${item_var}")
+ endforeach(item IN LISTS ${_lib_name}_ARGN_APPEND_PATHS)
+
+    # If you want to implement target_link_include_directories manually with the operations below,
+    # be careful to wrap LIST-typed values in ""
+    # the PUBLIC mode of target_link_include_directories fills (appends) the target's INCLUDE_DIRECTORIES property
+    # target_link_include_directories supports cmake generator expressions and is much easier to use; a manual version cannot provide that
+    # target_link_include_directories accepts relative as well as absolute path arguments
+    # a manual version must use absolute paths, which is its drawback
+ # get_target_property(_lib_include_directories ${_lib_name} INCLUDE_DIRECTORIES)
+ # list(APPEND _lib_include_directories "${CMAKE_CURRENT_LIST_DIR}/${${_lib_name}_SOURCE_PATH}")
+ # spark_debug_message("----> ${CMAKE_CURRENT_LIST_DIR}/${${_lib_name}_SOURCE_PATH}")
+ # spark_debug_message("----> ${_lib_include_directories}")
+ # set_target_properties(${_lib_name} PROPERTIES
+ # INCLUDE_DIRECTORIES "${_lib_include_directories}"
+ # INTERFACE_INCLUDE_DIRECTORIES "${_lib_include_directories}"
+ # )
+
endmacro(spark_add_library_path _lib_name _lib_path)
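+# Usage sketch (names and paths are illustrative):
+#   spark_add_library_path(bigimage SHARED src/libs/bigimage)
+#   target_link_bigimage(app)   # generated helper: adds the include paths and links bigimage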
+# spark_add_shared_library <target> [files ...]
+# 构建一个共享库,基于指定的源代码
+ # 并根据库名生成 target_link_<lib_name> 函数
+macro(spark_add_shared_library _lib_name)
+ spark_add_library(${_lib_name} SHARED ${ARGN})
+endmacro(spark_add_shared_library _lib_name)
+
+# spark_add_shared_library_path <target> [files ... paths]
+# Build a shared library from the given paths
	# and generate a target_link_<lib_name> function based on the library name
+macro(spark_add_shared_library_path _lib_name)
+ spark_add_library_path(${_lib_name} SHARED ${ARGN})
+endmacro(spark_add_shared_library_path _lib_name)
+
# spark_add_executable <exec_name> [files]...
 # Build an executable from the given source files
 # Qt builds involve many source file types; specify *.h/*.cpp/*.qrc/*.qm/... as needed
macro(spark_add_executable _exec_name)
- message("================ ${_exec_name} Executable ================")
+    set(${_exec_name}_TYPE_MESSAGE "executable")
+ spark_debug_message("> Building: ${_exec_name}, type: ${${_exec_name}_TYPE_MESSAGE}")
+
add_executable(${_exec_name} ${ARGN})
endmacro(spark_add_executable _exec_name)
+# spark_add_executable_path <target> <path> [files ... paths]
+# Build an executable from the given path
macro(spark_add_executable_path _exec_name _exec_path)
- aux_source_directory(${_exec_path} ${_exec_name}_SOURCES)
-
- message("================ ${_exec_name} Executable ================")
- file(GLOB UI_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${_exec_path}/*.ui)
- add_executable(${_exec_name} ${${_exec_name}_SOURCES} ${ARGN} ${UI_LIST})
- foreach(item IN LISTS ${_exec_name}_SOURCES)
- message(" -> ${item}")
- endforeach(item IN LISTS ${_exec_name}_SOURCES)
-
- # function(target_link_${_exec_name} TARGET)
- # message("target_link_${_lib_name}")
- message(" -> (include): ${_exec_path}")
- target_include_directories(${_exec_name} PUBLIC "${_exec_path}")
- # target_link_libraries(${TARGET} ${_lib_name})
- # endfunction(target_link_${_exec_name} TARGET)
- # target_link_${_exec_name}(${_exec_name})
+ spark_add_executable(${_exec_name})
+
+    # 0. Set up the initial variables
+ # set(${_exec_name}_TYPE)
+ # set(${_exec_name}_TYPE_MESSAGE "可执行程序")
+ set(${_exec_name}_ARGN ${ARGN})
+    # 1. Clean up arguments handed over when a spark_add_executable_realpaths build is converted to this build
+ # spark_debug_message("> Building: ${_exec_name}, type: ${${_exec_name}_TYPE_MESSAGE}")
+ set(${_exec_name}_ARGN_REF ${${_exec_name}_ARGN})
+ unset(${_exec_name}_ARGN)
+ foreach(_old IN LISTS ${_exec_name}_ARGN_REF)
+ set(_new ${_old})
+ string(FIND "${_old}" "+" _plus_index)
+ if(${_plus_index} GREATER 0)
+ string(SUBSTRING "${_old}" 0 ${_plus_index} _new)
+ spark_debug_message(" [CONVERT] ${_new} <- ${_old}")
+ endif(${_plus_index} GREATER 0)
+ list(APPEND ${_exec_name}_ARGN ${_new})
+ endforeach(_old IN LISTS ${_exec_name}_ARGN_REF)
+
+    # 2. Analyze the target argument items into sub-items:
+    # split out sources, paths, unknown items, etc.
+ # spark_debug_message("> Building: ${_exec_name}, type: ${${_exec_name}_TYPE_MESSAGE}")
+ set(${_exec_name}_ARGN_SOURCES)
+ set(${_exec_name}_ARGN_APPEND_PATHS ${_exec_path})
+ set(${_exec_name}_ARGN_UNKNOW)
+ foreach(item IN LISTS ${_exec_name}_ARGN)
+ spark_debug_message(" [ARGN] check:" ${item})
+ if(CMAKE_VERSION VERSION_LESS 3.14)
+ string(REGEX MATCH "\.qm$" fext "${item}")
+ else()
+ get_filename_component(fext "${item}" LAST_EXT)
+ endif(CMAKE_VERSION VERSION_LESS 3.14)
+
+ if(NOT EXISTS ${item} AND NOT "${fext}" STREQUAL ".qm")
+ set(item ${CMAKE_CURRENT_LIST_DIR}/${item})
+ endif()
+ if(EXISTS ${item})
+ # spark_debug_message(" exists: true")
+ file(REAL_PATH ${item} ${_exec_name}_ARGN_item)
+ if(IS_DIRECTORY ${${_exec_name}_ARGN_item})
+ list(APPEND ${_exec_name}_ARGN_APPEND_PATHS ${item})
+ else()
+ list(APPEND ${_exec_name}_ARGN_SOURCES ${item})
+ endif(IS_DIRECTORY ${${_exec_name}_ARGN_item})
+ else()
+ if("${fext}" STREQUAL ".qm")
+ list(APPEND ${_exec_name}_ARGN_SOURCES ${item})
+ else()
+ list(APPEND ${_exec_name}_ARGN_UNKNOW ${item})
+ spark_debug_message(" exists: false")
+ endif("${fext}" STREQUAL ".qm")
+ endif()
+ endforeach()
+
+ list(LENGTH ${_exec_name}_ARGN_SOURCES ${_exec_name}_ARGN_SOURCES_LENGTH)
+ list(LENGTH ${_exec_name}_ARGN_APPEND_PATHS ${_exec_name}_ARGN_APPEND_PATHS_LENGTH)
+ list(LENGTH ${_exec_name}_ARGN_UNKNOW ${_exec_name}_ARGN_UNKNOW_LENGTH)
+    spark_debug_message("  result: files(${${_exec_name}_ARGN_SOURCES_LENGTH}), paths(${${_exec_name}_ARGN_APPEND_PATHS_LENGTH}), unknown(${${_exec_name}_ARGN_UNKNOW_LENGTH})")
+
+
+    # 3. Gather all sources as any_files
+ spark_debug_message(" files:")
+ set(any_files ${${_exec_name}_ARGN_SOURCES})
+ foreach(item IN LISTS ${_exec_name}_ARGN_APPEND_PATHS)
+ spark_aux_source_directory(item_files ${item})
+ list(APPEND any_files ${item_files})
+ foreach(item_file IN LISTS item_files)
+ spark_debug_message(" ${item_file}")
+ endforeach(item_file IN LISTS item_files)
+ endforeach(item IN LISTS ${_exec_name}_ARGN_APPEND_PATHS)
+
+    # 4. Add the files the executable target needs
+ # add_executable(${_exec_name}
+ # ${${_exec_name}_ARGN_SOURCES}
+ # ${any_files})
+
+ target_sources(${_exec_name} PRIVATE
+ ${${_exec_name}_ARGN_SOURCES}
+ ${any_files})
+
+    # 5. Establish the reference points
+    # the target_<_exec_name>_include function
+    # the target_<_exec_name>_link function
+ function(target_${_exec_name}_include _include)
+        spark_debug_message("Add include: ${_exec_name} <- ${_include} ${${_exec_name}_INCLUDE_ARGN}")
+ target_include_directories(${_exec_name} PRIVATE ${_include})
+ endfunction(target_${_exec_name}_include _include)
+
+ function(target_${_exec_name}_link _library)
+        spark_debug_message("Add link: ${_exec_name} <- ${_library} ${${_exec_name}_LINK_ARGN}")
+ target_link_libraries(${_exec_name} ${_library})
+ endfunction(target_${_exec_name}_link _library)
+
+ target_include_directories(${_exec_name} PRIVATE
+ ${_exec_path})
+ spark_debug_message(" include: ${_exec_path}\n")
+
endmacro(spark_add_executable_path _exec_name _exec_path)
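+# Usage sketch (names are illustrative; .qm files may be listed even though they are generated later):
+#   spark_add_executable_path(app src/app ${APP_QM_FILES})
+#   target_link_qt(app)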
# spark_find_library
@@ -98,6 +352,15 @@ macro(spark_find_library _prefix)
endmacro(spark_find_library _prefix)
+macro(target_link_qt)
+
+ if(SPARK_FIND_QT6)
+ target_link_qt6(${ARGN})
+ elseif(SPARK_FIND_QT5)
+ target_link_qt5(${ARGN})
+ endif(SPARK_FIND_QT6)
+
+endmacro(target_link_qt)
# spark_add_executable_paths
 # Custom build macro: with the given prefix name, the remaining arguments are treated as subdirectories
@@ -108,9 +371,9 @@ macro(spark_add_executable_paths _prefix_path)
set(PATHS ${ARGN})
foreach(item IN LISTS PATHS)
file(GLOB QRCS "${item}/*.qrc")
- message(">>> add_executable: " "${_prefix_path}-${item} ${item} + ${QRCS}")
+ spark_debug_message(">>> add_executable: " "${_prefix_path}-${item} ${item} + ${QRCS}")
spark_add_executable_path(${_prefix_path}-${item} ${item} ${QRCS})
- target_link_qt5(${_prefix_path}-${item})
+ target_link_qt(${_prefix_path}-${item})
endforeach(item IN LISTS PATHS)
endmacro(spark_add_executable_paths _prefix_path)
@@ -120,10 +383,11 @@ endmacro(spark_add_executable_paths _prefix_path)
 # ARGN: the remaining argument list of this macro
 # when target_link_<name> is used
 # _NAME: required parameter of this function: the <_NAME> target will link against this library
-macro(spark_add_link _IN_NAME)
- function(target_link_${_IN_NAME} _NAME)
- message("LINK ${_NAME} ${ARGN}")
- target_link_libraries(${_NAME}
+macro(spark_add_link _name)
+ function(target_link_${_name} _link)
+ spark_debug_message("> Linking: ${_link}")
+ spark_debug_message(" <- ${ARGN}\n")
+ target_link_libraries(${_link}
${ARGN})
- endfunction(target_link_${_IN_NAME} _NAME)
-endmacro(spark_add_link _IN_NAME)
\ No newline at end of file
+ endfunction(target_link_${_name} _link)
+endmacro(spark_add_link _name)
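+
+# Usage sketch (illustrative; mirrors how SparkFindDtkConfig.cmake defines target_link_dtk):
+#   spark_add_link(dtk ${DtkCore_LIBRARIES} ${DtkWidget_LIBRARIES} ${DtkGui_LIBRARIES})
+#   target_link_dtk(app)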
diff --git a/cmake/SparkMacrosExtendConfig.cmake b/cmake/SparkMacrosExtendConfig.cmake
index bad0620..0a4dcb2 100644
--- a/cmake/SparkMacrosExtendConfig.cmake
+++ b/cmake/SparkMacrosExtendConfig.cmake
@@ -4,193 +4,428 @@
function(find_plus INVAL OUTVAL)
string(FIND "${INVAL}" "+" plus_index)
set(${OUTVAL} ${plus_index} PARENT_SCOPE)
- # if(plus_index LESS 0)
- # set(${OUTVAL} -1 PARENT_SCOPE)
- # else()
- # set(${OUTVAL} ${plus_index} PARENT_SCOPE)
- # endif(plus_index LESS 0)
endfunction(find_plus INVAL OUTVAL)
-# find_plus("FF" FFFF)
-# message("--> FFFF ${FFFF}") # --> FFFF -1
-# find_plus("F+F" FFFF)
-# message("--> FFFF ${FFFF}") # --> FFFF 1
-# find_plus("+F+F" FFFF)
-# message("--> FFFF ${FFFF}") # --> FFFF 0
-
-# set(FFF)
-# list(APPEND FFFF )
-# list(APPEND FFFF "F")
-# list(APPEND FFFF "FA")
-# message("--> FFFF: ${FFFF}") # --> FFFF: F;FA
-
-# set(FFFFS "")
-# list(APPEND FFFFS ${FFFF})
-# message("--> FFFFS: ${FFFFS}") # --> FFFFS: F;FA
-
-# set(FFFF "+AA+BB+CC+DD")
-# string(REPLACE "+" ";" FFFFL "${FFFF}")
-# list(LENGTH FFFFL FFFFLEN)
-# message("--> FFFFL: ${FFFFL} --> ${FFFFLEN}") # --> FFFFL: F;
-
-# plus_list
-# 将传入的 "+AAA+BBB+CCC" 类型数据变成一个 列表(list)
-# 适用于不使用 string 进行替换 + 为 ";" 的情况下使用直接变成 list
-function(plus_list INVAL OUTVAL OUTVALLEN)
- # set(${OUTVAL} "..." PARENT_SCOPE)
- # set(${OUTVALLEN} 0 PARENT_SCOPE)
-
- set(_tmps "") # 设置为空的
-
- # 寻找下一个 + 位置
- find_plus(${INVAL} RIGHT_PLUS)
-
- string(LENGTH "${INVAL}" INVALLEN)
- message("--> 传入的 INVAL: --> 内容: ${INVAL}")
- message("--> 传入的 INVAL: --> 长度: ${INVALLEN}")
- message("--> 传入的 INVAL: --> +位置: ${RIGHT_PLUS}")
-
- # 判断是否有右侧 + 号
- if(RIGHT_PLUS LESS 0)
- message("--> 传入的 INVAL: --> 无需计算新的+位置")
- # message("--> 计算新的 + 位置: ${_PLUSINDEX}")
- list(APPEND _tmps ${INVAL})
- else()
- math(EXPR _PLUSINDEX "${RIGHT_PLUS}+1")
- message("--> 传入的 INVAL: --> 需计算+位置 --> 右移: ${_PLUSINDEX}")
-
- string(SUBSTRING "${INVAL}" ${_PLUSINDEX} ${INVALLEN} NewVal)
- message("--> 传入的 INVAL: --> 需计算+位置 --> 右移: ${_PLUSINDEX} -> 内容: ${NewVal}")
- # string(REPLACE "+" ";" _tmps "${NewVal}")
- # list(LENGTH FFFFL FFFFLEN)
-
- # message("--> 计算新的 + 位置: ${_PLUSINDEX} --> 后面的 NewVal: ${NewVal}")
-
- # find_plus(${NewVal} _NextPlus)
- # if(_NextPlus LESS 0)
- # list(APPEND _tmps ${NewVal})
- # message("--> 追加新的 + 位置: ${_PLUSINDEX} --> 后面的")
- # else()
- # message("--> 追加新的 + 位置: ${_PLUSINDEX} --> 后面的")
- # # 重新
- # # plus_list(${NewVal} NewValS )
- # # foreach(item)
- # # list(APPEND _tmps ${item})
- # # endforeach(item)
- # endif(_NextPlus LESS 0)
- endif(RIGHT_PLUS LESS 0)
-
- set(${OUTVAL} ${_tmps} PARENT_SCOPE)
- list(LENGTH _tmps _tmps_len)
- set(${OUTVALLEN} ${_tmps_len} PARENT_SCOPE)
-
-endfunction(plus_list INVAL OUTVAL OUTVALLEN)
-
-# plus_list("+AAA+BBB+CCC+DDD" FFF FFLEN)
-# message("--------> ${FFF}: -> ${FFLEN}")
-
-# spark_add_library_realpaths
+function(find_plus_v INVAL OUTVAL)
+ string(FIND "${${INVAL}}" "+" plus_index)
+ set(${OUTVAL} ${plus_index} PARENT_SCOPE)
+endfunction(find_plus_v INVAL OUTVAL)
+
+function(find_colon INVAL OUTVAL)
+ string(FIND "${INVAL}" ":" colon_index)
+ set(${OUTVAL} ${colon_index} PARENT_SCOPE)
+endfunction(find_colon INVAL OUTVAL)
+
+function(find_colon_v INVAL OUTVAL)
+ string(FIND "${${INVAL}}" ":" colon_index)
+ set(${OUTVAL} ${colon_index} PARENT_SCOPE)
+endfunction(find_colon_v INVAL OUTVAL)
+
+function(find_dir INVAL OUTVAL)
+ string(FIND "${INVAL}" "/" _STR ${ARGN})
+ set(${OUTVAL} ${_STR} PARENT_SCOPE)
+endfunction(find_dir INVAL OUTVAL)
+
+function(find_dir_v INVAL OUTVAL)
+ string(FIND "${${INVAL}}" "/" _STR ${ARGN})
+ set(${OUTVAL} ${_STR} PARENT_SCOPE)
+endfunction(find_dir_v INVAL OUTVAL)
+
+#
+function(str_left INVAL INDEX OUTVAL)
+ set(LEFT_INDEX ${INDEX})
+ string(SUBSTRING "${INVAL}" 0 ${LEFT_INDEX} _LEFT_V)
+ set(${OUTVAL} ${_LEFT_V} PARENT_SCOPE)
+endfunction(str_left INVAL INDEX OUTVAL)
+
+function(str_right INVAL INDEX OUTVAL)
+ math(EXPR RIGHT_INDEX ${INDEX}+1)
+ string(SUBSTRING "${INVAL}" ${RIGHT_INDEX} -1 _RIGHT_V)
+ set(${OUTVAL} ${_RIGHT_V} PARENT_SCOPE)
+endfunction(str_right INVAL INDEX OUTVAL)
+
+function(str_left_v INVAL INDEX OUTVAL)
+ set(LEFT_INDEX ${${INDEX}})
+ string(SUBSTRING "${${INVAL}}" 0 ${LEFT_INDEX} _LEFT_V)
+ set(${OUTVAL} ${_LEFT_V} PARENT_SCOPE)
+endfunction(str_left_v INVAL INDEX OUTVAL)
+
+function(str_right_v INVAL INDEX OUTVAL)
+ math(EXPR RIGHT_INDEX ${${INDEX}}+1)
+ string(SUBSTRING "${${INVAL}}" ${RIGHT_INDEX} -1 _RIGHT_V)
+ set(${OUTVAL} ${_RIGHT_V} PARENT_SCOPE)
+endfunction(str_right_v INVAL INDEX OUTVAL)
+
+#
+function(find_colon_plus INVAL OUTVAL)
+ find_colon(${INVAL} COLON_INDEX)
+ str_right(${INVAL} ${COLON_INDEX} COLON_RIGHT)
+ find_plus_v(COLON_RIGHT PLUS_INDEX)
+ str_left_v(COLON_RIGHT PLUS_INDEX COLON_RIGHT_LEFT_PLUS)
+
+ set(${OUTVAL} ${COLON_RIGHT_LEFT_PLUS} PARENT_SCOPE)
+endfunction(find_colon_plus INVAL OUTVAL)
+
+function(find_colon_plus_v INVAL OUTVAL)
+ find_colon_v(${INVAL} COLON_INDEX)
+ str_right_v(${INVAL} COLON_INDEX COLON_RIGHT)
+ find_plus_v(COLON_RIGHT PLUS_INDEX)
+ str_left_v(COLON_RIGHT PLUS_INDEX COLON_RIGHT_LEFT_PLUS)
+
+ set(${OUTVAL} ${COLON_RIGHT_LEFT_PLUS} PARENT_SCOPE)
+endfunction(find_colon_plus_v INVAL OUTVAL)
+
+function(find_dir_plus INVAL OUTVAL)
+ # t:*/*+d
+ # ^
+ find_dir("${INVAL}" SLASH_INDEX REVERSE)
+ str_right("${INVAL}" ${SLASH_INDEX} SLASH_RIGHT)
+ find_plus_v(SLASH_RIGHT PLUS_INDEX)
+ str_left_v(SLASH_RIGHT PLUS_INDEX SLASH_RIGHT_LEFT_PLUS)
+
+ set(${OUTVAL} ${SLASH_RIGHT_LEFT_PLUS} PARENT_SCOPE)
+endfunction(find_dir_plus INVAL OUTVAL)
+
+function(find_dir_plus_v INVAL OUTVAL)
+ # t:*/*+d
+ # ^
+ find_dir("${${INVAL}}" SLASH_INDEX REVERSE)
+ str_right("${${INVAL}}" ${SLASH_INDEX} SLASH_RIGHT)
+ find_plus_v(SLASH_RIGHT PLUS_INDEX)
+ str_left_v(SLASH_RIGHT PLUS_INDEX SLASH_RIGHT_LEFT_PLUS)
+
+ set(${OUTVAL} ${SLASH_RIGHT_LEFT_PLUS} PARENT_SCOPE)
+endfunction(find_dir_plus_v INVAL OUTVAL)
+
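+# Parsing sketch for the helpers above (the value "t:src/app+hello" is illustrative):
+#   find_colon_plus("t:src/app+hello" dir)   # dir    -> "src/app"
+#   find_dir_plus("src/app+hello" target)    # target -> "app"
+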
+
+# spark_add_library_source <target> ...
+# An extension macro for the one-line-one-library style build
+# the listed sources are additionally included at build time
+macro(spark_add_library_source target)
+ set(${target}_ADD_SOURCE ${ARGN})
+endmacro(spark_add_library_source target)
+
+# Shared handling logic for target_link_qt5 / qt6
+macro(_handle_spark_target_link_qt_macro _target)
+ target_link_qt(${_target})
+endmacro(_handle_spark_target_link_qt_macro _target)
+
+# spark_add_library_realpaths [dirs ...]
 # Build from the given items
-# accepted values: a list of paths
-# accepted values: a list of paths+dependencyA+dependencyB
+	# accepted values: a list of paths
+	# accepted values: a list of paths+dependencyA+dependencyB
macro(spark_add_library_realpaths)
- message("---> 基于传入的项进行构建 <---")
- # message("--> src/unclassified/ItemDelegates/NdStyledItemDelegate")
- # string(FIND <string> <substring> <output_variable> [REVERSE])
- # string(SUBSTRING <string> <begin> <length> <output_variable>)
- # math(EXPR value "100 * 0xA" OUTPUT_FORMAT DECIMAL) # value is set to "1000"
set(REALPATHS ${ARGN})
foreach(REALPATH IN LISTS REALPATHS)
- message("---> 传入路径: ${REALPATH} <--- ")
- string(LENGTH "${REALPATH}" REALPATH_LENGTH)
- message("---> 计算传入路径长度: --> 长度: ${REALPATH_LENGTH}")
-
- string(FIND "${REALPATH}" "/" LASTINDEX REVERSE)
- message("---> 计算传入路径末尾/位置: --> 长度: ${LASTINDEX}")
- math(EXPR LASTINDEX "${LASTINDEX}+1")
- message("---> 计算传入路径末尾/右移: --> 长度: ${LASTINDEX}")
- string(SUBSTRING "${REALPATH}" ${LASTINDEX} ${REALPATH_LENGTH} REALNAME_Dependency)
+        # # find the ':' index (the colon-finding helper)
+        # find_colon(${REALPATH} COLON_INDEX)
+        # find the '/' index (the slash-finding helper)
+        find_dir_v(REALPATH SLASH_INDEX REVERSE)
         # find the '+' index (the plus-finding helper)
- find_plus(${REALPATH} RIGHT_PLUS)
+ find_plus_v(REALPATH PLUS_INDEX)
+
+ # +
+ if(PLUS_INDEX LESS 0)
+            # with no '+' at all, this is a plain directory-based build
+ set(dir ${REALPATH})
+ str_right_v(REALPATH SLASH_INDEX target)
+
+ spark_add_library_path(${target}
+ ${dir}
+ ${${target}_ADD_SOURCE}
+ )
+            # dependencies built via spark_add_library_realpaths can reference the library headers directly
+ target_include_directories(${target} PUBLIC ${dir})
+ _handle_spark_target_link_qt_macro(${target})
+ else()
+            # with a '+', everything right of the '+' becomes target_depends_str and is turned into a list
+ str_right_v(REALPATH PLUS_INDEX target_depends_str)
+ string(REPLACE "+" ";" target_depends "${target_depends_str}")
+
+ find_dir_plus_v(REALPATH target)
+ str_left_v(REALPATH PLUS_INDEX dir)
+
+ spark_add_library_path(${target}
+ ${dir}
+ ${${target}_ADD_SOURCE}
+ )
+ spark_debug_message(" [INCLUDE_DIRS]: ${dir} ${dir}/.. \n")
+ target_include_directories(${target} PUBLIC ${dir} ${dir}/..)
+ target_link_libraries(${target} ${target_depends})
+ endif(PLUS_INDEX LESS 0)
- # 判断是否有找到 + 号下标,值为 -1 或 正整数
- if(RIGHT_PLUS LESS 0) # 小于0: 不存在 + 号
- set(REALNAME "${REALNAME_Dependency}")
- message("---> 传入路径末尾/右移部分: --> ${REALNAME} <-- 无依赖+")
+ endforeach(REALPATH IN LISTS REALPATHS)
- message("---> 构建 ${REALNAME} -> ${REALNAME} ${REALPATH} ")
+endmacro(spark_add_library_realpaths)
- spark_add_library_path(${REALNAME} ${REALPATH})
- target_link_qt5(${REALNAME})
- else()
- message("---> 传入路径末尾/右移部分: --> ${REALNAME_Dependency} <-- 依赖+")
- # 存在+号,将截取从 / 到 + 号之间的内容作为目标名称
- # 例如 src/unclassified/widgets/DocTypeListView+JsonDeploy
- # ^(LASTINDEX) ^(RIGHT_PLUS)
- # 将 RIGHT_PLUS - LASTINDEX 计算出 DocTypeListView 字符长度
- math(EXPR REALNAME_LENGTH "${RIGHT_PLUS}-${LASTINDEX}")
+# spark_add_shared_library_realpaths [dirs ...]
+# Build from the given items
	# accepted values: a list of paths
	# accepted values: a list of paths+dependencyA+dependencyB
+macro(spark_add_shared_library_realpaths)
+
+ set(REALPATHS ${ARGN})
+ foreach(REALPATH IN LISTS REALPATHS)
- message("---> 计算传入路径末尾/右移部分: --> 位置: ${RIGHT_PLUS}")
- # message("---> 计算传入路径末尾/右移部分: --> 长度: ${REALNAME_Dependency}")
+    # # find the ':' index (the colon-finding helper)
+    # find_colon(${REALPATH} COLON_INDEX)
+    # find the '/' index (the slash-finding helper)
+    find_dir_v(REALPATH SLASH_INDEX REVERSE)
+    # find the '+' index (the plus-finding helper)
+ find_plus_v(REALPATH PLUS_INDEX)
+
+ # +
+ if(PLUS_INDEX LESS 0)
+        # with no '+' at all, this is a plain directory-based build
+ set(dir ${REALPATH})
+ str_right_v(REALPATH SLASH_INDEX target)
+
+ spark_add_library_path(${target} SHARED
+ ${dir}
+ ${${target}_ADD_SOURCE}
+ )
+        # dependencies built via spark_add_library_realpaths can reference the library headers directly
+ target_include_directories(${target} PUBLIC ${dir})
+ _handle_spark_target_link_qt_macro(${target})
+ else()
+        # with a '+', everything right of the '+' becomes target_depends_str and is turned into a list
+ str_right_v(REALPATH PLUS_INDEX target_depends_str)
+ string(REPLACE "+" ";" target_depends "${target_depends_str}")
+
+ find_dir_plus_v(REALPATH target)
+ str_left_v(REALPATH PLUS_INDEX dir)
+
+ spark_add_library_path(${target} SHARED
+ ${dir}
+ ${${target}_ADD_SOURCE}
+ )
+ spark_debug_message(" [INCLUDE_DIRS]: ${dir} ${dir}/.. \n")
+ target_include_directories(${target} PUBLIC ${dir} ${dir}/..)
+ target_link_libraries(${target} ${target_depends})
+ endif(PLUS_INDEX LESS 0)
- # 目标名称为 DocTypeListView
- # 依赖为 JsonDeploy
- # set(REALNAME "")
- string(SUBSTRING "${REALPATH}" 0 ${RIGHT_PLUS} _REALPATH_DIR)
- string(SUBSTRING "${REALPATH}" ${LASTINDEX} ${REALNAME_LENGTH} REALNAME)
+ endforeach(REALPATH IN LISTS REALPATHS)
- message("---> 计算传入路径末尾/右移部分: --> 库名: ${REALNAME}")
+endmacro(spark_add_shared_library_realpaths)
- string(SUBSTRING "${REALPATH}" ${RIGHT_PLUS} ${REALPATH_LENGTH} Dependency)
- message("---> 计算传入路径末尾/右移部分: --> 库名: ${REALNAME} --> +部分: ${Dependency}")
+# spark_aux_source_paths
+# Turn the files in the given paths into a usable AUX source list
+macro(spark_aux_source_paths AUX_VAR)
+ set(${AUX_VAR} "")
+ set(${AUX_VAR}_PATHS ${ARGN})
- # plus_list(${Dependency} dependencies dependencies_len)
- string(REPLACE "+" ";" dependencies "${Dependency}")
- message("---> 计算传入路径末尾/右移部分: --> 库名: ${REALNAME} --> +部分: ${Dependency} --> 列表: ${dependencies} <-- ")
+ foreach(aux_path IN LISTS ${AUX_VAR}_PATHS)
+ # spark_debug_message("aux_path: ${aux_path}")
+ aux_source_directory(${aux_path} ${AUX_VAR})
+ endforeach(aux_path IN LISTS ${AUX_VAR}_PATHS)
+endmacro(spark_aux_source_paths AUX_VAR)
- message("---> 构建 ${REALNAME} -> ${REALNAME} ${_REALPATH_DIR}")
+# spark_file_glob
+# Uses the matching rules of file(GLOB); several patterns can be matched in one call
+#
+macro(spark_file_glob FGLOB_VAR)
+ set(${FGLOB_VAR} "")
+ set(${FGLOB_VAR}_PATHS ${ARGN})
- spark_add_library_path(${REALNAME} ${_REALPATH_DIR})
- # target_link_qt5(${REALNAME}) # 使用依赖的依赖或许也不错
+ foreach(fglob_path IN LISTS ${FGLOB_VAR}_PATHS)
- target_include_directories(${REALNAME} PUBLIC ${_REALPATH_DIR})
- target_link_libraries(${REALNAME} ${dependencies})
+ file(GLOB FGLOB_PATH_SRCS ${fglob_path})
+ foreach(fglob_path_src IN LISTS FGLOB_PATH_SRCS)
+ # spark_debug_message(" -> ${item}")
+ list(APPEND ${FGLOB_VAR} ${fglob_path_src})
+ endforeach(fglob_path_src IN LISTS FGLOB_PATH_SRCS)
- endif(RIGHT_PLUS LESS 0)
- endforeach(REALPATH IN LISTS REALPATHS)
+ endforeach(fglob_path IN LISTS ${FGLOB_VAR}_PATHS)
-endmacro(spark_add_library_realpaths)
+endmacro(spark_file_glob FGLOB_VAR)
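+
+# Usage sketch (patterns are illustrative):
+#   spark_file_glob(UI_SOURCES src/*.ui src/dialogs/*.ui)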
# spark_add_source_paths
 # Turn the files in the given paths into a usable source list
#
-macro(spark_add_source_paths SOURCE_VARIABLE_NAME)
- set(SOURCE_PATHS ${ARGN})
- set(${SOURCE_VARIABLE_NAME}_PATHS "")
- set(${SOURCE_VARIABLE_NAME} "")
- foreach(SOURCE_PATH IN LISTS SOURCE_PATHS)
- list(APPEND ${SOURCE_VARIABLE_NAME}_PATHS ${CMAKE_CURRENT_SOURCE_DIR}/${SOURCE_PATH})
- aux_source_directory(${SOURCE_PATH} _SOURCES)
- foreach(item IN LISTS _SOURCES)
- # message(" -> ${item}")
- list(APPEND ${SOURCE_VARIABLE_NAME} ${item})
- endforeach(item IN LISTS _SOURCES)
+macro(spark_add_source_paths SOURCE_VAR)
+ set(${SOURCE_VAR} "")
+ set(${SOURCE_VAR}_PATHS ${ARGN})
+
+ spark_aux_source_paths(${SOURCE_VAR} ${ARGN})
+ foreach(source_path IN LISTS ${SOURCE_VAR}_PATHS)
+ # list(APPEND ${SOURCE_VAR}_PATHS ${CMAKE_CURRENT_SOURCE_DIR}/${SOURCE_PATH})
+ # aux_source_directory(${SOURCE_PATH} _SOURCES)
+ # foreach(item IN LISTS _SOURCES)
+ # # spark_debug_message(" -> ${item}")
+ # list(APPEND ${SOURCE_VAR} ${item})
+ # endforeach(item IN LISTS _SOURCES)
# file(GLOB HEADER_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${SOURCE_PATH}/*.h)
# foreach(item IN LISTS HEADER_LIST)
- # # message(" -> ${item}")
- # list(APPEND ${SOURCE_VARIABLE_NAME} ${item})
+ # # spark_debug_message(" -> ${item}")
+ # list(APPEND ${SOURCE_VAR} ${item})
# endforeach(item IN LISTS HEADER_LIST)
- file(GLOB UI_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${SOURCE_PATH}/*.ui)
- foreach(item IN LISTS UI_LIST)
- # message(" -> ${item}")
- list(APPEND ${SOURCE_VARIABLE_NAME} ${item})
- endforeach(item IN LISTS UI_LIST)
- endforeach(SOURCE_PATH IN LISTS SOURCE_PATHS)
-endmacro(spark_add_source_paths SOURCE_VARIABLE_NAME)
+ file(GLOB UI_SRCS RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${source_path}/*.ui)
+ foreach(ui_src IN LISTS UI_SRCS)
+ # spark_debug_message(" -> ${item}")
+ list(APPEND ${SOURCE_VAR} ${ui_src})
+ endforeach(ui_src IN LISTS UI_SRCS)
+ endforeach(source_path IN LISTS ${SOURCE_VAR}_PATHS)
+endmacro(spark_add_source_paths SOURCE_VAR)
+
+
+# spark_add_library_file_glob
+ #
+macro(spark_add_library_file_glob _lib_name)
+ spark_file_glob(${_lib_name}_SOURCES ${ARGN})
+ spark_add_library(${_lib_name} ${${_lib_name}_SOURCES})
+endmacro(spark_add_library_file_glob _lib_name)
+
+
+
+# spark_add_executable_source <target> ...
+# An extension macro for the one-line-one-executable style build
+# the listed sources are additionally included at build time
+macro(spark_add_executable_source target)
+ set(${target}_ADD_SOURCE ${ARGN})
+endmacro(spark_add_executable_source target)
+
+# Shared dir-handling logic for spark_add_executable_realpaths
+macro(_handle_spark_add_executable_realpaths_if_dir_empty_macro)
+ if("${dir}" STREQUAL "")
+ spark_add_executable(${target}
+ ${${target}_ADD_SOURCE}
+ )
+ else()
+ spark_add_executable_path(${target}
+ ${dir}
+ ${${target}_ADD_SOURCE}
+ )
+ endif("${dir}" STREQUAL "")
+endmacro(_handle_spark_add_executable_realpaths_if_dir_empty_macro)
+
+# spark_add_executable_realpaths
+# Build from the given items
+# accepted values: executable-target:path-list
+# accepted values: executable-target:path-list+dependencyA+dependencyB
+macro(spark_add_executable_realpaths)
+
+ set(REALPATHS ${ARGN})
+ foreach(REALPATH IN LISTS REALPATHS)
+
+        # find the ':' index (the colon-finding helper)
+ find_colon(${REALPATH} COLON_INDEX)
+
+ if(COLON_INDEX LESS 0)
+        # do nothing
+ else()
+        # a ':' was found; extract the target name
+ # string(SUBSTRING "${REALPATH}" 0 ${COLON_INDEX} REALTARGET)
+ find_colon_v(REALPATH COLON_INDEX)
+ str_left_v(REALPATH COLON_INDEX target)
+ str_right_v(REALPATH COLON_INDEX COLON_REMAIN)
+        # message(FATAL_ERROR "Building: ${target}") # verified
+
+ endif(COLON_INDEX LESS 0)
+
+    # find the '+' index (the plus-finding helper)
+ find_plus_v(REALPATH PLUS_INDEX)
+
+ if(PLUS_INDEX LESS 0)
+        # with no '+' at all, this is a plain directory-based build
+ set(dir ${COLON_REMAIN})
+ # spark_add_executable_path(${target}
+ # ${dir}
+ # ${${target}_ADD_SOURCE}
+ # )
+ _handle_spark_add_executable_realpaths_if_dir_empty_macro()
+ _handle_spark_target_link_qt_macro(${target})
+ else()
+        # with a '+', everything right of the '+' becomes target_depends_str and is turned into a list
+ str_right_v(REALPATH PLUS_INDEX target_depends_str)
+ string(REPLACE "+" ";" target_depends "${target_depends_str}")
+
+        # then extract dir from the remaining content
+ find_colon_plus_v(REALPATH dir)
+ # spark_add_executable_path(${target}
+ # ${dir}
+ # ${${target}_ADD_SOURCE}
+ # )
+ _handle_spark_add_executable_realpaths_if_dir_empty_macro()
+ target_include_directories(${target} PUBLIC ${dir} ${dir}/..)
+ target_link_libraries(${target} ${target_depends})
+ endif(PLUS_INDEX LESS 0)
+ endforeach(REALPATH IN LISTS REALPATHS)
+
+endmacro(spark_add_executable_realpaths)
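+
+# A minimal usage sketch (hypothetical target, directory and library names):
+#   spark_add_executable_realpaths(app:src+hello+say)
+#   # builds the executable 'app' from src/ and links the previously
+#   # created 'hello' and 'say' library targets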
+
+
+# "One line, one library" builds
+# 1. Build a library from a given directory path
+# src/widgets/DocTypeListView
+#             ^ the directory name is used as the target name
+# all files under the directory become the library's source files
+#
+# 2. Build a library from a given directory path, depending on the items listed after it
+# src/widgets/MaintainerInfoView+DocTypeListView+...
+#                                ^ this library becomes a dependency of the MaintainerInfoView library
+#                               ^ the '+' character is treated as the separator of the dependency list
+
+# "One line, one executable target"
+# 1. Build an executable from a given directory path (impractical: an executable is rarely a whole directory)
+# 2. Build an executable from a given file path (perhaps workable)
+# 3. Build an executable from a given file name ()
+# 4. Build an executable from a given naming rule (target:dir:dir+depend+depend...)
+
+
+# "One line, one target" concept: integration (one line, one library + one line, one executable target)
+# 1. Build a target from a given directory (works for both library and executable)
+# 2. Build a target with the same naming rule, plus a one-letter type prefix:
+# s[tatic], d[ynamic], t[executable]
+# static library s:dir+depend+depend...
+# shared library d:dir+depend+depend...
+# executable     t:<target>:dir+depend+depend...
+#                  ^ executable target name
+
+# One line, one target
+# spark_add_target_realpaths <tag> [realpaths]
+# realpaths:
+ # s: static (s:src/libs/hello)
+ # d: shared (d:src/libs/say)
+ # t: target (t:<target>:src+hello+say)
+# See also:
+ # spark_add_executable_realpaths
+ # spark_add_shared_library_realpaths
+ # spark_add_library_realpaths
+macro(spark_add_target_realpaths tag)
+ set(${tag}_ARGN ${ARGN})
+
+ foreach(item IN LISTS ${tag}_ARGN)
+ str_left(${item} 1 item_type)
+ str_right(${item} 1 item_val)
+
+ if("${item_type}" STREQUAL "t")
+ set(item_message "executable")
+ elseif("${item_type}" STREQUAL "d")
+ set(item_message "shared library")
+ elseif("${item_type}" STREQUAL "s")
+ set(item_message "static library")
+ endif("${item_type}" STREQUAL "t")
+
+ spark_debug_message("Tag: [${tag}] building ${item_val}, type: ${item_message}")
+ spark_debug_message(" * ${item_val}")
+
+ if("${item_type}" STREQUAL "t")
+ spark_add_executable_realpaths(${item_val})
+ elseif("${item_type}" STREQUAL "d")
+ spark_add_shared_library_realpaths(${item_val})
+ elseif("${item_type}" STREQUAL "s")
+ spark_add_library_realpaths(${item_val})
+ endif("${item_type}" STREQUAL "t")
+
+ spark_debug_message("")
+
+ endforeach(item IN LISTS ${tag}_ARGN)
+
+endmacro(spark_add_target_realpaths tag)
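+
+# A minimal usage sketch (hypothetical tag, directories and target names),
+# combining the three prefixes documented above:
+#   spark_add_target_realpaths(BASE
+#       s:src/libs/hello
+#       d:src/libs/say
+#       t:app:src+hello+say)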
diff --git a/cmake/SparkTranslatorConfig.cmake b/cmake/SparkTranslatorConfig.cmake
index 46de519..8714e12 100644
--- a/cmake/SparkTranslatorConfig.cmake
+++ b/cmake/SparkTranslatorConfig.cmake
@@ -1,32 +1,49 @@
cmake_minimum_required(VERSION 3.5.1)
-find_package(Qt5LinguistTools)
-
-file(GLOB SPARK_TRANSLATIONS ${CMAKE_SOURCE_DIR}/translations/*.ts)
-
-message("================ Translations ================")
-foreach(item IN LISTS SPARK_TRANSLATIONS)
- message("-> ${item}")
-endforeach(item IN LISTS SPARK_TRANSLATIONS)
-
-qt5_add_translation(SPARK_QM_TRANSLATIONS
- ${SPARK_TRANSLATIONS})
-
-file(WRITE ${CMAKE_BINARY_DIR}/SPARK_QM_TRANSLATIONS "")
-foreach(item IN LISTS SPARK_QM_TRANSLATIONS)
- file(APPEND ${CMAKE_BINARY_DIR}/SPARK_QM_TRANSLATIONS "${item}\n")
-endforeach(item IN LISTS SPARK_QM_TRANSLATIONS)
-
-message("translator(ts -> qm):")
-foreach(item IN LISTS SPARK_QM_TRANSLATIONS)
- message("-> ${item}")
-endforeach(item IN LISTS SPARK_QM_TRANSLATIONS)
-
-
-# 注意,必须将 SPARK_QM_TRANSLATIONS 加入到 add_executable 参数中才能在编译时生成只有原文的ts文件
-
-# qt5_create_translation
- # ts文件会在 make clean 或重新编译的时候一并被删除再编译的时候生成全新的ts原有的翻译会丢失万分注意!
-
-# qt5_add_translation
- # 此宏比较稳定
+# translator_qt5 _qmvar [... *.ts]
+macro(translator_qt5 _qmvar)
+
+ set(${_qmvar}_ARGN ${ARGN})
+ file(GLOB ${_qmvar}_TS_FILES ${${_qmvar}_ARGN})
+
+ find_package(Qt5LinguistTools)
+ qt5_add_translation(${_qmvar}
+ ${${_qmvar}_TS_FILES})
+
+ spark_debug_message("> QT Translation: ${_qmvar}")
+ file(WRITE ${CMAKE_BINARY_DIR}/${_qmvar} "")
+ foreach(item IN LISTS ${_qmvar})
+ file(APPEND ${CMAKE_BINARY_DIR}/${_qmvar} "${item}\n")
+ spark_debug_message(" ${item}")
+ endforeach(item IN LISTS ${_qmvar})
+
+ # Note: SPARK_QM_TRANSLATIONS (or ${_qmvar}) must be added to the add_executable arguments so that the source-text-only .ts files are generated at build time
+
+ # qt5_create_translation
+ # .ts files are deleted on make clean or on a rebuild, and brand-new .ts files are generated on the next build; existing translations are lost. Be very careful!
+
+ # qt5_add_translation
+ # this macro is comparatively stable
+endmacro(translator_qt5 _qmvar)
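+
+# A minimal usage sketch (hypothetical variable and paths); remember to add
+# the resulting *.qm list to the executable's sources as noted above:
+#   translator_qt5(APP_QM_FILES ${CMAKE_SOURCE_DIR}/translations/*.ts)
+#   spark_add_executable(app ${APP_SOURCES} ${APP_QM_FILES})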
+
+
+# translator_qt6 _qmvar [... *.ts]
+macro(translator_qt6 _qmvar)
+ # TODO: Qt6 translation support not yet implemented
+endmacro(translator_qt6 _qmvar)
+
+# Factored-out dispatch logic for translator_qt5 / translator_qt6
+macro(_handle_spark_translator_qt_macro _outvar)
+ if(SPARK_FIND_QT5)
+ translator_qt5(${_outvar} ${ARGN})
+ endif(SPARK_FIND_QT5)
+
+ if(SPARK_FIND_QT6)
+ translator_qt6(${_outvar} ${ARGN})
+ endif(SPARK_FIND_QT6)
+endmacro(_handle_spark_translator_qt_macro _outvar)
+
+# translator_qt _qmvar [... *.ts | match]
+macro(translator_qt)
+ _handle_spark_translator_qt_macro(${ARGN})
+endmacro(translator_qt)
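+
+# A minimal usage sketch (hypothetical variable and pattern); dispatches to
+# translator_qt5 or translator_qt6 depending on SPARK_FIND_QT5/SPARK_FIND_QT6:
+#   translator_qt(APP_QM_FILES ${CMAKE_SOURCE_DIR}/translations/*.ts)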
diff --git a/cmake/linuxdeployqt-help b/cmake/linuxdeployqt-help
index 12ac506..1b72fda 100644
--- a/cmake/linuxdeployqt-help
+++ b/cmake/linuxdeployqt-help
@@ -45,4 +45,4 @@ Plugins related to a Qt library are copied in with the library.
See the "Deploying Applications on Linux" topic in the
documentation for more information about deployment on Linux.
-zinface@zinface-PC:/tmp/tmp.5gmZKUqn9s$
\ No newline at end of file
+zinface@zinface-PC:/tmp/tmp.5gmZKUqn9s$
\ No newline at end of file
diff --git a/cmake/spark-appimage.desktop.in b/cmake/spark-appimage.desktop.in.txt
similarity index 83%
rename from cmake/spark-appimage.desktop.in
rename to cmake/spark-appimage.desktop.in.txt
index 228a84a..491716d 100644
--- a/cmake/spark-appimage.desktop.in
+++ b/cmake/spark-appimage.desktop.in.txt
@@ -6,4 +6,4 @@ Icon=default
Comment=@APP_COMMENT@
Terminal=true
Type=Application
-Categories=@APP_CATEGORIES@
\ No newline at end of file
+Categories=@APP_CATEGORIES@;
\ No newline at end of file
diff --git a/cmake/package-deb.descript b/cmake/spark-deb-package.descript
similarity index 91%
rename from cmake/package-deb.descript
rename to cmake/spark-deb-package.descript
index 2b485d1..f352d6c 100644
--- a/cmake/package-deb.descript
+++ b/cmake/spark-deb-package.descript
@@ -1,6 +1,6 @@
# 注释行(使用方式)
-# find_package(DebPackage PATHS ${CMAKE_SOURCE_DIR})
-# add_package_descript(cmake/package-deb.descript)
+# find_package(SparkDebPackage PATHS ${CMAKE_SOURCE_DIR})
+# add_package_descript(cmake/spark-deb-package.descript)
# 打包后的文件名称
# FileName: 待定
@@ -38,7 +38,9 @@ Maintainer: shenmo <shenmo@spark-app.store>
# 软件包主页
Homepage: https://www.spark-app.store/
# 软件包建议
-Recommends:
+Recommends:
+# Package conflicts
+Conflicts:
# 软件包描述信息
Descrition: Spark Store
A community powered app store, based on DTK.
diff --git a/cmake/spark-desktop.desktop.in b/cmake/spark-desktop.desktop.in.txt
similarity index 88%
rename from cmake/spark-desktop.desktop.in
rename to cmake/spark-desktop.desktop.in.txt
index 0fa070b..75663a2 100644
--- a/cmake/spark-desktop.desktop.in
+++ b/cmake/spark-desktop.desktop.in.txt
@@ -7,5 +7,7 @@ Type=@APP_TYPE@
Exec=@APP_EXECUTE_PATH@
Icon=@APP_EXECUTE_ICON_PATH@
Categories=@APP_CATEGORIES@
+MimeType=@APP_MIME_TYPE@
+
+# Generated from the DesktopGenerater component of the z-Tools toolkit
-# Generated from the DesktopGenerater component of the z-Tools toolkit
\ No newline at end of file
--
2.33.1