spark-store/patchs/zinface-community-cmake-build-system.patch

2461 lines
100 KiB
Diff
Raw Blame History

This file contains ambiguous Unicode characters

This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

From 5c78aff9b5be86cf3d1874a71f2045b1362e0572 Mon Sep 17 00:00:00 2001
From: zinface <zinface@163.com>
Date: Sun, 11 Dec 2022 22:27:23 +0800
Subject: [PATCH 1/7] =?UTF-8?q?repo:=20=E4=B8=80=E6=AC=A1=E6=80=A7?=
=?UTF-8?q?=E5=AF=BC=E5=85=A5=20spark=20=E9=AA=A8=E6=9E=B6=E4=BB=A5?=
=?UTF-8?q?=E5=8F=98=E4=B8=BA=20cmake=20=E6=9E=84=E5=BB=BA?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
TODO: 需处理 deb 安装脚本的问题
Signed-off-by: zinface <zinface@163.com>
---
.gitignore | 34 +++
CMakeLists.txt | 93 ++++++++
Makefile | 60 +++++
assets/spark.png | Bin 0 -> 4959 bytes
cmake/DebPackageConfig.cmake | 327 +++++++++++++++++++++++++++
cmake/SparkAppimageConfig.cmake | 133 +++++++++++
cmake/SparkBuildGraphviz.cmake | 8 +
cmake/SparkDesktopMacros.cmake | 35 +++
cmake/SparkEnvConfig.cmake | 8 +
cmake/SparkFindDtkConfig.cmake | 11 +
cmake/SparkFindLibraries.cmake | 7 +
cmake/SparkFindQt5Config.cmake | 154 +++++++++++++
cmake/SparkFindQt6Config.cmake | 24 ++
cmake/SparkInstallMacrosConfig.cmake | 132 +++++++++++
cmake/SparkMacrosConfig.cmake | 129 +++++++++++
cmake/SparkMacrosExtendConfig.cmake | 197 ++++++++++++++++
cmake/SparkTranslatorConfig.cmake | 27 +++
cmake/linuxdeployqt-help | 48 ++++
cmake/package-deb.descript | 45 ++++
cmake/spark-appimage.desktop.in | 9 +
cmake/spark-desktop.desktop.in | 11 +
21 files changed, 1492 insertions(+)
create mode 100644 CMakeLists.txt
create mode 100644 Makefile
create mode 100644 assets/spark.png
create mode 100644 cmake/DebPackageConfig.cmake
create mode 100644 cmake/SparkAppimageConfig.cmake
create mode 100644 cmake/SparkBuildGraphviz.cmake
create mode 100644 cmake/SparkDesktopMacros.cmake
create mode 100644 cmake/SparkEnvConfig.cmake
create mode 100644 cmake/SparkFindDtkConfig.cmake
create mode 100644 cmake/SparkFindLibraries.cmake
create mode 100644 cmake/SparkFindQt5Config.cmake
create mode 100644 cmake/SparkFindQt6Config.cmake
create mode 100644 cmake/SparkInstallMacrosConfig.cmake
create mode 100644 cmake/SparkMacrosConfig.cmake
create mode 100644 cmake/SparkMacrosExtendConfig.cmake
create mode 100644 cmake/SparkTranslatorConfig.cmake
create mode 100644 cmake/linuxdeployqt-help
create mode 100644 cmake/package-deb.descript
create mode 100644 cmake/spark-appimage.desktop.in
create mode 100644 cmake/spark-desktop.desktop.in
diff --git a/.gitignore b/.gitignore
index e77dab8..5571870 100644
--- a/.gitignore
+++ b/.gitignore
@@ -52,3 +52,37 @@ debian/files
debian/*.substvars
debian/spark-store
+# Ignore the build directory generated by the VS Code CMake extension
+build/
+# Ignore the cache directory generated by the VS Code clangd extension
+.cache
+
+# Created by https://www.toptal.com/developers/gitignore/api/cmake
+# Edit at https://www.toptal.com/developers/gitignore?templates=cmake
+
+### CMake ###
+CMakeLists.txt.user
+CMakeCache.txt
+CMakeFiles
+CMakeScripts
+Testing
+Makefile
+cmake_install.cmake
+install_manifest.txt
+compile_commands.json
+CTestTestfile.cmake
+_deps
+
+### CMake Patch ###
+# External projects
+*-prefix/
+
+# End of https://www.toptal.com/developers/gitignore/api/cmake
+
+!/Makefile
+# Ignore the build directory generated by the VS Code CMake extension
+build/
+# Ignore the cache directory generated by the VS Code clangd extension
+.cache
+# Ignore the make package/copytosource
+*.deb
\ No newline at end of file
diff --git a/CMakeLists.txt b/CMakeLists.txt
new file mode 100644
index 0000000..5864b54
--- /dev/null
+++ b/CMakeLists.txt
@@ -0,0 +1,93 @@
+
+cmake_minimum_required(VERSION 3.5.1)
+
+project(spark-store LANGUAGES CXX VERSION 4.0.0)
+
+include(cmake/SparkEnvConfig.cmake) # 设置一些有关QT构建的开关
+include(cmake/SparkMacrosConfig.cmake) # 声明了一些 spark_ 开头的 macro 宏
+include(cmake/SparkFindLibraries.cmake) # 提供了基于 spark_ 宏生成的 target_link_<lib> 用于目标链接 <lib> 的库
+include(cmake/SparkFindQt5Config.cmake) # 提供了 target_link_qt5 用于目标链接 qt5 的库
+include(cmake/SparkFindDtkConfig.cmake) # 提供了 target_link_dtk 用于目标链接 dtk 的库
+include(cmake/SparkTranslatorConfig.cmake) # 提供了 qt5 ts转qm 的操作,最终生成 SPARK_QM_TRANSLATIONS 变量用于构建可执行文件时参与编译
+include(cmake/SparkMacrosExtendConfig.cmake) # 使用了 spark_ 宏基于已提供的宏参数自动展开构建可执行目标文件
+include(cmake/SparkInstallMacrosConfig.cmake) # 提供了 spark_install 开头的 macro 宏用于安装 target、file、program、directory、changelog 等内容
+
+# 资源文件路径
+set(QRC_SOURCES "src/assets/assets.qrc")
+
+include_directories(src)
+
+# 基于传入的项进行构建
+# 可接受的值为: 路径列表
+# 可接受的值为: 路径列表+依赖库A+依赖库B
+spark_add_library_realpaths(
+ src/dbus
+ src/utils+dbus
+ src/backend+utils
+ src/widgets/common+backend
+ src/widgets+common
+ src/pages+widgets
+)
+
+target_link_qt5_dbus(dbus)
+target_link_qt5_Concurrent(common)
+target_link_qt5_Concurrent(backend)
+target_link_qt5_WebEngineWidgets(common)
+
+spark_add_executable_path(${PROJECT_NAME} src
+ ${QRC_SOURCES} ${SPARK_QM_TRANSLATIONS}
+)
+target_link_dbus(${PROJECT_NAME})
+target_link_pages(${PROJECT_NAME})
+target_link_dtk(${PROJECT_NAME})
+
+
+spark_add_executable_path(spark-dstore-patch src/spark-dstore-patch)
+target_link_qt5(spark-dstore-patch)
+
+
+# 安装主程序 spark-store 与 spark-dstore-patch
+spark_install_target(/opt/durapps/${PROJECT_NAME}/bin
+ ${PROJECT_NAME}
+ spark-dstore-patch)
+
+# 安装 systemd 服务(Spark Store更新通知程序)
+spark_install_file(/usr/lib/systemd/system/
+ pkg/usr/lib/systemd/system/spark-update-notifier.service)
+
+# 安装 polkit 操作(运行 ss-do-upgrade-worker 需要权限)
+spark_install_file(/usr/share/polkit-1/actions/
+ pkg/usr/share/polkit-1/actions/store.spark-app.ss-do-upgrade-worker.policy)
+
+# 安装 spark-store 所需要的 tool 脚本
+spark_install_directory(/opt/durapps/${PROJECT_NAME}/bin
+ tool/*)
+
+# 安装 bash_completion
+spark_install_file(/usr/share/bash-completion/completions
+ pkg/usr/share/bash-completion/completions/aptss)
+
+# 安装 desktop 文件
+spark_install_file(/usr/share/applications
+ pkg/usr/share/applications/spark-store.desktop)
+
+# 安装 icon 文件
+spark_install_file(/usr/share/icons/hicolor/scalable/apps
+ pkg/usr/share/icons/hicolor/scalable/apps/spark-store.svg)
+
+# 安装安装反馈脚本 feedback.sh 到 /tmp/spark-store-install
+spark_install_program(/tmp/spark-store-install
+ pkg/tmp/spark-store-install/feedback.sh)
+
+# 安装翻译生成的 qm 文件到 /usr/share/spark-store/translations
+spark_install_file(/usr/share/spark-store/translations
+ ${SPARK_QM_TRANSLATIONS})
+
+# 安装 changelog 文件,将自动使用 gzip 压缩
+spark_install_changelog(${CMAKE_SOURCE_DIR}/debian/changelog)
+
+include(cmake/SparkBuildGraphviz.cmake)
+
+# 注释行(使用方式)
+find_package(DebPackage PATHS ${CMAKE_SOURCE_DIR})
+add_package_descript(cmake/package-deb.descript)
\ No newline at end of file
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..2df9883
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,60 @@
+CPUS=$(shell nproc)
+CALENDAR=$(shell date '+%Y%m%d')
+OSID=$(shell lsb_release -si)
+OSRELEASE=$(shell lsb_release -sr)
+SUFFIX=
+ifneq ("$(OSID)", "")
+SUFFIX=_$(OSID)$(OSRELEASE)
+endif
+
+PROJECT_NAME=spark-store
+
+all:
+ mkdir -p build
+ cd build && cmake ..
+ cd build && make -j$(CPUS)
+
+run: all
+ exec $(shell find build/ -maxdepth 1 -type f -executable | grep $(PROJECT_NAME))
+
+debug:
+ mkdir -p build
+ cd build && cmake -DCMAKE_BUILD_TYPE=Debug ..
+ cd build && make -j$(CPUS)
+
+release:
+ mkdir -p build
+ cd build && cmake -DCMAKE_BUILD_TYPE=Release -DPACKAGE_SUFFIX="$(SUFFIX)" ..
+ cd build && make -j$(CPUS)
+
+package: release
+ cd build && make package
+ tree build/_CPack_Packages/Linux/DEB/$(PROJECT_NAME)-*
+ dpkg-deb --contents build/$(PROJECT_NAME)_*$(CALENDAR)*$(SUFFIX).deb
+ # cd build/_CPack_Packages/Linux/DEB/$(PROJECT_NAME)_*$(CALENDAR)*$(SUFFIX).deb && find .
+
+builddeps:
+ cd build && make builddeps
+
+cpus:
+ @echo "CPU数量: $(CPUS)"
+
+copytosource:package
+ cp build/$(PROJECT_NAME)_*$(CALENDAR)*.deb .
+
+# 进入 qdebug 模式,在 deepin 中默认被禁用,可 env | grep QT 查看,并在 /etc/X11/Xsession.d/00deepin-dde-env 配置中已定义
+# 1. 禁止 qt 的 debug 打印: qt.*.debug=false
+# qt.qpa.input.events
+# qt.qpa.events
+# 2. 禁止 dtk 的 debug 打印: dtk.*.debug=false
+# dtk.dpluginloader
+# 3. 禁止 qtcreator 本身的 debug 打印
+# qtc.autotest.testcodeparser
+# qtc.clangbackend.server
+# ...
+# 4. 关闭其它的太麻烦了,直接只启用本地 debug
+# .debug=true
+enter-qdebug-mode:
+ # 进入新的 bash 环境
+ @# export QT_LOGGING_RULES=".debug=true; qt.*.debug=false; dtk.*.debug=false; dde.*.debug=false; qtc*=false; " && bash
+ export QT_LOGGING_RULES=".debug=true" && bash
\ No newline at end of file
diff --git a/assets/spark.png b/assets/spark.png
new file mode 100644
index 0000000000000000000000000000000000000000..544e2c7cff5f70894e27a7a717d4a62120630b7a
GIT binary patch
literal 4959
zcmZ9Qc|26n`^T?!hOvx&4Pz%{EZLPYvTq^E5@pRcGGvX$AW~VT?0YC8CHu}WL)I*j
zCA+d^OJw_v@Avn|@AZ4V?z!iAo%_7*bMCpH^W6J>;>=8R>1nxX0RW)a*F)YU^_YtW
zrY4=^DWIpMj{2UVE)p04)_~)Bx;9qQ2n|Z_wm$&K2wgO?prUJmq(Kl`-&hB<LJMYK
z;H4PWY#<GBp>=G~+P>c2E<R{L`=N^y+QnJmkq6pcKv&<`%=!rf2LLb=^pRSYLEks+
zJuEHVG9I`5p0%~o`xwP(T(A&BWF})#9Om3~bQ_b(#@9SAhwGar%cpFq%$%?GJzQBD
z^t%0dF=*wD8muRw+g(o51m|ccnE8Hz9D~pmFQqMqR8NPt`aZ@95GBoY%)<s5UuG3-
zH8&0V|Ja-Yuky}6rHyUK_RAqC-|kS+r+)ehbx%+Lhha7VgMsr_00aWHDT62$d9(aF
z!U1cV`EuMgw}GD4t)>7|<?qk1QBcY33S(8F5ArwUXi7fPCW(YRo7<tLi0uxDVj0(x
z_<)l*SC*HNybE?BH(Rz$LC4rr3wa955G0X;EJy0xN_ATM3~{@N^o9Ne<uYS1%xvjV
zouES0hFd~~oOYam0PHA>m)F7F$6%61XMsolUg0zXycp@v@GZ1K>rt9xd?C!>%*+W7
z%C&esu?1QvJQKjm(&8q^?oTJ)%8V0?)|OjY&d?Gr6jy6+xCM5i6!*6NB4P1KRCNm^
zm+NR*M%4c-!-n9r0lIJ!D)W+&T9oD)Dem^7SRik@V(WfPQ@fFKyaf2lH?CLvA5uka
za&warZ&7cj6r);vpGc6CZ?~=95<VC}D!vy*f{QjEl&9OWL;iubp?H5yvEdB%@e6NX
z57kp^2wVcQ3u~hI>k;2K+aF*1m@^PTnvH2@U9bunlhI3nmsfK^BZU;4=_*3}V}PoZ
zEP*COH$^QdyIwzO=Shp{b@@LAC7u=@nYJ8)oEoIduWITqMn>MScBnM|V;V8ajW%>c
z2|9_!;}u5SRyWpkQzR8giy<WH+QY*7;#%0KMPjz2J^$`S;Aj2Q$(O|;?s2!}W-s;l
zu^~Jf@3^eIMr&D_;mxvB-21`xyjo8Mh`|)KZ&GW@tY9Ko+xhEH9q-}Ic$pF6Rb{$J
z6WRQGL}`*GY6-rGR-l>|l$Ivq`@U%TZM4}hv^OUI<i-$GP!{(iq3D;wT5100{_<z8
z=1;Ad?c^U8>k_s0z#=s!u~04W3Iv&C;FbL%51jwmUPHQ@0l~qZwrDUlHbTaRh}I7O
zg75zlU9YVkytJ~+#_*>+av3b*ZLbM`=lrm(GyKlhzDKd&-~YS-XuB{i6aEdZrmT8V
z5=&CIeIGmv+apvfRY7`h1Zf4_L_-7KYf+zDaL#{K)Hw61>q|2q>%TNiMk|sXtmY*1
z`E77tq7vBO#3uo(t!jj^QMa-dh_<S@?yNd3zMLp*QM?3}j{(IjCNs>__m=cxM&AL^
zdT&14OSgK$%!-|9_M)?`i4B)w7eegd!IoH)mWyyhiqc1~EPAqoCCYEgl(hFM{^Ftj
z%GS_$^uT<GuMO-c^$e_!ZI<)tqNempDT6iTHz|9|_cjckvM6YmeEHw;h;Vg`YvL(_
z(jqSectWzGVyL@+N;(xwEU<0OHRyt^OcZ<Qbm(M^U%g>6K)$jtUK69tc1oS-cV3H(
zyzVwJW(p>4KWuO@dx-z65M|t#j~xmYkY<&V$cV9IcL@+9-%Akb(9C^=$km21|8lq_
za=b^e+n~SA!s?z86LD4&0RU2Vl|bwCrvOB*uG>-oaP+AaCy?IW;MZ7A&oS_=puC#x
zTSjKS2X}HZv)}oKicKX7<~q>8hy|~*HpzV*Y^DRSBNNv-=<Mz7m2X=<O(`+?bKF-{
z>R$KtX-5a5FE<rK_;&5d64zhwYmB)DihD|kGMY$s$ypA4DYTWSd;03~Stbic2f`sB
zAwh%dMJa#xYuO@4+Y^@mQ7demfUh*~%iSD#4K60n5j(6;z0A87Nb@>!_Wj#!o0njA
z8JkG4+{e@({dOMVP51|1y`CGI?{rMiLdMQTV)8ojeNwqrgP)*5q}hq9`jG=rE*1L0
z=0gY)xu5I$L0nYIwuM<@k7MqNbid7Ko1mz?Wtyzjo`jUhJJU|J`Jq_(fZ+l%ogp5Y
zIDI`mBjycCE3h-oAO06y%KHv_U0fWu7`0F)$u5yL6u~KnhuEC++z(})gQ{w9X}O1^
ziig+EPJfUA4&ecpZ?0Sc06XsoNMjeO3Wcj3%MW32I2nYaNKiwF#jknm8fO-R8aEHO
zS;P_Zcdx7H>7UoVjHFijGh;WVUGy??)C=6c|6BJ?%amgTP(}HCU2Z0Y^Sx|AO%6>B
z7k8KD-1)Kga0b7Xt>)Jmz><_Svi*-IB6_0ky0@X$d%1Z$EAcD*>w~VW$*SRrQOa6E
z)cKJdzv;DO-USxsZnV8sfR>g0;TF*eXKlHEv~kBDQlVHocet}SvAsd<?82yC)LQ;W
z)r39#Wqj$`6kE-tmwVMDs-}*UN680yV|?4qFL>I1E^G1doNa$er}pksd?U1pF|_rB
zSIJIEOQLI~-<DjQJQ8&;nMSY8T27<NNl5c#E}S#wNzCQvhkqlEIn<jTityd}$UF=$
z<XI5Ea=B|>J9cO}P)Oz~yJ4z~jwPCIW7GR>tKG}oJGSkdoz};#7?(Sg>_x?Y_Q?4k
zZ$BO!ta2Sdt}R&N@%WDQoxFGNn8p;VW$7qF|8D7og^|0?JUW*}Y|jx!#LUqPlwg=m
zRt9aEBD1%*_tO_~T=|(R%DbCN?p_VFK+vzERN1}RWAZ6OAYYD(J}CcnVj9+as%G)o
z;NJXAE1<2%q6D=&D&c&^K7J$1uCL+uS>u|xgNGNU%c~o5r72Q`D?M*NaI@;bFQ#CT
zV0IV|1Ll4vb*8mCG70}W_>J!pbL`q(Mk#Luq5Ho-?sljN6JfW)-Tyt?3`DZ%L<hO-
zm1%2QcpEFqC@DA&Y)noZo<L3wmXhWO7T5CLyr%;aQ&VF{Kezq9Z=e-t6lGcYBlO&>
z>1cfFaA%b9aDM4sjzPiuCSI52<vO<;a(uRp40{~Kn6LBMmPVEeHJcOgypIw^HdR}0
zm6LbOEkgM=13_yKJS&9@eZ|7<X9r<lqI?mrld4&}+y)ocsy*K}qL^g986$Oc82=ro
zuC2p`f>j;PmRFq03dvd{@)=@Z9{wG$dz~4@#t3rj;1m%CZ{=~k9~XcBC6v7Nc<RUf
za0fm?Azz;LlJ)Y#jjF*)x4$yVmk#DrhOl)`Kk7jI4dJu{T@8Dun*0WoGkW%6p%IA#
zwzFR1?;{r8naAakq}Ot?>kqV@1WVYQ<43f3{9(XPWS>EN{EO~*-CK*bt;ZS;!OLuY
z87ft)RVyp(Cw{BC?#*W-X}?E8n+mG`{Ikbd@Mf3BkFQ_T3aIyS+g0*qIBMqV83`?o
zX*3SoyLQT=V65w9M3)n><3cpp4wMiSNQ6I0WTSfL@yq6O5RJ^;rpPEzOSf?<#OEal
z#JE8?_%;i?y7A-hXB(+R7p{hi!m)9NPT7A;G|icpHm~w<e;6$!Y4Fb<`kxOQK~ik7
z2qb>S^k`I({`l+|qO9g~*i~G*9imYv^HH~-3PeB-S_xwv+Y2l=g6>lXZk|B1v+dn|
zeA>r~Z}f3>@r<u`RNC~JSzXHVKWuV9PJevXSM)4ETvGjoXTP+@Sm!4fMnM%7F8Iff
zb>Byy3Q<u#`SlY{K^5Dg)A{NK6p<$`7p6%c+oJK*xb-{t=b^TC*q$w1kQI7~<=I#n
zUsrx-EC2+C9;adF_CoSYvo)?|_N_Pw%krKb00Zo)kx)$aOO2R5(5K_Eb(0c#$B5sg
z{0z_oksNnVsOYo_E#b$gov$vI);T=pzwd=%0b+vx5R`k((5OLUU}$(4UhG+Kr*w?}
zJ?oUew851nEwl58vi2;*E5|K?7<%F-)kCDbPq*w+RQGiP>&w80&#K>pvR%5geJnqq
z#YL_Lw5jl$vkg7ZRPvcNku1Nz{`lM2`2I<R--ltN0hN~4vHK*U?_%TU@<IG~k|O@%
znoTwT&;f1hd-Fe&&gpyQWo1oNlp#yTCg)~Y#%qU+b`54q1>+BH-`3Ba?R1ny-~VYe
z9l%0>oH`pOV?m#)LN)yxXMS#M>?$?Ja6PLFE);UCNl#M06nrh>lc`K1PMyM&Ka>tI
zyKVLSSwJ-z2RX<M$Fc<(rS~fTW{%nyS1=}&<eqVbw)PSJ<ep)k90YElR8ezf%^Lc)
zK7m(V2eK4@R)4svw76Xh8k(GJt$8#twX<q^25G1`_B?J<$Kz*ToHDUP1f|Y;lv8Ey
z@>NRh*UcPO%t2{i@X_0uuwJ6@h;-=Qef3g6X8cFUHPoCZIv{}R78rZ%99agCe;SpR
z2&R5q?E=vp9E`14e_L9iWfefrys(&*EXOenhi}(uR8D%;1^v32tF*i$meYY6!3~@Q
zv5OSB5c`O2eYdLw^yThU*z33iu!U)sm(UUi!Yh5@S`weCs{BaFFDP7dWAap2{nG=s
zg+-P;PwqQ+?wHv<WGCsCsCO4rx|Wd_14)@oaLWm2&kft9v8-l^{=qia<93z$CT*z^
z*q2JT%@xykw-Ow2s?^%W_=BG?$=o_stHxWYMy+|vajaCg^4_v!TByq*Mc@g$G@in+
zYNuZb6#9Mik)Vb&y{T`IPuoggq35^!q9Us2#>S{X^xRx~)ampA>1zW`P2@zwfa|>{
z(Zt?9q>hUSNyY-w8WjF3)S{^{Y;7-zeNdEWXCYNlYE#WdCdLmAQQa{ib}eB{46!Vm
zo13!fMtVj@*A05r-xRqe1O+nR=OyKWG>u1mlD&rJ7WUEOHCORSf`H4G9m&D*U>eu{
zLp6o#gU{59h79h}@mqyQxAYnwjZ3|e)+cm~c9C*PmcN-nJ13-pb9}j+aMZB3eWbuU
z(aP`J@@Js(3eo*K%?H@(M#W~b(~+qW`F;+iobQ&M*W>{=WjBNNZqtpbh4N5N(I2dG
z-RX`fI|JPp?}OI)XaR2iVs;j=E!yAobeUouDw>}0b0z1W+MTAGY0eJ{GDB$rxn+Jx
zijgtNgG}Ip-xgzR(6Y<B6j5&7I?EKz2e+k2gn|!#VFBU$FP~`ekpaTJkZ6yKe@hcz
z0&P<dte_M3j=c?L(KKla=JLCh$&q23=Ki1!(x1)bR{e(s(~5Hx!^RaGOirY(Ya+~q
zTax0op$Cq0B+JhX=8V5lAHJ;;{jP&Y^DwLVk?U_UN+40B#x_l|2@WMt@X^?PUm5R`
zka5Zoe%Mb;=NtBYJRMnP9OukRlM*dPy%;#8-SK6r$;j&qkHb)1d#Z^N<KZr2!&#Pj
zIU-G_uQVE_d}&k&{6RLLsXdWz{x!xRnx?axhpuNik+1!(RuG-vy%XmTtmxPBGjh&s
z%2Mr?Ut8IWv!*#|Vmm`90TQDH+;(IJMHnN3{zDQxfmd>w>ce#I{RXF)m?YpDnSx1P
z-qxP|)1Pe80-2Yo{|kjzD-b|ra*a%GbQ-JEf<BbF&h$&RrZ-+pVa7@^kGNqEe<rct
z-kwj~m}xv3un)%C!WKTK+rEbQ$D;L=|3ipou){Jid{G5mhMkg6p^%}xPG=SSMvT+>
zY4Ef^R`Uo`;5%GzqsAjSR8OWeT$^xkT*!`awX@U|_Abd2Kni%MHCjtQr!HimpSd78
zqrPOZv^3?zw<Q83PJS%)jk5~sW5wL%>eIu9Gt!GTOD19I)$#R&XHcKG{N6t4Uzm)%
z_&ik-;lla8ao5f-XCXafQiDpVG*V0{N!aCZPn=1CN`%)rVO5b3-l1<&5Rm>dgqG6&
zi6I?9NDN#D1uh~vl;mU=49d2IlV^tnzNl6O2YpihPema^^jse;K;WdUa}|$oaghqg
z(6Awt@Duo-@b4d^62bJ31eGM@W)0Qd@X!Ndd;7ddj(j^*YY2<F9B0=q{CkRTYlO1D
zGl*<!ByB1D*e5nwTT@}02EOS{{EEVld=V|ut?N{K!<D?M$QX97EGNgVZS|_~peG9q
zL$e2NzJNfu_N=TG$8b>nz}q(w%?j=RPLP@eEF|B$PQ2KtCtcE0TG0n}qx$Q0g;>#Q
zXb4R~mYm3CJ1RdzfK4TCyeNO)4km{6`QK7Rtf74G7sV*O8|HzS0B>>4yF}W2o(lp*
zM{UWrv+Ba@vnVNI88u6!KF%=Wbx&cqT*am6q30wD#F98KVc5!5oJkm|LweHam10~r
zX@~3#%zVK@yDeBv6!qOETx37pSa`UBTxI#cHI-Sl3=?)E1K4yNsZ5YEKwM8qGV1Vn
zk8qYSbHYB+UTkQmS<k~+_u?XWiR}U~EWCgOKyF#9aFf?0NFlo?l9dJq7v*7@BT&B>
t;Jjx^&~6n@&egfT2m_h_UkqA5Co_+SJESY3=}2`iKwrlMS%GlG{15vgE&>1m
literal 0
HcmV?d00001
diff --git a/cmake/DebPackageConfig.cmake b/cmake/DebPackageConfig.cmake
new file mode 100644
index 0000000..2ab24e7
--- /dev/null
+++ b/cmake/DebPackageConfig.cmake
@@ -0,0 +1,327 @@
+cmake_minimum_required(VERSION 3.0.0)
+
+# function(add_deb_package PACKAGE_NAME PACKAGE_VERSION PACKAGE_MAINTAINER PACKAGE_EMAIL PACKAGE_SHORT_DESCRIPTION PACKAGE_LONG_DESCRIPTION)
+
+# endfunction(add_deb_package PACKAGE_NAME PACKAGE_VERSION PACKAGE_MAINTAINER PACKAGE_EMAIL PACKAGE_SHORT_DESCRIPTION PACKAGE_LONG_DESCRIPTION)
+
+# if(add_deb_package VALUE) set(Package ${VALUE} PARENT_SCOPE) endif(add_deb_package VALUE)
+# if(add_deb_version VALUE) set(Version ${VALUE} PARENT_SCOPE) endif(add_deb_version VALUE)
+# if(add_deb_maintainer VALUE) set(Maintainer ${VALUE} PARENT_SCOPE) endif(add_deb_maintainer VALUE)
+# if(add_deb_email VALUE) set(Email ${VALUE} PARENT_SCOPE) endif(add_deb_email VALUE)
+# if(add_deb_descrition VALUE) set(Descrition ${VALUE} PARENT_SCOPE) endif(add_deb_descrition VALUE)
+# if(add_deb_detail VALUE) set(Detail ${VALUE} PARENT_SCOPE) endif(add_deb_detail VALUE)
+
+
+# set(Package "")
+# set(Version "")
+# set(Architecture "")
+# set(Maintainer "")
+# set(Email "")
+# set(Descrition "")
+
+function(find_str _IN _SEP _OUT)
+ string(FIND "${_IN}" "${_SEP}" _TMP)
+ set(${_OUT} ${_TMP} PARENT_SCOPE)
+endfunction(find_str _IN _SEP _OUT)
+
+
+function(find_next _IN _OUT)
+ find_str("${_IN}" "\n" _TMP)
+ set(${_OUT} ${_TMP} PARENT_SCOPE)
+endfunction(find_next _IN _OUT)
+
+function(sub_next _IN _INDEX _OUT __OUT)
+ find_next(${_IN} _NEXTINDEX)
+ string(SUBSTRING "${_IN}" ${_INDEX} ${_NEXTINDEX} _TMP)
+ math(EXPR _NEXTINDEX ${_NEXTINDEX}+1)
+ string(SUBSTRING "${_IN}" ${_NEXTINDEX} -1 __TMP)
+ set(${_OUT} ${_TMP} PARENT_SCOPE)
+ set(${__OUT} ${__TMP} PARENT_SCOPE)
+endfunction(sub_next _IN _INDEX _OUT)
+
+function(trim_str _IN _OUT)
+ string(STRIP "${_IN}" _TMP)
+ set(${_OUT} ${_TMP} PARENT_SCOPE)
+endfunction(trim_str _IN _OUT)
+
+function(split_str _IN _SEP _OUT)
+ string(FIND "${_IN}" "${_SEP}" _TMP_INDEX)
+ if(NOT _TMP_INDEX EQUAL -1)
+ string(SUBSTRING "${_IN}" 0 ${_TMP_INDEX} _TMP)
+ math(EXPR _TMP_INDEX ${_TMP_INDEX}+1)
+ string(SUBSTRING "${_IN}" ${_TMP_INDEX} -1 __TMP)
+ set(${_OUT} "${_TMP};${__TMP}" PARENT_SCOPE)
+ else()
+ set(${_OUT} ${_IN} PARENT_SCOPE)
+ endif(NOT _TMP_INDEX EQUAL -1)
+endfunction(split_str _IN _SEP _OUT)
+
+function(split_str_p _IN _SEP _OUT __OUT)
+ split_str("${_IN}" "${_SEP}" _TMP)
+ list(GET _TMP 0 __TMP)
+ list(GET _TMP 1 ___TMP)
+ set(${_OUT} ${__TMP} PARENT_SCOPE)
+ set(${__OUT} ${___TMP} PARENT_SCOPE)
+endfunction(split_str_p _IN _SEP _OUT __OUT)
+
+function(split_str_n _IN _SEP _OUT _N)
+ if(_N GREATER 1)
+ set(_C ${_N})
+ set(_RET "")
+ set(_NEXT ${_IN})
+ while(NOT _C EQUAL 0)
+ split_str("${_NEXT}" "${_SEP}" _TMP)
+ list(LENGTH _TMP _TMP_LEN)
+ if(_TMP_LEN EQUAL 2)
+ list(GET _TMP 0 __TMP)
+ list(GET _TMP 1 _NEXT)
+ list(APPEND _RET ${__TMP})
+ else()
+ break()
+ endif(_TMP_LEN EQUAL 2)
+ math(EXPR _C "${_C}-1")
+ endwhile(NOT _C EQUAL 0)
+ list(APPEND _RET ${_NEXT})
+ set(${_OUT} ${_RET} PARENT_SCOPE)
+ else()
+ split_str("${_IN}" "${_SEP}" _TMP)
+ set(${_OUT} ${_TMP} PARENT_SCOPE)
+ endif(_N GREATER 1)
+endfunction(split_str_n _IN _SEP _OUT _N)
+
+
+function(set_package_vars _IN_KEY _IN_VAL)
+
+ # trim_str("${_IN_KEY}" _IN_KEY)
+
+ find_str("${_IN_KEY}" "Type" _Type)
+ if(_Type EQUAL "0")
+ string(TOUPPER "${_IN_VAL}" _IN_VAL_UPPER)
+ string(TOLOWER "${_IN_VAL}" _IN_VAL_LOWER)
+ set(CPACK_GENERATOR "${_IN_VAL_UPPER}" PARENT_SCOPE)
+ message("--> 软件包类型: ${_IN_VAL_LOWER}")
+ endif(_Type EQUAL "0")
+
+ find_str("${_IN_KEY}" "Package" _Package)
+ if(_Package EQUAL "0")
+ if(_IN_VAL STREQUAL "auto")
+ set(CPACK_DEBIAN_PACKAGE_NAME "${PROJECT_NAME}" PARENT_SCOPE)
+ else()
+ set(CPACK_DEBIAN_PACKAGE_NAME "${_IN_VAL}" PARENT_SCOPE)
+ endif(_IN_VAL STREQUAL "auto")
+ message("--> 软件包名: ${_IN_VAL}")
+ endif(_Package EQUAL "0")
+
+ find_str("${_IN_KEY}" "Version" _Version)
+ if(_Version EQUAL "0")
+ if(_IN_VAL STREQUAL "auto")
+ set(CPACK_DEBIAN_PACKAGE_VERSION "${PROJECT_VERSION}" PARENT_SCOPE)
+ else()
+ set(CPACK_DEBIAN_PACKAGE_VERSION "${_IN_VAL}" PARENT_SCOPE)
+ endif(_IN_VAL STREQUAL "auto")
+
+ message("--> 软件版本: ${_IN_VAL}")
+ endif(_Version EQUAL "0")
+
+ find_str("${_IN_KEY}" "CalVer" _CalVer)
+ if(_CalVer EQUAL "0")
+ set(CalVer "${_IN_VAL}" PARENT_SCOPE)
+ message("--> 日历化版本: ${_IN_VAL}")
+ endif(_CalVer EQUAL "0")
+
+ find_str("${_IN_KEY}" "Architecture" _Architecture)
+ if(_Architecture EQUAL "0")
+ set(CPACK_DEBIAN_PACKAGE_ARCHITECTURE "${_IN_VAL}" PARENT_SCOPE)
+ if(_IN_VAL STREQUAL "auto")
+ execute_process(
+ COMMAND dpkg --print-architecture
+ OUTPUT_VARIABLE _RETV
+ OUTPUT_STRIP_TRAILING_WHITESPACE
+ )
+ set(CPACK_DEBIAN_PACKAGE_ARCHITECTURE "${_RETV}" PARENT_SCOPE)
+ endif(_IN_VAL STREQUAL "auto")
+ message("--> 软件架构: ${_IN_VAL}")
+ endif(_Architecture EQUAL "0")
+
+ find_str("${_IN_KEY}" "Priority" _Priority)
+ if(_Priority EQUAL "0")
+ set(CPACK_DEBIAN_PACKAGE_PRIORITY "${_IN_VAL}" PARENT_SCOPE)
+ message("--> 优先级: ${_IN_VAL}")
+ endif(_Priority EQUAL "0")
+
+ find_str("${_IN_KEY}" "Depends" _Depends)
+ if(_Depends EQUAL "0")
+ set(CPACK_DEBIAN_PACKAGE_DEPENDS "${_IN_VAL}" PARENT_SCOPE)
+ message("--> 软件依赖: ${_IN_VAL}")
+ endif(_Depends EQUAL "0")
+
+ find_str("${_IN_KEY}" "Maintainer" _Maintainer)
+ if(_Maintainer EQUAL "0")
+ set(CPACK_DEBIAN_PACKAGE_MAINTAINER "${_IN_VAL}" PARENT_SCOPE)
+ message("--> 软件维护者: ${_IN_VAL}")
+ endif(_Maintainer EQUAL "0")
+
+ find_str("${_IN_KEY}" "Homepage" _Homepage)
+ if(_Homepage EQUAL "0")
+ set(CPACK_DEBIAN_PACKAGE_HOMEPAGE "${_IN_VAL}" PARENT_SCOPE)
+ message("--> 软件主页: ${_IN_VAL}")
+ endif(_Homepage EQUAL "0")
+
+ find_str("${_IN_KEY}" "Recommends" _Recommends)
+ if(_Recommends EQUAL "0")
+ set(CPACK_DEBIAN_PACKAGE_RECOMMENDS "${_IN_VAL}" PARENT_SCOPE)
+ message("--> 软件建议: ${_IN_VAL}")
+ endif(_Recommends EQUAL "0")
+
+endfunction(set_package_vars _IN_KEY _IN_VAL)
+
+# 定义一个自定义(add_package_descript)函数
+# 用于按特定配置约定自动化构建软件包配置
+function(add_package_descript IN_DES)
+ set(PACKAGE_DES_PATH "${IN_DES}")
+
+ if(EXISTS ${IN_DES})
+
+ elseif(EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/${IN_DES}")
+ set(PACKAGE_DES_PATH "${CMAKE_CURRENT_SOURCE_DIR}/${IN_DES}")
+ else()
+ message(FATAL_ERROR "!! Not Found Path: ${PACKAGE_DES_PATH}")
+ return()
+ endif(EXISTS ${IN_DES})
+
+ file(READ ${PACKAGE_DES_PATH} DES_CONTENT)
+ trim_str("${DES_CONTENT}" DES_CONTENT)
+
+ ################## 解析 ##################
+
+ sub_next(${DES_CONTENT} NEXT_INDEX DES_LINE DES_CONTENT)
+ set(PREV_DES "")
+ while(NOT DES_LINE STREQUAL "${PREV_DES}")
+ # 检查该描述行是否是 # 注释开头,是的话将跳过该行
+ find_str("${DES_LINE}" "#" _COMMENT)
+ if(_COMMENT EQUAL "0")
+ message("--> !!!!!!! ${DES_LINE}")
+ sub_next(${DES_CONTENT} NEXT_INDEX DES_LINE DES_CONTENT)
+ continue()
+ endif(_COMMENT EQUAL "0")
+
+ # 检查该描述行是否是 Descrition 开头,是的话说明描述结尾了
+ find_str("${DES_LINE}" "Descrition" _DESCRIPTION)
+ if(_DESCRIPTION EQUAL "0")
+ break()
+ endif(_DESCRIPTION EQUAL "0")
+
+ split_str_n("${DES_LINE}" ":" _TMP 1)
+ list(LENGTH _TMP _TMP_LEN)
+
+ if(_TMP_LEN EQUAL 2)
+ split_str_p("${DES_LINE}" ":" _TMP __TMP)
+ trim_str("${__TMP}" __TMP)
+ string(LENGTH "${__TMP}" __TMP_LENGTH)
+ if(NOT __TMP_LENGTH EQUAL "0")
+ set_package_vars("${_TMP}" "${__TMP}")
+ endif(NOT __TMP_LENGTH EQUAL "0")
+ endif(_TMP_LEN EQUAL 2)
+
+ # 记录当前行,获取下一行,可能是已经结尾了(将保持重复行)
+ set(PREV_DES "${DES_LINE}")
+ sub_next(${DES_CONTENT} NEXT_INDEX DES_LINE DES_CONTENT)
+ endwhile(NOT DES_LINE STREQUAL "${PREV_DES}")
+
+
+ # 再一次检查该描述行是否是 Descrition 开头,是的话将进行分析描述行
+ find_str("${DES_LINE}" "Descrition" _DESCRIPTION)
+ if(_DESCRIPTION EQUAL "0")
+ split_str_p("${DES_LINE}" ":" _TMP __TMP)
+ trim_str("${__TMP}" __TMP)
+ set(Descrition ${__TMP})
+ set(PREV_DES_LINE "")
+ while(NOT PREV_DES_LINE STREQUAL DES_LINE)
+ if(NOT PREV_DES_LINE STREQUAL "")
+ set(Descrition "${Descrition}\n${DES_LINE}")
+ endif(NOT PREV_DES_LINE STREQUAL "")
+ set(PREV_DES_LINE "${DES_LINE}")
+ sub_next(${DES_CONTENT} NEXT_INDEX DES_LINE DES_CONTENT)
+ endwhile(NOT PREV_DES_LINE STREQUAL DES_LINE)
+ # set(Descrition "${Descrition}")
+ message("--> 软件说明: ${Descrition}")
+
+ set(CPACK_DEBIAN_PACKAGE_DESCRIPTION ${Descrition})
+ endif(_DESCRIPTION EQUAL "0")
+
+ ##################### deb #####################
+ # ARCHITECTURE
+ if(${CMAKE_HOST_SYSTEM_PROCESSOR} STREQUAL "x86_64")
+ set(ARCHITECTURE "amd64")
+ elseif(${CMAKE_HOST_SYSTEM_PROCESSOR} STREQUAL "aarch64")
+ set(ARCHITECTURE "arm64")
+ endif()
+
+ #################### Calendar Version ###################
+ if("${CalVer}" STREQUAL "true")
+ string(TIMESTAMP BUILD_TIME "%Y%m%d")
+ set(CPACK_DEBIAN_PACKAGE_VERSION "${CPACK_DEBIAN_PACKAGE_VERSION}-${BUILD_TIME}")
+ endif("${CalVer}" STREQUAL "true")
+
+
+
+ ##################### deb file name #####################
+ set(_Package "${CPACK_DEBIAN_PACKAGE_NAME}")
+ set(_Version "${CPACK_DEBIAN_PACKAGE_VERSION}")
+ set(_Architecture "${CPACK_DEBIAN_PACKAGE_ARCHITECTURE}")
+
+ set(_DebFileName
+ "${_Package}_${_Version}_${_Architecture}${PACKAGE_SUFFIX}.deb"
+ )
+ set(CPACK_DEBIAN_FILE_NAME ${_DebFileName})
+
+
+ # set(CPACK_DEBIAN_PACKAGE_NAME "${Package}")
+ # set(CPACK_DEBIAN_PACKAGE_VERSION "${Version}")
+ # set(CPACK_DEBIAN_PACKAGE_ARCHITECTURE "${Architecture}")
+ # set(CPACK_DEBIAN_PACKAGE_DEPENDS "${Depends}")
+ # set(CPACK_DEBIAN_PACKAGE_PRIORITY "${Priority}")
+ # set(CPACK_DEBIAN_PACKAGE_MAINTAINER "${Maintainer}")
+ # set(CPACK_DEBIAN_PACKAGE_DESCRIPTION "${Descrition}")
+
+ # 设置即将使用的标准脚本
+ set(CPACK_DEBIAN_PACKAGE_CONTROL_EXTRA
+ # "${CMAKE_SOURCE_DIR}/config/DEBIAN/preinst"
+ # "${CMAKE_SOURCE_DIR}/config/DEBIAN/postinst"
+ # "${CMAKE_SOURCE_DIR}/config/DEBIAN/prerm"
+ # "${CMAKE_SOURCE_DIR}/config/DEBIAN/postrm"
+ "${CMAKE_SOURCE_DIR}/debian/spark-store.postinst"
+ "${CMAKE_SOURCE_DIR}/debian/spark-store.postrm"
+ "${CMAKE_SOURCE_DIR}/debian/spark-store.preinst"
+ "${CMAKE_SOURCE_DIR}/debian/spark-store.prerm"
+ )
+
+ # 设置为ON以便使用 dpkg-shlibdeps 生成更好的包依赖列表。
+ set(CPACK_DEBIAN_PACKAGE_SHLIBDEPS ON)
+ # set(CPACK_DEBIAN_PACKAGE_GENERATE_SHLIBS ON)
+ # set(CPACK_DEBIAN_PACKAGE_GENERATE_SHLIBS_POLICY "=")
+
+ include(CPack)
+
+endfunction(add_package_descript IN_DES)
+
+
+# TODO:
+# CPACK_GENERATOR
+# CPACK_DEBIAN_FILE_NAME - n
+# CPACK_DEBIAN_PACKAGE_NAME - y
+# CPACK_DEBIAN_PACKAGE_VERSION - y
+# CPACK_DEBIAN_PACKAGE_ARCHITECTURE - y(auto)
+# CPACK_DEBIAN_PACKAGE_DEPENDS - y
+# CPACK_DEBIAN_PACKAGE_PRIORITY - y
+# CPACK_DEBIAN_PACKAGE_MAINTAINER - y
+# CPACK_DEBIAN_PACKAGE_DESCRIPTION - y
+
+# ARCHITECTURE
+# if(${CMAKE_HOST_SYSTEM_PROCESSOR} STREQUAL "x86_64")
+# set(ARCHITECTURE "amd64")
+# elseif(${CMAKE_HOST_SYSTEM_PROCESSOR} STREQUAL "aarch64")
+# set(ARCHITECTURE "arm64")
+# endif()
+
+# string(TIMESTAMP BUILD_TIME "%Y%m%d")
diff --git a/cmake/SparkAppimageConfig.cmake b/cmake/SparkAppimageConfig.cmake
new file mode 100644
index 0000000..d80279c
--- /dev/null
+++ b/cmake/SparkAppimageConfig.cmake
@@ -0,0 +1,133 @@
+# export PATH=/usr/lib/x86_64-linux-gnu/qt5/bin:$PATH
+# export LD_LIBRARY_PATH=/usr/lib/x86_64-linux-gnu:$LD_LIBRARY_PATH
+# export QT_PLUGIN_PATH=/usr/lib/x86_64-linux-gnu/qt5/plugins:$QT_PLUGIN_PATH
+# export QML2_IMPORT_PATH=/usr/lib/x86_64-linux-gnu/qt5/qml:$QML2_IMPORT_PATH
+
+# export PATH=/usr/lib/x86_64-linux-gnu/qt5/bin:$PATH
+# ~/linuxdeployqt-continuous-x86_64.AppImage spark-store-submitter -appimage
+# cd ..
+# ~/appimagetool-x86_64.AppImage appimage/
+
+# LINUXDEPLOYQT=/home/zinface/linuxdeployqt-continuous-x86_64.AppImage
+# APPIMAGETOOL=/home/zinface/appimagetool-x86_64.AppImage
+
+# if ()
+set(APPIMAGE_OUTPUT "${CMAKE_BINARY_DIR}/appimage")
+set(APPIMAGE_ICON "${APPIMAGE_OUTPUT}/default.png")
+set(APPIMAGE_DESTKOP "${APPIMAGE_OUTPUT}/default.desktop")
+# set(LINUXDEPLOYQT)
+# set(APPIMAGETOOL)
+
+function(execute_linuxdeploy _PATH)
+ execute_process(COMMAND ${LINUXDEPLOYQT}
+ WORKING_DIRECTORY "${APPIMAGE_OUTPUT}"
+ )
+endfunction(execute_linuxdeploy _PATH)
+
+function(target_linuxdeploy)
+ add_custom_target(linuxdeploy pwd
+ BYPRODUCTS appimage
+ COMMAND cp ../${PROJECT_NAME} .
+ COMMAND "${LINUXDEPLOYQT}" ${PROJECT_NAME} -appimage -unsupported-allow-new-glibc -verbose=3 -no-strip|| true
+ COMMAND cp ../spark-appimage.desktop default.desktop
+ COMMAND cp ../spark-appimage.png default.png
+ WORKING_DIRECTORY "${APPIMAGE_OUTPUT}")
+endfunction(target_linuxdeploy)
+
+function(target_appimage)
+ add_custom_target(copy-desktop-appimage
+ COMMAND cp ../spark-appimage.desktop default.desktop
+ COMMAND cp ../spark-appimage.png default.png
+ WORKING_DIRECTORY "${APPIMAGE_OUTPUT}")
+ add_custom_target(appimage pwd
+ COMMAND ${APPIMAGETOOL} ${APPIMAGE_OUTPUT}
+ WORKING_DIRECTORY "${CMAKE_BINARY_DIR}"
+ DEPENDS copy-desktop-appimage)
+endfunction(target_appimage)
+
+function(add_appimage)
+ # check linuxdeploy
+ if(NOT DEFINED LINUXDEPLOYQT)
+ message("AppImage> Not Found LINUXDEPLOYQT Variable!")
+ return()
+ endif(NOT DEFINED LINUXDEPLOYQT)
+ if(CMAKE_VERSION VERSION_LESS 3.19 AND NOT EXISTS ${LINUXDEPLOYQT})
+ message("> cmake version is less than 3.19")
+ message(WARNING "!Relative paths are not supported!")
+ else()
+ file(REAL_PATH ${LINUXDEPLOYQT} LINUXDEPLOYQT_REAL_PATH)
+ endif(CMAKE_VERSION VERSION_LESS 3.19 AND NOT EXISTS ${LINUXDEPLOYQT})
+ message("AppImage> Found LINUXDEPLOYQT Variable: ${LINUXDEPLOYQT_REAL_PATH}")
+
+ # check appimagetool
+ if(NOT DEFINED APPIMAGETOOL)
+ message("AppImage> Not Found APPIMAGETOOL Variable!")
+ return()
+ endif(NOT DEFINED APPIMAGETOOL)
+ if(CMAKE_VERSION VERSION_LESS 3.19 AND NOT EXISTS ${LINUXDEPLOYQT})
+ # execute_process(COMMAND realpath ${APPIMAGETOOL} OUTPUT_VARIABLE APPIMAGETOOL_REAL_PATH)
+ message("> cmake version is less than 3.19")
+ message(WARNING "!Relative paths are not supported!")
+ else()
+ file(REAL_PATH ${APPIMAGETOOL} APPIMAGETOOL_REAL_PATH)
+ endif(CMAKE_VERSION VERSION_LESS 3.19 AND NOT EXISTS ${LINUXDEPLOYQT})
+ message("AppImage> Found APPIMAGETOOL Variable: ${LINUXDEPLOYQT_REAL_PATH}")
+
+ # do add_custome_target
+ make_directory(${APPIMAGE_OUTPUT})
+ target_linuxdeploy()
+ target_appimage()
+endfunction(add_appimage)
+
+function(add_appimage_desktop)
+ configure_file(cmake/spark-appimage.desktop.in
+ ${CMAKE_BINARY_DIR}/spark-appimage.desktop @ONLY)
+endfunction(add_appimage_desktop)
+
+function(add_appimage_icon _ICON_PATH)
+ if(CMAKE_VERSION VERSION_LESS 3.21)
+ message("> cmake version is less than 3.21")
+ configure_file(${_ICON_PATH} ${CMAKE_BINARY_DIR}/spark-appimage.png COPYONLY)
+ else()
+ file(COPY_FILE ${_ICON_PATH} ${CMAKE_BINARY_DIR}/spark-appimage.png)
+ endif(CMAKE_VERSION VERSION_LESS 3.21)
+endfunction(add_appimage_icon _ICON_PATH)
+
+
+
+# 如果glic>=2.27,你就需要加上参数 -unsupported-allow-new-glibc (意思就是不再低版本发行版使用了)
+# 或 -unsupported-bundle-everything大概的意思是尝试兼容实际测试到其他发行版直接用不了了有可能是发行版的原因还是建议用前者虽然放弃了低版本
+
+# -unsupported-bundle-everything
+ # 捆绑所有依赖库,包括 ld-linux.so 加载器和 glibc。这将允许构建在较新系统上的应用程序在较旧的目标系统上运行但不建议这样做因为它会导致捆绑包超出所需的大小并且可能到其他发行版无法使用
+# -unsupported-allow-new-glibc
+ # 允许 linuxdeployqt 在比仍受支持的最旧 Ubuntu LTS 版本更新的发行版上运行。这将导致 AppImage无法在所有仍受支持的发行版上运行既不推荐也不测试或支持
+
+# ./linuxdeployqt-7-x86_64.AppImage 程序目录/程序 -appimage -unsupported-allow-new-glibc
+# ./linuxdeployqt-7-x86_64.AppImage 程序目录/程序 -appimage -unsupported-bundle-everything
+
+
+
+
+# 1. 在顶层构建中导入 Appimage 的构建
+# include(cmake/SparkAppimageConfig.cmake) # 导入来自 Spark 构建的 Appimage 构建
+# add_appimage_icon(assets/spark.png) # 添加到 Appimage 中的默认的图标
+# add_appimage_desktop() # 添加到 Appimage 中的默认desktop(使用来自 Spark 构建的 Desktop 构建中配置的信息(必须要求 spark-desktop))
+# add_appimage() # 应用对 Appimage 的构建
+
+# 2. 在 Makefile 进行构建目标构建 Appimage
+# Appimage 的构建流 --
+# 在 Makefile 进行构建目标构建 Appimage (要求提供工具的绝对路径然后可依次进行linuxdeployqt, genrate-appimage)
+# 来自于 https://github.com/probonopd/linuxdeployqt 的 linuxdeployqt
+# 来自于 https://github.com/AppImage/AppImageKit 的 appimagetool
+# LINUXDEPLOYQT := "/home/zinface/Downloads/linuxdeployqt-continuous-x86_64.AppImage"
+# APPIMAGETOOL := "/home/zinface/Downloads/appimagetool-x86_64.AppImage"
+
+# linuxdeploy: all
+# cd build && cmake .. -DLINUXDEPLOYQT=$(LINUXDEPLOYQT) -DAPPIMAGETOOL=$(APPIMAGETOOL)
+# cd build && make linuxdeploy
+
+# genrate-appimage:
+# cd build && cmake .. -DLINUXDEPLOYQT=$(LINUXDEPLOYQT) -DAPPIMAGETOOL=$(APPIMAGETOOL)
+# cd build && make appimage
+
diff --git a/cmake/SparkBuildGraphviz.cmake b/cmake/SparkBuildGraphviz.cmake
new file mode 100644
index 0000000..ce9dbc3
--- /dev/null
+++ b/cmake/SparkBuildGraphviz.cmake
@@ -0,0 +1,8 @@
+cmake_minimum_required(VERSION 3.5.1)
+
+# 添加构建项目依赖图目标: make builddeps
+add_custom_target(builddeps
+ COMMAND "${CMAKE_COMMAND}" "--graphviz=graphviz/builddeps.dot" .
+ COMMAND dot -Tpng graphviz/builddeps.dot -o builddeps.png
+ WORKING_DIRECTORY "${CMAKE_BINARY_DIR}"
+)
\ No newline at end of file
diff --git a/cmake/SparkDesktopMacros.cmake b/cmake/SparkDesktopMacros.cmake
new file mode 100644
index 0000000..223ac6b
--- /dev/null
+++ b/cmake/SparkDesktopMacros.cmake
@@ -0,0 +1,35 @@
+
+macro(spark_desktop_macros _APP_NAME _APP_NAME_ZH_CN _APP_COMMENT _APP_TYPE _APP_EXECUTE_PATH _APP_EXECUTE_ICON_PATH _APP_CATEGORIES)
+ set(APP_NAME ${_APP_NAME})
+ set(APP_NAME_ZH_CN ${_APP_NAME_ZH_CN})
+ set(APP_COMMENT ${_APP_COMMENT})
+ set(APP_TYPE ${_APP_TYPE})
+ set(APP_EXECUTE_PATH ${_APP_EXECUTE_PATH})
+ set(APP_EXECUTE_ICON_PATH ${_APP_EXECUTE_ICON_PATH})
+ set(APP_CATEGORIES ${_APP_CATEGORIES})
+ configure_file(cmake/spark-desktop.desktop.in
+ ${CMAKE_BINARY_DIR}/${_APP_NAME}.desktop
+ )
+endmacro(spark_desktop_macros _APP_NAME _APP_NAME_ZH_CN _APP_COMMENT _APP_TYPE _APP_EXECUTE_PATH _APP_EXECUTE_ICON_PATH _APP_CATEGORIES)
+
+# include(cmake/SparkDesktopMacros.cmake)
+# 内容默认应用名称: Name= 应与项目名称相同
+# spark_desktop_macros(
+ # 应用名称: Name=
+ # 应用名称: Name[zh_CN]=
+ # 应用说明: Comment=
+ # 应用类型: Type=
+ # 执行程序: Exec=
+ # 图标路径: Icon=
+	# 应用分类: Categories=
+# )
+
+# configure_file(<input> <output>
+# [NO_SOURCE_PERMISSIONS | USE_SOURCE_PERMISSIONS |
+# FILE_PERMISSIONS <permissions>...]
+# [COPYONLY] [ESCAPE_QUOTES] [@ONLY]
+# [NEWLINE_STYLE [UNIX|DOS|WIN32|LF|CRLF] ])
+
+# install(FILES ${APP_NAME}.desktop
+# DESTINATION /usr/share/applications
+# )
\ No newline at end of file
diff --git a/cmake/SparkEnvConfig.cmake b/cmake/SparkEnvConfig.cmake
new file mode 100644
index 0000000..797faf4
--- /dev/null
+++ b/cmake/SparkEnvConfig.cmake
@@ -0,0 +1,8 @@
+cmake_minimum_required(VERSION 3.5.1)
+
+set(CMAKE_EXPORT_COMPILE_COMMANDS ON)
+set(CMAKE_INCLUDE_CURRENT_DIR ON)
+set(CMAKE_AUTOMOC ON)
+set(CMAKE_AUTOUIC ON)
+set(CMAKE_AUTORCC ON)
+# set(CMAKE_BUILD_TYPE "Debug")
\ No newline at end of file
diff --git a/cmake/SparkFindDtkConfig.cmake b/cmake/SparkFindDtkConfig.cmake
new file mode 100644
index 0000000..278d0d1
--- /dev/null
+++ b/cmake/SparkFindDtkConfig.cmake
@@ -0,0 +1,11 @@
+cmake_minimum_required(VERSION 3.5.1)
+
+# include(SparkFindQt5Config.cmake)
+find_package(Dtk COMPONENTS Core Widget Gui)
+
+function(target_link_dtk NAME)
+ target_link_libraries(${NAME}
+ ${DtkCore_LIBRARIES}
+ ${DtkWidget_LIBRARIES}
+ ${DtkGui_LIBRARIES})
+endfunction(target_link_dtk NAME)
\ No newline at end of file
diff --git a/cmake/SparkFindLibraries.cmake b/cmake/SparkFindLibraries.cmake
new file mode 100644
index 0000000..a1b936c
--- /dev/null
+++ b/cmake/SparkFindLibraries.cmake
@@ -0,0 +1,7 @@
+cmake_minimum_required(VERSION 3.5.1)
+
+# spark_find_library(notify libnotify)
+
+# function(target_link_${_prefix} TARGET)
+# target_link_libraries(${TARGET} ${_prefix})
+# endfunction(target_link_${_prefix} TARGET)
\ No newline at end of file
diff --git a/cmake/SparkFindQt5Config.cmake b/cmake/SparkFindQt5Config.cmake
new file mode 100644
index 0000000..b56399f
--- /dev/null
+++ b/cmake/SparkFindQt5Config.cmake
@@ -0,0 +1,154 @@
+cmake_minimum_required(VERSION 3.5.1)
+
+find_package(Qt5 COMPONENTS Core Widgets Network Concurrent WebEngineWidgets Sql WebSockets REQUIRED)
+
+# function(target_link_qt5 NAME)
+# target_link_libraries(${NAME}
+# Qt5::Core
+# Qt5::Widgets
+# Qt5::Network)
+# endfunction(target_link_qt5 NAME)
+
+# 使用 spark_add_link 生成 target_link_qt5 以替代上面内容
+spark_add_link(qt5 Qt5::Core Qt5::Widgets Qt5::Network)
+
+
+# spark_add_link_qt5
+# 自定义宏 spark_add_link_qt5 以扩展 target_link_qt5_<name> 结构
+ # _IN_NAME: 此宏使用嵌套宏 spark_add_link 时追加 <name> 名称
+    # 同等于 spark_add_link(qt5_<name> ${ARGN})
+macro(spark_add_link_qt5 _IN_NAME)
+ spark_add_link(qt5_${_IN_NAME} ${ARGN})
+endmacro(spark_add_link_qt5 _IN_NAME)
+
+# 使用 spark_add_link_qt5 生成 target_link_qt5_<name> 的宏
+spark_add_link_qt5(Concurrent Qt5::Concurrent)
+spark_add_link_qt5(Sql Qt5::Sql)
+spark_add_link_qt5(WebEngineWidgets Qt5::WebEngineWidgets)
+spark_add_link_qt5(WebSockets Qt5::WebSockets)
+
+# 高级自定义
+# spark_add_links_qt5
+# 自定义宏 spark_add_links_qt5 以扩展 spark_add_link_qt5 宏配置组
+ # 特点: 任意长度参数
+ # qt5_item: 为进行遍历后的单项,类似于 python3 中的 (for item in items:)
+ # 例如: qt5_item 为 Core
+ # spark_add_link_qt5(${qt5_item} Qt5::${qt5_item})
+ # 展开为 spark_add_link_qt5(Core Qt5::Core)
+ # 展开为 spark_add_link(qt5_Core Qt5::Core)
+ # 展开为 spark_add_link(qt5_Core Qt5::Core)
+ # 特性: 增加 qt5_Core 转 qt5_core
+ # string(TOLOWER <string> <output_variable>)
+macro(spark_add_links_qt5)
+ set(qt5_items ${ARGN})
+ foreach(qt5_item IN LISTS qt5_items)
+ find_package(Qt5${qt5_item})
+ spark_add_link_qt5(${qt5_item} Qt5::${qt5_item})
+
+ string(TOLOWER "${qt5_item}" qt5_lower_item)
+ spark_add_link_qt5(${qt5_lower_item} Qt5::${qt5_item})
+ message("add_target_link_qt5_${qt5_item} or add_target_link_qt5_${qt5_lower_item}")
+ endforeach(qt5_item IN LISTS qt5_items)
+endmacro(spark_add_links_qt5)
+
+
+# Core 用于其它模块的核心非图形类。
+# GUI 图形用户界面 GUI 组件基类。包括 OpenGL。
+# Multimedia 音频 视频 无线电 摄像头功能类。
+# Multimedia Widgets 用于实现多媒体功能,基于 Widget 的类。
+# Network 使网络编程更容易和更可移植的类。
+
+# QML QML 和 JavaScript 语言类。
+# Quick 以自定义用户界面 UI 构建高动态应用程序的声明性框架。
+# Quick Controls 为桌面、嵌入式及移动设备创建高性能用户界面提供轻量 QML 类型。这些类型运用简单样式化体系结构且非常高效。
+# Quick Dialogs 用于从 Qt Quick 应用程序创建系统对话框,并与之交互的类型。
+# Quick Layouts 布局是用于在用户界面中排列基于 Qt Quick 2 项的项。
+# Quick Test 用于 QML 应用程序的单元测试框架,其测试案例被编写成 JavaScript 函数。
+ # 注意: 二进制保证不兼容 Qt Quick Test但源代码仍兼容。
+
+# Qt SQL 集成使用 SQL 数据库的类。
+# Qt Test 单元测试 Qt 应用程序和库的类。
+ # 注意: 二进制保证不兼容 Qt Test但源代码仍兼容。
+# Qt Widgets 以 C++ 小部件扩展 Qt GUI 的类。
+
+
+
+# 找出所有 Qt5 模板
+# find /usr/lib/x86_64-linux-gnu/cmake/ -name "*Config.cmake" | sed 's@^.*/Qt5@Qt5@;' | grep ^Qt5
+
+# 掐头去尾,洗一次
+# find /usr/lib/x86_64-linux-gnu/cmake/ -name "*Config.cmake" | sed 's@^.*/Qt5@Qt5@;' | grep ^Qt5 | sed 's@^Qt5@@; s@Config.cmake$@@; /^\s*$/d'
+
+# 排序
+# find /usr/lib/x86_64-linux-gnu/cmake/ -name "*Config.cmake" | sed 's@^.*/Qt5@Qt5@;' | grep ^Qt5 | sed 's@^Qt5@@; s@Config.cmake$@@; /^\s*$/d' | sort | pr -t -3
+
+spark_add_links_qt5(
+ # AccessibilitySupport
+ # AttributionsScannerTools
+ Concurrent
+ # Core
+ DBus
+ # Designer
+ # DesignerComponents
+ # DeviceDiscoverySupport
+ # DocTools
+ # EdidSupport
+ # EglFSDeviceIntegration
+ # EglFsKmsSupport
+ # EglSupport
+ # EventDispatcherSupport
+ # FbSupport
+ # FontDatabaseSupport
+ # GlxSupport
+ # Gui
+ # Help
+ # InputSupport
+ # KmsSupport
+ # LinguistTools
+ # LinuxAccessibilitySupport
+ # Network
+ # OpenGL
+ # OpenGLExtensions
+ # PacketProtocol
+ # PlatformCompositorSupport
+ # Positioning
+ # PositioningQuick
+ # PrintSupport
+ # Qml
+ # QmlDebug
+ # QmlDevTools
+ # QmlImportScanner
+ # QmlModels
+ # QmlWorkerScript
+ # Quick
+ # QuickCompiler
+ # QuickControls2
+ # QuickParticles
+ # QuickShapes
+ # QuickTemplates2
+ # QuickTest
+ # QuickWidgets
+ # SerialBus
+ # SerialPort
+ # ServiceSupport
+ # Sql
+ # Svg
+ # Test
+ # ThemeSupport
+ # UiPlugin
+ # UiTools
+ # VulkanSupport
+ # WebChannel
+ # WebEngine
+ # WebEngineCore
+ WebEngineWidgets
+ # WebKit
+ # WebKitWidgets
+ # WebSockets
+ # Widgets
+ # X11Extras
+ # XcbQpa
+ # XkbCommonSupport
+ # Xml
+ # XmlPatterns
+)
\ No newline at end of file
diff --git a/cmake/SparkFindQt6Config.cmake b/cmake/SparkFindQt6Config.cmake
new file mode 100644
index 0000000..c69a1ca
--- /dev/null
+++ b/cmake/SparkFindQt6Config.cmake
@@ -0,0 +1,24 @@
+cmake_minimum_required(VERSION 3.5.1)
+
+find_package(Qt6 COMPONENTS Core Widgets Network Concurrent)
+
+# function(target_link_qt6 NAME)
+# target_link_libraries(${NAME}
+# Qt6::Core
+# Qt6::Widgets
+# Qt6::Network)
+# endfunction(target_link_qt6 NAME)
+
+# 使用 spark_add_link 生成 target_link_qt6 以替代上面内容
+spark_add_link(qt6 Qt6::Core Qt6::Widgets Qt6::Network)
+
+
+# spark_add_link_qt6
+# 自定义宏 spark_add_link_qt6 以扩展 target_link_qt6_<name> 结构
+ # _IN_NAME: 此宏使用嵌套宏 spark_add_link 时追加 <name> 名称
+    # 同等于 spark_add_link(qt6_<name> ${ARGN})
+macro(spark_add_link_qt6 _IN_NAME)
+ spark_add_link(qt6_${_IN_NAME} ${ARGN})
+endmacro(spark_add_link_qt6 _IN_NAME)
+
+# 使用 spark_add_link_qt6 生成 target_link_qt6_<name> 的宏
diff --git a/cmake/SparkInstallMacrosConfig.cmake b/cmake/SparkInstallMacrosConfig.cmake
new file mode 100644
index 0000000..bbb958a
--- /dev/null
+++ b/cmake/SparkInstallMacrosConfig.cmake
@@ -0,0 +1,132 @@
+
+# spark_install_target
+# 基于传入的路径/目标进行安装
+# 可接受的值为: 安装路径 目标A
+# 可接受的值为: 安装路径 目标A 目标B 目标C...
+macro(spark_install_target INSTALL_TARGET_DIR INSTALL_TARGETS)
+ install(TARGETS
+ ${INSTALL_TARGETS} ${ARGN}
+ DESTINATION ${INSTALL_TARGET_DIR})
+endmacro(spark_install_target INSTALL_TARGET_DIR INSTALL_TARGETS)
+
+# spark_install_file
+# 基于传入的路径/文件进行安装
+# 可接受的值为: 安装路径 文件A
+# 可接受的值为: 安装路径 文件A 文件B 文件C...
+macro(spark_install_file INSTALL_FILE_DIR INSTALL_FILE)
+ install(FILES
+ ${INSTALL_FILE} ${ARGN}
+ DESTINATION ${INSTALL_FILE_DIR})
+endmacro(spark_install_file INSTALL_FILE_DIR INSTALL_FILE)
+
+# spark_install_program
+# 基于传入的路径/文件进行安装,并自动为其添加可执行权限
+# 可接受的值为: 安装路径 文件A
+# 可接受的值为: 安装路径 文件A 文件B 文件C...
+macro(spark_install_program INSTALL_PROGRAM_DIR INSTALL_PROGRAM)
+ install(PROGRAMS
+ ${INSTALL_PROGRAM} ${ARGN}
+ DESTINATION ${INSTALL_PROGRAM_DIR})
+endmacro(spark_install_program INSTALL_PROGRAM_DIR INSTALL_PROGRAM)
+
+
+# spark_install_directory
+# 基于传入的路径/目录进行安装
+# 可接受的值为: 安装路径 路径A
+# 可接受的值为: 安装路径 路径A/* 为安装路径A下所有内容
+macro(spark_install_directory INSTALL_DIRECTORY_DIR INSTALL_DIRECOTRY)
+ # INSTALL_DIRECOTRY 可能包含 *
+ # 1. 找到 '*', 截取,列出目录下所有文件,安装
+ # 2. 是文件的直接使用 spark_install_file 安装
+ # 2. 是目录的直接使用 spark_install_directory 安装
+ # message(FATAL_ERROR "${INSTALL_DIRECTORY_DIR}")
+ # string(FIND <string> <substring> <output_variable> [REVERSE])
+ string(FIND "${INSTALL_DIRECOTRY}" "*" INSTALL_DIRECTORY_FIND_INDEX)
+ # message(FATAL_ERROR "${INSTALL_DIRECTORY_FIND_INDEX}: ${INSTALL_DIRECTORY_DIR}")
+
+ # file(GLOB <variable>
+ # [LIST_DIRECTORIES true|false] [RELATIVE <path>] [CONFIGURE_DEPENDS]
+ # [<globbing-expressions>...])
+
+ if (NOT INSTALL_DIRECTORY_FIND_INDEX EQUAL -1)
+ # string(SUBSTRING <string> <begin> <length> <output_variable>)
+ string(SUBSTRING "${INSTALL_DIRECOTRY}" 0 ${INSTALL_DIRECTORY_FIND_INDEX} INSTALL_DIRECTORY_FIND_INDEX_SUBSTRING)
+ # message(FATAL_ERROR "directory: ${INSTALL_DIRECTORY_FIND_INDEX_SUBSTRING}")
+
+ # file(GLOB <variable>
+ # [LIST_DIRECTORIES true|false] [RELATIVE <path>] [CONFIGURE_DEPENDS]
+ # [<globbing-expressions>...])
+
+ file(GLOB INSTALL_DIRECTORY_FIND_INDEX_SUBSTRING_FILE_GLOB_LIST ${INSTALL_DIRECTORY_FIND_INDEX_SUBSTRING}/*)
+ list(LENGTH INSTALL_DIRECTORY_FIND_INDEX_SUBSTRING_FILE_GLOB_LIST INSTALL_DIRECTORY_FIND_INDEX_SUBSTRING_FILE_GLOB_LIST_LENGTH)
+ foreach(item IN LISTS INSTALL_DIRECTORY_FIND_INDEX_SUBSTRING_FILE_GLOB_LIST)
+ # message("-> ${item}")
+ if(IS_DIRECTORY ${item})
+ message("-> ${item} IS_DIRECTORY")
+ # spark_install_directory(${INSTALL_DIRECTORY_DIR} ${item})
+ install(DIRECTORY
+ ${item}
+ DESTINATION ${INSTALL_DIRECTORY_DIR}
+ USE_SOURCE_PERMISSIONS)
+ else()
+ message("-> ${item} NOT IS_DIRECTORY")
+ spark_install_program(${INSTALL_DIRECTORY_DIR} ${item})
+ # spark_install_file(${INSTALL_DIRECTORY_DIR} ${item})
+ endif(IS_DIRECTORY ${item})
+ endforeach(item IN LISTS INSTALL_DIRECTORY_FIND_INDEX_SUBSTRING_FILE_GLOB_LIST)
+
+ # message(FATAL_ERROR " directory: ${INSTALL_DIRECTORY_FIND_INDEX_SUBSTRING_FILE_GLOB_LIST}")
+ # message(FATAL_ERROR " directory: ${INSTALL_DIRECTORY_FIND_INDEX_SUBSTRING_FILE_GLOB_LIST_LENGTH}")
+
+ else()
+ message(FATAL_ERROR "install ${INSTALL_DIRECTORY_DIR}")
+
+ install(DIRECTORY
+ ${INSTALL_DIRECOTRY} ${ARGN}
+ DESTINATION ${INSTALL_DIRECTORY_DIR})
+ endif(NOT INSTALL_DIRECTORY_FIND_INDEX EQUAL -1)
+
+endmacro(spark_install_directory INSTALL_DIRECTORY_DIR INSTALL_DIRECOTRY)
+
+
+# spark_install_changelog
+# 基于传入的路径/ changelog 文件路径进行安装,经过一系列检查并使用 gzip 进行压缩并安装
+# 可接受的值为: 安装路径 changelog文件路径
+macro(spark_install_changelog CHANGE_LOG_FILE)
+ set(SOURCE_CHANGE_LOG_FILE ${CHANGE_LOG_FILE})
+ if (EXISTS ${SOURCE_CHANGE_LOG_FILE})
+
+ execute_process(COMMAND test -f ${SOURCE_CHANGE_LOG_FILE}
+ RESULT_VARIABLE changelog_test
+ )
+ execute_process(COMMAND which gzip
+ RESULT_VARIABLE gzip_test
+ )
+ if (NOT changelog_test EQUAL 0)
+ message(FATAL_ERROR "NOTE: 不是常规文件: ${SOURCE_CHANGE_LOG_FILE}")
+ endif(NOT changelog_test EQUAL 0)
+
+ if (NOT gzip_test EQUAL 0)
+ message(FATAL_ERROR "NOTE: 未安装 gzip, 无法压缩 changelog")
+ endif(NOT gzip_test EQUAL 0)
+
+ # 压缩与安装日志文件
+ add_custom_command(
+ OUTPUT "${CMAKE_BINARY_DIR}/changelog.gz"
+ COMMAND gzip -cn9 "${SOURCE_CHANGE_LOG_FILE}" > "${CMAKE_BINARY_DIR}/changelog.gz"
+ WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}"
+ COMMENT "Compressing changelog"
+ )
+ add_custom_target(changelog ALL DEPENDS "${CMAKE_BINARY_DIR}/changelog.gz")
+
+ # include(GNUInstallDirs)
+ set(SPARK_INSTALL_CHANGE_LOG_DIR "/usr/share/doc/${PROJECT_NAME}/")
+ install(FILES
+ ${CMAKE_BINARY_DIR}/changelog.gz
+ debian/copyright
+
+ DESTINATION ${SPARK_INSTALL_CHANGE_LOG_DIR})
+ else()
+ message(FATAL_ERROR "未找到: ${SOURCE_CHANGE_LOG_FILE}")
+ endif(EXISTS ${SOURCE_CHANGE_LOG_FILE})
+endmacro(spark_install_changelog CHANGE_LOG_FILE)
diff --git a/cmake/SparkMacrosConfig.cmake b/cmake/SparkMacrosConfig.cmake
new file mode 100644
index 0000000..1f53882
--- /dev/null
+++ b/cmake/SparkMacrosConfig.cmake
@@ -0,0 +1,129 @@
+cmake_minimum_required(VERSION 3.5.1)
+
+# 定义一些 macro 用于自动生成构建结构
+
+# spark_add_library <lib_name> [files]...
+# 构建一个库,基于指定的源文件
+ # 并根据库名生成 target_link_<lib_name> 函数
+macro(spark_add_library _lib_name)
+ message("================ ${_lib_name} Library ================")
+ add_library(${_lib_name} ${ARGN})
+
+ set(SRCS ${ARGN})
+ foreach(item IN LISTS SRCS)
+ message(" -> ${item}")
+ endforeach(item IN LISTS SRCS)
+
+ function(target_link_${_lib_name} TARGET)
+ message("${_lib_name}")
+ target_link_libraries(${TARGET} ${_lib_name})
+ endfunction(target_link_${_lib_name} TARGET)
+
+endmacro(spark_add_library _lib_name)
+
+# spark_add_library_path <lib_name> <lib_path>
+# 构建一个库,基于指定的路径
+ # 并根据库名生成 target_link_<lib_name> 函数
+ # 函数内增加以 <lib_path> 头文件搜索路径
+macro(spark_add_library_path _lib_name _lib_path)
+ aux_source_directory(${_lib_path} ${_lib_name}_SOURCES)
+
+ message("================ spark_add_library_path: ${_lib_name} ================")
+ file(GLOB UI_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${_lib_path}/*.ui)
+ add_library(${_lib_name} ${${_lib_name}_SOURCES} ${UI_LIST})
+ foreach(item IN LISTS ${_lib_name}_SOURCES)
+ message(" -> ${item}")
+ endforeach(item IN LISTS ${_lib_name}_SOURCES)
+
+ function(target_link_${_lib_name} TARGET)
+ # message("target_link_${_lib_name}")
+ message(" -> (include): ${_lib_path}")
+ target_include_directories(${TARGET} PUBLIC "${_lib_path}")
+ target_link_libraries(${TARGET} ${_lib_name})
+ endfunction(target_link_${_lib_name} TARGET)
+
+ function(target_include_${_lib_name} TARGET)
+ # message("target_link_${_lib_name}")
+ message(" -> (include): ${_lib_path}")
+ target_include_directories(${TARGET} PUBLIC "${_lib_path}")
+ # target_link_libraries(${TARGET} ${_lib_name})
+ endfunction(target_include_${_lib_name} TARGET)
+
+endmacro(spark_add_library_path _lib_name _lib_path)
+
+# spark_add_executable <exec_name> [files]...
+# 构建一个可执行文件,基于指定的源文件
+ # Qt编译时源文件包括很多类型需要指定 *.h/*.cpp/*.qrc/*.qm/... 等
+macro(spark_add_executable _exec_name)
+
+ message("================ ${_exec_name} Executable ================")
+ add_executable(${_exec_name} ${ARGN})
+
+endmacro(spark_add_executable _exec_name)
+
+macro(spark_add_executable_path _exec_name _exec_path)
+ aux_source_directory(${_exec_path} ${_exec_name}_SOURCES)
+
+ message("================ ${_exec_name} Executable ================")
+ file(GLOB UI_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${_exec_path}/*.ui)
+ add_executable(${_exec_name} ${${_exec_name}_SOURCES} ${ARGN} ${UI_LIST})
+ foreach(item IN LISTS ${_exec_name}_SOURCES)
+ message(" -> ${item}")
+ endforeach(item IN LISTS ${_exec_name}_SOURCES)
+
+ # function(target_link_${_exec_name} TARGET)
+ # message("target_link_${_lib_name}")
+ message(" -> (include): ${_exec_path}")
+ target_include_directories(${_exec_name} PUBLIC "${_exec_path}")
+ # target_link_libraries(${TARGET} ${_lib_name})
+ # endfunction(target_link_${_exec_name} TARGET)
+ # target_link_${_exec_name}(${_exec_name})
+
+endmacro(spark_add_executable_path _exec_name _exec_path)
+
+# spark_find_library
+# 搜索一个库,基于指定的库名,调用 pkg-config 搜索库
+ # 并根据库名生成一个 target_link_<prefix> 函数
+macro(spark_find_library _prefix)
+ find_package(PkgConfig REQUIRED)
+
+ # libnotify
+ pkg_check_modules(${_prefix} ${ARGN})
+ function(target_link_${_prefix} TARGET)
+ target_include_directories(${TARGET} PUBLIC
+ ${${_prefix}_INCLUDE_DIRS})
+ target_link_libraries(${TARGET}
+ ${${_prefix}_LIBRARIES})
+ endfunction(target_link_${_prefix} TARGET)
+
+endmacro(spark_find_library _prefix)
+
+
+# spark_add_executable_paths
+# 自定义构建宏,基于指定的前缀名称,处理后续参数为子目录
+ # item: 为进行遍历后的单项,类似于 python3 中的 (for item in items:)
+ # file: 为在目录中不以递归(GLOB_RECURSE)方式寻找 qrc 文件,需要将其参与编译才能被 rcc
+ # 并根据 prefix-<item> 生成构建目标,
+macro(spark_add_executable_paths _prefix_path)
+ set(PATHS ${ARGN})
+ foreach(item IN LISTS PATHS)
+ file(GLOB QRCS "${item}/*.qrc")
+ message(">>> add_executable: " "${_prefix_path}-${item} ${item} + ${QRCS}")
+ spark_add_executable_path(${_prefix_path}-${item} ${item} ${QRCS})
+ target_link_qt5(${_prefix_path}-${item})
+ endforeach(item IN LISTS PATHS)
+endmacro(spark_add_executable_paths _prefix_path)
+
+# spark_add_link
+# 自定义宏以代替当前使用 function 定义 target_link_<name> 结构
+ # _IN_NAME: 此宏生成 target_link_<name> 的要求参数
+ # ARGN: 此宏剩余的参数列表
+ # 在使用 target_link_<name> 时
+    # _NAME: 用于此 function 中的要求参数: <_NAME>目标将要连接此库
+macro(spark_add_link _IN_NAME)
+ function(target_link_${_IN_NAME} _NAME)
+ message("LINK ${_NAME} ${ARGN}")
+ target_link_libraries(${_NAME}
+ ${ARGN})
+ endfunction(target_link_${_IN_NAME} _NAME)
+endmacro(spark_add_link _IN_NAME)
\ No newline at end of file
diff --git a/cmake/SparkMacrosExtendConfig.cmake b/cmake/SparkMacrosExtendConfig.cmake
new file mode 100644
index 0000000..985aaad
--- /dev/null
+++ b/cmake/SparkMacrosExtendConfig.cmake
@@ -0,0 +1,197 @@
+
+# find_plus
+# 寻找 INVAl 传入的字符串,如果存在 + 字符将写入位置到 OUTVAL
+function(find_plus INVAL OUTVAL)
+ string(FIND "${INVAL}" "+" plus_index)
+ set(${OUTVAL} ${plus_index} PARENT_SCOPE)
+ # if(plus_index LESS 0)
+ # set(${OUTVAL} -1 PARENT_SCOPE)
+ # else()
+ # set(${OUTVAL} ${plus_index} PARENT_SCOPE)
+ # endif(plus_index LESS 0)
+endfunction(find_plus INVAL OUTVAL)
+
+# find_plus("FF" FFFF)
+# message("--> FFFF ${FFFF}") # --> FFFF -1
+# find_plus("F+F" FFFF)
+# message("--> FFFF ${FFFF}") # --> FFFF 1
+# find_plus("+F+F" FFFF)
+# message("--> FFFF ${FFFF}") # --> FFFF 0
+
+# set(FFF)
+# list(APPEND FFFF )
+# list(APPEND FFFF "F")
+# list(APPEND FFFF "FA")
+# message("--> FFFF: ${FFFF}") # --> FFFF: F;FA
+
+# set(FFFFS "")
+# list(APPEND FFFFS ${FFFF})
+# message("--> FFFFS: ${FFFFS}") # --> FFFFS: F;FA
+
+# set(FFFF "+AA+BB+CC+DD")
+# string(REPLACE "+" ";" FFFFL "${FFFF}")
+# list(LENGTH FFFFL FFFFLEN)
+# message("--> FFFFL: ${FFFFL} --> ${FFFFLEN}") # --> FFFFL: F;
+
+# plus_list
+# 将传入的 "+AAA+BBB+CCC" 类型数据变成一个 列表(list)
+# 适用于不使用 string 进行替换 + 为 ";" 的情况下使用直接变成 list
+function(plus_list INVAL OUTVAL OUTVALLEN)
+ # set(${OUTVAL} "..." PARENT_SCOPE)
+ # set(${OUTVALLEN} 0 PARENT_SCOPE)
+
+ set(_tmps "") # 设置为空的
+
+ # 寻找下一个 + 位置
+ find_plus(${INVAL} RIGHT_PLUS)
+
+ string(LENGTH "${INVAL}" INVALLEN)
+ message("--> 传入的 INVAL: --> 内容: ${INVAL}")
+ message("--> 传入的 INVAL: --> 长度: ${INVALLEN}")
+ message("--> 传入的 INVAL: --> +位置: ${RIGHT_PLUS}")
+
+ # 判断是否有右侧 + 号
+ if(RIGHT_PLUS LESS 0)
+ message("--> 传入的 INVAL: --> 无需计算新的+位置")
+ # message("--> 计算新的 + 位置: ${_PLUSINDEX}")
+ list(APPEND _tmps ${INVAL})
+ else()
+ math(EXPR _PLUSINDEX "${RIGHT_PLUS}+1")
+ message("--> 传入的 INVAL: --> 需计算+位置 --> 右移: ${_PLUSINDEX}")
+
+ string(SUBSTRING "${INVAL}" ${_PLUSINDEX} ${INVALLEN} NewVal)
+ message("--> 传入的 INVAL: --> 需计算+位置 --> 右移: ${_PLUSINDEX} -> 内容: ${NewVal}")
+ # string(REPLACE "+" ";" _tmps "${NewVal}")
+ # list(LENGTH FFFFL FFFFLEN)
+
+ # message("--> 计算新的 + 位置: ${_PLUSINDEX} --> 后面的 NewVal: ${NewVal}")
+
+ # find_plus(${NewVal} _NextPlus)
+ # if(_NextPlus LESS 0)
+ # list(APPEND _tmps ${NewVal})
+ # message("--> 追加新的 + 位置: ${_PLUSINDEX} --> 后面的")
+ # else()
+ # message("--> 追加新的 + 位置: ${_PLUSINDEX} --> 后面的")
+ # # 重新
+ # # plus_list(${NewVal} NewValS )
+ # # foreach(item)
+ # # list(APPEND _tmps ${item})
+ # # endforeach(item)
+ # endif(_NextPlus LESS 0)
+ endif(RIGHT_PLUS LESS 0)
+
+ set(${OUTVAL} ${_tmps} PARENT_SCOPE)
+ list(LENGTH _tmps _tmps_len)
+ set(${OUTVALLEN} ${_tmps_len} PARENT_SCOPE)
+
+endfunction(plus_list INVAL OUTVAL OUTVALLEN)
+
+# plus_list("+AAA+BBB+CCC+DDD" FFF FFLEN)
+# message("--------> ${FFF}: -> ${FFLEN}")
+
+# spark_add_library_realpaths
+# 基于传入的项进行构建
+# 可接受的值为: 路径列表
+# 可接受的值为: 路径列表+依赖库A+依赖库B
+macro(spark_add_library_realpaths)
+ message("---> 基于传入的项进行构建 <---")
+ # message("--> src/unclassified/ItemDelegates/NdStyledItemDelegate")
+ # string(FIND <string> <substring> <output_variable> [REVERSE])
+ # string(SUBSTRING <string> <begin> <length> <output_variable>)
+ # math(EXPR value "100 * 0xA" OUTPUT_FORMAT DECIMAL) # value is set to "1000"
+
+ set(REALPATHS ${ARGN})
+ foreach(REALPATH IN LISTS REALPATHS)
+ message("---> 传入路径: ${REALPATH} <--- ")
+ string(LENGTH "${REALPATH}" REALPATH_LENGTH)
+ message("---> 计算传入路径长度: --> 长度: ${REALPATH_LENGTH}")
+
+ string(FIND "${REALPATH}" "/" LASTINDEX REVERSE)
+ message("---> 计算传入路径末尾/位置: --> 长度: ${LASTINDEX}")
+ math(EXPR LASTINDEX "${LASTINDEX}+1")
+ message("---> 计算传入路径末尾/右移: --> 长度: ${LASTINDEX}")
+ string(SUBSTRING "${REALPATH}" ${LASTINDEX} ${REALPATH_LENGTH} REALNAME_Dependency)
+
+ # 找 + 号下标,这是找+号的函数
+ find_plus(${REALPATH} RIGHT_PLUS)
+
+ # 判断是否有找到 + 号下标,值为 -1 或 正整数
+ if(RIGHT_PLUS LESS 0) # 小于0: 不存在 + 号
+ set(REALNAME "${REALNAME_Dependency}")
+ message("---> 传入路径末尾/右移部分: --> ${REALNAME} <-- 无依赖+")
+
+ message("---> 构建 ${REALNAME} -> ${REALNAME} ${REALPATH} ")
+
+ spark_add_library_path(${REALNAME} ${REALPATH})
+ target_link_qt5(${REALNAME})
+ else()
+ message("---> 传入路径末尾/右移部分: --> ${REALNAME_Dependency} <-- 依赖+")
+
+ # 存在+号,将截取从 / 到 + 号之间的内容作为目标名称
+ # 例如 src/unclassified/widgets/DocTypeListView+JsonDeploy
+ # ^(LASTINDEX) ^(RIGHT_PLUS)
+ # 将 RIGHT_PLUS - LASTINDEX 计算出 DocTypeListView 字符长度
+ math(EXPR REALNAME_LENGTH "${RIGHT_PLUS}-${LASTINDEX}")
+
+ message("---> 计算传入路径末尾/右移部分: --> 位置: ${RIGHT_PLUS}")
+ # message("---> 计算传入路径末尾/右移部分: --> 长度: ${REALNAME_Dependency}")
+
+ # 目标名称为 DocTypeListView
+ # 依赖为 JsonDeploy
+ # set(REALNAME "")
+ string(SUBSTRING "${REALPATH}" 0 ${RIGHT_PLUS} _REALPATH_DIR)
+ string(SUBSTRING "${REALPATH}" ${LASTINDEX} ${REALNAME_LENGTH} REALNAME)
+
+ message("---> 计算传入路径末尾/右移部分: --> 库名: ${REALNAME}")
+
+ string(SUBSTRING "${REALPATH}" ${RIGHT_PLUS} ${REALPATH_LENGTH} Dependency)
+ message("---> 计算传入路径末尾/右移部分: --> 库名: ${REALNAME} --> +部分: ${Dependency}")
+
+ # plus_list(${Dependency} dependencies dependencies_len)
+ string(REPLACE "+" ";" dependencies "${Dependency}")
+ message("---> 计算传入路径末尾/右移部分: --> 库名: ${REALNAME} --> +部分: ${Dependency} --> 列表: ${dependencies} <-- ")
+
+
+ message("---> 构建 ${REALNAME} -> ${REALNAME} ${_REALPATH_DIR}")
+
+ spark_add_library_path(${REALNAME} ${_REALPATH_DIR})
+ # target_link_qt5(${REALNAME}) # 使用依赖的依赖或许也不错
+
+ target_include_directories(${REALNAME} PUBLIC ${_REALPATH_DIR})
+ target_link_libraries(${REALNAME} ${dependencies})
+
+ endif(RIGHT_PLUS LESS 0)
+ endforeach(REALPATH IN LISTS REALPATHS)
+
+endmacro(spark_add_library_realpaths)
+
+
+# spark_add_source_paths
+# 将指定路径中的文件变成可用的源文件列表
+#
+macro(spark_add_source_paths SOURCE_VARIABLE_NAME)
+ set(SOURCE_PATHS ${ARGN})
+ set(${SOURCE_VARIABLE_NAME}_PATHS "")
+ set(${SOURCE_VARIABLE_NAME} "")
+ foreach(SOURCE_PATH IN LISTS SOURCE_PATHS)
+ list(APPEND ${SOURCE_VARIABLE_NAME}_PATHS ${CMAKE_CURRENT_SOURCE_DIR}/${SOURCE_PATH})
+ aux_source_directory(${SOURCE_PATH} _SOURCES)
+ foreach(item IN LISTS _SOURCES)
+ # message(" -> ${item}")
+ list(APPEND ${SOURCE_VARIABLE_NAME} ${item})
+ endforeach(item IN LISTS _SOURCES)
+
+ # file(GLOB HEADER_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${SOURCE_PATH}/*.h)
+ # foreach(item IN LISTS HEADER_LIST)
+ # # message(" -> ${item}")
+ # list(APPEND ${SOURCE_VARIABLE_NAME} ${item})
+ # endforeach(item IN LISTS HEADER_LIST)
+
+ file(GLOB UI_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${SOURCE_PATH}/*.ui)
+ foreach(item IN LISTS UI_LIST)
+ # message(" -> ${item}")
+ list(APPEND ${SOURCE_VARIABLE_NAME} ${item})
+ endforeach(item IN LISTS UI_LIST)
+ endforeach(SOURCE_PATH IN LISTS SOURCE_PATHS)
+endmacro(spark_add_source_paths SOURCE_VARIABLE_NAME)
+
diff --git a/cmake/SparkTranslatorConfig.cmake b/cmake/SparkTranslatorConfig.cmake
new file mode 100644
index 0000000..272dd9d
--- /dev/null
+++ b/cmake/SparkTranslatorConfig.cmake
@@ -0,0 +1,27 @@
+cmake_minimum_required(VERSION 3.5.1)
+
+find_package(Qt5LinguistTools)
+
+file(GLOB SPARK_TRANSLATIONS ${CMAKE_SOURCE_DIR}/translations/*.ts)
+
+message("================ Translations ================")
+foreach(item IN LISTS SPARK_TRANSLATIONS)
+ message("-> ${item}")
+endforeach(item IN LISTS SPARK_TRANSLATIONS)
+
+qt5_add_translation(SPARK_QM_TRANSLATIONS
+ ${SPARK_TRANSLATIONS})
+
+message("translator(ts -> qm):")
+foreach(item IN LISTS SPARK_QM_TRANSLATIONS)
+ message("-> ${item}")
+endforeach(item IN LISTS SPARK_QM_TRANSLATIONS)
+
+
+# 注意,必须将 SPARK_QM_TRANSLATIONS 加入到 add_executable 参数中才能在编译时生成只有原文的ts文件
+
+# qt5_create_translation
+ # ts文件会在 make clean 或重新编译的时候一并被删除再编译的时候生成全新的ts原有的翻译会丢失万分注意!
+
+# qt5_add_translation
+ # 此宏比较稳定
diff --git a/cmake/linuxdeployqt-help b/cmake/linuxdeployqt-help
new file mode 100644
index 0000000..1b72fda
--- /dev/null
+++ b/cmake/linuxdeployqt-help
@@ -0,0 +1,48 @@
+linuxdeployqt (commit 5fa79fa), build 36 built on 2022-08-21 12:36:03 UTC
+WARNING: Not checking glibc on the host system.
+ The resulting AppDir or AppImage may not run on older systems.
+ This mode is unsupported and discouraged.
+ For more information, please see
+ https://github.com/probonopd/linuxdeployqt/issues/340
+
+Usage: linuxdeployqt <app-binary|desktop file> [options]
+
+Options:
+ -always-overwrite : Copy files even if the target file exists.
+ -appimage : Create an AppImage (implies -bundle-non-qt-libs).
+ -bundle-non-qt-libs : Also bundle non-core, non-Qt libraries.
+ -exclude-libs=<list> : List of libraries which should be excluded,
+ separated by comma.
+ -ignore-glob=<glob> : Glob pattern relative to appdir to ignore when
+ searching for libraries.
+ -executable=<path> : Let the given executable use the deployed libraries
+ too
+ -extra-plugins=<list> : List of extra plugins which should be deployed,
+ separated by comma.
+ -no-copy-copyright-files : Skip deployment of copyright files.
+ -no-plugins : Skip plugin deployment.
+ -no-strip : Don't run 'strip' on the binaries.
+ -no-translations : Skip deployment of translations.
+ -qmake=<path> : The qmake executable to use.
+ -qmldir=<path> : Scan for QML imports in the given path.
+ -qmlimport=<path> : Add the given path to QML module search locations.
+ -show-exclude-libs : Print exclude libraries list.
+ -verbose=<0-3> : 0 = no output, 1 = error/warning (default),
+ 2 = normal, 3 = debug.
+ -updateinformation=<update string> : Embed update information STRING; if zsyncmake is installed, generate zsync file
+ -qtlibinfix=<infix> : Adapt the .so search if your Qt distribution has infix.
+ -version : Print version statement and exit.
+
+linuxdeployqt takes an application as input and makes it
+self-contained by copying in the Qt libraries and plugins that
+the application uses.
+
+By default it deploys the Qt instance that qmake on the $PATH points to.
+The '-qmake' option can be used to point to the qmake executable
+to be used instead.
+
+Plugins related to a Qt library are copied in with the library.
+
+See the "Deploying Applications on Linux" topic in the
+documentation for more information about deployment on Linux.
+zinface@zinface-PC:/tmp/tmp.5gmZKUqn9s$
\ No newline at end of file
diff --git a/cmake/package-deb.descript b/cmake/package-deb.descript
new file mode 100644
index 0000000..c1c275e
--- /dev/null
+++ b/cmake/package-deb.descript
@@ -0,0 +1,45 @@
+# 注释行(使用方式)
+# find_package(DebPackage PATHS ${CMAKE_SOURCE_DIR})
+# add_package_descript(cmake/package-deb.descript)
+
+# 打包后的文件名称
+# FileName: 待定
+# 配置 PACKAGE_SUFFIX 变量可添加尾巴名称
+# 如在 Makefile 中硬编码方式
+# OSID=$(shell lsb_release -si)
+# OSRELEASE=$(shell lsb_release -sr)
+# -DPACKAGE_SUFFIX="_$(OSID)$(OSRELEASE)"
+
+# deb 安装包的安装时脚本
+# 1.安装[前|后]执行脚本(preinst,postinst),
+# 2.卸载[前|后]执行脚本(prerm,postrm)
+# ControlExtra: 未定义(暂不支持)
+# 如需指定请修改 DebPackageConfig.cmake 模板(第252行)
+# CPACK_DEBIAN_PACKAGE_CONTROL_EXTRA 变量
+
+# 打包类型,暂支持 deb, 未来支持 tgz(tar.gz)
+Type: deb
+# 软件包名称(自动, 使用 PROJECT_NAME 变量值)
+Package: auto
+# 软件包版本(自动, 使用 PROJECT_VERSION 变量值)
+Version: auto
+# 日历化尾部版本
+CalVer: true
+# 软件包架构(自动)
+Architecture: auto
+# 软件包属于的系统部分[admin|cli-mono|comm|database|debug|devel|doc|editors|education|electronics|embedded|fonts|games|gnome|gnu-r|gnustep|graphics|hamradio|haskell|httpd|interpreters|introspection|java|javascript|kde|kernel|libdevel|libs|lisp|localization|mail|math|metapackages|misc|net|news|ocaml|oldlibs|otherosfs|perl|php|python|ruby|rust|science|shells|sound|tasks|tex|text|utils|vcs|video|web|x11|xfce|zope]
+Section: utils
+# 软件包优先级[required|important|standard|optional|extra]
+Priority: optional
+# 软件包依赖
+Depends: curl, aria2
+# 软件包维护者(组织或个人)
+Maintainer: shenmo <shenmo@spark-app.store>
+# 软件包主页
+Homepage: https://www.spark-app.store/
+# 软件包建议
+Recommends:
+# 软件包描述信息
+Descrition: Spark Store
+ A community powered app store, based on DTK.
+
diff --git a/cmake/spark-appimage.desktop.in b/cmake/spark-appimage.desktop.in
new file mode 100644
index 0000000..228a84a
--- /dev/null
+++ b/cmake/spark-appimage.desktop.in
@@ -0,0 +1,9 @@
+[Desktop Entry]
+Name=@APP_NAME@
+Name[zh_CN]=@APP_NAME_ZH_CN@
+Exec=AppRun %F
+Icon=default
+Comment=@APP_COMMENT@
+Terminal=true
+Type=Application
+Categories=@APP_CATEGORIES@
\ No newline at end of file
diff --git a/cmake/spark-desktop.desktop.in b/cmake/spark-desktop.desktop.in
new file mode 100644
index 0000000..0fa070b
--- /dev/null
+++ b/cmake/spark-desktop.desktop.in
@@ -0,0 +1,11 @@
+[Desktop Entry]
+Version=1.0
+Name=@APP_NAME@
+Name[zh_CN]=@APP_NAME_ZH_CN@
+Comment=@APP_COMMENT@
+Type=@APP_TYPE@
+Exec=@APP_EXECUTE_PATH@
+Icon=@APP_EXECUTE_ICON_PATH@
+Categories=@APP_CATEGORIES@
+
+# Generated from the DesktopGenerater component of the z-Tools toolkit
\ No newline at end of file
--
2.20.1
From 0ba6387230becafa5c91817b036084dd702c47c0 Mon Sep 17 00:00:00 2001
From: zinface <zinface@163.com>
Date: Sun, 11 Dec 2022 22:37:26 +0800
Subject: [PATCH 2/7] =?UTF-8?q?repo:=20=E5=A4=84=E7=90=86=20deb=20?=
=?UTF-8?q?=E5=AE=89=E8=A3=85=E8=84=9A=E6=9C=AC=E7=9A=84=E9=97=AE=E9=A2=98?=
=?UTF-8?q?=EF=BC=8C=E5=AE=8C=E6=88=90=20cmake=20=E5=8C=96=E6=9E=84?=
=?UTF-8?q?=E5=BB=BA?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Signed-off-by: zinface <zinface@163.com>
---
Makefile | 16 +++++++++++++++-
cmake/DebPackageConfig.cmake | 8 ++++----
2 files changed, 19 insertions(+), 5 deletions(-)
diff --git a/Makefile b/Makefile
index 2df9883..c673c76 100644
--- a/Makefile
+++ b/Makefile
@@ -27,7 +27,21 @@ release:
cd build && cmake -DCMAKE_BUILD_TYPE=Release -DPACKAGE_SUFFIX="$(SUFFIX)" ..
cd build && make -j$(CPUS)
-package: release
+# 在 make package 时自动处理此内容
+# 这是由于 CMake 打包需要纯粹的安装脚本名称
+debian-build-scripts:
+ mkdir -p build/debian
+ cp debian/spark-store.postinst build/debian/postinst
+ cp debian/spark-store.postrm build/debian/postrm
+ cp debian/spark-store.preinst build/debian/preinst
+ cp debian/spark-store.prerm build/debian/prerm
+
+ chmod +x build/debian/postinst
+ chmod +x build/debian/postrm
+ chmod +x build/debian/preinst
+ chmod +x build/debian/prerm
+
+package: release debian-build-scripts
cd build && make package
tree build/_CPack_Packages/Linux/DEB/$(PROJECT_NAME)-*
dpkg-deb --contents build/$(PROJECT_NAME)_*$(CALENDAR)*$(SUFFIX).deb
diff --git a/cmake/DebPackageConfig.cmake b/cmake/DebPackageConfig.cmake
index 2ab24e7..38ad2d7 100644
--- a/cmake/DebPackageConfig.cmake
+++ b/cmake/DebPackageConfig.cmake
@@ -290,10 +290,10 @@ function(add_package_descript IN_DES)
# "${CMAKE_SOURCE_DIR}/config/DEBIAN/postinst"
# "${CMAKE_SOURCE_DIR}/config/DEBIAN/prerm"
# "${CMAKE_SOURCE_DIR}/config/DEBIAN/postrm"
- "${CMAKE_SOURCE_DIR}/debian/spark-store.postinst"
- "${CMAKE_SOURCE_DIR}/debian/spark-store.postrm"
- "${CMAKE_SOURCE_DIR}/debian/spark-store.preinst"
- "${CMAKE_SOURCE_DIR}/debian/spark-store.prerm"
+ "${CMAKE_BINARY_DIR}/debian/postinst"
+ "${CMAKE_BINARY_DIR}/debian/postrm"
+ "${CMAKE_BINARY_DIR}/debian/preinst"
+ "${CMAKE_BINARY_DIR}/debian/prerm"
)
# 设置为ON以便使用 dpkg-shlibdeps 生成更好的包依赖列表。
--
2.20.1
From bfef40012ff241f50d9448f2d4b787d22b24813a Mon Sep 17 00:00:00 2001
From: zinface <zinface@163.com>
Date: Mon, 12 Dec 2022 00:50:01 +0800
Subject: [PATCH 3/7] =?UTF-8?q?docs:=20=E6=B7=BB=E5=8A=A0=20Spark=20?=
=?UTF-8?q?=E6=9E=84=E5=BB=BA=20=E4=B8=8E=20CMake=20=E6=9E=84=E5=BB=BA?=
=?UTF-8?q?=E7=B3=BB=E7=BB=9F=E9=A2=84=E8=A7=88=E6=96=87=E6=A1=A3?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Signed-off-by: zinface <zinface@163.com>
---
DOCS/spark-cmake-build-system.md | 301 +++++++++++++++++++++++++++++++
1 file changed, 301 insertions(+)
create mode 100644 DOCS/spark-cmake-build-system.md
diff --git a/DOCS/spark-cmake-build-system.md b/DOCS/spark-cmake-build-system.md
new file mode 100644
index 0000000..a807009
--- /dev/null
+++ b/DOCS/spark-cmake-build-system.md
@@ -0,0 +1,301 @@
+# Spark 构建 与 CMake 构建系统预览
+
+- 前言
+
+ ```cmake
+ # 在 v4.0 之前,我们一直使用 `qmake` 来进行构建星火应用商店。
+
+ # 在 v4.0 之后,我们加入了 cmake 构建,并使用 spark 构建为扩展进行开展项目的构建处理。
+
+ # 当然,这对于星火应用商店传统的 qmake 构建风格,我们表示还是可以继续保留。
+ ```
+
+- 有关 `CMake` 与 `Spark` 之间的关系
+
+ 在进行 `CMake` 化构建时,我们摒弃了传统 `CMake` 语法,使用以 `Spark` 为代号进行一种可扩展的 `CMake` 构建模块设计。
+
+ 以下是使用传统 `CMake` 进行构建一个简单的 `Qt` 应用程序:
+
+ ```cmake
+ cmake_minimum_required(VERSION 3.5.1)
+
+ project(template LANGUAGES CXX VERSION 0.0.1)
+
+ set(CMAKE_INCLUDE_CURRENT_DIR ON)
+ set(CMAKE_AUTOMOC ON)
+ set(CMAKE_AUTOUIC ON)
+ set(CMAKE_AUTORCC ON)
+
+ find_package(Qt5 COMPONENTS Core Widgets Network)
+
+ # 头文件目录
+ include_directories()
+
+ # 资源文件路径
+ set(QRC_SOURCES "")
+
+ add_executable(${PROJECT_NAME} "main.cpp"
+ "mainwindow.cpp" "mainwindow.h"
+ ${QRC_SOURCES}
+ )
+ target_link_libraries(${PROJECT_NAME} Qt5::Core Qt5::Widgets Qt5::Network)
+ ```
+
+ 在传统的 `CMake` 项目中,它保留了基于 `Makefile` 构建项目的风格设计,在每一个构建点都会有一个 `CMakeLists.txt` 存在,它就是所谓的 `Makefile` 构建的超集。
+
+ 终于,我们在编写了大量 `CMakeLists.txt` 之后,觉得需要一个更快的构建方式,最起码是移除原有的 `类C` 写法,通过包装不同的目的来完成构建工作,而不是在构建脚本中出现一大堆 `CMake` 传统语法。
+
+    通过最初的设计,我们仅保留了最顶层的 `CMakeLists.txt`,并将其作为一个唯一构建点。
+
+ ```cmake
+ cmake_minimum_required(VERSION 3.5.1)
+
+ project(spark-store LANGUAGES CXX VERSION 0.0.1)
+
+    # 构建 spark-store 的配置流程
+ include(cmake/SparkEnvConfig.cmake) # 设置一些有关QT构建的开关
+ include(cmake/SparkMacrosConfig.cmake) # 声明了一些 spark_ 开头的 macro 宏
+ include(cmake/SparkFindQt5Config.cmake) # 提供了 target_link_qt5 用于目标链接 qt5 的库
+ include(cmake/SparkFindDtkConfig.cmake) # 提供了 target_link_dtk 用于目标链接 dtk 的库
+ include(cmake/SparkThirdLibraryConfig.cmake) # 提供了 third-party 下对应的 target_link_<lib> 用于目标链接 <lib> 的库
+ include(cmake/SparkFindLibraries.cmake) # 提供了基于 spark_ 宏生成的 target_link_<lib> 用于目标链接 <lib> 的库
+ include(cmake/SparkTranslatorConfig.cmake) # 提供了 qt5 ts转qm 的操作,最终生成 SPARK_QM_TRANSLATIONS 变量用于构建可执行文件时参与编译
+ include(cmake/SparkBuild.cmake) # 使用了 spark_ 宏基于已提供的宏参数自动展开构建可执行目标文件
+
+ # 构建 spark-store 可执行文件 (用于显式展开 SparkBuild.cmake 内容,如果使用 SparkBuild.cmake 此处将需要注释)
+ # spark_add_executable_path(${PROJECT_NAME} src ${SPARK_SOURCES} ${SPARK_QM_TRANSLATIONS})
+ # target_link_qt5(${PROJECT_NAME}) # 构建的目标需要使用 qt5 库
+ # target_link_dtk(${PROJECT_NAME}) # 构建的目标需要使用 dtk 库
+ # target_link_notify(${PROJECT_NAME}) # 构建的目标需要使用 notify 库
+ # target_link_QtNetworkService(${PROJECT_NAME}) # 构建的目标需要使用 third-part 库
+
+ # 子构建 spark-dstore-patch
+ # add_subdirectory(src/spark-dstore-patch) # 传统构建方式,但已经可使用 spark_ 宏构建目标
+ spark_add_executable_path(spark-dstore-patch src/spark-dstore-patch)
+ target_link_qt5(spark-dstore-patch) # 构建的目标需要使用 qt5 库
+
+ include(cmake/SparkInstall.cmake) # 使用了 DebPackage 提供的安装模式
+ include(cmake/SparkBuildGraphviz.cmake) # 添加了 builddeps 目标构建make builddeps 将会生成依赖图
+ ```
+
+ 这样一写,我们觉得这是一种非常独特的构建工作,旨在为一些 Linux Qt 项目进行构建时,苦于没有一个较好的构建模板设计,每次在编写一个新的项目时,只能从头开始写构建脚本的一种解决方式。
+
+    我们并不打算发明构建工具,只不过在研究打破 `CMake` 传统构建风格时,我发现了 `XMake`,当时这是我安装的一个 `XMake` 版本。
+
+ ```
+ $ xmake --version
+ xmake v2.6.2+202201121245, A cross-platform build utility based on Lua
+ Copyright (C) 2015-present Ruki Wang, tboox.org, xmake.io
+ _
+ __ ___ __ __ __ _| | ______
+ \ \/ / | \/ |/ _ | |/ / __ \
+ > < | \__/ | /_| | < ___/
+ /_/\_\_|_| |_|\__ \|_|\_\____|
+ by ruki, xmake.io
+
+ 👉 Manual: https://xmake.io/#/getting_started
+ 🙏 Donate: https://xmake.io/#/sponsor
+ ```
+
+ 在准备尝试使用最适用于 `Linux Qt` 项目的构建方式,也为更快构建一个 `Linux` 应用项目来进行扩展构建。
+
+ 我们最开始完成了简单的封装一个 `spark_` 开头的函数来定义简单的构建库目标、构建可执行目标。
+
+ 当时使用的是 `function`,并没有使用宏 `macro`,起初认为是无太大区别,后来都转用 `macro` 来定义了。
+
+ ```cmake
+ # SparkMacrosConfig.cmake
+
+ cmake_minimum_required(VERSION 3.5.1)
+
+ # 定义一些 macro 用于自动生成构建结构
+
+ # spark_add_library <lib_name> [files]...
+ # 构建一个库,基于指定的源文件
+ # 并根据库名生成 target_link_<lib_name> 函数
+ macro(spark_add_library _lib_name)
+ message("================ ${_lib_name} Library ================")
+ add_library(${_lib_name} ${ARGN})
+
+ set(SRCS ${ARGN})
+ foreach(item IN LISTS SRCS)
+ message(" -> ${item}")
+ endforeach(item IN LISTS SRCS)
+
+ function(target_link_${_lib_name} TARGET)
+ message("${_lib_name}")
+ target_link_libraries(${TARGET} ${_lib_name})
+ endfunction(target_link_${_lib_name} TARGET)
+
+ endmacro(spark_add_library _lib_name)
+
+
+ # spark_add_executable <exec_name> [files]...
+ # 构建一个可执行文件,基于指定的源文件
+ # Qt编译时源文件包括很多类型需要指定 *.h/*.cpp/*.qrc/*.qm/... 等
+ macro(spark_add_executable _exec_name)
+
+ message("================ ${_exec_name} Executable ================")
+ add_executable(${_exec_name} ${ARGN})
+
+ endmacro(spark_add_executable _exec_name)
+ ```
+
+ 这样,我们就完成了一个简单的构建目标的方式,通过包装一个 `add_library` 我们可以达到相同的目的。
+
+ 并为其创建一个 `target_link_` 开头的`function`来明确声明这个库目标被使用者给依赖。
+
+ ```cmake
+    # 例如构建一个 helloworld 目标,并链接到到 `qt5` 的基础库
+ # target_link_qt5 并不是由 spark_add_library 产生的。
+ # 只是为了更方便使用 Qt 库, 我们对其进行了一次简单的定义,
+ # 而 target_link_ 可以在使用 macro 宏时生成。
+
+ # target_link_qt5 中只定义了有限的几个核心组件: Qt5::Core Qt5::Widgets Qt5::Network
+
+ spark_add_executable(helloworld
+ main.cpp)
+
+ target_link_qt5(helloworld) # 表示 helloworld 可执行目标依赖于 Qt5
+ ```
+
+ 当然也可以这样
+
+ ```cmake
+ # 构建一个库目标 Say ,它依赖于 qt5 核心库进行构建
+ spark_add_library(Say say.h say.cpp)
+ target_link_qt5(Say)
+
+    # 构建一个可执行目标 helloworld,它将依赖于 Say 库
+ spark_add_executable(helloworld main.cpp)
+ target_link_Say(helloworld)
+ ```
+
+
+- 来到 `Spark` 构建的世界
+
+ 这个 `Spark` 构建,最主要的方向就是追求扩展与应用到现有 `Linux Qt` 项目,并替换现有使用传统 `CMake` 构建的 `Linux Qt` 项目。
+
+ `Spark` 一直在追求新的构建风格,新的扩展模块,从最开始的封装简单的构建库与可执行文件,到生成 `desktop` 文件的模块,构建 `deb` 软件包的模块,构建新的 `install` 安装方案。
+
+ 其中,从基于指定的源代码构建库与可执行文件,发展到使用指定的路径来构建为一个模块。
+
+ ```cmake
+ # 构建一个 bigimage 库,它将依赖于 qt5
+ spark_add_libraries_path(bigimage src/spark-widgets/bigimage)
+ target_link_qt5(bigimage)
+
+
+ # 构建一个 imageshow 库,它将依赖于 bigimage
+ spark_add_libraries_path(imageshow src/spark-widgets/imageshow)
+ target_link_bigimage(imageshow)
+
+ ...
+ ```
+
+ 后来,这种方式也基本上被认为最繁琐的构建方式,我们开始了"一行一库"的构建时代,以上的构建内容可以被认为只有两行构建。
+
+    一是构建 bigimage 库,二是构建 imageshow 库,三是 imageshow 依赖了 bigimage,当然依赖列表就用'+'来进行表示吧。
+
+ ```cmake
+ # 基于传入的项进行构建
+ # 可接受的值为: 路径列表
+ # 可接受的值为: 路径列表+依赖库A+依赖库B
+ spark_add_library_realpaths(
+ src/spark-widgets/bigimage
+ src/spark-widgets/imageshow+bigimage)
+ ```
+
+- `Spark` 构建与 `DTK`
+
+ 我们在为基于 Deepin Tool Kit(DTK) 的应用程序添加了简单的扩展,使用以下内容即可使你的程序依赖于 `DTK`
+
+ ```cmake
+ # 引入 SparkFindDtk 模块
+ include(cmake/SparkFindDtkConfig.cmake)
+
+ # 构建一个 bigimage 库,它将自动依赖于 qt5
+ spark_add_library_realpaths(
+ src/spark-widgets/bigimage)
+
+ # 为 bigimage 库目标进行链接 DTK
+ target_link_dtk(bigimage)
+ ```
+
+- `Spark` 构建与 `deb` 打包
+
+ 我们在为基于 `CMakeLists.txt` 中使用的 `install` 指令进行了 `CPack` 打包扩展,因为我们不喜欢类似 `Makefile` 这种 `make install` 安装的方式。
+
+ 所以我们也增加了一个扩展模块 `DebPackageConfig.cmake`,因为它是早于 `Spark` 构建出现,所以并不为它进行 `Spark` 命名,它拥有一个模板配置,可以通过简单的填充包描述信息即可实现打包。
+
+ 注意,它的最开始三行即是使用方式说明,通过(cv)复制粘贴到您的顶层构建脚本中,即可完成打包功能,更多的软件包打包设定功能仍在 `DebPackageConfig.cmake` 中预留被注释的部分。
+
+ 例如您想生成软件包依赖列表等,在其中 `SHLIBDEPS` 字样的部分已预留注释。
+
+ 例如您想为软件包增加 `pre[inst|rm]、post[inst|rm]` 等脚本,在其中 `CONTROL` 字样的部分已预留注释。
+
+ 描述文件还为您专门提供了可选的自动化填充软件包名称、软件包版本、软件包架构等,而无需要每次更新描述文件。
+
+ ```ini
+ # 注释行(使用方式)
+ # find_package(DebPackage PATHS ${CMAKE_SOURCE_DIR})
+ # add_package_descript(cmake/package-deb.descript)
+
+ # 打包后的文件名称
+ # FileName: 待定
+ # 配置 PACKAGE_SUFFIX 变量可添加尾巴名称
+ # 如在 Makefile 中硬编码方式
+ # OSID=$(shell lsb_release -si)
+ # OSRELEASE=$(shell lsb_release -sr)
+ # -DPACKAGE_SUFFIX="_$(OSID)$(OSRELEASE)"
+
+ # deb 安装包的安装时脚本
+ # 1.安装[前|后]执行脚本(preinst,postinst),
+ # 2.卸载[前|后]执行脚本(prerm,postrm)
+ # ControlExtra: 未定义(暂不支持)
+ # 如需指定请修改 DebPackageConfig.cmake 模板(第252行)
+ # CPACK_DEBIAN_PACKAGE_CONTROL_EXTRA 变量
+
+ # 打包类型,暂支持 deb, 未来支持 tgz(tar.gz)
+ Type: deb
+ # 软件包名称(自动, 使用 PROJECT_NAME 变量值)
+ Package: auto
+ # 软件包版本(自动, 使用 PROJECT_VERSION 变量值)
+ Version: auto
+ # 日历化尾部版本
+ CalVer: true
+ # 软件包架构(自动)
+ Architecture: auto
+ # 软件包属于的系统部分[admin|cli-mono|comm|database|debug|devel|doc|editors|education|electronics|embedded|fonts|games|gnome|gnu-r|gnustep|graphics|hamradio|haskell|httpd|interpreters|introspection|java|javascript|kde|kernel|libdevel|libs|lisp|localization|mail|math|metapackages|misc|net|news|ocaml|oldlibs|otherosfs|perl|php|python|ruby|rust|science|shells|sound|tasks|tex|text|utils|vcs|video|web|x11|xfce|zope]
+ Section: utils
+    # 软件包优先级[required|important|standard|optional|extra]
+ Priority: optional
+ # 软件包依赖
+ Depends: curl, aria2
+ # 软件包维护者(组织或个人)
+ Maintainer: shenmo <shenmo@spark-app.store>
+ # 软件包主页
+ Homepage: https://www.spark-app.store/
+ # 软件包建议
+ Recommends:
+ # 软件包描述信息
+ Descrition: Spark Store
+ A community powered app store, based on DTK.
+ ```
+
+
+- 写在后面,有关 `Spark` 构建的起源与未来
+
+ `Spark` 构建真正意义上只是一个有趣的想法,并且为它付诸一定的实现。
+
+    我们拥抱过 qmake,我们也拥抱过 cmake。我们是混乱的 IDE 或是代码编辑器的忠实用户,就像是在 IDE 与 编辑器之间的战争从未停止过。
+
+ 在着手 `Spark` 构建之前,它就是一个想法,目的是为了尝试将星火应用商店从 `qmake` 构建转为 `cmake` 构建,它就像星星之火中的野火,它有自己的想法。而这个想法就是打破传统的构建方式,或尝试改造现有的构建模式。
+
+ 而这并没有为星火商店付出什么,甚至没有提交过任何 `bug fix`,只是一个因为喜欢安份但又不守已的试图破坏(改变)星火应用商店传统构建的疯狂的 `VSCode` 用户,事实上是一个 `CMake` 用户,因为他无法在 `VSCode` 中使用 `qmake` 增强 `VSCode` 的代码能力。
+
+    只能试图在一个已经发展了多年的项目上开始进行破坏(改造),将其转化为以 `cmake` 为主的构建,并在其它开源项目中寻找 `Spark` 的构建瓶颈以及拓展它疯狂的可扩展模块。
+
+ 在很久之后,这个想法终于在星火商店的 `4.0` 计划下开始正式实施,此时 `Spark` 构建已经为很多 `Linux Qt` 项目进行构建,包括非常复杂的构建探索,打破了一个又一个构建方式,最终完善了基本的构建模板。
+
+ 现在,`Spark` 构建在强大的 `CMake` 扩展下增强了 `VSCode` 的代码编写能力,在绕了一大圈之后,终于回到了起源的地方,并开始了它的构建使命,为星火应用商店构建 `4.0` 以及未来的版本。
\ No newline at end of file
--
2.20.1
From f07c4dd69ccbc74dbf44f6417ecbd076ac01b620 Mon Sep 17 00:00:00 2001
From: zinface <zinface@163.com>
Date: Thu, 15 Dec 2022 19:59:46 +0800
Subject: [PATCH 4/7] =?UTF-8?q?repo:=20=E6=9B=B4=E6=96=B0=E7=94=A8?=
=?UTF-8?q?=E4=BA=8E=20Qt5=20Svg=20=E4=BE=9D=E8=B5=96=E7=9A=84=E6=9E=84?=
=?UTF-8?q?=E5=BB=BA=E5=86=85=E5=AE=B9?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
CMakeLists.txt | 1 +
cmake/SparkFindQt5Config.cmake | 4 ++--
2 files changed, 3 insertions(+), 2 deletions(-)
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 5864b54..14530e1 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -32,6 +32,7 @@ spark_add_library_realpaths(
target_link_qt5_dbus(dbus)
target_link_qt5_Concurrent(common)
target_link_qt5_Concurrent(backend)
+target_link_qt5_Svg(common)
target_link_qt5_WebEngineWidgets(common)
spark_add_executable_path(${PROJECT_NAME} src
diff --git a/cmake/SparkFindQt5Config.cmake b/cmake/SparkFindQt5Config.cmake
index b56399f..67e29b6 100644
--- a/cmake/SparkFindQt5Config.cmake
+++ b/cmake/SparkFindQt5Config.cmake
@@ -1,6 +1,6 @@
cmake_minimum_required(VERSION 3.5.1)
-find_package(Qt5 COMPONENTS Core Widgets Network Concurrent WebEngineWidgets Sql WebSockets REQUIRED)
+find_package(Qt5 COMPONENTS Core Widgets Network Concurrent WebEngineWidgets REQUIRED)
# function(target_link_qt5 NAME)
# target_link_libraries(${NAME}
@@ -132,7 +132,7 @@ spark_add_links_qt5(
# SerialPort
# ServiceSupport
# Sql
- # Svg
+ Svg
# Test
# ThemeSupport
# UiPlugin
--
2.20.1
From dbd36f105e69ee736f19fa4d7632e3f12316955c Mon Sep 17 00:00:00 2001
From: zinface <zinface@163.com>
Date: Mon, 19 Dec 2022 02:58:17 +0800
Subject: [PATCH 5/7] =?UTF-8?q?repo:=20=E6=9B=B4=E6=96=B0=E7=94=A8?=
=?UTF-8?q?=E4=BA=8E=E6=94=AF=E6=8C=81=20BaseWidgetOpacity=20=E5=9F=BA?=
=?UTF-8?q?=E7=A1=80=E7=B1=BB=E7=9A=84=E6=9E=84=E5=BB=BA?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
CMakeLists.txt | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 14530e1..b5e32d8 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -24,11 +24,13 @@ spark_add_library_realpaths(
src/dbus
src/utils+dbus
src/backend+utils
+ src/widgets/base
src/widgets/common+backend
src/widgets+common
src/pages+widgets
)
+target_link_dtk(base)
target_link_qt5_dbus(dbus)
target_link_qt5_Concurrent(common)
target_link_qt5_Concurrent(backend)
@@ -38,9 +40,9 @@ target_link_qt5_WebEngineWidgets(common)
spark_add_executable_path(${PROJECT_NAME} src
${QRC_SOURCES} ${SPARK_QM_TRANSLATIONS}
)
+target_link_base(${PROJECT_NAME})
target_link_dbus(${PROJECT_NAME})
target_link_pages(${PROJECT_NAME})
-target_link_dtk(${PROJECT_NAME})
spark_add_executable_path(spark-dstore-patch src/spark-dstore-patch)
--
2.20.1
From 16b723cd1a37c49e9af668f5f97d17a2e383420a Mon Sep 17 00:00:00 2001
From: zinface <zinface@163.com>
Date: Mon, 30 Jan 2023 20:48:25 +0800
Subject: [PATCH 6/7] =?UTF-8?q?spark:=20=E5=88=9B=E5=BB=BA=E6=96=B0?=
=?UTF-8?q?=E7=9A=84=E6=A8=A1=E5=9D=97=E7=94=A8=E4=BA=8E=20debian/changelo?=
=?UTF-8?q?g=20=E7=9A=84=E7=89=88=E6=9C=AC=E5=8F=B7=E9=87=8D=E5=86=99?=
=?UTF-8?q?=E8=A7=84=E5=88=99?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
1. SparkDebianChangelogVersion.cmake
在此文件中提供 spark_debian_changelog_override_version 宏:
基于指定的 debian/changelog 文件并用于读取最新版本号并覆盖到项目版本
@Since: v4.0.0
---
CMakeLists.txt | 4 ++
cmake/SparkDebianChangelogVersion.cmake | 58 +++++++++++++++++++++++++
2 files changed, 62 insertions(+)
create mode 100644 cmake/SparkDebianChangelogVersion.cmake
diff --git a/CMakeLists.txt b/CMakeLists.txt
index b5e32d8..73221bc 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -11,6 +11,10 @@ include(cmake/SparkFindDtkConfig.cmake) # 提供了 target_link_dtk 用
include(cmake/SparkTranslatorConfig.cmake) # 提供了 qt5 ts转qm 的操作,最终生成 SPARK_QM_TRANSLATIONS 变量用于构建可执行文件时参与编译
include(cmake/SparkMacrosExtendConfig.cmake) # 使用了 spark_ 宏基于已提供的宏参数自动展开构建可执行目标文件
include(cmake/SparkInstallMacrosConfig.cmake) # 提供了 spark_install 开头的 macro 宏用于安装 target、file、program、directory、changelog 等内容
+include(cmake/SparkDebianChangelogVersion.cmake)# 提供了 spark_debian_ 开头的宏进行进行覆盖 PROJECT_VERSION
+
+# 在开始之前,使用项目中提供的 debian/changelog 进行重写本构建系统的 PROJECT_VERSION
+spark_debian_changelog_override_version(debian/changelog)
# 资源文件路径
set(QRC_SOURCES "src/assets/assets.qrc")
diff --git a/cmake/SparkDebianChangelogVersion.cmake b/cmake/SparkDebianChangelogVersion.cmake
new file mode 100644
index 0000000..9d8bca2
--- /dev/null
+++ b/cmake/SparkDebianChangelogVersion.cmake
@@ -0,0 +1,58 @@
+# SparkDebianChangelogVersion
+
+# 尝试读取 debian/changelog 文件的第一行数据,并查找
+# spark-store (4.2.2) stable; urgency=medium
+# 将 (version) 信息应用用于 PROJECT_VERSION
+
+
+macro(spark_debian_changelog_override_version _CHANGELOG_FILE_PATH)
+ set(CHANGELOG_FILE_PATH ${_CHANGELOG_FILE_PATH})
+ set(CHANGELOG_FILE_EXISTS FALSE)
+
+ # 首次判断,如果判断文件不存在,将尽可能的判断文件是存在的
+ if(NOT EXISTS ${CHANGELOG_FILE_PATH})
+
+ # 在 CMake v3.19 起,可以使用 file(REAL_PATH <path> <out-var>) 进行获取 path 的绝对路径
+ if(CMAKE_VERSION GREATER_EQUAL 3.19)
+ file(REAL_PATH ${CHANGELOG_FILE_PATH} CHANGELOG_FILE_ABSOLUTE_PATH)
+ if(EXISTS ${CHANGELOG_FILE_ABSOLUTE_PATH})
+ set(CHANGELOG_FILE_EXISTS TRUE)
+ set(CHANGELOG_FILE_PATH ${CHANGELOG_FILE_ABSOLUTE_PATH})
+ endif(EXISTS ${CHANGELOG_FILE_ABSOLUTE_PATH})
+ endif(CMAKE_VERSION GREATER_EQUAL 3.19)
+
+ # 第二次判断与处理 使用 file(SIZE) 方式
+ if(NOT CHANGELOG_FILE_EXISTS)
+ file(SIZE ${CHANGELOG_FILE_PATH} CHANGELOG_FILE_SIZE)
+ if(CHANGELOG_FILE_SIZE GREATER 0)
+ set(CHANGELOG_FILE_EXISTS TRUE)
+ endif(CHANGELOG_FILE_SIZE GREATER 0)
+ endif(NOT CHANGELOG_FILE_EXISTS)
+
+ # 第三次判断与处理 使用路径拼接方式
+ if(NOT CHANGELOG_FILE_EXISTS)
+ if(EXISTS ${CMAKE_SOURCE_DIR}/${CHANGELOG_FILE_PATH})
+ set(CHANGELOG_FILE_PATH ${CMAKE_SOURCE_DIR}/${CHANGELOG_FILE_PATH})
+ set(CHANGELOG_FILE_EXISTS TRUE)
+ endif(EXISTS ${CMAKE_SOURCE_DIR}/${CHANGELOG_FILE_PATH})
+ endif(NOT CHANGELOG_FILE_EXISTS)
+ endif(NOT EXISTS ${CHANGELOG_FILE_PATH})
+
+ message("> V = ${CHANGELOG_FILE_PATH}")
+ if(CHANGELOG_FILE_EXISTS)
+ file(READ ${CHANGELOG_FILE_PATH} CHANGELOG_CONTENT LIMIT 20)
+
+ string(FIND ${CHANGELOG_CONTENT} "(" V_PRE) # +1 to V_BEGIN
+ string(FIND ${CHANGELOG_CONTENT} ")" V_END)
+
+ math(EXPR V_BEGIN "${V_PRE}+1")
+ math(EXPR V_LENGTH "${V_END}-${V_BEGIN}")
+
+ string(SUBSTRING ${CHANGELOG_CONTENT} ${V_BEGIN} ${V_LENGTH} V)
+
+ message("> V = ${CHANGELOG_CONTENT}")
+ message("> V = [${V}]")
+
+ set(PROJECT_VERSION ${V})
+ endif(CHANGELOG_FILE_EXISTS)
+endmacro(spark_debian_changelog_override_version _CHANGELOG_FILE_PATH)
--
2.20.1
From b16b02606c94cf3f3b23c8e59facd827b4ef38b6 Mon Sep 17 00:00:00 2001
From: zinface <zinface@163.com>
Date: Mon, 30 Jan 2023 21:23:15 +0800
Subject: [PATCH 7/7] =?UTF-8?q?spark:=20=E5=AF=B9=20SparkDebianChangelogVe?=
=?UTF-8?q?rsion.cmake=20=E6=A8=A1=E5=9D=97=E7=9A=84=E6=89=A9=E5=B1=95?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
1. 增加 SPARK_OVERRIDE_VERSION 的文件输出
以解决 Makefile 中无法明确多个 deb 包时无法确定最新版本的情况
---
Makefile | 15 +++++++++++++--
cmake/SparkDebianChangelogVersion.cmake | 1 +
2 files changed, 14 insertions(+), 2 deletions(-)
diff --git a/Makefile b/Makefile
index c673c76..81ca078 100644
--- a/Makefile
+++ b/Makefile
@@ -8,6 +8,17 @@ SUFFIX=_$(OSID)$(OSRELEASE)
endif
PROJECT_NAME=spark-store
+PROJECT_VERSION=*
+
+ifneq ($(wildcard build/SPARK_OVERRIDE_VERSION),)
+SPARK_OVERRIDE_VERSION=`cat build/SPARK_OVERRIDE_VERSION`
+.PHONY: override-version
+override-version:
+ @echo $(SPARK_OVERRIDE_VERSION)
+ @echo "wildcard - good: $(wildcard build/SPARK_OVERRIDE_VERSION)"
+ @echo "wildcard - bad.: $(wildcard build/SPARK_OVERRIDE_VERSIONS)"
+PROJECT_VERSION=$(SPARK_OVERRIDE_VERSION)-
+endif
all:
mkdir -p build
@@ -44,7 +55,7 @@ debian-build-scripts:
package: release debian-build-scripts
cd build && make package
tree build/_CPack_Packages/Linux/DEB/$(PROJECT_NAME)-*
- dpkg-deb --contents build/$(PROJECT_NAME)_*$(CALENDAR)*$(SUFFIX).deb
+ dpkg-deb --contents build/$(PROJECT_NAME)_$(PROJECT_VERSION)$(CALENDAR)*$(SUFFIX).deb
# cd build/_CPack_Packages/Linux/DEB/$(PROJECT_NAME)_*$(CALENDAR)*$(SUFFIX).deb && find .
builddeps:
@@ -54,7 +65,7 @@ cpus:
@echo "CPU数量: $(CPUS)"
copytosource:package
- cp build/$(PROJECT_NAME)_*$(CALENDAR)*.deb .
+ cp build/$(PROJECT_NAME)_$(PROJECT_VERSION)$(CALENDAR)*.deb .
# 进入 qdebug 模式,在 deepin 中默认被禁用,可 env | grep QT 查看,并在 /etc/X11/Xsession.d/00deepin-dde-env 配置中已定义
# 1. 禁止 qt 的 debug 打印: qt.*.debug=false
diff --git a/cmake/SparkDebianChangelogVersion.cmake b/cmake/SparkDebianChangelogVersion.cmake
index 9d8bca2..65e1c16 100644
--- a/cmake/SparkDebianChangelogVersion.cmake
+++ b/cmake/SparkDebianChangelogVersion.cmake
@@ -54,5 +54,6 @@ macro(spark_debian_changelog_override_version _CHANGELOG_FILE_PATH)
message("> V = [${V}]")
set(PROJECT_VERSION ${V})
+ file(WRITE ${CMAKE_BINARY_DIR}/SPARK_OVERRIDE_VERSION ${V})
endif(CHANGELOG_FILE_EXISTS)
endmacro(spark_debian_changelog_override_version _CHANGELOG_FILE_PATH)
--
2.20.1