git clone git://git.codemadness.org/sfeed
---
commit 195cbb7aa51b7726c9a3cc0f5bcd5e34968a2907
parent a57e31efc26780e2079d336ea46c7e0776ec42e2
Author: Hiltjo Posthuma <[email protected]>
Date:   Wed, 20 Nov 2024 19:59:32 +0100

sfeed_update: separate the logic of running feeds in a function
This is not documented as an overridable function (for now).

It can be used to more easily replace the xargs-based logic of running the
feeds with something else, like GNU parallel.

It can also be used to reproduce the previous behaviour, which didn't require
xargs. See this commit for the old logic: cdb8f7feb135adf6f18e389b4bbf47886089474a

Example:
feed() {
	# wait until ${maxjobs} are finished: will stall the queue if an item
	# is slow, but it is portable.
	[ ${signo} -ne 0 ] && return
	[ $((curjobs % maxjobs)) -eq 0 ] && wait
	[ ${signo} -ne 0 ] && return
	curjobs=$((curjobs + 1))

	_feed "$@" &
}

runfeeds() {
	# job counter.
	curjobs=0

	# fetch feeds specified in config file.
	feeds

	# wait till all feeds are fetched (concurrently).
	[ ${signo} -eq 0 ] && wait
}
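
For the GNU parallel direction mentioned above, a minimal, untested sketch of a
runfeeds() override (placed in the sfeedrc config, which sfeed_update sources)
might look like the following; the parallel invocation and its -0 / -j flags
are an illustration of one possible setup, not something sfeed ships or
documents:

# sketch: swap xargs for GNU parallel (assumes parallel(1) is installed).
runfeeds() {
	# print feeds for parallel processing, same jobs file as the xargs version.
	feeds > "${sfeedtmpdir}/jobs" || die
	# -0: NUL-separated input, -j: number of parallel jobs.
	SFEED_UPDATE_CHILD="1" parallel -0 -j "${maxjobs}" \
		"$(readlink -f "${argv0}")" < "${sfeedtmpdir}/jobs"
}

As with the xargs version in the diff below, each NUL-separated record from the
jobs file is passed as a single argument to a re-executed sfeed_update child
process (SFEED_UPDATE_CHILD=1).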
Diffstat:
  M sfeed_update | 15 ++++++++++-----

1 file changed, 10 insertions(+), 5 deletions(-)
---
diff --git a/sfeed_update b/sfeed_update
@@ -201,6 +201,14 @@ feeds() {
 	die
 }
 
+# runfeeds()
+runfeeds() {
+	# print feeds for parallel processing with xargs.
+	feeds > "${sfeedtmpdir}/jobs" || die
+	SFEED_UPDATE_CHILD="1" xargs -x -0 -P "${maxjobs}" -n 1 \
+		"$(readlink -f "${argv0}")" < "${sfeedtmpdir}/jobs"
+}
+
 # main(args...)
 main() {
 	# signal number received for parent.
@@ -217,11 +225,8 @@ main() {
 	touch "${sfeedtmpdir}/ok" || die
 	# make sure path exists.
 	mkdir -p "${sfeedpath}"
-
-	# print feeds for parallel processing with xargs.
-	feeds > "${sfeedtmpdir}/jobs" || die
-	SFEED_UPDATE_CHILD="1" xargs -x -0 -P "${maxjobs}" -n 1 \
-		"$(readlink -f "${argv0}")" < "${sfeedtmpdir}/jobs"
+	# run and process the feeds.
+	runfeeds
 	statuscode=$?
 	# check error exit status indicator for parallel jobs.