sfeed_update: add variable for the maximum number of feeds to update concurrently

This adds a variable for the maximum number of feeds to update concurrently. A
system or user may have fork resource limits or may want to set up a job limit.

Thanks leot for the idea and feedback!
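
Since loadconfig() evaluates the config file as a shellscript after the
defaults are set, the limit can presumably be overridden per user from sfeedrc.
A minimal sketch (the feed name and URL below are made up for illustration):

	# sfeedrc: override the default of 8 concurrently updated feeds.
	maxjobs=4

	# feeds() must be defined in sfeedrc; feed(name, feedurl, ...) queues one feed.
	feeds() {
		feed "example" "https://example.org/atom.xml"
	}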
Author: Hiltjo Posthuma
Date:   2018-09-30 19:20:01 +02:00
parent  4200d5c107
commit  2e0e1fa5d6
1 changed file with 11 additions and 0 deletions

@@ -5,6 +5,10 @@
 # defaults
 sfeedpath="$HOME/.sfeed/feeds"
 
+# used for processing feeds concurrently: wait until ${maxjobs} feeds are
+# finished at a time.
+maxjobs=8
+
 # load config (evaluate shellscript).
 # loadconfig(configfile)
 loadconfig() {
@@ -71,6 +75,11 @@ fetchfeed() {
 # fetch and parse feed.
 # feed(name, feedurl, [basesiteurl], [encoding])
 feed() {
+	# wait until ${maxjobs} jobs are finished: throughput using this logic
+	# is non-optimal, but it is simple and portable.
+	test $((curjobs % maxjobs)) -eq 0 && wait
+	curjobs=$((curjobs + 1))
+
 	(name="$1"
 	filename="$(printf '%s' "$1" | tr '/' '_')"
 	feedurl="$2"
@@ -122,6 +131,8 @@ feeds() {
 	echo "See sfeedrc.example for an example." >&2
 }
 
+# job counter.
+curjobs=0
 # kill whole current process group on ^C (SIGINT).
 isinterrupted="0"
 # SIGTERM: signal to terminate parent.
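
The batch-wait pattern in feed() is plain POSIX sh and easy to test in
isolation. A minimal standalone sketch of the same throttling logic (the job
count and the sleep-based placeholder work are made up, not part of
sfeed_update):

	#!/bin/sh
	maxjobs=8
	curjobs=0
	i=0
	while [ "$i" -lt 20 ]; do
		i=$((i + 1))
		# after every ${maxjobs} spawned jobs, wait until the whole
		# batch is finished before starting the next one.
		test $((curjobs % maxjobs)) -eq 0 && wait
		curjobs=$((curjobs + 1))
		# placeholder job: simulate work in a background subshell.
		(printf 'job %d start\n' "$i"; sleep 1; printf 'job %d done\n' "$i") &
	done
	# wait for the remaining jobs of the final, possibly partial batch.
	wait

Because wait blocks until every job in the current batch has exited, one slow
feed stalls the start of the next batch; that is the non-optimal but simple
and portable trade-off the comment in feed() refers to.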