diff --git a/Makefile b/Makefile
index 38dc2e2..bc9c262 100644
--- a/Makefile
+++ b/Makefile
@@ -11,6 +11,7 @@ man:
 sh:
 	mkdir -p $(DESTDIR)$(PREFIX)/bin
 	cp -f sh/paste $(DESTDIR)$(PREFIX)/bin
+	cp -f sh/trss $(DESTDIR)$(PREFIX)/bin
 	cp -f sh/disp $(DESTDIR)$(PREFIX)/bin
 	cp -f sh/wallpaper $(DESTDIR)$(PREFIX)/bin
 	cp -f sh/yt $(DESTDIR)$(PREFIX)/bin
@@ -21,7 +22,6 @@ sh:
 	cp -f sh/pco $(DESTDIR)$(PREFIX)/bin
 	cp -f sh/git-survey $(DESTDIR)$(PREFIX)/bin
 	cp -f sh/vim-swap-handler $(DESTDIR)$(PREFIX)/bin
-	cp -f sh/snownews-url-handler $(DESTDIR)$(PREFIX)/bin
 	cp -f sh/status $(DESTDIR)$(PREFIX)/bin
 	cp -f sh/statusbar $(DESTDIR)$(PREFIX)/bin
 	cp -f sh/cfg $(DESTDIR)$(PREFIX)/bin
@@ -34,6 +34,9 @@ sh:
 	cp -f sh/ss $(DESTDIR)$(PREFIX)/bin
 	cp -f sh/net $(DESTDIR)$(PREFIX)/bin
 
+check:
+	shellcheck sh/*
+
 mkc: c/scream c/timer c/boid c/anaconda c/colors
 
 c/boid:
diff --git a/sh/paste b/sh/paste
index 89b5eb0..87987e0 100755
--- a/sh/paste
+++ b/sh/paste
@@ -1,5 +1,4 @@
 #!/bin/sh
-set -x
 
 [ "$1" = "" ] && exit 1
-[ -f "$1" ] && curl -F"file=@${1}" https://ttm.sh && exit 0
+[ -f "$1" ] && curl -F"file=@${1}" https://0x0.st && exit 0
diff --git a/sh/snownews-url-handler b/sh/snownews-url-handler
deleted file mode 100755
index 50bcff7..0000000
--- a/sh/snownews-url-handler
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/sh
-
-# tube="$(printf "%s\n" "$1" | grep "watch?v=" | wc -l)"
-# [ "$tube" -eq 1 ] && mpv -v --video-sync=display-resample "$1" && exit
-
-firefox $1
-killall -10 simplestatus
diff --git a/sh/trss b/sh/trss
new file mode 100755
index 0000000..c7e0180
--- /dev/null
+++ b/sh/trss
@@ -0,0 +1,39 @@
+#!/bin/sh
+
+# get configuration & data directories
+DATA="$HOME/.cache/trss"
+CONFIG="$HOME/.config/trss"
+
+# make sure these directories are in place
+mkdir -p "$DATA"
+mkdir -p "$CONFIG"
+
+import_information () {
+	FEEDS=""
+	for i in "$CONFIG"/*; do
+		. "$i"
+		[ -z "$FEEDS" ] && FEEDS="$NAME" || FEEDS="$FEEDS $NAME"
+	done
+}
+
+synchronize () {
+	for i in $FEEDS; do
+		url="$(get_feed_attr "$i" feed)"
+		printf "> synchronizing feed %s via url %s\n" "$i" "$url"
+		curl "$url" -so "$DATA/${i}.xml"
+
+		# generate sfeed formatted file
+		printf "> creating sfeed compound feed file for feed %s\n" "$i"
+		cat "$DATA/${i}.xml" | sfeed | cat - "$DATA/${i}.sfeed" | uniq >> "$DATA/${i}.tmp.sfeed"
+		mv "$DATA/${i}.tmp.sfeed" "${DATA}/${i}.sfeed"
+	done
+}
+
+get_feed_attr () {
+	eval "printf '%s' \${$1_$2}"
+}
+
+import_information
+synchronize
+printf "%s\n" "$FEEDS"
+get_feed_attr seirdy humanname