remove trss, now using sfeed, lol

randomuser 2022-11-11 11:31:20 +00:00
parent 8c9020eec2
commit a4cdc434be
2 changed files with 0 additions and 177 deletions
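
(For context: sfeed replaces trss's per-feed config files with a single
sfeedrc read by sfeed_update. A minimal sketch, with a hypothetical feed
name and url:

	# ~/.sfeed/sfeedrc
	feeds() {
		# feed <name> <feedurl>
		feed "phoronix" "https://www.phoronix.com/rss.php"
	}

Running sfeed_update then fetches each feed into a TSV file, much like the
compound files trss built by hand below.)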

Makefile

@@ -11,7 +11,6 @@ man:
 sh:
 	mkdir -p $(DESTDIR)$(PREFIX)/bin
 	cp -f sh/paste $(DESTDIR)$(PREFIX)/bin
-	cp -f sh/trss $(DESTDIR)$(PREFIX)/bin
 	cp -f sh/trss-yt-id $(DESTDIR)$(PREFIX)/bin
 	cp -f sh/disp $(DESTDIR)$(PREFIX)/bin
 	cp -f sh/wallpaper $(DESTDIR)$(PREFIX)/bin

sh/trss (176 deletions)

@@ -1,176 +0,0 @@
#!/bin/sh
# get configuration & data directories
DATA="$HOME/.cache/trss"
CONFIG="$HOME/.config/trss"
MODE=""
# make sure these directories are in place
mkdir -p "$DATA"
mkdir -p "$CONFIG"
import_information () {
	FEEDS=""
	for i in "$CONFIG"/*; do
		[ -f "$i" ] || continue	# skip the unexpanded glob if $CONFIG is empty
		. "$i"
		[ -z "$FEEDS" ] && FEEDS="$NAME" || FEEDS="$FEEDS $NAME"
	done
}
sync_individual () {
	url="$(get_feed_attr "$1" feed)"
	printf "> synchronizing feed %s via url %s\n" "$1" "$url"
	curl "$url" -so "$DATA/${1}.xml"
	# generate sfeed formatted file; sfeed(5) emits one TSV line per entry
	# (field 1 = unix timestamp, 2 = title, 3 = link, 4 = content, ...)
	printf "> creating sfeed compound feed file for feed %s\n" "$1"
	touch "$DATA/${1}.sfeed"	# first sync: make sure the old file exists
	sfeed < "$DATA/${1}.xml" |
		cat - "$DATA/${1}.sfeed" |
		sort -u |
		sort -r -k 1 > "$DATA/${1}.tmp.sfeed"
	mv "$DATA/${1}.tmp.sfeed" "$DATA/${1}.sfeed"
}
synchronize () {
	for i in $FEEDS; do
		sync_individual "$i"
	done
}
get_feed_attr () {
	# indirect expansion: get_feed_attr news feed prints the value of $news_feed
	eval "printf '%s' \"\${$1_$2}\""
}
cli_loop () {
	# read -p is not POSIX, so print the prompt separately
	printf '%s$ ' "$MODE"
	read -r input || exit 0	# leave cleanly on EOF (ctrl-d)
	# $input is deliberately unquoted so the command word-splits into arguments
	case "$input" in
		"ls"*)
			ls_handler $input
			;;
		"cd"*)
			cd_handler $input
			;;
		"cat"*)
			cat_handler $input
			;;
		"sync"*)
			sync_handler $input
			;;
		"open"*)
			open_handler $input
			;;
	esac
}
ls_handler () {
	if [ -z "$MODE" ]; then
		# just list all the feeds
		for i in $FEEDS; do
			printf "%s\n" "$i"
		done
	else
		# we have to list all the items in the feed
		# check first if feed is downloaded
		if [ -f "$DATA/${MODE}.sfeed" ]; then
			awk -F'\t' '{ print $2 }' "$DATA/${MODE}.sfeed"
		else
			printf "synchronization of feed %s required\n" "$MODE"
		fi
	fi
}
cd_handler () {
	if [ -z "$MODE" ]; then
		# we're in the home state
		[ -z "$2" ] && return
		[ "$2" = ".." ] && return
		# check if $2 is a valid feed
		j=0
		for i in $FEEDS; do
			[ "$2" = "$i" ] && j=1
		done
		if [ "$j" -ne 1 ]; then
			printf "trss: couldn't find feed %s\n" "$2"
			return
		fi
		# set the $MODE to the feed
		MODE="$2"
	else
		# we're in an individual feed
		[ -z "$2" ] && MODE="" && return
		[ "$2" = ".." ] && MODE="" && return
		printf "trss: couldn't find feed %s\n" "$2"
	fi
}
sync_handler () {
	force=0	# parsed but currently unused
	[ "$2" = "-f" ] && force=1
	[ "$3" = "-f" ] && force=1
	if [ -z "$MODE" ]; then
		# sync everything
		synchronize
	else
		# use if/else rather than && ... ||, so a failed sync of "$2"
		# doesn't fall through and sync "$MODE" as well
		if [ -n "$2" ] && [ "$2" != "-f" ]; then
			sync_individual "$2"
		else
			sync_individual "$MODE"
		fi
	fi
}
open_handler () {
	# strip the command word and the trailing space left by tr
	realargs="$(printf '%s\n' "$@" | tr '\n' ' ' | sed 's/^open //;s/ $//')"
	[ -z "$MODE" ] && printf "couldn't find entry matching %s\n" "$realargs" && return
	results="$(awk -F'\t' -v pat="$realargs" '$2 ~ pat { print $0 }' "$DATA/${MODE}.sfeed")"
	OLDIFS="$IFS"
	IFS="
"
	for i in $results; do
		url="$(printf "%s" "$i" | awk -F'\t' '{ print $3 }')"
		# on low power systems mpv can be expensive to run
		# have to figure out why it stutters so much more than the browser
		# case "$url" in
		# 	*youtube*)
		# 		mpv "$url"
		# 		;;
		# 	*)
		# 		fire "$url" &
		# 		;;
		# esac
		fire "$url" &
	done
	IFS="$OLDIFS"
}
cat_handler () {
	realargs="$(printf '%s\n' "$@" | tr '\n' ' ' | sed 's/^cat //;s/ $//')"
	[ -z "$MODE" ] && printf "couldn't find entry matching %s\n" "$realargs" && return
	results="$(awk -F'\t' -v pat="$realargs" '$2 ~ pat { print $0 }' "$DATA/${MODE}.sfeed")"
	OLDIFS="$IFS"
	IFS="
"
	for i in $results; do
		# field 4 is the entry content, with newlines and tabs escaped by sfeed
		entry="$(printf "%s" "$i" | awk -F'\t' '{ print $4 }')"
		printf "%s\n" "$entry" |
			sed -e 's|\\n|\n|g' -e 's|\\t|\t|g' |
			lynx -stdin
	done
	IFS="$OLDIFS"
}
# main: load feed definitions, then run the interactive prompt loop
import_information
while true; do
	cli_loop
done
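
(A hypothetical session showing how the handlers fit together: ls lists feeds
or entries, cd enters a feed, sync fetches it, and open matches entry titles
against its arguments:

	$ ls
	phoronix
	$ cd phoronix
	phoronix$ sync
	> synchronizing feed phoronix via url https://www.phoronix.com/rss.php
	> creating sfeed compound feed file for feed phoronix
	phoronix$ ls
	Some Article Title
	phoronix$ open Some Article)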