hyprdots

my dotfiles
git clone https://git.awy.one/hyprdots.git

rssget (4011B)


#!/bin/bash

# Searches a website for RSS feeds and adds them to the newsboat url list. Can
# also find hidden RSS feeds on various websites, namely YouTube, Reddit,
# Vimeo, GitHub, GitLab and Medium. Gets the site url as $1 or (if not present)
# from the Wayland clipboard. Gets tags as $2. If it finds more than one feed,
# it calls wmenu for the user to choose which one to add. I have bound it to a
# keyboard shortcut so I can copy a site link and easily add its feed to the
# reader.

# Inspired by and based on the logic of this extension:
# https://github.com/shevabam/get-rss-feed-url-extension

# This script requires rssadd to add feeds to the list.
getlink () {
	local url="$1"
	# Grab page lines that advertise an rss/rdf/atom link type (spaces are
	# stripped so each match becomes a single word for the loop below).
	feeds="$(curl -s "$url" | grep -Ex '.*type=.*(rss|rdf|atom).*' | sed 's/ //g')"
	# Reduce the url to its https://host/ root for resolving relative paths.
	url="$(echo "$url" | sed 's|^\(https://[^/]*/\).*|\1|')"

	for rsspath in $feeds; do
		rsspath="$(echo "$rsspath" | sed -n "s|.*href=['\"]\([^'\"]*\)['\"].*|\1|p")"
		if echo "$rsspath" | grep "http" > /dev/null; then
			link="$rsspath"	# already an absolute url
		elif echo "$rsspath" | grep -E "^/" > /dev/null; then
			link="$url$(echo "$rsspath" | sed 's|^/||')"	# root-relative path
		else
			link="$url$rsspath"	# relative path
		fi
		echo "$link"
	done
}
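
# Example (hypothetical page): https://example.com/blog/ advertising
# <link type="application/rss+xml" href="/feed.xml"> prints
# https://example.com/feed.xml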

getRedditRss() {
	echo "${1%/}.rss"
}
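
# Example: https://www.reddit.com/r/commandline becomes
# https://www.reddit.com/r/commandline.rss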

getYoutubeRss() {
	local url="$1"
	path=$(echo "$url" | sed -e 's|^http[s]*://||')
	case "$path" in
		# /channel/ urls carry the channel id directly.
		*"/channel/"*)
			channel_id="$(echo "$path" | sed -r 's|.*channel/([^/]*).*|\1|')"
			feed="https://www.youtube.com/feeds/videos.xml?channel_id=${channel_id}" ;;
		# /c/ and /user/ urls need the page fetched to read its rssUrl field.
		*"/c/"*|*"/user/"*)
			feed=$(wget -qO- "$url" \
			| sed -n 's|.*\("rssUrl":"[^"]*\).*|\1|; p' \
			| grep rssUrl \
			| sed 's|"rssUrl":"||') ;;
	esac
	echo "$feed"
}
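
# Example: https://www.youtube.com/channel/<channel-id> maps to
# https://www.youtube.com/feeds/videos.xml?channel_id=<channel-id>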

getVimeoRss() {
	local url="$1"
	# The channel feed lives at <channel url>/videos/rss.
	if echo "$url" | grep -q "/videos$"; then
		feed_url="${url}/rss"
	else
		feed_url="${url}/videos/rss"
	fi
	echo "$feed_url"
}
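
# Example: https://vimeo.com/<channel> (or .../<channel>/videos) is assumed to
# serve its feed at https://vimeo.com/<channel>/videos/rss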

getGithubRss () {
	local url="${1%/}"
	# A repository url gets commit, release and tag feeds; a profile url gets
	# the user's public activity feed.
	if echo "$url" | grep -E "github.com/[^/]*/[a-zA-Z0-9].*" >/dev/null ; then
		echo "${url}/commits.atom"
		echo "${url}/releases.atom"
		echo "${url}/tags.atom"
	elif echo "$url" | grep -E "github.com/[^/]+$" >/dev/null ; then
		echo "${url}.atom"
	fi
}
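
# Example: https://github.com/<user>/<repo> yields <repo url>/commits.atom,
# /releases.atom and /tags.atom; https://github.com/<user> yields
# https://github.com/<user>.atom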

getGitlabRss () {
	local url="${1%/}"
	echo "${url}.atom"
}
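
# Example: https://gitlab.com/<user>/<project> becomes
# https://gitlab.com/<user>/<project>.atom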

getMediumRss () {
	echo "$1" | sed 's|/tag/|/feed/|'
}
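
# Example: https://medium.com/tag/<topic> is rewritten to
# https://medium.com/feed/<topic>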


# Take the url from $1, or fall back to the Wayland clipboard.
if [ -n "$1" ] ; then
	url="$1"
else
	url="$(wl-paste)"
	[ -z "$url" ] && echo "usage: $0 url 'tag1 tag2 tag3'" && exit 1
fi

# Newline-separated list of candidate feed urls.
list=""

# youtube urls are matched for youtube.com itself and a list of invidious instances.
yt_regex="^(http(s)?://)?((w){3}\.)?(youtube\.com|invidio\.us|invidious\.flokinet\.to|invidious\.materialio\.us|iv\.datura\.network|invidious\.perennialte\.ch|invidious\.fdn\.fr|invidious\.private\.coffee|invidious\.protokolla\.fi|invidious\.privacyredirect\.com|yt\.artemislena\.eu|yt\.drgnz\.club|invidious\.incogniweb\.net|yewtu\.be|inv\.tux\.pizza|invidious\.reallyaweso\.me|iv\.melmac\.space|inv\.us\.projectsegfau\.lt|inv\.nadeko\.net|invidious\.darkness\.services|invidious\.jing\.rocks|invidious\.privacydev\.net|inv\.in\.projectsegfau\.lt|invidious\.drgns\.space)/(channel|user|c).+"
reddit_regex="^(http(s)?://)?((w){3}\.)?reddit\.com.*"
vimeo_regex="^(http(s)?://)?((w){3}.)?vimeo\.com.*"
if echo "$url" | grep -Ex "$yt_regex" >/dev/null ; then
	list="$(getYoutubeRss "$url")"
elif echo "$url" | grep -Ex "$reddit_regex" >/dev/null ; then
	list="$(getRedditRss "$url")"
# vimeo actually works with getlink
elif echo "$url" | grep -E "$vimeo_regex" >/dev/null ; then
	list="$(getVimeoRss "$url")"
elif echo "$url" | grep -E "github.com" >/dev/null ; then
	list="$(getGithubRss "$url")"
# gitlab also works with getlink
elif echo "$url" | grep -E "gitlab.com/[a-zA-Z0-9].*" >/dev/null ; then
	list="$(getGitlabRss "$url")"
elif echo "$url" | grep -E "medium.com/tag" >/dev/null ; then
	list="$(getMediumRss "$url")"
else
	list="$(getlink "$url")"
fi

# If more than one candidate feed was found, let the user pick one with wmenu.
[ "$(echo "$list" | wc -l)" -eq 1 ] && chosen_link="$list" || chosen_link=$(printf '%s\n' "$list" | wmenu -p "Choose a feed:")
tags="$2"
ifinstalled rssadd && rssadd "$chosen_link" "$tags"
echo "$chosen_link" "$tags"