1
0
Fork 0

Compare commits

...

3 Commits

Author SHA1 Message Date
hnrd 0d43e6f505 make curl honour redirects 2023-02-17 15:43:57 +01:00
hnrd 5be901062b use /data as workdir, so /config can be immutable 2023-02-17 15:32:11 +01:00
hnrd 2901b87d87 fix docker setup example 2023-02-17 15:25:34 +01:00
4 changed files with 26 additions and 26 deletions

View File

@@ -41,7 +41,7 @@ The entire thing should look something like:
cd ~
mkdir MastodonFireHose
cd MastodonFirehose
git pull https://github.com/raynormast/fake-firehose.git
git clone https://github.com/raynormast/fake-firehose.git
cd fake-firehose
docker build -t fakefirehose .
# Edit your docker-compose and .env.production here
@@ -317,4 +317,4 @@ The work of [Gervasio Marchand](https://mastodonte.tech/@g3rv4) is fantastic but
I wanted the simplest setup and config I could create, without setting up an entirely new web UI.
There are a lot of things to do better, I'll work on the ones I have time and capability for. Otherwise, this project
is practically begging to be re-written in python or something else.
is practically begging to be re-written in python or something else.

View File

@@ -4,9 +4,9 @@ while true
## This assumes that we have other scripts that are writing to the file called
## $source, which here is today's date appended with .uris.txt
today=`date +"%Y%m%d"`
source="/data/$today.uris.txt"
source="/data/$today.uris.txt"
## Here we take the top 500 lines of the file -- so we are in FIFO
## Here we take the top 500 lines of the file -- so we are in FIFO
## and pipe them thru uniq so we only pass unique URIs through to the fake relay
## This step easily cuts the total number of URIs in half and is the only way we can keep up
@@ -18,11 +18,11 @@ while true
## Start looping through the unique URIs
cat backfilluris.txt| \
while read -r uri
do
do
echo "[INFO] RUN-FIREHOSE: Posting $uri"
## Send it to the fake relay as a background job
curl -X "POST" "$fakeRelayHost" \
curl -L -X "POST" "$fakeRelayHost" \
-H "Authorization: Bearer $fakeRelayKey" \
-H 'Content-Type: application/x-www-form-urlencoded; charset=utf-8' \
--data-urlencode "statusUrl=$uri" \
@@ -35,20 +35,20 @@ while true
do
curls=`ps -ef|grep curl|wc -l`
echo "[INFO] RUN-FIREHOSE: Waiting for existing curls to finish, at $curls"
linesLeft=`cat "$source"|wc -l`
linesLeft=`cat "$source"|wc -l`
echo "[INFO] RUN-FIREHOSE:$linesLeft Total URIs left"
sleep 5s
done
done
linesLeft=`cat "$source"|wc -l`
linesLeft=`cat "$source"|wc -l`
## Wait until the queue is at least 500 lines long, less than that
## and there are not enough lines to see if there are duplicates.
until [ $linesLeft -gt $minURIs ]
do
linesLeft=`cat "$source"|wc -l`
linesLeft=`cat "$source"|wc -l`
echo "[INFO] RUN-FIREHOSE: Waiting for more URIs to batch, currently at $linesLeft"
sleep 5s
done

View File

@@ -1,7 +1,7 @@
#!/bin/bash
echo > /config/urls.txt
echo > /config/hosts
echo > /data/urls.txt
echo > /data/hosts
# Get federated hosts and begin to stream them
cat /config/domains-federated | grep -v "##" | while read -r line
@@ -23,15 +23,15 @@ do
if [[ $tag != "" ]]; then
echo "[INFO] Found tag $tag"
# Create a url to fetch for each tag
echo "https://$host/api/v1/streaming/hashtag?tag=$tag $host" >> /config/urls.txt
echo "https://$host/api/v1/streaming/hashtag?tag=$tag $host" >> /data/urls.txt
fi
done
elif [[ "$line" != *" #"* ]]; then
echo "[INFO] $line didn't have hashtags"
host=$line
echo "https://$line/api/v1/streaming/public $line" >> /config/urls.txt
fi
echo $host >> /config/hosts
echo "https://$line/api/v1/streaming/public $line" >> /data/urls.txt
fi
echo $host >> /data/hosts
fi
done
@@ -56,30 +56,30 @@ do
if [[ $tag != "" ]]; then
echo "[INFO] Found tag $tag"
# Create a url to fetch for each tag
echo "https://$host/api/v1/streaming/hashtag/local?tag=$tag $host" >> /config/urls.txt
echo "https://$host/api/v1/streaming/hashtag/local?tag=$tag $host" >> /data/urls.txt
fi
done
elif [[ "$line" != *" #"* ]]; then
echo "[INFO] $line didn't have hashtags"
host=$line
echo "https://$line/api/v1/streaming/public/local $line" >> /config/urls.txt
echo "https://$line/api/v1/streaming/public/local $line" >> /data/urls.txt
fi
echo $host >> /config/hosts
echo $host >> /data/hosts
fi
done
cat /config/hashtags | grep -v "##" | while read -r hashtag; do
hashtag=`echo $hashtag | cut -d "#" -f 2`
sort /config/hosts | uniq -u |while read -r host; do
sort /data/hosts | uniq -u |while read -r host; do
if [[ $hashtag != "" && "$host" != "" ]]; then
echo "https://$host/api/v1/streaming/hashtag?tag=$hashtag $host" >> /config/hashtag-urls.txt
echo "https://$host/api/v1/streaming/hashtag?tag=$hashtag $host" >> /data/hashtag-urls.txt
fi
done
done
cat /config/hashtag-urls.txt >> /config/urls.txt
cat /data/hashtag-urls.txt >> /data/urls.txt
cat /config/urls.txt | while read -r url
cat /data/urls.txt | while read -r url
do
echo "[INFO] Opening $url to stream"
sleep $streamDelay
@@ -96,4 +96,4 @@ fi
# then restart
echo "[INFO] Container restart timoe is $restartTimeout"
sleep $restartTimeout
exit 0
exit 0

View File

@@ -31,7 +31,7 @@ do
then
#Not in archive mode
curl -X "GET" "$url" \
curl -L -X "GET" "$url" \
--no-progress-meter | \
tee -a "/data/$today.json" | \
grep url | \
@@ -56,7 +56,7 @@ do
mkdir -p "/data/$today/"
fi
curl -X "GET" "$url" --no-progress-meter >> "/data/$today/$today.$host.json"
curl -L -X "GET" "$url" --no-progress-meter >> "/data/$today/$today.$host.json"
fi
# Basic exponential backoff
@@ -79,4 +79,4 @@ do
done
## Exit 0 by default
exit 0
exit 0