forked from mirror/fake-firehose
Compare commits
3 Commits
a4614e886b ... 0d43e6f505

Author | SHA1 | Date
---|---|---
hnrd | 0d43e6f505 |
hnrd | 5be901062b |
hnrd | 2901b87d87 |

@@ -41,7 +41,7 @@ The entire thing should look something like:
 cd ~
 mkdir MastodonFireHose
 cd MastodonFirehose
-git pull https://github.com/raynormast/fake-firehose.git
+git clone https://github.com/raynormast/fake-firehose.git
 cd fake-firehose
 docker build -t fakefirehose .
 # Edit your docker-compose and .env.production here

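The `git pull` → `git clone` fix matters because `git pull` only updates an existing checkout; the freshly created `MastodonFirehose` directory is not a repository yet, so the original command fails. A minimal sanity check of the corrected sequence (directory and image names taken from the README above, everything else standard git/docker):

```sh
git clone https://github.com/raynormast/fake-firehose.git
cd fake-firehose
git remote -v                    # should list the URL that was just cloned
docker build -t fakefirehose .   # builds the image, as in the README steps above
```
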
@@ -22,7 +22,7 @@ while true
 echo "[INFO] RUN-FIREHOSE: Posting $uri"

 ## Send it to the fake relay as a background job
-curl -X "POST" "$fakeRelayHost" \
+curl -L -X "POST" "$fakeRelayHost" \
 -H "Authorization: Bearer $fakeRelayKey" \
 -H 'Content-Type: application/x-www-form-urlencoded; charset=utf-8' \
 --data-urlencode "statusUrl=$uri" \

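The only change in this hunk is curl's `-L` flag, which makes curl follow HTTP redirects instead of stopping at the first 3xx response (useful if the fake relay host redirects, for example from http to https). One caveat that is standard curl behaviour: after a 301/302/303 redirect curl re-issues the request as GET unless `--post301`/`--post302`/`--post303` is also given. A quick way to see whether the configured relay redirects at all — variable names as used in the script, the check itself is only an illustration:

```sh
# HEAD-style request (-I): print only the status lines and any Location headers
# along the redirect chain that -L follows.
curl -sIL "$fakeRelayHost" \
  -H "Authorization: Bearer $fakeRelayKey" | grep -iE '^(HTTP|location)'
```
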
@@ -1,7 +1,7 @@
 #!/bin/bash

-echo > /config/urls.txt
-echo > /config/hosts
+echo > /data/urls.txt
+echo > /data/hosts

 # Get federated hosts and begin to stream them
 cat /config/domains-federated | grep -v "##" | while read -r line

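This hunk and the two that follow move the files the script generates at runtime (`urls.txt`, `hosts`, `hashtag-urls.txt`) from `/config` to `/data`, while user-maintained inputs such as `/config/domains-federated` and `/config/hashtags` are still read from `/config`. The practical effect, judging only from the diff, is a cleaner split between what you edit and what the container writes:

```sh
/config/domains-federated   # user input: one host per line, optionally followed by #hashtags
/config/hashtags            # user input: global hashtag list
/data/urls.txt              # generated: streaming URLs the script will open
/data/hashtag-urls.txt      # generated: per-hashtag URLs, appended to urls.txt
/data/hosts                 # generated: host list used for the hashtag pass
/data/$today.json           # generated: captured statuses (see the curl/tee hunk below)
```
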
@@ -23,15 +23,15 @@ do
 if [[ $tag != "" ]]; then
 echo "[INFO] Found tag $tag"
 # Create a url to fetch for each tag
-echo "https://$host/api/v1/streaming/hashtag?tag=$tag $host" >> /config/urls.txt
+echo "https://$host/api/v1/streaming/hashtag?tag=$tag $host" >> /data/urls.txt
 fi
 done
 elif [[ "$line" != *" #"* ]]; then
 echo "[INFO] $line didn't have hashtags"
 host=$line
-echo "https://$line/api/v1/streaming/public $line" >> /config/urls.txt
+echo "https://$line/api/v1/streaming/public $line" >> /data/urls.txt
 fi
-echo $host >> /config/hosts
+echo $host >> /data/hosts
 fi
 done

@@ -56,30 +56,30 @@ do
 if [[ $tag != "" ]]; then
 echo "[INFO] Found tag $tag"
 # Create a url to fetch for each tag
-echo "https://$host/api/v1/streaming/hashtag/local?tag=$tag $host" >> /config/urls.txt
+echo "https://$host/api/v1/streaming/hashtag/local?tag=$tag $host" >> /data/urls.txt
 fi
 done
 elif [[ "$line" != *" #"* ]]; then
 echo "[INFO] $line didn't have hashtags"
 host=$line
-echo "https://$line/api/v1/streaming/public/local $line" >> /config/urls.txt
+echo "https://$line/api/v1/streaming/public/local $line" >> /data/urls.txt
 fi
-echo $host >> /config/hosts
+echo $host >> /data/hosts
 fi
 done

 cat /config/hashtags | grep -v "##" | while read -r hashtag; do
 hashtag=`echo $hashtag | cut -d "#" -f 2`
-sort /config/hosts | uniq -u |while read -r host; do
+sort /data/hosts | uniq -u |while read -r host; do
 if [[ $hashtag != "" && "$host" != "" ]]; then
-echo "https://$host/api/v1/streaming/hashtag?tag=$hashtag $host" >> /config/hashtag-urls.txt
+echo "https://$host/api/v1/streaming/hashtag?tag=$hashtag $host" >> /data/hashtag-urls.txt
 fi
 done
 done

-cat /config/hashtag-urls.txt >> /config/urls.txt
+cat /data/hashtag-urls.txt >> /data/urls.txt

-cat /config/urls.txt | while read -r url
+cat /data/urls.txt | while read -r url
 do
 echo "[INFO] Opening $url to stream"
 sleep $streamDelay

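Since everything generated now lands under `/data`, that directory has to be writable inside the container and is worth mounting from the host so captured JSON survives restarts. A hypothetical invocation showing the idea — the host-side directory names and the use of `docker run` instead of the project's docker-compose file are assumptions, not taken from the repository:

```sh
# Assumed layout: ./config holds domains-federated and hashtags,
# ./data receives urls.txt, hosts and the daily JSON archives.
docker run -d --name fakefirehose \
  --env-file .env.production \
  -v "$PWD/config:/config" \
  -v "$PWD/data:/data" \
  fakefirehose
```
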
@@ -31,7 +31,7 @@ do
 then
 #Not in archive mode

-curl -X "GET" "$url" \
+curl -L -X "GET" "$url" \
 --no-progress-meter | \
 tee -a "/data/$today.json" | \
 grep url | \

@@ -56,7 +56,7 @@ do
 mkdir -p "/data/$today/"
 fi

-curl -X "GET" "$url" --no-progress-meter >> "/data/$today/$today.$host.json"
+curl -L -X "GET" "$url" --no-progress-meter >> "/data/$today/$today.$host.json"
 fi

 # Basic exponential backoff

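The last two hunks apply the same `-L` fix to the GET side, so both the live streaming request and the archive-mode capture follow redirects instead of producing empty output when an instance redirects its streaming endpoint. A quick manual check of a single instance — the domain is a placeholder, not one taken from the config files, and `-m 5` simply stops curl from waiting on the open stream:

```sh
curl -sIL -m 5 "https://mastodon.example/api/v1/streaming/public" \
  | grep -iE '^(HTTP|location)'
```
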