"""Posts random screenshots from Spice and Wolf to its account on udongein.xyz."""

import sys
import random
import os.path as op
from urllib.parse import urlparse
import requests
from bs4 import BeautifulSoup
from mastodon import Mastodon
# Base URL of the fancaps.net anime gallery index; episode id is appended directly.
URL_HEAD = 'https://fancaps.net/anime/index.php?'
# Query fragment appended before the gallery page number.
URL_PAGE = '&page='
# Prefix prepended to the relative href scraped from a thumbnail.
URL_TRANSFER_HEAD = 'https://fancaps.net/anime/'
# fancaps.net gallery ids for the Spice and Wolf episodes to sample from.
# NOTE(review): '3792' is special-cased in main() as having an extra page.
EPISODE_IDS = ['3785', '3786', '3787', '3788',
'3789', '3790', '3791', '3792',
'3781', '3782', '3783', '3784']
def main():
    """Post one random Spice and Wolf screenshot from fancaps.net to Mastodon.

    Picks a random episode and gallery page, selects a random thumbnail,
    follows it to the full-size image, and toots it on udongein.xyz.
    Screenshots whose href appears in nsfw-ids.dat are marked sensitive.

    Returns:
        None (the process exit status is therefore 0 on success).

    Raises:
        requests.RequestException: on network failure or non-2xx responses.
        OSError: if nsfw-ids.dat cannot be read.
    """
    # Compose the URL of a random gallery page of a random episode.
    episode_id = random.choice(EPISODE_IDS)
    page_amount = 20
    if episode_id == '3792':
        page_amount += 1  # this episode's gallery has one extra page
    page_id = random.randint(1, page_amount)
    source_url = URL_HEAD + episode_id + URL_PAGE + str(page_id)

    # Fetch the gallery page and pick a random thumbnail container.
    # timeout added so an unresponsive server cannot hang the bot forever;
    # raise_for_status() surfaces HTTP errors instead of parsing an error page.
    resp = requests.get(source_url, timeout=30)
    resp.raise_for_status()
    soup = BeautifulSoup(resp.text, 'lxml')
    div = random.choice(soup.findAll('div', {'class': 'col-lg-3 col-md-4 col-sm-6 col-xs-6'}))
    # NOTE(review): positional split on the serialized tag is brittle; index 5
    # is assumed to be the thumbnail link's href — confirm against live markup
    # before replacing with e.g. div.find('a')['href'].
    href_url = str(div).split('"')[5]

    # Mark the toot sensitive if this screenshot is listed in nsfw-ids.dat.
    # A set gives O(1) membership instead of a linear scan per check.
    with open('nsfw-ids.dat', 'r', encoding='utf-8') as file:
        nsfw_hrefs = {line.strip() for line in file}
    is_sensitive = href_url.strip() in nsfw_hrefs

    # Follow the thumbnail to its detail page and extract the image URL.
    resp = requests.get(URL_TRANSFER_HEAD + href_url, timeout=30)
    resp.raise_for_status()
    soup = BeautifulSoup(resp.text, 'lxml')
    div = soup.find('div', {'class': 'img-holder btn-group btn-group-lg'})
    # NOTE(review): index 23 is assumed to be the full-size image URL in the
    # serialized button group — verify against the live page.
    file_url = str(div).split('"')[23]

    # Log in and post. access_token is the path of the token file on disk.
    mastodon = Mastodon(
        access_token='token.dat',
        api_base_url='https://udongein.xyz/',
    )
    media = mastodon.media_post(requests.get(file_url, timeout=30).content, 'image/jpeg')
    toot = ':holo:'
    mastodon.status_post(toot, media_ids=[media], visibility='unlisted',
                         sensitive=is_sensitive)
# Script entry point: run the bot once and exit with main()'s return value
# (None, i.e. exit status 0, when posting succeeds).
if __name__ == '__main__':
    sys.exit(main())