2018-10-09 03:11:51 +02:00
#!/usr/bin/env python3
2018-10-25 04:37:11 +02:00
# toot downloader version two!!
2018-10-09 03:11:51 +02:00
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
2019-08-15 03:56:27 +02:00
from mastodon import Mastodon , MastodonUnauthorizedError
2021-06-04 23:38:36 +02:00
import sqlite3 , signal , sys , json , re , argparse
2018-10-25 04:37:11 +02:00
import requests
2019-01-11 13:58:17 +01:00
import functions
2018-10-09 03:11:51 +02:00
2019-08-07 05:46:57 +02:00
# Command-line interface: the only option is where to find the config file.
parser = argparse.ArgumentParser(description='Log in and download posts.')
parser.add_argument(
	'-c', '--cfg', dest='cfg', default='config.json', nargs='?',
	help="Specify a custom location for config.json.")

args = parser.parse_args()

# OAuth scopes requested from the instance. The write scopes are not used by
# this downloader itself, but the posting half of the bot needs them, so they
# are requested up front during the one-time login.
scopes = ["read:statuses", "read:accounts", "read:follows", "write:statuses", "read:notifications", "write:accounts"]
2021-06-04 23:38:36 +02:00
# cfg defaults -- any key present in the user's config.json overrides these.
cfg = {
	"site": "https://botsin.space",
	"cw": None,  # content warning applied to generated posts (None = no CW)
	"instance_blacklist": ["bofa.lol", "witches.town", "knzk.me"],  # rest in piece
	"learn_from_cw": False,
	"mention_handling": 1,
	"max_thread_length": 15,
	"strip_paired_punctuation": False,
	"limit_length": False,
	"length_lower_limit": 5,
	"length_upper_limit": 50,
	"overlap_ratio_enabled": False,
	"overlap_ratio": 0.7,
	"ignored_cws": [],
}
2019-05-19 14:31:42 +02:00
2019-08-14 07:00:35 +02:00
# Overlay the user's configuration on top of the defaults. A missing config
# file is created as an empty JSON object so the next run finds it.
try:
	with open(args.cfg, 'r') as cfg_file:
		cfg.update(json.load(cfg_file))
except FileNotFoundError:
	with open(args.cfg, "w") as cfg_file:
		cfg_file.write("{}")

print("Using {} as configuration file".format(args.cfg))

if not cfg['site'].startswith("https://") and not cfg['site'].startswith("http://"):
	# BUG FIX: the message has two placeholders but .format() was only given
	# one argument, so this path raised IndexError instead of printing the hint.
	print("Site must begin with 'https://' or 'http://'. Value '{}' is invalid - try 'https://{}' instead.".format(cfg['site'], cfg['site']))
	sys.exit(1)
2018-10-25 04:37:11 +02:00
if "client" not in cfg:
	# First run: register this bot as an application on the instance.
	print("No application info -- registering application with {}".format(cfg['site']))
	client_id, client_secret = Mastodon.create_app(
		"mstdn-ebooks",
		api_base_url=cfg['site'],
		scopes=scopes,
		website="https://github.com/Lynnesbian/mstdn-ebooks")
	cfg['client'] = {
		"id": client_id,
		"secret": client_secret
	}

if "secret" not in cfg:
	# No access token yet: walk the user through the OAuth authorisation flow.
	print("No user credentials -- logging in to {}".format(cfg['site']))
	client = Mastodon(
		client_id=cfg['client']['id'],
		client_secret=cfg['client']['secret'],
		api_base_url=cfg['site'])

	print("Open this URL and authenticate to give mstdn-ebooks access to your bot's account: {}".format(client.auth_request_url(scopes=scopes)))
	cfg['secret'] = client.log_in(code=input("Secret: "), scopes=scopes)

# Persist any newly obtained credentials (previously the handle from open()
# was never closed; the with-block guarantees the file is flushed and closed).
with open(args.cfg, "w+") as cfg_file:
	json.dump(cfg, cfg_file)
2018-10-09 03:11:51 +02:00
2021-06-04 23:38:36 +02:00
2018-10-25 04:37:11 +02:00
def extract_toot(toot):
	"""Convert one raw status into plain text safe for re-posting.

	Delegates the HTML stripping to functions.extract_toot(), then inserts a
	zero-width space after every '@' so regurgitated text can never mention
	anybody.
	"""
	toot = functions.extract_toot(toot)
	toot = toot.replace("@", "@\u200B")  # put a zws between @ and username to avoid mentioning
	return toot
2018-10-09 03:11:51 +02:00
2021-06-04 23:38:36 +02:00
2021-06-14 22:34:33 +02:00
def get(*args, **kwargs):
	"""requests.get(), but any non-2xx response raises requests.HTTPError."""
	response = requests.get(*args, **kwargs)
	response.raise_for_status()
	return response
2018-10-09 03:11:51 +02:00
# Authenticated API client used for the rest of the run.
client = Mastodon(
	client_id=cfg['client']['id'],
	client_secret=cfg['client']['secret'],
	access_token=cfg['secret'],
	api_base_url=cfg['site'])

try:
	me = client.account_verify_credentials()
except MastodonUnauthorizedError:
	# The stored token was revoked or is otherwise bad; the only recovery is
	# deleting the config file and re-running the login flow.
	print("The provided access token in {} is invalid. Please delete {} and run main.py again.".format(args.cfg, args.cfg))
	sys.exit(1)

following = client.account_following(me.id)
# Local archive of downloaded posts.
db = sqlite3.connect("toots.db")
db.text_factory = str
c = db.cursor()
# sortid is the rowid alias; id/uri identify the remote status, cw holds the
# content warning (NULL = none), content is the cleaned-up text.
c.execute("CREATE TABLE IF NOT EXISTS `toots` (sortid INTEGER UNIQUE PRIMARY KEY AUTOINCREMENT, id VARCHAR NOT NULL, cw VARCHAR, userid VARCHAR NOT NULL, uri VARCHAR NOT NULL, content VARCHAR NOT NULL)")
# After every insert, drop all but the earliest row for each uri so the same
# status is never stored twice.
c.execute("CREATE TRIGGER IF NOT EXISTS `dedup` AFTER INSERT ON toots FOR EACH ROW BEGIN DELETE FROM toots WHERE rowid NOT IN (SELECT MIN(sortid) FROM toots GROUP BY uri); END;")
db.commit()
2019-08-15 03:56:27 +02:00
2021-06-04 23:38:36 +02:00
2018-10-09 03:11:51 +02:00
def handleCtrlC(sig, frame):
	"""SIGINT handler: commit everything collected so far, then exit.

	The first parameter used to be named `signal`, shadowing the signal
	module inside the handler; renamed to `sig` (the signal module invokes
	handlers positionally, so callers are unaffected).
	"""
	print("\nPREMATURE EVACUATION - Saving chunks")
	db.commit()
	sys.exit(1)


signal.signal(signal.SIGINT, handleCtrlC)
2019-02-07 16:27:52 +01:00
# Pre-compiled regexes used while resolving accounts and outboxes.
patterns = {
	"handle": re.compile(r"^.*@(.+)"),  # instance half of user@instance
	"url": re.compile(r"https?:\/\/(.*)"),  # host part of the configured site URL
	"uri": re.compile(r'template="([^"]+)"'),  # webfinger template inside host-meta XML
	"pid": re.compile(r"[^\/]+$"),  # last path segment of a status URI
}
2018-10-27 10:28:20 +02:00
2019-02-25 19:30:40 +01:00
2021-06-04 23:38:36 +02:00
def insert_toot(oii, acc, post, cursor):  # extracted to prevent duplication
	"""Store one Create activity in the toots table.

	oii: the ActivityPub activity (its object.id is used as the uri and its
	last path segment as the short id); acc: the followed account it belongs
	to; post: the cleaned-up text; cursor: an open cursor on the toots DB.
	"""
	pid = patterns["pid"].search(oii['object']['id']).group(0)
	cursor.execute("REPLACE INTO toots (id, cw, userid, uri, content) VALUES (?, ?, ?, ?, ?)", (
		pid,
		# .get(): some software omits 'summary' entirely; previously that
		# raised KeyError, which the caller's except silently swallowed,
		# dropping the toot. Missing or empty summary both mean "no CW".
		oii['object'].get('summary') or None,
		acc.id,
		oii['object']['id'],
		post
	))
2018-10-09 03:11:51 +02:00
for f in following:
	# Resume from the newest status we already have for this account.
	last_toot = c.execute("SELECT id FROM `toots` WHERE userid LIKE ? ORDER BY sortid DESC LIMIT 1", (f.id,)).fetchone()
	if last_toot is not None:
		last_toot = last_toot[0]
	else:
		last_toot = 0
	print("Downloading posts for user @{}, starting from {}".format(f.acct, last_toot))

	# find the user's activitypub outbox
	print("WebFingering...")
	instance = patterns["handle"].search(f.acct)
	if instance is None:
		# no @instance part means the account is local to our own instance
		instance = patterns["url"].search(cfg['site']).group(1)
	else:
		instance = instance.group(1)

	if instance in cfg['instance_blacklist']:
		print("skipping blacklisted instance: {}".format(instance))
		continue

	try:
		# 1. download host-meta to find webfinger URL
		r = get("https://{}/.well-known/host-meta".format(instance), timeout=10)
		# 2. use webfinger to find user's info page
		uri = patterns["uri"].search(r.text).group(1)
		uri = uri.format(uri="{}@{}".format(f.username, instance))
		r = get(uri, headers={"Accept": "application/json"}, timeout=10)
		j = r.json()
		found = False
		for link in j['links']:
			if link['rel'] == 'self':
				# this is a link formatted like "https://instan.ce/users/username", which is what we need
				uri = link['href']
				found = True
				break
		if not found:
			print("Couldn't find a valid ActivityPub outbox URL.")
		# 3. download first page of outbox
		uri = "{}/outbox?page=true".format(uri)
		r = get(uri, timeout=15)
		j = r.json()
	except Exception:
		# was a bare `except:`, which also swallowed the SystemExit raised by
		# the Ctrl-C handler; Exception leaves SystemExit/KeyboardInterrupt alone
		print("oopsy woopsy!! we made a fucky wucky!!!\n(we're probably rate limited, please hang up and try again)")
		sys.exit(1)

	pleroma = False
	if 'next' not in j and 'prev' not in j:
		# there's only one page of results, don't bother doing anything special
		pass
	elif 'prev' not in j:
		print("Using Pleroma compatibility mode")
		pleroma = True
		if 'first' in j:
			# apparently there used to be a 'first' field in pleroma's outbox output, but it's not there any more
			# i'll keep this for backwards compatibility with older pleroma instances
			# it was removed in pleroma 1.0.7 - https://git.pleroma.social/pleroma/pleroma/-/blob/841e4e4d835b8d1cecb33102356ca045571ef1fc/CHANGELOG.md#107-2019-09-26
			j = j['first']
	else:
		print("Using standard mode")
		uri = "{}&min_id={}".format(uri, last_toot)
		r = get(uri)
		j = r.json()

	print("Downloading and saving posts", end='', flush=True)
	done = False
	try:
		while not done and len(j['orderedItems']) > 0:
			for oi in j['orderedItems']:
				if oi['type'] != "Create":
					# this isn't a toot/post/status/whatever, it's a boost or a follow or some other activitypub thing. ignore
					continue

				# its a toost baby
				content = oi['object']['content']
				toot = extract_toot(content)
				try:
					if pleroma:
						if c.execute("SELECT COUNT(*) FROM toots WHERE uri LIKE ?", (oi['object']['id'],)).fetchone()[0] > 0:
							# we've caught up to the notices we've already downloaded, so we can stop now
							# you might be wondering, "what if the instance ratelimits you after 40 posts, and they've made 60 since main.py
							# was last run? wouldn't the bot miss 20 posts and never see them?" -- yes, that's a known limitation
							done = True
							continue
					if 'lang' in cfg:
						try:
							if oi['object']['contentMap'][cfg['lang']]:  # filter for language
								insert_toot(oi, f, toot, c)
						except KeyError:
							# JSON doesn't have contentMap, just insert the toot irregardlessly
							insert_toot(oi, f, toot, c)
					else:
						insert_toot(oi, f, toot, c)
				except Exception:
					pass  # ignore any toots that don't successfully go into the DB

			# get the next/previous page
			try:
				if not pleroma:
					r = get(j['prev'], timeout=15)
				else:
					r = get(j['next'], timeout=15)
			except requests.Timeout:
				print("HTTP timeout, site did not respond within 15 seconds")
				# BUG FIX: previously fell through to j = r.json() on the stale
				# response, reprocessing the same page forever
				break
			except KeyError:
				print("Couldn't get next page - we've probably got all the posts")
				break  # BUG FIX: ditto
			except Exception:
				print("An error occurred while trying to obtain more posts.")
				break  # BUG FIX: ditto

			j = r.json()
			print('.', end='', flush=True)

		print("Done!")
		db.commit()
	except requests.HTTPError as e:
		if e.response.status_code == 429:
			print("Rate limit exceeded. This means we're downloading too many posts in quick succession. Saving toots to database and moving to next followed account.")
			db.commit()
		else:
			# TODO: remove duplicate code
			print("Encountered an error! Saving posts to database and moving to next followed account.")
			db.commit()
	except Exception:
		# was a bare `except:`; it swallowed the SIGINT handler's SystemExit
		# and carried on to the next account instead of exiting
		print("Encountered an error! Saving posts to database and moving to next followed account.")
		db.commit()
2018-11-01 06:27:03 +01:00
print("Done!")

# Flush everything, reclaim space freed by the dedup trigger, and close.
db.commit()
db.execute("VACUUM")  # compact db
db.commit()
db.close()