Merge pull request #32 from themotte/remove-marsey-bot-autocomments

Remove automatic marsey comments
This commit is contained in:
Ben Rog-Wilhelm 2022-05-08 21:58:57 -05:00 committed by GitHub
commit 761d1affab
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23

View file

@ -1257,110 +1257,6 @@ def submit_post(v, sub=None):
# --- Snappy bot auto-comment (interior of submit_post; `def` is outside this view) ---
# NOTE(review): this fragment comes from a diff export and has lost its original
# indentation; code below is kept byte-identical to the exported lines.
g.db.add(n)
# Only run the bot if Snappy (SNAPPY_ID) is not exiled from the post's sub.
if not (post.sub and g.db.query(Exile.user_id).filter_by(user_id=SNAPPY_ID, sub=post.sub).one_or_none()):
# Pick the comment body: sub-specific emojis, per-user taunts, or a random quote.
if post.sub == 'dankchristianmemes':
body = random.choice(christian_emojis)
elif v.id == CARP_ID:
# 2% chance of an affectionate message, otherwise the stock taunt.
if random.random() < 0.02: body = "i love you carp"
else: body = ":#marseyfuckoffcarp:"
elif v.id == LAWLZ_ID:
# Coin flip between two canned reactions.
if random.random() < 0.5: body = "wow, this lawlzpost sucks!"
else: body = "wow, a good lawlzpost for once!"
else:
body = random.choice(snappyquotes)
# NOTE(review): startswith('') is always True — the leading marker character
# (used to tag a quote as a downvote) appears to have been lost in this
# export; verify against the original source before relying on this branch.
if body.startswith(''):
body = body[1:]
# Marker meant: Snappy downvotes the post.
vote = Vote(user_id=SNAPPY_ID,
vote_type=-1,
submission_id=post.id,
real = True
)
g.db.add(vote)
post.downvotes += 1
# This particular quote also auto-reports the post.
if body.startswith('OP is a Trump supporter'):
flag = Flag(post_id=post.id, user_id=SNAPPY_ID, reason='Trump supporter')
g.db.add(flag)
# NOTE(review): same lost-marker issue as above; as written this elif is
# unreachable because the first startswith('') already matched.
elif body.startswith(''):
body = body[1:]
# Marker meant: Snappy upvotes the post.
vote = Vote(user_id=SNAPPY_ID,
vote_type=1,
submission_id=post.id,
real = True
)
g.db.add(vote)
post.upvotes += 1
body += "\n\n"
# Append archive/snapshot links for link posts.
if post.url:
# Build an optional extra bullet ("rev") for old.reddit.com links.
if post.url.startswith('https://old.reddit.com/r/'):
rev = post.url.replace('https://old.reddit.com/', '')
rev = f"* [unddit.com](https://unddit.com/{rev})\n"
elif post.url.startswith("https://old.reddit.com/u/"):
rev = post.url.replace('https://old.reddit.com/u/', '')
# \u007b/\u007d are literal { } braces, escaped to survive the f-string.
rev = f"* [camas.github.io](https://camas.github.io/reddit-search/#\u007b\"author\":\"{rev}\",\"resultSize\":100\u007d)\n"
else: rev = ''
# Site-relative URLs are made absolute before archiving.
newposturl = post.url
if newposturl.startswith('/'): newposturl = f"{SITE_FULL}{newposturl}"
body += f"Snapshots:\n\n{rev}* [archive.org](https://web.archive.org/{newposturl})\n* [archive.ph](https://archive.ph/?url={quote(newposturl)}&run=1) (click to archive)\n* [ghostarchive.org](https://ghostarchive.org/search?term={quote(newposturl)}) (click to archive)\n\n"
# Fire-and-forget archive request in a greenlet; result is not awaited.
gevent.spawn(archiveorg, newposturl)
# Also snapshot up to 20 distinct links found inside the post body HTML.
captured = []
for i in list(snappy_url_regex.finditer(post.body_html))[:20]:
# Skip duplicate matches (dedup on the full matched text).
if i.group(0) in captured: continue
captured.append(i.group(0))
href = i.group(1)
if not href: continue
title = i.group(2)
# Ensure the "Snapshots:" header appears exactly once.
if "Snapshots:\n\n" not in body: body += "Snapshots:\n\n"
if f'**[{title}]({href})**:\n\n' not in body:
body += f'**[{title}]({href})**:\n\n'
if href.startswith('https://old.reddit.com/r/'):
body += f'* [unddit.com](https://unddit.com/{href.replace("https://old.reddit.com/", "")})\n'
if href.startswith('https://old.reddit.com/u/'):
# NOTE(review): uses post.url here rather than href (the link being
# processed) — looks like a copy/paste bug from the post.url branch
# above; verify intended behavior.
rev = post.url.replace('https://old.reddit.com/u/', '')
body += f"* [camas.github.io](https://camas.github.io/reddit-search/#\u007b\"author\":\"{rev}\",\"resultSize\":100\u007d)\n"
body += f'* [archive.org](https://web.archive.org/{href})\n'
body += f'* [archive.ph](https://archive.ph/?url={quote(href)}&run=1) (click to archive)\n'
body += f'* [ghostarchive.org](https://ghostarchive.org/search?term={quote(href)}) (click to archive)\n\n'
gevent.spawn(archiveorg, href)
body_html = sanitize(body)
# Only post the comment if the sanitized HTML is under the size cap.
if len(body_html) < 40000:
c = Comment(author_id=SNAPPY_ID,
distinguish_level=6,
parent_submission=post.id,
level=1,
over_18=False,
is_bot=True,
app_id=None,
body_html=body_html
)
g.db.add(c)
# Credit the bot account for the new comment.
snappy = g.db.query(User).filter_by(id = SNAPPY_ID).one_or_none()
snappy.comment_count += 1
snappy.coins += 1
g.db.add(snappy)
# Snappy can trigger the slots minigame on itself via its own quote text.
if body.startswith('!slots1000'):
check_for_slots_command(body, snappy, c)
# Flush so c.id is populated, then mark the comment as its own thread root.
g.db.flush()
c.top_comment_id = c.id
post.comment_count += 1
post.replies = [c]
# Recompute the author's live post count from non-banned, non-deleted posts.
v.post_count = g.db.query(Submission.id).filter_by(author_id=v.id, is_banned=False, deleted_utc=0).count()
g.db.add(v)