first commit

sose 2021-05-01 20:06:46 -07:00
commit 3a2fa548a5
11 changed files with 854 additions and 0 deletions

2
.gitignore vendored Normal file

@@ -0,0 +1,2 @@
bot_password
*.secret

1
action_offset Normal file

@@ -0,0 +1 @@
0

0
bad_words Normal file

1
continue_file Normal file

@@ -0,0 +1 @@
2014-01-12 16:45:42|503453

1
food_item_offset Normal file

@@ -0,0 +1 @@
0

500
good_words Normal file

@@ -0,0 +1,500 @@
clean and jerk
smattering
alectoromancy
lurve
kitesurfing
canoeing
biking
atropin
mono-theism
mono theism
n-space
felching
tobogganing
slate
evo-devo
zorbing
webcasting
blogging
popping
case hardening
hogging
name-dropping
hying
necking
copper
fascism
goss
head
guacamole
auburn
rhubarb rhubarb rhubarb
aethyr
ethyr
amethyst
coursing
luck
anglicization
bad
cast-iron
castiron
warmongering
waterproofing
acontextuality
outrecuidance
wayfaring
carpeting
meat
sweetening
melaena
pitting
chikara gami
plotting
shrinkwrap
fringing
payment
chanting
hair
thickness
pole-vaulting
cubism
rule of law
inclosure
hospitability
antiaircraft
oral
poundage
racketeering
color
het
beer
outgassing
cunt
dick
priestesshood
channa dahl
wiretapping
velocity
technology
solomongundy
disgust
balderdash
bullshit
lead
racewalking
fire
feed
sea-sickness
bussing
busing
handy-dandy
cabinet making
asafetida
chutzpa
gas
roofing
belief
delftware
hay
tachypnea
qigong
force
law
science
gallows humour
jerk
grammar
harikari
overwintering
camelcase
cutch
tenish
spot welding
drag
hæmatin
hæmoglobin
secret
acacia
accommodation
translation
radar
existence
air
art
string
change
hara-kiri
aa
analysis
rumor
cocking
lithium
magnesium
phosphorus
mothering
cadmium
thallium
friending
perfecting
aqua
puer
falconing
dobutamine
bar
hamburger
tom
ocean
pasta
fax
raj
honor
plaster
imprinting
railroading
compare
nano
media
data
threading
beef
sag
quicksilvering
paralinguistics
stock
war
counseling
Chinese
boot
body
sleep
interest
wind
manga
ploughing
forespin
fouling
shelling
football
nature
shit
bronze
pull
volunteering
note
information
neon
sleepwalking
light
baloney
ganking
writing
trite
legal
metaphor
card
titanium
rubidium
zirconium
budgeting
capitalism
democracy
error
labor
nix
focus
show
homo
liber
alcohol
anime
wasabi
melon
toast
golf
rum
lite
bene
boffing
divadom
aak
seg
codswallop
wine
varletry
thorybism
downshifting
farina
faro
satellite
cargo
slap
beseeming
platina
poppycock
work
first
mortar
paint
morphology
neologism
cheese
sulphonation
security
theory
sulphation
coat
draining
want
divining
ruin
rook
pop
crash
skin
couverture
lip
steel
management
mactation
desludging
sacking
gayety
sushimi
squirreling
Palaeogene
palaeomagnetism
respect
mu
influence
rock
graham
muff-diving
silence
oregano
autostop
smoking
prestige
slip
ed
haemagglutinin
weatherizing
passion
bagasse
alcool
obsession
administration
ado
flannelet
dopamine
button mashing
kybosh
bodging
synesthesia
half-mast
macrame
nitroglycerin
stick
wear
wildlife
phonetics
leukæmia
difference
leucaemia
leucæmia
authority
power
pig
logic
ErbB-2
dark
living
silva
rancor
liquor
cruor
cannabis
cunnilingus
flatus
valor
cassia
government
access
Greek
gobbledygook
voice mail
d-amphetamine
alpha taxonomy
wee
pasteurization
type
waste
eunuchism
angelolatry
cockfighting
chocolate
rata
embourgeoisement
sumo
underuse
coke
staff
clairvoyance
rock-bottom
rollerskating
technique
pan
roughhouse
transport
flatulence
basketball
gel
motion
cave
chai
pus
prison
farce
argent
matériel
bando
furore
spit
spoil
blackletter
thought
upset
concern
competition
cloth
act
control
connection
ice
sovereignty
cattle
insourcing
après-ski
heart
jus
character
odium
opium
terror
baba ghanoush
onyx
inertia
imperium
unemployment
metadata
pollen
immunisation
collation
spalt
yerba buena
seventh heaven
magick
ahinsa
tick-tack-toe
tee ball
ju-jitsu
jiu-jitsu
sturdy
somatotrophin
phytomenadione
capital
pizza
scanability
chance
balance
base
single-phasing
rigamarole
scampering
propaganda
debate
swan's-down
swan's down
cross talk
imperative
freethought
conching
micronization
draughts
scrolling
pain
pot
suicide
lieu
freelancing
justice
export
grippe
proposition
mains
rapport
engagement
suing
trahison
ronde
balsamico
macchia
nausea
bile
anemia
neu
caviar
corn
toweling
towelling
methylbenzene
phenylmethane
long-haul
allure
rime
email
porno
certitude
courgette
capivi
bricolage
pique
azote
pine
molecular-weight
dewatering
sourcecode
cane
colour
pressure
damage
paste
avarice
venison
armour
dance
cerise
grief
inspiration
lard
honour
confusion
oblivion
gunk-holing
animus
albumen
kalium
jujutsu
hyponatraemia
thiosulphuric acid
sulphurous acid
sod
slop
fervor
product
zero
vinification
ass
indicative
genitive
hunt and peck
canalisation

273
mcsb.py Executable file

@@ -0,0 +1,273 @@
#!/usr/bin/env python
import requests
import time
import pint
import random
from typing import Optional
from mastodon import Mastodon
ureg = pint.UnitRegistry()
max_food_weight = 100000 # in mg
max_food_length = 1000 # in mm
max_food_volume = 2000 # in ml
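# toot template, filled in the main loop with (action, noun1, noun2)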
meme = "*finishes {}* wow. that was truly the {} of {}."
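# one-time app registration with botsin.space; mastodon.py's create_app
# writes the client credentials to pytooter_clientcred.secret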
'''
Mastodon.create_app(
'minecraftofbots',
api_base_url = 'https://botsin.space',
to_file = 'pytooter_clientcred.secret'
)
'''
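# the account password lives in an untracked file (see .gitignore)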
with open('./bot_password', 'r') as password_file:
    bot_password = password_file.readline().strip('\n')
mastodon = Mastodon(
client_id = 'pytooter_clientcred.secret',
api_base_url = 'https://botsin.space'
)
mastodon.log_in(
'sose-iwnl+mcofbots@protonmail.com',
bot_password,
to_file = 'pytooter_usercred.secret'
)
mastodon = Mastodon(
access_token = 'pytooter_usercred.secret',
api_base_url = 'https://botsin.space'
)
def get_a_food_action():
food_item_offset: str
with open("./food_item_offset", "r+") as food_item_offset_file:
food_item_offset = food_item_offset_file.read().strip()
food_item_url: str = ("https://api.conceptnet.io/query"
+ "?end=/c/en/food"
+ "&rel=/r/IsA"
+ "&offset=" + food_item_offset
+ "&limit=1")
print("getting " + food_item_url)
response: requests.Response = requests.get(food_item_url)
food_item: str
if response.ok:
food_item = response.json()["edges"][0]["start"]["label"]
else:
print("request failed with status code {}: {}"
.format(response.status_code, response.reason))
return None
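    # each line of ./units is "<dimension> <pint unit>": the m/v/l prefix
    # picks mass, volume, or length, and the second field is the unit the
    # randomized base amount gets converted to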
with open("./units", "r") as units_file:
lines: list = units_file.readlines()
unit: str = random.choice(lines)
amount: pint.Quantity
if unit[0] == "m":
amount = random.randrange(1, max_food_weight) * ureg.milligram
elif unit[0] == "v":
amount = random.randrange(1, max_food_volume) * ureg.milliliter
elif unit[0] == "l":
amount = random.randrange(1, max_food_length) * ureg.millimeter
    amount = amount.to(ureg(unit.strip().split()[1]))
with open("./food_item_offset", "w") as food_item_offset_file:
food_item_offset_file.write(str(int(food_item_offset) + 1))
    return "eating " + str(amount).replace("_", " ") + "s of " + food_item
def get_an_action():
action_offset: str
with open("./action_offset", "r") as action_offset_file:
action_offset = action_offset_file.read().strip()
action_url: str = ("https://api.conceptnet.io/query?start=/c/en/person"
+ "&rel=/r/CapableOf"
+ "&offset=" + action_offset
+ "&limit=1")
conjugation_url: str = ("https://lt-nlgservice.herokuapp.com"
+ "/rest/english/conjugate"
+ "?verb=")
print("getting " + action_url)
response: requests.Response = requests.get(action_url)
unconjugated_action: str
conjugated_action: str
if response.ok:
unconjugated_action = response.json()["edges"][0]["end"]["label"]
else:
print("request failed with status code {}: {}"
.format(response.status_code, response.reason))
return None
conjugation_url += unconjugated_action
print("getting " + conjugation_url)
response = requests.get(conjugation_url)
if response.ok:
if response.json()["result"] != "OK":
print("couldn't conjugate " + unconjugated_action)
with open("./action_offset", "w") as action_offset_file:
action_offset_file.write(str(int(action_offset) + 1))
return None
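        # take one form from the indicative conjugation table and drop the
        # first word (the pronoun), leaving just the verb phrase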
        conjugated_action = (response.json()
                             ["conjugation_tables"]["indicative"][5]
                             ["forms"][0][1])
conjugated_action = " ".join(conjugated_action.split()[1:])
else:
print("request failed with status code {}: {}"
.format(response.status_code, response.reason))
return None
with open("./action_offset", "w") as action_offset_file:
action_offset_file.write(str(int(action_offset) + 1))
return conjugated_action
def get_more_words():
# we're doing these 500 at a time so we can get some semblance of
# randomness
max_words: str = "500"
new_continue_point: str
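    # continue_file holds Wiktionary's opaque gcmcontinue token from the
    # previous query, so each batch picks up where the last one stopped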
with open("./continue_file", "r") as continue_file:
continue_point = continue_file.read()
if continue_point.strip() != "":
url: str = ("https://en.wiktionary.org/w/api.php?action=query"
+ "&format=json"
+ "&generator=categorymembers"
+ "&gcmnamespace=0"
+ "&gcmtitle=Category:English_uncountable_nouns"
+ "&gcmsort=timestamp"
+ "&gcmlimit=" + max_words
+ "&gcmcontinue=" + continue_point)
else:
url: str = ("https://en.wiktionary.org/w/api.php?action=query"
+ "&format=json"
+ "&generator=categorymembers"
+ "&gcmnamespace=0"
+ "&gcmtitle=Category:English_uncountable_nouns"
+ "&gcmsort=timestamp"
+ "&gcmlimit=" + max_words)
response: requests.Response = requests.get(url)
response_json: dict
words: list
if response.ok:
response_json = response.json()
else:
print("request failed with status code {}: {}"
.format(response.status_code, response.reason))
return None
new_continue_point = response_json["continue"]["gcmcontinue"]
words = response_json["query"]["pages"].values()
words = list(map(lambda x: x["title"], words))
with open("./good_words", "a") as good_words_file:
for word in words:
good_words_file.write(word + "\n")
print("wrote new words to file")
with open("./continue_file", "w") as continue_file:
continue_file.write(new_continue_point)
print("set to continue from " + new_continue_point)
def get_a_noun():
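    # pick a random word, move it from good_words to bad_words so it can't
    # be reused, and fetch a fresh batch once the pool runs dry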
words: list
word: str
with open("./good_words", "r") as good_words_file:
words = good_words_file.readlines()
word = random.choice(words)
words.pop(words.index(word))
with open("./good_words", "w") as good_words_file:
good_words_file.writelines(words)
with open("./bad_words", "a") as bad_words_file:
bad_words_file.write(word)
if len(words) == 0:
print("out of words, getting more words...")
get_more_words()
word = word.strip()
print("chose " + word)
return word
print(get_a_noun())
while True:
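    # randint(0, 2) is inclusive, so about one cycle in three matches
    # neither branch and the bot just sleeps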
random_number: int = random.randint(0, 2)
action: Optional[str]
if random_number == 0:
action = get_an_action()
if action is None:
continue
noun1 = get_a_noun()
noun2 = get_a_noun()
mastodon.toot(meme.format(action, noun1, noun2))
print("tooted " + meme.format(action, noun1, noun2))
elif random_number == 1:
action = get_a_food_action()
if action is None:
continue
noun1 = get_a_noun()
noun2 = get_a_noun()
mastodon.toot(meme.format(action, noun1, noun2))
print("tooted " + meme.format(action, noun1, noun2))
time.sleep(2600)

55
more_words.py Executable file

@@ -0,0 +1,55 @@
#!/usr/bin/env python
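# standalone fetcher: appends the next page of Wiktionary's
# "English uncountable nouns" category to ./good_words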
import requests
def get_more_words():
# we're doing these 500 at a time so we can get some semblance of
# randomness
max_words: str = "500"
new_continue_point: str
with open("./continue_file", "r") as continue_file:
continue_point = continue_file.read()
if continue_point.strip() != "":
url: str = ("https://en.wiktionary.org/w/api.php?action=query"
+ "&format=json"
+ "&generator=categorymembers"
+ "&gcmnamespace=0"
+ "&gcmtitle=Category:English_uncountable_nouns"
+ "&gcmsort=timestamp"
+ "&gcmlimit=" + max_words
+ "&gcmcontinue=" + continue_point)
else:
url: str = ("https://en.wiktionary.org/w/api.php?action=query"
+ "&format=json"
+ "&generator=categorymembers"
+ "&gcmnamespace=0"
+ "&gcmtitle=Category:English_uncountable_nouns"
+ "&gcmsort=timestamp"
+ "&gcmlimit=" + max_words)
response: requests.Response = requests.get(url)
response_json: dict
words: list
if response.ok:
response_json = response.json()
else:
print("request failed with status code {}: {}"
.format(response.status_code, response.reason))
return False
new_continue_point = response_json["continue"]["gcmcontinue"]
words = response_json["query"]["pages"].values()
words = list(map(lambda x: x["title"], words))
with open("./good_words", "a") as good_words_file:
for word in words:
good_words_file.write(word + "\n")
print("wrote new words to file")
with open("./continue_file", "w") as continue_file:
continue_file.write(new_continue_point)
print("set to continue from " + new_continue_point)
get_more_words()

5
readme.md Normal file

@@ -0,0 +1,5 @@
# mcsb
truly the minecraft of bots.
Powered by ConceptNet http://conceptnet.io (CC BY-SA 4.0)
and LinguaTools https://linguatools.org/language-apis/
MIT licensed
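
## running

a minimal sketch of getting the bot going, assuming a botsin.space account
password sits in ./bot_password:

```sh
./reinit_files.sh  # clear state files and fetch the first 500 words
./mcsb.py          # log in and toot roughly every 43 minutes
```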

7
reinit_files.sh Executable file

@@ -0,0 +1,7 @@
#!/bin/sh
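# reset bot state: clear the pagination token and both word lists,
# zero the ConceptNet offsets, then fetch a fresh batch of words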
: > continue_file
: > good_words
: > bad_words
echo "0" > action_offset
echo "0" > food_item_offset
./more_words.py

9
units Normal file

@@ -0,0 +1,9 @@
m gram
m carat
m unified_atomic_mass_unit
m pound
v liter
v cubic_centimeter
l nautical_mile
l meter
l centimeter