Compare commits

..

No commits in common. "49507b0f1ccc12ef930f7b1b0853b6460f74f35a" and "5f2edec82666946651ffb5fb37b5f3951b7c05bb" have entirely different histories.

10 changed files with 68 additions and 72 deletions

View file

@@ -1,2 +0,0 @@
venv/
__pycache__/

View file

@@ -1,3 +1,5 @@
MASTODON_INSTANCE_ADDRESS=mastodon.social
SENTENCES_DIRECTORIES=['./data/sentences/']
WORDS_DIRECTORIES=["./data/words/"]
MISSKEY_INSTANCE_ADDRESS="worldc.one"
MISSKEY_ACCESS_TOKEN="BWbGlK6lWuUgXQpFM7igtmbZ30g6BOIc" # it is just a sample
SENTENCES_FILES=['./data/sentences/default.txt'] # Putting an absolute path is recommended
# SENTENCES_DIRECTORIES=['./data/sentences/']

2
.gitignore vendored
View file

@@ -1,6 +1,6 @@
venv/
setting.py
google_api.json
__pycache__/
.env
docker-compose.yml
token.secret

View file

@@ -2,8 +2,12 @@ FROM python:bookworm
WORKDIR /app
COPY . .
COPY src src
COPY main.py main.py
COPY requirements.txt requirements.txt
# RUN python -m venv venv
# RUN source ./venv/bin/activate
RUN pip install -r requirements.txt
CMD ["python", "main.py"]

View file

@@ -1,33 +1,23 @@
# Mastodon Random Sentence Bot
# Misskey Random Sentence Bot
[![no github badge](https://nogithub.codeberg.page/badge.svg)](https://nogithub.codeberg.page/)
A mastodon bot posting random sentences. Forked from [Misskey Random Sentence Bot](https://git.worldc.one/worldcone/misskey-random-sentence-bot)
## How to run with docker compose
1. clone repo
2. change setting
```bash
# copy compose file
cp docker-compose.example.yml docker-compose.yml
```
# copy example compose file
cp docker-compose.example.yml docker-compose.yml
# copy and edit .env
cp .example.env .env
vim .env
# edit compose file
vim docker-compose.yml
```
3. add your access token
```bash
touch token.secret
vim token.secret
3. RUN
```
4. RUN
```bash
docker compose up
docker compose down
```
@@ -38,20 +28,20 @@ docker compose down
2. make venv
```bash
```
python3 -m venv venv
source ./venv/bin/activate
```
3. install modules
```bash
```
pip install -r requirements.txt
```
4. change setting (changing the sentences and words directories to an absolute path is recommended)
4. change setting
```bash
```
# copy example setting file
cp .example.env .env
@@ -61,7 +51,7 @@ vim .env
5. RUN
```bash
```
python main.py
```

View file

@@ -6,8 +6,9 @@ services:
context: .
dockerfile: ./Dockerfile
environment:
- MASTODON_INSTANCE_ADDRESS=${MASTODON_INSTANCE_ADDRESS}
- SENTENCES_DIRECTORIES=${SENTENCES_DIRECTORIES}
- WORDS_DIRECTORIES=${WORDS_DIRECTORIES}
- MISSKEY_INSTANCE_ADDRESS=worldc.one
- MISSKEY_ACCESS_TOKEN=BWbGlK6lWuUgXQpFM7igtmbZ30g6BOIc
# - SENTENCES_FILES=['/data/sentences/default.txt']
- SENTENCES_DIRECTORIES=['/data/sentences/']
volumes:
- ./data:/app/data
- ./data:/data

14
main.py
View file

@@ -1,7 +1,7 @@
import os
import random
from mastodon import Mastodon
from misskey import Misskey
from dotenv import load_dotenv
from src.choose_sentence import chooseSentence
@@ -10,7 +10,7 @@ from src.generate_sentence import generateSentence
load_dotenv()
# authentication to misskey
mastodon = Mastodon(access_token="token.secret", api_base_url=os.environ["MASTODON_INSTANCE_ADDRESS"])
misskey = Misskey(address=os.environ["MISSKEY_INSTANCE_ADDRESS"], i=os.environ["MISSKEY_ACCESS_TOKEN"])
def writeRandomSentenceNote():
@@ -22,12 +22,12 @@ def writeRandomSentenceNote():
# choose method and get sentence
sentence: str = random.choice(choice_methods)()
# write note
toot = mastodon.toot(sentence)
toot_url = toot["url"]
toot_content = toot["content"]
toot_created_at = toot["created_at"]
print(f"{toot_created_at} {toot_url} : {toot_content}")
random_sentence_note = misskey.notes_create(text=sentence)
note_id = random_sentence_note["createdNote"]["id"]
note_text = random_sentence_note["createdNote"]["text"]
print(f"{note_id} | {note_text}")
if __name__ == "__main__":

View file

@@ -1,2 +1,3 @@
Mastodon.py
Misskey.py
python-dotenv

View file

@@ -1,10 +1,12 @@
# Get mastodon access token
1. Preferences > Development > New Application
2. Set name of your app and enable `write`
3. Submit
# Misskey api key 발급받기
1. 설정 - api 접속
2. 엑세스 토큰 생성 - **"노트를 작성하거나 삭제합니다"** 켬
# docker compose crontab 설정하기
# Set crontab with docker compose
If you want your bot to post every hour:
```
0 * * * * docker compose -f <your bot's path>/docker-compose.yml up
docker compose -f 프로젝트경로/docker-compose.yml up
```
을 crontab에 등록하시면 됩니다(아마도)

View file

@@ -8,34 +8,32 @@ from dotenv import load_dotenv
load_dotenv()
_sentences: list[str] =[]
# try:
_sentences=[]
try:
sentence_files=[]
# if os.environ.get("SENTENCES_FILE") and os.path.isfile(os.environ["SENTENCES_FILE"]):
# sentence_files.append(os.environ["SENTENCE_FILES"])
if os.environ.get("SENTENCES_FILE") and os.path.isfile(os.environ["SENTENCES_FILE"]):
sentence_files.append(os.environ["SENTENCE_FILES"])
# if os.environ.get("SENTENCES_FILES"):
# sentence_files.extend(list(filter(os.path.isfile, json.loads(os.environ["SENTENCES_FILES"]))))
if os.environ.get("SENTENCES_FILES"):
sentence_files.extend(list(filter(os.path.isfile, json.loads(os.environ["SENTENCES_FILES"]))))
# if os.environ.get("SENTENCES_DIRECTORY") and os.path.isdir(os.environ["SENTENCES_DIRECTORY"]):
# sentence_files.extend(glob(f'{os.environ["SENTENCES_DIRECTORY"]}*.txt'))
if os.environ.get("SENTENCES_DIRECTORY") and os.path.isdir(os.environ["SENTENCES_DIRECTORY"]):
sentence_files.extend(glob(f'{os.environ["SENTENCES_DIRECTORY"]}*.txt'))
# if os.environ.get("SENTENCES_DIRECTORIES"):
# get the directories including the sentences files
if os.environ.get("SENTENCES_DIRECTORIES"):
# 원라인 똥 뿌직(터져도 책임 안짐)
sentence_files.extend([glob(f"{_dir}*.txt") for _dir in filter(os.path.isdir, json.loads(os.environ["SENTENCES_DIRECTORIES"]))][0])
for sentence_file in sentence_files:
with open(sentence_file, "r") as f:
_sentences.extend(f.readlines())
# except:
# e = sys.exc_info()[1]
# _sentences = [f"Runtime error(Please contact to the admin): {e}"]
except:
e = sys.exc_info()[1]
_sentences = [f"에러발생(진짜임): {e}"]
# choose random sentence in google spread sheet
def chooseSentence() -> str:
# choice random sentence
return random.choice(_sentences).replace("\\n","\n")