finished wikiart

This commit is contained in:
Simon Junod
2024-05-02 22:23:57 +02:00
parent 7cbd09abad
commit c6b3078950
3 changed files with 37 additions and 22 deletions

View File

@@ -11,8 +11,7 @@ from .saints import SAINTS
def citation(): def citation():
try: try:
req = requests.get("http://www.unjourunpoeme.fr") soup = bs(requests.get("http://www.unjourunpoeme.fr").text, features="html.parser")
soup = bs(req.text, features="html.parser")
bloc = soup.find("div", {"class": "poemedujour"}) bloc = soup.find("div", {"class": "poemedujour"})
title = bloc.find("h3", {"class": "posttitle"}).text.strip() title = bloc.find("h3", {"class": "posttitle"}).text.strip()
author = bloc.find("a", {"class": "poemehasardauteur"}).text.strip() author = bloc.find("a", {"class": "poemehasardauteur"}).text.strip()
@@ -24,8 +23,7 @@ def citation():
def saint(): def saint():
today = datetime.now().strftime("%m/%d") return SAINTS[datetime.now().strftime("%m/%d")]
return SAINTS[today]
def weather_emoji(_id): def weather_emoji(_id):
@@ -70,18 +68,14 @@ def digest():
pass pass
now = datetime.now() now = datetime.now()
include_citation = False
include_weather = True
include_artwork = True
embed = Embed(color=[Color.red(), Color.gold(), Color.orange(), Color.blue(), Color.green(), Color.magenta(), Color.purple()][now.weekday()]) embed = Embed(color=[Color.red(), Color.gold(), Color.orange(), Color.blue(), Color.green(), Color.magenta(), Color.purple()][now.weekday()])
embed.title = "Bonjour !" embed.title = "Bonjour !"
embed.description = f"Nous sommes le {now.strftime('%A %-d %B')} ({saint()})" embed.description = f"Nous sommes le {now.strftime('%A %-d %B')} ({saint()})"
if include_citation: if EPHEMERIS_INCLUDE_CITATION:
embed.description += f"\n\n{citation()}\n\u200B" embed.description += f"\n\n{citation()}\n\u200B"
if include_weather: if EPHEMERIS_INCLUDE_WEATHER:
w_strings = [] w_strings = []
for w in weather(46.5196661, 6.6325467): for w in weather(46.5196661, 6.6325467):
w_strings.append(f"{w['emoji']} {w['description']} ({w['temp']}°C, {w['wind_speed']} km/h)") w_strings.append(f"{w['emoji']} {w['description']} ({w['temp']}°C, {w['wind_speed']} km/h)")
@@ -89,9 +83,34 @@ def digest():
embed.add_field(name="Après-midi", value=w_strings[1]) embed.add_field(name="Après-midi", value=w_strings[1])
embed.add_field(name="Soir", value=w_strings[2]) embed.add_field(name="Soir", value=w_strings[2])
if include_artwork: if EPHEMERIS_INCLUDE_ARTWORK:
embed.set_image("https://uploads3.wikiart.org/00340/images/tintoretto/the-miracle-of-st-mark-freeing-the-slave.jpg") soup = bs(requests.get("https://www.wikiart.org/").text, features="html.parser")
embed.set_footer(text="Coucou")
j = json.loads(soup.find("main", {"class": "wiki-layout-main-page"})["ng-init"].splitlines()[0][28:-1])
images = [
{
"url": j["ImageDescription"]["Url"],
"width": j["ImageDescription"]["Width"],
"height": j["ImageDescription"]["Height"],
},
{
"url": j["PaintingJson"]["image"],
"width": j["PaintingJson"]["width"],
"height": j["PaintingJson"]["height"],
},
]
if isinstance(j["PaintingJson"]["images"], list):
for image in j["PaintingJson"]["images"]:
images.append({
"url": image["image"],
"width": image["width"],
"height": image["height"],
})
embed.set_image(max(images, key=lambda x: x["width"] * x["height"])["url"])
embed.set_footer(text=f"{j['ArtistName']} - {j['Title']} ({j['CompletitionYear']})")
return embed return embed

View File

@@ -10,6 +10,10 @@ EPHEMERIS_CHANNEL_IDS = (CHANNEL1,)
CODENAMES_CHANNEL_IDS = (CHANNEL1, CHANNEL2) CODENAMES_CHANNEL_IDS = (CHANNEL1, CHANNEL2)
WORDLE_CHANNEL_IDS = (CHANNEL2, CHANNEL3) WORDLE_CHANNEL_IDS = (CHANNEL2, CHANNEL3)
EPHEMERIS_INCLUDE_CITATION = True
EPHEMERIS_INCLUDE_WEATHER = True
EPHEMERIS_INCLUDE_ARTWORK = True
WORDLE_VALID_WORDS = "cambot/wordlists/valid_words.txt" WORDLE_VALID_WORDS = "cambot/wordlists/valid_words.txt"
WORDLE_TARGET_WORDS = "cambot/wordlists/target_words.txt" WORDLE_TARGET_WORDS = "cambot/wordlists/target_words.txt"
WORDLE_POINTS = (1, 3, 6) WORDLE_POINTS = (1, 3, 6)

8
foo.py
View File

@@ -1,8 +0,0 @@
# One-off scraper: fetch WikiArt's home page and print the JSON payload
# embedded in the page's Angular ng-init attribute (the featured-artwork data).
import requests
import json
from bs4 import BeautifulSoup as bs
# Download the raw HTML of the WikiArt front page.
html = requests.get("https://www.wikiart.org/").text
soup = bs(html, features="html.parser")
# The <main class="wiki-layout-main-page"> element carries an ng-init attribute;
# its first line embeds a JSON object after a fixed-length prefix. The [28:-1]
# slice strips that prefix and the trailing character before parsing.
# NOTE(review): the 28/-1 offsets are tied to WikiArt's current markup and will
# silently break if the page layout changes — verify when scraping fails.
print(json.loads(soup.find("main", {"class": "wiki-layout-main-page"})["ng-init"].splitlines()[0][28:-1]))