changed event posting slightly to use a bold heading, updated pipeline to not accidentally delete the wrong event, updated scraper to skip the leader field but also handle it not being present at all

jenz 2022-06-20 21:01:10 +02:00
parent 23a9c1e41c
commit 0c45287ae6
3 changed files with 8 additions and 4 deletions


@@ -106,7 +106,7 @@ async def discord_task():
     """
     cur.execute(sql_statement, [event_title])
     try:
-        event_msg = f"NEW EVENT POSTED:\nTitle: {event_title}\nServer: {event_server}\nMaps: {event_maps}\nDate: {event_date}\nTime: {event_time}\nRewards: {event_reward}\nURL: {event_url}\n\n"
+        event_msg = f"**- New Event Posted! -**\nTitle: {event_title}\nServer: {event_server}\nMaps: {event_maps}\nDate: {event_date}\nTime: {event_time}\nRewards: {event_reward}\nURL: {event_url}\n\n"
         for r in client.get_all_channels():
             if r.name == 'events':
                 await r.send(event_msg)
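Only the heading line of the notification changes here: Discord renders `**...**` as bold. A minimal sketch of the new message format, with placeholder field values and a hypothetical announce() helper (the real bot builds the message from the scraped fields inside discord_task()):

```python
import discord

# Placeholder values for illustration; the real message is built from the scraped event.
event_msg = (
    "**- New Event Posted! -**\n"   # Discord renders **...** as bold
    "Title: Example Event\n"
    "Server: ze.example.org:27015\n"
    "Date: 2022-06-25\n"
    "Time: 20:00 CEST\n"
)

async def announce(client: discord.Client) -> None:
    # Same lookup as in discord_task(): post to every channel named 'events'.
    for channel in client.get_all_channels():
        if channel.name == 'events':
            await channel.send(event_msg)
```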


@@ -16,9 +16,10 @@ class contentPipeline:
         if res is None:
             sql_statement = f"""
             delete from unloze_event.event
-            where event_server like '%{item['event_server'].split(":270")[1]}%'
+            where event_server like '%{item['event_server'].split(":27")[1]}%'
             """
             #very cheap way of replacing rows
+            print("deleting thread: ", sql_statement)
             cur.execute(sql_statement)
             sql_statement = f"""
             insert into unloze_event.event
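The effect of widening the split from ':270' to ':27' is easiest to see on a couple of hypothetical event_server strings (addresses and ports invented for illustration); a minimal sketch:

```python
# Hypothetical event_server values; real rows come from the scraped item.
server_ze = "ZE 149.56.28.11:27015"
server_mg = "MG 149.56.28.11:27035"

# Old split on ':270' keeps only the last two port digits, so the resulting
# LIKE pattern '%15%' can also match unrelated rows and delete the wrong event.
print(server_ze.split(":270")[1])   # -> 15
print(server_mg.split(":270")[1])   # -> 35

# New split on ':27' keeps one more digit, giving a more specific pattern.
print(server_ze.split(":27")[1])    # -> 015
print(server_mg.split(":27")[1])    # -> 035
```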


@@ -60,8 +60,11 @@ class unloze_spider(scrapy.Spider):
             if index == 2:
                 event_date = r
             if index == 3:
-                event_time = r[:-1]
-            if index == 4:
+                event_time = r
+            #just skipping the leader part on ze
+            if index == 4 and '27015' not in event_server:
                 event_reward = r
+            if index == 5 and '27015' in event_server:
+                event_reward = r
             index += 1
             event_maps = ""