initial release of event notifier
commit 23a9c1e41c
parent e4712524d2

event_notification/python/README.md (new file, 5 lines)
@@ -0,0 +1,5 @@
source venv/bin/activate
pip3 list
pip3 install mysql-connector-python
pip3 install discord.py
pip3 install scrapy
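
Both discord_event.py and pipelines.py below import get_connection_event and token from a local settings.py that is not part of this commit (presumably because it holds credentials). A minimal sketch of what that module is assumed to provide, with placeholder values only:

# settings.py -- hypothetical sketch; the real module is not included in this commit.
import mysql.connector

# Discord bot token used by discord_event.py (placeholder value).
token = "YOUR_DISCORD_BOT_TOKEN"

def get_connection_event():
    # Connection to the unloze_event schema; host/user/password are placeholders.
    return mysql.connector.connect(
        host="localhost",
        user="unloze_event",
        password="CHANGE_ME",
        database="unloze_event",
    )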

event_notification/python/create_tables.sql (new file, 13 lines)
@@ -0,0 +1,13 @@
CREATE TABLE unloze_event.event (
  `event_title` varchar(256) NOT NULL,
  `event_server` varchar(256) DEFAULT NULL,
  `event_maps` varchar(512) DEFAULT NULL,
  `event_date` varchar(512) DEFAULT NULL,
  `event_url` varchar(512) DEFAULT NULL,
  `event_time` varchar(256) DEFAULT NULL,
  `event_reward` varchar(256) DEFAULT NULL,
  `set_map_cooldown` boolean DEFAULT NULL,
  `posted_event_on_discord` boolean DEFAULT NULL,
  `created_on` datetime DEFAULT current_timestamp(),
  PRIMARY KEY (`event_title`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
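
To make the schema concrete, here is a hypothetical insert matching the columns the scraper fills; every value below is made up and get_connection_event is the assumed helper sketched above:

# Illustrative only -- fabricated values, not real event data.
from settings import get_connection_event  # assumed helper, see sketch above

row = ("example_event_thread", "CSS ZE | example.host:27015",
       "ze_example_map_v1 ze_other_map_v2 ", "25/12/2024", "19:00 CET",
       "1 week of VIP", "https://unloze.com/threads/example_event_thread.1/")

with get_connection_event() as conn:
    cur = conn.cursor()
    cur.execute(
        "insert into unloze_event.event "
        "(event_title, event_server, event_maps, event_date, event_time, event_reward, event_url) "
        "values (%s, %s, %s, %s, %s, %s, %s)",
        row,
    )
    conn.commit()  # this library does not commit automatically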

event_notification/python/discord_event.py (new file, 124 lines)
@@ -0,0 +1,124 @@
#!/home/nonroot/event_scrapy/venv/bin/python3

import discord
from datetime import datetime
from discord.ext.tasks import loop
from settings import get_connection_event, token

intents = discord.Intents.default()
client = discord.Client(intents=intents)

@client.event
async def on_message(message):
    #when mentioned, reply with the stored events for the server the user asks about (ZE, MG or ZR)
    if message.author.bot:
        return
    if client.user.mentioned_in(message):
        wanted_server = None
        if "ze" in message.content.lower():
            wanted_server = 27015
        elif "mg" in message.content.lower():
            wanted_server = 27017
        elif "zr" in message.content.lower():
            wanted_server = 27016
        if wanted_server is None:
            await message.channel.send("You did not specify a server. Either write ZE, MG or ZR.")
            return
        with get_connection_event() as conn:
            with conn.cursor() as cur:
                #wanted_server is one of the int ports above, so interpolating it here cannot inject SQL
                sql_statement = f"""
                select
                event_title, event_server, event_maps, event_date, event_time, event_reward, event_url
                from unloze_event.event where event_server like '%{wanted_server}%'
                """
                cur.execute(sql_statement)
                res = cur.fetchall()
                event_msg = ""
                for res1 in res:
                    event_title = res1[0]
                    event_server = res1[1]
                    event_maps = res1[2]
                    event_date = res1[3]
                    event_time = res1[4]
                    event_reward = res1[5]
                    event_url = res1[6]
                    event_msg += f"Title: {event_title}\nServer: {event_server}\nMaps: {event_maps}\nDate: {event_date}\nTime: {event_time}\nRewards: {event_reward}\nURL: {event_url}\n\n"
                await message.channel.send(event_msg)

@loop(seconds = 10)
async def discord_task():
    #background task: set the ZE map cooldowns on event day and announce newly scraped events
    with get_connection_event() as conn:
        with conn.cursor() as cur:
            #only ze needs the cooldowns set
            sql_statement = f"""
            select event_maps, event_date
            from unloze_event.event e
            where e.set_map_cooldown is null
            and e.event_server like '%27015%'
            """
            cur.execute(sql_statement)
            res = cur.fetchone()
            if res is not None:
                event_maps = res[0].split(" ")
                event_date = res[1].strip()
                today_formatted = f"{datetime.now():%d-%m-%Y}".replace("-", "/")
                #print("today_formatted: ", today_formatted)
                #print("event_date: ", event_date)
                if today_formatted == event_date:
                    sql_statement = f"""
                    update unloze_event.event
                    set set_map_cooldown = true
                    where event_server like '%27015%'
                    """
                    cur.execute(sql_statement)
                    for r in client.get_all_channels():
                        if r.name == 'rcon-css-ze':
                            print("event_maps: ", event_maps)
                            for map in event_maps:
                                #silly whitespace nonsense: skip the short fragments left by the split
                                if len(map) > 3:
                                    cooldown_msg = f"""sm_nominate_exclude_time {map} 1 0"""
                                    await r.send(cooldown_msg)
                    conn.commit()

    with get_connection_event() as conn:
        with conn.cursor() as cur:
            sql_statement = f"""
            select
            event_title, event_server, event_maps, event_date, event_time, event_reward, event_url
            from unloze_event.event where posted_event_on_discord is null
            """
            cur.execute(sql_statement)
            res = cur.fetchall()
            if res is not None:
                for res1 in res:
                    event_title = res1[0]
                    event_server = res1[1]
                    event_maps = res1[2]
                    event_date = res1[3]
                    event_time = res1[4]
                    event_reward = res1[5]
                    event_url = res1[6]

                    sql_statement = f"""
                    update unloze_event.event
                    set posted_event_on_discord = 1
                    where event_title = %s
                    """
                    cur.execute(sql_statement, [event_title])
                    try:
                        event_msg = f"NEW EVENT POSTED:\nTitle: {event_title}\nServer: {event_server}\nMaps: {event_maps}\nDate: {event_date}\nTime: {event_time}\nRewards: {event_reward}\nURL: {event_url}\n\n"
                        for r in client.get_all_channels():
                            if r.name == 'events':
                                await r.send(event_msg)
                        conn.commit()
                    except Exception:
                        import traceback
                        error_msg = traceback.format_exc()
                        print("traceback happened: ", error_msg)

def main():
    discord_task.start()
    client.run(token)

if __name__ == '__main__':
    main()
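
One detail worth flagging in discord_task above: the map-cooldown branch only fires when the scraped event_date string is exactly today's date in day/month/year form, since the check is a plain string comparison:

# The format discord_task expects the forum's TL;DR date to use.
from datetime import datetime

today_formatted = f"{datetime.now():%d-%m-%Y}".replace("-", "/")
print(today_formatted)  # e.g. 25/12/2024 -- event_date must match this exactly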

event_notification/python/main.py (new file, 37 lines)
@@ -0,0 +1,37 @@
#!/home/nonroot/event_scrapy/venv/bin/python3

from scrapy.crawler import CrawlerRunner
from scrapy.utils.project import get_project_settings
from twisted.internet import reactor, defer
from scrape_event import unloze_spider

@defer.inlineCallbacks
def handle_urls(result, runner, reactor):
    for item in result:
        yield runner.crawl(unloze_spider, item = item)
    #this finishes the reactor.run()
    reactor.stop()

def main():
    result = []
    urls = []
    #mg
    urls.append("https://unloze.com/forums/events.79/")
    #ze
    urls.append("https://unloze.com/forums/events.76/")
    #zr
    urls.append("https://unloze.com/forums/events.80/")
    #jb but there are no events yet
    #urls.append("https://unloze.com/forums/events.90/")

    for url in urls:
        d = {"event_title" : None, "event_server": None, "event_maps": None, "event_date": None, "event_time": None, "event_reward": None, "url": url}
        result.append(d)

    runner = CrawlerRunner(get_project_settings())
    handle_urls(result, runner, reactor)
    reactor.run()
    print("reactor finish")

if __name__ == '__main__':
    main()

event_notification/python/pipelines.py (new file, 36 lines)
@@ -0,0 +1,36 @@
from settings import get_connection_event

class contentPipeline:
    def process_item(self, item, spider):
        print("entered process_item:")
        print("item: ", item)
        with get_connection_event() as conn:
            with conn.cursor() as cur:
                try:
                    sql_statement = f"""
                    select * from unloze_event.event e
                    where e.event_title = %s
                    """
                    cur.execute(sql_statement, [item['event_title']])
                    res = cur.fetchone()
                    if res is None:
                        sql_statement = f"""
                        delete from unloze_event.event
                        where event_server like '%{item['event_server'].split(":270")[1]}%'
                        """
                        #very cheap way of replacing rows: drop the old event for this server before inserting the new one
                        cur.execute(sql_statement)
                        sql_statement = f"""
                        insert into unloze_event.event
                        (event_title, event_server, event_maps, event_date, event_time, event_reward, event_url)
                        VALUES (%s, %s, %s, %s, %s, %s, %s)
                        """
                        cur.execute(sql_statement, [item['event_title'], item['event_server'], item['event_maps'], item['event_date'], item['event_time'], item['event_reward'], item['event_url']])
                        #the context manager does not seem to commit with this mysql library, so manual committing is needed
                        conn.commit()
                except Exception:
                    import traceback
                    error_msg = traceback.format_exc()
                    print("error_msg: ", error_msg)

        return item
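
The delete in contentPipeline keys its cheap replace on the port digits embedded in event_server. A small illustration of what that split assumes about the scraped value (the address below is a made-up placeholder):

# Hypothetical event_server value; the real string is scraped from the thread's TL;DR block.
event_server = "CSS ZE | example.host:27015"
port_suffix = event_server.split(":270")[1]  # -> "15"
# the pipeline then deletes rows matching: event_server like '%15%'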

event_notification/python/scrape_event.py (new file, 108 lines)
@@ -0,0 +1,108 @@
import scrapy
import traceback
from scrapy_settings import EXT_SETTINGS
from pprint import pprint

class unloze_spider(scrapy.Spider):
    """
    Main unloze event scraper
    """

    custom_settings = EXT_SETTINGS

    def __init__(self, item):
        self.url = item["url"]
        self.item = item

    def start_requests(self):
        request = scrapy.Request(
            url = self.url,
            callback = self.parse
        )
        yield request

    def parse(self, response):
        """
        Parse the event forum listing and pick the newest real event thread
        """
        newest_thread = None
        threads = response.xpath("//div[@class='structItem-title']/@uix-href").extract()
        for thread in threads:
            #skip poll and nomination threads, they are not the event announcement itself
            if "poll" in thread.lower() or "nomination-thread" in thread.lower():
                continue
            newest_thread = thread
            break

        if newest_thread is None:
            print("no thread found. url: ", response.url)
            import sys
            sys.exit(1)
        request = scrapy.Request(
            url = "https://unloze.com" + newest_thread,
            callback = self.parse2
        )
        yield request

    def parse2(self, response):
        """
        Parse the content of the newest event thread itself
        """
        try:
            event_title = response.url.rsplit(".", 1)[0].rsplit("/", 1)[1]
            event_server = ""
            #event managers format their threads differently (highlighting, markup); there is no standard layout
            index = 0
            for r in response.xpath("//span[contains(text(),'TL;DR')]/../../../text()").extract():
                if "\n" in r or len(r) < 4:
                    continue
                if index < 2:
                    event_server += r
                if index == 2:
                    event_date = r
                if index == 3:
                    event_time = r[:-1]
                if index == 4:
                    event_reward = r
                index += 1
            event_maps = ""
            for r in response.xpath("//span[contains(text(),'TL;DR')]/../../../a/text()").extract():
                event_maps += f"{r} "
            if not index:
                #fallback layout: the TL;DR block is wrapped in <b> instead of <span>
                tldr_count = 0
                for r in response.xpath("//b[contains(text(),'TL;DR')]/../../../span//text()").extract():
                    if "\n" in r or len(r) < 4:
                        continue
                    if "TL;DR" in r:
                        tldr_count += 1
                    if tldr_count < 2:
                        continue
                    if index == 2 or index == 4:
                        event_server += r
                    if index == 7:
                        event_date = r
                    if index == 9:
                        event_time = r
                    if index == 13:
                        event_reward = r
                    index += 1
                for r in response.xpath("//b[contains(text(),'TL;DR')]/../../../a//text()").extract():
                    event_maps += f"{r} "

            self.item["event_title"] = event_title
            self.item["event_date"] = event_date
            self.item["event_time"] = event_time
            self.item["event_server"] = event_server
            self.item["event_maps"] = event_maps
            self.item["event_reward"] = event_reward
            self.item["event_url"] = response.url

        except Exception:
            error_msg = traceback.format_exc()
            print("traceback msg: ", error_msg)
            print("url: ", response.url)
            import sys
            sys.exit(1)

        #pprint(self.item)
        return self.item

event_notification/python/scrapy.cfg (new file, 5 lines)
@@ -0,0 +1,5 @@
[settings]
default = scrapy_settings

[deploy]
project = scrapy_unloze_events

event_notification/python/scrapy_settings.py (new file, 10 lines)
@@ -0,0 +1,10 @@
BOT_NAME = "unloze_events"

SPIDER_MODULES = ['scrape_event']

EXT_SETTINGS = {
    "ITEM_PIPELINES": {
        "pipelines.contentPipeline": 1
    },
    "DOWNLOAD_DELAY" : 0
}
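
For a quick manual run outside the systemd timer, the same spider could also be driven with Scrapy's CrawlerProcess instead of main.py's CrawlerRunner. A minimal sketch, assuming it is run from the project directory so scrapy.cfg and the modules above are picked up:

# Hypothetical one-off crawl of the ZE events forum; not part of this commit.
from scrapy.crawler import CrawlerProcess
from scrapy.utils.project import get_project_settings
from scrape_event import unloze_spider

item = {"event_title": None, "event_server": None, "event_maps": None,
        "event_date": None, "event_time": None, "event_reward": None,
        "url": "https://unloze.com/forums/events.76/"}  # ze forum, same URL as main.py

process = CrawlerProcess(get_project_settings())
process.crawl(unloze_spider, item=item)
process.start()  # blocks until the crawl finishes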

event_notification/scripting/event_notifier.sp (new file, 105 lines)
@@ -0,0 +1,105 @@
#pragma semicolon 1
#define PLUGIN_AUTHOR "jenz"
#define PLUGIN_VERSION "1.0"
#pragma newdecls required
#include <sourcemod>

Database g_hDatabase;

public Plugin myinfo =
{
    name = "event notifier ingame",
    author = PLUGIN_AUTHOR,
    description = "plugin simply tells information about the last announced event on this server",
    version = PLUGIN_VERSION,
    url = "www.unloze.com"
};

public void OnPluginStart()
{
    Database.Connect(SQL_OnDatabaseConnect, "Event_notifier");
    RegConsoleCmd("sm_event", Command_Event_notifier);
    RegConsoleCmd("sm_events", Command_Event_notifier);
}

public void SQL_OnDatabaseConnect(Database db, const char[] error, any data)
{
    if(!db || strlen(error))
    {
        LogError("Database error: %s", error);
        return;
    }
    g_hDatabase = db;
}

public Action Command_Event_notifier(int client, int args)
{
    if (!g_hDatabase)
    {
        Database.Connect(SQL_OnDatabaseConnect, "Event_notifier");
        return Plugin_Handled;
    }
    //only 3 servers with events, none exist on jb
    int i_port = GetConVarInt(FindConVar("hostport"));
    char sQuery[512];
    Format(sQuery, sizeof(sQuery), "select event_title, event_server, event_maps, event_date, event_time, event_reward from unloze_event.event e where e.event_server like '%s%i%s'", "%", i_port, "%");
    g_hDatabase.Query(SQL_OnQueryCompleted, sQuery, GetClientSerial(client));
    return Plugin_Handled;
}

public void SQL_OnQueryCompleted(Database db, DBResultSet results, const char[] error, int iSerial)
{
    if (!db || strlen(error))
    {
        LogError("Query error 3: %s", error);
        return; //results is not valid on error, nothing to render
    }
    int client;
    if ((client = GetClientFromSerial(iSerial)) == 0)
        return;
    Panel mSayPanel = new Panel(GetMenuStyleHandle(MenuStyle_Radio));
    char sTitle[256];
    if (results.RowCount && results.FetchRow())
    {
        char sBuffer[256];
        results.FetchString(0, sTitle, sizeof(sTitle));
        Format(sTitle, sizeof(sTitle), "Title: %s", sTitle);
        mSayPanel.SetTitle(sTitle);
        results.FetchString(1, sBuffer, sizeof(sBuffer));
        mSayPanel.DrawItem("", ITEMDRAW_SPACER);
        Format(sBuffer, sizeof(sBuffer), "Server: %s", sBuffer);
        mSayPanel.DrawText(sBuffer);
        results.FetchString(2, sBuffer, sizeof(sBuffer));
        mSayPanel.DrawItem("", ITEMDRAW_SPACER);
        Format(sBuffer, sizeof(sBuffer), "Maps: %s", sBuffer);
        mSayPanel.DrawText(sBuffer);
        results.FetchString(3, sBuffer, sizeof(sBuffer));
        mSayPanel.DrawItem("", ITEMDRAW_SPACER);
        Format(sBuffer, sizeof(sBuffer), "Date: %s", sBuffer);
        mSayPanel.DrawText(sBuffer);
        results.FetchString(4, sBuffer, sizeof(sBuffer));
        mSayPanel.DrawItem("", ITEMDRAW_SPACER);
        Format(sBuffer, sizeof(sBuffer), "Time: %s", sBuffer);
        mSayPanel.DrawText(sBuffer);
        results.FetchString(5, sBuffer, sizeof(sBuffer));
        mSayPanel.DrawItem("", ITEMDRAW_SPACER);
        Format(sBuffer, sizeof(sBuffer), "Reward: %s", sBuffer);
        mSayPanel.DrawText(sBuffer);
    }

    mSayPanel.DrawItem("", ITEMDRAW_SPACER);
    mSayPanel.DrawItem("1. Got it!", ITEMDRAW_RAWLINE);

    mSayPanel.SetKeys(1023);
    mSayPanel.Send(client, Handler_Menu, 0);
    delete mSayPanel;
    delete results;
}

public int Handler_Menu(Menu menu, MenuAction action, int param1, int param2)
{
    switch(action)
    {
        case MenuAction_Select, MenuAction_Cancel:
            delete menu;
    }
    return 0;
}

event_notification/systemctl/discord_event_notifier.service (new file, 10 lines)
@@ -0,0 +1,10 @@
[Unit]
Description=runs discord event notifier

[Service]
Type=simple
User=nonroot
Environment=PYTHONUNBUFFERED=1
Environment=PATH=/home/nonroot/event_scrapy/venv/bin:/usr/local/bin:/usr/bin:/bin:/usr/local/games:/usr/games:/snap/bin
WorkingDirectory=/home/nonroot/event_scrapy
ExecStart=/home/nonroot/event_scrapy/discord_event.py

@@ -0,0 +1,8 @@
[Unit]
Description=Discord event notifier launcher

[Timer]
OnCalendar=*-*-* *:55

[Install]
WantedBy=multi-user.target

event_notification/systemctl/event_scraper.service (new file, 10 lines)
@@ -0,0 +1,10 @@
[Unit]
Description=runs event web scraping on the unloze forum

[Service]
Type=simple
User=nonroot
Environment=PYTHONUNBUFFERED=1
Environment=PATH=/home/nonroot/event_scrapy/venv/bin:/usr/local/bin:/usr/bin:/bin:/usr/local/games:/usr/games:/snap/bin
WorkingDirectory=/home/nonroot/event_scrapy
ExecStart=/home/nonroot/event_scrapy/main.py

event_notification/systemctl/event_scraper.timer (new file, 8 lines)
@@ -0,0 +1,8 @@
[Unit]
Description=Decides when to scrape the event section on the forum

[Timer]
OnCalendar=*-*-* *:0,30

[Install]
WantedBy=multi-user.target