from bs4 import BeautifulSoup
import requests
import sqlite3
import atexit
from flask import Flask, jsonify, render_template
from apscheduler.schedulers.background import BackgroundScheduler

# Initialize Flask app
app = Flask(__name__)

# Database setup
DB_NAME = 'events.db'


def init_db():
    conn = sqlite3.connect(DB_NAME)
    cursor = conn.cursor()
    # UNIQUE(name, date, time) lets INSERT OR IGNORE skip events
    # that were already stored on a previous scrape.
    cursor.execute('''CREATE TABLE IF NOT EXISTS events (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        name TEXT,
        date TEXT,
        time TEXT,
        location TEXT,
        description TEXT,
        UNIQUE(name, date, time)
    )''')
    conn.commit()
    conn.close()


def get_text(parent, tag, class_name):
    """Safely extract stripped text; returns '' if the element is missing."""
    element = parent.find(tag, class_=class_name)
    return element.get_text(strip=True) if element else ''


# Function to scrape event data
def scrape_events():
    url = "https://www.visitgalena.org/events/"
    try:
        response = requests.get(url, timeout=30)
        response.raise_for_status()
        soup = BeautifulSoup(response.text, 'html.parser')

        # The selectors below are assumptions about the page's markup --
        # inspect the site and update them to match its actual structure.
        events = soup.find_all('div', class_='event-card')

        conn = sqlite3.connect(DB_NAME)
        cursor = conn.cursor()
        for event in events:
            name = get_text(event, 'h3', 'event-title')
            date = get_text(event, 'span', 'event-date')
            time = get_text(event, 'span', 'event-time')
            location = get_text(event, 'div', 'event-location')
            description = get_text(event, 'p', 'event-description')

            # Insert the event; duplicates are skipped via the UNIQUE constraint
            cursor.execute('''INSERT OR IGNORE INTO events
                              (name, date, time, location, description)
                              VALUES (?, ?, ?, ?, ?)''',
                           (name, date, time, location, description))
        conn.commit()
        conn.close()
    except Exception as e:
        print(f"Error during scraping: {e}")


# API endpoint to list all events as JSON
@app.route('/api/events', methods=['GET'])
def get_events():
    conn = sqlite3.connect(DB_NAME)
    cursor = conn.cursor()
    cursor.execute('SELECT name, date, time, location, description FROM events')
    events = cursor.fetchall()
    conn.close()
    return jsonify([{
        'name': event[0],
        'date': event[1],
        'time': event[2],
        'location': event[3],
        'description': event[4]
    } for event in events])


# Frontend to manage events (expects a templates/manage.html file)
@app.route('/manage', methods=['GET'])
def manage_events():
    conn = sqlite3.connect(DB_NAME)
    cursor = conn.cursor()
    cursor.execute('SELECT id, name, date, time, location, description FROM events')
    events = cursor.fetchall()
    conn.close()
    return render_template('manage.html', events=events)


if __name__ == "__main__":
    init_db()

    # Re-scrape every 6 hours; started inside the main guard so the Flask
    # debug reloader doesn't spawn a second scheduler process.
    scheduler = BackgroundScheduler()
    scheduler.add_job(scrape_events, 'interval', hours=6)
    scheduler.start()
    atexit.register(scheduler.shutdown)

    app.run(debug=True, use_reloader=False)