Skip to content

Commit

Permalink
refactor and header ui work
Browse files Browse the repository at this point in the history
  • Loading branch information
ilude committed Mar 27, 2024
1 parent 73a7df0 commit 802ad8e
Show file tree
Hide file tree
Showing 9 changed files with 289 additions and 182 deletions.
133 changes: 26 additions & 107 deletions app/app.py
Original file line number Diff line number Diff line change
@@ -1,21 +1,13 @@
import os
import html
import re
import string
import yaml
from datetime import datetime

import feedparser
import pprint
from datetime import datetime
from bs4 import BeautifulSoup
from docker import APIClient
from flask import Flask, render_template
from flask_caching import Cache
from post_processor import post_processor

import difflib
from utils import copy_default_to_configs
from utils import clean_html, copy_default_to_configs, load_file

copy_default_to_configs()

Expand All @@ -26,123 +18,50 @@
'CACHE_TYPE': 'simple',
'CACHE_DEFAULT_TIMEOUT': 600
})
#docker_client = APIClient(base_url='unix://var/run/docker.sock')
current_working_directory = os.path.dirname(os.path.realpath(__file__))

global last_modified_times
last_modified_times = {}

def load_file(file_name):
    """Load a YAML config file, re-reading it only when it changes on disk.

    Resolves *file_name* inside the ``configs`` subdirectory, then serves the
    parsed data from the cache unless the file's mtime differs from the one
    recorded in ``last_modified_times`` or the cached value is missing/empty,
    in which case the file is parsed again and the cache refreshed.
    """
    file_path = os.path.join(current_working_directory, 'configs', file_name)

    # mtime on disk vs. what we remembered the last time we looked.
    current_modified_time = os.path.getmtime(file_path)
    current_data = cache.get(file_path)

    stale = current_modified_time != last_modified_times.get(file_path)
    if stale or not current_data:
        # Record the new mtime, then parse and cache the fresh contents.
        last_modified_times[file_path] = current_modified_time
        with open(file_path, 'r') as file:
            current_data = yaml.safe_load(file)
        cache.set(file_path, current_data)

    return current_data


def clean_html(text):
    """Reduce an HTML fragment to clean, single-line plain text.

    CR/LF become spaces, HTML entities are unescaped, markup is stripped via
    BeautifulSoup's lxml parser, and everything from the first ``[...]``
    bracket group to the end of the string is discarded.
    """
    flattened = html.unescape(text.replace('\n', ' ').replace('\r', ' '))
    plain = BeautifulSoup(flattened, 'lxml').text
    # Drop a trailing "[...]..." tail (feed footer cruft) before trimming.
    return re.sub(r'\[.*?\].*$', '', plain).strip()

def process_duplicates(data):
    """Blank out article summaries flagged by the title/summary comparison.

    Mutates *data* in place and returns it.  For each article the title is
    compared against the summary with :class:`difflib.SequenceMatcher`; when
    the similarity ratio is exactly 0.0 the summary is cleared and the title
    printed (debug output).

    NOTE(review): ``ratio() == 0.0`` fires only when title and summary share
    nothing at all — confirm this is the intended duplicate test rather than
    a high-similarity threshold.
    """
    for entry in data['articles']:
        similarity = difflib.SequenceMatcher(None, entry['title'], entry['summary']).ratio()
        if similarity == 0.0:
            print(entry['title'])
            entry['summary'] = ''

    return data

@app.context_processor
def inject_current_date():
    """Expose the current datetime to every template as ``today_date``."""
    return dict(today_date=datetime.now())

# def docker_event_stream():
# events = docker_client.events()
# for event in events:
# yield 'data: {}\n\n'.format(event)

# @app.route('/events')
# def events():
# return app.response_class(
# docker_event_stream(),
# mimetype='text/event-stream'
# )

# Define route to render the template
@app.route('/')
@cache.cached(timeout=600)
def index():
    """Render the dashboard: header links plus three columns of widgets.

    Loads ``layout.yml`` (headers + widgets), distributes each widget into
    one of three display columns via its 1-based ``column`` key, fetches and
    cleans RSS entries for ``feed`` widgets, and hands everything to the
    ``index.html`` template.  The whole response is cached for 10 minutes.
    """
    layout = load_file('layout.yml', cache)
    headers = layout['headers']
    widgets = layout['widgets']

    # Three display columns; a widget's 'column' key (1-based) selects one.
    columns = [[], [], []]

    for widget in widgets:
        column_index = (widget['column'] - 1) % 3
        if widget['type'] == 'feed':
            parsed_feed = feedparser.parse(widget['url'])
            parsed_item = {
                'title': widget['name'],
                'link': widget['link'],
                'type': widget['type'],
                # Summaries shown unless the widget sets 'summary: false'.
                'summary_enabled': bool(widget.get('summary', True)),
                # Keep at most the ten newest entries; collapse whitespace in
                # titles and strip markup/entities from summaries.
                'articles': [{
                    'title': " ".join(entry.get('title', 'No Title').split()).strip(),
                    'link': entry.link,
                    'summary': clean_html(entry.get('summary', ''))
                } for entry in parsed_feed.entries[:10]] if 'entries' in parsed_feed else []
            }
            # Per-feed post processing (keyed by the feed's title).
            parsed_item = post_processor.process(parsed_item['title'], parsed_item)
            columns[column_index].append(parsed_item)
        elif widget['type'] == 'bookmarks':
            columns[column_index].append({
                'title': widget['name'],
                'type': widget['type'],
                'articles': [{'title': entry['title'], 'link': entry['url']} for entry in widget['bookmarks']]
            })

    # Pass column data to the template
    return render_template('index.html', columns=columns, headers=headers)


if __name__ == '__main__':
port = int(os.environ.get("ONBOARD_PORT", 9830))
if os.environ.get("FLASK_DEBUG", "False") == "True":
Expand Down
Empty file removed app/configs/.keep
Empty file.
82 changes: 82 additions & 0 deletions app/configs/layout.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,82 @@
headers:
- name: "Docs"
link: "https://docs.google.com/"
- name: "Gmail"
link: "https://mail.google.com/mail/u/0/"
- name: "Photos"
link: "https://photos.google.com/"
- name: "Maps"
link: "https://www.google.com/maps"
- name: "Youtube"
link: "https://www.youtube.com/"

widgets:
- name: "Lawrence Person's BattleSwarm Blog"
type: "feed"
link: "https://www.battleswarmblog.com/"
url: "https://www.battleswarmblog.com/?feed=rss2"
column: 1
- name: "Cafe Hayek"
type: "feed"
link: "https://cafehayek.com/"
url: "https://cafehayek.com/feed"
column: 1
- name: "Slashdot"
type: "feed"
link: "https://slashdot.org/"
url: "https://rss.slashdot.org/Slashdot/slashdotMain"
column: 1
summary: false
- name: "Bookmarks"
type: "bookmarks"
column: 2
bookmarks:
- title: Ali Express
url: https://www.aliexpress.com/
- title: Amazon
url: https://www.amazon.com/
- title: ChatGPT
url: https://chat.openai.com/
- title: ClaudeGPT
url: https://claude.ai/chats
- title: GrokGPT
url: https://chat.grok.com/
- title: Cloudflare
url: https://www.cloudflare.com/
- title: Codeword Generator
url: https://cxxr.github.io/nsa.js/
- title: Ebay
url: https://www.ebay.com/
- title: Gists
url: https://gist.github.com/
- title: Github
url: https://github.com/ilude
- title: Is There Any Deal
url: https://isthereanydeal.com/
- title: MoneroOcean
url: https://moneroocean.stream/#/dashboard?addr=47JaiTVAxoujSKhibhmTS3hHaN8mW6Jfr46N1msSBsFhAyJj91KJ2maEk3DMf6gM9Ndmx81CqPAo41XPakFM3NTpUtn9dxL
- title: Office 365
url: https://www.office.com/
- title: Slickdeals
url: https://slickdeals.net/
- title: Teams
url: https://teams.microsoft.com/
- title: Tailscale
url: https://tailscale.com/
- title: Thingiverse
url: https://www.thingiverse.com/
- name: "Real Clear Politics"
type: "feed"
link: "https://www.realclearpolitics.com/"
url: "https://www.realclearpolitics.com/index.xml"
column: 2
- name: "Instapundit"
type: "feed"
link: "https://instapundit.com/"
url: "https://instapundit.com/feed/"
column: 3
- name: "Twitchy"
type: "feed"
link: "https://twitchy.com/"
url: "https://twitchy.com/feed"
column: 3
36 changes: 0 additions & 36 deletions app/defaults/bookmarks.yml

This file was deleted.

48 changes: 47 additions & 1 deletion app/defaults/layout.yml
Original file line number Diff line number Diff line change
Expand Up @@ -3,34 +3,80 @@ headers:
link: "https://docs.google.com/"
- name: "Gmail"
link: "https://mail.google.com/mail/u/0/"
- name: "Photos"
link: "https://photos.google.com/"
- name: "Maps"
link: "https://www.google.com/maps"
- name: "Youtube"
link: "https://www.youtube.com/"

feeds:
widgets:
- name: "Lawrence Person's BattleSwarm Blog"
type: "feed"
link: "https://www.battleswarmblog.com/"
url: "https://www.battleswarmblog.com/?feed=rss2"
column: 1
- name: "Cafe Hayek"
type: "feed"
link: "https://cafehayek.com/"
url: "https://cafehayek.com/feed"
column: 1
- name: "Slashdot"
type: "feed"
link: "https://slashdot.org/"
url: "https://rss.slashdot.org/Slashdot/slashdotMain"
column: 1
summary: false
- name: "Bookmarks"
type: "bookmarks"
column: 2
bookmarks:
- title: Ali Express
url: https://www.aliexpress.com/
- title: Amazon
url: https://www.amazon.com/
- title: ChatGPT
url: https://chat.openai.com/
- title: ClaudeGPT
url: https://claude.ai/chats
- title: GrokGPT
url: https://chat.grok.com/
- title: Cloudflare
url: https://www.cloudflare.com/
- title: Codeword Generator
url: https://cxxr.github.io/nsa.js/
- title: Ebay
url: https://www.ebay.com/
- title: Gists
url: https://gist.github.com/
- title: Github
url: https://github.com/ilude
- title: Is There Any Deal
url: https://isthereanydeal.com/
- title: MoneroOcean
url: https://moneroocean.stream/#/dashboard?addr=47JaiTVAxoujSKhibhmTS3hHaN8mW6Jfr46N1msSBsFhAyJj91KJ2maEk3DMf6gM9Ndmx81CqPAo41XPakFM3NTpUtn9dxL
- title: Office 365
url: https://www.office.com/
- title: Slickdeals
url: https://slickdeals.net/
- title: Teams
url: https://teams.microsoft.com/
- title: Tailscale
url: https://tailscale.com/
- title: Thingiverse
url: https://www.thingiverse.com/
- name: "Real Clear Politics"
type: "feed"
link: "https://www.realclearpolitics.com/"
url: "https://www.realclearpolitics.com/index.xml"
column: 2
- name: "Instapundit"
type: "feed"
link: "https://instapundit.com/"
url: "https://instapundit.com/feed/"
column: 3
- name: "Twitchy"
type: "feed"
link: "https://twitchy.com/"
url: "https://twitchy.com/feed"
column: 3
14 changes: 14 additions & 0 deletions app/docker.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@

#docker_client = APIClient(base_url='unix://var/run/docker.sock')

# def docker_event_stream():
# events = docker_client.events()
# for event in events:
# yield 'data: {}\n\n'.format(event)

# @app.route('/events')
# def events():
# return app.response_class(
# docker_event_stream(),
# mimetype='text/event-stream'
# )
Loading

0 comments on commit 802ad8e

Please sign in to comment.