mirror of
https://github.com/Zetaphor/browser-recall.git
synced 2025-12-06 02:19:37 +00:00
Start of redux
@@ -1 +0,0 @@
# This file can be empty, it just marks the directory as a Python package
@@ -1,18 +0,0 @@
from datetime import datetime
from typing import List, Tuple
from browser_history import get_history, get_bookmarks
from urllib.parse import urlparse


class BrowserHistoryCollector:
    @staticmethod
    def get_domain(url: str) -> str:
        return urlparse(url).netloc

    def fetch_history(self) -> List[Tuple[datetime, str, str]]:
        outputs = get_history()
        # Returns list of tuples containing (datetime, url, title)
        return [(entry[0], entry[1], entry[2]) for entry in outputs.histories]

    def fetch_bookmarks(self) -> List[Tuple[datetime, str, str, str]]:
        outputs = get_bookmarks()
        return outputs.bookmarks
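For orientation, a minimal usage sketch of this collector. The import path and the presence of a supported browser profile are assumptions, not guaranteed by the diff above.

    from app.browser import BrowserHistoryCollector  # hypothetical import path

    collector = BrowserHistoryCollector()
    for visit_time, url, title in collector.fetch_history():
        # Each tuple is (datetime, url, title), as returned by browser_history
        print(visit_time.isoformat(), BrowserHistoryCollector.get_domain(url), title)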
app/config.py (251 lines removed)
@@ -1,251 +0,0 @@
import yaml
from pathlib import Path
from typing import Set
import fnmatch
import os
import logging

logger = logging.getLogger(__name__)


class Config:
    def __init__(self):
        self.config_path = Path(__file__).parent / "config.yaml"
        self.load_config()

    def load_config(self):
        if not self.config_path.exists():
            self.config = {"ignored_domains": []}
            self.save_config()
        else:
            with open(self.config_path, 'r') as f:
                self.config = yaml.safe_load(f)

    def save_config(self):
        with open(self.config_path, 'w') as f:
            yaml.dump(self.config, f)

    def is_domain_ignored(self, domain: str) -> bool:
        """Check if a domain matches any of the ignored patterns"""
        patterns = self.config.get('ignored_domains', [])
        return any(fnmatch.fnmatch(domain.lower(), pattern.lower()) for pattern in patterns)

    def add_ignored_domain(self, pattern: str):
        """Add a new domain pattern to the ignored list"""
        if 'ignored_domains' not in self.config:
            self.config['ignored_domains'] = []
        if pattern not in self.config['ignored_domains']:
            self.config['ignored_domains'].append(pattern)
            self.save_config()

    def remove_ignored_domain(self, pattern: str):
        """Remove a domain pattern from the ignored list"""
        if 'ignored_domains' in self.config:
            self.config['ignored_domains'] = [
                p for p in self.config['ignored_domains'] if p != pattern
            ]
            self.save_config()


class ReaderConfig:
    def __init__(self):
        self.excluded_patterns: Set[str] = set()
        self._load_config()

    def _load_config(self):
        config_path = Path("config/reader_config.yaml")
        if not config_path.exists():
            print("Warning: reader_config.yaml not found, creating default config")
            self._create_default_config(config_path)

        try:
            with open(config_path, 'r') as f:
                config = yaml.safe_load(f)
                self.excluded_patterns = set(config.get('excluded_domains', []))
        except Exception as e:
            print(f"Error loading config: {e}")
            self.excluded_patterns = set()

    def _create_default_config(self, config_path: Path):
        config_path.parent.mkdir(parents=True, exist_ok=True)
        default_config = {
            'excluded_domains': [
                'localhost',
                '127.0.0.1',
                '192.168.*.*',
                '10.*.*.*'
            ]
        }
        with open(config_path, 'w') as f:
            yaml.safe_dump(default_config, f, default_flow_style=False)

    def is_domain_excluded(self, domain: str) -> bool:
        """
        Check if a domain matches any exclusion pattern.
        Supports glob-style wildcards (* and ?)
        Examples:
        - '*.example.com' matches any subdomain of example.com
        - 'reddit-*.com' matches reddit-video.com, reddit-static.com, etc.
        - '192.168.*.*' matches any IP in the 192.168.0.0/16 subnet
        """
        domain = domain.lower()

        # Check each pattern
        for pattern in self.excluded_patterns:
            pattern = pattern.lower()

            # Handle IP address patterns specially
            if any(c.isdigit() for c in pattern):
                if self._match_ip_pattern(domain, pattern):
                    return True

            # Handle domain patterns
            if fnmatch.fnmatch(domain, pattern):
                return True
            # Also check if the pattern matches when prepended with a dot
            # This handles cases like 'example.com' matching 'subdomain.example.com'
            if fnmatch.fnmatch(domain, f"*.{pattern}"):
                return True

        return False

    def _match_ip_pattern(self, domain: str, pattern: str) -> bool:
        """
        Special handling for IP address patterns.
        Handles cases like '192.168.*.*' matching '192.168.1.1'
        """
        # Skip if domain isn't IP-like
        if not any(c.isdigit() for c in domain):
            return False

        # Split into octets
        domain_parts = domain.split('.')
        pattern_parts = pattern.split('.')

        # Must have same number of parts
        if len(domain_parts) != len(pattern_parts):
            return False

        # Check each octet
        for domain_part, pattern_part in zip(domain_parts, pattern_parts):
            if pattern_part == '*':
                continue
            if domain_part != pattern_part:
                return False

        return True


DEFAULT_CONFIG_PATH = 'config/reader_config.yaml'
USER_CONFIG_DIR = os.path.expanduser("~/.config/browser-recall")
USER_CONFIG_PATH = os.path.join(USER_CONFIG_DIR, 'reader_config.yaml')


class Config:
    _instance = None

    def __new__(cls, *args, **kwargs):
        if not cls._instance:
            cls._instance = super(Config, cls).__new__(cls)
            cls._instance._initialized = False
        return cls._instance

    def __init__(self, config_path=None):
        if self._initialized:
            return
        self._initialized = True

        self.config_path = self._determine_config_path(config_path)
        self.config_data = self._load_config()
        logger.info(f"Config initialized using: {self.config_path}")
        # Pre-process excluded domains for faster lookup if needed,
        # but direct iteration with fnmatch is often fine for moderate lists.
        self.excluded_domains = self.config_data.get('excluded_domains', [])
        # Ensure it's a list
        if not isinstance(self.excluded_domains, list):
            logger.warning(f"Excluded domains in config is not a list: {self.excluded_domains}. Ignoring.")
            self.excluded_domains = []

    def _determine_config_path(self, provided_path):
        """Determine the correct config path to use."""
        if provided_path and os.path.exists(provided_path):
            return provided_path
        if os.path.exists(USER_CONFIG_PATH):
            return USER_CONFIG_PATH
        if os.path.exists(DEFAULT_CONFIG_PATH):
            return DEFAULT_CONFIG_PATH
        logger.warning("No configuration file found at default or user locations. Using empty config.")
        return None  # Indicate no file was found

    def _load_config(self):
        """Loads the YAML configuration file."""
        if not self.config_path:
            return {}  # Return empty dict if no config file path determined

        try:
            with open(self.config_path, 'r') as f:
                return yaml.safe_load(f) or {}  # Return empty dict if file is empty
        except FileNotFoundError:
            logger.warning(f"Configuration file not found at {self.config_path}. Using default settings.")
            return {}
        except yaml.YAMLError as e:
            logger.error(f"Error parsing configuration file {self.config_path}: {e}")
            return {}  # Return empty dict on parsing error
        except Exception as e:
            logger.error(f"Unexpected error loading configuration {self.config_path}: {e}")
            return {}

    def get_config(self):
        """Returns the loaded configuration data."""
        return self.config_data

    def reload_config(self):
        """Reloads the configuration from the file."""
        logger.info(f"Reloading configuration from: {self.config_path}")
        self.config_data = self._load_config()
        self.excluded_domains = self.config_data.get('excluded_domains', [])
        if not isinstance(self.excluded_domains, list):
            logger.warning(f"Excluded domains in reloaded config is not a list: {self.excluded_domains}. Ignoring.")
            self.excluded_domains = []
        logger.info("Configuration reloaded.")

    def is_domain_ignored(self, domain: str) -> bool:
        """
        Checks if a given domain matches any pattern in the excluded_domains list.
        Supports exact matches and wildcard (*) matching using fnmatch.
        """
        if not domain:  # Ignore empty domains
            return True
        if not self.excluded_domains:  # If list is empty, nothing is ignored
            return False

        # Normalize domain to lowercase for case-insensitive comparison
        domain_lower = domain.lower()

        for pattern in self.excluded_domains:
            if not isinstance(pattern, str):  # Skip non-string patterns
                continue

            # Normalize pattern to lowercase
            pattern_lower = pattern.lower()

            # Use fnmatch.fnmatch for wildcard support (*)
            if fnmatch.fnmatch(domain_lower, pattern_lower):
                # logger.debug(f"Domain '{domain}' ignored due to pattern '{pattern}'")
                return True
        return False

    # --- Add methods to get specific config values safely ---
    @property
    def history_update_interval_seconds(self) -> int:
        """Gets the history update interval, defaulting to 300."""
        return self.config_data.get('history_update_interval_seconds', 300)

    @property
    def markdown_update_interval_seconds(self) -> int:
        """Gets the markdown update interval, defaulting to 300."""
        return self.config_data.get('markdown_update_interval_seconds', 300)

    # Add other specific getters as needed
    # Example:
    # @property
    # def some_other_setting(self) -> str:
    #     return self.config_data.get('some_other_setting', 'default_value')
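A brief sketch of how the singleton Config defined above is expected to behave; the import path and the contents of reader_config.yaml are assumptions.

    from app.config import Config  # hypothetical import path

    cfg_a = Config()
    cfg_b = Config()
    assert cfg_a is cfg_b  # __new__ hands back the same instance

    # Matching is case-insensitive because both domain and pattern are lowercased
    print(cfg_a.is_domain_ignored("Sub.Example.COM"))  # True if "*.example.com" is excluded
    print(cfg_a.is_domain_ignored(""))                 # True: empty domains are always ignored
    print(cfg_a.history_update_interval_seconds)       # 300 unless overridden in the YAML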
@@ -1,13 +0,0 @@
# Domains that should be ignored by the history tracker
# Supports wildcards (*) for pattern matching
ignored_domains:
  - "192.168.*"            # Ignore local network addresses
  - "127.0.0.1"            # Ignore localhost IP addresses
  - "localhost"            # Ignore localhost domains
  - "172.*"
  - "localhost:*"          # Ignore all localhost ports
  - "127.0.0.1:*"          # Ignore all localhost IP ports
  - "*.local"              # Ignore .local domains
  - "about:*"              # Ignore about: URLs
  - "chrome-extension://*" # Ignore Chrome extensions
  - "chrome://*"           # Ignore Chrome URLs
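As a rough illustration of how these glob patterns behave under fnmatch (the matching used by the file-backed Config class above); the candidate values below are made up for the example.

    import fnmatch

    patterns = ["192.168.*", "localhost:*", "chrome://*", "*.local"]
    for candidate in ["192.168.1.10", "localhost:3000", "chrome://newtab", "printer.local", "example.com"]:
        hit = any(fnmatch.fnmatch(candidate.lower(), p.lower()) for p in patterns)
        print(candidate, "ignored" if hit else "tracked")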
app/database.py (281 lines removed)
@@ -1,281 +0,0 @@
from sqlalchemy import create_engine, Column, Integer, String, DateTime, Text, event, text
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from datetime import datetime
import sqlite3

SQLALCHEMY_DATABASE_URL = "sqlite:///./browser_history.db"

# Create engine with custom configuration
engine = create_engine(
    SQLALCHEMY_DATABASE_URL,
    connect_args={
        "timeout": 30,             # Connection timeout in seconds
        "check_same_thread": False,  # Allow multi-threaded access
    },
    # Update pool configuration for better concurrency
    pool_size=5,       # Increase pool size to handle concurrent requests
    max_overflow=10,   # Allow some overflow connections
    pool_timeout=30,   # Connection timeout from pool
    pool_recycle=3600,  # Recycle connections every hour
)

SessionLocal = sessionmaker(
    autocommit=False,
    autoflush=False,
    bind=engine,
    expire_on_commit=False  # Prevent unnecessary reloads
)

Base = declarative_base()


@event.listens_for(engine, "connect")
def set_sqlite_pragma(dbapi_connection, connection_record):
    """Configure SQLite for better performance"""
    if isinstance(dbapi_connection, sqlite3.Connection):
        cursor = dbapi_connection.cursor()

        # Enable WAL mode for better write performance and concurrency
        cursor.execute("PRAGMA journal_mode=WAL")

        # Set page size to 4KB for better performance
        cursor.execute("PRAGMA page_size=4096")

        # Set cache size to ~32MB (a negative cache_size is in KiB, so -32000 = 32,000 KiB)
        cursor.execute("PRAGMA cache_size=-32000")

        # Enable memory-mapped I/O for better performance
        cursor.execute("PRAGMA mmap_size=268435456")  # 256MB

        # Set synchronous mode to NORMAL for better write performance
        cursor.execute("PRAGMA synchronous=NORMAL")

        # Enable foreign key support
        cursor.execute("PRAGMA foreign_keys=ON")

        cursor.close()


class HistoryEntry(Base):
    __tablename__ = "history"

    id = Column(Integer, primary_key=True)
    url = Column(String, index=True)            # Add index for URL lookups
    title = Column(String)
    visit_time = Column(DateTime, index=True)   # Add index for time-based queries
    domain = Column(String, index=True)         # Add index for domain filtering
    markdown_content = Column(Text, nullable=True)
    last_content_update = Column(DateTime, nullable=True)

    __table_args__ = (
        # Composite index for common query patterns
        {'sqlite_with_rowid': True}  # Ensure we have rowids for better performance
    )


class Bookmark(Base):
    __tablename__ = "bookmarks"

    id = Column(Integer, primary_key=True)
    url = Column(String, index=True)
    title = Column(String, nullable=True)
    added_time = Column(DateTime, index=True)
    folder = Column(String, index=True)
    domain = Column(String, index=True)

    __table_args__ = (
        # Composite index for common query patterns
        {'sqlite_with_rowid': True}  # Ensure we have rowids for better performance
    )


# Create tables
Base.metadata.create_all(bind=engine)


# Initialize FTS tables for full-text search
def init_fts():
    """Initialize Full Text Search tables"""
    conn = engine.raw_connection()
    cursor = conn.cursor()

    # Create FTS table with content and title columns
    cursor.execute("""
        CREATE VIRTUAL TABLE IF NOT EXISTS history_fts USING fts5(
            title,
            markdown_content,
            domain,               -- Add domain for filtering
            visit_time UNINDEXED, -- Add visit_time but don't index it
            content='history',
            content_rowid='id',
            tokenize='trigram'
        )
    """)

    # Update triggers to include domain and visit_time
    cursor.execute("""
        CREATE TRIGGER IF NOT EXISTS history_ai AFTER INSERT ON history BEGIN
            INSERT INTO history_fts(rowid, title, markdown_content, domain, visit_time)
            VALUES (new.id, new.title, new.markdown_content, new.domain, new.visit_time);
        END;
    """)

    cursor.execute("""
        CREATE TRIGGER IF NOT EXISTS history_ad AFTER DELETE ON history BEGIN
            INSERT INTO history_fts(history_fts, rowid, title, markdown_content, domain, visit_time)
            VALUES('delete', old.id, old.title, old.markdown_content, old.domain, old.visit_time);
        END;
    """)

    cursor.execute("""
        CREATE TRIGGER IF NOT EXISTS history_au AFTER UPDATE ON history BEGIN
            INSERT INTO history_fts(history_fts, rowid, title, markdown_content, domain, visit_time)
            VALUES('delete', old.id, old.title, old.markdown_content, old.domain, old.visit_time);
            INSERT INTO history_fts(rowid, title, markdown_content, domain, visit_time)
            VALUES (new.id, new.title, new.markdown_content, new.domain, new.visit_time);
        END;
    """)

    conn.commit()
    cursor.close()
    conn.close()


# Initialize FTS tables
init_fts()


def reindex_fts():
    """Reindex the FTS tables"""
    conn = engine.raw_connection()
    cursor = conn.cursor()
    cursor.execute("INSERT INTO history_fts(history_fts) VALUES('rebuild')")
    conn.commit()
    cursor.close()
    conn.close()


def get_db():
    """Get database session"""
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()


def get_last_processed_timestamp(source):
    """
    Get last processed timestamp for a source (e.g., 'chrome_history', 'chrome_bookmarks')
    """
    db = next(get_db())
    try:
        result = db.execute(
            text('SELECT last_timestamp FROM last_processed WHERE source = :source'),
            {'source': source}
        ).fetchone()
        return result[0] if result else 0
    finally:
        db.close()


def update_last_processed_timestamp(source, timestamp):
    """
    Update last processed timestamp for a source
    """
    db = next(get_db())
    try:
        db.execute(
            text('''
                INSERT OR REPLACE INTO last_processed (source, last_timestamp)
                VALUES (:source, :timestamp)
            '''),
            {'source': source, 'timestamp': timestamp}
        )
        db.commit()
    finally:
        db.close()


def create_tables():
    db = next(get_db())
    try:
        db.execute(
            text('''
                CREATE TABLE IF NOT EXISTS last_processed (
                    source TEXT PRIMARY KEY,
                    last_timestamp INTEGER
                )
            ''')
        )
        db.commit()
    finally:
        db.close()


def search_history(query, domain=None, start_date=None, end_date=None, db=None):
    """
    Search history using FTS5 with proper ranking
    """
    if db is None:
        db = next(get_db())

    try:
        # Build the FTS query
        fts_query = f'"{query}"'  # Exact phrase
        if domain:
            fts_query += f' AND domain:"{domain}"'

        # Build date filter conditions
        date_conditions = []
        params = {'query': query}

        if start_date:
            date_conditions.append("visit_time >= :start_date")
            params['start_date'] = start_date
        if end_date:
            date_conditions.append("visit_time <= :end_date")
            params['end_date'] = end_date

        date_filter = f"AND {' AND '.join(date_conditions)}" if date_conditions else ""

        # Execute the search query
        sql_query = f"""
            SELECT
                h.*,
                bm25(history_fts) as rank,
                highlight(history_fts, 0, '<mark>', '</mark>') as title_highlight,
                highlight(history_fts, 1, '<mark>', '</mark>') as content_highlight
            FROM history_fts
            JOIN history h ON history_fts.rowid = h.id
            WHERE history_fts MATCH :query
            {date_filter}
            ORDER BY rank, visit_time DESC
            LIMIT 100
        """

        results = db.execute(text(sql_query), params).fetchall()
        return results

    except Exception as e:
        print(f"Search error: {e}")
        return []


def recreate_fts_tables():
    """Drop and recreate the FTS tables"""
    conn = engine.raw_connection()
    cursor = conn.cursor()
    try:
        # Drop existing FTS table and triggers
        cursor.execute("DROP TRIGGER IF EXISTS history_ai")
        cursor.execute("DROP TRIGGER IF EXISTS history_ad")
        cursor.execute("DROP TRIGGER IF EXISTS history_au")
        cursor.execute("DROP TABLE IF EXISTS history_fts")

        # Recreate FTS tables and triggers
        init_fts()

        # Reindex all existing content
        cursor.execute("""
            INSERT INTO history_fts(rowid, title, markdown_content, domain, visit_time)
            SELECT id, title, markdown_content, domain, visit_time FROM history
        """)

        conn.commit()
        print("Successfully recreated FTS tables and reindexed content")

    except Exception as e:
        conn.rollback()
        print(f"Error recreating FTS tables: {e}")
    finally:
        cursor.close()
        conn.close()
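A short usage sketch of the FTS helpers above; the import path is an assumption, and it presumes the SQLite file already contains indexed history rows.

    from app.database import search_history, reindex_fts  # hypothetical import path

    # Rebuild the FTS index after bulk changes, then run a ranked phrase search
    reindex_fts()
    rows = search_history("full text search", domain="sqlite.org")
    for row in rows:
        # Each row carries the history columns plus the rank and highlight fields
        print(row.url, row.rank)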
@@ -1,52 +0,0 @@
import logging
import logging.handlers
import os
from datetime import datetime
from pathlib import Path

# Create logs directory if it doesn't exist
LOGS_DIR = Path("logs")
LOGS_DIR.mkdir(exist_ok=True)

# Create formatters
CONSOLE_FORMAT = '%(levelname)s: %(message)s'
FILE_FORMAT = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'


def setup_logger(name: str) -> logging.Logger:
    """
    Set up a logger with both file and console handlers

    Args:
        name: The name of the logger (usually __name__)

    Returns:
        logging.Logger: Configured logger instance
    """
    logger = logging.getLogger(name)
    logger.setLevel(logging.INFO)

    # Prevent adding handlers multiple times
    if logger.handlers:
        return logger

    # Console handler
    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.WARNING)
    console_handler.setFormatter(logging.Formatter(CONSOLE_FORMAT))

    # File handler
    log_file = LOGS_DIR / f"{datetime.now().strftime('%Y-%m')}.log"
    file_handler = logging.handlers.RotatingFileHandler(
        log_file,
        maxBytes=10*1024*1024,  # 10MB
        backupCount=5,
        encoding='utf-8'
    )
    file_handler.setLevel(logging.INFO)
    file_handler.setFormatter(logging.Formatter(FILE_FORMAT))

    # Add handlers
    logger.addHandler(console_handler)
    logger.addHandler(file_handler)

    return logger
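Typical usage of setup_logger, as the routers below do; only the import path is assumed.

    from app.logging_config import setup_logger  # hypothetical import path

    logger = setup_logger(__name__)
    logger.info("written to logs/<YYYY-MM>.log")      # file handler, INFO and above
    logger.warning("also echoed to the console")      # console handler, WARNING and above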
app/main.py (293 lines removed)
@@ -1,293 +0,0 @@
from fastapi import FastAPI, Depends
from sqlalchemy.orm import Session
from datetime import datetime, timezone
from typing import Optional
import asyncio
from urllib.parse import urlparse
from fastapi.middleware.cors import CORSMiddleware
from fastapi.staticfiles import StaticFiles
import browser_history
from crawl4ai import AsyncWebCrawler

# Local imports
from .logging_config import setup_logger
from .database import (
    get_db,
    HistoryEntry,
    get_last_processed_timestamp,
    update_last_processed_timestamp,
    create_tables,
    engine,
    # recreate_fts_tables  # Keep if needed, but often done manually or via migration tool
)
from .config import Config

# Import Routers
from .routers import history, bookmarks, config as api_config, websocket, ui

logger = setup_logger(__name__)

# --- Global Variables ---
# These are accessed by other modules (like websocket router)
# Consider using app state or dependency injection for cleaner management if complexity grows
config_manager = Config()  # Renamed to avoid conflict with router import
crawler: Optional[AsyncWebCrawler] = None

# Import scheduler *after* crawler is defined
from .scheduler import HistoryScheduler
scheduler: Optional[HistoryScheduler] = None  # Now initialize scheduler variable

# --- FastAPI App Initialization ---
app = FastAPI(title="Browser History Search API")

# Add CORS middleware
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # Adjust in production
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Mount static files and templates
app.mount("/static", StaticFiles(directory="app/static"), name="static")
# Note: Templates are used within the ui router now, no need for global instance here unless used elsewhere


# --- Helper Function (Initial Sync) ---
def process_browser_history():
    """Fetches and stores new history entries from browser_history library (Initial Sync)."""
    try:
        logger.info("Starting browser history processing (initial sync)")
        outputs = browser_history.get_history()
        # browser_history returns platform specific History object, get histories list
        history_list = []
        if hasattr(outputs, 'histories') and outputs.histories:
            history_list = outputs.histories  # List of (datetime, url, title)
        else:
            logger.warning("Could not retrieve histories list from browser_history output.")
            return  # Exit if no history list found

        logger.info(f"Found {len(history_list)} total history items from browser_history library")

        current_timestamp_dt = datetime.now(timezone.utc)
        current_timestamp = int(current_timestamp_dt.timestamp())  # Use timezone-aware timestamp
        source_key = "browser_history_sync"  # Differentiate from scheduler source
        last_timestamp = get_last_processed_timestamp(source_key) or 0  # Ensure it's 0 if None

        logger.info(f"Last processed timestamp for initial sync '{source_key}': {last_timestamp}")

        new_entries = []
        processed_urls_times = set()  # Avoid duplicates within the batch

        for entry in history_list:
            # Basic validation of entry structure
            if not isinstance(entry, (tuple, list)) or len(entry) < 2:
                logger.warning(f"Skipping malformed history entry: {entry}")
                continue
            timestamp, url = entry[0], entry[1]
            title = entry[2] if len(entry) > 2 else ""  # Handle optional title

            if not url or not timestamp:
                logger.warning(f"Skipping entry with missing URL or timestamp: Title='{title}'")
                continue

            # Ensure timestamp is datetime object
            if not isinstance(timestamp, datetime):
                logger.warning(f"Skipping entry with non-datetime timestamp ({type(timestamp)}): {url}")
                continue

            # Normalize timestamp (Assume local if naive, convert to UTC)
            if timestamp.tzinfo is None or timestamp.tzinfo.utcoffset(timestamp) is None:
                try:
                    timestamp_aware = timestamp.astimezone()  # Make aware using system local
                except Exception as tz_err:
                    logger.warning(f"Could not determine local timezone for naive timestamp {timestamp}. Assuming UTC. Error: {tz_err}")
                    timestamp_aware = timestamp.replace(tzinfo=timezone.utc)  # Fallback to UTC
            else:
                timestamp_aware = timestamp
            timestamp_utc = timestamp_aware.astimezone(timezone.utc)

            # Filter for only new entries based on normalized UTC timestamp
            if timestamp_utc.timestamp() > last_timestamp:
                entry_key = (url, timestamp_utc.timestamp())
                if entry_key in processed_urls_times:
                    continue  # Skip duplicate within this batch

                new_entries.append((timestamp_utc, url, title))
                processed_urls_times.add(entry_key)

        logger.info(f"Found {len(new_entries)} new entries for initial sync after filtering")

        if new_entries:
            added_count = 0
            skipped_ignored = 0
            # Use context manager for session
            with next(get_db()) as db:
                try:
                    for timestamp_utc, url, title in new_entries:
                        domain = urlparse(url).netloc
                        if config_manager.is_domain_ignored(domain):
                            # logger.debug(f"Skipping ignored domain during initial sync: {domain}")
                            skipped_ignored += 1
                            continue

                        # Optional: Check if entry already exists more robustly
                        # existing = db.query(HistoryEntry.id).filter(HistoryEntry.url == url, HistoryEntry.visit_time == timestamp_utc).first()
                        # if existing:
                        #     continue

                        history_entry = HistoryEntry(
                            url=url,
                            title=title or "",  # Ensure title is not None
                            visit_time=timestamp_utc,
                            domain=domain
                            # Note: No markdown content here, only basic history
                        )
                        db.add(history_entry)
                        added_count += 1

                    if added_count > 0:
                        db.commit()
                        logger.info(f"Committed {added_count} new history entries from initial sync.")
                        # Update the last processed timestamp only if successful commit
                        update_last_processed_timestamp(source_key, current_timestamp)
                        logger.info(f"Updated initial sync timestamp for '{source_key}' to {current_timestamp}")
                    else:
                        logger.info("No new unique entries to commit during initial sync.")
                        # Update timestamp even if nothing new added, to mark sync time
                        update_last_processed_timestamp(source_key, current_timestamp)
                        logger.info(f"Updated initial sync timestamp check for '{source_key}' to {current_timestamp}")

                    if skipped_ignored > 0:
                        logger.info(f"Skipped {skipped_ignored} entries due to ignored domains during initial sync.")

                except Exception as e:
                    logger.error(f"Error storing history item during initial sync: {str(e)}", exc_info=True)
                    db.rollback()
        else:
            logger.info("No new history entries found during initial sync.")
            # Update timestamp even if nothing new found, to mark sync time
            update_last_processed_timestamp(source_key, current_timestamp)
            logger.info(f"Updated initial sync timestamp check for '{source_key}' to {current_timestamp}")

    except ImportError:
        logger.warning("`browser_history` library not found or import failed. Skipping initial sync.")
    except Exception as e:
        logger.error(f"Error processing browser history during initial sync: {str(e)}", exc_info=True)


# --- Startup and Shutdown Events ---
@app.on_event("startup")
async def startup_event():
    global crawler, scheduler  # Allow modification of globals
    logger.info("Starting application initialization...")

    try:
        # 1. Ensure base tables exist
        logger.info("Ensuring base tables exist...")
        create_tables()

        # 2. Initialize the crawler
        logger.info("Initializing AsyncWebCrawler...")
        if crawler is None:
            crawler = AsyncWebCrawler()
            logger.info("AsyncWebCrawler initialized.")

        # 3. Initialize the Scheduler *after* the crawler
        logger.info("Initializing HistoryScheduler...")
        if scheduler is None:
            scheduler = HistoryScheduler(crawler=crawler)  # Pass crawler instance
            logger.info("HistoryScheduler initialized.")

        # 4. Perform initial history sync from browser_history library
        logger.info("Performing initial browser history sync...")
        process_browser_history()  # Sync history not processed before

        # 5. Perform initial bookmark sync (using scheduler's method)
        # Run in background to avoid blocking startup if it takes long
        logger.info("Starting initial bookmark sync task...")
        asyncio.create_task(scheduler.update_bookmarks())

        # 6. Start background tasks (scheduler for ongoing updates)
        logger.info("Starting background history update task...")
        asyncio.create_task(scheduler.update_history())

        # --- Markdown Update Tasks ---
        # 7a. Trigger ONE initial batch processing run in the background
        logger.info("Starting initial markdown processing batch task...")
        asyncio.create_task(scheduler._process_markdown_batch())  # Run one batch now

        # 7b. Start the PERIODIC background markdown update task
        logger.info("Starting periodic background markdown update task...")
        # Use the renamed method for the loop
        asyncio.create_task(scheduler.update_missing_markdown_periodically())
        # --- End Markdown Update Tasks ---

        logger.info("Application startup sequence initiated. Background tasks running.")

    except Exception as e:
        logger.error(f"FATAL ERROR during application startup: {str(e)}", exc_info=True)
        raise RuntimeError(f"Application startup failed: {e}") from e


@app.on_event("shutdown")
async def shutdown_event():
    global crawler, scheduler
    logger.info("Starting application shutdown...")

    # Stop scheduler tasks gracefully if possible (implement cancellation in tasks if needed)
    # For now, we just close resources

    # Close scheduler resources
    if scheduler and hasattr(scheduler, 'close'):
        try:
            logger.info("Closing scheduler resources...")
            await scheduler.close()  # Call the scheduler's close method
        except Exception as e:
            logger.error(f"Error closing scheduler: {e}", exc_info=True)

    # Close crawler if needed (check crawl4ai docs for explicit close method)
    # Based on previous code, seems no explicit close needed, but keep check just in case
    if crawler and hasattr(crawler, 'aclose'):
        try:
            logger.info("Closing AsyncWebCrawler...")
            # await crawler.aclose()  # Example if an async close exists
        except Exception as e:
            logger.error(f"Error closing crawler: {e}", exc_info=True)

    # Close database engine connections if necessary (usually handled automatically by SQLAlchemy)
    # if engine and hasattr(engine, 'dispose'):  # Check if using async engine that needs dispose
    #     await engine.dispose()

    logger.info("Application shutdown complete.")


# --- Include Routers ---
app.include_router(history.router)
app.include_router(bookmarks.router)
app.include_router(api_config.router)
app.include_router(websocket.router)
app.include_router(ui.router)


# Optional: Add a root endpoint for health check or basic info
@app.get("/health", tags=["service"])
async def health_check():
    # Extended health check could verify DB connection or task status
    db_ok = False
    try:
        with next(get_db()) as db:
            db.execute("SELECT 1")
            db_ok = True
    except Exception:
        db_ok = False

    return {
        "status": "ok",
        "database_connection": "ok" if db_ok else "error",
        # Add other checks as needed
    }
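A hedged sketch of how this application would typically be served; uvicorn is an assumption, the repository may use a different entry point.

    # Hypothetical launcher; run it from the repository root so relative paths like
    # "app/static", "app/templates" and "sqlite:///./browser_history.db" resolve as
    # app/main.py above expects.
    import uvicorn

    if __name__ == "__main__":
        uvicorn.run("app.main:app", host="127.0.0.1", port=8000)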
@@ -1,8 +0,0 @@
from dataclasses import dataclass
from datetime import datetime


@dataclass
class PageInfo:
    url: str
    html: str
    timestamp: datetime
@@ -1,47 +0,0 @@
from fastapi import APIRouter, Depends, Query, HTTPException
from sqlalchemy.orm import Session
from typing import List, Optional

from ..database import get_db, Bookmark
from ..utils import serialize_bookmark
from ..logging_config import setup_logger

logger = setup_logger(__name__)
router = APIRouter(prefix="/bookmarks", tags=["bookmarks"])


@router.get("/search")
async def search_bookmarks(
    domain: Optional[str] = Query(None),
    folder: Optional[str] = Query(None),
    search_term: Optional[str] = Query(None),
    db: Session = Depends(get_db)
):
    """Search bookmarks with optimized queries"""
    try:
        # Build query efficiently
        query = db.query(Bookmark)

        # Apply filters using index-optimized queries
        if domain:
            query = query.filter(Bookmark.domain == domain)

        if folder:
            query = query.filter(Bookmark.folder == folder)

        if search_term:
            # Use LIKE for title search (consider FTS for bookmarks if needed)
            search_pattern = f"%{search_term}%"
            query = query.filter(Bookmark.title.ilike(search_pattern))
            # Removed index hint as SQLAlchemy/SQLite usually handles this well with LIKE

        # Add ordering and limit for better performance
        bookmarks = query.order_by(Bookmark.added_time.desc()).limit(1000).all()

        return [serialize_bookmark(bookmark) for bookmark in bookmarks]

    except Exception as e:
        logger.error(f"Bookmark search error: {e}", exc_info=True)
        raise HTTPException(
            status_code=500,
            detail={"message": "Bookmark search operation failed", "error": str(e)}
        )
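An illustrative client call for the endpoint above; the parameter names come from the signature, while the HTTP client, host and port are assumptions.

    import httpx  # assumption: any HTTP client would do

    resp = httpx.get(
        "http://127.0.0.1:8000/bookmarks/search",
        params={"domain": "github.com", "search_term": "fastapi"},
    )
    resp.raise_for_status()
    for bookmark in resp.json():
        print(bookmark)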
@@ -1,43 +0,0 @@
from fastapi import APIRouter, Depends, HTTPException
from typing import List

from ..config import Config
from ..logging_config import setup_logger

logger = setup_logger(__name__)
router = APIRouter(prefix="/config", tags=["config"])

# Assuming config is a singleton or easily accessible
# If not, you might need to use Depends or app state
config = Config()


@router.get("/ignored-domains")
async def get_ignored_domains():
    """Get list of ignored domain patterns"""
    try:
        return {"ignored_domains": config.config.get('ignored_domains', [])}
    except Exception as e:
        logger.error(f"Error getting ignored domains: {e}", exc_info=True)
        raise HTTPException(status_code=500, detail="Failed to retrieve ignored domains")


@router.post("/ignored-domains")
async def add_ignored_domain(pattern: str):
    """Add a new domain pattern to ignored list"""
    try:
        config.add_ignored_domain(pattern)
        return {"status": "success", "message": f"Added pattern: {pattern}"}
    except Exception as e:
        logger.error(f"Error adding ignored domain '{pattern}': {e}", exc_info=True)
        raise HTTPException(status_code=500, detail="Failed to add ignored domain")


@router.delete("/ignored-domains/{pattern}")
async def remove_ignored_domain(pattern: str):
    """Remove a domain pattern from ignored list"""
    try:
        config.remove_ignored_domain(pattern)
        return {"status": "success", "message": f"Removed pattern: {pattern}"}
    except Exception as e:
        logger.error(f"Error removing ignored domain '{pattern}': {e}", exc_info=True)
        raise HTTPException(status_code=500, detail="Failed to remove ignored domain")
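A matching sketch for the ignore-list endpoints, again assuming a local server on port 8000 and httpx as the client; the example pattern is made up.

    import httpx

    base = "http://127.0.0.1:8000/config"
    print(httpx.get(f"{base}/ignored-domains").json())
    # `pattern` is a plain str parameter, so FastAPI reads it from the query string here
    httpx.post(f"{base}/ignored-domains", params={"pattern": "*.ads.example"})
    httpx.delete(f"{base}/ignored-domains/*.ads.example")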
@@ -1,132 +0,0 @@
from fastapi import APIRouter, Depends, Query, HTTPException
from sqlalchemy.orm import Session
from sqlalchemy import text
from typing import List, Optional

from ..database import get_db, HistoryEntry
from ..utils import serialize_history_entry
from ..logging_config import setup_logger

logger = setup_logger(__name__)
router = APIRouter(prefix="/history", tags=["history"])


@router.get("/search")
async def search_history(
    query: Optional[str] = Query(None),
    domain: Optional[str] = Query(None),
    start_date: Optional[str] = Query(None),
    end_date: Optional[str] = Query(None),
    include_content: bool = Query(False),
    db: Session = Depends(get_db)
):
    """Search history using FTS5"""
    try:
        if query:
            # Build the FTS query
            # Basic query sanitization/escaping might be needed depending on FTS syntax usage
            # For simple term search, this is okay. For complex FTS syntax, more care is needed.
            fts_conditions = []
            params = {}

            # Handle different query parts (title, content, domain)
            # Example: "term1 title:term2 domain:example.com"
            # This requires more sophisticated parsing. For now, assume simple query applies to title/content.
            # A safer approach for user input:
            sanitized_query = query.replace('"', '""')  # Basic FTS escaping for quotes
            fts_match_expr = f'(title : "{sanitized_query}"* OR markdown_content : "{sanitized_query}"*)'
            params['fts_query'] = fts_match_expr

            if domain:
                # Add domain filtering directly in FTS if possible and indexed
                # Assuming 'domain' is an indexed column in FTS table
                # params['fts_query'] += f' AND domain : "{domain}"'  # Adjust FTS syntax if needed
                # Or filter after FTS search if domain isn't in FTS index efficiently
                pass  # Domain filtering will be added later if needed

            # Build the SQL query
            sql = """
                SELECT
                    h.*,
                    bm25(history_fts) as rank,
                    highlight(history_fts, 0, '<mark>', '</mark>') as title_highlight,
                    highlight(history_fts, 1, '<mark>', '</mark>') as content_highlight
                FROM history_fts
                JOIN history h ON history_fts.rowid = h.id
                WHERE history_fts MATCH :fts_query
            """

            # Add domain filter as a regular WHERE clause if not in FTS MATCH
            if domain:
                sql += " AND h.domain = :domain"
                params['domain'] = domain

            # Add date filters if provided
            if start_date:
                sql += " AND h.visit_time >= :start_date"
                params['start_date'] = start_date
            if end_date:
                sql += " AND h.visit_time <= :end_date"
                params['end_date'] = end_date

            sql += " ORDER BY rank DESC, h.visit_time DESC LIMIT 100"  # Rank usually descends

            results = db.execute(text(sql), params).fetchall()
            # Use the updated serializer that handles potential highlight/rank fields
            return [serialize_history_entry(row, include_content) for row in results]

        else:
            # Handle non-search queries (basic filtering)
            query_builder = db.query(HistoryEntry)

            if domain:
                query_builder = query_builder.filter(HistoryEntry.domain == domain)
            if start_date:
                query_builder = query_builder.filter(HistoryEntry.visit_time >= start_date)
            if end_date:
                query_builder = query_builder.filter(HistoryEntry.visit_time <= end_date)

            entries = query_builder.order_by(HistoryEntry.visit_time.desc()).limit(100).all()
            return [serialize_history_entry(entry, include_content) for entry in entries]

    except Exception as e:
        logger.error(f"Search error: {str(e)}", exc_info=True)
        raise HTTPException(
            status_code=500,
            detail={"message": "Search operation failed", "error": str(e)}
        )


@router.get("/search/advanced")
async def advanced_history_search(
    query: str = Query(..., description="Full-text search query with SQLite FTS5 syntax"),
    include_content: bool = Query(False),
    db: Session = Depends(get_db)
):
    """Advanced full-text search using SQLite FTS5 features"""
    try:
        # Use raw SQL for advanced FTS query
        # Add rank and highlights here as well
        fts_query = """
            SELECT
                h.*,
                bm25(history_fts) as rank,
                highlight(history_fts, 0, '<mark>', '</mark>') as title_highlight,
                highlight(history_fts, 1, '<mark>', '</mark>') as content_highlight
            FROM history_fts
            JOIN history h ON history_fts.rowid = h.id
            WHERE history_fts MATCH :query
            ORDER BY rank DESC, h.visit_time DESC
            LIMIT 1000
        """

        results = db.execute(text(fts_query), {'query': query}).fetchall()

        # Use the updated serializer
        return [serialize_history_entry(row, include_content) for row in results]

    except Exception as e:
        logger.error(f"Advanced search error: {e}", exc_info=True)
        raise HTTPException(
            status_code=500,
            detail={"message": "Advanced search operation failed", "error": str(e)}
        )
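For the advanced endpoint, the query string is handed straight to FTS5's MATCH, so FTS5 syntax (column filters, AND/OR) applies. An illustrative call, with the host, port and example query all assumed:

    import httpx

    resp = httpx.get(
        "http://127.0.0.1:8000/history/search/advanced",
        params={"query": 'python AND (title : "asyncio" OR markdown_content : "event loop")'},
    )
    for hit in resp.json():
        print(hit)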
@@ -1,52 +0,0 @@
from fastapi import APIRouter, Depends, Request
from fastapi.templating import Jinja2Templates
from sqlalchemy.orm import Session

from ..database import get_db, HistoryEntry, Bookmark
from ..logging_config import setup_logger

logger = setup_logger(__name__)
router = APIRouter(tags=["ui"])
templates = Jinja2Templates(directory="app/templates")


@router.get("/")
async def home(request: Request, db: Session = Depends(get_db)):
    try:
        # Get recent history entries
        entries = db.query(HistoryEntry)\
            .order_by(HistoryEntry.visit_time.desc())\
            .limit(50)\
            .all()
        return templates.TemplateResponse(
            "index.html",
            {"request": request, "entries": entries}
        )
    except Exception as e:
        logger.error(f"Error loading home page: {e}", exc_info=True)
        # Optionally return an error template
        return templates.TemplateResponse("error.html", {"request": request, "detail": "Could not load history"})


@router.get("/search")
async def search_page(request: Request):
    return templates.TemplateResponse(
        "search.html",
        {"request": request}
    )


@router.get("/bookmarks")
async def bookmarks_page(request: Request, db: Session = Depends(get_db)):
    try:
        bookmarks = db.query(Bookmark)\
            .order_by(Bookmark.added_time.desc())\
            .limit(50)\
            .all()
        return templates.TemplateResponse(
            "bookmarks.html",
            {"request": request, "bookmarks": bookmarks}
        )
    except Exception as e:
        logger.error(f"Error loading bookmarks page: {e}", exc_info=True)
        # Optionally return an error template
        return templates.TemplateResponse("error.html", {"request": request, "detail": "Could not load bookmarks"})
@@ -1,175 +0,0 @@
import asyncio
from fastapi import APIRouter, WebSocket, WebSocketDisconnect, Depends, HTTPException
from sqlalchemy.orm import Session
from datetime import datetime, timezone, timedelta
from urllib.parse import urlparse
import iso8601

# Import necessary components from other modules
from .. import main as app_main  # To access global crawler instance
from ..database import get_db, HistoryEntry
from ..config import Config
from ..logging_config import setup_logger

logger = setup_logger(__name__)
router = APIRouter(tags=["websocket"])
config = Config()  # Assuming config is okay as a separate instance here


@router.websocket("/ws")
async def websocket_endpoint(websocket: WebSocket, db: Session = Depends(get_db)):
    # Access the global crawler instance from main.py
    crawler = app_main.crawler
    if not crawler:
        logger.error("Crawler not initialized!")
        await websocket.close(code=1011)  # Internal Server Error
        return

    logger.info("New WebSocket connection established")
    await websocket.accept()
    try:
        while True:
            data = await websocket.receive_json()

            # Validate incoming data structure (basic check)
            if 'url' not in data or 'timestamp' not in data:
                logger.warning("Received invalid WebSocket message format.")
                await websocket.send_json({
                    "status": "error",
                    "message": "Invalid message format. 'url' and 'timestamp' required."
                })
                continue

            url = data['url']
            try:
                timestamp = iso8601.parse_date(data['timestamp'])
            except iso8601.ParseError:
                logger.warning(f"Received invalid timestamp format: {data['timestamp']}")
                await websocket.send_json({
                    "status": "error",
                    "message": f"Invalid timestamp format: {data['timestamp']}"
                })
                continue

            # Parse the URL and check if domain should be ignored
            try:
                domain = urlparse(url).netloc
                if not domain:  # Handle invalid URLs
                    raise ValueError("Could not parse domain from URL")
            except ValueError as e:
                logger.warning(f"Could not parse URL: {url}. Error: {e}")
                await websocket.send_json({"status": "error", "message": f"Invalid URL: {url}"})
                continue

            if config.is_domain_ignored(domain):
                logger.info(f"Ignoring domain: {domain} for URL: {url}")
                await websocket.send_json({
                    "status": "ignored",
                    "message": f"Domain {domain} is in ignore list"
                })
                continue

            logger.info(f"Processing page via WebSocket: {url}")

            # Check if we already have a recent entry for this URL
            # Make timestamp timezone-aware (assuming UTC if naive)
            if timestamp.tzinfo is None:
                timestamp = timestamp.replace(tzinfo=timezone.utc)
            else:
                timestamp = timestamp.astimezone(timezone.utc)

            recent_threshold = timestamp - timedelta(minutes=5)
            existing_entry = db.query(HistoryEntry.id).filter(
                HistoryEntry.url == url,
                HistoryEntry.visit_time >= recent_threshold
            ).first()  # Only fetch ID for efficiency

            if existing_entry:
                logger.info(f"Recent entry exists for URL: {url}")
                await websocket.send_json({
                    "status": "skipped",
                    "message": "Recent entry exists"
                })
                continue

            # --- Start crawl4ai processing ---
            logger.info(f"Processing page with crawl4ai: {url}")
            markdown_content = None
            title = ''
            try:
                # Use the global crawler instance
                crawl_result = await crawler.arun(url=url)
                if crawl_result:
                    markdown_content = crawl_result.markdown
                    # Attempt to get title from metadata, fallback to empty string
                    title = getattr(crawl_result.metadata, 'title', '') or ''  # Ensure title is string
                    if not title:
                        logger.warning(f"Could not extract title for {url} using crawl4ai.")
                    logger.info(f"crawl4ai processing complete. Markdown length: {len(markdown_content) if markdown_content else 0}, Title: '{title}'")
                else:
                    logger.warning(f"crawl4ai returned None for URL: {url}")
                    markdown_content = ""  # Ensure it's not None
                    title = ""

            except Exception as crawl_error:
                logger.error(f"crawl4ai failed for URL {url}: {crawl_error}", exc_info=True)
                await websocket.send_json({
                    "status": "error",
                    "message": f"Failed to crawl page content: {str(crawl_error)}"
                })
                continue  # Skip to next message
            # --- End crawl4ai processing ---

            # Only proceed if we got some content or at least a title
            if not title and not markdown_content:
                logger.info(f"No title or content extracted by crawl4ai from: {url}")
                await websocket.send_json({
                    "status": "skipped",
                    "message": "No title or content extracted by crawl4ai"
                })
                continue

            # Create history entry using data from crawl4ai
            history_entry = HistoryEntry(
                url=url,
                title=title,  # Use title from crawl4ai
                visit_time=timestamp,  # Use the parsed, timezone-aware timestamp
                domain=domain,
                markdown_content=markdown_content,  # Use markdown from crawl4ai
                last_content_update=datetime.now(timezone.utc)
            )

            logger.debug(f"Attempting to save entry for {url} with markdown length: {len(markdown_content) if markdown_content else 0}")

            db.add(history_entry)
            try:
                db.commit()
                logger.info(f"Successfully saved entry for: {url}")
                await websocket.send_json({
                    "status": "success",
                    "message": f"Processed page: {url}"
                })
            except Exception as e:
                db.rollback()
                logger.error(f"Error saving entry for {url}: {e}", exc_info=True)
                await websocket.send_json({
                    "status": "error",
                    "message": "Database error occurred while saving."
                })

    except WebSocketDisconnect:
        logger.info("WebSocket client disconnected")
    except Exception as e:
        logger.error(f"Unhandled error in WebSocket handler: {e}", exc_info=True)
        # Attempt to inform client before closing (might fail if connection is already broken)
        try:
            await websocket.send_json({
                "status": "error",
                "message": "An internal server error occurred."
            })
        except Exception:
            pass  # Ignore if sending fails
        # Ensure connection is closed on server error
        try:
            await websocket.close(code=1011)  # Internal Server Error
        except Exception:
            pass  # Ignore if closing fails
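A minimal client sketch for this endpoint; the websockets package, the local address and the example payload values are assumptions.

    import asyncio, json
    import websockets  # assumption: any WebSocket client would work

    async def report_visit():
        async with websockets.connect("ws://127.0.0.1:8000/ws") as ws:
            await ws.send(json.dumps({
                "url": "https://example.com/article",
                "timestamp": "2025-01-01T12:00:00Z",  # ISO 8601, parsed by iso8601 above
            }))
            print(await ws.recv())  # e.g. {"status": "success", ...}

    asyncio.run(report_visit())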
app/scheduler.py (386 lines removed; listing truncated here)
@@ -1,386 +0,0 @@
from datetime import datetime, timedelta, timezone
import asyncio
from sqlalchemy import or_, update
from .database import HistoryEntry, Bookmark, get_last_processed_timestamp, update_last_processed_timestamp
from .browser import BrowserHistoryCollector
from .config import Config
from .database import get_db
import urllib.parse
import logging
from crawl4ai import AsyncWebCrawler
from typing import Optional

logger = logging.getLogger(__name__)


class HistoryScheduler:
    def __init__(self, crawler: AsyncWebCrawler):
        self.browser_collector = BrowserHistoryCollector()
        self.last_history_update = None
        self.content_update_interval = timedelta(hours=24)  # Update content daily
        self.config = Config()
        self.db_lock = asyncio.Lock()
        self.crawler = crawler

    def _normalize_datetime(self, dt: datetime) -> Optional[datetime]:
        """Convert datetime to UTC if it has timezone, or make it timezone-aware (UTC) if it doesn't"""
        if dt is None:
            return None

        # If datetime is naive (no timezone), assume it's local and convert to UTC
        if dt.tzinfo is None or dt.tzinfo.utcoffset(dt) is None:
            # Assume local timezone if naive, then convert to UTC
            # This might need adjustment based on where the naive datetime originates
            # If browser_history always returns naive UTC, use: dt.replace(tzinfo=timezone.utc)
            # If browser_history returns naive local time:
            dt = dt.astimezone()  # Make timezone-aware using system's local timezone
            return dt.astimezone(timezone.utc)  # Convert to UTC

        # If datetime already has timezone, convert to UTC
        return dt.astimezone(timezone.utc)

    async def update_bookmarks(self):
        """Update bookmarks from browsers"""
        try:
            # Use timezone-aware current time
            current_timestamp_dt = datetime.now(timezone.utc)
            current_timestamp = int(current_timestamp_dt.timestamp())
            source_key = "browser_bookmarks"
            # Ensure last_timestamp is 0 if None
            last_timestamp = get_last_processed_timestamp(source_key) or 0

            logger.info(f"Fetching bookmarks. Last processed timestamp (UTC epoch): {last_timestamp}")
            bookmarks = self.browser_collector.fetch_bookmarks()
            logger.info(f"Found {len(bookmarks)} total bookmarks")

            new_bookmarks = []
            skipped_ignored = 0
            processed_urls = set()  # Avoid processing duplicate bookmark URLs within the same batch

            for added_time, url, title, folder in bookmarks:
                if not url or url in processed_urls:  # Skip empty or duplicate URLs in this batch
                    continue

                # Normalize timestamp *before* comparison
                normalized_added_time = self._normalize_datetime(added_time)
                if normalized_added_time is None:
                    logger.warning(f"Skipping bookmark with invalid timestamp: {url} - {title}")
                    continue

                # Compare timestamps after normalization
                if normalized_added_time.timestamp() > last_timestamp:
                    domain = urllib.parse.urlparse(url).netloc
                    if self.config.is_domain_ignored(domain):
                        # logger.debug(f"Skipping ignored domain for bookmark: {domain}")
                        skipped_ignored += 1
                        continue

                    new_bookmarks.append((normalized_added_time, url, title, folder, domain))
                    processed_urls.add(url)  # Mark URL as processed for this batch

            logger.info(f"Found {len(new_bookmarks)} new bookmarks to process after filtering.")
            if skipped_ignored > 0:
                logger.info(f"Skipped {skipped_ignored} bookmarks due to ignored domains.")

            if new_bookmarks:
                async with self.db_lock:
                    # Use context manager for session
                    with next(get_db()) as db:
                        added_count = 0
                        try:
                            for norm_added_time, url, title, folder, domain in new_bookmarks:
                                # Optional: Check if bookmark already exists (by URL)
                                # existing = db.query(Bookmark.id).filter(Bookmark.url == url).first()
                                # if existing:
                                #     logger.debug(f"Bookmark already exists: {url}")
                                #     continue

                                bookmark = Bookmark(
                                    url=url,
                                    title=title or "",  # Ensure title is not None
                                    added_time=norm_added_time,
                                    folder=folder or "",  # Ensure folder is not None
                                    domain=domain
                                )
                                db.add(bookmark)
                                added_count += 1

                            if added_count > 0:
                                db.commit()
                                logger.info(f"Successfully committed {added_count} new bookmarks.")
                                # Update timestamp only if new bookmarks were added
                                update_last_processed_timestamp(source_key, current_timestamp)
                                logger.info(f"Updated last processed bookmark timestamp for '{source_key}' to {current_timestamp}")
                            else:
                                logger.info("No new unique bookmarks to add in this batch.")
                                # Optionally update timestamp even if no *new* bookmarks were added,
                                # to signify the check was performed up to 'current_timestamp'.
                                # update_last_processed_timestamp(source_key, current_timestamp)
                                # logger.info(f"Updated last processed bookmark timestamp check for '{source_key}' to {current_timestamp}")

                        except Exception as e:
                            logger.error(f"Error committing bookmarks: {str(e)}", exc_info=True)
                            db.rollback()
            else:
                logger.info("No new bookmarks found since last check.")
                # Update timestamp to indicate the check was performed
                update_last_processed_timestamp(source_key, current_timestamp)
                logger.info(f"Updated last processed bookmark timestamp check for '{source_key}' to {current_timestamp}")

        except Exception as e:
            logger.error(f"Error updating bookmarks: {str(e)}", exc_info=True)

    async def update_history(self):
        """Background task to update history periodically"""
        # Initial sleep to allow startup tasks (like initial sync) to potentially finish first
        await asyncio.sleep(10)
        while True:
            try:
                # Use timezone-aware current time
                current_timestamp_dt = datetime.now(timezone.utc)
                current_timestamp = int(current_timestamp_dt.timestamp())
                source_key = "browser_history_scheduler"  # Use a different key than initial sync
                # Ensure last_timestamp is 0 if None
                last_timestamp = get_last_processed_timestamp(source_key) or 0

                logger.info(f"Scheduler: Fetching history. Last processed timestamp (UTC epoch): {last_timestamp}")
                history_entries = self.browser_collector.fetch_history()
                logger.info(f"Scheduler: Found {len(history_entries)} total history entries from browser.")

                new_entries = []
                skipped_ignored = 0
                processed_urls_times = set()  # Avoid duplicates within the batch (url, timestamp)

                for visit_time, url, title in history_entries:
                    # Basic validation
                    if not url or not visit_time:
                        logger.warning(f"Scheduler: Skipping entry with missing URL or timestamp: {title}")
                        continue

                    # Normalize timestamp *before* comparison
                    normalized_visit_time = self._normalize_datetime(visit_time)
                    if normalized_visit_time is None:
                        logger.warning(f"Scheduler: Skipping history with invalid timestamp: {url} - {title}")
                        continue

                    # Compare timestamps after normalization
                    if normalized_visit_time.timestamp() > last_timestamp:
                        entry_key = (url, normalized_visit_time.timestamp())
                        if entry_key in processed_urls_times:
                            continue  # Skip duplicate within this batch
|
||||
|
||||
domain = urllib.parse.urlparse(url).netloc
|
||||
if self.config.is_domain_ignored(domain):
|
||||
# logger.debug(f"Scheduler: Skipping ignored domain: {domain}")
|
||||
skipped_ignored += 1
|
||||
continue
|
||||
|
||||
new_entries.append((normalized_visit_time, url, title, domain))
|
||||
processed_urls_times.add(entry_key)
|
||||
|
||||
logger.info(f"Scheduler: Found {len(new_entries)} new history entries to process after filtering.")
|
||||
if skipped_ignored > 0:
|
||||
logger.info(f"Scheduler: Skipped {skipped_ignored} history entries due to ignored domains.")
|
||||
|
||||
if new_entries:
|
||||
async with self.db_lock:
|
||||
# Use context manager for session
|
||||
with next(get_db()) as db:
|
||||
added_count = 0
|
||||
try:
|
||||
for norm_visit_time, url, title, domain in new_entries:
|
||||
# Optional: More robust check if entry already exists
|
||||
# existing = db.query(HistoryEntry.id).filter(
|
||||
# HistoryEntry.url == url,
|
||||
# HistoryEntry.visit_time == norm_visit_time
|
||||
# ).first()
|
||||
# if existing:
|
||||
# logger.debug(f"Scheduler: History entry already exists: {url} at {norm_visit_time}")
|
||||
# continue
|
||||
|
||||
history_entry = HistoryEntry(
|
||||
url=url,
|
||||
title=title or "", # Ensure title is not None
|
||||
visit_time=norm_visit_time,
|
||||
domain=domain
|
||||
# markdown_content is initially NULL
|
||||
)
|
||||
db.add(history_entry)
|
||||
added_count += 1
|
||||
|
||||
if added_count > 0:
|
||||
db.commit()
|
||||
logger.info(f"Scheduler: Successfully committed {added_count} new history entries.")
|
||||
# Update timestamp only if new entries were added
|
||||
update_last_processed_timestamp(source_key, current_timestamp)
|
||||
logger.info(f"Scheduler: Updated last processed history timestamp for '{source_key}' to {current_timestamp}")
|
||||
else:
|
||||
logger.info("Scheduler: No new unique history entries to add in this batch.")
|
||||
# Optionally update timestamp even if no *new* entries were added
|
||||
# update_last_processed_timestamp(source_key, current_timestamp)
|
||||
# logger.info(f"Scheduler: Updated last processed history timestamp check for '{source_key}' to {current_timestamp}")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Scheduler: Error committing history: {str(e)}", exc_info=True)
|
||||
db.rollback()
|
||||
else:
|
||||
logger.info("Scheduler: No new history entries found since last check.")
|
||||
# Update timestamp to indicate the check was performed
|
||||
update_last_processed_timestamp(source_key, current_timestamp)
|
||||
logger.info(f"Scheduler: Updated last processed history timestamp check for '{source_key}' to {current_timestamp}")
|
||||
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Scheduler: Error in update_history loop: {str(e)}", exc_info=True)
|
||||
|
||||
# --- Access config value using property ---
|
||||
try:
|
||||
# Use direct attribute access via the @property
|
||||
wait_time = self.config.history_update_interval_seconds
|
||||
except Exception as config_err:
|
||||
logger.error(f"Scheduler (History): Error accessing config for wait time, using default 300s. Error: {config_err}")
|
||||
wait_time = 300
|
||||
# --- End Access ---
|
||||
|
||||
logger.debug(f"Scheduler (History): Sleeping for {wait_time} seconds.")
|
||||
await asyncio.sleep(wait_time) # Use the obtained wait_time
|
||||
|
||||
async def _process_markdown_batch(self):
|
||||
"""Fetches and processes one batch (up to 10) of history entries needing markdown."""
|
||||
entries_to_process = []
|
||||
try:
|
||||
# --- Query for entries (inside DB lock/session) ---
|
||||
async with self.db_lock:
|
||||
with next(get_db()) as db:
|
||||
# Find up to 10 entries where markdown_content is NULL or empty string
|
||||
entries_to_process = db.query(HistoryEntry).filter(
|
||||
or_(HistoryEntry.markdown_content == None, HistoryEntry.markdown_content == '')
|
||||
).order_by(HistoryEntry.visit_time.asc()).limit(10).all()
|
||||
|
||||
if entries_to_process:
|
||||
logger.info(f"Markdown Processor: Found {len(entries_to_process)} entries to process in this batch.")
|
||||
for entry in entries_to_process:
|
||||
db.expunge(entry) # Detach before async operations
|
||||
else:
|
||||
logger.info("Markdown Processor: No history entries found needing markdown update in this batch.")
|
||||
return # Nothing to do in this batch
|
||||
|
||||
|
||||
# --- Crawling and Updating (outside the DB lock/session) ---
|
||||
processed_count = 0
|
||||
skipped_ignored = 0
|
||||
for entry in entries_to_process:
|
||||
markdown_content = None
|
||||
crawl_success = False
|
||||
should_update_db = False
|
||||
|
||||
# --- ADD DOMAIN CHECK ---
|
||||
try:
|
||||
# +++ Add Debugging Lines +++
|
||||
logger.debug(f"Debugging urllib.parse type: {type(urllib.parse)}")
|
||||
logger.debug(f"Is 'urlparse' in urllib.parse? {'urlparse' in dir(urllib.parse)}")
|
||||
# +++ End Debugging Lines +++
|
||||
|
||||
domain = urllib.parse.urlparse(entry.url).netloc
|
||||
if self.config.is_domain_ignored(domain):
|
||||
logger.debug(f"Markdown Processor: Skipping ignored domain: {domain} for URL: {entry.url} (ID={entry.id})")
|
||||
skipped_ignored += 1
|
||||
continue
|
||||
except Exception as parse_err:
|
||||
logger.warning(f"Markdown Processor: Error parsing URL to get domain: {entry.url} (ID={entry.id}). Type={type(parse_err).__name__} Error: {parse_err}. Skipping entry.")
|
||||
continue
|
||||
# --- END DOMAIN CHECK ---
|
||||
|
||||
|
||||
try:
|
||||
logger.info(f"Markdown Processor: Crawling URL: {entry.url} (ID={entry.id})")
|
||||
if not self.crawler:
|
||||
logger.error("Markdown Processor: Crawler not initialized!")
|
||||
break # Stop processing this batch if crawler is missing
|
||||
|
||||
result = await self.crawler.arun(url=entry.url)
|
||||
|
||||
if result and result.markdown:
|
||||
markdown_content = result.markdown
|
||||
crawl_success = True
|
||||
logger.info(f"Markdown Processor: Successfully crawled and got markdown for ID={entry.id}.")
|
||||
else:
|
||||
logger.warning(f"Markdown Processor: Crawling completed but no markdown content found for ID={entry.id}, URL={entry.url}")
|
||||
markdown_content = "" # Mark as processed without content
|
||||
crawl_success = True
|
||||
|
||||
should_update_db = True
|
||||
|
||||
except Exception as crawl_error:
|
||||
logger.error(f"Markdown Processor: Error crawling URL {entry.url} (ID={entry.id}) Type={type(crawl_error).__name__}: {crawl_error}", exc_info=False)
|
||||
should_update_db = False # Don't update DB on crawl error
|
||||
|
||||
# --- Update DB for this specific entry ---
|
||||
if should_update_db:
|
||||
try:
|
||||
async with self.db_lock:
|
||||
with next(get_db()) as db_update:
|
||||
stmt = (
|
||||
update(HistoryEntry)
|
||||
.where(HistoryEntry.id == entry.id)
|
||||
.values(markdown_content=markdown_content)
|
||||
)
|
||||
result_proxy = db_update.execute(stmt)
|
||||
if result_proxy.rowcount > 0:
|
||||
db_update.commit()
|
||||
# Adjust log message based on whether it was skipped or processed
|
||||
if markdown_content == "" and crawl_success: # Marked empty because the crawl returned no content (avoids touching result, which may be None here)
|
||||
logger.info(f"Markdown Processor: Marked entry as processed (no content found) for ID={entry.id}.")
|
||||
elif crawl_success:
|
||||
logger.info(f"Markdown Processor: Successfully updated markdown status for ID={entry.id}.")
|
||||
|
||||
# Only increment processed_count if actual content was added or marked empty after crawl
|
||||
if markdown_content is not None: # Includes actual markdown or empty string marker
|
||||
processed_count += 1
|
||||
else:
|
||||
logger.warning(f"Markdown Processor: Could not find entry ID={entry.id} to update markdown status (rowcount 0).")
|
||||
db_update.rollback()
|
||||
except Exception as db_update_error:
|
||||
logger.error(f"Markdown Processor: Error updating database for ID={entry.id}: {db_update_error}", exc_info=True)
|
||||
|
||||
log_suffix = f"Updated {processed_count}"
|
||||
if skipped_ignored > 0:
|
||||
log_suffix += f", Skipped {skipped_ignored} (ignored domain)"
|
||||
log_suffix += f" out of {len(entries_to_process)} entries in this batch."
|
||||
logger.info(f"Markdown Processor: Finished processing batch. {log_suffix}")
|
||||
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Markdown Processor: Error processing markdown batch: {str(e)}", exc_info=True)
|
||||
|
||||
|
||||
async def update_missing_markdown_periodically(self):
|
||||
"""Periodically triggers the processing of batches of history entries needing markdown."""
|
||||
# Initial slight delay to ensure startup tasks settle
|
||||
await asyncio.sleep(15)
|
||||
logger.info("Starting periodic markdown update task...")
|
||||
while True:
|
||||
await self._process_markdown_batch() # Process one batch
|
||||
|
||||
# Wait before checking for the next batch
|
||||
# --- Access config value using property ---
|
||||
try:
|
||||
# Use direct attribute access via the @property
|
||||
wait_time = self.config.markdown_update_interval_seconds
|
||||
except Exception as config_err:
|
||||
logger.error(f"Periodic Markdown Updater: Error accessing config for wait time, using default 300s. Error: {config_err}")
|
||||
wait_time = 300
|
||||
# --- End Access ---
|
||||
|
||||
logger.debug(f"Periodic Markdown Updater: Sleeping for {wait_time} seconds before next batch.")
|
||||
await asyncio.sleep(wait_time)
|
||||
|
||||
async def close(self):
|
||||
"""Cleanup resources"""
|
||||
logger.info("Closing scheduler resources...")
|
||||
# Add any specific cleanup needed for BrowserHistoryCollector if necessary
|
||||
# The crawler is managed and closed (if needed) in main.py's shutdown
|
||||
pass
|
||||
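For orientation, here is a minimal sketch of how the background coroutines defined above might be started and stopped. The class name HistoryScheduler, the FastAPI lifespan hook, and the module layout are assumptions for illustration; only the coroutine names and the AsyncWebCrawler constructor argument come from the code above.

# Hypothetical wiring sketch -- "HistoryScheduler" and the lifespan hook are assumptions,
# not taken from this diff.
import asyncio
from contextlib import AsyncExitStack, asynccontextmanager

from fastapi import FastAPI
from crawl4ai import AsyncWebCrawler


@asynccontextmanager
async def lifespan(app: FastAPI):
    stack = AsyncExitStack()
    # The code above uses AsyncWebCrawler as an async context manager, so reuse that here.
    crawler = await stack.enter_async_context(AsyncWebCrawler())
    scheduler = HistoryScheduler(crawler)  # assumed name for the class shown above
    tasks = [
        asyncio.create_task(scheduler.update_bookmarks()),
        asyncio.create_task(scheduler.update_history()),
        asyncio.create_task(scheduler.update_missing_markdown_periodically()),
    ]
    try:
        yield
    finally:
        for task in tasks:
            task.cancel()  # stop the background loops on shutdown
        await scheduler.close()
        await stack.aclose()


app = FastAPI(lifespan=lifespan)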
@@ -1,209 +0,0 @@
|
||||
/* Custom styles can be added here */
|
||||
.active-nav-link {
|
||||
border-color: #60a5fa;
|
||||
color: #60a5fa;
|
||||
}
|
||||
|
||||
/* Add smooth transitions for hover effects */
|
||||
.hover\:border-primary {
|
||||
transition: border-color 0.2s ease-in-out;
|
||||
}
|
||||
|
||||
/* Custom scrollbar styles */
|
||||
::-webkit-scrollbar {
|
||||
width: 8px;
|
||||
}
|
||||
|
||||
::-webkit-scrollbar-track {
|
||||
background: #1f2937;
|
||||
}
|
||||
|
||||
::-webkit-scrollbar-thumb {
|
||||
background: #60a5fa;
|
||||
border-radius: 4px;
|
||||
}
|
||||
|
||||
::-webkit-scrollbar-thumb:hover {
|
||||
background: #3b82f6;
|
||||
}
|
||||
|
||||
/* Dark mode input styles */
|
||||
input[type="date"]::-webkit-calendar-picker-indicator {
|
||||
filter: invert(1);
|
||||
opacity: 0.5;
|
||||
}
|
||||
|
||||
/* Search result styles */
|
||||
.prose {
|
||||
font-size: 0.875rem;
|
||||
line-height: 1.5;
|
||||
}
|
||||
|
||||
.prose p {
|
||||
margin-bottom: 0.75rem;
|
||||
}
|
||||
|
||||
.prose h1,
|
||||
.prose h2,
|
||||
.prose h3,
|
||||
.prose h4 {
|
||||
color: #f3f4f6;
|
||||
font-weight: 600;
|
||||
margin: 1.5rem 0 0.75rem 0;
|
||||
}
|
||||
|
||||
.prose a {
|
||||
color: #60a5fa;
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
.prose code {
|
||||
background: #374151;
|
||||
padding: 0.2em 0.4em;
|
||||
border-radius: 0.25rem;
|
||||
font-size: 0.875em;
|
||||
}
|
||||
|
||||
.prose pre {
|
||||
background: #374151;
|
||||
padding: 1rem;
|
||||
border-radius: 0.375rem;
|
||||
overflow-x: auto;
|
||||
margin: 1rem 0;
|
||||
}
|
||||
|
||||
.prose pre code {
|
||||
background: transparent;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
.prose ul,
|
||||
.prose ol {
|
||||
padding-left: 1.5rem;
|
||||
margin: 0.75rem 0;
|
||||
}
|
||||
|
||||
.prose li {
|
||||
margin: 0.25rem 0;
|
||||
}
|
||||
|
||||
.prose blockquote {
|
||||
border-left: 4px solid #4b5563;
|
||||
padding-left: 1rem;
|
||||
margin: 1rem 0;
|
||||
color: #9ca3af;
|
||||
}
|
||||
|
||||
/* Search highlight styles */
|
||||
.highlight-search {
|
||||
background-color: #60a5fa;
|
||||
color: #111827;
|
||||
padding: 0.1em 0.2em;
|
||||
border-radius: 0.2em;
|
||||
font-weight: 600;
|
||||
}
|
||||
|
||||
/* Preview text styles */
|
||||
.preview-text {
|
||||
color: #9ca3af;
|
||||
position: relative;
|
||||
}
|
||||
|
||||
.preview-text::after {
|
||||
content: '';
|
||||
position: absolute;
|
||||
bottom: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
height: 2rem;
|
||||
background: linear-gradient(transparent, #1f2937);
|
||||
pointer-events: none;
|
||||
opacity: 0;
|
||||
transition: opacity 0.2s;
|
||||
}
|
||||
|
||||
.preview-text.collapsed::after {
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
.preview-text.collapsed {
|
||||
max-height: 8rem;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
/* Expand button styles */
|
||||
.expand-button {
|
||||
color: #60a5fa;
|
||||
font-size: 0.875rem;
|
||||
padding: 0.25rem 0.5rem;
|
||||
border-radius: 0.25rem;
|
||||
cursor: pointer;
|
||||
transition: all 0.2s;
|
||||
border: 1px solid #374151;
|
||||
}
|
||||
|
||||
.expand-button:hover {
|
||||
background-color: #374151;
|
||||
}
|
||||
|
||||
/* Form input styles */
|
||||
.form-input {
|
||||
background-color: #1f2937;
|
||||
border-color: #374151;
|
||||
color: #f3f4f6;
|
||||
transition: all 0.2s;
|
||||
}
|
||||
|
||||
.form-input:hover {
|
||||
border-color: #4b5563;
|
||||
}
|
||||
|
||||
.form-input:focus {
|
||||
border-color: #60a5fa;
|
||||
box-shadow: 0 0 0 2px rgba(96, 165, 250, 0.2);
|
||||
outline: none;
|
||||
}
|
||||
|
||||
/* Form label styles */
|
||||
.form-label {
|
||||
color: #9ca3af;
|
||||
font-weight: 500;
|
||||
margin-bottom: 0.5rem;
|
||||
}
|
||||
|
||||
/* Search button styles */
|
||||
.search-button {
|
||||
background-color: #2563eb;
|
||||
color: white;
|
||||
font-weight: 500;
|
||||
padding: 0.5rem 1.5rem;
|
||||
border-radius: 0.375rem;
|
||||
transition: all 0.2s;
|
||||
border: 1px solid transparent;
|
||||
}
|
||||
|
||||
.search-button:hover {
|
||||
background-color: #1d4ed8;
|
||||
}
|
||||
|
||||
.search-button:focus {
|
||||
outline: none;
|
||||
box-shadow: 0 0 0 2px rgba(96, 165, 250, 0.5);
|
||||
}
|
||||
|
||||
.search-button:active {
|
||||
background-color: #1e40af;
|
||||
}
|
||||
|
||||
/* Form grid layout */
|
||||
.form-grid {
|
||||
display: grid;
|
||||
gap: 1.5rem;
|
||||
margin-bottom: 1.5rem;
|
||||
}
|
||||
|
||||
@media (min-width: 640px) {
|
||||
.form-grid {
|
||||
grid-template-columns: repeat(2, 1fr);
|
||||
}
|
||||
}
|
||||
6
app/static/js/marked.min.js
vendored
File diff suppressed because one or more lines are too long
@@ -1,64 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>{% block title %}Browser History{% endblock %}</title>
|
||||
<script src="https://cdn.tailwindcss.com"></script>
|
||||
<link rel="stylesheet" href="{{ url_for('static', path='/css/main.css') }}">
|
||||
<script>
|
||||
tailwind.config = {
|
||||
theme: {
|
||||
extend: {
|
||||
colors: {
|
||||
primary: '#60a5fa',
|
||||
dark: {
|
||||
50: '#f9fafb',
|
||||
100: '#f3f4f6',
|
||||
200: '#e5e7eb',
|
||||
300: '#d1d5db',
|
||||
400: '#9ca3af',
|
||||
500: '#6b7280',
|
||||
600: '#4b5563',
|
||||
700: '#374151',
|
||||
800: '#1f2937',
|
||||
900: '#111827',
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
</script>
|
||||
</head>
|
||||
|
||||
<body class="bg-dark-900 text-gray-100">
|
||||
<nav class="bg-dark-800 shadow-lg border-b border-dark-700">
|
||||
<div class="max-w-7xl mx-auto px-4">
|
||||
<div class="flex justify-between h-16">
|
||||
<div class="flex">
|
||||
<div class="flex-shrink-0 flex items-center">
|
||||
<a href="/" class="text-xl font-bold text-primary">Browser History</a>
|
||||
</div>
|
||||
<div class="hidden sm:ml-6 sm:flex sm:space-x-8">
|
||||
<a href="/"
|
||||
class="text-gray-300 inline-flex items-center px-1 pt-1 border-b-2 border-transparent hover:border-primary hover:text-primary">Home</a>
|
||||
<a href="/search"
|
||||
class="text-gray-300 inline-flex items-center px-1 pt-1 border-b-2 border-transparent hover:border-primary hover:text-primary">Search</a>
|
||||
<a href="/bookmarks"
|
||||
class="text-gray-300 inline-flex items-center px-1 pt-1 border-b-2 border-transparent hover:border-primary hover:text-primary">Bookmarks</a>
|
||||
<a href="/docs"
|
||||
class="text-gray-300 inline-flex items-center px-1 pt-1 border-b-2 border-transparent hover:border-primary hover:text-primary">API
|
||||
Docs</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</nav>
|
||||
|
||||
<main class="max-w-7xl mx-auto py-6 sm:px-6 lg:px-8">
|
||||
{% block content %}{% endblock %}
|
||||
</main>
|
||||
</body>
|
||||
|
||||
</html>
|
||||
@@ -1,32 +0,0 @@
|
||||
{% extends "base.html" %}
|
||||
|
||||
{% block title %}Browser History - Bookmarks{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<div class="bg-dark-800 shadow overflow-hidden sm:rounded-lg border border-dark-700">
|
||||
<div class="px-4 py-5 sm:px-6">
|
||||
<h3 class="text-lg leading-6 font-medium text-gray-100">Bookmarks</h3>
|
||||
</div>
|
||||
<div class="border-t border-dark-700">
|
||||
<ul class="divide-y divide-gray-200" id="bookmarks-list">
|
||||
{% for bookmark in bookmarks %}
|
||||
<li class="px-4 py-4">
|
||||
<div class="flex items-center justify-between">
|
||||
<div class="flex-1 min-w-0">
|
||||
<p class="text-sm font-medium text-primary truncate">
|
||||
<a href="{{ bookmark.url }}" target="_blank">{{ bookmark.title }}</a>
|
||||
</p>
|
||||
<p class="text-sm text-gray-400">
|
||||
{{ bookmark.domain }} • {{ bookmark.added_time }}
|
||||
{% if bookmark.folder %}
|
||||
<span class="ml-2 px-2 py-1 text-xs rounded-full bg-dark-700 text-gray-300">{{ bookmark.folder }}</span>
|
||||
{% endif %}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
{% endblock %}
|
||||
@@ -1,29 +0,0 @@
|
||||
{% extends "base.html" %}
|
||||
|
||||
{% block title %}Browser History - Home{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<div class="bg-dark-800 shadow overflow-hidden sm:rounded-lg border border-dark-700">
|
||||
<div class="px-4 py-5 sm:px-6">
|
||||
<h3 class="text-lg leading-6 font-medium text-gray-100">Recent History</h3>
|
||||
</div>
|
||||
<div class="border-t border-dark-700">
|
||||
<ul class="divide-y divide-gray-200" id="history-list">
|
||||
{% for entry in entries %}
|
||||
<li class="px-4 py-4">
|
||||
<div class="flex items-center justify-between">
|
||||
<div class="flex-1 min-w-0">
|
||||
<p class="text-sm font-medium text-primary truncate">
|
||||
<a href="{{ entry.url }}" target="_blank">{{ entry.title }}</a>
|
||||
</p>
|
||||
<p class="text-sm text-gray-400">
|
||||
{{ entry.domain }} • {{ entry.visit_time }}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
{% endblock %}
|
||||
@@ -1,154 +0,0 @@
|
||||
{% extends "base.html" %}
|
||||
|
||||
{% block title %}Browser History - Search{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<div class="bg-dark-800 shadow sm:rounded-lg p-6 border border-dark-700">
|
||||
<form id="search-form">
|
||||
<div class="form-grid">
|
||||
<div>
|
||||
<label for="search-term" class="form-label">Search Term</label>
|
||||
<input type="text" name="search-term" id="search-term" placeholder="Enter search terms..."
|
||||
class="form-input w-full rounded-md">
|
||||
</div>
|
||||
<div>
|
||||
<label for="domain" class="form-label">Domain</label>
|
||||
<input type="text" name="domain" id="domain" placeholder="example.com" class="form-input w-full rounded-md">
|
||||
</div>
|
||||
<div>
|
||||
<label for="start-date" class="form-label">Start Date</label>
|
||||
<input type="date" name="start-date" id="start-date" class="form-input w-full rounded-md">
|
||||
</div>
|
||||
<div>
|
||||
<label for="end-date" class="form-label">End Date</label>
|
||||
<input type="date" name="end-date" id="end-date" class="form-input w-full rounded-md">
|
||||
</div>
|
||||
</div>
|
||||
<div class="flex justify-end">
|
||||
<button type="submit" class="search-button">
|
||||
<span class="flex items-center">
|
||||
<svg class="w-4 h-4 mr-2" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2"
|
||||
d="M21 21l-6-6m2-5a7 7 0 11-14 0 7 7 0 0114 0z" />
|
||||
</svg>
|
||||
Search
|
||||
</span>
|
||||
</button>
|
||||
</div>
|
||||
</form>
|
||||
|
||||
<div id="results" class="mt-8">
|
||||
<div class="border-t border-dark-700 mt-4">
|
||||
<ul class="divide-y divide-gray-200" id="search-results">
|
||||
<!-- Results will be populated here -->
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Add Marked.js -->
|
||||
<script src="{{ url_for('static', path='/js/marked.min.js') }}"></script>
|
||||
|
||||
<script>
|
||||
// Configure marked for security
|
||||
marked.setOptions({
|
||||
headerIds: false,
|
||||
mangle: false
|
||||
});
|
||||
|
||||
function highlightSearchTerm(text, searchTerm) {
|
||||
if (!searchTerm || !text) return text;
|
||||
const escaped = searchTerm.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); // Escape regex metacharacters so user input can't break the pattern
const regex = new RegExp(`(${escaped})`, 'gi');
|
||||
return text.replace(regex, '<mark class="highlight-search">$1</mark>');
|
||||
}
|
||||
|
||||
function getPreviewAroundMatch(text, searchTerm) {
|
||||
if (!text || !searchTerm) return '';
|
||||
|
||||
const regex = new RegExp(searchTerm.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'), 'i'); // Escape metacharacters before building the pattern
|
||||
const match = text.match(regex);
|
||||
if (!match) return text.slice(0, 200) + '...';
|
||||
|
||||
const matchIndex = match.index;
|
||||
const previewLength = 150;
|
||||
const start = Math.max(0, matchIndex - previewLength);
|
||||
const end = Math.min(text.length, matchIndex + match[0].length + previewLength);
|
||||
|
||||
let preview = text.slice(start, end);
|
||||
if (start > 0) preview = '...' + preview;
|
||||
if (end < text.length) preview = preview + '...';
|
||||
|
||||
return preview;
|
||||
}
|
||||
|
||||
function toggleContent(button, contentId) {
|
||||
const content = document.getElementById(contentId);
|
||||
const isCollapsed = content.classList.contains('collapsed');
|
||||
|
||||
content.classList.toggle('collapsed');
|
||||
button.textContent = isCollapsed ? 'Show Less' : 'Show More';
|
||||
}
|
||||
|
||||
document.getElementById('search-form').addEventListener('submit', async (e) => {
|
||||
e.preventDefault();
|
||||
|
||||
const searchTerm = document.getElementById('search-term').value;
|
||||
const domain = document.getElementById('domain').value;
|
||||
const startDate = document.getElementById('start-date').value;
|
||||
const endDate = document.getElementById('end-date').value;
|
||||
|
||||
const params = new URLSearchParams();
|
||||
if (searchTerm) params.append('search_term', searchTerm);
|
||||
if (domain) params.append('domain', domain);
|
||||
if (startDate) params.append('start_date', startDate);
|
||||
if (endDate) params.append('end_date', endDate);
|
||||
params.append('include_content', 'true');
|
||||
|
||||
const response = await fetch(`/history/search?${params.toString()}`);
|
||||
const results = await response.json();
|
||||
|
||||
const resultsContainer = document.getElementById('search-results');
|
||||
resultsContainer.innerHTML = results.map((entry, index) => {
|
||||
let contentHtml = '';
|
||||
|
||||
if (entry.markdown_content) {
|
||||
const preview = getPreviewAroundMatch(entry.markdown_content, searchTerm);
|
||||
const fullContent = marked.parse(highlightSearchTerm(entry.markdown_content, searchTerm));
|
||||
const previewHtml = marked.parse(highlightSearchTerm(preview, searchTerm));
|
||||
|
||||
contentHtml = `
|
||||
<div class="mt-2 text-sm text-gray-300">
|
||||
<div id="content-${index}" class="preview-text prose prose-invert max-w-none collapsed">
|
||||
${fullContent}
|
||||
</div>
|
||||
<div class="mt-2">
|
||||
<button
|
||||
onclick="toggleContent(this, 'content-${index}')"
|
||||
class="expand-button"
|
||||
>
|
||||
Show More
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
`;
|
||||
}
|
||||
|
||||
return `
|
||||
<li class="px-4 py-4">
|
||||
<div class="flex-1 min-w-0">
|
||||
<div class="flex items-center justify-between mb-2">
|
||||
<p class="text-sm font-medium text-primary truncate">
|
||||
<a href="${entry.url}" target="_blank">${highlightSearchTerm(entry.title, searchTerm)}</a>
|
||||
</p>
|
||||
<p class="text-sm text-gray-400 ml-4">
|
||||
${entry.domain} • ${new Date(entry.visit_time).toLocaleString()}
|
||||
</p>
|
||||
</div>
|
||||
${contentHtml}
|
||||
</div>
|
||||
</li>
|
||||
`;
|
||||
}).join('');
|
||||
});
|
||||
</script>
|
||||
{% endblock %}
|
||||
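The template above queries /history/search with search_term, domain, start_date, end_date, and include_content, and renders the fields produced by serialize_history_entry below. The actual route handler is not part of this diff; the following is a hypothetical sketch of what such an endpoint could look like, assuming the HistoryEntry model, the get_db session helper, and the serializer used elsewhere in this commit.

# Hypothetical sketch only -- the real /history/search handler is not shown in this diff.
from datetime import datetime
from typing import Optional

from fastapi import APIRouter, Depends
from sqlalchemy.orm import Session

from app.database import HistoryEntry, get_db   # assumed module path
from app.utils import serialize_history_entry   # serializer shown below

router = APIRouter()


@router.get("/history/search")
def search_history(
    search_term: Optional[str] = None,
    domain: Optional[str] = None,
    start_date: Optional[datetime] = None,
    end_date: Optional[datetime] = None,
    include_content: bool = False,
    db: Session = Depends(get_db),
):
    query = db.query(HistoryEntry)
    if search_term:
        query = query.filter(HistoryEntry.title.ilike(f"%{search_term}%"))
    if domain:
        query = query.filter(HistoryEntry.domain == domain)
    if start_date:
        query = query.filter(HistoryEntry.visit_time >= start_date)
    if end_date:
        query = query.filter(HistoryEntry.visit_time <= end_date)
    entries = query.order_by(HistoryEntry.visit_time.desc()).limit(100).all()
    return [serialize_history_entry(e, include_content=include_content) for e in entries]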
45
app/utils.py
@@ -1,45 +0,0 @@
|
||||
from datetime import datetime
|
||||
from .database import HistoryEntry, Bookmark
|
||||
|
||||
def serialize_history_entry(entry, include_content: bool = False):
|
||||
"""Serialize a HistoryEntry object or raw SQL result to a dictionary"""
|
||||
# Handle both ORM objects and raw SQL results
|
||||
if hasattr(entry, '_mapping'): # Raw SQL result (from execute)
|
||||
result = {
|
||||
"id": entry.id,
|
||||
"url": entry.url,
|
||||
"title": entry.title,
|
||||
"visit_time": entry.visit_time.isoformat() if isinstance(entry.visit_time, datetime) else entry.visit_time,
|
||||
"domain": entry.domain,
|
||||
# Add potential highlight fields if they exist
|
||||
"title_highlight": getattr(entry, 'title_highlight', None),
|
||||
"content_highlight": getattr(entry, 'content_highlight', None),
|
||||
"rank": getattr(entry, 'rank', None)
|
||||
}
|
||||
if include_content:
|
||||
# Ensure markdown_content exists before accessing
|
||||
result["markdown_content"] = getattr(entry, 'markdown_content', None)
|
||||
|
||||
else: # ORM object (from query)
|
||||
result = {
|
||||
"id": entry.id,
|
||||
"url": entry.url,
|
||||
"title": entry.title,
|
||||
"visit_time": entry.visit_time.isoformat() if entry.visit_time else None,
|
||||
"domain": entry.domain,
|
||||
}
|
||||
if include_content:
|
||||
result["markdown_content"] = entry.markdown_content
|
||||
|
||||
return result
|
||||
|
||||
def serialize_bookmark(bookmark):
|
||||
"""Serialize a Bookmark object to a dictionary"""
|
||||
return {
|
||||
"id": bookmark.id,
|
||||
"url": bookmark.url,
|
||||
"title": bookmark.title,
|
||||
"added_time": bookmark.added_time.isoformat() if bookmark.added_time else None,
|
||||
"folder": bookmark.folder,
|
||||
"domain": bookmark.domain,
|
||||
}
|
||||
@@ -1,33 +0,0 @@
|
||||
import asyncio
|
||||
import websockets
|
||||
import json
|
||||
from page_info import PageInfo
|
||||
from datetime import datetime
|
||||
|
||||
async def handle_websocket(websocket, path):
|
||||
try:
|
||||
async for message in websocket:
|
||||
data = json.loads(message)
|
||||
page_info = PageInfo(
|
||||
url=data['url'],
|
||||
html=data['html'],
|
||||
timestamp=datetime.fromisoformat(data['timestamp'])
|
||||
)
|
||||
print(f"Received page content from: {page_info.url}")
|
||||
# Here you can process the page_info object as needed
|
||||
|
||||
except websockets.exceptions.ConnectionClosed:
|
||||
print("Client disconnected")
|
||||
except Exception as e:
|
||||
print(f"Error handling message: {e}")
|
||||
|
||||
async def start_server():
|
||||
server = await websockets.serve(handle_websocket, "localhost", 8765)
|
||||
print("WebSocket server started on ws://localhost:8765")
|
||||
await server.wait_closed()
|
||||
|
||||
def run_server():
|
||||
asyncio.run(start_server())
|
||||
|
||||
if __name__ == "__main__":
|
||||
run_server()
|
||||
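A minimal client sketch for the standalone server above; the payload fields mirror what handle_websocket expects (url, html, and an ISO-8601 timestamp). The example URL and HTML are placeholders.

# Minimal client sketch for the ws://localhost:8765 server above.
import asyncio
import json
from datetime import datetime, timezone

import websockets


async def send_page():
    async with websockets.connect("ws://localhost:8765") as ws:
        await ws.send(json.dumps({
            "url": "https://example.com/",                         # placeholder URL
            "html": "<html><body>Hello</body></html>",             # placeholder page HTML
            "timestamp": datetime.now(timezone.utc).isoformat(),   # parsed by datetime.fromisoformat
        }))


if __name__ == "__main__":
    asyncio.run(send_page())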
@@ -1,14 +0,0 @@
|
||||
[Unit]
|
||||
Description=Browser Recall Service
|
||||
After=network.target
|
||||
|
||||
[Service]
|
||||
Type=simple
|
||||
User=zetaphor
|
||||
WorkingDirectory=/home/zetaphor/browser-recall
|
||||
ExecStart=/usr/bin/python3 /home/zetaphor/Code/browser-recall/main.py
|
||||
Restart=always
|
||||
RestartSec=10
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
110
database.py
Normal file
@@ -0,0 +1,110 @@
|
||||
import sqlite3
|
||||
from datetime import datetime
|
||||
from typing import Optional, List, Dict
|
||||
import threading
|
||||
|
||||
class Database:
|
||||
_instance = None
|
||||
_lock = threading.Lock()
|
||||
|
||||
def __new__(cls):
|
||||
with cls._lock:
|
||||
if cls._instance is None:
|
||||
cls._instance = super(Database, cls).__new__(cls)
|
||||
cls._instance._initialize_db()
|
||||
return cls._instance
|
||||
|
||||
def _initialize_db(self):
|
||||
"""Initialize the database connection and create tables if they don't exist."""
|
||||
self.conn = sqlite3.connect('history.db', check_same_thread=False)
|
||||
self.conn.row_factory = sqlite3.Row
|
||||
|
||||
try:
|
||||
# Set WAL mode first, before any other operations
|
||||
self.conn.execute('PRAGMA journal_mode=WAL')
|
||||
|
||||
# Other performance and reliability optimizations
|
||||
self.conn.execute('PRAGMA synchronous=NORMAL') # Balance between safety and speed
|
||||
self.conn.execute('PRAGMA temp_store=MEMORY') # Store temp tables and indices in memory
|
||||
self.conn.execute('PRAGMA cache_size=-64000') # Use 64MB of memory for page cache
|
||||
self.conn.execute('PRAGMA foreign_keys=ON') # Enable foreign key constraints
|
||||
except Exception as e:
|
||||
print(f"Error setting database PRAGMA options: {e}")
|
||||
# Optionally re-raise the exception if you want to halt execution
|
||||
raise
|
||||
|
||||
self.cursor = self.conn.cursor()
|
||||
|
||||
# Create history table
|
||||
self.cursor.execute('''
|
||||
CREATE TABLE IF NOT EXISTS history (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
url TEXT NOT NULL,
|
||||
title TEXT NOT NULL,
|
||||
content TEXT NOT NULL,
|
||||
created TIMESTAMP NOT NULL,
|
||||
updated TIMESTAMP NOT NULL
|
||||
)
|
||||
''')
|
||||
self.conn.commit()
|
||||
|
||||
def add_history(self, url: str, title: str, content: str) -> int:
|
||||
"""Add a new history entry."""
|
||||
now = datetime.utcnow()
|
||||
with self._lock:
|
||||
self.cursor.execute('''
|
||||
INSERT INTO history (url, title, content, created, updated)
|
||||
VALUES (?, ?, ?, ?, ?)
|
||||
''', (url, title, content, now, now))
|
||||
self.conn.commit()
|
||||
return self.cursor.lastrowid
|
||||
|
||||
def get_history(self, limit: int = 100) -> List[Dict]:
|
||||
"""Get history entries, ordered by most recent first."""
|
||||
self.cursor.execute('''
|
||||
SELECT * FROM history
|
||||
ORDER BY created DESC
|
||||
LIMIT ?
|
||||
''', (limit,))
|
||||
return [dict(row) for row in self.cursor.fetchall()]
|
||||
|
||||
def update_history(self, id: int, title: Optional[str] = None,
|
||||
content: Optional[str] = None) -> bool:
|
||||
"""Update an existing history entry."""
|
||||
update_fields = []
|
||||
values = []
|
||||
|
||||
if title is not None:
|
||||
update_fields.append("title = ?")
|
||||
values.append(title)
|
||||
if content is not None:
|
||||
update_fields.append("content = ?")
|
||||
values.append(content)
|
||||
|
||||
if not update_fields:
|
||||
return False
|
||||
|
||||
update_fields.append("updated = ?")
|
||||
values.append(datetime.utcnow())
|
||||
values.append(id)
|
||||
|
||||
with self._lock:
|
||||
self.cursor.execute(f'''
|
||||
UPDATE history
|
||||
SET {", ".join(update_fields)}
|
||||
WHERE id = ?
|
||||
''', values)
|
||||
self.conn.commit()
|
||||
return self.cursor.rowcount > 0
|
||||
|
||||
def delete_history(self, id: int) -> bool:
|
||||
"""Delete a history entry."""
|
||||
with self._lock:
|
||||
self.cursor.execute('DELETE FROM history WHERE id = ?', (id,))
|
||||
self.conn.commit()
|
||||
return self.cursor.rowcount > 0
|
||||
|
||||
def __del__(self):
|
||||
"""Cleanup database connection."""
|
||||
if hasattr(self, 'conn'):
|
||||
self.conn.close()
|
||||
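A short usage sketch for the Database singleton above; history.db is created in the working directory on first use, and the example values are placeholders.

from database import Database

db = Database()

# Insert a row; "created" and "updated" are set to the current UTC time.
entry_id = db.add_history(
    url="https://example.com/",
    title="Example Domain",
    content="Example page text",
)

# Update only the content; "updated" is refreshed, "created" is left alone.
db.update_history(entry_id, content="Refreshed page text")

# Most recent entries first.
for row in db.get_history(limit=5):
    print(row["created"], row["title"], row["url"])

db.delete_history(entry_id)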
@@ -47,13 +47,8 @@ class WebSocketClient {
|
||||
}
|
||||
|
||||
tryReconnect() {
|
||||
if (this.reconnectAttempts < this.maxReconnectAttempts) {
|
||||
this.reconnectAttempts++;
|
||||
console.log(`Attempting to reconnect (${this.reconnectAttempts}/${this.maxReconnectAttempts})...`);
|
||||
setTimeout(() => this.connect(), 2000 * this.reconnectAttempts);
|
||||
} else {
|
||||
console.log('Max reconnection attempts reached');
|
||||
}
|
||||
console.log(`Attempting to reconnect (${this.reconnectAttempts}/${this.maxReconnectAttempts})...`);
|
||||
setTimeout(() => this.connect(), 2000 * this.reconnectAttempts);
|
||||
}
|
||||
|
||||
sendMessage(data) {
|
||||
@@ -77,56 +72,10 @@ class WebSocketClient {
|
||||
|
||||
const wsClient = new WebSocketClient();
|
||||
|
||||
async function isContentScriptReady(tabId) {
|
||||
try {
|
||||
await browser.tabs.sendMessage(tabId, { type: "PING" });
|
||||
return true;
|
||||
} catch (error) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
async function waitForContentScript(tabId, maxAttempts = 10) {
|
||||
console.log(`Waiting for content script in tab ${tabId}`);
|
||||
for (let i = 0; i < maxAttempts; i++) {
|
||||
if (await isContentScriptReady(tabId)) {
|
||||
console.log(`Content script ready in tab ${tabId}`);
|
||||
return true;
|
||||
}
|
||||
console.log(`Attempt ${i + 1}: Content script not ready, waiting...`);
|
||||
await new Promise(resolve => setTimeout(resolve, 500));
|
||||
}
|
||||
console.log(`Content script not ready after ${maxAttempts} attempts`);
|
||||
return false;
|
||||
}
|
||||
|
||||
async function sendMessageToTab(tabId) {
|
||||
try {
|
||||
console.log(`Checking content script status for tab ${tabId}`);
|
||||
if (await waitForContentScript(tabId)) {
|
||||
console.log(`Sending GET_PAGE_CONTENT message to tab ${tabId}`);
|
||||
await browser.tabs.sendMessage(tabId, {
|
||||
type: "GET_PAGE_CONTENT"
|
||||
});
|
||||
console.log(`Successfully sent message to tab ${tabId}`);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(`Error sending message to tab ${tabId}:`, error);
|
||||
}
|
||||
}
|
||||
|
||||
// Listen for messages from content scripts
|
||||
browser.runtime.onMessage.addListener((message, sender) => {
|
||||
if (message.type === "SEND_PAGE_CONTENT") {
|
||||
console.log('Received page content from tab:', sender.tab.id);
|
||||
if (message.type === "SEND_PAGE_URL") {
|
||||
console.log('Received page url from tab:', sender.tab.id);
|
||||
wsClient.sendMessage(message.data);
|
||||
}
|
||||
});
|
||||
|
||||
browser.webNavigation.onCompleted.addListener(async (details) => {
|
||||
console.log("Navigation completed", details);
|
||||
if (details.frameId === 0) {
|
||||
console.log(`Main frame navigation detected for tab ${details.tabId}`);
|
||||
await sendMessageToTab(details.tabId);
|
||||
}
|
||||
});
|
||||
@@ -3,12 +3,11 @@ console.log("Content script starting initialization...");
|
||||
function sendPageContent() {
|
||||
const pageContent = {
|
||||
url: window.location.href,
|
||||
html: document.documentElement.outerHTML,
|
||||
timestamp: new Date().toISOString().replace(/\.\d{3}Z$/, 'Z')
|
||||
};
|
||||
|
||||
browser.runtime.sendMessage({
|
||||
type: "SEND_PAGE_CONTENT",
|
||||
type: "SEND_PAGE_URL",
|
||||
data: pageContent
|
||||
});
|
||||
}
|
||||
@@ -19,10 +18,6 @@ browser.runtime.onMessage.addListener((message, sender, sendResponse) => {
|
||||
return Promise.resolve({ status: "ready" });
|
||||
}
|
||||
|
||||
if (message.type === "GET_PAGE_CONTENT") {
|
||||
sendPageContent();
|
||||
}
|
||||
|
||||
return true;
|
||||
});
|
||||
|
||||
|
||||
36
logger.py
Normal file
@@ -0,0 +1,36 @@
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
class Logger:
|
||||
_instance: Optional['Logger'] = None
|
||||
|
||||
def __new__(cls):
|
||||
if cls._instance is None:
|
||||
cls._instance = super().__new__(cls)
|
||||
cls._instance._initialize()
|
||||
return cls._instance
|
||||
|
||||
def _initialize(self):
|
||||
# Configure logging
|
||||
logging.basicConfig(
|
||||
level=logging.INFO,
|
||||
format='%(asctime)s - %(levelname)s - %(message)s',
|
||||
handlers=[
|
||||
logging.FileHandler(f'logs/main_{datetime.now().strftime("%Y%m%d")}.log'),
|
||||
logging.StreamHandler()
|
||||
]
|
||||
)
|
||||
self.logger = logging.getLogger(__name__)
|
||||
|
||||
def info(self, message: str):
|
||||
self.logger.info(message)
|
||||
|
||||
def error(self, message: str):
|
||||
self.logger.error(message)
|
||||
|
||||
def warning(self, message: str):
|
||||
self.logger.warning(message)
|
||||
|
||||
def debug(self, message: str):
|
||||
self.logger.debug(message)
|
||||
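A brief usage sketch for the Logger singleton above. It writes to logs/main_YYYYMMDD.log, so the logs/ directory must exist (main.py below creates it at startup).

import os

from logger import Logger

os.makedirs("logs", exist_ok=True)  # the FileHandler path requires this directory

log = Logger()
log.info("Service starting")
log.warning("Falling back to defaults")
assert Logger() is log  # __new__ always returns the same configured instance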
69
main.py
@@ -1,15 +1,64 @@
|
||||
from fastapi import FastAPI, WebSocket
|
||||
import uvicorn
|
||||
from logger import Logger
|
||||
import os
|
||||
import sys
|
||||
from database import Database
|
||||
from crawl4ai import AsyncWebCrawler
|
||||
|
||||
# Add the app directory to the Python path
|
||||
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
# Create logs directory if it doesn't exist
|
||||
os.makedirs('logs', exist_ok=True)
|
||||
|
||||
app = FastAPI()
|
||||
logger = Logger()
|
||||
|
||||
db = Database()
|
||||
|
||||
@app.websocket("/ws")
|
||||
async def websocket_endpoint(websocket: WebSocket):
|
||||
await websocket.accept()
|
||||
logger.info("New WebSocket connection established")
|
||||
|
||||
# Create crawler instance outside the loop for reuse
|
||||
async with AsyncWebCrawler() as crawler:
|
||||
try:
|
||||
while True:
|
||||
data = await websocket.receive_json()
|
||||
|
||||
# Crawl the URL to get title and content
|
||||
try:
|
||||
result = await crawler.arun(url=data["url"])
|
||||
# Get the first result from the container and access metadata
|
||||
crawl_result = result[0]
|
||||
title = crawl_result.metadata.get('title') or data["url"].split("/")[-1]
|
||||
content = crawl_result.markdown
|
||||
logger.info(f"Crawling result: {result}")
|
||||
except Exception as crawl_error:
|
||||
logger.error(f"Crawling error for {data['url']}: {str(crawl_error)}")
|
||||
title = data["url"].split("/")[-1]
|
||||
content = str(data)
|
||||
|
||||
# Store received data with crawled information
|
||||
db.add_history(
|
||||
url=data["url"],
|
||||
title=title,
|
||||
content=content
|
||||
)
|
||||
|
||||
logger.info(f"Processed URL: {data['url']} - {title}")
|
||||
await websocket.send_json({
|
||||
"status": "received",
|
||||
"data": {
|
||||
"url": data["url"],
|
||||
"title": title,
|
||||
"timestamp": data["timestamp"]
|
||||
}
|
||||
})
|
||||
except Exception as e:
|
||||
logger.error(f"WebSocket error: {str(e)}")
|
||||
await websocket.close()
|
||||
finally:
|
||||
logger.info("WebSocket connection closed")
|
||||
|
||||
if __name__ == "__main__":
|
||||
# Run the FastAPI application using uvicorn
|
||||
uvicorn.run(
|
||||
"app.main:app",
|
||||
host="0.0.0.0", # Allows external access
|
||||
port=8523,
|
||||
reload=True # Enable auto-reload during development
|
||||
)
|
||||
logger.info("Starting WebSocket server...")
|
||||
uvicorn.run(app, host="0.0.0.0", port=8523)
|
||||
|
||||
@@ -7,14 +7,6 @@ requires-python = ">=3.10.16"
|
||||
dependencies = [
|
||||
"crawl4ai",
|
||||
"fastapi",
|
||||
"sqlalchemy",
|
||||
"uvicorn",
|
||||
"pytz",
|
||||
"aiofiles",
|
||||
"websockets",
|
||||
"pyyaml",
|
||||
"browser-history",
|
||||
"pydantic",
|
||||
"pydantic-settings",
|
||||
"iso8601",
|
||||
"uvicorn[standard]",
|
||||
"crawl4ai",
|
||||
]
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
cd (dirname (status filename))
|
||||
|
||||
# Activate the virtual environment and run main.py silently
|
||||
vf activate general
|
||||
source ./venv/bin/activate
|
||||
python main.py > /dev/null 2>&1 &
|
||||
|
||||
# Print a simple confirmation message
|
||||
|
||||
251
uv.lock
generated
@@ -180,48 +180,21 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/f9/49/6abb616eb3cbab6a7cca303dc02fdf3836de2e0b834bf966a7f5271a34d8/beautifulsoup4-4.13.3-py3-none-any.whl", hash = "sha256:99045d7d3f08f91f0d656bc9b7efbae189426cd913d830294a15eefa0ea4df16", size = 186015 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "browser-history"
|
||||
version = "0.4.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/60/22/76d11c62a41f6deec0c176632dc0942f4cf250512c5fb8313af252fa7841/browser_history-0.4.1.tar.gz", hash = "sha256:21f2dd03127f835062ebd4852aeff80dc4afee25676eb08245c23fe94539aa5b", size = 30439 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/61/1b/127acd362c3a2ca23d2f9e4e46813e9fb7f36668d13e349b04b8bb5bd04d/browser_history-0.4.1-py3-none-any.whl", hash = "sha256:4fad5e59121f2d2e7e55db8a9ca70a8a5a53791a2f2dc354feeb348c1422b1cb", size = 21848 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "browser-recall"
|
||||
version = "0.1.0"
|
||||
source = { virtual = "." }
|
||||
dependencies = [
|
||||
{ name = "aiofiles" },
|
||||
{ name = "browser-history" },
|
||||
{ name = "crawl4ai" },
|
||||
{ name = "fastapi" },
|
||||
{ name = "iso8601" },
|
||||
{ name = "pydantic" },
|
||||
{ name = "pydantic-settings" },
|
||||
{ name = "pytz" },
|
||||
{ name = "pyyaml" },
|
||||
{ name = "sqlalchemy" },
|
||||
{ name = "uvicorn" },
|
||||
{ name = "websockets" },
|
||||
{ name = "uvicorn", extra = ["standard"] },
|
||||
]
|
||||
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "aiofiles" },
|
||||
{ name = "browser-history" },
|
||||
{ name = "crawl4ai" },
|
||||
{ name = "fastapi" },
|
||||
{ name = "iso8601" },
|
||||
{ name = "pydantic" },
|
||||
{ name = "pydantic-settings" },
|
||||
{ name = "pytz" },
|
||||
{ name = "pyyaml" },
|
||||
{ name = "sqlalchemy" },
|
||||
{ name = "uvicorn" },
|
||||
{ name = "websockets" },
|
||||
{ name = "uvicorn", extras = ["standard"] },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -714,6 +687,42 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/18/8d/f052b1e336bb2c1fc7ed1aaed898aa570c0b61a09707b108979d9fc6e308/httpcore-1.0.8-py3-none-any.whl", hash = "sha256:5254cf149bcb5f75e9d1b2b9f729ea4a4b883d1ad7379fc632b727cec23674be", size = 78732 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "httptools"
|
||||
version = "0.6.4"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/a7/9a/ce5e1f7e131522e6d3426e8e7a490b3a01f39a6696602e1c4f33f9e94277/httptools-0.6.4.tar.gz", hash = "sha256:4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c", size = 240639 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/3b/6f/972f8eb0ea7d98a1c6be436e2142d51ad2a64ee18e02b0e7ff1f62171ab1/httptools-0.6.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3c73ce323711a6ffb0d247dcd5a550b8babf0f757e86a52558fe5b86d6fefcc0", size = 198780 },
|
||||
{ url = "https://files.pythonhosted.org/packages/6a/b0/17c672b4bc5c7ba7f201eada4e96c71d0a59fbc185e60e42580093a86f21/httptools-0.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:345c288418f0944a6fe67be8e6afa9262b18c7626c3ef3c28adc5eabc06a68da", size = 103297 },
|
||||
{ url = "https://files.pythonhosted.org/packages/92/5e/b4a826fe91971a0b68e8c2bd4e7db3e7519882f5a8ccdb1194be2b3ab98f/httptools-0.6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deee0e3343f98ee8047e9f4c5bc7cedbf69f5734454a94c38ee829fb2d5fa3c1", size = 443130 },
|
||||
{ url = "https://files.pythonhosted.org/packages/b0/51/ce61e531e40289a681a463e1258fa1e05e0be54540e40d91d065a264cd8f/httptools-0.6.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca80b7485c76f768a3bc83ea58373f8db7b015551117375e4918e2aa77ea9b50", size = 442148 },
|
||||
{ url = "https://files.pythonhosted.org/packages/ea/9e/270b7d767849b0c96f275c695d27ca76c30671f8eb8cc1bab6ced5c5e1d0/httptools-0.6.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:90d96a385fa941283ebd231464045187a31ad932ebfa541be8edf5b3c2328959", size = 415949 },
|
||||
{ url = "https://files.pythonhosted.org/packages/81/86/ced96e3179c48c6f656354e106934e65c8963d48b69be78f355797f0e1b3/httptools-0.6.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:59e724f8b332319e2875efd360e61ac07f33b492889284a3e05e6d13746876f4", size = 417591 },
|
||||
{ url = "https://files.pythonhosted.org/packages/75/73/187a3f620ed3175364ddb56847d7a608a6fc42d551e133197098c0143eca/httptools-0.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:c26f313951f6e26147833fc923f78f95604bbec812a43e5ee37f26dc9e5a686c", size = 88344 },
|
||||
{ url = "https://files.pythonhosted.org/packages/7b/26/bb526d4d14c2774fe07113ca1db7255737ffbb119315839af2065abfdac3/httptools-0.6.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f47f8ed67cc0ff862b84a1189831d1d33c963fb3ce1ee0c65d3b0cbe7b711069", size = 199029 },
|
||||
{ url = "https://files.pythonhosted.org/packages/a6/17/3e0d3e9b901c732987a45f4f94d4e2c62b89a041d93db89eafb262afd8d5/httptools-0.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0614154d5454c21b6410fdf5262b4a3ddb0f53f1e1721cfd59d55f32138c578a", size = 103492 },
|
||||
{ url = "https://files.pythonhosted.org/packages/b7/24/0fe235d7b69c42423c7698d086d4db96475f9b50b6ad26a718ef27a0bce6/httptools-0.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8787367fbdfccae38e35abf7641dafc5310310a5987b689f4c32cc8cc3ee975", size = 462891 },
|
||||
{ url = "https://files.pythonhosted.org/packages/b1/2f/205d1f2a190b72da6ffb5f41a3736c26d6fa7871101212b15e9b5cd8f61d/httptools-0.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40b0f7fe4fd38e6a507bdb751db0379df1e99120c65fbdc8ee6c1d044897a636", size = 459788 },
|
||||
{ url = "https://files.pythonhosted.org/packages/6e/4c/d09ce0eff09057a206a74575ae8f1e1e2f0364d20e2442224f9e6612c8b9/httptools-0.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40a5ec98d3f49904b9fe36827dcf1aadfef3b89e2bd05b0e35e94f97c2b14721", size = 433214 },
|
||||
{ url = "https://files.pythonhosted.org/packages/3e/d2/84c9e23edbccc4a4c6f96a1b8d99dfd2350289e94f00e9ccc7aadde26fb5/httptools-0.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dacdd3d10ea1b4ca9df97a0a303cbacafc04b5cd375fa98732678151643d4988", size = 434120 },
|
||||
{ url = "https://files.pythonhosted.org/packages/d0/46/4d8e7ba9581416de1c425b8264e2cadd201eb709ec1584c381f3e98f51c1/httptools-0.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:288cd628406cc53f9a541cfaf06041b4c71d751856bab45e3702191f931ccd17", size = 88565 },
|
||||
{ url = "https://files.pythonhosted.org/packages/bb/0e/d0b71465c66b9185f90a091ab36389a7352985fe857e352801c39d6127c8/httptools-0.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:df017d6c780287d5c80601dafa31f17bddb170232d85c066604d8558683711a2", size = 200683 },
|
||||
{ url = "https://files.pythonhosted.org/packages/e2/b8/412a9bb28d0a8988de3296e01efa0bd62068b33856cdda47fe1b5e890954/httptools-0.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:85071a1e8c2d051b507161f6c3e26155b5c790e4e28d7f236422dbacc2a9cc44", size = 104337 },
|
||||
{ url = "https://files.pythonhosted.org/packages/9b/01/6fb20be3196ffdc8eeec4e653bc2a275eca7f36634c86302242c4fbb2760/httptools-0.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69422b7f458c5af875922cdb5bd586cc1f1033295aa9ff63ee196a87519ac8e1", size = 508796 },
|
||||
{ url = "https://files.pythonhosted.org/packages/f7/d8/b644c44acc1368938317d76ac991c9bba1166311880bcc0ac297cb9d6bd7/httptools-0.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16e603a3bff50db08cd578d54f07032ca1631450ceb972c2f834c2b860c28ea2", size = 510837 },
|
||||
{ url = "https://files.pythonhosted.org/packages/52/d8/254d16a31d543073a0e57f1c329ca7378d8924e7e292eda72d0064987486/httptools-0.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec4f178901fa1834d4a060320d2f3abc5c9e39766953d038f1458cb885f47e81", size = 485289 },
|
||||
{ url = "https://files.pythonhosted.org/packages/5f/3c/4aee161b4b7a971660b8be71a92c24d6c64372c1ab3ae7f366b3680df20f/httptools-0.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9eb89ecf8b290f2e293325c646a211ff1c2493222798bb80a530c5e7502494f", size = 489779 },
|
||||
{ url = "https://files.pythonhosted.org/packages/12/b7/5cae71a8868e555f3f67a50ee7f673ce36eac970f029c0c5e9d584352961/httptools-0.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:db78cb9ca56b59b016e64b6031eda5653be0589dba2b1b43453f6e8b405a0970", size = 88634 },
|
||||
{ url = "https://files.pythonhosted.org/packages/94/a3/9fe9ad23fd35f7de6b91eeb60848986058bd8b5a5c1e256f5860a160cc3e/httptools-0.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ade273d7e767d5fae13fa637f4d53b6e961fb7fd93c7797562663f0171c26660", size = 197214 },
|
||||
{ url = "https://files.pythonhosted.org/packages/ea/d9/82d5e68bab783b632023f2fa31db20bebb4e89dfc4d2293945fd68484ee4/httptools-0.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:856f4bc0478ae143bad54a4242fccb1f3f86a6e1be5548fecfd4102061b3a083", size = 102431 },
|
||||
{ url = "https://files.pythonhosted.org/packages/96/c1/cb499655cbdbfb57b577734fde02f6fa0bbc3fe9fb4d87b742b512908dff/httptools-0.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:322d20ea9cdd1fa98bd6a74b77e2ec5b818abdc3d36695ab402a0de8ef2865a3", size = 473121 },
|
||||
{ url = "https://files.pythonhosted.org/packages/af/71/ee32fd358f8a3bb199b03261f10921716990808a675d8160b5383487a317/httptools-0.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d87b29bd4486c0093fc64dea80231f7c7f7eb4dc70ae394d70a495ab8436071", size = 473805 },
|
||||
{ url = "https://files.pythonhosted.org/packages/8a/0a/0d4df132bfca1507114198b766f1737d57580c9ad1cf93c1ff673e3387be/httptools-0.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:342dd6946aa6bda4b8f18c734576106b8a31f2fe31492881a9a160ec84ff4bd5", size = 448858 },
|
||||
{ url = "https://files.pythonhosted.org/packages/1e/6a/787004fdef2cabea27bad1073bf6a33f2437b4dbd3b6fb4a9d71172b1c7c/httptools-0.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b36913ba52008249223042dca46e69967985fb4051951f94357ea681e1f5dc0", size = 452042 },
|
||||
{ url = "https://files.pythonhosted.org/packages/4d/dc/7decab5c404d1d2cdc1bb330b1bf70e83d6af0396fd4fc76fc60c0d522bf/httptools-0.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:28908df1b9bb8187393d5b5db91435ccc9c8e891657f9cbb42a2541b44c82fc8", size = 87682 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "httpx"
|
||||
version = "0.28.1"
|
||||
@@ -777,15 +786,6 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/79/9d/0fb148dc4d6fa4a7dd1d8378168d9b4cd8d4560a6fbf6f0121c5fc34eb68/importlib_metadata-8.6.1-py3-none-any.whl", hash = "sha256:02a89390c1e15fdfdc0d7c6b25cb3e62650d0494005c97d6f148bf5b9787525e", size = 26971 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "iso8601"
|
||||
version = "2.1.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/b9/f3/ef59cee614d5e0accf6fd0cbba025b93b272e626ca89fb70a3e9187c5d15/iso8601-2.1.0.tar.gz", hash = "sha256:6b1d3829ee8921c4301998c909f7829fa9ed3cbdac0d3b16af2d743aed1ba8df", size = 6522 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/6c/0c/f37b6a241f0759b7653ffa7213889d89ad49a2b76eb2ddf3b57b2738c347/iso8601-2.1.0-py3-none-any.whl", hash = "sha256:aac4145c4dcb66ad8b648a02830f5e2ff6c24af20f4f482689be402db2429242", size = 7545 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "jinja2"
|
||||
version = "3.1.6"
|
||||
@@ -1570,19 +1570,6 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/12/6f/5596dc418f2e292ffc661d21931ab34591952e2843e7168ea5a52591f6ff/pydantic_core-2.33.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:f995719707e0e29f0f41a8aa3bcea6e761a36c9136104d3189eafb83f5cec5e5", size = 2080951 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pydantic-settings"
|
||||
version = "2.8.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "pydantic" },
|
||||
{ name = "python-dotenv" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/88/82/c79424d7d8c29b994fb01d277da57b0a9b09cc03c3ff875f9bd8a86b2145/pydantic_settings-2.8.1.tar.gz", hash = "sha256:d5c663dfbe9db9d5e1c646b2e161da12f0d734d422ee56f567d0ea2cee4e8585", size = 83550 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/0b/53/a64f03044927dc47aafe029c42a5b7aabc38dfb813475e0e1bf71c4a59d0/pydantic_settings-2.8.1-py3-none-any.whl", hash = "sha256:81942d5ac3d905f7f3ee1a70df5dfb62d5569c12f51a5a647defc1c3d9ee2e9c", size = 30839 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pyee"
|
||||
version = "12.1.1"
|
||||
@@ -1632,15 +1619,6 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/1e/18/98a99ad95133c6a6e2005fe89faedf294a748bd5dc803008059409ac9b1e/python_dotenv-1.1.0-py3-none-any.whl", hash = "sha256:d7c01d9e2293916c18baf562d95698754b0dbbb5e74d457c45d4f6561fb9d55d", size = 20256 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pytz"
|
||||
version = "2025.2"
|
||||
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225 },
]

[[package]]
name = "pyyaml"
version = "6.0.2"
@@ -1932,51 +1910,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/d1/c2/fe97d779f3ef3b15f05c94a2f1e3d21732574ed441687474db9d342a7315/soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9", size = 36186 },
]

[[package]]
name = "sqlalchemy"
version = "2.0.40"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "greenlet", marker = "(python_full_version < '3.14' and platform_machine == 'AMD64') or (python_full_version < '3.14' and platform_machine == 'WIN32') or (python_full_version < '3.14' and platform_machine == 'aarch64') or (python_full_version < '3.14' and platform_machine == 'amd64') or (python_full_version < '3.14' and platform_machine == 'ppc64le') or (python_full_version < '3.14' and platform_machine == 'win32') or (python_full_version < '3.14' and platform_machine == 'x86_64')" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/68/c3/3f2bfa5e4dcd9938405fe2fab5b6ab94a9248a4f9536ea2fd497da20525f/sqlalchemy-2.0.40.tar.gz", hash = "sha256:d827099289c64589418ebbcaead0145cd19f4e3e8a93919a0100247af245fa00", size = 9664299 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/87/fa/8e8fd93684b04e65816be864bebf0000fe1602e5452d006f9acc5db14ce5/sqlalchemy-2.0.40-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f1ea21bef99c703f44444ad29c2c1b6bd55d202750b6de8e06a955380f4725d7", size = 2112843 },
{ url = "https://files.pythonhosted.org/packages/ba/87/06992f78a9ce545dfd1fea3dd99262bec5221f6f9d2d2066c3e94662529f/sqlalchemy-2.0.40-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:afe63b208153f3a7a2d1a5b9df452b0673082588933e54e7c8aac457cf35e758", size = 2104032 },
{ url = "https://files.pythonhosted.org/packages/92/ee/57dc77282e8be22d686bd4681825299aa1069bbe090564868ea270ed5214/sqlalchemy-2.0.40-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8aae085ea549a1eddbc9298b113cffb75e514eadbb542133dd2b99b5fb3b6af", size = 3086406 },
{ url = "https://files.pythonhosted.org/packages/94/3f/ceb9ab214b2e42d2e74a9209b3a2f2f073504eee16cddd2df81feeb67c2f/sqlalchemy-2.0.40-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ea9181284754d37db15156eb7be09c86e16e50fbe77610e9e7bee09291771a1", size = 3094652 },
{ url = "https://files.pythonhosted.org/packages/00/0a/3401232a5b6d91a2df16c1dc39c6504c54575744c2faafa1e5a50de96621/sqlalchemy-2.0.40-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5434223b795be5c5ef8244e5ac98056e290d3a99bdcc539b916e282b160dda00", size = 3050503 },
{ url = "https://files.pythonhosted.org/packages/93/c2/ea7171415ab131397f71a2673645c2fe29ebe9a93063d458eb89e42bf051/sqlalchemy-2.0.40-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:15d08d5ef1b779af6a0909b97be6c1fd4298057504eb6461be88bd1696cb438e", size = 3076011 },
{ url = "https://files.pythonhosted.org/packages/3d/ee/d8e229280d621bed8c51eebf1dd413aa09ca89e309b1fff40d881dd149af/sqlalchemy-2.0.40-cp310-cp310-win32.whl", hash = "sha256:cd2f75598ae70bcfca9117d9e51a3b06fe29edd972fdd7fd57cc97b4dbf3b08a", size = 2085136 },
{ url = "https://files.pythonhosted.org/packages/60/7f/ea1086136bc648cd4713a1e01869f7fc31979d67b3a8f973f5d9ab8de7e1/sqlalchemy-2.0.40-cp310-cp310-win_amd64.whl", hash = "sha256:2cbafc8d39ff1abdfdda96435f38fab141892dc759a2165947d1a8fffa7ef596", size = 2109421 },
{ url = "https://files.pythonhosted.org/packages/77/7e/55044a9ec48c3249bb38d5faae93f09579c35e862bb318ebd1ed7a1994a5/sqlalchemy-2.0.40-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f6bacab7514de6146a1976bc56e1545bee247242fab030b89e5f70336fc0003e", size = 2114025 },
{ url = "https://files.pythonhosted.org/packages/77/0f/dcf7bba95f847aec72f638750747b12d37914f71c8cc7c133cf326ab945c/sqlalchemy-2.0.40-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5654d1ac34e922b6c5711631f2da497d3a7bffd6f9f87ac23b35feea56098011", size = 2104419 },
{ url = "https://files.pythonhosted.org/packages/75/70/c86a5c20715e4fe903dde4c2fd44fc7e7a0d5fb52c1b954d98526f65a3ea/sqlalchemy-2.0.40-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35904d63412db21088739510216e9349e335f142ce4a04b69e2528020ee19ed4", size = 3222720 },
{ url = "https://files.pythonhosted.org/packages/12/cf/b891a8c1d0c27ce9163361664c2128c7a57de3f35000ea5202eb3a2917b7/sqlalchemy-2.0.40-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c7a80ed86d6aaacb8160a1caef6680d4ddd03c944d985aecee940d168c411d1", size = 3222682 },
{ url = "https://files.pythonhosted.org/packages/15/3f/7709d8c8266953d945435a96b7f425ae4172a336963756b58e996fbef7f3/sqlalchemy-2.0.40-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:519624685a51525ddaa7d8ba8265a1540442a2ec71476f0e75241eb8263d6f51", size = 3159542 },
{ url = "https://files.pythonhosted.org/packages/85/7e/717eaabaf0f80a0132dc2032ea8f745b7a0914451c984821a7c8737fb75a/sqlalchemy-2.0.40-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2ee5f9999a5b0e9689bed96e60ee53c3384f1a05c2dd8068cc2e8361b0df5b7a", size = 3179864 },
{ url = "https://files.pythonhosted.org/packages/e4/cc/03eb5dfcdb575cbecd2bd82487b9848f250a4b6ecfb4707e834b4ce4ec07/sqlalchemy-2.0.40-cp311-cp311-win32.whl", hash = "sha256:c0cae71e20e3c02c52f6b9e9722bca70e4a90a466d59477822739dc31ac18b4b", size = 2084675 },
{ url = "https://files.pythonhosted.org/packages/9a/48/440946bf9dc4dc231f4f31ef0d316f7135bf41d4b86aaba0c0655150d370/sqlalchemy-2.0.40-cp311-cp311-win_amd64.whl", hash = "sha256:574aea2c54d8f1dd1699449f332c7d9b71c339e04ae50163a3eb5ce4c4325ee4", size = 2110099 },
{ url = "https://files.pythonhosted.org/packages/92/06/552c1f92e880b57d8b92ce6619bd569b25cead492389b1d84904b55989d8/sqlalchemy-2.0.40-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9d3b31d0a1c44b74d3ae27a3de422dfccd2b8f0b75e51ecb2faa2bf65ab1ba0d", size = 2112620 },
{ url = "https://files.pythonhosted.org/packages/01/72/a5bc6e76c34cebc071f758161dbe1453de8815ae6e662393910d3be6d70d/sqlalchemy-2.0.40-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:37f7a0f506cf78c80450ed1e816978643d3969f99c4ac6b01104a6fe95c5490a", size = 2103004 },
{ url = "https://files.pythonhosted.org/packages/bf/fd/0e96c8e6767618ed1a06e4d7a167fe13734c2f8113c4cb704443e6783038/sqlalchemy-2.0.40-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bb933a650323e476a2e4fbef8997a10d0003d4da996aad3fd7873e962fdde4d", size = 3252440 },
{ url = "https://files.pythonhosted.org/packages/cd/6a/eb82e45b15a64266a2917a6833b51a334ea3c1991728fd905bfccbf5cf63/sqlalchemy-2.0.40-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6959738971b4745eea16f818a2cd086fb35081383b078272c35ece2b07012716", size = 3263277 },
{ url = "https://files.pythonhosted.org/packages/45/97/ebe41ab4530f50af99e3995ebd4e0204bf1b0dc0930f32250dde19c389fe/sqlalchemy-2.0.40-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:110179728e442dae85dd39591beb74072ae4ad55a44eda2acc6ec98ead80d5f2", size = 3198591 },
{ url = "https://files.pythonhosted.org/packages/e6/1c/a569c1b2b2f5ac20ba6846a1321a2bf52e9a4061001f282bf1c5528dcd69/sqlalchemy-2.0.40-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e8040680eaacdce4d635f12c55c714f3d4c7f57da2bc47a01229d115bd319191", size = 3225199 },
{ url = "https://files.pythonhosted.org/packages/8f/91/87cc71a6b10065ca0209d19a4bb575378abda6085e72fa0b61ffb2201b84/sqlalchemy-2.0.40-cp312-cp312-win32.whl", hash = "sha256:650490653b110905c10adac69408380688cefc1f536a137d0d69aca1069dc1d1", size = 2082959 },
{ url = "https://files.pythonhosted.org/packages/2a/9f/14c511cda174aa1ad9b0e42b64ff5a71db35d08b0d80dc044dae958921e5/sqlalchemy-2.0.40-cp312-cp312-win_amd64.whl", hash = "sha256:2be94d75ee06548d2fc591a3513422b873490efb124048f50556369a834853b0", size = 2108526 },
{ url = "https://files.pythonhosted.org/packages/8c/18/4e3a86cc0232377bc48c373a9ba6a1b3fb79ba32dbb4eda0b357f5a2c59d/sqlalchemy-2.0.40-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:915866fd50dd868fdcc18d61d8258db1bf9ed7fbd6dfec960ba43365952f3b01", size = 2107887 },
{ url = "https://files.pythonhosted.org/packages/cb/60/9fa692b1d2ffc4cbd5f47753731fd332afed30137115d862d6e9a1e962c7/sqlalchemy-2.0.40-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a4c5a2905a9ccdc67a8963e24abd2f7afcd4348829412483695c59e0af9a705", size = 2098367 },
{ url = "https://files.pythonhosted.org/packages/4c/9f/84b78357ca641714a439eb3fbbddb17297dacfa05d951dbf24f28d7b5c08/sqlalchemy-2.0.40-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55028d7a3ebdf7ace492fab9895cbc5270153f75442a0472d8516e03159ab364", size = 3184806 },
{ url = "https://files.pythonhosted.org/packages/4b/7d/e06164161b6bfce04c01bfa01518a20cccbd4100d5c951e5a7422189191a/sqlalchemy-2.0.40-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6cfedff6878b0e0d1d0a50666a817ecd85051d12d56b43d9d425455e608b5ba0", size = 3198131 },
{ url = "https://files.pythonhosted.org/packages/6d/51/354af20da42d7ec7b5c9de99edafbb7663a1d75686d1999ceb2c15811302/sqlalchemy-2.0.40-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bb19e30fdae77d357ce92192a3504579abe48a66877f476880238a962e5b96db", size = 3131364 },
{ url = "https://files.pythonhosted.org/packages/7a/2f/48a41ff4e6e10549d83fcc551ab85c268bde7c03cf77afb36303c6594d11/sqlalchemy-2.0.40-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:16d325ea898f74b26ffcd1cf8c593b0beed8714f0317df2bed0d8d1de05a8f26", size = 3159482 },
{ url = "https://files.pythonhosted.org/packages/33/ac/e5e0a807163652a35be878c0ad5cfd8b1d29605edcadfb5df3c512cdf9f3/sqlalchemy-2.0.40-cp313-cp313-win32.whl", hash = "sha256:a669cbe5be3c63f75bcbee0b266779706f1a54bcb1000f302685b87d1b8c1500", size = 2080704 },
{ url = "https://files.pythonhosted.org/packages/1c/cb/f38c61f7f2fd4d10494c1c135ff6a6ddb63508d0b47bccccd93670637309/sqlalchemy-2.0.40-cp313-cp313-win_amd64.whl", hash = "sha256:641ee2e0834812d657862f3a7de95e0048bdcb6c55496f39c6fa3d435f6ac6ad", size = 2104564 },
{ url = "https://files.pythonhosted.org/packages/d1/7c/5fc8e802e7506fe8b55a03a2e1dab156eae205c91bee46305755e086d2e2/sqlalchemy-2.0.40-py3-none-any.whl", hash = "sha256:32587e2e1e359276957e6fe5dad089758bc042a971a8a09ae8ecf7a8fe23d07a", size = 1903894 },
]

[[package]]
name = "starlette"
version = "0.46.1"
@@ -2119,6 +2052,114 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/61/14/33a3a1352cfa71812a3a21e8c9bfb83f60b0011f5e36f2b1399d51928209/uvicorn-0.34.0-py3-none-any.whl", hash = "sha256:023dc038422502fa28a09c7a30bf2b6991512da7dcdb8fd35fe57cfc154126f4", size = 62315 },
]

[package.optional-dependencies]
standard = [
{ name = "colorama", marker = "sys_platform == 'win32'" },
{ name = "httptools" },
{ name = "python-dotenv" },
{ name = "pyyaml" },
{ name = "uvloop", marker = "platform_python_implementation != 'PyPy' and sys_platform != 'cygwin' and sys_platform != 'win32'" },
{ name = "watchfiles" },
{ name = "websockets" },
]

[[package]]
name = "uvloop"
version = "0.21.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/af/c0/854216d09d33c543f12a44b393c402e89a920b1a0a7dc634c42de91b9cf6/uvloop-0.21.0.tar.gz", hash = "sha256:3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3", size = 2492741 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/3d/76/44a55515e8c9505aa1420aebacf4dd82552e5e15691654894e90d0bd051a/uvloop-0.21.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ec7e6b09a6fdded42403182ab6b832b71f4edaf7f37a9a0e371a01db5f0cb45f", size = 1442019 },
{ url = "https://files.pythonhosted.org/packages/35/5a/62d5800358a78cc25c8a6c72ef8b10851bdb8cca22e14d9c74167b7f86da/uvloop-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:196274f2adb9689a289ad7d65700d37df0c0930fd8e4e743fa4834e850d7719d", size = 801898 },
{ url = "https://files.pythonhosted.org/packages/f3/96/63695e0ebd7da6c741ccd4489b5947394435e198a1382349c17b1146bb97/uvloop-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f38b2e090258d051d68a5b14d1da7203a3c3677321cf32a95a6f4db4dd8b6f26", size = 3827735 },
{ url = "https://files.pythonhosted.org/packages/61/e0/f0f8ec84979068ffae132c58c79af1de9cceeb664076beea86d941af1a30/uvloop-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87c43e0f13022b998eb9b973b5e97200c8b90823454d4bc06ab33829e09fb9bb", size = 3825126 },
{ url = "https://files.pythonhosted.org/packages/bf/fe/5e94a977d058a54a19df95f12f7161ab6e323ad49f4dabc28822eb2df7ea/uvloop-0.21.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:10d66943def5fcb6e7b37310eb6b5639fd2ccbc38df1177262b0640c3ca68c1f", size = 3705789 },
{ url = "https://files.pythonhosted.org/packages/26/dd/c7179618e46092a77e036650c1f056041a028a35c4d76945089fcfc38af8/uvloop-0.21.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:67dd654b8ca23aed0a8e99010b4c34aca62f4b7fce88f39d452ed7622c94845c", size = 3800523 },
{ url = "https://files.pythonhosted.org/packages/57/a7/4cf0334105c1160dd6819f3297f8700fda7fc30ab4f61fbf3e725acbc7cc/uvloop-0.21.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c0f3fa6200b3108919f8bdabb9a7f87f20e7097ea3c543754cabc7d717d95cf8", size = 1447410 },
{ url = "https://files.pythonhosted.org/packages/8c/7c/1517b0bbc2dbe784b563d6ab54f2ef88c890fdad77232c98ed490aa07132/uvloop-0.21.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0878c2640cf341b269b7e128b1a5fed890adc4455513ca710d77d5e93aa6d6a0", size = 805476 },
{ url = "https://files.pythonhosted.org/packages/ee/ea/0bfae1aceb82a503f358d8d2fa126ca9dbdb2ba9c7866974faec1cb5875c/uvloop-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9fb766bb57b7388745d8bcc53a359b116b8a04c83a2288069809d2b3466c37e", size = 3960855 },
{ url = "https://files.pythonhosted.org/packages/8a/ca/0864176a649838b838f36d44bf31c451597ab363b60dc9e09c9630619d41/uvloop-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a375441696e2eda1c43c44ccb66e04d61ceeffcd76e4929e527b7fa401b90fb", size = 3973185 },
{ url = "https://files.pythonhosted.org/packages/30/bf/08ad29979a936d63787ba47a540de2132169f140d54aa25bc8c3df3e67f4/uvloop-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:baa0e6291d91649c6ba4ed4b2f982f9fa165b5bbd50a9e203c416a2797bab3c6", size = 3820256 },
{ url = "https://files.pythonhosted.org/packages/da/e2/5cf6ef37e3daf2f06e651aae5ea108ad30df3cb269102678b61ebf1fdf42/uvloop-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4509360fcc4c3bd2c70d87573ad472de40c13387f5fda8cb58350a1d7475e58d", size = 3937323 },
{ url = "https://files.pythonhosted.org/packages/8c/4c/03f93178830dc7ce8b4cdee1d36770d2f5ebb6f3d37d354e061eefc73545/uvloop-0.21.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:359ec2c888397b9e592a889c4d72ba3d6befba8b2bb01743f72fffbde663b59c", size = 1471284 },
{ url = "https://files.pythonhosted.org/packages/43/3e/92c03f4d05e50f09251bd8b2b2b584a2a7f8fe600008bcc4523337abe676/uvloop-0.21.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7089d2dc73179ce5ac255bdf37c236a9f914b264825fdaacaded6990a7fb4c2", size = 821349 },
{ url = "https://files.pythonhosted.org/packages/a6/ef/a02ec5da49909dbbfb1fd205a9a1ac4e88ea92dcae885e7c961847cd51e2/uvloop-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baa4dcdbd9ae0a372f2167a207cd98c9f9a1ea1188a8a526431eef2f8116cc8d", size = 4580089 },
{ url = "https://files.pythonhosted.org/packages/06/a7/b4e6a19925c900be9f98bec0a75e6e8f79bb53bdeb891916609ab3958967/uvloop-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86975dca1c773a2c9864f4c52c5a55631038e387b47eaf56210f873887b6c8dc", size = 4693770 },
{ url = "https://files.pythonhosted.org/packages/ce/0c/f07435a18a4b94ce6bd0677d8319cd3de61f3a9eeb1e5f8ab4e8b5edfcb3/uvloop-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:461d9ae6660fbbafedd07559c6a2e57cd553b34b0065b6550685f6653a98c1cb", size = 4451321 },
{ url = "https://files.pythonhosted.org/packages/8f/eb/f7032be105877bcf924709c97b1bf3b90255b4ec251f9340cef912559f28/uvloop-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:183aef7c8730e54c9a3ee3227464daed66e37ba13040bb3f350bc2ddc040f22f", size = 4659022 },
{ url = "https://files.pythonhosted.org/packages/3f/8d/2cbef610ca21539f0f36e2b34da49302029e7c9f09acef0b1c3b5839412b/uvloop-0.21.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bfd55dfcc2a512316e65f16e503e9e450cab148ef11df4e4e679b5e8253a5281", size = 1468123 },
{ url = "https://files.pythonhosted.org/packages/93/0d/b0038d5a469f94ed8f2b2fce2434a18396d8fbfb5da85a0a9781ebbdec14/uvloop-0.21.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:787ae31ad8a2856fc4e7c095341cccc7209bd657d0e71ad0dc2ea83c4a6fa8af", size = 819325 },
{ url = "https://files.pythonhosted.org/packages/50/94/0a687f39e78c4c1e02e3272c6b2ccdb4e0085fda3b8352fecd0410ccf915/uvloop-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ee4d4ef48036ff6e5cfffb09dd192c7a5027153948d85b8da7ff705065bacc6", size = 4582806 },
{ url = "https://files.pythonhosted.org/packages/d2/19/f5b78616566ea68edd42aacaf645adbf71fbd83fc52281fba555dc27e3f1/uvloop-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3df876acd7ec037a3d005b3ab85a7e4110422e4d9c1571d4fc89b0fc41b6816", size = 4701068 },
{ url = "https://files.pythonhosted.org/packages/47/57/66f061ee118f413cd22a656de622925097170b9380b30091b78ea0c6ea75/uvloop-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd53ecc9a0f3d87ab847503c2e1552b690362e005ab54e8a48ba97da3924c0dc", size = 4454428 },
{ url = "https://files.pythonhosted.org/packages/63/9a/0962b05b308494e3202d3f794a6e85abe471fe3cafdbcf95c2e8c713aabd/uvloop-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5c39f217ab3c663dc699c04cbd50c13813e31d917642d459fdcec07555cc553", size = 4660018 },
]

[[package]]
name = "watchfiles"
version = "1.0.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
]
sdist = { url = "https://files.pythonhosted.org/packages/03/e2/8ed598c42057de7aa5d97c472254af4906ff0a59a66699d426fc9ef795d7/watchfiles-1.0.5.tar.gz", hash = "sha256:b7529b5dcc114679d43827d8c35a07c493ad6f083633d573d81c660abc5979e9", size = 94537 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/af/4d/d02e6ea147bb7fff5fd109c694a95109612f419abed46548a930e7f7afa3/watchfiles-1.0.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:5c40fe7dd9e5f81e0847b1ea64e1f5dd79dd61afbedb57759df06767ac719b40", size = 405632 },
{ url = "https://files.pythonhosted.org/packages/60/31/9ee50e29129d53a9a92ccf1d3992751dc56fc3c8f6ee721be1c7b9c81763/watchfiles-1.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8c0db396e6003d99bb2d7232c957b5f0b5634bbd1b24e381a5afcc880f7373fb", size = 395734 },
{ url = "https://files.pythonhosted.org/packages/ad/8c/759176c97195306f028024f878e7f1c776bda66ccc5c68fa51e699cf8f1d/watchfiles-1.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b551d4fb482fc57d852b4541f911ba28957d051c8776e79c3b4a51eb5e2a1b11", size = 455008 },
{ url = "https://files.pythonhosted.org/packages/55/1a/5e977250c795ee79a0229e3b7f5e3a1b664e4e450756a22da84d2f4979fe/watchfiles-1.0.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:830aa432ba5c491d52a15b51526c29e4a4b92bf4f92253787f9726fe01519487", size = 459029 },
{ url = "https://files.pythonhosted.org/packages/e6/17/884cf039333605c1d6e296cf5be35fad0836953c3dfd2adb71b72f9dbcd0/watchfiles-1.0.5-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a16512051a822a416b0d477d5f8c0e67b67c1a20d9acecb0aafa3aa4d6e7d256", size = 488916 },
{ url = "https://files.pythonhosted.org/packages/ef/e0/bcb6e64b45837056c0a40f3a2db3ef51c2ced19fda38484fa7508e00632c/watchfiles-1.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfe0cbc787770e52a96c6fda6726ace75be7f840cb327e1b08d7d54eadc3bc85", size = 523763 },
{ url = "https://files.pythonhosted.org/packages/24/e9/f67e9199f3bb35c1837447ecf07e9830ec00ff5d35a61e08c2cd67217949/watchfiles-1.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d363152c5e16b29d66cbde8fa614f9e313e6f94a8204eaab268db52231fe5358", size = 502891 },
{ url = "https://files.pythonhosted.org/packages/23/ed/a6cf815f215632f5c8065e9c41fe872025ffea35aa1f80499f86eae922db/watchfiles-1.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ee32c9a9bee4d0b7bd7cbeb53cb185cf0b622ac761efaa2eba84006c3b3a614", size = 454921 },
{ url = "https://files.pythonhosted.org/packages/92/4c/e14978599b80cde8486ab5a77a821e8a982ae8e2fcb22af7b0886a033ec8/watchfiles-1.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:29c7fd632ccaf5517c16a5188e36f6612d6472ccf55382db6c7fe3fcccb7f59f", size = 631422 },
{ url = "https://files.pythonhosted.org/packages/b2/1a/9263e34c3458f7614b657f974f4ee61fd72f58adce8b436e16450e054efd/watchfiles-1.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e637810586e6fe380c8bc1b3910accd7f1d3a9a7262c8a78d4c8fb3ba6a2b3d", size = 625675 },
{ url = "https://files.pythonhosted.org/packages/96/1f/1803a18bd6ab04a0766386a19bcfe64641381a04939efdaa95f0e3b0eb58/watchfiles-1.0.5-cp310-cp310-win32.whl", hash = "sha256:cd47d063fbeabd4c6cae1d4bcaa38f0902f8dc5ed168072874ea11d0c7afc1ff", size = 277921 },
{ url = "https://files.pythonhosted.org/packages/c2/3b/29a89de074a7d6e8b4dc67c26e03d73313e4ecf0d6e97e942a65fa7c195e/watchfiles-1.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:86c0df05b47a79d80351cd179893f2f9c1b1cae49d96e8b3290c7f4bd0ca0a92", size = 291526 },
{ url = "https://files.pythonhosted.org/packages/39/f4/41b591f59021786ef517e1cdc3b510383551846703e03f204827854a96f8/watchfiles-1.0.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:237f9be419e977a0f8f6b2e7b0475ababe78ff1ab06822df95d914a945eac827", size = 405336 },
{ url = "https://files.pythonhosted.org/packages/ae/06/93789c135be4d6d0e4f63e96eea56dc54050b243eacc28439a26482b5235/watchfiles-1.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e0da39ff917af8b27a4bdc5a97ac577552a38aac0d260a859c1517ea3dc1a7c4", size = 395977 },
{ url = "https://files.pythonhosted.org/packages/d2/db/1cd89bd83728ca37054512d4d35ab69b5f12b8aa2ac9be3b0276b3bf06cc/watchfiles-1.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cfcb3952350e95603f232a7a15f6c5f86c5375e46f0bd4ae70d43e3e063c13d", size = 455232 },
{ url = "https://files.pythonhosted.org/packages/40/90/d8a4d44ffe960517e487c9c04f77b06b8abf05eb680bed71c82b5f2cad62/watchfiles-1.0.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:68b2dddba7a4e6151384e252a5632efcaa9bc5d1c4b567f3cb621306b2ca9f63", size = 459151 },
{ url = "https://files.pythonhosted.org/packages/6c/da/267a1546f26465dead1719caaba3ce660657f83c9d9c052ba98fb8856e13/watchfiles-1.0.5-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:95cf944fcfc394c5f9de794ce581914900f82ff1f855326f25ebcf24d5397418", size = 489054 },
{ url = "https://files.pythonhosted.org/packages/b1/31/33850dfd5c6efb6f27d2465cc4c6b27c5a6f5ed53c6fa63b7263cf5f60f6/watchfiles-1.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecf6cd9f83d7c023b1aba15d13f705ca7b7d38675c121f3cc4a6e25bd0857ee9", size = 523955 },
{ url = "https://files.pythonhosted.org/packages/09/84/b7d7b67856efb183a421f1416b44ca975cb2ea6c4544827955dfb01f7dc2/watchfiles-1.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:852de68acd6212cd6d33edf21e6f9e56e5d98c6add46f48244bd479d97c967c6", size = 502234 },
{ url = "https://files.pythonhosted.org/packages/71/87/6dc5ec6882a2254cfdd8b0718b684504e737273903b65d7338efaba08b52/watchfiles-1.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5730f3aa35e646103b53389d5bc77edfbf578ab6dab2e005142b5b80a35ef25", size = 454750 },
{ url = "https://files.pythonhosted.org/packages/3d/6c/3786c50213451a0ad15170d091570d4a6554976cf0df19878002fc96075a/watchfiles-1.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:18b3bd29954bc4abeeb4e9d9cf0b30227f0f206c86657674f544cb032296acd5", size = 631591 },
{ url = "https://files.pythonhosted.org/packages/1b/b3/1427425ade4e359a0deacce01a47a26024b2ccdb53098f9d64d497f6684c/watchfiles-1.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ba5552a1b07c8edbf197055bc9d518b8f0d98a1c6a73a293bc0726dce068ed01", size = 625370 },
{ url = "https://files.pythonhosted.org/packages/15/ba/f60e053b0b5b8145d682672024aa91370a29c5c921a88977eb565de34086/watchfiles-1.0.5-cp311-cp311-win32.whl", hash = "sha256:2f1fefb2e90e89959447bc0420fddd1e76f625784340d64a2f7d5983ef9ad246", size = 277791 },
{ url = "https://files.pythonhosted.org/packages/50/ed/7603c4e164225c12c0d4e8700b64bb00e01a6c4eeea372292a3856be33a4/watchfiles-1.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:b6e76ceb1dd18c8e29c73f47d41866972e891fc4cc7ba014f487def72c1cf096", size = 291622 },
{ url = "https://files.pythonhosted.org/packages/a2/c2/99bb7c96b4450e36877fde33690ded286ff555b5a5c1d925855d556968a1/watchfiles-1.0.5-cp311-cp311-win_arm64.whl", hash = "sha256:266710eb6fddc1f5e51843c70e3bebfb0f5e77cf4f27129278c70554104d19ed", size = 283699 },
{ url = "https://files.pythonhosted.org/packages/2a/8c/4f0b9bdb75a1bfbd9c78fad7d8854369283f74fe7cf03eb16be77054536d/watchfiles-1.0.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:b5eb568c2aa6018e26da9e6c86f3ec3fd958cee7f0311b35c2630fa4217d17f2", size = 401511 },
{ url = "https://files.pythonhosted.org/packages/dc/4e/7e15825def77f8bd359b6d3f379f0c9dac4eb09dd4ddd58fd7d14127179c/watchfiles-1.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0a04059f4923ce4e856b4b4e5e783a70f49d9663d22a4c3b3298165996d1377f", size = 392715 },
{ url = "https://files.pythonhosted.org/packages/58/65/b72fb817518728e08de5840d5d38571466c1b4a3f724d190cec909ee6f3f/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e380c89983ce6e6fe2dd1e1921b9952fb4e6da882931abd1824c092ed495dec", size = 454138 },
{ url = "https://files.pythonhosted.org/packages/3e/a4/86833fd2ea2e50ae28989f5950b5c3f91022d67092bfec08f8300d8b347b/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fe43139b2c0fdc4a14d4f8d5b5d967f7a2777fd3d38ecf5b1ec669b0d7e43c21", size = 458592 },
{ url = "https://files.pythonhosted.org/packages/38/7e/42cb8df8be9a37e50dd3a818816501cf7a20d635d76d6bd65aae3dbbff68/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee0822ce1b8a14fe5a066f93edd20aada932acfe348bede8aa2149f1a4489512", size = 487532 },
{ url = "https://files.pythonhosted.org/packages/fc/fd/13d26721c85d7f3df6169d8b495fcac8ab0dc8f0945ebea8845de4681dab/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a0dbcb1c2d8f2ab6e0a81c6699b236932bd264d4cef1ac475858d16c403de74d", size = 522865 },
{ url = "https://files.pythonhosted.org/packages/a1/0d/7f9ae243c04e96c5455d111e21b09087d0eeaf9a1369e13a01c7d3d82478/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a2014a2b18ad3ca53b1f6c23f8cd94a18ce930c1837bd891262c182640eb40a6", size = 499887 },
{ url = "https://files.pythonhosted.org/packages/8e/0f/a257766998e26aca4b3acf2ae97dff04b57071e991a510857d3799247c67/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10f6ae86d5cb647bf58f9f655fcf577f713915a5d69057a0371bc257e2553234", size = 454498 },
{ url = "https://files.pythonhosted.org/packages/81/79/8bf142575a03e0af9c3d5f8bcae911ee6683ae93a625d349d4ecf4c8f7df/watchfiles-1.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1a7bac2bde1d661fb31f4d4e8e539e178774b76db3c2c17c4bb3e960a5de07a2", size = 630663 },
{ url = "https://files.pythonhosted.org/packages/f1/80/abe2e79f610e45c63a70d271caea90c49bbf93eb00fa947fa9b803a1d51f/watchfiles-1.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ab626da2fc1ac277bbf752446470b367f84b50295264d2d313e28dc4405d663", size = 625410 },
{ url = "https://files.pythonhosted.org/packages/91/6f/bc7fbecb84a41a9069c2c6eb6319f7f7df113adf113e358c57fc1aff7ff5/watchfiles-1.0.5-cp312-cp312-win32.whl", hash = "sha256:9f4571a783914feda92018ef3901dab8caf5b029325b5fe4558c074582815249", size = 277965 },
{ url = "https://files.pythonhosted.org/packages/99/a5/bf1c297ea6649ec59e935ab311f63d8af5faa8f0b86993e3282b984263e3/watchfiles-1.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:360a398c3a19672cf93527f7e8d8b60d8275119c5d900f2e184d32483117a705", size = 291693 },
{ url = "https://files.pythonhosted.org/packages/7f/7b/fd01087cc21db5c47e5beae507b87965db341cce8a86f9eb12bf5219d4e0/watchfiles-1.0.5-cp312-cp312-win_arm64.whl", hash = "sha256:1a2902ede862969077b97523987c38db28abbe09fb19866e711485d9fbf0d417", size = 283287 },
{ url = "https://files.pythonhosted.org/packages/c7/62/435766874b704f39b2fecd8395a29042db2b5ec4005bd34523415e9bd2e0/watchfiles-1.0.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0b289572c33a0deae62daa57e44a25b99b783e5f7aed81b314232b3d3c81a11d", size = 401531 },
{ url = "https://files.pythonhosted.org/packages/6e/a6/e52a02c05411b9cb02823e6797ef9bbba0bfaf1bb627da1634d44d8af833/watchfiles-1.0.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a056c2f692d65bf1e99c41045e3bdcaea3cb9e6b5a53dcaf60a5f3bd95fc9763", size = 392417 },
{ url = "https://files.pythonhosted.org/packages/3f/53/c4af6819770455932144e0109d4854437769672d7ad897e76e8e1673435d/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9dca99744991fc9850d18015c4f0438865414e50069670f5f7eee08340d8b40", size = 453423 },
{ url = "https://files.pythonhosted.org/packages/cb/d1/8e88df58bbbf819b8bc5cfbacd3c79e01b40261cad0fc84d1e1ebd778a07/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:894342d61d355446d02cd3988a7326af344143eb33a2fd5d38482a92072d9563", size = 458185 },
{ url = "https://files.pythonhosted.org/packages/ff/70/fffaa11962dd5429e47e478a18736d4e42bec42404f5ee3b92ef1b87ad60/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ab44e1580924d1ffd7b3938e02716d5ad190441965138b4aa1d1f31ea0877f04", size = 486696 },
{ url = "https://files.pythonhosted.org/packages/39/db/723c0328e8b3692d53eb273797d9a08be6ffb1d16f1c0ba2bdbdc2a3852c/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d6f9367b132078b2ceb8d066ff6c93a970a18c3029cea37bfd7b2d3dd2e5db8f", size = 522327 },
{ url = "https://files.pythonhosted.org/packages/cd/05/9fccc43c50c39a76b68343484b9da7b12d42d0859c37c61aec018c967a32/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2e55a9b162e06e3f862fb61e399fe9f05d908d019d87bf5b496a04ef18a970a", size = 499741 },
{ url = "https://files.pythonhosted.org/packages/23/14/499e90c37fa518976782b10a18b18db9f55ea73ca14641615056f8194bb3/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0125f91f70e0732a9f8ee01e49515c35d38ba48db507a50c5bdcad9503af5827", size = 453995 },
{ url = "https://files.pythonhosted.org/packages/61/d9/f75d6840059320df5adecd2c687fbc18960a7f97b55c300d20f207d48aef/watchfiles-1.0.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:13bb21f8ba3248386337c9fa51c528868e6c34a707f729ab041c846d52a0c69a", size = 629693 },
{ url = "https://files.pythonhosted.org/packages/fc/17/180ca383f5061b61406477218c55d66ec118e6c0c51f02d8142895fcf0a9/watchfiles-1.0.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:839ebd0df4a18c5b3c1b890145b5a3f5f64063c2a0d02b13c76d78fe5de34936", size = 624677 },
{ url = "https://files.pythonhosted.org/packages/bf/15/714d6ef307f803f236d69ee9d421763707899d6298d9f3183e55e366d9af/watchfiles-1.0.5-cp313-cp313-win32.whl", hash = "sha256:4a8ec1e4e16e2d5bafc9ba82f7aaecfeec990ca7cd27e84fb6f191804ed2fcfc", size = 277804 },
{ url = "https://files.pythonhosted.org/packages/a8/b4/c57b99518fadf431f3ef47a610839e46e5f8abf9814f969859d1c65c02c7/watchfiles-1.0.5-cp313-cp313-win_amd64.whl", hash = "sha256:f436601594f15bf406518af922a89dcaab416568edb6f65c4e5bbbad1ea45c11", size = 291087 },
{ url = "https://files.pythonhosted.org/packages/1a/03/81f9fcc3963b3fc415cd4b0b2b39ee8cc136c42fb10a36acf38745e9d283/watchfiles-1.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f59b870db1f1ae5a9ac28245707d955c8721dd6565e7f411024fa374b5362d1d", size = 405947 },
{ url = "https://files.pythonhosted.org/packages/54/97/8c4213a852feb64807ec1d380f42d4fc8bfaef896bdbd94318f8fd7f3e4e/watchfiles-1.0.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9475b0093767e1475095f2aeb1d219fb9664081d403d1dff81342df8cd707034", size = 397276 },
{ url = "https://files.pythonhosted.org/packages/78/12/d4464d19860cb9672efa45eec1b08f8472c478ed67dcd30647c51ada7aef/watchfiles-1.0.5-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc533aa50664ebd6c628b2f30591956519462f5d27f951ed03d6c82b2dfd9965", size = 455550 },
{ url = "https://files.pythonhosted.org/packages/90/fb/b07bcdf1034d8edeaef4c22f3e9e3157d37c5071b5f9492ffdfa4ad4bed7/watchfiles-1.0.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed1cd825158dcaae36acce7b2db33dcbfd12b30c34317a88b8ed80f0541cc57", size = 455542 },
]

[[package]]
name = "websockets"
version = "15.0.1"