main.py 25 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743
  1. #!/usr/bin/env -S uv --quiet run --script
  2. # /// script
  3. # requires-python = ">=3.12"
  4. # dependencies = [
  5. # "bs4",
  6. # "httpx",
  7. # "pydantic",
  8. # "python-dateutil",
  9. # "python-frontmatter",
  10. # "python-slugify",
  11. # "pytz",
  12. # "rich",
  13. # "typer",
  14. # "markdown-it-py",
  15. # "sqlmodel",
  16. # ]
  17. # ///
  18. import json
  19. import os
  20. import re
  21. from datetime import datetime
  22. from pathlib import Path
  23. from typing import Any
  24. from typing import Optional
  25. from urllib.parse import urlparse
  26. import frontmatter
  27. import httpx
  28. import typer
  29. from bs4 import BeautifulSoup
  30. from bs4 import Tag
  31. from markdown_it import MarkdownIt
  32. from pydantic import BaseModel
  33. from pydantic import ConfigDict
  34. from pydantic import Field
  35. from rich import print
  36. from rich.console import Console
  37. from rich.progress import track
  38. from rich.table import Table
  39. from slugify import slugify
  40. from sqlmodel import Field as SQLField
  41. from sqlmodel import Session
  42. from sqlmodel import SQLModel
  43. from sqlmodel import create_engine
  44. from sqlmodel import select
# Typer CLI application; subcommands below are registered via @app.command().
app = typer.Typer(
    add_help_option=False,
    no_args_is_help=True,  # print help instead of erroring when run bare
    rich_markup_mode="rich",
)
class Project(BaseModel):
    """Model representing a Django project from the awesome list."""

    # Keep unknown frontmatter keys instead of rejecting them on load.
    model_config = ConfigDict(extra="allow")

    name: str
    description: str
    url: str
    category: str
    slug: str = Field(default="")
    tags: list[str] = Field(default_factory=list)
    github_stars: int | None = None
    github_forks: int | None = None
    github_last_update: str | None = None
    github_last_commit: str | None = None
    previous_urls: list[str] = Field(default_factory=list)

    def __init__(self, **data):
        """Construct the model, deriving ``slug`` from ``name`` when absent."""
        super().__init__(**data)
        if not self.slug:
            # Filenames and DB keys are based on the slug, so it must be set.
            self.slug = slugify(self.name)
  68. # SQLModel database model
class ProjectDB(SQLModel, table=True):
    """SQLModel for storing projects in SQLite database.

    List-valued fields (``tags``, ``previous_urls``) are stored as JSON
    strings because SQLite has no native list type.
    """

    __tablename__ = "projects"

    id: Optional[int] = SQLField(default=None, primary_key=True)
    name: str = SQLField(index=True)
    description: str
    url: str = SQLField(unique=True)
    category: str = SQLField(index=True)
    slug: str = SQLField(unique=True, index=True)
    tags: str = SQLField(default="[]")  # JSON string
    github_stars: Optional[int] = SQLField(default=None, index=True)
    github_forks: Optional[int] = SQLField(default=None)
    github_last_update: Optional[str] = SQLField(default=None)
    github_last_commit: Optional[str] = SQLField(default=None, index=True)
    previous_urls: str = SQLField(default="[]")  # JSON string

    @classmethod
    def from_project(cls, project: Project) -> "ProjectDB":
        """Convert a Project to ProjectDB."""
        # Lists are serialized to JSON strings for storage.
        return cls(
            name=project.name,
            description=project.description,
            url=project.url,
            category=project.category,
            slug=project.slug,
            tags=json.dumps(project.tags),
            github_stars=project.github_stars,
            github_forks=project.github_forks,
            github_last_update=project.github_last_update,
            github_last_commit=project.github_last_commit,
            previous_urls=json.dumps(project.previous_urls),
        )

    def to_project(self) -> Project:
        """Convert ProjectDB back to Project."""
        # Inverse of from_project: JSON strings are decoded back to lists.
        return Project(
            name=self.name,
            description=self.description,
            url=self.url,
            category=self.category,
            slug=self.slug,
            tags=json.loads(self.tags),
            github_stars=self.github_stars,
            github_forks=self.github_forks,
            github_last_update=self.github_last_update,
            github_last_commit=self.github_last_commit,
            previous_urls=json.loads(self.previous_urls),
        )
# Database configuration
DATABASE_PATH = Path("projects.db")  # SQLite file in the current working directory
DATABASE_URL = f"sqlite:///{DATABASE_PATH}"
console = Console()  # shared Rich console for table output
  119. def get_engine():
  120. """Get SQLModel engine."""
  121. return create_engine(DATABASE_URL, echo=False)
  122. def init_db():
  123. """Initialize the database and create tables."""
  124. engine = get_engine()
  125. SQLModel.metadata.create_all(engine)
  126. return engine
def parse_project_line(line: Tag, category: str) -> Project | None:
    """Parse a project line from the markdown and return a Project object.

    Returns None when the <li> has no anchor or any of name/url/description
    is empty after cleanup; parse errors are reported but never raised.
    """
    try:
        # Find the project link
        link = line.find("a")
        if not link:
            return None
        name = link.text.strip()
        url = link.get("href", "").strip()
        # Get description (text after the link); removing the name from the
        # full text leaves the trailing "- description" part.
        description = line.text.replace(name, "").strip()
        description = re.sub(r"^\s*-\s*", "", description)  # Remove leading dash
        description = re.sub(r"^\s*", "", description)  # Remove leading whitespace
        if not all([name, url, description]):
            return None
        return Project(name=name, description=description, url=url, category=category)
    except Exception as e:
        # Best-effort parsing: log and skip the malformed entry.
        print(f"[red]Error parsing project line: {e}[/red]")
        return None
  146. def read_readme(file_path: Path) -> str:
  147. """Read README content from local file and convert to HTML."""
  148. markdown_content = file_path.read_text()
  149. md = MarkdownIt()
  150. html_content = md.render(markdown_content)
  151. return html_content
  152. def parse_readme(content: str) -> list[Project]:
  153. """Parse README content and extract projects."""
  154. soup = BeautifulSoup(content, "html.parser")
  155. projects = []
  156. current_category = ""
  157. for element in soup.find_all(["h2", "h3", "li"]):
  158. if element.name in ["h2", "h3"]:
  159. current_category = element.text.strip()
  160. elif element.name == "li" and current_category:
  161. if current_category == "Contents":
  162. continue
  163. project = parse_project_line(element, current_category)
  164. if project:
  165. projects.append(project)
  166. return projects
  167. def merge_project_data(existing: dict[str, Any], new: dict[str, Any]) -> dict[str, Any]:
  168. """
  169. Merge existing project data with new data, preserving existing values
  170. while updating with new information where appropriate.
  171. """
  172. # Start with the existing data
  173. merged = existing.copy()
  174. # Always update core fields from the README
  175. core_fields = {"name", "url", "category"}
  176. for field in core_fields:
  177. if field in new:
  178. # If URL is changing, store the old URL in previous_urls
  179. if field == "url" and new["url"] != existing.get("url"):
  180. previous_urls = merged.get("previous_urls", [])
  181. old_url = existing.get("url")
  182. if old_url and old_url not in previous_urls:
  183. previous_urls.append(old_url)
  184. merged["previous_urls"] = previous_urls
  185. merged[field] = new[field]
  186. # Smart merge for description - update only if meaningfully different
  187. if "description" in new and new["description"] != existing.get("description", ""):
  188. merged["description"] = new["description"]
  189. # Update GitHub metrics if they exist in new data
  190. github_fields = {"github_stars", "github_forks", "github_last_update", "github_last_commit"}
  191. for field in github_fields:
  192. if field in new and new[field] is not None:
  193. merged[field] = new[field]
  194. return merged
def save_project(project: Project, output_dir: Path):
    """Save project as a markdown file with frontmatter, preserving and merging existing content.

    If ``<slug>.md`` already exists its body text is kept and its metadata is
    merged with the new data; otherwise a fresh file is created whose body is
    the project description.
    """
    output_file = output_dir / f"{project.slug}.md"
    # exclude_none keeps unset GitHub metrics out of the frontmatter.
    project_data = project.model_dump(exclude_none=True)
    if output_file.exists():
        try:
            # Load existing file
            existing_post = frontmatter.load(output_file)
            existing_data = dict(existing_post.metadata)
            # Merge data, favoring preservation of existing content
            merged_data = merge_project_data(existing_data, project_data)
            # Create new post with merged data but keep existing content
            post = frontmatter.Post(existing_post.content, **merged_data)
        except Exception as e:
            # Corrupt/unreadable file: warn and fall back to a fresh post.
            print(
                f"[yellow]Warning: Could not load existing file {output_file}, creating new: {e}[/yellow]"
            )
            post = frontmatter.Post(project.description, **project_data)
    else:
        # Create new file
        post = frontmatter.Post(project.description, **project_data)
    output_file.write_text(frontmatter.dumps(post))
  217. def extract_github_info(url: str) -> dict[str, str] | None:
  218. """Extract owner and repo from a GitHub URL."""
  219. parsed = urlparse(url)
  220. if parsed.netloc != "github.com":
  221. return None
  222. parts = parsed.path.strip("/").split("/")
  223. if len(parts) >= 2:
  224. return {"owner": parts[0], "repo": parts[1]}
  225. return None
def get_github_metrics(
    owner: str, repo: str, client: httpx.Client
) -> tuple[dict, str | None]:
    """
    Fetch GitHub metrics for a repository.
    Returns a tuple of (metrics_dict, new_url) where new_url is set if the repo has moved.

    On any HTTP error for the repo endpoint the result is ``({}, None)``;
    a failure fetching commits only omits ``github_last_commit``.
    """
    headers = {}
    # Authenticated requests get a much higher API rate limit.
    if github_token := os.environ.get("GITHUB_TOKEN"):
        headers["Authorization"] = f"token {github_token}"
    api_url = f"https://api.github.com/repos/{owner}/{repo}"
    try:
        response = client.get(
            api_url,
            headers=headers,
            timeout=10.0,
            follow_redirects=True,  # Enable following redirects
        )
        # Check if we followed a redirect (a 301 means the repo was renamed
        # or transferred; the final JSON body carries the new html_url).
        new_url = None
        if len(response.history) > 0:
            for r in response.history:
                if r.status_code == 301:
                    # Get the new location from the API response
                    data = response.json()
                    new_url = data.get("html_url")
                    if new_url:
                        print(
                            f"[yellow]Repository moved: {owner}/{repo} -> {new_url}[/yellow]"
                        )
                    break
        response.raise_for_status()
        data = response.json()
        metrics = {
            "github_stars": data["stargazers_count"],
            "github_forks": data["forks_count"],
            "github_last_update": data["updated_at"],
        }
        # Fetch last commit date (per_page=1 returns only the newest commit).
        commits_url = f"https://api.github.com/repos/{owner}/{repo}/commits"
        try:
            commits_response = client.get(
                commits_url,
                headers=headers,
                params={"per_page": 1},
                timeout=10.0,
                follow_redirects=True,
            )
            commits_response.raise_for_status()
            commits_data = commits_response.json()
            if commits_data and len(commits_data) > 0:
                metrics["github_last_commit"] = commits_data[0]["commit"]["committer"]["date"]
        except httpx.HTTPError as e:
            # Non-fatal: metrics are still returned without the commit date.
            print(f"[yellow]Warning: Could not fetch commits for {owner}/{repo}: {str(e)}[/yellow]")
        return metrics, new_url
    except httpx.HTTPError as e:
        print(f"[red]Error fetching GitHub metrics for {owner}/{repo}: {str(e)}[/red]")
        return {}, None
  284. def load_project(file_path: Path) -> Project | None:
  285. """Load a project from a markdown file."""
  286. try:
  287. post = frontmatter.load(file_path)
  288. return Project(**post.metadata)
  289. except Exception as e:
  290. print(f"[red]Error loading project from {file_path}: {str(e)}[/red]")
  291. return None
  292. @app.command()
  293. def parse(readme_path: Path = Path("README.md"), output_dir: str = "_projects"):
  294. """
  295. Parse local Awesome Django README and create individual project files with frontmatter.
  296. Preserves existing file content and metadata while updating with new information from README.
  297. """
  298. if not readme_path.exists():
  299. print(f"[red]Error: README file not found at {readme_path}[/red]")
  300. raise typer.Exit(1)
  301. print(f"[bold blue]Reading README from {readme_path}...[/bold blue]")
  302. # Create output directory
  303. output_path = Path(output_dir)
  304. output_path.mkdir(exist_ok=True)
  305. # Read and parse README
  306. content = read_readme(readme_path)
  307. projects = parse_readme(content)
  308. print(f"[green]Found {len(projects)} projects[/green]")
  309. # Save individual project files
  310. for project in projects:
  311. save_project(project, output_path)
  312. print(f"[green]Updated {project.name} in {project.slug}.md[/green]")
@app.command()
def update_metrics(projects_dir: Path = Path("_projects"), batch_size: int = 50):
    """
    Update GitHub metrics (stars, forks, last update) for all projects.

    Only projects whose URL points at github.com are queried; each project
    file is rewritten after its metrics (and possibly URL) are refreshed.
    """
    if not projects_dir.exists():
        print(f"[red]Error: Projects directory not found at {projects_dir}[/red]")
        raise typer.Exit(1)
    print(
        f"[bold blue]Updating GitHub metrics for projects in {projects_dir}...[/bold blue]"
    )
    # Load all projects
    project_files = list(projects_dir.glob("*.md"))
    projects = []
    for file in project_files:
        if project := load_project(file):
            projects.append((file, project))
    print(f"[green]Found {len(projects)} projects to update[/green]")
    # Update metrics in batches to avoid rate limiting
    with httpx.Client() as client:
        for i in track(
            range(0, len(projects), batch_size), description="Updating projects"
        ):
            batch = projects[i : i + batch_size]
            for file_path, project in batch:
                if github_info := extract_github_info(project.url):
                    metrics, new_url = get_github_metrics(
                        github_info["owner"], github_info["repo"], client
                    )
                    if metrics:
                        # Update project with new metrics
                        for key, value in metrics.items():
                            setattr(project, key, value)
                    # Update URL if repository has moved
                    if new_url and new_url != project.url:
                        # Store the old URL in previous_urls
                        # NOTE(review): pydantic gives Project a previous_urls
                        # default, so this hasattr guard looks redundant — confirm.
                        if not hasattr(project, "previous_urls"):
                            project.previous_urls = []
                        project.previous_urls.append(project.url)
                        # Update to new URL
                        project.url = new_url
                        print(
                            f"[yellow]Updated URL for {project.name}: {project.url}[/yellow]"
                        )
                    save_project(project, projects_dir)
                    print(f"[green]Updated metrics for {project.name}[/green]")
    print("[bold blue]Finished updating GitHub metrics![/bold blue]")
  360. @app.command()
  361. def sync_db(projects_dir: Path = Path("_projects")):
  362. """
  363. Sync projects from markdown files to SQLite database.
  364. """
  365. if not projects_dir.exists():
  366. print(f"[red]Error: Projects directory not found at {projects_dir}[/red]")
  367. raise typer.Exit(1)
  368. print(f"[bold blue]Syncing projects to {DATABASE_PATH}...[/bold blue]")
  369. engine = init_db()
  370. # Load all projects from markdown files
  371. project_files = list(projects_dir.glob("*.md"))
  372. projects_loaded = 0
  373. with Session(engine) as session:
  374. # Clear existing data
  375. session.exec(select(ProjectDB)).all()
  376. for existing in session.exec(select(ProjectDB)).all():
  377. session.delete(existing)
  378. session.commit()
  379. # Load new data
  380. for file in track(project_files, description="Loading projects"):
  381. if project := load_project(file):
  382. db_project = ProjectDB.from_project(project)
  383. session.add(db_project)
  384. projects_loaded += 1
  385. session.commit()
  386. print(f"[green]Synced {projects_loaded} projects to {DATABASE_PATH}[/green]")
  387. @app.command()
  388. def query(
  389. category: Optional[str] = typer.Option(None, "--category", "-c", help="Filter by category"),
  390. min_stars: Optional[int] = typer.Option(None, "--min-stars", "-s", help="Minimum GitHub stars"),
  391. limit: int = typer.Option(20, "--limit", "-l", help="Maximum results to show"),
  392. sort_by: str = typer.Option("stars", "--sort", help="Sort by: stars, name, commits"),
  393. ):
  394. """
  395. Query projects from the database with filters.
  396. """
  397. if not DATABASE_PATH.exists():
  398. print("[red]Database not found. Run 'sync-db' first.[/red]")
  399. raise typer.Exit(1)
  400. engine = get_engine()
  401. with Session(engine) as session:
  402. statement = select(ProjectDB)
  403. if category:
  404. statement = statement.where(ProjectDB.category == category)
  405. if min_stars:
  406. statement = statement.where(ProjectDB.github_stars >= min_stars)
  407. # Sorting
  408. if sort_by == "stars":
  409. statement = statement.order_by(ProjectDB.github_stars.desc())
  410. elif sort_by == "name":
  411. statement = statement.order_by(ProjectDB.name)
  412. elif sort_by == "commits":
  413. statement = statement.order_by(ProjectDB.github_last_commit.desc())
  414. statement = statement.limit(limit)
  415. results = session.exec(statement).all()
  416. if not results:
  417. print("[yellow]No projects found matching criteria.[/yellow]")
  418. return
  419. table = Table(title=f"Projects ({len(results)} results)")
  420. table.add_column("Name", style="cyan", no_wrap=True)
  421. table.add_column("Category", style="green")
  422. table.add_column("Stars", justify="right", style="yellow")
  423. table.add_column("Last Commit", style="magenta")
  424. for p in results:
  425. stars = str(p.github_stars) if p.github_stars else "-"
  426. last_commit = p.github_last_commit[:10] if p.github_last_commit else "-"
  427. table.add_row(p.name, p.category, stars, last_commit)
  428. console.print(table)
  429. @app.command()
  430. def top(
  431. limit: int = typer.Option(20, "--limit", "-l", help="Number of projects to show"),
  432. category: Optional[str] = typer.Option(None, "--category", "-c", help="Filter by category"),
  433. ):
  434. """
  435. Show top projects by GitHub stars.
  436. """
  437. if not DATABASE_PATH.exists():
  438. print("[red]Database not found. Run 'sync-db' first.[/red]")
  439. raise typer.Exit(1)
  440. engine = get_engine()
  441. with Session(engine) as session:
  442. statement = select(ProjectDB).where(ProjectDB.github_stars.isnot(None))
  443. if category:
  444. statement = statement.where(ProjectDB.category == category)
  445. statement = statement.order_by(ProjectDB.github_stars.desc()).limit(limit)
  446. results = session.exec(statement).all()
  447. table = Table(title=f"Top {len(results)} Projects by Stars")
  448. table.add_column("#", justify="right", style="dim")
  449. table.add_column("Name", style="cyan", no_wrap=True)
  450. table.add_column("Category", style="green")
  451. table.add_column("Stars", justify="right", style="yellow")
  452. table.add_column("Forks", justify="right", style="blue")
  453. table.add_column("URL", style="dim")
  454. for i, p in enumerate(results, 1):
  455. table.add_row(
  456. str(i),
  457. p.name,
  458. p.category,
  459. f"{p.github_stars:,}",
  460. str(p.github_forks or "-"),
  461. p.url[:50] + "..." if len(p.url) > 50 else p.url,
  462. )
  463. console.print(table)
  464. @app.command()
  465. def categories():
  466. """
  467. List all categories with project counts.
  468. """
  469. if not DATABASE_PATH.exists():
  470. print("[red]Database not found. Run 'sync-db' first.[/red]")
  471. raise typer.Exit(1)
  472. engine = get_engine()
  473. with Session(engine) as session:
  474. results = session.exec(select(ProjectDB)).all()
  475. # Count by category
  476. category_counts: dict[str, int] = {}
  477. category_stars: dict[str, int] = {}
  478. for p in results:
  479. category_counts[p.category] = category_counts.get(p.category, 0) + 1
  480. category_stars[p.category] = category_stars.get(p.category, 0) + (p.github_stars or 0)
  481. # Sort by count
  482. sorted_categories = sorted(category_counts.items(), key=lambda x: x[1], reverse=True)
  483. table = Table(title="Categories")
  484. table.add_column("Category", style="cyan")
  485. table.add_column("Projects", justify="right", style="green")
  486. table.add_column("Total Stars", justify="right", style="yellow")
  487. for cat, count in sorted_categories:
  488. table.add_row(cat, str(count), f"{category_stars[cat]:,}")
  489. console.print(table)
  490. print(f"\n[bold]Total: {len(sorted_categories)} categories, {len(results)} projects[/bold]")
  491. @app.command()
  492. def search(
  493. query: str = typer.Argument(..., help="Search term"),
  494. limit: int = typer.Option(20, "--limit", "-l", help="Maximum results"),
  495. ):
  496. """
  497. Search projects by name or description.
  498. """
  499. if not DATABASE_PATH.exists():
  500. print("[red]Database not found. Run 'sync-db' first.[/red]")
  501. raise typer.Exit(1)
  502. engine = get_engine()
  503. query_lower = query.lower()
  504. with Session(engine) as session:
  505. results = session.exec(select(ProjectDB)).all()
  506. # Filter by search term
  507. matches = [
  508. p for p in results
  509. if query_lower in p.name.lower() or query_lower in p.description.lower()
  510. ]
  511. # Sort by stars
  512. matches.sort(key=lambda x: x.github_stars or 0, reverse=True)
  513. matches = matches[:limit]
  514. if not matches:
  515. print(f"[yellow]No projects found matching '{query}'[/yellow]")
  516. return
  517. table = Table(title=f"Search results for '{query}' ({len(matches)} matches)")
  518. table.add_column("Name", style="cyan", no_wrap=True)
  519. table.add_column("Category", style="green")
  520. table.add_column("Stars", justify="right", style="yellow")
  521. table.add_column("Description", style="dim", max_width=50)
  522. for p in matches:
  523. stars = str(p.github_stars) if p.github_stars else "-"
  524. desc = p.description[:50] + "..." if len(p.description) > 50 else p.description
  525. table.add_row(p.name, p.category, stars, desc)
  526. console.print(table)
  527. @app.command()
  528. def stale(
  529. days: int = typer.Option(365, "--days", "-d", help="Days since last commit to consider stale"),
  530. limit: int = typer.Option(30, "--limit", "-l", help="Maximum results"),
  531. ):
  532. """
  533. Find stale/unmaintained projects (no commits in X days).
  534. """
  535. if not DATABASE_PATH.exists():
  536. print("[red]Database not found. Run 'sync-db' first.[/red]")
  537. raise typer.Exit(1)
  538. engine = get_engine()
  539. cutoff = datetime.now().replace(tzinfo=None)
  540. with Session(engine) as session:
  541. results = session.exec(
  542. select(ProjectDB).where(ProjectDB.github_last_commit.isnot(None))
  543. ).all()
  544. # Filter stale projects
  545. stale_projects = []
  546. for p in results:
  547. try:
  548. last_commit = datetime.fromisoformat(p.github_last_commit.replace("Z", "+00:00"))
  549. last_commit = last_commit.replace(tzinfo=None)
  550. days_since = (cutoff - last_commit).days
  551. if days_since >= days:
  552. stale_projects.append((p, days_since))
  553. except (ValueError, AttributeError):
  554. continue
  555. # Sort by oldest first
  556. stale_projects.sort(key=lambda x: x[1], reverse=True)
  557. stale_projects = stale_projects[:limit]
  558. if not stale_projects:
  559. print(f"[green]No stale projects found (>{days} days without commits)[/green]")
  560. return
  561. table = Table(title=f"Stale Projects (no commits in {days}+ days)")
  562. table.add_column("Name", style="cyan", no_wrap=True)
  563. table.add_column("Category", style="green")
  564. table.add_column("Stars", justify="right", style="yellow")
  565. table.add_column("Last Commit", style="red")
  566. table.add_column("Days Ago", justify="right", style="red")
  567. for p, days_ago in stale_projects:
  568. stars = str(p.github_stars) if p.github_stars else "-"
  569. last_commit = p.github_last_commit[:10] if p.github_last_commit else "-"
  570. table.add_row(p.name, p.category, stars, last_commit, str(days_ago))
  571. console.print(table)
  572. print(f"\n[bold red]Found {len(stale_projects)} stale projects[/bold red]")
  573. @app.command()
  574. def stats():
  575. """
  576. Show database statistics.
  577. """
  578. if not DATABASE_PATH.exists():
  579. print("[red]Database not found. Run 'sync-db' first.[/red]")
  580. raise typer.Exit(1)
  581. engine = get_engine()
  582. with Session(engine) as session:
  583. all_projects = session.exec(select(ProjectDB)).all()
  584. github_projects = [p for p in all_projects if p.github_stars is not None]
  585. total_stars = sum(p.github_stars or 0 for p in all_projects)
  586. categories = set(p.category for p in all_projects)
  587. print("\n[bold blue]Database Statistics[/bold blue]")
  588. print(f" Total projects: [green]{len(all_projects)}[/green]")
  589. print(f" GitHub projects: [green]{len(github_projects)}[/green]")
  590. print(f" Categories: [green]{len(categories)}[/green]")
  591. print(f" Total stars: [yellow]{total_stars:,}[/yellow]")
  592. if github_projects:
  593. avg_stars = total_stars / len(github_projects)
  594. max_stars_project = max(github_projects, key=lambda x: x.github_stars or 0)
  595. print(f" Average stars: [yellow]{avg_stars:.0f}[/yellow]")
  596. print(f" Most starred: [cyan]{max_stars_project.name}[/cyan] ({max_stars_project.github_stars:,} stars)")
if __name__ == "__main__":
    # Dispatch to the Typer CLI when executed as a script.
    app()