Spaces:
Runtime error
Runtime error
import html
import json
from datetime import datetime, timezone

import gradio as gr
import requests
# Fetch data from the API
API_URL = "https://huggingface.co/api/daily_papers"
# Global variables for pagination
current_page = 1      # 1-based index of the page currently shown in the UI
papers_per_page = 10  # number of paper rows rendered per page
def fetch_papers(page=1):
    """Fetch all daily-paper records from the HF API, starting at *page*.

    Follows pagination by incrementing the ``page`` query parameter until
    the API returns an empty list. Network failures or non-200 responses
    stop the crawl; whatever was collected so far is returned.

    Returns:
        list: concatenated JSON records from every fetched page
        (empty on immediate failure).
    """
    all_papers = []
    while True:
        try:
            # Explicit timeout: without one a stalled connection would
            # hang the whole app at import time.
            response = requests.get(f"{API_URL}?page={page}", timeout=10)
        except requests.RequestException as exc:
            # Connection errors / timeouts: degrade to partial results
            # instead of crashing module import.
            print(f"Failed to fetch data: {exc}")
            break
        if response.status_code != 200:
            print(f"Failed to fetch data: {response.status_code}")
            break
        data = response.json()
        if not data:
            # Empty page marks the end of pagination.
            break
        all_papers.extend(data)
        page += 1
    return all_papers
# Initial fetch happens at import time so the UI has data on first render
# (NOTE(review): this blocks startup on the network — confirm acceptable).
papers = fetch_papers()
# Ceiling division: pages needed to show all papers at papers_per_page each.
total_pages = (len(papers) + papers_per_page - 1) // papers_per_page
# Debug aid: dump the first record's schema at startup.
print("API Response structure:", json.dumps(papers[0] if papers else {}, indent=2))
def format_paper(paper):
    """Render one paper record as an HTML row (HN-style list entry).

    Expects the HF daily_papers response shape: top-level keys
    'title', 'publishedAt', 'numComments', plus a nested 'paper' dict
    carrying 'id', 'authors' and 'upvotes'.

    Args:
        paper (dict): one record from the daily_papers API.

    Returns:
        str: an HTML fragment for the paper list.
    """
    # Access the nested record defensively: the original mixed
    # paper['paper'] (KeyError-prone) with paper.get('paper', {}).
    inner = paper.get('paper', {})
    # Escape API-supplied strings before embedding them in HTML (XSS).
    title = html.escape(paper.get('title', 'No title'))
    url = f"https://huggingface.co/papers/{inner.get('id', '')}"
    authors = html.escape(
        ', '.join(author.get('name', '') for author in inner.get('authors', []))
    )
    upvotes = inner.get('upvotes', 0)
    comments = paper.get('numComments', 0)
    # API timestamps may end in 'Z', which fromisoformat() (pre-3.11)
    # rejects — normalize to an explicit UTC offset first.
    raw_ts = paper.get('publishedAt', datetime.now(timezone.utc).isoformat())
    published_time = datetime.fromisoformat(raw_ts.replace('Z', '+00:00'))
    time_ago = (datetime.now(timezone.utc) - published_time).days
    return f"""<div style='border-bottom: 1px solid #eee; padding: 10px 0;'>
<a href='{url}' target='_blank' style='color: #000; text-decoration: none; font-weight: bold;'>{title}</a>
<div style='font-size: 0.8em; color: #666; margin-top: 5px;'>
{upvotes} upvotes | by {authors} | {time_ago} days ago | {comments} comments
</div>
</div>"""
def sort_papers_by_upvotes(papers):
    """Return *papers* ordered by nested upvote count, highest first."""
    def upvote_count(entry):
        # Upvotes live under the nested 'paper' record; default to 0.
        return entry.get('paper', {}).get('upvotes', 0)

    return sorted(papers, key=upvote_count, reverse=True)
def render_papers():
    """Render the current page of papers, sorted by upvotes, as HTML."""
    ranked = sort_papers_by_upvotes(papers)
    offset = (current_page - 1) * papers_per_page
    page_slice = ranked[offset:offset + papers_per_page]
    if not page_slice:
        return "<div>No more papers available.</div>"
    return "".join(format_paper(entry) for entry in page_slice)
def search_papers(query):
    """Show papers whose title contains *query* (case-insensitive).

    An empty query re-fetches the full list and restores the normal
    paginated view.
    """
    global papers
    if not query:
        papers = fetch_papers()
        return render_papers()
    needle = query.lower()
    matches = [entry for entry in papers
               if needle in entry.get('title', '').lower()]
    top_matches = sort_papers_by_upvotes(matches)[:papers_per_page]
    return "".join(format_paper(entry) for entry in top_matches)
def refresh_papers():
    """Re-fetch all papers, recompute the page count, and re-render."""
    global papers, total_pages
    papers = fetch_papers()
    # Ceiling division via negated floor division.
    total_pages = -(-len(papers) // papers_per_page)
    return render_papers()
def next_page():
    """Advance one page (no-op on the last page) and re-render."""
    global current_page
    on_last_page = current_page >= total_pages
    if not on_last_page:
        current_page += 1
    return render_papers(), f"Page {current_page} of {total_pages}"
def prev_page():
    """Step back one page (no-op on the first page) and re-render."""
    global current_page
    past_first_page = current_page > 1
    if past_first_page:
        current_page -= 1
    return render_papers(), f"Page {current_page} of {total_pages}"
# Page-level CSS injected into the Gradio Blocks app; the class names
# below are attached via elem_classes in the layout.
css = """
body {
    font-family: Arial, sans-serif;
    max-width: 800px;
    margin: 0 auto;
    padding: 20px;
}
.paper-list {
    max-height: 600px;
    overflow-y: auto;
    border: 1px solid #eee;
    border-radius: 5px;
    padding: 10px;
}
.search-row {
    display: flex;
    gap: 10px;
    margin-bottom: 20px;
}
"""
# Build the Gradio UI at import time: search bar, paper list, and
# pagination controls, then wire each widget to its handler.
demo = gr.Blocks(css=css)
with demo:
    gr.Markdown("# Daily Papers - HackerNews Style")
    with gr.Row(elem_classes=["search-row"]):
        search_input = gr.Textbox(label="Search papers", placeholder="Enter search term...")
        refresh_button = gr.Button("Refresh")
    # Initial render happens here, using the papers fetched at import.
    paper_list = gr.HTML(render_papers(), elem_classes=["paper-list"])
    with gr.Row():
        prev_button = gr.Button("Previous Page")
        next_button = gr.Button("Next Page")
    page_info = gr.Markdown(f"Page {current_page} of {total_pages}")
    # Event wiring: search filters live on every keystroke; the page
    # buttons also update the "Page X of Y" label.
    search_input.change(search_papers, inputs=[search_input], outputs=[paper_list])
    refresh_button.click(refresh_papers, outputs=[paper_list])
    prev_button.click(prev_page, outputs=[paper_list, page_info])
    next_button.click(next_page, outputs=[paper_list, page_info])
if __name__ == "__main__":
    # Bind on all interfaces (container/Space deployment) at port 5000.
    demo.launch(server_name="0.0.0.0", server_port=5000)