File size: 7,575 Bytes
7258883
1c09022
6234f75
0eb933f
e139f4c
 
0eb933f
e139f4c
72d2b05
5396a98
e139f4c
76edd3a
e139f4c
9c4d5bc
eaa80ff
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
e139f4c
9c4d5bc
e139f4c
 
0cd6b27
72d2b05
 
e139f4c
0cd6b27
 
e139f4c
 
 
 
63bb324
9c4d5bc
e139f4c
 
 
 
 
 
 
 
353fcd6
f04df6f
e139f4c
 
f04df6f
e139f4c
 
 
 
 
 
f04df6f
353fcd6
f04df6f
e139f4c
353fcd6
e139f4c
 
f04df6f
e139f4c
f04df6f
 
353fcd6
f04df6f
e139f4c
f0f635f
e139f4c
9c4d5bc
e139f4c
9c4d5bc
e139f4c
 
 
 
 
 
 
 
 
 
 
 
9c4d5bc
e139f4c
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
9c4d5bc
824d4bb
e139f4c
 
 
 
 
eaa80ff
 
 
 
 
 
 
 
 
 
e139f4c
eaa80ff
 
 
 
e139f4c
eaa80ff
 
e139f4c
 
 
 
 
 
 
63bb324
e139f4c
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
import os, glob
from datetime import datetime, timezone
import pandas as pd
import gradio as gr
from datasets import load_dataset, Dataset
from huggingface_hub import HfApi, ModelInfo

# ---------- Config ----------
OWNER = "AIEnergyScore"  # HF org that owns the dataset/Spaces referenced below
COMPUTE_SPACE = f"{OWNER}/launch-computation-example"  # NOTE(review): defined but not referenced in this file
# HF access token read from a Space secret named "DEBUG"; may be None —
# downstream code checks for that before attempting uploads.
TOKEN = os.environ.get("DEBUG")  # keep your existing env var

# Module-level client shared by all Hub API calls in this file.
API = HfApi(token=TOKEN)

def preflight_status():
    """Return a one-line Markdown status string describing auth/dataset access.

    Runs three checks in order and returns at the first failure:
      1. The HF token is present in the 'DEBUG' env var.
      2. The token authenticates (``HfApi.whoami``).
      3. The target dataset repo is reachable with that token.

    Returns:
        str: Markdown message prefixed with a status emoji.
    """
    # 1) Check token presence
    if not TOKEN:
        return ("❌ No HF token found in env var 'DEBUG'. "
                "Add a secret named DEBUG in the Space settings (a token with 'write' scope).")

    # 2) Check identity
    try:
        me = API.whoami(token=TOKEN)
        # whoami payload key differs across hub versions; fall back gracefully.
        user_str = me.get("name") or me.get("username") or "unknown-user"
    except Exception as e:
        return f"❌ Token error: cannot authenticate ({e})."

    # 3) Check dataset access
    repo_id = "AIEnergyScore/tested_proprietary_models"
    try:
        # Result intentionally discarded: success alone proves at least read
        # access; a write failure would only surface during upload.
        API.repo_info(repo_id=repo_id, repo_type="dataset", token=TOKEN)
        return f"βœ… Connected as **{user_str}**. Dataset **{repo_id}** reachable."
    except Exception as e:
        return (f"⚠️ Auth OK as **{user_str}**, but cannot access dataset "
                f"**{repo_id}** ({e}). Make sure the token user has write access.")

# ---------- Upload to HF dataset (kept from your original) ----------
def add_docker_eval(zip_file):
    """Upload a user-provided ZIP of benchmark logs to the submissions dataset.

    Args:
        zip_file: Local filesystem path of the uploaded file. Only ``.zip``
            files (case-insensitive) are accepted.

    Side effects:
        Pushes the file to ``submitted_models/`` in the proprietary-models
        dataset repo and shows a Gradio toast either way.
    """
    new_fid = os.path.basename(zip_file)
    # Case-insensitive check so e.g. "LOGS.ZIP" is accepted, matching the
    # .lower() extension check performed by the UI wrapper before calling us.
    if new_fid.lower().endswith(".zip"):
        API.upload_file(
            path_or_fileobj=zip_file,
            repo_id="AIEnergyScore/tested_proprietary_models",
            path_in_repo="submitted_models/" + new_fid,
            repo_type="dataset",
            commit_message="Adding logs via submission Space.",
            token=TOKEN,
        )
        gr.Info(
            "Uploaded logs to dataset! We will validate their validity and add them to the next version of the leaderboard."
        )
    else:
        # Rejection is an error condition: surface it as a warning toast,
        # not an informational one.
        gr.Warning("You can only upload .zip files here!")

# ---------- Minimal UI ----------
GITHUB_DOCKER_URL = "https://github.com/huggingface/AIEnergyScore"  # Docker setup & configs
METHODOLOGY_URL = "https://huggingface.co/spaces/AIEnergyScore/README"  # methodology / docs

# Build the Gradio UI; `demo` is launched at the bottom of the file.
with gr.Blocks(title="AI Energy Score") as demo:
    # Header links (kept)
    gr.HTML("""
        <style>
          .header-link { color: black !important; }
          @media (prefers-color-scheme: dark) { .header-link { color: white !important; } }
        </style>
        <div style="display:flex;justify-content:space-evenly;align-items:center;margin-bottom:20px;">
            <a class="header-link" href="https://huggingface.co/spaces/AIEnergyScore/leaderboard" style="text-decoration:none;font-weight:bold;font-size:1.1em;font-family:'Inter',sans-serif;">Leaderboard</a>
            <a class="header-link" href="https://huggingface.co/spaces/AIEnergyScore/Label" style="text-decoration:none;font-weight:bold;font-size:1.1em;font-family:'Inter',sans-serif;">Label Generator</a>
            <a class="header-link" href="https://huggingface.github.io/AIEnergyScore/#faq" style="text-decoration:none;font-weight:bold;font-size:1.1em;font-family:'Inter',sans-serif;">FAQ</a>
            <a class="header-link" href="https://huggingface.github.io/AIEnergyScore/#documentation" style="text-decoration:none;font-weight:bold;font-size:1.1em;font-family:'Inter',sans-serif;">Documentation</a>
            <a class="header-link" href="https://huggingface.co/spaces/AIEnergyScore/README/discussions" style="text-decoration:none;font-weight:bold;font-size:1.1em;font-family:'Inter',sans-serif;">Community</a>
        </div>
    """)

    # Logo (kept) — light/dark variants switched via <picture>.
    gr.HTML("""
        <div style="margin-top:0px;">
            <picture style="display:block;margin:0 auto;max-width:300px;">
                <source media="(prefers-color-scheme: dark)" srcset="https://huggingface.co/spaces/AIEnergyScore/Leaderboard/resolve/main/logodark.png">
                <img src="https://huggingface.co/spaces/AIEnergyScore/Leaderboard/resolve/main/logo.png" alt="Logo" style="display:block;margin:0 auto;max-width:300px;height:auto;">
            </picture>
        </div>
    """)

    gr.Markdown("<div style='text-align:center;'><h2>Submission Portal</h2></div>")
    # NOTE(review): preflight status banner is disabled; re-enable or delete.
    ##preflight_box = gr.Markdown(preflight_status())

    # Two-column layout: open models (left) vs closed models (right).
    with gr.Row():
        # -------- Open Models ----------
        with gr.Column():
            gr.Markdown("""
### 🌿 Open Models
If your model is hosted on the πŸ€— Hub, please **start a new Discussion** and include:
- The **Hugging Face model link** (e.g., `org/model-name`)
- The **requested task type** (e.g., Text Generation)

> Requires a Hugging Face account.

➑️ **[Start a New Discussion](https://huggingface.co/spaces/AIEnergyScore/README/discussions)**
""")

        # -------- Closed Models ----------
        with gr.Column():
            gr.Markdown(f"""
### πŸ”’ Closed Models
Run the benchmark **in your own environment** and upload the logs here.

1. Use our Docker setup  
   β€’ **[Docker & configs]({GITHUB_DOCKER_URL})**  
   β€’ **[Methodology / Docs]({METHODOLOGY_URL})**  
2. When finished, upload the **ZIP file of logs** below.

**⚠️ By uploading the zip file, you agree to:**
- **Public Data Sharing:** We may publicly share the energy performance metrics derived from your submission (no proprietary configs disclosed).
- **Data Integrity:** Logs are accurate, unaltered, and produced per the specified procedures.
- **Model Representation:** The submitted run reflects your production-level model (quantization, etc.).
""")

            # Visible status box for user feedback
            status_box = gr.Markdown("")

            # Hidden file sink (kept pattern from your previous code)
            file_sink = gr.File(visible=False)

            # Entry point for closed-model submissions; restricted to .zip.
            upload_button = gr.UploadButton(
                "πŸ“ Upload a ZIP file with logs", file_count="single", file_types=[".zip"], interactive=True
            )

            # Wrapper around add_docker_eval that also produces a user-visible
            # Markdown status string for the status box.
            def handle_zip_and_upload(temp_path):
                """Validate the uploaded file, forward it to the dataset
                uploader, and return a Markdown status message."""
                # Guard: Gradio may invoke us with no file selected.
                if not temp_path:
                    gr.Warning("No file selected.")
                    return "❌ No file uploaded."

                # Guard: without a token the upload cannot succeed.
                if not TOKEN:
                    gr.Warning("Missing HF token in env var 'DEBUG'.")
                    return "❌ Upload blocked: missing token (DEBUG)."

                # Guard: enforce the .zip extension (case-insensitive).
                if not str(temp_path).lower().endswith(".zip"):
                    gr.Warning("Only .zip files are accepted.")
                    return "❌ Please upload a .zip file."

                try:
                    # Pushes to AIEnergyScore/tested_proprietary_models under
                    # submitted_models/; shows its own toast internally.
                    add_docker_eval(temp_path)
                    name = os.path.basename(temp_path)
                except Exception as e:
                    gr.Warning(f"Upload error: {e}")
                    return f"❌ Upload failed β€” {e}"
                else:
                    return f"βœ… Received and submitted: **{name}**"

            # IMPORTANT: bind inside Blocks context
            # Wire the button's upload event to the handler above.
            upload_button.upload(
                fn=handle_zip_and_upload,
                inputs=upload_button,     # UploadButton passes the temp file path
                outputs=status_box,       # show result here
            )

# Launch
if __name__ == "__main__":
    # Script entry point: start the Gradio app (blocks until shutdown).
    demo.launch()