openfree commited on
Commit
088a9e9
ยท
verified ยท
1 Parent(s): 720c9f9

Update app-backup.py

Browse files
Files changed (1) hide show
  1. app-backup.py +1468 -697
app-backup.py CHANGED
@@ -1,16 +1,8 @@
1
- #!/usr/bin/env python3
2
- """
3
- HF Space Auto-Deployer - README ์˜ค๋ฅ˜ ์ˆ˜์ • ๋ฒ„์ „
4
- ----------------------------------------
5
- ๊ณต๊ฐœ Git ๋ ˆํฌ์ง€ํ† ๋ฆฌ๋ฅผ Hugging Face Gradio Space๋กœ ์ž๋™ ๋ณ€ํ™˜ ๋ฐ ๋ฐฐํฌํ•ฉ๋‹ˆ๋‹ค.
6
- Git ๋ช…๋ น์–ด๋ฅผ ์‚ฌ์šฉํ•˜์ง€ ์•Š๊ณ  HuggingFace API๋งŒ ์‚ฌ์šฉํ•˜๋Š” ์•ˆ์ •์ ์ธ ๋ฒ„์ „์ž…๋‹ˆ๋‹ค.
7
-
8
- ํ•„์š” ํ™˜๊ฒฝ๋ณ€์ˆ˜:
9
- - HF_TOKEN: HuggingFace Write Token (์„ ํƒ, UI์—์„œ ์ž…๋ ฅ ๊ฐ€๋Šฅ)
10
- - BAPI_TOKEN: Brave Search API Key (์„ ํƒ)
11
- - OPENAI_API_KEY ๋˜๋Š” FRIENDLI_TOKEN: AI ์ƒ์„ฑ์šฉ (์„ ํƒ)
12
- """
13
-
14
  import os
15
  import sys
16
  import json
@@ -20,42 +12,287 @@ import tempfile
20
  import textwrap
21
  import requests
22
  import shutil
 
23
  from pathlib import Path
24
- from typing import Optional, Dict, List
25
 
26
- import gradio as gr
27
- from huggingface_hub import HfApi, create_repo
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
28
 
29
- # ========== Brave Search ํ—ฌํผ (์„ ํƒ์ ) ========== #
30
- def brave_search_repo(repo_url: str, count: int = 5) -> List[Dict]:
31
- """Brave Search API๋กœ ๋ ˆํฌ์ง€ํ† ๋ฆฌ ๋ฉ”ํƒ€๋ฐ์ดํ„ฐ ์ˆ˜์ง‘"""
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
32
  api_key = os.getenv("BAPI_TOKEN")
33
  if not api_key:
34
- print("โš ๏ธ BAPI_TOKEN์ด ์„ค์ •๋˜์ง€ ์•Š์•„ ๊ฒ€์ƒ‰์„ ๊ฑด๋„ˆ๋œ๋‹ˆ๋‹ค.")
35
- return []
36
 
37
- # API ํ‚ค ์ •๋ฆฌ (์ค„๋ฐ”๊ฟˆ, ๊ณต๋ฐฑ ์ œ๊ฑฐ)
38
  api_key = api_key.strip()
39
-
40
  headers = {"X-Subscription-Token": api_key, "Accept": "application/json"}
41
- params = {"q": f'site:github.com "{repo_url}"', "count": count, "search_lang": "en"}
42
 
43
- try:
44
- resp = requests.get(
45
- "https://api.search.brave.com/res/v1/web/search",
46
- headers=headers,
47
- params=params,
48
- timeout=10
49
- )
50
- resp.raise_for_status()
51
- return resp.json().get("web", {}).get("results", [])
52
- except Exception as e:
53
- print(f"โš ๏ธ Brave Search ๊ฒฝ๊ณ : {e}")
54
- return []
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
55
 
56
- # ========== AI ์ƒ์„ฑ ํ—ฌํผ (์„ ํƒ์ ) ========== #
57
- def generate_gradio_app(context: str) -> Dict:
58
- """AI๋กœ Gradio ์•ฑ ์ƒ์„ฑ ๋˜๋Š” ๊ธฐ๋ณธ ํ…œํ”Œ๋ฆฟ ๋ฐ˜ํ™˜"""
 
59
 
60
  # OpenAI ์‹œ๋„
61
  openai_key = os.getenv("OPENAI_API_KEY")
@@ -69,13 +306,10 @@ def generate_gradio_app(context: str) -> Dict:
69
  payload = {
70
  "model": "gpt-4o-mini",
71
  "messages": [
72
- {
73
- "role": "system",
74
- "content": "You are an expert at creating Gradio apps. Generate a complete, working Gradio app based on the repository context. Return only valid JSON with keys: app_py, requirements_txt, summary. IMPORTANT: Always use gradio>=5.35.0 in requirements_txt."
75
- },
76
- {"role": "user", "content": f"Create a Gradio app for:\n{context[:4000]}"}
77
  ],
78
- "temperature": 0.7,
79
  "max_tokens": 4000
80
  }
81
 
@@ -88,15 +322,32 @@ def generate_gradio_app(context: str) -> Dict:
88
 
89
  if r.status_code == 200:
90
  response_text = r.json()["choices"][0]["message"]["content"]
91
- print("โœ… OpenAI API๋กœ ์•ฑ ์ƒ์„ฑ ์„ฑ๊ณต")
92
 
93
  # JSON ํŒŒ์‹ฑ
94
- if "```json" in response_text:
95
- start = response_text.find("```json") + 7
96
- end = response_text.find("```", start)
97
- response_text = response_text[start:end].strip()
98
-
99
- return json.loads(response_text)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
100
  except Exception as e:
101
  print(f"โš ๏ธ OpenAI API ์˜ค๋ฅ˜: {e}")
102
 
@@ -104,751 +355,1271 @@ def generate_gradio_app(context: str) -> Dict:
104
  friendli_token = os.getenv("FRIENDLI_TOKEN")
105
  if friendli_token:
106
  try:
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
107
  for endpoint in [
108
  "https://api.friendli.ai/v1/chat/completions",
109
  "https://api.friendli.ai/dedicated/v1/chat/completions"
110
  ]:
111
- headers = {
112
- "Authorization": f"Bearer {friendli_token.strip()}",
113
- "Content-Type": "application/json"
114
- }
115
-
116
- payload = {
117
- "model": "meta-llama-3.1-70b-instruct",
118
- "messages": [
119
- {
120
- "role": "system",
121
- "content": "Generate a Gradio app. Return JSON with keys: app_py, requirements_txt, summary. IMPORTANT: Always use gradio>=5.35.0 in requirements_txt."
122
- },
123
- {"role": "user", "content": f"Create Gradio app for:\n{context[:4000]}"}
124
- ],
125
- "max_tokens": 4000,
126
- "temperature": 0.7
127
- }
128
-
129
  r = requests.post(endpoint, json=payload, headers=headers, timeout=30)
130
  if r.status_code == 200:
131
  response_text = r.json()["choices"][0]["message"]["content"]
132
- print(f"โœ… Friendli API๋กœ ์•ฑ ์ƒ์„ฑ ์„ฑ๊ณต")
133
 
134
  if "```json" in response_text:
135
  start = response_text.find("```json") + 7
136
  end = response_text.find("```", start)
137
  response_text = response_text[start:end].strip()
138
 
139
- return json.loads(response_text)
 
 
 
 
 
140
  except Exception as e:
141
  print(f"โš ๏ธ Friendli API ์˜ค๋ฅ˜: {e}")
142
 
143
- # ๊ธฐ๋ณธ ํ…œํ”Œ๋ฆฟ ๋ฐ˜ํ™˜
144
- print("โ„น๏ธ AI API๊ฐ€ ์—†์–ด ๊ธฐ๋ณธ ํ…œํ”Œ๋ฆฟ์„ ์‚ฌ์šฉํ•ฉ๋‹ˆ๋‹ค.")
145
- return None
146
 
147
- # ========== ๋ฉ”์ธ ๋ฐฐํฌ ํ•จ์ˆ˜ ========== #
148
- def deploy(repo_url: str, hf_token: str, private: bool = False) -> str:
149
- """๋ ˆํฌ์ง€ํ† ๋ฆฌ๋ฅผ Gradio Space๋กœ ๋ฐฐํฌ (์ €์žฅ์†Œ ๋‚ด์šฉ ๊ทธ๋Œ€๋กœ ๋ณต์‚ฌ)"""
150
 
151
- # ํ† ํฐ ์ •๋ฆฌ
152
- hf_token = hf_token.strip()
153
- if not hf_token.startswith("hf_"):
154
- raise ValueError("์ž˜๋ชป๋œ ํ† ํฐ ํ˜•์‹์ž…๋‹ˆ๋‹ค. 'hf_'๋กœ ์‹œ์ž‘ํ•ด์•ผ ํ•ฉ๋‹ˆ๋‹ค.")
155
 
156
- # HF API ์ดˆ๊ธฐํ™”
157
- api = HfApi(token=hf_token)
 
 
 
 
 
158
 
159
- # ์‚ฌ์šฉ์ž ์ •๋ณด ๋ฐ Space ์ด๋ฆ„ ์ƒ์„ฑ
160
- try:
161
- user_info = api.whoami()
162
- user = user_info["name"]
163
- except Exception as e:
164
- raise RuntimeError(f"HuggingFace ์ธ์ฆ ์‹คํŒจ: {e}")
 
 
 
 
 
 
 
 
 
165
 
166
- # Space ์ด๋ฆ„ ์ •๋ฆฌ
167
- repo_name = Path(repo_url.rstrip("/")).name.lower()
168
- repo_name = repo_name.replace(".", "-").replace("_", "-")[:32] # 32์ž ์ œํ•œ
169
- space_id = f"{user}/{repo_name}-space"
 
 
 
 
 
 
 
 
 
 
 
 
 
170
 
171
- print(f"\n๐Ÿ“ฆ Space ID: {space_id}")
 
172
 
173
- # Space ์ƒ์„ฑ
174
- try:
175
- create_repo(
176
- repo_id=space_id,
177
- repo_type="space",
178
- space_sdk="gradio",
179
- private=private,
180
- token=hf_token,
181
- exist_ok=True
182
- )
183
- print("โœ… Space ์ƒ์„ฑ ์™„๋ฃŒ")
184
- except Exception as e:
185
- print(f"โš ๏ธ Space ์ƒ์„ฑ ์˜ค๋ฅ˜: {e}")
186
- if "already exists" not in str(e):
187
- raise
188
 
189
- with tempfile.TemporaryDirectory() as work_dir:
190
- work_path = Path(work_dir)
191
-
192
- # ์›๋ณธ ๋ ˆํฌ์ง€ํ† ๋ฆฌ ํด๋ก 
193
- print("\n๐Ÿ“ฅ ๋ ˆํฌ์ง€ํ† ๋ฆฌ ํด๋ก  ์ค‘...")
194
- src_path = work_path / "source"
195
-
196
- try:
197
- import git
198
- # LFS ํŒŒ์ผ์„ ํฌํ•จํ•˜์—ฌ ํด๋ก 
199
- git.Repo.clone_from(repo_url, src_path, depth=1)
200
- print("โœ… ๋ ˆํฌ์ง€ํ† ๋ฆฌ ํด๋ก  ์™„๋ฃŒ")
201
-
202
- # Git LFS ํŒŒ์ผ ์ฒดํฌ ๋ฐ ๋‹ค์šด๋กœ๋“œ ์‹œ๋„
203
- try:
204
- repo = git.Repo(src_path)
205
- # LFS ํŒŒ์ผ ๋‹ค์šด๋กœ๋“œ ์‹œ๋„
206
- repo.git.lfs('pull')
207
- print("โœ… LFS ํŒŒ์ผ ๋‹ค์šด๋กœ๋“œ ์‹œ๋„")
208
- except Exception as lfs_error:
209
- print(f"โš ๏ธ LFS ํŒŒ์ผ ๋‹ค์šด๋กœ๋“œ ๊ฑด๋„ˆ๋œ€: {lfs_error}")
210
-
211
- except Exception as e:
212
- print(f"โŒ ๋ ˆํฌ ํด๋ก  ์‹คํŒจ: {e}")
213
- raise
214
-
215
- # Space์— ์—…๋กœ๋“œํ•  ํŒŒ์ผ๋“ค ์ค€๋น„
216
- print("\n๐Ÿ” ๋ ˆํฌ์ง€ํ† ๋ฆฌ ํŒŒ์ผ ๋ถ„์„ ์ค‘...")
217
-
218
- # ๊ธฐ๋ณธ Gradio app.py๊ฐ€ ์žˆ๋Š”์ง€ ํ™•์ธ
219
- has_app_py = (src_path / "app.py").exists()
220
- has_gradio_app = False
221
-
222
- # Gradio ๊ด€๋ จ ํŒŒ์ผ ์ฐพ๊ธฐ
223
- gradio_files = []
224
- for pattern in ["*gradio*.py", "*demo*.py", "*interface*.py", "*ui*.py"]:
225
- gradio_files.extend(src_path.glob(pattern))
226
-
227
- # requirements.txt ํ™•์ธ
228
- has_requirements = (src_path / "requirements.txt").exists()
229
- requirements_content = ""
230
-
231
- if has_requirements:
232
- requirements_content = (src_path / "requirements.txt").read_text(encoding="utf-8")
233
- # Gradio ๋ฒ„์ „ ํ™•์ธ ๋ฐ ์—…๋ฐ์ดํŠธ
234
- if "gradio" not in requirements_content.lower():
235
- requirements_content += "\ngradio>=5.35.0"
236
- else:
237
- # ๊ธฐ์กด gradio ๋ฒ„์ „์„ 5.35.0์œผ๋กœ ์—…๋ฐ์ดํŠธ
238
- import re
239
- requirements_content = re.sub(
240
- r'gradio[>=<~=]*[\d.]*',
241
- 'gradio>=5.35.0',
242
- requirements_content,
243
- flags=re.IGNORECASE
244
- )
245
- else:
246
- # requirements.txt๊ฐ€ ์—†์œผ๋ฉด ๊ธฐ๋ณธ ์ƒ์„ฑ
247
- requirements_content = "gradio>=5.35.0\n"
248
 
249
- # README.md ์ƒ์„ฑ (YAML ํ—ค๋”๋ฅผ ์ •ํ™•ํ•˜๊ฒŒ ์ƒ์„ฑ)
250
- readme_content = f"""---
251
- title: {repo_name.replace("-", " ").title()}
252
- emoji: ๐Ÿš€
253
- colorFrom: blue
254
- colorTo: green
255
- sdk: gradio
256
- sdk_version: "5.35.0"
257
- app_file: app.py
258
- pinned: false
259
- ---
260
 
261
- # {repo_name.replace("-", " ").title()}
 
 
 
 
 
 
262
 
263
- Deployed from: {repo_url}
 
264
 
265
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
266
- """
267
-
268
- # ํŒŒ์ผ ์—…๋กœ๋“œ
269
- print("\n๐Ÿ“ค Space์— ํŒŒ์ผ ์—…๋กœ๋“œ ์ค‘...")
270
-
271
- # ์—…๋กœ๋“œํ•  ํŒŒ์ผ ๋ชฉ๋ก ์ƒ์„ฑ (๋ชจ๋“  ํŒŒ์ผ ๋ณต์‚ฌ)
272
- files_to_upload = []
273
-
274
- # .git ํด๋”๋ฅผ ์ œ์™ธํ•œ ๋ชจ๋“  ํŒŒ์ผ ๋ณต์‚ฌ
275
- for file_path in src_path.rglob("*"):
276
- if file_path.is_file() and ".git" not in str(file_path):
277
- relative_path = file_path.relative_to(src_path)
278
-
279
- # README.md๋Š” ๊ฑด๋„ˆ๋›ฐ๊ธฐ (๋‚˜์ค‘์— ์ƒˆ๋กœ ์ƒ์„ฑ)
280
- if str(relative_path).lower() == "readme.md":
281
- continue
282
-
283
- # LFS ํฌ์ธํ„ฐ ํŒŒ์ผ ์ฒดํฌ
284
- is_lfs_pointer = False
285
- try:
286
- with open(file_path, 'rb') as f:
287
- header = f.read(100)
288
- if header.startswith(b'version https://git-lfs.github.com/spec/'):
289
- is_lfs_pointer = True
290
- print(f" โš ๏ธ LFS ํŒŒ์ผ ๊ฑด๋„ˆ๋œ€: {relative_path}")
291
- except:
292
- pass
293
-
294
- if is_lfs_pointer:
295
- continue
296
-
297
- # ๋Œ€์šฉ๋Ÿ‰ ํŒŒ์ผ ํ•„ํ„ฐ๋ง (100MB ์ด์ƒ)
298
- try:
299
- file_size = file_path.stat().st_size
300
- if file_size > 100 * 1024 * 1024:
301
- print(f" โš ๏ธ ํฐ ํŒŒ์ผ ๊ฑด๋„ˆ๋œ€: {relative_path} ({file_size / 1024 / 1024:.1f}MB)")
302
- continue
303
- except:
304
- continue
305
-
306
- # ํŠน์ • ํ™•์žฅ์ž ํ•„ํ„ฐ๋ง (๋Œ€์šฉ๋Ÿ‰ ๋ฏธ๋””์–ด ํŒŒ์ผ)
307
- skip_extensions = {'.mp4', '.avi', '.mov', '.mkv', '.wmv', '.flv',
308
- '.zip', '.tar', '.gz', '.rar', '.7z',
309
- '.weights', '.pkl', '.pth', '.h5', '.ckpt'}
310
- if file_path.suffix.lower() in skip_extensions:
311
- print(f" โš ๏ธ ๋ฏธ๋””์–ด/์•„์นด์ด๋ธŒ ํŒŒ์ผ ๊ฑด๋„ˆ๋œ€: {relative_path}")
312
- continue
313
-
314
- files_to_upload.append((str(relative_path), file_path))
315
-
316
- # app.py๊ฐ€ ์—†๊ณ  Gradio ํŒŒ์ผ๋„ ์—†์œผ๋ฉด ๊ธฐ๋ณธ app.py ์ƒ์„ฑ
317
- if not has_app_py and not gradio_files:
318
- print(" โš ๏ธ app.py๊ฐ€ ์—†์–ด ๊ธฐ๋ณธ ํ…œํ”Œ๋ฆฟ ์ƒ์„ฑ")
319
- app_content = f'''import gradio as gr
320
 
321
- def main():
322
- return f"""
 
323
  # {repo_name.replace("-", " ").title()}
324
 
325
- This Space was deployed from: {repo_url}
326
 
327
- The original repository doesn't contain a Gradio app.
328
- Please create an app.py file with your Gradio interface.
329
 
330
- ## Repository Structure
331
- Check the Files tab to see all uploaded files from the repository.
332
- """
333
-
334
- demo = gr.Interface(
335
- fn=main,
336
- inputs=None,
337
- outputs="markdown",
338
- title=f"{repo_name.replace("-", " ").title()} - Deployed from GitHub",
339
- description="Please edit app.py to add your Gradio interface."
340
- )
 
 
 
341
 
342
  if __name__ == "__main__":
343
  demo.launch()
344
  '''
345
- # app.py๋ฅผ ํŒŒ์ผ ๋ชฉ๋ก ๋งจ ์•ž์— ์ถ”๊ฐ€
346
- files_to_upload.insert(0, ("app.py", app_content))
347
-
348
- # requirements.txt ์—…๋ฐ์ดํŠธ
349
- files_to_upload.insert(0, ("requirements.txt", requirements_content))
350
-
351
- # README.md๋ฅผ ๋งจ ์•ž์— ์ถ”๊ฐ€ (ํ•ญ์ƒ ์ƒˆ๋กœ ์ƒ์„ฑ)
352
- files_to_upload.insert(0, ("README.md", readme_content))
353
-
354
- # ๋ชจ๋“  ํŒŒ์ผ์„ ํ•œ ๋ฒˆ์— ์—…๋กœ๋“œ (rate limit ํšŒํ”ผ)
355
- print("\n๐Ÿ“ค ํŒŒ์ผ ์ค€๋น„ ์ค‘...")
356
-
357
- # ์ž„์‹œ ๋””๋ ‰ํ† ๋ฆฌ ์ƒ์„ฑ
358
- upload_dir = work_path / "upload"
359
- upload_dir.mkdir(exist_ok=True)
360
-
361
- # ํŒŒ์ผ ์ค€๋น„
362
- for file_info in files_to_upload:
363
- try:
364
- if isinstance(file_info[1], str):
365
- # ๋ฌธ์ž์—ด ์ปจํ…์ธ 
366
- path_in_repo = file_info[0]
367
- content = file_info[1]
368
-
369
- # ๋””๋ ‰ํ† ๋ฆฌ ์ƒ์„ฑ
370
- file_path = upload_dir / path_in_repo
371
- file_path.parent.mkdir(parents=True, exist_ok=True)
372
-
373
- # ํŒŒ์ผ ์“ฐ๊ธฐ
374
- file_path.write_text(content, encoding="utf-8")
375
- else:
376
- # ํŒŒ์ผ ๋ณต์‚ฌ
377
- path_in_repo = file_info[0]
378
- src_file = file_info[1]
379
-
380
- # ๋””๋ ‰ํ† ๋ฆฌ ์ƒ์„ฑ
381
- dst_file = upload_dir / path_in_repo
382
- dst_file.parent.mkdir(parents=True, exist_ok=True)
383
-
384
- # ํŒŒ์ผ ๋ณต์‚ฌ
385
- shutil.copy2(src_file, dst_file)
386
-
387
- except Exception as e:
388
- print(f" โš ๏ธ {path_in_repo} ์ค€๋น„ ์‹คํŒจ: {e}")
389
- continue
390
-
391
- # ํ•œ ๋ฒˆ์— ๋ชจ๋“  ํŒŒ์ผ ์—…๋กœ๋“œ
392
- try:
393
- print("\n๐Ÿ“ค Space์— ํŒŒ์ผ ์—…๋กœ๋“œ ์ค‘...")
394
- api.upload_folder(
395
- folder_path=upload_dir,
396
- repo_id=space_id,
397
- repo_type="space",
398
- commit_message="Initial deployment from GitHub repository",
399
- ignore_patterns=["*.pyc", "__pycache__", ".git*", ".DS_Store"]
400
- )
401
- print(f"\nโœ… ๋ชจ๋“  ํŒŒ์ผ ์—…๋กœ๋“œ ์™„๋ฃŒ!")
402
- except Exception as e:
403
- if "429" in str(e):
404
- print(f"\nโš ๏ธ Rate limit ์˜ค๋ฅ˜: {e}")
405
- print("\n๐Ÿ’ก ํ•ด๊ฒฐ ๋ฐฉ๋ฒ•:")
406
- print("1. 1์‹œ๊ฐ„ ํ›„์— ๋‹ค์‹œ ์‹œ๋„ํ•˜์„ธ์š”")
407
- print("2. ๋˜๋Š” ์ˆ˜๋™์œผ๋กœ Space๋ฅผ ์ƒ์„ฑํ•˜๊ณ  ํŒŒ์ผ์„ ์—…๋กœ๋“œํ•˜์„ธ์š”")
408
- print(f" - Space URL: https://huggingface.co/spaces/{space_id}")
409
- raise RuntimeError("Rate limit์— ๊ฑธ๋ ธ์Šต๋‹ˆ๋‹ค. 1์‹œ๊ฐ„ ํ›„ ๋‹ค์‹œ ์‹œ๋„ํ•ด์ฃผ์„ธ์š”.")
410
- else:
411
- raise
412
-
413
-
414
 
415
- return f"https://huggingface.co/spaces/{space_id}"
 
 
 
 
416
 
417
- # ========== Gradio UI ========== #
418
- def launch_deploy(repo_url: str, private: bool, profile: gr.OAuthProfile | None) -> str:
419
- """Gradio UI์—์„œ ํ˜ธ์ถœ๋˜๋Š” ๋ฐฐํฌ ํ•จ์ˆ˜"""
 
420
 
421
- # ํ† ํฐ ๊ฐ€์ ธ์˜ค๊ธฐ
422
- hf_token = None
423
 
424
- # OAuth ํ”„๋กœํ•„ ํ™•์ธ
425
- if profile and hasattr(profile, 'token'):
426
- hf_token = profile.token
427
- if hf_token:
428
- hf_token = hf_token.strip()
429
 
430
- # ํ™˜๊ฒฝ๋ณ€์ˆ˜ ํด๋ฐฑ
431
- if not hf_token:
432
- hf_token = os.environ.get("HF_TOKEN")
433
- if hf_token:
434
- hf_token = hf_token.strip()
 
435
 
436
- if not hf_token:
437
- return """### โŒ ์ธ์ฆ ํ•„์š”
438
 
439
- HuggingFace ํ† ํฐ์ด ํ•„์š”ํ•ฉ๋‹ˆ๋‹ค. ๋‹ค์Œ ์ค‘ ํ•˜๋‚˜์˜ ๋ฐฉ๋ฒ•์„ ์‚ฌ์šฉํ•˜์„ธ์š”:
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
440
 
441
- 1. **ํ™˜๊ฒฝ๋ณ€์ˆ˜ ์„ค์ •** (๊ถŒ์žฅ)
442
- - Space Settings โ†’ Variables and secrets
443
- - `HF_TOKEN` = `hf_xxxxx...` ์ถ”๊ฐ€
444
- - Space ์žฌ์‹œ์ž‘
 
 
445
 
446
- 2. **OAuth ๋กœ๊ทธ์ธ** (OAuth ์„ค์ • ํ•„์š”)
447
- - README.md์— `hf_oauth: true` ์ถ”๊ฐ€
448
- - ์šฐ์ธก ์ƒ๋‹จ์—์„œ ๋กœ๊ทธ์ธ
449
 
450
- **ํ† ํฐ ์ƒ์„ฑ**: https://huggingface.co/settings/tokens (Write ๊ถŒํ•œ ํ•„์š”)"""
 
 
 
451
 
452
- # URL ๊ฒ€์ฆ
453
- repo_url = repo_url.strip()
454
- if not repo_url:
455
- return "### โŒ Repository URL์„ ์ž…๋ ฅํ•ด์ฃผ์„ธ์š”."
456
 
457
- if not repo_url.startswith(("https://github.com/", "http://github.com/")):
458
- return """### โŒ ์ž˜๋ชป๋œ URL ํ˜•์‹
459
 
460
- GitHub URL ํ˜•์‹์œผ๋กœ ์ž…๋ ฅํ•ด์ฃผ์„ธ์š”.
461
- ์˜ˆ: `https://github.com/username/repository`"""
 
 
462
 
463
- # ๋ฐฐํฌ ์‹คํ–‰
464
- try:
465
- space_url = deploy(repo_url, hf_token, private)
 
 
 
 
 
 
 
 
 
 
466
 
467
- return f"""### โœ… Space ๋ฐฐํฌ ์™„๋ฃŒ! ๐ŸŽ‰
468
-
469
- **Space URL**: [{space_url}]({space_url})
 
 
 
 
 
470
 
471
- **๋‹ค์Œ ๋‹จ๊ณ„:**
472
- 1. ์œ„ ๋งํฌ๋ฅผ ํด๋ฆญํ•˜์—ฌ Space ๋ฐฉ๋ฌธ
473
- 2. 2-3๋ถ„ ์ •๋„ ๋นŒ๋“œ ๋Œ€๊ธฐ
474
- 3. Files ํƒญ์—์„œ `app.py` ํŽธ์ง‘ํ•˜์—ฌ ๊ธฐ๋Šฅ ์ถ”๊ฐ€
 
 
 
 
 
475
 
476
- **Tips:**
477
- - ๋นŒ๋“œ ์ƒํƒœ๋Š” Logs ํƒญ์—์„œ ํ™•์ธ
478
- - ์˜ค๋ฅ˜๊ฐ€ ์žˆ๋‹ค๋ฉด Files โ†’ app.py์—์„œ ์ˆ˜์ •
479
- - requirements.txt์— ํ•„์š”ํ•œ ํŒจํ‚ค์ง€ ์ถ”๊ฐ€"""
 
 
 
 
 
 
 
 
 
 
480
 
481
- except Exception as e:
482
- error_msg = str(e)
 
 
 
 
 
 
 
483
 
484
- if "์ธ์ฆ ์‹คํŒจ" in error_msg or "401" in error_msg:
485
- return """### โŒ ์ธ์ฆ ์‹คํŒจ
486
-
487
- ํ† ํฐ์ด ์œ ํšจํ•˜์ง€ ์•Š๊ฑฐ๋‚˜ ๊ถŒํ•œ์ด ๋ถ€์กฑํ•ฉ๋‹ˆ๋‹ค.
488
-
489
- **ํ•ด๊ฒฐ ๋ฐฉ๋ฒ•:**
490
- 1. [ํ† ํฐ ์ƒ์„ฑ ํŽ˜์ด์ง€](https://huggingface.co/settings/tokens) ๋ฐฉ๋ฌธ
491
- 2. New token โ†’ **write** ๊ถŒํ•œ ์ฒดํฌ
492
- 3. Space Settings์—์„œ HF_TOKEN ์—…๋ฐ์ดํŠธ"""
493
 
494
- elif "already exists" in error_msg:
495
- return """### โš ๏ธ Space๊ฐ€ ์ด๋ฏธ ์กด์žฌํ•ฉ๋‹ˆ๋‹ค
496
-
497
- ๋™์ผํ•œ ์ด๋ฆ„์˜ Space๊ฐ€ ์ด๋ฏธ ์žˆ์Šต๋‹ˆ๋‹ค.
498
- - ๊ธฐ์กด Space๋ฅผ ์‚ญ์ œํ•˜๊ฑฐ๋‚˜
499
- - ๋‹ค๋ฅธ Repository๋ฅผ ์„ ํƒํ•˜์„ธ์š”"""
500
 
501
- elif "LFS pointer pointed to a file that does not exist" in error_msg:
502
- return """### โš ๏ธ LFS ํŒŒ์ผ ์˜ค๋ฅ˜
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
503
 
504
- ์ €์žฅ์†Œ์— Git LFS๋กœ ๊ด€๋ฆฌ๋˜๋Š” ๋Œ€์šฉ๋Ÿ‰ ํŒŒ์ผ์ด ์žˆ์ง€๋งŒ ์‹ค์ œ ํŒŒ์ผ์ด ์—†์Šต๋‹ˆ๋‹ค.
505
 
506
- **ํ•ด๊ฒฐ ๋ฐฉ๋ฒ•:**
507
 
508
- 1. **์ž๋™ ํ•ด๊ฒฐ๋จ**:
509
- - LFS ํŒŒ์ผ๋“ค์„ ์ž๋™์œผ๋กœ ์ œ์™ธํ•˜๊ณ  ๋‹ค์‹œ ์‹œ๋„ํ•˜์„ธ์š”
510
- - ๋น„๋””์˜ค, ๋ชจ๋ธ ํŒŒ์ผ ๋“ฑ์€ ๊ฑด๋„ˆ๋œ๋‹ˆ๋‹ค
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
511
 
512
- 2. **์ˆ˜๋™์œผ๋กœ ํ•„์š”ํ•œ ํŒŒ์ผ๋งŒ ์—…๋กœ๋“œ**:
513
- - Space๋ฅผ ์ˆ˜๋™์œผ๋กœ ์ƒ์„ฑ
514
- - ํ•„์š”ํ•œ ์ฝ”๋“œ ํŒŒ์ผ๋งŒ ์„ ํƒ์ ์œผ๋กœ ์—…๋กœ๋“œ
515
 
516
- 3. **์›๋ณธ ์ €์žฅ์†Œ์—์„œ LFS ํŒŒ์ผ ๋‹ค์šด๋กœ๋“œ**:
517
- ```bash
518
- git clone <repo-url>
519
- cd <repo-name>
520
- git lfs pull
521
- ```
522
 
523
- **์ฐธ๊ณ **: Gradio ์•ฑ ์‹คํ–‰์— ํ•„์ˆ˜์ ์ด์ง€ ์•Š์€ ๋ฏธ๋””์–ด ํŒŒ์ผ๋“ค์€ ์ œ์™ธํ•ด๋„ ๋ฉ๋‹ˆ๋‹ค."""
 
 
 
524
 
525
- elif "429" in error_msg or "rate-limited" in error_msg.lower() or "Rate limit" in error_msg:
526
- # Space ์ด๋ฆ„ ์ƒ์„ฑ
527
- repo_name = Path(repo_url.rstrip("/")).name.lower()
528
- repo_name = repo_name.replace(".", "-").replace("_", "-")[:32]
529
-
530
- # ์‚ฌ์šฉ์ž ์ด๋ฆ„ ๊ฐ€์ ธ์˜ค๊ธฐ
531
  try:
532
- api = HfApi(token=hf_token)
533
- user_info = api.whoami()
534
- user = user_info["name"]
535
- space_id = f"{user}/{repo_name}-space"
536
- space_url = f"https://huggingface.co/spaces/{space_id}"
537
- except:
538
- space_url = "https://huggingface.co/new-space"
539
-
540
- return f"""### โš ๏ธ Rate Limit ์ดˆ๊ณผ
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
541
 
542
- HuggingFace API ์‚ฌ์šฉ ํ•œ๋„๋ฅผ ์ดˆ๊ณผํ–ˆ์Šต๋‹ˆ๋‹ค.
543
 
544
- **๐Ÿš€ ๋Œ€์ฒด ๋ฐฉ๋ฒ• - ์ˆ˜๋™ ๋ฐฐํฌ:**
 
 
545
 
546
- 1. **Space ์ƒ์„ฑํ•˜๊ธฐ**
547
- - [์ƒˆ Space ๋งŒ๋“ค๊ธฐ]({space_url}) ํด๋ฆญ
548
- - Space ์ด๋ฆ„: `{repo_name}-space` (๋˜๋Š” ์›ํ•˜๋Š” ์ด๋ฆ„)
549
- - SDK: **Gradio** ์„ ํƒ
550
- - Create Space ํด๋ฆญ
551
 
552
- 2. **ํŒŒ์ผ ์—…๋กœ๋“œํ•˜๊ธฐ**
553
- - Space์˜ **Files** ํƒญ ํด๋ฆญ
554
- - **+ Add file** โ†’ **Upload files** ํด๋ฆญ
555
- - ์•„๋ž˜ ํ•„์ˆ˜ ํŒŒ์ผ๋“ค ์—…๋กœ๋“œ:
556
 
557
- **๐Ÿ“„ ํ•„์ˆ˜ ํŒŒ์ผ ๋‚ด์šฉ (๋ณต์‚ฌํ•ด์„œ ์‚ฌ์šฉ):**
 
 
 
 
 
558
 
559
- <details>
560
- <summary><b>README.md</b> (ํด๋ฆญํ•ด์„œ ํŽผ์น˜๊ธฐ)</summary>
 
 
561
 
562
- ```markdown
563
  ---
564
- title: {repo_name.replace("-", " ").title()}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
565
  emoji: ๐Ÿš€
566
  colorFrom: blue
567
  colorTo: green
568
- sdk: gradio
569
  sdk_version: "5.35.0"
570
  app_file: app.py
571
  pinned: false
572
  ---
573
 
574
- # {repo_name.replace("-", " ").title()}
575
-
576
- Deployed from: {repo_url}
577
- ```
578
- </details>
579
-
580
- <details>
581
- <summary><b>app.py</b> (ํด๋ฆญํ•ด์„œ ํŽผ์น˜๊ธฐ)</summary>
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
582
 
583
- ```python
584
- import gradio as gr
585
 
586
- def main():
587
- return f\"\"\"
588
- # Welcome to {repo_name.replace("-", " ").title()}
589
-
590
- This Space was created from: {repo_url}
591
-
592
- Please edit this app.py file to add your Gradio interface.
593
- \"\"\"
594
 
595
- demo = gr.Interface(
596
- fn=main,
597
- inputs=None,
598
- outputs="markdown",
599
- title="{repo_name.replace("-", " ").title()}",
600
- description="Edit app.py to customize this Space"
601
- )
602
 
603
- if __name__ == "__main__":
604
- demo.launch()
605
- ```
606
- </details>
 
607
 
608
- <details>
609
- <summary><b>requirements.txt</b> (ํด๋ฆญํ•ด์„œ ํŽผ์น˜๊ธฐ)</summary>
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
610
 
611
- ```
612
- gradio>=5.35.0
613
- ```
614
- </details>
615
 
616
- 3. **GitHub ํŒŒ์ผ ๋ณต์‚ฌ** (์„ ํƒ์‚ฌํ•ญ)
617
- - GitHub repo์˜ ํŒŒ์ผ๋“ค์„ ๋‹ค์šด๋กœ๋“œ
618
- - Space์— ๋“œ๋ž˜๊ทธ ์•ค ๋“œ๋กญ์œผ๋กœ ์—…๋กœ๋“œ
 
 
 
619
 
620
- **โฐ ์ž๋™ ๋ฐฐํฌ ์žฌ์‹œ๋„:**
621
- - 1์‹œ๊ฐ„ ํ›„์— ๋‹ค์‹œ ์ด ๋„๊ตฌ๋ฅผ ์‚ฌ์šฉํ•˜์„ธ์š”
622
- - ๋˜๋Š” ๋‚ด์ผ ๋‹ค์‹œ ์‹œ๋„ํ•˜๋ฉด ํ•œ๋„๊ฐ€ ๋ฆฌ์…‹๋ฉ๋‹ˆ๋‹ค
 
 
623
 
624
- **๐Ÿ’ก ํŒ:**
625
- - ์ƒˆ ์‚ฌ์šฉ์ž๋Š” ์ฒ˜์Œ์— ์ œํ•œ์ด ์žˆ์ง€๋งŒ ์‹œ๊ฐ„์ด ์ง€๋‚˜๋ฉด ํ•œ๋„๊ฐ€ ์ฆ๊ฐ€ํ•ฉ๋‹ˆ๋‹ค
626
- - ํ•œ ๋ฒˆ์— ๋„ˆ๋ฌด ๋งŽ์€ Space๋ฅผ ๋งŒ๋“ค์ง€ ๋งˆ์„ธ์š”"""
 
 
627
 
628
- else:
629
- return f"""### โŒ ๋ฐฐํฌ ์˜ค๋ฅ˜
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
630
 
631
- **์˜ค๋ฅ˜ ๋‚ด์šฉ:**
632
- ```
633
- {error_msg}
634
- ```
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
635
 
636
- **์ผ๋ฐ˜์ ์ธ ํ•ด๊ฒฐ ๋ฐฉ๋ฒ•:**
637
- 1. Repository๊ฐ€ public์ธ์ง€ ํ™•์ธ
638
- 2. ํ† ํฐ์ด ์˜ฌ๋ฐ”๋ฅธ์ง€ ํ™•์ธ (hf_๋กœ ์‹œ์ž‘)
639
- 3. ์ธํ„ฐ๋„ท ์—ฐ๊ฒฐ ํ™•์ธ
640
- 4. ์ž ์‹œ ํ›„ ๋‹ค์‹œ ์‹œ๋„"""
641
-
642
- # ========== UI ์ƒ์„ฑ ========== #
643
- def create_ui():
644
- """Gradio UI ์ƒ์„ฑ"""
645
-
646
- with gr.Blocks(
647
- title="HF Space Auto-Deployer",
648
- theme=gr.themes.Soft(),
649
- css="""
650
- .main-container {
651
- max-width: 900px;
652
- margin: 0 auto;
653
- padding: 20px;
654
- }
655
- .header {
656
- text-align: center;
657
- margin-bottom: 30px;
658
- padding: 30px;
659
- background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
660
- color: white;
661
- border-radius: 15px;
662
- box-shadow: 0 10px 30px rgba(0,0,0,0.1);
663
- }
664
- .header h1 {
665
- margin: 0;
666
- font-size: 2.5em;
667
- font-weight: bold;
668
- }
669
- .header p {
670
- margin: 10px 0 0 0;
671
- font-size: 1.2em;
672
- opacity: 0.9;
673
- }
674
- .status-box {
675
- padding: 20px;
676
- border-radius: 10px;
677
- margin-top: 20px;
678
- background-color: #f8f9fa;
679
- border: 1px solid #dee2e6;
680
- }
681
- .info-box {
682
- background-color: #e3f2fd;
683
- padding: 15px 20px;
684
- border-radius: 8px;
685
- border-left: 4px solid #2196F3;
686
- margin: 15px 0;
687
- }
688
- .warning-box {
689
- background-color: #fff3cd;
690
- padding: 15px 20px;
691
- border-radius: 8px;
692
- border-left: 4px solid #ffc107;
693
- margin: 15px 0;
694
- }
695
- .gr-button-primary {
696
- background: linear-gradient(135deg, #667eea 0%, #764ba2 100%) !important;
697
- border: none !important;
698
- font-size: 1.1em !important;
699
- font-weight: bold !important;
700
- padding: 12px 30px !important;
701
- color: white !important;
702
- }
703
- .gr-button-primary:hover {
704
- transform: translateY(-2px);
705
- box-shadow: 0 5px 15px rgba(0,0,0,0.2);
706
- }
707
- """
708
- ) as demo:
709
- with gr.Column(elem_classes="main-container"):
710
- # ํ—ค๋”
711
- gr.HTML("""
712
- <div class="header">
713
- <h1>๐Ÿš€ GitHub โ†’ HuggingFace Space</h1>
714
- <p>GitHub ์ €์žฅ์†Œ๋ฅผ Gradio Space๋กœ ์ฆ‰์‹œ ๋ณ€ํ™˜</p>
715
- </div>
716
- """)
717
-
718
- # ํ™˜๊ฒฝ๋ณ€์ˆ˜ ์ƒํƒœ ์ฒดํฌ
719
- env_status = []
720
- if os.getenv("HF_TOKEN"):
721
- env_status.append("โœ… HF_TOKEN ์„ค์ •๋จ")
722
- else:
723
- env_status.append("โš ๏ธ HF_TOKEN ๋ฏธ์„ค์ •")
724
-
725
- if os.getenv("OPENAI_API_KEY"):
726
- env_status.append("โœ… OpenAI API ์‚ฌ์šฉ ๊ฐ€๋Šฅ")
727
- elif os.getenv("FRIENDLI_TOKEN"):
728
- env_status.append("โœ… Friendli API ์‚ฌ์šฉ ๊ฐ€๋Šฅ")
729
- else:
730
- env_status.append("โ„น๏ธ AI API ๋ฏธ์„ค์ • (๊ธฐ๋ณธ ํ…œํ”Œ๋ฆฟ ์‚ฌ์šฉ)")
731
-
732
- if os.getenv("BAPI_TOKEN"):
733
- env_status.append("โœ… Brave Search ์‚ฌ์šฉ ๊ฐ€๋Šฅ")
734
-
735
- # ์ƒํƒœ ํ‘œ์‹œ
736
- status_html = "<div class='info-box'><b>๐Ÿ”ง ํ™˜๊ฒฝ ์ƒํƒœ:</b><br>" + "<br>".join(env_status) + "</div>"
737
- gr.HTML(status_html)
738
-
739
- # OAuth ๋กœ๊ทธ์ธ ๋ฒ„ํŠผ (OAuth ์„ค์ •๋œ ๊ฒฝ์šฐ)
740
- if os.getenv("SPACE_ID"): # Space ํ™˜๊ฒฝ์ธ ๊ฒฝ์šฐ
741
- gr.LoginButton(value="Sign in with Hugging Face ๐Ÿค—", size="lg")
742
-
743
- # ์ž…๋ ฅ ํ•„๋“œ
744
- with gr.Group():
745
- repo_input = gr.Textbox(
746
- label="๐Ÿ“ฆ GitHub Repository URL",
747
- placeholder="https://github.com/username/repository",
748
- info="Public repository URL์„ ์ž…๋ ฅํ•˜์„ธ์š”",
749
- lines=1
750
- )
751
-
752
- private_checkbox = gr.Checkbox(
753
- label="๐Ÿ”’ Private Space๋กœ ์ƒ์„ฑ",
754
- value=False,
755
- info="์ฒดํฌํ•˜๋ฉด ๋ณธ์ธ๋งŒ ์ ‘๊ทผ ๊ฐ€๋Šฅํ•œ Space๊ฐ€ ์ƒ์„ฑ๋ฉ๋‹ˆ๋‹ค"
756
- )
757
-
758
- # ๋ฐฐํฌ ๋ฒ„ํŠผ
759
- deploy_btn = gr.Button(
760
- "๐Ÿš€ Space ์ƒ์„ฑํ•˜๊ธฐ",
761
- variant="primary",
762
- size="lg"
763
  )
764
 
765
- # ๊ฒฐ๊ณผ ์ถœ๋ ฅ
766
- output_status = gr.Markdown(elem_classes="status-box")
 
 
 
 
 
767
 
768
- # ์˜ˆ์‹œ
769
  gr.Markdown("""
770
- ### ๐Ÿ’ก ์˜ˆ์‹œ Repository
771
- - `https://github.com/gradio-app/gradio`
772
- - `https://github.com/huggingface/transformers`
773
- - `https://github.com/openai/whisper`
 
 
 
 
 
 
 
 
 
 
774
  """)
775
 
776
- # ์‚ฌ์šฉ ๊ฐ€์ด๋“œ
777
- with gr.Accordion("๐Ÿ“š ์‚ฌ์šฉ ๊ฐ€์ด๋“œ", open=False):
778
- gr.Markdown("""
779
- ### ํ•„์ˆ˜ ์„ค์ •
780
- 1. **HF_TOKEN**: Space Settings โ†’ Variables and secrets์—์„œ ์„ค์ •
781
- - [ํ† ํฐ ์ƒ์„ฑ](https://huggingface.co/settings/tokens) (write ๊ถŒํ•œ ํ•„์š”)
782
-
783
- ### ์„ ํƒ์  ์„ค์ • (๋” ๋‚˜์€ ๊ฒฐ๊ณผ๋ฅผ ์œ„ํ•ด)
784
- - **OPENAI_API_KEY**: GPT-4๋กœ ์Šค๋งˆํŠธํ•œ ์•ฑ ์ƒ์„ฑ
785
- - **FRIENDLI_TOKEN**: Llama 3.1๋กœ ์•ฑ ์ƒ์„ฑ
786
- - **BAPI_TOKEN**: ์ €์žฅ์†Œ ๋ฉ”ํƒ€๋ฐ์ดํ„ฐ ์ˆ˜์ง‘
787
-
788
- ### ์ž‘๋™ ๋ฐฉ์‹
789
- 1. GitHub repository ๋ถ„์„
790
- 2. AI๊ฐ€ ๋งž์ถคํ˜• Gradio ์•ฑ ์ƒ์„ฑ (AI ์—†์œผ๋ฉด ๊ธฐ๋ณธ ํ…œํ”Œ๋ฆฟ)
791
- 3. HuggingFace Space์— ์ž๋™ ๋ฐฐํฌ
792
- 4. ์ฆ‰์‹œ ์‚ฌ์šฉ ๊ฐ€๋Šฅ!
793
-
794
- ### ๋ฌธ์ œ ํ•ด๊ฒฐ
795
- - **ํ† ํฐ ์˜ค๋ฅ˜**: ํ† ํฐ์ด `hf_`๋กœ ์‹œ์ž‘ํ•˜๋Š”์ง€ ํ™•์ธ
796
- - **๋นŒ๋“œ ์‹คํŒจ**: Space Logs ํ™•์ธ
797
- - **์ˆ˜์ • ํ•„์š”**: Files ํƒญ์—์„œ app.py ํŽธ์ง‘
798
- """)
799
-
800
- # ์ด๋ฒคํŠธ ์—ฐ๊ฒฐ
801
- deploy_btn.click(
802
- fn=launch_deploy,
803
- inputs=[repo_input, private_checkbox],
804
- outputs=output_status
805
- )
806
-
807
- return demo
808
-
809
- # ========== ๋ฉ”์ธ ์‹คํ–‰ ========== #
810
- def main():
811
- """๋ฉ”์ธ ์‹คํ–‰ ํ•จ์ˆ˜"""
812
- # CLI ์ธ์ž ํŒŒ์‹ฑ
813
- parser = argparse.ArgumentParser(description="GitHub repo๋ฅผ HF Space๋กœ ๋ฐฐํฌ")
814
- parser.add_argument("--repo_url", help="GitHub repository URL")
815
- parser.add_argument("--hf_token", help="HuggingFace write token")
816
- parser.add_argument("--private", action="store_true", help="Create private Space")
817
- parser.add_argument("--no-ui", action="store_true", help="Run without UI")
818
-
819
- args = parser.parse_args()
820
-
821
- # CLI ๋ชจ๋“œ
822
- if args.no_ui and args.repo_url and args.hf_token:
823
- try:
824
- url = deploy(args.repo_url, args.hf_token, args.private)
825
- print(f"\nโœ… ๋ฐฐํฌ ์„ฑ๊ณต: {url}")
826
- except Exception as e:
827
- print(f"\nโŒ ๋ฐฐํฌ ์‹คํŒจ: {e}", file=sys.stderr)
828
- sys.exit(1)
829
 
830
- # Gradio UI ๋ชจ๋“œ
831
- else:
832
- print("\n" + "="*60)
833
- print("๐Ÿš€ HF Space Auto-Deployer ์‹œ์ž‘")
834
- print("="*60)
835
-
836
- # ํ™˜๊ฒฝ๋ณ€์ˆ˜ ์ƒํƒœ ์ถœ๋ ฅ
837
- print("\nํ™˜๊ฒฝ๋ณ€์ˆ˜ ์ƒํƒœ:")
838
- print(f" {'โœ…' if os.getenv('HF_TOKEN') else 'โŒ'} HF_TOKEN")
839
- print(f" {'โœ…' if os.getenv('OPENAI_API_KEY') else 'โš ๏ธ'} OPENAI_API_KEY (์„ ํƒ)")
840
- print(f" {'โœ…' if os.getenv('FRIENDLI_TOKEN') else 'โš ๏ธ'} FRIENDLI_TOKEN (์„ ํƒ)")
841
- print(f" {'โœ…' if os.getenv('BAPI_TOKEN') else 'โš ๏ธ'} BAPI_TOKEN (์„ ํƒ)")
842
-
843
- print("\n" + "="*60 + "\n")
844
-
845
- # Gradio ์•ฑ ์‹คํ–‰
846
- demo = create_ui()
847
- demo.launch(
848
- share=False,
849
- server_name="0.0.0.0",
850
- server_port=7860
851
- )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
852
 
853
  if __name__ == "__main__":
854
- main()
 
1
+ import gradio as gr
2
+ from huggingface_hub import HfApi, create_repo
3
+ from git import Repo
4
+ import uuid
5
+ from slugify import slugify
 
 
 
 
 
 
 
 
6
  import os
7
  import sys
8
  import json
 
12
  import textwrap
13
  import requests
14
  import shutil
15
+ import time
16
  from pathlib import Path
17
+ from typing import Optional, Dict, List, Tuple
18
 
19
+ # ========== LFS ์ฒ˜๋ฆฌ ํ•จ์ˆ˜ (์ฒซ ๋ฒˆ์งธ ์ฝ”๋“œ์—์„œ) ========== #
20
+ def is_lfs_pointer_file(filepath):
21
+ """Check if a file is a Git LFS pointer file."""
22
+ # Initialize analysis
23
+ analysis = {}
24
+
25
+ try:
26
+ with open(filepath, 'rb') as f:
27
+ header = f.read(100)
28
+ return header.startswith(b'version https://git-lfs.github.com/spec/v1')
29
+ except:
30
+ return False
31
+
32
+ def remove_lfs_files(folder):
33
+ """Remove all LFS pointer files from the repository."""
34
+ removed_files = []
35
+ for root, dirs, files in os.walk(folder):
36
+ # Skip .git directory
37
+ if '.git' in root:
38
+ continue
39
+
40
+ for file in files:
41
+ filepath = os.path.join(root, file)
42
+ if is_lfs_pointer_file(filepath):
43
+ os.remove(filepath)
44
+ removed_files.append(filepath.replace(folder + os.sep, ''))
45
+
46
+ return removed_files
47
 
48
+ # ========== Repository ๋ถ„์„ ํ•จ์ˆ˜ (๋‘ ๋ฒˆ์งธ ์ฝ”๋“œ์—์„œ) ========== #
49
def analyze_repository(src_path: Path) -> Dict:
    """Inspect a cloned repository and collect deployment-relevant metadata.

    Gathers the dependency list (with a few known-bad version pins fixed and
    exact pins softened to ``>=``), README description / install / usage
    sections, candidate entry-point scripts, and notable model/config files.
    All file reads are best-effort: unreadable files are skipped silently.

    Args:
        src_path: root of the checked-out repository.

    Returns:
        Dict with keys such as ``dependencies``, ``description``,
        ``entry_points``, ``key_files``, ``model_files``, ``config_files``.
    """
    analysis = {
        "has_requirements": False,
        "has_readme": False,
        "has_setup_py": False,
        "main_language": "python",
        "key_files": [],
        "dependencies": [],
        "description": "",
        "installation_steps": [],
        "usage_examples": [],
        "model_files": [],
        "data_files": [],
        "config_files": [],
        "entry_points": [],
    }

    # ---- requirements.txt ------------------------------------------------
    req_file = src_path / "requirements.txt"
    if req_file.exists():
        analysis["has_requirements"] = True
        try:
            cleaned_deps = []
            for raw in req_file.read_text(encoding="utf-8").strip().split("\n"):
                dep = raw.strip()
                if not dep or dep.startswith("#"):
                    continue

                # "opencv-python==4.10.0" / "==4.10" do not exist on PyPI.
                if "opencv-python==4.10.0" in dep or "opencv-python==4.10" in dep:
                    dep = "opencv-python>=4.10.0.82"

                # Soften exact pins to ">=" except for core ML packages whose
                # exact version frequently matters for compatibility.
                if "==" in dep and not dep.startswith("git+"):
                    pkg_name, version = dep.split("==", 1)
                    if pkg_name.lower() in {"torch", "tensorflow", "transformers", "numpy"}:
                        cleaned_deps.append(dep)
                    else:
                        if version.count(".") == 1:
                            version += ".0"  # normalise "x.y" to "x.y.0"
                        cleaned_deps.append(f"{pkg_name}>={version}")
                else:
                    cleaned_deps.append(dep)
            analysis["dependencies"] = cleaned_deps
        except Exception:
            analysis["dependencies"] = []

    # ---- README ----------------------------------------------------------
    for readme_name in ["README.md", "readme.md", "README.rst", "README.txt"]:
        readme_file = src_path / readme_name
        if not readme_file.exists():
            continue
        analysis["has_readme"] = True
        try:
            readme_content = readme_file.read_text(encoding="utf-8")
            analysis["readme_content"] = readme_content[:5000]  # cap size
            lines = readme_content.split("\n")

            # First non-heading, non-badge line doubles as the description.
            for line in lines[:10]:
                if line.strip() and not line.startswith("#") and not line.startswith("!"):
                    analysis["description"] = line.strip()
                    break

            # Collect lines under "Install.." / "Usage.." headings until the
            # next heading resets the state.
            install_section = False
            usage_section = False
            for line in lines:
                if "install" in line.lower() and "#" in line:
                    install_section, usage_section = True, False
                    continue
                elif "usage" in line.lower() and "#" in line:
                    usage_section, install_section = True, False
                    continue
                elif "#" in line:
                    install_section = usage_section = False

                if install_section and line.strip():
                    analysis["installation_steps"].append(line.strip())
                elif usage_section and line.strip():
                    analysis["usage_examples"].append(line.strip())
        except Exception:
            pass
        break  # only the first README variant found is analysed

    # ---- candidate entry points and key Python files ---------------------
    py_files = list(src_path.glob("**/*.py"))
    for py_file in py_files[:20]:  # cap analysis cost
        if "__pycache__" in str(py_file) or ".git" in str(py_file):
            continue
        rel = str(py_file.relative_to(src_path))

        # Conventional entry-point file names.
        if any(name in py_file.name for name in
               ["main.py", "app.py", "demo.py", "run.py", "server.py", "streamlit_app.py"]):
            analysis["entry_points"].append(rel)

        try:
            content = py_file.read_text(encoding="utf-8")[:1000]
            # A __main__ guard also marks an entry point (dedup against the
            # name-based match above).
            if "if __name__" in content and "main" in content \
                    and rel not in analysis["entry_points"]:
                analysis["entry_points"].append(rel)

            # Files importing well-known ML/vision libs are "key files".
            if any(lib in content for lib in
                   ["torch", "tensorflow", "transformers", "numpy", "pandas", "cv2", "PIL"]):
                analysis["key_files"].append({"path": rel, "preview": content[:500]})
        except Exception:
            pass

    # ---- model weights ---------------------------------------------------
    for ext in [".pth", ".pt", ".ckpt", ".h5", ".pb", ".onnx", ".safetensors"]:
        for mf in list(src_path.glob(f"**/*{ext}"))[:5]:
            if ".git" not in str(mf):
                analysis["model_files"].append(str(mf.relative_to(src_path)))

    # ---- config files (patterns overlap, so dedupe) ----------------------
    for pattern in ["config.json", "config.yaml", "config.yml", "*.json", "*.yaml"]:
        for cf in list(src_path.glob(pattern))[:5]:
            rel = str(cf.relative_to(src_path))
            if ".git" not in str(cf) and rel not in analysis["config_files"]:
                analysis["config_files"].append(rel)

    return analysis
183
+
184
+ # ========== Brave Search ํ—ฌํผ (๋‘ ๋ฒˆ์งธ ์ฝ”๋“œ์—์„œ) ========== #
185
def search_repo_info(repo_url: str) -> str:
    """Collect supplementary info about a GitHub repo via Brave Search.

    Runs a few canned queries ("tutorial", "usage example", "demo") against
    the Brave Web Search API and formats up to five hits as plain text.

    Returns "" when BAPI_TOKEN is not configured or the URL does not end in
    an ``owner/name`` pair; individual query failures are skipped, so the
    result may be just the header line.
    """
    api_key = os.getenv("BAPI_TOKEN")
    if not api_key:
        return ""

    # Strip stray whitespace/newlines that often sneak into pasted keys.
    headers = {"X-Subscription-Token": api_key.strip(), "Accept": "application/json"}

    # Derive "owner/name" from the last two URL path components.
    repo_parts = repo_url.rstrip("/").split("/")
    if len(repo_parts) < 2:
        return ""
    repo_name = f"{repo_parts[-2]}/{repo_parts[-1]}"

    queries = [
        f'"{repo_name}" github tutorial',
        f'"{repo_name}" usage example',
        f'"{repo_name}" gradio streamlit demo',
    ]

    search_results = []
    for query in queries:
        try:
            resp = requests.get(
                "https://api.search.brave.com/res/v1/web/search",
                headers=headers,
                params={"q": query, "count": 3},
                timeout=10,
            )
            if resp.status_code == 200:
                for r in resp.json().get("web", {}).get("results", []):
                    search_results.append({
                        "title": r.get("title", ""),
                        "description": r.get("description", ""),
                        "url": r.get("url", ""),
                    })
        except Exception:
            # Network/JSON problems for one query must not abort the rest.
            continue

    search_text = f"Search results for {repo_name}:\n"
    for r in search_results[:5]:
        search_text += f"\n- {r['title']}: {r['description']}\n"

    return search_text
235
+
236
+ # ========== AI ์ƒ์„ฑ ํ—ฌํผ (๋‘ ๋ฒˆ์งธ ์ฝ”๋“œ์—์„œ) ========== #
237
def generate_gradio_app(repo_url: str, analysis: Dict, search_info: str = "") -> Dict:
    """Generate ``app_py`` / ``requirements_txt`` / ``summary`` for a repo.

    Tries OpenAI (gpt-4o-mini) first, then Friendli (two endpoints); when no
    API key is configured or every call fails, falls back to the rule-based
    :func:`create_smart_template`.  A failed JSON parse no longer aborts with
    ``None`` — it falls through to the next provider / the template.

    NOTE(review): parts of the original HTTP request construction were not
    visible in this diff; the OpenAI request below follows the standard
    chat-completions API — confirm against the deployed version.
    """
    # ---- prompt context assembled from the static analysis ----
    context = f"""Repository URL: {repo_url}

Repository Analysis:
- Description: {analysis.get('description', 'N/A')}
- Main Dependencies: {', '.join(analysis['dependencies'][:10])}
- Entry Points: {', '.join(analysis['entry_points'][:5])}
- Model Files: {', '.join(analysis['model_files'][:3])}
- Config Files: {', '.join(analysis['config_files'][:3])}

Key Files Found:
"""
    for kf in analysis.get('key_files', [])[:3]:
        context += f"\n--- {kf['path']} ---\n{kf['preview']}\n"

    if analysis.get('readme_content'):
        context += f"\n--- README.md (excerpt) ---\n{analysis['readme_content'][:2000]}\n"

    if search_info:
        context += f"\n--- Web Search Results ---\n{search_info}\n"

    if analysis['installation_steps']:
        context += "\nInstallation Steps:\n"
        for step in analysis['installation_steps'][:5]:
            context += f"- {step}\n"

    if analysis['usage_examples']:
        context += "\nUsage Examples:\n"
        for ex in analysis['usage_examples'][:5]:
            context += f"- {ex}\n"

    system_prompt = """You are an expert at creating Gradio apps from GitHub repositories.
Your task is to generate a complete, working Gradio interface that demonstrates the main functionality of the repository.

CRITICAL REQUIREMENTS:
1. The app.py must be FULLY FUNCTIONAL and runnable
2. DO NOT use 'from agent import' or any repository-specific imports that won't exist
3. Handle errors gracefully with clear user feedback
4. Include API key inputs when external services are required
5. Create intuitive UI components for the main features
6. Include helpful descriptions and examples
7. Always use gradio>=5.35.0
8. If the project requires external APIs (OpenAI, Anthropic, etc), include:
   - API key input fields
   - Clear instructions on how to obtain keys
   - Environment variable setup guidance
   - Graceful handling when keys are missing

Return ONLY valid JSON with these exact keys:
- app_py: Complete Gradio app code
- requirements_txt: All necessary dependencies including gradio>=5.35.0
- summary: Brief description of what the app does"""

    def _strip_code_fence(text: str) -> str:
        """Remove a ```json / ``` fence wrapper from a model reply, if any."""
        if "```json" in text:
            start = text.find("```json") + 7
            return text[start:text.find("```", start)].strip()
        if "```" in text:
            start = text.find("```") + 3
            return text[start:text.find("```", start)].strip()
        return text

    def _ensure_gradio(result: Dict) -> Dict:
        """Guarantee a gradio pin is present in the generated requirements."""
        if "gradio" not in result.get("requirements_txt", "").lower():
            result["requirements_txt"] = "gradio>=5.35.0\n" + result.get("requirements_txt", "")
        return result

    # ---- 1st choice: OpenAI ----
    openai_key = os.getenv("OPENAI_API_KEY")
    if openai_key:
        try:
            headers = {
                "Authorization": f"Bearer {openai_key.strip()}",
                "Content-Type": "application/json",
            }
            payload = {
                "model": "gpt-4o-mini",
                "messages": [
                    {"role": "system", "content": system_prompt},
                    {"role": "user", "content": f"Create a fully functional Gradio app for this repository:\n\n{context[:8000]}"}
                ],
                "temperature": 0.3,
                "max_tokens": 4000
            }
            r = requests.post(
                "https://api.openai.com/v1/chat/completions",
                json=payload, headers=headers, timeout=60
            )
            if r.status_code == 200:
                response_text = r.json()["choices"][0]["message"]["content"]
                print("✅ OpenAI API로 스마트 앱 생성 성공")
                try:
                    result = json.loads(_strip_code_fence(response_text))
                    if not all(key in result for key in ["app_py", "requirements_txt", "summary"]):
                        raise ValueError("Missing required keys in response")
                    return _ensure_gradio(result)
                except (json.JSONDecodeError, ValueError) as e:
                    # Fall through to the next provider instead of returning None.
                    print(f"⚠️ JSON 파싱 오류: {e}")
        except Exception as e:
            print(f"⚠️ OpenAI API 오류: {e}")

    # ---- 2nd choice: Friendli ----
    friendli_token = os.getenv("FRIENDLI_TOKEN")
    if friendli_token:
        try:
            headers = {
                "Authorization": f"Bearer {friendli_token.strip()}",
                "Content-Type": "application/json"
            }
            payload = {
                "model": "meta-llama-3.1-70b-instruct",
                "messages": [
                    {"role": "system", "content": system_prompt},
                    {"role": "user", "content": f"Create a Gradio app:\n{context[:6000]}"}
                ],
                "max_tokens": 4000,
                "temperature": 0.3
            }
            for endpoint in [
                "https://api.friendli.ai/v1/chat/completions",
                "https://api.friendli.ai/dedicated/v1/chat/completions"
            ]:
                r = requests.post(endpoint, json=payload, headers=headers, timeout=30)
                if r.status_code != 200:
                    continue
                response_text = r.json()["choices"][0]["message"]["content"]
                print("✅ Friendli API로 스마트 앱 생성 성공")
                return _ensure_gradio(json.loads(_strip_code_fence(response_text)))
        except Exception as e:
            print(f"⚠️ Friendli API 오류: {e}")

    # ---- fallback: deterministic rule-based template ----
    print("ℹ️ AI API가 없어 스마트 기본 템플릿을 생성합니다.")
    return create_smart_template(repo_url, analysis)
399
 
400
+ def create_smart_template(repo_url: str, analysis: Dict) -> Dict:
401
+ """๋ถ„์„ ๊ฒฐ๊ณผ๋ฅผ ๋ฐ”ํƒ•์œผ๋กœ ์Šค๋งˆํŠธํ•œ ๊ธฐ๋ณธ ํ…œํ”Œ๋ฆฟ ์ƒ์„ฑ"""
 
402
 
403
+ repo_name = Path(repo_url.rstrip("/")).name
404
+ description = analysis.get("description", "A project deployed from GitHub") if analysis else "A project deployed from GitHub"
 
 
405
 
406
+ # ์˜์กด์„ฑ ๊ธฐ๋ฐ˜ ์•ฑ ํƒ€์ž… ๊ฒฐ์ •
407
+ deps = " ".join(analysis.get("dependencies", [])) if analysis else ""
408
+ has_ml = any(lib in deps for lib in ["torch", "tensorflow", "transformers", "scikit-learn"])
409
+ has_cv = any(lib in deps for lib in ["cv2", "PIL", "pillow", "opencv"])
410
+ has_nlp = any(lib in deps for lib in ["transformers", "nltk", "spacy"])
411
+ has_audio = any(lib in deps for lib in ["librosa", "soundfile", "pyaudio"])
412
+ has_3d = any(lib in deps for lib in ["gaussian", "rasterizer", "plyfile", "trimesh"])
413
 
414
+ # ๊ธฐ๋ณธ requirements - git ์˜์กด์„ฑ ์ œ์™ธ
415
+ requirements = ["gradio>=5.35.0"]
416
+ if analysis and analysis.get("dependencies"):
417
+ # git+ ์˜์กด์„ฑ๊ณผ ๋กœ์ปฌ ์˜์กด์„ฑ ์ œ์™ธ
418
+ filtered_deps = []
419
+ for dep in analysis["dependencies"][:15]:
420
+ if not dep.startswith("git+") and not dep.startswith("-e") and not dep.startswith("file:"):
421
+ # ๋ฒ„์ „์ด ๋„ˆ๋ฌด ์—„๊ฒฉํ•œ ๊ฒฝ์šฐ ์™„ํ™”
422
+ if "==" in dep and dep.split("==")[0].lower() not in ["torch", "tensorflow", "numpy"]:
423
+ pkg_name = dep.split("==")[0]
424
+ version = dep.split("==")[1]
425
+ filtered_deps.append(f"{pkg_name}>={version}")
426
+ else:
427
+ filtered_deps.append(dep)
428
+ requirements.extend(filtered_deps)
429
 
430
+ # ์•ฑ ํƒ€์ž…๋ณ„ ํ…œํ”Œ๋ฆฟ ์ƒ์„ฑ
431
+ if has_3d or "gaussian" in repo_name.lower():
432
+ # 3D/Gaussian Splatting ์•ฑ
433
+ app_code = f'''import gradio as gr
434
+ import os
435
+ import sys
436
+
437
+ # Repository: {repo_url}
438
+ # {description}
439
+
440
+ # Note: This project requires CUDA-enabled GPU and complex build dependencies
441
+ # The original repository uses custom CUDA extensions that need compilation
442
+
443
+ def process_3d(input_file):
444
+ """3D processing function - placeholder for actual implementation"""
445
+ if input_file is None:
446
+ return "Please upload a 3D file or image"
447
 
448
+ info = """
449
+ ## โš ๏ธ Build Requirements Notice
450
 
451
+ This project requires:
452
+ 1. CUDA-enabled GPU
453
+ 2. Custom C++/CUDA extensions compilation
454
+ 3. Specific versions of PyTorch with CUDA support
 
 
 
 
 
 
 
 
 
 
 
455
 
456
+ The git dependencies in requirements.txt need PyTorch to be installed first.
457
+
458
+ For full functionality:
459
+ 1. Install PyTorch with CUDA: `pip install torch torchvision --index-url https://download.pytorch.org/whl/cu118`
460
+ 2. Install build tools: `apt-get install build-essential python3-dev ninja-build`
461
+ 3. Then install other requirements
462
+
463
+ Original repository: {repo_url}
464
+ """
465
+
466
+ return info
467
+
468
+ # Gradio interface
469
+ with gr.Blocks(title="{repo_name}") as demo:
470
+ gr.Markdown(f"""
471
+ # {repo_name.replace("-", " ").title()}
472
+
473
+ {description}
474
+
475
+ This space was created from: [{repo_url}]({repo_url})
476
+
477
+ **Note**: This project has complex build requirements. See below for details.
478
+ """)
479
+
480
+ with gr.Row():
481
+ with gr.Column():
482
+ input_file = gr.File(label="Upload 3D File or Image")
483
+ process_btn = gr.Button("Process", variant="primary")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
484
 
485
+ with gr.Column():
486
+ output_info = gr.Markdown()
487
+
488
+ process_btn.click(
489
+ fn=process_3d,
490
+ inputs=input_file,
491
+ outputs=output_info
492
+ )
 
 
 
493
 
494
+ if __name__ == "__main__":
495
+ demo.launch()
496
+ '''
497
+ elif has_cv:
498
+ app_code = f'''import gradio as gr
499
+ from PIL import Image
500
+ import numpy as np
501
 
502
+ # Repository: {repo_url}
503
+ # {description}
504
 
505
+ def process_image(image):
506
+ """์ด๋ฏธ์ง€ ์ฒ˜๋ฆฌ ํ•จ์ˆ˜ - ์‹ค์ œ ๊ตฌํ˜„์œผ๋กœ ๊ต์ฒด ํ•„์š”"""
507
+ if image is None:
508
+ return None, "Please upload an image"
509
+
510
+ # ์—ฌ๊ธฐ์— ์‹ค์ œ ์ด๋ฏธ์ง€ ์ฒ˜๋ฆฌ ๋กœ์ง ๊ตฌํ˜„
511
+ # ์˜ˆ: ๋ชจ๋ธ ๋กœ๋“œ, ์ „์ฒ˜๋ฆฌ, ์ถ”๋ก , ํ›„์ฒ˜๋ฆฌ
512
+
513
+ # ๋ฐ๋ชจ์šฉ ๊ฐ„๋‹จํ•œ ์ฒ˜๋ฆฌ
514
+ img_array = np.array(image)
515
+ processed = Image.fromarray(img_array)
516
+
517
+ info = f"Image shape: {img_array.shape}"
518
+ return processed, info
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
519
 
520
+ # Gradio ์ธํ„ฐํŽ˜์ด์Šค ์ƒ์„ฑ
521
+ with gr.Blocks(title="{repo_name}") as demo:
522
+ gr.Markdown(f"""
523
  # {repo_name.replace("-", " ").title()}
524
 
525
+ {description}
526
 
527
+ This space was created from: [{repo_url}]({repo_url})
528
+ """)
529
 
530
+ with gr.Row():
531
+ with gr.Column():
532
+ input_image = gr.Image(label="Input Image", type="pil")
533
+ process_btn = gr.Button("Process Image", variant="primary")
534
+
535
+ with gr.Column():
536
+ output_image = gr.Image(label="Output Image")
537
+ output_info = gr.Textbox(label="Information")
538
+
539
+ process_btn.click(
540
+ fn=process_image,
541
+ inputs=input_image,
542
+ outputs=[output_image, output_info]
543
+ )
544
 
545
  if __name__ == "__main__":
546
  demo.launch()
547
  '''
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
548
 
549
+ elif has_nlp:
550
+ app_code = f'''import gradio as gr
551
+
552
+ # Repository: {repo_url}
553
+ # {description}
554
 
555
+ def process_text(text, max_length=100):
556
+ """ํ…์ŠคํŠธ ์ฒ˜๋ฆฌ ํ•จ์ˆ˜ - ์‹ค์ œ ๊ตฌํ˜„์œผ๋กœ ๊ต์ฒด ํ•„์š”"""
557
+ if not text:
558
+ return "Please enter some text"
559
 
560
+ # ์—ฌ๊ธฐ์— ์‹ค์ œ NLP ์ฒ˜๋ฆฌ ๋กœ์ง ๊ตฌํ˜„
 
561
 
562
+ # ๋ฐ๋ชจ์šฉ ๊ฐ„๋‹จ๏ฟฝ๏ฟฝ ์ฒ˜๋ฆฌ
563
+ word_count = len(text.split())
564
+ char_count = len(text)
 
 
565
 
566
+ result = f"""
567
+ **Analysis Results:**
568
+ - Word count: {word_count}
569
+ - Character count: {char_count}
570
+ - Average word length: {char_count/max(word_count, 1):.1f}
571
+ """
572
 
573
+ return result
 
574
 
575
+ # Gradio ์ธํ„ฐํŽ˜์ด์Šค ์ƒ์„ฑ
576
+ with gr.Blocks(title="{repo_name}") as demo:
577
+ gr.Markdown(f"""
578
+ # {repo_name.replace("-", " ").title()}
579
+
580
+ {description}
581
+
582
+ This space was created from: [{repo_url}]({repo_url})
583
+ """)
584
+
585
+ with gr.Row():
586
+ with gr.Column():
587
+ input_text = gr.Textbox(
588
+ label="Input Text",
589
+ placeholder="Enter your text here...",
590
+ lines=5
591
+ )
592
+ max_length = gr.Slider(
593
+ minimum=10,
594
+ maximum=500,
595
+ value=100,
596
+ label="Max Length"
597
+ )
598
+ process_btn = gr.Button("Process Text", variant="primary")
599
+
600
+ with gr.Column():
601
+ output_text = gr.Markdown(label="Results")
602
+
603
+ process_btn.click(
604
+ fn=process_text,
605
+ inputs=[input_text, max_length],
606
+ outputs=output_text
607
+ )
608
 
609
+ if __name__ == "__main__":
610
+ demo.launch()
611
+ '''
612
+
613
+ else:
614
+ app_code = f'''import gradio as gr
615
 
616
+ # Repository: {repo_url}
617
+ # {description}
 
618
 
619
+ def main_function(input_data):
620
+ """๋ฉ”์ธ ์ฒ˜๋ฆฌ ํ•จ์ˆ˜ - ์‹ค์ œ ๊ตฌํ˜„์œผ๋กœ ๊ต์ฒด ํ•„์š”"""
621
+ if not input_data:
622
+ return "Please provide input"
623
 
624
+ # ์—ฌ๊ธฐ์— ์‹ค์ œ ์ฒ˜๋ฆฌ ๋กœ์ง ๊ตฌํ˜„
 
 
 
625
 
626
+ result = f"Processed successfully! Input received: {input_data}"
627
+ return result
628
 
629
+ # Gradio ์ธํ„ฐํŽ˜์ด์Šค ์ƒ์„ฑ
630
+ with gr.Blocks(title="{repo_name}") as demo:
631
+ gr.Markdown(f"""
632
+ # {repo_name.replace("-", " ").title()}
633
 
634
+ {description}
635
+
636
+ This space was created from: [{repo_url}]({repo_url})
637
+ """)
638
+
639
+ with gr.Row():
640
+ with gr.Column():
641
+ input_data = gr.Textbox(
642
+ label="Input",
643
+ placeholder="Enter your input here...",
644
+ lines=3
645
+ )
646
+ process_btn = gr.Button("Process", variant="primary")
647
 
648
+ with gr.Column():
649
+ output_data = gr.Textbox(label="Output")
650
+
651
+ process_btn.click(
652
+ fn=main_function,
653
+ inputs=input_data,
654
+ outputs=output_data
655
+ )
656
 
657
+ if __name__ == "__main__":
658
+ demo.launch()
659
+ '''
660
+
661
+ return {
662
+ "app_py": app_code,
663
+ "requirements_txt": "\n".join(requirements),
664
+ "summary": f"Smart template created for {repo_name}"
665
+ }
666
 
667
+ # ========== ํ†ตํ•ฉ๋œ ๋ฉ”์ธ clone ํ•จ์ˆ˜ ========== #
668
+ def clone(repo_git, repo_hf, sdk_type, skip_lfs, enable_smart_generation):
669
+ """GitHub ๋ ˆํฌ์ง€ํ† ๋ฆฌ๋ฅผ HuggingFace Space๋กœ ๋ณต์ œํ•˜๊ณ  ์Šค๋งˆํŠธํ•˜๊ฒŒ app.py ์ƒ์„ฑ"""
670
+ folder = str(uuid.uuid4())
671
+
672
+ # ํ™˜๊ฒฝ๋ณ€์ˆ˜์—์„œ HF_TOKEN ๊ฐ€์ ธ์˜ค๊ธฐ
673
+ hf_token = os.getenv("HF_TOKEN")
674
+ if not hf_token:
675
+ yield "โŒ Error: HF_TOKEN not found in environment variables. Please set it in the Space settings."
676
+ return
677
+
678
+ try:
679
+ # Initialize progress messages
680
+ yield "๐Ÿ”„ Starting clone process..."
681
 
682
+ # Get user info
683
+ api = HfApi(token=hf_token)
684
+ try:
685
+ user_info = api.whoami()
686
+ username = user_info["name"]
687
+ yield f"โœ… Authenticated as: {username}"
688
+ except Exception as e:
689
+ yield f"โŒ Authentication failed: {str(e)}"
690
+ return
691
 
692
+ # Clone the repository
693
+ yield f"๐Ÿ“ฅ Cloning repository from {repo_git}..."
 
 
 
 
 
 
 
694
 
695
+ env = os.environ.copy()
696
+
697
+ # Always skip LFS download initially to avoid errors
698
+ env['GIT_LFS_SKIP_SMUDGE'] = '1'
699
+ clone_cmd = ['git', 'clone', '--recurse-submodules', repo_git, folder]
700
+ subprocess.run(clone_cmd, check=True, env=env)
701
 
702
+ if not skip_lfs:
703
+ # Try to pull LFS files
704
+ yield "๐Ÿ“ฆ Attempting to download LFS files..."
705
+ try:
706
+ subprocess.run(['git', 'lfs', 'install'], cwd=folder, check=True)
707
+ lfs_result = subprocess.run(['git', 'lfs', 'pull'], cwd=folder, capture_output=True, text=True)
708
+
709
+ if lfs_result.returncode != 0:
710
+ yield f"โš ๏ธ Warning: LFS download failed: {lfs_result.stderr}"
711
+ yield "โš ๏ธ Will remove LFS pointer files to prevent upload errors..."
712
+ skip_lfs = True # Force LFS skip
713
+ else:
714
+ yield "โœ… LFS files downloaded successfully"
715
+ except Exception as e:
716
+ yield f"โš ๏ธ LFS error: {str(e)}"
717
+ yield "โš ๏ธ Will remove LFS pointer files to prevent upload errors..."
718
+ skip_lfs = True # Force LFS skip
719
+
720
+ # If we're skipping LFS, remove all LFS pointer files
721
+ if skip_lfs:
722
+ yield "๐Ÿงน Removing LFS pointer files..."
723
+ removed_files = remove_lfs_files(folder)
724
+ if removed_files:
725
+ yield f"๐Ÿ“ Removed {len(removed_files)} LFS pointer files"
726
+ # Show first few removed files
727
+ for file in removed_files[:5]:
728
+ yield f" - {file}"
729
+ if len(removed_files) > 5:
730
+ yield f" ... and {len(removed_files) - 5} more files"
731
+
732
+ # ์Šค๋งˆํŠธ ์ƒ์„ฑ์ด ํ™œ์„ฑํ™”๋œ ๊ฒฝ์šฐ
733
+ if enable_smart_generation:
734
+ yield "๐Ÿ” Analyzing repository structure..."
735
+ folder_path = Path(folder)
736
+ analysis = analyze_repository(folder_path)
737
+
738
+ yield "๐Ÿ” Searching for additional information..."
739
+ search_info = search_repo_info(repo_git)
740
+
741
+ yield "๐Ÿค– Generating smart Gradio app..."
742
+ generated = generate_gradio_app(repo_git, analysis, search_info)
743
+
744
+ if generated and isinstance(generated, dict) and "app_py" in generated:
745
+ # app.py ์ƒ์„ฑ/๋ฎ์–ด์“ฐ๊ธฐ
746
+ app_path = folder_path / "app.py"
747
+ app_path.write_text(generated["app_py"], encoding="utf-8")
748
+ yield "โœ… Smart app.py generated"
749
+
750
+ # requirements.txt ์—…๋ฐ์ดํŠธ - ์˜์กด์„ฑ ์ˆœ์„œ ์ตœ์ ํ™”
751
+ req_path = folder_path / "requirements.txt"
752
+ existing_reqs = []
753
+ if req_path.exists():
754
+ try:
755
+ existing_reqs = req_path.read_text(encoding="utf-8").strip().split("\n")
756
+ except:
757
+ existing_reqs = []
758
+
759
+ new_reqs = generated["requirements_txt"].strip().split("\n") if generated["requirements_txt"] else []
760
+
761
+ # ์˜์กด์„ฑ ์ •๋ฆฌ ๋ฐ ์ˆœ์„œ ์ตœ์ ํ™”
762
+ all_reqs = set()
763
+ git_reqs = []
764
+ torch_reqs = []
765
+ regular_reqs = []
766
+
767
+ for req in existing_reqs + new_reqs:
768
+ req = req.strip()
769
+ if not req or req.startswith("#"):
770
+ continue
771
+
772
+ # git+ ์˜์กด์„ฑ์€ ๋”ฐ๋กœ ๊ด€๋ฆฌ
773
+ if req.startswith("git+"):
774
+ git_reqs.append(req)
775
+ # torch ๊ด€๋ จ ์˜์กด์„ฑ์€ ๋จผ์ € ์„ค์น˜
776
+ elif "torch" in req.lower() or "cuda" in req.lower():
777
+ torch_reqs.append(req)
778
+ else:
779
+ regular_reqs.append(req)
780
+
781
+ # gradio ๋ฒ„์ „ ํ™•์ธ ๋ฐ ์ถ”๊ฐ€
782
+ has_gradio = any("gradio" in req for req in regular_reqs)
783
+ if not has_gradio:
784
+ regular_reqs.append("gradio>=5.35.0")
785
+
786
+ # ์ตœ์ข… requirements.txt ์ƒ์„ฑ (์ˆœ์„œ ์ค‘์š”)
787
+ final_reqs = []
788
+
789
+ # 1. torch ๊ด€๋ จ ๋จผ์ €
790
+ if torch_reqs:
791
+ final_reqs.extend(sorted(set(torch_reqs)))
792
+ final_reqs.append("") # ๋นˆ ์ค„
793
+
794
+ # 2. ์ผ๋ฐ˜ ์˜์กด์„ฑ
795
+ final_reqs.extend(sorted(set(regular_reqs)))
796
+
797
+ # 3. git ์˜์กด์„ฑ์€ ๋งˆ์ง€๋ง‰์— (torch๊ฐ€ ํ•„์š”ํ•œ ๊ฒฝ์šฐ๊ฐ€ ๋งŽ์Œ)
798
+ if git_reqs:
799
+ final_reqs.append("") # ๋นˆ ์ค„
800
+ final_reqs.append("# Git dependencies (installed last)")
801
+ final_reqs.extend(sorted(set(git_reqs)))
802
+
803
+ req_content = "\n".join(final_reqs)
804
+ req_path.write_text(req_content, encoding="utf-8")
805
+ yield "โœ… Requirements.txt updated with optimized dependency order"
806
+
807
+ # README.md ์—…๋ฐ์ดํŠธ - ํ•ญ์ƒ ์ƒ์„ฑํ•˜์—ฌ ์˜ฌ๋ฐ”๋ฅธ ํ˜•์‹ ๋ณด์žฅ
808
+ readme_path = folder_path / "README.md"
809
+ readme_content = f"""---
810
+ title: {repo_hf.replace("-", " ").title()}
811
+ emoji: ๐Ÿš€
812
+ colorFrom: blue
813
+ colorTo: green
814
+ sdk: {sdk_type}
815
+ sdk_version: "5.35.0"
816
+ app_file: app.py
817
+ pinned: false
818
+ ---
819
+
820
+ # {repo_hf.replace("-", " ").title()}
821
 
822
+ {analysis.get('description', 'Deployed from GitHub repository')}
823
 
824
+ Deployed from: {repo_git}
825
 
826
+ ## Features
827
+ This Space provides a Gradio interface for the repository's main functionality.
828
+ The app.py was automatically generated based on repository analysis.
829
+
830
+ Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
831
+ """
832
+ readme_path.write_text(readme_content, encoding="utf-8")
833
+ yield "โœ… README.md created/updated"
834
+ else:
835
+ # ์Šค๋งˆํŠธ ์ƒ์„ฑ์ด ๋น„ํ™œ์„ฑํ™”๋œ ๊ฒฝ์šฐ์—๋„ README.md ํ™•์ธ ๋ฐ ์ƒ์„ฑ
836
+ readme_path = Path(folder) / "README.md"
837
+ if not readme_path.exists():
838
+ # ๊ธฐ๋ณธ README.md ์ƒ์„ฑ
839
+ readme_content = f"""---
840
+ title: {repo_hf.replace("-", " ").title()}
841
+ emoji: ๐Ÿš€
842
+ colorFrom: blue
843
+ colorTo: green
844
+ sdk: {sdk_type}
845
+ sdk_version: "5.35.0"
846
+ app_file: app.py
847
+ pinned: false
848
+ ---
849
 
850
+ # {repo_hf.replace("-", " ").title()}
 
 
851
 
852
+ Deployed from: {repo_git}
 
 
 
 
 
853
 
854
+ Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
855
+ """
856
+ readme_path.write_text(readme_content, encoding="utf-8")
857
+ yield "โœ… README.md created with required configuration"
858
 
859
+ # requirements.txt ํ™•์ธ ๋ฐ ๋ฌธ์ œ ํ•ด๊ฒฐ
860
+ req_path = Path(folder) / "requirements.txt"
861
+ if req_path.exists():
 
 
 
862
  try:
863
+ req_content = req_path.read_text(encoding="utf-8")
864
+ lines = req_content.strip().split("\n")
865
+
866
+ # ์˜์กด์„ฑ ๋ถ„๋ฅ˜ ๋ฐ ์ค‘๋ณต ์ œ๊ฑฐ
867
+ torch_deps = []
868
+ git_deps = []
869
+ regular_deps = []
870
+ problem_git_deps = [] # torch๊ฐ€ ํ•„์š”ํ•œ git ์˜์กด์„ฑ
871
+ seen_packages = {} # ํŒจํ‚ค์ง€๋ช… -> ์ „์ฒด ์˜์กด์„ฑ ๋งคํ•‘
872
+
873
+ for line in lines:
874
+ line = line.strip()
875
+ if not line or line.startswith("#"):
876
+ continue
877
+
878
+ if line.startswith("git+"):
879
+ # git ์˜์กด์„ฑ ์ค‘ CUDA/์ปดํŒŒ์ผ์ด ํ•„์š”ํ•œ ๊ฒฝ์šฐ ํ™•์ธ
880
+ cuda_keywords = ["gaussian", "rasterizer", "diff-", "cuda", "nvdiffrast", "tiny-cuda"]
881
+ if any(keyword in line.lower() for keyword in cuda_keywords):
882
+ problem_git_deps.append(line)
883
+ else:
884
+ git_deps.append(line)
885
+ else:
886
+ # ํŒจํ‚ค์ง€๋ช… ์ถ”์ถœ (๋ฒ„์ „ ์ง€์ •์ž ์ œ๊ฑฐ)
887
+ pkg_name = line.split("==")[0].split(">=")[0].split("<=")[0].split(">")[0].split("<")[0].split("~=")[0].split("[")[0].strip()
888
+
889
+ # ํŠน๋ณ„ํ•œ ์„ค์น˜๊ฐ€ ํ•„์š”ํ•œ ํŒจํ‚ค์ง€๋“ค
890
+ special_install_packages = ["pytorch3d", "torch-scatter", "torch-sparse", "torch-geometric", "tiny-cuda-nn"]
891
+
892
+ if pkg_name in special_install_packages:
893
+ problem_git_deps.append(f"# {line} # Requires special installation")
894
+ yield f" โ†’ Marked {pkg_name} for special handling"
895
+ continue
896
+
897
+ # ํŠน์ • ํŒจํ‚ค์ง€์˜ ์ž˜๋ชป๋œ ๋ฒ„์ „ ์ˆ˜์ •
898
+ if pkg_name == "opencv-python":
899
+ if "==4.10.0" in line or "==4.10" in line:
900
+ line = "opencv-python>=4.10.0.82"
901
+ yield "๐Ÿ“ Fixed opencv-python version (4.10.0 โ†’ 4.10.0.82)"
902
+
903
+ # ์ค‘๋ณต ์ฒดํฌ
904
+ if pkg_name in seen_packages:
905
+ # ์ด๋ฏธ ์žˆ๋Š” ํŒจํ‚ค์ง€๋ฉด ๋ฒ„์ „ ๋น„๊ต
906
+ existing = seen_packages[pkg_name]
907
+ # ๋” ๊ตฌ์ฒด์ ์ธ ๋ฒ„์ „์„ ์„ ํƒ (== > >= > ๋ฒ„์ „ ์—†์Œ)
908
+ if "==" in line and "==" not in existing:
909
+ seen_packages[pkg_name] = line
910
+ elif "==" not in existing and ">=" in line and ">=" not in existing:
911
+ seen_packages[pkg_name] = line
912
+ # ๊ฐ™์€ ์ˆ˜์ค€์ด๋ฉด ๋” ์ตœ์‹  ๋ฒ„์ „ ์„ ํƒ
913
+ elif "==" in line and "==" in existing:
914
+ try:
915
+ new_ver = line.split("==")[1]
916
+ old_ver = existing.split("==")[1]
917
+ # ๋ฒ„์ „ ๋น„๊ต (๊ฐ„๋‹จํ•œ ๋ฌธ์ž์—ด ๋น„๊ต)
918
+ if new_ver > old_ver:
919
+ seen_packages[pkg_name] = line
920
+ except:
921
+ pass
922
+ yield f" โ†’ Resolved duplicate: {pkg_name} - using {seen_packages[pkg_name]}"
923
+ else:
924
+ seen_packages[pkg_name] = line
925
+
926
+ # ๋ถ„๋ฅ˜๋œ ์˜์กด์„ฑ์œผ๋กœ ์žฌ๊ตฌ์„ฑ
927
+ for pkg_name, dep_line in seen_packages.items():
928
+ if any(t in pkg_name.lower() for t in ["torch==", "torch>=", "torch~=", "torch<", "torch>", "torch[", "torchvision", "torchaudio"]):
929
+ torch_deps.append(dep_line)
930
+ else:
931
+ regular_deps.append(dep_line)
932
+
933
+ # gradio ๋ฒ„์ „ ํ™•์ธ
934
+ has_gradio = any("gradio" in pkg for pkg in seen_packages.keys())
935
+ if not has_gradio:
936
+ regular_deps.append("gradio>=5.35.0")
937
+ seen_packages["gradio"] = "gradio>=5.35.0"
938
+
939
+ # torch๊ฐ€ ์—†์œผ๋ฉด ์ถ”๊ฐ€ (CUDA ์˜์กด์„ฑ์ด ์žˆ๋Š” ๊ฒฝ์šฐ)
940
+ torch_packages = [p for p in seen_packages.keys() if p == "torch"]
941
+ if not torch_packages and (problem_git_deps or any("torch" in dep for dep in git_deps)):
942
+ torch_deps.append("torch>=2.0.0")
943
+ yield "โš ๏ธ Added torch dependency for git packages"
944
+
945
+ # CPU ๋ฒ„์ „ torch๋กœ ๋Œ€์ฒด ์ œ์•ˆ
946
+ cpu_torch_suggested = False
947
+ for i, dep in enumerate(torch_deps):
948
+ if "torch==" in dep or "torch>=" in dep:
949
+ # CUDA ๋ฒ„์ „์ด ๋ช…์‹œ๋˜์–ด ์žˆ์œผ๋ฉด CPU ๋ฒ„์ „ ์ œ์•ˆ
950
+ if "+cu" in dep:
951
+ torch_deps[i] = dep.split("+cu")[0]
952
+ cpu_torch_suggested = True
953
+
954
+ if cpu_torch_suggested:
955
+ yield "โ„น๏ธ Converted torch to CPU version for HuggingFace Spaces compatibility"
956
+
957
+ # ์žฌ์ •๋ ฌ๋œ requirements.txt ์ž‘์„ฑ
958
+ new_lines = []
959
+
960
+ # 1. ๋จผ์ € torch ์„ค์น˜
961
+ if torch_deps:
962
+ new_lines.append("# PyTorch - Must be installed first")
963
+ new_lines.extend(sorted(set(torch_deps)))
964
+ new_lines.append("")
965
+
966
+ # 2. ์ผ๋ฐ˜ ์˜์กด์„ฑ
967
+ if regular_deps:
968
+ # ํŠน๋ณ„ํ•œ ์„ค์น˜๊ฐ€ ํ•„์š”ํ•œ ํŒจํ‚ค์ง€๋“ค
969
+ special_packages = {
970
+ "pytorch3d": "# pytorch3d requires special installation from https://github.com/facebookresearch/pytorch3d/blob/main/INSTALL.md",
971
+ "torch-scatter": "# torch-scatter requires matching torch version",
972
+ "torch-sparse": "# torch-sparse requires matching torch version",
973
+ "torch-geometric": "# torch-geometric requires special installation"
974
+ }
975
+
976
+ # ์ถ”๊ฐ€ ๋ฒ„์ „ ๊ฒ€์ฆ ๋ฐ ์ˆ˜์ •
977
+ validated_deps = []
978
+ problematic_versions = {
979
+ "opencv-python": {
980
+ "4.10.0": "4.10.0.84",
981
+ "4.10": "4.10.0.84",
982
+ "4.9.0": "4.9.0.80",
983
+ "4.8.0": "4.8.0.76"
984
+ },
985
+ "pillow": {
986
+ "10.0": "10.0.0",
987
+ "9.5": "9.5.0"
988
+ }
989
+ }
990
+
991
+ skipped_packages = []
992
+
993
+ for dep in regular_deps:
994
+ pkg_name = dep.split("==")[0].split(">=")[0].split("[")[0].strip()
995
+
996
+ # ํŠน๋ณ„ํ•œ ์„ค์น˜๊ฐ€ ํ•„์š”ํ•œ ํŒจํ‚ค์ง€๋Š” ์ฃผ์„ ์ฒ˜๋ฆฌ
997
+ if pkg_name in special_packages:
998
+ skipped_packages.append(f"# {dep} {special_packages[pkg_name]}")
999
+ yield f" โ†’ Commented out {pkg_name} (requires special installation)"
1000
+ continue
1001
+
1002
+ # ๋ฒ„์ „ ์ˆ˜์ •์ด ํ•„์š”ํ•œ ํŒจํ‚ค์ง€ ์ฒ˜๋ฆฌ
1003
+ if pkg_name in problematic_versions and "==" in dep:
1004
+ version = dep.split("==")[1].strip()
1005
+ if version in problematic_versions[pkg_name]:
1006
+ new_version = problematic_versions[pkg_name][version]
1007
+ new_dep = f"{pkg_name}>={new_version}"
1008
+ validated_deps.append(new_dep)
1009
+ yield f" โ†’ Fixed version: {dep} โ†’ {new_dep}"
1010
+ else:
1011
+ validated_deps.append(dep)
1012
+ else:
1013
+ validated_deps.append(dep)
1014
+
1015
+ new_lines.append("# Core dependencies")
1016
+ # opencv-python ์ค‘๋ณต ์ œ๊ฑฐ ํ™•์ธ
1017
+ deduped_regular = []
1018
+ seen = set()
1019
+ for dep in sorted(validated_deps):
1020
+ pkg_name = dep.split("==")[0].split(">=")[0].split("<=")[0].split(">")[0].split("<")[0].split("~=")[0].split("[")[0].strip()
1021
+ if pkg_name not in seen:
1022
+ deduped_regular.append(dep)
1023
+ seen.add(pkg_name)
1024
+ new_lines.extend(deduped_regular)
1025
+ new_lines.append("")
1026
+
1027
+ # ํŠน๋ณ„ํ•œ ์„ค์น˜๊ฐ€ ํ•„์š”ํ•œ ํŒจํ‚ค์ง€๋“ค์„ ์ฃผ์„์œผ๋กœ ์ถ”๊ฐ€
1028
+ if skipped_packages:
1029
+ new_lines.append("# โš ๏ธ The following packages require special installation:")
1030
+ new_lines.extend(skipped_packages)
1031
+ new_lines.append("")
1032
+
1033
+ # 3. ์ผ๋ฐ˜ git ์˜์กด์„ฑ
1034
+ if git_deps:
1035
+ new_lines.append("# Git dependencies")
1036
+ new_lines.extend(sorted(set(git_deps)))
1037
+ new_lines.append("")
1038
+
1039
+ # ๋ฌธ์ œ๊ฐ€ ๋˜๋Š” git ์˜์กด์„ฑ๊ณผ ํŠน์ˆ˜ ํŒจํ‚ค์ง€๋Š” ์ฃผ์„ ์ฒ˜๋ฆฌ
1040
+ if problem_git_deps:
1041
+ new_lines.append("")
1042
+ new_lines.append("# โš ๏ธ CUDA-dependent packages and special installations")
1043
+ new_lines.append("# These packages require special installation methods:")
1044
+ new_lines.append("# - pytorch3d: Install from https://github.com/facebookresearch/pytorch3d/blob/main/INSTALL.md")
1045
+ new_lines.append("# - CUDA packages: Require CUDA toolkit and GPU environment")
1046
+ new_lines.append("#")
1047
+ for dep in problem_git_deps:
1048
+ if not dep.startswith("#"):
1049
+ new_lines.append(f"# {dep}")
1050
+ else:
1051
+ new_lines.append(dep)
1052
+
1053
+ # ๊ฒฝ๊ณ  ๋ฉ”์‹œ์ง€ ์ถœ๋ ฅ
1054
+ yield f"โš ๏ธ Commented out {len(problem_git_deps)} packages requiring special installation"
1055
+
1056
+ # ๋นˆ ์ค„ ์ œ๊ฑฐํ•˜๊ณ  ์ •๋ฆฌ
1057
+ final_lines = []
1058
+ for i, line in enumerate(new_lines):
1059
+ # ์ฃผ์„ ๋‹ค์Œ์— ๋ฐ”๋กœ ๋‚ด์šฉ์ด ์žˆ๋Š”์ง€ ํ™•์ธ
1060
+ if line.strip() and not (i > 0 and new_lines[i-1].startswith("#") and line == ""):
1061
+ final_lines.append(line)
1062
+ elif line == "" and i < len(new_lines) - 1: # ์ค‘๊ฐ„์˜ ๋นˆ ์ค„์€ ์œ ์ง€
1063
+ final_lines.append(line)
1064
+
1065
+ req_path.write_text("\n".join(final_lines), encoding="utf-8")
1066
+
1067
+ # ์˜์กด์„ฑ ๊ฐœ์ˆ˜ ํ†ต๊ณ„
1068
+ total_deps = len(torch_deps) + len(regular_deps) + len(git_deps) + len(problem_git_deps)
1069
+ yield f"โœ… Reorganized requirements.txt - Total {total_deps} dependencies (duplicates removed)"
1070
+ if torch_deps:
1071
+ yield f" - PyTorch packages: {len(torch_deps)}"
1072
+ if regular_deps:
1073
+ yield f" - Regular packages: {len(set(regular_deps))}"
1074
+ if git_deps or problem_git_deps:
1075
+ yield f" - Git dependencies: {len(git_deps + problem_git_deps)} ({len(problem_git_deps)} commented)"
1076
+
1077
+ # pre-requirements.txt๋Š” ๋” ์ด์ƒ ํ•„์š”ํ•˜์ง€ ์•Š์Œ (ํ†ตํ•ฉ๋œ requirements.txt ์‚ฌ์šฉ)
1078
+ # packages.txt๋„ HF Spaces ๊ธฐ๋ณธ ํ™˜๊ฒฝ์—์„œ๋Š” ๋ถˆํ•„์š”
1079
+
1080
+ # README.md์— ๋กœ์ปฌ ์‹คํ–‰ ๊ฐ€์ด๋“œ ์ถ”๊ฐ€
1081
+ if problem_git_deps:
1082
+ readme_path = Path(folder) / "README.md"
1083
+ if readme_path.exists():
1084
+ try:
1085
+ existing_readme = readme_path.read_text(encoding="utf-8")
1086
+
1087
+ # YAML ํ—ค๋” ์ดํ›„์— ๋กœ์ปฌ ์‹คํ–‰ ๊ฐ€์ด๋“œ ์ถ”๊ฐ€
1088
+ if "---" in existing_readme:
1089
+ parts = existing_readme.split("---", 2)
1090
+ if len(parts) >= 3:
1091
+ yaml_header = parts[1]
1092
+ content = parts[2]
1093
+
1094
+ # repo_id๊ฐ€ ์ •์˜๋˜์ง€ ์•Š์•˜์œผ๋ฏ€๋กœ repo_hf์™€ username ์‚ฌ์šฉ
1095
+ repo_id = f"{username}/{slugify(repo_hf)}"
1096
+
1097
+ local_guide = f"""
1098
+ ## โš ๏ธ GPU/CUDA Requirements
1099
+
1100
+ This project contains CUDA-dependent packages that cannot run on standard HuggingFace Spaces (CPU environment).
1101
 
1102
+ ### Running Locally with GPU
1103
 
1104
+ ```bash
1105
+ # Install CUDA Toolkit (if not installed)
1106
+ # Visit: https://developer.nvidia.com/cuda-downloads
1107
 
1108
+ # Clone this Space
1109
+ git clone https://huggingface.co/spaces/{repo_id}
1110
+ cd {repo_id.split('/')[-1]}
 
 
1111
 
1112
+ # Install PyTorch with CUDA
1113
+ pip install torch torchvision --index-url https://download.pytorch.org/whl/cu118
 
 
1114
 
1115
+ # Uncomment CUDA dependencies in requirements.txt
1116
+ # Then install all requirements
1117
+ pip install -r requirements.txt
1118
+ ```
1119
+
1120
+ ### Enabling GPU on HuggingFace Spaces
1121
 
1122
+ To use GPU on this Space:
1123
+ 1. Go to Settings โ†’ Hardware
1124
+ 2. Select GPU (T4 or A10G)
1125
+ 3. Costs apply for GPU usage
1126
 
 
1127
  ---
1128
+
1129
+ """
1130
+ new_readme = f"---{yaml_header}---\n{local_guide}{content}"
1131
+ readme_path.write_text(new_readme, encoding="utf-8")
1132
+ yield "๐Ÿ“ Added GPU setup guide to README.md"
1133
+ except Exception as e:
1134
+ yield f"โš ๏ธ Could not update README with GPU guide: {str(e)}"
1135
+ except Exception as e:
1136
+ yield f"โš ๏ธ Error processing requirements.txt: {str(e)}"
1137
+
1138
+ # Remove .git directory to save space and avoid issues
1139
+ git_dir = os.path.join(folder, '.git')
1140
+ if os.path.exists(git_dir):
1141
+ shutil.rmtree(git_dir)
1142
+ yield "๐Ÿงน Removed .git directory"
1143
+
1144
+ # Also clean up .gitattributes to remove LFS tracking
1145
+ gitattributes_path = os.path.join(folder, '.gitattributes')
1146
+ if os.path.exists(gitattributes_path):
1147
+ yield "๐Ÿงน Cleaning .gitattributes file..."
1148
+ with open(gitattributes_path, 'r') as f:
1149
+ lines = f.readlines()
1150
+
1151
+ new_lines = []
1152
+ for line in lines:
1153
+ if 'filter=lfs' not in line:
1154
+ new_lines.append(line)
1155
+
1156
+ if new_lines:
1157
+ with open(gitattributes_path, 'w') as f:
1158
+ f.writelines(new_lines)
1159
+ else:
1160
+ # Remove empty .gitattributes
1161
+ os.remove(gitattributes_path)
1162
+
1163
+ # ๊ธฐ์กด README๊ฐ€ ์žˆ๋Š”์ง€ ํ™•์ธํ•˜๊ณ  Space ํ—ค๋”๊ฐ€ ์—†์œผ๋ฉด ์ถ”๊ฐ€
1164
+ readme_path = Path(folder) / "README.md"
1165
+ if readme_path.exists():
1166
+ try:
1167
+ existing_content = readme_path.read_text(encoding="utf-8")
1168
+ # YAML ํ—ค๋”๊ฐ€ ์—†์œผ๋ฉด ์ถ”๊ฐ€
1169
+ if not existing_content.strip().startswith("---"):
1170
+ yaml_header = f"""---
1171
+ title: {repo_hf.replace("-", " ").title()}
1172
  emoji: ๐Ÿš€
1173
  colorFrom: blue
1174
  colorTo: green
1175
+ sdk: {sdk_type}
1176
  sdk_version: "5.35.0"
1177
  app_file: app.py
1178
  pinned: false
1179
  ---
1180
 
1181
+ """
1182
+ new_content = yaml_header + existing_content
1183
+ readme_path.write_text(new_content, encoding="utf-8")
1184
+ yield "โœ… Updated README.md with Space configuration"
1185
+ except Exception as e:
1186
+ yield f"โš ๏ธ Could not update README.md: {str(e)}"
1187
+
1188
+ # Create the HuggingFace repo with retries
1189
+ yield "๐Ÿ—๏ธ Creating Hugging Face Space..."
1190
+
1191
+ repo_id = f"{username}/{slugify(repo_hf)}"
1192
+ space_created = False
1193
+
1194
+ # Space ์ƒ์„ฑ ์‹œ๋„
1195
+ for attempt in range(3):
1196
+ try:
1197
+ yield f" Creating Space: {repo_id} (attempt {attempt + 1}/3)"
1198
+
1199
+ # ๋จผ์ € ๊ธฐ์กด Space๊ฐ€ ์žˆ๋Š”์ง€ ํ™•์ธ
1200
+ try:
1201
+ existing_space = api.space_info(repo_id=repo_id, token=hf_token)
1202
+ yield f" โ„น๏ธ Space already exists: {existing_space.id}"
1203
+ space_created = True
1204
+ break
1205
+ except:
1206
+ # Space๊ฐ€ ์—†์œผ๋ฉด ์ƒ์„ฑ
1207
+ pass
1208
+
1209
+ # Space ์ƒ์„ฑ
1210
+ create_result = api.create_repo(
1211
+ repo_id=repo_id,
1212
+ repo_type="space",
1213
+ space_sdk=sdk_type,
1214
+ exist_ok=True,
1215
+ private=False,
1216
+ token=hf_token
1217
+ )
1218
+
1219
+ # ์ƒ์„ฑ ํ›„ ์ž ์‹œ ๋Œ€๊ธฐ
1220
+ import time
1221
+ time.sleep(3)
1222
+
1223
+ # ์ƒ์„ฑ ํ™•์ธ
1224
+ space_info = api.space_info(repo_id=repo_id, token=hf_token)
1225
+ yield f" โœ… Space created successfully: {space_info.id}"
1226
+ space_created = True
1227
+ break
1228
+
1229
+ except Exception as e:
1230
+ error_msg = str(e)
1231
+
1232
+ # Rate limit ์—๋Ÿฌ ์ฒ˜๋ฆฌ
1233
+ if "429" in error_msg or "Too Many Requests" in error_msg:
1234
+ yield f"""
1235
+ โŒ **Rate Limit Error**
1236
 
1237
+ You have reached the HuggingFace API rate limit for creating Spaces.
 
1238
 
1239
+ **What this means:**
1240
+ - New users have limited Space creation quotas
1241
+ - You need to wait before creating more Spaces (usually 17-24 hours)
1242
+ - Your limits will increase over time as you use HuggingFace
 
 
 
 
1243
 
1244
+ **Solutions:**
1245
+ 1. **Wait**: Try again in 17-24 hours
1246
+ 2. **Use existing Space**: Update an existing Space instead of creating a new one
1247
+ 3. **Contact HuggingFace**: Email website@huggingface.co if you need immediate access
1248
+ 4. **Alternative**: Create the Space manually on HuggingFace and upload the files
 
 
1249
 
1250
+ **Manual Space Creation Steps:**
1251
+ 1. Go to https://huggingface.co/new-space
1252
+ 2. Create a Space named: `{repo_hf}`
1253
+ 3. Select SDK: {sdk_type}
1254
+ 4. After creation, use the "Files" tab to upload your repository contents
1255
 
1256
+ Repository has been cloned to local folder and is ready for manual upload.
1257
+ """
1258
+ # Rate limit์˜ ๊ฒฝ์šฐ ์žฌ์‹œ๋„ํ•˜์ง€ ์•Š์Œ
1259
+ raise Exception(f"Rate limit reached. Please try again later or create the Space manually.")
1260
+
1261
+ yield f" โš ๏ธ Attempt {attempt + 1} failed: {error_msg[:100]}..."
1262
+ if attempt < 2:
1263
+ yield " Retrying in 5 seconds..."
1264
+ import time
1265
+ time.sleep(5)
1266
+ else:
1267
+ yield f" โŒ Failed to create space after 3 attempts"
1268
+ raise Exception(f"Could not create space: {error_msg}")
1269
+
1270
+ if not space_created:
1271
+ raise Exception("Failed to create space")
1272
+
1273
+ # Check folder size
1274
+ folder_size = sum(os.path.getsize(os.path.join(dirpath, filename))
1275
+ for dirpath, dirnames, filenames in os.walk(folder)
1276
+ for filename in filenames) / (1024 * 1024) # Size in MB
1277
+
1278
+ yield f"๐Ÿ“Š Folder size: {folder_size:.2f} MB"
1279
+
1280
+ # Count remaining files
1281
+ file_count = sum(len(files) for _, _, files in os.walk(folder))
1282
+ yield f"๐Ÿ“ Total files to upload: {file_count}"
1283
+
1284
+ # Upload to HuggingFace with retry logic
1285
+ upload_success = False
1286
+ max_retries = 3
1287
+
1288
+ for attempt in range(max_retries):
1289
+ try:
1290
+ if attempt > 0:
1291
+ yield f"๐Ÿ“ค Upload attempt {attempt + 1}/{max_retries}..."
1292
+ import time
1293
+ time.sleep(5) # ์žฌ์‹œ๋„ ์ „ ๋Œ€๊ธฐ
1294
+
1295
+ if folder_size > 500: # If larger than 500MB, use upload_large_folder
1296
+ yield "๐Ÿ“ค Uploading large folder to Hugging Face (this may take several minutes)..."
1297
+ api.upload_large_folder(
1298
+ folder_path=folder,
1299
+ repo_id=repo_id,
1300
+ repo_type="space",
1301
+ token=hf_token,
1302
+ commit_message="Deploy from GitHub repository",
1303
+ ignore_patterns=["*.pyc", "__pycache__", ".git*", ".DS_Store", "*.egg-info"]
1304
+ )
1305
+ else:
1306
+ yield "๐Ÿ“ค Uploading to Hugging Face..."
1307
+ api.upload_folder(
1308
+ folder_path=folder,
1309
+ repo_id=repo_id,
1310
+ repo_type="space",
1311
+ token=hf_token,
1312
+ commit_message="Deploy from GitHub repository",
1313
+ ignore_patterns=["*.pyc", "__pycache__", ".git*", ".DS_Store", "*.egg-info"]
1314
+ )
1315
+
1316
+ upload_success = True
1317
+ yield "โœ… Upload completed successfully"
1318
+ break
1319
+
1320
+ except Exception as upload_error:
1321
+ error_msg = str(upload_error)
1322
+
1323
+ if "404" in error_msg and attempt < max_retries - 1:
1324
+ yield f" โš ๏ธ Upload failed (404). Space might not be ready yet."
1325
+ yield " Waiting 10 seconds before retry..."
1326
+ import time
1327
+ time.sleep(10)
1328
+
1329
+ # Space ๋‹ค์‹œ ํ™•์ธ
1330
+ try:
1331
+ space_info = api.space_info(repo_id=repo_id, token=hf_token)
1332
+ yield f" โœ… Space confirmed to exist"
1333
+ except:
1334
+ # Space ์žฌ์ƒ์„ฑ ์‹œ๋„
1335
+ yield " ๐Ÿ”„ Attempting to recreate space..."
1336
+ try:
1337
+ api.create_repo(
1338
+ repo_id=repo_id,
1339
+ repo_type="space",
1340
+ space_sdk=sdk_type,
1341
+ exist_ok=True,
1342
+ private=False,
1343
+ token=hf_token
1344
+ )
1345
+ yield " โœ… Space recreated"
1346
+ except Exception as recreate_error:
1347
+ yield f" โŒ Could not recreate space: {str(recreate_error)}"
1348
+
1349
+ elif "LFS pointer" in error_msg:
1350
+ yield "โŒ Upload failed due to remaining LFS pointer files"
1351
+ yield "๐Ÿ” Searching for remaining LFS pointers..."
1352
+
1353
+ # Do another scan for LFS files
1354
+ lfs_count = 0
1355
+ for root, dirs, files in os.walk(folder):
1356
+ for file in files:
1357
+ filepath = os.path.join(root, file)
1358
+ if is_lfs_pointer_file(filepath):
1359
+ lfs_count += 1
1360
+ if lfs_count <= 5: # ์ฒ˜์Œ 5๊ฐœ๋งŒ ํ‘œ์‹œ
1361
+ yield f" Found LFS pointer: {filepath.replace(folder + os.sep, '')}"
1362
+ if lfs_count > 5:
1363
+ yield f" ... and {lfs_count - 5} more LFS pointer files"
1364
+ raise upload_error
1365
+
1366
+ elif attempt == max_retries - 1:
1367
+ yield f"โŒ Upload failed after {max_retries} attempts: {error_msg[:200]}..."
1368
+ raise upload_error
1369
+ else:
1370
+ yield f" โš ๏ธ Upload failed: {error_msg[:100]}..."
1371
+
1372
+ if not upload_success:
1373
+ raise Exception("Upload failed after all retries")
1374
+
1375
+ # Clean up the temporary folder
1376
+ shutil.rmtree(folder)
1377
+
1378
+ space_url = f"https://huggingface.co/spaces/{repo_id}"
1379
+
1380
+ # ์„ฑ๊ณต ๋ฉ”์‹œ์ง€์™€ ์ƒ์„ธ ์ •๋ณด ์ถœ๋ ฅ
1381
+ yield f"""
1382
+ โœ… **Successfully created Space!**
1383
 
1384
+ ๐Ÿ”— **Your Space URL**: {space_url}
 
 
 
1385
 
1386
+ ๐Ÿ“‹ **Deployment Summary:**
1387
+ - **Space ID**: `{repo_id}`
1388
+ - **Source Repository**: {repo_git}
1389
+ - **SDK Type**: {sdk_type}
1390
+ - **Smart Generation**: {'Enabled' if enable_smart_generation else 'Disabled'}
1391
+ - **LFS Files**: {'Skipped' if skip_lfs else 'Included'}
1392
 
1393
+ ๐Ÿš€ **Next Steps:**
1394
+ 1. Click the link above to visit your Space
1395
+ 2. Wait 2-3 minutes for the initial build to complete
1396
+ 3. Check the "Logs" tab if you encounter any issues
1397
+ 4. The Space will automatically rebuild when you make changes
1398
 
1399
+ ๐Ÿ’ก **Tips:**
1400
+ - If the build fails, check the requirements.txt file
1401
+ - For GPU-required projects, enable GPU in Space Settings
1402
+ - You can edit files directly in the Space's Files tab
1403
+ """
1404
 
1405
+ if skip_lfs:
1406
+ yield "\nโš ๏ธ **Note**: LFS files were removed. The Space may be missing some large files (videos, models, etc.)"
1407
+
1408
+ if enable_smart_generation:
1409
+ yield "\n๐Ÿค– **Smart Generation**: An AI-generated Gradio interface was created based on repository analysis"
1410
+
1411
+ # ์ถ”๊ฐ€ ์•ˆ๋‚ด์‚ฌํ•ญ
1412
+ if any(dep.startswith("git+") for dep in analysis.get("dependencies", [])) if enable_smart_generation else False:
1413
+ yield "\nโš ๏ธ **Build Notice**: This repository contains git dependencies that may take longer to build"
1414
+
1415
+ except subprocess.CalledProcessError as e:
1416
+ if os.path.exists(folder):
1417
+ shutil.rmtree(folder)
1418
+ yield f"โŒ Git error: {str(e)}"
1419
+ except Exception as e:
1420
+ if os.path.exists(folder):
1421
+ shutil.rmtree(folder)
1422
+ yield f"โŒ Error: {str(e)}"
1423
 
1424
# Custom CSS for better styling of the Gradio UI.
# Class names here are referenced by the raw-HTML snippets passed to
# gr.Markdown(...) below and by output.elem_classes:
#   .container   - page width/centering wrapper
#   .output-box  - scrollable monospace panel for the progress log
#   .warning-box / .error-box / .info-box - Bootstrap-style alert panels
css = """
.container {
    max-width: 900px;
    margin: auto;
    padding: 20px;
}
.output-box {
    min-height: 100px;
    max-height: 400px;
    overflow-y: auto;
    font-family: monospace;
    font-size: 14px;
    line-height: 1.5;
}
.warning-box {
    background-color: #fff3cd;
    border: 1px solid #ffeaa7;
    border-radius: 4px;
    padding: 12px;
    margin: 10px 0;
}
.error-box {
    background-color: #f8d7da;
    border: 1px solid #f5c6cb;
    border-radius: 4px;
    padding: 12px;
    margin: 10px 0;
}
.info-box {
    background-color: #d1ecf1;
    border: 1px solid #bee5eb;
    border-radius: 4px;
    padding: 12px;
    margin: 10px 0;
}
"""
1461
 
1462
# ---------------------------------------------------------------------------
# Gradio UI definition.
# Builds the single-page interface: an input column (repo URL, Space name,
# SDK choice, LFS/smart-generation toggles) and an output column showing the
# streamed progress log from clone(). `demo` is launched from the __main__
# guard at the bottom of the file.
# ---------------------------------------------------------------------------
with gr.Blocks(css=css) as demo:
    gr.Markdown("# ๐Ÿš€ Smart GitHub to Hugging Face Space Cloner")
    gr.Markdown("""
    Clone any public GitHub repository and convert it to a Hugging Face Space!

    **Features:**
    - โœ… Automatic handling of Git LFS issues
    - โœ… Removes problematic LFS pointer files
    - โœ… Progress updates during cloning
    - โœ… Support for large repositories
    - ๐Ÿค– **NEW: Smart app.py generation with AI analysis**
    """)

    # Check for HF_TOKEN at UI-build time so the user sees setup instructions
    # immediately instead of a mid-clone failure.
    if not os.getenv("HF_TOKEN"):
        gr.Markdown("""
        <div class="error-box">
        <strong>โŒ HF_TOKEN Required</strong><br>
        Please set the HF_TOKEN environment variable in your Space settings:
        <ol>
        <li>Go to your Space Settings</li>
        <li>Navigate to "Variables and secrets"</li>
        <li>Add a new secret: Name = <code>HF_TOKEN</code>, Value = your Hugging Face write token</li>
        <li>Get a token from: <a href="https://huggingface.co/settings/tokens" target="_blank">https://huggingface.co/settings/tokens</a></li>
        </ol>
        </div>
        """)
    else:
        gr.Markdown("""
        <div class="info-box">
        <strong>โœ… HF_TOKEN Found</strong><br>
        Ready to clone repositories to your Hugging Face account.
        </div>
        """)

    # Rate-limit warning (Space creation quotas for new HF accounts).
    gr.Markdown("""
    <div class="warning-box">
    <strong>โš ๏ธ Rate Limits for New Users</strong><br>
    New HuggingFace users have limited Space creation quotas:
    <ul>
    <li>You can create only a few Spaces per day initially</li>
    <li>Limits increase over time with account activity</li>
    <li>If you hit the limit, wait 17-24 hours or update existing Spaces</li>
    <li>Contact website@huggingface.co for immediate access needs</li>
    </ul>
    </div>
    """)

    with gr.Row():
        # Left column: all user inputs for the clone operation.
        with gr.Column():
            repo_git = gr.Textbox(
                label="GitHub Repository URL",
                placeholder="https://github.com/username/repository",
                info="Enter the full URL of the GitHub repository"
            )
            repo_hf = gr.Textbox(
                label="Hugging Face Space Name",
                placeholder="my-awesome-space",
                info="Choose a name for your new Space (will be slugified)"
            )
            sdk_choices = gr.Radio(
                ["gradio", "streamlit", "docker", "static"],
                label="Space SDK",
                value="gradio",
                info="Select the SDK type for your Space"
            )
            skip_lfs = gr.Checkbox(
                label="Skip Git LFS files",
                value=True,  # Default to True due to common LFS issues
                info="Recommended if the repo has large files (videos, models, datasets)"
            )
            enable_smart_generation = gr.Checkbox(
                label="๐Ÿค– Enable Smart app.py Generation (Beta)",
                value=False,
                info="Analyze repository and generate working Gradio interface with AI"
            )

            gr.Markdown("""
            <div class="warning-box">
            <strong>โš ๏ธ About Git LFS</strong><br>
            Many repos use Git LFS for large files. If these files are missing or causing errors,
            keeping "Skip Git LFS files" checked will remove them and allow successful cloning.
            </div>
            """)

            # Smart Generation info panel (requirements and behavior).
            gr.Markdown("""
            <div class="info-box">
            <strong>๐Ÿค– About Smart Generation</strong><br>
            When enabled, the system will:
            <ul>
            <li>Analyze repository structure and dependencies</li>
            <li>Search for usage examples and documentation</li>
            <li>Generate a working Gradio interface using AI</li>
            <li>Create appropriate requirements.txt</li>
            </ul>
            <br>
            <strong>Required Environment Variables:</strong><br>
            - <code>OPENAI_API_KEY</code> or <code>FRIENDLI_TOKEN</code> for AI generation<br>
            - <code>BAPI_TOKEN</code> for web search (optional)
            </div>
            """)

            btn = gr.Button("๐ŸŽฏ Clone Repository", variant="primary")

        # Right column: streamed progress log from the clone() generator.
        with gr.Column():
            output = gr.Textbox(
                label="Progress",
                lines=15,
                elem_classes=["output-box"],
                interactive=False,
                show_copy_button=True
            )

    gr.Markdown("""
    ### ๐Ÿ“ Instructions:
    1. **Setup**: Make sure HF_TOKEN is set in your Space settings
    2. **Repository URL**: Enter the full GitHub repository URL
    3. **Space Name**: Choose a name for your new Space
    4. **SDK**: Select the appropriate SDK for your Space
    5. **LFS Files**: Keep "Skip Git LFS files" checked if unsure
    6. **Smart Generation**: Enable to automatically create working app.py
    7. **Clone**: Click "Clone Repository" and monitor progress

    ### ๐Ÿšจ Troubleshooting:

    <div class="error-box">
    <strong>LFS pointer file errors?</strong><br>
    Make sure "Skip Git LFS files" is checked. This removes large file pointers that can cause upload failures.
    </div>

    - **Missing files after cloning**: The repository used Git LFS for large files that are no longer available
    - **Slow uploads**: Large repositories take time. Consider using a smaller repository or removing unnecessary files
    - **Space doesn't work**: Check if removed LFS files were essential (models, data, etc.) and add them manually
    - **Smart Generation issues**: Make sure you have the required API keys set in environment variables
    """)

    # Wire the button to clone(); since clone is a generator, Gradio streams
    # each yielded message into the Progress textbox as it arrives.
    btn.click(
        fn=clone,
        inputs=[repo_git, repo_hf, sdk_choices, skip_lfs, enable_smart_generation],
        outputs=output
    )

    # Success stories and tips.
    gr.Markdown("""
    ### ๐ŸŒŸ Success Tips:

    1. **For ML/AI Projects**: Enable GPU in Space Settings after deployment
    2. **For Large Files**: Use Git LFS or host models on HuggingFace Hub
    3. **For Complex Dependencies**: Check build logs and adjust requirements.txt
    4. **For Private APIs**: Add secrets in Space Settings (Settings โ†’ Variables and secrets)

    ### ๐Ÿ“Š Supported Project Types:
    - ๐Ÿค– Machine Learning models (PyTorch, TensorFlow, Transformers)
    - ๐Ÿ–ผ๏ธ Computer Vision applications
    - ๐Ÿ“ NLP and text processing
    - ๐ŸŽต Audio processing and generation
    - ๐Ÿ“ˆ Data visualization and analysis
    - ๐ŸŽฎ Interactive demos and games
    """)
1623
 
1624
# Script entry point: launch the Gradio app defined above (blocks until the
# server is stopped). Not executed when this module is imported.
if __name__ == "__main__":
    demo.launch()