ariG23498 (HF Staff) committed
Commit 333ab22 · verified
1 Parent(s): 832d06c

Upload inclusionAI_Ring-flash-linear-2.0-128k_0.txt with huggingface_hub

inclusionAI_Ring-flash-linear-2.0-128k_0.txt CHANGED
@@ -11,7 +11,7 @@ pipe(messages)
 
 ERROR:
 Traceback (most recent call last):
-  File "/tmp/inclusionAI_Ring-flash-linear-2.0-128k_0x3vson.py", line 17, in <module>
+  File "/tmp/inclusionAI_Ring-flash-linear-2.0-128k_0539ISc.py", line 19, in <module>
     pipe = pipeline("text-generation", model="inclusionAI/Ring-flash-linear-2.0-128k", trust_remote_code=True)
   File "/tmp/.cache/uv/environments-v2/4ad9569faf5e3eee/lib/python3.13/site-packages/transformers/pipelines/__init__.py", line 1027, in pipeline
     framework, model = infer_framework_load_model(
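
For context, a minimal sketch of the kind of script that would produce a traceback like the one logged above. Only the pipeline(...) call and the pipe(messages) usage appear in the log; the messages payload below is an illustrative assumption, not taken from the uploaded file.

from transformers import pipeline

# Hypothetical prompt; the actual input used in the logged run is not shown.
messages = [
    {"role": "user", "content": "Who are you?"},
]

# Per the traceback, loading fails inside pipeline() (infer_framework_load_model),
# so execution never reaches the pipe(messages) call.
pipe = pipeline(
    "text-generation",
    model="inclusionAI/Ring-flash-linear-2.0-128k",
    trust_remote_code=True,
)
pipe(messages)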