Luigi committed
Commit 238a95a
Parent: bd12f6b

avoid memory leak

Files changed (1):
  app.py +5 -1
app.py CHANGED
@@ -121,13 +121,17 @@ def caption_frame(frame, size, model_file, clip_file, interval_ms, sys_prompt, u
         ]}
     ]
     # re-init handler
-    llm.chat_handler.__init__(clip_model_path=clip_file, verbose=False)
+    llm.chat_handler = SmolVLM2ChatHandler(clip_model_path=clip_file, verbose=False)
     resp = llm.create_chat_completion(
         messages=messages,
         max_tokens=128,
         temperature=0.1,
         stop=["<end_of_utterance>"]
     )
+
+    import gc
+    gc.collect()
+
     return resp.get('choices', [{}])[0].get('message', {}).get('content', '').strip()
 
 # Gradio UI
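
For context, a minimal sketch of the pattern this change relies on, assuming SmolVLM2ChatHandler is the chat-handler class already defined or imported elsewhere in app.py and llm is the llama_cpp.Llama instance created at startup: calling __init__ again on the existing handler loads fresh CLIP state while the old allocation stays referenced, so memory grows on every frame; assigning a brand-new handler drops the old reference, and an explicit gc.collect() after the completion call reclaims it before the next frame arrives. The helper name below (caption_once) is hypothetical, not from the original file.

# Sketch only; SmolVLM2ChatHandler and llm are assumed to come from app.py.
import gc

def caption_once(llm, clip_file, messages):
    # Replace the handler instead of re-running __init__ on the old one:
    # the previous handler loses its last reference, so its CLIP buffers
    # can actually be freed rather than leaking each frame.
    llm.chat_handler = SmolVLM2ChatHandler(clip_model_path=clip_file, verbose=False)

    resp = llm.create_chat_completion(
        messages=messages,
        max_tokens=128,
        temperature=0.1,
        stop=["<end_of_utterance>"],
    )

    # Explicit collection pass so the dropped handler is reclaimed promptly
    # before the next frame is processed.
    gc.collect()

    return resp.get('choices', [{}])[0].get('message', {}).get('content', '').strip()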