import random
import argparse
import glob
import json
import os
import time
import rtmidi
from concurrent.futures import ThreadPoolExecutor

import gradio as gr
import numpy as np
import torch
import torch.nn.functional as F
from huggingface_hub import hf_hub_download
from transformers import DynamicCache

import MIDI
from midi_model import MIDIModel, MIDIModelConfig
from midi_synthesizer import MidiSynthesizer

MAX_SEED = np.iinfo(np.int32).max
in_space = os.getenv("SYSTEM") == "spaces"

# Chord definitions and emoji mappings remain the same
# ... (keeping your CHORD_EMOJIS and CHORD_NOTES dictionaries)
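# Minimal illustrative stand-ins (assumptions, not your originals) so the
# functions below have something to resolve against; pitches are MIDI note
# numbers and the emoji map simply defaults every chord to 🎵.
CHORD_NOTES = {
    "C": [60, 64, 67],
    "D": [62, 66, 69],
    "Em": [64, 67, 71],
    "G": [55, 59, 62],
    # ... extend with the remaining roots and chord types you use
}
CHORD_EMOJIS = {name: "🎵" for name in CHORD_NOTES}
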
# Example song data for "Do You Believe in Love"
SONG_DATA = {
    "title": "Do You Believe in Love",
    "artist": "Huey Lewis and the News",
    "progression": ["G", "D", "Em", "C"],  # Simplified progression
    "lyrics": [
        "I was walking down a one-way street",
        "Just a-looking for someone to meet",
        "One woman who was looking for a man",
        "Now I'm hoping that the feeling is right"
    ]
}

class MIDIDeviceManager:
    # ... (keeping your existing MIDIDeviceManager class)
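    # Illustrative sketch only, not the original implementation: a minimal
    # wrapper over python-rtmidi's MidiOut so the UI below can enumerate
    # output ports and the shutdown call at the end of the script works.
    def __init__(self):
        self.midi_out = rtmidi.MidiOut()

    def get_available_devices(self):
        """Return the names of all detected MIDI output ports."""
        return self.midi_out.get_ports()

    def close(self):
        """Release the output port if one is open."""
        self.midi_out.close_port()
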
    def get_device_info(self):
        """Return detailed info about connected MIDI devices."""
        devices = self.get_available_devices()
        if not devices:
            return "No MIDI devices detected"
        return "\n".join([f"Port {i}: {name}" for i, name in enumerate(devices)])

# Global MIDI manager
midi_manager = MIDIDeviceManager()

def analyze_midi_file(midi_file_path):
    """Analyze an uploaded MIDI file to extract a chord progression."""
    try:
        midi = MIDI.load(midi_file_path)
        # Simple chord detection (this could be expanded with proper analysis)
        detected_chords = []
        for track in midi.tracks:
            current_chord = []
            for event in track.events:
                if event.type == 'note_on' and event.velocity > 0:
                    current_chord.append(event.note)
                    if len(current_chord) >= 3:  # Basic triad detection
                        for chord_name, notes in CHORD_NOTES.items():
                            if set(current_chord[:3]) == set(notes[:3]):
                                detected_chords.append(chord_name)
                                current_chord = []
                                break
        return detected_chords if detected_chords else SONG_DATA["progression"]
    except Exception:
        return SONG_DATA["progression"]  # Fall back to the example progression

def generate_chord_sheet(chords, lyrics):
    """Generate a formatted chord sheet with chords and lyrics."""
    sheet = ""
    for chord, lyric in zip(chords * (len(lyrics) // len(chords) + 1), lyrics):
        sheet += f"{CHORD_EMOJIS.get(chord, '🎵')} {chord}\n"
        sheet += f"{lyric}\n\n"
    return sheet

def create_chord_visualizer(chords, lyrics):
    """Create an HTML visualization of the chord sheet."""
    html = "<div style='font-family: monospace; line-height: 1.5;'>"
    for chord, lyric in zip(chords * (len(lyrics) // len(chords) + 1), lyrics):
        html += "<div style='margin-bottom: 10px;'>"
        html += f"<span style='color: #2196F3; font-weight: bold;'>{CHORD_EMOJIS.get(chord, '🎵')} {chord}</span>"
        html += f"<br>{lyric}</div>"
    html += "</div>"
    return html

# ... (keeping your existing helper functions like create_msg, etc.)
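# Illustrative sketches only -- stand-ins for the elided helpers referenced
# below (hf_hub_download_retry, create_virtual_keyboard, load_javascript,
# play_chord_on_device, add_chord_to_queue, play_chord_sequence). Their
# signatures are assumptions inferred from the call sites in this file;
# replace them with your existing implementations.

def hf_hub_download_retry(repo_id, filename, retries=3, delay=5):
    """Download a file from the Hugging Face Hub, retrying on transient errors."""
    for attempt in range(retries):
        try:
            return hf_hub_download(repo_id=repo_id, filename=filename)
        except Exception:
            if attempt == retries - 1:
                raise
            time.sleep(delay)


def create_virtual_keyboard(chord_types):
    """Map each root and chord type to a (chord_name, emoji) pair."""
    keyboard = {}
    for root in ['C', 'D', 'E', 'F', 'G', 'A', 'B']:
        keyboard[root] = {}
        for chord_type in chord_types:
            chord_name = f"{root}{chord_type}"
            keyboard[root][chord_type] = (chord_name, CHORD_EMOJIS.get(chord_name, '🎵'))
    return keyboard


def load_javascript():
    """Placeholder for the custom JavaScript loader used by the original app."""
    pass


def play_chord_on_device(chord_name, device_index):
    """Send note-on/note-off messages for a chord to the selected MIDI output port."""
    if device_index is None or chord_name not in CHORD_NOTES:
        return
    midi_out = rtmidi.MidiOut()
    midi_out.open_port(device_index)
    try:
        for note in CHORD_NOTES[chord_name]:
            midi_out.send_message([0x90, note, 100])  # note on, velocity 100
        time.sleep(0.5)
        for note in CHORD_NOTES[chord_name]:
            midi_out.send_message([0x80, note, 0])    # note off
    finally:
        midi_out.close_port()


def add_chord_to_queue(chord_name, queue):
    """Append a chord to the queue state and return the updated list."""
    return queue + [chord_name]


def play_chord_sequence(queue, device_index, tempo):
    """Play the queued chords one beat apart at the given tempo; keep the queue."""
    beat_seconds = 60.0 / max(tempo, 1)
    for chord_name in queue:
        play_chord_on_device(chord_name, device_index)
        time.sleep(beat_seconds)
    return queue
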
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    # ... (keeping your existing parser arguments)
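    # Illustrative sketch of the three arguments this script actually reads
    # (opt.port, opt.share, opt.batch); the defaults here are assumptions --
    # adjust to match your original parser.
    parser.add_argument("--port", type=int, default=7860, help="Gradio server port")
    parser.add_argument("--share", action="store_true", help="create a public Gradio link")
    parser.add_argument("--batch", type=int, default=4, help="output batch size / synth worker threads")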
    opt = parser.parse_args()
    OUTPUT_BATCH_SIZE = opt.batch

    # Initialize MIDI components
    midi_manager = MIDIDeviceManager()
    soundfont_path = hf_hub_download_retry(repo_id="skytnt/midi-model", filename="soundfont.sf2")
    thread_pool = ThreadPoolExecutor(max_workers=OUTPUT_BATCH_SIZE)
    synthesizer = MidiSynthesizer(soundfont_path)

    chord_types = ['', 'm', '7', 'maj7', 'm7']
    keyboard = create_virtual_keyboard(chord_types)

    # Enhanced CSS with visualizer styling
    keyboard_css = """
    .chord-button { /* ... existing styles ... */ }
    .chord-queue { /* ... existing styles ... */ }
    .root-c { background-color: #FFCDD2; }
    .root-d { background-color: #F8BBD0; }
    .root-e { background-color: #E1BEE7; }
    .root-f { background-color: #D1C4E9; }
    .root-g { background-color: #C5CAE9; }
    .root-a { background-color: #BBDEFB; }
    .root-b { background-color: #B3E5FC; }
    .visualizer-container {
        background: #f9f9f9;
        padding: 15px;
        border-radius: 8px;
        margin-top: 15px;
    }
    """

    load_javascript()
    app = gr.Blocks(theme=gr.themes.Soft(), css=keyboard_css)

    with app:
        gr.Markdown("<h1 style='text-align: center;'>🎵 Chord Sheet Generator & Visualizer 🎵</h1>")

        # MIDI Device Info
        with gr.Row():
            with gr.Column(scale=3):
                midi_device = gr.Dropdown(label="MIDI Output Device",
                                          choices=midi_manager.get_available_devices(),
                                          type="index")
                device_info = gr.Textbox(label="Connected MIDI Devices",
                                         value=midi_manager.get_device_info(),
                                         interactive=False)
                refresh_button = gr.Button("🔄 Refresh MIDI Devices")
            with gr.Column(scale=1):
                tempo = gr.Slider(label="Tempo (BPM)",
                                  minimum=40,
                                  maximum=200,
                                  value=120,
                                  step=1)

        # MIDI File Upload and Chord Sheet Generation
        with gr.Row():
            midi_upload = gr.File(label="Upload MIDI File for Analysis")
            chord_output = gr.Textbox(label="Generated Chord Sheet",
                                      lines=10,
                                      value=generate_chord_sheet(SONG_DATA["progression"], SONG_DATA["lyrics"]))

        # Chord Visualizer
        visualizer = gr.HTML(label="Chord Sheet Visualizer",
                             value=create_chord_visualizer(SONG_DATA["progression"], SONG_DATA["lyrics"]),
                             elem_classes=["visualizer-container"])

        # Chord Queue and Playback
        chord_queue = gr.State([])
        queue_display = gr.Markdown("### Current Chord Queue\n*No chords in queue*",
                                    elem_classes=["chord-queue"])
        play_queue_button = gr.Button("▶️ Play Chord Sequence", variant="primary")
        clear_queue_button = gr.Button("🗑️ Clear Queue", variant="secondary")

        # Virtual Keyboard
        gr.Markdown("## Virtual Chord Keyboard")
        for root in ['C', 'D', 'E', 'F', 'G', 'A', 'B']:
            with gr.Row():
                gr.Markdown(f"### {root}")
                for chord_type in chord_types:
                    chord_name, emoji = keyboard[root][chord_type]
                    button = gr.Button(f"{emoji} {chord_name}",
                                       elem_classes=[f"chord-button root-{root.lower()}"])
                    button.click(
                        fn=play_chord_on_device,
                        inputs=[gr.State(chord_name), midi_device],
                        outputs=None
                    ).then(
                        fn=add_chord_to_queue,
                        inputs=[gr.State(chord_name), chord_queue],
                        outputs=[chord_queue]
                    ).then(
                        fn=lambda q: "### Current Chord Queue\n" + (" → ".join(q) if q else "*No chords in queue*"),
                        inputs=[chord_queue],
                        outputs=[queue_display]
                    )

        # Event Handlers
        refresh_button.click(
            fn=lambda: (gr.update(choices=midi_manager.get_available_devices()),
                        midi_manager.get_device_info()),
            inputs=None,
            outputs=[midi_device, device_info]
        )

        def on_midi_upload(file):
            # Analyze the file once and reuse the result for all three outputs
            chords = analyze_midi_file(file.name)
            return (chords,
                    generate_chord_sheet(chords, SONG_DATA["lyrics"]),
                    create_chord_visualizer(chords, SONG_DATA["lyrics"]))

        midi_upload.change(
            fn=on_midi_upload,
            inputs=[midi_upload],
            outputs=[chord_queue, chord_output, visualizer]
        )

        play_queue_button.click(
            fn=play_chord_sequence,
            inputs=[chord_queue, midi_device, tempo],
            outputs=[chord_queue]
        )

        clear_queue_button.click(
            fn=lambda: ([], "### Current Chord Queue\n*No chords in queue*"),
            inputs=None,
            outputs=[chord_queue, queue_display]
        )

    app.queue().launch(server_port=opt.port, share=opt.share, inbrowser=True, ssr_mode=False)
    midi_manager.close()