import { apiClient } from '@/utils/api';
import { usingBrowserDb } from '@/utils/env';
import { JobRecord, JobUpsertPayload } from '@/types';
import { v4 as uuidv4 } from 'uuid';
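// Persistence layer for job records. When `usingBrowserDb` is set, jobs live in
// localStorage under LOCAL_JOBS_STORAGE_KEY; otherwise calls are proxied to the
// backend /api/jobs endpoints through apiClient.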
const LOCAL_JOBS_STORAGE_KEY = 'aitk.browser.jobs';
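// Coerce an unknown timestamp value (string, Date, epoch number, etc.) to an
// ISO 8601 string, falling back to "now" when the value is missing or unparseable.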
const ensureIsoString = (value: unknown): string => {
  if (!value) {
    return new Date().toISOString();
  }
  if (typeof value === 'string') {
    return value;
  }
  if (value instanceof Date) {
    return value.toISOString();
  }
  const date = new Date(value as any);
  if (!Number.isNaN(date.getTime())) {
    return date.toISOString();
  }
  return new Date().toISOString();
};
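// Map a raw job object (from localStorage or the API) onto the JobRecord shape,
// filling sensible defaults for any missing fields.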
const normalizeJob = (job: any): JobRecord => {
  return {
    id: job.id,
    name: job.name || '',
    gpu_ids: job.gpu_ids || '',
    job_config: typeof job.job_config === 'string' ? job.job_config : JSON.stringify(job.job_config || {}),
    status: job.status || 'stopped',
    stop: Boolean(job.stop),
    step: typeof job.step === 'number' ? job.step : 0,
    info: job.info || '',
    speed_string: job.speed_string || '',
    created_at: ensureIsoString(job.created_at),
    updated_at: ensureIsoString(job.updated_at),
  };
};
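// Read and normalize all jobs persisted in localStorage. Returns an empty array
// outside the browser or when the stored value is missing or corrupt.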
const readLocalJobs = (): JobRecord[] => {
  if (typeof window === 'undefined') {
    return [];
  }
  try {
    const raw = window.localStorage.getItem(LOCAL_JOBS_STORAGE_KEY);
    if (!raw) {
      return [];
    }
    const parsed = JSON.parse(raw);
    if (!Array.isArray(parsed)) {
      return [];
    }
    return parsed.map(normalizeJob);
  } catch (error) {
    console.error('Failed to read jobs from localStorage:', error);
    return [];
  }
};
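// Persist the full jobs array back to localStorage; write failures (e.g. quota
// exceeded) are logged and swallowed.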
const writeLocalJobs = (jobs: JobRecord[]) => {
  if (typeof window === 'undefined') {
    return;
  }
  try {
    window.localStorage.setItem(LOCAL_JOBS_STORAGE_KEY, JSON.stringify(jobs));
  } catch (error) {
    console.error('Failed to write jobs to localStorage:', error);
  }
};
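// Ensure the job config is stored as a JSON string, tolerating values that are
// already serialized or that cannot be stringified.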
const serializeJobConfig = (jobConfig: unknown): string => {
  if (typeof jobConfig === 'string') {
    return jobConfig;
  }
  try {
    return JSON.stringify(jobConfig ?? {});
  } catch (error) {
    console.error('Failed to serialize job config:', error);
    return JSON.stringify({});
  }
};
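// List all jobs: newest first in browser mode; in server mode, jobs are returned
// in whatever order the API provides.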
export const listJobs = async (): Promise<JobRecord[]> => {
  if (usingBrowserDb) {
    const jobs = readLocalJobs();
    return jobs.sort((a, b) => (a.created_at < b.created_at ? 1 : -1));
  }
  const response = await apiClient.get('/api/jobs');
  const jobs = Array.isArray(response.data?.jobs) ? response.data.jobs : [];
  return jobs.map(normalizeJob);
};
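// Fetch a single job by id, returning null when it cannot be found.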
export const getJob = async (id: string): Promise<JobRecord | null> => {
  if (usingBrowserDb) {
    const jobs = readLocalJobs();
    const job = jobs.find(item => item.id === id);
    return job || null;
  }
  const response = await apiClient.get('/api/jobs', { params: { id } });
  if (!response.data) {
    return null;
  }
  return normalizeJob(response.data);
};
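// Enforce unique job names in browser mode. Throws an error tagged with code
// 'P2002' (the code Prisma uses for unique-constraint violations) when another
// job already uses the requested name.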
const ensureUniqueName = (jobs: JobRecord[], name: string, excludeId?: string) => {
  const existing = jobs.find(job => job.name === name && job.id !== excludeId);
  if (existing) {
    const error: Error & { code?: string } = new Error('Job name already exists');
    error.code = 'P2002';
    throw error;
  }
};
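// Create or update a job. In browser mode, a payload with an id updates the
// stored record in place and a payload without one creates a new record with a
// generated uuid; in server mode, the payload is forwarded to POST /api/jobs.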
export const upsertJob = async (payload: JobUpsertPayload): Promise<JobRecord> => {
  if (usingBrowserDb) {
    const jobs = readLocalJobs();
    const serializedConfig = serializeJobConfig(payload.job_config);
    const now = new Date().toISOString();
    if (payload.id) {
      const index = jobs.findIndex(job => job.id === payload.id);
      if (index === -1) {
        throw new Error('Job not found');
      }
      ensureUniqueName(jobs, payload.name, payload.id);
      const updatedJob: JobRecord = {
        ...jobs[index],
        name: payload.name,
        gpu_ids: payload.gpu_ids ?? jobs[index].gpu_ids,
        job_config: serializedConfig,
        status: payload.status ?? jobs[index].status,
        stop: payload.stop ?? jobs[index].stop,
        step: payload.step ?? jobs[index].step,
        info: payload.info ?? jobs[index].info,
        speed_string: payload.speed_string ?? jobs[index].speed_string,
        updated_at: now,
      };
      jobs[index] = updatedJob;
      writeLocalJobs(jobs);
      return updatedJob;
    }
    ensureUniqueName(jobs, payload.name);
    const newJob: JobRecord = {
      id: uuidv4(),
      name: payload.name,
      gpu_ids: payload.gpu_ids ?? '',
      job_config: serializedConfig,
      status: payload.status ?? 'stopped',
      stop: payload.stop ?? false,
      step: payload.step ?? 0,
      info: payload.info ?? '',
      speed_string: payload.speed_string ?? '',
      created_at: now,
      updated_at: now,
    };
    jobs.push(newJob);
    writeLocalJobs(jobs);
    return newJob;
  }
  const response = await apiClient.post('/api/jobs', {
    id: payload.id,
    name: payload.name,
    gpu_ids: payload.gpu_ids,
    job_config: payload.job_config,
  });
  return normalizeJob(response.data);
};
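// Delete a job by id and return the removed record, or null in browser mode when
// no job matched. In server mode the deletion endpoint is invoked via GET.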
export const deleteJobRecord = async (id: string): Promise<JobRecord | null> => {
  if (usingBrowserDb) {
    const jobs = readLocalJobs();
    const index = jobs.findIndex(job => job.id === id);
    if (index === -1) {
      return null;
    }
    const [removed] = jobs.splice(index, 1);
    writeLocalJobs(jobs);
    return removed;
  }
  const response = await apiClient.get(`/api/jobs/${id}/delete`);
  return normalizeJob(response.data);
};
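// Remove all locally stored jobs (browser mode only).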
export const clearLocalJobs = () => {
  if (typeof window === 'undefined') {
    return;
  }
  window.localStorage.removeItem(LOCAL_JOBS_STORAGE_KEY);
};
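// Illustrative usage (a sketch; the exact JobUpsertPayload fields are defined in '@/types'):
//   const job = await upsertJob({ name: 'demo-job', job_config: {} });
//   const jobs = await listJobs();
//   await deleteJobRecord(job.id);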