app.py CHANGED
@@ -59,11 +59,11 @@ def fetch_datasets(base_folder: str):
     return datasets, gr.update(choices=datasets, value=None), fetch_groups(base_folder, datasets, None, "union")
 
 
-def export_data(exported_data):
+def export_data(exported_data, stat_name):
     if not exported_data:
         return None
     # Assuming exported_data is a dictionary where the key is the dataset name and the value is the data to be exported
-    with tempfile.NamedTemporaryFile(mode="w", delete=False, suffix=".json") as temp:
+    with tempfile.NamedTemporaryFile(mode="w", delete=False, prefix=stat_name, suffix=".json") as temp:
         json.dump(exported_data, temp)
         temp_path = temp.name
     return gr.update(visible=True, value=temp_path)
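For readability, the whole of the updated export_data, assembled from the hunk above, reads roughly as follows; the tempfile, json, and gradio (gr) imports are assumed to already exist at the top of app.py, since the diff does not show them.

def export_data(exported_data, stat_name):
    if not exported_data:
        return None
    # exported_data maps each dataset name to the data to be exported
    # the selected stat name becomes the prefix of the temporary file's name
    with tempfile.NamedTemporaryFile(mode="w", delete=False, prefix=stat_name, suffix=".json") as temp:
        json.dump(exported_data, temp)
        temp_path = temp.name
    # make the download component visible and point it at the written file
    return gr.update(visible=True, value=temp_path)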
@@ -92,7 +92,6 @@ def fetch_groups(base_folder, datasets, old_groups, type="intersection"):
 
 
 def fetch_stats(base_folder, datasets, group, old_stats, type="intersection"):
-    print("Fetching stats")
     with ThreadPoolExecutor() as executor:
         STATS = list(executor.map(lambda run: [Path(x).name for x in find_folders(base_folder, f"{run}/{group}")], datasets))
     if len(STATS) == 0:
@@ -141,10 +140,12 @@ def prepare_non_grouped_data(dataset_path, base_folder, grouping, stat_name, nor
     stats = load_stats(base_folder, dataset_path, stat_name, grouping)
     stats_rounded = defaultdict(lambda: 0)
     for key, value in stats.items():
-        stats_rounded[float(key)] += value.total
+        stats_rounded[round(float(key), 2)] += value.total
     if normalization:
         normalizer = sum(stats_rounded.values())
         stats_rounded = {k: v / normalizer for k, v in stats_rounded.items()}
+        # check that the sum of the values is 1
+        summed = sum(stats_rounded.values())
     return stats_rounded
 
 
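The main change in this hunk is that histogram keys are rounded to two decimal places before accumulation, so nearby keys collapse into one bucket; note that the added summed line computes the post-normalization total but is never asserted to equal 1. A minimal, self-contained illustration of the rounding behaviour, using made-up keys and plain integer counts in place of the app's value.total, could look like this:

from collections import defaultdict

# toy stand-in for the stats returned by load_stats(): raw key -> count
stats = {"0.101": 4, "0.104": 6, "0.35": 10}

stats_rounded = defaultdict(lambda: 0)
for key, value in stats.items():
    # "0.101" and "0.104" both round to 0.1, so their counts are merged
    stats_rounded[round(float(key), 2)] += value

# normalize so the bucket values sum to 1
normalizer = sum(stats_rounded.values())
stats_rounded = {k: v / normalizer for k, v in stats_rounded.items()}
print(stats_rounded)  # {0.1: 0.5, 0.35: 0.5}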
@@ -432,7 +433,7 @@ Groupings:
 
     export_data_button.click(
         fn=export_data,
-        inputs=[exported_data],
+        inputs=[exported_data, stat_name_dropdown],
         outputs=export_data_json,
     )
 
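In the Gradio UI this means the current value of the stat-name dropdown is now passed to export_data as its second argument. The surrounding component definitions are not part of this diff, so the sketch below is only an assumed reconstruction: the constructor arguments and the gr.State/gr.File choices are guesses, while the four variable names match the ones used in the hunk.

with gr.Blocks() as demo:
    # assumed definitions -- only the variable names appear in the diff above
    stat_name_dropdown = gr.Dropdown(label="Stat name")
    exported_data = gr.State()                 # data gathered elsewhere in the app
    export_data_button = gr.Button("Export data")
    export_data_json = gr.File(visible=False)  # revealed by export_data's gr.update(...)

    export_data_button.click(
        fn=export_data,
        inputs=[exported_data, stat_name_dropdown],  # dropdown value -> stat_name
        outputs=export_data_json,
    )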