{"username": "Oriaz", "space_url": "https://huggingface.co/spaces/Oriaz/submission-template-frugal-ai", "submission_timestamp": "2025-01-06T16:33:16.165519", "model_description": "Embedding + Logistic Regression", "accuracy": 0.7243642329778507, "energy_consumed_wh": 1.8579968425613453, "emissions_gco2eq": 0.6858505086018414, "emissions_data": {"run_id": "ccbe2387-e097-4677-a6ed-39400a5a1485", "duration": 44.544224079, "emissions": 0.0006858505086018414, "emissions_rate": 1.5397088437067526e-05, "cpu_power": 105, "gpu_power": 39.415202015964574, "ram_power": 5.74672794342041, "cpu_energy": 0.0012991950741000006, "gpu_energy": 0.00048769677904600016, "ram_energy": 7.110498941534462e-05, "energy_consumed": 0.0018579968425613453, "country_name": "United States", "country_iso_code": "USA", "region": "virginia", "cloud_provider": "", "cloud_region": "", "os": "Linux-5.10.230-223.885.amzn2.x86_64-x86_64-with-glibc2.36", "python_version": "3.9.21", "codecarbon_version": "2.8.2", "cpu_count": 4, "cpu_model": "Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz", "gpu_count": 1, "gpu_model": "1 x Tesla T4", "ram_total_size": 15.324607849121094, "tracking_mode": "machine", "on_cloud": "N", "pue": 1}, "api_route": "/text", "dataset_config": {"dataset_name": "QuotaClimat/frugalaichallenge-text-train", "test_size": 0.2, "test_seed": 42}}