import json
import os
from http.server import SimpleHTTPRequestHandler, ThreadingHTTPServer
from urllib.parse import parse_qs, urlparse

from datasets import Dataset, load_dataset
from huggingface_hub import HfFolder, login

# Authenticate against the Hugging Face Hub; HF_TOKEN must be set in the
# environment.
HF_TOKEN = os.environ['HF_TOKEN']
login(HF_TOKEN)
HfFolder.save_token(HF_TOKEN)

datasets = {
    "stars": load_dataset("open-source-metrics/preprocessed_stars"),
    "issues": load_dataset("open-source-metrics/preprocessed_issues"),
    "pip": load_dataset("open-source-metrics/preprocessed_pip").sort('day'),
}

external_datasets = {
    "pip": load_dataset("open-source-metrics/pip-external").sort('day'),
    "stars": load_dataset("open-source-metrics/stars-external"),
    "issues": load_dataset("open-source-metrics/issues-external"),
}
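
# Assumed layout (inferred from the accesses below, not verified here): each
# preprocessed dataset exposes 'raw' and 'wow' (week-over-week) splits, where
# every split has a 'day' column plus one column per tracked library; 'issues'
# additionally has 'eom'/'eom_wow' splits that exclude org members.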

def cut_output(full_output: Dataset, library_names: list):
    """Keep only the requested libraries (plus 'day') and trim trailing None padding."""
    output = full_output.to_dict().items()
    output = {k: v + [None] for k, v in output if k in library_names + ['day']}
    # Appending a sentinel None guarantees index(None) succeeds; the maximum
    # first-None index across the library columns marks the end of real data.
    last_value = max(output[k].index(None) for k in output.keys() if k != 'day')
    return {k: v[:last_value] for k, v in output.items()}
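
# Illustrative example (hypothetical values): with library_names=['libA'],
#   {'day': ['d1', 'd2', 'd3'], 'libA': [1, 2, None], 'libB': [4, 5, 6]}
# becomes {'day': ['d1', 'd2'], 'libA': [1, 2]} -- 'libB' is dropped and the
# trailing None is trimmed.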

def parse_name_and_options(path):
    # Both query parameters are expected on every data request.
    url = urlparse(path)
    query = parse_qs(url.query)
    library_names = query["input"][0].split(',')
    options = query["options"][0].split(',')
    return library_names, options
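
# For example, '/retrieveStars?input=transformers,datasets&options=1' parses
# to (['transformers', 'datasets'], ['1']).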

class RequestHandler(SimpleHTTPRequestHandler):
    def do_GET(self):
        print(self.path)

        if self.path == "/":
            self.path = "index.html"
            return SimpleHTTPRequestHandler.do_GET(self)

        if self.path.startswith("/initialize"):
            # The split with the most columns is taken as the full list of
            # tracked libraries ('day' is the time axis, not a library).
            dataset_with_most_splits = max(datasets['stars'].column_names.values(), key=len)

            if 'day' in dataset_with_most_splits:
                dataset_with_most_splits.remove('day')

            # Columns that correspond to external libraries are split out and
            # reported separately from the internal ones.
            external_dataset_keys = {k: set(v.keys()) for k, v in external_datasets.items()}
            external_dataset_with_most_splits = max(external_dataset_keys.values(), key=len)

            for external in external_dataset_with_most_splits:
                dataset_with_most_splits.remove(external)

            warnings = []
            print("Initializing ...")

            for k, v in external_dataset_keys.items():
                if len(v) < len(external_dataset_with_most_splits):
                    warnings.append(
                        f"The {k} external dataset does not contain all splits. Missing: {external_dataset_with_most_splits - v}"
                        f".\nSelecting that split to show the pip install numbers will not work."
                    )

            dataset_with_most_splits = sorted(dataset_with_most_splits)
            external_dataset_with_most_splits = sorted(external_dataset_with_most_splits)

            res = {
                'internal': dataset_with_most_splits,
                'external': external_dataset_with_most_splits,
                'warnings': warnings,
            }
            print(f"Returning: {res}")
            return self.response(res)
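
        # Illustrative /initialize payload (library names hypothetical):
        #   {"internal": ["accelerate", "transformers"],
        #    "external": ["pytorch", "tensorflow"],
        #    "warnings": []}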
if self.path.startswith("/retrievePipInstalls"):
errors = []
library_names, options = parse_name_and_options(self.path)
cumulated = '1' in options
week_over_week = '2' in options
def sum_of_lists(lists):
def _sum(items):
while None in items:
items.remove(None)
return sum(items)
return [_sum(list(a)) for a in zip(*lists)]
if week_over_week:
if cumulated:
cumulated_dict = {
'Cumulated': sum_of_lists([v for k, v in datasets['pip']['wow'].to_dict().items() if k in library_names]),
'day': datasets['pip']['wow'].to_dict()['day']
}
return self.response(cumulated_dict)
else:
return self.response({k: v for k, v in datasets['pip']['wow'].to_dict().items() if k in library_names + ['day']})
else:
if cumulated:
cumulated_dict = {
'Cumulated': sum_of_lists([v for k, v in datasets['pip']['raw'].to_dict().items() if k in library_names]),
'day': datasets['pip']['raw'].to_dict()['day']
}
return self.response(cumulated_dict)
else:
return self.response({k: v for k, v in datasets['pip']['raw'].to_dict().items() if k in library_names + ['day']})
if self.path.startswith("/retrieveStars"):
library_names, options = parse_name_and_options(self.path)
week_over_week = '1' in options
if week_over_week:
return self.response({k: v for k, v in datasets['stars']['wow'].to_dict().items() if k in library_names + ['day']})
else:
return self.response({k: v for k, v in datasets['stars']['raw'].to_dict().items() if k in library_names + ['day']})
if self.path.startswith("/retrieveIssues"):
library_names, options = parse_name_and_options(self.path)
exclude_org_members = '1' in options
week_over_week = '2' in options
if week_over_week:
if exclude_org_members:
return self.response(cut_output(datasets['issues']['eom_wow'], library_names))
else:
return self.response({k: v for k, v in datasets['issues']['wow'].to_dict().items() if k in library_names + ['day']})
else:
if exclude_org_members:
return self.response({k: v for k, v in datasets['issues']['eom'].to_dict().items() if k in library_names + ['day']})
else:
return self.response({k: v for k, v in datasets['issues']['raw'].to_dict().items() if k in library_names + ['day']})
return SimpleHTTPRequestHandler.do_GET(self)

    def response(self, output):
        # Serialize the payload as JSON and send it back to the client.
        self.send_response(200)
        self.send_header("Content-Type", "application/json")
        self.end_headers()
        self.wfile.write(json.dumps(output).encode("utf-8"))


server = ThreadingHTTPServer(("", 7860), RequestHandler)
print("Running on port 7860")
server.serve_forever()
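
# Example queries against a running instance (options are comma-separated
# flags; '1' toggles the first option of each endpoint, '2' the second):
#   curl 'http://localhost:7860/initialize'
#   curl 'http://localhost:7860/retrieveStars?input=transformers,datasets&options=1'
#   curl 'http://localhost:7860/retrievePipInstalls?input=transformers&options=1,2'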