Spaces commit cef1f1e
Parent(s): d2889eb
Week over week + exclude org members + cumulated
app.py
CHANGED
@@ -27,9 +27,24 @@ def _range(e):
     global val
     e['range'] = val
     val += 1
+
+    current_date = datetime.strptime(e['dates'], "%Y-%m-%dT%H:%M:%SZ")
+    first_date = datetime.fromtimestamp(1)
+    week = abs(current_date - first_date).days // 7
+    e['week'] = week
+
     return e
 
 
+def _ignore_org_members(e):
+    global val
+    e['range_non_org'] = val
+
+    if e['type']['authorAssociation'] != 'MEMBER':
+        val += 1
+
+    return e
+
 stars = {}
 for k, v in datasets['stars'].items():
     stars[k] = v.map(_range)
@@ -39,6 +54,8 @@ issues = {}
 for k, v in datasets['issues'].items():
     issues[k] = v.map(_range)
     val = 0
+    issues[k] = issues[k].map(_ignore_org_members)
+    val = 0
 
 datasets['stars'] = DatasetDict(**stars)
 datasets['issues'] = DatasetDict(**issues)
@@ -69,6 +86,17 @@ def running_mean(x, N, total_length=-1):
     return np.pad(cumsum[N:] - cumsum[:-N], (to_pad, 0)) / float(N)
 
 
+def parse_name_and_options(path):
+    url = urlparse(path)
+    query = parse_qs(url.query)
+    library_names = query.get("input", None)[0]
+    library_names = library_names.split(',')
+    options = query.get("options", None)[0]
+    options = options.split(',')
+
+    return library_names, options
+
+
 class RequestHandler(SimpleHTTPRequestHandler):
     def do_GET(self):
         print(self.path)
@@ -86,28 +114,16 @@ class RequestHandler(SimpleHTTPRequestHandler):
                 if len(v) < len(dataset_with_most_splits):
                     warnings.extend(f"The {k} dataset does not contain all splits. Missing: {dataset_with_most_splits - v}")
 
-            self.send_response(200)
-            self.send_header("Content-Type", "application/json")
-            self.end_headers()
-
             # TODO: Send and display warnings
             dataset_with_most_splits = list(dataset_with_most_splits)
            dataset_with_most_splits.sort()
-            self.wfile.write(json.dumps(list(dataset_with_most_splits)).encode("utf-8"))
 
-            return
+            return self.response(list(dataset_with_most_splits))
 
         if self.path.startswith("/retrievePipInstalls"):
-
-            query = parse_qs(url.query)
-            library_names = query.get("input", None)[0]
-            library_names = library_names.split(',')
-
-            if 'Cumulated' in library_names:
-                dataset_keys = {k: set(v.keys()) for k, v in datasets.items()}
-                dataset_with_most_splits = max([d for d in dataset_keys.values()], key=len)
-                library_names = list(dataset_with_most_splits)
+            library_names, options = parse_name_and_options(self.path)
 
+            if '1' in options:
             returned_values = {}
             for library_name in library_names:
                 for i in datasets['pip'][library_name]:
@@ -136,31 +152,29 @@ class RequestHandler(SimpleHTTPRequestHandler):
             output = {l: [k[l] for k in returned_values.values()] for l in library_names}
             output['day'] = list(returned_values.keys())
 
-            self.send_response(200)
-            self.send_header("Content-Type", "application/json")
-            self.end_headers()
-
-            self.wfile.write(json.dumps(output).encode("utf-8"))
-
-            return SimpleHTTPRequestHandler
+            return self.response(output)
 
         if self.path.startswith("/retrieveStars"):
-
-            query = parse_qs(url.query)
-            library_names = query.get("input", None)[0]
-            library_names = library_names.split(',')
-
+            library_names, options = parse_name_and_options(self.path)
             returned_values = {}
             dataset_dict = datasets['stars']
+            week_over_week = '1' in options
 
             for library_name in library_names:
                 dataset = dataset_dict[library_name]
 
+                last_value = 0
+                last_week = dataset[0]['week']
                 for i in dataset:
+                    if week_over_week and last_week == i['week']:
+                        continue
                     if i['dates'] in returned_values:
-                        returned_values[i['dates']][library_name] = i['range']
+                        returned_values[i['dates']][library_name] = i['range'] - last_value
                     else:
-                        returned_values[i['dates']] = {library_name: i['range']}
+                        returned_values[i['dates']] = {library_name: i['range'] - last_value}
+
+                    last_value = i['range'] if week_over_week else 0
+                    last_week = i['week']
 
             returned_values = collections.OrderedDict(sorted(returned_values.items()))
             returned_values = link_values(library_names, returned_values)
@@ -168,33 +182,40 @@ class RequestHandler(SimpleHTTPRequestHandler):
             output['day'] = list(returned_values.keys())[::-1]
 
             # Trim down to a smaller number of points.
-
-
-
-
-            self.end_headers()
+            if len(output) > 100:
+                output = {
+                    k: [v for i, v in enumerate(value) if i % int(len(value) / 100) == 0] for k, value in output.items()
+                }
 
-            self.wfile.write(json.dumps(output).encode("utf-8"))
+            return self.response(output)
 
-            return SimpleHTTPRequestHandler
 
         if self.path.startswith("/retrieveIssues"):
-
-
-
-
+            library_names, options = parse_name_and_options(self.path)
+
+            exclude_org_members = '1' in options
+            week_over_week = '2' in options
 
             returned_values = {}
             dataset_dict = datasets['issues']
+            range_id = 'range' if not exclude_org_members else 'range_non_org'
 
             for library_name in library_names:
                 dataset = dataset_dict[library_name]
 
+                last_value = 0
+                last_week = dataset[0]['week']
                 for i in dataset:
+                    if week_over_week and last_week == i['week']:
+                        continue
+
                     if i['dates'] in returned_values:
-                        returned_values[i['dates']][library_name] = i['range']
+                        returned_values[i['dates']][library_name] = i[range_id] - last_value
                     else:
-                        returned_values[i['dates']] = {library_name: i['range']}
+                        returned_values[i['dates']] = {library_name: i[range_id] - last_value}
+
+                    last_value = i[range_id] if week_over_week else 0
+                    last_week = i['week']
 
             returned_values = collections.OrderedDict(sorted(returned_values.items()))
             returned_values = link_values(library_names, returned_values)
@@ -202,17 +223,23 @@ class RequestHandler(SimpleHTTPRequestHandler):
             output['day'] = list(returned_values.keys())[::-1]
 
             # Trim down to a smaller number of points.
-
+            if len(output) > 100:
+                output = {
+                    k: [v for i, v in enumerate(value) if i % int(len(value) / 100) == 0] for k, value in output.items()
+                }
 
-            self.send_response(200)
-            self.send_header("Content-Type", "application/json")
-            self.end_headers()
+            return self.response(output)
 
-
+        return SimpleHTTPRequestHandler.do_GET(self)
 
-
+    def response(self, output):
+        self.send_response(200)
+        self.send_header("Content-Type", "application/json")
+        self.end_headers()
 
-
+        self.wfile.write(json.dumps(output).encode("utf-8"))
+
+        return SimpleHTTPRequestHandler
 
 
 server = ThreadingHTTPServer(("", 7860), RequestHandler)
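For reference, the new query parsing can be exercised on its own. Below is a minimal sketch: the helper is the one added to app.py above, while the request path, library names, and option flags are illustrative values, not taken from the commit.

from urllib.parse import urlparse, parse_qs

# Helper as added in app.py: split the "input" and "options" query
# parameters of a request path into lists of strings.
def parse_name_and_options(path):
    url = urlparse(path)
    query = parse_qs(url.query)
    library_names = query.get("input", None)[0]
    library_names = library_names.split(',')
    options = query.get("options", None)[0]
    options = options.split(',')

    return library_names, options

# Illustrative request path; '1' and '2' are the flags the issues handler checks.
names, options = parse_name_and_options("/retrieveIssues?input=transformers,datasets&options=1,2")
print(names)    # ['transformers', 'datasets']
print(options)  # ['1', '2']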
index.js
CHANGED
@@ -13,6 +13,61 @@ const load = () => {
     return l0
 }
 
+const getCheckedOptions = () => {
+    const options = Array.from(document.querySelectorAll('.option-div'))
+        .map(e => Array.from(e.children)
+        .filter(e => e.nodeName == 'DIV'))
+        .filter(e => e.length)
+        .flat()
+        .map(e => e.id)
+        .filter(e => document.querySelector(`#${e}-checkbox`).checked)
+
+    const optionsDict = {}
+    for (let option of options) {
+        const key = option.split('-option-')[0]
+        const value = option.split('-option-')[1]
+
+        if (key in optionsDict)
+            optionsDict[key].push(value)
+        else
+            optionsDict[key] = [value]
+    }
+
+    console.log('dict', optionsDict)
+
+    return optionsDict;
+}
+
+const addOption = (category, optionName) => {
+    /* Options for the issue div */
+    const issueDiv = document.getElementById(`${category}Div`);
+    const div = document.createElement('div')
+
+    let found = false;
+    let optionNumber = 0;
+    while (!found && ++optionNumber < 100) {
+        let previousOption = document.getElementById(`${category}-option-${optionNumber}`);
+        console.log(previousOption)
+        found = previousOption === null;
+    }
+
+    div.id = `${category}-option-${optionNumber}`;
+    issueDiv.appendChild(div);
+
+    const checkBox = document.createElement('input');
+    checkBox.type = 'checkbox'
+    checkBox.id = `${category}-option-${optionNumber}-checkbox`
+
+    const checkBoxLabel = document.createElement('label');
+    const labelSpan = document.createElement('span')
+    labelSpan.textContent = optionName;
+    checkBoxLabel.appendChild(checkBox)
+    checkBoxLabel.appendChild(labelSpan)
+    div.appendChild(checkBoxLabel)
+
+    return optionNumber
+}
+
 let charts = [];
 
 const createButton = (title, libraries, methods) => {
@@ -21,16 +76,20 @@ const createButton = (title, libraries, methods) => {
     button.onclick = async () => {
         document.getElementById('pip-graph').innerHTML = ''
         document.getElementById('star-graph').innerHTML = ''
+        document.getElementById('issue-graph').innerHTML = ''
        const e = load()
         document.body.appendChild(e)
         const selectedLibraries = libraries.filter(e => document.querySelector(`#${e}Checkbox`).checked);
+
+        const relevantOptions = getCheckedOptions();
+
         if (charts.length !== 0) {
             for (const chart of charts) {
                 chart.destroy()
             }
         }
         for (const method of methods()) {
-            charts.push(await method(selectedLibraries))
+            charts.push(await method(selectedLibraries, relevantOptions))
         }
         document.body.removeChild(e)
     };
@@ -75,8 +134,11 @@ const initialize = async () => {
         librarySelector.appendChild(div)
     }
 
-    for (const element of ['pip', 'stars', '
+    for (const element of ['pip', 'stars', 'issue']) {
        const div = document.createElement('div');
+        div.classList.add('option-div')
+        div.id = `${element}Div`;
+
        const checkBox = document.createElement('input');
        checkBox.type = 'checkbox'
        checkBox.id = `${element}CheckboxGraph`;
@@ -91,8 +153,13 @@ const initialize = async () => {
        graphSelector.appendChild(div)
    }
 
+    addOption('pip', "Cumulated");
+    addOption('issue', "Exclude org members");
+    addOption('issue', "Week over week");
+    addOption('stars', "Week over week");
+
    const fetchButton = createButton('Fetch', inferJson, () => {
-        const graphNames = ['pip', 'stars', '
+        const graphNames = ['pip', 'stars', 'issue'].filter(e => document.querySelector(`#${e}CheckboxGraph`).checked);
        const graphs = []
 
        if (graphNames.includes('pip'))
@@ -101,7 +168,7 @@ const initialize = async () => {
        if (graphNames.includes('stars'))
            graphs.push(retrieveStars)
 
-        if (graphNames.includes('
+        if (graphNames.includes('issue'))
            graphs.push(retrieveIssues)
 
        return graphs
@@ -109,8 +176,9 @@ const initialize = async () => {
    selectorSubmit.appendChild(fetchButton);
 };
 
-const retrievePipInstalls = async (libraryNames) => {
-    const
+const retrievePipInstalls = async (libraryNames, options) => {
+    const relevantOptions = options['pip']
+    const inferResponse = await fetch(`retrievePipInstalls?input=${libraryNames}&options=${relevantOptions}`);
    const inferJson = await inferResponse.json();
    const colors = ['#FF0000', '#00FF00', '#0000FF', '#FF00FF', '#FFFF00', '#0000FF', '#F090F0', '#90F0F0', '#F0F090']
 
@@ -159,8 +227,9 @@ const retrievePipInstalls = async (libraryNames) => {
    return myChart;
 };
 
-const retrieveStars = async (libraryNames) => {
-    const
+const retrieveStars = async (libraryNames, options) => {
+    const relevantOptions = options['stars']
+    const inferResponse = await fetch(`retrieveStars?input=${libraryNames}&options=${relevantOptions}`);
    const inferJson = await inferResponse.json();
    const colors = ['#FF0000', '#00FF00', '#0000FF', '#FF00FF', '#FFFF00', '#0000FF', '#F090F0', '#90F0F0', '#F0F090']
 
@@ -209,8 +278,9 @@ const retrieveStars = async (libraryNames) => {
    return myChart;
 };
 
-const retrieveIssues = async (libraryNames) => {
-    const
+const retrieveIssues = async (libraryNames, options) => {
+    const relevantOptions = options['issue']
+    const inferResponse = await fetch(`retrieveIssues?input=${libraryNames}&options=${relevantOptions}`);
    const inferJson = await inferResponse.json();
    const colors = ['#FF0000', '#00FF00', '#0000FF', '#FF00FF', '#FFFF00', '#0000FF', '#F090F0', '#90F0F0', '#F0F090']
 
@@ -252,7 +322,7 @@ const retrieveIssues = async (libraryNames) => {
            plugins: {
                title: {
                    display: true,
-                    text: 'Number of
+                    text: 'Number of issue, PRs, and comments on these'
                }
            }
        }
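How the new checkboxes map onto the numeric option flags read by app.py follows from the order in which initialize() registers them: addOption assigns the first free number per category, and the handlers check '1' (and, for issues, '2') in the options list. The summary below is a sketch inferred from this commit, written in Python for symmetry with the server side:

# Option numbers per graph category, as assigned by addOption() in registration
# order and as interpreted by the handlers in app.py.
OPTION_FLAGS = {
    "pip":   {"1": "Cumulated"},
    "issue": {"1": "Exclude org members", "2": "Week over week"},
    "stars": {"1": "Week over week"},
}

# Ticking both issue options therefore produces a request such as:
#   /retrieveIssues?input=<libraries>&options=1,2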
style.css
CHANGED
@@ -39,6 +39,19 @@ button:hover {
     width: 100%;
 }
 
+.option-div {
+    border-radius: 3px;
+    border-color: rgb(180, 180, 200);
+    border-style: solid;
+    border-width: 2px 4px 4px 2px;
+    margin: 14px 0;
+    padding: 5px;
+}
+
+.option-div > div {
+    margin-left: 20px;
+}
+
 .submit {
     margin-bottom: 50px;
 }