soury committed
Commit c27eae8 · 1 Parent(s): 2e32068

create data folder

energy-report-llm-inference.json → data/energy-report-llm-inference.json RENAMED
@@ -1,91 +1,91 @@
{
  "header": {
    "licensing": "Creative Commons 4.0",
    "formatVersion": "0.1",
    "reportId": "2f4643f7-68b5-4fb6-21f0-b5dcda04897d",
    "reportDatetime": "2025-02-26 16:57:00",
    "reportStatus": "draft",
    "publisher": {
      "name": "sopra steria",
      "confidentialityLevel": "public"
    }
  },
  "task": {
    "taskStage": "inference",
    "taskFamily": "chatbot",
    "nbRequest": 1,
    "algorithms": [
      {
        "algorithmType": "llm",
        "foundationModelName": "llama2-13b",
        "foundationModelUri": "https://huggingface.co/meta-llama/Llama-2-13b-hf",
        "framework": "vllm",
        "parametersNumber": 13,
        "quantization": "q16"
      }
    ],
    "dataset": [
      {
        "dataUsage": "input",
        "dataType": "token",
        "dataQuantity": 11
      },
      {
        "dataUsage": "output",
        "dataType": "token",
        "dataQuantity": 828
      }
    ],
    "estimatedAccuracy": "veryGood"
  },
  "measures": [
    {
      "measurementMethod": "codecarbon",
      "version": "2.5.0",
      "cpuTrackingMode": "constant",
      "gpuTrackingMode": "nvml",
      "powerConsumption": 0.00267074,
      "measurementDuration": 19.09390426,
      "measurementDateTime": "2024-09-30 09:09:40"
    }
  ],
  "system": {
    "os": "linux"
  },
  "software": {
    "language": "python",
    "version": "3.10.12"
  },
  "infrastructure": {
    "infraType": "publicCloud",
    "cloudProvider": "ovh",
    "components": [
      {
        "componentName": "Intel(R) Xeon(R) Gold 6226R CPU @ 2.90GHz",
        "componentType": "cpu",
        "nbComponent": 30,
        "manufacturer": "Intel",
        "family": "Xeon",
        "series": "Gold 6226R"
      },
      {
        "componentName": "2 x Tesla V100S-PCIE-32GB",
        "componentType": "gpu",
        "nbComponent": 2,
        "memorySize": 32,
        "manufacturer": "Tesla",
        "family": "V100"
      },
      {
        "componentType": "ram",
        "nbComponent": 1,
        "memorySize": 86
      }
    ]
  },
  "environment": {
    "country": "france",
    "powerSupplierType": "public"
  },
  "quality": "high"
}
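Since the report now lives under data/, consumers need to read it from the new path. Below is a minimal sketch (not part of this commit) that loads the renamed file and derives a rough per-token figure; it assumes the repository root as the working directory and takes the powerConsumption field at face value as energy in kWh, which is how codecarbon reports consumed energy.

```python
import json
from pathlib import Path

# New location introduced by this commit.
report_path = Path("data/energy-report-llm-inference.json")
report = json.loads(report_path.read_text(encoding="utf-8"))

# Single codecarbon measure and the output-token count from the dataset list.
measure = report["measures"][0]
output_tokens = next(
    entry["dataQuantity"]
    for entry in report["task"]["dataset"]
    if entry["dataUsage"] == "output"
)

# Assuming powerConsumption is energy in kWh, express it as Wh per output token.
energy_wh = measure["powerConsumption"] * 1000
print(f"{energy_wh:.3f} Wh over {measure['measurementDuration']:.1f} s")
print(f"~{energy_wh / output_tokens:.4f} Wh per output token ({output_tokens} tokens)")
```

With the values in this report, that comes to roughly 2.67 Wh for the single request, or about 0.003 Wh per generated token.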
 
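The measures entry attributes the figures to codecarbon 2.5.0 with a constant-power CPU estimate and NVML GPU readings, but the report does not show how the tracker was wired into the serving code. The sketch below is only the generic codecarbon start/stop pattern that produces this kind of measurement; the project name, output directory, and run_inference placeholder are illustrative, not taken from this repository.

```python
from codecarbon import EmissionsTracker


def run_inference() -> None:
    # Placeholder for the vLLM chatbot request this report measures.
    ...


# Generic start/stop usage: codecarbon samples CPU/GPU/RAM power while the
# tracked code runs and appends a row to emissions.csv, whose energy_consumed
# (kWh) and duration (s) columns correspond to the fields reported above.
tracker = EmissionsTracker(
    project_name="llm-inference-chatbot",  # illustrative name
    measure_power_secs=1,
    output_dir="data",                     # illustrative output location
)
tracker.start()
try:
    run_inference()
finally:
    emissions_kg = tracker.stop()  # returns estimated emissions in kg CO2eq
    print(f"Estimated emissions: {emissions_kg} kg CO2eq")
```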