add AIBOM #22
by RiccardoDav - opened
ibm-granite_granite-timeseries-ttm-r1.json
ADDED
@@ -0,0 +1,65 @@
+{
+  "bomFormat": "CycloneDX",
+  "specVersion": "1.6",
+  "serialNumber": "urn:uuid:7c0d9634-2b52-4f6b-9094-bc22e03bcb73",
+  "version": 1,
+  "metadata": {
+    "timestamp": "2025-06-05T09:40:25.261363+00:00",
+    "component": {
+      "type": "machine-learning-model",
+      "bom-ref": "ibm-granite/granite-timeseries-ttm-r1-c11d1baa-919c-53d7-bb4e-e5702ff8e5b7",
+      "name": "ibm-granite/granite-timeseries-ttm-r1",
+      "externalReferences": [
+        {
+          "url": "https://huggingface.co/ibm-granite/granite-timeseries-ttm-r1",
+          "type": "documentation"
+        }
+      ],
+      "modelCard": {
+        "modelParameters": {
+          "task": "time-series-forecasting",
+          "architectureFamily": "tinytimemixer",
+          "modelArchitecture": "TinyTimeMixerForPrediction"
+        },
+        "properties": [
+          {
+            "name": "library_name",
+            "value": "granite-tsfm"
+          }
+        ],
+        "consideration": {
+          "useCases": "```"
+        }
+      },
+      "authors": [
+        {
+          "name": "ibm-granite"
+        }
+      ],
+      "licenses": [
+        {
+          "license": {
+            "id": "Apache-2.0",
+            "url": "https://spdx.org/licenses/Apache-2.0.html"
+          }
+        }
+      ],
+      "description": "TTM falls under the category of \u201cfocused pre-trained models\u201d, wherein each pre-trained TTM is tailored for a particular forecasting setting (governed by the context length and forecast length). Instead of building one massive model supporting all forecasting settings, we opt for the approach of constructing smaller pre-trained models, each focusing on a specific forecasting setting, thereby yielding more accurate results. Furthermore, this approach ensures that our models remain extremely small and exceptionally fast, facilitating easy deployment without demanding a ton of resources. Hence, in this model card, we plan to release several pre-trained TTMs that can cater to many common forecasting settings in practice. Additionally, we have released our source code along with our pretraining scripts that users can utilize to pretrain models on their own. Pretraining TTMs is very easy and fast, taking only 3-6 hours using 6 A100 GPUs, as opposed to several days or weeks in traditional approaches. Each pre-trained model will be released in a different branch name in this model card. Kindly access the required model using our getting started [notebook](https://github.com/IBM/tsfm/blob/main/notebooks/hfdemo/ttm_getting_started.ipynb) mentioning the branch name.",
+      "tags": [
+        "granite-tsfm",
+        "safetensors",
+        "tinytimemixer",
+        "time series",
+        "forecasting",
+        "pretrained models",
+        "foundation models",
+        "time series foundation models",
+        "time-series",
+        "time-series-forecasting",
+        "arxiv:2401.03955",
+        "license:apache-2.0",
+        "region:us"
+      ]
+    }
+  }
+}
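For reference, a minimal sketch of how a consumer might read this AIBOM and pull out a few of the fields shown in the diff above. It assumes the file is saved locally under the name used in this PR and only relies on the CycloneDX 1.6 field layout visible in the document; the script name and paths are illustrative, not part of the PR.

```python
# Minimal sketch: load the added AIBOM with the standard library and print
# a few fields from the CycloneDX 1.6 structure shown in the diff above.
import json

with open("ibm-granite_granite-timeseries-ttm-r1.json") as f:
    bom = json.load(f)

component = bom["metadata"]["component"]
print(component["name"])                                  # ibm-granite/granite-timeseries-ttm-r1
print(component["modelCard"]["modelParameters"]["task"])  # time-series-forecasting
print(component["licenses"][0]["license"]["id"])          # Apache-2.0
```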