mainmagic committed on
Commit 37cf536 · 1 Parent(s): eaa1099

Upload fine-tuned Chronos model

.gitattributes CHANGED
@@ -33,3 +33,6 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ forecast_example_1.png filter=lfs diff=lfs merge=lfs -text
+ forecast_example_2.png filter=lfs diff=lfs merge=lfs -text
+ forecast_example_3.png filter=lfs diff=lfs merge=lfs -text
README.md ADDED
@@ -0,0 +1,93 @@
+ ---
+ language: en
+ license: apache-2.0
+ library_name: chronos
+ tags:
+ - chronos
+ - time-series
+ - forecasting
+ - finance
+ - cryptocurrency
+ datasets:
+ - time-series
+ ---
+
+ # chronos-t5-small-btc-m1
+
+ This is a Chronos model fine-tuned on financial time series data. It is based on the T5 architecture and is designed for time series forecasting.
+
+ ## Model Description
+
+ - **Model Type:** Chronos (T5-based time series forecasting model)
+ - **Fine-tuned from:** amazon/chronos-t5-small
+ - **Uploaded by:** mainmagic
+ - **Date:** 2025-04-06
+
+ This checkpoint was fine-tuned on BTC/USD M1 (one-minute) data for time series forecasting.
+
+ ## Performance Metrics
+
+ | Metric | Value |
+ |--------|-------|
+ | MSE | 1.0823 |
+ | MAE | 0.8172 |
+ | MAPE (%) | 16552.9256 |
+
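+ For reference, here is a minimal sketch of how these point-forecast metrics are typically computed (`y_true`/`y_pred` are hypothetical arrays of held-out targets and median forecasts). MAPE is expressed in percent and inflates sharply when true values sit near zero, which is common for normalized series.
+
+ ```python
+ import numpy as np
+
+ def evaluate(y_true: np.ndarray, y_pred: np.ndarray) -> dict:
+     """Standard point-forecast metrics; MAPE is in percent."""
+     err = y_pred - y_true
+     return {
+         "mse": float(np.mean(err ** 2)),
+         "mae": float(np.mean(np.abs(err))),
+         # MAPE divides by y_true, so values near zero blow it up
+         "mape": float(np.mean(np.abs(err / y_true)) * 100),
+     }
+ ```
+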
+ ## Usage
+
+ ```python
+ # Import the Chronos pipeline.
+ # Note: you may need to adjust the import path based on your installation.
+ import sys
+ sys.path.append('/path/to/chronos-forecasting/src')  # adjust this path
+ from chronos.chronos import ChronosPipeline
+ import torch
+
+ # Load the fine-tuned model
+ pipeline = ChronosPipeline.from_pretrained("mainmagic/chronos-t5-small-btc-m1")
+
+ # Example input: batch size 1, context length 512
+ context = torch.randn(1, 512)
+
+ # Generate a probabilistic forecast
+ forecast = pipeline.predict(
+     context,
+     prediction_length=60,  # predict 60 steps ahead
+     num_samples=20,        # generate 20 forecast trajectories
+ )
+
+ # Use the median across samples as the point forecast
+ median_forecast = torch.median(forecast, dim=1)[0]
+ ```
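+
+ Because `predict` returns sample trajectories rather than a single path, you can also derive prediction intervals from the samples. A minimal sketch (the 10%/90% band is an arbitrary illustrative choice):
+
+ ```python
+ # forecast has shape [batch, num_samples, prediction_length]
+ low, median, high = torch.quantile(
+     forecast, torch.tensor([0.1, 0.5, 0.9]), dim=1
+ )  # each result has shape [batch, prediction_length]
+ ```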
+
+ ## Training Details
+
+ This model was fine-tuned with the native Chronos training scripts on financial time series data, using the following parameters (see the bookkeeping sketch after this list):
+
+ - Context length: 512
+ - Prediction length: 60
+ - Optimizer: adamw_torch
+ - Learning rate: 0.0001
+ - Batch size: 16
+ - Gradient accumulation steps: 4
+
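+ With a per-device batch size of 16 and 4 gradient accumulation steps, each optimizer update sees an effective batch of 64 windows; combined with `max_steps: 100` from `training_info.json`, that is roughly 6,400 training windows overall:
+
+ ```python
+ per_device_batch_size = 16
+ gradient_accumulation_steps = 4
+ max_steps = 100  # from training_info.json
+
+ effective_batch_size = per_device_batch_size * gradient_accumulation_steps  # 64
+ total_windows = effective_batch_size * max_steps  # 6400 windows drawn during training
+ ```
+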
+ ## Limitations
+
+ This model is trained specifically for financial time series forecasting and may not perform well on other kinds of time series. Its performance may also vary with market conditions and with the particular financial instrument being forecast.
+
+ ## Citation
+
+ If you use this model, please cite:
+
+ ```bibtex
+ @misc{chronos-forecasting,
+   author = {Amazon Science},
+   title = {Chronos: Learning the Language of Time Series},
+   year = {2024},
+   publisher = {GitHub},
+   journal = {GitHub repository},
+   howpublished = {\url{https://github.com/amazon-science/chronos-forecasting}}
+ }
+ ```
config.json ADDED
@@ -0,0 +1,49 @@
+ {
+   "architectures": [
+     "T5ForConditionalGeneration"
+   ],
+   "chronos_config": {
+     "context_length": 512,
+     "eos_token_id": 1,
+     "model_type": "seq2seq",
+     "n_special_tokens": 2,
+     "n_tokens": 4096,
+     "num_samples": 20,
+     "pad_token_id": 0,
+     "prediction_length": 60,
+     "temperature": 1.0,
+     "tokenizer_class": "MeanScaleUniformBins",
+     "tokenizer_kwargs": {
+       "high_limit": 15.0,
+       "low_limit": -15.0
+     },
+     "top_k": 50,
+     "top_p": 1.0,
+     "use_eos_token": true
+   },
+   "classifier_dropout": 0.0,
+   "d_ff": 2048,
+   "d_kv": 64,
+   "d_model": 512,
+   "decoder_start_token_id": 0,
+   "dense_act_fn": "relu",
+   "dropout_rate": 0.1,
+   "eos_token_id": 1,
+   "feed_forward_proj": "relu",
+   "initializer_factor": 0.05,
+   "is_encoder_decoder": true,
+   "is_gated_act": false,
+   "layer_norm_epsilon": 1e-06,
+   "model_type": "t5",
+   "n_positions": 512,
+   "num_decoder_layers": 6,
+   "num_heads": 8,
+   "num_layers": 6,
+   "pad_token_id": 0,
+   "relative_attention_max_distance": 128,
+   "relative_attention_num_buckets": 32,
+   "torch_dtype": "float32",
+   "transformers_version": "4.51.0",
+   "use_cache": true,
+   "vocab_size": 4096
+ }
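The `chronos_config` block above determines how raw values become tokens: each context window is scaled by its mean absolute value, clipped to `[low_limit, high_limit]`, and quantized into uniform bins over the 4096-token vocabulary (minus 2 special tokens). A simplified sketch of that `MeanScaleUniformBins` scheme; the real implementation in chronos-forecasting also handles missing values, attention masks, and EOS tokens:

```python
import torch

N_TOKENS, N_SPECIAL = 4096, 2   # from chronos_config
LOW, HIGH = -15.0, 15.0         # tokenizer_kwargs limits

# Bin centers uniformly spaced over [LOW, HIGH]
centers = torch.linspace(LOW, HIGH, N_TOKENS - N_SPECIAL)

def tokenize(context: torch.Tensor) -> torch.Tensor:
    """Map a 1-D series to token ids (simplified MeanScaleUniformBins)."""
    scale = context.abs().mean().clamp(min=1e-10)  # mean-scale normalization
    scaled = (context / scale).clamp(LOW, HIGH)
    # Nearest-bin lookup, offset past the special tokens (pad=0, eos=1)
    return torch.bucketize(scaled, centers) + N_SPECIAL
```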
forecast_example_1.png ADDED

Git LFS Details

  • SHA256: 451bf294dbea04952d27e8e7db3c869594c653751ba04906efba307798a9ab4a
  • Pointer size: 130 Bytes
  • Size of remote file: 83.7 kB
forecast_example_2.png ADDED

Git LFS Details

  • SHA256: e906a8547de3485ef0da397cf9d6d3a0ef303ccc8e9878310313841f39c5aded
  • Pointer size: 130 Bytes
  • Size of remote file: 82.7 kB
forecast_example_3.png ADDED

Git LFS Details

  • SHA256: c3877ee5396688e4ef2970f197445b0bdb6478a17e10701de6e5a32501cc19f2
  • Pointer size: 130 Bytes
  • Size of remote file: 90.5 kB
generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "decoder_start_token_id": 0,
+   "eos_token_id": 1,
+   "pad_token_id": 0,
+   "transformers_version": "4.51.0"
+ }
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6be3b1f11a7f5b0aac1048e4b4ca77416240f6005677b5b24e741935a1a59b73
+ size 184632360
normalization_params.json ADDED
@@ -0,0 +1 @@
+ {"min_vals": {"open": 3134.9, "high": 3134.9, "low": 3134.9, "close": 3134.9, "volume": 1.0}, "max_vals": {"open": 109288.19, "high": 109288.19, "low": 109288.19, "close": 109288.19, "volume": 1044.0}}
training_info.json ADDED
@@ -0,0 +1,50 @@
+ {
+   "training_config": {
+     "training_data_paths": "['./chronos_training_data.arrow']",
+     "probability": "[1.0]",
+     "context_length": 512,
+     "prediction_length": 60,
+     "min_past": 60,
+     "max_steps": 100,
+     "save_steps": 50,
+     "log_steps": 100,
+     "per_device_train_batch_size": 16,
+     "learning_rate": 0.0001,
+     "optim": "adamw_torch",
+     "shuffle_buffer_length": 10000,
+     "gradient_accumulation_steps": 4,
+     "model_id": "amazon/chronos-t5-small",
+     "model_type": "seq2seq",
+     "random_init": false,
+     "tie_embeddings": true,
+     "output_dir": "./chronos-native-fine-tuned-model",
+     "tf32": false,
+     "torch_compile": false,
+     "tokenizer_class": "MeanScaleUniformBins",
+     "tokenizer_kwargs": "{'low_limit': -15.0, 'high_limit': 15.0}",
+     "n_tokens": 4096,
+     "n_special_tokens": 2,
+     "pad_token_id": 0,
+     "eos_token_id": 1,
+     "use_eos_token": true,
+     "lr_scheduler_type": "linear",
+     "warmup_ratio": 0.1,
+     "dataloader_num_workers": 0,
+     "max_missing_prop": 0.9,
+     "num_samples": 20,
+     "temperature": 1.0,
+     "top_k": 50,
+     "top_p": 1.0,
+     "seed": 1907066225
+   },
+   "job_info": {
+     "cuda_available": false,
+     "torchelastic_launched": false,
+     "python_version": "3.13.2 (main, Feb  4 2025, 14:51:09) [Clang 16.0.0 (clang-1600.0.26.6)]",
+     "torch_version": "2.6.0",
+     "numpy_version": "1.26.4",
+     "gluonts_version": "0.16.0",
+     "transformers_version": "4.51.0",
+     "accelerate_version": "0.34.2"
+   }
+ }
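The `training_config` block mirrors the arguments of the Chronos fine-tuning script, though a few fields ended up serialized as Python-literal strings. A sketch of turning this record back into a YAML config for reproducing the run (assumes PyYAML, and the usual `scripts/training/train.py` layout of the chronos-forecasting repo):

```python
import ast
import json

import yaml  # PyYAML, assumed installed

with open("training_info.json") as f:
    cfg = json.load(f)["training_config"]

# These fields were saved as Python-literal strings; parse them back
for key in ("training_data_paths", "probability", "tokenizer_kwargs"):
    cfg[key] = ast.literal_eval(cfg[key])

with open("chronos-btc-m1.yaml", "w") as f:
    yaml.safe_dump(cfg, f)

# Then, from a chronos-forecasting checkout (script path assumed, not verified here):
#   python scripts/training/train.py --config chronos-btc-m1.yaml
```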