tebakaja committed
Commit 37f31d4 · 1 Parent(s): 34c0b26

deployment
.gitignore CHANGED
@@ -1,3 +1,6 @@
  /node_modules
  /indonesia_stocks
- /onnxruntime
+ /onnxruntime
+ /onnxruntime-linux-x64-1.21.0
+ /models
+ getmodels.sh
Dockerfile CHANGED
@@ -1,26 +1,44 @@
  FROM golang:1.21.13-alpine3.20
  
  LABEL creator="al-fariqy raihan"
+ LABEL npm="202143501514"
  
  ENV APP_DIR=/thesis_forecasting_website \
      GO111MODULE=on \
      CGO_ENABLED=0
  
  WORKDIR ${APP_DIR}
  
- RUN apk add --no-cache git git-lfs curl \
+ RUN apk add --no-cache git git-lfs curl tzdata \
      && git lfs install
+ ENV TZ=Asia/Jakarta
  
  COPY go.mod go.sum ./
  RUN go mod download
  
  COPY . .
  
+ # NOTES:
+ # Deployed on Hugging Face (Docker),
+ # so a multi-stage build is not an option here.
  RUN go build -o main . \
      && go clean -modcache \
      && rm -rf /root/.cache/go-build /root/go/pkg
  
- RUN git clone https://huggingface.co/datasets/qywok/indonesia_stocks
+ RUN wget https://github.com/microsoft/onnxruntime/releases/download/v1.21.0/onnxruntime-linux-x64-1.21.0.tgz && \
+     tar -xvzf onnxruntime-linux-x64-1.21.0.tgz && \
+     rm -rf onnxruntime-linux-x64-1.21.0.tgz && \
+     mv ./onnxruntime-linux-x64-1.21.0 ./onnxruntime
+ 
+ RUN git lfs install && \
+     git clone https://huggingface.co/datasets/qywok/indonesia_stocks && \
+     mkdir -p models && \
+     for i in $(seq 1 10); do \
+         git clone https://huggingface.co/qywok/stock_models_$i && \
+         cd stock_models_$i && git lfs pull && cd .. && \
+         mv stock_models_$i/*.onnx models/ && \
+         rm -rf stock_models_$i; \
+     done
  
  RUN chmod -R 755 ${APP_DIR}
  
handlers/inference_handler.go ADDED
@@ -0,0 +1 @@
+ package handlers
inferences/bigru_inference.go DELETED
@@ -1 +0,0 @@
- package inferences
inferences/bilstm_inference.go DELETED
@@ -1 +0,0 @@
- package inferences
inferences/gru_inference.go DELETED
File without changes
inferences/lstm_inference.go DELETED
File without changes
inferences/stock_prediction.go ADDED
@@ -0,0 +1,161 @@
+ package inferences
+ 
+ import (
+     "fmt"
+     "log"
+     "time"
+ 
+     "github.com/gofiber/fiber/v2"
+ 
+     loaders "thesis_forecasting_website/loaders"
+     onnxruntime "github.com/belajarqywok/onnxruntime_go"
+     "sync"
+ )
+ 
+ var once sync.Once
+ var isInit bool
+ 
+ func denormalization(data, minValue, maxValue float32) float32 {
+     return (data * (maxValue - minValue)) + minValue
+ }
+ 
+ type StockRequest struct {
+     Issuer string `json:"issuer"`
+     Days   int    `json:"days"`
+ }
+ 
+ type StockPrice struct {
+     Date  string  `json:"date"`
+     Price float64 `json:"price"`
+ }
+ 
+ type StockResponse struct {
+     Actuals    []StockPrice `json:"actuals"`
+     Prediction []StockPrice `json:"prediction"`
+ }
+ 
+ func StockPredictionHandler(c *fiber.Ctx) error {
+     req := new(StockRequest)
+     if err := c.BodyParser(req); err != nil {
+         return c.Status(fiber.StatusBadRequest).JSON(fiber.Map{
+             "error": "Invalid request body",
+         })
+     }
+ 
+     if req.Days <= 0 { req.Days = 7 }
+ 
+     // initialize the ONNX Runtime shared library once per process
+     once.Do(func() {
+         onnxruntime.SetSharedLibraryPath("./onnxruntime-linux-x64-1.21.0/lib/libonnxruntime.so")
+         if err := onnxruntime.InitializeEnvironment(); err != nil {
+             log.Fatal("Error initializing ONNX runtime: ", err)
+         }
+ 
+         isInit = true
+     })
+ 
+     if !isInit { log.Fatal("ONNX runtime not initialized") }
+ 
+     // load dataset
+     dataset_csv_path := fmt.Sprintf("./indonesia_stocks/modeling_datas/%s.csv", req.Issuer)
+     data, err := loaders.DatasetLoader(dataset_csv_path)
+     if err != nil {
+         return c.Status(fiber.StatusInternalServerError).JSON(fiber.Map{
+             "error": "Error loading CSV",
+         })
+     }
+ 
+     // load scaler
+     minmax_json_path := fmt.Sprintf("./indonesia_stocks/min_max/%s.json", req.Issuer)
+     scalers, err := loaders.ScalersLoader(minmax_json_path)
+     if err != nil {
+         return c.Status(fiber.StatusInternalServerError).JSON(fiber.Map{
+             "error": "Error loading scalers",
+         })
+     }
+ 
+     // last 30 actual closing prices (denormalized) for the response
+     n := 30
+     if len(data) < n { n = len(data) }
+     lastDataActual := data[len(data) - n:]
+ 
+     var actuals []StockPrice
+     for _, row := range lastDataActual {
+         date := row[0].(string)
+         closeVal := row[1].(float32)
+ 
+         closePrice := denormalization(
+             closeVal,
+             scalers.MinValue["Close"],
+             scalers.MaxValue["Close"],
+         )
+ 
+         actuals = append(actuals, StockPrice{
+             Date:  date,
+             Price: float64(closePrice),
+         })
+     }
+ 
+     // Prepare input for model
+     sequenceLength := int64(60)
+     featureSize := int64(5)
+     lastData := data[len(data) - int(sequenceLength):]
+ 
+     inputData := make([]float32, sequenceLength * featureSize)
+     for i, row := range lastData {
+         for j := 1; j <= int(featureSize); j++ {
+             val, ok := row[j].(float32)
+             if !ok {
+                 log.Fatalf("Expected float32 at row %d col %d, got %T", i, j, row[j])
+             }
+ 
+             inputData[i*int(featureSize) + (j-1)] = val
+         }
+     }
+ 
+     inputShape := onnxruntime.NewShape(1, sequenceLength, featureSize)
+     inputTensor, err := onnxruntime.NewTensor(inputShape, inputData)
+     if err != nil { log.Fatalf("Error creating input tensor: %v", err) }
+ 
+     outputShape := onnxruntime.NewShape(1, 1)
+     outputTensor, err := onnxruntime.NewEmptyTensor[float32](outputShape)
+     if err != nil { log.Fatalf("Error creating output tensor: %v", err) }
+ 
+     model_onnx_path := fmt.Sprintf("./models/%s.onnx", req.Issuer)
+     session, err := onnxruntime.NewAdvancedSession(
+         model_onnx_path,
+         []string{"input"}, []string{"output"},
+         []onnxruntime.ArbitraryTensor{inputTensor},
+         []onnxruntime.ArbitraryTensor{outputTensor}, nil,
+     )
+ 
+     if err != nil { log.Fatalf("Error initializing ONNX session: %v", err) }
+ 
+     // generate predictions
+     var predicted []StockPrice
+     lastDate, _ := time.Parse("2006-01-02", actuals[len(actuals)-1].Date)
+ 
+     for i := 0; i < req.Days; i++ {
+         if err := session.Run(); err != nil {
+             log.Fatalf("Error running model: %v", err)
+         }
+ 
+         predictedClose := outputTensor.GetData()[0]
+         denormPrice := denormalization(predictedClose, scalers.MinValue["Close"], scalers.MaxValue["Close"])
+ 
+         lastDate = lastDate.AddDate(0, 0, 1)
+         predicted = append(predicted, StockPrice{
+             Date:  lastDate.Format("2006-01-02"),
+             Price: float64(denormPrice),
+         })
+ 
+         // slide the input window forward and append the newly predicted close
+         copy(inputData, inputData[int(featureSize):])
+         inputData[len(inputData)-1] = predictedClose
+     }
+ 
+     resp := StockResponse{
+         Actuals:    actuals,
+         Prediction: predicted,
+     }
+ 
+     return c.JSON(resp)
+ }
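
For reference, a minimal client sketch for the endpoint added in this commit. It assumes the POST /prediction route wired up in main.go below, a server listening on localhost port 7860, and an illustrative issuer symbol "BBCA"; the port and symbol are assumptions rather than values from the commit, and the request/response types simply mirror StockRequest and StockResponse above.

// prediction_client.go: illustrative sketch; host, port, and issuer symbol
// are assumptions, not values taken from this commit.
package main

import (
    "bytes"
    "encoding/json"
    "fmt"
    "log"
    "net/http"
)

// stockPrice mirrors the StockPrice JSON shape returned by the handler.
type stockPrice struct {
    Date  string  `json:"date"`
    Price float64 `json:"price"`
}

// stockResponse mirrors StockResponse: recent actual prices plus the forecast.
type stockResponse struct {
    Actuals    []stockPrice `json:"actuals"`
    Prediction []stockPrice `json:"prediction"`
}

func main() {
    // Body matches StockRequest; Days <= 0 falls back to 7 on the server.
    body, _ := json.Marshal(map[string]interface{}{
        "issuer": "BBCA", // hypothetical issuer symbol
        "days":   7,
    })

    resp, err := http.Post("http://localhost:7860/prediction",
        "application/json", bytes.NewReader(body))
    if err != nil {
        log.Fatal(err)
    }
    defer resp.Body.Close()

    var out stockResponse
    if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
        log.Fatal(err)
    }
    fmt.Printf("got %d actual and %d predicted prices\n",
        len(out.Actuals), len(out.Prediction))
}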
loaders/fundamental_loader.go CHANGED
@@ -5,6 +5,43 @@ import (
      "encoding/json"
  )
  
+ type Fundamental struct {
+     FontawesomeIcon     string `json:"fontawesome_icon"`
+     Symbol              string `json:"symbol"`
+     SectorID            string `json:"sector_id"`
+     ShortName           string `json:"shortName"`
+ 
+     Address             string `json:"address"`
+     Phone               string `json:"phone"`
+     Website             string `json:"website"`
+     MarketCap           string `json:"marketCap"`
+     DividendRate        string `json:"dividendRate"`
+     DividendYield       string `json:"dividendYield"`
+     EarningsGrowth      string `json:"earningsGrowth"`
+     ProfitMargins       string `json:"profitMargins"`
+     GrossMargins        string `json:"grossMargins"`
+     Beta                string `json:"beta"`
+     BookValue           string `json:"bookValue"`
+     PriceToBook         string `json:"priceToBook"`
+ 
+     QuickRatio          string `json:"quickRatio"`
+     CurrentRatio        string `json:"currentRatio"`
+     DebtToEquity        string `json:"debtToEquity"`
+     RevenuePerShare     string `json:"revenuePerShare"`
+     RevenueGrowth       string `json:"revenueGrowth"`
+     Ebitda              string `json:"ebitda"`
+     RegularMarketChange string `json:"regularMarketChange"`
+     PayoutRatio         string `json:"payoutRatio"`
+     TrailingPE          string `json:"trailingPE"`
+     ForwardPE           string `json:"forwardPE"`
+     TrailingEps         string `json:"trailingEps"`
+     ForwardEps          string `json:"forwardEps"`
+ }
+ 
+ type FundamentalsWrapper struct {
+     Fundamentals Fundamental `json:"fundamentals"`
+ }
+ 
  func FundamentalLoader(path string) (*Fundamental, error) {
      file, err := os.ReadFile(path)
      if err != nil {
loaders/fundamental_struct.go DELETED
@@ -1,38 +0,0 @@
- package loaders
- 
- type Fundamental struct {
-     FontawesomeIcon     string `json:"fontawesome_icon"`
-     Symbol              string `json:"symbol"`
-     SectorID            string `json:"sector_id"`
-     ShortName           string `json:"shortName"`
- 
-     Address             string `json:"address"`
-     Phone               string `json:"phone"`
-     Website             string `json:"website"`
-     MarketCap           string `json:"marketCap"`
-     DividendRate        string `json:"dividendRate"`
-     DividendYield       string `json:"dividendYield"`
-     EarningsGrowth      string `json:"earningsGrowth"`
-     ProfitMargins       string `json:"profitMargins"`
-     GrossMargins        string `json:"grossMargins"`
-     Beta                string `json:"beta"`
-     BookValue           string `json:"bookValue"`
-     PriceToBook         string `json:"priceToBook"`
- 
-     QuickRatio          string `json:"quickRatio"`
-     CurrentRatio        string `json:"currentRatio"`
-     DebtToEquity        string `json:"debtToEquity"`
-     RevenuePerShare     string `json:"revenuePerShare"`
-     RevenueGrowth       string `json:"revenueGrowth"`
-     Ebitda              string `json:"ebitda"`
-     RegularMarketChange string `json:"regularMarketChange"`
-     PayoutRatio         string `json:"payoutRatio"`
-     TrailingPE          string `json:"trailingPE"`
-     ForwardPE           string `json:"forwardPE"`
-     TrailingEps         string `json:"trailingEps"`
-     ForwardEps          string `json:"forwardEps"`
- }
- 
- type FundamentalsWrapper struct {
-     Fundamentals Fundamental `json:"fundamentals"`
- }
loaders/historical_loader.go CHANGED
@@ -5,6 +5,20 @@ import (
      "encoding/json"
  )
  
+ type Historical struct {
+     Date     string  `json:"date"`
+     FullDate string  `json:"full_date"`
+     Open     float64 `json:"open"`
+     High     float64 `json:"high"`
+     Low      float64 `json:"low"`
+     Close    float64 `json:"close"`
+     Volume   float64 `json:"volume"`
+ }
+ 
+ type HistoricalsWrapper struct {
+     Historicals []Historical `json:"historicals"`
+ }
+ 
  func HistoricalLoader(path string) ([]Historical, error) {
      file, err := os.ReadFile(path)
      if err != nil {
loaders/historical_struct.go DELETED
@@ -1,15 +0,0 @@
- package loaders
- 
- type Historical struct {
-     Date     string  `json:"date"`
-     FullDate string  `json:"full_date"`
-     Open     float64 `json:"open"`
-     High     float64 `json:"high"`
-     Low      float64 `json:"low"`
-     Close    float64 `json:"close"`
-     Volume   float64 `json:"volume"`
- }
- 
- type HistoricalsWrapper struct {
-     Historicals []Historical `json:"historicals"`
- }
loaders/indicator_loader.go CHANGED
@@ -5,6 +5,18 @@ import (
      "encoding/json"
  )
  
+ type Indicator struct {
+     Date     string  `json:"date"`
+     FullDate string  `json:"full_date"`
+     MFI      float64 `json:"MFI"`
+     RSI      float64 `json:"RSI"`
+     MACD     float64 `json:"MACD"`
+ }
+ 
+ type IndicatorsWrapper struct {
+     Indicators []Indicator `json:"indicators"`
+ }
+ 
  func IndicatorLoader(path string) ([]Indicator, error) {
      file, err := os.ReadFile(path)
      if err != nil {
loaders/indicator_struct.go DELETED
@@ -1,13 +0,0 @@
- package loaders
- 
- type Indicator struct {
-     Date     string  `json:"date"`
-     FullDate string  `json:"full_date"`
-     MFI      float64 `json:"MFI"`
-     RSI      float64 `json:"RSI"`
-     MACD     float64 `json:"MACD"`
- }
- 
- type IndicatorsWrapper struct {
-     Indicators []Indicator `json:"indicators"`
- }
loaders/inference_loader.go ADDED
@@ -0,0 +1,64 @@
+ package loaders
+ 
+ import (
+     "os"
+     "fmt"
+     "strconv"
+     "io/ioutil"
+     "encoding/csv"
+     "encoding/json"
+ )
+ 
+ type Scalers struct {
+     MinValue map[string]float32 `json:"min_value"`
+     MaxValue map[string]float32 `json:"max_value"`
+ }
+ 
+ func ScalersLoader(filename string) (Scalers, error) {
+     var scalers_data Scalers
+ 
+     scalers_json, err := ioutil.ReadFile(filename)
+     if err != nil { return scalers_data, err }
+ 
+     err = json.Unmarshal(scalers_json, &scalers_data)
+     if err != nil { return scalers_data, err }
+ 
+     return scalers_data, nil
+ }
+ 
+ func DatasetLoader(filepath string) ([][]interface{}, error) {
+     csv_file, err := os.Open(filepath)
+     if err != nil {
+         return nil, err
+     }
+     defer csv_file.Close()
+ 
+     csv_reader := csv.NewReader(csv_file)
+     csv_records, err := csv_reader.ReadAll()
+     if err != nil {
+         return nil, err
+     }
+ 
+     var csv_data [][]interface{}
+     for _, csv_row := range csv_records[1:] {
+         var row_data []interface{}
+ 
+         // the first column stays a string (Date)
+         row_data = append(row_data, csv_row[0])
+ 
+         // the remaining columns are parsed as float32
+         for _, csv_row_val := range csv_row[1:] {
+             val, err := strconv.ParseFloat(csv_row_val, 32)
+             if err != nil {
+                 return nil, fmt.Errorf("error parsing value '%s': %v", csv_row_val, err)
+             }
+             row_data = append(row_data, float32(val))
+         }
+ 
+         csv_data = append(csv_data, row_data)
+     }
+ 
+     return csv_data, nil
+ }
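
A short sketch of how these two loaders are consumed together, assuming each min_max JSON file carries per-column "min_value"/"max_value" maps (the prediction handler reads the "Close" key) and each CSV starts with a Date column followed by normalized numeric columns. The file names and sample values below are illustrative, not taken from the dataset.

// loaders_usage.go: illustrative sketch; paths, issuer name, and the JSON
// layout shown in the comment are assumptions based on how the handler
// consumes Scalers and the DatasetLoader output.
package main

import (
    "fmt"
    "log"

    loaders "thesis_forecasting_website/loaders"
)

func main() {
    // Assumed scaler file shape, e.g. ./indonesia_stocks/min_max/BBCA.json:
    // {
    //   "min_value": { "Close": 4000.0 },
    //   "max_value": { "Close": 9800.0 }
    // }
    scalers, err := loaders.ScalersLoader("./indonesia_stocks/min_max/BBCA.json")
    if err != nil {
        log.Fatal(err)
    }

    // DatasetLoader keeps column 0 (Date) as a string and parses the rest
    // of each row into float32 values.
    data, err := loaders.DatasetLoader("./indonesia_stocks/modeling_datas/BBCA.csv")
    if err != nil {
        log.Fatal(err)
    }

    // Undo min-max scaling on the last normalized Close value, the same way
    // denormalization() does in stock_prediction.go: price = norm*(max-min) + min
    last := data[len(data)-1]
    norm := last[1].(float32)
    price := norm*(scalers.MaxValue["Close"]-scalers.MinValue["Close"]) + scalers.MinValue["Close"]
    fmt.Println(last[0].(string), price)
}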
loaders/issuer_loader.go CHANGED
@@ -5,6 +5,19 @@ import (
      "encoding/json"
  )
  
+ type Issuer struct {
+     FontawesomeIcon string `json:"fontawesome_icon"`
+     Symbol          string `json:"symbol"`
+     SectorID        string `json:"sector_id"`
+     ShortName       string `json:"shortName"`
+     Beta            string `json:"beta"`
+     DividendYield   string `json:"dividendYield"`
+ }
+ 
+ type IssuersWrapper struct {
+     Issuers []Issuer `json:"infographics"`
+ }
+ 
  func IssuerLoader(path string) ([]Issuer, error) {
      file, err := os.ReadFile(path)
      if err != nil {
loaders/issuer_struct.go DELETED
@@ -1,14 +0,0 @@
- package loaders
- 
- type Issuer struct {
-     FontawesomeIcon string `json:"fontawesome_icon"`
-     Symbol          string `json:"symbol"`
-     SectorID        string `json:"sector_id"`
-     ShortName       string `json:"shortName"`
-     Beta            string `json:"beta"`
-     DividendYield   string `json:"dividendYield"`
- }
- 
- type IssuersWrapper struct {
-     Issuers []Issuer `json:"infographics"`
- }
loaders/sector_loader.go CHANGED
@@ -5,11 +5,13 @@ import (
      "encoding/json"
  )
  
+ type SectorsWrapper struct {
+     Sectors []string `json:"sectors"`
+ }
+ 
  func SectorLoader(path string) ([]string, error) {
      file, err := os.ReadFile(path)
-     if err != nil {
-         return nil, err
-     }
+     if err != nil { return nil, err }
  
      var data SectorsWrapper
      if err := json.Unmarshal(file, &data); err != nil {
loaders/sector_struct.go DELETED
@@ -1,5 +0,0 @@
- package loaders
- 
- type SectorsWrapper struct {
-     Sectors []string `json:"sectors"`
- }
main.go CHANGED
@@ -15,6 +15,7 @@ import (
  
      helpers "thesis_forecasting_website/helpers"
      handlers "thesis_forecasting_website/handlers"
+     inferences "thesis_forecasting_website/inferences"
      middlewares "thesis_forecasting_website/middlewares"
  )
  
@@ -36,6 +37,7 @@ func main() {
  
      forecasting_service.Get("/", handlers.IssuerHandler)
      forecasting_service.Get("/infographic", handlers.InfographicHandler)
+     forecasting_service.Post("/prediction", inferences.StockPredictionHandler)
  
      host := os.Getenv("FORECASTING_SERVICE_HOST")
      port := os.Getenv("FORECASTING_SERVICE_PORT")
makefile CHANGED
@@ -10,5 +10,14 @@ run:
      npm run min:js:infographic:stock_historical:table
      go run main.go
  
+ getmodels:
+     apt install -y git git-lfs
+     git lfs install
  
- 
+     mkdir -p models
+     for i in $$(seq 1 10); do \
+         git clone https://huggingface.co/qywok/stock_models_$$i && \
+         cd stock_models_$$i && git lfs pull && cd .. && \
+         mv stock_models_$$i/*.onnx models/ && \
+         rm -rf stock_models_$$i; \
+     done