andreylitvinov committed on
Commit 4320174 · 1 Parent(s): 75eccbf
Files changed (2)
  1. handler copy.py +51 -0
  2. handler.py +2 -44
handler copy.py ADDED
@@ -0,0 +1,51 @@
+ from typing import Dict, List, Any
+ # import torch
+ from transformers import DistilBertTokenizer, DistilBertForSequenceClassification
+
+
+ # from optimum.onnxruntime import ORTModelForSequenceClassification
+ from transformers import AutoModel
+ # from transformers import AutoModelForSequenceClassification, AutoTokenizer
+
+ from transformers import pipeline, AutoTokenizer
+
+ # checkpoint = "distilbert-base-uncased-finetuned-sst-2-english"
+ checkpoint = "distilbert-base-uncased"
+
+ class EndpointHandler():
+
+     def __init__(self, path=""):
+         # load the optimized model
+         # model = ORTModelForSequenceClassification.from_pretrained(path)
+         # model = AutoModel.from_pretrained(checkpoint)
+         # model = AutoModelForSequenceClassification.from_pretrained(checkpoint)
+
+         # tokenizer = AutoTokenizer.from_pretrained(path)
+         # tokenizer = AutoTokenizer.from_pretrained(pretrained_model_name_or_path=checkpoint)
+         model = DistilBertForSequenceClassification.from_pretrained(checkpoint)
+         tokenizer = DistilBertTokenizer.from_pretrained(checkpoint)
+
+         # create inference pipeline
+         self.pipeline = pipeline("text-classification", model=model, tokenizer=tokenizer)
+
+
+     def __call__(self, data: Any) -> List[List[Dict[str, float]]]:
+         """
+         Args:
+             data (:obj:):
+                 includes the input data and the parameters for the inference.
+         Return:
+             A :obj:`list`. The returned object should be a list like [[{"label": "LABEL_0", "score": 0.9939950108528137}]] containing:
+                 - "label": a string naming the predicted label/class; there can be multiple labels.
+                 - "score": a score between 0 and 1 describing how confident the model is in this label/class.
+         """
+         inputs = data.pop("inputs", data)
+         parameters = data.pop("parameters", None)
+
+         # pass inputs with all kwargs in data
+         if parameters is not None:
+             prediction = self.pipeline(inputs, **parameters)
+         else:
+             prediction = self.pipeline(inputs)
+         # postprocess the prediction
+         return prediction
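
A handler like the one added above is normally exercised by constructing EndpointHandler and calling it with a payload dict that carries an "inputs" key and optional "parameters". The snippet below is a minimal local smoke test, not part of this commit: the module name handler_copy and the example sentence are assumptions for illustration, and since checkpoint points at the bare "distilbert-base-uncased" model, the classification head is freshly initialized and the scores carry no signal.

# Minimal local smoke test (illustrative, not part of this commit).
# Assumes "handler copy.py" has been saved/renamed as handler_copy.py so it can be imported.
from handler_copy import EndpointHandler

handler = EndpointHandler(path=".")
payload = {"inputs": "I really enjoyed this movie!"}
prediction = handler(payload)
# With the bare "distilbert-base-uncased" checkpoint the sequence-classification
# head is randomly initialized, so the output looks like
# [{'label': 'LABEL_0', 'score': 0.53}] but the score is not meaningful.
print(prediction)
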
handler.py CHANGED
@@ -1,51 +1,9 @@
  from typing import Dict, List, Any
- # import torch
- from transformers import DistilBertTokenizer, DistilBertForSequenceClassification
-
-
- # from optimum.onnxruntime import ORTModelForSequenceClassification
- from transformers import AutoModel
- # from transformers import AutoModelForSequenceClassification, AutoTokenizer
-
- from transformers import pipeline, AutoTokenizer
-
- # checkpoint = "distilbert-base-uncased-finetuned-sst-2-english"
- checkpoint = "distilbert-base-uncased"

  class EndpointHandler():
-
      def __init__(self, path=""):
-         # load the optimized model
-         # model = ORTModelForSequenceClassification.from_pretrained(path)
-         # model = AutoModel.from_pretrained(checkpoint)
-         # model = AutoModelForSequenceClassification.from_pretrained(checkpoint)
-
-         # tokenizer = AutoTokenizer.from_pretrained(path)
-         # tokenizer = AutoTokenizer.from_pretrained(pretrained_model_name_or_path=checkpoint)
-         model = DistilBertForSequenceClassification.from_pretrained(checkpoint)
-         tokenizer = DistilBertTokenizer.from_pretrained(checkpoint)
-
-         # create inference pipeline
-         self.pipeline = pipeline("text-classification", model=model, tokenizer=tokenizer)
+         pass


      def __call__(self, data: Any) -> List[List[Dict[str, float]]]:
-         """
-         Args:
-             data (:obj:):
-                 includes the input data and the parameters for the inference.
-         Return:
-             A :obj:`list`. The returned object should be a list like [[{"label": "LABEL_0", "score": 0.9939950108528137}]] containing:
-                 - "label": a string naming the predicted label/class; there can be multiple labels.
-                 - "score": a score between 0 and 1 describing how confident the model is in this label/class.
-         """
-         inputs = data.pop("inputs", data)
-         parameters = data.pop("parameters", None)
-
-         # pass inputs with all kwargs in data
-         if parameters is not None:
-             prediction = self.pipeline(inputs, **parameters)
-         else:
-             prediction = self.pipeline(inputs)
-         # postprocess the prediction
-         return prediction
+         return 42
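
After this change, handler.py no longer loads a model or runs the pipeline: __init__ is a no-op and __call__ returns the constant 42 for any payload, so the deployed handler is effectively a stub while the inference logic lives only in "handler copy.py". A sketch of what a caller would observe, assuming the same calling convention as before (illustrative, not part of this commit):

# Illustrative only: behaviour of the trimmed handler.py after this commit.
from handler import EndpointHandler

handler = EndpointHandler()               # __init__ now just does `pass`
result = handler({"inputs": "any text at all"})
print(result)                             # -> 42, regardless of the payload
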