Commit · 55e5d9c
Parent(s): 37b9ac3
Update app.py
app.py CHANGED
@@ -18,8 +18,8 @@ import requests
 #from sentence_transformers import SentenceTransformer, util
 #from sklearn.metrics.pairwise import cosine_similarity
 
-device = "cuda:0" if torch.cuda.is_available() else "cpu"
-
+#device = "cuda:0" if torch.cuda.is_available() else "cpu"
+model_1 = gr.interface.huggingface.load('sentence-transformers/stsb-distilbert-base')
 
 #SentenceTransformer('stsb-distilbert-base', device=device)
 
@@ -43,9 +43,16 @@ def softmax(x):
     return np.divide(exps, np.sum(exps))
 
 # Load pre-trained model
-model = GPT2LMHeadModel.from_pretrained('distilgpt2', output_hidden_states = True, output_attentions = True)
+
+#model = GPT2LMHeadModel.from_pretrained('distilgpt2', output_hidden_states = True, output_attentions = True)
+
+model = gr.interface.huggingface.load('distilgpt2', output_hidden_states = True, output_attentions = True)
+
 model.eval()
-tokenizer = GPT2Tokenizer.from_pretrained('distilgpt2')
+tokenizer = gr.interface.huggingface.load('distilgpt2')
+
+#tokenizer = GPT2Tokenizer.from_pretrained('distilgpt2')
+#tokenizer = GPT2Tokenizer.from_pretrained('distilgpt2')
 
 
 def cloze_prob(text):
@@ -102,7 +109,7 @@ def Visual_re_ranker(caption, visual_context_label, visual_context_prob):
     visual_context_label= visual_context_label
     visual_context_prob = visual_context_prob
     caption_emb = model.encode(caption, convert_to_tensor=True)
-    visual_context_label_emb =
+    visual_context_label_emb = model_1.encode(visual_context_label, convert_to_tensor=True)
 
 
     sim = cosine_scores = util.pytorch_cos_sim(caption_emb, visual_context_label_emb)
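
The commit replaces direct transformers calls with Gradio-loaded interfaces. For comparison, a minimal sketch of the route the commented-out lines correspond to, loading distilgpt2 directly with transformers; this is an illustration of that alternative, not the Space's final code.

# Sketch (assumption): load distilgpt2 with transformers directly, mirroring the
# lines this commit comments out in favour of gr.interface.huggingface.load.
from transformers import GPT2LMHeadModel, GPT2Tokenizer

model = GPT2LMHeadModel.from_pretrained(
    'distilgpt2',
    output_hidden_states=True,   # expose hidden states for downstream scoring
    output_attentions=True,      # expose attention maps
)
model.eval()                     # inference mode: disables dropout
tokenizer = GPT2Tokenizer.from_pretrained('distilgpt2')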
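
The Visual_re_ranker hunk encodes the caption and the visual-context label and scores them with cosine similarity. Below is a minimal sketch of that step, assuming the sentence-transformers package is used directly for the stsb-distilbert-base encoder (the diff instead binds it to model_1 via Gradio); the example caption and label are made up.

# Sketch (assumption): embedding + cosine-similarity step used by Visual_re_ranker,
# with sentence-transformers called directly instead of a Gradio-loaded model_1.
from sentence_transformers import SentenceTransformer, util

sbert = SentenceTransformer('stsb-distilbert-base')

caption = "a man riding a horse on the beach"       # hypothetical example inputs
visual_context_label = "horse"

caption_emb = sbert.encode(caption, convert_to_tensor=True)
label_emb = sbert.encode(visual_context_label, convert_to_tensor=True)

sim = util.pytorch_cos_sim(caption_emb, label_emb)  # 1x1 tensor of cosine similarity
print(float(sim[0][0]))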