from sentence_transformers import SentenceTransformer
import numpy as np
import faiss
import gradio as gr


def read_text_from_file(file_path):
    """Read the whole knowledge-base file into a single string."""
    with open(file_path, "r") as text_file:
        text = text_file.read()
    return text


# Load the corpus and split it into paragraphs on the "&&" delimiter.
text_file_path = "iiti.txt"
texts = read_text_from_file(text_file_path)
texts = texts.split("&&")

# Encode every paragraph with a semantic-search model.
# The -cos-v1 model outputs normalized embeddings, so L2 distance
# yields the same ranking as cosine similarity.
model = SentenceTransformer("sentence-transformers/multi-qa-MiniLM-L6-cos-v1")
doc_emb = model.encode(texts)
d = doc_emb.shape[1]  # Dimension of the embedding vectors
print(doc_emb.shape)

# Build a flat (exact) L2 FAISS index over the paragraph embeddings.
index = faiss.IndexFlatL2(d)
index.add(doc_emb)


def embed_query(query):
    """Embed a single query string with the same model used for the documents."""
    query_emb = model.encode(query)
    return query_emb


def question(query):
    """Return the k paragraphs whose embeddings are closest to the query."""
    query_vector = np.asarray(embed_query(query))
    query_vector = np.expand_dims(query_vector, axis=0)  # FAISS expects shape (n_queries, d)
    print(query_vector.shape)

    k = 3  # Number of nearest neighbors to retrieve
    D, I = index.search(query_vector, k)

    relevant_paragraph = ""
    for i in range(k):
        relevant_paragraph_index = I[0][i]
        relevant_paragraph += texts[relevant_paragraph_index] + "\n"
    return relevant_paragraph


# Simple Gradio text-in/text-out interface around the retrieval function.
demo = gr.Interface(fn=question, inputs="text", outputs="text")

if __name__ == "__main__":
    demo.launch()