example.cpp
// run with
// llama-server -hf nomic-ai/nomic-embed-text-v1.5-GGUF --embedding --pooling mean
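//
// Hybrid search example: documents are retrieved with both pgvector semantic
// search and Postgres full-text keyword search, and the two result lists are
// combined with Reciprocal Rank Fusion (RRF).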
#include <cstdint>
#include <iostream>
#include <string>
#include <vector>

#include <cpr/cpr.h>
#include <nlohmann/json.hpp>
#include <pgvector/pqxx.hpp>
#include <pqxx/pqxx>

using json = nlohmann::json;
std::vector<std::vector<float>> embed(const std::vector<std::string>& texts, const std::string& taskType) {
    // nomic-embed-text-v1.5 uses a task prefix
    // https://huggingface.co/nomic-ai/nomic-embed-text-v1.5
    std::vector<std::string> input;
    input.reserve(texts.size());
    for (auto& v : texts) {
        input.push_back(taskType + ": " + v);
    }

    std::string url = "http://localhost:8080/v1/embeddings";
    json data = {
        {"input", input}
    };

    cpr::Response r = cpr::Post(
        cpr::Url{url},
        cpr::Body{data.dump()},
        cpr::Header{{"Content-Type", "application/json"}}
    );
    if (r.status_code != 200) {
        throw std::runtime_error("Bad status: " + std::to_string(r.status_code));
    }
    json response = json::parse(r.text);

    std::vector<std::vector<float>> embeddings;
    for (auto& v : response["data"]) {
        embeddings.emplace_back(v["embedding"]);
    }
    return embeddings;
}

int main() {
    pqxx::connection conn("dbname=pgvector_example");

    pqxx::nontransaction tx(conn);
    tx.exec("CREATE EXTENSION IF NOT EXISTS vector");
    tx.exec("DROP TABLE IF EXISTS documents");
    tx.exec("CREATE TABLE documents (id bigserial PRIMARY KEY, content text, embedding vector(768))");
    // full-text index for the keyword half of the hybrid query
    tx.exec("CREATE INDEX ON documents USING GIN (to_tsvector('english', content))");

    // embed and store the documents
    std::vector<std::string> input = {
        "The dog is barking",
        "The cat is purring",
        "The bear is growling"
    };
    auto embeddings = embed(input, "search_document");
    for (size_t i = 0; i < input.size(); i++) {
        tx.exec("INSERT INTO documents (content, embedding) VALUES ($1, $2)", pqxx::params{input[i], pgvector::Vector(embeddings[i])});
    }
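
    // RRF: each document's score is the sum of 1 / (k + rank) over the two
    // ranked lists (semantic and keyword); k is a smoothing constant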
    std::string sql = R"(
        WITH semantic_search AS (
            SELECT id, RANK () OVER (ORDER BY embedding <=> $2) AS rank
            FROM documents
            ORDER BY embedding <=> $2
            LIMIT 20
        ),
        keyword_search AS (
            SELECT id, RANK () OVER (ORDER BY ts_rank_cd(to_tsvector('english', content), query) DESC) AS rank
            FROM documents, plainto_tsquery('english', $1) query
            WHERE to_tsvector('english', content) @@ query
            ORDER BY ts_rank_cd(to_tsvector('english', content), query) DESC
            LIMIT 20
        )
        SELECT
            COALESCE(semantic_search.id, keyword_search.id) AS id,
            COALESCE(1.0 / ($3 + semantic_search.rank), 0.0) +
            COALESCE(1.0 / ($3 + keyword_search.rank), 0.0) AS score
        FROM semantic_search
        FULL OUTER JOIN keyword_search ON semantic_search.id = keyword_search.id
        ORDER BY score DESC
        LIMIT 5
    )";

    std::string query = "growling bear";
    auto query_embedding = embed({query}, "search_query")[0];
    double k = 60;
    pqxx::result result = tx.exec(sql, pqxx::params{query, pgvector::Vector(query_embedding), k});
    for (const auto& row : result) {
        std::cout << "document: " << row[0].as<std::string>() << ", RRF score: " << row[1].as<double>() << std::endl;
    }

    return 0;
}