Question answering datasets provide an out-of-the-box solution for a machine-learning-powered search engine. AnnDB uses state-of-the-art machine learning models to return highly relevant search results. Using the API, you can build a dataset of facts and later query it with natural-language questions.
Create a Dataset
Create a dataset with the Question Answering type, which tells AnnDB to encode your facts and queries as vectors.
To manage data in your dataset, create a corresponding dataset instance using the client.
dataset = client.text('<DATASET_NAME>')
dataset = client.text("<DATASET_NAME>")
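The snippets above assume an API client has already been constructed. Below is a minimal Python sketch; the constructor name and authentication parameters are assumptions and may differ in your version of the client library.
import anndb_api

# Hypothetical client construction; verify the constructor and
# authentication parameters against your client library version.
client = anndb_api.Client(api_key='<API_KEY>')
dataset = client.text('<DATASET_NAME>')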
Search
result = dataset.search('query', 10)
for item in result:
    print(item.id, item.metadata)
result = dataset.search("query", 10)
result.each do |item|
  puts item.id, item.metadata
end
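Because this is a question answering dataset, the query text can be a plain natural-language question rather than keywords. A short Python example (the question below is purely illustrative):
# Ask a natural-language question; the second argument is the maximum
# number of results to return.
result = dataset.search('How many people live in London?', 5)
for item in result:
    print(item.id, item.metadata)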
Insert
# Single item
id = dataset.insert(
    'London has 8.9 million inhabitants.',
    metadata={'key': 'value'}
)
# Batch
result = dataset.insert_batch([
    anndb_api.TextItem(
        None,  # id is assigned by AnnDB on insert
        'London has 8.9 million inhabitants.',
        {'key': 'value'}
    ),
    ...
])
for r in result:
    print(r.id, r.error)
id = dataset.insert(
  "London has 8.9 million inhabitants.",
  metadata={ "key": "value" }
)
result = dataset.insert_batch([
  {
    text: "London has 8.9 million inhabitants.",
    metadata: { "key": "value" }
  },
  ...
])
result.each { |r|
  puts r[:id], r[:error]
}
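The batch call returns one result per submitted item. A small Python sketch for collecting failures, assuming the error field is empty on success (verify this against your client version):
# Keep only the items that failed to insert and report their errors.
failed = [r for r in result if r.error]
for r in failed:
    print('insert failed:', r.error)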
Update
# Single item
id = dataset.update(
    id,
    'London has 9 million inhabitants.',
    metadata={'key': 'value'}
)
# Batch
result = dataset.update_batch([
    anndb_api.TextItem(
        id,  # id of the existing item to update
        'London has 9 million inhabitants.',
        {'key': 'value'}
    ),
    ...
])
for r in result:
    print(r.id, r.error)
id = dataset.update(
  id,
  "London has 9 million inhabitants.",
  metadata={ "key": "value" }
)
result = dataset.update_batch([
  {
    id: id,
    text: "London has 9 million inhabitants.",
    metadata: { "key": "value" }
  },
  ...
])
result.each { |r|
  puts r[:id], r[:error]
}
Delete
# Single item
dataset.delete(id)
# Batch
result = dataset.delete_batch([id, ...])
for r in result:
    print(r.id, r.error)
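Putting the calls above together, a hypothetical lifecycle for a single fact in Python, using only the methods shown in this section:
# Insert a fact, revise it, then remove it. AnnDB assigns the id on insert.
fact_id = dataset.insert(
    'London has 8.9 million inhabitants.',
    metadata={'key': 'value'}
)
dataset.update(
    fact_id,
    'London has 9 million inhabitants.',
    metadata={'key': 'value'}
)
dataset.delete(fact_id)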