Sometimes you may need to export your Algolia index so you can use the data in some other way. To do this, you can use the `browse` method with one of our API clients.
We currently don't provide a way to export index data directly from the Algolia dashboard, because indices can be quite large.
Exporting the index
The `browse` method lets you retrieve records beyond the default 1,000-record limit of the `search` method. Pass an empty query to indicate that you want every record. Once you have them, you can save them to a file.
```php
// Load via Composer's autoloader
require __DIR__ . '/vendor/autoload.php';
// If you aren't using Composer:
// require_once 'path/to/algoliasearch.php';

$client = Algolia\AlgoliaSearch\SearchClient::create('YourApplicationID', 'YourAdminAPIKey');
$index = $client->initIndex('your_index_name');

// Iterate over every record in the index
$objects = [];
foreach ($index->browseObjects() as $hit) {
    $objects[] = $hit;
}

// Write all records to a single JSON file
file_put_contents('your_filename', json_encode($objects));
```
```ruby
require 'json'
require 'algolia'

client = Algolia::Search::Client.create('YourApplicationID', 'YourAdminAPIKey')
index = client.init_index('your_index_name')

# Iterate over every record in the index
hits = []
index.browse_objects do |hit|
  hits.push(hit)
end

# Write all records to a single JSON file
File.write('your_filename', hits.to_json)
```
```javascript
// Node.js only (uses the fs module)
const fs = require('fs');
const algoliasearch = require('algoliasearch');

const client = algoliasearch('YourApplicationID', 'YourAdminAPIKey');
const index = client.initIndex('your_index_name');

let hits = [];

index
  .browseObjects({
    // Collect each batch of records as it arrives
    batch: (objects) => (hits = hits.concat(objects)),
  })
  .then(() => {
    console.log('Finished! We got %d hits', hits.length);
    // Write all records to a single JSON file
    fs.writeFile(
      'browse.json',
      JSON.stringify(hits, null, 2),
      'utf-8',
      (err) => {
        if (err) throw err;
        console.log('Your index was successfully exported!');
      }
    );
  });
```
```python
import json

from algoliasearch.search_client import SearchClient

client = SearchClient.create('YourApplicationID', 'YourAdminAPIKey')
index = client.init_index('your_index_name')

# Iterate over every record in the index
hits = []
for hit in index.browse_objects({'query': ''}):
    hits.append(hit)

# Write all records to a single JSON file
with open('your_filename', 'w') as f:
    json.dump(hits, f)
```
```swift
index.browseObjects { result in
  if case .success(let responses) = result {
    // Flatten the paginated responses into a single list of records
    let records: [JSON] = responses.flatMap { $0.hits.map(\.object) }
    do {
      // Encode the records and write them to a file
      let recordsData = try JSONEncoder().encode(records)
      try recordsData.write(to: URL(fileURLWithPath: "your_filename.json"), options: .atomic)
    } catch let error {
      print(error)
    }
  }
}
```
```csharp
SearchClient client = new SearchClient("YourApplicationID", "YourAdminAPIKey");
SearchIndex index = client.InitIndex("your_index_name");

// Iterate over every record in the index
var result = index.Browse<JObject>(new BrowseIndexQuery());
JArray records = new JArray();
foreach (var hit in result)
{
    records.Add(hit);
}

// Write all records to a single JSON file
using (StreamWriter file = File.CreateText(@"your_filename.json"))
{
    JsonSerializer serializer = new JsonSerializer();
    serializer.Serialize(file, records);
}
```
```java
SearchClient client =
    DefaultSearchClient.create("YourApplicationID", "YourAdminAPIKey");
SearchIndex<Actor> index = client.initIndex("actors", Actor.class);

// Iterate over every record in the index
IndexIterable<Actor> results = index.browseObjects(new BrowseIndexQuery());
List<Actor> records = new ArrayList<>();
results.forEach(records::add);

// Write all records to a single JSON file
ObjectMapper objectMapper = Defaults.getObjectMapper();
objectMapper.writeValue(new File("your_filename.json"), records);
```
```go
package main

import (
	"encoding/json"
	"io"
	"io/ioutil"
	"os"

	"github.com/algolia/algoliasearch-client-go/v3/algolia/search"
)

type Actor struct {
	Name            string `json:"name"`
	Rating          int    `json:"rating"`
	ImagePath       string `json:"image_path"`
	AlternativeName string `json:"alternative_name"`
	ObjectID        string `json:"objectID"`
}

func main() {
	client := search.NewClient("YourApplicationID", "YourAdminAPIKey")
	index := client.InitIndex("actors")

	it, err := index.BrowseObjects()
	if err != nil {
		// error handling
	}

	// Iterate until the iterator is exhausted (io.EOF)
	var actors []Actor
	var actor Actor
	for {
		_, err = it.Next(&actor)
		if err != nil {
			if err == io.EOF {
				break
			}
			// error handling
		}
		actors = append(actors, actor)
	}

	// Write all records to a single JSON file
	data, err := json.Marshal(actors)
	if err != nil {
		// error handling
	}
	err = ioutil.WriteFile("actors_downloaded.json", data, os.ModePerm)
	if err != nil {
		// error handling
	}
}
```
```scala
import java.io.{File, PrintWriter}

import algolia.AlgoliaDsl._
import algolia.objects.Query
import algolia.responses.ObjectID
import algolia.{AlgoliaClient, AlgoliaSyncHelper}
import org.json4s.native.Serialization.write
import org.json4s.{DefaultFormats, Formats}

import scala.concurrent.{ExecutionContext, ExecutionContextExecutor}
import scala.concurrent.duration._

case class MyCaseClass(objectID: String /* , ... */) extends ObjectID

object Main {
  def main(args: Array[String]): Unit = {
    implicit val ec: ExecutionContextExecutor = ExecutionContext.global
    implicit val awaitDuration: FiniteDuration = 10 seconds
    implicit val formats: Formats = DefaultFormats

    val client = new AlgoliaClient("YourApplicationID", "YourAdminAPIKey")
    val indexName = "your_index_name"

    // Browse the whole index and flatten the paginated results
    val helper = AlgoliaSyncHelper(client)
    val records: Seq[MyCaseClass] = helper.browse[MyCaseClass](indexName, Query(query = Some("")))
      .flatten
      .toSeq

    // Write all records to a single JSON file
    val w = new PrintWriter(new File("your_filename.json"))
    w.write(write(records))
    w.close()
  }
}
```
```kotlin
// Browse the whole index and flatten the paginated responses
val records = index.browseObjects().flatMap { response ->
    response.hits.map { it.json }
}

// Serialize the records and write them to a file
val json = Json.stringify(JsonObjectSerializer.list, records)
File("your_filename.json").writeText(json)
```
When exporting large indices, we recommend splitting the export into several files rather than building one giant JSON document, as in the sketch below.
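For example, here's a minimal sketch in Python of a chunked export, assuming the same client setup as in the example above; the 10,000-record chunk size and the `your_filename_<n>.json` naming scheme are illustrative choices, not part of the Algolia API.

```python
import json

from algoliasearch.search_client import SearchClient

client = SearchClient.create('YourApplicationID', 'YourAdminAPIKey')
index = client.init_index('your_index_name')

CHUNK_SIZE = 10_000  # illustrative; tune to your record size

def flush(chunk, chunk_index):
    # Write the current chunk to its own numbered file
    with open(f'your_filename_{chunk_index}.json', 'w') as f:
        json.dump(chunk, f)

chunk = []
chunk_index = 0

for hit in index.browse_objects({'query': ''}):
    chunk.append(hit)
    if len(chunk) == CHUNK_SIZE:
        flush(chunk, chunk_index)
        chunk = []
        chunk_index += 1

# Write any remaining records
if chunk:
    flush(chunk, chunk_index)
```

Because each chunk is written to disk as soon as it fills up, memory usage stays flat regardless of index size, and the resulting files are easier to re-import or process individually.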