This repository was archived by the owner on Jun 11, 2025. It is now read-only.

Commit 2efcda7

[firebase-release] Updated Firebase Import to 2.2.0

1 parent 290f25b

2 files changed

Lines changed: 293 additions & 1 deletion

File tree

bin/firebase-import.js

Lines changed: 292 additions & 0 deletions
@@ -0,0 +1,292 @@
#!/usr/bin/env node
var firebase = require('firebase'),
    optimist = require('optimist'),
    ProgressBar = require('progress'),
    assert = require('assert'),
    path = require('path'),
    fs = require('fs'),
    JSONStream = require('JSONStream'),
    util = require('util'),
    admin = require('firebase-admin');

// We try to write data in ~1MB chunks (in reality this often ends up being much smaller, due to the JSON structure).
var CHUNK_SIZE = 1024*1024;

// Keep ~50 writes outstanding at a time (this increases throughput, so we're not delayed by server round-trips).
var OUTSTANDING_WRITE_COUNT = 50;
var argv = optimist
    .usage('Usage: $0')

    .demand('database_url')
    .describe('database_url', 'Firebase database URL (e.g. https://databaseName.firebaseio.com).')
    .alias('d', 'database_url')

    .demand('path')
    .describe('path', 'Database path (e.g. /products).')
    .alias('p', 'path')

    .demand('json')
    .describe('json', 'The JSON file to import.')
    .alias('j', 'json')

    .boolean('merge')
    .describe('merge', 'Write the top-level children without overwriting the whole parent.')
    .alias('m', 'merge')

    .boolean('force')
    .describe('force', 'Don\'t prompt before overwriting data.')

    .describe('service_account', 'Path to a JSON file with your service account credentials.')
    .alias('s', 'service_account')

    .argv;
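// For reference, a hypothetical invocation using the options defined above
// (the binary name, database URL, and file paths are placeholder assumptions,
// not part of this commit):
//
//   firebase-import --database_url https://example-db.firebaseio.com \
//                   --path /products --json products.json \
//                   --service_account ./service-account.json --merge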
function main() {
    admin.initializeApp({
        credential: admin.credential.cert(argv.service_account),
        databaseURL: argv.database_url
    });

    var db = admin.database();
    var ref = db.ref(argv.path);

    // Give up if we can't reach the database within 10 seconds.
    var connFailTimeout = setTimeout(function() {
        console.log('Failed to connect to Firebase.');
        process.exit();
    }, 10000);

    function ready() {
        clearTimeout(connFailTimeout);
        promptToContinue(ref, function() { start(ref); });
    }

    // Wait until the client reports a live connection before starting.
    var connFunc = db.ref('.info/connected').on('value', function(s) {
        if (s.val() === true) {
            db.ref('.info/connected').off('value', connFunc);
            ready();
        }
    });
}
function promptToContinue(ref, next) {
    if (argv.force) {
        next();
    } else {
        if (argv.merge) {
            console.log('Each top-level child in ' + argv.json + ' will be written under ' + ref.toString() + '. If a child already exists, it will be overwritten.');
        } else {
            console.log('All data at ' + ref.toString() + ' will be overwritten.');
        }
        console.log('Press <enter> to proceed, Ctrl-C to abort.');
        process.stdin.resume();
        process.stdin.once('data', next);
    }
}
function readFirstNonWhitespaceChar(file, callback) {
    var firstChar;
    var rs = fs.createReadStream(file);
    rs.on('data', function(chunk) {
        var s = chunk.toString().trim();
        // Only capture the first non-whitespace character, then stop reading.
        if (s !== "" && firstChar === undefined) {
            firstChar = s[0];
            rs.close();
        }
    })
    .on('error', callback)
    .on('close', function() {
        return callback(null, firstChar);
    });
}
function getJsonFromFile(file, callback) {
    readFirstNonWhitespaceChar(file, function(err, firstChar) {
        if (err) return callback(err);
        var json;
        if (firstChar === "[" || firstChar === "{") {
            // Stream-parse arrays and objects so large files aren't held in memory as raw text.
            var jsonStream;
            var onFunc;
            if (firstChar === "[") {
                json = [];
                jsonStream = JSONStream.parse("*");
                onFunc = function(r) {
                    json.push(r);
                };
            } else {
                json = {};
                jsonStream = JSONStream.parse("$*");
                onFunc = function(r) {
                    json[r.key] = r.value;
                };
            }
            fs.createReadStream(file)
                .pipe(jsonStream)
                .on('data', onFunc)
                .on('error', callback)
                .on('close', function() {
                    return callback(null, json);
                });
        } else {
            // Not an object or array (e.g. a bare primitive); just require() it.
            json = require(file);
            return callback(null, json);
        }
    });
}
function start(ref) {
    var file = path.resolve(argv.json);
    console.log('Reading ' + file + '... (may take a minute)');

    getJsonFromFile(file, function(err, json) {
        if (err) throw err;
        var clearFirst = true, splitTopLevel = false;
        if (argv.merge) {
            clearFirst = false;
            // Need to split into chunks at the top level to ensure we don't overwrite the parent.
            splitTopLevel = true;
        }

        console.log('Preparing JSON for import... (may take a minute)');
        var chunks = createChunks(ref, json, splitTopLevel);

        if (clearFirst) {
            ref.remove(function(error) {
                if (error) throw error;
                uploadChunks(chunks);
            });
        } else {
            uploadChunks(chunks);
        }
    });
}
function uploadChunks(chunks) {
    var uploader = new ChunkUploader(chunks);
    uploader.go(function() {
        console.log('\nImport completed.');
        process.exit();
    });
}
function createChunks(ref, json, forceSplit) {
    var chunkRes = chunkInternal(ref, json, forceSplit);
    if (!chunkRes.chunks) {
        // Small enough to write in a single set().
        return [{ref: ref, json: json}];
    } else {
        return chunkRes.chunks;
    }
}
function chunkInternal(ref, json, forceSplit) {
    var size = 0;
    var priority = null;
    var jsonIsObject = json !== null && typeof json === 'object';
    if (jsonIsObject) {
        size += 2; // {}
    }

    if (jsonIsObject && ('.priority' in json)) {
        size += 12; // ".priority":
        priority = json['.priority'];
        size += json['.priority'].toString().length;
    }

    var value = json;
    if (jsonIsObject && ('.value' in json)) {
        size += 9; // ".value":
        value = json['.value'];
    }

    if (value === null || typeof value !== 'object') {
        // It's a leaf; it can't be chunked.
        size += JSON.stringify(value).length;
        return { chunks: null, size: size };
    } else {
        // It's a node with children.
        var chunks = [];
        var splitUp = false;
        for(var key in json) {
            if (key !== '.priority') {
                size += key.length + 3;

                var chunkRes = chunkInternal(ref.child(key), json[key]);
                size += chunkRes.size;

                if (chunkRes.chunks) {
                    for(var i = 0; i < chunkRes.chunks.length; i++) {
                        chunks.push(chunkRes.chunks[i]);
                    }
                    // One of the children had to be broken into chunks. We have to break all of them.
                    splitUp = true;
                } else {
                    chunks.push({ref: ref.child(key), json: json[key]});
                }
            }
        }

        // Add priority last, since it must be written after at least one child exists.
        if (priority !== null) {
            chunks.push({ref: ref, priority: priority});
        }

        if (forceSplit || splitUp || size >= CHUNK_SIZE) {
            return { chunks: chunks, size: size };
        } else {
            return { chunks: null, size: size };
        }
    }
}
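// Illustration (an assumption for clarity, not part of this commit): given a
// tree like { users: <several MB>, meta: <1KB> }, chunkInternal splits "users"
// into per-child chunks; because one child split, splitUp forces "meta" to
// become its own chunk too, so every write targets a child ref and no single
// set() can clobber its siblings.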
function ChunkUploader(chunks) {
    this.next = 0;
    this.chunks = chunks;
    if (process.stdout.isTTY) {
        this.bar = new ProgressBar('Importing [:bar] :percent (:current/:total)', { width: 50, total: chunks.length, incomplete: ' ' });
    } else {
        console.log('Importing... (may take a while)');
    }
}
ChunkUploader.prototype.go = function(onComplete) {
    this.onComplete = onComplete;

    // Kick off the initial batch of concurrent writes; each completion handler starts the next one.
    for(var i = 0; i < OUTSTANDING_WRITE_COUNT && i < this.chunks.length; i++) {
        this.uploadNext();
    }
};
ChunkUploader.prototype.uploadNext = function() {
    var chunkNum = this.next, chunk = this.chunks[chunkNum];
    assert(chunkNum < this.chunks.length);
    this.next++;

    var self = this;
    var onComplete = function(error) {
        if (error) {
            console.log('Error uploading to ' + chunk.ref.toString() + ': ' + util.inspect(chunk.json));
            console.error(error);
            throw error;
        }

        if (process.stdout.isTTY && self.bar) {
            self.bar.tick();
        }

        if (chunkNum === self.chunks.length - 1) {
            self.onComplete();
        } else {
            // Upload the next chunk.
            assert(self.next === self.chunks.length || self.next === chunkNum + OUTSTANDING_WRITE_COUNT);
            if (self.next < self.chunks.length)
                self.uploadNext();
        }
    };

    if ('json' in chunk) {
        chunk.ref.set(chunk.json, onComplete);
    } else {
        assert('priority' in chunk);
        chunk.ref.setPriority(chunk.priority, onComplete);
    }
};
main();
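
For context, here is a minimal sketch (not part of this commit) of the two write strategies the script chooses between, using the same firebase-admin API as above; the database URL, credential path, and data are hypothetical placeholders:

var admin = require('firebase-admin');

admin.initializeApp({
    credential: admin.credential.cert('./service-account.json'), // hypothetical path
    databaseURL: 'https://example-db.firebaseio.com'             // hypothetical URL
});

var ref = admin.database().ref('/products');
var json = { widgets: { a: 1 }, gadgets: { b: 2 } };

// Without --merge: the entire subtree at /products is replaced by one set()
// (the script additionally calls ref.remove() first).
ref.set(json, function(error) {
    if (error) throw error;
});

// With --merge: splitTopLevel forces one chunk per top-level child, so each
// child is written separately and existing siblings under /products that
// don't appear in the JSON are left untouched.
Object.keys(json).forEach(function(key) {
    ref.child(key).set(json[key], function(error) {
        if (error) throw error;
    });
});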

package.json

Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,7 @@
 {
   "name": "firebase-import",
   "description": "npm config for Firebase Import",
-  "version": "2.1.0",
+  "version": "2.2.0",
   "dependencies": {
     "JSONStream": "^1.2.1",
     "firebase": "^3.4.0",
