Skip to content
This repository was archived by the owner on Jun 11, 2025. It is now read-only.

Commit f388785

Browse files
committed
[firebase-release] Updated Firebase Import to 2.1.0
1 parent 83b4d5f commit f388785

2 files changed

Lines changed: 291 additions & 1 deletion

File tree

bin/firebase-import.js

Lines changed: 290 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,290 @@
1+
#!/usr/bin/env node
2+
var firebase = require('firebase'),
3+
optimist = require('optimist'),
4+
ProgressBar = require('progress'),
5+
assert = require('assert'),
6+
path = require('path'),
7+
fs = require('fs'),
8+
JSONStream = require('JSONStream'),
9+
util = require('util');
10+
11+
// We try to write data in ~1MB chunks (in reality this often ends up being
// much smaller, due to the JSON structure).
var CHUNK_SIZE = 1024*1024;

// Keep ~50 writes outstanding at a time (this increases throughput, so we're
// not delayed by server round-trips).
var OUTSTANDING_WRITE_COUNT = 50;

// Command-line options.
// FIX: use the `optimist` binding already required at the top of the file
// instead of calling require('optimist') a second time.
var argv = optimist
    .usage('Usage: $0')

    .demand('database_url')
    .describe('database_url', 'Firebase database URL (e.g. https://databaseName.firebaseio.com).')
    .alias('d', 'database_url')

    .demand('path')
    .describe('path', 'Database path (e.g. /products).')
    .alias('p', 'path')

    .demand('json')
    .describe('json', 'The JSON file to import.')
    .alias('j', 'json')

    .boolean('merge')
    .describe('merge', 'Write the top-level children without overwriting the whole parent.')
    .alias('m', 'merge')

    .boolean('force')
    .describe('force', 'Don\'t prompt before overwriting data.')

    .describe('service_account', 'Path to a JSON file with your service account credentials.')
    .alias('s', 'service_account')

    .argv;
43+
44+
/**
 * Entry point: connects to Firebase, waits for the connection to come up,
 * then prompts the user (unless --force) and starts the import.
 */
function main() {
  firebase.initializeApp({
    databaseURL: argv.database_url,
    serviceAccount: argv.service_account,
  });
  var database = firebase.database();
  var targetRef = database.ref(argv.path);

  // Bail out if we can't reach the server within 10 seconds.
  var connectTimer = setTimeout(function() {
    console.log('Failed to connect to Firebase.');
    process.exit();
  }, 10000);

  function ready() {
    clearTimeout(connectTimer);
    promptToContinue(targetRef, function() { start(targetRef); });
  }

  // Watch the special .info/connected flag; once it flips to true, detach
  // the listener and begin.
  var connectedCb = database.ref('.info/connected').on('value', function(snap) {
    if (snap.val() === true) {
      database.ref('.info/connected').off('value', connectedCb);
      ready();
    }
  });
}
69+
70+
/**
 * Asks the user to confirm a destructive import, unless --force was given.
 * @param {Object} ref - Firebase reference being written to.
 * @param {Function} next - Invoked once the user confirms (immediately with --force).
 */
function promptToContinue(ref, next) {
  if (argv.force) {
    return next();
  }

  var warning = argv.merge
      ? 'Each top-level child in ' + argv.json + ' will be written under ' + ref.toString() + '. If a child already exists, it will be overwritten.'
      : 'All data at ' + ref.toString() + ' will be overwritten.';
  console.log(warning);
  console.log('Press <enter> to proceed, Ctrl-C to abort.');
  process.stdin.resume();
  process.stdin.once('data', next);
}
84+
85+
/**
 * Streams `file` and reports its first non-whitespace character, so callers
 * can detect whether the JSON document is an object, array, or scalar without
 * reading the whole file.
 *
 * @param {string} file - Path of the file to inspect.
 * @param {Function} callback - callback(err, firstChar); firstChar is
 *     undefined for an empty / all-whitespace file.
 */
function readFirstNonWhitespaceChar(file, callback) {
  var firstChar;
  var finished = false;
  var rs = fs.createReadStream(file);

  // FIX: invoke the callback at most once. The original attached `callback`
  // to 'error' and also called it from 'close', and a failed stream emits
  // both events -- calling back twice.
  function finish(err) {
    if (finished) return;
    finished = true;
    callback(err, firstChar);
  }

  rs.on('data', function(chunk) {
    var s = chunk.toString().trim();
    if (s !== "") {
      // Found a printable character; remember it and stop reading.
      // (FIX: assignment moved inside the guard so whitespace-only chunks
      // no longer clobber firstChar with undefined.)
      firstChar = s[0];
      rs.close();
    }
  })
  .on('error', finish)
  .on('close', function() { finish(null); });
}
100+
101+
/**
 * Loads JSON from `file`. Objects and arrays are parsed with a streaming
 * parser (JSONStream) so very large files don't exhaust memory; any other
 * top-level value falls back to require().
 *
 * @param {string} file - Absolute path to the JSON file.
 * @param {Function} callback - callback(err, json).
 */
function getJsonFromFile(file, callback) {
  readFirstNonWhitespaceChar(file, function(err, firstChar) {
    var isArray = firstChar === "[";
    var isObject = firstChar === "{";

    if (!isArray && !isObject) {
      // Scalar JSON (number, string, ...) -- small enough to load directly.
      return callback(null, require(file));
    }

    var json = isArray ? [] : {};
    // "*" emits each array element; "$*" emits {key, value} pairs.
    var jsonStream = isArray ? JSONStream.parse("*") : JSONStream.parse("$*");
    var accumulate = isArray
        ? function(r) { json.push(r); }
        : function(r) { json[r.key] = r.value; };

    fs.createReadStream(file)
      .pipe(jsonStream)
      .on('data', accumulate)
      .on('error', callback)
      .on('close', function() {
        return callback(null, json);
      });
  });
}
133+
134+
/**
 * Reads the JSON file, chunks it, and kicks off the upload.
 * @param {Object} ref - Firebase reference the data is written under.
 */
function start(ref) {
  var file = path.resolve(argv.json);
  console.log('Reading ' + file + '... (may take a minute)');

  getJsonFromFile(file, function(err, json) {
    // With --merge we keep the existing parent and split at the top level so
    // sibling children aren't clobbered; otherwise we clear the path first.
    var splitTopLevel = Boolean(argv.merge);
    var clearFirst = !argv.merge;

    console.log('Preparing JSON for import... (may take a minute)');
    var chunks = createChunks(ref, json, splitTopLevel);

    if (!clearFirst) {
      return uploadChunks(chunks);
    }
    ref.remove(function(error) {
      if (error) throw(error);
      uploadChunks(chunks);
    });
  });
}
159+
160+
/**
 * Uploads every chunk and exits the process when the import has finished.
 * @param {Array} chunks - Chunk descriptors produced by createChunks().
 */
function uploadChunks(chunks) {
  new ChunkUploader(chunks).go(function() {
    console.log('\nImport completed.');
    process.exit();
  });
}
167+
168+
/**
 * Splits `json` into uploadable chunks rooted at `ref`.
 * @param {Object} ref - Reference corresponding to `json`.
 * @param {*} json - Data to import.
 * @param {boolean} forceSplit - Always split at the top level (used by --merge).
 * @returns {Array} Chunk descriptors ({ref, json} or {ref, priority}).
 */
function createChunks(ref, json, forceSplit) {
  var result = chunkInternal(ref, json, forceSplit);
  // A null chunk list means everything fits in a single write.
  return result.chunks ? result.chunks : [{ref: ref, json: json}];
}
176+
177+
/**
 * Recursively measures the serialized size of `json` and, when it is large
 * (>= CHUNK_SIZE) or a split is forced, breaks it into per-child chunks.
 *
 * @param {Object} ref - Reference corresponding to `json`.
 * @param {*} json - Value being measured / split.
 * @param {boolean} [forceSplit] - Split at this level regardless of size.
 *     (Deliberately NOT forwarded to recursive calls: forcing applies only
 *     to the level it was requested at.)
 * @returns {{chunks: ?Array, size: number}} chunks is null when the value
 *     fits in one write.
 */
function chunkInternal(ref, json, forceSplit) {
  var size = 0;
  var priority = null;
  var isObject = json !== null && typeof json === 'object';
  if (isObject) {
    size += 2; // {}
  }

  // Account for a ".priority" pseudo-child, if present.
  if (isObject && ('.priority' in json)) {
    size += 12; // ".priority":
    priority = json['.priority'];
    size += json['.priority'].toString().length;
  }

  // ".value" wraps the real payload when a priority is attached to a leaf.
  var value = json;
  if (isObject && ('.value' in json)) {
    size += 9; // ".value":
    value = json['.value'];
  }

  if (value === null || typeof value !== 'object') {
    // Leaf node: it can't be chunked any further.
    size += JSON.stringify(value).length;
    return { chunks: null, size: size };
  }

  // Interior node: measure each child, collecting candidate chunks.
  var chunks = [];
  var splitUp = false;
  for (var key in json) {
    if (key === '.priority') continue;
    size += key.length + 3;

    var childRes = chunkInternal(ref.child(key), json[key]);
    size += childRes.size;

    if (childRes.chunks) {
      // A child had to be broken into chunks, so every sibling must be
      // written separately too.
      splitUp = true;
      for (var i = 0; i < childRes.chunks.length; i++) {
        chunks.push(childRes.chunks[i]);
      }
    } else {
      chunks.push({ref: ref.child(key), json: json[key]});
    }
  }

  // Priority goes last: it can only be set after at least one child exists.
  if (priority !== null) {
    chunks.push({ref: ref, priority: priority});
  }

  if (forceSplit || splitUp || size >= CHUNK_SIZE) {
    return { chunks: chunks, size: size };
  }
  return { chunks: null, size: size };
}
236+
237+
/**
 * Uploads chunks with a bounded number of outstanding writes, showing a
 * progress bar when attached to a terminal.
 * @constructor
 * @param {Array} chunks - Chunk descriptors to upload.
 */
function ChunkUploader(chunks) {
  this.next = 0;
  this.chunks = chunks;
  if (!process.stdout.isTTY) {
    console.log('Importing... (may take a while)');
  } else {
    this.bar = new ProgressBar('Importing [:bar] :percent (:current/:total)', { width: 50, total: chunks.length, incomplete: ' ' });
  }
}
246+
247+
/**
 * Starts the upload, keeping up to OUTSTANDING_WRITE_COUNT writes in flight.
 * @param {Function} onComplete - Called once every chunk has been written.
 */
ChunkUploader.prototype.go = function(onComplete) {
  this.onComplete = onComplete;

  var initial = Math.min(OUTSTANDING_WRITE_COUNT, this.chunks.length);
  for (var i = 0; i < initial; i++) {
    this.uploadNext();
  }
};
254+
255+
/**
 * Writes the next chunk and, on completion, either schedules another write
 * (keeping the outstanding-write window full) or, for the final chunk,
 * reports overall completion.
 *
 * FIX vs. original: the error log referenced out-of-scope variables `i` and
 * `json`, which would have thrown a ReferenceError whenever a write failed;
 * it now logs the failing chunk itself.
 */
ChunkUploader.prototype.uploadNext = function() {
  var chunkNum = this.next, chunk = this.chunks[chunkNum];
  assert(chunkNum < this.chunks.length);
  this.next++;

  var self = this;
  var onComplete = function(error) {
    if (error) {
      // Was `self.chunks[i]` / `json` -- neither exists in this scope.
      console.log('Error uploading to ' + chunk.ref.toString() + ': ' + util.inspect(chunk.json));
      console.error(error);
      throw error;
    }

    if (process.stdout.isTTY && self.bar) {
      self.bar.tick();
    }

    if (chunkNum === self.chunks.length - 1) {
      self.onComplete();
    } else {
      // Upload the next chunk.
      assert(self.next === self.chunks.length || self.next === chunkNum + OUTSTANDING_WRITE_COUNT);
      if (self.next < self.chunks.length)
        self.uploadNext();
    }
  };

  if ('json' in chunk) {
    chunk.ref.set(chunk.json, onComplete);
  } else {
    assert('priority' in chunk);
    chunk.ref.setPriority(chunk.priority, onComplete);
  }
};
289+
290+
// Entry point: kick off the import.
main();

package.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
{
22
"name": "firebase-import",
33
"description": "npm config for Firebase Import",
4-
"version": "2.0.0",
4+
"version": "2.1.0",
55
"dependencies": {
66
"JSONStream": "^1.2.1",
77
"firebase": "^3.4.0",

0 commit comments

Comments
 (0)