Comprehensive guide for migrating from Firebase to ɳSelf
Migrating from Firebase to ɳSelf is the most complex migration due to fundamental architectural differences, but offers significant benefits:
| Challenge | Difficulty | Time Estimate |
|---|---|---|
| NoSQL → SQL schema design | High | 4-8 hours |
| Data transformation & import | High | 4-8 hours |
| Authentication migration | Medium | 2-3 hours |
| Security rules → Hasura permissions | High | 2-4 hours |
| Frontend SDK replacement | High | 4-8 hours |
| Cloud Functions conversion | Medium | 2-4 hours |
Total: 16-32 hours
{
"posts": {
"post1": {
"title": "Hello",
"author": {
"id": "user1",
"name": "John",
"email": "john@example.com" // Duplicated
},
"comments": [
{ "text": "Nice!", "user": "Alice" },
{ "text": "Great!", "user": "Bob" }
]
}
}
}
CREATE TABLE users (
id UUID PRIMARY KEY,
name TEXT,
email TEXT UNIQUE
);
CREATE TABLE posts (
id UUID PRIMARY KEY,
title TEXT,
author_id UUID REFERENCES users(id)
);
CREATE TABLE comments (
id UUID PRIMARY KEY,
post_id UUID REFERENCES posts(id),
user_id UUID REFERENCES users(id),
text TEXT
);
Before you start, ensure you have:
# Install nself
curl -sSL https://install.nself.org | bash
# Install Firebase CLI
npm install -g firebase-tools
# Install PostgreSQL client
brew install postgresql # macOS
sudo apt-get install postgresql-client # Ubuntu
# Install jq for JSON processing
brew install jq
# 1. Authenticate with Firebase
firebase login
# 2. Export Firestore data
firebase firestore:export gs://[YOUR-BUCKET]/firestore-export
# 3. Export Authentication users
# From Firebase Console → Authentication → Users → Export Users
# 4. List Cloud Functions
firebase functions:list
# 5. Document all Security Rules
firebase deploy --only firestore:rules --dry-run
Estimated time: 4-8 hours
Most Important Phase: Proper schema design is critical. Take time to normalize your data structure.
Analyze your Firestore collections:
# Export and analyze
firebase firestore:export gs://[YOUR-BUCKET]/export
gsutil -m cp -r gs://[YOUR-BUCKET]/export .
# Analyze structure
cat export/all_namespaces/all_kinds/*.export_metadata | jq
users/
- uid (string)
- email (string)
- displayName (string)
posts/
- postId (string)
- title (string)
- authorId (string)
- authorName (string) ← Denormalized
- tags (array)
- comments/ (subcollection)
- commentId (string)
- text (string)
- userId (string)
- userName (string) ← Denormalized
CREATE TABLE users (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
firebase_uid TEXT UNIQUE, -- For migration mapping
email TEXT UNIQUE NOT NULL,
display_name TEXT,
created_at TIMESTAMPTZ DEFAULT NOW()
);
CREATE TABLE posts (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
firebase_id TEXT UNIQUE,
title TEXT NOT NULL,
author_id UUID REFERENCES users(id) ON DELETE CASCADE,
published BOOLEAN DEFAULT false,
created_at TIMESTAMPTZ DEFAULT NOW()
);
-- Normalize array → table
CREATE TABLE tags (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
name TEXT UNIQUE NOT NULL
);
CREATE TABLE post_tags (
post_id UUID REFERENCES posts(id) ON DELETE CASCADE,
tag_id UUID REFERENCES tags(id) ON DELETE CASCADE,
PRIMARY KEY (post_id, tag_id)
);
CREATE TABLE comments (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
firebase_id TEXT UNIQUE,
post_id UUID REFERENCES posts(id) ON DELETE CASCADE,
user_id UUID REFERENCES users(id) ON DELETE SET NULL,
text TEXT NOT NULL,
created_at TIMESTAMPTZ DEFAULT NOW()
);
-- Indexes for performance
CREATE INDEX idx_posts_author ON posts(author_id);
CREATE INDEX idx_comments_post ON comments(post_id);
Estimated time: 4-8 hours
// firebase-to-sql.js
//
// Exports Firestore data (users, posts, per-post comments subcollections)
// into JSON files shaped for the normalized PostgreSQL schema. Original
// Firebase document IDs are kept (firebase_uid / firebase_id) so the SQL
// import step can map them onto the new UUID primary keys.
const admin = require('firebase-admin')
const fs = require('fs')

admin.initializeApp({
  credential: admin.credential.cert(require('./serviceAccountKey.json'))
})

const db = admin.firestore()

/**
 * Reads every document from the `users` and `posts` collections (plus each
 * post's `comments` subcollection) and writes users.json, posts.json,
 * comments.json, tags.json and post_tags.json to the working directory.
 *
 * Tags are flattened from the Firestore array field into a unique tag list
 * plus a post_tags junction list, matching the normalized schema.
 *
 * @returns {Promise<void>} resolves when all files are written; on failure
 *   the error is logged and the process exit code is set to 1 (previously a
 *   failure here became an unhandled promise rejection at the call site).
 */
async function exportToSQL() {
  const users = []
  const posts = []
  const comments = []
  const tags = new Set()
  const postTags = []

  try {
    // Export users
    const usersSnapshot = await db.collection('users').get()
    usersSnapshot.forEach((doc) => {
      const data = doc.data()
      users.push({
        firebase_uid: doc.id,
        email: data.email,
        display_name: data.displayName || null,
        // Firestore Timestamp -> JS Date; fall back to "now" when absent
        created_at: data.createdAt?.toDate() ?? new Date()
      })
    })

    // Export posts (sequentially, so a failure points at a specific post)
    const postsSnapshot = await db.collection('posts').get()
    for (const doc of postsSnapshot.docs) {
      const data = doc.data()
      posts.push({
        firebase_id: doc.id,
        title: data.title,
        content: data.content,
        author_firebase_uid: data.authorId,
        published: data.published || false,
        created_at: data.createdAt?.toDate() ?? new Date()
      })

      // Extract tags (Firestore array -> tags table + post_tags junction)
      if (Array.isArray(data.tags)) {
        for (const tag of data.tags) {
          tags.add(tag)
          postTags.push({
            post_firebase_id: doc.id,
            tag_name: tag
          })
        }
      }

      // Export this post's comments subcollection
      const commentsSnapshot = await doc.ref.collection('comments').get()
      commentsSnapshot.forEach((commentDoc) => {
        const commentData = commentDoc.data()
        comments.push({
          firebase_id: commentDoc.id,
          post_firebase_id: doc.id,
          user_firebase_uid: commentData.userId,
          text: commentData.text,
          created_at: commentData.createdAt?.toDate() ?? new Date()
        })
      })
    }

    // Write to JSON files
    fs.writeFileSync('users.json', JSON.stringify(users, null, 2))
    fs.writeFileSync('posts.json', JSON.stringify(posts, null, 2))
    fs.writeFileSync('comments.json', JSON.stringify(comments, null, 2))
    fs.writeFileSync('tags.json', JSON.stringify([...tags], null, 2))
    fs.writeFileSync('post_tags.json', JSON.stringify(postTags, null, 2))

    console.log(`✅ Export complete!
Users: ${users.length}
Posts: ${posts.length}
Comments: ${comments.length}
Tags: ${tags.size}`)
  } catch (err) {
    // Report and fail the process explicitly instead of leaving an
    // unhandled rejection behind.
    console.error('❌ Export failed:', err)
    process.exitCode = 1
  }
}
exportToSQL()
node firebase-to-sql.js
mkdir firebase-migration && cd firebase-migration
nself init --wizard
# Configure .env
nano .env
PROJECT_NAME=firebase-migration
ENV=dev
BASE_DOMAIN=localhost
POSTGRES_DB=firebase_migration
POSTGRES_PASSWORD=your-secure-password
HASURA_GRAPHQL_ADMIN_SECRET=your-admin-secret
HASURA_GRAPHQL_JWT_SECRET={"type":"HS256","key":"your-jwt-secret-min-32-chars"}
MINIO_ENABLED=true
REDIS_ENABLED=true
FUNCTIONS_ENABLED=true
nself build
nself start
# Create schema
nself db shell < schema.sql
# Import data (simplified - use proper import script)
nself db import import.sql
# Verify
nself db shell
SELECT COUNT(*) FROM users;
SELECT COUNT(*) FROM posts;
SELECT COUNT(*) FROM comments;
-- Check relationships
SELECT p.title, u.display_name AS author
FROM posts p
JOIN users u ON p.author_id = u.id
LIMIT 10;
Estimated time: 2-3 hours
// export-auth-users.js
//
// Dumps every Firebase Authentication user to firebase-auth-users.json so
// they can be imported into the auth.users table.
const admin = require('firebase-admin')
const fs = require('fs')

admin.initializeApp({
  credential: admin.credential.cert(require('./serviceAccountKey.json'))
})

/**
 * Exports ALL Firebase Auth users to firebase-auth-users.json.
 *
 * Bug fix: admin.auth().listUsers() returns at most 1000 users per call, so
 * projects with more than 1000 users were silently truncated. We now follow
 * `pageToken` until the listing is exhausted.
 *
 * @returns {Promise<void>} resolves once the JSON file has been written.
 */
async function exportAuthUsers() {
  const users = []
  let pageToken

  do {
    const page = await admin.auth().listUsers(1000, pageToken)
    for (const user of page.users) {
      users.push({
        uid: user.uid,
        email: user.email,
        emailVerified: user.emailVerified,
        displayName: user.displayName,
        photoURL: user.photoURL,
        disabled: user.disabled,
        metadata: {
          creationTime: user.metadata.creationTime,
          lastSignInTime: user.metadata.lastSignInTime
        },
        providerData: user.providerData
      })
    }
    pageToken = page.pageToken // undefined once the last page is reached
  } while (pageToken)

  fs.writeFileSync('firebase-auth-users.json', JSON.stringify(users, null, 2))
  console.log(`✅ Exported ${users.length} users`)
}
exportAuthUsers()
-- Import users into auth.users table
INSERT INTO auth.users (
id, email, email_verified, display_name, avatar_url, created_at
)
SELECT
gen_random_uuid(),
email,
true,
display_name,
photo_url,
created_at
FROM users
WHERE firebase_uid IS NOT NULL;
Critical: Firebase password hashes are not portable. All users must reset passwords.
# Send password reset emails
cat > send-reset-emails.sh << 'EOF'
#!/bin/bash
AUTH_URL="http://auth.localhost/v1"
nself db shell -c "SELECT email FROM auth.users;" | tail -n +3 | head -n -2 | while read EMAIL; do
echo "Sending reset to: $EMAIL"
curl -X POST "$AUTH_URL/user/password-reset" \
-H "Content-Type: application/json" \
-d "{\"email\": \"$EMAIL\"}"
sleep 1
done
EOF
chmod +x send-reset-emails.sh
./send-reset-emails.sh
Estimated time: 2-4 hours
// firestore.rules
rules_version = '2';
service cloud.firestore {
match /databases/{database}/documents {
match /posts/{postId} {
allow read: if true; // Anyone can read
allow create: if request.auth != null;
allow update, delete: if request.auth.uid == resource.data.authorId;
}
}
}
In Hasura Console (http://api.localhost) → Data → posts → Permissions:
# User role (authenticated users)
role: user
permissions:
select:
filter: {} # Anyone can read
columns: [id, title, content, author_id, created_at]
insert:
check:
author_id: { _eq: X-Hasura-User-Id }
columns: [title, content]
update:
filter:
author_id: { _eq: X-Hasura-User-Id } # Only author
check:
author_id: { _eq: X-Hasura-User-Id }
columns: [title, content]
delete:
filter:
author_id: { _eq: X-Hasura-User-Id }
# Anonymous role
role: anonymous
permissions:
select:
filter:
published: { _eq: true }
columns: [id, title, content, created_at]
import {
getFirestore,
collection,
query,
where,
getDocs
} from 'firebase/firestore'
const db = getFirestore()
// Fetch posts
const q = query(
collection(db, 'posts'),
where('published', '==', true)
)
const snapshot = await getDocs(q)
const posts = snapshot.docs.map(doc => ({
id: doc.id,
...doc.data()
}))
import { useQuery, gql } from '@apollo/client'
const GET_POSTS = gql`
query GetPosts {
posts(where: { published: { _eq: true } }) {
id
title
content
author {
display_name
}
}
}
`
const { data } = useQuery(GET_POSTS)
const posts = data?.postsimport { onSnapshot } from 'firebase/firestore'
const unsubscribe = onSnapshot(
query(collection(db, 'posts')),
(snapshot) => {
const posts = snapshot.docs.map(doc => ({
id: doc.id,
...doc.data()
}))
setPosts(posts)
}
)
import { useSubscription, gql } from '@apollo/client'
const POSTS_SUBSCRIPTION = gql`
subscription OnPosts {
posts(order_by: { created_at: desc }) {
id
title
content
}
}
`
const { data } = useSubscription(POSTS_SUBSCRIPTION)
const posts = data?.postsProblem: Firebase document IDs are strings, PostgreSQL UUIDs are different
Solution: Keep Firebase IDs for mapping, use UUIDs for new primary keys
ALTER TABLE posts ADD COLUMN firebase_id TEXT;
ALTER TABLE posts ADD COLUMN id UUID PRIMARY KEY DEFAULT gen_random_uuid();
Problem: Firestore supports arrays, PostgreSQL requires normalization
Solution: Create junction tables
-- Instead of tags: ['javascript', 'node']
-- Create tags table + post_tags junction table
CREATE TABLE tags (id UUID PRIMARY KEY, name TEXT UNIQUE);
CREATE TABLE post_tags (
post_id UUID REFERENCES posts(id),
tag_id UUID REFERENCES tags(id),
PRIMARY KEY (post_id, tag_id)
);
Solution: Convert Firebase Timestamp to Date
const createdAt = firestoreDoc.data().createdAt.toDate()
If migration fails:
# Rollback steps
# 1. Change DNS back to Firebase
# 2. Revert frontend to Firebase SDK
# 3. Rebuild and deploy
Migrating from Firebase to ɳSelf is a significant undertaking due to the NoSQL → SQL paradigm shift, but offers substantial benefits:
Timeline: 16-32 hours for complete migration
Recommended Approach:
Professional Migration Support: For assistance with complex Firebase migrations, contact migrations@nself.org for consulting services.