// reddit-lemmy-importer/src/index.js
import { processPostsAndComments } from './parser.js';
import { writeSql } from './sql.js';
import path, { join } from 'node:path';
import { existsSync, writeFileSync, appendFileSync } from 'node:fs';
import { fileURLToPath } from 'node:url';
import yargs from 'yargs';

// es6 >:(
// Recreate CommonJS __filename/__dirname, which aren't available in ES modules.
export const __filename = fileURLToPath(import.meta.url);
export const __dirname = path.dirname(__filename);
// https://github.com/yargs/yargs/blob/main/docs/examples.md section "Yargs is here to help you..."
const args = yargs(process.argv.slice(2))
    .alias('c', 'comm')
    .alias('u', 'user')
    .alias('o', 'output')
    .describe({
        'c': 'Name of the community the archive will be added to',
        'u': 'Name of the user the archived posts will be made by',
        'o': 'Path that the .sql file will save to',
        'posts': 'The JSON dump file of submissions you got from https://the-eye.eu/redarcs/',
        'comments': 'The JSON dump file of comments',
    })
    .string(['comm', 'user', 'output', 'posts', 'comments'])
    .nargs(['comm', 'user', 'output', 'posts', 'comments'], 1)
    .demandOption(['comm', 'user', 'output', 'posts', 'comments'])
    .help('h')
    .alias('h', 'help')
    .epilog("git: https://git.stardust.wtf/starlight/reddit-lemmy-importer")
    .parse();
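// Example invocation (hypothetical subreddit/file names, just to show the expected flags):
//   node src/index.js --comm technology --user archive_bot --output technology.sql \
//     --posts technology_submissions.json --comments technology_comments.json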
/* // returns the tree's filename
processPostsAndComments(args.posts, args.comments, (result) => {
console.log(result)
}); */
function printThreadStructure(thread, level = 0) {
    thread.forEach(item => {
        let out = '';
        out += ' '.repeat(level) + `${item.name} by ${item.author}: `;
        // reddit fullnames are prefixed "t3_" for submissions/posts; anything else here is a comment
        out += item.name[1] == "3" ? `"${item.title}"` : item.body == "" ? "\"\"" : `"${item.body}"`;
        console.log(out);
        if (item.children.length > 0) {
            printThreadStructure(item.children, level + 1);
        }
    });
}
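// Example of the printed structure (hypothetical ids and authors; each depth level adds one space of indentation):
//   t3_abc12 by some_op: "Example post title"
//    t1_def34 by some_commenter: "a top-level reply"
//     t1_ghi56 by some_op: "a nested reply"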
async function unflatten(postsFile, commentsFile) {
    try {
        // Build the nested post/comment trees from the two JSON dump files.
        const result = await processPostsAndComments(postsFile, commentsFile);
        //console.log('Thread Structure:');
        //printThreadStructure(result);
        // Optional: write the result to a file
        //writeFileSync('processed-threads.json', JSON.stringify(result, null, 2));

        // empty the output file if it already exists, since each post's SQL is appended below
        if (existsSync(args.output)) {
            writeFileSync(args.output, '');
        }
        result.forEach(post => {
            appendFileSync(args.output, writeSql(post, args.comm, args.user));
        });
    } catch (error) {
        console.error('Error processing files:', error);
    }
}
// Run the main function
unflatten(args.posts, args.comments);
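// The generated .sql file is presumably loaded into the target Lemmy instance's Postgres database,
// for example (user/database names are assumptions, adjust to your deployment):
//   psql -U lemmy -d lemmy -f output.sql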
//console.log("HOLY FUCKING SMOKES!" + existsSync(tree))
// note: outputPath is not currently used; args.output is written to directly in unflatten()
const outputPath = join(__dirname, '/', args.output);