Datasets:
Tasks:
Text Generation
Formats:
parquet
Sub-tasks:
language-modeling
Languages:
Danish
Size:
10M - 100M
ArXiv:
DOI:
License:
// wtf_bridge.js
// Usage: node wtf_bridge.js
// Reads newline-delimited JSON from stdin: {"wikitext":"...","lang":"da"}
// Writes newline-delimited JSON to stdout: {"text":"...","isRedirect":false} or {"error":"..."}

// Third-party wikitext parser: turns raw wiki markup into a document object.
const wtf = require('wtf_wikipedia');
const readline = require('readline');

// Line-oriented reader over stdin. `terminal: false` treats the stream as a
// plain pipe (no TTY echo/prompt handling), which is what the Python parent
// process expects.
const rl = readline.createInterface({
  input: process.stdin,
  output: process.stdout,
  terminal: false,
});
// Last-resort error reporting: if anything escapes the per-line try/catch,
// surface it to the Python parent as a JSON line on stdout, then exit non-zero
// so the parent knows the bridge died.
process.on('uncaughtException', (err) => {
  try {
    // emit as JSON so Python can see it
    process.stdout.write(JSON.stringify({ error: String((err && err.stack) || err) }) + '\n');
  } catch (e) {
    // stdout may itself be broken at this point; best effort only.
  }
  process.exit(1);
});
// Handle one request per input line: parse the JSON payload, run the wikitext
// through wtf_wikipedia, and emit exactly one JSON response line (result or
// error) so the Python side stays in lockstep with its requests.
//
// NOTE: the original wrapped this body in an `(async () => { ... })()` IIFE,
// but nothing inside is awaited — the parse is synchronous — so the wrapper
// only created a floating promise. A plain try/catch is equivalent and keeps
// output ordering trivially guaranteed.
rl.on('line', (line) => {
  try {
    const payload = JSON.parse(line);
    const wikitext = payload.wikitext || '';
    const lang = payload.lang || null;
    // parse wikitext into a document (sync)
    const doc = lang ? wtf(wikitext, lang) : wtf(wikitext);
    // Guard against unexpected return shapes from the parser.
    const text = (doc && typeof doc.text === 'function') ? doc.text() : '';
    const isRedirect = (doc && typeof doc.isRedirect === 'function') ? doc.isRedirect() : false;
    process.stdout.write(JSON.stringify({ text, isRedirect }) + '\n');
  } catch (err) {
    // Per-request failure (bad JSON, parser throw): report as a JSON error
    // line rather than crashing, preserving the one-response-per-request
    // protocol.
    process.stdout.write(JSON.stringify({ error: String((err && err.stack) || err) }) + '\n');
  }
});