
Change JavaScript Blob encoding to ANSI instead of UTF-8

I was wondering if it is possible to save a simple .txt file with JavaScript and a Blob using ANSI encoding.

At the moment I have a script that creates a .txt file with CRLF line endings, but with UTF-8 encoding.

Is it possible to save it with ANSI encoding? I need this because the .txt file has to be imported into an 'old' Windows program that expects ANSI instead of UTF-8.

This is the example I used: https://jsfiddle.net/UselessCode/qm5AG/

let textFile = null;

function makeTextFile () {
    let text = `Some text with nice line endings\nand special characters like é and ü.`;

    // endings: "native" converts \n to the platform's line endings (CRLF on Windows),
    // but the string is still stored in the Blob as UTF-8 bytes
    const data = new Blob([text], {
        type: "text/plain",
        endings: "native"
    });

    // release the previous object URL before creating a new one
    if (textFile !== null) {
        window.URL.revokeObjectURL(textFile);
    }

    textFile = window.URL.createObjectURL(data);

    return textFile;
}

There used to be an option in the TextEncoder API to encode USVStrings into arbitrary encodings, but it has been removed from the specs and from browsers: TextEncoder now only outputs UTF-8.
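For illustration, here is a quick probe you can run in a current browser (the probe is only a demonstration, not part of the conversion): the encoding label is simply ignored and the output is always UTF-8.

// Without a polyfill, TextEncoder ignores any label and always emits UTF-8:
const probe = new TextEncoder("windows-1252");
console.log(probe.encoding);     // "utf-8"
console.log(probe.encode("é"));  // Uint8Array [195, 169]: two UTF-8 bytes, not the single ANSI byte 0xE9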

You'd need to use a library to perform the conversion. Here, I'll use the inexorabletash/text-encoding polyfill:

(async () => {
    const text = `Some text with nice line endings\nand special characters like é and ü.`;
    const encoding = 'windows-1252'; // aka ANSI
    const encoder = new TextEncoder(encoding, {
        NONSTANDARD_allowLegacyEncoding: true
    });
    const data = encoder.encode(text); // `data` is an Uint8Array
    const encoded_as_ANSI = new Blob([data]);

    // for demo only
    const encoded_as_UTF8 = new Blob([text]);

    const ANSI_read = await readAsText(encoded_as_ANSI, encoding);
    const UTF8_read = await readAsText(encoded_as_UTF8, encoding);
    console.log("(ANSI)", ANSI_read);
    console.log("(UTF8)", UTF8_read);
})();

function readAsText(blob, encoding) {
    return new Promise(res => {
        const reader = new FileReader();
        reader.onload = e => res(reader.result);
        reader.readAsText(blob, encoding);
    });
}

<script>window.TextEncoder = null; // force installation of the polyfill</script>
<script src="https://cdn.jsdelivr.net/gh/inexorabletash/text-encoding/lib/encoding-indexes.js"></script>
<script src="https://cdn.jsdelivr.net/gh/inexorabletash/text-encoding/lib/encoding.js"></script>
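As a quick sanity check of what the polyfill produces: the native TextDecoder still supports decoding legacy encodings such as windows-1252 (only the encoding side was removed from browsers). A minimal sketch, assuming the polyfill above is loaded:

// Verify the bytes directly, without the FileReader round trip:
const checkEncoder = new TextEncoder("windows-1252", { NONSTANDARD_allowLegacyEncoding: true }); // polyfill
const bytes = checkEncoder.encode("é and ü");
console.log(bytes.includes(0xE9));                          // true: "é" is the single byte 0xE9 in ANSI
console.log(new TextDecoder("windows-1252").decode(bytes)); // "é and ü"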

However, going this route we lose the endings option, since it applies only to string blobParts.

So one way would be to first create a UTF-8 Blob with the endings option, then convert that UTF-8 Blob to ANSI:

(async () => {
    const text = `Some text with nice line endings\nand special characters like é and ü.`;
    const encoding = 'windows-1252'; // aka ANSI

    const utf8_blob = new Blob([text], { endings: "native" });
    const utf_8_txt = await utf8_blob.text();

    const encoder = new TextEncoder(encoding, {
        NONSTANDARD_allowLegacyEncoding: true
    });
    const data = encoder.encode(utf_8_txt); // now `data` is an Uint8Array
    const encoded_as_ANSI = new Blob([data]);

    const read_as_ANSI = await readAsText(encoded_as_ANSI, encoding);
    console.log(read_as_ANSI);
})();

function readAsText(blob, encoding) {
    return new Promise(res => {
        const reader = new FileReader();
        reader.onload = e => res(reader.result);
        reader.readAsText(blob, encoding);
    });
}

<script>window.TextEncoder = null; // force installation of the polyfill</script>
<script src="https://cdn.jsdelivr.net/gh/inexorabletash/text-encoding/lib/encoding-indexes.js"></script>
<script src="https://cdn.jsdelivr.net/gh/inexorabletash/text-encoding/lib/encoding.js"></script>
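To tie this back to the question's makeTextFile function, the ANSI-encoded Blob can feed the same object-URL workflow used there. A minimal sketch, assuming the polyfill above is loaded; the saveAnsiTextFile name and the export.txt file name are illustrative, not from the answer:

let ansiTextFile = null;

async function saveAnsiTextFile(text) {
    // keep the line-ending conversion by going through a UTF-8 Blob first, as above
    const utf8_blob = new Blob([text], { endings: "native" });
    const native_text = await utf8_blob.text();

    const encoder = new TextEncoder("windows-1252", { NONSTANDARD_allowLegacyEncoding: true });
    const ansi_blob = new Blob([encoder.encode(native_text)], { type: "text/plain" });

    // same object-URL housekeeping as in the question
    if (ansiTextFile !== null) {
        window.URL.revokeObjectURL(ansiTextFile);
    }
    ansiTextFile = window.URL.createObjectURL(ansi_blob);

    // illustrative: trigger a download named export.txt
    const link = document.createElement("a");
    link.href = ansiTextFile;
    link.download = "export.txt";
    link.click();
}

// usage: saveAnsiTextFile(`Some text with nice line endings\nand special characters like é and ü.`);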
