diff --git a/.gitignore b/.gitignore
index 599be4e..b778db5 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,3 +2,4 @@
 *.ez
 /build
 erl_crash.dump
+*.png
\ No newline at end of file
diff --git a/gleam.toml b/gleam.toml
index adb3302..afae194 100644
--- a/gleam.toml
+++ b/gleam.toml
@@ -10,7 +10,8 @@ links = [
 
 [dependencies]
 gleam_stdlib = ">= 0.34.0 and < 2.0.0"
-gzlib = ">= 1.0.0 and < 2.0.0"
+gzlib = ">= 1.0.1 and < 2.0.0"
 
 [dev-dependencies]
 gleeunit = ">= 1.0.0 and < 2.0.0"
+simplifile = ">= 2.0.0 and < 3.0.0"
diff --git a/manifest.toml b/manifest.toml
index 4f46b2b..26c41c5 100644
--- a/manifest.toml
+++ b/manifest.toml
@@ -2,12 +2,15 @@
 # You typically do not need to edit this file
 packages = [
+  { name = "filepath", version = "1.0.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "filepath", source = "hex", outer_checksum = "EFB6FF65C98B2A16378ABC3EE2B14124168C0CE5201553DE652E2644DCFDB594" },
   { name = "gleam_stdlib", version = "0.38.0", build_tools = ["gleam"], requirements = [], otp_app = "gleam_stdlib", source = "hex", outer_checksum = "663CF11861179AF415A625307447775C09404E752FF99A24E2057C835319F1BE" },
   { name = "gleeunit", version = "1.1.2", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleeunit", source = "hex", outer_checksum = "72CDC3D3F719478F26C4E2C5FED3E657AC81EC14A47D2D2DEBB8693CA3220C3B" },
-  { name = "gzlib", version = "1.0.0", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gzlib", source = "hex", outer_checksum = "EC6A3FAF20B8A707B5A550E1B622785685759991C9D13CFC4AAE8FE34FDDF3B8" },
+  { name = "gzlib", version = "1.0.1", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gzlib", source = "hex", outer_checksum = "5E71EF6C973CB61CDF25D1C5CDBD129C481CE432D6FD089FBB4E30B95CCCE935" },
+  { name = "simplifile", version = "2.0.0", build_tools = ["gleam"], requirements = ["filepath", "gleam_stdlib"], otp_app = "simplifile", source = "hex", outer_checksum = "95219227A43FCFE62C6E494F413A1D56FF953B68FE420698612E3D89A1EFE029" },
 ]
 
 [requirements]
 gleam_stdlib = { version = ">= 0.34.0 and < 2.0.0" }
 gleeunit = { version = ">= 1.0.0 and < 2.0.0" }
-gzlib = { version = ">= 1.0.0 and < 2.0.0" }
+gzlib = { version = ">= 1.0.1 and < 2.0.0" }
+simplifile = { version = ">= 2.0.0 and < 3.0.0" }
diff --git a/src/pngleam.gleam b/src/pngleam.gleam
index ab23847..eedb7be 100644
--- a/src/pngleam.gleam
+++ b/src/pngleam.gleam
@@ -1,5 +1,10 @@
 import gleam/bit_array
+import gleam/bool
+import gleam/float
+import gleam/int
 import gleam/list
+import gleam/option
+import gleam/result
 import gzlib
 
 pub const no_compression = gzlib.no_compression
@@ -33,6 +38,17 @@ fn color_type_to_int(color_type: ColorType) -> Int {
   }
 }
 
+fn int_to_color_type(color_type: Int) -> Result(ColorType, Nil) {
+  case color_type {
+    0b000 -> Ok(Greyscale)
+    0b010 -> Ok(Color)
+    0b011 -> Ok(Indexed)
+    0b100 -> Ok(GreyscaleWithAlpha)
+    0b110 -> Ok(ColorWithAlpha)
+    _ -> Error(Nil)
+  }
+}
+
 pub opaque type ColorInfo {
   ColorInfo(color_type: ColorType, bit_depth: Int)
 }
@@ -63,36 +79,63 @@ pub fn color_info(
   }
 }
 
+pub fn color_info_bits(color_info: ColorInfo) -> Int {
+  case color_info.color_type {
+    Greyscale -> color_info.bit_depth
+    Color -> 3 * color_info.bit_depth
+    Indexed -> color_info.bit_depth
+    GreyscaleWithAlpha -> 2 * color_info.bit_depth
+    ColorWithAlpha -> 4 * color_info.bit_depth
+  }
+}
+
+fn partition_bit_array(
+  data: BitArray,
+  at bytes: Int,
+) -> Result(#(BitArray, BitArray), Nil) {
+  use left <- result.try(bit_array.slice(data, 0, bytes))
+  use right <- result.try(bit_array.slice(
+    data,
+    bytes,
+    bit_array.byte_size(data) - bytes,
+  ))
+  Ok(#(left, right))
+}
+
+const chunk_size = 8192
+
+fn do_chunk_bit_array(data: BitArray, chunks: List(BitArray)) -> List(BitArray) {
+  case partition_bit_array(data, chunk_size) {
+    Ok(#(chunk, rest)) -> do_chunk_bit_array(rest, [chunk, ..chunks])
+    _ -> [data, ..chunks]
+  }
+}
+
 fn chunk_bit_array(data: BitArray) -> List(BitArray) {
   do_chunk_bit_array(data, []) |> list.reverse
 }
 
-fn do_chunk_bit_array(data: BitArray, chunks: List(BitArray)) -> List(BitArray) {
-  case data {
-    <<chunk:bytes-size(8192), rest:bytes>> ->
-      do_chunk_bit_array(rest, [chunk, ..chunks])
-    chunk -> [chunk, ..chunks]
-  }
-}
-
-fn get_chunk(tag: String, data: BitArray) -> BitArray {
+fn create_chunk(tag: String, data: BitArray) -> BitArray {
   let data_size = bit_array.byte_size(data)
-  let tag_bits = <<tag:utf8>>
+  let tag_bits = bit_array.from_string(tag)
   let checksum = gzlib.continue_crc32(gzlib.crc32(tag_bits), data)
   <<data_size:size(32), tag_bits:bits, data:bits, checksum:size(32)>>
 }
 
-const signature = <<137, "PNG":utf8, "\r\n":utf8, 26, "\n":utf8>>
+const signature = <<137, 80, 78, 71, 13, 10, 26, 10>>
+
+pub type BinaryRowData =
+  List(BitArray)
 
 pub fn from_packed(
-  row_data row_data: List(BitArray),
+  row_data row_data: BinaryRowData,
   width width: Int,
   height height: Int,
   color_info color_info: ColorInfo,
   compression_level compression_level: gzlib.CompressionLevel,
 ) -> BitArray {
   let ihdr =
-    get_chunk("IHDR", <<
+    create_chunk("IHDR", <<
       width:size(32),
       height:size(32),
       color_info.bit_depth:size(8),
@@ -101,14 +144,409 @@ pub fn from_packed(
       0:size(8),
       0:size(8),
     >>)
+  let no_filter_int = filter_type_to_int(None)
   let idats =
     row_data
-    |> list.map(fn(d) { <<0, d:bits>> })
+    |> list.map(fn(d) { <<no_filter_int, d:bits>> })
     |> bit_array.concat
     |> gzlib.compress_with_level(compression_level)
     |> chunk_bit_array
-    |> list.map(get_chunk("IDAT", _))
+    |> list.map(create_chunk("IDAT", _))
     |> bit_array.concat
-  let iend = get_chunk("IEND", <<>>)
+  let iend = create_chunk("IEND", <<>>)
   <<signature:bits, ihdr:bits, idats:bits, iend:bits>>
 }
+
+pub type ParseError {
+  InvalidSignature
+  InvalidChunkTag
+  ChecksumMismatch
+  InvalidChunkOrder
+  MissingHeaderChunk
+  InvalidChunkData
+  InvalidColorType
+  InvalidBitDepth
+  InvalidCompressionType
+  InvalidFilterMethod
+  InvalidInterlaceMethod
+  UnsupportedInterlaceMethod
+  InvalidRowFilterType
+  InvalidRowData
+}
+
+fn parse_chunk(
+  data: BitArray,
+) -> Result(#(String, BitArray, BitArray), ParseError) {
+  case data {
+    <<data_size:size(32), tag_bits:bytes-size(4), rest:bytes>> -> {
+      use tag <- result.try(
+        bit_array.to_string(tag_bits)
+        |> result.replace_error(InvalidChunkTag),
+      )
+      case partition_bit_array(rest, data_size) {
+        Ok(#(data, <<checksum:size(32), rest:bytes>>)) -> {
+          let computed_checksum =
+            gzlib.continue_crc32(gzlib.crc32(tag_bits), data)
+          use <- bool.guard(
+            computed_checksum != checksum,
+            Error(ChecksumMismatch),
+          )
+          Ok(#(tag, data, rest))
+        }
+        _ -> Error(InvalidChunkData)
+      }
+    }
+    _ -> Error(InvalidChunkData)
+  }
+}
+
+fn parse_signature(data: BitArray) -> Result(BitArray, ParseError) {
+  case data {
+    <<137, 80, 78, 71, 13, 10, 26, 10, rest:bytes>> -> Ok(rest)
+    _ -> Error(InvalidSignature)
+  }
+}
+
+pub type PngMetadata {
+  PngMetadata(width: Int, height: Int, color_info: ColorInfo)
+}
+
+fn parse_header(header_data: BitArray) -> Result(PngMetadata, ParseError) {
+  case header_data {
+    <<
+      width:size(32),
+      height:size(32),
+      bit_depth:size(8),
+      color_type:size(8),
+      compression_method:size(8),
+      filter_method:size(8),
+      interlace_method:size(8),
+    >> -> {
+      use col_type <- result.try(
+        int_to_color_type(color_type)
+        |> result.replace_error(InvalidColorType),
+      )
+      use col_info <- result.try(
+        color_info(col_type, bit_depth)
+        |> result.replace_error(InvalidBitDepth),
+      )
+      use <- bool.guard(compression_method != 0, Error(InvalidCompressionType))
+      use <- bool.guard(filter_method != 0, Error(InvalidFilterMethod))
+      use <- bool.guard(
+        interlace_method != 0 && interlace_method != 1,
+        Error(InvalidInterlaceMethod),
+      )
+      use <- bool.guard(
+        interlace_method == 1,
+        Error(UnsupportedInterlaceMethod),
+      )
+      Ok(PngMetadata(width, height, col_info))
+    }
+    _ -> Error(InvalidChunkData)
+  }
+}
+
+pub fn parse_metadata(data: BitArray) -> Result(PngMetadata, ParseError) {
+  use chunk_data <- result.try(parse_signature(data))
+  use #(tag, chunk_data, _) <- result.try(parse_chunk(chunk_data))
+  use <- bool.guard(tag != "IHDR", Error(MissingHeaderChunk))
+  parse_header(chunk_data)
+}
+
+pub type RawPalette =
+  BitArray
+
+type PngDataState {
+  PngDataState(palette: option.Option(RawPalette), image_parts: BinaryRowData)
+}
+
+fn do_parse_image_data(
+  data: BitArray,
+  state: PngDataState,
+) -> Result(PngDataState, ParseError) {
+  use #(tag, chunk_data, rest) <- result.try(parse_chunk(data))
+  case tag, state.image_parts {
+    "PLTE", [] ->
+      do_parse_image_data(
+        rest,
+        PngDataState(..state, palette: option.Some(chunk_data)),
+      )
+    "PLTE", _ -> Error(InvalidChunkOrder)
+    "IDAT", parts ->
+      do_parse_image_data(
+        rest,
+        PngDataState(..state, image_parts: [chunk_data, ..parts]),
+      )
+    "IEND", _ -> Ok(state)
+    _, [] -> do_parse_image_data(rest, state)
+    _, _ -> Ok(state)
+  }
+}
+
+fn parse_image_data(data: BitArray) -> Result(PngDataState, ParseError) {
+  do_parse_image_data(data, PngDataState(option.None, []))
+}
+
+pub type FilterType {
+  None
+  Sub
+  Up
+  Average
+  Paeth
+}
+
+pub fn int_to_filter_type(filter_type: Int) -> Result(FilterType, Nil) {
+  case filter_type {
+    0 -> Ok(None)
+    1 -> Ok(Sub)
+    2 -> Ok(Up)
+    3 -> Ok(Average)
+    4 -> Ok(Paeth)
+    _ -> Error(Nil)
+  }
+}
+
+pub fn filter_type_to_int(filter_type: FilterType) -> Int {
+  case filter_type {
+    None -> 0
+    Sub -> 1
+    Up -> 2
+    Average -> 3
+    Paeth -> 4
+  }
+}
+
+@external(erlang, "pngleam_erl", "subUnfilter")
+@external(javascript, "./pngleam_js.mjs", "subUnfilter")
+fn sub_unfilter(row: BitArray, bytes_per_pixel: Int) -> BitArray
+
+@external(erlang, "pngleam_erl", "upUnfilter")
+@external(javascript, "./pngleam_js.mjs", "upUnfilter")
+fn up_unfilter(row: BitArray, row_above: BitArray) -> BitArray
+
+@external(erlang, "pngleam_erl", "avgUnfilter")
+@external(javascript, "./pngleam_js.mjs", "avgUnfilter")
+fn avg_unfilter(
+  row: BitArray,
+  row_above: BitArray,
+  bytes_per_pixel: Int,
+) -> BitArray
+
+@external(erlang, "pngleam_erl", "paethUnfilter")
+@external(javascript, "./pngleam_js.mjs", "paethUnfilter")
+fn paeth_unfilter(
+  row: BitArray,
+  row_above: BitArray,
+  bytes_per_pixel: Int,
+) -> BitArray
+
+fn do_parse_image_rows(
+  data: BitArray,
+  bytes_per_row: Int,
+  bytes_per_pixel: Int,
+  rows: BinaryRowData,
+) -> Result(BinaryRowData, ParseError) {
+  let bits_per_row = bytes_per_row * 8
+  case data {
+    <<>> -> Ok(rows)
+    <<filter_type:size(8), rest:bytes>> -> {
+      use filter_type <- result.try(
+        int_to_filter_type(filter_type)
+        |> result.replace_error(InvalidRowFilterType),
+      )
+      use #(row, rest) <- result.try(
+        partition_bit_array(rest, bytes_per_row)
+        |> result.replace_error(InvalidRowData),
+      )
+      let row = case filter_type {
+        None -> row
+        Sub -> sub_unfilter(row, bytes_per_pixel)
+        Up ->
+          up_unfilter(
+            row,
+            result.unwrap(list.first(rows), <<0:size(bits_per_row)>>),
+          )
+        Average ->
+          avg_unfilter(
+            row,
+            result.unwrap(list.first(rows), <<0:size(bits_per_row)>>),
+            bytes_per_pixel,
+          )
+        Paeth ->
+          paeth_unfilter(
+            row,
+            result.unwrap(list.first(rows), <<0:size(bits_per_row)>>),
+            bytes_per_pixel,
+          )
+      }
+      do_parse_image_rows(rest, bytes_per_row, bytes_per_pixel, [row, ..rows])
+    }
+    _ -> Error(InvalidRowData)
+  }
+}
+
+fn parse_image_rows(
+  data: BitArray,
+  bytes_per_row: Int,
+  bytes_per_pixel: Int,
+) -> Result(BinaryRowData, ParseError) {
+  do_parse_image_rows(data, bytes_per_row, bytes_per_pixel, [])
+  |> result.map(list.reverse)
+}
+
+fn bits_to_bytes(bits: Int) -> Int {
+  bits
+  |> int.to_float
+  |> fn(x) { x /. 8.0 }
+  |> float.ceiling
+  |> float.round
+}
+
+pub type PngData(p, i) {
+  PngData(metadata: PngMetadata, palette: option.Option(p), image_data: i)
+}
+
+pub type PngBitArrayData =
+  PngData(RawPalette, BinaryRowData)
+
+pub fn parse_to_bit_arrays(
+  data: BitArray,
+) -> Result(PngBitArrayData, ParseError) {
+  use rest <- result.try(parse_signature(data))
+  use #(tag, chunk_data, rest) <- result.try(parse_chunk(rest))
+  use <- bool.guard(tag != "IHDR", Error(MissingHeaderChunk))
+  use metadata <- result.try(parse_header(chunk_data))
+  use PngDataState(palette, image_parts) <- result.try(parse_image_data(rest))
+  let image_data =
+    image_parts
+    |> list.reverse
+    |> bit_array.concat
+    |> gzlib.uncompress
+  let bits_per_pixel = color_info_bits(metadata.color_info)
+  let bytes_per_row = bits_to_bytes(metadata.width * bits_per_pixel)
+  let bytes_per_pixel = bits_to_bytes(bits_per_pixel)
+  use idat_rows <- result.try(parse_image_rows(
+    image_data,
+    bytes_per_row,
+    bytes_per_pixel,
+  ))
+  Ok(PngData(metadata, palette, idat_rows))
+}
+
+@external(erlang, "pngleam_erl", "bitArrayToInts")
+@external(javascript, "./pngleam_js.mjs", "bitArrayToInts")
+fn bit_array_to_ints(data: BitArray, int_size: Int) -> List(Int)
+
+fn do_chunk2(
+  values: List(a),
+  chunks: List(#(a, a)),
+) -> Result(List(#(a, a)), Nil) {
+  case values {
+    [] -> Ok(chunks)
+    [a, b, ..rest] -> do_chunk2(rest, [#(a, b), ..chunks])
+    _ -> Error(Nil)
+  }
+}
+
+fn chunk2(values: List(a)) -> Result(List(#(a, a)), Nil) {
+  do_chunk2(values, []) |> result.map(list.reverse)
+}
+
+fn do_chunk3(
+  values: List(a),
+  chunks: List(#(a, a, a)),
+) -> Result(List(#(a, a, a)), Nil) {
+  case values {
+    [] -> Ok(chunks)
+    [a, b, c, ..rest] -> do_chunk3(rest, [#(a, b, c), ..chunks])
+    _ -> Error(Nil)
+  }
+}
+
+fn chunk3(values: List(a)) -> Result(List(#(a, a, a)), Nil) {
+  do_chunk3(values, []) |> result.map(list.reverse)
+}
+
+fn do_chunk4(
+  values: List(a),
+  chunks: List(#(a, a, a, a)),
+) -> Result(List(#(a, a, a, a)), Nil) {
+  case values {
+    [] -> Ok(chunks)
+    [a, b, c, d, ..rest] -> do_chunk4(rest, [#(a, b, c, d), ..chunks])
+    _ -> Error(Nil)
+  }
+}
+
+fn chunk4(values: List(a)) -> Result(List(#(a, a, a, a)), Nil) {
+  do_chunk4(values, []) |> result.map(list.reverse)
+}
+
+pub type GreyscaleValue =
+  Int
+
+pub type ColorValue =
+  #(Int, Int, Int)
+
+pub type PalleteIndex =
+  Int
+
+pub type GreyscaleWithAlphaValue =
+  #(Int, Int)
+
+pub type ColorWithAlphaValue =
+  #(Int, Int, Int, Int)
+
+pub type Grid(a) =
+  List(List(a))
+
+pub type PixelData {
+  GreyscaleData(Grid(GreyscaleValue))
+  ColorData(Grid(ColorValue))
+  IndexedData(Grid(PalleteIndex))
+  GreyscaleWithAlphaData(Grid(GreyscaleWithAlphaValue))
+  ColorWithAlphaData(Grid(ColorWithAlphaValue))
+}
+
+pub type PaletteColor =
+  #(Int, Int, Int)
+
+pub type PaletteColors =
+  List(PaletteColor)
+
+pub type PngPixelData =
+  PngData(PaletteColors, PixelData)
+
+pub fn parse_to_pixel_data(data: BitArray) -> Result(PngPixelData, ParseError) {
+  use PngData(metadata, palette, row_data) <- result.try(parse_to_bit_arrays(
+    data,
+  ))
+  use palette <- result.try(case palette {
+    option.Some(p) ->
+      case bit_array_to_ints(p, 8) |> chunk3 {
+        Ok(p) -> Ok(option.Some(p))
+        Error(_) -> Error(InvalidChunkData)
+      }
+    option.None -> Ok(option.None)
+  })
+  let bit_depth = metadata.color_info.bit_depth
+  let num_values =
+    metadata.width * color_info_bits(metadata.color_info) / bit_depth
+  let values =
+    list.map(row_data, fn(row) {
+      bit_array_to_ints(row, bit_depth) |> list.take(num_values)
+    })
+  use image_data <- result.try(
+    case metadata.color_info.color_type {
+      Greyscale -> Ok(GreyscaleData(values))
+      Color -> list.try_map(values, chunk3) |> result.map(ColorData)
+      Indexed -> Ok(IndexedData(values))
+      GreyscaleWithAlpha ->
+        list.try_map(values, chunk2) |> result.map(GreyscaleWithAlphaData)
+      ColorWithAlpha ->
+        list.try_map(values, chunk4) |> result.map(ColorWithAlphaData)
+    }
+    |> result.replace_error(InvalidRowData),
+  )
+  Ok(PngData(metadata, palette, image_data))
+}
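A minimal usage sketch of the parsing API added above; the file name is illustrative, and simplifile (introduced by this change only as a dev dependency) merely stands in for any source of PNG bytes:

import gleam/io
import pngleam
import simplifile

pub fn main() {
  // Read a PNG from disk and decode it all the way to per-pixel values.
  let assert Ok(bits) = simplifile.read_bits("input.png")
  case pngleam.parse_to_pixel_data(bits) {
    Ok(pngleam.PngData(metadata, _palette, pixels)) -> {
      io.debug(metadata)
      // `pixels` is one of the PixelData variants, e.g. ColorData(rows).
      io.debug(pixels)
      Nil
    }
    Error(e) -> {
      io.debug(e)
      Nil
    }
  }
}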
diff --git a/src/pngleam_erl.erl b/src/pngleam_erl.erl
new file mode 100644
index 0000000..0f8dfa4
--- /dev/null
+++ b/src/pngleam_erl.erl
@@ -0,0 +1,72 @@
+-module(pngleam_erl).
+
+-export([subUnfilter/2, upUnfilter/2, avgUnfilter/3, paethUnfilter/3, bitArrayToInts/1, bitArrayToInts/2]).
+
+addBytewise(As, Bs) ->
+    list_to_binary(lists:zipwith(fun(A, B) -> (A + B) rem 256 end, binary_to_list(As), binary_to_list(Bs), trim)).
+
+avgBytewise(As, Bs) ->
+    list_to_binary(lists:zipwith(fun(A, B) -> ((A + B) div 2) rem 256 end, binary_to_list(As), binary_to_list(Bs), trim)).
+
+paethBytewise(As, Bs, Cs) ->
+    list_to_binary(lists:zipwith3(fun(A, B, C) ->
+        P = A + B - C,    % initial estimate
+        PA = abs(P - A),  % distances to a, b, c
+        PB = abs(P - B),
+        PC = abs(P - C),
+        % return nearest of a,b,c,
+        % breaking ties in order a,b,c.
+        case (PA =< PB) and (PA =< PC) of
+            true -> A;
+            false -> case (PB =< PC) of
+                true -> B;
+                false -> C
+            end
+        end
+    end, binary_to_list(As), binary_to_list(Bs), binary_to_list(Cs), trim)).
+
+doSubUnfilter(Row, BytesPerPixel, Acc, Prev) ->
+    case Row of
+        <<>> -> Acc;
+        <<Curr:BytesPerPixel/binary, Rest/binary>> ->
+            New = addBytewise(Curr, Prev),
+            doSubUnfilter(Rest, BytesPerPixel, <<Acc/binary, New/binary>>, New)
+    end.
+
+subUnfilter(Row, BytesPerPixel) -> doSubUnfilter(Row, BytesPerPixel, <<>>, <<0:(BytesPerPixel * 8)>>).
+
+upUnfilter(Row, Above) -> addBytewise(Row, Above).
+
+doAvgUnfilter(Row, Above, BytesPerPixel, Acc, Prev) ->
+    case Row of
+        <<>> -> Acc;
+        <<Curr:BytesPerPixel/binary, Rest/binary>> ->
+            case Above of <<CurrAbove:BytesPerPixel/binary, RestAbove/binary>> ->
+                Avg = avgBytewise(Prev, CurrAbove),
+                New = addBytewise(Curr, Avg),
+                doAvgUnfilter(Rest, RestAbove, BytesPerPixel, <<Acc/binary, New/binary>>, New)
+            end
+    end.
+
+avgUnfilter(Row, Above, BytesPerPixel) -> doAvgUnfilter(Row, Above, BytesPerPixel, <<>>, <<0:(BytesPerPixel * 8)>>).
+
+doPaethUnfilter(Row, Above, BytesPerPixel, Acc, Prev, PrevAbove) ->
+    case Row of
+        <<>> -> Acc;
+        <<Curr:BytesPerPixel/binary, Rest/binary>> ->
+            case Above of <<CurrAbove:BytesPerPixel/binary, RestAbove/binary>> ->
+                Paeth = paethBytewise(Prev, CurrAbove, PrevAbove),
+                New = addBytewise(Curr, Paeth),
+                doPaethUnfilter(Rest, RestAbove, BytesPerPixel, <<Acc/binary, New/binary>>, New, CurrAbove)
+            end
+    end.
+
+paethUnfilter(Row, Above, BytesPerPixel) -> doPaethUnfilter(Row, Above, BytesPerPixel, <<>>, <<0:(BytesPerPixel * 8)>>, <<0:(BytesPerPixel * 8)>>).
+
+doBitArrayToInts(As, IntSize, Values) ->
+    case As of
+        <<V:IntSize, Rest/bitstring>> -> doBitArrayToInts(Rest, IntSize, [V | Values]);
+        _ -> Values
+    end.
+bitArrayToInts(As, IntSize) -> lists:reverse(doBitArrayToInts(As, IntSize, [])).
+bitArrayToInts(As) -> bitArrayToInts(As, 8).
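For reference, the Paeth predictor implemented by paethBytewise above (and mirrored by the JavaScript paeth helper below) picks whichever of left, above, and upper-left is closest to the estimate left + above - upper_left, breaking ties in that order. The same rule as a standalone Gleam sketch (the function name is illustrative and not part of this module):

import gleam/int

// Paeth predictor: a = left, b = above, c = upper-left.
// Pick the neighbour nearest to p = a + b - c, ties broken a, then b, then c.
pub fn paeth_predictor(a: Int, b: Int, c: Int) -> Int {
  let p = a + b - c
  let pa = int.absolute_value(p - a)
  let pb = int.absolute_value(p - b)
  let pc = int.absolute_value(p - c)
  case pa <= pb && pa <= pc {
    True -> a
    False ->
      case pb <= pc {
        True -> b
        False -> c
      }
  }
}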
diff --git a/src/pngleam_js.mjs b/src/pngleam_js.mjs
new file mode 100644
index 0000000..a3c432c
--- /dev/null
+++ b/src/pngleam_js.mjs
@@ -0,0 +1,82 @@
+import { BitArray, toList } from "./gleam.mjs"
+
+export const subUnfilter = (row, bpp) => {
+  const state = Buffer.alloc(bpp)
+  return new BitArray(new Uint8Array(row.buffer.map((x, i) => {
+    const j = i % bpp
+    const y = x + state[j]
+    state[j] = y
+    return y
+  })))
+}
+
+export const upUnfilter = (row, above) => new BitArray(new Uint8Array(row.buffer.map((x, i) => x + above.buffer[i])))
+
+const avg = (a, b) => Math.floor((a + b) / 2)
+
+export const avgUnfilter = (row, above, bpp) => {
+  const state = Buffer.alloc(bpp)
+  return new BitArray(new Uint8Array(row.buffer.map((x, i) => {
+    const j = i % bpp
+    const y = x + avg(state[j], above.buffer[i])
+    state[j] = y
+    return y
+  })))
+}
+
+const paeth = (a, b, c) => {
+  const p = a + b - c // initial estimate
+  const pa = Math.abs(p - a) // distances to a, b, c
+  const pb = Math.abs(p - b)
+  const pc = Math.abs(p - c)
+  // return nearest of a,b,c,
+  // breaking ties in order a,b,c.
+  if (pa <= pb && pa <= pc) return a
+  else if (pb <= pc) return b
+  else return c
+}
+
+export const paethUnfilter = (row, above, bpp) => {
+  const state = Buffer.alloc(bpp)
+  return new BitArray(new Uint8Array(row.buffer.map((x, i) => {
+    const j = i % bpp
+    const y = x + paeth(state[j], above.buffer[i], above.buffer[i - bpp] ?? 0)
+    state[j] = y
+    return y
+  })))
+}
+
+const doAddBytewise = (as, bs) => as.map((x, i) => x + bs[i])
+export const addBytewise = (as, bs) => new BitArray(new Uint8Array(doAddBytewise(as.buffer, bs.buffer)))
+
+const doSubBytewise = (as, bs) => as.map((a, i) => a - bs[i])
+export const subBytewise = (as, bs) => new BitArray(new Uint8Array(doSubBytewise(as.buffer, bs.buffer)))
+
+const doAvgBytewise = (as, bs) => as.map((a, i) => Math.floor((a + bs[i]) / 2))
+export const avgBytewise = (as, bs) => new BitArray(new Uint8Array(doAvgBytewise(as.buffer, bs.buffer)))
+
+const doPaethBytewise = (as, bs, cs) => as.map((a, i) => {
+  const b = bs[i]
+  const c = cs[i]
+  const p = a + b - c // initial estimate
+  const pa = Math.abs(p - a) // distances to a, b, c
+  const pb = Math.abs(p - b)
+  const pc = Math.abs(p - c)
+  // return nearest of a,b,c,
+  // breaking ties in order a,b,c.
+  if (pa <= pb && pa <= pc) return a
+  else if (pb <= pc) return b
+  else return c
+})
+export const paethBytewise = (as, bs, cs) => new BitArray(new Uint8Array(doPaethBytewise(as.buffer, bs.buffer, cs.buffer)))
+
+const raise = message => { throw new Error(message) }
+
+export const bitArrayToInts = (as, intSize = 8) => toList(
+  intSize === 16 ? [...Array(as.buffer.length / 2)].map((_, i) => (as.buffer[i * 2] << 8) + as.buffer[i * 2 + 1]) :
+  intSize === 8 ? [...as.buffer] :
+  intSize === 4 ? [...as.buffer].flatMap(x => [x >> 4, x & 15]) :
+  intSize === 2 ? [...as.buffer].flatMap(x => [x >> 6, x >> 4 & 3, x >> 2 & 3, x & 3]) :
+  intSize === 1 ? [...as.buffer].flatMap(x => [x >> 7, x >> 6 & 1, x >> 5 & 1, x >> 4 & 1, x >> 3 & 1, x >> 2 & 1, x >> 1 & 1, x & 1]) :
+  raise("Invalid int size")
+)
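Finally, a round-trip sketch tying the existing encoder and the new parser together. It assumes the Greyscale constructor and the two-argument color_info builder used inside parse_header are also callable from outside the module, and uses the no_compression constant re-exported at the top of pngleam.gleam:

import gleam/io
import pngleam

pub fn main() {
  // Two rows of two 8-bit greyscale pixels each.
  let rows = [<<10, 200>>, <<90, 30>>]
  let assert Ok(info) = pngleam.color_info(pngleam.Greyscale, 8)
  let png =
    pngleam.from_packed(
      row_data: rows,
      width: 2,
      height: 2,
      color_info: info,
      compression_level: pngleam.no_compression,
    )
  // Decoding the freshly encoded bytes should give back the same packed rows.
  let assert Ok(pngleam.PngData(_metadata, _palette, parsed_rows)) =
    pngleam.parse_to_bit_arrays(png)
  io.debug(parsed_rows == rows)
}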