Add json metadata generation and improve site

LilyRose2798 2024-04-06 00:31:10 +11:00
parent 4540c36cdd
commit 4997a7716c
2 changed files with 264 additions and 64 deletions


@@ -7,6 +7,7 @@
<title>IP Map</title>
<script src="https://unpkg.com/maplibre-gl@4.1.2/dist/maplibre-gl.js"></script>
<link rel="stylesheet" href="https://unpkg.com/maplibre-gl@4.1.2/dist/maplibre-gl.css" />
<link rel="shortcut icon" href="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAEAAAABACAYAAACqaXHeAAAACXBIWXMAAABGAAAARgGVRxdWAAAJnUlEQVR4nO1bS2xjVxn+fK+d6/gd5zFJxpNkmEdbOZqGjkphgSaMYBbtYtJSIyEW00pI7ArdwGI20wULkFBTITZIqMNDLAiLjATVUIRIBRJFKiEZ4ekM0MaZpHnZSWzHdvy+6L/nJLGd+7TjzKDOJx3Zvo///Oc//znn/79zbJNlGZ9mCJ/q1j82wMMwwFT0FaWYvd5mHO8cMBW9CeAa//UqIuGb/Do1/G1+/eeIhI/NEMdjgKloAAA19mrDnVf559sN128hEp5ov2LHZ4DanjeLY/GE9s8BU9Ebmo0vllhRxzX+blvRXg+Yio4B+Kfm/dQO+/R79aR8FpHw3JHrxtFuD5jUvbtbYKUVGS2ifQaYio4DuKT7TKXCij4ucU9qC9rpAcazeL7AijHaNhm20wBH2Wv/lx6g7/4PT1YdHucCbZSdekRl1aGdBjjKtbttcYDd9JMsnqfJaLzhzgwi4RmVN2KG84Bo2v4xDZ3GVfUhg0XCSTOCjSPBqegEX4YaE5lapHjAMrlfcX2Gp461BLvc32OkZ23mSB3xHV78Ou/cUhKwSHhaT7C2AZh1qdJhI+1qkOJGuIGp6AiABd2nt/jQDuq1Q8FpRMIxnhsYNbwRi0oHqnupigG0U1crmOdeM6k7DDI59ulx6Yl+jzeadHq6BZ1ucUPUDQ21QXijxcaDK0oW1x2HGa+sFAMkuaxWGg/epkPZpZoHGLvuESBry+J7zuuKoB/kvw+37G53ldgfSjU47AHsgTfarcl/Oj5GySsohb4fA95obDxBfRJk88D2Ueg0U1jCTHEJ5xxBfMP5ZN29931sef98uiHU7/0TIMWA1AVg59mjMk2X2tJ42APY8sU0280DsWXgk3U2Y9Nvi6DG/7g0j9tYUl5Myjm8kv8ZYtUEhishpWTsq/hL13UUBU6Q9NwHht4B/P+y3kzSkXQlnUn3A53n1Fjng0BIbdnrdAKnBoD4NrCyDlSqQIeDzdo+jxGTo+BK5whiUhZ+yQXIwC/L7+O2/T4uix/jBeEp5Zm4bx7x4L+xUHkXT6S/Csj9QOaLQPG0uUYTs5TOsFWFKDYKsHqCLL4Qxb2nhpW4hC2l+8viwRAwoq+IuEhlgMTWAYtDFXX5gd4gM4wOYkIGd507eD43gF9Js3imGkLIFeCy49jw38XZT74CPDEPrJwEdgyCI2pofAvYTrGOIXRKrOF+T23D1bBPs9XPAVPRmKnAhyonq2dzzOqkAAUzJ3pUDTFTSKDXLqHsB5yiA8NpEf8IxnGx1A9yi53uRQQXBiGG8oBcAFbLQCUB2FRWPqp7PcHcnDqAvNHtYt5o0Akci4iER/Z+NM4B5vg3qoh6fSQEjJ4HnjrDhgQp1UBxUeOvbP0dv8EGns54IXeKqDgEdLsCcAh2OBxOuKsDEJ0C4HUAS72AZx6QfgRUPqivl2RTHVQX1Ul1kw4mPFCrjY0eEOCJh5VQ83ANO5u4bdvFbU+I/S7E0Odyw+HtwPNJN0qj3fAN9UJwS+yFDPV6AphdBUIJIL8G5FJA/svK7dLST2GTe2AfeqkVtcBD9ZHa1UAtEKKw881ma5gr5fG5jQUM+1z4uq8LL9rdOC2LCJSBglzBWsiJoefOwtaYCRbLwJ0F4H4SEDogO2kYpFDN3EF59a+QE8voOPcahO6WAsLXEQnreMAepqIzrVBa301vwO6043pHF7ZFGdudIuByIOAQ0d3jhet8r/qL99YhbyWBYgbIEmEqwFaRUNmeQTWbhOPU15pVCUpOEQk3ps6afMBEK0Phh76+/e+/S+XxbFnCZ3aqSEgCxIBO4mOzQV7bhZATUIKA3OYq/IPnIQavQAw2o8k+UlostTojwcbIeKu01ly+iG+tbOG13C7edQqoQoawsqP9wtIWZBHIdWaxvjaLldk/Irex3IoK4G0Y1yJI9AkRNilOtzIcfpHMYtQj4RlRxIN+L9xOEd19LmCkoUs/SkBOZFDZLUFcy6GQTyGfWENgONxs1eCp9IQeO2Rub5CFkBPNpskLgoCYT8LFbBG2cgn5k370dkmAVwLyZSWOkHNFyPfiyjDISlU4clU4HZ3NVAee+0/vs0g6sLY5qs0Lgl/b95TXl7LYEG0443PiZY+IC5CxVZXxN5cTLyTTSI8OwL24CcgyqqcCsEfXkfF0wF2oQrDbkdvNIrm4jExsDbZiFeeuXq6t6z3OETTCEh9IME+KYn9umKmr/IBBqhsmf0jkUXXa8ZJfUhr/k+Us3ikCk0NAwSWhsp5FkWIm2QYhnkPF0wnHTgH3PlhEwO/C4FgIRY8P6w/mUNxMA/UGuMSJkkMMj1W0Rouz/GFObWj8+pwP3ww48AXJpvz2VGT4s0X4ZBn/7XDCmylgswhslgApXUTKbocgCLClChArzCtdXQH0hc9j9JrqBH6VZ3gtbZs1fz6AZY/TzSyVsxURoyjjt/dY5738ZACb5SoGJN0ERgspPtGpkp5GaM4DmNX/bLXxuxUZv9+sQCyU0SHacMFrVwp9txfK+DCWQnkvszMPv9IRTXqCdQ9gnOFcMz3/ZjSDu5kqRrs78O2zzrp7d+7GsbacRrdfwsXnQlZFg3vCmBrtpYdmPOCm6cZT9kY5O8flPgd2tncxaDvcy5IMFDd30R2oWfroXeMDFHvwc90sweoyOM5d3xik+EcPgMETRry/NohrICbqzJARwVGLL1mZD6x6gPmTGkurTOlmGw++YUIySJZ5WDpNYtUAagHQYZDCRJ+dMNzzMwbJIFnmjWBORw6rBjCmy4ixodLTZab3F3nRBskgWXtyj0LHGpg3gJllhihoGrNETxnv+OJQVKmFfs41kmwz1LyFJdGKBwQMnyA3pXWcqHRzk5Y5A5AskkmyzQ0FY105rBhAP+aOc7p8sM/sxJfikeS0Kd6BZJJsqqNmaW1K1xqYN4DecVVa8oiqDvI9AnOYVhIZlszoHmLYB8mmOqguvfjAwtFaq5Ogek/RzhHx8uSm5lG7VW3+UDTVQXXFNbcuLbFYVg1weLzuHXe11vi36kJW9v0t028r84HmMVtLSVHrBiCcPGFFxqJGj98wXBLN1WnJANYIETZW9xjjmFKZKFpJiVOaHB1dYweyZkzLE8U9tndc2fBgxdx8wtH6/wXM7yHMcwZHf4Jia7jZ80CqXL8VHMVBSaP0c5HvyIyZmp3pGXqW3jEeEpZSXzVYHQJmlZjnnMG00Tk9TbAtrEk+LCY4GdvoFS0b4CiGQKDmOHuynX9v4fWN1UR6lhhgNTz+7/AjoMNDxWMDPAI6PDwA+B/ygrofO/r6jAAAAABJRU5ErkJggg==" />
<style>
body {
margin: 0;
@@ -16,11 +17,15 @@
width: 100%;
height: 100%;
}
#map {
main {
width: 100vh;
height: 100%;
margin: 0 auto;
}
#map {
width: 100%;
height: 100%;
}
.maplibregl-canvas {
cursor: pointer;
}
@@ -29,12 +34,54 @@
font-size: 1rem;
padding: 0.8rem 1.2rem;
}
.map-overlay {
position: absolute;
top: 1rem;
right: 1rem;
padding: 1.5rem;
background-color: #222;
color: #eee;
box-shadow: 3px 3px 2px rgba(0, 0, 0, 0.8);
border-radius: 3px;
}
.map-overlay h2 {
display: block;
margin: 0;
margin-bottom: 1rem;
}
#map-style-controls ul {
padding-left: 1em;
list-style-type: none;
}
#map-style-controls > ul {
padding-left: 0;
}
#map-style-controls ul.hidden {
display: none;
}
#map-style-controls label {
display: block;
padding: 0.5rem;
font-weight: bold;
}
#map-style-controls input[type=radio] {
padding: 0;
margin: 0;
border: 0;
margin-right: 0.5rem;
}
</style>
</head>
<body>
<div id="map"></div>
<main>
<div id="map"></div>
<div class="map-overlay">
<h2>Map Style</h2>
<div id="map-style-controls"><p>Loading available styles...</p></div>
</div>
</main>
<script>
const hilbert_c2i = ({ x, y }) => {
const coordsToHilbert = ({ x, y }) => {
let rotation = 0
let reflection = 0
let index = 0
@@ -63,7 +110,7 @@
return index
}
const hilbert_i2c = index => {
const hilbertToCoords = index => {
let rotation = 0
let reflection = 0
let coord = { x: 0, y: 0 }
@@ -85,7 +132,15 @@
return coord
}
const tilesDir = "tiles"
const sourceId = "ipmap-tiles-source"
const styleControlsDiv = document.getElementById("map-style-controls")
const dateDir = (date = new Date()) => `${date.getFullYear()}-${(date.getMonth() + 1).toString().padStart(2, "0")}-${date.getDate().toString().padStart(2, "0")}`
const getId = (date, variant, colormap) => `${date.replaceAll("-", "")}-${variant}-${colormap}`
const getTilesUrl = (date, variant, colormap) => `${tilesDir}/${date}/${variant}/${colormap}/{z}/{y}/{x}.png`
const getSourceId = (date, variant, colormap) => `ipmap-tiles-source-${getId(date, variant, colormap)}`
const getLayerId = (date, variant, colormap) => `ipmap-tiles-layer-${getId(date, variant, colormap)}`
const map = new maplibregl.Map({
container: "map",
@@ -95,27 +150,8 @@
dragRotate: false,
style: {
version: 8,
sources: {
"ipmap-tiles": {
type: "raster",
tiles: [
"tiles/2024-03-30/density/jet/{z}/{y}/{x}.png" // change to using remote json with list of tilemaps
],
minzoom: 0,
maxzoom: 8,
tileSize: 256
}
},
layers: [
{
id: "ipmap-tiles-layer",
type: "raster",
source: "ipmap-tiles",
paint: {
"raster-resampling": "nearest"
}
}
]
sources: {},
layers: []
},
center: [0, 0],
minZoom: -1,
@@ -123,11 +159,135 @@
zoom: 0
})
map.painter.context.extTextureFilterAnisotropic = undefined
const dataP = fetch(`${tilesDir}/tiles.json`).then(res => res.json())
map.once("style.load", async () => {
const data = await dataP
const flatData = Object.entries(data)
.sort(([a], [b]) => a.localeCompare(b))
.flatMap(([date, variantData]) =>
Object.entries(variantData)
.sort(([a], [b]) => a.localeCompare(b))
.flatMap(([variant, colormaps]) => colormaps
.sort((a, b) => a.localeCompare(b))
.flatMap(colormap => ({ date, variant, colormap }))))
if (flatData.length === 0) {
console.log("no data found")
return
}
let { date: curDate, variant: curVariant, colormap: curColormap } = flatData[flatData.length - 1]
map.addSource(sourceId, {
type: "raster",
tiles: [getTilesUrl(curDate, curVariant, curColormap)],
tileSize: 256,
minzoom: 0,
maxzoom: 8,
})
map.addLayer({
id: "ipmap-tiles-layer",
type: "raster",
source: sourceId,
paint: {
"raster-resampling": "nearest"
}
})
const setStyle = (date, variant, colormap) => {
if (date === curDate && variant === curVariant && colormap === curColormap || !data[date]?.[variant]?.includes(colormap))
return
map.getSource(sourceId)?.setTiles([getTilesUrl(date, variant, colormap)])
curDate = date
curVariant = variant
curColormap = colormap
}
const dateList = document.createElement("ul")
for (const [date, variantData] of Object.entries(data).sort(([a], [b]) => a.localeCompare(b))) {
const isCurDate = date === curDate
const dateInput = document.createElement("input")
dateInput.type = "radio"
dateInput.name = "date"
dateInput.value = date
dateInput.checked = isCurDate
const dateLabel = document.createElement("label")
dateLabel.appendChild(dateInput)
dateLabel.appendChild(document.createTextNode(date))
const dateItem = document.createElement("li")
dateItem.appendChild(dateLabel)
const variantList = document.createElement("ul")
if (!isCurDate) variantList.className = "hidden"
for (const [variant, colormaps] of Object.entries(variantData).sort(([a], [b]) => a.localeCompare(b))) {
const isCurVariant = variant === curVariant
const variantInput = document.createElement("input")
variantInput.type = "radio"
variantInput.name = `${date}-variant`
variantInput.value = variant
variantInput.checked = isCurVariant
const variantLabel = document.createElement("label")
variantLabel.appendChild(variantInput)
variantLabel.appendChild(document.createTextNode(variant))
const variantItem = document.createElement("li")
variantItem.appendChild(variantLabel)
const colormapList = document.createElement("ul")
if (!isCurVariant) colormapList.classList.add("hidden")
for (const colormap of colormaps.sort((a, b) => a.localeCompare(b))) {
const isCurColormap = colormap === curColormap
const colormapInput = document.createElement("input")
colormapInput.type = "radio"
colormapInput.name = `${date}-${variant}-colormap`
colormapInput.value = colormap
colormapInput.checked = isCurColormap
const colormapLabel = document.createElement("label")
colormapLabel.appendChild(colormapInput)
colormapLabel.appendChild(document.createTextNode(colormap))
const colormapItem = document.createElement("li")
colormapItem.appendChild(colormapLabel)
colormapItem.addEventListener("click", e => {
;[...dateList.children].forEach(el =>
[...el.lastChild.children].forEach(el =>
[...el.lastChild.children].forEach(el => {
const cb = el.firstChild.firstChild
cb.checked = cb.value === colormap
})))
setStyle(curDate, curVariant, colormap)
})
colormapList.appendChild(colormapItem)
}
variantInput.addEventListener("click", e => {
;[...dateList.children].forEach(el =>
[...el.lastChild.children].forEach(el => {
const cb = el.firstChild.firstChild
const isCur = cb.value === variant
el.lastChild.className = isCur ? "" : "hidden"
cb.checked = isCur
}))
setStyle(curDate, variant, curColormap)
})
variantItem.appendChild(colormapList)
variantList.appendChild(variantItem)
}
dateInput.addEventListener("click", e => {
;[...dateList.children].forEach(el => {
const cb = el.firstChild.firstChild
const isCur = cb.value === date
el.lastChild.className = isCur ? "" : "hidden"
})
setStyle(date, curVariant, curColormap)
})
dateItem.appendChild(variantList)
dateList.appendChild(dateItem)
}
styleControlsDiv.replaceChildren(dateList)
})
map.addControl(new maplibregl.NavigationControl({ showCompass: false }), "top-left")
const toIp = v => `${v >> 24 & 0xFF}.${v >> 16 & 0xFF}.${v >> 8 & 0xFF}.${v >> 0 & 0xFF}`
map.on("click", (e) => {
const { x, y } = maplibregl.MercatorCoordinate.fromLngLat(e.lngLat, 0)
const rawIp = hilbert_c2i({ x: Math.floor(0x10000 * x), y: Math.floor(0x10000 * y) })
const rawIp = coordsToHilbert({ x: Math.floor(0x10000 * x), y: Math.floor(0x10000 * y) })
const subnet = Math.min(32, Math.round(map.getZoom()) * 2 + 18)
const text = subnet < 32 ?
`Range: ${toIp((rawIp >> (32 - subnet)) << (32 - subnet))}/${subnet}` :
@@ -137,7 +297,6 @@
.setLngLat(e.lngLat)
.addTo(map)
})
const setTileUrl = (date, variant, colormap) => map.getSource("ipmap-tiles").setTiles([`tiles/${dateDir(date)}/${variant}/${colormap}/{z}/{y}/{x}.png`])
</script>
</body>
</html>
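For reference, the tile URLs requested above and the paths written by ipmap.py below imply an on-disk layout roughly like the following (names hypothetical; the zoom depth depends on the chosen tile size):

    tiles/
      tiles.json
      2024-03-30/
        density/
          jet/
            0/0/0.png
            1/0/0.png ... 1/1/1.png
          viridis/
            ...
        rtt/
          viridis/
            ...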

ipmap.py

@@ -4,6 +4,7 @@ import sys
import math
import functools
import argparse
import json
from pathlib import Path
import png
import hilbert
@@ -14,9 +15,18 @@ from multiprocessing import Pool
from dataclasses import dataclass
from typing import Literal
def convert_to_parquet(csv_path: Path, parquet_path: Path, quiet=False):
def dedup_preserving_order(vals: list) -> list:
seen = set()
result = []
for item in vals:
if item not in seen:
seen.add(item)
result.append(item)
return result
def convert_to_parquet(csv_path: Path, parquet_path: Path, quiet = False):
if not quiet:
print(f"scanning csv '{csv_path}' into parquet '{parquet_path}'...", end=" ", flush=True)
print(f"scanning csv '{csv_path}' into parquet '{parquet_path}'...", end = " ", flush = True)
lf = pl.scan_csv(csv_path, schema={
"saddr": pl.String,
"rtt_us": pl.UInt64,
@@ -28,7 +38,7 @@ def convert_to_parquet(csv_path: Path, parquet_path: Path, quiet=False):
lf = lf.with_columns(saddr = pl.col("saddr").str.split_exact(".", 3).struct.rename_fields(["a", "b", "c", "d"]))
lf = lf.with_columns(saddr = pl.col("saddr").struct.field("a").cast(pl.UInt32) * 0x1000000 + pl.col("saddr").struct.field("b").cast(pl.UInt32) * 0x10000 + pl.col("saddr").struct.field("c").cast(pl.UInt32) * 0x100 + pl.col("saddr").struct.field("d").cast(pl.UInt32))
lf = lf.unique("saddr")
lf = lf.with_columns(coords = pl.col("saddr").map_batches(functools.partial(hilbert.decode, num_dims=2, num_bits=16), pl.Array(pl.UInt16, 2), is_elementwise = True))
lf = lf.with_columns(coords = pl.col("saddr").map_batches(functools.partial(hilbert.decode, num_dims = 2, num_bits = 16), pl.Array(pl.UInt16, 2), is_elementwise = True))
lf = lf.with_columns(x = pl.col("coords").arr.get(0), y = pl.col("coords").arr.get(1))
lf = lf.drop("coords")
lf.sink_parquet(parquet_path)
@@ -36,18 +46,18 @@ def convert_to_parquet(csv_path: Path, parquet_path: Path, quiet=False):
print("done")
def write_tile(path: Path, rows: np.ndarray):
path.parent.mkdir(exist_ok=True, parents=True)
png.Writer(rows.shape[0], rows.shape[1], greyscale=False, alpha=False).write_packed(path.open("wb"), rows)
path.parent.mkdir(exist_ok = True, parents = True)
png.Writer(rows.shape[0], rows.shape[1], greyscale = False, alpha = False).write_packed(path.open("wb"), rows)
default_tile_size = 256
default_colormaps = ["viridis"]
default_variants = ["density", "rtt"]
default_processes = 16
def generate_tiles(parquet_path: Path, tiles_dir: Path, tile_size=default_tile_size,
def generate_tiles(parquet_path: Path, tiles_dir: Path, tile_size = default_tile_size,
variants: list[str] = default_variants, colormaps: list[str] = default_colormaps,
processes=default_processes, num_rows: int | None = None,
skip_iters: int | None = None, quiet=False):
processes = default_processes, num_rows: int | None = None,
skip_iters: int | None = None, json_path: Path | None = None, quiet = False):
if tile_size < 1 or tile_size > 0x10000 or tile_size & (tile_size - 1) != 0:
raise ValueError(f"tile size must be a power of 2 between 1 and {0x10000}")
@@ -56,6 +66,7 @@ def generate_tiles(parquet_path: Path, tiles_dir: Path, tile_size=default_tile_s
if len(colormaps) == 0:
raise ValueError("must specify at least one colormap")
colormaps = dedup_preserving_order(colormaps)
colormaps_by_name = { colormap: [bytes(c) for c in (Colormap(colormap).lut()[:,0:3] * (256.0 - np.finfo(np.float32).eps)).astype(np.uint8)] for colormap in colormaps }
generate_density = False
@@ -68,8 +79,35 @@ def generate_tiles(parquet_path: Path, tiles_dir: Path, tile_size=default_tile_s
else:
raise ValueError(f"unknown variant '{variant}'")
if json_path:
if json_path.is_dir():
raise ValueError("json path must not be a directory")
try:
tiles_dir_parts = tiles_dir.relative_to(json_path.parent).parts
except ValueError:
raise ValueError("tiles path must be relative to the json path")
try:
tile_metadata = json.loads(json_path.read_text(encoding = "UTF-8"))
except:
if not quiet:
print(f"reading parquet '{parquet_path}'...", end=" ", flush=True)
print("json file not found at provided path, so it will be created instead")
tile_metadata = {}
tile_metadata_cur = tile_metadata
for part in tiles_dir_parts:
if not part in tile_metadata_cur:
tile_metadata_cur[part] = {}
tile_metadata_cur = tile_metadata_cur[part]
for variant in variants:
if not variant in tile_metadata_cur:
tile_metadata_cur[variant] = colormaps
else:
tile_metadata_cur[variant] = dedup_preserving_order(tile_metadata_cur[variant] + colormaps)
json_path.write_text(json.dumps(tile_metadata, indent=2), encoding = "UTF-8")
if not quiet:
print(f"wrote metadata to json file at '{json_path}'")
if not quiet:
print(f"reading parquet '{parquet_path}'...", end = " ", flush = True)
df = pl.read_parquet(parquet_path, columns = ["x", "y", "rtt_us"], n_rows=num_rows).with_columns(count = pl.lit(1, pl.UInt32))
if not quiet:
print("done")
@@ -81,18 +119,18 @@ def generate_tiles(parquet_path: Path, tiles_dir: Path, tile_size=default_tile_s
nonlocal df
if not quiet:
print(f"creating {type_name} image data with {colormap} colormap...", end=" ", flush=True)
print(f"creating {type_name} image data with {colormap} colormap...", end = " ", flush = True)
image_data = np.zeros((tiles_per_side * tile_size, tiles_per_side * tile_size), dtype = "S3")
image_data[(df.get_column("y"), df.get_column("x"))] = (255 * df.get_column(col_name) // divisor).clip(0, 255).cast(pl.UInt8).replace(pl.int_range(256), colormaps_by_name[colormap], return_dtype=pl.Binary)
image_data[(df.get_column("y"), df.get_column("x"))] = (255 * df.get_column(col_name) // divisor).clip(0, 255).cast(pl.UInt8).replace(pl.int_range(256), colormaps_by_name[colormap], return_dtype = pl.Binary)
if not quiet:
print("done")
if not quiet:
print(f"writing {tiles_per_side}x{tiles_per_side}={tiles_per_side * tiles_per_side} {type_name} images with {colormap} colormap...", end=" ", flush=True)
print(f"writing {tiles_per_side}x{tiles_per_side}={tiles_per_side * tiles_per_side} {type_name} images with {colormap} colormap...", end = " ", flush = True)
with Pool(processes) as pool:
z = tiles_per_side.bit_length() - 1
z_path = tiles_dir / type_name / colormap / f"{z}"
z_path.mkdir(exist_ok=True, parents=True)
z_path.mkdir(exist_ok = True, parents = True)
pool.starmap(write_tile, [
(z_path / f"{y}" / f"{x}.png", image_data[
y * tile_size : y * tile_size + tile_size,
@@ -115,8 +153,8 @@ def generate_tiles(parquet_path: Path, tiles_dir: Path, tile_size=default_tile_s
possible_overlaps *= scale * scale
if not quiet:
print(f"scaling {len(df)} coords down from {prev_tiles_per_side}x{prev_tiles_per_side} tiles to {tiles_per_side}x{tiles_per_side} tiles...", end=" ", flush=True)
df = df.with_columns(x=pl.col("x") // scale, y=pl.col("y") // scale).group_by(["x", "y"]).agg(count=pl.sum("count"), rtt_us=pl.mean("rtt_us"))
print(f"scaling {len(df)} coords down from {prev_tiles_per_side}x{prev_tiles_per_side} tiles to {tiles_per_side}x{tiles_per_side} tiles...", end = " ", flush = True)
df = df.with_columns(x = pl.col("x") // scale, y = pl.col("y") // scale).group_by(["x", "y"]).agg(count = pl.sum("count"), rtt_us = pl.mean("rtt_us"))
if not quiet:
print(f"done with {len(df)} coords remaining")
@@ -133,7 +171,7 @@ def generate_tiles(parquet_path: Path, tiles_dir: Path, tile_size=default_tile_s
if generate_density:
generate_images(colormap, "density", "count", 256 if possible_overlaps == 1 else possible_overlaps)
if generate_rtt:
generate_images(colormap, "rtt", "rtt_us", int(df.get_column("rtt_us").std()))
generate_images(colormap, "rtt", "rtt_us", int(df.get_column("rtt_us").std() / (2.0 * tiles_per_side.bit_length() ** 0.5)))
if tiles_per_side == 1:
break
scale_down_coords()
@@ -150,40 +188,43 @@ class IpMapArgs:
processes: int
num_rows: int | None
skip_iters: int | None
json: str | None
def parse_list_arg(arg: str):
return [x.strip().lower() for x in arg.split(",") if x.strip()]
def main():
parser = argparse.ArgumentParser("ipmap")
parser.add_argument("-q", "--quiet", action="store_true", help="decrease output verbosity")
subparsers = parser.add_subparsers(dest="command", required=True, help="the command to run")
convert_parser = subparsers.add_parser("convert", help="convert scan data from csv to parquet format")
convert_parser.add_argument("input", help="the input path of the csv file to read the scan data from")
convert_parser.add_argument("output", help="the output path of the parquet file to save the converted scan data to")
generate_parser = subparsers.add_parser("generate", help="generate tile images from scan data in parquet format")
generate_parser.add_argument("-t", "--tile-size", default=default_tile_size, type=int, help="the tile size to use (default: %(default)s)")
generate_parser.add_argument("-v", "--variants", default=",".join(default_variants), help="a comma separated list of variants to generate (default: %(default)s)")
generate_parser.add_argument("-c", "--colormaps", default=",".join(default_colormaps), help="a comma separated list of colormaps to generate (default: %(default)s)")
generate_parser.add_argument("-p", "--processes", default=default_processes, type=int, help="how many processes to spawn for saving images (default: %(default)s)")
generate_parser.add_argument("-n", "--num-rows", type=int, help="how many rows to read from the scan data (default: all)")
generate_parser.add_argument("-s", "--skip-iters", type=int, help="how many iterations to skip generating images for (default: none)")
generate_parser.add_argument("input", help="the input path of the parquet file to read the scan data from")
generate_parser.add_argument("output", help="the output path to save the generated tile images to")
args = parser.parse_args(namespace=IpMapArgs)
parser.add_argument("-q", "--quiet", action = "store_true", help = "decrease output verbosity")
subparsers = parser.add_subparsers(dest = "command", required = True, help = "the command to run")
convert_parser = subparsers.add_parser("convert", help = "convert scan data from csv to parquet format")
convert_parser.add_argument("input", help = "the input path of the csv file to read the scan data from")
convert_parser.add_argument("output", help = "the output path of the parquet file to save the converted scan data to")
generate_parser = subparsers.add_parser("generate", help = "generate tile images from scan data in parquet format")
generate_parser.add_argument("-t", "--tile-size", default = default_tile_size, type = int, help = "the tile size to use (default: %(default)s)")
generate_parser.add_argument("-v", "--variants", default = ",".join(default_variants), help = "a comma separated list of variants to generate (default: %(default)s)")
generate_parser.add_argument("-c", "--colormaps", default = ",".join(default_colormaps), help = "a comma separated list of colormaps to generate (default: %(default)s)")
generate_parser.add_argument("-p", "--processes", default = default_processes, type = int, help = "how many processes to spawn for saving images (default: %(default)s)")
generate_parser.add_argument("-n", "--num-rows", type = int, help = "how many rows to read from the scan data (default: all)")
generate_parser.add_argument("-s", "--skip-iters", type = int, help = "how many iterations to skip generating images for (default: none)")
generate_parser.add_argument("-j", "--json", help = "the path for the json file to store metadata about the tile images (default: none)")
generate_parser.add_argument("input", help = "the input path of the parquet file to read the scan data from")
generate_parser.add_argument("output", help = "the output path to save the generated tile images to")
args = parser.parse_args(namespace = IpMapArgs)
try:
if args.command == "convert":
convert_to_parquet(csv_path=Path(args.input), parquet_path=Path(args.output), quiet=args.quiet)
convert_to_parquet(csv_path = Path(args.input), parquet_path = Path(args.output), quiet = args.quiet)
elif args.command == "generate":
generate_tiles(parquet_path=Path(args.input), tiles_dir=Path(args.output),
tile_size=args.tile_size, variants=parse_list_arg(args.variants),
colormaps=parse_list_arg(args.colormaps), processes=args.processes,
num_rows=args.num_rows, skip_iters=args.skip_iters, quiet=args.quiet)
generate_tiles(parquet_path = Path(args.input), tiles_dir = Path(args.output),
tile_size = args.tile_size, variants = parse_list_arg(args.variants),
colormaps = parse_list_arg(args.colormaps), processes = args.processes,
num_rows = args.num_rows, skip_iters = args.skip_iters,
json_path = Path(args.json) if args.json else None, quiet = args.quiet)
else:
raise ValueError("invalid command")
except ValueError as e:
print(f"error: {e}")
print(f"error: {e}", file = sys.stderr)
sys.exit(1)
if __name__ == "__main__":
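A hypothetical end-to-end invocation matching the layout sketched above (paths are illustrative; it assumes the per-date output directory sits next to the tiles.json it is registered in, so that the relative-path check in generate_tiles passes):

    python ipmap.py convert scan.csv scan.parquet
    python ipmap.py generate --variants density,rtt --colormaps viridis,jet --json tiles/tiles.json scan.parquet tiles/2024-03-30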