Add alpha transparency option

parent 4997a7716c
commit 008aa845a2

 index.html | 94

@@ -2,7 +2,7 @@
 <html lang="en">
 <head>
 <meta charset="UTF-8">
-<meta name="viewport" content="width=device-width, initial-scale=1.0">
+<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1, user-scalable=no">
 <meta name="color-scheme" content="dark light">
 <title>IP Map</title>
 <script src="https://unpkg.com/maplibre-gl@4.1.2/dist/maplibre-gl.js"></script>
@@ -12,46 +12,72 @@
 body {
 margin: 0;
 padding: 0;
+background-color: #111;
 }
 html, body {
 width: 100%;
 height: 100%;
 }
 main {
-width: 100vh;
 height: 100%;
-margin: 0 auto;
+display: flex;
+align-items: center;
+justify-content: center;
+font-family: sans-serif;
 }
 #map {
-width: 100%;
-height: 100%;
+width: min(100vw, 100vh);
+height: min(100vw, 100vh);
 }
 .maplibregl-canvas {
 cursor: pointer;
 }
+.maplibregl-popup {
+max-width: unset !important;
+}
 .maplibregl-popup-content {
-background-color: #222;
+background-color: #333;
 font-size: 1rem;
-padding: 0.8rem 1.2rem;
+padding: 0.6rem 2.8rem 0.6rem 1rem;
+border-radius: 0.5rem;
+}
+.maplibregl-popup-close-button {
+height: 100%;
+aspect-ratio: 1 / 1;
+margin: 0;
+padding: 0;
+padding-bottom: 0.25rem;
+font-size: 1.5rem;
 }
 .map-overlay {
 position: absolute;
 top: 1rem;
-right: 1rem;
-padding: 1.5rem;
-background-color: #222;
+left: 1rem;
+padding: 0.6rem;
+background-color: #333;
 color: #eee;
 box-shadow: 3px 3px 2px rgba(0, 0, 0, 0.8);
-border-radius: 3px;
+border-radius: 0.5rem;
+max-height: calc(100% - 2rem);
+box-sizing: border-box;
+overflow-y: scroll;
+}
+.map-overlay summary {
+margin: 0 0.4rem;
 }
 .map-overlay h2 {
-display: block;
-margin: 0;
-margin-bottom: 1rem;
+display: inline-block;
+margin: 0 0 0 0.4rem;
+user-select: none;
+vertical-align: middle;
+}
+#map-style-controls {
+margin-top: 0.6rem;
 }
 #map-style-controls ul {
 padding-left: 1em;
 list-style-type: none;
+margin: 0;
 }
 #map-style-controls > ul {
 padding-left: 0;
@@ -61,8 +87,8 @@
 }
 #map-style-controls label {
 display: block;
-padding: 0.5rem;
-font-weight: bold;
+padding: 0.2rem 1rem 0.2rem 0.4rem;
+user-select: none;
 }
 #map-style-controls input[type=radio] {
 padding: 0;
@@ -75,10 +101,10 @@
 <body>
 <main>
 <div id="map"></div>
-<div class="map-overlay">
-<h2>Map Style</h2>
+<details class="map-overlay">
+<summary><h2>Style</h2></summary>
 <div id="map-style-controls"><p>Loading available styles...</p></div>
-</div>
+</details>
 </main>
 <script>
 const coordsToHilbert = ({ x, y }) => {
@@ -132,35 +158,31 @@
 return coord
 }
 
-const tilesDir = "tiles"
-const sourceId = "ipmap-tiles-source"
-const styleControlsDiv = document.getElementById("map-style-controls")
-
-const dateDir = (date = new Date()) => `${date.getFullYear()}-${(date.getMonth() + 1).toString().padStart(2, "0")}-${date.getDate().toString().padStart(2, "0")}`
-const getId = (date, variant, colormap) => `${date.replaceAll("-", "")}-${variant}-${colormap}`
-const getTilesUrl = (date, variant, colormap) => `${tilesDir}/${date}/${variant}/${colormap}/{z}/{y}/{x}.png`
-const getSourceId = (date, variant, colormap) => `ipmap-tiles-source-${getId(date, variant, colormap)}`
-const getLayerId = (date, variant, colormap) => `ipmap-tiles-layer-${getId(date, variant, colormap)}`
-
 const map = new maplibregl.Map({
 container: "map",
 attributionControl: false,
 renderWorldCopies: false,
 doubleClickZoom: false,
 dragRotate: false,
+pitchWithRotate: false,
+touchPitch: false,
 style: {
 version: 8,
 sources: {},
 layers: []
 },
 center: [0, 0],
-minZoom: -1,
+minZoom: -2,
 maxZoom: 12,
 zoom: 0
 })
-map.painter.context.extTextureFilterAnisotropic = undefined
 
-const dataP = fetch(`${tilesDir}/tiles.json`).then(res => res.json())
+map.painter.context.extTextureFilterAnisotropic = undefined
+map.touchZoomRotate.disableRotation()
+
+const tilesDir = "tiles"
+const sourceId = "ipmap-tiles-source"
+const dataP = fetch(`${tilesDir}/tiles.json`, { cache: "no-store" }).then(res => res.json())
 
 map.once("style.load", async () => {
 const data = await dataP
@@ -181,7 +203,7 @@
 
 map.addSource(sourceId, {
 type: "raster",
-tiles: [getTilesUrl(curDate, curVariant, curColormap)],
+tiles: [`${tilesDir}/${curDate}/${curVariant}/${curColormap}/{z}/{y}/{x}.png`],
 tileSize: 256,
 minzoom: 0,
 maxzoom: 8,
@@ -198,7 +220,7 @@
 const setStyle = (date, variant, colormap) => {
 if (date === curDate && variant === curVariant && colormap === curColormap || !data[date]?.[variant]?.includes(colormap))
 return
-map.getSource(sourceId)?.setTiles([getTilesUrl(date, variant, colormap)])
+map.getSource(sourceId)?.setTiles([`${tilesDir}/${date}/${variant}/${colormap}/{z}/{y}/{x}.png`])
 curDate = date
 curVariant = variant
 curColormap = colormap
@@ -280,10 +302,10 @@
 dateItem.appendChild(variantList)
 dateList.appendChild(dateItem)
 }
-styleControlsDiv.replaceChildren(dateList)
+document.getElementById("map-style-controls").replaceChildren(dateList)
 })
 
-map.addControl(new maplibregl.NavigationControl({ showCompass: false }), "top-left")
+map.addControl(new maplibregl.NavigationControl({ showCompass: false }), "top-right")
 const toIp = v => `${v >> 24 & 0xFF}.${v >> 16 & 0xFF}.${v >> 8 & 0xFF}.${v >> 0 & 0xFF}`
 map.on("click", (e) => {
 const { x, y } = maplibregl.MercatorCoordinate.fromLngLat(e.lngLat, 0)

 ipmap.py | 95

@@ -5,6 +5,7 @@ import math
 import functools
 import argparse
 import json
+import shutil
 from pathlib import Path
 import png
 import hilbert
@@ -24,7 +25,7 @@ def dedup_preserving_order(vals: list) -> list:
             result.append(item)
     return result
 
-def convert_to_parquet(csv_path: Path, parquet_path: Path, quiet = False):
+def convert_to_parquet(csv_path: Path, parquet_path: Path, *, quiet = False):
     if not quiet:
         print(f"scanning csv '{csv_path}' into parquet '{parquet_path}'...", end = " ", flush = True)
     lf = pl.scan_csv(csv_path, schema={
@@ -45,16 +46,16 @@ def convert_to_parquet(csv_path: Path, parquet_path: Path, quiet = False):
     if not quiet:
         print("done")
 
-def write_tile(path: Path, rows: np.ndarray):
+def write_tile(path: Path, rows: np.ndarray, *, alpha = False):
     path.parent.mkdir(exist_ok = True, parents = True)
-    png.Writer(rows.shape[0], rows.shape[1], greyscale = False, alpha = False).write_packed(path.open("wb"), rows)
+    png.Writer(rows.shape[1], rows.shape[0], greyscale = False, alpha = alpha).write_packed(path.open("wb"), rows)
 
 default_tile_size = 256
 default_colormaps = ["viridis"]
 default_variants = ["density", "rtt"]
 default_processes = 16
 
-def generate_tiles(parquet_path: Path, tiles_dir: Path, tile_size = default_tile_size,
+def generate_tiles(parquet_path: Path, tiles_dir: Path, *, tile_size = default_tile_size, alpha = False,
                    variants: list[str] = default_variants, colormaps: list[str] = default_colormaps,
                    processes = default_processes, num_rows: int | None = None,
                    skip_iters: int | None = None, json_path: Path | None = None, quiet = False):
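Note: besides threading the new keyword through, this hunk also swaps the width/height arguments of png.Writer (width is rows.shape[1], height is rows.shape[0]). A minimal sketch of what the alpha flag means for the tile arrays, mirroring how generate_tiles feeds write_tile; the 2x2 array, colour values, and output path are made up for illustration, and the import assumes ipmap.py is importable on the current path.

    import numpy as np
    from pathlib import Path
    from ipmap import write_tile   # assumption: ipmap.py is on the import path

    # Each cell holds one pixel's packed bytes: 4 (RGBA) with alpha = True, 3 (RGB) otherwise.
    rows = np.empty((2, 2), dtype = "S4")
    rows[0, 0] = bytes([68, 1, 84, 255])      # opaque
    rows[0, 1] = bytes([59, 82, 139, 255])    # opaque
    rows[1, 0] = bytes([33, 145, 140, 255])   # opaque
    rows[1, 1] = bytes([253, 231, 37, 128])   # half-transparent

    # Writes a 2x2 RGBA PNG via the same png.Writer(...).write_packed(...) call as above.
    write_tile(Path("example/0/0.png"), rows, alpha = True)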
@@ -67,7 +68,8 @@ def generate_tiles(parquet_path: Path, tiles_dir: Path, tile_size = default_tile
         raise ValueError("must specify at least one colormap")
 
     colormaps = dedup_preserving_order(colormaps)
-    colormaps_by_name = { colormap: [bytes(c) for c in (Colormap(colormap).lut()[:,0:3] * (256.0 - np.finfo(np.float32).eps)).astype(np.uint8)] for colormap in colormaps }
+    channels = 4 if alpha else 3
+    colormaps_by_name = { colormap: [bytes(c) for c in (Colormap(colormap).lut()[:,0:channels] * (256.0 - np.finfo(np.float32).eps)).astype(np.uint8)] for colormap in colormaps }
 
     generate_density = False
     generate_rtt = False
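Note: the channels switch decides how many columns of the colormap lookup table are kept. Mapped pixels get an explicit alpha byte (opaque for most colormaps), while untouched background cells of the image array stay zero and now decode as fully transparent instead of opaque black. A rough sketch of the resulting lookup table, assuming Colormap comes from the cmap package (the import is outside this diff) and that lut() returns 256 rows of float RGBA in [0, 1], as the surrounding code implies:

    import numpy as np
    from cmap import Colormap   # assumed source of the Colormap class used above

    alpha = True
    channels = 4 if alpha else 3

    lut = Colormap("viridis").lut()                      # rows of float RGBA values in [0, 1]
    scaled = (lut[:, 0:channels] * (256.0 - np.finfo(np.float32).eps)).astype(np.uint8)
    table = [bytes(c) for c in scaled]                   # one packed 3- or 4-byte colour per index

    print(len(table[0]))                                 # 4 when alpha is enabled, else 3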
@@ -87,11 +89,16 @@ def generate_tiles(parquet_path: Path, tiles_dir: Path, tile_size = default_tile
         except ValueError:
             raise ValueError("tiles path must be relative to the json path")
         try:
-            tile_metadata = json.loads(json_path.read_text(encoding = "UTF-8"))
+            text = json_path.read_text(encoding = "UTF-8")
         except:
             if not quiet:
                 print("json file not found at provided path, so it will be created instead")
             tile_metadata = {}
+        else:
+            try:
+                tile_metadata: dict = json.loads(text)
+            except:
+                raise ValueError("invalid json found at provided path")
         tile_metadata_cur = tile_metadata
         for part in tiles_dir_parts:
             if not part in tile_metadata_cur:
@@ -102,9 +109,11 @@ def generate_tiles(parquet_path: Path, tiles_dir: Path, tile_size = default_tile
                 tile_metadata_cur[variant] = colormaps
             else:
                 tile_metadata_cur[variant] = dedup_preserving_order(tile_metadata_cur[variant] + colormaps)
+        if not quiet:
+            print(f"writing metadata to json file at '{json_path}'...", end = " ", flush = True)
         json_path.write_text(json.dumps(tile_metadata, indent=2), encoding = "UTF-8")
         if not quiet:
-            print(f"wrote metadata to json file at '{json_path}'")
+            print("done")
 
     if not quiet:
         print(f"reading parquet '{parquet_path}'...", end = " ", flush = True)
@@ -115,13 +124,15 @@ def generate_tiles(parquet_path: Path, tiles_dir: Path, tile_size = default_tile
     tiles_per_side = int(math.sqrt(0x100000000)) // tile_size
     possible_overlaps = 1
 
-    def generate_images(colormap: str, type_name: str, col_name: str, divisor: int):
+    write_tile_p = functools.partial(write_tile, alpha = alpha)
+
+    def generate_images(colormap: str, type_name: str, col_name: str, divisor: int | float):
         nonlocal df
 
         if not quiet:
             print(f"creating {type_name} image data with {colormap} colormap...", end = " ", flush = True)
-        image_data = np.zeros((tiles_per_side * tile_size, tiles_per_side * tile_size), dtype = "S3")
-        image_data[(df.get_column("y"), df.get_column("x"))] = (255 * df.get_column(col_name) // divisor).clip(0, 255).cast(pl.UInt8).replace(pl.int_range(256), colormaps_by_name[colormap], return_dtype = pl.Binary)
+        image_data = np.zeros((tiles_per_side * tile_size, tiles_per_side * tile_size), dtype = f"S{channels}")
+        image_data[(df.get_column("y"), df.get_column("x"))] = (df.get_column(col_name) / divisor * 255.9999).clip(0, 255).cast(pl.UInt8).replace(pl.int_range(256), colormaps_by_name[colormap], return_dtype = pl.Binary)
         if not quiet:
             print("done")
 
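Note: the colour-index arithmetic moves from integer to float so that a float divisor (such as the rtt standard deviation used further down) works directly; the 255.9999 factor maps the full [0, divisor] range onto indices 0..255. A small standalone comparison, with a toy series standing in for df.get_column(col_name) and a made-up divisor playing the role of possible_overlaps:

    import polars as pl

    values = pl.Series("count", [0, 1, 2, 3, 4])   # toy data
    divisor = 4                                    # stand-in for possible_overlaps

    # old mapping: integer arithmetic, so the divisor had to be an int
    old_index = (255 * values // divisor).clip(0, 255).cast(pl.UInt8)

    # new mapping: plain float scaling, so float divisors work too
    new_index = (values / divisor * 255.9999).clip(0, 255).cast(pl.UInt8)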
@@ -131,7 +142,7 @@ def generate_tiles(parquet_path: Path, tiles_dir: Path, tile_size = default_tile
         z = tiles_per_side.bit_length() - 1
         z_path = tiles_dir / type_name / colormap / f"{z}"
         z_path.mkdir(exist_ok = True, parents = True)
-        pool.starmap(write_tile, [
+        pool.starmap(write_tile_p, [
             (z_path / f"{y}" / f"{x}.png", image_data[
                 y * tile_size : y * tile_size + tile_size,
                 x * tile_size : x * tile_size + tile_size,
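Note: pool.starmap only supplies positional tuples, so the alpha keyword is bound once via functools.partial rather than being added to every task tuple. A self-contained toy illustrating the binding; the stand-in function below only mimics the signature of write_tile and is not part of the project:

    import functools

    def fake_write_tile(path, rows, *, alpha = False):    # hypothetical stand-in for write_tile
        return (path, alpha)

    write_tile_p = functools.partial(fake_write_tile, alpha = True)

    # starmap-style invocation: positional arguments only, alpha arrives pre-bound.
    tasks = [("8/0/0.png", "tile-a"), ("8/0/1.png", "tile-b")]
    print([write_tile_p(*task) for task in tasks])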
@@ -169,20 +180,62 @@ def generate_tiles(parquet_path: Path, tiles_dir: Path, tile_size = default_tile
     while True:
         for colormap in colormaps:
             if generate_density:
-                generate_images(colormap, "density", "count", 256 if possible_overlaps == 1 else possible_overlaps)
+                generate_images(colormap, "density", "count", possible_overlaps)
             if generate_rtt:
-                generate_images(colormap, "rtt", "rtt_us", int(df.get_column("rtt_us").std() / (2.0 * tiles_per_side.bit_length() ** 0.5)))
+                generate_images(colormap, "rtt", "rtt_us", df.get_column("rtt_us").std() / tiles_per_side.bit_length())
         if tiles_per_side == 1:
             break
         scale_down_coords()
 
+def remove_tiles(tiles_dir: Path, *, json_path: Path | None = None, quiet = False):
+    if not tiles_dir.is_dir():
+        raise ValueError(f"'{tiles_dir}' is not an existing directory")
+
+    if json_path:
+        if json_path.is_dir():
+            raise ValueError("json path must not be a directory")
+        try:
+            *tiles_dir_parts, tiles_dir_final = tiles_dir.relative_to(json_path.parent).parts
+        except ValueError:
+            raise ValueError("tiles path must be relative to but not containing the json path")
+        try:
+            text = json_path.read_text(encoding = "UTF-8")
+        except:
+            raise ValueError("json file not found at provided path")
+        try:
+            tile_metadata = json.loads(text)
+        except:
+            raise ValueError("invalid json found at provided path")
+        tile_metadata_cur = tile_metadata
+        try:
+            for part in tiles_dir_parts:
+                tile_metadata_cur = tile_metadata_cur[part]
+            if isinstance(tile_metadata_cur, list):
+                tile_metadata_cur = tile_metadata_cur.remove(tiles_dir_final)
+            else:
+                del tile_metadata_cur[tiles_dir_final]
+        except:
+            raise ValueError(f"unable to find path '{'/'.join([*tiles_dir_parts, tiles_dir_final])}' within json file")
+        if not quiet:
+            print(f"writing metadata to json file at '{json_path}'...", end = " ", flush = True)
+        json_path.write_text(json.dumps(tile_metadata, indent=2), encoding = "UTF-8")
+        if not quiet:
+            print("done")
+
+    if not quiet:
+        print(f"removing files from '{tiles_dir}'...", end = " ", flush = True)
+    shutil.rmtree(tiles_dir)
+    if not quiet:
+        print("done")
+
 @dataclass
 class IpMapArgs:
-    command: Literal["convert", "generate"]
+    command: Literal["convert", "generate", "remove"]
     quiet: bool
     input: str
     output: str
     tile_size: int
+    alpha: bool
     colormaps: str
     variants: str
     processes: int
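Note: remove_tiles prunes the metadata entry for the removed tile directory before deleting the files. A hypothetical tiles.json and the pruning it would undergo; the layout (nested path parts, then variant keys mapping to lists of colormaps) is inferred from how generate_tiles builds the file above, and the date is made up:

    import json

    # Hypothetical metadata written by two earlier `generate` runs.
    tile_metadata = {
        "2024-05-01": {
            "density": ["viridis", "inferno"],
            "rtt": ["viridis"],
        }
    }

    # remove_tiles(Path("tiles/2024-05-01/density"), json_path = Path("tiles/tiles.json"))
    # walks the parts ["2024-05-01"], finds a dict, and deletes the final key:
    del tile_metadata["2024-05-01"]["density"]

    # For a colormap-level path such as tiles/2024-05-01/rtt/viridis the final container
    # is a list, so the entry would be dropped with .remove("viridis") instead.
    print(json.dumps(tile_metadata, indent = 2))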
@@ -202,6 +255,7 @@ def main():
     convert_parser.add_argument("output", help = "the output path of the parquet file to save the converted scan data to")
     generate_parser = subparsers.add_parser("generate", help = "generate tile images from scan data in parquet format")
     generate_parser.add_argument("-t", "--tile-size", default = default_tile_size, type = int, help = "the tile size to use (default: %(default)s)")
+    generate_parser.add_argument("-a", "--alpha", action = "store_true", help = "use alpha channel instead of black")
     generate_parser.add_argument("-v", "--variants", default = ",".join(default_variants), help = "a comma separated list of variants to generate (default: %(default)s)")
     generate_parser.add_argument("-c", "--colormaps", default = ",".join(default_colormaps), help = "a comma separated list of colormaps to generate (default: %(default)s)")
     generate_parser.add_argument("-p", "--processes", default = default_processes, type = int, help = "how many processes to spawn for saving images (default: %(default)s)")
@@ -210,6 +264,9 @@ def main():
     generate_parser.add_argument("-j", "--json", help = "the path for the json file to store metadata about the tile images (default: none)")
     generate_parser.add_argument("input", help = "the input path of the parquet file to read the scan data from")
    generate_parser.add_argument("output", help = "the output path to save the generated tile images to")
+    remove_parser = subparsers.add_parser("remove", help = "remove tile images")
+    remove_parser.add_argument("-j", "--json", help = "the path for the json file to store metadata about the tile images (default: none)")
+    remove_parser.add_argument("input", help = "the path containing tile images to remove")
     args = parser.parse_args(namespace = IpMapArgs)
 
     try:
@@ -217,10 +274,12 @@ def main():
             convert_to_parquet(csv_path = Path(args.input), parquet_path = Path(args.output), quiet = args.quiet)
         elif args.command == "generate":
             generate_tiles(parquet_path = Path(args.input), tiles_dir = Path(args.output),
-                tile_size = args.tile_size, variants = parse_list_arg(args.variants),
-                colormaps = parse_list_arg(args.colormaps), processes = args.processes,
-                num_rows = args.num_rows, skip_iters = args.skip_iters,
+                tile_size = args.tile_size, alpha = args.alpha,
+                variants = parse_list_arg(args.variants), colormaps = parse_list_arg(args.colormaps),
+                processes = args.processes, num_rows = args.num_rows, skip_iters = args.skip_iters,
                 json_path = Path(args.json) if args.json else None, quiet = args.quiet)
+        elif args.command == "remove":
+            remove_tiles(tiles_dir = Path(args.input), json_path = Path(args.json) if args.json else None, quiet = args.quiet)
         else:
             raise ValueError("invalid command")
     except ValueError as e:

 zmap.sh | 2

@@ -31,4 +31,4 @@ zmap -B '100M' -M icmp_echo_time '0.0.0.0/0' -f 'saddr,rtt_us,success' -o "$LOCA
 ssh "$REMOTE" "'"mkdir -p "$CURRENT_REMOTE_DATA_PATH""'" && \
 scp "$LOCAL_PARQUET_PATH" "$REMOTE":"$REMOTE_PARQUET_PATH" && \
 ssh "$REMOTE" "'"mkdir -p "$CURRENT_REMOTE_TILES_PATH""'" && \
-ssh "$REMOTE" "'""$REMOTE_IPMAP_PATH" generate -c "$COLORMAPS" -v "$VARIANTS" "$REMOTE_PARQUET_PATH" "$CURRENT_REMOTE_TILES_PATH""'"
+ssh "$REMOTE" "'""$REMOTE_IPMAP_PATH" generate -a -c "$COLORMAPS" -v "$VARIANTS" "$REMOTE_PARQUET_PATH" "$CURRENT_REMOTE_TILES_PATH""'"