
Commit 26afc9c

Improve performance, update image library, minor bugfix in TNS indexing.
Fix * index handling with trailing spaces. Add image.THRESHHOLD_R, image.THRESHHOLD_G, image.THRESHHOLD_B, image.THRESHHOLD_A, image.RESIZE, image.EDGE, and image.CELLSHADE. Rename image.LOAD_IMG to image.LOAD. Update est.asmln.
1 parent 8cb3475 commit 26afc9c

File tree

7 files changed (+515 -88 lines changed)


asm-lang.exe

322 Bytes
Binary file not shown.

ext/image.py

Lines changed: 299 additions & 0 deletions
@@ -633,6 +633,83 @@ def _op_scale(interpreter, args, _arg_nodes, _env, location):
    return Value(TYPE_TNS, Tensor(shape=[target_w, target_h, 4], data=flat))


def _op_resize(interpreter, args, _arg_nodes, _env, location):
    from interpreter import ASMRuntimeError, TYPE_INT, TYPE_TNS, Tensor, Value

    # args: src, new_width, new_height, antialiasing=1 (antialiasing optional)
    if len(args) < 3:
        raise ASMRuntimeError("RESIZE expects at least 3 arguments", location=location, rewrite_rule="RESIZE")
    src = interpreter._expect_tns(args[0], "RESIZE", location)
    target_w = interpreter._expect_int(args[1], "RESIZE", location)
    target_h = interpreter._expect_int(args[2], "RESIZE", location)
    antialiasing = 1
    if len(args) >= 4:
        antialiasing = interpreter._expect_int(args[3], "RESIZE", location)

    if len(src.shape) != 3 or src.shape[2] != 4:
        raise ASMRuntimeError("RESIZE expects a 3D image tensor with 4 channels", location=location, rewrite_rule="RESIZE")

    src_w, src_h, _ = src.shape
    if target_w <= 0 or target_h <= 0:
        raise ASMRuntimeError("RESIZE target dimensions must be positive", location=location, rewrite_rule="RESIZE")
    # Fast path: identical size -> return a copy
    if src_h == target_h and src_w == target_w:
        flat = np.array(src.data.flat, dtype=object)
        return Value(TYPE_TNS, Tensor(shape=list(src.shape), data=flat))

    interpreter.builtins._ensure_tensor_ints(src, "RESIZE", location)

    src_arr = src.data.reshape((src_w, src_h, 4))
    out = np.empty((target_w, target_h, 4), dtype=object)
    _expect_int = interpreter._expect_int
    _Val = Value
    _TINT = TYPE_INT

    if antialiasing:
        # Bilinear interpolation (absolute target dimensions)
        scale_y = src_h / float(target_h)
        scale_x = src_w / float(target_w)
        for j in range(target_h):
            src_y = (j + 0.5) * scale_y - 0.5
            y0 = int(math.floor(src_y))
            y1 = y0 + 1
            wy = src_y - y0
            wy0 = 1.0 - wy
            y0_clamped = max(0, min(src_h - 1, y0))
            y1_clamped = max(0, min(src_h - 1, y1))
            for i in range(target_w):
                src_x = (i + 0.5) * scale_x - 0.5
                x0 = int(math.floor(src_x))
                x1 = x0 + 1
                wx = src_x - x0
                wx0 = 1.0 - wx
                x0_clamped = max(0, min(src_w - 1, x0))
                x1_clamped = max(0, min(src_w - 1, x1))
                # sample four neighbors and blend; src_arr is [x,y,c]
                for c in range(4):
                    v00 = _expect_int(src_arr[x0_clamped, y0_clamped, c], "RESIZE", location)
                    v10 = _expect_int(src_arr[x1_clamped, y0_clamped, c], "RESIZE", location)
                    v01 = _expect_int(src_arr[x0_clamped, y1_clamped, c], "RESIZE", location)
                    v11 = _expect_int(src_arr[x1_clamped, y1_clamped, c], "RESIZE", location)
                    val = (v00 * (wy0 * wx0) + v10 * (wy0 * wx) + v01 * (wy * wx0) + v11 * (wy * wx))
                    iv = int(round(val))
                    iv = 0 if iv < 0 else (255 if iv > 255 else iv)
                    out[i, j, c] = _Val(_TINT, iv)
    else:
        # Nearest-neighbor
        for j in range(target_h):
            src_y = int(round((j + 0.5) * (src_h / float(target_h)) - 0.5))
            sy = max(0, min(src_h - 1, src_y))
            for i in range(target_w):
                src_x = int(round((i + 0.5) * (src_w / float(target_w)) - 0.5))
                sx = max(0, min(src_w - 1, src_x))
                for c in range(4):
                    out[i, j, c] = _Val(_TINT, int(_expect_int(src_arr[sx, sy, c], "RESIZE", location)))

    flat = np.array(out.flatten(), dtype=object)
    return Value(TYPE_TNS, Tensor(shape=[target_w, target_h, 4], data=flat))


def _op_rotate(interpreter, args, _arg_nodes, _env, location):
    from interpreter import ASMRuntimeError, TYPE_INT, TYPE_TNS, Tensor, Value
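
For reference, the bilinear path above weights the four nearest source pixels by their distance to the sample point; passing antialiasing=0 skips this and picks the nearest source pixel instead. A minimal standalone sketch of the same sampling, not part of this commit, assuming a plain (w, h, 4) uint8 numpy array instead of the interpreter's object tensors:

import math
import numpy as np

def resize_bilinear(src, target_w, target_h):
    # src: (w, h, 4) uint8 RGBA array; returns a (target_w, target_h, 4) uint8 array
    src_w, src_h, _ = src.shape
    out = np.empty((target_w, target_h, 4), dtype=np.uint8)
    scale_x = src_w / float(target_w)
    scale_y = src_h / float(target_h)
    for j in range(target_h):
        sy = (j + 0.5) * scale_y - 0.5
        y0 = int(math.floor(sy))
        wy = sy - y0
        y0c = max(0, min(src_h - 1, y0))
        y1c = max(0, min(src_h - 1, y0 + 1))
        for i in range(target_w):
            sx = (i + 0.5) * scale_x - 0.5
            x0 = int(math.floor(sx))
            wx = sx - x0
            x0c = max(0, min(src_w - 1, x0))
            x1c = max(0, min(src_w - 1, x0 + 1))
            for c in range(4):
                # blend four neighbors with weights (1-wx)(1-wy), wx(1-wy), (1-wx)wy, wx*wy
                val = (src[x0c, y0c, c] * (1 - wx) * (1 - wy)
                       + src[x1c, y0c, c] * wx * (1 - wy)
                       + src[x0c, y1c, c] * (1 - wx) * wy
                       + src[x1c, y1c, c] * wx * wy)
                out[i, j, c] = min(255, max(0, int(round(val))))
    return out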

@@ -1410,6 +1487,72 @@ def _op_replace_color(interpreter, args, _arg_nodes, _env, location):
    return Value(TYPE_TNS, Tensor(shape=list(img.shape), data=flat))


def _op_thresh_generic(interpreter, args, _arg_nodes, _env, location, channel: int, rule: str):
    from interpreter import ASMRuntimeError, TYPE_INT, TYPE_TNS, Tensor, Value

    if len(args) < 2:
        raise ASMRuntimeError(f"{rule} expects at least 2 arguments", location=location, rewrite_rule=rule)
    img = interpreter._expect_tns(args[0], rule, location)
    thresh = interpreter._expect_int(args[1], rule, location)

    # Optional color
    d_r = d_g = d_b = d_a = 0
    d_has_alpha = True
    if len(args) >= 3:
        color_t = interpreter._expect_tns(args[2], rule, location)
        if len(color_t.shape) != 1 or color_t.shape[0] not in (3, 4):
            raise ASMRuntimeError(f"{rule}: color must be a 1-D TNS length 3 or 4", location=location, rewrite_rule=rule)
        carr = color_t.data.reshape(tuple(color_t.shape))
        d_r = interpreter._expect_int(carr[0], rule, location)
        d_g = interpreter._expect_int(carr[1], rule, location)
        d_b = interpreter._expect_int(carr[2], rule, location)
        if color_t.shape[0] == 4:
            d_a = interpreter._expect_int(carr[3], rule, location)
            d_has_alpha = True
        else:
            d_has_alpha = False

    if len(img.shape) != 3 or img.shape[2] != 4:
        raise ASMRuntimeError(f"{rule} expects a 3D image tensor with 4 channels", location=location, rewrite_rule=rule)

    w, h, _ = img.shape
    interpreter.builtins._ensure_tensor_ints(img, rule, location)
    arr = img.data.reshape((w, h, 4))
    new_arr = arr.copy()

    _Val = Value
    _TINT = TYPE_INT

    for y in range(h):
        for x in range(w):
            p_val = interpreter._expect_int(new_arr[x, y, channel], rule, location)
            if p_val == thresh:
                new_arr[x, y, 0] = _Val(_TINT, int(_clamp_channel(d_r)))
                new_arr[x, y, 1] = _Val(_TINT, int(_clamp_channel(d_g)))
                new_arr[x, y, 2] = _Val(_TINT, int(_clamp_channel(d_b)))
                if d_has_alpha:
                    new_arr[x, y, 3] = _Val(_TINT, int(_clamp_channel(d_a)))

    flat = np.array(new_arr.flatten(), dtype=object)
    return Value(TYPE_TNS, Tensor(shape=list(img.shape), data=flat))


def _op_thresh_a(interpreter, args, _arg_nodes, _env, location):
    return _op_thresh_generic(interpreter, args, _arg_nodes, _env, location, channel=3, rule="THRESHHOLD_A")


def _op_thresh_r(interpreter, args, _arg_nodes, _env, location):
    return _op_thresh_generic(interpreter, args, _arg_nodes, _env, location, channel=0, rule="THRESHHOLD_R")


def _op_thresh_g(interpreter, args, _arg_nodes, _env, location):
    return _op_thresh_generic(interpreter, args, _arg_nodes, _env, location, channel=1, rule="THRESHHOLD_G")


def _op_thresh_b(interpreter, args, _arg_nodes, _env, location):
    return _op_thresh_generic(interpreter, args, _arg_nodes, _env, location, channel=2, rule="THRESHHOLD_B")


def _op_render_text(interpreter, args, _arg_nodes, _env, location):
    from interpreter import ASMRuntimeError, TYPE_INT, TYPE_TNS, Tensor, Value
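
The four THRESHHOLD_* operators are thin wrappers around _op_thresh_generic above: any pixel whose selected channel is exactly equal to the given value gets its RGB overwritten with the fill color, and its alpha too when a 4-component color was supplied. A hedged numpy-only sketch of that masking step, assuming a plain (w, h, 4) uint8 array rather than the interpreter's object tensors:

import numpy as np

def threshold_channel(img, channel, value, fill=(0, 0, 0, 0), fill_has_alpha=True):
    # img: (w, h, 4) uint8 RGBA; overwrite pixels where img[..., channel] == value
    out = img.copy()
    mask = img[:, :, channel] == value
    out[mask, 0] = fill[0]
    out[mask, 1] = fill[1]
    out[mask, 2] = fill[2]
    if fill_has_alpha:
        out[mask, 3] = fill[3]
    return out

# e.g. repaint every fully transparent pixel (alpha == 0) as opaque black:
# cleared = threshold_channel(img, channel=3, value=0, fill=(0, 0, 0, 255))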

@@ -1934,6 +2077,155 @@ def _op_invert(interpreter, args, _arg_nodes, _env, location):
    data = np.array(flat_objs, dtype=object)
    return Value(TYPE_TNS, Tensor(shape=list(img.shape), data=data))


def _op_edge(interpreter, args, _arg_nodes, _env, location):
    from interpreter import ASMRuntimeError, TYPE_INT, TYPE_TNS, Tensor, Value

    if len(args) != 1:
        raise ASMRuntimeError("EDGE expects 1 argument", location=location, rewrite_rule="EDGE")
    img = interpreter._expect_tns(args[0], "EDGE", location)

    # Expect a 3D image tensor with 4 channels (RGBA)
    if len(img.shape) != 3 or img.shape[2] != 4:
        raise ASMRuntimeError("EDGE expects a 3D image tensor with 4 channels", location=location, rewrite_rule="EDGE")

    w, h, _ = img.shape
    interpreter.builtins._ensure_tensor_ints(img, "EDGE", location)
    arr = img.data.reshape((w, h, 4))

    # Fast path: build an int numpy array of shape (w,h,4) using fromiter
    total = w * h * 4
    flat_iter = (int(v.value) for v in arr.flatten())
    flat_ints = np.fromiter(flat_iter, dtype=np.int64, count=total)
    int_arr = flat_ints.reshape((w, h, 4))

    # Compute luminance vectorized: shape (w,h)
    lum = (0.299 * int_arr[:, :, 0].astype(float)
           + 0.587 * int_arr[:, :, 1].astype(float)
           + 0.114 * int_arr[:, :, 2].astype(float))

    # Separable Gaussian blur using numpy.convolve per line (C implementation)
    def _gaussian_blur_2d(src: np.ndarray, radius: int) -> np.ndarray:
        if radius <= 0:
            return src.copy()
        sigma = max(0.5, radius / 2.0)
        ksize = radius * 2 + 1
        kernel = np.array([math.exp(-((i - radius) ** 2) / (2.0 * sigma * sigma)) for i in range(ksize)], dtype=float)
        kernel /= kernel.sum()

        # horizontal pass: convolve along x for each y (use numpy.convolve C implementation)
        tmp = np.empty_like(src, dtype=float)
        for y in range(src.shape[1]):
            tmp[:, y] = np.convolve(src[:, y], kernel, mode='same')

        # vertical pass: convolve along y for each x
        out = np.empty_like(src, dtype=float)
        for x in range(src.shape[0]):
            out[x, :] = np.convolve(tmp[x, :], kernel, mode='same')

        return out

    # DoG: small - large blur (radii 1 and 2)
    small = _gaussian_blur_2d(lum, 1)
    large = _gaussian_blur_2d(lum, 2)
    dog = small - large

    mag = np.abs(dog)
    maxv = float(mag.max()) if mag.size > 0 else 0.0
    if maxv <= 0.0:
        scaled = np.zeros_like(mag, dtype=np.int32)
    else:
        scaled = np.clip(np.round((mag / maxv) * 255.0), 0, 255).astype(np.int32)

    # Build output 4-channel image efficiently: R=G=B=scaled magnitude, alpha preserved
    alpha_flat = int_arr[:, :, 3].flatten().astype(np.int32)
    total_pix = w * h
    out_flat_ints = np.empty(total_pix * 4, dtype=np.int32)
    vals = scaled.flatten()
    out_flat_ints[0::4] = vals
    out_flat_ints[1::4] = vals
    out_flat_ints[2::4] = vals
    out_flat_ints[3::4] = alpha_flat

    # Wrap into Value objects (one list comprehension over ints)
    _Val = Value
    _TINT = TYPE_INT
    flat_objs = [_Val(_TINT, int(v)) for v in out_flat_ints]
    data = np.array(flat_objs, dtype=object)
    return Value(TYPE_TNS, Tensor(shape=[w, h, 4], data=data))

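_op_edge above is a difference-of-Gaussians (DoG) detector: luminance is blurred at radius 1 and radius 2, the wide blur is subtracted from the narrow one, and the absolute response is rescaled to 0..255 into the RGB channels while alpha passes through. A tiny 1-D illustration of why that subtraction isolates edges, assuming only numpy; the small kernels here are arbitrary stand-ins, not the exact ones _gaussian_blur_2d builds:

import numpy as np

# A step edge: a dark run followed by a bright run
signal = np.array([10.0] * 8 + [200.0] * 8)

# One narrow and one wide normalized smoothing kernel
narrow = np.array([1.0, 2.0, 1.0])
narrow /= narrow.sum()
wide = np.array([1.0, 2.0, 3.0, 2.0, 1.0])
wide /= wide.sum()

small_blur = np.convolve(signal, narrow, mode="same")
large_blur = np.convolve(signal, wide, mode="same")
dog = np.abs(small_blur - large_blur)

# The response stays near zero over the flat interior and peaks at the step,
# which is the magnitude _op_edge then normalizes to 0..255.
print(np.round(dog, 1))
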
def _op_cellshade(interpreter, args, _arg_nodes, _env, location):
    from interpreter import ASMRuntimeError, TYPE_INT, TYPE_TNS, Tensor, Value

    if len(args) != 2:
        raise ASMRuntimeError("CELLSHADE expects 2 arguments", location=location, rewrite_rule="CELLSHADE")
    img = interpreter._expect_tns(args[0], "CELLSHADE", location)
    palette = interpreter._expect_tns(args[1], "CELLSHADE", location)

    # Expect a 3D image tensor with 4 channels (RGBA)
    if len(img.shape) != 3 or img.shape[2] != 4:
        raise ASMRuntimeError("CELLSHADE expects a 3D image tensor with 4 channels", location=location, rewrite_rule="CELLSHADE")

    w, h, _ = img.shape
    interpreter.builtins._ensure_tensor_ints(img, "CELLSHADE", location)
    img_arr = img.data.reshape((w, h, 4))

    # Normalize palette into Nx(3 or 4) integer array
    if len(palette.shape) == 1 and palette.shape[0] in (3, 4):
        pal_arr = palette.data.reshape(tuple(palette.shape))
        pal_list = [interpreter._expect_int(pal_arr[i], "CELLSHADE", location) for i in range(palette.shape[0])]
        pal_np = np.array([pal_list], dtype=np.int32)
    elif len(palette.shape) == 2 and palette.shape[1] in (3, 4):
        pal_view = palette.data.reshape(tuple(palette.shape))
        pal_np = np.empty((palette.shape[0], palette.shape[1]), dtype=np.int32)
        for i in range(palette.shape[0]):
            for j in range(palette.shape[1]):
                pal_np[i, j] = interpreter._expect_int(pal_view[i, j], "CELLSHADE", location)
    else:
        raise ASMRuntimeError("CELLSHADE: colors must be a TNS of shape [N,3] or [N,4] or a single 1-D color", location=location, rewrite_rule="CELLSHADE")

    # Separate RGB and optional alpha
    if pal_np.shape[1] == 3:
        pal_rgb = pal_np[:, :3]
        pal_alpha = None
    else:
        pal_rgb = pal_np[:, :3]
        pal_alpha = pal_np[:, 3]

    # Build integer image array
    total = w * h * 4
    flat_iter = (int(v.value) for v in img_arr.flatten())
    flat_ints = np.fromiter(flat_iter, dtype=np.int64, count=total)
    int_img = flat_ints.reshape((w, h, 4)).astype(np.int32)

    rgb = int_img[:, :, :3]

    # Compute squared distances to palette colors using broadcasting
    # resulting shape: (w, h, n)
    dif = rgb[..., None, :] - pal_rgb[None, None, :, :]
    d2 = np.sum(dif.astype(np.int64) * dif.astype(np.int64), axis=-1)
    idx = np.argmin(d2, axis=2)

    # Build output int array
    out_ints = np.empty((w, h, 4), dtype=np.int32)
    for i in range(pal_rgb.shape[0]):
        mask = (idx == i)
        out_ints[mask, 0] = pal_rgb[i, 0]
        out_ints[mask, 1] = pal_rgb[i, 1]
        out_ints[mask, 2] = pal_rgb[i, 2]
        if pal_alpha is None:
            # preserve source alpha
            out_ints[mask, 3] = int_img[mask, 3]
        else:
            out_ints[mask, 3] = pal_alpha[i]

    # Convert to Value objects
    _Val = Value
    _TINT = TYPE_INT
    flat_objs = [_Val(_TINT, int(v)) for v in out_ints.flatten()]
    data = np.array(flat_objs, dtype=object)
    return Value(TYPE_TNS, Tensor(shape=[w, h, 4], data=data))


def asm_lang_register(ext: ExtensionAPI) -> None:
    ext.metadata(name="image", version="0.1.0")
    ext.register_operator("LOAD_PNG", 1, 1, _op_load_png, doc="LOAD_PNG(path):TNS[width][height][r,g,b,a]")
@@ -1950,6 +2242,13 @@ def asm_lang_register(ext: ExtensionAPI) -> None:
    ext.register_operator("ROTATE", 2, 2, _op_rotate, doc="ROTATE(TNS:img, FLT:degrees):TNS")
    ext.register_operator("GRAYSCALE", 1, 1, _op_grayscale, doc="GRAYSCALE(TNS:img):TNS (rgb channels set to luminance, alpha preserved)")
    ext.register_operator("INVERT", 1, 1, _op_invert, doc="INVERT(TNS:img):TNS (invert RGB channels, preserve alpha)")
    ext.register_operator("EDGE", 1, 1, _op_edge, doc="EDGE(TNS:img):TNS (difference-of-gaussians edge detector)")
    ext.register_operator("BLUR", 2, 2, _op_blur, doc="BLUR(TNS:img, INT:radius):TNS (gaussian blur, radius in pixels)")
    ext.register_operator("REPLACE_COLOR", 3, 3, _op_replace_color, doc="REPLACE_COLOR(TNS:img, TNS:src_color[3|4], TNS:dst_color[3|4]):TNS - Replace src_color with dst_color; RGB dst preserves alpha if dst has no alpha")
    ext.register_operator("RENDER_TEXT", 2, 6, _op_render_text, doc="RENDER_TEXT(STR:text, INT:size, STR:font_path=\"\", TNS:color, TNS:bgcolor, INT:antialiasing=1):TNS")
    ext.register_operator("RESIZE", 3, 4, _op_resize, doc="RESIZE(TNS:img, INT:new_width, INT:new_height, INT:antialiasing=1):TNS")
    ext.register_operator("CELLSHADE", 2, 2, _op_cellshade, doc="CELLSHADE(TNS:img, TNS:colors):TNS (map pixels to nearest palette color)")
    ext.register_operator("THRESHHOLD_A", 2, 3, _op_thresh_a, doc="THRESHHOLD_A(TNS:img, INT:a, TNS:color=[0,0,0,0]):TNS")
    ext.register_operator("THRESHHOLD_R", 2, 3, _op_thresh_r, doc="THRESHHOLD_R(TNS:img, INT:r, TNS:color=[0,0,0,0]):TNS")
    ext.register_operator("THRESHHOLD_G", 2, 3, _op_thresh_g, doc="THRESHHOLD_G(TNS:img, INT:g, TNS:color=[0,0,0,0]):TNS")
    ext.register_operator("THRESHHOLD_B", 2, 3, _op_thresh_b, doc="THRESHHOLD_B(TNS:img, INT:b, TNS:color=[0,0,0,0]):TNS")

extensions.py

Lines changed: 1 addition & 1 deletion
@@ -332,7 +332,7 @@ def read_asmx(pointer_file: str) -> List[str]:
     base_dir = os.path.dirname(os.path.abspath(pointer_file))
     out: List[str] = []
     with open(pointer_file, "r", encoding="utf-8") as handle:
-        for raw in handle.read().splitlines():
+        for raw in handle:
             line = raw.strip()
             if not line or line.startswith("#"):
                 continue
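
In read_asmx the handle.read().splitlines() call is replaced by iterating the handle directly, so lines are streamed instead of the whole pointer file being materialized in memory; because each raw line goes through strip(), the trailing newline that handle iteration keeps changes nothing. A small sketch of the pattern, with an invented file name:

# Iterating the handle yields one line at a time, newline included
with open("pointers.asmx", "r", encoding="utf-8") as handle:
    for raw in handle:
        line = raw.strip()    # removes the trailing newline and surrounding whitespace
        if not line or line.startswith("#"):
            continue
        print(line)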
