luci-0.10: merge r6759-r6784

This commit is contained in:
Jo-Philipp Wich 2011-01-23 18:38:22 +00:00
parent 8383f6b979
commit 658a2a69bd
28 changed files with 4032 additions and 119 deletions

View file

@ -66,32 +66,22 @@ end
-------------------- View -------------------- -------------------- View --------------------
f = SimpleForm("ffwizward", "Freifunkassistent", f = SimpleForm("ffwizward", "Freifunkassistent",
"Dieser Assistent unterstützt Sie bei der Einrichtung des Routers für das Freifunknetz.") "Dieser Assistent unterstützt Sie bei der Einrichtung des Routers für das Freifunknetz.")
-- main netconfig
local newpsswd = has_rom and sys.exec("diff /rom/etc/passwd /etc/passwd") -- if password is not set or default then force the user to set a new one
if newpsswd ~= "" then if sys.exec("diff /rom/etc/passwd /etc/passwd") == "" then
pw = f:field(Flag, "pw", "Router Passwort", "Setzen Sie den Haken, um Ihr Passwort zu ändern.") pw1 = f:field(Value, "pw1", translate("password"))
function pw.cfgvalue(self, section) pw1.password = true
return 1 pw1.rmempty = false
pw2 = f:field(Value, "pw2", translate("confirmation"))
pw2.password = true
pw2.rmempty = false
function pw2.validate(self, value, section)
return pw1:formvalue(section) == value and value
end end
end end
pw1 = f:field(Value, "pw1", translate("password"))
pw1.password = true
pw1.rmempty = false
pw2 = f:field(Value, "pw2", translate("confirmation"))
pw2.password = true
pw2.rmempty = false
function pw2.validate(self, value, section)
return pw1:formvalue(section) == value and value
end
if newpsswd ~= "" then
pw1:depends("pw", "1")
pw2:depends("pw", "1")
end
net = f:field(ListValue, "net", "Freifunk Community", "Nutzen Sie die Einstellungen der Freifunk Gemeinschaft in ihrer Nachbarschaft.") net = f:field(ListValue, "net", "Freifunk Community", "Nutzen Sie die Einstellungen der Freifunk Gemeinschaft in ihrer Nachbarschaft.")
net.rmempty = false net.rmempty = false
net.optional = false net.optional = false
@ -131,9 +121,7 @@ function hostname.write(self, section, value)
uci:save("freifunk") uci:save("freifunk")
end end
function hostname.validate(self, value) function hostname.validate(self, value)
if (#value > 16) then if (#value > 24) or string.find(value, "[^%w%.%-]") or string.find(string.sub(value, value:len()), "[%.%-]") or string.find(string.sub(value, 1), "[%.%-]") then
return
elseif (string.find(value, "[^%w%_%-]")) then
return return
else else
return value return value
@ -186,23 +174,22 @@ uci:foreach("wireless", "wifi-device",
function chan.cfgvalue(self, section) function chan.cfgvalue(self, section)
return uci:get("freifunk", "wizard", "chan_" .. device) return uci:get("freifunk", "wizard", "chan_" .. device)
end end
chan:value('default') chan:value('default')
for i = 1, 14, 1 do for _, f in ipairs(sys.wifi.channels(device)) do
chan:value(i) if not f.restricted then
end chan:value(f.channel)
for i = 36, 64, 4 do end
chan:value(i)
end
for i = 100, 140, 4 do
chan:value(i)
end end
function chan.write(self, sec, value) function chan.write(self, sec, value)
if value then if value then
uci:set("freifunk", "wizard", "chan_" .. device, value) uci:set("freifunk", "wizard", "chan_" .. device, value)
uci:save("freifunk") uci:save("freifunk")
end end
end end
local meship = f:field(Value, "meship_" .. device, device:upper() .. " Mesh IP Adresse einrichten", "Ihre Mesh IP Adresse erhalten Sie von der Freifunk Gemeinschaft in Ihrer Nachbarschaft. Es ist eine netzweit eindeutige Identifikation, z.B. 104.1.1.1.")
local meship = f:field(Value, "meship_" .. device, device:upper() .. " Mesh IP Adresse einrichten", "Ihre Mesh IP Adresse erhalten Sie von der Freifunk Gemeinschaft in Ihrer Nachbarschaft. Es ist eine netzweit eindeutige Identifikation, z.B. 104.1.1.1.")
meship:depends("device_" .. device, "1") meship:depends("device_" .. device, "1")
meship.rmempty = true meship.rmempty = true
function meship.cfgvalue(self, section) function meship.cfgvalue(self, section)
@ -590,11 +577,7 @@ function f.handle(self, state, data)
else else
if data.pw1 then if data.pw1 then
local stat = luci.sys.user.setpasswd("root", data.pw1) == 0 local stat = luci.sys.user.setpasswd("root", data.pw1) == 0
-- if stat then end
-- f.message = translate("a_s_changepw_changed")
-- else
-- f.errmessage = translate("unknownerror")
end
data.pw1 = nil data.pw1 = nil
data.pw2 = nil data.pw2 = nil
uci:commit("freifunk") uci:commit("freifunk")
@ -619,9 +602,9 @@ function f.handle(self, state, data)
if has_radvd then if has_radvd then
uci:commit("radvd") uci:commit("radvd")
end end
-- the following line didn't work without admin-mini, for now i just replaced it with sys.exec... soma
-- luci.http.redirect(luci.dispatcher.build_url(unpack(luci.dispatcher.context.requested.path), "system", "reboot") .. "?reboot=1") sys.exec("for s in network dnsmasq luci_splash firewall uhttpd olsrd radvd l2gvpn; do [ -x /etc/init.d/$s ] && /etc/init.d/$s restart;done > /dev/null &")
sys.exec("reboot") luci.http.redirect(luci.dispatcher.build_url(luci.dispatcher.context.path[1], "freifunk", "ffwizard"))
end end
return false return false
elseif state == FORM_INVALID then elseif state == FORM_INVALID then
@ -782,50 +765,39 @@ function main.write(self, section, value)
end end
-- New Config -- New Config
-- Tune wifi device -- Tune wifi device
local ssiduci = uci:get("freifunk", community, "ssid") local ssid = uci:get("freifunk", community, "ssid") or "olsr.freifunk.net"
local ssiddot = string.find(ssiduci,'%..*')
local ssidshort
if ssiddot then
ssidshort = string.sub(ssiduci,ssiddot)
else
ssidshort = ssiduci
end
local devconfig = uci:get_all("freifunk", "wifi_device") local devconfig = uci:get_all("freifunk", "wifi_device")
util.update(devconfig, uci:get_all(external, "wifi_device") or {}) util.update(devconfig, uci:get_all(external, "wifi_device") or {})
local ssid = uci:get("freifunk", community, "ssid")
local channel = luci.http.formvalue("cbid.ffwizward.1.chan_" .. device) local channel = luci.http.formvalue("cbid.ffwizward.1.chan_" .. device)
local hwmode = "11bg" local hwmode = "11bg"
local bssid = "02:CA:FF:EE:BA:BE" local bssid = uci:get_all(external, "wifi_iface", "bssid") or "02:CA:FF:EE:BA:BE"
local mrate = 5500 local mrate = 5500
-- set bssid, see https://kifuse02.pberg.freifunk.net/moin/channel-bssid-essid for schema
if channel and channel ~= "default" then if channel and channel ~= "default" then
if devconfig.channel ~= channel then if devconfig.channel ~= channel then
devconfig.channel = channel devconfig.channel = channel
local chan = tonumber(channel) local chan = tonumber(channel)
if chan >= 0 and chan < 10 then if chan >= 0 and chan < 10 then
bssid = channel .. "2:CA:FF:EE:BA:BE" bssid = channel .. "2:CA:FF:EE:BA:BE"
ssid = "ch" .. channel .. ssidshort
elseif chan == 10 then elseif chan == 10 then
bssid = "02:CA:FF:EE:BA:BE" bssid = "02:CA:FF:EE:BA:BE"
ssid = "ch" .. channel .. ssidshort
elseif chan >= 11 and chan <= 14 then elseif chan >= 11 and chan <= 14 then
bssid = string.format("%X",channel) .. "2:CA:FF:EE:BA:BE" bssid = string.format("%X",channel) .. "2:CA:FF:EE:BA:BE"
ssid = "ch" .. channel .. ssidshort
elseif chan >= 36 and chan <= 64 then elseif chan >= 36 and chan <= 64 then
hwmode = "11a" hwmode = "11a"
mrate = "" mrate = ""
outdoor = 0 outdoor = 0
bssid = "00:" .. channel ..":CA:FF:EE:EE" bssid = "00:" .. channel ..":CA:FF:EE:EE"
ssid = "ch" .. channel .. ssidshort
elseif chan >= 100 and chan <= 140 then elseif chan >= 100 and chan <= 140 then
hwmode = "11a" hwmode = "11a"
mrate = "" mrate = ""
outdoor = 1 outdoor = 1
bssid = "01:" .. string.sub(channel, 2) .. ":CA:FF:EE:EE" bssid = "01:" .. string.sub(channel, 2) .. ":CA:FF:EE:EE"
ssid = "ch" .. channel .. ssidshort
end end
devconfig.hwmode = hwmode devconfig.hwmode = hwmode
devconfig.outdoor = outdoor devconfig.outdoor = outdoor
end end
ssid = ssid .. " - ch" .. channel
end end
uci:tset("wireless", device, devconfig) uci:tset("wireless", device, devconfig)
-- Create wifi iface -- Create wifi iface
@ -833,13 +805,7 @@ function main.write(self, section, value)
util.update(ifconfig, uci:get_all(external, "wifi_iface") or {}) util.update(ifconfig, uci:get_all(external, "wifi_iface") or {})
ifconfig.device = device ifconfig.device = device
ifconfig.network = nif ifconfig.network = nif
if ssid then ifconfig.ssid = ssid
-- See Table https://kifuse02.pberg.freifunk.net/moin/channel-bssid-essid
ifconfig.ssid = ssid
else
ifconfig.ssid = "olsr.freifunk.net"
end
-- See Table https://kifuse02.pberg.freifunk.net/moin/channel-bssid-essid
ifconfig.bssid = bssid ifconfig.bssid = bssid
ifconfig.encryption="none" ifconfig.encryption="none"
-- Read Preset -- Read Preset
@ -947,8 +913,8 @@ function main.write(self, section, value)
device =device, device =device,
mode ="ap", mode ="ap",
encryption ="none", encryption ="none",
network =nif.."dhcp", network =nif .. "dhcp",
ssid ="AP"..ssidshort ssid ="AP-" .. ssid
}) })
if has_radvd then if has_radvd then
uci:section("radvd", "interface", nil, { uci:section("radvd", "interface", nil, {

View file

@ -113,6 +113,7 @@ s = m:section(TypedSection, "redirect", translate("Redirections"))
s.template = "cbi/tblsection" s.template = "cbi/tblsection"
s.addremove = true s.addremove = true
s.anonymous = true s.anonymous = true
s.sortable = true
s.extedit = ds.build_url("admin", "network", "firewall", "redirect", "%s") s.extedit = ds.build_url("admin", "network", "firewall", "redirect", "%s")
function s.create(self, section) function s.create(self, section)
@ -191,6 +192,7 @@ end
s = m:section(TypedSection, "rule", translate("Rules")) s = m:section(TypedSection, "rule", translate("Rules"))
s.addremove = true s.addremove = true
s.anonymous = true s.anonymous = true
s.sortable = true
s.template = "cbi/tblsection" s.template = "cbi/tblsection"
s.extedit = ds.build_url("admin", "network", "firewall", "rule", "%s") s.extedit = ds.build_url("admin", "network", "firewall", "rule", "%s")
s.defaults.target = "ACCEPT" s.defaults.target = "ACCEPT"

View file

@ -64,14 +64,20 @@ wa.cbi_add_knownips(dsth)
l7 = s:option(ListValue, "layer7", translate("Service")) l7 = s:option(ListValue, "layer7", translate("Service"))
l7.rmempty = true l7.rmempty = true
l7:value("", translate("all")) l7:value("", translate("all"))
local pats = fs.glob("/etc/l7-protocols/*/*.pat")
local pats = io.popen("find /etc/l7-protocols/ -type f -name '*.pat'")
if pats then if pats then
for f in pats do local l
f = f:match("([^/]+)%.pat$") while true do
if f then l = pats:read("*l")
l7:value(f) if not l then break end
l = l:match("([^/]+)%.pat$")
if l then
l7:value(l)
end end
end end
pats:close()
end end
p = s:option(Value, "proto", translate("Protocol")) p = s:option(Value, "proto", translate("Protocol"))

View file

@ -50,14 +50,20 @@ wa.cbi_add_knownips(dsth)
l7 = s:option(ListValue, "layer7", translate("Service")) l7 = s:option(ListValue, "layer7", translate("Service"))
l7.rmempty = true l7.rmempty = true
l7:value("", translate("all")) l7:value("", translate("all"))
local pats = fs.glob("/etc/l7-protocols/*/*.pat")
local pats = io.popen("find /etc/l7-protocols/ -type f -name '*.pat'")
if pats then if pats then
for f in pats do local l
f = f:match("([^/]+)%.pat$") while true do
if f then l = pats:read("*l")
l7:value(f) if not l then break end
l = l:match("([^/]+)%.pat$")
if l then
l7:value(l)
end end
end end
pats:close()
end end
p = s:option(ListValue, "proto", translate("Protocol")) p = s:option(ListValue, "proto", translate("Protocol"))

View file

@ -32,6 +32,9 @@ luasource:
endif endif
luadiet: luasource
for i in $$(find dist -type f -name '*.lua'); do LUA_PATH="../../contrib/luasrcdiet/lua/?.lua" $(LUA) ../../contrib/luasrcdiet/lua/LuaSrcDiet.lua --maximum $$i -o $$i.diet && mv $$i.diet $$i; done
luastrip: luasource luastrip: luasource
for i in $$(find dist -type f -name '*.lua'); do perl -e 'undef $$/; open( F, "< $$ARGV[0]" ) || die $$!; $$src = <F>; close F; $$src =~ s/--\[\[.*?\]\](--)?//gs; $$src =~ s/^\s*--.*?\n//gm; open( F, "> $$ARGV[0]" ) || die $$!; print F $$src; close F' $$i; done for i in $$(find dist -type f -name '*.lua'); do perl -e 'undef $$/; open( F, "< $$ARGV[0]" ) || die $$!; $$src = <F>; close F; $$src =~ s/--\[\[.*?\]\](--)?//gs; $$src =~ s/^\s*--.*?\n//gm; open( F, "> $$ARGV[0]" ) || die $$!; print F $$src; close F' $$i; done

View file

@ -0,0 +1,2 @@
include ../../build/config.mk
include ../../build/module.mk

View file

@ -0,0 +1,38 @@
LuaSrcDiet License
------------------
LuaSrcDiet is licensed under the terms of the MIT license reproduced
below. This means that LuaSrcDiet is free software and can be used for
both academic and commercial purposes at absolutely no cost.
Parts of LuaSrcDiet is based on Lua 5 code. See COPYRIGHT_Lua51
(Lua 5.1.3) for Lua 5 license information.
For details and rationale, see http://www.lua.org/license.html .
===============================================================================
Copyright (C) 2005-2008 Kein-Hong Man <khman@users.sf.net>
Lua 5.1.3 Copyright (C) 1994-2008 Lua.org, PUC-Rio.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
===============================================================================
(end of COPYRIGHT)

View file

@ -0,0 +1,670 @@
#!/usr/bin/env lua
--[[--------------------------------------------------------------------
LuaSrcDiet
Compresses Lua source code by removing unnecessary characters.
For Lua 5.1.x source code.
Copyright (c) 2008 Kein-Hong Man <khman@users.sf.net>
The COPYRIGHT file describes the conditions
under which this software may be distributed.
See the ChangeLog for more information.
----------------------------------------------------------------------]]
--[[--------------------------------------------------------------------
-- NOTES:
-- * Remember to update version and date information below (MSG_TITLE)
-- * TODO: to implement pcall() to properly handle lexer etc. errors
-- * TODO: verify token stream or double-check binary chunk?
-- * TODO: need some automatic testing for a semblance of sanity
-- * TODO: the plugin module is highly experimental and unstable
----------------------------------------------------------------------]]
-- standard libraries, functions
local string = string
local math = math
local table = table
local require = require
local print = print
local sub = string.sub
local gmatch = string.gmatch
-- support modules
local llex = require "llex"
local lparser = require "lparser"
local optlex = require "optlex"
local optparser = require "optparser"
local plugin
--[[--------------------------------------------------------------------
-- messages and textual data
----------------------------------------------------------------------]]
local MSG_TITLE = [[
LuaSrcDiet: Puts your Lua 5.1 source code on a diet
Version 0.11.2 (20080608) Copyright (c) 2005-2008 Kein-Hong Man
The COPYRIGHT file describes the conditions under which this
software may be distributed.
]]
local MSG_USAGE = [[
usage: LuaSrcDiet [options] [filenames]
example:
>LuaSrcDiet myscript.lua -o myscript_.lua
options:
-v, --version prints version information
-h, --help prints usage information
-o <file> specify file name to write output
-s <suffix> suffix for output files (default '_')
--keep <msg> keep block comment with <msg> inside
--plugin <module> run <module> in plugin/ directory
- stop handling arguments
(optimization levels)
--none all optimizations off (normalizes EOLs only)
--basic lexer-based optimizations only
--maximum maximize reduction of source
(informational)
--quiet process files quietly
--read-only read file and print token stats only
--dump-lexer dump raw tokens from lexer to stdout
--dump-parser dump variable tracking tables from parser
--details extra info (strings, numbers, locals)
features (to disable, insert 'no' prefix like --noopt-comments):
%s
default settings:
%s]]
------------------------------------------------------------------------
-- optimization options, for ease of switching on and off
-- * positive to enable optimization, negative (no) to disable
-- * these options should follow --opt-* and --noopt-* style for now
------------------------------------------------------------------------
-- OPTION is parsed below (see the MSG_OPTIONS builder) into a lookup
-- table of every valid --opt-*/--noopt-* flag; each line of the literal
-- is "flag,'description'".
local OPTION = [[
--opt-comments,'remove comments and block comments'
--opt-whitespace,'remove whitespace excluding EOLs'
--opt-emptylines,'remove empty lines'
--opt-eols,'all above, plus remove unnecessary EOLs'
--opt-strings,'optimize strings and long strings'
--opt-numbers,'optimize numbers'
--opt-locals,'optimize local variable names'
--opt-entropy,'tries to reduce symbol entropy of locals'
]]
-- preset configuration, applied at startup before command-line flags
-- are read (see set_options(DEFAULT_CONFIG) near the end of the file)
local DEFAULT_CONFIG = [[
--opt-comments --opt-whitespace --opt-emptylines
--opt-numbers --opt-locals
]]
-- override configurations: MUST explicitly enable/disable everything
local BASIC_CONFIG = [[
--opt-comments --opt-whitespace --opt-emptylines
--noopt-eols --noopt-strings --noopt-numbers
--noopt-locals
]]
local MAXIMUM_CONFIG = [[
--opt-comments --opt-whitespace --opt-emptylines
--opt-eols --opt-strings --opt-numbers
--opt-locals --opt-entropy
]]
local NONE_CONFIG = [[
--noopt-comments --noopt-whitespace --noopt-emptylines
--noopt-eols --noopt-strings --noopt-numbers
--noopt-locals
]]
local DEFAULT_SUFFIX = "_"      -- default suffix for file renaming
local PLUGIN_SUFFIX = "plugin/" -- relative location of plugins
--[[--------------------------------------------------------------------
-- startup and initialize option list handling
----------------------------------------------------------------------]]
-- Fatal-error reporter: prints a prefixed message and terminates the
-- process.  Swap the implementation for `error` if a traceback is
-- desired instead of a clean exit.
local function die(msg)
  print("LuaSrcDiet: "..msg)
  os.exit()
end
--die = error--DEBUG
-- Sanity check: this tool targets the Lua 5.1 lexer/VM.
-- NOTE: the original used string.match(_VERSION, "5.1", 1, 1); match()
-- silently ignores a 4th argument (the plain flag belongs to find) and
-- treats "5.1" as a pattern in which '.' matches any character.  Use a
-- plain-text find so the check means exactly what it says.
if not string.find(_VERSION, "5.1", 1, true) then -- sanity check
  die("requires Lua 5.1 to run")
end
------------------------------------------------------------------------
-- build the human-readable option list for the usage text, and convert
-- OPTION from its textual form into a lookup table of valid flags
------------------------------------------------------------------------
local MSG_OPTIONS = ""
do
  local WIDTH = 24
  local lookup = {}
  for flag, desc in gmatch(OPTION, "%s*([^,]+),'([^']+)'") do
    local line = " "..flag
    line = line..string.rep(" ", WIDTH - #line)..desc.."\n"
    MSG_OPTIONS = MSG_OPTIONS..line
    lookup[flag] = true
    lookup["--no"..sub(flag, 3)] = true  -- negative form, e.g. --noopt-comments
  end
  OPTION = lookup -- replace OPTION with lookup table
end
MSG_USAGE = string.format(MSG_USAGE, MSG_OPTIONS, DEFAULT_CONFIG)
------------------------------------------------------------------------
-- global variable initialization, option set handling
------------------------------------------------------------------------
local suffix = DEFAULT_SUFFIX -- file suffix
local option = {} -- program options
local stat_c, stat_l -- statistics tables
-- Applies a whitespace-separated list of --opt-*/--noopt-* flags to the
-- module-level `option` table (true to enable, false to disable).
-- note: additional forced settings for --opt-eols is done in optlex.lua
local function set_options(CONFIG)
  for flag in gmatch(CONFIG, "(%-%-%S+)") do
    -- a flag negates only when it starts with "--no" AND the positive
    -- form exists in the OPTION lookup table
    local negated = sub(flag, 3, 4) == "no" and OPTION["--"..sub(flag, 5)]
    if negated then
      option[sub(flag, 5)] = false  -- key without the "--no" prefix
    else
      option[sub(flag, 3)] = true   -- key without the "--" prefix
    end
  end
end
--[[--------------------------------------------------------------------
-- support functions
----------------------------------------------------------------------]]
-- list of token types, parser-significant types are up to TTYPE_GRAMMAR
-- while the rest are not used by parsers; arranged for stats display
local TTYPES = {
  "TK_KEYWORD", "TK_NAME", "TK_NUMBER", -- grammar
  "TK_STRING", "TK_LSTRING", "TK_OP",
  "TK_EOS",
  "TK_COMMENT", "TK_LCOMMENT", -- non-grammar
  "TK_EOL", "TK_SPACE",
}
-- index of TK_EOS above: entries 1..7 are the grammar-significant types
local TTYPE_GRAMMAR = 7
local EOLTYPES = { -- EOL names for token dump
  ["\n"] = "LF", ["\r"] = "CR",
  ["\n\r"] = "LFCR", ["\r\n"] = "CRLF",
}
------------------------------------------------------------------------
-- read source code from file
------------------------------------------------------------------------
-- Returns the entire contents of `fname` (binary mode) as a string;
-- aborts via die() on any I/O failure.
local function load_file(fname)
  local handle = io.open(fname, "rb")
  if not handle then
    die("cannot open \""..fname.."\" for reading")
  end
  local contents = handle:read("*a")
  if not contents then
    die("cannot read from \""..fname.."\"")
  end
  handle:close()
  return contents
end
------------------------------------------------------------------------
-- save source code to file
------------------------------------------------------------------------
-- Writes the string `dat` to `fname` (binary mode); aborts via die()
-- on any I/O failure.
local function save_file(fname, dat)
  local handle = io.open(fname, "wb")
  if not handle then
    die("cannot open \""..fname.."\" for writing")
  end
  if not handle:write(dat) then
    die("cannot write to \""..fname.."\"")
  end
  handle:close()
end
------------------------------------------------------------------------
-- functions to deal with statistics
------------------------------------------------------------------------
-- (Re)creates the per-token-type counters: stat_c counts tokens and
-- stat_l sums their byte lengths, both zeroed for every type in TTYPES.
local function stat_init()
  stat_c, stat_l = {}, {}
  for _, ttype in ipairs(TTYPES) do
    stat_c[ttype], stat_l[ttype] = 0, 0
  end
end
-- Accumulates one token occurrence: bumps its count and adds the
-- byte length of its seminfo text.
local function stat_add(tok, seminfo)
  local count, bytes = stat_c[tok], stat_l[tok]
  stat_c[tok] = count + 1
  stat_l[tok] = bytes + #seminfo
end
-- Computes totals over the statistics tables (TOTAL_TOK for grammar
-- tokens only, TOTAL_ALL over everything) and returns a table of
-- per-type average token lengths.
local function stat_calc()
  -- safe average: 0 when there are no tokens of the type
  local function avg(count, bytes)
    return count == 0 and 0 or bytes / count
  end
  local stat_a = {}
  -- totals over grammar-significant tokens (indices 1..TTYPE_GRAMMAR)
  local count, bytes = 0, 0
  for i = 1, TTYPE_GRAMMAR do
    local ttype = TTYPES[i]
    count = count + stat_c[ttype]
    bytes = bytes + stat_l[ttype]
  end
  stat_c.TOTAL_TOK, stat_l.TOTAL_TOK = count, bytes
  stat_a.TOTAL_TOK = avg(count, bytes)
  -- totals over all token types, plus per-type averages
  count, bytes = 0, 0
  for i = 1, #TTYPES do
    local ttype = TTYPES[i]
    count = count + stat_c[ttype]
    bytes = bytes + stat_l[ttype]
    stat_a[ttype] = avg(stat_c[ttype], stat_l[ttype])
  end
  stat_c.TOTAL_ALL, stat_l.TOTAL_ALL = count, bytes
  stat_a.TOTAL_ALL = avg(count, bytes)
  return stat_a
end
--[[--------------------------------------------------------------------
-- main tasks
----------------------------------------------------------------------]]
------------------------------------------------------------------------
-- a simple token dumper, minimal translation of seminfo data
------------------------------------------------------------------------
-- Lexes `srcfl` and prints one "TOKTYPE seminfo" line per token.
local function dump_tokens(srcfl)
  --------------------------------------------------------------------
  -- load file and process source input into tokens
  --------------------------------------------------------------------
  local z = load_file(srcfl)
  llex.init(z)
  llex.llex()
  local toklist, seminfolist = llex.tok, llex.seminfo
  --------------------------------------------------------------------
  -- display output
  --------------------------------------------------------------------
  for i = 1, #toklist do
    local tok, seminfo = toklist[i], seminfolist[i]
    if tok == "TK_OP" and string.byte(seminfo) < 32 then
      -- control characters are shown numerically, e.g. "(9)" for TAB
      seminfo = "(".. string.byte(seminfo)..")"
    elseif tok == "TK_EOL" then
      -- translate raw line endings to symbolic names (LF, CRLF, ...)
      seminfo = EOLTYPES[seminfo]
    else
      seminfo = "'"..seminfo.."'"
    end
    print(tok.." "..seminfo)
  end--for
end
----------------------------------------------------------------------
-- parser dump; dump globalinfo and localinfo tables
----------------------------------------------------------------------
-- Lexes and parses `srcfl`, then dumps the parser's global/local
-- variable tracking tables in human-readable form.
local function dump_parser(srcfl)
  local print = print
  --------------------------------------------------------------------
  -- load file and process source input into tokens
  --------------------------------------------------------------------
  local z = load_file(srcfl)
  llex.init(z)
  llex.llex()
  local toklist, seminfolist, toklnlist
    = llex.tok, llex.seminfo, llex.tokln
  --------------------------------------------------------------------
  -- do parser optimization here
  --------------------------------------------------------------------
  lparser.init(toklist, seminfolist, toklnlist)
  local globalinfo, localinfo = lparser.parser()
  --------------------------------------------------------------------
  -- display output
  --------------------------------------------------------------------
  local hl = string.rep("-", 72)
  print("*** Local/Global Variable Tracker Tables ***")
  print(hl.."\n GLOBALS\n"..hl)
  -- global tables have a list of xref numbers only
  for i = 1, #globalinfo do
    local obj = globalinfo[i]
    local msg = "("..i..") '"..obj.name.."' -> "
    local xref = obj.xref
    for j = 1, #xref do msg = msg..xref[j].." " end
    print(msg)
  end
  -- local tables have xref numbers and a few other special
  -- numbers that are specially named: decl (declaration xref),
  -- act (activation xref), rem (removal xref)
  print(hl.."\n LOCALS (decl=declared act=activated rem=removed)\n"..hl)
  for i = 1, #localinfo do
    local obj = localinfo[i]
    local msg = "("..i..") '"..obj.name.."' decl:"..obj.decl..
                " act:"..obj.act.." rem:"..obj.rem
    if obj.isself then
      msg = msg.." isself"  -- flagged by the parser; presumably a method's implicit self
    end
    msg = msg.." -> "
    local xref = obj.xref
    for j = 1, #xref do msg = msg..xref[j].." " end
    print(msg)
  end
  print(hl.."\n")
end
------------------------------------------------------------------------
-- reads source file(s) and reports some statistics
------------------------------------------------------------------------
-- Lexes `srcfl` and prints a count/bytes/average statistics table per
-- token type; no output file is written.
local function read_only(srcfl)
  local print = print
  --------------------------------------------------------------------
  -- load file and process source input into tokens
  --------------------------------------------------------------------
  local z = load_file(srcfl)
  llex.init(z)
  llex.llex()
  local toklist, seminfolist = llex.tok, llex.seminfo
  print(MSG_TITLE)
  print("Statistics for: "..srcfl.."\n")
  --------------------------------------------------------------------
  -- collect statistics
  --------------------------------------------------------------------
  stat_init()
  for i = 1, #toklist do
    local tok, seminfo = toklist[i], seminfolist[i]
    stat_add(tok, seminfo)
  end--for
  local stat_a = stat_calc()
  --------------------------------------------------------------------
  -- display output
  --------------------------------------------------------------------
  local fmt = string.format
  -- count / bytes / average for one token type, as three values
  local function figures(tt)
    return stat_c[tt], stat_l[tt], stat_a[tt]
  end
  local tabf1, tabf2 = "%-16s%8s%8s%10s", "%-16s%8d%8d%10.2f"
  local hl = string.rep("-", 42)
  print(fmt(tabf1, "Lexical", "Input", "Input", "Input"))
  print(fmt(tabf1, "Elements", "Count", "Bytes", "Average"))
  print(hl)
  for i = 1, #TTYPES do
    local ttype = TTYPES[i]
    print(fmt(tabf2, ttype, figures(ttype)))
    -- horizontal rule between grammar and non-grammar rows
    if ttype == "TK_EOS" then print(hl) end
  end
  print(hl)
  print(fmt(tabf2, "Total Elements", figures("TOTAL_ALL")))
  print(hl)
  print(fmt(tabf2, "Total Tokens", figures("TOTAL_TOK")))
  print(hl.."\n")
end
------------------------------------------------------------------------
-- process source file(s), write output and reports some statistics
------------------------------------------------------------------------
-- Runs the full diet pipeline on one file: lex, optionally parse and
-- rename locals, lexer-level optimization, then writes `destfl` and
-- prints before/after statistics.  Plugin hooks may abort any stage
-- by setting option.EXIT.
local function process_file(srcfl, destfl)
  -- local print honours --quiet
  local function print(...)            -- handle quiet option
    if option.QUIET then return end
    _G.print(...)
  end
  if plugin and plugin.init then       -- plugin init
    option.EXIT = false
    plugin.init(option, srcfl, destfl)
    if option.EXIT then return end
  end
  print(MSG_TITLE)                     -- title message
  --------------------------------------------------------------------
  -- load file and process source input into tokens
  --------------------------------------------------------------------
  local z = load_file(srcfl)
  if plugin and plugin.post_load then  -- plugin post-load
    z = plugin.post_load(z) or z
    if option.EXIT then return end
  end
  llex.init(z)
  llex.llex()
  local toklist, seminfolist, toklnlist
    = llex.tok, llex.seminfo, llex.tokln
  if plugin and plugin.post_lex then   -- plugin post-lex
    plugin.post_lex(toklist, seminfolist, toklnlist)
    if option.EXIT then return end
  end
  --------------------------------------------------------------------
  -- collect 'before' statistics
  --------------------------------------------------------------------
  stat_init()
  for i = 1, #toklist do
    local tok, seminfo = toklist[i], seminfolist[i]
    stat_add(tok, seminfo)
  end--for
  local stat1_a = stat_calc()
  -- keep references to the 'before' tables; the stat_init() below
  -- rebinds the module-level stat_c/stat_l to fresh tables
  local stat1_c, stat1_l = stat_c, stat_l
  --------------------------------------------------------------------
  -- do parser optimization here
  --------------------------------------------------------------------
  if option["opt-locals"] then
    optparser.print = print  -- hack
    lparser.init(toklist, seminfolist, toklnlist)
    local globalinfo, localinfo = lparser.parser()
    if plugin and plugin.post_parse then -- plugin post-parse
      plugin.post_parse(globalinfo, localinfo)
      if option.EXIT then return end
    end
    optparser.optimize(option, toklist, seminfolist, globalinfo, localinfo)
    if plugin and plugin.post_optparse then -- plugin post-optparse
      plugin.post_optparse()
      if option.EXIT then return end
    end
  end
  --------------------------------------------------------------------
  -- do lexer optimization here, save output file
  --------------------------------------------------------------------
  optlex.print = print  -- hack
  toklist, seminfolist, toklnlist
    = optlex.optimize(option, toklist, seminfolist, toklnlist)
  if plugin and plugin.post_optlex then -- plugin post-optlex
    plugin.post_optlex(toklist, seminfolist, toklnlist)
    if option.EXIT then return end
  end
  local dat = table.concat(seminfolist)
  -- depending on options selected, embedded EOLs in long strings and
  -- long comments may not have been translated to \n, tack a warning
  -- (the 4th argument 1 is truthy, so find() does a plain-text search)
  if string.find(dat, "\r\n", 1, 1) or
     string.find(dat, "\n\r", 1, 1) then
    optlex.warn.mixedeol = true
  end
  -- save optimized source stream to output file
  save_file(destfl, dat)
  --------------------------------------------------------------------
  -- collect 'after' statistics
  --------------------------------------------------------------------
  stat_init()
  for i = 1, #toklist do
    local tok, seminfo = toklist[i], seminfolist[i]
    stat_add(tok, seminfo)
  end--for
  local stat_a = stat_calc()
  --------------------------------------------------------------------
  -- display output
  --------------------------------------------------------------------
  print("Statistics for: "..srcfl.." -> "..destfl.."\n")
  local fmt = string.format
  -- before (stat1_*) and after (stat_*) figures for one token type
  local function figures(tt)
    return stat1_c[tt], stat1_l[tt], stat1_a[tt],
           stat_c[tt], stat_l[tt], stat_a[tt]
  end
  local tabf1, tabf2 = "%-16s%8s%8s%10s%8s%8s%10s",
                       "%-16s%8d%8d%10.2f%8d%8d%10.2f"
  local hl = string.rep("-", 68)
  print("*** lexer-based optimizations summary ***\n"..hl)
  print(fmt(tabf1, "Lexical",
            "Input", "Input", "Input",
            "Output", "Output", "Output"))
  print(fmt(tabf1, "Elements",
            "Count", "Bytes", "Average",
            "Count", "Bytes", "Average"))
  print(hl)
  for i = 1, #TTYPES do
    local ttype = TTYPES[i]
    print(fmt(tabf2, ttype, figures(ttype)))
    -- horizontal rule between grammar and non-grammar rows
    if ttype == "TK_EOS" then print(hl) end
  end
  print(hl)
  print(fmt(tabf2, "Total Elements", figures("TOTAL_ALL")))
  print(hl)
  print(fmt(tabf2, "Total Tokens", figures("TOTAL_TOK")))
  print(hl)
  --------------------------------------------------------------------
  -- report warning flags from optimizing process
  --------------------------------------------------------------------
  if optlex.warn.lstring then
    print("* WARNING: "..optlex.warn.lstring)
  elseif optlex.warn.mixedeol then
    print("* WARNING: ".."output still contains some CRLF or LFCR line endings")
  end
  print()
end
--[[--------------------------------------------------------------------
-- main functions
----------------------------------------------------------------------]]
-- vararg capture: when run as a script, `...` holds the command line
local arg = {...} -- program arguments
local fspec = {}  -- filenames collected by main()
set_options(DEFAULT_CONFIG) -- set to default options at beginning
------------------------------------------------------------------------
-- per-file handling, ship off to tasks
------------------------------------------------------------------------
-- Processes each source file in fspec: derives the destination name
-- (suffix inserted before the extension, or the -o override when a
-- single file was given) and dispatches to the requested task.
local function do_files(fspec)
  for _, source in ipairs(fspec) do
    ------------------------------------------------------------------
    -- find and replace extension for filenames
    ------------------------------------------------------------------
    -- split "name.ext" so the suffix lands between name and extension
    local dot_start, dot_end = string.find(source, "%.[^%.%\\%/]*$")
    local stem, ext = source, ""
    if dot_start and dot_start > 1 then
      stem = sub(source, 1, dot_start - 1)
      ext = sub(source, dot_start, dot_end)
    end
    local dest = stem..suffix..ext
    -- -o only applies when exactly one input file was given
    if #fspec == 1 and option.OUTPUT_FILE then
      dest = option.OUTPUT_FILE
    end
    if source == dest then
      die("output filename identical to input filename")
    end
    ------------------------------------------------------------------
    -- perform requested operations
    ------------------------------------------------------------------
    if option.DUMP_LEXER then
      dump_tokens(source)
    elseif option.DUMP_PARSER then
      dump_parser(source)
    elseif option.READ_ONLY then
      read_only(source)
    else
      process_file(source, dest)
    end
  end
end
------------------------------------------------------------------------
-- main function (entry point is after this definition)
------------------------------------------------------------------------
-- Parses command-line arguments into `option`/`suffix`/`fspec`, then
-- either prints help/version text or hands the collected filenames to
-- do_files().  Returns true on success; bad usage aborts via die().
local function main()
  local argn, i = #arg, 1
  if argn == 0 then
    option.HELP = true  -- no arguments: fall through to usage text
  end
  --------------------------------------------------------------------
  -- handle arguments
  --------------------------------------------------------------------
  while i <= argn do
    -- o is the current argument, p the (possible) parameter after it
    local o, p = arg[i], arg[i + 1]
    local dash = string.match(o, "^%-%-?")
    if dash == "-" then                 -- single-dash options
      if o == "-h" then
        option.HELP = true; break
      elseif o == "-v" then
        option.VERSION = true; break
      elseif o == "-s" then
        if not p then die("-s option needs suffix specification") end
        suffix = p
        i = i + 1                       -- consume the parameter
      elseif o == "-o" then
        if not p then die("-o option needs a file name") end
        option.OUTPUT_FILE = p
        i = i + 1                       -- consume the parameter
      elseif o == "-" then
        break -- ignore rest of args
      else
        die("unrecognized option "..o)
      end
    elseif dash == "--" then            -- double-dash options
      if o == "--help" then
        option.HELP = true; break
      elseif o == "--version" then
        option.VERSION = true; break
      elseif o == "--keep" then
        if not p then die("--keep option needs a string to match for") end
        option.KEEP = p
        i = i + 1                       -- consume the parameter
      elseif o == "--plugin" then
        if not p then die("--plugin option needs a module name") end
        if option.PLUGIN then die("only one plugin can be specified") end
        option.PLUGIN = p
        plugin = require(PLUGIN_SUFFIX..p)  -- load from plugin/ directory
        i = i + 1                       -- consume the parameter
      elseif o == "--quiet" then
        option.QUIET = true
      elseif o == "--read-only" then
        option.READ_ONLY = true
      elseif o == "--basic" then
        set_options(BASIC_CONFIG)
      elseif o == "--maximum" then
        set_options(MAXIMUM_CONFIG)
      elseif o == "--none" then
        set_options(NONE_CONFIG)
      elseif o == "--dump-lexer" then
        option.DUMP_LEXER = true
      elseif o == "--dump-parser" then
        option.DUMP_PARSER = true
      elseif o == "--details" then
        option.DETAILS = true
      elseif OPTION[o] then -- lookup optimization options
        set_options(o)
      else
        die("unrecognized option "..o)
      end
    else
      fspec[#fspec + 1] = o -- potential filename
    end
    i = i + 1
  end--while
  -- informational modes short-circuit before any file processing
  if option.HELP then
    print(MSG_TITLE..MSG_USAGE); return true
  elseif option.VERSION then
    print(MSG_TITLE); return true
  end
  if #fspec > 0 then
    if #fspec > 1 and option.OUTPUT_FILE then
      die("with -o, only one source file can be specified")
    end
    do_files(fspec)
    return true
  else
    die("nothing to do!")
  end
end
-- entry point -> main() -> do_files()
-- main() returns true on success (including help/version display) or
-- aborts via die() itself, so this guard is a defensive fallback.
if not main() then
  die("Please run with option -h or --help for usage information")
end
-- end of script

View file

@ -0,0 +1,355 @@
--[[--------------------------------------------------------------------
llex.lua: Lua 5.1 lexical analyzer in Lua
This file is part of LuaSrcDiet, based on Yueliang material.
Copyright (c) 2008 Kein-Hong Man <khman@users.sf.net>
The COPYRIGHT file describes the conditions
under which this software may be distributed.
See the ChangeLog for more information.
----------------------------------------------------------------------]]
--[[--------------------------------------------------------------------
-- NOTES:
-- * This is a version of the native 5.1.x lexer from Yueliang 0.4.0,
-- with significant modifications to handle LuaSrcDiet's needs:
-- (1) llex.error is an optional error function handler
-- (2) seminfo for strings include their delimiters and no
-- translation operations are performed on them
-- * ADDED shbang handling has been added to support executable scripts
-- * NO localized decimal point replacement magic
-- * NO limit to number of lines
-- * NO support for compatible long strings (LUA_COMPAT_LSTR)
-- * Please read technotes.txt for more technical details.
----------------------------------------------------------------------]]
-- Keep references to the global environment and the string library
-- before module() replaces this chunk's environment.
local base = _G
local string = require "string"
-- NOTE(review): module() is the Lua 5.1 idiom; deprecated in 5.2+.
module "llex"
local find = string.find
local match = string.match
local sub = string.sub

----------------------------------------------------------------------
-- initialize keyword list, variables
----------------------------------------------------------------------

-- set of Lua 5.1 reserved words, used to classify TK_KEYWORD vs TK_NAME
local kw = {}
for v in string.gmatch([[
and break do else elseif end false for function if in
local nil not or repeat return then true until while]], "%S+") do
  kw[v] = true
end

-- NOTE: see init() for module variables (externally visible):
--       tok, seminfo, tokln
local z,              -- source stream (entire source as one string)
      sourceid,       -- name of source (used in error messages)
      I,              -- position of lexer (1-based index into z)
      buff,           -- buffer for strings (start index of lexeme)
      ln              -- current line number
----------------------------------------------------------------------
-- add information to token listing
----------------------------------------------------------------------
-- Append one token to the module-level result lists (tok/seminfo/tokln),
-- recording the current line number alongside it.
local function addtoken(token, info)
  local n = #tok + 1
  tok[n], seminfo[n], tokln[n] = token, info, ln
end
----------------------------------------------------------------------
-- handles line number incrementation and end-of-line characters
----------------------------------------------------------------------
-- Consume one end-of-line sequence at position i in the source z.
-- All four EOL conventions (\n, \r, \r\n, \n\r) count as a single
-- line break. Bumps the line counter, advances the module cursor I,
-- and optionally records a TK_EOL token carrying the exact EOL text.
-- Returns the position just past the EOL sequence.
local function inclinenumber(i, is_tok)
  local sub = sub                -- localize for speed
  local old = sub(z, i, i)
  i = i + 1  -- skip '\n' or '\r'
  local c = sub(z, i, i)
  if (c == "\n" or c == "\r") and (c ~= old) then
    i = i + 1  -- skip '\n\r' or '\r\n' (two-char EOL counted once)
    old = old..c
  end
  if is_tok then addtoken("TK_EOL", old) end
  ln = ln + 1
  I = i
  return i
end
----------------------------------------------------------------------
-- initialize lexer for given source _z and source name _sourceid
----------------------------------------------------------------------
-- Prepare the lexer for a new source string.
-- _z is the complete source text; _sourceid names it for diagnostics
-- (a leading '=' or '@' marker is handled by chunkid()). Resets the
-- cursor and line counters and creates the three externally visible
-- result lists. If the source starts with a shbang line (#...), it is
-- consumed here and recorded as a TK_COMMENT token so executable
-- scripts lex cleanly.
function init(_z, _sourceid)
  z = _z                        -- source
  sourceid = _sourceid          -- name of source
  I = 1                         -- lexer's position in source
  ln = 1                        -- line number
  tok = {}                      -- lexed token list*
  seminfo = {}                  -- lexed semantic information list*
  tokln = {}                    -- line numbers for messages*
                                -- (*) externally visible thru' module
  --------------------------------------------------------------------
  -- initial processing (shbang handling)
  --------------------------------------------------------------------
  local p, _, q, r = find(z, "^(#[^\r\n]*)(\r?\n?)")
  if p then                             -- skip first line
    I = I + #q
    addtoken("TK_COMMENT", q)
    if #r > 0 then inclinenumber(I, true) end
  end
end
----------------------------------------------------------------------
-- returns a chunk name or id, no truncation for long names
----------------------------------------------------------------------
-- Produce a readable chunk name for error messages: strip the leading
-- '=' or '@' marker from sourceid when present, otherwise fall back
-- to the generic "[string]" label.
function chunkid()
  local id = sourceid
  if id and match(id, "^[=@]") then
    return sub(id, 2)
  end
  return "[string]"
end
----------------------------------------------------------------------
-- formats error message and throws error
-- * a simplified version, does not report what token was responsible
----------------------------------------------------------------------
-- Format a lexer error as "chunk:line: message" and raise it.
-- Uses the module-level 'error' hook when a host application has
-- installed one, falling back to the standard error() function.
function errorline(s, line)
  local raise = error or base.error
  raise(string.format("%s:%d: %s", chunkid(), line or ln, s))
end
local errorline = errorline
------------------------------------------------------------------------
-- count separators ("=") in a long string delimiter
------------------------------------------------------------------------
-- Count the '=' characters of a long-bracket delimiter whose first
-- bracket sits at position i. Advances the module cursor I past the
-- '=' run. Returns the separator count when the run is closed by a
-- matching bracket, or -(count)-1 when it is not a valid delimiter.
local function skip_sep(i)
  local sub = sub
  local bracket = sub(z, i, i)          -- '[' or ']'
  i = i + 1
  local count = #match(z, "=*", i)      -- length of the '=' run
  i = i + count
  I = i
  if sub(z, i, i) == bracket then
    return count
  end
  return -count - 1
end
----------------------------------------------------------------------
-- reads a long string or long comment
----------------------------------------------------------------------
-- Read a long string ("[[...]]") or long comment body, assuming the
-- opening delimiter has already been scanned (buff holds its start
-- index, sep its '=' count). Line numbers are maintained while
-- scanning. Returns the complete lexeme INCLUDING delimiters; raises
-- via errorline() on an unterminated string/comment.
local function read_long_string(is_str, sep)
  local i = I + 1  -- skip 2nd '['
  local sub = sub
  local c = sub(z, i, i)
  if c == "\r" or c == "\n" then  -- string starts with a newline?
    i = inclinenumber(i)          -- skip it
  end
  local j = i  -- (unused; kept from the original lexer)
  while true do
    local p, q, r = find(z, "([\r\n%]])", i) -- (long range)
    if not p then
      errorline(is_str and "unfinished long string" or
                "unfinished long comment")
    end
    i = p
    if r == "]" then                    -- delimiter test
      if skip_sep(i) == sep then
        -- buff was the start index; capture the whole lexeme with delims
        buff = sub(z, buff, I)
        I = I + 1  -- skip 2nd ']'
        return buff
      end
      i = I
    else                                -- newline
      -- NOTE(review): buff is a numeric start index at this point;
      -- the concat yields a numeric string whose value is unchanged
      -- when sub() later coerces it, so this line appears to be a
      -- harmless leftover from the buffer-building lexer -- confirm.
      buff = buff.."\n"
      i = inclinenumber(i)
    end
  end--while
end
----------------------------------------------------------------------
-- reads a string
----------------------------------------------------------------------
-- Read a single- or double-quoted string; the cursor I sits just past
-- the opening delimiter del and buff holds the index of that
-- delimiter. Escape sequences are validated but NOT translated: the
-- returned lexeme includes the quotes and the raw escapes. \ddd
-- values are range-checked; a bare newline or end-of-source raises
-- "unfinished string".
local function read_string(del)
  local i = I
  local find = find
  local sub = sub
  while true do
    local p, q, r = find(z, "([\n\r\\\"\'])", i) -- (long range)
    if p then
      if r == "\n" or r == "\r" then
        errorline("unfinished string")
      end
      i = p
      if r == "\\" then                         -- handle escapes
        i = i + 1
        r = sub(z, i, i)
        if r == "" then break end -- (EOZ error)
        p = find("abfnrtv\n\r", r, 1, true)
        ------------------------------------------------------
        if p then                               -- special escapes
          if p > 7 then
            i = inclinenumber(i)  -- \<eol>: still counts as a line
          else
            i = i + 1
          end
        ------------------------------------------------------
        elseif find(r, "%D") then               -- other non-digits
          i = i + 1
        ------------------------------------------------------
        else                                    -- \xxx sequence
          local p, q, s = find(z, "^(%d%d?%d?)", i)
          i = q + 1
          if s + 1 > 256 then -- UCHAR_MAX
            errorline("escape sequence too large")
          end
        ------------------------------------------------------
        end--if p
      else
        i = i + 1
        if r == del then                        -- ending delimiter
          I = i
          return sub(z, buff, i - 1)            -- lexeme incl. quotes
        end
      end--if r
    else
      break -- (error)
    end--if p
  end--while
  errorline("unfinished string")
end
------------------------------------------------------------------------
-- main lexer function
------------------------------------------------------------------------
-- Main lexer loop: tokenize all of z into tok/seminfo/tokln.
-- Emits both grammar tokens (TK_KEYWORD, TK_NAME, TK_NUMBER,
-- TK_STRING, TK_LSTRING, TK_OP, TK_EOS) and whitespace tokens
-- (TK_COMMENT, TK_LCOMMENT, TK_EOL, TK_SPACE) so the source can be
-- reconstructed exactly. Runs until a TK_EOS token is appended.
function llex()
  local find = find
  local match = match
  while true do--outer
    local i = I
    -- inner loop allows break to be used to nicely section tests
    while true do--inner
      ----------------------------------------------------------------
      -- identifier or reserved word
      local p, _, r = find(z, "^([_%a][_%w]*)", i)
      if p then
        I = i + #r
        if kw[r] then
          addtoken("TK_KEYWORD", r)     -- reserved word (keyword)
        else
          addtoken("TK_NAME", r)        -- identifier
        end
        break -- (continue)
      end
      ----------------------------------------------------------------
      -- numeral: a digit, or '.' immediately followed by a digit
      local p, _, r = find(z, "^(%.?)%d", i)
      if p then                                 -- numeral
        if r == "." then i = i + 1 end
        local _, q, r = find(z, "^%d*[%.%d]*([eE]?)", i)
        i = q + 1
        if #r == 1 then                         -- optional exponent
          if match(z, "^[%+%-]", i) then        -- optional sign
            i = i + 1
          end
        end
        local _, q = find(z, "^[_%w]*", i)
        I = q + 1
        local v = sub(z, p, q)                  -- string equivalent
        if not base.tonumber(v) then            -- handles hex test also
          errorline("malformed number")
        end
        addtoken("TK_NUMBER", v)
        break -- (continue)
      end
      ----------------------------------------------------------------
      -- run of whitespace, or an end-of-line
      local p, q, r, t = find(z, "^((%s)[ \t\v\f]*)", i)
      if p then
        if t == "\n" or t == "\r" then          -- newline
          inclinenumber(i, true)
        else
          I = q + 1                             -- whitespace
          addtoken("TK_SPACE", r)
        end
        break -- (continue)
      end
      ----------------------------------------------------------------
      -- punctuation: comments, strings, multi-char and single-char ops
      local r = match(z, "^%p", i)
      if r then
        buff = i   -- remember lexeme start for the string readers
        local p = find("-[\"\'.=<>~", r, 1, true)
        if p then
          -- two-level if block for punctuation/symbols
          --------------------------------------------------------
          if p <= 2 then
            if p == 1 then                      -- minus
              local c = match(z, "^%-%-(%[?)", i)
              if c then
                i = i + 2
                local sep = -1
                if c == "[" then
                  sep = skip_sep(i)
                end
                if sep >= 0 then                -- long comment
                  addtoken("TK_LCOMMENT", read_long_string(false, sep))
                else                            -- short comment
                  I = find(z, "[\n\r]", i) or (#z + 1)
                  addtoken("TK_COMMENT", sub(z, buff, I - 1))
                end
                break -- (continue)
              end
              -- (fall through for "-")
            else                                -- [ or long string
              local sep = skip_sep(i)
              if sep >= 0 then
                addtoken("TK_LSTRING", read_long_string(true, sep))
              elseif sep == -1 then
                addtoken("TK_OP", "[")
              else
                errorline("invalid long string delimiter")
              end
              break -- (continue)
            end
          --------------------------------------------------------
          elseif p <= 5 then
            if p < 5 then                       -- strings
              I = i + 1
              addtoken("TK_STRING", read_string(r))
              break -- (continue)
            end
            r = match(z, "^%.%.?%.?", i)        -- .|..|... dots
            -- (fall through)
          --------------------------------------------------------
          else                                  -- relational
            r = match(z, "^%p=?", i)
            -- (fall through)
          end
        end
        I = i + #r
        addtoken("TK_OP", r)  -- for other symbols, fall through
        break -- (continue)
      end
      ----------------------------------------------------------------
      -- anything else: a single char, or end of stream
      local r = sub(z, i, i)
      if r ~= "" then
        I = i + 1
        addtoken("TK_OP", r)          -- other single-char tokens
        break
      end
      addtoken("TK_EOS", "")          -- end of stream,
      return                          -- exit here
      ----------------------------------------------------------------
    end--while inner
  end--while outer
end
return base.getfenv()  -- module() made the module table our environment; hand it back

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,832 @@
--[[--------------------------------------------------------------------
optlex.lua: does lexer-based optimizations
This file is part of LuaSrcDiet.
Copyright (c) 2008 Kein-Hong Man <khman@users.sf.net>
The COPYRIGHT file describes the conditions
under which this software may be distributed.
See the ChangeLog for more information.
----------------------------------------------------------------------]]
--[[--------------------------------------------------------------------
-- NOTES:
-- * For more lexer-based optimization ideas, see the TODO items or
-- look at technotes.txt.
-- * TODO: general string delimiter conversion optimizer
-- * TODO: (numbers) warn if overly significant digit
----------------------------------------------------------------------]]
-- Keep handles on the global environment and the string library
-- before module() swaps out this chunk's environment.
local base = _G
local string = require "string"
-- NOTE(review): module() is the Lua 5.1 idiom; deprecated in 5.2+.
module "optlex"
local match = string.match
local sub = string.sub
local find = string.find
local rep = string.rep
local print            -- bound inside optimize() (option-dependent)

------------------------------------------------------------------------
-- variables and data structures
------------------------------------------------------------------------

-- error function, can override by setting own function into module
error = base.error
warn = {}  -- table for warning flags

local stoks, sinfos, stoklns  -- source lists: tokens, lexemes, line numbers

local is_realtoken = {  -- significant (grammar) tokens
  TK_KEYWORD = true,
  TK_NAME = true,
  TK_NUMBER = true,
  TK_STRING = true,
  TK_LSTRING = true,
  TK_OP = true,
  TK_EOS = true,
}
local is_faketoken = {  -- whitespace (non-grammar) tokens
  TK_COMMENT = true,
  TK_LCOMMENT = true,
  TK_EOL = true,
  TK_SPACE = true,
}

local opt_details  -- for extra information (change reporting counter)
------------------------------------------------------------------------
-- true if current token is at the start of a line
-- * skips over deleted tokens via recursion
------------------------------------------------------------------------
-- True when token i begins a line: deleted ("") neighbours to the
-- left are skipped; the walk stops at a TK_EOL or the start of the
-- list. (Iterative form of the original recursion.)
local function atlinestart(i)
  while i > 1 do
    local t = stoks[i - 1]
    if t == "TK_EOL" then return true end
    if t ~= "" then return false end
    i = i - 1
  end
  return true
end
------------------------------------------------------------------------
-- true if current token is at the end of a line
-- * skips over deleted tokens via recursion
------------------------------------------------------------------------
-- True when token i ends a line: deleted ("") neighbours to the right
-- are skipped; the walk stops at TK_EOL/TK_EOS or the end of the
-- list. (Iterative form of the original recursion.)
local function atlineend(i)
  while i < #stoks do
    local t = stoks[i + 1]
    if t == "TK_EOL" or t == "TK_EOS" then return true end
    if t ~= "" then return false end
    i = i + 1
  end
  return true
end
------------------------------------------------------------------------
-- counts comment EOLs inside a long comment
-- * in order to keep line numbering, EOLs need to be reinserted
------------------------------------------------------------------------
-- Count the end-of-line sequences inside a long comment's body.
-- A CRLF or LFCR pair counts as a single EOL. Needed so that a
-- deleted long comment can be replaced with the same number of
-- newlines, keeping line numbering intact.
local function commenteols(lcomment)
  local sep = #match(lcomment, "^%-%-%[=*%[")      -- opening delim length
  local body = sub(lcomment, sep + 1, -(sep - 1))  -- strip both delims
  local pos, count = 1, 0
  while true do
    local s, _, first, second = find(body, "([\r\n])([\r\n]?)", pos)
    if not s then
      return count
    end
    count = count + 1
    pos = s + 1
    if #second > 0 and first ~= second then
      pos = pos + 1  -- two-char EOL (CRLF/LFCR) consumed as one
    end
  end
end
------------------------------------------------------------------------
-- compares two tokens (i, j) and returns the whitespace required
-- * important! see technotes.txt for more information
-- * only two grammar/real tokens are being considered
-- * if "", no separation is needed
-- * if " ", then at least one whitespace (or EOL) is required
------------------------------------------------------------------------
-- Decide the whitespace needed between two adjacent grammar tokens
-- i and j: returns "" when they may touch, or " " when at least one
-- space (or EOL) must separate them so the output re-lexes
-- identically (see technotes.txt). Only real (grammar) tokens are
-- considered here.
local function checkpair(i, j)
  local match = match
  local t1, t2 = stoks[i], stoks[j]
  --------------------------------------------------------------------
  -- strings are self-delimiting; nothing can merge with them
  if t1 == "TK_STRING" or t1 == "TK_LSTRING" or
     t2 == "TK_STRING" or t2 == "TK_LSTRING" then
    return ""
  --------------------------------------------------------------------
  elseif t1 == "TK_OP" or t2 == "TK_OP" then
    if (t1 == "TK_OP" and (t2 == "TK_KEYWORD" or t2 == "TK_NAME")) or
       (t2 == "TK_OP" and (t1 == "TK_KEYWORD" or t1 == "TK_NAME")) then
      return ""
    end
    if t1 == "TK_OP" and t2 == "TK_OP" then
      -- for TK_OP/TK_OP pairs, see notes in technotes.txt
      -- (e.g. ".." followed by "." would merge into "...")
      local op, op2 = sinfos[i], sinfos[j]
      if (match(op, "^%.%.?$") and match(op2, "^%.")) or
         (match(op, "^[~=<>]$") and op2 == "=") or
         (op == "[" and (op2 == "[" or op2 == "=")) then
        return " "
      end
      return ""
    end
    -- "TK_OP" + "TK_NUMBER" case
    local op = sinfos[i]
    if t2 == "TK_OP" then op = sinfos[j] end
    if match(op, "^%.%.?%.?$") then
      return " "  -- dots could fuse with a number's decimal point
    end
    return ""
  --------------------------------------------------------------------
  else-- "TK_KEYWORD" | "TK_NAME" | "TK_NUMBER" then
    -- two word-like tokens would fuse into a single identifier/number
    return " "
  --------------------------------------------------------------------
  end
end
------------------------------------------------------------------------
-- repack tokens, removing deletions caused by optimization process
------------------------------------------------------------------------
-- Rebuild the three source lists, dropping every entry whose token
-- was blanked out ("" marks a deletion made by an earlier pass).
local function repack_tokens()
  local ntoks, ninfos, nlns = {}, {}, {}
  local n = 0
  for i = 1, #stoks do
    local t = stoks[i]
    if t ~= "" then
      n = n + 1
      ntoks[n], ninfos[n], nlns[n] = t, sinfos[i], stoklns[i]
    end
  end
  stoks, sinfos, stoklns = ntoks, ninfos, nlns
end
------------------------------------------------------------------------
-- number optimization
-- * optimization using string formatting functions is one way of doing
-- this, but here, we consider all cases and handle them separately
-- (possibly an idiotic approach...)
-- * scientific notation being generated is not in canonical form, this
-- may or may not be a bad thing, feedback welcome
-- * note: intermediate portions need to fit into a normal number range
-- * optimizations can be divided based on number patterns:
-- * hexadecimal:
-- (1) no need to remove leading zeros, just skip to (2)
-- (2) convert to integer if size equal or smaller
-- * change if equal size -> lose the 'x' to reduce entropy
-- (3) number is then processed as an integer
-- (4) note: does not make 0[xX] consistent
-- * integer:
-- (1) note: includes anything with trailing ".", ".0", ...
-- (2) remove useless fractional part, if present, e.g. 123.000
-- (3) remove leading zeros, e.g. 000123
-- (4) switch to scientific if shorter, e.g. 123000 -> 123e3
-- * with fraction:
-- (1) split into digits dot digits
-- (2) if no integer portion, take as zero (can omit later)
-- (3) handle degenerate .000 case, after which the fractional part
-- must be non-zero (if zero, it's matched as an integer)
-- (4) remove trailing zeros for fractional portion
-- (5) p.q where p > 0 and q > 0 cannot be shortened any more
-- (6) otherwise p == 0 and the form is .q, e.g. .000123
-- (7) if scientific shorter, convert, e.g. .000123 -> 123e-6
-- * scientific:
-- (1) split into (digits dot digits) [eE] ([+-] digits)
-- (2) if significand has ".", shift it out so it becomes an integer
-- (3) if significand is zero, just use zero
-- (4) remove leading zeros for significand
-- (5) shift out trailing zeros for significand
-- (6) examine exponent and determine which format is best:
-- integer, with fraction, scientific
------------------------------------------------------------------------
-- Rewrite the number lexeme at token index i into its shortest
-- equivalent representation (see the strategy comment above).
-- sinfos[i] is replaced only when the result is strictly different;
-- the change is reported when opt_details is enabled.
local function do_number(i)
  local before = sinfos[i]      -- 'before'
  local z = before              -- working representation
  local y                       -- 'after', if better
  --------------------------------------------------------------------
  if match(z, "^0[xX]") then            -- hexadecimal number
    local v = base.tostring(base.tonumber(z))
    if #v <= #z then
      z = v  -- change to integer, AND continue
    else
      return  -- no change; stick to hex
    end
  end
  --------------------------------------------------------------------
  if match(z, "^%d+%.?0*$") then        -- integer or has useless frac
    z = match(z, "^(%d+)%.?0*$")        -- int portion only
    if z + 0 > 0 then
      z = match(z, "^0*([1-9]%d*)$")    -- remove leading zeros
      local v = #match(z, "0*$")
      local nv = base.tostring(v)
      if v > #nv + 1 then               -- scientific is shorter
        z = sub(z, 1, #z - v).."e"..nv
      end
      y = z
    else
      y = "0"  -- basic zero
    end
  --------------------------------------------------------------------
  elseif not match(z, "[eE]") then      -- number with fraction part
    local p, q = match(z, "^(%d*)%.(%d+)$")  -- split
    if p == "" then p = 0 end                -- int part zero
    if q + 0 == 0 and p == 0 then
      y = "0"  -- degenerate .000 case
    else
      -- now, q > 0 holds and p is a number
      local v = #match(q, "0*$")             -- remove trailing zeros
      if v > 0 then
        q = sub(q, 1, #q - v)
      end
      -- if p > 0, nothing else we can do to simplify p.q case
      if p + 0 > 0 then
        y = p.."."..q
      else
        y = "."..q  -- tentative, e.g. .000123
        local v = #match(q, "^0*")           -- # leading zeros
        local w = #q - v                     -- # significant digits
        local nv = base.tostring(#q)
        -- e.g. compare 123e-6 versus .000123
        if w + 2 + #nv < 1 + #q then
          y = sub(q, -w).."e-"..nv
        end
      end
    end
  --------------------------------------------------------------------
  else                                  -- scientific number
    local sig, ex = match(z, "^([^eE]+)[eE]([%+%-]?%d+)$")
    ex = base.tonumber(ex)
    -- if got ".", shift out fractional portion of significand
    local p, q = match(sig, "^(%d*)%.(%d*)$")
    if p then
      ex = ex - #q
      sig = p..q
    end
    if sig + 0 == 0 then
      y = "0"  -- basic zero
    else
      local v = #match(sig, "^0*")      -- remove leading zeros
      sig = sub(sig, v + 1)
      v = #match(sig, "0*$")            -- shift out trailing zeros
      if v > 0 then
        sig = sub(sig, 1, #sig - v)
        ex = ex + v
      end
      -- examine exponent and determine which format is best
      local nex = base.tostring(ex)
      if ex == 0 then                   -- it's just an integer
        y = sig
      elseif ex > 0 and (ex <= 1 + #nex) then  -- a plain number
        y = sig..rep("0", ex)
      elseif ex < 0 and (ex >= -#sig) then     -- fraction, e.g. .123
        v = #sig + ex
        y = sub(sig, 1, v).."."..sub(sig, v + 1)
      elseif ex < 0 and (#nex >= -ex - #sig) then
        -- e.g. compare 1234e-5 versus .01234
        -- gives: #sig + 1 + #nex >= 1 + (-ex - #sig) + #sig
        -- -> #nex >= -ex - #sig
        v = -ex - #sig
        y = "."..rep("0", v)..sig
      else  -- non-canonical scientific representation
        y = sig.."e"..ex
      end
    end--if sig
  end
  --------------------------------------------------------------------
  if y and y ~= sinfos[i] then
    if opt_details then
      print("<number> (line "..stoklns[i]..") "..sinfos[i].." -> "..y)
      opt_details = opt_details + 1
    end
    sinfos[i] = y
  end
end
------------------------------------------------------------------------
-- string optimization
-- * note: works on well-formed strings only!
-- * optimizations on characters can be summarized as follows:
-- \a\b\f\n\r\t\v -- no change
-- \\ -- no change
-- \"\' -- depends on delim, other can remove \
-- \[\] -- remove \
-- \<char> -- general escape, remove \
-- \<eol> -- normalize the EOL only
-- \ddd -- if \a\b\f\n\r\t\v, change to latter
-- if other < ascii 32, keep ddd but zap leading zeros
-- if >= ascii 32, translate it into the literal, then also
-- do escapes for \\,\",\' cases
-- <other> -- no change
-- * switch delimiters if string becomes shorter
------------------------------------------------------------------------
-- Optimize the (well-formed) short string at token index I:
-- simplifies escape sequences where safe and, when it saves bytes,
-- switches between ' and " delimiters (see the summary above).
-- Escapes are rewritten in place inside z; sinfos[I] is updated only
-- when the result differs, with a report when opt_details is enabled.
local function do_string(I)
  local info = sinfos[I]
  local delim = sub(info, 1, 1)                 -- delimiter used
  local ndelim = (delim == "'") and '"' or "'"  -- opposite " <-> '
  local z = sub(info, 2, -2)                    -- actual string
  local i = 1
  local c_delim, c_ndelim = 0, 0                -- "/' counts
  --------------------------------------------------------------------
  while i <= #z do
    local c = sub(z, i, i)
    ----------------------------------------------------------------
    if c == "\\" then                   -- escaped stuff
      local j = i + 1
      local d = sub(z, j, j)
      local p = find("abfnrtv\\\n\r\"\'0123456789", d, 1, true)
      ------------------------------------------------------------
      if not p then                     -- \<char> -- remove \
        z = sub(z, 1, i - 1)..sub(z, j)
        i = i + 1
      ------------------------------------------------------------
      elseif p <= 8 then                -- \a\b\f\n\r\t\v\\
        i = i + 2  -- no change
      ------------------------------------------------------------
      elseif p <= 10 then               -- \<eol> -- normalize EOL
        local eol = sub(z, j, j + 1)
        if eol == "\r\n" or eol == "\n\r" then
          z = sub(z, 1, i).."\n"..sub(z, j + 2)
        elseif p == 10 then             -- \r case
          z = sub(z, 1, i).."\n"..sub(z, j + 1)
        end
        i = i + 2
      ------------------------------------------------------------
      elseif p <= 12 then               -- \"\' -- remove \ for ndelim
        if d == delim then
          c_delim = c_delim + 1
          i = i + 2
        else
          c_ndelim = c_ndelim + 1
          z = sub(z, 1, i - 1)..sub(z, j)
          i = i + 1
        end
      ------------------------------------------------------------
      else                              -- \ddd -- various steps
        local s = match(z, "^(%d%d?%d?)", j)
        j = i + 1 + #s                  -- skip to location
        local cv = s + 0
        local cc = string.char(cv)
        local p = find("\a\b\f\n\r\t\v", cc, 1, true)
        if p then                       -- special escapes
          s = "\\"..sub("abfnrtv", p, p)
        elseif cv < 32 then             -- normalized \ddd
          s = "\\"..cv
        elseif cc == delim then         -- \<delim>
          s = "\\"..cc
          c_delim = c_delim + 1
        elseif cc == "\\" then          -- \\
          s = "\\\\"
        else                            -- literal character
          s = cc
          if cc == ndelim then
            c_ndelim = c_ndelim + 1
          end
        end
        z = sub(z, 1, i - 1)..s..sub(z, j)
        i = i + #s
      ------------------------------------------------------------
      end--if p
    ----------------------------------------------------------------
    else-- c ~= "\\"                    -- <other> -- no change
      i = i + 1
      if c == ndelim then  -- count ndelim, for switching delimiters
        c_ndelim = c_ndelim + 1
      end
    ----------------------------------------------------------------
    end--if c
  end--while
  --------------------------------------------------------------------
  -- switching delimiters, a long-winded derivation:
  -- (1) delim takes 2+2*c_delim bytes, ndelim takes c_ndelim bytes
  -- (2) delim becomes c_delim bytes, ndelim becomes 2+2*c_ndelim bytes
  -- simplifying the condition (1)>(2) --> c_delim > c_ndelim
  if c_delim > c_ndelim then
    i = 1
    while i <= #z do
      local p, q, r = find(z, "([\'\"])", i)
      if not p then break end
      if r == delim then                -- \<delim> -> <delim>
        z = sub(z, 1, p - 2)..sub(z, p)
        i = p
      else-- r == ndelim                -- <ndelim> -> \<ndelim>
        z = sub(z, 1, p - 1).."\\"..sub(z, p)
        i = p + 2
      end
    end--while
    delim = ndelim  -- actually change delimiters
  end
  --------------------------------------------------------------------
  z = delim..z..delim
  if z ~= sinfos[I] then
    if opt_details then
      print("<string> (line "..stoklns[I]..") "..sinfos[I].." -> "..z)
      opt_details = opt_details + 1
    end
    sinfos[I] = z
  end
end
------------------------------------------------------------------------
-- long string optimization
-- * note: warning flagged if trailing whitespace found, not trimmed
-- * remove first optional newline
-- * normalize embedded newlines
-- * reduce '=' separators in delimiters if possible
------------------------------------------------------------------------
-- Optimize the long string at token index I: normalizes embedded
-- EOLs to "\n" and shrinks the '=' count in the delimiters when the
-- body permits. Trailing whitespace is NOT trimmed (that would change
-- the string's value); a warning flag is raised instead.
local function do_lstring(I)
  local info = sinfos[I]
  local delim1 = match(info, "^%[=*%[")         -- cut out delimiters
  local sep = #delim1
  local delim2 = sub(info, -sep, -1)
  local z = sub(info, sep + 1, -(sep + 1))      -- lstring without delims
  local y = ""
  local i = 1
  --------------------------------------------------------------------
  while true do
    local p, q, r, s = find(z, "([\r\n])([\r\n]?)", i)
    -- deal with a single line
    local ln
    if not p then
      ln = sub(z, i)
    elseif p >= i then
      ln = sub(z, i, p - 1)
    end
    if ln ~= "" then
      -- flag a warning if there are trailing spaces, won't optimize!
      if match(ln, "%s+$") then
        warn.lstring = "trailing whitespace in long string near line "..stoklns[I]
      end
      y = y..ln
    end
    if not p then  -- done if no more EOLs
      break
    end
    -- deal with line endings, normalize them
    i = p + 1
    if p then
      if #s > 0 and r ~= s then  -- skip CRLF or LFCR
        i = i + 1
      end
      -- skip first newline, which can be safely deleted
      -- NOTE(review): i was just set to p+1, so "i == 1 and i == p"
      -- can never hold and "\n" is always appended; the first-newline
      -- skip looks vestigial (the lexer already drops the leading
      -- newline at read time) -- confirm before changing.
      if not(i == 1 and i == p) then
        y = y.."\n"
      end
    end
  end--while
  --------------------------------------------------------------------
  -- handle possible deletion of one or more '=' separators
  if sep >= 3 then
    local chk, okay = sep - 1
    -- loop to test ending delimiter with less of '=' down to zero
    while chk >= 2 do
      local delim = "%]"..rep("=", chk - 2).."%]"
      if not match(y, delim) then okay = chk end
      chk = chk - 1
    end
    if okay then  -- change delimiters
      sep = rep("=", okay - 2)
      delim1, delim2 = "["..sep.."[", "]"..sep.."]"
    end
  end
  --------------------------------------------------------------------
  sinfos[I] = delim1..y..delim2
end
------------------------------------------------------------------------
-- long comment optimization
-- * note: does not remove first optional newline
-- * trim trailing whitespace
-- * normalize embedded newlines
-- * reduce '=' separators in delimiters if possible
------------------------------------------------------------------------
-- Optimize the long comment at token index I: trims trailing
-- whitespace on each line, normalizes embedded EOLs to "\n", and
-- shrinks the '=' count in the delimiters when the body permits.
-- The first newline is kept (comments preserve their layout).
local function do_lcomment(I)
  local info = sinfos[I]
  local delim1 = match(info, "^%-%-%[=*%[")     -- cut out delimiters
  local sep = #delim1
  -- FIX: the closing "]=*]" delimiter is two chars shorter than the
  -- opening "--[=*[", so only the last sep-2 characters belong to it;
  -- taking sep characters duplicated part of the comment body into
  -- the rebuilt lexeme.
  local delim2 = sub(info, -(sep - 2), -1)
  local z = sub(info, sep + 1, -(sep - 1))      -- comment without delims
  local y = ""
  local i = 1
  --------------------------------------------------------------------
  while true do
    local p, q, r, s = find(z, "([\r\n])([\r\n]?)", i)
    -- deal with a single line, extract and check trailing whitespace
    local ln
    if not p then
      ln = sub(z, i)
    elseif p >= i then
      ln = sub(z, i, p - 1)
    end
    if ln ~= "" then
      -- trim trailing whitespace if non-empty line
      local ws = match(ln, "%s*$")
      -- FIX: measure the matched whitespace with #ws; arithmetic on
      -- the whitespace string itself raised a runtime error whenever
      -- a line actually had trailing whitespace.
      if #ws > 0 then ln = sub(ln, 1, -(#ws + 1)) end
      y = y..ln
    end
    if not p then  -- done if no more EOLs
      break
    end
    -- deal with line endings, normalize them
    i = p + 1
    if p then
      if #s > 0 and r ~= s then  -- skip CRLF or LFCR
        i = i + 1
      end
      y = y.."\n"
    end
  end--while
  --------------------------------------------------------------------
  -- handle possible deletion of one or more '=' separators
  sep = sep - 2  -- bracket-only length of the opening delimiter
  if sep >= 3 then
    local chk, okay = sep - 1
    -- loop to test ending delimiter with less of '=' down to zero
    while chk >= 2 do
      local delim = "%]"..rep("=", chk - 2).."%]"
      if not match(y, delim) then okay = chk end
      chk = chk - 1
    end
    if okay then  -- change delimiters
      sep = rep("=", okay - 2)
      delim1, delim2 = "--["..sep.."[", "]"..sep.."]"
    end
  end
  --------------------------------------------------------------------
  sinfos[I] = delim1..y..delim2
end
------------------------------------------------------------------------
-- short comment optimization
-- * trim trailing whitespace
------------------------------------------------------------------------
-- Trim trailing whitespace from the short comment at token index i.
local function do_comment(i)
  local info = sinfos[i]
  local ws = match(info, "%s*$")        -- just look from end of string
  -- FIX: measure the matched whitespace with #ws; the original code
  -- did arithmetic on the whitespace string itself, which raised a
  -- runtime error for any comment with trailing whitespace.
  if #ws > 0 then
    info = sub(info, 1, -(#ws + 1))     -- trim trailing whitespace
  end
  sinfos[i] = info
end
------------------------------------------------------------------------
-- returns true if string found in long comment
-- * this is a feature to keep copyright or license texts
------------------------------------------------------------------------
-- Returns true when the --keep text occurs (plain substring search,
-- no patterns) inside the body of the given long comment; false when
-- the keep option is unset, nil otherwise. This is the feature that
-- preserves copyright or license texts.
local function keep_lcomment(opt_keep, info)
  if not opt_keep then return false end -- option not set
  local head = #match(info, "^%-%-%[=*%[")        -- opening delim length
  local body = sub(info, head + 1, -(head - 1))   -- text between delims
  if find(body, opt_keep, 1, true) then           -- plain find
    return true
  end
end
------------------------------------------------------------------------
-- main entry point
-- * currently, lexer processing has 2 passes
-- * processing is done on a line-oriented basis, which is easier to
-- grok due to the next point...
-- * since there are various options that can be enabled or disabled,
-- processing is a little messy or convoluted
-- * option: table of flags read below ("opt-comments",
-- "opt-whitespace", "opt-emptylines", "opt-eols", "opt-strings",
-- "opt-numbers", plus KEEP and DETAILS)
-- * toklist, semlist, toklnlist: parallel token-type / semantic-info /
-- line-number lists
-- * returns stoks, sinfos, stoklns: the same lists, edited in place
-- and compacted via repack_tokens()
------------------------------------------------------------------------
function optimize(option, toklist, semlist, toklnlist)
--------------------------------------------------------------------
-- set option flags
--------------------------------------------------------------------
local opt_comments = option["opt-comments"]
local opt_whitespace = option["opt-whitespace"]
local opt_emptylines = option["opt-emptylines"]
local opt_eols = option["opt-eols"]
local opt_strings = option["opt-strings"]
local opt_numbers = option["opt-numbers"]
local opt_keep = option.KEEP
opt_details = option.DETAILS and 0 -- upvalues for details display
print = print or base.print
if opt_eols then -- forced settings, otherwise won't work properly
opt_comments = true
opt_whitespace = true
opt_emptylines = true
end
--------------------------------------------------------------------
-- variable initialization
--------------------------------------------------------------------
stoks, sinfos, stoklns -- set source lists
= toklist, semlist, toklnlist
local i = 1 -- token position
local tok, info -- current token
local prev -- position of last grammar token
-- on same line (for TK_SPACE stuff)
--------------------------------------------------------------------
-- changes a token, info pair
-- * position defaults to the current index i; calling with no
-- arguments stores empty strings, i.e. marks the pair as deleted
--------------------------------------------------------------------
local function settoken(tok, info, I)
I = I or i
stoks[I] = tok or ""
sinfos[I] = info or ""
end
--------------------------------------------------------------------
-- processing loop (PASS 1)
--------------------------------------------------------------------
while true do
tok, info = stoks[i], sinfos[i]
----------------------------------------------------------------
local atstart = atlinestart(i) -- set line begin flag
if atstart then prev = nil end
----------------------------------------------------------------
if tok == "TK_EOS" then -- end of stream/pass
break
----------------------------------------------------------------
elseif tok == "TK_KEYWORD" or -- keywords, identifiers,
tok == "TK_NAME" or -- operators
tok == "TK_OP" then
-- TK_KEYWORD and TK_OP can't be optimized without a big
-- optimization framework; it would be more of an optimizing
-- compiler, not a source code compressor
-- TK_NAME that are locals needs parser to analyze/optimize
prev = i
----------------------------------------------------------------
elseif tok == "TK_NUMBER" then -- numbers
if opt_numbers then
do_number(i) -- optimize
end
prev = i
----------------------------------------------------------------
elseif tok == "TK_STRING" or -- strings, long strings
tok == "TK_LSTRING" then
if opt_strings then
if tok == "TK_STRING" then
do_string(i) -- optimize
else
do_lstring(i) -- optimize
end
end
prev = i
----------------------------------------------------------------
elseif tok == "TK_COMMENT" then -- short comments
if opt_comments then
if i == 1 and sub(info, 1, 1) == "#" then
-- keep shbang comment, trim whitespace
do_comment(i)
else
-- safe to delete, as a TK_EOL (or TK_EOS) always follows
settoken() -- remove entirely
end
elseif opt_whitespace then -- trim whitespace only
do_comment(i)
end
----------------------------------------------------------------
elseif tok == "TK_LCOMMENT" then -- long comments
if keep_lcomment(opt_keep, info) then
------------------------------------------------------------
-- if --keep, we keep a long comment if <msg> is found;
-- this is a feature to keep copyright or license texts
if opt_whitespace then -- trim whitespace only
do_lcomment(i)
end
prev = i
elseif opt_comments then
local eols = commenteols(info)
------------------------------------------------------------
-- prepare opt_emptylines case first, if a disposable token
-- follows, current one is safe to dump, else keep a space;
-- it is implied that the operation is safe for '-', because
-- current is a TK_LCOMMENT, and must be separate from a '-'
if is_faketoken[stoks[i + 1]] then
settoken() -- remove entirely
tok = ""
else
settoken("TK_SPACE", " ")
end
------------------------------------------------------------
-- if there are embedded EOLs to keep and opt_emptylines is
-- disabled, then switch the token into one or more EOLs
if not opt_emptylines and eols > 0 then
settoken("TK_EOL", rep("\n", eols))
end
------------------------------------------------------------
-- if optimizing whitespaces, force reinterpretation of the
-- token to give a chance for the space to be optimized away
if opt_whitespace and tok ~= "" then
i = i - 1 -- to reinterpret
end
------------------------------------------------------------
else -- disabled case
if opt_whitespace then -- trim whitespace only
do_lcomment(i)
end
prev = i
end
----------------------------------------------------------------
elseif tok == "TK_EOL" then -- line endings
if atstart and opt_emptylines then
settoken() -- remove entirely
elseif info == "\r\n" or info == "\n\r" then
-- normalize the rest of the EOLs for CRLF/LFCR only
-- (note that TK_LCOMMENT can change into several EOLs)
settoken("TK_EOL", "\n")
end
----------------------------------------------------------------
elseif tok == "TK_SPACE" then -- whitespace
if opt_whitespace then
if atstart or atlineend(i) then
-- delete leading and trailing whitespace
settoken() -- remove entirely
else
------------------------------------------------------------
-- at this point, since leading whitespace have been removed,
-- there should be a either a real token or a TK_LCOMMENT
-- prior to hitting this whitespace; the TK_LCOMMENT case
-- only happens if opt_comments is disabled; so prev ~= nil
local ptok = stoks[prev]
if ptok == "TK_LCOMMENT" then
-- previous TK_LCOMMENT can abut with anything
settoken() -- remove entirely
else
-- prev must be a grammar token; consecutive TK_SPACE
-- tokens is impossible when optimizing whitespace
local ntok = stoks[i + 1]
if is_faketoken[ntok] then
-- handle special case where a '-' cannot abut with
-- either a short comment or a long comment
if (ntok == "TK_COMMENT" or ntok == "TK_LCOMMENT") and
ptok == "TK_OP" and sinfos[prev] == "-" then
-- keep token
else
settoken() -- remove entirely
end
else--is_realtoken
-- check a pair of grammar tokens, if can abut, then
-- delete space token entirely, otherwise keep one space
local s = checkpair(prev, i + 1)
if s == "" then
settoken() -- remove entirely
else
settoken("TK_SPACE", " ")
end
end
end
------------------------------------------------------------
end
end
----------------------------------------------------------------
else
error("unidentified token encountered")
end
----------------------------------------------------------------
i = i + 1
end--while
repack_tokens()
--------------------------------------------------------------------
-- processing loop (PASS 2)
-- * only entered for aggressive EOL removal (opt_eols)
--------------------------------------------------------------------
if opt_eols then
i = 1
-- aggressive EOL removal only works with most non-grammar tokens
-- optimized away because it is a rather simple scheme -- basically
-- it just checks 'real' token pairs around EOLs
if stoks[1] == "TK_COMMENT" then
-- first comment still existing must be shbang, skip whole line
i = 3
end
while true do
tok, info = stoks[i], sinfos[i]
--------------------------------------------------------------
if tok == "TK_EOS" then -- end of stream/pass
break
--------------------------------------------------------------
elseif tok == "TK_EOL" then -- consider each TK_EOL
local t1, t2 = stoks[i - 1], stoks[i + 1]
if is_realtoken[t1] and is_realtoken[t2] then -- sanity check
local s = checkpair(i - 1, i + 1)
if s == "" then
settoken() -- remove entirely
end
end
end--if tok
--------------------------------------------------------------
i = i + 1
end--while
repack_tokens()
end
--------------------------------------------------------------------
if opt_details and opt_details > 0 then print() end -- spacing
return stoks, sinfos, stoklns
end

View file

@ -0,0 +1,564 @@
--[[--------------------------------------------------------------------
optparser.lua: does parser-based optimizations
This file is part of LuaSrcDiet.
Copyright (c) 2008 Kein-Hong Man <khman@users.sf.net>
The COPYRIGHT file describes the conditions
under which this software may be distributed.
See the ChangeLog for more information.
----------------------------------------------------------------------]]
--[[--------------------------------------------------------------------
-- NOTES:
-- * For more parser-based optimization ideas, see the TODO items or
-- look at technotes.txt.
-- * The processing load is quite significant, but since this is an
-- off-line text processor, I believe we can wait a few seconds.
-- * TODO: might process "local a,a,a" wrongly... need tests!
-- * TODO: remove position handling if overlapped locals (rem < 0)
-- needs more study, to check behaviour
-- * TODO: there are probably better ways to do allocation, e.g. by
-- choosing better methods to sort and pick locals...
-- * TODO: we don't need 53*63 two-letter identifiers; we can make
-- do with significantly less depending on how many that are really
-- needed and improve entropy; e.g. 13 needed -> choose 4*4 instead
----------------------------------------------------------------------]]
local base = _G
local string = require "string"
local table = require "table"
module "optparser"
----------------------------------------------------------------------
-- Letter frequencies for reducing symbol entropy (fixed version)
-- * Might help a wee bit when the output file is compressed
-- * See Wikipedia: http://en.wikipedia.org/wiki/Letter_frequencies
-- * We use letter frequencies according to a Linotype keyboard, plus
-- the underscore, and both lower case and upper case letters.
-- * The arrangement below (LC, underscore, %d, UC) is arbitrary.
-- * This is certainly not optimal, but is quick-and-dirty and the
-- process has no significant overhead
----------------------------------------------------------------------
local LETTERS = "etaoinshrdlucmfwypvbgkqjxz_ETAOINSHRDLUCMFWYPVBGKQJXZ"
local ALPHANUM = "etaoinshrdlucmfwypvbgkqjxz_0123456789ETAOINSHRDLUCMFWYPVBGKQJXZ"
-- names or identifiers that must be skipped
-- * the first two lines are for keywords
-- * consulted by the name allocator loop below ("until not
-- SKIP_NAME[varname]") so generated names never shadow a keyword
-- * "self" is listed because implicit method parameters are preserved
local SKIP_NAME = {}
for v in string.gmatch([[
and break do else elseif end false for function if in
local nil not or repeat return then true until while
self]], "%S+") do
SKIP_NAME[v] = true
end
------------------------------------------------------------------------
-- variables and data structures
------------------------------------------------------------------------
local toklist, seminfolist, -- token lists
globalinfo, localinfo, -- variable information tables
globaluniq, localuniq, -- unique name tables
var_new, -- index of new variable names
varlist -- list of output variables
----------------------------------------------------------------------
-- preprocess information table to get lists of unique names
-- * per distinct name, accumulates declaration count, token reference
--   count and total byte size
-- * local-info objects (those with a 'decl' field) are additionally
--   annotated with id, xcount and their first/last access positions;
--   global-info objects get a back reference (uniq.id) instead
-- @param infotable array of { name=..., xref=..., [decl=...] } records
-- @return table mapping each name to its accumulated statistics
----------------------------------------------------------------------
local function preprocess(infotable)
  local uniqtable = {}
  for index = 1, #infotable do
    local entry = infotable[index]
    local key = entry.name
    ------------------------------------------------------------------
    local uniq = uniqtable[key]
    if not uniq then                        -- first sighting: open a record
      uniq = { decl = 0, token = 0, size = 0 }
      uniqtable[key] = uniq
    end
    ------------------------------------------------------------------
    local refs = entry.xref                 -- count declarations, tokens, size
    local nrefs = #refs
    uniq.decl = uniq.decl + 1
    uniq.token = uniq.token + nrefs
    uniq.size = uniq.size + nrefs * #key
    ------------------------------------------------------------------
    if entry.decl then                      -- local: note first,last accesses
      entry.id = index
      entry.xcount = nrefs
      if nrefs > 1 then                     -- if ==1, local is never accessed
        entry.first = refs[2]               -- xref[1] is the declaration
        entry.last = refs[nrefs]
      end
    else                                    -- global: keep a back reference
      uniq.id = index
    end
    ------------------------------------------------------------------
  end--for
  return uniqtable
end
----------------------------------------------------------------------
-- calculate actual symbol frequencies, in order to reduce entropy
-- * this may help further reduce the size of compressed sources
-- * note that since parsing optimizations is put before lexing
--   optimizations, the frequency table is not exact!
-- * yes, this will miss --keep block comments too...
-- * reorders the module-level LETTERS/ALPHANUM strings in place,
--   most frequent symbols first
----------------------------------------------------------------------
local function recalc_for_entropy(option)
  local byte = string.byte
  local char = string.char
  -- token classes whose text takes part in the frequency census
  local ACCEPT = {
    TK_KEYWORD = true, TK_NAME = true, TK_NUMBER = true,
    TK_STRING = true, TK_LSTRING = true,
  }
  if not option["opt-comments"] then
    ACCEPT.TK_COMMENT = true
    ACCEPT.TK_LCOMMENT = true
  end
  --------------------------------------------------------------------
  -- copy the semantic info list, blanking every local-variable
  -- occurrence so soon-to-be-renamed locals do not skew the counts
  --------------------------------------------------------------------
  local filtered = {}
  for pos = 1, #toklist do
    filtered[pos] = seminfolist[pos]
  end
  for li = 1, #localinfo do                 -- enumerate local info table
    local obj = localinfo[li]
    local xref = obj.xref
    for ref = 1, obj.xcount do
      filtered[xref[ref]] = ""              -- remove locals
    end
  end
  --------------------------------------------------------------------
  local freq = {}                           -- per-byte occurrence counts
  for code = 0, 255 do freq[code] = 0 end
  for pos = 1, #toklist do                  -- gather symbol frequency
    local info = filtered[pos]
    if ACCEPT[toklist[pos]] then
      for ci = 1, #info do
        local code = byte(info, ci)
        freq[code] = freq[code] + 1
      end
    end--if
  end--for
  --------------------------------------------------------------------
  -- rearrange a symbol string so higher-frequency bytes come first
  --------------------------------------------------------------------
  local function resort(symbols)
    local ranked = {}
    for si = 1, #symbols do                 -- prepare table to sort
      local code = byte(symbols, si)
      ranked[si] = { c = code, freq = freq[code] }
    end
    table.sort(ranked, function(a, b)       -- sort selected symbols
      return a.freq > b.freq
    end)
    local out = {}                          -- reconstitute the string
    for si = 1, #ranked do
      out[si] = char(ranked[si].c)
    end
    return table.concat(out)
  end
  --------------------------------------------------------------------
  LETTERS = resort(LETTERS)                 -- change letter arrangement
  ALPHANUM = resort(ALPHANUM)
end
----------------------------------------------------------------------
-- returns a string containing a new local variable name to use, and
-- a flag indicating whether it collides with a global variable
-- * trapping keywords and other names like 'self' is done elsewhere
-- * names are drawn in sequence from the module counter var_new: the
-- first character always comes from LETTERS, each further character
-- from ALPHANUM, so multi-character names appear only after the
-- single-character pool is exhausted
-- @return name string, plus true when globaluniq has this name
----------------------------------------------------------------------
local function new_var_name()
local var
local cletters, calphanum = #LETTERS, #ALPHANUM
local v = var_new
if v < cletters then -- single char
v = v + 1
var = string.sub(LETTERS, v, v)
else -- longer names
local range, sz = cletters, 1 -- calculate # chars fit
repeat
v = v - range
range = range * calphanum
sz = sz + 1
until range > v
local n = v % cletters -- left side cycles faster
v = (v - n) / cletters -- do first char first
n = n + 1
var = string.sub(LETTERS, n, n)
while sz > 1 do -- remaining chars, mixed-radix digits
local m = v % calphanum
v = (v - m) / calphanum
m = m + 1
var = var..string.sub(ALPHANUM, m, m)
sz = sz - 1
end
end
var_new = var_new + 1 -- advance allocator for the next call
return var, globaluniq[var] ~= nil
end
----------------------------------------------------------------------
-- calculate and print some statistics
-- * probably better in main source, put here for now
-- * globaluniq/localuniq: unique-name tables from preprocess() for
-- globals and for locals before renaming; afteruniq: the same for
-- locals after renaming
-- * option.DETAILS additionally enables the per-variable breakdown
-- tables (globals by size, locals by allocation order)
----------------------------------------------------------------------
local function stats_summary(globaluniq, localuniq, afteruniq, option)
local print = print or base.print
local fmt = string.format
local opt_details = option.DETAILS
-- suffixes: _g globals, _li locals (input), _lo locals (output),
-- _ti totals (input), _to totals (output)
local uniq_g , uniq_li, uniq_lo, uniq_ti, uniq_to, -- stats needed
decl_g, decl_li, decl_lo, decl_ti, decl_to,
token_g, token_li, token_lo, token_ti, token_to,
size_g, size_li, size_lo, size_ti, size_to
= 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0
local function avg(c, l) -- safe average function
if c == 0 then return 0 end
return l / c
end
--------------------------------------------------------------------
-- collect statistics (note: globals do not have declarations!)
--------------------------------------------------------------------
for name, uniq in base.pairs(globaluniq) do
uniq_g = uniq_g + 1
token_g = token_g + uniq.token
size_g = size_g + uniq.size
end
for name, uniq in base.pairs(localuniq) do
uniq_li = uniq_li + 1
decl_li = decl_li + uniq.decl
token_li = token_li + uniq.token
size_li = size_li + uniq.size
end
for name, uniq in base.pairs(afteruniq) do
uniq_lo = uniq_lo + 1
decl_lo = decl_lo + uniq.decl
token_lo = token_lo + uniq.token
size_lo = size_lo + uniq.size
end
uniq_ti = uniq_g + uniq_li
decl_ti = decl_g + decl_li
token_ti = token_g + token_li
size_ti = size_g + size_li
uniq_to = uniq_g + uniq_lo
decl_to = decl_g + decl_lo
token_to = token_g + token_lo
size_to = size_g + size_lo
--------------------------------------------------------------------
-- detailed stats: global list
--------------------------------------------------------------------
if opt_details then
local sorted = {} -- sort table of unique global names by size
for name, uniq in base.pairs(globaluniq) do
uniq.name = name
sorted[#sorted + 1] = uniq
end
table.sort(sorted,
function(v1, v2)
return v1.size > v2.size
end
)
local tabf1, tabf2 = "%8s%8s%10s  %s", "%8d%8d%10.2f  %s"
local hl = string.rep("-", 44)
print("*** global variable list (sorted by size) ***\n"..hl)
print(fmt(tabf1, "Token",  "Input", "Input", "Global"))
print(fmt(tabf1, "Count", "Bytes", "Average", "Name"))
print(hl)
for i = 1, #sorted do
local uniq = sorted[i]
print(fmt(tabf2, uniq.token, uniq.size, avg(uniq.token, uniq.size), uniq.name))
end
print(hl)
print(fmt(tabf2, token_g, size_g, avg(token_g, size_g), "TOTAL"))
print(hl.."\n")
--------------------------------------------------------------------
-- detailed stats: local list
--------------------------------------------------------------------
local tabf1, tabf2 = "%8s%8s%8s%10s%8s%10s  %s", "%8d%8d%8d%10.2f%8d%10.2f  %s"
local hl = string.rep("-", 70)
print("*** local variable list (sorted by allocation order) ***\n"..hl)
print(fmt(tabf1, "Decl.", "Token", "Input", "Input", "Output", "Output", "Global"))
print(fmt(tabf1, "Count", "Count", "Bytes", "Average", "Bytes", "Average", "Name"))
print(hl)
for i = 1, #varlist do  -- iterate according to order assigned
local name = varlist[i]
local uniq = afteruniq[name]
local old_t, old_s = 0, 0
for j = 1, #localinfo do  -- find corresponding old names and calculate
local obj = localinfo[j]
if obj.name == name then
old_t = old_t + obj.xcount
old_s = old_s + obj.xcount * #obj.oldname
end
end
print(fmt(tabf2, uniq.decl, uniq.token, old_s, avg(old_t, old_s),
          uniq.size, avg(uniq.token, uniq.size), name))
end
print(hl)
print(fmt(tabf2, decl_lo, token_lo, size_li, avg(token_li, size_li),
          size_lo, avg(token_lo, size_lo), "TOTAL"))
print(hl.."\n")
end--if opt_details
--------------------------------------------------------------------
-- display output
--------------------------------------------------------------------
local tabf1, tabf2 = "%-16s%8s%8s%8s%8s%10s", "%-16s%8d%8d%8d%8d%10.2f"
local hl = string.rep("-", 58)
print("*** local variable optimization summary ***\n"..hl)
print(fmt(tabf1, "Variable", "Unique", "Decl.", "Token", "Size", "Average"))
print(fmt(tabf1, "Types", "Names", "Count", "Count", "Bytes", "Bytes"))
print(hl)
print(fmt(tabf2, "Global", uniq_g, decl_g, token_g, size_g, avg(token_g, size_g)))
print(hl)
print(fmt(tabf2, "Local (in)", uniq_li, decl_li, token_li, size_li, avg(token_li, size_li)))
print(fmt(tabf2, "TOTAL (in)", uniq_ti, decl_ti, token_ti, size_ti, avg(token_ti, size_ti)))
print(hl)
print(fmt(tabf2, "Local (out)", uniq_lo, decl_lo, token_lo, size_lo, avg(token_lo, size_lo)))
print(fmt(tabf2, "TOTAL (out)", uniq_to, decl_to, token_to, size_to, avg(token_to, size_to)))
print(hl.."\n")
end
----------------------------------------------------------------------
-- main entry point
-- * does only local variable optimization for now
-- * option: flag table ("opt-entropy" is handled here; DETAILS is
-- passed on to stats_summary)
-- * _toklist/_seminfolist: token and semantic-info lists from lexer
-- * _globalinfo/_localinfo: variable info tables from the parser
-- * renames locals in place by patching _seminfolist entries; no
-- return value
----------------------------------------------------------------------
function optimize(option, _toklist, _seminfolist, _globalinfo, _localinfo)
-- set tables
toklist, seminfolist, globalinfo, localinfo
= _toklist, _seminfolist, _globalinfo, _localinfo
var_new = 0 -- reset variable name allocator
varlist = {}
------------------------------------------------------------------
-- preprocess global/local tables, handle entropy reduction
------------------------------------------------------------------
globaluniq = preprocess(globalinfo)
localuniq = preprocess(localinfo)
if option["opt-entropy"] then -- for entropy improvement
recalc_for_entropy(option)
end
------------------------------------------------------------------
-- build initial declared object table, then sort according to
-- token count, this might help assign more tokens to more common
-- variable names such as 'e' thus possibly reducing entropy
-- * an object knows its localinfo index via its 'id' field
-- * special handling for "self" special local (parameter) here
------------------------------------------------------------------
local object = {}
for i = 1, #localinfo do
object[i] = localinfo[i]
end
table.sort(object, -- sort largest first
function(v1, v2)
return v1.xcount > v2.xcount
end
)
------------------------------------------------------------------
-- the special "self" function parameters must be preserved
-- * the allocator below will never use "self", so it is safe to
-- keep those implicit declarations as-is
------------------------------------------------------------------
local temp, j, gotself = {}, 1, false
for i = 1, #object do
local obj = object[i]
if not obj.isself then
temp[j] = obj
j = j + 1
else
gotself = true
end
end
object = temp
------------------------------------------------------------------
-- a simple first-come first-served heuristic name allocator,
-- note that this is in no way optimal...
-- * each object is a local variable declaration plus existence
-- * the aim is to assign short names to as many tokens as possible,
-- so the following tries to maximize name reuse
-- * note that we preserve sort order
------------------------------------------------------------------
local nobject = #object
while nobject > 0 do
local varname, gcollide
repeat
varname, gcollide = new_var_name() -- collect a variable name
until not SKIP_NAME[varname] -- skip all special names
varlist[#varlist + 1] = varname -- keep a list
local oleft = nobject
------------------------------------------------------------------
-- if variable name collides with an existing global, the name
-- cannot be used by a local when the name is accessed as a global
-- during which the local is alive (between 'act' to 'rem'), so
-- we drop objects that collides with the corresponding global
------------------------------------------------------------------
if gcollide then
-- find the xref table of the global
local gref = globalinfo[globaluniq[varname].id].xref
local ngref = #gref
-- enumerate for all current objects; all are valid at this point
for i = 1, nobject do
local obj = object[i]
local act, rem = obj.act, obj.rem -- 'live' range of local
-- if rem < 0, it is a -id to a local that had the same name
-- so follow rem to extend it; does this make sense?
while rem < 0 do
rem = localinfo[-rem].rem
end
local drop
for j = 1, ngref do
local p = gref[j]
if p >= act and p <= rem then drop = true end -- in range?
end
if drop then
obj.skip = true
oleft = oleft - 1
end
end--for
end--if gcollide
------------------------------------------------------------------
-- now the first unassigned local (since it's sorted) will be the
-- one with the most tokens to rename, so we set this one and then
-- eliminate all others that collides, then any locals that left
-- can then reuse the same variable name; this is repeated until
-- all local declaration that can use this name is assigned
-- * the criteria for local-local reuse/collision is:
-- A is the local with a name already assigned
-- B is the unassigned local under consideration
-- => anytime A is accessed, it cannot be when B is 'live'
-- => to speed up things, we have first/last accesses noted
------------------------------------------------------------------
while oleft > 0 do
local i = 1
while object[i].skip do -- scan for first object
i = i + 1
end
------------------------------------------------------------------
-- first object is free for assignment of the variable name
-- [first,last] gives the access range for collision checking
------------------------------------------------------------------
oleft = oleft - 1
local obja = object[i]
i = i + 1
obja.newname = varname
obja.skip = true
obja.done = true
local first, last = obja.first, obja.last
local xref = obja.xref
------------------------------------------------------------------
-- then, scan all the rest and drop those colliding
-- if A was never accessed then it'll never collide with anything
-- otherwise trivial skip if:
-- * B was activated after A's last access (last < act)
-- * B was removed before A's first access (first > rem)
-- if not, see detailed skip below...
------------------------------------------------------------------
if first and oleft > 0 then -- must have at least 1 access
local scanleft = oleft
while scanleft > 0 do
while object[i].skip do -- next valid object
i = i + 1
end
scanleft = scanleft - 1
local objb = object[i]
i = i + 1
local act, rem = objb.act, objb.rem -- live range of B
-- if rem < 0, extend range of rem thru' following local
while rem < 0 do
rem = localinfo[-rem].rem
end
--------------------------------------------------------
if not(last < act or first > rem) then -- possible collision
--------------------------------------------------------
-- B is activated later than A or at the same statement,
-- this means for no collision, A cannot be accessed when B
-- is alive, since B overrides A (or is a peer)
--------------------------------------------------------
if act >= obja.act then
for j = 1, obja.xcount do -- ... then check every access
local p = xref[j]
if p >= act and p <= rem then -- A accessed when B live!
oleft = oleft - 1
objb.skip = true
break
end
end--for
--------------------------------------------------------
-- A is activated later than B, this means for no collision,
-- A's access is okay since it overrides B, but B's last
-- access need to be earlier than A's activation time
--------------------------------------------------------
else
if objb.last and objb.last >= obja.act then
oleft = oleft - 1
objb.skip = true
end
end
end
--------------------------------------------------------
if oleft == 0 then break end
end
end--if first
------------------------------------------------------------------
end--while
------------------------------------------------------------------
-- after assigning all possible locals to one variable name, the
-- unassigned locals/objects have the skip field reset and the table
-- is compacted, to hopefully reduce iteration time
------------------------------------------------------------------
local temp, j = {}, 1
for i = 1, nobject do
local obj = object[i]
if not obj.done then
obj.skip = false
temp[j] = obj
j = j + 1
end
end
object = temp -- new compacted object table
nobject = #object -- objects left to process
------------------------------------------------------------------
end--while
------------------------------------------------------------------
-- after assigning all locals with new variable names, we can
-- patch in the new names, and reprocess to get 'after' stats
------------------------------------------------------------------
for i = 1, #localinfo do -- enumerate all locals
local obj = localinfo[i]
local xref = obj.xref
if obj.newname then -- if got new name, patch it in
for j = 1, obj.xcount do
local p = xref[j] -- xrefs indexes the token list
seminfolist[p] = obj.newname
end
obj.name, obj.oldname -- adjust names
= obj.newname, obj.name
else
obj.oldname = obj.name -- for cases like 'self'
end
end
------------------------------------------------------------------
-- deal with statistics output
------------------------------------------------------------------
if gotself then -- add 'self' to end of list
varlist[#varlist + 1] = "self"
end
local afteruniq = preprocess(localinfo)
stats_summary(globaluniq, localuniq, afteruniq, option)
------------------------------------------------------------------
end

View file

@ -103,6 +103,9 @@ define Package/luci-lib-core/config
config PACKAGE_luci-lib-core_stripped config PACKAGE_luci-lib-core_stripped
bool "Stripped" bool "Stripped"
config PACKAGE_luci-lib-core_srcdiet
bool "Compressed Source"
config PACKAGE_luci-lib-core_source config PACKAGE_luci-lib-core_source
bool "Full Source" bool "Full Source"
@ -117,8 +120,8 @@ ifneq ($(CONFIG_PACKAGE_luci-lib-core_stripped),)
LUA_TARGET:=strip LUA_TARGET:=strip
endif endif
ifneq ($(CONFIG_PACKAGE_luci-lib-core_zipped),) ifneq ($(CONFIG_PACKAGE_luci-lib-core_srcdiet),)
LUA_TARGET:=gzip LUA_TARGET:=diet
endif endif
ifneq ($(CONFIG_PACKAGE_luci-lib-core),) ifneq ($(CONFIG_PACKAGE_luci-lib-core),)

View file

@ -242,10 +242,19 @@ function has_ipv6(self)
end end
function add_network(self, n, options) function add_network(self, n, options)
if n and #n > 0 and n:match("^[a-zA-Z0-9_]+$") and not self:get_network(n) then local oldnet = self:get_network(n)
if n and #n > 0 and n:match("^[a-zA-Z0-9_]+$") and not oldnet then
if uci_r:section("network", "interface", n, options) then if uci_r:section("network", "interface", n, options) then
return network(n) return network(n)
end end
elseif oldnet and oldnet:is_empty() then
if options then
local k, v
for k, v in pairs(options) do
oldnet:set(k, v)
end
end
return oldnet
end end
end end
@ -1016,7 +1025,7 @@ end
function wifinet.active_mode(self) function wifinet.active_mode(self)
local m = _stror(self.iwinfo.mode, self.iwdata.mode) or "ap" local m = _stror(self.iwinfo.mode, self.iwdata.mode) or "ap"
if m == "ap" then m = "AP" if m == "ap" then m = "Master"
elseif m == "sta" then m = "Client" elseif m == "sta" then m = "Client"
elseif m == "adhoc" then m = "Ad-Hoc" elseif m == "adhoc" then m = "Ad-Hoc"
elseif m == "mesh" then m = "Mesh" elseif m == "mesh" then m = "Mesh"
@ -1118,8 +1127,7 @@ function wifinet.get_i18n(self)
end end
function wifinet.adminlink(self) function wifinet.adminlink(self)
return dsp.build_url("admin", "network", "wireless", return dsp.build_url("admin", "network", "wireless", self.netid)
self.iwdata.device, self.netid)
end end
function wifinet.get_network(self) function wifinet.get_network(self)

View file

@ -402,16 +402,24 @@ function cbi_filebrowser(id, url, defpath) {
browser.focus(); browser.focus();
} }
function cbi_dynlist_init(name) function cbi_dynlist_init(name, respath)
{ {
function cbi_dynlist_renumber(e) function cbi_dynlist_renumber(e)
{ {
var count = 1; /* in a perfect world, we could just getElementsByName() - but not if
var childs = e.parentNode.childNodes; * MSIE is involved... */
var inputs = [ ]; // = document.getElementsByName(name);
for (var i = 0; i < e.parentNode.childNodes.length; i++)
if (e.parentNode.childNodes[i].name == name)
inputs.push(e.parentNode.childNodes[i]);
for( var i = 0; i < childs.length; i++ ) for (var i = 0; i < inputs.length; i++)
if( childs[i].name == name ) {
childs[i].id = name + '.' + (count++); inputs[i].id = name + '.' + (i + 1);
inputs[i].nextSibling.src = respath + (
(i+1) < inputs.length ? '/cbi/remove.gif' : '/cbi/add.gif'
);
}
e.focus(); e.focus();
} }
@ -480,6 +488,7 @@ function cbi_dynlist_init(name)
if (se.value.length == 0 && jump) if (se.value.length == 0 && jump)
{ {
se.parentNode.removeChild(se.nextSibling.nextSibling);
se.parentNode.removeChild(se.nextSibling); se.parentNode.removeChild(se.nextSibling);
se.parentNode.removeChild(se); se.parentNode.removeChild(se);
@ -488,6 +497,9 @@ function cbi_dynlist_init(name)
if (ev.preventDefault) if (ev.preventDefault)
ev.preventDefault(); ev.preventDefault();
/* IE Quirk, needs double focus somehow */
jump.focus();
return false; return false;
} }
@ -499,17 +511,22 @@ function cbi_dynlist_init(name)
n.name = se.name; n.name = se.name;
n.type = se.type; n.type = se.type;
var b = document.createElement('img');
cbi_bind(n, 'keydown', cbi_dynlist_keydown); cbi_bind(n, 'keydown', cbi_dynlist_keydown);
cbi_bind(n, 'keypress', cbi_dynlist_keypress); cbi_bind(n, 'keypress', cbi_dynlist_keypress);
cbi_bind(b, 'click', cbi_dynlist_btnclick);
if (next) if (next)
{ {
se.parentNode.insertBefore(n, next); se.parentNode.insertBefore(n, next);
se.parentNode.insertBefore(b, next);
se.parentNode.insertBefore(document.createElement('br'), next); se.parentNode.insertBefore(document.createElement('br'), next);
} }
else else
{ {
se.parentNode.appendChild(n); se.parentNode.appendChild(n);
se.parentNode.appendChild(b);
se.parentNode.appendChild(document.createElement('br')); se.parentNode.appendChild(document.createElement('br'));
} }
@ -540,11 +557,45 @@ function cbi_dynlist_init(name)
return true; return true;
} }
function cbi_dynlist_btnclick(ev)
{
ev = ev ? ev : window.event;
var se = ev.target ? ev.target : ev.srcElement;
if (se.src.indexOf('remove') > -1)
{
se.previousSibling.value = '';
cbi_dynlist_keydown({
target: se.previousSibling,
keyCode: 8
});
}
else
{
cbi_dynlist_keydown({
target: se.previousSibling,
keyCode: 13
});
}
return false;
}
var inputs = document.getElementsByName(name); var inputs = document.getElementsByName(name);
for( var i = 0; i < inputs.length; i++ ) for( var i = 0; i < inputs.length; i++ )
{ {
var btn = document.createElement('img');
btn.src = respath + (
(i+1) < inputs.length ? '/cbi/remove.gif' : '/cbi/add.gif'
);
inputs[i].parentNode.insertBefore(btn, inputs[i].nextSibling);
cbi_bind(inputs[i], 'keydown', cbi_dynlist_keydown); cbi_bind(inputs[i], 'keydown', cbi_dynlist_keydown);
cbi_bind(inputs[i], 'keypress', cbi_dynlist_keypress); cbi_bind(inputs[i], 'keypress', cbi_dynlist_keypress);
cbi_bind(btn, 'click', cbi_dynlist_btnclick);
} }
} }
@ -718,6 +769,56 @@ function cbi_validate_field(cbid, optional, type)
} }
} }
function cbi_row_swap(elem, up, store)
{
var tr = elem.parentNode;
while (tr && tr.nodeName.toLowerCase() != 'tr')
tr = tr.parentNode;
if (!tr)
return false;
var table = tr.parentNode;
while (table && table.nodeName.toLowerCase() != 'table')
table = table.parentNode;
if (!table)
return false;
var s = up ? 3 : 2;
var e = up ? table.rows.length : table.rows.length - 1;
for (var idx = s; idx < e; idx++)
{
if (table.rows[idx] == tr)
{
if (up)
tr.parentNode.insertBefore(table.rows[idx], table.rows[idx-1]);
else
tr.parentNode.insertBefore(table.rows[idx+1], table.rows[idx]);
break;
}
}
var ids = [ ];
for (idx = 2; idx < table.rows.length; idx++)
{
table.rows[idx].className = table.rows[idx].className.replace(
/cbi-rowstyle-[12]/, 'cbi-rowstyle-' + (1 + (idx % 2))
);
if (table.rows[idx].id && table.rows[idx].id.match(/-([^\-]+)$/) )
ids.push(RegExp.$1);
}
var input = document.getElementById(store);
if (input)
input.value = ids.join(' ');
return false;
}
if( ! String.serialize ) if( ! String.serialize )
String.serialize = function(o) String.serialize = function(o)
{ {

Binary file not shown.

After

Width:  |  Height:  |  Size: 131 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 130 B

View file

@ -50,6 +50,7 @@ AUTO = true
CREATE_PREFIX = "cbi.cts." CREATE_PREFIX = "cbi.cts."
REMOVE_PREFIX = "cbi.rts." REMOVE_PREFIX = "cbi.rts."
RESORT_PREFIX = "cbi.sts."
-- Loads a CBI map from given file, creating an environment and returns it -- Loads a CBI map from given file, creating an environment and returns it
function load(cbimap, ...) function load(cbimap, ...)
@ -1121,6 +1122,20 @@ function TypedSection.parse(self, novld)
end end
end end
if self.sortable then
local stval = RESORT_PREFIX .. self.config .. "." .. self.sectiontype
local order = self.map:formvalue(stval)
if order and #order > 0 then
local sid
local num = 0
for sid in util.imatch(order) do
self.map.uci:reorder(self.config, sid, num)
num = num + 1
end
self.changed = (num > 0)
end
end
if created or self.changed then if created or self.changed then
self:push_events() self:push_events()
end end

View file

@ -27,7 +27,7 @@ $Id$
<% end end %> <% end end %>
</div> </div>
<script type="text/javascript"> <script type="text/javascript">
cbi_dynlist_init('<%=cbid%>'); cbi_dynlist_init('<%=cbid%>', '<%=resource%>');
<% if self.datatype then -%> <% if self.datatype then -%>
<% if #self.keylist > 0 then -%> <% if #self.keylist > 0 then -%>
cbi_combobox_init('<%=cbid .. "." .. i%>', { cbi_combobox_init('<%=cbid .. "." .. i%>', {

View file

@ -35,6 +35,9 @@ end
<% if self.title and #self.title > 0 then -%> <% if self.title and #self.title > 0 then -%>
<legend><%=self.title%></legend> <legend><%=self.title%></legend>
<%- end %> <%- end %>
<%- if self.sortable then -%>
<input type="hidden" id="cbi.sts.<%=self.config%>.<%=self.sectiontype%>" name="cbi.sts.<%=self.config%>.<%=self.sectiontype%>" value="" />
<%- end -%>
<div class="cbi-section-descr"><%=self.description%></div> <div class="cbi-section-descr"><%=self.description%></div>
<div class="cbi-section-node"> <div class="cbi-section-node">
<%- local count = 0 -%> <%- local count = 0 -%>
@ -53,7 +56,9 @@ end
<%-=k.title-%> <%-=k.title-%>
<%- if k.titleref then -%></a><%- end -%> <%- if k.titleref then -%></a><%- end -%>
</th> </th>
<%- count = count + 1; end; end; if self.extedit or self.addremove then -%> <%- count = count + 1; end; end; if self.sortable then -%>
<th class="cbi-section-table-cell"><%:Sort%></th>
<%- end; if self.extedit or self.addremove then -%>
<th class="cbi-section-table-cell">&#160;</th> <th class="cbi-section-table-cell">&#160;</th>
<%- count = count + 1; end -%> <%- count = count + 1; end -%>
</tr> </tr>
@ -67,7 +72,9 @@ end
<%- end -%> <%- end -%>
<%- for i, k in pairs(self.children) do if not k.optional then -%> <%- for i, k in pairs(self.children) do if not k.optional then -%>
<th class="cbi-section-table-cell"<%=width(k)%>><%=k.description%></th> <th class="cbi-section-table-cell"<%=width(k)%>><%=k.description%></th>
<%- end; end; if self.extedit or self.addremove then -%> <%- end; end; if self.sortable then -%>
<th class="cbi-section-table-cell"></th>
<%- end; if self.extedit or self.addremove then -%>
<th class="cbi-section-table-cell"></th> <th class="cbi-section-table-cell"></th>
<%- end -%> <%- end -%>
</tr> </tr>
@ -91,6 +98,13 @@ end
end end
-%> -%>
<%- if self.sortable then -%>
<td class="cbi-section-table-cell" style="width:50px">
<a href="#" onclick="return cbi_row_swap(this, true, 'cbi.sts.<%=self.config%>.<%=self.sectiontype%>')" title="<%:Move up%>"><img src="<%=resource%>/cbi/up.gif" alt="<%:Move up%>" /></a>
<a href="#" onclick="return cbi_row_swap(this, false, 'cbi.sts.<%=self.config%>.<%=self.sectiontype%>')" title="<%:Move down%>"><img src="<%=resource%>/cbi/down.gif" alt="<%:Move down%>" /></a>
</td>
<%- end -%>
<%- if self.extedit or self.addremove then -%> <%- if self.extedit or self.addremove then -%>
<td class="cbi-section-table-cell" style="width:50px"> <td class="cbi-section-table-cell" style="width:50px">
<%- if self.extedit then -%> <%- if self.extedit then -%>

View file

@ -17,6 +17,7 @@ module("luci.controller.admin.network", package.seeall)
function index() function index()
require("luci.i18n") require("luci.i18n")
local uci = require("luci.model.uci").cursor() local uci = require("luci.model.uci").cursor()
local net = require "luci.model.network".init(uci)
local i18n = luci.i18n.translate local i18n = luci.i18n.translate
local has_wifi = nixio.fs.stat("/etc/config/wireless") local has_wifi = nixio.fs.stat("/etc/config/wireless")
local has_switch = false local has_switch = false
@ -59,6 +60,18 @@ function index()
page = entry({"admin", "network", "wireless_status"}, call("wifi_status"), nil, 16) page = entry({"admin", "network", "wireless_status"}, call("wifi_status"), nil, 16)
page.leaf = true page.leaf = true
local wdev
for _, wdev in ipairs(net:get_wifidevs()) do
local wnet
for _, wnet in ipairs(wdev:get_wifinets()) do
entry(
{"admin", "network", "wireless", wnet.netid},
alias("admin", "network", "wireless"),
wdev:name() .. ": " .. wnet:shortname()
)
end
end
end end
page = entry({"admin", "network", "network"}, arcombine(cbi("admin_network/network"), cbi("admin_network/ifaces")), i18n("Interfaces"), 10) page = entry({"admin", "network", "network"}, arcombine(cbi("admin_network/network"), cbi("admin_network/ifaces")), i18n("Interfaces"), 10)
@ -307,6 +320,7 @@ function iface_delete()
end end
function wifi_status() function wifi_status()
local netm = require "luci.model.network".init()
local path = luci.dispatcher.context.requestpath local path = luci.dispatcher.context.requestpath
local arp = luci.sys.net.arptable() local arp = luci.sys.net.arptable()
local rv = { } local rv = { }
@ -314,17 +328,22 @@ function wifi_status()
local dev local dev
for dev in path[#path]:gmatch("[%w%.%-]+") do for dev in path[#path]:gmatch("[%w%.%-]+") do
local j = { id = dev } local j = { id = dev }
local iw = luci.sys.wifi.getiwinfo(dev) local wn = netm:get_wifinet(dev)
local iw = wn and wn.iwinfo
if iw then if iw then
local f local f
for _, f in ipairs({ for _, f in ipairs({
"channel", "frequency", "txpower", "bitrate", "signal", "noise", "channel", "frequency", "txpower", "bitrate", "signal", "noise",
"quality", "quality_max", "mode", "ssid", "bssid", "country", "quality", "quality_max", "bssid", "country",
"encryption", "ifname", "assoclist" "encryption", "ifname", "assoclist"
}) do }) do
j[f] = iw[f] j[f] = iw[f]
end end
end end
j.mode = wn and wn:active_mode() or "?"
j.ssid = wn and wn:active_ssid() or "?"
rv[#rv+1] = j rv[#rv+1] = j
end end

View file

@ -24,7 +24,7 @@ function index()
entry({"admin", "uci", "changes"}, call("action_changes"), i18n("Changes"), 40).query = {redir=redir} entry({"admin", "uci", "changes"}, call("action_changes"), i18n("Changes"), 40).query = {redir=redir}
entry({"admin", "uci", "revert"}, call("action_revert"), i18n("Revert"), 30).query = {redir=redir} entry({"admin", "uci", "revert"}, call("action_revert"), i18n("Revert"), 30).query = {redir=redir}
entry({"admin", "uci", "apply"}, call("action_apply"), i18n("Apply"), 20).query = {redir=redir} entry({"admin", "uci", "apply"}, call("action_apply"), i18n("Apply"), 20).query = {redir=redir}
entry({"admin", "uci", "saveapply"}, call("action_apply"), i18n("Save & Apply"), 10).query = {redir=redir} entry({"admin", "uci", "saveapply"}, call("action_apply"), i18n("Save &#38; Apply"), 10).query = {redir=redir}
end end
function action_changes() function action_changes()

View file

@ -17,7 +17,6 @@ local nw = require "luci.model.network"
local fs = require "nixio.fs" local fs = require "nixio.fs"
arg[1] = arg[1] or "" arg[1] = arg[1] or ""
arg[2] = arg[2] or ""
m = Map("wireless", "", m = Map("wireless", "",
translate("The <em>Device Configuration</em> section covers physical settings of the radio " .. translate("The <em>Device Configuration</em> section covers physical settings of the radio " ..
@ -30,24 +29,25 @@ m:chain("network")
local ifsection local ifsection
function m.on_commit(map) function m.on_commit(map)
local wnet = nw:get_wifinet(arg[2]) local wnet = nw:get_wifinet(arg[1])
if ifsection and wnet then if ifsection and wnet then
ifsection.section = wnet.sid ifsection.section = wnet.sid
m.title = wnet:get_i18n() m.title = luci.util.pcdata(wnet:get_i18n())
end end
end end
nw.init(m.uci) nw.init(m.uci)
local wnet = nw:get_wifinet(arg[2]) local wnet = nw:get_wifinet(arg[1])
local wdev = wnet and wnet:get_device()
-- redirect to overview page if network does not exist anymore (e.g. after a revert) -- redirect to overview page if network does not exist anymore (e.g. after a revert)
if not wnet then if not wnet or not wdev then
luci.http.redirect(luci.dispatcher.build_url("admin/network/wireless")) luci.http.redirect(luci.dispatcher.build_url("admin/network/wireless"))
return return
end end
m.title = wnet:get_i18n() m.title = luci.util.pcdata(wnet:get_i18n())
local iw = luci.sys.wifi.getiwinfo(arg[1]) local iw = luci.sys.wifi.getiwinfo(arg[1])
@ -55,7 +55,7 @@ local tx_powers = iw.txpwrlist or { }
local hw_modes = iw.hwmodelist or { } local hw_modes = iw.hwmodelist or { }
s = m:section(NamedSection, arg[1], "wifi-device", translate("Device Configuration")) s = m:section(NamedSection, wdev:name(), "wifi-device", translate("Device Configuration"))
s.addremove = false s.addremove = false
s:tab("general", translate("General Setup")) s:tab("general", translate("General Setup"))
@ -70,7 +70,7 @@ back.titleref = luci.dispatcher.build_url("admin", "network", "wireless")
st = s:taboption("general", DummyValue, "__status", translate("Status")) st = s:taboption("general", DummyValue, "__status", translate("Status"))
st.template = "admin_network/wifi_status" st.template = "admin_network/wifi_status"
st.ifname = arg[2] st.ifname = arg[1]
en = s:taboption("general", Flag, "disabled", translate("Enable device")) en = s:taboption("general", Flag, "disabled", translate("Enable device"))
en.enabled = "0" en.enabled = "0"
@ -82,11 +82,11 @@ function en.cfgvalue(self, section)
end end
local hwtype = m:get(arg[1], "type") local hwtype = wdev:get("type")
local htcaps = m:get(arg[1], "ht_capab") and true or false local htcaps = wdev:get("ht_capab") and true or false
-- NanoFoo -- NanoFoo
local nsantenna = m:get(arg[1], "antenna") local nsantenna = wdev:get("antenna")
ch = s:taboption("general", Value, "channel", translate("Channel")) ch = s:taboption("general", Value, "channel", translate("Channel"))
ch:value("auto", translate("auto")) ch:value("auto", translate("auto"))
@ -276,7 +276,7 @@ s = m:section(NamedSection, wnet.sid, "wifi-iface", translate("Interface Configu
ifsection = s ifsection = s
s.addremove = false s.addremove = false
s.anonymous = true s.anonymous = true
s.defaults.device = arg[1] s.defaults.device = wdev:name()
s:tab("general", translate("General Setup")) s:tab("general", translate("General Setup"))
s:tab("encryption", translate("Wireless Security")) s:tab("encryption", translate("Wireless Security"))

View file

@ -166,9 +166,7 @@ function newnet.parse(self, section)
uci:save("network") uci:save("network")
uci:save("firewall") uci:save("firewall")
luci.http.redirect(luci.dispatcher.build_url( luci.http.redirect(wnet:adminlink())
"admin/network/wireless", wdev:name(), wnet:ifname()
))
end end
end end
end end

View file

@ -33,8 +33,8 @@ $Id$
'<%:Waiting for router...%>' '<%:Waiting for router...%>'
; ;
legend.style.display = null;
legend.parentNode.style.display = 'block'; legend.parentNode.style.display = 'block';
legend.style.display = 'inline';
stxhr.get('<%=luci.dispatcher.build_url("admin", "network")%>/diag_' + tool + '/' + addr, null, stxhr.get('<%=luci.dispatcher.build_url("admin", "network")%>/diag_' + tool + '/' + addr, null,
function(x) function(x)

View file

@ -167,7 +167,7 @@ $Id$
{ {
if (is_assoc) if (is_assoc)
info.innerHTML = String.format( info.innerHTML = String.format(
'<strong><%:SSID%>:</strong> %s | ' + '<strong><%:SSID%>:</strong> %h | ' +
'<strong><%:Mode%>:</strong> %s<br />' + '<strong><%:Mode%>:</strong> %s<br />' +
'<strong><%:BSSID%>:</strong> %s | ' + '<strong><%:BSSID%>:</strong> %s | ' +
'<strong><%:Encryption%>:</strong> %s', '<strong><%:Encryption%>:</strong> %s',
@ -175,7 +175,12 @@ $Id$
iw.encryption ? iw.encryption.description : '<%:None%>' iw.encryption ? iw.encryption.description : '<%:None%>'
); );
else else
info.innerHTML = '<em><%:Wireless is disabled or not associated%></em>'; info.innerHTML = String.format(
'<strong><%:SSID%>:</strong> %h | ' +
'<strong><%:Mode%>:</strong> %s<br />' +
'<em><%:Wireless is disabled or not associated%></em>',
iw.ssid || '?', iw.mode
);
} }
var dev = document.getElementById(wifidevs[iw.id] + '-iw-devinfo'); var dev = document.getElementById(wifidevs[iw.id] + '-iw-devinfo');

View file

@ -37,7 +37,7 @@
if (d && is_assoc) if (d && is_assoc)
d.innerHTML = String.format( d.innerHTML = String.format(
'<strong><%:Mode%>:</strong> %s | ' + '<strong><%:Mode%>:</strong> %s | ' +
'<strong><%:SSID%>:</strong> %s<br />' + '<strong><%:SSID%>:</strong> %h<br />' +
'<strong><%:BSSID%>:</strong> %s | ' + '<strong><%:BSSID%>:</strong> %s | ' +
'<strong><%:Encryption%>:</strong> %s<br />' + '<strong><%:Encryption%>:</strong> %s<br />' +
'<strong><%:Channel%>:</strong> %d (%.3f GHz) | ' + '<strong><%:Channel%>:</strong> %d (%.3f GHz) | ' +
@ -53,7 +53,12 @@
iw.bitrate ? (iw.bitrate / 1000) : 0, iw.country iw.bitrate ? (iw.bitrate / 1000) : 0, iw.country
); );
else if (d) else if (d)
d.innerHTML = '<%:Wireless is disabled or not associated%>'; d.innerHTML = String.format(
'<strong><%:SSID%>:</strong> %h | ' +
'<strong><%:Mode%>:</strong> %s<br />' +
'<em><%:Wireless is disabled or not associated%></em>',
iw.ssid || '?', iw.mode
);
} }
window.setTimeout(update_status, 5000); window.setTimeout(update_status, 5000);

View file

@ -74,6 +74,12 @@ require("luci.http").prepare_content("application/xhtml+xml")
</div></div> </div></div>
<![endif]--> <![endif]-->
<%- if luci.sys.user.getuser("root") and not luci.sys.user.getpasswd("root") then -%>
<div class="warning">
<strong><%:No password set!%></strong><br />
<%:There is no password set on this router. Please configure a root password to protect the web interface and enable SSH.%>
</div>
<%- end -%>
<p class="skiplink"> <p class="skiplink">
<span id="skiplink1"><a href="#navigation"><%:skiplink1 Skip to navigation%></a></span> <span id="skiplink1"><a href="#navigation"><%:skiplink1 Skip to navigation%></a></span>