从字符串生成超链接

时间:2015-09-14 19:41:43

标签: lua lua-patterns

我正在尝试创建一个函数,该函数将解析字符串并替换使用该URL的HTML版本找到的URL。

例如,test.com将成为<a href="http://www.test.com">http://www.test.com</a>

以下是我正在使用的代码:

-- Replace every URL found in x with an HTML anchor wrapping it.
-- Recognizes bare domains (test.com), http/https/ftp URLs, and dotted-quad
-- IPv4 addresses, each with an optional port and path.
-- Returns the input unchanged when it contains no URL (never nil), and
-- wraps each URL exactly once, left to right.
function parse_url (x)
    -- Recognized top-level domains, unpacked into a lookup set below.
    local domains = [[.ac.ad.ae.aero.af.ag.ai.al.am.an.ao.aq.ar.arpa.as.asia.at.au
       .aw.ax.az.ba.bb.bd.be.bf.bg.bh.bi.biz.bj.bm.bn.bo.br.bs.bt.bv.bw.by.bz.ca
       .cat.cc.cd.cf.cg.ch.ci.ck.cl.cm.cn.co.com.coop.cr.cs.cu.cv.cx.cy.cz.dd.de
       .dj.dk.dm.do.dz.ec.edu.ee.eg.eh.er.es.et.eu.fi.firm.fj.fk.fm.fo.fr.fx.ga
       .gb.gd.ge.gf.gh.gi.gl.gm.gn.gov.gp.gq.gr.gs.gt.gu.gw.gy.hk.hm.hn.hr.ht.hu
       .id.ie.il.im.in.info.int.io.iq.ir.is.it.je.jm.jo.jobs.jp.ke.kg.kh.ki.km.kn
       .kp.kr.kw.ky.kz.la.lb.lc.li.lk.lr.ls.lt.lu.lv.ly.ma.mc.md.me.mg.mh.mil.mk
       .ml.mm.mn.mo.mobi.mp.mq.mr.ms.mt.mu.museum.mv.mw.mx.my.mz.na.name.nato.nc
       .ne.net.nf.ng.ni.nl.no.nom.np.nr.nt.nu.nz.om.org.pa.pe.pf.pg.ph.pk.pl.pm
       .pn.post.pr.pro.ps.pt.pw.py.qa.re.ro.ru.rw.sa.sb.sc.sd.se.sg.sh.si.sj.sk
       .sl.sm.sn.so.sr.ss.st.store.su.sv.sy.sz.tc.td.tel.tf.tg.th.tj.tk.tl.tm.tn
       .to.tp.tr.travel.tt.tv.tw.tz.ua.ug.uk.um.us.uy.va.vc.ve.vg.vi.vn.vu.web.wf
       .ws.xxx.ye.yt.yu.za.zm.zr.zw]]
    local tlds = {}
    for tld in domains:gmatch'%w+' do
       tlds[tld] = true
    end
    -- A candidate is kept only when (1 - #slash) * #path equals the scheme's
    -- table value (0), which forbids a path that does not start with '/'.
    local protocols = {[''] = 0, ['http://'] = 0, ['https://'] = 0, ['ftp://'] = 0}

    -- Build the result piecewise instead of calling string.gsub with the raw
    -- URL: gsub would reinterpret magic characters ('-', '.', '%') inside the
    -- URL as pattern operators, would re-replace duplicates of the first URL,
    -- and would never reach later, different URLs.  It also lets us return
    -- the original text unchanged when nothing matches, instead of nil.
    local out, last = {}, 1
    for pos, url, prot, subd, tld, colon, port, slash, path in x:gmatch
       '()(([%w_.~!*:@&+$/?%%#-]-)(%w[-.%w]*%.)(%w+)(:?)(%d*)(/?)([%w_.~!*:@&+$/?%%#=-]*))'
    do
       if pos >= last  -- skip text already consumed by a previous URL
          and protocols[prot:lower()] == (1 - #slash) * #path
          and (colon == '' or port ~= '' and port + 0 < 65536)
          and (tlds[tld:lower()] or tld:find'^%d+$' and subd:find'^%d+%.%d+%.%d+%.$'
          and math.max(tld, subd:match'^(%d+)%.(%d+)%.(%d+)%.$') < 256)
          and not subd:find'%W%W'
       then
          out[#out + 1] = x:sub(last, pos - 1)
          out[#out + 1] = '<a href="' .. url .. '">' .. url .. '</a>'
          last = pos + #url
       end
    end
    out[#out + 1] = x:sub(last)
    return table.concat(out)
end

我希望解决一些问题:

1)如果字符串x不包含任何URL,则返回nil结果。我希望它保持字符串不变

2)它无法识别内部网链接(例如 http://test)

3)它无法识别多个子域级别(http://mysite.whatever.com无效,http://mysite.whatever.co.uk无效)

4)它会识别同一网址的重复实例,但找不到后续网址。例如,对于字符串"http://www.test.com http://www.test.com http://www.whatever.com",http://www.test.com将被修改两次,但http://www.whatever.com根本不会被识别

我可以更改什么才能使其正常工作?

2 个答案:

答案 0 :(得分:1)

这是我最初编写sane_uri lpeg模式的用例:https://github.com/daurnimator/lpeg_patterns#uri

示例:(填写您自己的html_escape函数)

local lpeg = require "lpeg"
local alpha = lpeg.R("az", "AZ")
local sane_uri = require "lpeg_patterns.uri".sane_uri
-- Global-substitution pattern: capture each sane URI and, when its scheme is
-- whitelisted, replace it with an HTML anchor (supply your own html_escape).
-- Non-URI text is skipped one word at a time so URIs are only found at word
-- boundaries.
local patt = lpeg.Cs((lpeg.Cg(lpeg.C(sane_uri))/function(u, t)
    if t.scheme == "http" or t.scheme == "https" then -- your scheme whitelist
        return "<a href=\""..html_escape(u).."\">"..html_escape(u).."</a>"
    end
end+(alpha^0*(1-alpha)))^0);
-- Fixed: the original printed s:match(...), but `s` was never defined;
-- the substitution pattern built above is `patt`.
print(patt:match("some http://example.com/ text"))

以上示例使用lpeg manual中描述的“全局替换”方法,以及类似于“仅在字边界处查找模式”示例的代码。

答案 1 :(得分:0)

Pure Lua解决方案(如果您没有在目标系统上使用lpeg)

-- all characters allowed to be inside URL according to RFC 3986 but without
-- comma, semicolon, apostrophe, equal, brackets and parentheses
-- (as they are used frequently as URL separators)
-- NOTE(review): the character-class note above describes the gmatch patterns
-- used inside ParseURL below, not this sample string itself.
-- Sample input exercising the cases ParseURL handles: plain words, a bare
-- domain, scheme-only intranet hosts, a multi-level domain, a port+path URL,
-- a query string, and an IP:port address.
local Some_text_with_URLs = [[
   test
   test.com
   http://test.com
   http://test
   http://mysite.whatever.co.uk
   http://www.lua.org:80/manual/5.2/contents.html
   L.ua 5.2
   url=127.0.0.1:8080
   http://retracker.local/announce
   https://www.google.com/search?q=who+are+Lua+people&tbm=isch
   auth link: ftp://user:pwd@site.com/path - not recognized yet :(
]]

-- Scan text_with_URLs for URLs (dotted domain names, dotted-quad IPv4
-- addresses, and scheme-prefixed intranet hosts without a dot, each with an
-- optional port and path) and return a copy of the text in which every
-- recognized URL is wrapped in an <a href="..."> tag.  Text without URLs is
-- returned unchanged.
function ParseURL(text_with_URLs)
    -- Known top-level domains packed into one string, unpacked into a
    -- lookup set right below.
    local domains = [[.ac.ad.ae.aero.af.ag.ai.al.am.an.ao.aq.ar.arpa.as.asia.at.au
       .aw.ax.az.ba.bb.bd.be.bf.bg.bh.bi.biz.bj.bm.bn.bo.br.bs.bt.bv.bw.by.bz.ca
       .cat.cc.cd.cf.cg.ch.ci.ck.cl.cm.cn.co.com.coop.cr.cs.cu.cv.cx.cy.cz.dd.de
       .dj.dk.dm.do.dz.ec.edu.ee.eg.eh.er.es.et.eu.fi.firm.fj.fk.fm.fo.fr.fx.ga
       .gb.gd.ge.gf.gh.gi.gl.gm.gn.gov.gp.gq.gr.gs.gt.gu.gw.gy.hk.hm.hn.hr.ht.hu
       .id.ie.il.im.in.info.int.io.iq.ir.is.it.je.jm.jo.jobs.jp.ke.kg.kh.ki.km.kn
       .kp.kr.kw.ky.kz.la.lb.lc.li.lk.lr.ls.lt.lu.lv.ly.ma.mc.md.me.mg.mh.mil.mk
       .ml.mm.mn.mo.mobi.mp.mq.mr.ms.mt.mu.museum.mv.mw.mx.my.mz.na.name.nato.nc
       .ne.net.nf.ng.ni.nl.no.nom.np.nr.nt.nu.nz.om.org.pa.pe.pf.pg.ph.pk.pl.pm
       .pn.post.pr.pro.ps.pt.pw.py.qa.re.ro.ru.rw.sa.sb.sc.sd.se.sg.sh.si.sj.sk
       .sl.sm.sn.so.sr.ss.st.store.su.sv.sy.sz.tc.td.tel.tf.tg.th.tj.tk.tl.tm.tn
       .to.tp.tr.travel.tt.tv.tw.tz.ua.ug.uk.um.us.uy.va.vc.ve.vg.vi.vn.vu.web.wf
       .ws.xxx.ye.yt.yu.za.zm.zr.zw]]
    local tld_set = {}
    for name in domains:gmatch'%w+' do
       tld_set[name] = true
    end

    -- Coerce four values to numbers and return the largest; used to check
    -- that all four octets of a dotted-quad address are below 256.
    local function max4(a,b,c,d) return math.max(a+0, b+0, c+0, d+0) end

    -- Accepted schemes.  A candidate is kept only when (1 - #slash) * #path
    -- equals the scheme's value (0), which rejects a non-empty path that
    -- does not start with '/'.
    local protocols = {[''] = 0, ['http://'] = 0, ['https://'] = 0, ['ftp://'] = 0}

    -- span_end[start] = one-past-the-end position of an accepted URL;
    -- starts collects accepted start positions for ordered output below.
    local span_end, starts = {}, {}

    -- Pass 1: candidates containing at least one dot (domain names and IPs).
    for from, url, prot, subd, tld, colon, port, slash, path, upto in
       text_with_URLs:gmatch'()(([%w_.~!*:@&+$/?%%#-]-)(%w[-.%w]*%.)(%w+)(:?)(%d*)(/?)([%w_.~!*:@&+$/?%%#=-]*))()'
    do
       local ok = protocols[prot:lower()] == (1 - #slash) * #path and not subd:find'%W%W'
          and (colon == '' or port ~= '' and port + 0 < 65536)
          and (tld_set[tld:lower()] or tld:find'^%d+$' and subd:find'^%d+%.%d+%.%d+%.$'
          and max4(tld, subd:match'^(%d+)%.(%d+)%.(%d+)%.$') < 256)
       if ok then
          span_end[from] = upto
          starts[#starts + 1] = from
       end
    end

    -- Pass 2: scheme-prefixed hosts with no dot (intranet links such as
    -- http://test); positions already accepted in pass 1 are skipped.
    for from, url, prot, dom, colon, port, slash, path, upto in
       text_with_URLs:gmatch'()((%f[%w]%a+://)(%w[-.%w]*)(:?)(%d*)(/?)([%w_.~!*:@&+$/?%%#=-]*))()'
    do
       local ok = not span_end[from] and not (dom..'.'):find'%W%W'
          and protocols[prot:lower()] == (1 - #slash) * #path
          and (colon == '' or port ~= '' and port + 0 < 65536)
       if ok then
          span_end[from] = upto
          starts[#starts + 1] = from
       end
    end

    -- Stitch the output: plain text between URLs stays as-is, each URL
    -- becomes a link; a bare URL without '://' gets an http:// href.
    table.sort(starts)
    local pieces, cursor = {}, 1
    for _, from in ipairs(starts) do
       pieces[#pieces + 1] = text_with_URLs:sub(cursor, from - 1)
       cursor = span_end[from]
       local link = text_with_URLs:sub(from, cursor - 1)
       local href = link:find'://' and link or 'http://'..link
       pieces[#pieces + 1] = '<a href="'..href..'">'..link..'</a>'
    end
    pieces[#pieces + 1] = text_with_URLs:sub(cursor)

    return table.concat(pieces)
end

-- Demo: print the sample text with every recognized URL wrapped in <a> tags.
print(ParseURL(Some_text_with_URLs))