2506 changed files with 606654 additions and 0 deletions
@ -0,0 +1,37 @@ |
|||
<script>
/**
 * App entry point: on launch, check the cached login session and either
 * send the user to the login page or re-validate the stored token.
 */
export default {
	onLaunch: function() {
		let _this = this
		// Session object written at login time (contains tokenTime, userKey, token, ...).
		let systemCache = _this.$commonMethod.getSystemCacheDate('myCache')
		console.log("systemCache", systemCache)
		if (!systemCache) {
			// No cached session -> force login.
			_this.$commonMethod.JumpUrl('/pages/login/login')
		} else {
			// Age of the cached token in milliseconds.
			let currentTime = new Date().getTime() - systemCache.tokenTime
			// Token older than 3 hours (10 800 000 ms): re-verify server-side.
			if (currentTime >= 10800000) {
				// FIX: verifyPowerIsTrue expects the cache OBJECT (it reads .userKey)
				// plus a fallback URL — not the storage-key string 'myCache'.
				// This matches the previously commented-out intended call.
				_this.$commonMethod.verifyPowerIsTrue(systemCache, '/pages/login/login')
			}
		}
	},
	onShow: function() {
		console.log('App Show')
	},
	onHide: function() {
		console.log('App Hide')
	}
}
</script>
|||
|
|||
<style lang="scss"> |
|||
/*每个页面公共css */ |
|||
/* #ifdef H5 */ |
|||
// uni-page-head { display: none; } |
|||
/* #endif */ |
|||
@import '@/common/uni.css'; |
|||
@import "uview-ui/index.scss"; |
|||
@import "@/uni_modules/s-ui/styles/index.scss"; |
|||
/*每个页面公共css */ |
|||
@import "/common/iconfont.css"; |
|||
</style> |
|||
@ -0,0 +1,171 @@ |
|||
const goods = [{ |
|||
"id": "1", |
|||
"goodImg": "https://www.mescroll.com/demo/res/img/pd1.jpg", |
|||
"goodName": "【1】 六罐装荷兰美素佳儿金装2段900g", |
|||
"goodPrice": 1149.00, |
|||
"goodSold": 648 |
|||
}, { |
|||
"id": "2", |
|||
"goodImg": "https://www.mescroll.com/demo/res/img/pd2.jpg", |
|||
"goodName": "【2】 韩国Amore爱茉莉红吕洗发水套装修复受损发质", |
|||
"goodPrice": 89.00, |
|||
"goodSold": 128 |
|||
}, { |
|||
"id": "3", |
|||
"goodImg": "https://www.mescroll.com/demo/res/img/pd3.jpg", |
|||
"goodName": "【3】 Friso美素佳儿 金装婴儿配方奶粉3段900g", |
|||
"goodPrice": 195.00, |
|||
"goodSold": 968 |
|||
}, { |
|||
"id": "4", |
|||
"goodImg": "https://www.mescroll.com/demo/res/img/pd4.jpg", |
|||
"goodName": "【4】 Fisher goodPrice费雪 费雪三轮儿童滑行车", |
|||
"goodPrice": 299.00, |
|||
"goodSold": 85 |
|||
}, { |
|||
"id": "5", |
|||
"goodImg": "https://www.mescroll.com/demo/res/img/pd5.jpg", |
|||
"goodName": "【5】 Babylee巴布力 实木婴儿床 雷卡拉130*70cm", |
|||
"goodPrice": 1889.00, |
|||
"goodSold": 18 |
|||
}, { |
|||
"id": "6", |
|||
"goodImg": "https://www.mescroll.com/demo/res/img/pd6.jpg", |
|||
"goodName": "【6】 Pigeon贝亲 独立三层奶粉盒 送小罐奶粉1段200g", |
|||
"goodPrice": 70.00, |
|||
"goodSold": 658 |
|||
}, { |
|||
"id": "7", |
|||
"goodImg": "https://www.mescroll.com/demo/res/img/pd7.jpg", |
|||
"goodName": "【7】 TTBOO兔兔小布 肩纽扣套装", |
|||
"goodPrice": 268.00, |
|||
"goodSold": 128 |
|||
}, { |
|||
"id": "8", |
|||
"goodImg": "https://www.mescroll.com/demo/res/img/pd8.jpg", |
|||
"goodName": "【8】 Nuna璐拉 婴儿布里奇果精纯嫩肤沐浴露婴儿精纯芦荟胶", |
|||
"goodPrice": 140.00, |
|||
"goodSold": 366 |
|||
}, { |
|||
"id": "9", |
|||
"goodImg": "https://www.mescroll.com/demo/res/img/pd9.jpg", |
|||
"goodName": "【9】 illuma启赋 奶粉3段900g", |
|||
"goodPrice": 252.00, |
|||
"goodSold": 98 |
|||
}, { |
|||
"id": "10", |
|||
"goodImg": "https://www.mescroll.com/demo/res/img/pd10.jpg", |
|||
"goodName": "【10】 Abbott雅培乳蛋白部分水解婴儿配方奶粉3段820g", |
|||
"goodPrice": 89.00, |
|||
"goodSold": 128 |
|||
}, { |
|||
"id": "11", |
|||
"goodImg": "https://www.mescroll.com/demo/res/img/pd11.jpg", |
|||
"goodName": "【11】 韩蜜 酷炫唇蜜(礼盒套装)2.8g*4", |
|||
"goodPrice": 179.00, |
|||
"goodSold": 35 |
|||
}, { |
|||
"id": "12", |
|||
"goodImg": "https://www.mescroll.com/demo/res/img/pd12.jpg", |
|||
"goodName": "【12】 保税区直发【3包装】日本Merries花王纸尿裤NB90", |
|||
"goodPrice": 289.00, |
|||
"goodSold": 1928 |
|||
}, { |
|||
"id": "13", |
|||
"goodImg": "https://www.mescroll.com/demo/res/img/pd13.jpg", |
|||
"goodName": "【13】 Comotomo可么多么 硅胶奶瓶(0-3月奶嘴)150ml绿色", |
|||
"goodPrice": 203.00, |
|||
"goodSold": 87 |
|||
}, { |
|||
"id": "14", |
|||
"goodImg": "https://www.mescroll.com/demo/res/img/pd14.jpg", |
|||
"goodName": "【14】 香港直邮德国瑞德露Rival de Loop芦荟精华安瓶", |
|||
"goodPrice": 152.00, |
|||
"goodSold": 61 |
|||
}, { |
|||
"id": "15", |
|||
"goodImg": "https://www.mescroll.com/demo/res/img/pd15.jpg", |
|||
"goodName": "【15】 保税区直发药师堂尊马油香草味温和保湿无刺激面霜", |
|||
"goodPrice": 269.00, |
|||
"goodSold": 73 |
|||
}, { |
|||
"id": "16", |
|||
"goodImg": "https://www.mescroll.com/demo/res/img/pd16.jpg", |
|||
"goodName": "【16】 香港直邮日本Spatreatment眼膜保湿去细纹法令纹", |
|||
"goodPrice": 219.00, |
|||
"goodSold": 13 |
|||
}, { |
|||
"id": "17", |
|||
"goodImg": "https://www.mescroll.com/demo/res/img/pd17.jpg", |
|||
"goodName": "【17】 韩国MEDIHEALNMF可莱丝针剂睡眠面膜", |
|||
"goodPrice": 81.00, |
|||
"goodSold": 128 |
|||
}, { |
|||
"id": "18", |
|||
"goodImg": "https://www.mescroll.com/demo/res/img/pd18.jpg", |
|||
"goodName": "【18】 DHC蝶翠诗橄榄蜂蜜滋养洗脸手工皂90g", |
|||
"goodPrice": 123.00, |
|||
"goodSold": 77 |
|||
}, { |
|||
"id": "19", |
|||
"goodImg": "https://www.mescroll.com/demo/res/img/pd19.jpg", |
|||
"goodName": "【19】 日本资生堂CPB肌肤之钥新版隔离霜 清爽型 30ml", |
|||
"goodPrice": 429.00, |
|||
"goodSold": 36 |
|||
}, { |
|||
"id": "20", |
|||
"goodImg": "https://www.mescroll.com/demo/res/img/pd20.jpg", |
|||
"goodName": "【20】 Heinz亨氏 婴儿面条优加面条全素套餐组合3口味3盒", |
|||
"goodPrice": 39.00, |
|||
"goodSold": 61 |
|||
}, { |
|||
"id": "21", |
|||
"goodImg": "https://www.mescroll.com/demo/res/img/pd21.jpg", |
|||
"goodName": "【21】 Heinz亨氏 乐维滋果汁泥组合5口味15袋", |
|||
"goodPrice": 69.00, |
|||
"goodSold": 55 |
|||
}, { |
|||
"id": "22", |
|||
"goodImg": "https://www.mescroll.com/demo/res/img/pd22.jpg", |
|||
"goodName": "【22】 保税区直发澳大利亚Swisse高浓度蔓越莓胶囊30粒", |
|||
"goodPrice": 271.00, |
|||
"goodSold": 19 |
|||
}, { |
|||
"id": "23", |
|||
"goodImg": "https://www.mescroll.com/demo/res/img/pd23.jpg", |
|||
"goodName": "【23】 挪威Nordic Naturals小鱼婴幼儿鱼油DHA滴剂", |
|||
"goodPrice": 102.00, |
|||
"goodSold": 125 |
|||
}, { |
|||
"id": "24", |
|||
"goodImg": "https://www.mescroll.com/demo/res/img/pd24.jpg", |
|||
"goodName": "【24】 澳大利亚Bio island DHA for Pregnancy海藻油DHA", |
|||
"goodPrice": 289.00, |
|||
"goodSold": 28 |
|||
}, { |
|||
"id": "25", |
|||
"goodImg": "https://www.mescroll.com/demo/res/img/pd25.jpg", |
|||
"goodName": "【25】 澳大利亚Fatblaster Coconut Detox椰子水", |
|||
"goodPrice": 152.00, |
|||
"goodSold": 17 |
|||
}, { |
|||
"id": "26", |
|||
"goodImg": "https://www.mescroll.com/demo/res/img/pd26.jpg", |
|||
"goodName": "【26】 Suitsky舒比奇 高护极薄舒爽纸尿片尿不湿XL60", |
|||
"goodPrice": 99.00, |
|||
"goodSold": 181 |
|||
}, { |
|||
"id": "27", |
|||
"goodImg": "https://www.mescroll.com/demo/res/img/pd27.jpg", |
|||
"goodName": "【27】 英国JUST SOAP手工皂 玫瑰天竺葵蛋糕皂", |
|||
"goodPrice": 72.00, |
|||
"goodSold": 66 |
|||
}, { |
|||
"id": "28", |
|||
"goodImg": "https://www.mescroll.com/demo/res/img/pd28.jpg", |
|||
"goodName": "【28】 德国NUK 多色婴幼儿带盖学饮杯", |
|||
"goodPrice": 92.00, |
|||
"goodSold": 138 |
|||
}] |
|||
|
|||
export default goods; |
|||
@ -0,0 +1,178 @@ |
|||
/* |
|||
本地模拟接口请求, 仅demo演示用. |
|||
实际项目以您服务器接口返回的数据为准,无需本地处理分页. |
|||
请参考官方写法: https://www.mescroll.com/uni.html?v=20200210#tagUpCallback
|
|||
* */ |
|||
|
|||
// 模拟数据
|
|||
import goods from "./goods.js"; |
|||
|
|||
// 获取新闻列表
|
|||
/**
 * Mock "news list" endpoint.
 * @param {number} pageNum  1-based page index; falsy (0/undefined) simulates a
 *                          pull-to-refresh request that returns one new item.
 * @param {number} pageSize items per page for the load-more branch.
 * @param {number} [delay=1000] simulated network latency in ms (new optional
 *                          parameter; default preserves the original behavior).
 * @returns {Promise<Array<{id:number,title:string,content:string}>>}
 */
export function apiNewList(pageNum, pageSize, delay = 1000) {
	return new Promise((resolve, reject) => {
		// Delay to simulate the network round trip.
		setTimeout(function() {
			try {
				let list = [];
				if (!pageNum) {
					// Pull-to-refresh: return a single freshly generated item.
					let id = new Date().getTime();
					list.push({
						id: id,
						title: "【新增新闻" + id + "】 标题",
						content: "新增新闻的内容"
					});
				} else {
					// Load-more: generate one page of sequentially numbered items.
					for (let i = 0; i < pageSize; i++) {
						let upIndex = (pageNum - 1) * pageSize + i + 1;
						list.push({
							id: upIndex,
							title: "【新闻" + upIndex + "】 标题标题标题标题标题",
							content: "内容内容内容内容内容内容内容内容内容"
						});
					}
					console.log("page.num=" + pageNum + ", page.size=" + pageSize + ", curPageData.length=" + list.length);
				}
				// Simulated request success.
				resolve(list);
			} catch (e) {
				// Simulated request failure.
				reject(e);
			}
		}, delay)
	})
}
|||
|
|||
// 搜索商品
|
|||
/**
 * Mock "search goods" endpoint with keyword filtering and paging.
 * @param {number} pageNum  1-based page index.
 * @param {number} pageSize items per page.
 * @param {string} [keyword] filter; empty or "全部" returns everything.
 * @param {number} [delay=1000] simulated latency in ms (new optional param;
 *                          default preserves the original behavior).
 * @returns {Promise<{list:Array, totalCount:number, totalPage:number, hasNext:boolean}>}
 */
export function apiGoods(pageNum, pageSize, keyword, delay = 1000) {
	return new Promise((resolve, reject) => {
		// Delay to simulate the network round trip.
		setTimeout(() => {
			try {
				// Records matching the keyword (all goods when no keyword given).
				let keywordList;
				if (!keyword || keyword == "全部") {
					keywordList = goods;
				} else {
					if (keyword == "母婴") keyword = "婴"; // 为这个关键词展示多几条数据
					keywordList = goods.filter(good => good.goodName.indexOf(keyword) !== -1);
				}

				// Page slice (slice clamps out-of-range indices safely).
				const start = (pageNum - 1) * pageSize;
				const list = keywordList.slice(start, start + pageSize);

				// Summary fields.
				const totalCount = keywordList.length;
				const totalPage = Math.ceil(totalCount / pageSize);
				const data = {
					list: list,
					totalCount: totalCount,
					totalPage: totalPage,
					hasNext: pageNum < totalPage
				};

				console.log("pageNum=" + pageNum + ", pageSize=" + pageSize + ", data.list.length=" + data.list.length + ", totalCount=" + data.totalCount + ", totalPage=" + data.totalPage + ", hasNext=" + data.hasNext + (keyword ? ", keyword=" + keyword : ""));
				// Simulated request success.
				resolve(data);
			} catch (e) {
				// Simulated request failure.
				reject(e);
			}
		}, delay)
	})
}
|||
|
|||
// 获取微博列表
|
|||
/**
 * Mock "weibo list" endpoint.
 * @param {number} pageNum  1-based page index; falsy simulates pull-to-refresh.
 * @param {number} pageSize items per page for the load-more branch.
 * @param {number} [delay=2000] simulated latency in ms (new optional parameter;
 *                          default preserves the original 2-second behavior).
 * @returns {Promise<Array<{id:number,title:string,content:string}>>}
 */
export function apiWeiboList(pageNum, pageSize, delay = 2000) {
	return new Promise((resolve, reject) => {
		// Delay to simulate the network round trip.
		setTimeout(function() {
			try {
				let list = [];
				if (!pageNum) {
					// Pull-to-refresh: return a single freshly generated item.
					let id = new Date().getTime();
					list.push({id: id, title: "【新增微博" + id + "】 新增微博", content: "新增微博的内容,新增微博的内容"});
				} else {
					// Load-more: generate one page of sequentially numbered items.
					for (let i = 0; i < pageSize; i++) {
						let upIndex = (pageNum - 1) * pageSize + i + 1;
						list.push({id: upIndex, title: "【微博" + upIndex + "】 标题标题标题标题标题标题", content: "内容内容内容内容内容内容内容内容内容内容"});
					}
					console.log("page.num=" + pageNum + ", page.size=" + pageSize + ", curPageData.length=" + list.length);
				}
				// Simulated request success.
				resolve(list);
			} catch (e) {
				// Simulated request failure.
				reject(e);
			}
		}, delay)
	})
}
|||
|
|||
|
|||
// 获取消息列表(共5页消息)
|
|||
/**
 * Mock "message list" endpoint (chat history; only 5 pages exist).
 * @param {number} pageNum  1-based page index.
 * @param {number} pageSize items per page; page 5 is capped at 3 items.
 * @param {number} [delay=1000] simulated latency in ms (new optional param;
 *                          default preserves the original behavior).
 * @returns {Promise<Array<{id:number,title:string,content:string}>>}
 */
export function apiMsgList(pageNum, pageSize, delay = 1000) {
	return new Promise((resolve, reject) => {
		// Delay to simulate the network round trip.
		setTimeout(function() {
			try {
				let list = [];
				for (let i = 0; i < pageSize; i++) {
					// Only 5 pages of history exist; page 5 carries just 3 rows.
					// (Original used an empty-then `if(...){}else{...}` — inverted here.)
					if (pageNum >= 5 && i >= 3) continue;
					let msgId = (pageNum - 1) * pageSize + i + 1;
					// unshift: chat history is prepended so the newest row ends up last.
					list.unshift({
						id: msgId,
						title: "【消息" + msgId + "】",
						content: "内容: 下拉获取聊天记录"
					});
				}
				console.log("page.num=" + pageNum + ", page.size=" + pageSize + ", curPageData.length=" + list.length);
				// Simulated request success.
				resolve(list);
			} catch (e) {
				// Simulated request failure.
				reject(e);
			}
		}, delay)
	})
}
|||
|
|||
// 获取tabs类目
|
|||
/**
 * Mock "tab categories" endpoint.
 * Resolves with the fixed category list after a tiny simulated latency.
 * @returns {Promise<string[]>}
 */
export function apiGetTabs() {
	return new Promise((resolve, reject) => {
		// Minimal delay to mimic a (fast) network call.
		setTimeout(() => {
			try {
				resolve(['全部', '奶粉', '面膜', '图书', '果汁', '奶瓶', '美素', '花王', '韩蜜', '口红', '毛巾', '玩具', '衣服']);
			} catch (e) {
				// Simulated request failure.
				reject(e);
			}
		}, 10)
	})
}
|||
@ -0,0 +1,56 @@ |
|||
// Factory producing a utility object with shared navigation / cache helpers.
// NOTE(review): overlaps with the JumpUrl/setSystemCacheData helpers in the
// common-method module — presumably an older variant; confirm which is used.
let util = () => {
	return {
		// Navigate to UrlStr, trying uni-app navigation APIs in order:
		// redirectTo -> navigateTo -> reLaunch (each only after the previous fails,
		// e.g. when the target is a tabBar page).
		JumpUrlAsync(UrlStr) {
			console.log(UrlStr)
			uni.redirectTo({
				url: UrlStr,
				success: function(data) {
					console.log("redirectTo--->", data)
				},
				fail: errval => {
					// redirectTo failed; fall back to navigateTo.
					uni.navigateTo({
						url: UrlStr,
						success: function(data) {
							console.log("navigateTo--->", data)
						},
						fail: errsunval => {
							// Last resort: relaunch the whole app at UrlStr.
							uni.reLaunch({
								url: UrlStr,
								fail: err_sun_val => {
									console.log(123)
									console.log("reLaunch---->", err_sun_val)
								}
							});
						}
					});
				}
			})

		},
		// Persist the login session under the 'myCache' storage key.
		// NOTE(review): the `return 1` / `return 2` below return from the async
		// success/fail CALLBACKS, not from this method — callers always receive
		// undefined. The `getToUrl` parameter is unused. Verify intent.
		setSystemCacheData(userKeyVal, tokenVal, userNameVal, myPwdVal, getToUrl) {
			console.log('接口返回--缓存设置-1---', userKeyVal, tokenVal);
			let _this = this
			// Session start time; paired with the TTL check done at app launch.
			let tokenTimeVal = new Date().getTime()
			uni.setStorage({
				key: 'myCache',
				data: {
					tokenTime: tokenTimeVal,
					userKey: userKeyVal,
					token: tokenVal,
					userName: userNameVal,
					myPwd: myPwdVal
				},
				success: function(data) {
					return 1;
				},
				fail() {
					return 2;
				}
			});

		}
	}
}
// Export the module factory.
export default util
|||
@ -0,0 +1,44 @@ |
|||
//echarts配置
|
|||
// Shared base configuration object for echarts charts.
const echartsStruct = {
	title: {
		text: "", // main title text; supports \n line breaks
		subtext: "", // subtitle text; supports \n line breaks
		textStyle: { // title text styling
			fontSize: 14, // font size
			fontWeight: 400, // font weight
			align: 'center', // horizontal text alignment (left/right)
			verticalAlign: 'middle', // vertical text alignment (top/bottom)
		},
		top: 20,
		left: 'center',
		textVerticalAlign: 'middle', // vertical alignment of the whole title block (text + subtext)
	},
	grid: {
		right: 0,
		bottom: 30
	},
	xAxis: { // x-axis settings
		type: 'category',
		axisLabel: {
			// x-axis label configuration
			show: true,
			interval: 0, // force every x-axis label to be rendered
		},
		data: []
	},
	yAxis: {
		type: 'value',

	},
	tooltip: {
		trigger: 'axis',
		axisPointer: {
			type: 'shadow'
		}
	},
	series: []
}

// Exposed as a named bundle so consumers can clone/extend the base config.
export const echartsMethod = {
	echartsStruct
}
|||
File diff suppressed because one or more lines are too long
@ -0,0 +1,57 @@ |
|||
/**
 * Thin wrappers around uni.request that attach the user-key/token auth headers.
 * Both now return a Promise resolving with the raw uni.request response.
 * FIXES over the original:
 *  - `let _this = this` was undefined at module scope, so `_this.$refs.uToast`
 *    crashed inside the fail callbacks;
 *  - `return res` inside a success callback was discarded (callers got nothing);
 *  - getApi's fail path referenced an undefined variable `data`.
 */

/**
 * POST request with auth headers.
 * @param {string} getUrl  absolute request URL
 * @param {string} userKey value for the 'user-key' header
 * @param {string} token   value for the 'user-token' header
 * @param {Object} data    JSON request body
 * @returns {Promise<Object>} resolves with the response, rejects on failure
 */
const postApi = async (getUrl, userKey, token, data) => {
	return new Promise((resolve, reject) => {
		uni.request({
			url: getUrl,
			header: {
				'Content-Type': 'application/json', // custom request headers
				'user-key': userKey,
				'user-token': token
			},
			method: 'POST', // HTTP verb must be upper-case
			data: data,
			success: (res) => {
				console.log(res)
				resolve(res)
			},
			fail: function(e) {
				// Surface the failure to the caller (the original crashed here).
				console.log(e)
				reject(e)
			}
		});
	});
};

/**
 * GET request with auth headers.
 * @param {string} getUrl  absolute request URL
 * @param {string} userKey value for the 'user-key' header
 * @param {string} token   value for the 'user-token' header
 * @returns {Promise<Object>} resolves with the response, or a sentinel
 *          {code: 7000, ...} payload on network failure (never rejects).
 */
const getApi = function(getUrl, userKey, token) {
	return new Promise((resolve) => {
		uni.request({
			url: getUrl,
			header: {
				'Content-Type': 'application/json', // custom request headers
				'user-key': userKey,
				'user-token': token
			},
			method: 'GET', // HTTP verb must be upper-case
			success: (res) => {
				resolve(res)
			},
			fail: function(e) {
				console.log(e)
				// Original referenced undefined `data`; resolve with a sentinel.
				resolve({code: 7000, data: null, msg: '获取失败'})
			}
		});
	});
}

export const httpApi = {
	postApi,
	getApi
};
|||
@ -0,0 +1,123 @@ |
|||
//公共图表用色
|
|||
// Shared chart palette (cycled through by chart series).
const colorCommon = ['#5470C6','#91CC75','#FAC858','#EE6666','#73C0DE','#3BA272','#FF0000','#009900','#FFCC00','#660099','#CCFF33','#66CCFF','#000CCC','#FF99FF','#CC9999','#CC9933','#33FFFF','#CC6633','#9933CC','#FF3366']
// API base URLs (previous environment kept for reference).
// const localhostUrl = "http://kpi.beiding.net";
const localhostUrl = "http://hxgk.user.phone";
const serverUrl = "";
|||
/**
 * Navigate to UrlStr, trying uni-app navigation APIs in order:
 * navigateTo -> redirectTo -> reLaunch (each only after the previous fails,
 * e.g. when the target is a tabBar page).
 * FIX: the success-log labels were swapped (navigateTo logged "redirectTo--->"
 * and vice versa); a stray debug `console.log(123)` was removed.
 * @param {string} UrlStr page path, e.g. '/pages/login/login'
 */
const JumpUrl = function(UrlStr) {
	console.log(UrlStr)
	uni.navigateTo({
		url: UrlStr,
		success: function(data) {
			console.log("navigateTo--->", data)
		},
		fail: errval => {
			// navigateTo failed; retry with redirectTo.
			uni.redirectTo({
				url: UrlStr,
				success: function(data) {
					console.log("redirectTo--->", data)
				},
				fail: errsunval => {
					// Last resort: tear down the page stack and relaunch at UrlStr.
					uni.reLaunch({
						url: UrlStr,
						fail: err_sun_val => {
							console.log("reLaunch---->", err_sun_val)
						}
					});
				}
			});
		}
	});
}
|||
//清除缓存
|
|||
/**
 * Clear all local storage and send the user to UrlStr.
 * FIX: the original called `_this.JumpUrl(UrlStr)` but `_this` was never
 * declared in this function — a guaranteed ReferenceError at runtime.
 * Call the module-level JumpUrl directly instead.
 * @param {string} UrlStr page to navigate to after wiping the cache
 */
const delCache = function(UrlStr) {
	uni.clearStorage();
	JumpUrl(UrlStr)
}
|||
//判断授权是否还有效
|
|||
// Ask the backend whether the cached authorization is still valid.
// Expects the full cache object (reads .userKey / .token / ...) plus a fallback
// URL used when the server rejects the token.
const verifyPowerIsTrue = function(userKeyCont, UrlStr){
	console.log(userKeyCont,UrlStr,"判断授权是否还有效")
	// NOTE(review): when invoked as `$commonMethod.verifyPowerIsTrue(...)`, `this`
	// is the commonMethod object; `loading` is set on it but never read in this
	// module — confirm some consumer relies on it.
	let _this =this
	_this.loading = true
	uni.request({
		url:localhostUrl+'/kpiapi/base/verify_empower_istrue',
		header: {
			// 'Content-Type': 'application/x-www-form-urlencoded'
			'Content-Type': 'application/json' // custom request headers
		},
		method:'POST', // HTTP verb must be upper-case
		data:{
			userKey:userKeyCont.userKey
		},
		success: (res) => {
			// console.log('接口返回--login--3--',res.data);
			let callBackData = res.data
			if(callBackData.code != 0){
				// Token rejected: wipe the local cache and go to the fallback page.
				delCache(UrlStr)
			}else{
				// Token still valid: rewrite the cached session (resets tokenTime).
				// NOTE(review): passes .userinfo/.usercont (lower-case) while the
				// cache stores userInfo/userCont — verify field casing.
				setSystemCacheData(userKeyCont.userKey,userKeyCont.token,userKeyCont.userName,userKeyCont.myPwd,userKeyCont.userinfo,userKeyCont.usercont)
			}

		},
		fail:function(e){
			// Network failure: log only; the cached session is left untouched.
			console.log('接口返回--login--2--',e);
		}
	})
}
|||
//设置本地数据缓存
|
|||
// Persist the login session under the 'myCache' storage key.
// NOTE(review): uni.setStorage is asynchronous, so `returnData` is returned
// BEFORE success/fail run — callers always receive false. Consider
// uni.setStorageSync or returning a Promise. `_this` is unused.
const setSystemCacheData = function (userKeyVal,tokenVal,userNameVal,myPwdVal,userInfo,userCont) {
	console.log('接口返回--缓存设置-1---',userKeyVal,tokenVal);
	let _this=this
	// Session start time; App.vue compares it against a 3-hour TTL on launch.
	let tokenTimeVal=new Date().getTime()
	let returnData = false
	uni.setStorage({
		key: 'myCache',
		data: {
			tokenTime:tokenTimeVal,
			userKey:userKeyVal,
			token:tokenVal,
			userName:userNameVal,
			myPwd:myPwdVal,
			userInfo:userInfo,
			userCont:userCont
		},
		success: function () {
			returnData = true
		},
		fail() {
			returnData = false
		}
	});
	return returnData

}
|||
//获取缓存数据
|
|||
/**
 * Read a cached value from local storage synchronously.
 * @param {string} cacheKey storage key, e.g. 'myCache'
 * @returns {*} the stored value (empty string when nothing is stored)
 */
const getSystemCacheDate = function(cacheKey) {
	const cached = uni.getStorageSync(cacheKey)
	return cached
}
|||
//随机生成16禁止颜色
|
|||
/**
 * Generate a random hex color string like "#a1b2c3".
 * @returns {string} '#' followed by exactly six lowercase hex digits
 */
const randomHexColor = function() {
	// Random integer in [0, 0xFFFFFF], rendered in base 16.
	const raw = Math.floor(Math.random() * 16777216).toString(16);
	// Left-pad with zeros to exactly six digits.
	const hex = ('000000' + raw).slice(-6);
	return '#' + hex;
}
|||
//注册定义的方法
|
|||
// Helpers and constants exposed to pages as the shared `commonMethod` module
// (accessed elsewhere as `this.$commonMethod`).
export const commonMethod= {
	JumpUrl,
	delCache,
	verifyPowerIsTrue,
	setSystemCacheData,
	getSystemCacheDate,
	localhostUrl,
	serverUrl,
	colorCommon,
	randomHexColor
}
|||
@ -0,0 +1,165 @@ |
|||
/**
 * Format a millisecond timestamp as "yyyy-MM-dd hh:mm:ss <weekday>"
 * (weekday rendered in Chinese, 星期日..星期六).
 * Note: expects a 13-digit (ms) timestamp; multiply 10-digit ones by 1000.
 * FIX: the zero-padding ternary was copy-pasted five times and `weekNum` was
 * computed but never used — factored into a pad() helper, dead code removed.
 * @param {number} num millisecond timestamp
 * @returns {string}
 */
const happenTime = (num) => {
	const date = new Date(num);
	// Zero-pad any field below 10.
	const pad = (v) => (v < 10 ? '0' + v : v);
	const year = date.getFullYear();
	const month = pad(date.getMonth() + 1); // getMonth() is 0-based
	const day = pad(date.getDate());
	const hours = pad(date.getHours());
	const minutes = pad(date.getMinutes());
	const seconds = pad(date.getSeconds());
	const weekArr = ["星期日", "星期一", "星期二", "星期三", "星期四", "星期五", "星期六"];
	const weekDay = weekArr[date.getDay()];
	return `${year}-${month}-${day} ${hours}:${minutes}:${seconds} ${weekDay}`
}
|||
|
|||
/**
 * Break a millisecond timestamp into its calendar parts.
 * Fields below 10 are zero-padded STRINGS ('05'); 10 and above stay plain
 * numbers — callers rely on this mixed typing, so it is preserved exactly.
 * @param {number} num millisecond timestamp
 * @returns {{year:number, month:(string|number), day:(string|number),
 *            hours:(string|number), minutes:(string|number),
 *            seconds:(string|number), weekDay:string, weekNum:number}}
 */
const happenTimeJson = (num) => {
	const date = new Date(num);
	// Zero-pad values below 10 (returns a string in that case, number otherwise).
	const pad = (v) => (v < 10 ? '0' + v : v);
	const weekArr = ["星期日", "星期一", "星期二", "星期三", "星期四", "星期五", "星期六"];
	return {
		year: date.getFullYear(),
		month: pad(date.getMonth() + 1), // getMonth() is 0-based
		day: pad(date.getDate()),
		hours: pad(date.getHours()),
		minutes: pad(date.getMinutes()),
		seconds: pad(date.getSeconds()),
		weekDay: weekArr[date.getDay()], // Chinese weekday name
		weekNum: date.getDay(),          // 0 (Sunday) .. 6 (Saturday)
	}
}
|||
|
|||
/**
 * Convert a date string / Date / timestamp to a millisecond timestamp.
 * Returns the CURRENT time when called with no argument (null/undefined/'').
 * FIX: the original used `time ? new Date(time) : new Date()`, which silently
 * turned the valid timestamp 0 (the epoch) into "now"; only null, undefined
 * and the empty string fall back to the current time now.
 * @param {string|number|Date} [time]
 * @returns {number} millisecond timestamp
 */
const timeProcessing = (time) => {
	const absent = time === undefined || time === null || time === '';
	const Time = absent ? new Date() : new Date(time);
	return Time.getTime();
}
|||
|
|||
//时间是否在本周
|
|||
// Is `time` within the current week (this Monday 00:00 .. next Monday 00:00)?
const isThisWeek = (time) => {
	// Today's calendar parts (year/month/day of "now").
	const nowTime = happenTimeJson(timeProcessing())
	// Today at midnight, as a ms timestamp.
	// NOTE(review): parses a non-ISO "yyyy-MM-dd hh:mm:ss" string — Date parsing
	// of that format is engine-dependent; confirm on every target platform.
	let timestamp = timeProcessing(`${nowTime.year}-${nowTime.month}-${nowTime.day} 00:00:00`);
	// End of this week (start of next Monday, 00:00).
	/**
	 * getDay() reports Sunday as 0, so it is remapped to 7;
	 * subtracting from 8 lands on next Monday 00:00 (the week's exclusive end).
	 */
	let sundayTiem = timestamp + ((8 - (nowTime.weekNum === 0 ? 7 : nowTime.weekNum)) * 24 * 60 * 60 * 1000)
	// Start of this week (this Monday, 00:00).
	let mondayTime = timestamp - (((nowTime.weekNum === 0 ? 7 : nowTime.weekNum) - 1) * 24 * 60 * 60 * 1000)
	// The time under test, normalized to a ms timestamp.
	let currentData = timeProcessing(time);
	// NOTE(review): strict `>` excludes exactly Monday 00:00:00.000 — confirm
	// whether that boundary should count as "this week".
	if (currentData > mondayTime && currentData < sundayTiem) {
		/**
		 * isweek=true: inside this week; false otherwise.
		 */
		return {
			isweek: true,
			date: happenTimeJson(currentData)
		}
	} else {
		return {
			isweek: false,
			date: happenTimeJson(currentData)
		}
	}
}
|||
|
|||
//时间是否在今天
|
|||
/**
 * Check whether `time` falls on the current local calendar day.
 * FIXES over the original:
 *  - it built "today start" from `day-1` and "today end" from `day+1`, so the
 *    window spanned TWO days and all of yesterday was reported as "today";
 *  - `day` from happenTimeJson is a zero-padded string, so `day-1`/`day+1`
 *    produced invalid date strings at month boundaries (e.g. "2021-02-0").
 * The window is now computed with Date arithmetic, which normalizes day
 * overflow across month/year boundaries. Return shape is unchanged.
 * @param {string|number|Date} time value accepted by timeProcessing
 * @returns {{isweek: boolean, date: Object}} isweek=true means "is today"
 *          (field name kept for caller compatibility)
 */
const isThisToday = (time) => {
	const now = new Date();
	// Midnight at the start of today (local time), no string math involved.
	const startTime = new Date(now.getFullYear(), now.getMonth(), now.getDate()).getTime();
	// Midnight at the start of tomorrow (Date normalizes getDate()+1 overflow).
	const endTime = new Date(now.getFullYear(), now.getMonth(), now.getDate() + 1).getTime();
	// The time under test, normalized to a ms timestamp.
	const currentData = timeProcessing(time);

	return {
		isweek: currentData >= startTime && currentData < endTime,
		date: happenTimeJson(currentData)
	}
}
|||
|
|||
export default { |
|||
timeProcessing, |
|||
happenTimeJson, |
|||
happenTime, |
|||
isThisWeek, |
|||
isThisToday |
|||
} |
|||
@ -0,0 +1,542 @@ |
|||
@font-face { |
|||
font-family: uni-icons; |
|||
src: url('~@/components/uni-icons/uni.ttf') format('truetype'); |
|||
font-weight: 400; |
|||
font-display: "auto"; |
|||
font-style: normal |
|||
} |
|||
|
|||
[class*=" uni-icons-"], |
|||
[class^=uni-icons-] { |
|||
font-family: uni-icons !important; |
|||
speak: none; |
|||
font-style: normal; |
|||
font-weight: 400; |
|||
font-variant: normal; |
|||
text-transform: none; |
|||
line-height: 1; |
|||
vertical-align: baseline; |
|||
display: inline-block; |
|||
-webkit-font-smoothing: antialiased; |
|||
-moz-osx-font-smoothing: grayscale |
|||
} |
|||
|
|||
.uni-icons-shop:before { |
|||
content: "\e609"; |
|||
} |
|||
|
|||
.uni-icons-headphones:before { |
|||
content: "\e8bf"; |
|||
} |
|||
|
|||
.uni-icons-pulldown:before { |
|||
content: "\e588"; |
|||
} |
|||
|
|||
.uni-icons-scan:before { |
|||
content: "\e612"; |
|||
} |
|||
|
|||
.uni-icons-back:before { |
|||
content: "\e471"; |
|||
} |
|||
|
|||
.uni-icons-forward:before { |
|||
content: "\e470"; |
|||
} |
|||
|
|||
.uni-icons-refreshempty:before { |
|||
content: "\e461"; |
|||
} |
|||
|
|||
.uni-icons-checkbox-filled:before { |
|||
content: "\e442"; |
|||
} |
|||
|
|||
.uni-icons-checkbox:before { |
|||
content: "\e7fa"; |
|||
} |
|||
|
|||
.uni-icons-loop:before { |
|||
content: "\e565"; |
|||
} |
|||
|
|||
.uni-icons-arrowthindown:before { |
|||
content: "\e585"; |
|||
} |
|||
|
|||
.uni-icons-arrowthinleft:before { |
|||
content: "\e586"; |
|||
} |
|||
|
|||
.uni-icons-arrowthinright:before { |
|||
content: "\e587"; |
|||
} |
|||
|
|||
.uni-icons-arrowthinup:before { |
|||
content: "\e584"; |
|||
} |
|||
|
|||
.uni-icons-bars:before { |
|||
content: "\e563"; |
|||
} |
|||
|
|||
.uni-icons-cart-filled:before { |
|||
content: "\e7f4"; |
|||
} |
|||
|
|||
.uni-icons-cart:before { |
|||
content: "\e7f5"; |
|||
} |
|||
|
|||
.uni-icons-arrowleft:before { |
|||
content: "\e582"; |
|||
} |
|||
|
|||
.uni-icons-arrowdown:before { |
|||
content: "\e581"; |
|||
} |
|||
|
|||
.uni-icons-arrowright:before { |
|||
content: "\e583"; |
|||
} |
|||
|
|||
.uni-icons-arrowup:before { |
|||
content: "\e580"; |
|||
} |
|||
|
|||
.uni-icons-eye-filled:before { |
|||
content: "\e568"; |
|||
} |
|||
|
|||
.uni-icons-eye-slash-filled:before { |
|||
content: "\e822"; |
|||
} |
|||
|
|||
.uni-icons-eye-slash:before { |
|||
content: "\e823"; |
|||
} |
|||
|
|||
.uni-icons-eye:before { |
|||
content: "\e824"; |
|||
} |
|||
|
|||
.uni-icons-reload:before { |
|||
content: "\e462"; |
|||
} |
|||
|
|||
.uni-icons-hand-thumbsdown-filled:before { |
|||
content: "\e83b"; |
|||
} |
|||
|
|||
.uni-icons-hand-thumbsdown:before { |
|||
content: "\e83c"; |
|||
} |
|||
|
|||
.uni-icons-hand-thumbsup-filled:before { |
|||
content: "\e83d"; |
|||
} |
|||
|
|||
.uni-icons-heart-filled:before { |
|||
content: "\e83e"; |
|||
} |
|||
|
|||
.uni-icons-hand-thumbsup:before { |
|||
content: "\e83f"; |
|||
} |
|||
|
|||
.uni-icons-heart:before { |
|||
content: "\e840"; |
|||
} |
|||
|
|||
.uni-icons-mail-open-filled:before { |
|||
content: "\e84d"; |
|||
} |
|||
|
|||
.uni-icons-mail-open:before { |
|||
content: "\e84e"; |
|||
} |
|||
|
|||
.uni-icons-list:before { |
|||
content: "\e562"; |
|||
} |
|||
|
|||
.uni-icons-map-pin:before { |
|||
content: "\e85e"; |
|||
} |
|||
|
|||
.uni-icons-map-pin-ellipse:before { |
|||
content: "\e864"; |
|||
} |
|||
|
|||
.uni-icons-paperclip:before { |
|||
content: "\e567"; |
|||
} |
|||
|
|||
.uni-icons-images-filled:before { |
|||
content: "\e87a"; |
|||
} |
|||
|
|||
.uni-icons-images:before { |
|||
content: "\e87b"; |
|||
} |
|||
|
|||
.uni-icons-search:before { |
|||
content: "\e466"; |
|||
} |
|||
|
|||
.uni-icons-settings:before { |
|||
content: "\e560"; |
|||
} |
|||
|
|||
.uni-icons-cloud-download:before { |
|||
content: "\e8e4"; |
|||
} |
|||
|
|||
.uni-icons-cloud-upload-filled:before { |
|||
content: "\e8e5"; |
|||
} |
|||
|
|||
.uni-icons-cloud-upload:before { |
|||
content: "\e8e6"; |
|||
} |
|||
|
|||
.uni-icons-cloud-download-filled:before { |
|||
content: "\e8e9"; |
|||
} |
|||
|
|||
.uni-icons-more:before { |
|||
content: "\e507"; |
|||
} |
|||
|
|||
.uni-icons-more-filled:before { |
|||
content: "\e537"; |
|||
} |
|||
|
|||
.uni-icons-refresh:before { |
|||
content: "\e407"; |
|||
} |
|||
|
|||
.uni-icons-refresh-filled:before { |
|||
content: "\e437"; |
|||
} |
|||
|
|||
.uni-icons-undo-filled:before { |
|||
content: "\e7d6"; |
|||
} |
|||
|
|||
.uni-icons-undo:before { |
|||
content: "\e406"; |
|||
} |
|||
|
|||
.uni-icons-redo:before { |
|||
content: "\e405"; |
|||
} |
|||
|
|||
.uni-icons-redo-filled:before { |
|||
content: "\e7d9"; |
|||
} |
|||
|
|||
.uni-icons-camera:before { |
|||
content: "\e301"; |
|||
} |
|||
|
|||
.uni-icons-camera-filled:before { |
|||
content: "\e7ef"; |
|||
} |
|||
|
|||
.uni-icons-smallcircle-filled:before { |
|||
content: "\e801"; |
|||
} |
|||
|
|||
.uni-icons-circle:before { |
|||
content: "\e411"; |
|||
} |
|||
|
|||
.uni-icons-flag-filled:before { |
|||
content: "\e825"; |
|||
} |
|||
|
|||
.uni-icons-flag:before { |
|||
content: "\e508"; |
|||
} |
|||
|
|||
.uni-icons-gear-filled:before { |
|||
content: "\e532"; |
|||
} |
|||
|
|||
.uni-icons-gear:before { |
|||
content: "\e502"; |
|||
} |
|||
|
|||
.uni-icons-home:before { |
|||
content: "\e500"; |
|||
} |
|||
|
|||
.uni-icons-info:before { |
|||
content: "\e504"; |
|||
} |
|||
|
|||
.uni-icons-home-filled:before { |
|||
content: "\e530"; |
|||
} |
|||
|
|||
.uni-icons-info-filled:before { |
|||
content: "\e534"; |
|||
} |
|||
|
|||
.uni-icons-circle-filled:before { |
|||
content: "\e441"; |
|||
} |
|||
|
|||
.uni-icons-chat-filled:before { |
|||
content: "\e847"; |
|||
} |
|||
|
|||
.uni-icons-chat:before { |
|||
content: "\e263"; |
|||
} |
|||
|
|||
.uni-icons-checkmarkempty:before { |
|||
content: "\e472"; |
|||
} |
|||
|
|||
.uni-icons-locked-filled:before { |
|||
content: "\e856"; |
|||
} |
|||
|
|||
.uni-icons-locked:before { |
|||
content: "\e506"; |
|||
} |
|||
|
|||
.uni-icons-map-filled:before { |
|||
content: "\e85c"; |
|||
} |
|||
|
|||
.uni-icons-map:before { |
|||
content: "\e364"; |
|||
} |
|||
|
|||
.uni-icons-minus-filled:before { |
|||
content: "\e440"; |
|||
} |
|||
|
|||
.uni-icons-mic-filled:before { |
|||
content: "\e332"; |
|||
} |
|||
|
|||
.uni-icons-minus:before { |
|||
content: "\e410"; |
|||
} |
|||
|
|||
.uni-icons-micoff:before { |
|||
content: "\e360"; |
|||
} |
|||
|
|||
.uni-icons-mic:before { |
|||
content: "\e302"; |
|||
} |
|||
|
|||
.uni-icons-clear:before { |
|||
content: "\e434"; |
|||
} |
|||
|
|||
.uni-icons-smallcircle:before { |
|||
content: "\e868"; |
|||
} |
|||
|
|||
.uni-icons-close:before { |
|||
content: "\e404"; |
|||
} |
|||
|
|||
.uni-icons-closeempty:before { |
|||
content: "\e460"; |
|||
} |
|||
|
|||
.uni-icons-paperplane:before { |
|||
content: "\e503"; |
|||
} |
|||
|
|||
.uni-icons-paperplane-filled:before { |
|||
content: "\e86e"; |
|||
} |
|||
|
|||
.uni-icons-image:before { |
|||
content: "\e363"; |
|||
} |
|||
|
|||
.uni-icons-image-filled:before { |
|||
content: "\e877"; |
|||
} |
|||
|
|||
.uni-icons-location-filled:before { |
|||
content: "\e333"; |
|||
} |
|||
|
|||
.uni-icons-location:before { |
|||
content: "\e303"; |
|||
} |
|||
|
|||
.uni-icons-plus-filled:before { |
|||
content: "\e439"; |
|||
} |
|||
|
|||
.uni-icons-plus:before { |
|||
content: "\e409"; |
|||
} |
|||
|
|||
.uni-icons-plusempty:before { |
|||
content: "\e468"; |
|||
} |
|||
|
|||
.uni-icons-help-filled:before { |
|||
content: "\e535"; |
|||
} |
|||
|
|||
.uni-icons-help:before { |
|||
content: "\e505"; |
|||
} |
|||
|
|||
.uni-icons-navigate-filled:before { |
|||
content: "\e884"; |
|||
} |
|||
|
|||
.uni-icons-navigate:before { |
|||
content: "\e501"; |
|||
} |
|||
|
|||
.uni-icons-mic-slash-filled:before { |
|||
content: "\e892"; |
|||
} |
|||
|
|||
.uni-icons-sound:before { |
|||
content: "\e590"; |
|||
} |
|||
|
|||
.uni-icons-sound-filled:before { |
|||
content: "\e8a1"; |
|||
} |
|||
|
|||
.uni-icons-spinner-cycle:before { |
|||
content: "\e465"; |
|||
} |
|||
|
|||
.uni-icons-download-filled:before { |
|||
content: "\e8a4"; |
|||
} |
|||
|
|||
.uni-icons-videocam-filled:before { |
|||
content: "\e8af"; |
|||
} |
|||
|
|||
.uni-icons-upload:before { |
|||
content: "\e402"; |
|||
} |
|||
|
|||
.uni-icons-upload-filled:before { |
|||
content: "\e8b1"; |
|||
} |
|||
|
|||
.uni-icons-starhalf:before { |
|||
content: "\e463"; |
|||
} |
|||
|
|||
.uni-icons-star-filled:before { |
|||
content: "\e438"; |
|||
} |
|||
|
|||
.uni-icons-star:before { |
|||
content: "\e408"; |
|||
} |
|||
|
|||
.uni-icons-trash:before { |
|||
content: "\e401"; |
|||
} |
|||
|
|||
.uni-icons-compose:before { |
|||
content: "\e400"; |
|||
} |
|||
|
|||
.uni-icons-videocam:before { |
|||
content: "\e300"; |
|||
} |
|||
|
|||
.uni-icons-trash-filled:before { |
|||
content: "\e8dc"; |
|||
} |
|||
|
|||
.uni-icons-download:before { |
|||
content: "\e403"; |
|||
} |
|||
|
|||
.uni-icons-qq:before { |
|||
content: "\e264"; |
|||
} |
|||
|
|||
.uni-icons-weibo:before { |
|||
content: "\e260"; |
|||
} |
|||
|
|||
.uni-icons-weixin:before { |
|||
content: "\e261"; |
|||
} |
|||
|
|||
.uni-icons-pengyouquan:before { |
|||
content: "\e262"; |
|||
} |
|||
|
|||
.uni-icons-chatboxes:before { |
|||
content: "\e203"; |
|||
} |
|||
|
|||
.uni-icons-chatboxes-filled:before { |
|||
content: "\e233"; |
|||
} |
|||
|
|||
.uni-icons-email-filled:before { |
|||
content: "\e231"; |
|||
} |
|||
|
|||
.uni-icons-email:before { |
|||
content: "\e201"; |
|||
} |
|||
|
|||
.uni-icons-person-filled:before { |
|||
content: "\e131"; |
|||
} |
|||
|
|||
.uni-icons-contact-filled:before { |
|||
content: "\e130"; |
|||
} |
|||
|
|||
.uni-icons-person:before { |
|||
content: "\e101"; |
|||
} |
|||
|
|||
.uni-icons-contact:before { |
|||
content: "\e100"; |
|||
} |
|||
|
|||
.uni-icons-phone:before { |
|||
content: "\e200"; |
|||
} |
|||
|
|||
.uni-icons-personadd-filled:before { |
|||
content: "\e132"; |
|||
} |
|||
|
|||
.uni-icons-personadd:before { |
|||
content: "\e102"; |
|||
} |
|||
|
|||
.uni-icons-phone-filled:before { |
|||
content: "\e230"; |
|||
} |
|||
|
|||
.uni-icons-chatbubble-filled:before { |
|||
content: "\e232"; |
|||
} |
|||
|
|||
.uni-icons-chatbubble:before { |
|||
content: "\e202"; |
|||
} |
|||
@ -0,0 +1,246 @@ |
|||
/* 全局公共样式 */ |
|||
|
|||
body, |
|||
html { |
|||
-webkit-user-select: auto; |
|||
user-select: auto; |
|||
} |
|||
|
|||
/* #ifdef H5 */ |
|||
|
|||
uni-main { |
|||
position: relative; |
|||
background-color: #f5f5f5; |
|||
} |
|||
|
|||
.uni-mask+.uni-left-window, .uni-mask+.uni-right-window{ |
|||
position: fixed; |
|||
} |
|||
|
|||
.uni-container .uni-forms { |
|||
padding: 15px; |
|||
max-width: 460px; |
|||
} |
|||
|
|||
/* #endif */ |
|||
|
|||
/* #ifndef H5 */ |
|||
.uni-nav-menu { |
|||
height: 100vh; |
|||
} |
|||
/* #endif */ |
|||
|
|||
.pointer { |
|||
cursor: pointer; |
|||
} |
|||
|
|||
.uni-top-window { |
|||
z-index: 998; |
|||
overflow: visible; |
|||
} |
|||
|
|||
.uni-tips { |
|||
font-size: 12px; |
|||
color: #666; |
|||
} |
|||
|
|||
/* 容器 */ |
|||
.uni-container { |
|||
padding: 15px; |
|||
box-sizing: border-box; |
|||
} |
|||
|
|||
/* 标题栏 */ |
|||
.uni-header { |
|||
padding: 0 15px; |
|||
display: flex; |
|||
height: 55px; |
|||
align-items: center; |
|||
justify-content: space-between; |
|||
border-bottom: 1px #f5f5f5 solid; |
|||
} |
|||
|
|||
.uni-title { |
|||
margin-right: 10px; |
|||
font-size: 16px; |
|||
font-weight: 500; |
|||
color: #333; |
|||
} |
|||
|
|||
.uni-sub-title { |
|||
margin-top: 3px; |
|||
font-size: 14px; |
|||
color: #999; |
|||
} |
|||
|
|||
.uni-link { |
|||
color: #3A8EE6; |
|||
cursor: pointer; |
|||
text-decoration: underline; |
|||
} |
|||
|
|||
.uni-group { |
|||
display: flex; |
|||
align-items: center; |
|||
justify-content: center; |
|||
word-break: keep-all; |
|||
} |
|||
|
|||
/* 按钮样式 */ |
|||
.uni-button-group { |
|||
margin-top: 30px; |
|||
display: flex; |
|||
align-items: center; |
|||
justify-content: center; |
|||
} |
|||
|
|||
.uni-button { |
|||
padding: 10px 20px; |
|||
font-size: 14px; |
|||
border-radius: 4px; |
|||
line-height: 1; |
|||
margin: 0; |
|||
box-sizing: border-box; |
|||
overflow: initial; |
|||
} |
|||
|
|||
.uni-button+.uni-button { |
|||
margin-left: 10px; |
|||
} |
|||
|
|||
|
|||
.uni-button:hover, |
|||
.uni-button:focus { |
|||
opacity: 0.9; |
|||
} |
|||
|
|||
.uni-button:active { |
|||
opacity: 1; |
|||
} |
|||
|
|||
.uni-button-full { |
|||
width: 100%; |
|||
} |
|||
|
|||
/* 搜索框样式 */ |
|||
.uni-search { |
|||
height: 30px; |
|||
line-height: 30px; |
|||
font-size: 12px; |
|||
padding: 0 10px; |
|||
border: 1px #eee solid; |
|||
margin-right: 10px; |
|||
border-radius: 5px; |
|||
} |
|||
|
|||
/* 分页容器 */ |
|||
.uni-pagination-box { |
|||
display: flex; |
|||
justify-content: center; |
|||
margin-top: 20px; |
|||
} |
|||
|
|||
|
|||
.uni-input-border, |
|||
.uni-textarea-border { |
|||
width: 100%; |
|||
font-size: 14px; |
|||
color: #666; |
|||
border: 1px #e5e5e5 solid; |
|||
border-radius: 5px; |
|||
box-sizing: border-box; |
|||
} |
|||
|
|||
.uni-input-border { |
|||
padding: 0 10px; |
|||
height: 35px; |
|||
|
|||
} |
|||
|
|||
.uni-textarea-border { |
|||
padding: 10px; |
|||
height: 80px; |
|||
} |
|||
|
|||
.uni-disabled { |
|||
background-color: #F5F7FA; |
|||
color: #C0C4CC; |
|||
} |
|||
|
|||
.uni-icon-password-eye { |
|||
position: absolute; |
|||
right: 8px; |
|||
top: 6px; |
|||
font-family: uniicons; |
|||
font-size: 20px; |
|||
font-weight: normal; |
|||
font-style: normal; |
|||
width: 24px; |
|||
height: 24px; |
|||
line-height: 24px; |
|||
color: #999999; |
|||
} |
|||
|
|||
.uni-eye-active { |
|||
color: #007AFF; |
|||
} |
|||
|
|||
.uni-tabs__header { |
|||
position: relative; |
|||
background-color: #f5f7fa; |
|||
border-bottom: 1px solid #e4e7ed; |
|||
} |
|||
|
|||
.uni-tabs__nav-wrap { |
|||
overflow: hidden; |
|||
margin-bottom: -1px; |
|||
position: relative; |
|||
} |
|||
|
|||
.uni-tabs__nav-scroll { |
|||
overflow: hidden; |
|||
} |
|||
|
|||
.uni-tabs__nav { |
|||
position: relative; |
|||
white-space: nowrap; |
|||
} |
|||
|
|||
|
|||
.uni-tabs__item { |
|||
position: relative; |
|||
padding: 0 20px; |
|||
height: 40px; |
|||
box-sizing: border-box; |
|||
line-height: 40px; |
|||
display: inline-block; |
|||
list-style: none; |
|||
font-size: 14px; |
|||
font-weight: 500; |
|||
color: #909399; |
|||
margin-top: -1px; |
|||
margin-left: -1px; |
|||
border: 1px solid transparent; |
|||
cursor: pointer; |
|||
} |
|||
|
|||
.uni-tabs__item.is-active { |
|||
color: #007aff; |
|||
background-color: #fff; |
|||
border-right-color: #dcdfe6; |
|||
border-left-color: #dcdfe6; |
|||
} |
|||
|
|||
.uni-form-item-tips { |
|||
color: #999; |
|||
font-size: 12px; |
|||
margin-top: 3px; |
|||
position: absolute; |
|||
|
|||
} |
|||
|
|||
.uni-form-item-empty { |
|||
color: #999; |
|||
min-height: 36px; |
|||
line-height: 36px; |
|||
} |
|||
@ -0,0 +1,37 @@ |
|||
const install = (Vue,vm) => { |
|||
// 挂载到$u 上
|
|||
vm.$u.utils = { |
|||
|
|||
//设置本地数据缓存
|
|||
setSystemCacheData:(userKeyVal,tokenVal,userNameVal,myPwdVal)=>{ |
|||
console.log('接口返回--缓存设置-1---',userKeyVal,tokenVal); |
|||
let _this=this |
|||
let tokenTimeVal=new Date().getTime() |
|||
let returnData = 0 |
|||
uni.setStorage({ |
|||
key: 'myCache', |
|||
data: { |
|||
tokenTime:tokenTimeVal, |
|||
userKey:userKeyVal, |
|||
token:tokenVal, |
|||
userName:userNameVal, |
|||
myPwd:myPwdVal |
|||
}, |
|||
success: function () { |
|||
returnData = 1111 |
|||
}, |
|||
fail() { |
|||
returnData = 2222 |
|||
} |
|||
}); |
|||
return returnData |
|||
} |
|||
|
|||
|
|||
} |
|||
} |
|||
|
|||
//抛出入口
|
|||
export default { |
|||
install |
|||
} |
|||
@ -0,0 +1,124 @@ |
|||
## 树形层级选择器 |
|||
### 简介 |
|||
为统一样式而生,树形层级选择器,picker弹窗形式的,样式和比例参照uniapp的picker和uni-data-picker组件 |
|||
* 支持单选、多选、父级选择,当然也支持单层选择 |
|||
* 支持Object对象属性自定义映射 |
|||
* 支持显示全部选中、部分选中、未选中三种状态 |
|||
* 支持快速自定义简单样式(分割线、按钮、标题、对齐等),深入样式可复写css |
|||
|
|||
### 使用方法 |
|||
在 `script` 中引入组件 |
|||
``` javascript |
|||
import baTreePicker from "@/components/ba-tree-picker/ba-tree-picker.vue" |
|||
export default { |
|||
components: { |
|||
baTreePicker |
|||
} |
|||
``` |
|||
在 `template` 中使用组件 |
|||
``` javascript |
|||
<ba-tree-picker ref="treePicker" :multiple='false' @select-change="selectChange" title="选择城市" |
|||
:localdata="listData" valueKey="value" textKey="label" childrenKey="children" /> |
|||
``` |
|||
在 `script` 中定义打开方法,和选择监听 |
|||
``` javascript |
|||
methods: { |
|||
// 显示选择器 |
|||
showPicker() { |
|||
this.$refs.treePicker._show(); |
|||
}, |
|||
//监听选择(ids为数组) |
|||
selectChange(ids, names) { |
|||
console.log(ids, names) |
|||
} |
|||
} |
|||
``` |
|||
在 `template` 中调用打开 |
|||
``` javascript |
|||
<view @click="showPicker">调用选择器</view> |
|||
``` |
|||
|
|||
### 属性 |
|||
|属性名|类型|默认值|说明| |
|||
|:-|:-:|:--:|-:| |
|||
|localdata|Array|[]|源数据,目前支持tree结构,后续会考虑支持扁平化结构| |
|||
|valueKey|String|id|指定 Object 中 key 的值作为节点数据id| |
|||
|textKey|String|name|指定 Object 中 key 的值作为节点显示内容| |
|||
|childrenKey|String|children|指定 Object 中 key 的值作为节点子集| |
|||
|multiple|Boolean|false|是否多选,默认单选| |
|||
|selectParent|Boolean|true|是否可以选父级,默认可以| |
|||
|title|String| |标题| |
|||
|titleColor|String||标题颜色| |
|||
|confirmColor|String|#0055ff|确定按钮颜色| |
|||
|cancelColor|String|#757575|取消按钮颜色| |
|||
|switchColor|String|#666|节点切换图标颜色| |
|||
|border|Boolean|false|是否有分割线,默认无| |
|||
|
|||
|
|||
|
|||
### 数据格式 |
|||
|
|||
注意:必须有id、name(id可通过valueKey来配置为其它键值,如value)字段,且唯一 |
|||
|
|||
``` json |
|||
[ |
|||
{ |
|||
id: 1, |
|||
name: '公司1', |
|||
children: [{ |
|||
id: 11, |
|||
name: '研发部', |
|||
children: [{ |
|||
id: 111, |
|||
name: '张三', |
|||
|
|||
},{ |
|||
id: 112, |
|||
name: '李四', |
|||
|
|||
}] |
|||
},{ |
|||
id: 12, |
|||
name: '综合部', |
|||
|
|||
} ] |
|||
}, |
|||
{ |
|||
id: 2, |
|||
name: '公司2', |
|||
children: [{ |
|||
id: 21, |
|||
name: '研发部', |
|||
|
|||
},{ |
|||
id: 22, |
|||
name: '综合部', |
|||
|
|||
},{ |
|||
id: 23, |
|||
name: '财务部', |
|||
|
|||
}, ] |
|||
}, |
|||
{ |
|||
id: 3, |
|||
name: '公司3' |
|||
}, |
|||
{ |
|||
id: 4, |
|||
name: '公司4', |
|||
children: [{ |
|||
id: 41, |
|||
name: '研发部', |
|||
|
|||
}] |
|||
} |
|||
] |
|||
``` |
|||
</details> |
|||
|
|||
### 方法 |
|||
|方法名|参数|默认值|说明| |
|||
|:-|:-:|:--:|-:| |
|||
|_show()| | |显示选择器| |
|||
|_hide()| | |隐藏选择器| |
|||
@ -0,0 +1,619 @@ |
|||
<!-- 树形层级选择器--> |
|||
<!-- 1、支持单选、多选 --> |
|||
<template> |
|||
<view> |
|||
<view class="tree-cover" :class="{'show':showDialog}" @tap="_cancel"></view> |
|||
<view class="tree-dialog" :class="{'show':showDialog}"> |
|||
<view class="tree-bar"> |
|||
<view class="tree-bar-cancel" :style="{'color':cancelColor}" hover-class="hover-c" @tap="_cancel">取消 |
|||
</view> |
|||
<view class="tree-bar-title" :style="{'color':titleColor}">{{title}}</view> |
|||
<view class="tree-bar-confirm" :style="{'color':confirmColor}" hover-class="hover-c" @tap="_confirm"> |
|||
{{multiple?'确定':''}} |
|||
</view> |
|||
</view> |
|||
<view class="tree-view"> |
|||
<scroll-view class="tree-list" :scroll-y="true"> |
|||
<block v-for="(item, index) in treeList" :key="index"> |
|||
<view class="tree-item" :style="[{ |
|||
paddingLeft: item.level*30 + 'rpx' |
|||
}]" :class="{ |
|||
itemBorder: border === true, |
|||
show: item.isShow |
|||
}"> |
|||
<view class="item-label"> |
|||
<view class="item-icon uni-inline-item" @tap.stop="_onItemSwitch(item, index)"> |
|||
<view v-if="!item.isLastLevel&&item.isShowChild" class="switch-on" |
|||
:style="{'border-left-color':switchColor}"> |
|||
</view> |
|||
<view v-else-if="!item.isLastLevel&&!item.isShowChild" class="switch-off" |
|||
:style="{'border-top-color':switchColor}"> |
|||
</view> |
|||
<view v-else class="item-last-dot" :style="{'border-top-color':switchColor}"> |
|||
</view> |
|||
</view> |
|||
<view class="uni-flex-item uni-inline-item" @tap.stop="_onItemSelect(item, index)"> |
|||
<view class="item-name"> {{item.name+(item.childCount?"("+item.childCount+")":'')}} |
|||
</view> |
|||
<view class="item-check" v-if="selectParent?true:item.isLastLevel"> |
|||
<view class="item-check-yes" v-if="item.checkStatus==1" |
|||
:class="{'radio':!multiple}" :style="{'border-color':confirmColor}"> |
|||
<view class="item-check-yes-part" |
|||
:style="{'background-color':confirmColor}"> |
|||
</view> |
|||
</view> |
|||
<view class="item-check-yes" v-else-if="item.checkStatus==2" |
|||
:class="{'radio':!multiple}" :style="{'border-color':confirmColor}"> |
|||
<view class="item-check-yes-all" :style="{'background-color':confirmColor}"> |
|||
</view> |
|||
</view> |
|||
<view class="item-check-no" v-else :class="{'radio':!multiple}" |
|||
:style="{'border-color':confirmColor}"></view> |
|||
</view> |
|||
</view> |
|||
</view> |
|||
|
|||
</view> |
|||
</block> |
|||
</scroll-view> |
|||
</view> |
|||
</view> |
|||
</view> |
|||
</template> |
|||
|
|||
<script> |
|||
export default { |
|||
emits: ['select-change'], |
|||
name: "ba-tree-picker", |
|||
props: { |
|||
valueKey: { |
|||
type: String, |
|||
default: 'id' |
|||
}, |
|||
textKey: { |
|||
type: String, |
|||
default: 'name' |
|||
}, |
|||
childrenKey: { |
|||
type: String, |
|||
default: 'children' |
|||
}, |
|||
localdata: { |
|||
type: Array, |
|||
default: function() { |
|||
return [] |
|||
} |
|||
}, |
|||
localTreeList: { //在已经格式化好的数据 |
|||
type: Array, |
|||
default: function() { |
|||
return [] |
|||
} |
|||
}, |
|||
selectedData: { |
|||
type: Array, |
|||
default: function() { |
|||
return [] |
|||
} |
|||
}, |
|||
title: { |
|||
type: String, |
|||
default: '' |
|||
}, |
|||
multiple: { // 是否可以多选 |
|||
type: Boolean, |
|||
default: true |
|||
}, |
|||
selectParent: { //是否可以选父级 |
|||
type: Boolean, |
|||
default: true |
|||
}, |
|||
confirmColor: { // 确定按钮颜色 |
|||
type: String, |
|||
default: '' // #0055ff |
|||
}, |
|||
cancelColor: { // 取消按钮颜色 |
|||
type: String, |
|||
default: '' // #757575 |
|||
}, |
|||
titleColor: { // 标题颜色 |
|||
type: String, |
|||
default: '' // |
|||
}, |
|||
switchColor: { // 节点切换图标颜色 |
|||
type: String, |
|||
default: '' // #666 |
|||
}, |
|||
border: { // 是否有分割线 |
|||
type: Boolean, |
|||
default: false |
|||
}, |
|||
}, |
|||
data() { |
|||
return { |
|||
showDialog: false, |
|||
treeList: [] |
|||
} |
|||
}, |
|||
computed: {}, |
|||
methods: { |
|||
_show() { |
|||
this.showDialog = true |
|||
}, |
|||
_hide() { |
|||
this.showDialog = false |
|||
}, |
|||
_cancel() { |
|||
this._hide() |
|||
this.$emit("cancel", ''); |
|||
}, |
|||
_confirm() { //多选 |
|||
let selectedList = []; //如果子集全部选中,只返回父级 id |
|||
let selectedNames; |
|||
let currentLevel = -1; |
|||
this.treeList.forEach((item, index) => { |
|||
if (currentLevel >= 0 && item.level > currentLevel) { |
|||
|
|||
} else { |
|||
if (item.checkStatus === 2) { |
|||
currentLevel = item.level; |
|||
selectedList.push(item.id); |
|||
selectedNames = selectedNames ? selectedNames + ' / ' + item.name : item.name; |
|||
} else { |
|||
currentLevel = -1; |
|||
} |
|||
} |
|||
}) |
|||
//console.log('_confirm', selectedList); |
|||
this._hide() |
|||
this.$emit("select-change", selectedList, selectedNames); |
|||
}, |
|||
//格式化原数据(原数据为tree结构) |
|||
_formatTreeData(list = [], level = 0, parentItem, isShowChild = true) { |
|||
let nextIndex = 0; |
|||
let parentId = -1; |
|||
let initCheckStatus = 0; |
|||
if (parentItem) { |
|||
nextIndex = this.treeList.findIndex(item => item.id === parentItem.id) + 1; |
|||
parentId = parentItem.id; |
|||
if (!this.multiple) { //单选 |
|||
initCheckStatus = 0; |
|||
} else |
|||
initCheckStatus = parentItem.checkStatus == 2 ? 2 : 0; |
|||
} |
|||
list.forEach(item => { |
|||
let isLastLevel = true; |
|||
if (item && item[this.childrenKey]) { |
|||
let children = item[this.childrenKey]; |
|||
if (Array.isArray(children) && children.length > 0) { |
|||
isLastLevel = false; |
|||
} |
|||
} |
|||
|
|||
let itemT = { |
|||
id: item[this.valueKey], |
|||
name: item[this.textKey], |
|||
level, |
|||
isLastLevel, |
|||
isShow: isShowChild, |
|||
isShowChild: false, |
|||
checkStatus: initCheckStatus, |
|||
orCheckStatus: 0, |
|||
parentId, |
|||
children: item[this.childrenKey], |
|||
childCount: item[this.childrenKey] ? item[this.childrenKey].length : 0, |
|||
childCheckCount: 0, |
|||
childCheckPCount: 0 |
|||
}; |
|||
|
|||
if (this.selectedData.indexOf(itemT.id) >= 0) { |
|||
itemT.checkStatus = 2; |
|||
itemT.orCheckStatus = 2; |
|||
itemT.childCheckCount = itemT.children ? itemT.children.length : 0; |
|||
this._onItemParentSelect(itemT, nextIndex); |
|||
} |
|||
|
|||
this.treeList.splice(nextIndex, 0, itemT); |
|||
nextIndex++; |
|||
}) |
|||
//console.log(this.treeList); |
|||
}, |
|||
// 节点打开、关闭切换 |
|||
_onItemSwitch(item, index) { |
|||
// console.log(item) |
|||
//console.log('_itemSwitch') |
|||
if (item.isLastLevel === true) { |
|||
return; |
|||
} |
|||
item.isShowChild = !item.isShowChild; |
|||
if (item.children) { |
|||
this._formatTreeData(item.children, item.level + 1, item); |
|||
item.children = undefined; |
|||
} else { |
|||
this._onItemChildSwitch(item, index); |
|||
} |
|||
}, |
|||
_onItemChildSwitch(item, index) { |
|||
//console.log('_onItemChildSwitch') |
|||
const firstChildIndex = index + 1; |
|||
if (firstChildIndex > 0) |
|||
for (var i = firstChildIndex; i < this.treeList.length; i++) { |
|||
let itemChild = this.treeList[i]; |
|||
if (itemChild.level > item.level) { |
|||
if (item.isShowChild) { |
|||
if (itemChild.parentId === item.id) { |
|||
itemChild.isShow = item.isShowChild; |
|||
if (!itemChild.isShow) { |
|||
itemChild.isShowChild = false; |
|||
} |
|||
} |
|||
} else { |
|||
itemChild.isShow = item.isShowChild; |
|||
itemChild.isShowChild = false; |
|||
} |
|||
} else { |
|||
return; |
|||
} |
|||
} |
|||
}, |
|||
// 节点选中、取消选中 |
|||
_onItemSelect(item, index) { |
|||
//console.log('_onItemSelect') |
|||
//console.log(item) |
|||
if (!this.multiple) { //单选 |
|||
item.checkStatus = item.checkStatus == 0 ? 2 : 0; |
|||
|
|||
this.treeList.forEach((v, i) => { |
|||
if (i != index) { |
|||
this.treeList[i].checkStatus = 0 |
|||
} else { |
|||
this.treeList[i].checkStatus = 2 |
|||
} |
|||
}) |
|||
|
|||
let selectedList = []; |
|||
let selectedNames; |
|||
selectedList.push(item.id); |
|||
selectedNames = item.name; |
|||
this._hide() |
|||
this.$emit("select-change", selectedList, selectedNames); |
|||
return |
|||
} |
|||
|
|||
let oldCheckStatus = item.checkStatus; |
|||
switch (oldCheckStatus) { |
|||
case 0: |
|||
item.checkStatus = 2; |
|||
item.childCheckCount = item.childCount; |
|||
item.childCheckPCount = 0; |
|||
break; |
|||
case 1: |
|||
case 2: |
|||
item.checkStatus = 0; |
|||
item.childCheckCount = 0; |
|||
item.childCheckPCount = 0; |
|||
break; |
|||
default: |
|||
break; |
|||
} |
|||
//子节点 全部选中 |
|||
this._onItemChildSelect(item, index); |
|||
//父节点 选中状态变化 |
|||
this._onItemParentSelect(item, index, oldCheckStatus); |
|||
}, |
|||
_onItemChildSelect(item, index) { |
|||
//console.log('_onItemChildSelect') |
|||
let allChildCount = 0; |
|||
if (item.childCount && item.childCount > 0) { |
|||
index++; |
|||
while (index < this.treeList.length && this.treeList[index].level > item.level) { |
|||
let itemChild = this.treeList[index]; |
|||
itemChild.checkStatus = item.checkStatus; |
|||
if (itemChild.checkStatus == 2) { |
|||
itemChild.childCheckCount = itemChild.childCount; |
|||
itemChild.childCheckPCount = 0; |
|||
} else if (itemChild.checkStatus == 0) { |
|||
itemChild.childCheckCount = 0; |
|||
itemChild.childCheckPCount = 0; |
|||
} |
|||
// console.log('>>>>index:', index, 'item:', itemChild.name, ' status:', itemChild |
|||
// .checkStatus) |
|||
index++; |
|||
} |
|||
} |
|||
}, |
|||
_onItemParentSelect(item, index, oldCheckStatus) { |
|||
//console.log('_onItemParentSelect') |
|||
//console.log(item) |
|||
const parentIndex = this.treeList.findIndex(itemP => itemP.id == item.parentId); |
|||
//console.log('parentIndex:' + parentIndex) |
|||
if (parentIndex >= 0) { |
|||
let itemParent = this.treeList[parentIndex]; |
|||
let count = itemParent.childCheckCount; |
|||
let oldCheckStatusParent = itemParent.checkStatus; |
|||
|
|||
if (oldCheckStatus == 1) { |
|||
itemParent.childCheckPCount -= 1; |
|||
} else if (oldCheckStatus == 2) { |
|||
itemParent.childCheckCount -= 1; |
|||
} |
|||
if (item.checkStatus == 1) { |
|||
itemParent.childCheckPCount += 1; |
|||
} else if (item.checkStatus == 2) { |
|||
itemParent.childCheckCount += 1; |
|||
} |
|||
|
|||
if (itemParent.childCheckCount <= 0 && itemParent.childCheckPCount <= 0) { |
|||
itemParent.childCheckCount = 0; |
|||
itemParent.childCheckPCount = 0; |
|||
itemParent.checkStatus = 0; |
|||
} else if (itemParent.childCheckCount >= itemParent.childCount) { |
|||
itemParent.childCheckCount = itemParent.childCount; |
|||
itemParent.childCheckPCount = 0; |
|||
itemParent.checkStatus = 2; |
|||
} else { |
|||
itemParent.checkStatus = 1; |
|||
} |
|||
//console.log('itemParent:', itemParent) |
|||
this._onItemParentSelect(itemParent, parentIndex, oldCheckStatusParent); |
|||
} |
|||
}, |
|||
// 重置数据 |
|||
_reTreeList() { |
|||
this.treeList.forEach((v, i) => { |
|||
this.treeList[i].checkStatus = v.orCheckStatus |
|||
}) |
|||
}, |
|||
_initTree() { |
|||
this.treeList = []; |
|||
this._formatTreeData(this.localdata); |
|||
} |
|||
}, |
|||
watch: { |
|||
localdata() { |
|||
this._initTree(); |
|||
}, |
|||
localTreeList() { |
|||
this.treeList = this.localTreeList; |
|||
} |
|||
}, |
|||
mounted() { |
|||
this._initTree(); |
|||
} |
|||
} |
|||
</script> |
|||
|
|||
<style scoped> |
|||
.tree-cover { |
|||
position: fixed; |
|||
top: 0rpx; |
|||
right: 0rpx; |
|||
bottom: 0rpx; |
|||
left: 0rpx; |
|||
z-index: 100; |
|||
background-color: rgba(0, 0, 0, .4); |
|||
opacity: 0; |
|||
transition: all 0.3s ease; |
|||
visibility: hidden; |
|||
} |
|||
|
|||
.tree-cover.show { |
|||
visibility: visible; |
|||
opacity: 1; |
|||
} |
|||
|
|||
.tree-dialog { |
|||
position: fixed; |
|||
top: 0rpx; |
|||
right: 0rpx; |
|||
bottom: 0rpx; |
|||
left: 0rpx; |
|||
background-color: #fff; |
|||
border-top-left-radius: 10px; |
|||
border-top-right-radius: 10px; |
|||
/* #ifndef APP-NVUE */ |
|||
display: flex; |
|||
/* #endif */ |
|||
flex-direction: column; |
|||
z-index: 102; |
|||
top: 20%; |
|||
transition: all 0.3s ease; |
|||
transform: translateY(100%); |
|||
} |
|||
|
|||
.tree-dialog.show { |
|||
transform: translateY(0); |
|||
} |
|||
|
|||
.tree-bar { |
|||
/* background-color: #fff; */ |
|||
height: 90rpx; |
|||
padding-left: 25rpx; |
|||
padding-right: 25rpx; |
|||
display: flex; |
|||
justify-content: space-between; |
|||
align-items: center; |
|||
box-sizing: border-box; |
|||
border-bottom-width: 1rpx !important; |
|||
border-bottom-style: solid; |
|||
border-bottom-color: #f5f5f5; |
|||
font-size: 32rpx; |
|||
color: #757575; |
|||
line-height: 1; |
|||
} |
|||
|
|||
.tree-bar-confirm { |
|||
color: #0055ff; |
|||
padding: 15rpx; |
|||
} |
|||
|
|||
.tree-bar-title {} |
|||
|
|||
.tree-bar-cancel { |
|||
color: #757575; |
|||
padding: 15rpx; |
|||
} |
|||
|
|||
.tree-view { |
|||
flex: 1; |
|||
padding: 20rpx; |
|||
/* #ifndef APP-NVUE */ |
|||
display: flex; |
|||
/* #endif */ |
|||
flex-direction: column; |
|||
overflow: hidden; |
|||
height: 100%; |
|||
} |
|||
|
|||
.tree-list { |
|||
flex: 1; |
|||
height: 100%; |
|||
overflow: hidden; |
|||
} |
|||
|
|||
.tree-item { |
|||
display: flex; |
|||
justify-content: space-between; |
|||
align-items: center; |
|||
line-height: 1; |
|||
height: 0; |
|||
opacity: 0; |
|||
transition: 0.2s; |
|||
overflow: hidden; |
|||
} |
|||
|
|||
.tree-item.show { |
|||
height: 90rpx; |
|||
opacity: 1; |
|||
} |
|||
|
|||
.tree-item.showchild:before { |
|||
transform: rotate(90deg); |
|||
} |
|||
|
|||
.tree-item.last:before { |
|||
opacity: 0; |
|||
} |
|||
|
|||
.switch-on { |
|||
width: 0; |
|||
height: 0; |
|||
border-left: 10rpx solid transparent; |
|||
border-right: 10rpx solid transparent; |
|||
border-top: 15rpx solid #666; |
|||
} |
|||
|
|||
.switch-off { |
|||
width: 0; |
|||
height: 0; |
|||
border-bottom: 10rpx solid transparent; |
|||
border-top: 10rpx solid transparent; |
|||
border-left: 15rpx solid #666; |
|||
} |
|||
|
|||
.item-last-dot { |
|||
position: absolute; |
|||
width: 10rpx; |
|||
height: 10rpx; |
|||
border-radius: 100%; |
|||
background: #666; |
|||
} |
|||
|
|||
.item-icon { |
|||
width: 26rpx; |
|||
height: 26rpx; |
|||
margin-right: 8rpx; |
|||
padding-right: 20rpx; |
|||
padding-left: 20rpx; |
|||
} |
|||
|
|||
.item-label { |
|||
flex: 1; |
|||
display: flex; |
|||
align-items: center; |
|||
height: 100%; |
|||
line-height: 1.2; |
|||
} |
|||
|
|||
.item-name { |
|||
flex: 1; |
|||
overflow: hidden; |
|||
text-overflow: ellipsis; |
|||
white-space: nowrap; |
|||
width: 450rpx; |
|||
} |
|||
|
|||
.item-check { |
|||
width: 40px; |
|||
height: 40px; |
|||
display: flex; |
|||
justify-content: center; |
|||
align-items: center; |
|||
} |
|||
|
|||
.item-check-yes, |
|||
.item-check-no { |
|||
width: 20px; |
|||
height: 20px; |
|||
border-top-left-radius: 20%; |
|||
border-top-right-radius: 20%; |
|||
border-bottom-right-radius: 20%; |
|||
border-bottom-left-radius: 20%; |
|||
border-top-width: 1rpx; |
|||
border-left-width: 1rpx; |
|||
border-bottom-width: 1rpx; |
|||
border-right-width: 1rpx; |
|||
border-style: solid; |
|||
border-color: #0055ff; |
|||
display: flex; |
|||
justify-content: center; |
|||
align-items: center; |
|||
box-sizing: border-box; |
|||
} |
|||
|
|||
.item-check-yes-part { |
|||
width: 12px; |
|||
height: 12px; |
|||
border-top-left-radius: 20%; |
|||
border-top-right-radius: 20%; |
|||
border-bottom-right-radius: 20%; |
|||
border-bottom-left-radius: 20%; |
|||
background-color: #0055ff; |
|||
} |
|||
|
|||
.item-check-yes-all { |
|||
margin-bottom: 5px; |
|||
border: 2px solid #007aff; |
|||
border-left: 0; |
|||
border-top: 0; |
|||
height: 12px; |
|||
width: 6px; |
|||
transform-origin: center; |
|||
/* #ifndef APP-NVUE */ |
|||
transition: all 0.3s; |
|||
/* #endif */ |
|||
transform: rotate(45deg); |
|||
} |
|||
|
|||
.item-check .radio { |
|||
border-top-left-radius: 50%; |
|||
border-top-right-radius: 50%; |
|||
border-bottom-right-radius: 50%; |
|||
border-bottom-left-radius: 50%; |
|||
} |
|||
|
|||
.item-check .radio .item-check-yes-b { |
|||
border-top-left-radius: 50%; |
|||
border-top-right-radius: 50%; |
|||
border-bottom-right-radius: 50%; |
|||
border-bottom-left-radius: 50%; |
|||
} |
|||
|
|||
.hover-c { |
|||
opacity: 0.6; |
|||
} |
|||
|
|||
.itemBorder { |
|||
border-bottom: 1px solid #e5e5e5; |
|||
} |
|||
</style> |
|||
@ -0,0 +1,67 @@ |
|||
<!-- 商品列表组件 <good-list :list="xx"></good-list> --> |
|||
<template> |
|||
<view class="good-list"> |
|||
<view :id="'good'+good.id" class="good-li" v-for="good in list" :key="good.id"> |
|||
<image class="good-img" :src="good.goodImg" mode="widthFix"/> |
|||
<view class="flex-item"> |
|||
<view class="good-name">{{good.goodName}}</view> |
|||
<text class="good-price">{{good.goodPrice}} 元</text> |
|||
<text class="good-sold">已售{{good.goodSold}}件</text> |
|||
</view> |
|||
</view> |
|||
</view> |
|||
</template> |
|||
|
|||
<script> |
|||
export default { |
|||
props:{ |
|||
list: { |
|||
type: Array, |
|||
default(){ |
|||
return [] |
|||
} |
|||
} |
|||
} |
|||
} |
|||
</script> |
|||
|
|||
<style lang="scss"> |
|||
.good-list{ |
|||
background-color: #fff; |
|||
|
|||
.good-li{ |
|||
display: flex; |
|||
align-items: center; |
|||
padding: 20upx; |
|||
border-bottom: 1upx solid #eee; |
|||
|
|||
.good-img{ |
|||
width: 160upx; |
|||
height: 160upx; |
|||
margin-right: 20rpx; |
|||
} |
|||
|
|||
.flex-item{ |
|||
flex: 1; |
|||
|
|||
.good-name{ |
|||
font-size: 26upx; |
|||
line-height: 40upx; |
|||
height: 80upx; |
|||
margin-bottom: 20upx; |
|||
overflow: hidden; |
|||
} |
|||
.good-price{ |
|||
font-size: 26upx; |
|||
color: red; |
|||
} |
|||
.good-sold{ |
|||
font-size: 24upx; |
|||
margin-left: 16upx; |
|||
color: gray; |
|||
} |
|||
|
|||
} |
|||
} |
|||
} |
|||
</style> |
|||
@ -0,0 +1,214 @@ |
|||
<!-- tab组件: <me-tabs v-model="tabIndex" :tabs="tabs" @change="tabChange"></me-tabs> --> |
|||
<template> |
|||
<view class="me-tabs" :class="{'tabs-fixed': fixed}" :style="{height: tabHeightVal, top:topFixed, 'margin-top':topMargin}"> |
|||
<scroll-view v-if="tabs.length" :id="viewId" :scroll-left="scrollLeft" scroll-x scroll-with-animation :scroll-animation-duration="300"> |
|||
<view class="tabs-item" :class="{'tabs-flex':!isScroll, 'tabs-scroll':isScroll}"> |
|||
<!-- tab --> |
|||
<view class="tab-item" :style="{width: tabWidthVal, height: tabHeightVal, 'line-height':tabHeightVal}" v-for="(tab, i) in tabs" :class="{'active': value===i}" :key="i" @click="tabClick(i)"> |
|||
<view> |
|||
{{getTabName(tab)}} |
|||
</view> |
|||
|
|||
<!-- <view class="tabs-line" ></view> --> |
|||
</view> |
|||
<!-- 下划线 --> |
|||
<!-- <view class="tabs-line" :style="{left:lineLeft}"></view> --> |
|||
</view> |
|||
</scroll-view> |
|||
</view> |
|||
</template> |
|||
|
|||
<script> |
|||
export default { |
|||
props:{ |
|||
tabs: { // 支持格式: ['全部', '待付款'] 或 [{name:'全部'}, {name:'待付款'}] |
|||
type: Array, |
|||
default(){ |
|||
return [] |
|||
} |
|||
}, |
|||
nameKey: { // 取name的字段 |
|||
type: String, |
|||
default: 'name' |
|||
}, |
|||
value: { // 当前显示的下标 (使用v-model语法糖: 1.props需为value; 2.需回调input事件) |
|||
type: [String, Number], |
|||
default: 0 |
|||
}, |
|||
fixed: Boolean, // 是否悬浮,默认false |
|||
tabWidth: Number, // 每个tab的宽度,默认不设置值,为flex平均分配; 如果指定宽度,则不使用flex,每个tab居左,超过则水平滑动(单位默认rpx) |
|||
height: { // 高度,单位rpx |
|||
type: Number, |
|||
default: 64 |
|||
}, |
|||
top: { // 顶部偏移的距离,默认单位rpx (当fixed=true时,已加上windowTop) |
|||
type: Number, |
|||
default: 0 |
|||
} |
|||
}, |
|||
data() { |
|||
return { |
|||
viewId: 'id_' + Math.random().toString(36).substr(2,16), |
|||
scrollLeft: 0, |
|||
windowWidth: 0, |
|||
windowTop: 0 |
|||
} |
|||
}, |
|||
computed: { |
|||
isScroll(){ |
|||
return this.tabWidth && this.tabs.length // 指定了tabWidth的宽度,则支持水平滑动 |
|||
}, |
|||
tabHeightPx(){ |
|||
return uni.upx2px(this.height) |
|||
}, |
|||
tabHeightVal(){ |
|||
return this.tabHeightPx+'px' |
|||
}, |
|||
tabWidthPx(){ |
|||
return uni.upx2px(this.tabWidth) |
|||
}, |
|||
tabWidthVal(){ |
|||
return this.isScroll ? this.tabWidthPx+'px' : '' |
|||
}, |
|||
lineLeft() { |
|||
if (this.isScroll) { |
|||
return this.tabWidthPx * this.value + this.tabWidthPx/2 + 'px' // 需转为px (用rpx的话iOS真机显示有误差) |
|||
} else{ |
|||
return 100/this.tabs.length*(this.value + 1) - 100/(this.tabs.length*2) + '%' |
|||
} |
|||
}, |
|||
topFixed(){ |
|||
return this.fixed ? this.windowTop + uni.upx2px(this.top) + 'px' : 0 |
|||
}, |
|||
topMargin(){ |
|||
return this.fixed ? 0 : this.top + 'rpx' |
|||
} |
|||
}, |
|||
watch: { |
|||
tabs() { |
|||
// this.warpWidth = null; // 重新计算容器宽度 |
|||
this.scrollCenter(); // 水平滚动到中间 |
|||
}, |
|||
value() { |
|||
this.scrollCenter(); // 水平滚动到中间 |
|||
} |
|||
}, |
|||
created() { |
|||
let sys = uni.getSystemInfoSync(); |
|||
this.windowWidth = sys.windowWidth |
|||
this.windowTop = sys.windowTop |
|||
}, |
|||
mounted() { |
|||
this.scrollCenter() // 滚动到当前下标 |
|||
}, |
|||
methods: { |
|||
getTabName(tab){ |
|||
return typeof tab === "object" ? tab[this.nameKey] : tab |
|||
}, |
|||
tabClick(i){ |
|||
console.log(this) |
|||
if(this.value!=i){ |
|||
this.$emit("input",i); |
|||
this.$emit("change",i); |
|||
} |
|||
}, |
|||
async scrollCenter(){ |
|||
if(!this.isScroll) return; |
|||
if(!this.warpWidth){ // tabs容器的宽度 |
|||
let rect = await this.initWarpRect() |
|||
this.warpWidth = rect ? rect.width : this.windowWidth; // 某些情况下取不到宽度,暂时取屏幕宽度 |
|||
} |
|||
let tabLeft = this.tabWidthPx * this.value + this.tabWidthPx/2; // 当前tab中心点到左边的距离 |
|||
let diff = tabLeft - this.warpWidth/2 // 如果超过tabs容器的一半,则滚动差值 |
|||
this.scrollLeft = diff; |
|||
// #ifdef MP-TOUTIAO |
|||
this.scrollTimer && clearTimeout(this.scrollTimer) |
|||
this.scrollTimer = setTimeout(()=>{ // 字节跳动小程序,需延时再次设置scrollLeft,否则tab切换跨度较大时不生效 |
|||
this.scrollLeft = Math.ceil(diff) |
|||
},400) |
|||
// #endif |
|||
}, |
|||
initWarpRect(){ |
|||
return new Promise(resolve=>{ |
|||
setTimeout(()=>{ // 延时确保dom已渲染, 不使用$nextclick |
|||
let query = uni.createSelectorQuery(); |
|||
// #ifndef MP-ALIPAY |
|||
query = query.in(this) // 支付宝小程序不支持in(this),而字节跳动小程序必须写in(this), 否则都取不到值 |
|||
// #endif |
|||
query.select('#'+this.viewId).boundingClientRect(data => { |
|||
resolve(data) |
|||
}).exec(); |
|||
},20) |
|||
}) |
|||
} |
|||
} |
|||
} |
|||
</script> |
|||
|
|||
<style lang="scss"> |
|||
.me-tabs{ |
|||
position: relative; |
|||
font-size: 24rpx; |
|||
background-color: #fff; |
|||
border-bottom: 1rpx solid #eee; |
|||
box-sizing: border-box; |
|||
overflow-y: hidden; |
|||
background-color: #fff; |
|||
&.tabs-fixed{ |
|||
z-index: 990; |
|||
position: fixed; |
|||
left: 0; |
|||
width: 100%; |
|||
} |
|||
|
|||
.tabs-item{ |
|||
position: relative; |
|||
white-space: nowrap; |
|||
padding-bottom: 30rpx; // 撑开高度,再配合me-tabs的overflow-y: hidden,以达到隐藏滚动条的目的 |
|||
box-sizing: border-box; |
|||
.tab-item{ |
|||
position: relative; |
|||
text-align: center; |
|||
box-sizing: border-box; |
|||
line-height: 25rpx; |
|||
padding: 5rpx 15rpx; |
|||
bottom: 10rpx; |
|||
&.active{ |
|||
font-weight: bold; |
|||
color: #2979ff; |
|||
background-color: #E8F5FF; |
|||
border-radius: 20rpx; |
|||
} |
|||
} |
|||
// 选中tab的线 |
|||
.tabs-line{ |
|||
z-index: 1; |
|||
position: inherit; |
|||
bottom: 5px; // 至少与.tabs-item的padding-bottom一致,才能保证在底部边缘 |
|||
width: 50rpx; |
|||
height: 6rpx; |
|||
transform: translateX(-50%); |
|||
border-radius: 4rpx; |
|||
transition: left .3s; |
|||
background: red; |
|||
margin: 0 auto; |
|||
} |
|||
} |
|||
|
|||
// 平分的方式显示item |
|||
.tabs-flex{ |
|||
display: flex; |
|||
.tab-item{ |
|||
flex: 1; |
|||
} |
|||
} |
|||
// 居左显示item,支持水平滑动 |
|||
.tabs-scroll{ |
|||
.tab-item{ |
|||
display: inline-block; |
|||
} |
|||
} |
|||
|
|||
|
|||
} |
|||
</style> |
|||
@ -0,0 +1,179 @@ |
|||
<!-- 视频组件: <me-video src="视频地址" poster="封面图"></me-video> |
|||
video标签在APP端是原生组件, 真机APP端下拉时会渲染不及时, 出现悬浮错位现象; |
|||
me-video组件, 未播放时自动展示image封面, 播放时才显示video, 提高性能; 如果播放中执行下拉,会自动显示封面, 避免视频下拉悬浮错位; |
|||
--> |
|||
<template> |
|||
<view class="me-video" :style="{width:width, height:height}"> |
|||
<!-- 播放的时候才渲染video标签 --> |
|||
<video v-if="showVideo" ref="videoRef" class="video" :class="{'full-play': fullplay&&!autoplay, 'mescroll-dowload': mescrollDownLoad}" :src="src" autoplay :loop="loop" @click="videoClick" x5-playsinline="true" x5-video-player-type="h5" playsinline="true" webkit-playsinline="true" x5-video-player-fullscreen="false"></video> |
|||
<!-- 播放按钮 --> |
|||
<view v-else class="btn-play"> <view class="triangle"></view> </view> |
|||
<!-- 封面 --> |
|||
<image v-if="(!showVideo || mescrollDownLoad) && poster" class="poster" :src="poster" @click="play()" mode="aspectFit"></image> |
|||
</view> |
|||
</template> |
|||
|
|||
<script> |
|||
// me-video component options.
// Bound to a named const (default export unchanged) so the options object
// can be referenced and unit-tested directly.
const meVideoComponent = {
	props: {
		src: String,    // video URL
		poster: String, // cover image URL
		autoplay: { // whether to start playing automatically
			type: Boolean,
			default(){
				return false
			}
		},
		fullplay: { // whether to play fullscreen (default: no)
			type: Boolean,
			default(){
				return false
			}
		},
		loop: { // whether to loop playback
			type: Boolean,
			default(){
				return true // looping avoids the ad Android WeChat shows when playback ends
			}
		},
		width: { // width (with unit: '100%', '300px', '300rpx')
			type: String,
			default: "100%"
		},
		height: { // height (with unit: '100%', '300px', '300rpx')
			type: String,
			default: "225px"
		},
		mescroll: { // mescroll instance; on APP the poster is shown during pull-down to avoid the native video floating/misplacement issue
			type: Object,
			default(){
				return {}
			}
		}
	},
	data() {
		return {
			showVideo: this.autoplay // whether the <video> tag is rendered (otherwise the poster is shown)
		}
	},
	computed: {
		// truthy while pull-down refreshing: hide video, show poster (APP only)
		mescrollDownLoad() {
			// #ifdef APP-PLUS
			return this.mescroll.downLoadType
			// #endif
			// #ifndef APP-PLUS
			return false
			// #endif
		}
	},
	watch: {
		// start playback when autoplay flips to true
		autoplay(val) {
			if(val) this.play()
		}
	},
	methods: {
		// start playback: render the <video> and apply the WeChat autoplay workaround
		play(){
			this.showVideo = true
			this.wxAutoPlay()
		},
		// tapping the video exits fullscreen playback
		videoClick(){
			if(this.fullplay) this.showVideo = false
		},
		// Work around WeChat H5 refusing to autoplay <video>.
		// Fixes vs. original: proper RegExp.test instead of comparing a match
		// array to a string, and the jweixin SDK script is injected only once
		// even though play() may be called repeatedly.
		wxAutoPlay(){
			// #ifdef H5
			if(!/micromessenger/i.test(navigator.userAgent)) return // only needed inside WeChat's browser
			let vm = this
			let doPlay = function(){
				vm.$refs.videoRef && vm.$refs.videoRef.play()
			}
			// iOS: load JWeixin and play inside wx.ready
			let sdkSrc = "https://res.wx.qq.com/open/js/jweixin-1.6.0.js"
			if(!document.querySelector('script[src="' + sdkSrc + '"]')){
				let head = document.getElementsByTagName("head")[0]
				let wxscript = document.createElement("script");
				wxscript.type = "text/javascript"
				wxscript.src = sdkSrc
				head.appendChild(wxscript)
				wxscript.onload = function(){
					window.wx.config({
						debug: false,
						appId: "",
						timestamp: 1,
						nonceStr: "",
						signature: "",
						jsApiList: []
					})
					window.wx.ready(doPlay)
				}
			}
			// Android
			document.addEventListener("WeixinJSBridgeReady", doPlay, false);
			// optimistic attempt in case the bridge is already ready
			setTimeout(()=>{
				doPlay()
			},20)
			// #endif
		}
	}
}
export default meVideoComponent
|||
</script> |
|||
|
|||
<style lang="scss"> |
|||
.me-video{ |
|||
position: relative; |
|||
background-color: #000; |
|||
overflow: hidden; |
|||
// 播放按钮 |
|||
.btn-play{ |
|||
z-index: 9; |
|||
position: absolute; |
|||
left: 50%; |
|||
top: 50%; |
|||
transform: translate(-50%, -50%); |
|||
width: 100rpx; |
|||
height: 100rpx; |
|||
border-radius: 50%; |
|||
background-color: rgba(0,0,0,.75); |
|||
pointer-events: none; |
|||
.triangle{ |
|||
position: absolute; |
|||
left: 50%; |
|||
top: 50%; |
|||
transform: translate(-25%, -50%); |
|||
width: 0; |
|||
height: 0; |
|||
border-top: 16rpx solid transparent; |
|||
border-left: 24rpx solid #fff; |
|||
border-bottom: 16rpx solid transparent; |
|||
} |
|||
} |
|||
// 封面图 |
|||
.poster{ |
|||
width: 100%; |
|||
height: 100%; |
|||
vertical-align: bottom; |
|||
} |
|||
// 视频 (默认非全屏播放) |
|||
.video{ |
|||
z-index: 8; |
|||
position: absolute; |
|||
top: 0; |
|||
left: 0; |
|||
width: 100%; |
|||
height: 100%; |
|||
// 全屏播放 |
|||
&.full-play{ |
|||
z-index: 999; |
|||
position: fixed; |
|||
} |
|||
// 下拉时隐藏视频 |
|||
&.mescroll-dowload{ |
|||
display: none; |
|||
} |
|||
} |
|||
} |
|||
</style> |
|||
@ -0,0 +1,425 @@ |
|||
<template xlang="wxml"> |
|||
<view class="tki-tree"> |
|||
<view class="tki-tree-mask" :class="{'show':showTree}" @tap="_cancel"></view> |
|||
<view class="tki-tree-cnt" :class="{'show':showTree}"> |
|||
<view class="tki-tree-bar"> |
|||
<view class="tki-tree-bar-cancel" :style="{'color':cancelColor}" hover-class="hover-c" @tap="_cancel">取消</view> |
|||
<view class="tki-tree-bar-title" :style="{'color':titleColor}">{{title}}</view> |
|||
<view class="tki-tree-bar-confirm" :style="{'color':confirmColor}" hover-class="hover-c" @tap="_confirm">确定</view> |
|||
</view> |
|||
<view class="tki-tree-view"> |
|||
<scroll-view class="tki-tree-view-sc" :scroll-y="true"> |
|||
<block v-for="(item, index) in treeList" :key="index"> |
|||
<view class="tki-tree-item" :style="[{ |
|||
paddingLeft: item.rank*15 + 'px', |
|||
zIndex: item.rank*-1 +50 |
|||
}]" |
|||
:class="{ |
|||
border: border === true, |
|||
show: item.show, |
|||
last: item.lastRank, |
|||
showchild: item.showChild, |
|||
open: item.open, |
|||
}"> |
|||
<view class="tki-tree-label" @tap.stop="_treeItemTap(item, index)"> |
|||
<image class="tki-tree-icon" :src="item.lastRank ? lastIcon : item.showChild ? currentIcon : defaultIcon"></image> |
|||
{{item.name}} |
|||
</view> |
|||
<view class="tki-tree-check" @tap.stop="_treeItemSelect(item, index)" v-if="selectParent?true:item.lastRank"> |
|||
<view class="tki-tree-check-yes" v-if="item.checked" :class="{'radio':!multiple}" :style="{'border-color':confirmColor}"> |
|||
<view class="tki-tree-check-yes-b" :style="{'background-color':confirmColor}"></view> |
|||
</view> |
|||
<view class="tki-tree-check-no" v-else :class="{'radio':!multiple}" :style="{'border-color':confirmColor}"></view> |
|||
</view> |
|||
</view> |
|||
</block> |
|||
</scroll-view> |
|||
</view> |
|||
</view> |
|||
</view> |
|||
</template> |
|||
|
|||
<script> |
|||
export default { |
|||
name: "tki-tree", |
|||
props: { |
|||
lazy: { |
|||
type: Boolean, |
|||
default: false |
|||
}, |
|||
range: { |
|||
type: Array, |
|||
default: function() { |
|||
return [] |
|||
} |
|||
}, |
|||
idKey: { |
|||
type: String, |
|||
default: 'id' |
|||
}, |
|||
rangeKey: { |
|||
type: String, |
|||
default: 'label' |
|||
}, |
|||
title: { |
|||
type: String, |
|||
default: '' |
|||
}, |
|||
multiple: { // 是否可以多选 |
|||
type: Boolean, |
|||
default: false |
|||
// default: true |
|||
}, |
|||
selectParent: { //是否可以选父级 |
|||
type: Boolean, |
|||
default: false |
|||
}, |
|||
foldAll: { //折叠时关闭所有已经打开的子集,再次打开时需要一级一级打开 |
|||
type: Boolean, |
|||
default: false |
|||
}, |
|||
confirmColor: { // 确定按钮颜色 |
|||
type: String, |
|||
default: '' // #07bb07 |
|||
}, |
|||
cancelColor: { // 取消按钮颜色 |
|||
type: String, |
|||
default: '' // #757575 |
|||
}, |
|||
titleColor: { // 标题颜色 |
|||
type: String, |
|||
default: '' // #757575 |
|||
}, |
|||
currentIcon: { // 展开时候的ic |
|||
type: String, |
|||
default: 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAFEAAABRCAYAAACqj0o2AAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAyJpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuMy1jMDExIDY2LjE0NTY2MSwgMjAxMi8wMi8wNi0xNDo1NjoyNyAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENTNiAoV2luZG93cykiIHhtcE1NOkluc3RhbmNlSUQ9InhtcC5paWQ6MEQ0QTM0MzQ1Q0RBMTFFOUE0MjY4NzI1Njc1RjI1ODIiIHhtcE1NOkRvY3VtZW50SUQ9InhtcC5kaWQ6MEQ0QTM0MzU1Q0RBMTFFOUE0MjY4NzI1Njc1RjI1ODIiPiA8eG1wTU06RGVyaXZlZEZyb20gc3RSZWY6aW5zdGFuY2VJRD0ieG1wLmlpZDowRDRBMzQzMjVDREExMUU5QTQyNjg3MjU2NzVGMjU4MiIgc3RSZWY6ZG9jdW1lbnRJRD0ieG1wLmRpZDowRDRBMzQzMzVDREExMUU5QTQyNjg3MjU2NzVGMjU4MiIvPiA8L3JkZjpEZXNjcmlwdGlvbj4gPC9yZGY6UkRGPiA8L3g6eG1wbWV0YT4gPD94cGFja2V0IGVuZD0iciI/PidwepsAAAK0SURBVHja7JxbTsJAFIYHww7ciStgCeoGvGxAiOsgURegoL5720AXYLiIr0aJviq3Zx3PhIEnKG3ndtr+f3KixrSUj/ZjzjClIqUUiFm2gAAQAREQEUAEREAERAQQAREQAREBREAEREBEEqa67h9RFDWllDv0awWYlqlQHmu1WjMRRMoV1QFttA12y3xRtdNczq8EsE4/f8FumX2q77ROvNXk8UGMEKdUz6tYJHljaZAbuyUH+UR1to5BEohTuqwPCeS4pAA/qY6o/kyHOAMCeRK3owJnj+rH1jjxhqpVsstaebCz6TmnHWyXyY+xHjSBWBY/bvSgadtXBj9u9KCN3rnIfkzkQVsTEEX0Y2IP2oKo/HhMICcFAThUcwVZNGU6FdbX/XURzkbVF4+ybGhjPrFdgP66QdXNurGtSdk6Xdb9nAJ8oDo3OQlsQZzkdPw41ONBo6vI5scDefRjZg+6gpg3Pxp50CXEvPjR2IOuIXL3oxUPuobI3Y9WPOgDIlc/WvOgL4iL/vqFCcD7LH0xB4hj7cfQ/fWH9qCT+FhG0tN+DBk1PzjOM0SVllixcsBT1AvYc/kAPhc0hRg/3uvxoCgKRN9+dOrBUBB9+9GpB0NC9OVH5x4MDdG1H714kANEV3705kEOEBf9dcPi/lQnsuvLg1wgSu3Ha0v7Uh4MMgUXeuG71H407a+VBy9CPQkOdw+MtB+nGbd/D+FBbhBNxo9SjwcngJjNj0E9yBFiFj8G9SBXiGn8GNyDnCEm8SMLD3KHGOdHNh7kDjHO
j2w8mAeIi/5arX+c6b/fxHz9oADEdGdjR/fXCw/OOB5oVfCOgnepz8IB14PMw03jCmTE+QBx5z0gAmKSqK9OUF+hcAeIhu/QYr4Qie8rjW83hhMBERARQAREQAREBBABERCLnH8BBgA+TQI7U4t53AAAAABJRU5ErkJggg==' |
|||
}, |
|||
defaultIcon: { // 折叠时候的ic |
|||
type: String, |
|||
default: 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAFEAAABRCAYAAACqj0o2AAACE0lEQVR4Xu3c200DMRCF4XEltJAOkEugA+ggpUAHoQMqiFMCdEAJUMEiS4mEELlIO7bPOeN9i6K1rG/952myyea1WiCtXmEuYBPR4RBMxInoIOCwhOtJLKVszWyXc/5y2BvNEq6I+/3+kFK6M7OHnPM7jcLKjbZAvD/uaZtzflm5P4rbWyJWgDcze1LPuzVihfxUz7sH4ilJ2bx7Isrm3RtRMu8RiHJ5j0SUyXs0okTeCIj0eSMh0uaNhkiZNyIiXd7IiDR5oyNS5M2ACJ83EyJs3myIkHkzIsLlzYwIkzc7IkTeCojD81ZCHJa3GuKQvBURu+etjNgtb3XELnlHQGyedyTEZnlHQ2ySd0RE97wjI7rlHR3RJe+JeIrbLOecD6ePpZQ6W1kn2epo4MUrPOKyLN8ppYq1+y1VStncOjIdGnFZlo+U0uOtWOeOY2TE12Ouq//pEA7xXL7XfvcufR8K0Svfv6CREN3yDYfYIt9QiK3yjYTYLF95xB75SiP2ylcZsVu+cogj8pVCHJWvEuKwfOkREfKlRkTJlxkRJl86RMR8qRBR82VChM0XHpEhX2hElnyREWnyhUNkzBcKkTVfJETafIcjKuQ7FFEl35GIMvl2R1TMtyuiar49EWXzbY5oZpv/hibXTF2h3+s60FRKeT6+3TjMS3nrA3ZFRD8xrfY3ER1kJ+JEdBBwWGKeRAfEH1wS5WFZSDB/AAAAAElFTkSuQmCC' |
|||
}, |
|||
lastIcon: { // 没有子集的ic |
|||
type: String, |
|||
default: '' |
|||
}, |
|||
border: { // 是否有分割线 |
|||
type: Boolean, |
|||
default: false |
|||
}, |
|||
}, |
|||
data() { |
|||
return { |
|||
showTree: false, |
|||
treeList: [], |
|||
selectIndex: -1, |
|||
returnedItem: [] ,//定义一个空数组 |
|||
pids: [], |
|||
ancestorsIds: [], |
|||
childNums: [], |
|||
} |
|||
}, |
|||
computed: {}, |
|||
methods: { |
|||
_show() { |
|||
this.showTree = true |
|||
}, |
|||
_hide() { |
|||
this.showTree = false |
|||
}, |
|||
_cancel() { |
|||
this._hide() |
|||
this.$emit("cancel", ''); |
|||
}, |
|||
_confirm() { |
|||
// 处理所选数据 |
|||
let rt = [], |
|||
obj = {}; |
|||
this.treeList.forEach((v, i) => { |
|||
if (this.treeList[i].checked) { |
|||
rt.push(this.treeList[i].id) |
|||
} |
|||
}) |
|||
this._hide() |
|||
this.$emit("confirm", rt); |
|||
}, |
|||
//扁平化树结构 |
|||
_renderTreeList(list = [], rank = 0, parentId = [], parents = []) { |
|||
list.forEach(item => { |
|||
this.treeList.push({ |
|||
id: item[this.idKey], |
|||
name: item[this.rangeKey], |
|||
source: item, |
|||
parentId, // 父级id数组 |
|||
parents, // 父级id数组 |
|||
rank, // 层级 |
|||
showChild: false, //子级是否显示 |
|||
open: false, //是否打开 |
|||
show: rank === 0, // 自身是否显示 |
|||
hideArr: [], |
|||
orChecked: item.checked ? item.checked : false, |
|||
checked: item.checked ? item.checked : false, |
|||
childNum: 0 |
|||
}) |
|||
|
|||
if (Array.isArray(item.children) && item.children.length > 0) { |
|||
// console.log(item) |
|||
let parentid = [...parentId], |
|||
parentArr = [...parents]; |
|||
delete parentArr.children |
|||
parentid.push(item[this.idKey]); |
|||
parentArr.push({ |
|||
[this.idKey]: item[this.idKey], |
|||
[this.rangeKey]: item[this.rangeKey] |
|||
}) |
|||
// lazy |
|||
if(!this.lazy) { |
|||
this._renderTreeList(item.children, rank + 1, parentid, parentArr) |
|||
} |
|||
} else { |
|||
this.treeList[this.treeList.length - 1].lastRank = true; |
|||
} |
|||
}) |
|||
}, |
|||
// 处理默认选择 |
|||
_defaultSelect() { |
|||
this.treeList.forEach((v, i) => { |
|||
if (v.checked) { |
|||
this.treeList.forEach((v2, i2) => { |
|||
if (v.parentId.toString().indexOf(v2.parentId.toString()) >= 0) { |
|||
v2.show = true |
|||
if (v.parentId.includes(v2.id)) { |
|||
v2.showChild = true; |
|||
v2.open = true; |
|||
} |
|||
} |
|||
}) |
|||
} |
|||
}) |
|||
}, |
|||
getOwn(id, arr){ |
|||
//利用foreach循环遍历 |
|||
arr.forEach((item) => { |
|||
//判断递归结束条件 |
|||
if(item[this.idKey] == id) |
|||
{ |
|||
// 存储数据到空数组 |
|||
this.returnedItem = item |
|||
} |
|||
else if(item.children != null) //判断chlidren是否有数据 |
|||
{ |
|||
//递归调用 |
|||
this.getOwn(id, item.children); |
|||
} |
|||
}) |
|||
return this.returnedItem |
|||
}, |
|||
setShow (id, arr, isShow) { |
|||
arr.forEach((item, index) => { |
|||
if(item.parentId.includes(id)) { |
|||
this.treeList[index].showChild = isShow |
|||
this.treeList[index].show = isShow |
|||
} else if (item.children !== undefined) { |
|||
this.setShow(id, item.children, isShow) |
|||
} |
|||
}) |
|||
}, |
|||
// 点击 |
|||
_treeItemTap(item, index) { |
|||
// console.log(item) |
|||
if (item.lastRank === true) { |
|||
//点击最后一级时触发事件 |
|||
this.treeList[index].checked = !this.treeList[index].checked |
|||
this._fixMultiple(index) |
|||
return; |
|||
} |
|||
let id = item.id; |
|||
item.showChild = !item.showChild; |
|||
// qingqian |
|||
if(item.showChild) { |
|||
const range = this.range |
|||
const parentIdArr = item.parentId |
|||
// 找到当前元素 |
|||
const own = this.getOwn(id, range) |
|||
const checkedChildren = own.children |
|||
// 子元素插入的索引位置 |
|||
const nextIndex = this.treeList.findIndex(itemT => itemT.id === item.id) |
|||
console.log(checkedChildren); |
|||
if(checkedChildren === undefined || checkedChildren.length < 1) { |
|||
return |
|||
} |
|||
// 子节点数量 |
|||
this.treeList[index].childNum = checkedChildren.length |
|||
const newRank = item.rank + 1 |
|||
checkedChildren.forEach(itemC => { |
|||
const childObj = { |
|||
id: itemC[this.idKey], |
|||
name: itemC[this.rangeKey], |
|||
source: {}, |
|||
parentId: [item.id], // 父级id数组 |
|||
parents: [item], // 父级id数组 |
|||
rank: newRank, // 层级 |
|||
showChild: false, //子级是否显示 |
|||
open: false, //是否打开 |
|||
show: 1, // 自身是否显示 |
|||
hideArr: [], |
|||
orChecked: this.treeList[index].checked, |
|||
checked: this.treeList[index].checked, |
|||
} |
|||
if(!this.treeList.some(itemT => itemT.id === itemC[this.idKey])) { |
|||
this.treeList.splice(nextIndex+1,0,childObj) |
|||
} |
|||
}) |
|||
} |
|||
// 展开/隐藏子级/孙级 |
|||
let list = this.treeList |
|||
item.open = item.showChild ? true : !item.open; |
|||
list.forEach((childItem, i) => { |
|||
if (item.showChild === false) { |
|||
//隐藏所有子级 |
|||
if (!childItem.parentId.includes(id)) { |
|||
return; |
|||
} |
|||
//TODO: 修改 |
|||
if (!this.foldAll) { |
|||
if (childItem.lastRank !== true && !childItem.open) { |
|||
childItem.showChild = false; |
|||
this.setShow(childItem.id, this.treeList, false) |
|||
} |
|||
// 为隐藏的内容添加一个标记 |
|||
if (childItem.show) { |
|||
childItem.hideArr[item.rank] = id |
|||
} |
|||
} else { |
|||
if (childItem.lastRank !== true) { |
|||
childItem.showChild = false; |
|||
// 继续隐藏子级的的子级 |
|||
this.setShow(childItem.id, this.treeList, false) |
|||
} |
|||
} |
|||
if(childItem.children !== undefined) { |
|||
childItem.children.forEach((childItem1, i1) => { |
|||
if(!childItem1.parentId.includes(childItem.id)) { |
|||
return |
|||
} |
|||
childItem.children[i1].showChild = false |
|||
childItem.children[i1].show = false |
|||
}) |
|||
} |
|||
childItem.show = false; |
|||
} else { |
|||
// 打开子集 |
|||
if (childItem.parentId[childItem.parentId.length - 1] === id) { |
|||
childItem.show = true; |
|||
} |
|||
// 打开被隐藏的子集 |
|||
if (childItem.parentId.includes(id) && !this.foldAll) { |
|||
// console.log(childItem.hideArr) |
|||
if (childItem.hideArr[item.rank] === id) { |
|||
childItem.show = true; |
|||
if (childItem.open && childItem.showChild) { |
|||
childItem.showChild = true |
|||
} else { |
|||
childItem.showChild = false |
|||
} |
|||
childItem.hideArr[item.rank] = null |
|||
} |
|||
} |
|||
} |
|||
}) |
|||
}, |
|||
// 通过父id处理子级 |
|||
syncChecked (trees, pid, checked) { |
|||
trees.forEach((item,index) => { |
|||
if(item.parentId.includes(pid)) { |
|||
this.treeList[index].checked = checked |
|||
this.syncChecked(trees, item.id, checked) |
|||
} else if(item.children !== undefined) { |
|||
this.syncChecked(item.children, pid, checked) |
|||
} |
|||
}) |
|||
}, |
|||
// 获取父级往上所有层级的id 并同步状态 |
|||
setAncestors (pids, checked) { |
|||
this.treeList.forEach((item,index) => { |
|||
if(pids.includes(item.id)) { |
|||
if(checked && this.childNums[item.id] !== undefined && item.childNum === this.childNums[item.id]) { |
|||
// 子级全部选中, 父级才选中 |
|||
this.treeList[index].checked = true |
|||
} else { |
|||
this.treeList[index].checked = false |
|||
} |
|||
this.setAncestors(item.parentId, checked) |
|||
} |
|||
}) |
|||
}, |
|||
_treeItemSelect(item, index) { |
|||
this.treeList[index].checked = !this.treeList[index].checked |
|||
// 选父级, 子级自动全选 |
|||
this.syncChecked(this.treeList, item.id, this.treeList[index].checked) |
|||
|
|||
if(item.rank > 0) { |
|||
item.parentId.forEach((pid, indexP) => { |
|||
const parent = this.treeList.filter(i => i.id === pid) |
|||
const childNum = parent.length > 0 ? parent[0].childNum : 0 |
|||
if(this.childNums[pid] === undefined) { |
|||
this.childNums[pid] = 1 |
|||
} else if(this.childNums[pid] < childNum) { |
|||
this.childNums[pid]++ |
|||
} |
|||
}) |
|||
//子级选择/选满/取消选择, 父级往上同步状态 |
|||
this.setAncestors(item.parentId, this.treeList[index].checked) |
|||
} |
|||
this._fixMultiple(index) |
|||
}, |
|||
// 处理单选多选 |
|||
_fixMultiple(index) { |
|||
if (!this.multiple) { |
|||
// 如果是单选 |
|||
this.treeList.forEach((v, i) => { |
|||
if (i != index) { |
|||
this.treeList[i].checked = false |
|||
} else { |
|||
this.treeList[i].checked = true |
|||
} |
|||
}) |
|||
} |
|||
}, |
|||
// 重置数据 |
|||
_reTreeList() { |
|||
this.treeList.forEach((v, i) => { |
|||
this.treeList[i].checked = v.orChecked |
|||
}) |
|||
}, |
|||
_initTree(range = this.range){ |
|||
this.treeList = []; |
|||
this._renderTreeList(range); |
|||
this.$nextTick(() => { |
|||
this._defaultSelect(range) |
|||
}) |
|||
} |
|||
}, |
|||
watch: { |
|||
range(list) { |
|||
this._initTree(list); |
|||
}, |
|||
multiple() { |
|||
if (this.range.length) { |
|||
this._reTreeList(); |
|||
} |
|||
}, |
|||
selectParent() { |
|||
if (this.range.length) { |
|||
this._reTreeList(); |
|||
} |
|||
}, |
|||
}, |
|||
mounted() { |
|||
this._initTree(); |
|||
} |
|||
} |
|||
</script> |
|||
|
|||
<style scoped> |
|||
@import "./style.css"; |
|||
</style> |
|||
@ -0,0 +1,151 @@ |
|||
.tki-tree-mask { |
|||
position: fixed; |
|||
top: 0rpx; |
|||
right: 0rpx; |
|||
bottom: 0rpx; |
|||
left: 0rpx; |
|||
z-index: 9998; |
|||
background-color: rgba(0, 0, 0, 0.6); |
|||
opacity: 0; |
|||
transition: all 0.3s ease; |
|||
visibility: hidden; |
|||
} |
|||
.tki-tree-mask.show { |
|||
visibility: visible; |
|||
opacity: 1; |
|||
} |
|||
.tki-tree-cnt { |
|||
position: fixed; |
|||
top: 0rpx; |
|||
right: 0rpx; |
|||
bottom: 0rpx; |
|||
left: 0rpx; |
|||
z-index: 9999; |
|||
top: 160rpx; |
|||
transition: all 0.3s ease; |
|||
transform: translateY(100%); |
|||
} |
|||
.tki-tree-cnt.show { |
|||
transform: translateY(0); |
|||
} |
|||
.tki-tree-bar { |
|||
background-color: #fff; |
|||
height: 72rpx; |
|||
padding-left: 20rpx; |
|||
padding-right: 20rpx; |
|||
display: flex; |
|||
justify-content: space-between; |
|||
align-items: center; |
|||
box-sizing: border-box; |
|||
border-bottom-width: 1rpx !important; |
|||
border-bottom-style: solid; |
|||
border-bottom-color: #f5f5f5; |
|||
font-size: 32rpx; |
|||
color: #757575; |
|||
line-height: 1; |
|||
} |
|||
.tki-tree-bar-confirm { |
|||
color: #07bb07; |
|||
} |
|||
.tki-tree-view { |
|||
position: absolute; |
|||
top: 0rpx; |
|||
right: 0rpx; |
|||
bottom: 0rpx; |
|||
left: 0rpx; |
|||
top: 72rpx; |
|||
background-color: #fff; |
|||
padding-top: 20rpx; |
|||
padding-right: 20rpx; |
|||
padding-bottom: 20rpx; |
|||
padding-left: 20rpx; |
|||
} |
|||
.tki-tree-view-sc { |
|||
height: 100%; |
|||
overflow: hidden; |
|||
} |
|||
.tki-tree-item { |
|||
display: flex; |
|||
justify-content: space-between; |
|||
align-items: center; |
|||
font-size: 26rpx; |
|||
color: #757575; |
|||
line-height: 1; |
|||
height: 0; |
|||
opacity: 0; |
|||
transition: 0.2s; |
|||
position: relative; |
|||
overflow: hidden; |
|||
} |
|||
.tki-tree-item.show { |
|||
height: 80rpx; |
|||
opacity: 1; |
|||
} |
|||
.tki-tree-item.showchild:before { |
|||
transform: rotate(90deg); |
|||
} |
|||
.tki-tree-item.last:before { |
|||
opacity: 0; |
|||
} |
|||
.tki-tree-icon { |
|||
width: 26rpx; |
|||
height: 26rpx; |
|||
margin-right: 8rpx; |
|||
} |
|||
.tki-tree-label { |
|||
flex: 1; |
|||
display: flex; |
|||
align-items: center; |
|||
height: 100%; |
|||
line-height: 1.2; |
|||
} |
|||
.tki-tree-check { |
|||
width: 40px; |
|||
height: 40px; |
|||
display: flex; |
|||
justify-content: center; |
|||
align-items: center; |
|||
} |
|||
.tki-tree-check-yes, |
|||
.tki-tree-check-no { |
|||
width: 20px; |
|||
height: 20px; |
|||
border-top-left-radius: 20%; |
|||
border-top-right-radius: 20%; |
|||
border-bottom-right-radius: 20%; |
|||
border-bottom-left-radius: 20%; |
|||
border-top-width: 1rpx; |
|||
border-left-width: 1rpx; |
|||
border-bottom-width: 1rpx; |
|||
border-right-width: 1rpx; |
|||
border-style: solid; |
|||
border-color: #07bb07; |
|||
display: flex; |
|||
justify-content: center; |
|||
align-items: center; |
|||
box-sizing: border-box; |
|||
} |
|||
.tki-tree-check-yes-b { |
|||
width: 12px; |
|||
height: 12px; |
|||
border-top-left-radius: 20%; |
|||
border-top-right-radius: 20%; |
|||
border-bottom-right-radius: 20%; |
|||
border-bottom-left-radius: 20%; |
|||
background-color: #07bb07; |
|||
} |
|||
.tki-tree-check .radio { |
|||
border-top-left-radius: 50%; |
|||
border-top-right-radius: 50%; |
|||
border-bottom-right-radius: 50%; |
|||
border-bottom-left-radius: 50%; |
|||
} |
|||
.tki-tree-check .radio .tki-tree-check-yes-b { |
|||
border-top-left-radius: 50%; |
|||
border-top-right-radius: 50%; |
|||
border-bottom-right-radius: 50%; |
|||
border-bottom-left-radius: 50%; |
|||
} |
|||
.hover-c { |
|||
opacity: 0.6; |
|||
} |
|||
|
After Width: | Height: | Size: 259 KiB |
@ -0,0 +1,21 @@ |
|||
<!DOCTYPE html> |
|||
<html lang="en"> |
|||
<head> |
|||
<meta charset="UTF-8" /> |
|||
<script> |
|||
var coverSupport = 'CSS' in window && typeof CSS.supports === 'function' && (CSS.supports('top: env(a)') || |
|||
CSS.supports('top: constant(a)')) |
|||
document.write( |
|||
'<meta name="viewport" content="width=device-width, user-scalable=no, initial-scale=1.0, maximum-scale=1.0, minimum-scale=1.0' + |
|||
(coverSupport ? ', viewport-fit=cover' : '') + '" />') |
|||
</script> |
|||
<title></title> |
|||
<!--preload-links--> |
|||
<!--app-context--> |
|||
<link rel="icon" type="image/x-icon" sizes="32x32" href="favicon.ico"> |
|||
</head> |
|||
<body> |
|||
<div id="app"><!--app-html--></div> |
|||
<script type="module" src="/main.js"></script> |
|||
</body> |
|||
</html> |
|||
@ -0,0 +1,48 @@ |
|||
// App entry point: registers global helpers on Vue.prototype, installs UI
// plugins (uView, s-ui), then bootstraps the app per Vue version via
// uni-app conditional compilation.
import App from './App'
import { commonMethod } from '@/common/public.js'
import { echartsMethod } from '@/common/echarts.js'
import * as timetostring from '@/common/timetostring.js'
import * as echarts from 'echarts';
// import { httpApi } from '@/common/postorget.js'

// NOTE(review): `Vue` is used here but only imported inside the `#ifndef VUE3`
// block further down. ESM import hoisting makes this work in Vue2 builds, but a
// VUE3 build would strip that import and crash on the next line — confirm that
// manifest vueVersion stays "2", or move this setup inside the #ifndef block.
Vue.prototype.$echarts = echarts
Vue.prototype.$commonMethod = commonMethod
Vue.prototype.$echartsMethod = echartsMethod
Vue.prototype.$timetostring = timetostring
// Vue.prototype.$httpApi = httpApi

import uView from "uview-ui";
Vue.use(uView);

import sUi from '@/uni_modules/s-ui';
Vue.use(sUi, {
	// mix a global onEmitPage hook into every page; trigger with this.$emitPage(pagePath, ...args)
	useEmitPageMixin: true,
	// replace uni's default showLoading/hideLoading
	replaceUniLoading: true,
	// replace uni's default showToast/hideToast
	replaceUniToast: true,
	// replace uni's default showModal
	replaceUniModal: true,
	// home page path
	homePath: '/pages/index/index',
});

// #ifndef VUE3
import Vue from 'vue'
Vue.config.productionTip = false
App.mpType = 'app'
const app = new Vue({
	...App
})
app.$mount()
// #endif

// #ifdef VUE3
import { createSSRApp } from 'vue'
export function createApp() {
	const app = createSSRApp(App)
	return {
		app
	}
}
// #endif
|||
@ -0,0 +1,77 @@ |
|||
{ |
|||
"name" : "kpi_sub_vue2", |
|||
"appid" : "__UNI__E74E3F4", |
|||
"description" : "", |
|||
"versionName" : "1.0.0", |
|||
"versionCode" : "100", |
|||
"transformPx" : false, |
|||
/* 5+App特有相关 */ |
|||
"app-plus" : { |
|||
"usingComponents" : true, |
|||
"nvueStyleCompiler" : "uni-app", |
|||
"compilerVersion" : 3, |
|||
"splashscreen" : { |
|||
"alwaysShowBeforeRender" : true, |
|||
"waiting" : true, |
|||
"autoclose" : true, |
|||
"delay" : 0 |
|||
}, |
|||
/* 模块配置 */ |
|||
"modules" : {}, |
|||
/* 应用发布信息 */ |
|||
"distribute" : { |
|||
/* android打包配置 */ |
|||
"android" : { |
|||
"permissions" : [ |
|||
"<uses-permission android:name=\"android.permission.CHANGE_NETWORK_STATE\"/>", |
|||
"<uses-permission android:name=\"android.permission.MOUNT_UNMOUNT_FILESYSTEMS\"/>", |
|||
"<uses-permission android:name=\"android.permission.VIBRATE\"/>", |
|||
"<uses-permission android:name=\"android.permission.READ_LOGS\"/>", |
|||
"<uses-permission android:name=\"android.permission.ACCESS_WIFI_STATE\"/>", |
|||
"<uses-feature android:name=\"android.hardware.camera.autofocus\"/>", |
|||
"<uses-permission android:name=\"android.permission.ACCESS_NETWORK_STATE\"/>", |
|||
"<uses-permission android:name=\"android.permission.CAMERA\"/>", |
|||
"<uses-permission android:name=\"android.permission.GET_ACCOUNTS\"/>", |
|||
"<uses-permission android:name=\"android.permission.READ_PHONE_STATE\"/>", |
|||
"<uses-permission android:name=\"android.permission.CHANGE_WIFI_STATE\"/>", |
|||
"<uses-permission android:name=\"android.permission.WAKE_LOCK\"/>", |
|||
"<uses-permission android:name=\"android.permission.FLASHLIGHT\"/>", |
|||
"<uses-feature android:name=\"android.hardware.camera\"/>", |
|||
"<uses-permission android:name=\"android.permission.WRITE_SETTINGS\"/>" |
|||
] |
|||
}, |
|||
/* ios打包配置 */ |
|||
"ios" : {}, |
|||
/* SDK配置 */ |
|||
"sdkConfigs" : {} |
|||
} |
|||
}, |
|||
/* 快应用特有相关 */ |
|||
"quickapp" : {}, |
|||
/* 小程序特有相关 */ |
|||
"mp-weixin" : { |
|||
"appid" : "", |
|||
"setting" : { |
|||
"urlCheck" : false |
|||
}, |
|||
"usingComponents" : true |
|||
}, |
|||
"mp-alipay" : { |
|||
"usingComponents" : true |
|||
}, |
|||
"mp-baidu" : { |
|||
"usingComponents" : true |
|||
}, |
|||
"mp-toutiao" : { |
|||
"usingComponents" : true |
|||
}, |
|||
"uniStatistics" : { |
|||
"enable" : false |
|||
}, |
|||
"vueVersion" : "2", |
|||
"h5" : { |
|||
"devServer" : { |
|||
"port" : 2589 |
|||
} |
|||
} |
|||
} |
|||
@ -0,0 +1,12 @@ |
|||
#!/bin/sh |
|||
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") |
|||
|
|||
case `uname` in |
|||
*CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;; |
|||
esac |
|||
|
|||
if [ -x "$basedir/node" ]; then |
|||
exec "$basedir/node" "$basedir/../acorn/bin/acorn" "$@" |
|||
else |
|||
exec node "$basedir/../acorn/bin/acorn" "$@" |
|||
fi |
|||
@ -0,0 +1,17 @@ |
|||
@ECHO off |
|||
GOTO start |
|||
:find_dp0 |
|||
SET dp0=%~dp0 |
|||
EXIT /b |
|||
:start |
|||
SETLOCAL |
|||
CALL :find_dp0 |
|||
|
|||
IF EXIST "%dp0%\node.exe" ( |
|||
SET "_prog=%dp0%\node.exe" |
|||
) ELSE ( |
|||
SET "_prog=node" |
|||
SET PATHEXT=%PATHEXT:;.JS;=;% |
|||
) |
|||
|
|||
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\acorn\bin\acorn" %* |
|||
@ -0,0 +1,28 @@ |
|||
#!/usr/bin/env pwsh |
|||
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent |
|||
|
|||
$exe="" |
|||
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) { |
|||
# Fix case when both the Windows and Linux builds of Node |
|||
# are installed in the same directory |
|||
$exe=".exe" |
|||
} |
|||
$ret=0 |
|||
if (Test-Path "$basedir/node$exe") { |
|||
# Support pipeline input |
|||
if ($MyInvocation.ExpectingInput) { |
|||
$input | & "$basedir/node$exe" "$basedir/../acorn/bin/acorn" $args |
|||
} else { |
|||
& "$basedir/node$exe" "$basedir/../acorn/bin/acorn" $args |
|||
} |
|||
$ret=$LASTEXITCODE |
|||
} else { |
|||
# Support pipeline input |
|||
if ($MyInvocation.ExpectingInput) { |
|||
$input | & "node$exe" "$basedir/../acorn/bin/acorn" $args |
|||
} else { |
|||
& "node$exe" "$basedir/../acorn/bin/acorn" $args |
|||
} |
|||
$ret=$LASTEXITCODE |
|||
} |
|||
exit $ret |
|||
@ -0,0 +1,12 @@ |
|||
#!/bin/sh |
|||
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") |
|||
|
|||
case `uname` in |
|||
*CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;; |
|||
esac |
|||
|
|||
if [ -x "$basedir/node" ]; then |
|||
exec "$basedir/node" "$basedir/../browserslist/cli.js" "$@" |
|||
else |
|||
exec node "$basedir/../browserslist/cli.js" "$@" |
|||
fi |
|||
@ -0,0 +1,12 @@ |
|||
#!/bin/sh |
|||
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") |
|||
|
|||
case `uname` in |
|||
*CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;; |
|||
esac |
|||
|
|||
if [ -x "$basedir/node" ]; then |
|||
exec "$basedir/node" "$basedir/../update-browserslist-db/cli.js" "$@" |
|||
else |
|||
exec node "$basedir/../update-browserslist-db/cli.js" "$@" |
|||
fi |
|||
@ -0,0 +1,17 @@ |
|||
@ECHO off |
|||
GOTO start |
|||
:find_dp0 |
|||
SET dp0=%~dp0 |
|||
EXIT /b |
|||
:start |
|||
SETLOCAL |
|||
CALL :find_dp0 |
|||
|
|||
IF EXIST "%dp0%\node.exe" ( |
|||
SET "_prog=%dp0%\node.exe" |
|||
) ELSE ( |
|||
SET "_prog=node" |
|||
SET PATHEXT=%PATHEXT:;.JS;=;% |
|||
) |
|||
|
|||
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\update-browserslist-db\cli.js" %* |
|||
@ -0,0 +1,28 @@ |
|||
#!/usr/bin/env pwsh |
|||
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent |
|||
|
|||
$exe="" |
|||
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) { |
|||
# Fix case when both the Windows and Linux builds of Node |
|||
# are installed in the same directory |
|||
$exe=".exe" |
|||
} |
|||
$ret=0 |
|||
if (Test-Path "$basedir/node$exe") { |
|||
# Support pipeline input |
|||
if ($MyInvocation.ExpectingInput) { |
|||
$input | & "$basedir/node$exe" "$basedir/../update-browserslist-db/cli.js" $args |
|||
} else { |
|||
& "$basedir/node$exe" "$basedir/../update-browserslist-db/cli.js" $args |
|||
} |
|||
$ret=$LASTEXITCODE |
|||
} else { |
|||
# Support pipeline input |
|||
if ($MyInvocation.ExpectingInput) { |
|||
$input | & "node$exe" "$basedir/../update-browserslist-db/cli.js" $args |
|||
} else { |
|||
& "node$exe" "$basedir/../update-browserslist-db/cli.js" $args |
|||
} |
|||
$ret=$LASTEXITCODE |
|||
} |
|||
exit $ret |
|||
@ -0,0 +1,17 @@ |
|||
@ECHO off |
|||
GOTO start |
|||
:find_dp0 |
|||
SET dp0=%~dp0 |
|||
EXIT /b |
|||
:start |
|||
SETLOCAL |
|||
CALL :find_dp0 |
|||
|
|||
IF EXIST "%dp0%\node.exe" ( |
|||
SET "_prog=%dp0%\node.exe" |
|||
) ELSE ( |
|||
SET "_prog=node" |
|||
SET PATHEXT=%PATHEXT:;.JS;=;% |
|||
) |
|||
|
|||
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\browserslist\cli.js" %* |
|||
@ -0,0 +1,28 @@ |
|||
#!/usr/bin/env pwsh |
|||
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent |
|||
|
|||
$exe="" |
|||
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) { |
|||
# Fix case when both the Windows and Linux builds of Node |
|||
# are installed in the same directory |
|||
$exe=".exe" |
|||
} |
|||
$ret=0 |
|||
if (Test-Path "$basedir/node$exe") { |
|||
# Support pipeline input |
|||
if ($MyInvocation.ExpectingInput) { |
|||
$input | & "$basedir/node$exe" "$basedir/../browserslist/cli.js" $args |
|||
} else { |
|||
& "$basedir/node$exe" "$basedir/../browserslist/cli.js" $args |
|||
} |
|||
$ret=$LASTEXITCODE |
|||
} else { |
|||
# Support pipeline input |
|||
if ($MyInvocation.ExpectingInput) { |
|||
$input | & "node$exe" "$basedir/../browserslist/cli.js" $args |
|||
} else { |
|||
& "node$exe" "$basedir/../browserslist/cli.js" $args |
|||
} |
|||
$ret=$LASTEXITCODE |
|||
} |
|||
exit $ret |
|||
@ -0,0 +1,12 @@ |
|||
#!/bin/sh |
|||
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") |
|||
|
|||
case `uname` in |
|||
*CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;; |
|||
esac |
|||
|
|||
if [ -x "$basedir/node" ]; then |
|||
exec "$basedir/node" "$basedir/../json5/lib/cli.js" "$@" |
|||
else |
|||
exec node "$basedir/../json5/lib/cli.js" "$@" |
|||
fi |
|||
@ -0,0 +1,17 @@ |
|||
@ECHO off |
|||
GOTO start |
|||
:find_dp0 |
|||
SET dp0=%~dp0 |
|||
EXIT /b |
|||
:start |
|||
SETLOCAL |
|||
CALL :find_dp0 |
|||
|
|||
IF EXIST "%dp0%\node.exe" ( |
|||
SET "_prog=%dp0%\node.exe" |
|||
) ELSE ( |
|||
SET "_prog=node" |
|||
SET PATHEXT=%PATHEXT:;.JS;=;% |
|||
) |
|||
|
|||
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\json5\lib\cli.js" %* |
|||
@ -0,0 +1,28 @@ |
|||
#!/usr/bin/env pwsh |
|||
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent |
|||
|
|||
$exe="" |
|||
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) { |
|||
# Fix case when both the Windows and Linux builds of Node |
|||
# are installed in the same directory |
|||
$exe=".exe" |
|||
} |
|||
$ret=0 |
|||
if (Test-Path "$basedir/node$exe") { |
|||
# Support pipeline input |
|||
if ($MyInvocation.ExpectingInput) { |
|||
$input | & "$basedir/node$exe" "$basedir/../json5/lib/cli.js" $args |
|||
} else { |
|||
& "$basedir/node$exe" "$basedir/../json5/lib/cli.js" $args |
|||
} |
|||
$ret=$LASTEXITCODE |
|||
} else { |
|||
# Support pipeline input |
|||
if ($MyInvocation.ExpectingInput) { |
|||
$input | & "node$exe" "$basedir/../json5/lib/cli.js" $args |
|||
} else { |
|||
& "node$exe" "$basedir/../json5/lib/cli.js" $args |
|||
} |
|||
$ret=$LASTEXITCODE |
|||
} |
|||
exit $ret |
|||
@ -0,0 +1,12 @@ |
|||
#!/bin/sh |
|||
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") |
|||
|
|||
case `uname` in |
|||
*CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;; |
|||
esac |
|||
|
|||
if [ -x "$basedir/node" ]; then |
|||
exec "$basedir/node" "$basedir/../sass/sass.js" "$@" |
|||
else |
|||
exec node "$basedir/../sass/sass.js" "$@" |
|||
fi |
|||
@ -0,0 +1,17 @@ |
|||
@ECHO off |
|||
GOTO start |
|||
:find_dp0 |
|||
SET dp0=%~dp0 |
|||
EXIT /b |
|||
:start |
|||
SETLOCAL |
|||
CALL :find_dp0 |
|||
|
|||
IF EXIST "%dp0%\node.exe" ( |
|||
SET "_prog=%dp0%\node.exe" |
|||
) ELSE ( |
|||
SET "_prog=node" |
|||
SET PATHEXT=%PATHEXT:;.JS;=;% |
|||
) |
|||
|
|||
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\sass\sass.js" %* |
|||
@ -0,0 +1,28 @@ |
|||
#!/usr/bin/env pwsh |
|||
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent |
|||
|
|||
$exe="" |
|||
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) { |
|||
# Fix case when both the Windows and Linux builds of Node |
|||
# are installed in the same directory |
|||
$exe=".exe" |
|||
} |
|||
$ret=0 |
|||
if (Test-Path "$basedir/node$exe") { |
|||
# Support pipeline input |
|||
if ($MyInvocation.ExpectingInput) { |
|||
$input | & "$basedir/node$exe" "$basedir/../sass/sass.js" $args |
|||
} else { |
|||
& "$basedir/node$exe" "$basedir/../sass/sass.js" $args |
|||
} |
|||
$ret=$LASTEXITCODE |
|||
} else { |
|||
# Support pipeline input |
|||
if ($MyInvocation.ExpectingInput) { |
|||
$input | & "node$exe" "$basedir/../sass/sass.js" $args |
|||
} else { |
|||
& "node$exe" "$basedir/../sass/sass.js" $args |
|||
} |
|||
$ret=$LASTEXITCODE |
|||
} |
|||
exit $ret |
|||
@ -0,0 +1,12 @@ |
|||
#!/bin/sh |
|||
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") |
|||
|
|||
case `uname` in |
|||
*CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;; |
|||
esac |
|||
|
|||
if [ -x "$basedir/node" ]; then |
|||
exec "$basedir/node" "$basedir/../semver/bin/semver.js" "$@" |
|||
else |
|||
exec node "$basedir/../semver/bin/semver.js" "$@" |
|||
fi |
|||
@ -0,0 +1,17 @@ |
|||
@ECHO off |
|||
GOTO start |
|||
:find_dp0 |
|||
SET dp0=%~dp0 |
|||
EXIT /b |
|||
:start |
|||
SETLOCAL |
|||
CALL :find_dp0 |
|||
|
|||
IF EXIST "%dp0%\node.exe" ( |
|||
SET "_prog=%dp0%\node.exe" |
|||
) ELSE ( |
|||
SET "_prog=node" |
|||
SET PATHEXT=%PATHEXT:;.JS;=;% |
|||
) |
|||
|
|||
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\semver\bin\semver.js" %* |
|||
@ -0,0 +1,28 @@ |
|||
#!/usr/bin/env pwsh |
|||
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent |
|||
|
|||
$exe="" |
|||
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) { |
|||
# Fix case when both the Windows and Linux builds of Node |
|||
# are installed in the same directory |
|||
$exe=".exe" |
|||
} |
|||
$ret=0 |
|||
if (Test-Path "$basedir/node$exe") { |
|||
# Support pipeline input |
|||
if ($MyInvocation.ExpectingInput) { |
|||
$input | & "$basedir/node$exe" "$basedir/../semver/bin/semver.js" $args |
|||
} else { |
|||
& "$basedir/node$exe" "$basedir/../semver/bin/semver.js" $args |
|||
} |
|||
$ret=$LASTEXITCODE |
|||
} else { |
|||
# Support pipeline input |
|||
if ($MyInvocation.ExpectingInput) { |
|||
$input | & "node$exe" "$basedir/../semver/bin/semver.js" $args |
|||
} else { |
|||
& "node$exe" "$basedir/../semver/bin/semver.js" $args |
|||
} |
|||
$ret=$LASTEXITCODE |
|||
} |
|||
exit $ret |
|||
@ -0,0 +1,12 @@ |
|||
#!/bin/sh |
|||
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") |
|||
|
|||
case `uname` in |
|||
*CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;; |
|||
esac |
|||
|
|||
if [ -x "$basedir/node" ]; then |
|||
exec "$basedir/node" "$basedir/../terser/bin/terser" "$@" |
|||
else |
|||
exec node "$basedir/../terser/bin/terser" "$@" |
|||
fi |
|||
@ -0,0 +1,17 @@ |
|||
@ECHO off |
|||
GOTO start |
|||
:find_dp0 |
|||
SET dp0=%~dp0 |
|||
EXIT /b |
|||
:start |
|||
SETLOCAL |
|||
CALL :find_dp0 |
|||
|
|||
IF EXIST "%dp0%\node.exe" ( |
|||
SET "_prog=%dp0%\node.exe" |
|||
) ELSE ( |
|||
SET "_prog=node" |
|||
SET PATHEXT=%PATHEXT:;.JS;=;% |
|||
) |
|||
|
|||
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\terser\bin\terser" %* |
|||
@ -0,0 +1,28 @@ |
|||
#!/usr/bin/env pwsh |
|||
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent |
|||
|
|||
$exe="" |
|||
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) { |
|||
# Fix case when both the Windows and Linux builds of Node |
|||
# are installed in the same directory |
|||
$exe=".exe" |
|||
} |
|||
$ret=0 |
|||
if (Test-Path "$basedir/node$exe") { |
|||
# Support pipeline input |
|||
if ($MyInvocation.ExpectingInput) { |
|||
$input | & "$basedir/node$exe" "$basedir/../terser/bin/terser" $args |
|||
} else { |
|||
& "$basedir/node$exe" "$basedir/../terser/bin/terser" $args |
|||
} |
|||
$ret=$LASTEXITCODE |
|||
} else { |
|||
# Support pipeline input |
|||
if ($MyInvocation.ExpectingInput) { |
|||
$input | & "node$exe" "$basedir/../terser/bin/terser" $args |
|||
} else { |
|||
& "node$exe" "$basedir/../terser/bin/terser" $args |
|||
} |
|||
$ret=$LASTEXITCODE |
|||
} |
|||
exit $ret |
|||
@ -0,0 +1,12 @@ |
|||
#!/bin/sh |
|||
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") |
|||
|
|||
case `uname` in |
|||
*CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;; |
|||
esac |
|||
|
|||
if [ -x "$basedir/node" ]; then |
|||
exec "$basedir/node" "$basedir/../webpack/bin/webpack.js" "$@" |
|||
else |
|||
exec node "$basedir/../webpack/bin/webpack.js" "$@" |
|||
fi |
|||
@ -0,0 +1,17 @@ |
|||
@ECHO off |
|||
GOTO start |
|||
:find_dp0 |
|||
SET dp0=%~dp0 |
|||
EXIT /b |
|||
:start |
|||
SETLOCAL |
|||
CALL :find_dp0 |
|||
|
|||
IF EXIST "%dp0%\node.exe" ( |
|||
SET "_prog=%dp0%\node.exe" |
|||
) ELSE ( |
|||
SET "_prog=node" |
|||
SET PATHEXT=%PATHEXT:;.JS;=;% |
|||
) |
|||
|
|||
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\webpack\bin\webpack.js" %* |
|||
@ -0,0 +1,28 @@ |
|||
#!/usr/bin/env pwsh |
|||
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent |
|||
|
|||
$exe="" |
|||
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) { |
|||
# Fix case when both the Windows and Linux builds of Node |
|||
# are installed in the same directory |
|||
$exe=".exe" |
|||
} |
|||
$ret=0 |
|||
if (Test-Path "$basedir/node$exe") { |
|||
# Support pipeline input |
|||
if ($MyInvocation.ExpectingInput) { |
|||
$input | & "$basedir/node$exe" "$basedir/../webpack/bin/webpack.js" $args |
|||
} else { |
|||
& "$basedir/node$exe" "$basedir/../webpack/bin/webpack.js" $args |
|||
} |
|||
$ret=$LASTEXITCODE |
|||
} else { |
|||
# Support pipeline input |
|||
if ($MyInvocation.ExpectingInput) { |
|||
$input | & "node$exe" "$basedir/../webpack/bin/webpack.js" $args |
|||
} else { |
|||
& "node$exe" "$basedir/../webpack/bin/webpack.js" $args |
|||
} |
|||
$ret=$LASTEXITCODE |
|||
} |
|||
exit $ret |
|||
File diff suppressed because it is too large
@ -0,0 +1,19 @@ |
|||
Copyright 2022 Justin Ridgewell <jridgewell@google.com> |
|||
|
|||
Permission is hereby granted, free of charge, to any person obtaining a copy |
|||
of this software and associated documentation files (the "Software"), to deal |
|||
in the Software without restriction, including without limitation the rights |
|||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell |
|||
copies of the Software, and to permit persons to whom the Software is |
|||
furnished to do so, subject to the following conditions: |
|||
|
|||
The above copyright notice and this permission notice shall be included in |
|||
all copies or substantial portions of the Software. |
|||
|
|||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
|||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
|||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE |
|||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
|||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |
|||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE |
|||
SOFTWARE. |
|||
@ -0,0 +1,227 @@ |
|||
# @jridgewell/gen-mapping |
|||
|
|||
> Generate source maps |
|||
|
|||
`gen-mapping` allows you to generate a source map during transpilation or minification. |
|||
With a source map, you're able to trace the original location in the source file, either in Chrome's |
|||
DevTools or using a library like [`@jridgewell/trace-mapping`][trace-mapping]. |
|||
|
|||
You may already be familiar with the [`source-map`][source-map] package's `SourceMapGenerator`. This |
|||
provides the same `addMapping` and `setSourceContent` API. |
|||
|
|||
## Installation |
|||
|
|||
```sh |
|||
npm install @jridgewell/gen-mapping |
|||
``` |
|||
|
|||
## Usage |
|||
|
|||
```typescript |
|||
import { GenMapping, addMapping, setSourceContent, toEncodedMap, toDecodedMap } from '@jridgewell/gen-mapping'; |
|||
|
|||
const map = new GenMapping({ |
|||
file: 'output.js', |
|||
sourceRoot: 'https://example.com/', |
|||
}); |
|||
|
|||
setSourceContent(map, 'input.js', `function foo() {}`); |
|||
|
|||
addMapping(map, { |
|||
// Lines start at line 1, columns at column 0. |
|||
generated: { line: 1, column: 0 }, |
|||
source: 'input.js', |
|||
original: { line: 1, column: 0 }, |
|||
}); |
|||
|
|||
addMapping(map, { |
|||
generated: { line: 1, column: 9 }, |
|||
source: 'input.js', |
|||
original: { line: 1, column: 9 }, |
|||
name: 'foo', |
|||
}); |
|||
|
|||
assert.deepEqual(toDecodedMap(map), { |
|||
version: 3, |
|||
file: 'output.js', |
|||
names: ['foo'], |
|||
sourceRoot: 'https://example.com/', |
|||
sources: ['input.js'], |
|||
sourcesContent: ['function foo() {}'], |
|||
mappings: [ |
|||
[ [0, 0, 0, 0], [9, 0, 0, 9, 0] ] |
|||
], |
|||
}); |
|||
|
|||
assert.deepEqual(toEncodedMap(map), { |
|||
version: 3, |
|||
file: 'output.js', |
|||
names: ['foo'], |
|||
sourceRoot: 'https://example.com/', |
|||
sources: ['input.js'], |
|||
sourcesContent: ['function foo() {}'], |
|||
mappings: 'AAAA,SAASA', |
|||
}); |
|||
``` |
|||
|
|||
### Smaller Sourcemaps |
|||
|
|||
Not everything needs to be added to a sourcemap, and needless markings can cause signficantly |
|||
larger file sizes. `gen-mapping` exposes `maybeAddSegment`/`maybeAddMapping` APIs that will |
|||
intelligently determine if this marking adds useful information. If not, the marking will be |
|||
skipped. |
|||
|
|||
```typescript |
|||
import { maybeAddMapping } from '@jridgewell/gen-mapping'; |
|||
|
|||
const map = new GenMapping(); |
|||
|
|||
// Adding a sourceless marking at the beginning of a line isn't useful. |
|||
maybeAddMapping(map, { |
|||
generated: { line: 1, column: 0 }, |
|||
}); |
|||
|
|||
// Adding a new source marking is useful. |
|||
maybeAddMapping(map, { |
|||
generated: { line: 1, column: 0 }, |
|||
source: 'input.js', |
|||
original: { line: 1, column: 0 }, |
|||
}); |
|||
|
|||
// But adding another marking pointing to the exact same original location isn't, even if the |
|||
// generated column changed. |
|||
maybeAddMapping(map, { |
|||
generated: { line: 1, column: 9 }, |
|||
source: 'input.js', |
|||
original: { line: 1, column: 0 }, |
|||
}); |
|||
|
|||
assert.deepEqual(toEncodedMap(map), { |
|||
version: 3, |
|||
names: [], |
|||
sources: ['input.js'], |
|||
sourcesContent: [null], |
|||
mappings: 'AAAA', |
|||
}); |
|||
``` |
|||
|
|||
## Benchmarks |
|||
|
|||
``` |
|||
node v18.0.0 |
|||
|
|||
amp.js.map |
|||
Memory Usage: |
|||
gen-mapping: addSegment 5852872 bytes |
|||
gen-mapping: addMapping 7716042 bytes |
|||
source-map-js 6143250 bytes |
|||
source-map-0.6.1 6124102 bytes |
|||
source-map-0.8.0 6121173 bytes |
|||
Smallest memory usage is gen-mapping: addSegment |
|||
|
|||
Adding speed: |
|||
gen-mapping: addSegment x 441 ops/sec ±2.07% (90 runs sampled) |
|||
gen-mapping: addMapping x 350 ops/sec ±2.40% (86 runs sampled) |
|||
source-map-js: addMapping x 169 ops/sec ±2.42% (80 runs sampled) |
|||
source-map-0.6.1: addMapping x 167 ops/sec ±2.56% (80 runs sampled) |
|||
source-map-0.8.0: addMapping x 168 ops/sec ±2.52% (80 runs sampled) |
|||
Fastest is gen-mapping: addSegment |
|||
|
|||
Generate speed: |
|||
gen-mapping: decoded output x 150,824,370 ops/sec ±0.07% (102 runs sampled) |
|||
gen-mapping: encoded output x 663 ops/sec ±0.22% (98 runs sampled) |
|||
source-map-js: encoded output x 197 ops/sec ±0.45% (84 runs sampled) |
|||
source-map-0.6.1: encoded output x 198 ops/sec ±0.33% (85 runs sampled) |
|||
source-map-0.8.0: encoded output x 197 ops/sec ±0.06% (93 runs sampled) |
|||
Fastest is gen-mapping: decoded output |
|||
|
|||
|
|||
*** |
|||
|
|||
|
|||
babel.min.js.map |
|||
Memory Usage: |
|||
gen-mapping: addSegment 37578063 bytes |
|||
gen-mapping: addMapping 37212897 bytes |
|||
source-map-js 47638527 bytes |
|||
source-map-0.6.1 47690503 bytes |
|||
source-map-0.8.0 47470188 bytes |
|||
Smallest memory usage is gen-mapping: addMapping |
|||
|
|||
Adding speed: |
|||
gen-mapping: addSegment x 31.05 ops/sec ±8.31% (43 runs sampled) |
|||
gen-mapping: addMapping x 29.83 ops/sec ±7.36% (51 runs sampled) |
|||
source-map-js: addMapping x 20.73 ops/sec ±6.22% (38 runs sampled) |
|||
source-map-0.6.1: addMapping x 20.03 ops/sec ±10.51% (38 runs sampled) |
|||
source-map-0.8.0: addMapping x 19.30 ops/sec ±8.27% (37 runs sampled) |
|||
Fastest is gen-mapping: addSegment |
|||
|
|||
Generate speed: |
|||
gen-mapping: decoded output x 381,379,234 ops/sec ±0.29% (96 runs sampled) |
|||
gen-mapping: encoded output x 95.15 ops/sec ±2.98% (72 runs sampled) |
|||
source-map-js: encoded output x 15.20 ops/sec ±7.41% (33 runs sampled) |
|||
source-map-0.6.1: encoded output x 16.36 ops/sec ±10.46% (31 runs sampled) |
|||
source-map-0.8.0: encoded output x 16.06 ops/sec ±6.45% (31 runs sampled) |
|||
Fastest is gen-mapping: decoded output |
|||
|
|||
|
|||
*** |
|||
|
|||
|
|||
preact.js.map |
|||
Memory Usage: |
|||
gen-mapping: addSegment 416247 bytes |
|||
gen-mapping: addMapping 419824 bytes |
|||
source-map-js 1024619 bytes |
|||
source-map-0.6.1 1146004 bytes |
|||
source-map-0.8.0 1113250 bytes |
|||
Smallest memory usage is gen-mapping: addSegment |
|||
|
|||
Adding speed: |
|||
gen-mapping: addSegment x 13,755 ops/sec ±0.15% (98 runs sampled) |
|||
gen-mapping: addMapping x 13,013 ops/sec ±0.11% (101 runs sampled) |
|||
source-map-js: addMapping x 4,564 ops/sec ±0.21% (98 runs sampled) |
|||
source-map-0.6.1: addMapping x 4,562 ops/sec ±0.11% (99 runs sampled) |
|||
source-map-0.8.0: addMapping x 4,593 ops/sec ±0.11% (100 runs sampled) |
|||
Fastest is gen-mapping: addSegment |
|||
|
|||
Generate speed: |
|||
gen-mapping: decoded output x 379,864,020 ops/sec ±0.23% (93 runs sampled) |
|||
gen-mapping: encoded output x 14,368 ops/sec ±4.07% (82 runs sampled) |
|||
source-map-js: encoded output x 5,261 ops/sec ±0.21% (99 runs sampled) |
|||
source-map-0.6.1: encoded output x 5,124 ops/sec ±0.58% (99 runs sampled) |
|||
source-map-0.8.0: encoded output x 5,434 ops/sec ±0.33% (96 runs sampled) |
|||
Fastest is gen-mapping: decoded output |
|||
|
|||
|
|||
*** |
|||
|
|||
|
|||
react.js.map |
|||
Memory Usage: |
|||
gen-mapping: addSegment 975096 bytes |
|||
gen-mapping: addMapping 1102981 bytes |
|||
source-map-js 2918836 bytes |
|||
source-map-0.6.1 2885435 bytes |
|||
source-map-0.8.0 2874336 bytes |
|||
Smallest memory usage is gen-mapping: addSegment |
|||
|
|||
Adding speed: |
|||
gen-mapping: addSegment x 4,772 ops/sec ±0.15% (100 runs sampled) |
|||
gen-mapping: addMapping x 4,456 ops/sec ±0.13% (97 runs sampled) |
|||
source-map-js: addMapping x 1,618 ops/sec ±0.24% (97 runs sampled) |
|||
source-map-0.6.1: addMapping x 1,622 ops/sec ±0.12% (99 runs sampled) |
|||
source-map-0.8.0: addMapping x 1,631 ops/sec ±0.12% (100 runs sampled) |
|||
Fastest is gen-mapping: addSegment |
|||
|
|||
Generate speed: |
|||
gen-mapping: decoded output x 379,107,695 ops/sec ±0.07% (99 runs sampled) |
|||
gen-mapping: encoded output x 5,421 ops/sec ±1.60% (89 runs sampled) |
|||
source-map-js: encoded output x 2,113 ops/sec ±1.81% (98 runs sampled) |
|||
source-map-0.6.1: encoded output x 2,126 ops/sec ±0.10% (100 runs sampled) |
|||
source-map-0.8.0: encoded output x 2,176 ops/sec ±0.39% (98 runs sampled) |
|||
Fastest is gen-mapping: decoded output |
|||
``` |
|||
|
|||
[source-map]: https://www.npmjs.com/package/source-map |
|||
[trace-mapping]: https://github.com/jridgewell/trace-mapping |
|||
@ -0,0 +1,230 @@ |
|||
import { SetArray, put } from '@jridgewell/set-array'; |
|||
import { encode } from '@jridgewell/sourcemap-codec'; |
|||
import { TraceMap, decodedMappings } from '@jridgewell/trace-mapping'; |
|||
|
|||
const COLUMN = 0; |
|||
const SOURCES_INDEX = 1; |
|||
const SOURCE_LINE = 2; |
|||
const SOURCE_COLUMN = 3; |
|||
const NAMES_INDEX = 4; |
|||
|
|||
const NO_NAME = -1; |
|||
/** |
|||
* A low-level API to associate a generated position with an original source position. Line and |
|||
* column here are 0-based, unlike `addMapping`. |
|||
*/ |
|||
let addSegment; |
|||
/** |
|||
* A high-level API to associate a generated position with an original source position. Line is |
|||
* 1-based, but column is 0-based, due to legacy behavior in `source-map` library. |
|||
*/ |
|||
let addMapping; |
|||
/** |
|||
* Same as `addSegment`, but will only add the segment if it generates useful information in the |
|||
* resulting map. This only works correctly if segments are added **in order**, meaning you should |
|||
* not add a segment with a lower generated line/column than one that came before. |
|||
*/ |
|||
let maybeAddSegment; |
|||
/** |
|||
* Same as `addMapping`, but will only add the mapping if it generates useful information in the |
|||
* resulting map. This only works correctly if mappings are added **in order**, meaning you should |
|||
* not add a mapping with a lower generated line/column than one that came before. |
|||
*/ |
|||
let maybeAddMapping; |
|||
/** |
|||
* Adds/removes the content of the source file to the source map. |
|||
*/ |
|||
let setSourceContent; |
|||
/** |
|||
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects |
|||
* a sourcemap, or to JSON.stringify. |
|||
*/ |
|||
let toDecodedMap; |
|||
/** |
|||
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects |
|||
* a sourcemap, or to JSON.stringify. |
|||
*/ |
|||
let toEncodedMap; |
|||
/** |
|||
* Constructs a new GenMapping, using the already present mappings of the input. |
|||
*/ |
|||
let fromMap; |
|||
/** |
|||
* Returns an array of high-level mapping objects for every recorded segment, which could then be |
|||
* passed to the `source-map` library. |
|||
*/ |
|||
let allMappings; |
|||
// This split declaration is only so that terser can elminiate the static initialization block.
|
|||
let addSegmentInternal; |
|||
/** |
|||
* Provides the state to generate a sourcemap. |
|||
*/ |
|||
class GenMapping { |
|||
constructor({ file, sourceRoot } = {}) { |
|||
this._names = new SetArray(); |
|||
this._sources = new SetArray(); |
|||
this._sourcesContent = []; |
|||
this._mappings = []; |
|||
this.file = file; |
|||
this.sourceRoot = sourceRoot; |
|||
} |
|||
} |
|||
(() => { |
|||
addSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => { |
|||
return addSegmentInternal(false, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content); |
|||
}; |
|||
maybeAddSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => { |
|||
return addSegmentInternal(true, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content); |
|||
}; |
|||
addMapping = (map, mapping) => { |
|||
return addMappingInternal(false, map, mapping); |
|||
}; |
|||
maybeAddMapping = (map, mapping) => { |
|||
return addMappingInternal(true, map, mapping); |
|||
}; |
|||
setSourceContent = (map, source, content) => { |
|||
const { _sources: sources, _sourcesContent: sourcesContent } = map; |
|||
sourcesContent[put(sources, source)] = content; |
|||
}; |
|||
toDecodedMap = (map) => { |
|||
const { file, sourceRoot, _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, } = map; |
|||
removeEmptyFinalLines(mappings); |
|||
return { |
|||
version: 3, |
|||
file: file || undefined, |
|||
names: names.array, |
|||
sourceRoot: sourceRoot || undefined, |
|||
sources: sources.array, |
|||
sourcesContent, |
|||
mappings, |
|||
}; |
|||
}; |
|||
toEncodedMap = (map) => { |
|||
const decoded = toDecodedMap(map); |
|||
return Object.assign(Object.assign({}, decoded), { mappings: encode(decoded.mappings) }); |
|||
}; |
|||
allMappings = (map) => { |
|||
const out = []; |
|||
const { _mappings: mappings, _sources: sources, _names: names } = map; |
|||
for (let i = 0; i < mappings.length; i++) { |
|||
const line = mappings[i]; |
|||
for (let j = 0; j < line.length; j++) { |
|||
const seg = line[j]; |
|||
const generated = { line: i + 1, column: seg[COLUMN] }; |
|||
let source = undefined; |
|||
let original = undefined; |
|||
let name = undefined; |
|||
if (seg.length !== 1) { |
|||
source = sources.array[seg[SOURCES_INDEX]]; |
|||
original = { line: seg[SOURCE_LINE] + 1, column: seg[SOURCE_COLUMN] }; |
|||
if (seg.length === 5) |
|||
name = names.array[seg[NAMES_INDEX]]; |
|||
} |
|||
out.push({ generated, source, original, name }); |
|||
} |
|||
} |
|||
return out; |
|||
}; |
|||
fromMap = (input) => { |
|||
const map = new TraceMap(input); |
|||
const gen = new GenMapping({ file: map.file, sourceRoot: map.sourceRoot }); |
|||
putAll(gen._names, map.names); |
|||
putAll(gen._sources, map.sources); |
|||
gen._sourcesContent = map.sourcesContent || map.sources.map(() => null); |
|||
gen._mappings = decodedMappings(map); |
|||
return gen; |
|||
}; |
|||
// Internal helpers
|
|||
addSegmentInternal = (skipable, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => { |
|||
const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, } = map; |
|||
const line = getLine(mappings, genLine); |
|||
const index = getColumnIndex(line, genColumn); |
|||
if (!source) { |
|||
if (skipable && skipSourceless(line, index)) |
|||
return; |
|||
return insert(line, index, [genColumn]); |
|||
} |
|||
const sourcesIndex = put(sources, source); |
|||
const namesIndex = name ? put(names, name) : NO_NAME; |
|||
if (sourcesIndex === sourcesContent.length) |
|||
sourcesContent[sourcesIndex] = content !== null && content !== void 0 ? content : null; |
|||
if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) { |
|||
return; |
|||
} |
|||
return insert(line, index, name |
|||
? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex] |
|||
: [genColumn, sourcesIndex, sourceLine, sourceColumn]); |
|||
}; |
|||
})(); |
|||
function getLine(mappings, index) { |
|||
for (let i = mappings.length; i <= index; i++) { |
|||
mappings[i] = []; |
|||
} |
|||
return mappings[index]; |
|||
} |
|||
function getColumnIndex(line, genColumn) { |
|||
let index = line.length; |
|||
for (let i = index - 1; i >= 0; index = i--) { |
|||
const current = line[i]; |
|||
if (genColumn >= current[COLUMN]) |
|||
break; |
|||
} |
|||
return index; |
|||
} |
|||
function insert(array, index, value) { |
|||
for (let i = array.length; i > index; i--) { |
|||
array[i] = array[i - 1]; |
|||
} |
|||
array[index] = value; |
|||
} |
|||
function removeEmptyFinalLines(mappings) { |
|||
const { length } = mappings; |
|||
let len = length; |
|||
for (let i = len - 1; i >= 0; len = i, i--) { |
|||
if (mappings[i].length > 0) |
|||
break; |
|||
} |
|||
if (len < length) |
|||
mappings.length = len; |
|||
} |
|||
function putAll(strarr, array) { |
|||
for (let i = 0; i < array.length; i++) |
|||
put(strarr, array[i]); |
|||
} |
|||
function skipSourceless(line, index) { |
|||
// The start of a line is already sourceless, so adding a sourceless segment to the beginning
|
|||
// doesn't generate any useful information.
|
|||
if (index === 0) |
|||
return true; |
|||
const prev = line[index - 1]; |
|||
// If the previous segment is also sourceless, then adding another sourceless segment doesn't
|
|||
// genrate any new information. Else, this segment will end the source/named segment and point to
|
|||
// a sourceless position, which is useful.
|
|||
return prev.length === 1; |
|||
} |
|||
function skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex) { |
|||
// A source/named segment at the start of a line gives position at that genColumn
|
|||
if (index === 0) |
|||
return false; |
|||
const prev = line[index - 1]; |
|||
// If the previous segment is sourceless, then we're transitioning to a source.
|
|||
if (prev.length === 1) |
|||
return false; |
|||
// If the previous segment maps to the exact same source position, then this segment doesn't
|
|||
// provide any new position information.
|
|||
return (sourcesIndex === prev[SOURCES_INDEX] && |
|||
sourceLine === prev[SOURCE_LINE] && |
|||
sourceColumn === prev[SOURCE_COLUMN] && |
|||
namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME)); |
|||
} |
|||
function addMappingInternal(skipable, map, mapping) { |
|||
const { generated, source, original, name, content } = mapping; |
|||
if (!source) { |
|||
return addSegmentInternal(skipable, map, generated.line - 1, generated.column, null, null, null, null, null); |
|||
} |
|||
const s = source; |
|||
return addSegmentInternal(skipable, map, generated.line - 1, generated.column, s, original.line - 1, original.column, name, content); |
|||
} |
|||
|
|||
export { GenMapping, addMapping, addSegment, allMappings, fromMap, maybeAddMapping, maybeAddSegment, setSourceContent, toDecodedMap, toEncodedMap }; |
|||
//# sourceMappingURL=gen-mapping.mjs.map
|
|||
File diff suppressed because one or more lines are too long
@ -0,0 +1,236 @@ |
|||
(function (global, factory) { |
|||
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('@jridgewell/set-array'), require('@jridgewell/sourcemap-codec'), require('@jridgewell/trace-mapping')) : |
|||
typeof define === 'function' && define.amd ? define(['exports', '@jridgewell/set-array', '@jridgewell/sourcemap-codec', '@jridgewell/trace-mapping'], factory) : |
|||
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.genMapping = {}, global.setArray, global.sourcemapCodec, global.traceMapping)); |
|||
})(this, (function (exports, setArray, sourcemapCodec, traceMapping) { 'use strict'; |
|||
|
|||
const COLUMN = 0; |
|||
const SOURCES_INDEX = 1; |
|||
const SOURCE_LINE = 2; |
|||
const SOURCE_COLUMN = 3; |
|||
const NAMES_INDEX = 4; |
|||
|
|||
const NO_NAME = -1; |
|||
/** |
|||
* A low-level API to associate a generated position with an original source position. Line and |
|||
* column here are 0-based, unlike `addMapping`. |
|||
*/ |
|||
exports.addSegment = void 0; |
|||
/** |
|||
* A high-level API to associate a generated position with an original source position. Line is |
|||
* 1-based, but column is 0-based, due to legacy behavior in `source-map` library. |
|||
*/ |
|||
exports.addMapping = void 0; |
|||
/** |
|||
* Same as `addSegment`, but will only add the segment if it generates useful information in the |
|||
* resulting map. This only works correctly if segments are added **in order**, meaning you should |
|||
* not add a segment with a lower generated line/column than one that came before. |
|||
*/ |
|||
exports.maybeAddSegment = void 0; |
|||
/** |
|||
* Same as `addMapping`, but will only add the mapping if it generates useful information in the |
|||
* resulting map. This only works correctly if mappings are added **in order**, meaning you should |
|||
* not add a mapping with a lower generated line/column than one that came before. |
|||
*/ |
|||
exports.maybeAddMapping = void 0; |
|||
/** |
|||
* Adds/removes the content of the source file to the source map. |
|||
*/ |
|||
exports.setSourceContent = void 0; |
|||
/** |
|||
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects |
|||
* a sourcemap, or to JSON.stringify. |
|||
*/ |
|||
exports.toDecodedMap = void 0; |
|||
/** |
|||
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects |
|||
* a sourcemap, or to JSON.stringify. |
|||
*/ |
|||
exports.toEncodedMap = void 0; |
|||
/** |
|||
* Constructs a new GenMapping, using the already present mappings of the input. |
|||
*/ |
|||
exports.fromMap = void 0; |
|||
/** |
|||
* Returns an array of high-level mapping objects for every recorded segment, which could then be |
|||
* passed to the `source-map` library. |
|||
*/ |
|||
exports.allMappings = void 0; |
|||
// This split declaration is only so that terser can elminiate the static initialization block.
|
|||
let addSegmentInternal; |
|||
/** |
|||
* Provides the state to generate a sourcemap. |
|||
*/ |
|||
class GenMapping { |
|||
constructor({ file, sourceRoot } = {}) { |
|||
this._names = new setArray.SetArray(); |
|||
this._sources = new setArray.SetArray(); |
|||
this._sourcesContent = []; |
|||
this._mappings = []; |
|||
this.file = file; |
|||
this.sourceRoot = sourceRoot; |
|||
} |
|||
} |
|||
(() => { |
|||
exports.addSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => { |
|||
return addSegmentInternal(false, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content); |
|||
}; |
|||
exports.maybeAddSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => { |
|||
return addSegmentInternal(true, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content); |
|||
}; |
|||
exports.addMapping = (map, mapping) => { |
|||
return addMappingInternal(false, map, mapping); |
|||
}; |
|||
exports.maybeAddMapping = (map, mapping) => { |
|||
return addMappingInternal(true, map, mapping); |
|||
}; |
|||
exports.setSourceContent = (map, source, content) => { |
|||
const { _sources: sources, _sourcesContent: sourcesContent } = map; |
|||
sourcesContent[setArray.put(sources, source)] = content; |
|||
}; |
|||
exports.toDecodedMap = (map) => { |
|||
const { file, sourceRoot, _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, } = map; |
|||
removeEmptyFinalLines(mappings); |
|||
return { |
|||
version: 3, |
|||
file: file || undefined, |
|||
names: names.array, |
|||
sourceRoot: sourceRoot || undefined, |
|||
sources: sources.array, |
|||
sourcesContent, |
|||
mappings, |
|||
}; |
|||
}; |
|||
exports.toEncodedMap = (map) => { |
|||
const decoded = exports.toDecodedMap(map); |
|||
return Object.assign(Object.assign({}, decoded), { mappings: sourcemapCodec.encode(decoded.mappings) }); |
|||
}; |
|||
exports.allMappings = (map) => { |
|||
const out = []; |
|||
const { _mappings: mappings, _sources: sources, _names: names } = map; |
|||
for (let i = 0; i < mappings.length; i++) { |
|||
const line = mappings[i]; |
|||
for (let j = 0; j < line.length; j++) { |
|||
const seg = line[j]; |
|||
const generated = { line: i + 1, column: seg[COLUMN] }; |
|||
let source = undefined; |
|||
let original = undefined; |
|||
let name = undefined; |
|||
if (seg.length !== 1) { |
|||
source = sources.array[seg[SOURCES_INDEX]]; |
|||
original = { line: seg[SOURCE_LINE] + 1, column: seg[SOURCE_COLUMN] }; |
|||
if (seg.length === 5) |
|||
name = names.array[seg[NAMES_INDEX]]; |
|||
} |
|||
out.push({ generated, source, original, name }); |
|||
} |
|||
} |
|||
return out; |
|||
}; |
|||
exports.fromMap = (input) => { |
|||
const map = new traceMapping.TraceMap(input); |
|||
const gen = new GenMapping({ file: map.file, sourceRoot: map.sourceRoot }); |
|||
putAll(gen._names, map.names); |
|||
putAll(gen._sources, map.sources); |
|||
gen._sourcesContent = map.sourcesContent || map.sources.map(() => null); |
|||
gen._mappings = traceMapping.decodedMappings(map); |
|||
return gen; |
|||
}; |
|||
// Internal helpers
|
|||
addSegmentInternal = (skipable, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => { |
|||
const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, } = map; |
|||
const line = getLine(mappings, genLine); |
|||
const index = getColumnIndex(line, genColumn); |
|||
if (!source) { |
|||
if (skipable && skipSourceless(line, index)) |
|||
return; |
|||
return insert(line, index, [genColumn]); |
|||
} |
|||
const sourcesIndex = setArray.put(sources, source); |
|||
const namesIndex = name ? setArray.put(names, name) : NO_NAME; |
|||
if (sourcesIndex === sourcesContent.length) |
|||
sourcesContent[sourcesIndex] = content !== null && content !== void 0 ? content : null; |
|||
if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) { |
|||
return; |
|||
} |
|||
return insert(line, index, name |
|||
? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex] |
|||
: [genColumn, sourcesIndex, sourceLine, sourceColumn]); |
|||
}; |
|||
})(); |
|||
function getLine(mappings, index) { |
|||
for (let i = mappings.length; i <= index; i++) { |
|||
mappings[i] = []; |
|||
} |
|||
return mappings[index]; |
|||
} |
|||
function getColumnIndex(line, genColumn) { |
|||
let index = line.length; |
|||
for (let i = index - 1; i >= 0; index = i--) { |
|||
const current = line[i]; |
|||
if (genColumn >= current[COLUMN]) |
|||
break; |
|||
} |
|||
return index; |
|||
} |
|||
function insert(array, index, value) { |
|||
for (let i = array.length; i > index; i--) { |
|||
array[i] = array[i - 1]; |
|||
} |
|||
array[index] = value; |
|||
} |
|||
function removeEmptyFinalLines(mappings) { |
|||
const { length } = mappings; |
|||
let len = length; |
|||
for (let i = len - 1; i >= 0; len = i, i--) { |
|||
if (mappings[i].length > 0) |
|||
break; |
|||
} |
|||
if (len < length) |
|||
mappings.length = len; |
|||
} |
|||
function putAll(strarr, array) { |
|||
for (let i = 0; i < array.length; i++) |
|||
setArray.put(strarr, array[i]); |
|||
} |
|||
function skipSourceless(line, index) { |
|||
// The start of a line is already sourceless, so adding a sourceless segment to the beginning
|
|||
// doesn't generate any useful information.
|
|||
if (index === 0) |
|||
return true; |
|||
const prev = line[index - 1]; |
|||
// If the previous segment is also sourceless, then adding another sourceless segment doesn't
|
|||
// genrate any new information. Else, this segment will end the source/named segment and point to
|
|||
// a sourceless position, which is useful.
|
|||
return prev.length === 1; |
|||
} |
|||
function skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex) { |
|||
// A source/named segment at the start of a line gives position at that genColumn
|
|||
if (index === 0) |
|||
return false; |
|||
const prev = line[index - 1]; |
|||
// If the previous segment is sourceless, then we're transitioning to a source.
|
|||
if (prev.length === 1) |
|||
return false; |
|||
// If the previous segment maps to the exact same source position, then this segment doesn't
|
|||
// provide any new position information.
|
|||
return (sourcesIndex === prev[SOURCES_INDEX] && |
|||
sourceLine === prev[SOURCE_LINE] && |
|||
sourceColumn === prev[SOURCE_COLUMN] && |
|||
namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME)); |
|||
} |
|||
function addMappingInternal(skipable, map, mapping) { |
|||
const { generated, source, original, name, content } = mapping; |
|||
if (!source) { |
|||
return addSegmentInternal(skipable, map, generated.line - 1, generated.column, null, null, null, null, null); |
|||
} |
|||
const s = source; |
|||
return addSegmentInternal(skipable, map, generated.line - 1, generated.column, s, original.line - 1, original.column, name, content); |
|||
} |
|||
|
|||
exports.GenMapping = GenMapping; |
|||
|
|||
Object.defineProperty(exports, '__esModule', { value: true }); |
|||
|
|||
})); |
|||
//# sourceMappingURL=gen-mapping.umd.js.map
|
|||
File diff suppressed because one or more lines are too long
@ -0,0 +1,90 @@ |
|||
import type { SourceMapInput } from '@jridgewell/trace-mapping'; |
|||
import type { DecodedSourceMap, EncodedSourceMap, Pos, Mapping } from './types'; |
|||
export type { DecodedSourceMap, EncodedSourceMap, Mapping }; |
|||
export declare type Options = { |
|||
file?: string | null; |
|||
sourceRoot?: string | null; |
|||
}; |
|||
/** |
|||
* A low-level API to associate a generated position with an original source position. Line and |
|||
* column here are 0-based, unlike `addMapping`. |
|||
*/ |
|||
export declare let addSegment: { |
|||
(map: GenMapping, genLine: number, genColumn: number, source?: null, sourceLine?: null, sourceColumn?: null, name?: null, content?: null): void; |
|||
(map: GenMapping, genLine: number, genColumn: number, source: string, sourceLine: number, sourceColumn: number, name?: null, content?: string | null): void; |
|||
(map: GenMapping, genLine: number, genColumn: number, source: string, sourceLine: number, sourceColumn: number, name: string, content?: string | null): void; |
|||
}; |
|||
/** |
|||
* A high-level API to associate a generated position with an original source position. Line is |
|||
* 1-based, but column is 0-based, due to legacy behavior in `source-map` library. |
|||
*/ |
|||
export declare let addMapping: { |
|||
(map: GenMapping, mapping: { |
|||
generated: Pos; |
|||
source?: null; |
|||
original?: null; |
|||
name?: null; |
|||
content?: null; |
|||
}): void; |
|||
(map: GenMapping, mapping: { |
|||
generated: Pos; |
|||
source: string; |
|||
original: Pos; |
|||
name?: null; |
|||
content?: string | null; |
|||
}): void; |
|||
(map: GenMapping, mapping: { |
|||
generated: Pos; |
|||
source: string; |
|||
original: Pos; |
|||
name: string; |
|||
content?: string | null; |
|||
}): void; |
|||
}; |
|||
/** |
|||
* Same as `addSegment`, but will only add the segment if it generates useful information in the |
|||
* resulting map. This only works correctly if segments are added **in order**, meaning you should |
|||
* not add a segment with a lower generated line/column than one that came before. |
|||
*/ |
|||
export declare let maybeAddSegment: typeof addSegment; |
|||
/** |
|||
* Same as `addMapping`, but will only add the mapping if it generates useful information in the |
|||
* resulting map. This only works correctly if mappings are added **in order**, meaning you should |
|||
* not add a mapping with a lower generated line/column than one that came before. |
|||
*/ |
|||
export declare let maybeAddMapping: typeof addMapping; |
|||
/** |
|||
* Adds/removes the content of the source file to the source map. |
|||
*/ |
|||
export declare let setSourceContent: (map: GenMapping, source: string, content: string | null) => void; |
|||
/** |
|||
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects |
|||
* a sourcemap, or to JSON.stringify. |
|||
*/ |
|||
export declare let toDecodedMap: (map: GenMapping) => DecodedSourceMap; |
|||
/** |
|||
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects |
|||
* a sourcemap, or to JSON.stringify. |
|||
*/ |
|||
export declare let toEncodedMap: (map: GenMapping) => EncodedSourceMap; |
|||
/** |
|||
* Constructs a new GenMapping, using the already present mappings of the input. |
|||
*/ |
|||
export declare let fromMap: (input: SourceMapInput) => GenMapping; |
|||
/** |
|||
* Returns an array of high-level mapping objects for every recorded segment, which could then be |
|||
* passed to the `source-map` library. |
|||
*/ |
|||
export declare let allMappings: (map: GenMapping) => Mapping[]; |
|||
/** |
|||
* Provides the state to generate a sourcemap. |
|||
*/ |
|||
export declare class GenMapping { |
|||
private _names; |
|||
private _sources; |
|||
private _sourcesContent; |
|||
private _mappings; |
|||
file: string | null | undefined; |
|||
sourceRoot: string | null | undefined; |
|||
constructor({ file, sourceRoot }?: Options); |
|||
} |
|||
@ -0,0 +1,12 @@ |
|||
declare type GeneratedColumn = number; |
|||
declare type SourcesIndex = number; |
|||
declare type SourceLine = number; |
|||
declare type SourceColumn = number; |
|||
declare type NamesIndex = number; |
|||
export declare type SourceMapSegment = [GeneratedColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex]; |
|||
export declare const COLUMN = 0; |
|||
export declare const SOURCES_INDEX = 1; |
|||
export declare const SOURCE_LINE = 2; |
|||
export declare const SOURCE_COLUMN = 3; |
|||
export declare const NAMES_INDEX = 4; |
|||
export {}; |
|||
@ -0,0 +1,35 @@ |
|||
import type { SourceMapSegment } from './sourcemap-segment'; |
|||
export interface SourceMapV3 { |
|||
file?: string | null; |
|||
names: readonly string[]; |
|||
sourceRoot?: string; |
|||
sources: readonly (string | null)[]; |
|||
sourcesContent?: readonly (string | null)[]; |
|||
version: 3; |
|||
} |
|||
export interface EncodedSourceMap extends SourceMapV3 { |
|||
mappings: string; |
|||
} |
|||
export interface DecodedSourceMap extends SourceMapV3 { |
|||
mappings: readonly SourceMapSegment[][]; |
|||
} |
|||
export interface Pos { |
|||
line: number; |
|||
column: number; |
|||
} |
|||
export declare type Mapping = { |
|||
generated: Pos; |
|||
source: undefined; |
|||
original: undefined; |
|||
name: undefined; |
|||
} | { |
|||
generated: Pos; |
|||
source: string; |
|||
original: Pos; |
|||
name: string; |
|||
} | { |
|||
generated: Pos; |
|||
source: string; |
|||
original: Pos; |
|||
name: undefined; |
|||
}; |
|||
@ -0,0 +1,78 @@ |
|||
{ |
|||
"name": "@jridgewell/gen-mapping", |
|||
"version": "0.3.2", |
|||
"description": "Generate source maps", |
|||
"keywords": [ |
|||
"source", |
|||
"map" |
|||
], |
|||
"author": "Justin Ridgewell <justin@ridgewell.name>", |
|||
"license": "MIT", |
|||
"repository": "https://github.com/jridgewell/gen-mapping", |
|||
"main": "dist/gen-mapping.umd.js", |
|||
"module": "dist/gen-mapping.mjs", |
|||
"typings": "dist/types/gen-mapping.d.ts", |
|||
"exports": { |
|||
".": [ |
|||
{ |
|||
"types": "./dist/types/gen-mapping.d.ts", |
|||
"browser": "./dist/gen-mapping.umd.js", |
|||
"require": "./dist/gen-mapping.umd.js", |
|||
"import": "./dist/gen-mapping.mjs" |
|||
}, |
|||
"./dist/gen-mapping.umd.js" |
|||
], |
|||
"./package.json": "./package.json" |
|||
}, |
|||
"files": [ |
|||
"dist", |
|||
"src" |
|||
], |
|||
"engines": { |
|||
"node": ">=6.0.0" |
|||
}, |
|||
"scripts": { |
|||
"benchmark": "run-s build:rollup benchmark:*", |
|||
"benchmark:install": "cd benchmark && npm install", |
|||
"benchmark:only": "node benchmark/index.mjs", |
|||
"prebuild": "rm -rf dist", |
|||
"build": "run-s -n build:*", |
|||
"build:rollup": "rollup -c rollup.config.js", |
|||
"build:ts": "tsc --project tsconfig.build.json", |
|||
"lint": "run-s -n lint:*", |
|||
"lint:prettier": "npm run test:lint:prettier -- --write", |
|||
"lint:ts": "npm run test:lint:ts -- --fix", |
|||
"pretest": "run-s build:rollup", |
|||
"test": "run-s -n test:lint test:coverage", |
|||
"test:debug": "mocha --inspect-brk", |
|||
"test:lint": "run-s -n test:lint:*", |
|||
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts'", |
|||
"test:lint:ts": "eslint '{src,test}/**/*.ts'", |
|||
"test:only": "mocha", |
|||
"test:coverage": "c8 mocha", |
|||
"test:watch": "run-p 'build:rollup -- --watch' 'test:only -- --watch'", |
|||
"prepublishOnly": "npm run preversion", |
|||
"preversion": "run-s test build" |
|||
}, |
|||
"devDependencies": { |
|||
"@rollup/plugin-typescript": "8.3.2", |
|||
"@types/mocha": "9.1.1", |
|||
"@types/node": "17.0.29", |
|||
"@typescript-eslint/eslint-plugin": "5.21.0", |
|||
"@typescript-eslint/parser": "5.21.0", |
|||
"benchmark": "2.1.4", |
|||
"c8": "7.11.2", |
|||
"eslint": "8.14.0", |
|||
"eslint-config-prettier": "8.5.0", |
|||
"mocha": "9.2.2", |
|||
"npm-run-all": "4.1.5", |
|||
"prettier": "2.6.2", |
|||
"rollup": "2.70.2", |
|||
"typescript": "4.6.3" |
|||
}, |
|||
"dependencies": { |
|||
"@jridgewell/set-array": "^1.0.1", |
|||
"@jridgewell/sourcemap-codec": "^1.4.10", |
|||
"@jridgewell/trace-mapping": "^0.3.9" |
|||
} |
|||
} |
|||
@ -0,0 +1,458 @@ |
|||
import { SetArray, put } from '@jridgewell/set-array'; |
|||
import { encode } from '@jridgewell/sourcemap-codec'; |
|||
import { TraceMap, decodedMappings } from '@jridgewell/trace-mapping'; |
|||
|
|||
import { |
|||
COLUMN, |
|||
SOURCES_INDEX, |
|||
SOURCE_LINE, |
|||
SOURCE_COLUMN, |
|||
NAMES_INDEX, |
|||
} from './sourcemap-segment'; |
|||
|
|||
import type { SourceMapInput } from '@jridgewell/trace-mapping'; |
|||
import type { SourceMapSegment } from './sourcemap-segment'; |
|||
import type { DecodedSourceMap, EncodedSourceMap, Pos, Mapping } from './types'; |
|||
|
|||
export type { DecodedSourceMap, EncodedSourceMap, Mapping }; |
|||
|
|||
export type Options = { |
|||
file?: string | null; |
|||
sourceRoot?: string | null; |
|||
}; |
|||
|
|||
const NO_NAME = -1; |
|||
|
|||
/** |
|||
* A low-level API to associate a generated position with an original source position. Line and |
|||
* column here are 0-based, unlike `addMapping`. |
|||
*/ |
|||
export let addSegment: { |
|||
( |
|||
map: GenMapping, |
|||
genLine: number, |
|||
genColumn: number, |
|||
source?: null, |
|||
sourceLine?: null, |
|||
sourceColumn?: null, |
|||
name?: null, |
|||
content?: null, |
|||
): void; |
|||
( |
|||
map: GenMapping, |
|||
genLine: number, |
|||
genColumn: number, |
|||
source: string, |
|||
sourceLine: number, |
|||
sourceColumn: number, |
|||
name?: null, |
|||
content?: string | null, |
|||
): void; |
|||
( |
|||
map: GenMapping, |
|||
genLine: number, |
|||
genColumn: number, |
|||
source: string, |
|||
sourceLine: number, |
|||
sourceColumn: number, |
|||
name: string, |
|||
content?: string | null, |
|||
): void; |
|||
}; |
|||
|
|||
/** |
|||
* A high-level API to associate a generated position with an original source position. Line is |
|||
* 1-based, but column is 0-based, due to legacy behavior in `source-map` library. |
|||
*/ |
|||
export let addMapping: { |
|||
( |
|||
map: GenMapping, |
|||
mapping: { |
|||
generated: Pos; |
|||
source?: null; |
|||
original?: null; |
|||
name?: null; |
|||
content?: null; |
|||
}, |
|||
): void; |
|||
( |
|||
map: GenMapping, |
|||
mapping: { |
|||
generated: Pos; |
|||
source: string; |
|||
original: Pos; |
|||
name?: null; |
|||
content?: string | null; |
|||
}, |
|||
): void; |
|||
( |
|||
map: GenMapping, |
|||
mapping: { |
|||
generated: Pos; |
|||
source: string; |
|||
original: Pos; |
|||
name: string; |
|||
content?: string | null; |
|||
}, |
|||
): void; |
|||
}; |
|||
|
|||
/** |
|||
* Same as `addSegment`, but will only add the segment if it generates useful information in the |
|||
* resulting map. This only works correctly if segments are added **in order**, meaning you should |
|||
* not add a segment with a lower generated line/column than one that came before. |
|||
*/ |
|||
export let maybeAddSegment: typeof addSegment; |
|||
|
|||
/** |
|||
* Same as `addMapping`, but will only add the mapping if it generates useful information in the |
|||
* resulting map. This only works correctly if mappings are added **in order**, meaning you should |
|||
* not add a mapping with a lower generated line/column than one that came before. |
|||
*/ |
|||
export let maybeAddMapping: typeof addMapping; |
|||
|
|||
/** |
|||
* Adds/removes the content of the source file to the source map. |
|||
*/ |
|||
export let setSourceContent: (map: GenMapping, source: string, content: string | null) => void; |
|||
|
|||
/** |
|||
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects |
|||
* a sourcemap, or to JSON.stringify. |
|||
*/ |
|||
export let toDecodedMap: (map: GenMapping) => DecodedSourceMap; |
|||
|
|||
/** |
|||
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects |
|||
* a sourcemap, or to JSON.stringify. |
|||
*/ |
|||
export let toEncodedMap: (map: GenMapping) => EncodedSourceMap; |
|||
|
|||
/** |
|||
* Constructs a new GenMapping, using the already present mappings of the input. |
|||
*/ |
|||
export let fromMap: (input: SourceMapInput) => GenMapping; |
|||
|
|||
/** |
|||
* Returns an array of high-level mapping objects for every recorded segment, which could then be |
|||
* passed to the `source-map` library. |
|||
*/ |
|||
export let allMappings: (map: GenMapping) => Mapping[]; |
|||
|
|||
// This split declaration is only so that terser can elminiate the static initialization block.
|
|||
let addSegmentInternal: <S extends string | null | undefined>( |
|||
skipable: boolean, |
|||
map: GenMapping, |
|||
genLine: number, |
|||
genColumn: number, |
|||
source: S, |
|||
sourceLine: S extends string ? number : null | undefined, |
|||
sourceColumn: S extends string ? number : null | undefined, |
|||
name: S extends string ? string | null | undefined : null | undefined, |
|||
content: S extends string ? string | null | undefined : null | undefined, |
|||
) => void; |
|||
|
|||
/** |
|||
* Provides the state to generate a sourcemap. |
|||
*/ |
|||
export class GenMapping { |
|||
private _names = new SetArray(); |
|||
private _sources = new SetArray(); |
|||
private _sourcesContent: (string | null)[] = []; |
|||
private _mappings: SourceMapSegment[][] = []; |
|||
declare file: string | null | undefined; |
|||
declare sourceRoot: string | null | undefined; |
|||
|
|||
constructor({ file, sourceRoot }: Options = {}) { |
|||
this.file = file; |
|||
this.sourceRoot = sourceRoot; |
|||
} |
|||
|
|||
static { |
|||
addSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => { |
|||
return addSegmentInternal( |
|||
false, |
|||
map, |
|||
genLine, |
|||
genColumn, |
|||
source, |
|||
sourceLine, |
|||
sourceColumn, |
|||
name, |
|||
content, |
|||
); |
|||
}; |
|||
|
|||
maybeAddSegment = ( |
|||
map, |
|||
genLine, |
|||
genColumn, |
|||
source, |
|||
sourceLine, |
|||
sourceColumn, |
|||
name, |
|||
content, |
|||
) => { |
|||
return addSegmentInternal( |
|||
true, |
|||
map, |
|||
genLine, |
|||
genColumn, |
|||
source, |
|||
sourceLine, |
|||
sourceColumn, |
|||
name, |
|||
content, |
|||
); |
|||
}; |
|||
|
|||
addMapping = (map, mapping) => { |
|||
return addMappingInternal(false, map, mapping as Parameters<typeof addMappingInternal>[2]); |
|||
}; |
|||
|
|||
maybeAddMapping = (map, mapping) => { |
|||
return addMappingInternal(true, map, mapping as Parameters<typeof addMappingInternal>[2]); |
|||
}; |
|||
|
|||
setSourceContent = (map, source, content) => { |
|||
const { _sources: sources, _sourcesContent: sourcesContent } = map; |
|||
sourcesContent[put(sources, source)] = content; |
|||
}; |
|||
|
|||
toDecodedMap = (map) => { |
|||
const { |
|||
file, |
|||
sourceRoot, |
|||
_mappings: mappings, |
|||
_sources: sources, |
|||
_sourcesContent: sourcesContent, |
|||
_names: names, |
|||
} = map; |
|||
removeEmptyFinalLines(mappings); |
|||
|
|||
return { |
|||
version: 3, |
|||
file: file || undefined, |
|||
names: names.array, |
|||
sourceRoot: sourceRoot || undefined, |
|||
sources: sources.array, |
|||
sourcesContent, |
|||
mappings, |
|||
}; |
|||
}; |
|||
|
|||
toEncodedMap = (map) => { |
|||
const decoded = toDecodedMap(map); |
|||
return { |
|||
...decoded, |
|||
mappings: encode(decoded.mappings as SourceMapSegment[][]), |
|||
}; |
|||
}; |
|||
|
|||
allMappings = (map) => { |
|||
const out: Mapping[] = []; |
|||
const { _mappings: mappings, _sources: sources, _names: names } = map; |
|||
|
|||
for (let i = 0; i < mappings.length; i++) { |
|||
const line = mappings[i]; |
|||
for (let j = 0; j < line.length; j++) { |
|||
const seg = line[j]; |
|||
|
|||
const generated = { line: i + 1, column: seg[COLUMN] }; |
|||
let source: string | undefined = undefined; |
|||
let original: Pos | undefined = undefined; |
|||
let name: string | undefined = undefined; |
|||
|
|||
if (seg.length !== 1) { |
|||
source = sources.array[seg[SOURCES_INDEX]]; |
|||
original = { line: seg[SOURCE_LINE] + 1, column: seg[SOURCE_COLUMN] }; |
|||
|
|||
if (seg.length === 5) name = names.array[seg[NAMES_INDEX]]; |
|||
} |
|||
|
|||
out.push({ generated, source, original, name } as Mapping); |
|||
} |
|||
} |
|||
|
|||
return out; |
|||
}; |
|||
|
|||
fromMap = (input) => { |
|||
const map = new TraceMap(input); |
|||
const gen = new GenMapping({ file: map.file, sourceRoot: map.sourceRoot }); |
|||
|
|||
putAll(gen._names, map.names); |
|||
putAll(gen._sources, map.sources as string[]); |
|||
gen._sourcesContent = map.sourcesContent || map.sources.map(() => null); |
|||
gen._mappings = decodedMappings(map) as GenMapping['_mappings']; |
|||
|
|||
return gen; |
|||
}; |
|||
|
|||
// Internal helpers
|
|||
addSegmentInternal = ( |
|||
skipable, |
|||
map, |
|||
genLine, |
|||
genColumn, |
|||
source, |
|||
sourceLine, |
|||
sourceColumn, |
|||
name, |
|||
content, |
|||
) => { |
|||
const { |
|||
_mappings: mappings, |
|||
_sources: sources, |
|||
_sourcesContent: sourcesContent, |
|||
_names: names, |
|||
} = map; |
|||
const line = getLine(mappings, genLine); |
|||
const index = getColumnIndex(line, genColumn); |
|||
|
|||
if (!source) { |
|||
if (skipable && skipSourceless(line, index)) return; |
|||
return insert(line, index, [genColumn]); |
|||
} |
|||
|
|||
// Sigh, TypeScript can't figure out sourceLine and sourceColumn aren't nullish if source
|
|||
// isn't nullish.
|
|||
assert<number>(sourceLine); |
|||
assert<number>(sourceColumn); |
|||
|
|||
const sourcesIndex = put(sources, source); |
|||
const namesIndex = name ? put(names, name) : NO_NAME; |
|||
if (sourcesIndex === sourcesContent.length) sourcesContent[sourcesIndex] = content ?? null; |
|||
|
|||
if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) { |
|||
return; |
|||
} |
|||
|
|||
return insert( |
|||
line, |
|||
index, |
|||
name |
|||
? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex] |
|||
: [genColumn, sourcesIndex, sourceLine, sourceColumn], |
|||
); |
|||
}; |
|||
} |
|||
} |
|||
|
|||
function assert<T>(_val: unknown): asserts _val is T { |
|||
// noop.
|
|||
} |
|||
|
|||
function getLine(mappings: SourceMapSegment[][], index: number): SourceMapSegment[] { |
|||
for (let i = mappings.length; i <= index; i++) { |
|||
mappings[i] = []; |
|||
} |
|||
return mappings[index]; |
|||
} |
|||
|
|||
function getColumnIndex(line: SourceMapSegment[], genColumn: number): number { |
|||
let index = line.length; |
|||
for (let i = index - 1; i >= 0; index = i--) { |
|||
const current = line[i]; |
|||
if (genColumn >= current[COLUMN]) break; |
|||
} |
|||
return index; |
|||
} |
|||
|
|||
function insert<T>(array: T[], index: number, value: T) { |
|||
for (let i = array.length; i > index; i--) { |
|||
array[i] = array[i - 1]; |
|||
} |
|||
array[index] = value; |
|||
} |
|||
|
|||
function removeEmptyFinalLines(mappings: SourceMapSegment[][]) { |
|||
const { length } = mappings; |
|||
let len = length; |
|||
for (let i = len - 1; i >= 0; len = i, i--) { |
|||
if (mappings[i].length > 0) break; |
|||
} |
|||
if (len < length) mappings.length = len; |
|||
} |
|||
|
|||
function putAll(strarr: SetArray, array: string[]) { |
|||
for (let i = 0; i < array.length; i++) put(strarr, array[i]); |
|||
} |
|||
|
|||
function skipSourceless(line: SourceMapSegment[], index: number): boolean { |
|||
// The start of a line is already sourceless, so adding a sourceless segment to the beginning
|
|||
// doesn't generate any useful information.
|
|||
if (index === 0) return true; |
|||
|
|||
const prev = line[index - 1]; |
|||
// If the previous segment is also sourceless, then adding another sourceless segment doesn't
|
|||
// genrate any new information. Else, this segment will end the source/named segment and point to
|
|||
// a sourceless position, which is useful.
|
|||
return prev.length === 1; |
|||
} |
|||
|
|||
function skipSource( |
|||
line: SourceMapSegment[], |
|||
index: number, |
|||
sourcesIndex: number, |
|||
sourceLine: number, |
|||
sourceColumn: number, |
|||
namesIndex: number, |
|||
): boolean { |
|||
// A source/named segment at the start of a line gives position at that genColumn
|
|||
if (index === 0) return false; |
|||
|
|||
const prev = line[index - 1]; |
|||
|
|||
// If the previous segment is sourceless, then we're transitioning to a source.
|
|||
if (prev.length === 1) return false; |
|||
|
|||
// If the previous segment maps to the exact same source position, then this segment doesn't
|
|||
// provide any new position information.
|
|||
return ( |
|||
sourcesIndex === prev[SOURCES_INDEX] && |
|||
sourceLine === prev[SOURCE_LINE] && |
|||
sourceColumn === prev[SOURCE_COLUMN] && |
|||
namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME) |
|||
); |
|||
} |
|||
|
|||
function addMappingInternal<S extends string | null | undefined>( |
|||
skipable: boolean, |
|||
map: GenMapping, |
|||
mapping: { |
|||
generated: Pos; |
|||
source: S; |
|||
original: S extends string ? Pos : null | undefined; |
|||
name: S extends string ? string | null | undefined : null | undefined; |
|||
content: S extends string ? string | null | undefined : null | undefined; |
|||
}, |
|||
) { |
|||
const { generated, source, original, name, content } = mapping; |
|||
if (!source) { |
|||
return addSegmentInternal( |
|||
skipable, |
|||
map, |
|||
generated.line - 1, |
|||
generated.column, |
|||
null, |
|||
null, |
|||
null, |
|||
null, |
|||
null, |
|||
); |
|||
} |
|||
const s: string = source; |
|||
assert<Pos>(original); |
|||
return addSegmentInternal( |
|||
skipable, |
|||
map, |
|||
generated.line - 1, |
|||
generated.column, |
|||
s, |
|||
original.line - 1, |
|||
original.column, |
|||
name, |
|||
content, |
|||
); |
|||
} |
|||
@ -0,0 +1,16 @@ |
|||
type GeneratedColumn = number; |
|||
type SourcesIndex = number; |
|||
type SourceLine = number; |
|||
type SourceColumn = number; |
|||
type NamesIndex = number; |
|||
|
|||
export type SourceMapSegment = |
|||
| [GeneratedColumn] |
|||
| [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn] |
|||
| [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex]; |
|||
|
|||
export const COLUMN = 0; |
|||
export const SOURCES_INDEX = 1; |
|||
export const SOURCE_LINE = 2; |
|||
export const SOURCE_COLUMN = 3; |
|||
export const NAMES_INDEX = 4; |
|||
@ -0,0 +1,43 @@ |
|||
import type { SourceMapSegment } from './sourcemap-segment'; |
|||
|
|||
export interface SourceMapV3 { |
|||
file?: string | null; |
|||
names: readonly string[]; |
|||
sourceRoot?: string; |
|||
sources: readonly (string | null)[]; |
|||
sourcesContent?: readonly (string | null)[]; |
|||
version: 3; |
|||
} |
|||
|
|||
export interface EncodedSourceMap extends SourceMapV3 { |
|||
mappings: string; |
|||
} |
|||
|
|||
export interface DecodedSourceMap extends SourceMapV3 { |
|||
mappings: readonly SourceMapSegment[][]; |
|||
} |
|||
|
|||
export interface Pos { |
|||
line: number; |
|||
column: number; |
|||
} |
|||
|
|||
export type Mapping = |
|||
| { |
|||
generated: Pos; |
|||
source: undefined; |
|||
original: undefined; |
|||
name: undefined; |
|||
} |
|||
| { |
|||
generated: Pos; |
|||
source: string; |
|||
original: Pos; |
|||
name: string; |
|||
} |
|||
| { |
|||
generated: Pos; |
|||
source: string; |
|||
original: Pos; |
|||
name: undefined; |
|||
}; |
|||
@ -0,0 +1,19 @@ |
|||
Copyright 2019 Justin Ridgewell <jridgewell@google.com> |
|||
|
|||
Permission is hereby granted, free of charge, to any person obtaining a copy |
|||
of this software and associated documentation files (the "Software"), to deal |
|||
in the Software without restriction, including without limitation the rights |
|||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell |
|||
copies of the Software, and to permit persons to whom the Software is |
|||
furnished to do so, subject to the following conditions: |
|||
|
|||
The above copyright notice and this permission notice shall be included in |
|||
all copies or substantial portions of the Software. |
|||
|
|||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
|||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
|||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE |
|||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
|||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |
|||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE |
|||
SOFTWARE. |
|||
@ -0,0 +1,40 @@ |
|||
# @jridgewell/resolve-uri |
|||
|
|||
> Resolve a URI relative to an optional base URI |
|||
|
|||
Resolve any combination of absolute URIs, protocol-realtive URIs, absolute paths, or relative paths. |
|||
|
|||
## Installation |
|||
|
|||
```sh |
|||
npm install @jridgewell/resolve-uri |
|||
``` |
|||
|
|||
## Usage |
|||
|
|||
```typescript |
|||
function resolve(input: string, base?: string): string; |
|||
``` |
|||
|
|||
```js |
|||
import resolve from '@jridgewell/resolve-uri'; |
|||
|
|||
resolve('foo', 'https://example.com'); // => 'https://example.com/foo' |
|||
``` |
|||
|
|||
| Input | Base | Resolution | Explanation | |
|||
|-----------------------|-------------------------|--------------------------------|--------------------------------------------------------------| |
|||
| `https://example.com` | _any_ | `https://example.com/` | Input is normalized only | |
|||
| `//example.com` | `https://base.com/` | `https://example.com/` | Input inherits the base's protocol | |
|||
| `//example.com` | _rest_ | `//example.com/` | Input is normalized only | |
|||
| `/example` | `https://base.com/` | `https://base.com/example` | Input inherits the base's origin | |
|||
| `/example` | `//base.com/` | `//base.com/example` | Input inherits the base's host and remains protocol relative | |
|||
| `/example` | _rest_ | `/example` | Input is normalized only | |
|||
| `example` | `https://base.com/dir/` | `https://base.com/dir/example` | Input is joined with the base | |
|||
| `example` | `https://base.com/file` | `https://base.com/example` | Input is joined with the base without its file | |
|||
| `example` | `//base.com/dir/` | `//base.com/dir/example` | Input is joined with the base's last directory | |
|||
| `example` | `//base.com/file` | `//base.com/example` | Input is joined with the base without its file | |
|||
| `example` | `/base/dir/` | `/base/dir/example` | Input is joined with the base's last directory | |
|||
| `example` | `/base/file` | `/base/example` | Input is joined with the base without its file | |
|||
| `example` | `base/dir/` | `base/dir/example` | Input is joined with the base's last directory | |
|||
| `example` | `base/file` | `base/example` | Input is joined with the base without its file | |
|||
@ -0,0 +1,242 @@ |
|||
// Matches the scheme of a URL, eg "http://"
|
|||
const schemeRegex = /^[\w+.-]+:\/\//; |
|||
/** |
|||
* Matches the parts of a URL: |
|||
* 1. Scheme, including ":", guaranteed. |
|||
* 2. User/password, including "@", optional. |
|||
* 3. Host, guaranteed. |
|||
* 4. Port, including ":", optional. |
|||
* 5. Path, including "/", optional. |
|||
* 6. Query, including "?", optional. |
|||
* 7. Hash, including "#", optional. |
|||
*/ |
|||
const urlRegex = /^([\w+.-]+:)\/\/([^@/#?]*@)?([^:/#?]*)(:\d+)?(\/[^#?]*)?(\?[^#]*)?(#.*)?/; |
|||
/** |
|||
* File URLs are weird. They dont' need the regular `//` in the scheme, they may or may not start |
|||
* with a leading `/`, they can have a domain (but only if they don't start with a Windows drive). |
|||
* |
|||
* 1. Host, optional. |
|||
* 2. Path, which may include "/", guaranteed. |
|||
* 3. Query, including "?", optional. |
|||
* 4. Hash, including "#", optional. |
|||
*/ |
|||
const fileRegex = /^file:(?:\/\/((?![a-z]:)[^/#?]*)?)?(\/?[^#?]*)(\?[^#]*)?(#.*)?/i; |
|||
var UrlType; |
|||
(function (UrlType) { |
|||
UrlType[UrlType["Empty"] = 1] = "Empty"; |
|||
UrlType[UrlType["Hash"] = 2] = "Hash"; |
|||
UrlType[UrlType["Query"] = 3] = "Query"; |
|||
UrlType[UrlType["RelativePath"] = 4] = "RelativePath"; |
|||
UrlType[UrlType["AbsolutePath"] = 5] = "AbsolutePath"; |
|||
UrlType[UrlType["SchemeRelative"] = 6] = "SchemeRelative"; |
|||
UrlType[UrlType["Absolute"] = 7] = "Absolute"; |
|||
})(UrlType || (UrlType = {})); |
|||
function isAbsoluteUrl(input) { |
|||
return schemeRegex.test(input); |
|||
} |
|||
function isSchemeRelativeUrl(input) { |
|||
return input.startsWith('//'); |
|||
} |
|||
function isAbsolutePath(input) { |
|||
return input.startsWith('/'); |
|||
} |
|||
function isFileUrl(input) { |
|||
return input.startsWith('file:'); |
|||
} |
|||
function isRelative(input) { |
|||
return /^[.?#]/.test(input); |
|||
} |
|||
function parseAbsoluteUrl(input) { |
|||
const match = urlRegex.exec(input); |
|||
return makeUrl(match[1], match[2] || '', match[3], match[4] || '', match[5] || '/', match[6] || '', match[7] || ''); |
|||
} |
|||
function parseFileUrl(input) { |
|||
const match = fileRegex.exec(input); |
|||
const path = match[2]; |
|||
return makeUrl('file:', '', match[1] || '', '', isAbsolutePath(path) ? path : '/' + path, match[3] || '', match[4] || ''); |
|||
} |
|||
function makeUrl(scheme, user, host, port, path, query, hash) { |
|||
return { |
|||
scheme, |
|||
user, |
|||
host, |
|||
port, |
|||
path, |
|||
query, |
|||
hash, |
|||
type: UrlType.Absolute, |
|||
}; |
|||
} |
|||
function parseUrl(input) { |
|||
if (isSchemeRelativeUrl(input)) { |
|||
const url = parseAbsoluteUrl('http:' + input); |
|||
url.scheme = ''; |
|||
url.type = UrlType.SchemeRelative; |
|||
return url; |
|||
} |
|||
if (isAbsolutePath(input)) { |
|||
const url = parseAbsoluteUrl('http://foo.com' + input); |
|||
url.scheme = ''; |
|||
url.host = ''; |
|||
url.type = UrlType.AbsolutePath; |
|||
return url; |
|||
} |
|||
if (isFileUrl(input)) |
|||
return parseFileUrl(input); |
|||
if (isAbsoluteUrl(input)) |
|||
return parseAbsoluteUrl(input); |
|||
const url = parseAbsoluteUrl('http://foo.com/' + input); |
|||
url.scheme = ''; |
|||
url.host = ''; |
|||
url.type = input |
|||
? input.startsWith('?') |
|||
? UrlType.Query |
|||
: input.startsWith('#') |
|||
? UrlType.Hash |
|||
: UrlType.RelativePath |
|||
: UrlType.Empty; |
|||
return url; |
|||
} |
|||
function stripPathFilename(path) { |
|||
// If a path ends with a parent directory "..", then it's a relative path with excess parent
|
|||
// paths. It's not a file, so we can't strip it.
|
|||
if (path.endsWith('/..')) |
|||
return path; |
|||
const index = path.lastIndexOf('/'); |
|||
return path.slice(0, index + 1); |
|||
} |
|||
function mergePaths(url, base) { |
|||
normalizePath(base, base.type); |
|||
// If the path is just a "/", then it was an empty path to begin with (remember, we're a relative
|
|||
// path).
|
|||
if (url.path === '/') { |
|||
url.path = base.path; |
|||
} |
|||
else { |
|||
// Resolution happens relative to the base path's directory, not the file.
|
|||
url.path = stripPathFilename(base.path) + url.path; |
|||
} |
|||
} |
|||
/** |
|||
* The path can have empty directories "//", unneeded parents "foo/..", or current directory |
|||
* "foo/.". We need to normalize to a standard representation. |
|||
*/ |
|||
function normalizePath(url, type) { |
|||
const rel = type <= UrlType.RelativePath; |
|||
const pieces = url.path.split('/'); |
|||
// We need to preserve the first piece always, so that we output a leading slash. The item at
|
|||
// pieces[0] is an empty string.
|
|||
let pointer = 1; |
|||
// Positive is the number of real directories we've output, used for popping a parent directory.
|
|||
// Eg, "foo/bar/.." will have a positive 2, and we can decrement to be left with just "foo".
|
|||
let positive = 0; |
|||
// We need to keep a trailing slash if we encounter an empty directory (eg, splitting "foo/" will
|
|||
// generate `["foo", ""]` pieces). And, if we pop a parent directory. But once we encounter a
|
|||
// real directory, we won't need to append, unless the other conditions happen again.
|
|||
let addTrailingSlash = false; |
|||
for (let i = 1; i < pieces.length; i++) { |
|||
const piece = pieces[i]; |
|||
// An empty directory, could be a trailing slash, or just a double "//" in the path.
|
|||
if (!piece) { |
|||
addTrailingSlash = true; |
|||
continue; |
|||
} |
|||
// If we encounter a real directory, then we don't need to append anymore.
|
|||
addTrailingSlash = false; |
|||
// A current directory, which we can always drop.
|
|||
if (piece === '.') |
|||
continue; |
|||
// A parent directory, we need to see if there are any real directories we can pop. Else, we
|
|||
// have an excess of parents, and we'll need to keep the "..".
|
|||
if (piece === '..') { |
|||
if (positive) { |
|||
addTrailingSlash = true; |
|||
positive--; |
|||
pointer--; |
|||
} |
|||
else if (rel) { |
|||
// If we're in a relativePath, then we need to keep the excess parents. Else, in an absolute
|
|||
// URL, protocol relative URL, or an absolute path, we don't need to keep excess.
|
|||
pieces[pointer++] = piece; |
|||
} |
|||
continue; |
|||
} |
|||
// We've encountered a real directory. Move it to the next insertion pointer, which accounts for
|
|||
// any popped or dropped directories.
|
|||
pieces[pointer++] = piece; |
|||
positive++; |
|||
} |
|||
let path = ''; |
|||
for (let i = 1; i < pointer; i++) { |
|||
path += '/' + pieces[i]; |
|||
} |
|||
if (!path || (addTrailingSlash && !path.endsWith('/..'))) { |
|||
path += '/'; |
|||
} |
|||
url.path = path; |
|||
} |
|||
/** |
|||
* Attempts to resolve `input` URL/path relative to `base`. |
|||
*/ |
|||
function resolve(input, base) { |
|||
if (!input && !base) |
|||
return ''; |
|||
const url = parseUrl(input); |
|||
let inputType = url.type; |
|||
if (base && inputType !== UrlType.Absolute) { |
|||
const baseUrl = parseUrl(base); |
|||
const baseType = baseUrl.type; |
|||
switch (inputType) { |
|||
case UrlType.Empty: |
|||
url.hash = baseUrl.hash; |
|||
// fall through
|
|||
case UrlType.Hash: |
|||
url.query = baseUrl.query; |
|||
// fall through
|
|||
case UrlType.Query: |
|||
case UrlType.RelativePath: |
|||
mergePaths(url, baseUrl); |
|||
// fall through
|
|||
case UrlType.AbsolutePath: |
|||
// The host, user, and port are joined, you can't copy one without the others.
|
|||
url.user = baseUrl.user; |
|||
url.host = baseUrl.host; |
|||
url.port = baseUrl.port; |
|||
// fall through
|
|||
case UrlType.SchemeRelative: |
|||
// The input doesn't have a schema at least, so we need to copy at least that over.
|
|||
url.scheme = baseUrl.scheme; |
|||
} |
|||
if (baseType > inputType) |
|||
inputType = baseType; |
|||
} |
|||
normalizePath(url, inputType); |
|||
const queryHash = url.query + url.hash; |
|||
switch (inputType) { |
|||
// This is impossible, because of the empty checks at the start of the function.
|
|||
// case UrlType.Empty:
|
|||
case UrlType.Hash: |
|||
case UrlType.Query: |
|||
return queryHash; |
|||
case UrlType.RelativePath: { |
|||
// The first char is always a "/", and we need it to be relative.
|
|||
const path = url.path.slice(1); |
|||
if (!path) |
|||
return queryHash || '.'; |
|||
if (isRelative(base || input) && !isRelative(path)) { |
|||
// If base started with a leading ".", or there is no base and input started with a ".",
|
|||
// then we need to ensure that the relative path starts with a ".". We don't know if
|
|||
// relative starts with a "..", though, so check before prepending.
|
|||
return './' + path + queryHash; |
|||
} |
|||
return path + queryHash; |
|||
} |
|||
case UrlType.AbsolutePath: |
|||
return url.path + queryHash; |
|||
default: |
|||
return url.scheme + '//' + url.user + url.host + url.port + url.path + queryHash; |
|||
} |
|||
} |
|||
|
|||
export { resolve as default }; |
|||
//# sourceMappingURL=resolve-uri.mjs.map
|
|||
File diff suppressed because one or more lines are too long
@ -0,0 +1,250 @@ |
|||
(function (global, factory) { |
|||
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() : |
|||
typeof define === 'function' && define.amd ? define(factory) : |
|||
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, global.resolveURI = factory()); |
|||
})(this, (function () { 'use strict'; |
|||
|
|||
// Matches the scheme of a URL, eg "http://"
|
|||
const schemeRegex = /^[\w+.-]+:\/\//; |
|||
/** |
|||
* Matches the parts of a URL: |
|||
* 1. Scheme, including ":", guaranteed. |
|||
* 2. User/password, including "@", optional. |
|||
* 3. Host, guaranteed. |
|||
* 4. Port, including ":", optional. |
|||
* 5. Path, including "/", optional. |
|||
* 6. Query, including "?", optional. |
|||
* 7. Hash, including "#", optional. |
|||
*/ |
|||
const urlRegex = /^([\w+.-]+:)\/\/([^@/#?]*@)?([^:/#?]*)(:\d+)?(\/[^#?]*)?(\?[^#]*)?(#.*)?/; |
|||
/** |
|||
* File URLs are weird. They dont' need the regular `//` in the scheme, they may or may not start |
|||
* with a leading `/`, they can have a domain (but only if they don't start with a Windows drive). |
|||
* |
|||
* 1. Host, optional. |
|||
* 2. Path, which may include "/", guaranteed. |
|||
* 3. Query, including "?", optional. |
|||
* 4. Hash, including "#", optional. |
|||
*/ |
|||
const fileRegex = /^file:(?:\/\/((?![a-z]:)[^/#?]*)?)?(\/?[^#?]*)(\?[^#]*)?(#.*)?/i; |
|||
var UrlType; |
|||
(function (UrlType) { |
|||
UrlType[UrlType["Empty"] = 1] = "Empty"; |
|||
UrlType[UrlType["Hash"] = 2] = "Hash"; |
|||
UrlType[UrlType["Query"] = 3] = "Query"; |
|||
UrlType[UrlType["RelativePath"] = 4] = "RelativePath"; |
|||
UrlType[UrlType["AbsolutePath"] = 5] = "AbsolutePath"; |
|||
UrlType[UrlType["SchemeRelative"] = 6] = "SchemeRelative"; |
|||
UrlType[UrlType["Absolute"] = 7] = "Absolute"; |
|||
})(UrlType || (UrlType = {})); |
|||
function isAbsoluteUrl(input) { |
|||
return schemeRegex.test(input); |
|||
} |
|||
function isSchemeRelativeUrl(input) { |
|||
return input.startsWith('//'); |
|||
} |
|||
function isAbsolutePath(input) { |
|||
return input.startsWith('/'); |
|||
} |
|||
function isFileUrl(input) { |
|||
return input.startsWith('file:'); |
|||
} |
|||
function isRelative(input) { |
|||
return /^[.?#]/.test(input); |
|||
} |
|||
function parseAbsoluteUrl(input) { |
|||
const match = urlRegex.exec(input); |
|||
return makeUrl(match[1], match[2] || '', match[3], match[4] || '', match[5] || '/', match[6] || '', match[7] || ''); |
|||
} |
|||
function parseFileUrl(input) { |
|||
const match = fileRegex.exec(input); |
|||
const path = match[2]; |
|||
return makeUrl('file:', '', match[1] || '', '', isAbsolutePath(path) ? path : '/' + path, match[3] || '', match[4] || ''); |
|||
} |
|||
function makeUrl(scheme, user, host, port, path, query, hash) { |
|||
return { |
|||
scheme, |
|||
user, |
|||
host, |
|||
port, |
|||
path, |
|||
query, |
|||
hash, |
|||
type: UrlType.Absolute, |
|||
}; |
|||
} |
|||
function parseUrl(input) { |
|||
if (isSchemeRelativeUrl(input)) { |
|||
const url = parseAbsoluteUrl('http:' + input); |
|||
url.scheme = ''; |
|||
url.type = UrlType.SchemeRelative; |
|||
return url; |
|||
} |
|||
if (isAbsolutePath(input)) { |
|||
const url = parseAbsoluteUrl('http://foo.com' + input); |
|||
url.scheme = ''; |
|||
url.host = ''; |
|||
url.type = UrlType.AbsolutePath; |
|||
return url; |
|||
} |
|||
if (isFileUrl(input)) |
|||
return parseFileUrl(input); |
|||
if (isAbsoluteUrl(input)) |
|||
return parseAbsoluteUrl(input); |
|||
const url = parseAbsoluteUrl('http://foo.com/' + input); |
|||
url.scheme = ''; |
|||
url.host = ''; |
|||
url.type = input |
|||
? input.startsWith('?') |
|||
? UrlType.Query |
|||
: input.startsWith('#') |
|||
? UrlType.Hash |
|||
: UrlType.RelativePath |
|||
: UrlType.Empty; |
|||
return url; |
|||
} |
|||
function stripPathFilename(path) { |
|||
// If a path ends with a parent directory "..", then it's a relative path with excess parent
|
|||
// paths. It's not a file, so we can't strip it.
|
|||
if (path.endsWith('/..')) |
|||
return path; |
|||
const index = path.lastIndexOf('/'); |
|||
return path.slice(0, index + 1); |
|||
} |
|||
function mergePaths(url, base) { |
|||
normalizePath(base, base.type); |
|||
// If the path is just a "/", then it was an empty path to begin with (remember, we're a relative
|
|||
// path).
|
|||
if (url.path === '/') { |
|||
url.path = base.path; |
|||
} |
|||
else { |
|||
// Resolution happens relative to the base path's directory, not the file.
|
|||
url.path = stripPathFilename(base.path) + url.path; |
|||
} |
|||
} |
|||
/** |
|||
* The path can have empty directories "//", unneeded parents "foo/..", or current directory |
|||
* "foo/.". We need to normalize to a standard representation. |
|||
*/ |
|||
function normalizePath(url, type) { |
|||
const rel = type <= UrlType.RelativePath; |
|||
const pieces = url.path.split('/'); |
|||
// We need to preserve the first piece always, so that we output a leading slash. The item at
|
|||
// pieces[0] is an empty string.
|
|||
let pointer = 1; |
|||
// Positive is the number of real directories we've output, used for popping a parent directory.
|
|||
// Eg, "foo/bar/.." will have a positive 2, and we can decrement to be left with just "foo".
|
|||
let positive = 0; |
|||
// We need to keep a trailing slash if we encounter an empty directory (eg, splitting "foo/" will
|
|||
// generate `["foo", ""]` pieces). And, if we pop a parent directory. But once we encounter a
|
|||
// real directory, we won't need to append, unless the other conditions happen again.
|
|||
let addTrailingSlash = false; |
|||
for (let i = 1; i < pieces.length; i++) { |
|||
const piece = pieces[i]; |
|||
// An empty directory, could be a trailing slash, or just a double "//" in the path.
|
|||
if (!piece) { |
|||
addTrailingSlash = true; |
|||
continue; |
|||
} |
|||
// If we encounter a real directory, then we don't need to append anymore.
|
|||
addTrailingSlash = false; |
|||
// A current directory, which we can always drop.
|
|||
if (piece === '.') |
|||
continue; |
|||
// A parent directory, we need to see if there are any real directories we can pop. Else, we
|
|||
// have an excess of parents, and we'll need to keep the "..".
|
|||
if (piece === '..') { |
|||
if (positive) { |
|||
addTrailingSlash = true; |
|||
positive--; |
|||
pointer--; |
|||
} |
|||
else if (rel) { |
|||
// If we're in a relativePath, then we need to keep the excess parents. Else, in an absolute
|
|||
// URL, protocol relative URL, or an absolute path, we don't need to keep excess.
|
|||
pieces[pointer++] = piece; |
|||
} |
|||
continue; |
|||
} |
|||
// We've encountered a real directory. Move it to the next insertion pointer, which accounts for
|
|||
// any popped or dropped directories.
|
|||
pieces[pointer++] = piece; |
|||
positive++; |
|||
} |
|||
let path = ''; |
|||
for (let i = 1; i < pointer; i++) { |
|||
path += '/' + pieces[i]; |
|||
} |
|||
if (!path || (addTrailingSlash && !path.endsWith('/..'))) { |
|||
path += '/'; |
|||
} |
|||
url.path = path; |
|||
} |
|||
/** |
|||
* Attempts to resolve `input` URL/path relative to `base`. |
|||
*/ |
|||
function resolve(input, base) { |
|||
if (!input && !base) |
|||
return ''; |
|||
const url = parseUrl(input); |
|||
let inputType = url.type; |
|||
if (base && inputType !== UrlType.Absolute) { |
|||
const baseUrl = parseUrl(base); |
|||
const baseType = baseUrl.type; |
|||
switch (inputType) { |
|||
case UrlType.Empty: |
|||
url.hash = baseUrl.hash; |
|||
// fall through
|
|||
case UrlType.Hash: |
|||
url.query = baseUrl.query; |
|||
// fall through
|
|||
case UrlType.Query: |
|||
case UrlType.RelativePath: |
|||
mergePaths(url, baseUrl); |
|||
// fall through
|
|||
case UrlType.AbsolutePath: |
|||
// The host, user, and port are joined, you can't copy one without the others.
|
|||
url.user = baseUrl.user; |
|||
url.host = baseUrl.host; |
|||
url.port = baseUrl.port; |
|||
// fall through
|
|||
case UrlType.SchemeRelative: |
|||
// The input doesn't have a schema at least, so we need to copy at least that over.
|
|||
url.scheme = baseUrl.scheme; |
|||
} |
|||
if (baseType > inputType) |
|||
inputType = baseType; |
|||
} |
|||
normalizePath(url, inputType); |
|||
const queryHash = url.query + url.hash; |
|||
switch (inputType) { |
|||
// This is impossible, because of the empty checks at the start of the function.
|
|||
// case UrlType.Empty:
|
|||
case UrlType.Hash: |
|||
case UrlType.Query: |
|||
return queryHash; |
|||
case UrlType.RelativePath: { |
|||
// The first char is always a "/", and we need it to be relative.
|
|||
const path = url.path.slice(1); |
|||
if (!path) |
|||
return queryHash || '.'; |
|||
if (isRelative(base || input) && !isRelative(path)) { |
|||
// If base started with a leading ".", or there is no base and input started with a ".",
|
|||
// then we need to ensure that the relative path starts with a ".". We don't know if
|
|||
// relative starts with a "..", though, so check before prepending.
|
|||
return './' + path + queryHash; |
|||
} |
|||
return path + queryHash; |
|||
} |
|||
case UrlType.AbsolutePath: |
|||
return url.path + queryHash; |
|||
default: |
|||
return url.scheme + '//' + url.user + url.host + url.port + url.path + queryHash; |
|||
} |
|||
} |
|||
|
|||
return resolve; |
|||
|
|||
})); |
|||
//# sourceMappingURL=resolve-uri.umd.js.map
|
|||
File diff suppressed because one or more lines are too long
@ -0,0 +1,4 @@ |
|||
/** |
|||
* Attempts to resolve `input` URL/path relative to `base`. |
|||
*/ |
|||
export default function resolve(input: string, base: string | undefined): string; |
|||
@ -0,0 +1,69 @@ |
|||
{ |
|||
"name": "@jridgewell/resolve-uri", |
|||
"version": "3.1.0", |
|||
"description": "Resolve a URI relative to an optional base URI", |
|||
"keywords": [ |
|||
"resolve", |
|||
"uri", |
|||
"url", |
|||
"path" |
|||
], |
|||
"author": "Justin Ridgewell <justin@ridgewell.name>", |
|||
"license": "MIT", |
|||
"repository": "https://github.com/jridgewell/resolve-uri", |
|||
"main": "dist/resolve-uri.umd.js", |
|||
"module": "dist/resolve-uri.mjs", |
|||
"typings": "dist/types/resolve-uri.d.ts", |
|||
"exports": { |
|||
".": [ |
|||
{ |
|||
"types": "./dist/types/resolve-uri.d.ts", |
|||
"browser": "./dist/resolve-uri.umd.js", |
|||
"require": "./dist/resolve-uri.umd.js", |
|||
"import": "./dist/resolve-uri.mjs" |
|||
}, |
|||
"./dist/resolve-uri.umd.js" |
|||
], |
|||
"./package.json": "./package.json" |
|||
}, |
|||
"files": [ |
|||
"dist" |
|||
], |
|||
"engines": { |
|||
"node": ">=6.0.0" |
|||
}, |
|||
"scripts": { |
|||
"prebuild": "rm -rf dist", |
|||
"build": "run-s -n build:*", |
|||
"build:rollup": "rollup -c rollup.config.js", |
|||
"build:ts": "tsc --project tsconfig.build.json", |
|||
"lint": "run-s -n lint:*", |
|||
"lint:prettier": "npm run test:lint:prettier -- --write", |
|||
"lint:ts": "npm run test:lint:ts -- --fix", |
|||
"pretest": "run-s build:rollup", |
|||
"test": "run-s -n test:lint test:only", |
|||
"test:debug": "mocha --inspect-brk", |
|||
"test:lint": "run-s -n test:lint:*", |
|||
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts'", |
|||
"test:lint:ts": "eslint '{src,test}/**/*.ts'", |
|||
"test:only": "mocha", |
|||
"test:coverage": "c8 mocha", |
|||
"test:watch": "mocha --watch", |
|||
"prepublishOnly": "npm run preversion", |
|||
"preversion": "run-s test build" |
|||
}, |
|||
"devDependencies": { |
|||
"@jridgewell/resolve-uri-latest": "npm:@jridgewell/resolve-uri@*", |
|||
"@rollup/plugin-typescript": "8.3.0", |
|||
"@typescript-eslint/eslint-plugin": "5.10.0", |
|||
"@typescript-eslint/parser": "5.10.0", |
|||
"c8": "7.11.0", |
|||
"eslint": "8.7.0", |
|||
"eslint-config-prettier": "8.3.0", |
|||
"mocha": "9.2.0", |
|||
"npm-run-all": "4.1.5", |
|||
"prettier": "2.5.1", |
|||
"rollup": "2.66.0", |
|||
"typescript": "4.5.5" |
|||
} |
|||
} |
|||
@ -0,0 +1,19 @@ |
|||
Copyright 2022 Justin Ridgewell <jridgewell@google.com> |
|||
|
|||
Permission is hereby granted, free of charge, to any person obtaining a copy |
|||
of this software and associated documentation files (the "Software"), to deal |
|||
in the Software without restriction, including without limitation the rights |
|||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell |
|||
copies of the Software, and to permit persons to whom the Software is |
|||
furnished to do so, subject to the following conditions: |
|||
|
|||
The above copyright notice and this permission notice shall be included in |
|||
all copies or substantial portions of the Software. |
|||
|
|||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
|||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
|||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE |
|||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
|||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |
|||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE |
|||
SOFTWARE. |
|||
@ -0,0 +1,37 @@ |
|||
# @jridgewell/set-array |
|||
|
|||
> Like a Set, but provides the index of the `key` in the backing array |
|||
|
|||
This is designed to allow synchronizing a second array with the contents of the backing array, like |
|||
how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`, and there |
|||
are never duplicates. |
|||
|
|||
## Installation |
|||
|
|||
```sh |
|||
npm install @jridgewell/set-array |
|||
``` |
|||
|
|||
## Usage |
|||
|
|||
```js |
|||
import { SetArray, get, put, pop } from '@jridgewell/set-array'; |
|||
|
|||
const sa = new SetArray(); |
|||
|
|||
let index = put(sa, 'first'); |
|||
assert.strictEqual(index, 0); |
|||
|
|||
index = put(sa, 'second'); |
|||
assert.strictEqual(index, 1); |
|||
|
|||
assert.deepEqual(sa.array, [ 'first', 'second' ]); |
|||
|
|||
index = get(sa, 'first'); |
|||
assert.strictEqual(index, 0); |
|||
|
|||
pop(sa); |
|||
index = get(sa, 'second'); |
|||
assert.strictEqual(index, undefined); |
|||
assert.deepEqual(sa.array, [ 'first' ]); |
|||
``` |
|||
@ -0,0 +1,48 @@ |
|||
/** |
|||
* Gets the index associated with `key` in the backing array, if it is already present. |
|||
*/ |
|||
let get; |
|||
/** |
|||
* Puts `key` into the backing array, if it is not already present. Returns |
|||
* the index of the `key` in the backing array. |
|||
*/ |
|||
let put; |
|||
/** |
|||
* Pops the last added item out of the SetArray. |
|||
*/ |
|||
let pop; |
|||
/** |
|||
* SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the |
|||
* index of the `key` in the backing array. |
|||
* |
|||
* This is designed to allow synchronizing a second array with the contents of the backing array, |
|||
* like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`, |
|||
* and there are never duplicates. |
|||
*/ |
|||
class SetArray { |
|||
constructor() { |
|||
this._indexes = { __proto__: null }; |
|||
this.array = []; |
|||
} |
|||
} |
|||
(() => { |
|||
get = (strarr, key) => strarr._indexes[key]; |
|||
put = (strarr, key) => { |
|||
// The key may or may not be present. If it is present, it's a number.
|
|||
const index = get(strarr, key); |
|||
if (index !== undefined) |
|||
return index; |
|||
const { array, _indexes: indexes } = strarr; |
|||
return (indexes[key] = array.push(key) - 1); |
|||
}; |
|||
pop = (strarr) => { |
|||
const { array, _indexes: indexes } = strarr; |
|||
if (array.length === 0) |
|||
return; |
|||
const last = array.pop(); |
|||
indexes[last] = undefined; |
|||
}; |
|||
})(); |
|||
|
|||
export { SetArray, get, pop, put }; |
|||
//# sourceMappingURL=set-array.mjs.map
|
|||
@ -0,0 +1 @@ |
|||
{"version":3,"file":"set-array.mjs","sources":["../src/set-array.ts"],"sourcesContent":["/**\n * Gets the index associated with `key` in the backing array, if it is already present.\n */\nexport let get: (strarr: SetArray, key: string) => number | undefined;\n\n/**\n * Puts `key` into the backing array, if it is not already present. Returns\n * the index of the `key` in the backing array.\n */\nexport let put: (strarr: SetArray, key: string) => number;\n\n/**\n * Pops the last added item out of the SetArray.\n */\nexport let pop: (strarr: SetArray) => void;\n\n/**\n * SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the\n * index of the `key` in the backing array.\n *\n * This is designed to allow synchronizing a second array with the contents of the backing array,\n * like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,\n * and there are never duplicates.\n */\nexport class SetArray {\n private declare _indexes: { [key: string]: number | undefined };\n declare array: readonly string[];\n\n constructor() {\n this._indexes = { __proto__: null } as any;\n this.array = [];\n }\n\n static {\n get = (strarr, key) => strarr._indexes[key];\n\n put = (strarr, key) => {\n // The key may or may not be present. 
If it is present, it's a number.\n const index = get(strarr, key);\n if (index !== undefined) return index;\n\n const { array, _indexes: indexes } = strarr;\n\n return (indexes[key] = (array as string[]).push(key) - 1);\n };\n\n pop = (strarr) => {\n const { array, _indexes: indexes } = strarr;\n if (array.length === 0) return;\n\n const last = (array as string[]).pop()!;\n indexes[last] = undefined;\n };\n }\n}\n"],"names":[],"mappings":"AAAA;;;IAGW,IAA2D;AAEtE;;;;IAIW,IAA+C;AAE1D;;;IAGW,IAAgC;AAE3C;;;;;;;;MAQa,QAAQ;IAInB;QACE,IAAI,CAAC,QAAQ,GAAG,EAAE,SAAS,EAAE,IAAI,EAAS,CAAC;QAC3C,IAAI,CAAC,KAAK,GAAG,EAAE,CAAC;KACjB;CAuBF;AArBC;IACE,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG,KAAK,MAAM,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC;IAE5C,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG;;QAEhB,MAAM,KAAK,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;QAC/B,IAAI,KAAK,KAAK,SAAS;YAAE,OAAO,KAAK,CAAC;QAEtC,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,MAAM,CAAC;QAE5C,QAAQ,OAAO,CAAC,GAAG,CAAC,GAAI,KAAkB,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE;KAC3D,CAAC;IAEF,GAAG,GAAG,CAAC,MAAM;QACX,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,MAAM,CAAC;QAC5C,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC;YAAE,OAAO;QAE/B,MAAM,IAAI,GAAI,KAAkB,CAAC,GAAG,EAAG,CAAC;QACxC,OAAO,CAAC,IAAI,CAAC,GAAG,SAAS,CAAC;KAC3B,CAAC;AACJ,CAAC,GAAA;;;;"} |
|||
@ -0,0 +1,58 @@ |
|||
(function (global, factory) { |
|||
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) : |
|||
typeof define === 'function' && define.amd ? define(['exports'], factory) : |
|||
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.setArray = {})); |
|||
})(this, (function (exports) { 'use strict'; |
|||
|
|||
/** |
|||
* Gets the index associated with `key` in the backing array, if it is already present. |
|||
*/ |
|||
exports.get = void 0; |
|||
/** |
|||
* Puts `key` into the backing array, if it is not already present. Returns |
|||
* the index of the `key` in the backing array. |
|||
*/ |
|||
exports.put = void 0; |
|||
/** |
|||
* Pops the last added item out of the SetArray. |
|||
*/ |
|||
exports.pop = void 0; |
|||
/** |
|||
* SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the |
|||
* index of the `key` in the backing array. |
|||
* |
|||
* This is designed to allow synchronizing a second array with the contents of the backing array, |
|||
* like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`, |
|||
* and there are never duplicates. |
|||
*/ |
|||
class SetArray { |
|||
constructor() { |
|||
this._indexes = { __proto__: null }; |
|||
this.array = []; |
|||
} |
|||
} |
|||
(() => { |
|||
exports.get = (strarr, key) => strarr._indexes[key]; |
|||
exports.put = (strarr, key) => { |
|||
// The key may or may not be present. If it is present, it's a number.
|
|||
const index = exports.get(strarr, key); |
|||
if (index !== undefined) |
|||
return index; |
|||
const { array, _indexes: indexes } = strarr; |
|||
return (indexes[key] = array.push(key) - 1); |
|||
}; |
|||
exports.pop = (strarr) => { |
|||
const { array, _indexes: indexes } = strarr; |
|||
if (array.length === 0) |
|||
return; |
|||
const last = array.pop(); |
|||
indexes[last] = undefined; |
|||
}; |
|||
})(); |
|||
|
|||
exports.SetArray = SetArray; |
|||
|
|||
Object.defineProperty(exports, '__esModule', { value: true }); |
|||
|
|||
})); |
|||
//# sourceMappingURL=set-array.umd.js.map
|
|||
@ -0,0 +1 @@ |
|||
{"version":3,"file":"set-array.umd.js","sources":["../src/set-array.ts"],"sourcesContent":["/**\n * Gets the index associated with `key` in the backing array, if it is already present.\n */\nexport let get: (strarr: SetArray, key: string) => number | undefined;\n\n/**\n * Puts `key` into the backing array, if it is not already present. Returns\n * the index of the `key` in the backing array.\n */\nexport let put: (strarr: SetArray, key: string) => number;\n\n/**\n * Pops the last added item out of the SetArray.\n */\nexport let pop: (strarr: SetArray) => void;\n\n/**\n * SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the\n * index of the `key` in the backing array.\n *\n * This is designed to allow synchronizing a second array with the contents of the backing array,\n * like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,\n * and there are never duplicates.\n */\nexport class SetArray {\n private declare _indexes: { [key: string]: number | undefined };\n declare array: readonly string[];\n\n constructor() {\n this._indexes = { __proto__: null } as any;\n this.array = [];\n }\n\n static {\n get = (strarr, key) => strarr._indexes[key];\n\n put = (strarr, key) => {\n // The key may or may not be present. 
If it is present, it's a number.\n const index = get(strarr, key);\n if (index !== undefined) return index;\n\n const { array, _indexes: indexes } = strarr;\n\n return (indexes[key] = (array as string[]).push(key) - 1);\n };\n\n pop = (strarr) => {\n const { array, _indexes: indexes } = strarr;\n if (array.length === 0) return;\n\n const last = (array as string[]).pop()!;\n indexes[last] = undefined;\n };\n }\n}\n"],"names":["get","put","pop"],"mappings":";;;;;;IAAA;;;AAGWA,yBAA2D;IAEtE;;;;AAIWC,yBAA+C;IAE1D;;;AAGWC,yBAAgC;IAE3C;;;;;;;;UAQa,QAAQ;QAInB;YACE,IAAI,CAAC,QAAQ,GAAG,EAAE,SAAS,EAAE,IAAI,EAAS,CAAC;YAC3C,IAAI,CAAC,KAAK,GAAG,EAAE,CAAC;SACjB;KAuBF;IArBC;QACEF,WAAG,GAAG,CAAC,MAAM,EAAE,GAAG,KAAK,MAAM,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC;QAE5CC,WAAG,GAAG,CAAC,MAAM,EAAE,GAAG;;YAEhB,MAAM,KAAK,GAAGD,WAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;YAC/B,IAAI,KAAK,KAAK,SAAS;gBAAE,OAAO,KAAK,CAAC;YAEtC,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,MAAM,CAAC;YAE5C,QAAQ,OAAO,CAAC,GAAG,CAAC,GAAI,KAAkB,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE;SAC3D,CAAC;QAEFE,WAAG,GAAG,CAAC,MAAM;YACX,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,MAAM,CAAC;YAC5C,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC;gBAAE,OAAO;YAE/B,MAAM,IAAI,GAAI,KAAkB,CAAC,GAAG,EAAG,CAAC;YACxC,OAAO,CAAC,IAAI,CAAC,GAAG,SAAS,CAAC;SAC3B,CAAC;IACJ,CAAC,GAAA;;;;;;;;;;"} |
|||
@ -0,0 +1,26 @@ |
|||
/** |
|||
* Gets the index associated with `key` in the backing array, if it is already present. |
|||
*/ |
|||
export declare let get: (strarr: SetArray, key: string) => number | undefined; |
|||
/** |
|||
* Puts `key` into the backing array, if it is not already present. Returns |
|||
* the index of the `key` in the backing array. |
|||
*/ |
|||
export declare let put: (strarr: SetArray, key: string) => number; |
|||
/** |
|||
* Pops the last added item out of the SetArray. |
|||
*/ |
|||
export declare let pop: (strarr: SetArray) => void; |
|||
/** |
|||
* SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the |
|||
* index of the `key` in the backing array. |
|||
* |
|||
* This is designed to allow synchronizing a second array with the contents of the backing array, |
|||
* like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`, |
|||
* and there are never duplicates. |
|||
*/ |
|||
export declare class SetArray { |
|||
private _indexes; |
|||
array: readonly string[]; |
|||
constructor(); |
|||
} |
|||
@ -0,0 +1,66 @@ |
|||
{ |
|||
"name": "@jridgewell/set-array", |
|||
"version": "1.1.2", |
|||
"description": "Like a Set, but provides the index of the `key` in the backing array", |
|||
"keywords": [], |
|||
"author": "Justin Ridgewell <justin@ridgewell.name>", |
|||
"license": "MIT", |
|||
"repository": "https://github.com/jridgewell/set-array", |
|||
"main": "dist/set-array.umd.js", |
|||
"module": "dist/set-array.mjs", |
|||
"typings": "dist/types/set-array.d.ts", |
|||
"exports": { |
|||
".": [ |
|||
{ |
|||
"types": "./dist/types/set-array.d.ts", |
|||
"browser": "./dist/set-array.umd.js", |
|||
"require": "./dist/set-array.umd.js", |
|||
"import": "./dist/set-array.mjs" |
|||
}, |
|||
"./dist/set-array.umd.js" |
|||
], |
|||
"./package.json": "./package.json" |
|||
}, |
|||
"files": [ |
|||
"dist", |
|||
"src" |
|||
], |
|||
"engines": { |
|||
"node": ">=6.0.0" |
|||
}, |
|||
"scripts": { |
|||
"prebuild": "rm -rf dist", |
|||
"build": "run-s -n build:*", |
|||
"build:rollup": "rollup -c rollup.config.js", |
|||
"build:ts": "tsc --project tsconfig.build.json", |
|||
"lint": "run-s -n lint:*", |
|||
"lint:prettier": "npm run test:lint:prettier -- --write", |
|||
"lint:ts": "npm run test:lint:ts -- --fix", |
|||
"pretest": "run-s build:rollup", |
|||
"test": "run-s -n test:lint test:only", |
|||
"test:debug": "mocha --inspect-brk", |
|||
"test:lint": "run-s -n test:lint:*", |
|||
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts'", |
|||
"test:lint:ts": "eslint '{src,test}/**/*.ts'", |
|||
"test:only": "mocha", |
|||
"test:coverage": "c8 mocha", |
|||
"test:watch": "mocha --watch", |
|||
"prepublishOnly": "npm run preversion", |
|||
"preversion": "run-s test build" |
|||
}, |
|||
"devDependencies": { |
|||
"@rollup/plugin-typescript": "8.3.0", |
|||
"@types/mocha": "9.1.1", |
|||
"@types/node": "17.0.29", |
|||
"@typescript-eslint/eslint-plugin": "5.10.0", |
|||
"@typescript-eslint/parser": "5.10.0", |
|||
"c8": "7.11.0", |
|||
"eslint": "8.7.0", |
|||
"eslint-config-prettier": "8.3.0", |
|||
"mocha": "9.2.0", |
|||
"npm-run-all": "4.1.5", |
|||
"prettier": "2.5.1", |
|||
"rollup": "2.66.0", |
|||
"typescript": "4.5.5" |
|||
} |
|||
} |
|||
@ -0,0 +1,55 @@ |
|||
/** |
|||
* Gets the index associated with `key` in the backing array, if it is already present. |
|||
*/ |
|||
export let get: (strarr: SetArray, key: string) => number | undefined; |
|||
|
|||
/** |
|||
* Puts `key` into the backing array, if it is not already present. Returns |
|||
* the index of the `key` in the backing array. |
|||
*/ |
|||
export let put: (strarr: SetArray, key: string) => number; |
|||
|
|||
/** |
|||
* Pops the last added item out of the SetArray. |
|||
*/ |
|||
export let pop: (strarr: SetArray) => void; |
|||
|
|||
/** |
|||
* SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the |
|||
* index of the `key` in the backing array. |
|||
* |
|||
* This is designed to allow synchronizing a second array with the contents of the backing array, |
|||
* like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`, |
|||
* and there are never duplicates. |
|||
*/ |
|||
export class SetArray { |
|||
private declare _indexes: { [key: string]: number | undefined }; |
|||
declare array: readonly string[]; |
|||
|
|||
constructor() { |
|||
this._indexes = { __proto__: null } as any; |
|||
this.array = []; |
|||
} |
|||
|
|||
static { |
|||
get = (strarr, key) => strarr._indexes[key]; |
|||
|
|||
put = (strarr, key) => { |
|||
// The key may or may not be present. If it is present, it's a number.
|
|||
const index = get(strarr, key); |
|||
if (index !== undefined) return index; |
|||
|
|||
const { array, _indexes: indexes } = strarr; |
|||
|
|||
return (indexes[key] = (array as string[]).push(key) - 1); |
|||
}; |
|||
|
|||
pop = (strarr) => { |
|||
const { array, _indexes: indexes } = strarr; |
|||
if (array.length === 0) return; |
|||
|
|||
const last = (array as string[]).pop()!; |
|||
indexes[last] = undefined; |
|||
}; |
|||
} |
|||
} |
|||
@ -0,0 +1,19 @@ |
|||
Copyright 2019 Justin Ridgewell <jridgewell@google.com> |
|||
|
|||
Permission is hereby granted, free of charge, to any person obtaining a copy |
|||
of this software and associated documentation files (the "Software"), to deal |
|||
in the Software without restriction, including without limitation the rights |
|||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell |
|||
copies of the Software, and to permit persons to whom the Software is |
|||
furnished to do so, subject to the following conditions: |
|||
|
|||
The above copyright notice and this permission notice shall be included in |
|||
all copies or substantial portions of the Software. |
|||
|
|||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
|||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
|||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE |
|||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
|||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |
|||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE |
|||
SOFTWARE. |
|||
@ -0,0 +1,82 @@ |
|||
# @jridgewell/source-map |
|||
|
|||
> Packages `@jridgewell/trace-mapping` and `@jridgewell/gen-mapping` into the familiar source-map API |
|||
|
|||
This isn't the full API, but it's the core functionality. This wraps |
|||
[@jridgewell/trace-mapping][trace-mapping] and [@jridgewell/gen-mapping][gen-mapping] |
|||
implementations. |
|||
|
|||
## Installation |
|||
|
|||
```sh |
|||
npm install @jridgewell/source-map |
|||
``` |
|||
|
|||
## Usage |
|||
|
|||
TODO |
|||
|
|||
### SourceMapConsumer |
|||
|
|||
```typescript |
|||
import { SourceMapConsumer } from '@jridgewell/source-map'; |
|||
const smc = new SourceMapConsumer({ |
|||
version: 3, |
|||
names: ['foo'], |
|||
sources: ['input.js'], |
|||
mappings: 'AAAAA', |
|||
}); |
|||
``` |
|||
|
|||
#### SourceMapConsumer.prototype.originalPositionFor(generatedPosition) |
|||
|
|||
```typescript |
|||
const smc = new SourceMapConsumer(map); |
|||
smc.originalPositionFor({ line: 1, column: 0 }); |
|||
``` |
|||
|
|||
### SourceMapGenerator |
|||
|
|||
```typescript |
|||
import { SourceMapGenerator } from '@jridgewell/source-map'; |
|||
const smg = new SourceMapGenerator({ |
|||
file: 'output.js', |
|||
sourceRoot: 'https://example.com/', |
|||
}); |
|||
``` |
|||
|
|||
#### SourceMapGenerator.prototype.addMapping(mapping) |
|||
|
|||
```typescript |
|||
const smg = new SourceMapGenerator(); |
|||
smg.addMapping({ |
|||
generated: { line: 1, column: 0 }, |
|||
source: 'input.js', |
|||
original: { line: 1, column: 0 }, |
|||
name: 'foo', |
|||
}); |
|||
``` |
|||
|
|||
#### SourceMapGenerator.prototype.setSourceContent(sourceFile, sourceContent) |
|||
|
|||
```typescript |
|||
const smg = new SourceMapGenerator(); |
|||
smg.setSourceContent('input.js', 'foobar'); |
|||
``` |
|||
|
|||
#### SourceMapGenerator.prototype.toJSON() |
|||
|
|||
```typescript |
|||
const smg = new SourceMapGenerator(); |
|||
smg.toJSON(); // { version: 3, names: [], sources: [], mappings: '' } |
|||
``` |
|||
|
|||
#### SourceMapGenerator.prototype.toDecodedMap() |
|||
|
|||
```typescript |
|||
const smg = new SourceMapGenerator(); |
|||
smg.toDecodedMap(); // { version: 3, names: [], sources: [], mappings: [] } |
|||
``` |
|||
|
|||
[trace-mapping]: https://github.com/jridgewell/trace-mapping/ |
|||
[gen-mapping]: https://github.com/jridgewell/gen-mapping/ |
|||
@ -0,0 +1,928 @@ |
|||
const comma = ','.charCodeAt(0); |
|||
const semicolon = ';'.charCodeAt(0); |
|||
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'; |
|||
const intToChar = new Uint8Array(64); // 64 possible chars.
|
|||
const charToInteger = new Uint8Array(128); // z is 122 in ASCII
|
|||
for (let i = 0; i < chars.length; i++) { |
|||
const c = chars.charCodeAt(i); |
|||
charToInteger[c] = i; |
|||
intToChar[i] = c; |
|||
} |
|||
// Provide a fallback for older environments.
|
|||
const td = typeof TextDecoder !== 'undefined' |
|||
? new TextDecoder() |
|||
: typeof Buffer !== 'undefined' |
|||
? { |
|||
decode(buf) { |
|||
const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength); |
|||
return out.toString(); |
|||
}, |
|||
} |
|||
: { |
|||
decode(buf) { |
|||
let out = ''; |
|||
for (let i = 0; i < buf.length; i++) { |
|||
out += String.fromCharCode(buf[i]); |
|||
} |
|||
return out; |
|||
}, |
|||
}; |
|||
/**
 * Decodes a sourcemap `mappings` string into an array of lines, each line an
 * array of segments of the form
 * [generatedColumn, sourceIndex?, sourceLine?, sourceColumn?, nameIndex?].
 *
 * `state` holds the 5 running VLQ accumulators (the format is delta-encoded);
 * slot 0 (generated column) resets on every ";" (new generated line), the
 * others carry across lines.
 */
function decode(mappings) {
    const state = new Int32Array(5);
    const decoded = [];
    let line = [];
    // Segments within a line must be sorted by generated column; track whether
    // we ever saw an out-of-order column so we only sort when necessary.
    let sorted = true;
    let lastCol = 0;
    for (let i = 0; i < mappings.length;) {
        const c = mappings.charCodeAt(i);
        if (c === comma) {
            i++;
        }
        else if (c === semicolon) {
            // End of a generated line: reset the column accumulator and flush.
            state[0] = lastCol = 0;
            if (!sorted)
                sort(line);
            sorted = true;
            decoded.push(line);
            line = [];
            i++;
        }
        else {
            i = decodeInteger(mappings, i, state, 0); // generatedCodeColumn
            const col = state[0];
            if (col < lastCol)
                sorted = false;
            lastCol = col;
            // Segments have 1, 4, or 5 fields; stop early at a "," or ";".
            if (!hasMoreSegments(mappings, i)) {
                line.push([col]);
                continue;
            }
            i = decodeInteger(mappings, i, state, 1); // sourceFileIndex
            i = decodeInteger(mappings, i, state, 2); // sourceCodeLine
            i = decodeInteger(mappings, i, state, 3); // sourceCodeColumn
            if (!hasMoreSegments(mappings, i)) {
                line.push([col, state[1], state[2], state[3]]);
                continue;
            }
            i = decodeInteger(mappings, i, state, 4); // nameIndex
            line.push([col, state[1], state[2], state[3], state[4]]);
        }
    }
    // Flush the final line (the string does not end with ";").
    if (!sorted)
        sort(line);
    decoded.push(line);
    return decoded;
}
|||
/**
 * Reads one Base64 VLQ value from `mappings` starting at `pos`, adds the
 * decoded (delta) value into `state[j]`, and returns the position just past
 * the value's final character.
 */
function decodeInteger(mappings, pos, state, j) {
    let shift = 0;
    let value = 0;
    let digit;
    do {
        digit = charToInteger[mappings.charCodeAt(pos++)];
        value |= (digit & 0b011111) << shift;
        shift += 5;
    } while (digit & 0b100000); // high bit set means "more digits follow"
    // The lowest bit carries the sign (zig-zag); the -0x80000000 branch
    // preserves the int32-min edge case that plain negation would lose.
    const negative = value & 1;
    value >>>= 1;
    if (negative) {
        value = -0x80000000 | -value;
    }
    state[j] += value;
    return pos;
}
|||
/**
 * True when the next character at `i` continues the current segment — ie we
 * are not at end-of-string, a segment separator (","), or a line
 * separator (";").
 */
function hasMoreSegments(mappings, i) {
    if (i >= mappings.length)
        return false;
    const c = mappings.charCodeAt(i);
    return c !== comma && c !== semicolon;
}
|||
/**
 * Sorts one decoded line's segments in place by generated column (ascending).
 */
function sort(line) {
    line.sort(sortComparator$1);
}
/**
 * Orders two segments by generated column (element 0).
 */
function sortComparator$1(a, b) {
    const [aColumn] = a;
    const [bColumn] = b;
    return aColumn - bColumn;
}
|||
/**
 * Encodes decoded mappings (array of lines of segments) back into the VLQ
 * `mappings` string. Writes raw ASCII bytes into a growable Uint8Array and
 * decodes it to a string once at the end, which avoids per-segment string
 * concatenation.
 */
function encode(decoded) {
    // Running delta accumulators, mirroring decode()'s 5 slots.
    const state = new Int32Array(5);
    let buf = new Uint8Array(1024);
    let pos = 0;
    for (let i = 0; i < decoded.length; i++) {
        const line = decoded[i];
        if (i > 0) {
            buf = reserve(buf, pos, 1);
            buf[pos++] = semicolon;
        }
        if (line.length === 0)
            continue;
        // Generated column deltas restart on every line.
        state[0] = 0;
        for (let j = 0; j < line.length; j++) {
            const segment = line[j];
            // We can push up to 5 ints, each int can take at most 7 chars, and we
            // may push a comma.
            buf = reserve(buf, pos, 36);
            if (j > 0)
                buf[pos++] = comma;
            pos = encodeInteger(buf, pos, state, segment, 0); // generatedCodeColumn
            if (segment.length === 1)
                continue;
            pos = encodeInteger(buf, pos, state, segment, 1); // sourceFileIndex
            pos = encodeInteger(buf, pos, state, segment, 2); // sourceCodeLine
            pos = encodeInteger(buf, pos, state, segment, 3); // sourceCodeColumn
            if (segment.length === 4)
                continue;
            pos = encodeInteger(buf, pos, state, segment, 4); // nameIndex
        }
    }
    return td.decode(buf.subarray(0, pos));
}
|||
/**
 * Ensures `buf` has room for at least `pos + count` more bytes, growing by
 * doubling into a fresh Uint8Array when it does not. Existing contents are
 * preserved.
 *
 * @param {Uint8Array} buf - Current backing buffer.
 * @param {number} pos - Number of bytes already written.
 * @param {number} count - Additional bytes about to be written.
 * @returns {Uint8Array} `buf` itself when it already has capacity, otherwise a larger copy.
 */
function reserve(buf, pos, count) {
    if (buf.length > pos + count)
        return buf;
    // Keep doubling until the request fits: a single doubling is not
    // guaranteed to be enough when `count` exceeds the current length.
    let length = buf.length * 2;
    while (length <= pos + count) {
        length *= 2;
    }
    const swap = new Uint8Array(length);
    swap.set(buf);
    return swap;
}
|||
/**
 * Writes `segment[j]` as a Base64 VLQ into `buf` at `pos` and returns the new
 * write position. The value is delta-encoded against `state[j]`, which is then
 * updated to the absolute value for the next call.
 */
function encodeInteger(buf, pos, state, segment, j) {
    const next = segment[j];
    let num = next - state[j];
    state[j] = next;
    // Zig-zag: move the sign into the lowest bit so small negatives stay small.
    num = num < 0 ? (-num << 1) | 1 : num << 1;
    do {
        let clamped = num & 0b011111;
        num >>>= 5;
        // Set the continuation bit when more 5-bit digits follow.
        if (num > 0)
            clamped |= 0b100000;
        buf[pos++] = intToChar[clamped];
    } while (num > 0);
    return pos;
}
|||
|
|||
// Matches the scheme of a URL, eg "http://"
|
|||
const schemeRegex = /^[\w+.-]+:\/\//; |
|||
/** |
|||
* Matches the parts of a URL: |
|||
* 1. Scheme, including ":", guaranteed. |
|||
* 2. User/password, including "@", optional. |
|||
* 3. Host, guaranteed. |
|||
* 4. Port, including ":", optional. |
|||
* 5. Path, including "/", optional. |
|||
*/ |
|||
const urlRegex = /^([\w+.-]+:)\/\/([^@/#?]*@)?([^:/#?]*)(:\d+)?(\/[^#?]*)?/; |
|||
/** |
|||
* File URLs are weird. They don't need the regular `//` in the scheme, they may or may not start |
|||
* with a leading `/`, they can have a domain (but only if they don't start with a Windows drive). |
|||
* |
|||
* 1. Host, optional. |
|||
* 2. Path, which may include "/", guaranteed. |
|||
*/ |
|||
const fileRegex = /^file:(?:\/\/((?![a-z]:)[^/]*)?)?(\/?.*)/i; |
|||
/**
 * True if `input` begins with a URL scheme followed by "//" (eg "https://").
 */
function isAbsoluteUrl(input) {
    return schemeRegex.test(input);
}
/**
 * True if `input` is protocol-relative, ie begins with "//".
 */
function isSchemeRelativeUrl(input) {
    return input.slice(0, 2) === '//';
}
/**
 * True if `input` is an absolute path, ie begins with "/".
 */
function isAbsolutePath(input) {
    return input[0] === '/';
}
/**
 * True if `input` uses the "file:" scheme.
 */
function isFileUrl(input) {
    return input.slice(0, 5) === 'file:';
}
|||
function parseAbsoluteUrl(input) { |
|||
const match = urlRegex.exec(input); |
|||
return makeUrl(match[1], match[2] || '', match[3], match[4] || '', match[5] || '/'); |
|||
} |
|||
function parseFileUrl(input) { |
|||
const match = fileRegex.exec(input); |
|||
const path = match[2]; |
|||
return makeUrl('file:', '', match[1] || '', '', isAbsolutePath(path) ? path : '/' + path); |
|||
} |
|||
/**
 * Builds the internal URL record used throughout resolution. All five parts
 * are stored verbatim; `relativePath` starts false and is flipped by callers
 * that parse relative inputs.
 */
function makeUrl(scheme, user, host, port, path) {
    const url = {
        scheme: scheme,
        user: user,
        host: host,
        port: port,
        path: path,
        relativePath: false,
    };
    return url;
}
|||
function parseUrl(input) { |
|||
if (isSchemeRelativeUrl(input)) { |
|||
const url = parseAbsoluteUrl('http:' + input); |
|||
url.scheme = ''; |
|||
return url; |
|||
} |
|||
if (isAbsolutePath(input)) { |
|||
const url = parseAbsoluteUrl('http://foo.com' + input); |
|||
url.scheme = ''; |
|||
url.host = ''; |
|||
return url; |
|||
} |
|||
if (isFileUrl(input)) |
|||
return parseFileUrl(input); |
|||
if (isAbsoluteUrl(input)) |
|||
return parseAbsoluteUrl(input); |
|||
const url = parseAbsoluteUrl('http://foo.com/' + input); |
|||
url.scheme = ''; |
|||
url.host = ''; |
|||
url.relativePath = true; |
|||
return url; |
|||
} |
|||
/**
 * Drops the filename portion of `path`, keeping the trailing "/".
 * A path ending in "/.." is a relative path with excess parent directories,
 * not a file, so it is returned untouched.
 */
function stripPathFilename(path) {
    if (path.endsWith('/..'))
        return path;
    const lastSlash = path.lastIndexOf('/');
    return path.substring(0, lastSlash + 1);
}
|||
function mergePaths(url, base) { |
|||
// If we're not a relative path, then we're an absolute path, and it doesn't matter what base is.
|
|||
if (!url.relativePath) |
|||
return; |
|||
normalizePath(base); |
|||
// If the path is just a "/", then it was an empty path to begin with (remember, we're a relative
|
|||
// path).
|
|||
if (url.path === '/') { |
|||
url.path = base.path; |
|||
} |
|||
else { |
|||
// Resolution happens relative to the base path's directory, not the file.
|
|||
url.path = stripPathFilename(base.path) + url.path; |
|||
} |
|||
// If the base path is absolute, then our path is now absolute too.
|
|||
url.relativePath = base.relativePath; |
|||
} |
|||
/** |
|||
* The path can have empty directories "//", unneeded parents "foo/..", or current directory |
|||
* "foo/.". We need to normalize to a standard representation. |
|||
*/ |
|||
function normalizePath(url) { |
|||
const { relativePath } = url; |
|||
const pieces = url.path.split('/'); |
|||
// We need to preserve the first piece always, so that we output a leading slash. The item at
|
|||
// pieces[0] is an empty string.
|
|||
let pointer = 1; |
|||
// Positive is the number of real directories we've output, used for popping a parent directory.
|
|||
// Eg, "foo/bar/.." will have a positive 2, and we can decrement to be left with just "foo".
|
|||
let positive = 0; |
|||
// We need to keep a trailing slash if we encounter an empty directory (eg, splitting "foo/" will
|
|||
// generate `["foo", ""]` pieces). And, if we pop a parent directory. But once we encounter a
|
|||
// real directory, we won't need to append, unless the other conditions happen again.
|
|||
let addTrailingSlash = false; |
|||
for (let i = 1; i < pieces.length; i++) { |
|||
const piece = pieces[i]; |
|||
// An empty directory, could be a trailing slash, or just a double "//" in the path.
|
|||
if (!piece) { |
|||
addTrailingSlash = true; |
|||
continue; |
|||
} |
|||
// If we encounter a real directory, then we don't need to append anymore.
|
|||
addTrailingSlash = false; |
|||
// A current directory, which we can always drop.
|
|||
if (piece === '.') |
|||
continue; |
|||
// A parent directory, we need to see if there are any real directories we can pop. Else, we
|
|||
// have an excess of parents, and we'll need to keep the "..".
|
|||
if (piece === '..') { |
|||
if (positive) { |
|||
addTrailingSlash = true; |
|||
positive--; |
|||
pointer--; |
|||
} |
|||
else if (relativePath) { |
|||
// If we're in a relativePath, then we need to keep the excess parents. Else, in an absolute
|
|||
// URL, protocol relative URL, or an absolute path, we don't need to keep excess.
|
|||
pieces[pointer++] = piece; |
|||
} |
|||
continue; |
|||
} |
|||
// We've encountered a real directory. Move it to the next insertion pointer, which accounts for
|
|||
// any popped or dropped directories.
|
|||
pieces[pointer++] = piece; |
|||
positive++; |
|||
} |
|||
let path = ''; |
|||
for (let i = 1; i < pointer; i++) { |
|||
path += '/' + pieces[i]; |
|||
} |
|||
if (!path || (addTrailingSlash && !path.endsWith('/..'))) { |
|||
path += '/'; |
|||
} |
|||
url.path = path; |
|||
} |
|||
/** |
|||
* Attempts to resolve `input` URL/path relative to `base`. |
|||
*/ |
|||
function resolve$1(input, base) { |
|||
if (!input && !base) |
|||
return ''; |
|||
const url = parseUrl(input); |
|||
// If we have a base, and the input isn't already an absolute URL, then we need to merge.
|
|||
if (base && !url.scheme) { |
|||
const baseUrl = parseUrl(base); |
|||
url.scheme = baseUrl.scheme; |
|||
// If there's no host, then we were just a path.
|
|||
if (!url.host) { |
|||
// The host, user, and port are joined, you can't copy one without the others.
|
|||
url.user = baseUrl.user; |
|||
url.host = baseUrl.host; |
|||
url.port = baseUrl.port; |
|||
} |
|||
mergePaths(url, baseUrl); |
|||
} |
|||
normalizePath(url); |
|||
// If the input (and base, if there was one) are both relative, then we need to output a relative.
|
|||
if (url.relativePath) { |
|||
// The first char is always a "/".
|
|||
const path = url.path.slice(1); |
|||
if (!path) |
|||
return '.'; |
|||
// If base started with a leading ".", or there is no base and input started with a ".", then we
|
|||
// need to ensure that the relative path starts with a ".". We don't know if relative starts
|
|||
// with a "..", though, so check before prepending.
|
|||
const keepRelative = (base || input).startsWith('.'); |
|||
return !keepRelative || path.startsWith('.') ? path : './' + path; |
|||
} |
|||
// If there's no host (and no scheme/user/port), then we need to output an absolute path.
|
|||
if (!url.scheme && !url.host) |
|||
return url.path; |
|||
// We're outputting either an absolute URL, or a protocol relative one.
|
|||
return `${url.scheme}//${url.user}${url.host}${url.port}${url.path}`; |
|||
} |
|||
|
|||
function resolve(input, base) { |
|||
// The base is always treated as a directory, if it's not empty.
|
|||
// https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
|
|||
// https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
|
|||
if (base && !base.endsWith('/')) |
|||
base += '/'; |
|||
return resolve$1(input, base); |
|||
} |
|||
|
|||
/** |
|||
* Removes everything after the last "/", but leaves the slash. |
|||
*/ |
|||
/**
 * Removes everything after the last "/", but leaves the slash.
 * A falsy `path` (empty string, null, undefined) yields "".
 */
function stripFilename(path) {
    if (!path)
        return '';
    return path.slice(0, path.lastIndexOf('/') + 1);
}
|||
|
|||
const COLUMN$1 = 0; |
|||
const SOURCES_INDEX$1 = 1; |
|||
const SOURCE_LINE$1 = 2; |
|||
const SOURCE_COLUMN$1 = 3; |
|||
const NAMES_INDEX$1 = 4; |
|||
|
|||
function maybeSort(mappings, owned) { |
|||
const unsortedIndex = nextUnsortedSegmentLine(mappings, 0); |
|||
if (unsortedIndex === mappings.length) |
|||
return mappings; |
|||
// If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
|
|||
// not, we do not want to modify the consumer's input array.
|
|||
if (!owned) |
|||
mappings = mappings.slice(); |
|||
for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) { |
|||
mappings[i] = sortSegments(mappings[i], owned); |
|||
} |
|||
return mappings; |
|||
} |
|||
function nextUnsortedSegmentLine(mappings, start) { |
|||
for (let i = start; i < mappings.length; i++) { |
|||
if (!isSorted(mappings[i])) |
|||
return i; |
|||
} |
|||
return mappings.length; |
|||
} |
|||
function isSorted(line) { |
|||
for (let j = 1; j < line.length; j++) { |
|||
if (line[j][COLUMN$1] < line[j - 1][COLUMN$1]) { |
|||
return false; |
|||
} |
|||
} |
|||
return true; |
|||
} |
|||
function sortSegments(line, owned) { |
|||
if (!owned) |
|||
line = line.slice(); |
|||
return line.sort(sortComparator); |
|||
} |
|||
function sortComparator(a, b) { |
|||
return a[COLUMN$1] - b[COLUMN$1]; |
|||
} |
|||
|
|||
let found = false; |
|||
/** |
|||
* A binary search implementation that returns the index if a match is found. |
|||
* If no match is found, then the left-index (the index associated with the item that comes just |
|||
* before the desired index) is returned. To maintain proper sort order, a splice would happen at |
|||
* the next index: |
|||
* |
|||
* ```js
|
|||
* const array = [1, 3]; |
|||
* const needle = 2; |
|||
* const index = binarySearch(array, needle, (item, needle) => item - needle); |
|||
* |
|||
* assert.equal(index, 0); |
|||
* array.splice(index + 1, 0, needle); |
|||
* assert.deepEqual(array, [1, 2, 3]); |
|||
* ``` |
|||
*/ |
|||
function binarySearch(haystack, needle, low, high) {
    // NOTE: sets the module-level `found` flag as a side channel — true on an
    // exact column hit, false when only the left-neighbor index is returned.
    while (low <= high) {
        const mid = low + ((high - low) >> 1);
        const cmp = haystack[mid][COLUMN$1] - needle;
        if (cmp === 0) {
            found = true;
            return mid;
        }
        if (cmp < 0) {
            low = mid + 1;
        }
        else {
            high = mid - 1;
        }
    }
    found = false;
    // No exact match: `low` is the insertion point, so `low - 1` is the index
    // of the item just before it (may be -1 when needle precedes everything).
    return low - 1;
}
|||
/**
 * Walks forward from `index` to the last consecutive segment whose generated
 * column equals `needle` (duplicate columns are legal in mappings).
 */
function upperBound(haystack, needle, index) {
    let i = index + 1;
    while (i < haystack.length && haystack[i][COLUMN$1] === needle) {
        index = i;
        i += 1;
    }
    return index;
}
/**
 * Walks backward from `index` to the first consecutive segment whose generated
 * column equals `needle`.
 */
function lowerBound(haystack, needle, index) {
    let i = index - 1;
    while (i >= 0 && haystack[i][COLUMN$1] === needle) {
        index = i;
        i -= 1;
    }
    return index;
}
|||
/**
 * Creates a fresh memo record for memoizedBinarySearch. The -1 sentinels mean
 * "no previous search has been recorded".
 */
function memoizedState() {
    const state = {
        lastKey: -1,
        lastNeedle: -1,
        lastIndex: -1,
    };
    return state;
}
|||
/** |
|||
* This overly complicated beast is just to record the last tested line/column and the resulting |
|||
* index, allowing us to skip a few tests if mappings are monotonically increasing. |
|||
*/ |
|||
function memoizedBinarySearch(haystack, needle, state, key) {
    const { lastKey, lastNeedle, lastIndex } = state;
    let low = 0;
    let high = haystack.length - 1;
    // `key` is the generated line; the memo is only valid within one line.
    if (key === lastKey) {
        if (needle === lastNeedle) {
            // Same line and column as last time: reuse the cached index, but
            // recompute the module-level `found` flag, which binarySearch
            // normally sets as a side channel.
            found = lastIndex !== -1 && haystack[lastIndex][COLUMN$1] === needle;
            return lastIndex;
        }
        if (needle >= lastNeedle) {
            // Columns usually increase monotonically, so the previous result
            // bounds the search from below.
            // lastIndex may be -1 if the previous needle was not found.
            low = lastIndex === -1 ? 0 : lastIndex;
        }
        else {
            high = lastIndex;
        }
    }
    state.lastKey = key;
    state.lastNeedle = needle;
    return (state.lastIndex = binarySearch(haystack, needle, low, high));
}
|||
|
|||
const AnyMap = function (map, mapUrl) { |
|||
const parsed = typeof map === 'string' ? JSON.parse(map) : map; |
|||
if (!('sections' in parsed)) |
|||
return new TraceMap(parsed, mapUrl); |
|||
const mappings = []; |
|||
const sources = []; |
|||
const sourcesContent = []; |
|||
const names = []; |
|||
const { sections } = parsed; |
|||
let i = 0; |
|||
for (; i < sections.length - 1; i++) { |
|||
const no = sections[i + 1].offset; |
|||
addSection(sections[i], mapUrl, mappings, sources, sourcesContent, names, no.line, no.column); |
|||
} |
|||
if (sections.length > 0) { |
|||
addSection(sections[i], mapUrl, mappings, sources, sourcesContent, names, Infinity, Infinity); |
|||
} |
|||
const joined = { |
|||
version: 3, |
|||
file: parsed.file, |
|||
names, |
|||
sources, |
|||
sourcesContent, |
|||
mappings, |
|||
}; |
|||
return presortedDecodedMap(joined); |
|||
}; |
|||
function addSection(section, mapUrl, mappings, sources, sourcesContent, names, stopLine, stopColumn) { |
|||
const map = AnyMap(section.map, mapUrl); |
|||
const { line: lineOffset, column: columnOffset } = section.offset; |
|||
const sourcesOffset = sources.length; |
|||
const namesOffset = names.length; |
|||
const decoded = decodedMappings(map); |
|||
const { resolvedSources } = map; |
|||
append(sources, resolvedSources); |
|||
append(sourcesContent, map.sourcesContent || fillSourcesContent(resolvedSources.length)); |
|||
append(names, map.names); |
|||
// If this section jumps forwards several lines, we need to add lines to the output mappings catch up.
|
|||
for (let i = mappings.length; i <= lineOffset; i++) |
|||
mappings.push([]); |
|||
// We can only add so many lines before we step into the range that the next section's map
|
|||
// controls. When we get to the last line, then we'll start checking the segments to see if
|
|||
// they've crossed into the column range.
|
|||
const stopI = stopLine - lineOffset; |
|||
const len = Math.min(decoded.length, stopI + 1); |
|||
for (let i = 0; i < len; i++) { |
|||
const line = decoded[i]; |
|||
// On the 0th loop, the line will already exist due to a previous section, or the line catch up
|
|||
// loop above.
|
|||
const out = i === 0 ? mappings[lineOffset] : (mappings[lineOffset + i] = []); |
|||
// On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
|
|||
// map can be multiple lines), it doesn't.
|
|||
const cOffset = i === 0 ? columnOffset : 0; |
|||
for (let j = 0; j < line.length; j++) { |
|||
const seg = line[j]; |
|||
const column = cOffset + seg[COLUMN$1]; |
|||
// If this segment steps into the column range that the next section's map controls, we need
|
|||
// to stop early.
|
|||
if (i === stopI && column >= stopColumn) |
|||
break; |
|||
if (seg.length === 1) { |
|||
out.push([column]); |
|||
continue; |
|||
} |
|||
const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX$1]; |
|||
const sourceLine = seg[SOURCE_LINE$1]; |
|||
const sourceColumn = seg[SOURCE_COLUMN$1]; |
|||
if (seg.length === 4) { |
|||
out.push([column, sourcesIndex, sourceLine, sourceColumn]); |
|||
continue; |
|||
} |
|||
out.push([column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX$1]]); |
|||
} |
|||
} |
|||
} |
|||
/**
 * Pushes every element of `other` onto `arr` (in-place concatenation).
 */
function append(arr, other) {
    for (const item of other)
        arr.push(item);
}
|||
// Sourcemaps don't need to have sourcesContent, and if they don't, we need to create an array of
|
|||
// equal length to the sources. This is because the sources and sourcesContent are paired arrays,
|
|||
// where `sourcesContent[i]` is the content of the `sources[i]` file. If we didn't, then joined
|
|||
// sourcemap would desynchronize the sources/contents.
|
|||
/**
 * Builds a `sourcesContent` array of `len` nulls, keeping sources and
 * sourcesContent paired 1:1 when joining maps that lack content.
 */
function fillSourcesContent(len) {
    return new Array(len).fill(null);
}
|||
|
|||
const INVALID_ORIGINAL_MAPPING = Object.freeze({ |
|||
source: null, |
|||
line: null, |
|||
column: null, |
|||
name: null, |
|||
}); |
|||
Object.freeze({ |
|||
line: null, |
|||
column: null, |
|||
}); |
|||
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)'; |
|||
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)'; |
|||
const LEAST_UPPER_BOUND = -1; |
|||
const GREATEST_LOWER_BOUND = 1; |
|||
/** |
|||
* Returns the decoded (array of lines of segments) form of the SourceMap's mappings field. |
|||
*/ |
|||
let decodedMappings; |
|||
/** |
|||
* A higher-level API to find the source/line/column associated with a generated line/column |
|||
* (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in |
|||
* `source-map` library. |
|||
*/ |
|||
let originalPositionFor; |
|||
/** |
|||
* A helper that skips sorting of the input map's mappings array, which can be expensive for larger |
|||
* maps. |
|||
*/ |
|||
let presortedDecodedMap; |
|||
class TraceMap { |
|||
constructor(map, mapUrl) { |
|||
this._decodedMemo = memoizedState(); |
|||
this._bySources = undefined; |
|||
this._bySourceMemos = undefined; |
|||
const isString = typeof map === 'string'; |
|||
if (!isString && map.constructor === TraceMap) |
|||
return map; |
|||
const parsed = (isString ? JSON.parse(map) : map); |
|||
const { version, file, names, sourceRoot, sources, sourcesContent } = parsed; |
|||
this.version = version; |
|||
this.file = file; |
|||
this.names = names; |
|||
this.sourceRoot = sourceRoot; |
|||
this.sources = sources; |
|||
this.sourcesContent = sourcesContent; |
|||
if (sourceRoot || mapUrl) { |
|||
const from = resolve(sourceRoot || '', stripFilename(mapUrl)); |
|||
this.resolvedSources = sources.map((s) => resolve(s || '', from)); |
|||
} |
|||
else { |
|||
this.resolvedSources = sources.map((s) => s || ''); |
|||
} |
|||
const { mappings } = parsed; |
|||
if (typeof mappings === 'string') { |
|||
this._encoded = mappings; |
|||
this._decoded = undefined; |
|||
} |
|||
else { |
|||
this._encoded = undefined; |
|||
this._decoded = maybeSort(mappings, isString); |
|||
} |
|||
} |
|||
} |
|||
(() => { |
|||
decodedMappings = (map) => { |
|||
return (map._decoded || (map._decoded = decode(map._encoded))); |
|||
}; |
|||
originalPositionFor = (map, { line, column, bias }) => { |
|||
line--; |
|||
if (line < 0) |
|||
throw new Error(LINE_GTR_ZERO); |
|||
if (column < 0) |
|||
throw new Error(COL_GTR_EQ_ZERO); |
|||
const decoded = decodedMappings(map); |
|||
// It's common for parent source maps to have pointers to lines that have no
|
|||
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
|||
if (line >= decoded.length) |
|||
return INVALID_ORIGINAL_MAPPING; |
|||
const segment = traceSegmentInternal(decoded[line], map._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND); |
|||
if (segment == null) |
|||
return INVALID_ORIGINAL_MAPPING; |
|||
if (segment.length == 1) |
|||
return INVALID_ORIGINAL_MAPPING; |
|||
const { names, resolvedSources } = map; |
|||
return { |
|||
source: resolvedSources[segment[SOURCES_INDEX$1]], |
|||
line: segment[SOURCE_LINE$1] + 1, |
|||
column: segment[SOURCE_COLUMN$1], |
|||
name: segment.length === 5 ? names[segment[NAMES_INDEX$1]] : null, |
|||
}; |
|||
}; |
|||
presortedDecodedMap = (map, mapUrl) => { |
|||
const clone = Object.assign({}, map); |
|||
clone.mappings = []; |
|||
const tracer = new TraceMap(clone, mapUrl); |
|||
tracer._decoded = map.mappings; |
|||
return tracer; |
|||
}; |
|||
})(); |
|||
function traceSegmentInternal(segments, memo, line, column, bias) { |
|||
let index = memoizedBinarySearch(segments, column, memo, line); |
|||
if (found) { |
|||
index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index); |
|||
} |
|||
else if (bias === LEAST_UPPER_BOUND) |
|||
index++; |
|||
if (index === -1 || index === segments.length) |
|||
return null; |
|||
return segments[index]; |
|||
} |
|||
|
|||
/** |
|||
* Gets the index associated with `key` in the backing array, if it is already present. |
|||
*/ |
|||
let get; |
|||
/** |
|||
* Puts `key` into the backing array, if it is not already present. Returns |
|||
* the index of the `key` in the backing array. |
|||
*/ |
|||
let put; |
|||
/** |
|||
* SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the |
|||
* index of the `key` in the backing array. |
|||
* |
|||
* This is designed to allow synchronizing a second array with the contents of the backing array, |
|||
* like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`, |
|||
* and there are never duplicates. |
|||
*/ |
|||
class SetArray { |
|||
constructor() { |
|||
this._indexes = { __proto__: null }; |
|||
this.array = []; |
|||
} |
|||
} |
|||
(() => { |
|||
get = (strarr, key) => strarr._indexes[key]; |
|||
put = (strarr, key) => { |
|||
// The key may or may not be present. If it is present, it's a number.
|
|||
const index = get(strarr, key); |
|||
if (index !== undefined) |
|||
return index; |
|||
const { array, _indexes: indexes } = strarr; |
|||
return (indexes[key] = array.push(key) - 1); |
|||
}; |
|||
})(); |
|||
|
|||
const COLUMN = 0; |
|||
const SOURCES_INDEX = 1; |
|||
const SOURCE_LINE = 2; |
|||
const SOURCE_COLUMN = 3; |
|||
const NAMES_INDEX = 4; |
|||
|
|||
const NO_NAME = -1; |
|||
/** |
|||
* Same as `addMapping`, but will only add the mapping if it generates useful information in the |
|||
* resulting map. This only works correctly if mappings are added **in order**, meaning you should |
|||
* not add a mapping with a lower generated line/column than one that came before. |
|||
*/ |
|||
let maybeAddMapping; |
|||
/** |
|||
* Adds/removes the content of the source file to the source map. |
|||
*/ |
|||
let setSourceContent; |
|||
/** |
|||
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects |
|||
* a sourcemap, or to JSON.stringify. |
|||
*/ |
|||
let toDecodedMap; |
|||
/** |
|||
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects |
|||
* a sourcemap, or to JSON.stringify. |
|||
*/ |
|||
let toEncodedMap; |
|||
// This split declaration is only so that terser can elminiate the static initialization block.
|
|||
let addSegmentInternal; |
|||
/** |
|||
* Provides the state to generate a sourcemap. |
|||
*/ |
|||
class GenMapping { |
|||
constructor({ file, sourceRoot } = {}) { |
|||
this._names = new SetArray(); |
|||
this._sources = new SetArray(); |
|||
this._sourcesContent = []; |
|||
this._mappings = []; |
|||
this.file = file; |
|||
this.sourceRoot = sourceRoot; |
|||
} |
|||
} |
|||
(() => { |
|||
maybeAddMapping = (map, mapping) => { |
|||
return addMappingInternal(true, map, mapping); |
|||
}; |
|||
setSourceContent = (map, source, content) => { |
|||
const { _sources: sources, _sourcesContent: sourcesContent } = map; |
|||
sourcesContent[put(sources, source)] = content; |
|||
}; |
|||
toDecodedMap = (map) => { |
|||
const { file, sourceRoot, _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, } = map; |
|||
removeEmptyFinalLines(mappings); |
|||
return { |
|||
version: 3, |
|||
file: file || undefined, |
|||
names: names.array, |
|||
sourceRoot: sourceRoot || undefined, |
|||
sources: sources.array, |
|||
sourcesContent, |
|||
mappings, |
|||
}; |
|||
}; |
|||
toEncodedMap = (map) => { |
|||
const decoded = toDecodedMap(map); |
|||
return Object.assign(Object.assign({}, decoded), { mappings: encode(decoded.mappings) }); |
|||
}; |
|||
// Internal helpers
|
|||
addSegmentInternal = (skipable, map, genLine, genColumn, source, sourceLine, sourceColumn, name) => { |
|||
const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, } = map; |
|||
const line = getLine(mappings, genLine); |
|||
const index = getColumnIndex(line, genColumn); |
|||
if (!source) { |
|||
if (skipable && skipSourceless(line, index)) |
|||
return; |
|||
return insert(line, index, [genColumn]); |
|||
} |
|||
const sourcesIndex = put(sources, source); |
|||
const namesIndex = name ? put(names, name) : NO_NAME; |
|||
if (sourcesIndex === sourcesContent.length) |
|||
sourcesContent[sourcesIndex] = null; |
|||
if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) { |
|||
return; |
|||
} |
|||
return insert(line, index, name |
|||
? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex] |
|||
: [genColumn, sourcesIndex, sourceLine, sourceColumn]); |
|||
}; |
|||
})(); |
|||
function getLine(mappings, index) { |
|||
for (let i = mappings.length; i <= index; i++) { |
|||
mappings[i] = []; |
|||
} |
|||
return mappings[index]; |
|||
} |
|||
function getColumnIndex(line, genColumn) { |
|||
let index = line.length; |
|||
for (let i = index - 1; i >= 0; index = i--) { |
|||
const current = line[i]; |
|||
if (genColumn >= current[COLUMN]) |
|||
break; |
|||
} |
|||
return index; |
|||
} |
|||
function insert(array, index, value) { |
|||
for (let i = array.length; i > index; i--) { |
|||
array[i] = array[i - 1]; |
|||
} |
|||
array[index] = value; |
|||
} |
|||
function removeEmptyFinalLines(mappings) { |
|||
const { length } = mappings; |
|||
let len = length; |
|||
for (let i = len - 1; i >= 0; len = i, i--) { |
|||
if (mappings[i].length > 0) |
|||
break; |
|||
} |
|||
if (len < length) |
|||
mappings.length = len; |
|||
} |
|||
function skipSourceless(line, index) { |
|||
// The start of a line is already sourceless, so adding a sourceless segment to the beginning
|
|||
// doesn't generate any useful information.
|
|||
if (index === 0) |
|||
return true; |
|||
const prev = line[index - 1]; |
|||
// If the previous segment is also sourceless, then adding another sourceless segment doesn't
|
|||
// genrate any new information. Else, this segment will end the source/named segment and point to
|
|||
// a sourceless position, which is useful.
|
|||
return prev.length === 1; |
|||
} |
|||
function skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex) { |
|||
// A source/named segment at the start of a line gives position at that genColumn
|
|||
if (index === 0) |
|||
return false; |
|||
const prev = line[index - 1]; |
|||
// If the previous segment is sourceless, then we're transitioning to a source.
|
|||
if (prev.length === 1) |
|||
return false; |
|||
// If the previous segment maps to the exact same source position, then this segment doesn't
|
|||
// provide any new position information.
|
|||
return (sourcesIndex === prev[SOURCES_INDEX] && |
|||
sourceLine === prev[SOURCE_LINE] && |
|||
sourceColumn === prev[SOURCE_COLUMN] && |
|||
namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME)); |
|||
} |
|||
function addMappingInternal(skipable, map, mapping) { |
|||
const { generated, source, original, name } = mapping; |
|||
if (!source) { |
|||
return addSegmentInternal(skipable, map, generated.line - 1, generated.column, null, null, null, null); |
|||
} |
|||
const s = source; |
|||
return addSegmentInternal(skipable, map, generated.line - 1, generated.column, s, original.line - 1, original.column, name); |
|||
} |
|||
|
|||
class SourceMapConsumer { |
|||
constructor(map, mapUrl) { |
|||
const trace = (this._map = new AnyMap(map, mapUrl)); |
|||
this.file = trace.file; |
|||
this.names = trace.names; |
|||
this.sourceRoot = trace.sourceRoot; |
|||
this.sources = trace.resolvedSources; |
|||
this.sourcesContent = trace.sourcesContent; |
|||
} |
|||
originalPositionFor(needle) { |
|||
return originalPositionFor(this._map, needle); |
|||
} |
|||
destroy() { |
|||
// noop.
|
|||
} |
|||
} |
|||
class SourceMapGenerator { |
|||
constructor(opts) { |
|||
this._map = new GenMapping(opts); |
|||
} |
|||
addMapping(mapping) { |
|||
maybeAddMapping(this._map, mapping); |
|||
} |
|||
setSourceContent(source, content) { |
|||
setSourceContent(this._map, source, content); |
|||
} |
|||
toJSON() { |
|||
return toEncodedMap(this._map); |
|||
} |
|||
toDecodedMap() { |
|||
return toDecodedMap(this._map); |
|||
} |
|||
} |
|||
|
|||
export { SourceMapConsumer, SourceMapGenerator }; |
|||
//# sourceMappingURL=source-map.mjs.map
|
|||
File diff suppressed because one or more lines are too long
@ -0,0 +1,939 @@ |
|||
(function (global, factory) { |
|||
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) : |
|||
typeof define === 'function' && define.amd ? define(['exports'], factory) : |
|||
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.sourceMap = {})); |
|||
})(this, (function (exports) { 'use strict'; |
|||
|
|||
const comma = ','.charCodeAt(0); |
|||
const semicolon = ';'.charCodeAt(0); |
|||
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'; |
|||
const intToChar = new Uint8Array(64); // 64 possible chars.
|
|||
const charToInteger = new Uint8Array(128); // z is 122 in ASCII
|
|||
for (let i = 0; i < chars.length; i++) { |
|||
const c = chars.charCodeAt(i); |
|||
charToInteger[c] = i; |
|||
intToChar[i] = c; |
|||
} |
|||
// Provide a fallback for older environments.
|
|||
const td = typeof TextDecoder !== 'undefined' |
|||
? new TextDecoder() |
|||
: typeof Buffer !== 'undefined' |
|||
? { |
|||
decode(buf) { |
|||
const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength); |
|||
return out.toString(); |
|||
}, |
|||
} |
|||
: { |
|||
decode(buf) { |
|||
let out = ''; |
|||
for (let i = 0; i < buf.length; i++) { |
|||
out += String.fromCharCode(buf[i]); |
|||
} |
|||
return out; |
|||
}, |
|||
}; |
|||
function decode(mappings) { |
|||
const state = new Int32Array(5); |
|||
const decoded = []; |
|||
let line = []; |
|||
let sorted = true; |
|||
let lastCol = 0; |
|||
for (let i = 0; i < mappings.length;) { |
|||
const c = mappings.charCodeAt(i); |
|||
if (c === comma) { |
|||
i++; |
|||
} |
|||
else if (c === semicolon) { |
|||
state[0] = lastCol = 0; |
|||
if (!sorted) |
|||
sort(line); |
|||
sorted = true; |
|||
decoded.push(line); |
|||
line = []; |
|||
i++; |
|||
} |
|||
else { |
|||
i = decodeInteger(mappings, i, state, 0); // generatedCodeColumn
|
|||
const col = state[0]; |
|||
if (col < lastCol) |
|||
sorted = false; |
|||
lastCol = col; |
|||
if (!hasMoreSegments(mappings, i)) { |
|||
line.push([col]); |
|||
continue; |
|||
} |
|||
i = decodeInteger(mappings, i, state, 1); // sourceFileIndex
|
|||
i = decodeInteger(mappings, i, state, 2); // sourceCodeLine
|
|||
i = decodeInteger(mappings, i, state, 3); // sourceCodeColumn
|
|||
if (!hasMoreSegments(mappings, i)) { |
|||
line.push([col, state[1], state[2], state[3]]); |
|||
continue; |
|||
} |
|||
i = decodeInteger(mappings, i, state, 4); // nameIndex
|
|||
line.push([col, state[1], state[2], state[3], state[4]]); |
|||
} |
|||
} |
|||
if (!sorted) |
|||
sort(line); |
|||
decoded.push(line); |
|||
return decoded; |
|||
} |
|||
function decodeInteger(mappings, pos, state, j) { |
|||
let value = 0; |
|||
let shift = 0; |
|||
let integer = 0; |
|||
do { |
|||
const c = mappings.charCodeAt(pos++); |
|||
integer = charToInteger[c]; |
|||
value |= (integer & 31) << shift; |
|||
shift += 5; |
|||
} while (integer & 32); |
|||
const shouldNegate = value & 1; |
|||
value >>>= 1; |
|||
if (shouldNegate) { |
|||
value = -0x80000000 | -value; |
|||
} |
|||
state[j] += value; |
|||
return pos; |
|||
} |
|||
function hasMoreSegments(mappings, i) { |
|||
if (i >= mappings.length) |
|||
return false; |
|||
const c = mappings.charCodeAt(i); |
|||
if (c === comma || c === semicolon) |
|||
return false; |
|||
return true; |
|||
} |
|||
function sort(line) { |
|||
line.sort(sortComparator$1); |
|||
} |
|||
function sortComparator$1(a, b) { |
|||
return a[0] - b[0]; |
|||
} |
|||
function encode(decoded) { |
|||
const state = new Int32Array(5); |
|||
let buf = new Uint8Array(1024); |
|||
let pos = 0; |
|||
for (let i = 0; i < decoded.length; i++) { |
|||
const line = decoded[i]; |
|||
if (i > 0) { |
|||
buf = reserve(buf, pos, 1); |
|||
buf[pos++] = semicolon; |
|||
} |
|||
if (line.length === 0) |
|||
continue; |
|||
state[0] = 0; |
|||
for (let j = 0; j < line.length; j++) { |
|||
const segment = line[j]; |
|||
// We can push up to 5 ints, each int can take at most 7 chars, and we
|
|||
// may push a comma.
|
|||
buf = reserve(buf, pos, 36); |
|||
if (j > 0) |
|||
buf[pos++] = comma; |
|||
pos = encodeInteger(buf, pos, state, segment, 0); // generatedCodeColumn
|
|||
if (segment.length === 1) |
|||
continue; |
|||
pos = encodeInteger(buf, pos, state, segment, 1); // sourceFileIndex
|
|||
pos = encodeInteger(buf, pos, state, segment, 2); // sourceCodeLine
|
|||
pos = encodeInteger(buf, pos, state, segment, 3); // sourceCodeColumn
|
|||
if (segment.length === 4) |
|||
continue; |
|||
pos = encodeInteger(buf, pos, state, segment, 4); // nameIndex
|
|||
} |
|||
} |
|||
return td.decode(buf.subarray(0, pos)); |
|||
} |
|||
function reserve(buf, pos, count) { |
|||
if (buf.length > pos + count) |
|||
return buf; |
|||
const swap = new Uint8Array(buf.length * 2); |
|||
swap.set(buf); |
|||
return swap; |
|||
} |
|||
function encodeInteger(buf, pos, state, segment, j) { |
|||
const next = segment[j]; |
|||
let num = next - state[j]; |
|||
state[j] = next; |
|||
num = num < 0 ? (-num << 1) | 1 : num << 1; |
|||
do { |
|||
let clamped = num & 0b011111; |
|||
num >>>= 5; |
|||
if (num > 0) |
|||
clamped |= 0b100000; |
|||
buf[pos++] = intToChar[clamped]; |
|||
} while (num > 0); |
|||
return pos; |
|||
} |
|||
|
|||
// Matches the scheme of a URL, eg "http://"
|
|||
const schemeRegex = /^[\w+.-]+:\/\//; |
|||
/** |
|||
* Matches the parts of a URL: |
|||
* 1. Scheme, including ":", guaranteed. |
|||
* 2. User/password, including "@", optional. |
|||
* 3. Host, guaranteed. |
|||
* 4. Port, including ":", optional. |
|||
* 5. Path, including "/", optional. |
|||
*/ |
|||
const urlRegex = /^([\w+.-]+:)\/\/([^@/#?]*@)?([^:/#?]*)(:\d+)?(\/[^#?]*)?/; |
|||
/** |
|||
* File URLs are weird. They dont' need the regular `//` in the scheme, they may or may not start |
|||
* with a leading `/`, they can have a domain (but only if they don't start with a Windows drive). |
|||
* |
|||
* 1. Host, optional. |
|||
* 2. Path, which may inclue "/", guaranteed. |
|||
*/ |
|||
const fileRegex = /^file:(?:\/\/((?![a-z]:)[^/]*)?)?(\/?.*)/i; |
|||
function isAbsoluteUrl(input) { |
|||
return schemeRegex.test(input); |
|||
} |
|||
function isSchemeRelativeUrl(input) { |
|||
return input.startsWith('//'); |
|||
} |
|||
function isAbsolutePath(input) { |
|||
return input.startsWith('/'); |
|||
} |
|||
function isFileUrl(input) { |
|||
return input.startsWith('file:'); |
|||
} |
|||
function parseAbsoluteUrl(input) { |
|||
const match = urlRegex.exec(input); |
|||
return makeUrl(match[1], match[2] || '', match[3], match[4] || '', match[5] || '/'); |
|||
} |
|||
function parseFileUrl(input) { |
|||
const match = fileRegex.exec(input); |
|||
const path = match[2]; |
|||
return makeUrl('file:', '', match[1] || '', '', isAbsolutePath(path) ? path : '/' + path); |
|||
} |
|||
function makeUrl(scheme, user, host, port, path) { |
|||
return { |
|||
scheme, |
|||
user, |
|||
host, |
|||
port, |
|||
path, |
|||
relativePath: false, |
|||
}; |
|||
} |
|||
function parseUrl(input) { |
|||
if (isSchemeRelativeUrl(input)) { |
|||
const url = parseAbsoluteUrl('http:' + input); |
|||
url.scheme = ''; |
|||
return url; |
|||
} |
|||
if (isAbsolutePath(input)) { |
|||
const url = parseAbsoluteUrl('http://foo.com' + input); |
|||
url.scheme = ''; |
|||
url.host = ''; |
|||
return url; |
|||
} |
|||
if (isFileUrl(input)) |
|||
return parseFileUrl(input); |
|||
if (isAbsoluteUrl(input)) |
|||
return parseAbsoluteUrl(input); |
|||
const url = parseAbsoluteUrl('http://foo.com/' + input); |
|||
url.scheme = ''; |
|||
url.host = ''; |
|||
url.relativePath = true; |
|||
return url; |
|||
} |
|||
function stripPathFilename(path) { |
|||
// If a path ends with a parent directory "..", then it's a relative path with excess parent
|
|||
// paths. It's not a file, so we can't strip it.
|
|||
if (path.endsWith('/..')) |
|||
return path; |
|||
const index = path.lastIndexOf('/'); |
|||
return path.slice(0, index + 1); |
|||
} |
|||
function mergePaths(url, base) { |
|||
// If we're not a relative path, then we're an absolute path, and it doesn't matter what base is.
|
|||
if (!url.relativePath) |
|||
return; |
|||
normalizePath(base); |
|||
// If the path is just a "/", then it was an empty path to begin with (remember, we're a relative
|
|||
// path).
|
|||
if (url.path === '/') { |
|||
url.path = base.path; |
|||
} |
|||
else { |
|||
// Resolution happens relative to the base path's directory, not the file.
|
|||
url.path = stripPathFilename(base.path) + url.path; |
|||
} |
|||
// If the base path is absolute, then our path is now absolute too.
|
|||
url.relativePath = base.relativePath; |
|||
} |
|||
/** |
|||
* The path can have empty directories "//", unneeded parents "foo/..", or current directory |
|||
* "foo/.". We need to normalize to a standard representation. |
|||
*/ |
|||
function normalizePath(url) { |
|||
const { relativePath } = url; |
|||
const pieces = url.path.split('/'); |
|||
// We need to preserve the first piece always, so that we output a leading slash. The item at
|
|||
// pieces[0] is an empty string.
|
|||
let pointer = 1; |
|||
// Positive is the number of real directories we've output, used for popping a parent directory.
|
|||
// Eg, "foo/bar/.." will have a positive 2, and we can decrement to be left with just "foo".
|
|||
let positive = 0; |
|||
// We need to keep a trailing slash if we encounter an empty directory (eg, splitting "foo/" will
|
|||
// generate `["foo", ""]` pieces). And, if we pop a parent directory. But once we encounter a
|
|||
// real directory, we won't need to append, unless the other conditions happen again.
|
|||
let addTrailingSlash = false; |
|||
for (let i = 1; i < pieces.length; i++) { |
|||
const piece = pieces[i]; |
|||
// An empty directory, could be a trailing slash, or just a double "//" in the path.
|
|||
if (!piece) { |
|||
addTrailingSlash = true; |
|||
continue; |
|||
} |
|||
// If we encounter a real directory, then we don't need to append anymore.
|
|||
addTrailingSlash = false; |
|||
// A current directory, which we can always drop.
|
|||
if (piece === '.') |
|||
continue; |
|||
// A parent directory, we need to see if there are any real directories we can pop. Else, we
|
|||
// have an excess of parents, and we'll need to keep the "..".
|
|||
if (piece === '..') { |
|||
if (positive) { |
|||
addTrailingSlash = true; |
|||
positive--; |
|||
pointer--; |
|||
} |
|||
else if (relativePath) { |
|||
// If we're in a relativePath, then we need to keep the excess parents. Else, in an absolute
|
|||
// URL, protocol relative URL, or an absolute path, we don't need to keep excess.
|
|||
pieces[pointer++] = piece; |
|||
} |
|||
continue; |
|||
} |
|||
// We've encountered a real directory. Move it to the next insertion pointer, which accounts for
|
|||
// any popped or dropped directories.
|
|||
pieces[pointer++] = piece; |
|||
positive++; |
|||
} |
|||
let path = ''; |
|||
for (let i = 1; i < pointer; i++) { |
|||
path += '/' + pieces[i]; |
|||
} |
|||
if (!path || (addTrailingSlash && !path.endsWith('/..'))) { |
|||
path += '/'; |
|||
} |
|||
url.path = path; |
|||
} |
|||
/** |
|||
* Attempts to resolve `input` URL/path relative to `base`. |
|||
*/ |
|||
function resolve$1(input, base) { |
|||
if (!input && !base) |
|||
return ''; |
|||
const url = parseUrl(input); |
|||
// If we have a base, and the input isn't already an absolute URL, then we need to merge.
|
|||
if (base && !url.scheme) { |
|||
const baseUrl = parseUrl(base); |
|||
url.scheme = baseUrl.scheme; |
|||
// If there's no host, then we were just a path.
|
|||
if (!url.host) { |
|||
// The host, user, and port are joined, you can't copy one without the others.
|
|||
url.user = baseUrl.user; |
|||
url.host = baseUrl.host; |
|||
url.port = baseUrl.port; |
|||
} |
|||
mergePaths(url, baseUrl); |
|||
} |
|||
normalizePath(url); |
|||
// If the input (and base, if there was one) are both relative, then we need to output a relative.
|
|||
if (url.relativePath) { |
|||
// The first char is always a "/".
|
|||
const path = url.path.slice(1); |
|||
if (!path) |
|||
return '.'; |
|||
// If base started with a leading ".", or there is no base and input started with a ".", then we
|
|||
// need to ensure that the relative path starts with a ".". We don't know if relative starts
|
|||
// with a "..", though, so check before prepending.
|
|||
const keepRelative = (base || input).startsWith('.'); |
|||
return !keepRelative || path.startsWith('.') ? path : './' + path; |
|||
} |
|||
// If there's no host (and no scheme/user/port), then we need to output an absolute path.
|
|||
if (!url.scheme && !url.host) |
|||
return url.path; |
|||
// We're outputting either an absolute URL, or a protocol relative one.
|
|||
return `${url.scheme}//${url.user}${url.host}${url.port}${url.path}`; |
|||
} |
|||
|
|||
function resolve(input, base) { |
|||
// The base is always treated as a directory, if it's not empty.
|
|||
// https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
|
|||
// https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
|
|||
if (base && !base.endsWith('/')) |
|||
base += '/'; |
|||
return resolve$1(input, base); |
|||
} |
|||
|
|||
/** |
|||
* Removes everything after the last "/", but leaves the slash. |
|||
*/ |
|||
function stripFilename(path) { |
|||
if (!path) |
|||
return ''; |
|||
const index = path.lastIndexOf('/'); |
|||
return path.slice(0, index + 1); |
|||
} |
|||
|
|||
const COLUMN$1 = 0; |
|||
const SOURCES_INDEX$1 = 1; |
|||
const SOURCE_LINE$1 = 2; |
|||
const SOURCE_COLUMN$1 = 3; |
|||
const NAMES_INDEX$1 = 4; |
|||
|
|||
function maybeSort(mappings, owned) { |
|||
const unsortedIndex = nextUnsortedSegmentLine(mappings, 0); |
|||
if (unsortedIndex === mappings.length) |
|||
return mappings; |
|||
// If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
|
|||
// not, we do not want to modify the consumer's input array.
|
|||
if (!owned) |
|||
mappings = mappings.slice(); |
|||
for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) { |
|||
mappings[i] = sortSegments(mappings[i], owned); |
|||
} |
|||
return mappings; |
|||
} |
|||
function nextUnsortedSegmentLine(mappings, start) { |
|||
for (let i = start; i < mappings.length; i++) { |
|||
if (!isSorted(mappings[i])) |
|||
return i; |
|||
} |
|||
return mappings.length; |
|||
} |
|||
function isSorted(line) { |
|||
for (let j = 1; j < line.length; j++) { |
|||
if (line[j][COLUMN$1] < line[j - 1][COLUMN$1]) { |
|||
return false; |
|||
} |
|||
} |
|||
return true; |
|||
} |
|||
function sortSegments(line, owned) { |
|||
if (!owned) |
|||
line = line.slice(); |
|||
return line.sort(sortComparator); |
|||
} |
|||
function sortComparator(a, b) { |
|||
return a[COLUMN$1] - b[COLUMN$1]; |
|||
} |
|||
|
|||
let found = false; |
|||
/** |
|||
* A binary search implementation that returns the index if a match is found. |
|||
* If no match is found, then the left-index (the index associated with the item that comes just |
|||
* before the desired index) is returned. To maintain proper sort order, a splice would happen at |
|||
* the next index: |
|||
* |
|||
* ```js
|
|||
* const array = [1, 3]; |
|||
* const needle = 2; |
|||
* const index = binarySearch(array, needle, (item, needle) => item - needle); |
|||
* |
|||
* assert.equal(index, 0); |
|||
* array.splice(index + 1, 0, needle); |
|||
* assert.deepEqual(array, [1, 2, 3]); |
|||
* ``` |
|||
*/ |
|||
function binarySearch(haystack, needle, low, high) { |
|||
while (low <= high) { |
|||
const mid = low + ((high - low) >> 1); |
|||
const cmp = haystack[mid][COLUMN$1] - needle; |
|||
if (cmp === 0) { |
|||
found = true; |
|||
return mid; |
|||
} |
|||
if (cmp < 0) { |
|||
low = mid + 1; |
|||
} |
|||
else { |
|||
high = mid - 1; |
|||
} |
|||
} |
|||
found = false; |
|||
return low - 1; |
|||
} |
|||
function upperBound(haystack, needle, index) { |
|||
for (let i = index + 1; i < haystack.length; i++, index++) { |
|||
if (haystack[i][COLUMN$1] !== needle) |
|||
break; |
|||
} |
|||
return index; |
|||
} |
|||
function lowerBound(haystack, needle, index) { |
|||
for (let i = index - 1; i >= 0; i--, index--) { |
|||
if (haystack[i][COLUMN$1] !== needle) |
|||
break; |
|||
} |
|||
return index; |
|||
} |
|||
function memoizedState() { |
|||
return { |
|||
lastKey: -1, |
|||
lastNeedle: -1, |
|||
lastIndex: -1, |
|||
}; |
|||
} |
|||
/** |
|||
* This overly complicated beast is just to record the last tested line/column and the resulting |
|||
* index, allowing us to skip a few tests if mappings are monotonically increasing. |
|||
*/ |
|||
function memoizedBinarySearch(haystack, needle, state, key) { |
|||
const { lastKey, lastNeedle, lastIndex } = state; |
|||
let low = 0; |
|||
let high = haystack.length - 1; |
|||
if (key === lastKey) { |
|||
if (needle === lastNeedle) { |
|||
found = lastIndex !== -1 && haystack[lastIndex][COLUMN$1] === needle; |
|||
return lastIndex; |
|||
} |
|||
if (needle >= lastNeedle) { |
|||
// lastIndex may be -1 if the previous needle was not found.
|
|||
low = lastIndex === -1 ? 0 : lastIndex; |
|||
} |
|||
else { |
|||
high = lastIndex; |
|||
} |
|||
} |
|||
state.lastKey = key; |
|||
state.lastNeedle = needle; |
|||
return (state.lastIndex = binarySearch(haystack, needle, low, high)); |
|||
} |
|||
|
|||
const AnyMap = function (map, mapUrl) { |
|||
const parsed = typeof map === 'string' ? JSON.parse(map) : map; |
|||
if (!('sections' in parsed)) |
|||
return new TraceMap(parsed, mapUrl); |
|||
const mappings = []; |
|||
const sources = []; |
|||
const sourcesContent = []; |
|||
const names = []; |
|||
const { sections } = parsed; |
|||
let i = 0; |
|||
for (; i < sections.length - 1; i++) { |
|||
const no = sections[i + 1].offset; |
|||
addSection(sections[i], mapUrl, mappings, sources, sourcesContent, names, no.line, no.column); |
|||
} |
|||
if (sections.length > 0) { |
|||
addSection(sections[i], mapUrl, mappings, sources, sourcesContent, names, Infinity, Infinity); |
|||
} |
|||
const joined = { |
|||
version: 3, |
|||
file: parsed.file, |
|||
names, |
|||
sources, |
|||
sourcesContent, |
|||
mappings, |
|||
}; |
|||
return presortedDecodedMap(joined); |
|||
}; |
|||
function addSection(section, mapUrl, mappings, sources, sourcesContent, names, stopLine, stopColumn) { |
|||
const map = AnyMap(section.map, mapUrl); |
|||
const { line: lineOffset, column: columnOffset } = section.offset; |
|||
const sourcesOffset = sources.length; |
|||
const namesOffset = names.length; |
|||
const decoded = decodedMappings(map); |
|||
const { resolvedSources } = map; |
|||
append(sources, resolvedSources); |
|||
append(sourcesContent, map.sourcesContent || fillSourcesContent(resolvedSources.length)); |
|||
append(names, map.names); |
|||
// If this section jumps forwards several lines, we need to add lines to the output mappings catch up.
|
|||
for (let i = mappings.length; i <= lineOffset; i++) |
|||
mappings.push([]); |
|||
// We can only add so many lines before we step into the range that the next section's map
|
|||
// controls. When we get to the last line, then we'll start checking the segments to see if
|
|||
// they've crossed into the column range.
|
|||
const stopI = stopLine - lineOffset; |
|||
const len = Math.min(decoded.length, stopI + 1); |
|||
for (let i = 0; i < len; i++) { |
|||
const line = decoded[i]; |
|||
// On the 0th loop, the line will already exist due to a previous section, or the line catch up
|
|||
// loop above.
|
|||
const out = i === 0 ? mappings[lineOffset] : (mappings[lineOffset + i] = []); |
|||
// On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
|
|||
// map can be multiple lines), it doesn't.
|
|||
const cOffset = i === 0 ? columnOffset : 0; |
|||
for (let j = 0; j < line.length; j++) { |
|||
const seg = line[j]; |
|||
const column = cOffset + seg[COLUMN$1]; |
|||
// If this segment steps into the column range that the next section's map controls, we need
|
|||
// to stop early.
|
|||
if (i === stopI && column >= stopColumn) |
|||
break; |
|||
if (seg.length === 1) { |
|||
out.push([column]); |
|||
continue; |
|||
} |
|||
const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX$1]; |
|||
const sourceLine = seg[SOURCE_LINE$1]; |
|||
const sourceColumn = seg[SOURCE_COLUMN$1]; |
|||
if (seg.length === 4) { |
|||
out.push([column, sourcesIndex, sourceLine, sourceColumn]); |
|||
continue; |
|||
} |
|||
out.push([column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX$1]]); |
|||
} |
|||
} |
|||
} |
|||
function append(arr, other) { |
|||
for (let i = 0; i < other.length; i++) |
|||
arr.push(other[i]); |
|||
} |
|||
// Sourcemaps don't need to have sourcesContent, and if they don't, we need to create an array of
|
|||
// equal length to the sources. This is because the sources and sourcesContent are paired arrays,
|
|||
// where `sourcesContent[i]` is the content of the `sources[i]` file. If we didn't, then joined
|
|||
// sourcemap would desynchronize the sources/contents.
|
|||
function fillSourcesContent(len) { |
|||
const sourcesContent = []; |
|||
for (let i = 0; i < len; i++) |
|||
sourcesContent[i] = null; |
|||
return sourcesContent; |
|||
} |
|||
|
|||
const INVALID_ORIGINAL_MAPPING = Object.freeze({ |
|||
source: null, |
|||
line: null, |
|||
column: null, |
|||
name: null, |
|||
}); |
|||
Object.freeze({ |
|||
line: null, |
|||
column: null, |
|||
}); |
|||
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)'; |
|||
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)'; |
|||
const LEAST_UPPER_BOUND = -1; |
|||
const GREATEST_LOWER_BOUND = 1; |
|||
/**
 * Returns the decoded (array of lines of segments) form of the SourceMap's
 * mappings field. Assigned inside the static-initializer IIFE below.
 */
let decodedMappings;
/**
 * A higher-level API to find the source/line/column associated with a generated
 * line/column (think, from a stack trace). Line is 1-based, but column is
 * 0-based, due to legacy behavior in the `source-map` library.
 */
let originalPositionFor;
/**
 * A helper that skips sorting of the input map's mappings array, which can be
 * expensive for larger maps.
 */
let presortedDecodedMap;
/**
 * Parses a (possibly JSON-stringified) V3 source map and exposes its fields,
 * resolving `sources` against `sourceRoot` and/or the map's own URL. Encoded
 * mappings are kept as a string and decoded lazily via `decodedMappings`.
 */
class TraceMap {
    constructor(map, mapUrl) {
        // Memoized binary-search state reused across lookups on this map.
        this._decodedMemo = memoizedState();
        // Lazily-built inverse (original -> generated) lookup structures.
        this._bySources = undefined;
        this._bySourceMemos = undefined;
        const isString = typeof map === 'string';
        // Already a TraceMap: reuse it instead of re-parsing (returning an
        // object from a constructor overrides `this`).
        if (!isString && map.constructor === TraceMap)
            return map;
        const parsed = (isString ? JSON.parse(map) : map);
        const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
        this.version = version;
        this.file = file;
        this.names = names;
        this.sourceRoot = sourceRoot;
        this.sources = sources;
        this.sourcesContent = sourcesContent;
        if (sourceRoot || mapUrl) {
            // Resolve each source URL relative to sourceRoot and the
            // directory of the map's own location.
            const from = resolve(sourceRoot || '', stripFilename(mapUrl));
            this.resolvedSources = sources.map((s) => resolve(s || '', from));
        }
        else {
            this.resolvedSources = sources.map((s) => s || '');
        }
        const { mappings } = parsed;
        if (typeof mappings === 'string') {
            // Keep the VLQ string; decode on first use.
            this._encoded = mappings;
            this._decoded = undefined;
        }
        else {
            // Already decoded; only sort in place when we own the array
            // (string input implies the parsed copy is private to us).
            this._encoded = undefined;
            this._decoded = maybeSort(mappings, isString);
        }
    }
}
|||
// Static-initializer IIFE: binds the TraceMap helpers declared above
// (split out so a minifier can eliminate it when unused).
(() => {
    // Decode (and cache) the VLQ mappings string into segment arrays.
    decodedMappings = (map) => {
        return (map._decoded || (map._decoded = decode(map._encoded)));
    };
    originalPositionFor = (map, { line, column, bias }) => {
        line--; // convert the 1-based needle line to a 0-based index
        if (line < 0)
            throw new Error(LINE_GTR_ZERO);
        if (column < 0)
            throw new Error(COL_GTR_EQ_ZERO);
        const decoded = decodedMappings(map);
        // It's common for parent source maps to have pointers to lines that have no
        // mapping (like a "//# sourceMappingURL=") at the end of the child file.
        if (line >= decoded.length)
            return INVALID_ORIGINAL_MAPPING;
        const segment = traceSegmentInternal(decoded[line], map._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
        if (segment == null)
            return INVALID_ORIGINAL_MAPPING;
        // A 1-length segment carries only a generated column: no source info.
        if (segment.length == 1)
            return INVALID_ORIGINAL_MAPPING;
        const { names, resolvedSources } = map;
        return {
            source: resolvedSources[segment[SOURCES_INDEX$1]],
            line: segment[SOURCE_LINE$1] + 1, // back to 1-based
            column: segment[SOURCE_COLUMN$1],
            name: segment.length === 5 ? names[segment[NAMES_INDEX$1]] : null,
        };
    };
    presortedDecodedMap = (map, mapUrl) => {
        // Build a TraceMap from a shallow clone with empty mappings (so the
        // constructor skips maybeSort), then install the caller's
        // already-sorted decoded mappings directly.
        const clone = Object.assign({}, map);
        clone.mappings = [];
        const tracer = new TraceMap(clone, mapUrl);
        tracer._decoded = map.mappings;
        return tracer;
    };
})();
|||
/**
 * Finds the segment in `segments` (one decoded line) matching generated
 * `column` via memoized binary search, applying `bias` to break ties.
 * Returns null when no segment matches on the requested side.
 * NOTE(review): reads a module-level `found` flag, presumably set by
 * `memoizedBinarySearch` (declared outside this chunk) — verify upstream.
 */
function traceSegmentInternal(segments, memo, line, column, bias) {
    let index = memoizedBinarySearch(segments, column, memo, line);
    if (found) {
        // Exact column hit: move to the first/last segment with that column.
        index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
    }
    else if (bias === LEAST_UPPER_BOUND)
        index++; // no exact hit: step up to the next-greater segment
    if (index === -1 || index === segments.length)
        return null;
    return segments[index];
}
|||
|
|||
/**
 * Gets the index associated with `key` in the backing array, if it is already present.
 */
let get;
/**
 * Puts `key` into the backing array, if it is not already present. Returns
 * the index of the `key` in the backing array.
 */
let put;
/**
 * SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
 * index of the `key` in the backing array.
 *
 * This is designed to allow synchronizing a second array with the contents of the backing array,
 * like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
 * and there are never duplicates.
 */
class SetArray {
    constructor() {
        // key -> index lookup; null prototype so arbitrary string keys cannot
        // collide with Object.prototype members.
        this._indexes = { __proto__: null };
        // Insertion-ordered backing array of unique keys.
        this.array = [];
    }
}
|||
// Static-initializer IIFE: binds SetArray's `get`/`put` accessors above.
(() => {
    get = (strarr, key) => strarr._indexes[key];
    put = (strarr, key) => {
        // The key may or may not be present. If it is present, it's a number.
        const index = get(strarr, key);
        if (index !== undefined)
            return index;
        // Append and record the new index (push returns the new length).
        const { array, _indexes: indexes } = strarr;
        return (indexes[key] = array.push(key) - 1);
    };
})();
|||
|
|||
// Offsets of the fields within a decoded mapping segment:
// [generatedColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex].
const COLUMN = 0;
const SOURCES_INDEX = 1;
const SOURCE_LINE = 2;
const SOURCE_COLUMN = 3;
const NAMES_INDEX = 4;

// Sentinel namesIndex meaning "this segment has no associated name".
const NO_NAME = -1;
|||
/**
 * Same as `addMapping`, but will only add the mapping if it generates useful information in the
 * resulting map. This only works correctly if mappings are added **in order**, meaning you should
 * not add a mapping with a lower generated line/column than one that came before.
 */
let maybeAddMapping;
/**
 * Adds/removes the content of the source file to the source map.
 */
let setSourceContent;
/**
 * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
let toDecodedMap;
/**
 * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
let toEncodedMap;
// This split declaration is only so that terser can eliminate the static initialization block.
let addSegmentInternal;
/**
 * Provides the state to generate a sourcemap.
 */
class GenMapping {
    constructor({ file, sourceRoot } = {}) {
        // Deduplicated, index-stable name/source tables (see SetArray).
        this._names = new SetArray();
        this._sources = new SetArray();
        // _sourcesContent[i] pairs with _sources.array[i] (null when unknown).
        this._sourcesContent = [];
        // Decoded mappings: one array of segments per generated line.
        this._mappings = [];
        this.file = file;
        this.sourceRoot = sourceRoot;
    }
}
|||
// Static-initializer IIFE: binds the GenMapping helpers declared above.
(() => {
    maybeAddMapping = (map, mapping) => {
        return addMappingInternal(true, map, mapping);
    };
    setSourceContent = (map, source, content) => {
        // `put` dedupes the source and yields its stable index, keeping
        // sourcesContent[i] paired with sources.array[i].
        const { _sources: sources, _sourcesContent: sourcesContent } = map;
        sourcesContent[put(sources, source)] = content;
    };
    toDecodedMap = (map) => {
        const { file, sourceRoot, _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, } = map;
        // Trailing empty lines would only serialize as useless ';' runs.
        removeEmptyFinalLines(mappings);
        return {
            version: 3,
            file: file || undefined,
            names: names.array,
            sourceRoot: sourceRoot || undefined,
            sources: sources.array,
            sourcesContent,
            mappings,
        };
    };
    toEncodedMap = (map) => {
        const decoded = toDecodedMap(map);
        return Object.assign(Object.assign({}, decoded), { mappings: encode(decoded.mappings) });
    };
    // Internal helpers
    addSegmentInternal = (skipable, map, genLine, genColumn, source, sourceLine, sourceColumn, name) => {
        const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, } = map;
        const line = getLine(mappings, genLine);
        const index = getColumnIndex(line, genColumn);
        if (!source) {
            // Sourceless segment: skip when it would add no information.
            if (skipable && skipSourceless(line, index))
                return;
            return insert(line, index, [genColumn]);
        }
        const sourcesIndex = put(sources, source);
        const namesIndex = name ? put(names, name) : NO_NAME;
        // First time we see this source: reserve its sourcesContent slot.
        if (sourcesIndex === sourcesContent.length)
            sourcesContent[sourcesIndex] = null;
        if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) {
            return;
        }
        return insert(line, index, name
            ? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
            : [genColumn, sourcesIndex, sourceLine, sourceColumn]);
    };
})();
|||
/**
 * Returns the segment list for generated line `index`, growing `mappings`
 * with fresh empty lines as needed so the index always exists.
 */
function getLine(mappings, index) {
    let len = mappings.length;
    while (len <= index) {
        mappings[len++] = [];
    }
    return mappings[index];
}
|||
/**
 * Finds the insertion index in `line` for a segment at generated column
 * `genColumn`, keeping the line sorted by column. Scans backwards from the
 * end because mappings are usually appended in order, so the answer is
 * typically at or near `line.length`.
 */
function getColumnIndex(line, genColumn) {
    let index = line.length;
    while (index > 0) {
        const candidate = line[index - 1];
        if (genColumn >= candidate[COLUMN])
            break;
        index--;
    }
    return index;
}
|||
/**
 * Inserts `value` at `index` in `array`, shifting later elements right.
 * Delegates to the standard splice instead of a manual shift loop.
 */
function insert(array, index, value) {
    array.splice(index, 0, value);
}
|||
/**
 * Truncates trailing empty lines from a decoded mappings array in place, so
 * the serialized map does not end with a run of useless ';' separators.
 */
function removeEmptyFinalLines(mappings) {
    let keep = mappings.length;
    while (keep > 0 && mappings[keep - 1].length === 0) {
        keep--;
    }
    if (keep < mappings.length)
        mappings.length = keep;
}
|||
/**
 * Decides whether appending a sourceless segment at `index` would add any
 * information to `line`.
 *
 * The start of a line is already sourceless, and two sourceless segments in a
 * row are redundant; a sourceless segment only helps when it terminates a
 * preceding source/named segment.
 */
function skipSourceless(line, index) {
    if (index === 0)
        return true;
    // A 1-length segment carries only a generated column, i.e. is sourceless.
    return line[index - 1].length === 1;
}
|||
/**
 * Decides whether appending a segment mapping to the given source position
 * (and optional name) at `index` would add any information to `line`.
 */
function skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex) {
    // A source/named segment at the start of a line pins the position there.
    if (index === 0)
        return false;
    const prev = line[index - 1];
    // Transitioning from a sourceless segment to a sourced one is new info.
    if (prev.length === 1)
        return false;
    // Identical source position and name as the previous segment: redundant.
    const prevName = prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME;
    return (sourcesIndex === prev[SOURCES_INDEX] &&
        sourceLine === prev[SOURCE_LINE] &&
        sourceColumn === prev[SOURCE_COLUMN] &&
        namesIndex === prevName);
}
|||
/**
 * Normalizes a `{ generated, original, source, name }` mapping (1-based
 * lines, per the source-map API) into the 0-based segment form expected by
 * `addSegmentInternal`.
 */
function addMappingInternal(skipable, map, mapping) {
    const { generated, source, original, name } = mapping;
    if (!source) {
        // Sourceless mapping: only the generated position is recorded.
        return addSegmentInternal(skipable, map, generated.line - 1, generated.column, null, null, null, null);
    }
    const s = source;
    return addSegmentInternal(skipable, map, generated.line - 1, generated.column, s, original.line - 1, original.column, name);
}
|||
|
|||
/**
 * Minimal `source-map`-compatible consumer backed by trace-mapping's AnyMap
 * (which also accepts sectioned/"indexed" source maps).
 */
class SourceMapConsumer {
    constructor(map, mapUrl) {
        const trace = (this._map = new AnyMap(map, mapUrl));
        this.file = trace.file;
        this.names = trace.names;
        this.sourceRoot = trace.sourceRoot;
        // Exposes the *resolved* source URLs, matching source-map's API.
        this.sources = trace.resolvedSources;
        this.sourcesContent = trace.sourcesContent;
    }
    /** Maps a generated {line, column[, bias]} back to an original position. */
    originalPositionFor(needle) {
        return originalPositionFor(this._map, needle);
    }
    /** Kept for API compatibility with source-map's WASM-backed consumer. */
    destroy() {
        // noop.
    }
}
|||
/**
 * Minimal `source-map`-compatible generator backed by gen-mapping's
 * GenMapping state object.
 */
class SourceMapGenerator {
    constructor(opts) {
        this._map = new GenMapping(opts);
    }
    /** Adds a mapping only when it contributes new info (in-order adds only). */
    addMapping(mapping) {
        maybeAddMapping(this._map, mapping);
    }
    /** Records the content of `source` in the map's sourcesContent. */
    setSourceContent(source, content) {
        setSourceContent(this._map, source, content);
    }
    /** Map object with an encoded (VLQ string) mappings field. */
    toJSON() {
        return toEncodedMap(this._map);
    }
    /** Map object with decoded (array-of-segments) mappings. */
    toDecodedMap() {
        return toDecodedMap(this._map);
    }
}
|||
|
|||
exports.SourceMapConsumer = SourceMapConsumer; |
|||
exports.SourceMapGenerator = SourceMapGenerator; |
|||
|
|||
Object.defineProperty(exports, '__esModule', { value: true }); |
|||
|
|||
})); |
|||
//# sourceMappingURL=source-map.umd.js.map
|
|||
File diff suppressed because one or more lines are too long
@ -0,0 +1,25 @@ |
|||
import { AnyMap, originalPositionFor } from '@jridgewell/trace-mapping';
import { GenMapping, maybeAddMapping, toDecodedMap, toEncodedMap, setSourceContent } from '@jridgewell/gen-mapping';
import type { TraceMap, SectionedSourceMapInput } from '@jridgewell/trace-mapping';
export type { TraceMap, SectionedSourceMapInput };
import type { Mapping, EncodedSourceMap, DecodedSourceMap } from '@jridgewell/gen-mapping';
export type { Mapping, EncodedSourceMap, DecodedSourceMap };
/**
 * `source-map`-compatible consumer. Parameter/return types are derived from
 * the underlying trace-mapping functions so the packages stay in sync.
 */
export declare class SourceMapConsumer {
    private _map;
    file: TraceMap['file'];
    names: TraceMap['names'];
    sourceRoot: TraceMap['sourceRoot'];
    sources: TraceMap['sources'];
    sourcesContent: TraceMap['sourcesContent'];
    constructor(map: ConstructorParameters<typeof AnyMap>[0], mapUrl: Parameters<typeof AnyMap>[1]);
    originalPositionFor(needle: Parameters<typeof originalPositionFor>[1]): ReturnType<typeof originalPositionFor>;
    destroy(): void;
}
/** `source-map`-compatible generator mirroring @jridgewell/gen-mapping. */
export declare class SourceMapGenerator {
    private _map;
    constructor(opts: ConstructorParameters<typeof GenMapping>[0]);
    addMapping(mapping: Parameters<typeof maybeAddMapping>[1]): ReturnType<typeof maybeAddMapping>;
    setSourceContent(source: Parameters<typeof setSourceContent>[1], content: Parameters<typeof setSourceContent>[2]): ReturnType<typeof setSourceContent>;
    toJSON(): ReturnType<typeof toEncodedMap>;
    toDecodedMap(): ReturnType<typeof toDecodedMap>;
}
|||
@ -0,0 +1,67 @@ |
|||
{ |
|||
"name": "@jridgewell/source-map", |
|||
"version": "0.3.2", |
|||
"description": "Packages @jridgewell/trace-mapping and @jridgewell/gen-mapping into the familiar source-map API", |
|||
"keywords": [ |
|||
"sourcemap", |
|||
"source", |
|||
"map" |
|||
], |
|||
"author": "Justin Ridgewell <justin@ridgewell.name>", |
|||
"license": "MIT", |
|||
"repository": "https://github.com/jridgewell/source-map", |
|||
"main": "dist/source-map.umd.js", |
|||
"module": "dist/source-map.mjs", |
|||
"typings": "dist/types/source-map.d.ts", |
|||
"exports": { |
|||
".": { |
|||
"browser": "./dist/source-map.umd.js", |
|||
"require": "./dist/source-map.umd.js", |
|||
"import": "./dist/source-map.mjs" |
|||
}, |
|||
"./package.json": "./package.json" |
|||
}, |
|||
"files": [ |
|||
"dist" |
|||
], |
|||
"scripts": { |
|||
"prebuild": "rm -rf dist", |
|||
"build": "run-s -n build:*", |
|||
"build:rollup": "rollup -c rollup.config.js", |
|||
"build:ts": "tsc --project tsconfig.build.json", |
|||
"lint": "run-s -n lint:*", |
|||
"lint:prettier": "npm run test:lint:prettier -- --write", |
|||
"lint:ts": "npm run test:lint:ts -- --fix", |
|||
"pretest": "run-s build:rollup", |
|||
"test": "run-s -n test:lint test:only", |
|||
"test:debug": "mocha --inspect-brk", |
|||
"test:lint": "run-s -n test:lint:*", |
|||
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts'", |
|||
"test:lint:ts": "eslint '{src,test}/**/*.ts'", |
|||
"test:only": "mocha", |
|||
"test:coverage": "c8 mocha", |
|||
"test:watch": "mocha --watch", |
|||
"prepublishOnly": "npm run preversion", |
|||
"preversion": "run-s test build" |
|||
}, |
|||
"devDependencies": { |
|||
"@rollup/plugin-node-resolve": "13.2.1", |
|||
"@rollup/plugin-typescript": "8.3.0", |
|||
"@types/mocha": "9.1.1", |
|||
"@types/node": "17.0.30", |
|||
"@typescript-eslint/eslint-plugin": "5.10.0", |
|||
"@typescript-eslint/parser": "5.10.0", |
|||
"c8": "7.11.0", |
|||
"eslint": "8.7.0", |
|||
"eslint-config-prettier": "8.3.0", |
|||
"mocha": "9.2.0", |
|||
"npm-run-all": "4.1.5", |
|||
"prettier": "2.5.1", |
|||
"rollup": "2.66.0", |
|||
"typescript": "4.5.5" |
|||
}, |
|||
"dependencies": { |
|||
"@jridgewell/gen-mapping": "^0.3.0", |
|||
"@jridgewell/trace-mapping": "^0.3.9" |
|||
} |
|||
} |
|||
@ -0,0 +1,21 @@ |
|||
The MIT License |
|||
|
|||
Copyright (c) 2015 Rich Harris |
|||
|
|||
Permission is hereby granted, free of charge, to any person obtaining a copy |
|||
of this software and associated documentation files (the "Software"), to deal |
|||
in the Software without restriction, including without limitation the rights |
|||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell |
|||
copies of the Software, and to permit persons to whom the Software is |
|||
furnished to do so, subject to the following conditions: |
|||
|
|||
The above copyright notice and this permission notice shall be included in |
|||
all copies or substantial portions of the Software. |
|||
|
|||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
|||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
|||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE |
|||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
|||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |
|||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN |
|||
THE SOFTWARE. |
|||
@ -0,0 +1,200 @@ |
|||
# sourcemap-codec |
|||
|
|||
Encode/decode the `mappings` property of a [sourcemap](https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit). |
|||
|
|||
|
|||
## Why? |
|||
|
|||
Sourcemaps are difficult to generate and manipulate, because the `mappings` property – the part that actually links the generated code back to the original source – is encoded using an obscure method called [Variable-length quantity](https://en.wikipedia.org/wiki/Variable-length_quantity). On top of that, each segment in the mapping contains offsets rather than absolute indices, which means that you can't look at a segment in isolation – you have to understand the whole sourcemap. |
|||
|
|||
This package makes the process slightly easier. |
|||
|
|||
|
|||
## Installation |
|||
|
|||
```bash |
|||
npm install sourcemap-codec |
|||
``` |
|||
|
|||
|
|||
## Usage |
|||
|
|||
```js |
|||
import { encode, decode } from 'sourcemap-codec'; |
|||
|
|||
var decoded = decode( ';EAEEA,EAAE,EAAC,CAAE;ECQY,UACC' ); |
|||
|
|||
assert.deepEqual( decoded, [ |
|||
// the first line (of the generated code) has no mappings, |
|||
// as shown by the starting semi-colon (which separates lines) |
|||
[], |
|||
|
|||
// the second line contains four (comma-separated) segments |
|||
[ |
|||
// segments are encoded as you'd expect: |
|||
// [ generatedCodeColumn, sourceIndex, sourceCodeLine, sourceCodeColumn, nameIndex ] |
|||
|
|||
// i.e. the first segment begins at column 2, and maps back to the second column |
|||
// of the second line (both zero-based) of the 0th source, and uses the 0th |
|||
// name in the `map.names` array |
|||
[ 2, 0, 2, 2, 0 ], |
|||
|
|||
// the remaining segments are 4-length rather than 5-length, |
|||
// because they don't map a name |
|||
[ 4, 0, 2, 4 ], |
|||
[ 6, 0, 2, 5 ], |
|||
[ 7, 0, 2, 7 ] |
|||
], |
|||
|
|||
// the final line contains two segments |
|||
[ |
|||
[ 2, 1, 10, 19 ], |
|||
[ 12, 1, 11, 20 ] |
|||
] |
|||
]); |
|||
|
|||
var encoded = encode( decoded ); |
|||
assert.equal( encoded, ';EAEEA,EAAE,EAAC,CAAE;ECQY,UACC' ); |
|||
``` |
|||
|
|||
## Benchmarks |
|||
|
|||
``` |
|||
node v18.0.0 |
|||
|
|||
amp.js.map - 45120 segments |
|||
|
|||
Decode Memory Usage: |
|||
@jridgewell/sourcemap-codec 5479160 bytes |
|||
sourcemap-codec 5659336 bytes |
|||
source-map-0.6.1 17144440 bytes |
|||
source-map-0.8.0 6867424 bytes |
|||
Smallest memory usage is @jridgewell/sourcemap-codec |
|||
|
|||
Decode speed: |
|||
decode: @jridgewell/sourcemap-codec x 502 ops/sec ±1.03% (90 runs sampled) |
|||
decode: sourcemap-codec x 445 ops/sec ±0.97% (92 runs sampled) |
|||
decode: source-map-0.6.1 x 36.01 ops/sec ±1.64% (49 runs sampled) |
|||
decode: source-map-0.8.0 x 367 ops/sec ±0.04% (95 runs sampled) |
|||
Fastest is decode: @jridgewell/sourcemap-codec |
|||
|
|||
Encode Memory Usage: |
|||
@jridgewell/sourcemap-codec 1261620 bytes |
|||
sourcemap-codec 9119248 bytes |
|||
source-map-0.6.1 8968560 bytes |
|||
source-map-0.8.0 8952952 bytes |
|||
Smallest memory usage is @jridgewell/sourcemap-codec |
|||
|
|||
Encode speed: |
|||
encode: @jridgewell/sourcemap-codec x 738 ops/sec ±0.42% (98 runs sampled) |
|||
encode: sourcemap-codec x 238 ops/sec ±0.73% (88 runs sampled) |
|||
encode: source-map-0.6.1 x 162 ops/sec ±0.43% (84 runs sampled) |
|||
encode: source-map-0.8.0 x 191 ops/sec ±0.34% (90 runs sampled) |
|||
Fastest is encode: @jridgewell/sourcemap-codec |
|||
|
|||
|
|||
*** |
|||
|
|||
|
|||
babel.min.js.map - 347793 segments |
|||
|
|||
Decode Memory Usage: |
|||
@jridgewell/sourcemap-codec 35338184 bytes |
|||
sourcemap-codec 35922736 bytes |
|||
source-map-0.6.1 62366360 bytes |
|||
source-map-0.8.0 44337416 bytes |
|||
Smallest memory usage is @jridgewell/sourcemap-codec |
|||
|
|||
Decode speed: |
|||
decode: @jridgewell/sourcemap-codec x 40.35 ops/sec ±4.47% (54 runs sampled) |
|||
decode: sourcemap-codec x 36.76 ops/sec ±3.67% (51 runs sampled) |
|||
decode: source-map-0.6.1 x 4.44 ops/sec ±2.15% (16 runs sampled) |
|||
decode: source-map-0.8.0 x 59.35 ops/sec ±0.05% (78 runs sampled) |
|||
Fastest is decode: source-map-0.8.0 |
|||
|
|||
Encode Memory Usage: |
|||
@jridgewell/sourcemap-codec 7212604 bytes |
|||
sourcemap-codec 21421456 bytes |
|||
source-map-0.6.1 25286888 bytes |
|||
source-map-0.8.0 25498744 bytes |
|||
Smallest memory usage is @jridgewell/sourcemap-codec |
|||
|
|||
Encode speed: |
|||
encode: @jridgewell/sourcemap-codec x 112 ops/sec ±0.13% (84 runs sampled) |
|||
encode: sourcemap-codec x 30.23 ops/sec ±2.76% (53 runs sampled) |
|||
encode: source-map-0.6.1 x 19.43 ops/sec ±3.70% (37 runs sampled) |
|||
encode: source-map-0.8.0 x 19.40 ops/sec ±3.26% (37 runs sampled) |
|||
Fastest is encode: @jridgewell/sourcemap-codec |
|||
|
|||
|
|||
*** |
|||
|
|||
|
|||
preact.js.map - 1992 segments |
|||
|
|||
Decode Memory Usage: |
|||
@jridgewell/sourcemap-codec 500272 bytes |
|||
sourcemap-codec 516864 bytes |
|||
source-map-0.6.1 1596672 bytes |
|||
source-map-0.8.0 517272 bytes |
|||
Smallest memory usage is @jridgewell/sourcemap-codec |
|||
|
|||
Decode speed: |
|||
decode: @jridgewell/sourcemap-codec x 16,137 ops/sec ±0.17% (99 runs sampled) |
|||
decode: sourcemap-codec x 12,139 ops/sec ±0.13% (99 runs sampled) |
|||
decode: source-map-0.6.1 x 1,264 ops/sec ±0.12% (100 runs sampled) |
|||
decode: source-map-0.8.0 x 9,894 ops/sec ±0.08% (101 runs sampled) |
|||
Fastest is decode: @jridgewell/sourcemap-codec |
|||
|
|||
Encode Memory Usage: |
|||
@jridgewell/sourcemap-codec 321026 bytes |
|||
sourcemap-codec 830832 bytes |
|||
source-map-0.6.1 586608 bytes |
|||
source-map-0.8.0 586680 bytes |
|||
Smallest memory usage is @jridgewell/sourcemap-codec |
|||
|
|||
Encode speed: |
|||
encode: @jridgewell/sourcemap-codec x 19,876 ops/sec ±0.78% (95 runs sampled) |
|||
encode: sourcemap-codec x 6,983 ops/sec ±0.15% (100 runs sampled) |
|||
encode: source-map-0.6.1 x 5,070 ops/sec ±0.12% (102 runs sampled) |
|||
encode: source-map-0.8.0 x 5,641 ops/sec ±0.17% (100 runs sampled) |
|||
Fastest is encode: @jridgewell/sourcemap-codec |
|||
|
|||
|
|||
*** |
|||
|
|||
|
|||
react.js.map - 5726 segments |
|||
|
|||
Decode Memory Usage: |
|||
@jridgewell/sourcemap-codec 734848 bytes |
|||
sourcemap-codec 954200 bytes |
|||
source-map-0.6.1 2276432 bytes |
|||
source-map-0.8.0 955488 bytes |
|||
Smallest memory usage is @jridgewell/sourcemap-codec |
|||
|
|||
Decode speed: |
|||
decode: @jridgewell/sourcemap-codec x 5,723 ops/sec ±0.12% (98 runs sampled) |
|||
decode: sourcemap-codec x 4,555 ops/sec ±0.09% (101 runs sampled) |
|||
decode: source-map-0.6.1 x 437 ops/sec ±0.11% (93 runs sampled) |
|||
decode: source-map-0.8.0 x 3,441 ops/sec ±0.15% (100 runs sampled) |
|||
Fastest is decode: @jridgewell/sourcemap-codec |
|||
|
|||
Encode Memory Usage: |
|||
@jridgewell/sourcemap-codec 638672 bytes |
|||
sourcemap-codec 1109840 bytes |
|||
source-map-0.6.1 1321224 bytes |
|||
source-map-0.8.0 1324448 bytes |
|||
Smallest memory usage is @jridgewell/sourcemap-codec |
|||
|
|||
Encode speed: |
|||
encode: @jridgewell/sourcemap-codec x 6,801 ops/sec ±0.48% (98 runs sampled) |
|||
encode: sourcemap-codec x 2,533 ops/sec ±0.13% (101 runs sampled) |
|||
encode: source-map-0.6.1 x 2,248 ops/sec ±0.08% (100 runs sampled) |
|||
encode: source-map-0.8.0 x 2,303 ops/sec ±0.15% (100 runs sampled) |
|||
Fastest is encode: @jridgewell/sourcemap-codec |
|||
``` |
|||
|
|||
# License |
|||
|
|||
MIT |
|||
@ -0,0 +1,164 @@ |
|||
// Separator char codes and the Base64 VLQ alphabet.
const comma = ','.charCodeAt(0);
const semicolon = ';'.charCodeAt(0);
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
// Bidirectional digit <-> char-code lookup tables for the alphabet above.
const intToChar = new Uint8Array(64); // 64 possible chars.
const charToInt = new Uint8Array(128); // z is 122 in ASCII
for (let i = 0; i < chars.length; i++) {
    const c = chars.charCodeAt(i);
    intToChar[i] = c;
    charToInt[c] = i;
}
// Provide a fallback for older environments: prefer TextDecoder, then
// Node's Buffer, then a char-by-char decoder (safe here because the buffer
// only ever holds ASCII from the alphabet above).
const td = typeof TextDecoder !== 'undefined'
    ? /* #__PURE__ */ new TextDecoder()
    : typeof Buffer !== 'undefined'
        ? {
            decode(buf) {
                const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
                return out.toString();
            },
        }
        : {
            decode(buf) {
                let out = '';
                for (let i = 0; i < buf.length; i++) {
                    out += String.fromCharCode(buf[i]);
                }
                return out;
            },
        };
|||
/**
 * Decodes a VLQ `mappings` string into an array of lines, each an array of
 * segments `[genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]`
 * (1-, 4-, or 5-length). Fields are delta-decoded through the shared `state`;
 * only the generated column resets at each line. A line whose columns arrive
 * out of order is sorted before being returned.
 */
function decode(mappings) {
    const state = new Int32Array(5);
    const decoded = [];
    let index = 0;
    do {
        const semi = indexOf(mappings, index);
        const line = [];
        let sorted = true;
        let lastCol = 0;
        state[0] = 0; // genColumn deltas restart on every line
        for (let i = index; i < semi; i++) {
            let seg;
            i = decodeInteger(mappings, i, state, 0); // genColumn
            const col = state[0];
            if (col < lastCol)
                sorted = false;
            lastCol = col;
            // More VLQ digits before the next ','/';' means a 4- or 5-field segment.
            if (hasMoreVlq(mappings, i, semi)) {
                i = decodeInteger(mappings, i, state, 1); // sourcesIndex
                i = decodeInteger(mappings, i, state, 2); // sourceLine
                i = decodeInteger(mappings, i, state, 3); // sourceColumn
                if (hasMoreVlq(mappings, i, semi)) {
                    i = decodeInteger(mappings, i, state, 4); // namesIndex
                    seg = [col, state[1], state[2], state[3], state[4]];
                }
                else {
                    seg = [col, state[1], state[2], state[3]];
                }
            }
            else {
                seg = [col];
            }
            line.push(seg);
        }
        if (!sorted)
            sort(line);
        decoded.push(line);
        index = semi + 1;
    } while (index <= mappings.length);
    return decoded;
}
|||
/**
 * Returns the index of the next ';' (line separator) at or after `index`,
 * or the string length when there are no more separators.
 */
function indexOf(mappings, index) {
    const found = mappings.indexOf(';', index);
    return found >= 0 ? found : mappings.length;
}
|||
/**
 * Reads one Base64-VLQ integer from `mappings` starting at `pos`, adds it to
 * the running value in `state[j]` (segment fields are delta-encoded), and
 * returns the position just past the last VLQ character consumed.
 */
function decodeInteger(mappings, pos, state, j) {
    let value = 0;
    let shift = 0;
    let integer = 0;
    do {
        const c = mappings.charCodeAt(pos++);
        integer = charToInt[c];
        value |= (integer & 31) << shift; // low 5 bits are payload
        shift += 5;
    } while (integer & 32); // bit 6 set => another digit follows
    const shouldNegate = value & 1; // bit 0 carries the sign
    value >>>= 1;
    if (shouldNegate) {
        // OR with INT_MIN is a no-op for nonzero negatives and maps a zero
        // payload to -2147483648, keeping the most-negative int representable.
        value = -0x80000000 | -value;
    }
    state[j] += value;
    return pos;
}
|||
/**
 * True when another VLQ field follows within the current line: position `i`
 * is still inside the line and not sitting on a segment-separating comma.
 */
function hasMoreVlq(mappings, i, length) {
    return i < length && mappings.charCodeAt(i) !== comma;
}
|||
/** Sorts a line's segments in place, ascending by generated column. */
function sort(line) {
    line.sort(sortComparator);
}
/** Ascending numeric comparator on the generated-column field. */
function sortComparator(a, b) {
    const [colA] = a;
    const [colB] = b;
    return colA - colB;
}
|||
/**
 * Encodes decoded mappings (array of lines of segments) back into the VLQ
 * `mappings` string. Output is accumulated in a 16 KiB byte buffer and
 * flushed through `td.decode` to amortize string concatenation.
 */
function encode(decoded) {
    const state = new Int32Array(5);
    const bufLength = 1024 * 16;
    // 36 bytes of headroom: one segment writes at most 5 VLQ ints (7 chars
    // each) plus a comma before the flush condition is re-checked.
    const subLength = bufLength - 36;
    const buf = new Uint8Array(bufLength);
    const sub = buf.subarray(0, subLength);
    let pos = 0;
    let out = '';
    for (let i = 0; i < decoded.length; i++) {
        const line = decoded[i];
        if (i > 0) {
            // Flush a full buffer, then emit the ';' line separator.
            if (pos === bufLength) {
                out += td.decode(buf);
                pos = 0;
            }
            buf[pos++] = semicolon;
        }
        if (line.length === 0)
            continue;
        // Generated-column deltas reset at the start of each line.
        state[0] = 0;
        for (let j = 0; j < line.length; j++) {
            const segment = line[j];
            // We can push up to 5 ints, each int can take at most 7 chars, and we
            // may push a comma.
            if (pos > subLength) {
                out += td.decode(sub);
                buf.copyWithin(0, subLength, pos);
                pos -= subLength;
            }
            if (j > 0)
                buf[pos++] = comma;
            pos = encodeInteger(buf, pos, state, segment, 0); // genColumn
            if (segment.length === 1)
                continue;
            pos = encodeInteger(buf, pos, state, segment, 1); // sourcesIndex
            pos = encodeInteger(buf, pos, state, segment, 2); // sourceLine
            pos = encodeInteger(buf, pos, state, segment, 3); // sourceColumn
            if (segment.length === 4)
                continue;
            pos = encodeInteger(buf, pos, state, segment, 4); // namesIndex
        }
    }
    // Flush whatever remains buffered.
    return out + td.decode(buf.subarray(0, pos));
}
|||
/**
 * Appends one Base64-VLQ integer to `buf` at `pos`: emits the delta between
 * `segment[j]` and the running value `state[j]`, updates the state, and
 * returns the new write position.
 */
function encodeInteger(buf, pos, state, segment, j) {
    const next = segment[j];
    let num = next - state[j]; // fields are delta-encoded
    state[j] = next;
    // Fold the sign into bit 0: negative n becomes (|n| << 1) | 1.
    num = num < 0 ? (-num << 1) | 1 : num << 1;
    do {
        let clamped = num & 0b011111; // low 5 payload bits
        num >>>= 5;
        if (num > 0)
            clamped |= 0b100000; // continuation bit
        buf[pos++] = intToChar[clamped];
    } while (num > 0);
    return pos;
}
|||
|
|||
export { decode, encode }; |
|||
//# sourceMappingURL=sourcemap-codec.mjs.map
|
|||
File diff suppressed because one or more lines are too long
@ -0,0 +1,175 @@ |
|||
(function (global, factory) { |
|||
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) : |
|||
typeof define === 'function' && define.amd ? define(['exports'], factory) : |
|||
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.sourcemapCodec = {})); |
|||
})(this, (function (exports) { 'use strict'; |
|||
|
|||
// Separator char codes and the Base64 VLQ alphabet.
const comma = ','.charCodeAt(0);
const semicolon = ';'.charCodeAt(0);
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
// Bidirectional digit <-> char-code lookup tables for the alphabet above.
const intToChar = new Uint8Array(64); // 64 possible chars.
const charToInt = new Uint8Array(128); // z is 122 in ASCII
for (let i = 0; i < chars.length; i++) {
    const c = chars.charCodeAt(i);
    intToChar[i] = c;
    charToInt[c] = i;
}
// Provide a fallback for older environments: prefer TextDecoder, then
// Node's Buffer, then a char-by-char decoder (safe here because the buffer
// only ever holds ASCII from the alphabet above).
const td = typeof TextDecoder !== 'undefined'
    ? /* #__PURE__ */ new TextDecoder()
    : typeof Buffer !== 'undefined'
        ? {
            decode(buf) {
                const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
                return out.toString();
            },
        }
        : {
            decode(buf) {
                let out = '';
                for (let i = 0; i < buf.length; i++) {
                    out += String.fromCharCode(buf[i]);
                }
                return out;
            },
        };
|||
/**
 * Decodes a VLQ `mappings` string into an array of lines, each an array of
 * segments `[genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]`
 * (1-, 4-, or 5-length). Fields are delta-decoded through the shared `state`;
 * only the generated column resets at each line. A line whose columns arrive
 * out of order is sorted before being returned.
 */
function decode(mappings) {
    const state = new Int32Array(5);
    const decoded = [];
    let index = 0;
    do {
        const semi = indexOf(mappings, index);
        const line = [];
        let sorted = true;
        let lastCol = 0;
        state[0] = 0; // genColumn deltas restart on every line
        for (let i = index; i < semi; i++) {
            let seg;
            i = decodeInteger(mappings, i, state, 0); // genColumn
            const col = state[0];
            if (col < lastCol)
                sorted = false;
            lastCol = col;
            // More VLQ digits before the next ','/';' means a 4- or 5-field segment.
            if (hasMoreVlq(mappings, i, semi)) {
                i = decodeInteger(mappings, i, state, 1); // sourcesIndex
                i = decodeInteger(mappings, i, state, 2); // sourceLine
                i = decodeInteger(mappings, i, state, 3); // sourceColumn
                if (hasMoreVlq(mappings, i, semi)) {
                    i = decodeInteger(mappings, i, state, 4); // namesIndex
                    seg = [col, state[1], state[2], state[3], state[4]];
                }
                else {
                    seg = [col, state[1], state[2], state[3]];
                }
            }
            else {
                seg = [col];
            }
            line.push(seg);
        }
        if (!sorted)
            sort(line);
        decoded.push(line);
        index = semi + 1;
    } while (index <= mappings.length);
    return decoded;
}
|||
/**
 * Returns the index of the next ';' (line separator) at or after `index`,
 * or the string length when there are no more separators.
 */
function indexOf(mappings, index) {
    const found = mappings.indexOf(';', index);
    return found >= 0 ? found : mappings.length;
}
|||
// Reads one base64-VLQ value from `mappings` starting at `pos`, adds the
// signed delta into state[j], and returns the position just past the
// value's last character.
function decodeInteger(mappings, pos, state, j) {
    let value = 0;
    let shift = 0;
    let integer = 0;
    do {
        const c = mappings.charCodeAt(pos++);
        integer = charToInt[c];
        value |= (integer & 31) << shift; // low 5 bits are payload
        shift += 5;
    } while (integer & 32); // bit 6 set => continuation char follows
    const shouldNegate = value & 1; // bit 0 carries the sign
    value >>>= 1;
    if (shouldNegate) {
        // A sign-flagged zero decodes to -0x80000000 (INT32_MIN).
        value = -0x80000000 | -value;
    }
    state[j] += value;
    return pos;
}
|||
// Reports whether another VLQ value starts at `i`: we must not have reached
// the line terminator at `length`, and the next char must not be a
// segment-separating comma.
function hasMoreVlq(mappings, i, length) {
    return i < length && mappings.charCodeAt(i) !== comma;
}
|||
// Sorts a line's segments in place by generated column.
function sort(line) {
    line.sort(sortComparator);
}
|||
// Orders two segments by generated column (the first tuple element),
// ascending.
function sortComparator(a, b) {
    const colA = a[0];
    const colB = b[0];
    return colA - colB;
}
|||
// Encodes decoded mappings back into a VLQ `mappings` string. Output bytes
// are staged in a 16KiB Uint8Array and flushed to a string via `td`, which
// avoids building millions of tiny intermediate strings.
function encode(decoded) {
    const state = new Int32Array(5);
    const bufLength = 1024 * 16;
    // Headroom: a full segment is at most 5 ints * 7 chars plus a comma (36).
    const subLength = bufLength - 36;
    const buf = new Uint8Array(bufLength);
    const sub = buf.subarray(0, subLength);
    let pos = 0;
    let out = '';
    for (let i = 0; i < decoded.length; i++) {
        const line = decoded[i];
        if (i > 0) {
            // Flush when exactly full before writing the line separator.
            if (pos === bufLength) {
                out += td.decode(buf);
                pos = 0;
            }
            buf[pos++] = semicolon;
        }
        if (line.length === 0)
            continue;
        state[0] = 0; // genColumn delta resets at every line
        for (let j = 0; j < line.length; j++) {
            const segment = line[j];
            // We can push up to 5 ints, each int can take at most 7 chars, and we
            // may push a comma.
            if (pos > subLength) {
                out += td.decode(sub);
                buf.copyWithin(0, subLength, pos);
                pos -= subLength;
            }
            if (j > 0)
                buf[pos++] = comma;
            pos = encodeInteger(buf, pos, state, segment, 0); // genColumn
            if (segment.length === 1)
                continue;
            pos = encodeInteger(buf, pos, state, segment, 1); // sourcesIndex
            pos = encodeInteger(buf, pos, state, segment, 2); // sourceLine
            pos = encodeInteger(buf, pos, state, segment, 3); // sourceColumn
            if (segment.length === 4)
                continue;
            pos = encodeInteger(buf, pos, state, segment, 4); // namesIndex
        }
    }
    return out + td.decode(buf.subarray(0, pos));
}
|||
// Appends the base64-VLQ encoding of (segment[j] - state[j]) to `buf` at
// `pos`, updates state[j] to segment[j], and returns the new write position.
function encodeInteger(buf, pos, state, segment, j) {
    const next = segment[j];
    let num = next - state[j]; // deltas, not absolute values, are encoded
    state[j] = next;
    // Fold the sign into bit 0.
    num = num < 0 ? (-num << 1) | 1 : num << 1;
    do {
        let clamped = num & 0b011111; // low 5 bits of payload
        num >>>= 5;
        if (num > 0)
            clamped |= 0b100000; // continuation bit
        buf[pos++] = intToChar[clamped];
    } while (num > 0);
    return pos;
}
|||
|
|||
// Public API of the UMD bundle.
exports.decode = decode;
exports.encode = encode;

Object.defineProperty(exports, '__esModule', { value: true });
|||
|
|||
})); |
|||
//# sourceMappingURL=sourcemap-codec.umd.js.map
|
|||
File diff suppressed because one or more lines are too long
@ -0,0 +1,6 @@ |
|||
/** One segment: [genColumn] | [genColumn, sourcesIndex, sourceLine, sourceColumn] | the same plus namesIndex. */
export declare type SourceMapSegment = [number] | [number, number, number, number] | [number, number, number, number, number];
/** All segments for one generated line. */
export declare type SourceMapLine = SourceMapSegment[];
/** All lines of a map, indexed by generated line number. */
export declare type SourceMapMappings = SourceMapLine[];
/** Decodes a VLQ `mappings` string into structured segments. */
export declare function decode(mappings: string): SourceMapMappings;
/** Encodes structured segments back into a VLQ `mappings` string. */
export declare function encode(decoded: SourceMapMappings): string;
export declare function encode(decoded: Readonly<SourceMapMappings>): string;
|||
@ -0,0 +1,75 @@ |
|||
{ |
|||
"name": "@jridgewell/sourcemap-codec", |
|||
"version": "1.4.14", |
|||
"description": "Encode/decode sourcemap mappings", |
|||
"keywords": [ |
|||
"sourcemap", |
|||
"vlq" |
|||
], |
|||
"main": "dist/sourcemap-codec.umd.js", |
|||
"module": "dist/sourcemap-codec.mjs", |
|||
"typings": "dist/types/sourcemap-codec.d.ts", |
|||
"files": [ |
|||
"dist", |
|||
"src" |
|||
], |
|||
"exports": { |
|||
".": [ |
|||
{ |
|||
"types": "./dist/types/sourcemap-codec.d.ts", |
|||
"browser": "./dist/sourcemap-codec.umd.js", |
|||
"import": "./dist/sourcemap-codec.mjs", |
|||
"require": "./dist/sourcemap-codec.umd.js" |
|||
}, |
|||
"./dist/sourcemap-codec.umd.js" |
|||
], |
|||
"./package.json": "./package.json" |
|||
}, |
|||
"scripts": { |
|||
"benchmark": "run-s build:rollup benchmark:*", |
|||
"benchmark:install": "cd benchmark && npm install", |
|||
"benchmark:only": "node --expose-gc benchmark/index.js", |
|||
"build": "run-s -n build:*", |
|||
"build:rollup": "rollup -c rollup.config.js", |
|||
"build:ts": "tsc --project tsconfig.build.json", |
|||
"lint": "run-s -n lint:*", |
|||
"lint:prettier": "npm run test:lint:prettier -- --write", |
|||
"lint:ts": "npm run test:lint:ts -- --fix", |
|||
"prebuild": "rm -rf dist", |
|||
"prepublishOnly": "npm run preversion", |
|||
"preversion": "run-s test build", |
|||
"pretest": "run-s build:rollup", |
|||
"test": "run-s -n test:lint test:only", |
|||
"test:debug": "mocha --inspect-brk", |
|||
"test:lint": "run-s -n test:lint:*", |
|||
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts'", |
|||
"test:lint:ts": "eslint '{src,test}/**/*.ts'", |
|||
"test:only": "mocha", |
|||
"test:coverage": "c8 mocha", |
|||
"test:watch": "mocha --watch" |
|||
}, |
|||
"repository": { |
|||
"type": "git", |
|||
"url": "git+https://github.com/jridgewell/sourcemap-codec.git" |
|||
}, |
|||
"author": "Rich Harris", |
|||
"license": "MIT", |
|||
"devDependencies": { |
|||
"@rollup/plugin-typescript": "8.3.0", |
|||
"@types/node": "17.0.15", |
|||
"@typescript-eslint/eslint-plugin": "5.10.0", |
|||
"@typescript-eslint/parser": "5.10.0", |
|||
"benchmark": "2.1.4", |
|||
"c8": "7.11.2", |
|||
"eslint": "8.7.0", |
|||
"eslint-config-prettier": "8.3.0", |
|||
"mocha": "9.2.0", |
|||
"npm-run-all": "4.1.5", |
|||
"prettier": "2.5.1", |
|||
"rollup": "2.64.0", |
|||
"source-map": "0.6.1", |
|||
"source-map-js": "1.0.2", |
|||
"sourcemap-codec": "1.4.8", |
|||
"typescript": "4.5.4" |
|||
} |
|||
} |
|||
@ -0,0 +1,198 @@ |
|||
export type SourceMapSegment =
  | [number]
  | [number, number, number, number]
  | [number, number, number, number, number];
export type SourceMapLine = SourceMapSegment[];
export type SourceMapMappings = SourceMapLine[];

const comma = ','.charCodeAt(0);
const semicolon = ';'.charCodeAt(0);
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
const intToChar = new Uint8Array(64); // 64 possible chars.
const charToInt = new Uint8Array(128); // z is 122 in ASCII

// Build the base64 lookup tables in both directions: sextet value -> char
// code and char code -> sextet value.
for (let i = 0; i < chars.length; i++) {
  const c = chars.charCodeAt(i);
  intToChar[i] = c;
  charToInt[c] = i;
}

// Provide a fallback for older environments.
// Prefer the native TextDecoder; otherwise Node's Buffer; finally a plain
// char loop (safe because the encoded output is pure ASCII).
const td =
  typeof TextDecoder !== 'undefined'
    ? /* #__PURE__ */ new TextDecoder()
    : typeof Buffer !== 'undefined'
    ? {
        decode(buf: Uint8Array) {
          const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
          return out.toString();
        },
      }
    : {
        decode(buf: Uint8Array) {
          let out = '';
          for (let i = 0; i < buf.length; i++) {
            out += String.fromCharCode(buf[i]);
          }
          return out;
        },
      };
|||
|
|||
/**
 * Decodes a full VLQ `mappings` string into per-line segment arrays.
 * `state` carries the five running deltas (genColumn, sourcesIndex,
 * sourceLine, sourceColumn, namesIndex); only genColumn resets per line.
 */
export function decode(mappings: string): SourceMapMappings {
  const state: [number, number, number, number, number] = new Int32Array(5) as any;
  const decoded: SourceMapMappings = [];

  let index = 0;
  do {
    // Each generated line ends at the next ';' (or end of string).
    const semi = indexOf(mappings, index);
    const line: SourceMapLine = [];
    let sorted = true;
    let lastCol = 0;
    state[0] = 0;

    for (let i = index; i < semi; i++) {
      let seg: SourceMapSegment;

      i = decodeInteger(mappings, i, state, 0); // genColumn
      const col = state[0];
      // Track whether columns stay ascending so we only sort when needed.
      if (col < lastCol) sorted = false;
      lastCol = col;

      if (hasMoreVlq(mappings, i, semi)) {
        i = decodeInteger(mappings, i, state, 1); // sourcesIndex
        i = decodeInteger(mappings, i, state, 2); // sourceLine
        i = decodeInteger(mappings, i, state, 3); // sourceColumn

        if (hasMoreVlq(mappings, i, semi)) {
          i = decodeInteger(mappings, i, state, 4); // namesIndex
          seg = [col, state[1], state[2], state[3], state[4]];
        } else {
          seg = [col, state[1], state[2], state[3]];
        }
      } else {
        seg = [col];
      }

      line.push(seg);
    }

    if (!sorted) sort(line);
    decoded.push(line);
    index = semi + 1;
  } while (index <= mappings.length);

  return decoded;
}
|||
|
|||
function indexOf(mappings: string, index: number): number { |
|||
const idx = mappings.indexOf(';', index); |
|||
return idx === -1 ? mappings.length : idx; |
|||
} |
|||
|
|||
// Reads one base64 VLQ from `mappings` at `pos`, adds the signed delta into
// state[j], and returns the position just past the value's last character.
function decodeInteger(mappings: string, pos: number, state: SourceMapSegment, j: number): number {
  let value = 0;
  let shift = 0;
  let integer = 0;

  do {
    const c = mappings.charCodeAt(pos++);
    integer = charToInt[c];
    value |= (integer & 31) << shift; // low 5 bits are payload
    shift += 5;
  } while (integer & 32); // bit 6 set => continuation char follows

  const shouldNegate = value & 1; // bit 0 carries the sign
  value >>>= 1;

  if (shouldNegate) {
    // A sign-flagged zero decodes to -0x80000000 (INT32_MIN).
    value = -0x80000000 | -value;
  }

  state[j] += value;
  return pos;
}
|||
|
|||
function hasMoreVlq(mappings: string, i: number, length: number): boolean { |
|||
if (i >= length) return false; |
|||
return mappings.charCodeAt(i) !== comma; |
|||
} |
|||
|
|||
// Sorts a line's segments in place by generated column.
function sort(line: SourceMapSegment[]) {
  line.sort(sortComparator);
}
|||
|
|||
function sortComparator(a: SourceMapSegment, b: SourceMapSegment): number { |
|||
return a[0] - b[0]; |
|||
} |
|||
|
|||
export function encode(decoded: SourceMapMappings): string;
export function encode(decoded: Readonly<SourceMapMappings>): string;
/**
 * Encodes decoded mappings back into a VLQ string. Output bytes are staged
 * in a 16KiB buffer and flushed through `td`, avoiding many small string
 * concatenations.
 */
export function encode(decoded: Readonly<SourceMapMappings>): string {
  const state: [number, number, number, number, number] = new Int32Array(5) as any;
  const bufLength = 1024 * 16;
  // Headroom: a full segment is at most 5 ints * 7 chars plus a comma (36).
  const subLength = bufLength - 36;
  const buf = new Uint8Array(bufLength);
  const sub = buf.subarray(0, subLength);
  let pos = 0;
  let out = '';

  for (let i = 0; i < decoded.length; i++) {
    const line = decoded[i];
    if (i > 0) {
      // Flush when exactly full before writing the line separator.
      if (pos === bufLength) {
        out += td.decode(buf);
        pos = 0;
      }
      buf[pos++] = semicolon;
    }
    if (line.length === 0) continue;

    state[0] = 0; // genColumn delta resets at every line

    for (let j = 0; j < line.length; j++) {
      const segment = line[j];
      // We can push up to 5 ints, each int can take at most 7 chars, and we
      // may push a comma.
      if (pos > subLength) {
        out += td.decode(sub);
        buf.copyWithin(0, subLength, pos);
        pos -= subLength;
      }
      if (j > 0) buf[pos++] = comma;

      pos = encodeInteger(buf, pos, state, segment, 0); // genColumn

      if (segment.length === 1) continue;
      pos = encodeInteger(buf, pos, state, segment, 1); // sourcesIndex
      pos = encodeInteger(buf, pos, state, segment, 2); // sourceLine
      pos = encodeInteger(buf, pos, state, segment, 3); // sourceColumn

      if (segment.length === 4) continue;
      pos = encodeInteger(buf, pos, state, segment, 4); // namesIndex
    }
  }

  return out + td.decode(buf.subarray(0, pos));
}
|||
|
|||
// Appends the base64-VLQ encoding of (segment[j] - state[j]) to `buf`,
// updates state[j] to segment[j], and returns the new write position.
function encodeInteger(
  buf: Uint8Array,
  pos: number,
  state: SourceMapSegment,
  segment: SourceMapSegment,
  j: number,
): number {
  const next = segment[j];
  let num = next - state[j]; // deltas, not absolute values, are stored
  state[j] = next;

  // Fold the sign into bit 0.
  num = num < 0 ? (-num << 1) | 1 : num << 1;
  do {
    let clamped = num & 0b011111; // low 5 bits of payload
    num >>>= 5;
    if (num > 0) clamped |= 0b100000; // continuation bit
    buf[pos++] = intToChar[clamped];
  } while (num > 0);

  return pos;
}
|||
@ -0,0 +1,19 @@ |
|||
Copyright 2022 Justin Ridgewell <justin@ridgewell.name> |
|||
|
|||
Permission is hereby granted, free of charge, to any person obtaining a copy |
|||
of this software and associated documentation files (the "Software"), to deal |
|||
in the Software without restriction, including without limitation the rights |
|||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell |
|||
copies of the Software, and to permit persons to whom the Software is |
|||
furnished to do so, subject to the following conditions: |
|||
|
|||
The above copyright notice and this permission notice shall be included in |
|||
all copies or substantial portions of the Software. |
|||
|
|||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
|||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
|||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE |
|||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
|||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |
|||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE |
|||
SOFTWARE. |
|||
@ -0,0 +1,252 @@ |
|||
# @jridgewell/trace-mapping |
|||
|
|||
> Trace the original position through a source map |
|||
|
|||
`trace-mapping` allows you to take the line and column of an output file and trace it to the |
|||
original location in the source file through a source map. |
|||
|
|||
You may already be familiar with the [`source-map`][source-map] package's `SourceMapConsumer`. This |
|||
provides the same `originalPositionFor` and `generatedPositionFor` API, without requiring WASM. |
|||
|
|||
## Installation |
|||
|
|||
```sh |
|||
npm install @jridgewell/trace-mapping |
|||
``` |
|||
|
|||
## Usage |
|||
|
|||
```typescript |
|||
import { |
|||
TraceMap, |
|||
originalPositionFor, |
|||
generatedPositionFor, |
|||
sourceContentFor, |
|||
} from '@jridgewell/trace-mapping'; |
|||
|
|||
const tracer = new TraceMap({ |
|||
version: 3, |
|||
sources: ['input.js'], |
|||
sourcesContent: ['content of input.js'], |
|||
names: ['foo'], |
|||
mappings: 'KAyCIA', |
|||
}); |
|||
|
|||
// Lines start at line 1, columns at column 0. |
|||
const traced = originalPositionFor(tracer, { line: 1, column: 5 }); |
|||
assert.deepEqual(traced, { |
|||
source: 'input.js', |
|||
line: 42, |
|||
column: 4, |
|||
name: 'foo', |
|||
}); |
|||
|
|||
const content = sourceContentFor(tracer, traced.source); |
|||
assert.strictEqual(content, 'content of input.js'); |
|||
|
|||
const generated = generatedPositionFor(tracer, { |
|||
source: 'input.js', |
|||
line: 42, |
|||
column: 4, |
|||
}); |
|||
assert.deepEqual(generated, { |
|||
line: 1, |
|||
column: 5, |
|||
}); |
|||
``` |
|||
|
|||
We also provide a lower level API to get the actual segment that matches our line and column. Unlike |
|||
`originalPositionFor`, `traceSegment` uses a 0-base for `line`: |
|||
|
|||
```typescript |
|||
import { traceSegment } from '@jridgewell/trace-mapping'; |
|||
|
|||
// line is 0-base. |
|||
const traced = traceSegment(tracer, /* line */ 0, /* column */ 5); |
|||
|
|||
// Segments are [outputColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex] |
|||
// Again, line is 0-base and so is sourceLine |
|||
assert.deepEqual(traced, [5, 0, 41, 4, 0]); |
|||
``` |
|||
|
|||
### SectionedSourceMaps |
|||
|
|||
The sourcemap spec defines a special `sections` field that's designed to handle concatenation of |
|||
output code with associated sourcemaps. This type of sourcemap is rarely used (no major build tool |
|||
produces it), but if you are hand coding a concatenation you may need it. We provide an `AnyMap` |
|||
helper that can receive either a regular sourcemap or a `SectionedSourceMap` and returns a |
|||
`TraceMap` instance: |
|||
|
|||
```typescript |
|||
import { AnyMap } from '@jridgewell/trace-mapping'; |
|||
const fooOutput = 'foo'; |
|||
const barOutput = 'bar'; |
|||
const output = [fooOutput, barOutput].join('\n'); |
|||
|
|||
const sectioned = new AnyMap({ |
|||
version: 3, |
|||
sections: [ |
|||
{ |
|||
// 0-base line and column |
|||
offset: { line: 0, column: 0 }, |
|||
// fooOutput's sourcemap |
|||
map: { |
|||
version: 3, |
|||
sources: ['foo.js'], |
|||
names: ['foo'], |
|||
mappings: 'AAAAA', |
|||
}, |
|||
}, |
|||
{ |
|||
// barOutput's sourcemap will not affect the first line, only the second |
|||
offset: { line: 1, column: 0 }, |
|||
map: { |
|||
version: 3, |
|||
sources: ['bar.js'], |
|||
names: ['bar'], |
|||
mappings: 'AAAAA', |
|||
}, |
|||
}, |
|||
], |
|||
}); |
|||
|
|||
const traced = originalPositionFor(sectioned, { |
|||
line: 2, |
|||
column: 0, |
|||
}); |
|||
|
|||
assert.deepEqual(traced, { |
|||
source: 'bar.js', |
|||
line: 1, |
|||
column: 0, |
|||
name: 'bar', |
|||
}); |
|||
``` |
|||
|
|||
## Benchmarks |
|||
|
|||
``` |
|||
node v18.0.0 |
|||
|
|||
amp.js.map - 45120 segments |
|||
|
|||
Memory Usage: |
|||
trace-mapping decoded 562400 bytes |
|||
trace-mapping encoded 5706544 bytes |
|||
source-map-js 10717664 bytes |
|||
source-map-0.6.1 17446384 bytes |
|||
source-map-0.8.0 9701757 bytes |
|||
Smallest memory usage is trace-mapping decoded |
|||
|
|||
Init speed: |
|||
trace-mapping: decoded JSON input x 180 ops/sec ±0.34% (85 runs sampled) |
|||
trace-mapping: encoded JSON input x 364 ops/sec ±1.77% (89 runs sampled) |
|||
trace-mapping: decoded Object input x 3,116 ops/sec ±0.50% (96 runs sampled) |
|||
trace-mapping: encoded Object input x 410 ops/sec ±2.62% (85 runs sampled) |
|||
source-map-js: encoded Object input x 84.23 ops/sec ±0.91% (73 runs sampled) |
|||
source-map-0.6.1: encoded Object input x 37.21 ops/sec ±2.08% (51 runs sampled) |
|||
Fastest is trace-mapping: decoded Object input |
|||
|
|||
Trace speed: |
|||
trace-mapping: decoded originalPositionFor x 3,952,212 ops/sec ±0.17% (98 runs sampled) |
|||
trace-mapping: encoded originalPositionFor x 3,487,468 ops/sec ±1.58% (90 runs sampled) |
|||
source-map-js: encoded originalPositionFor x 827,730 ops/sec ±0.78% (97 runs sampled) |
|||
source-map-0.6.1: encoded originalPositionFor x 748,991 ops/sec ±0.53% (94 runs sampled) |
|||
source-map-0.8.0: encoded originalPositionFor x 2,532,894 ops/sec ±0.57% (95 runs sampled) |
|||
Fastest is trace-mapping: decoded originalPositionFor |
|||
|
|||
|
|||
*** |
|||
|
|||
|
|||
babel.min.js.map - 347793 segments |
|||
|
|||
Memory Usage: |
|||
trace-mapping decoded 89832 bytes |
|||
trace-mapping encoded 35474640 bytes |
|||
source-map-js 51257176 bytes |
|||
source-map-0.6.1 63515664 bytes |
|||
source-map-0.8.0 42933752 bytes |
|||
Smallest memory usage is trace-mapping decoded |
|||
|
|||
Init speed: |
|||
trace-mapping: decoded JSON input x 15.41 ops/sec ±8.65% (34 runs sampled) |
|||
trace-mapping: encoded JSON input x 28.20 ops/sec ±12.87% (42 runs sampled) |
|||
trace-mapping: decoded Object input x 964 ops/sec ±0.36% (99 runs sampled) |
|||
trace-mapping: encoded Object input x 31.77 ops/sec ±13.79% (45 runs sampled) |
|||
source-map-js: encoded Object input x 6.45 ops/sec ±5.16% (21 runs sampled) |
|||
source-map-0.6.1: encoded Object input x 4.07 ops/sec ±5.24% (15 runs sampled) |
|||
Fastest is trace-mapping: decoded Object input |
|||
|
|||
Trace speed: |
|||
trace-mapping: decoded originalPositionFor x 7,183,038 ops/sec ±0.58% (95 runs sampled) |
|||
trace-mapping: encoded originalPositionFor x 5,192,185 ops/sec ±0.41% (100 runs sampled) |
|||
source-map-js: encoded originalPositionFor x 4,259,489 ops/sec ±0.79% (94 runs sampled) |
|||
source-map-0.6.1: encoded originalPositionFor x 3,742,629 ops/sec ±0.71% (95 runs sampled) |
|||
source-map-0.8.0: encoded originalPositionFor x 6,270,211 ops/sec ±0.64% (94 runs sampled) |
|||
Fastest is trace-mapping: decoded originalPositionFor |
|||
|
|||
|
|||
*** |
|||
|
|||
|
|||
preact.js.map - 1992 segments |
|||
|
|||
Memory Usage: |
|||
trace-mapping decoded 37128 bytes |
|||
trace-mapping encoded 247280 bytes |
|||
source-map-js 1143536 bytes |
|||
source-map-0.6.1 1290992 bytes |
|||
source-map-0.8.0 96544 bytes |
|||
Smallest memory usage is trace-mapping decoded |
|||
|
|||
Init speed: |
|||
trace-mapping: decoded JSON input x 3,483 ops/sec ±0.30% (98 runs sampled) |
|||
trace-mapping: encoded JSON input x 6,092 ops/sec ±0.18% (97 runs sampled) |
|||
trace-mapping: decoded Object input x 249,076 ops/sec ±0.24% (98 runs sampled) |
|||
trace-mapping: encoded Object input x 14,555 ops/sec ±0.48% (100 runs sampled) |
|||
source-map-js: encoded Object input x 2,447 ops/sec ±0.36% (99 runs sampled) |
|||
source-map-0.6.1: encoded Object input x 1,201 ops/sec ±0.57% (96 runs sampled) |
|||
Fastest is trace-mapping: decoded Object input |
|||
|
|||
Trace speed: |
|||
trace-mapping: decoded originalPositionFor x 7,620,192 ops/sec ±0.09% (99 runs sampled) |
|||
trace-mapping: encoded originalPositionFor x 6,872,554 ops/sec ±0.30% (97 runs sampled) |
|||
source-map-js: encoded originalPositionFor x 2,489,570 ops/sec ±0.35% (94 runs sampled) |
|||
source-map-0.6.1: encoded originalPositionFor x 1,698,633 ops/sec ±0.28% (98 runs sampled) |
|||
source-map-0.8.0: encoded originalPositionFor x 4,015,644 ops/sec ±0.22% (98 runs sampled) |
|||
Fastest is trace-mapping: decoded originalPositionFor |
|||
|
|||
|
|||
*** |
|||
|
|||
|
|||
react.js.map - 5726 segments |
|||
|
|||
Memory Usage: |
|||
trace-mapping decoded 16176 bytes |
|||
trace-mapping encoded 681552 bytes |
|||
source-map-js 2418352 bytes |
|||
source-map-0.6.1 2443672 bytes |
|||
source-map-0.8.0 111768 bytes |
|||
Smallest memory usage is trace-mapping decoded |
|||
|
|||
Init speed: |
|||
trace-mapping: decoded JSON input x 1,720 ops/sec ±0.34% (98 runs sampled) |
|||
trace-mapping: encoded JSON input x 4,406 ops/sec ±0.35% (100 runs sampled) |
|||
trace-mapping: decoded Object input x 92,122 ops/sec ±0.10% (99 runs sampled) |
|||
trace-mapping: encoded Object input x 5,385 ops/sec ±0.37% (99 runs sampled) |
|||
source-map-js: encoded Object input x 794 ops/sec ±0.40% (98 runs sampled) |
|||
source-map-0.6.1: encoded Object input x 416 ops/sec ±0.54% (91 runs sampled) |
|||
Fastest is trace-mapping: decoded Object input |
|||
|
|||
Trace speed: |
|||
trace-mapping: decoded originalPositionFor x 32,759,519 ops/sec ±0.33% (100 runs sampled) |
|||
trace-mapping: encoded originalPositionFor x 31,116,306 ops/sec ±0.33% (97 runs sampled) |
|||
source-map-js: encoded originalPositionFor x 17,458,435 ops/sec ±0.44% (97 runs sampled) |
|||
source-map-0.6.1: encoded originalPositionFor x 12,687,097 ops/sec ±0.43% (95 runs sampled) |
|||
source-map-0.8.0: encoded originalPositionFor x 23,538,275 ops/sec ±0.38% (95 runs sampled) |
|||
Fastest is trace-mapping: decoded originalPositionFor |
|||
``` |
|||
|
|||
[source-map]: https://www.npmjs.com/package/source-map |
|||
@ -0,0 +1,544 @@ |
|||
import { encode, decode } from '@jridgewell/sourcemap-codec'; |
|||
import resolveUri from '@jridgewell/resolve-uri'; |
|||
|
|||
// Resolves `input` against `base`, where a non-empty base is always treated
// as a directory (trailing '/' is appended if missing).
// https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
// https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
function resolve(input, base) {
    let dir = base;
    if (dir && !dir.endsWith('/')) {
        dir += '/';
    }
    return resolveUri(input, dir);
}
|||
|
|||
/**
 * Removes everything after the last "/", but leaves the slash.
 * Returns '' for empty/undefined paths or paths with no slash at all.
 */
function stripFilename(path) {
    if (!path)
        return '';
    const lastSlash = path.lastIndexOf('/');
    return lastSlash === -1 ? '' : path.slice(0, lastSlash + 1);
}
|||
|
|||
// Indexes into a decoded (generated -> original) segment tuple.
const COLUMN = 0;
const SOURCES_INDEX = 1;
const SOURCE_LINE = 2;
const SOURCE_COLUMN = 3;
const NAMES_INDEX = 4;
// Indexes into a reverse (original -> generated) segment tuple.
const REV_GENERATED_LINE = 1;
const REV_GENERATED_COLUMN = 2;
|||
|
|||
// Ensures every line's segments are sorted by generated column. When
// `owned` is false, the outer array and any lines that need sorting are
// copied first so the caller's input is never mutated.
function maybeSort(mappings, owned) {
    const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
    if (unsortedIndex === mappings.length)
        return mappings;
    // If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
    // not, we do not want to modify the consumer's input array.
    if (!owned)
        mappings = mappings.slice();
    for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
        mappings[i] = sortSegments(mappings[i], owned);
    }
    return mappings;
}
|||
// Returns the index of the first unsorted line at or after `start`, or
// mappings.length when every remaining line is already sorted.
function nextUnsortedSegmentLine(mappings, start) {
    for (let i = start; i < mappings.length; i++) {
        if (!isSorted(mappings[i]))
            return i;
    }
    return mappings.length;
}
|||
// True when a line's segments are in ascending generated-column order.
function isSorted(line) {
    for (let j = 1; j < line.length; j++) {
        if (line[j][COLUMN] < line[j - 1][COLUMN]) {
            return false;
        }
    }
    return true;
}
|||
// Sorts one line by generated column, copying it first unless we own it.
function sortSegments(line, owned) {
    if (!owned)
        line = line.slice();
    return line.sort(sortComparator);
}
|||
// Orders segments by generated column, ascending.
function sortComparator(a, b) {
    return a[COLUMN] - b[COLUMN];
}
|||
|
|||
// Module-level out-parameter: whether the most recent binarySearch call hit
// an exact match. Callers must read it immediately after the search.
let found = false;
/**
 * A binary search implementation that returns the index if a match is found.
 * If no match is found, then the left-index (the index associated with the item that comes just
 * before the desired index) is returned. To maintain proper sort order, a splice would happen at
 * the next index:
 *
 * ```js
 * const array = [1, 3];
 * const needle = 2;
 * const index = binarySearch(array, needle, (item, needle) => item - needle);
 *
 * assert.equal(index, 0);
 * array.splice(index + 1, 0, needle);
 * assert.deepEqual(array, [1, 2, 3]);
 * ```
 */
function binarySearch(haystack, needle, low, high) {
    while (low <= high) {
        const mid = low + ((high - low) >> 1);
        // Compare by the segment's generated column.
        const cmp = haystack[mid][COLUMN] - needle;
        if (cmp === 0) {
            found = true;
            return mid;
        }
        if (cmp < 0) {
            low = mid + 1;
        }
        else {
            high = mid - 1;
        }
    }
    found = false;
    // low - 1 is the left-index; may be -1 when needle sorts before all items.
    return low - 1;
}
|||
// Walks right from `index` to the last adjacent segment whose column equals
// `needle` (duplicates are adjacent because the line is sorted).
function upperBound(haystack, needle, index) {
    for (let i = index + 1; i < haystack.length; index = i++) {
        if (haystack[i][COLUMN] !== needle)
            break;
    }
    return index;
}
|||
// Walks left from `index` to the first adjacent segment whose column equals
// `needle`.
function lowerBound(haystack, needle, index) {
    for (let i = index - 1; i >= 0; index = i--) {
        if (haystack[i][COLUMN] !== needle)
            break;
    }
    return index;
}
|||
// Creates a fresh memo record for memoizedBinarySearch. The -1 sentinels
// mean "nothing cached yet".
function memoizedState() {
    const memo = {
        lastKey: -1,
        lastNeedle: -1,
        lastIndex: -1,
    };
    return memo;
}
|||
/**
 * This overly complicated beast is just to record the last tested line/column and the resulting
 * index, allowing us to skip a few tests if mappings are monotonically increasing.
 */
function memoizedBinarySearch(haystack, needle, state, key) {
    const { lastKey, lastNeedle, lastIndex } = state;
    let low = 0;
    let high = haystack.length - 1;
    if (key === lastKey) {
        // Exact repeat of the previous query: reuse the cached answer (and
        // restore the module-level `found` flag to match).
        if (needle === lastNeedle) {
            found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
            return lastIndex;
        }
        if (needle >= lastNeedle) {
            // lastIndex may be -1 if the previous needle was not found.
            low = lastIndex === -1 ? 0 : lastIndex;
        }
        else {
            high = lastIndex;
        }
    }
    state.lastKey = key;
    state.lastNeedle = needle;
    return (state.lastIndex = binarySearch(haystack, needle, low, high));
}
|||
|
|||
// Rebuilds the original source files, with mappings that are ordered by source line/column instead
// of generated line/column.
function buildBySources(decoded, memos) {
    // One null-array per source file, keyed by original line number.
    const sources = memos.map(buildNullArray);
    for (let i = 0; i < decoded.length; i++) {
        const line = decoded[i];
        for (let j = 0; j < line.length; j++) {
            const seg = line[j];
            // Sourceless segments carry no original position to index.
            if (seg.length === 1)
                continue;
            const sourceIndex = seg[SOURCES_INDEX];
            const sourceLine = seg[SOURCE_LINE];
            const sourceColumn = seg[SOURCE_COLUMN];
            const originalSource = sources[sourceIndex];
            const originalLine = (originalSource[sourceLine] || (originalSource[sourceLine] = []));
            const memo = memos[sourceIndex];
            // The binary search either found a match, or it found the left-index just before where the
            // segment should go. Either way, we want to insert after that. And there may be multiple
            // generated segments associated with an original location, so there may need to move several
            // indexes before we find where we need to insert.
            const index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine));
            insert(originalLine, (memo.lastIndex = index + 1), [sourceColumn, i, seg[COLUMN]]);
        }
    }
    return sources;
}
|||
// Inserts `value` at `index`, shifting subsequent elements right by one.
function insert(array, index, value) {
    let i = array.length;
    while (i > index) {
        array[i] = array[i - 1];
        i--;
    }
    array[index] = value;
}
|||
// Null arrays allow us to use ordered index keys without actually allocating contiguous memory like
// a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations.
// Numeric properties on objects are magically sorted in ascending order by the engine regardless of
// the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending
// order when iterating with for-in.
function buildNullArray() {
    return Object.create(null);
}
|||
|
|||
// Accepts either a regular sourcemap (JSON string or object) or a sectioned
// map. Regular maps become a TraceMap directly; sectioned maps are
// flattened section-by-section into a single decoded map first.
const AnyMap = function (map, mapUrl) {
    const parsed = typeof map === 'string' ? JSON.parse(map) : map;
    if (!('sections' in parsed))
        return new TraceMap(parsed, mapUrl);
    // Shared output arrays that every section appends into.
    const mappings = [];
    const sources = [];
    const sourcesContent = [];
    const names = [];
    recurse(parsed, mapUrl, mappings, sources, sourcesContent, names, 0, 0, Infinity, Infinity);
    const joined = {
        version: 3,
        file: parsed.file,
        names,
        sources,
        sourcesContent,
        mappings,
    };
    return presortedDecodedMap(joined);
};
|||
// Flattens each section of a sectioned map into the shared output arrays.
// stopLine/stopColumn bound how far a section may emit before the next
// section's offset takes over.
function recurse(input, mapUrl, mappings, sources, sourcesContent, names, lineOffset, columnOffset, stopLine, stopColumn) {
    const { sections } = input;
    for (let i = 0; i < sections.length; i++) {
        const { map, offset } = sections[i];
        let sl = stopLine;
        let sc = stopColumn;
        if (i + 1 < sections.length) {
            // Clip this section at the start of the next one (or the caller's
            // own stop boundary, whichever comes first).
            const nextOffset = sections[i + 1].offset;
            sl = Math.min(stopLine, lineOffset + nextOffset.line);
            if (sl === stopLine) {
                sc = Math.min(stopColumn, columnOffset + nextOffset.column);
            }
            else if (sl < stopLine) {
                sc = columnOffset + nextOffset.column;
            }
        }
        addSection(map, mapUrl, mappings, sources, sourcesContent, names, lineOffset + offset.line, columnOffset + offset.column, sl, sc);
    }
}
|||
// Appends one section's map into the flattened output, offset by
// lineOffset/columnOffset and clipped at stopLine/stopColumn. Nested
// sectioned maps recurse.
function addSection(input, mapUrl, mappings, sources, sourcesContent, names, lineOffset, columnOffset, stopLine, stopColumn) {
    if ('sections' in input)
        return recurse(...arguments);
    const map = new TraceMap(input, mapUrl);
    // Segment indices in this section are rebased onto the shared arrays.
    const sourcesOffset = sources.length;
    const namesOffset = names.length;
    const decoded = decodedMappings(map);
    const { resolvedSources, sourcesContent: contents } = map;
    append(sources, resolvedSources);
    append(names, map.names);
    if (contents)
        append(sourcesContent, contents);
    else
        // Keep sourcesContent aligned with sources even when absent.
        for (let i = 0; i < resolvedSources.length; i++)
            sourcesContent.push(null);
    for (let i = 0; i < decoded.length; i++) {
        const lineI = lineOffset + i;
        // We can only add so many lines before we step into the range that the next section's map
        // controls. When we get to the last line, then we'll start checking the segments to see if
        // they've crossed into the column range. But it may not have any columns that overstep, so we
        // still need to check that we don't overstep lines, too.
        if (lineI > stopLine)
            return;
        // The out line may already exist in mappings (if we're continuing the line started by a
        // previous section). Or, we may have jumped ahead several lines to start this section.
        const out = getLine(mappings, lineI);
        // On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
        // map can be multiple lines), it doesn't.
        const cOffset = i === 0 ? columnOffset : 0;
        const line = decoded[i];
        for (let j = 0; j < line.length; j++) {
            const seg = line[j];
            const column = cOffset + seg[COLUMN];
            // If this segment steps into the column range that the next section's map controls, we need
            // to stop early.
            if (lineI === stopLine && column >= stopColumn)
                return;
            if (seg.length === 1) {
                out.push([column]);
                continue;
            }
            const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];
            const sourceLine = seg[SOURCE_LINE];
            const sourceColumn = seg[SOURCE_COLUMN];
            out.push(seg.length === 4
                ? [column, sourcesIndex, sourceLine, sourceColumn]
                : [column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]);
        }
    }
}
|||
/**
 * Appends every element of `other` to `arr` in order, mutating `arr` in place.
 */
function append(arr, other) {
    for (const item of other) {
        arr.push(item);
    }
}
|||
/**
 * Returns `arr[index]`, first growing `arr` with empty lines so that the
 * requested index is guaranteed to exist.
 */
function getLine(arr, index) {
    while (arr.length <= index) {
        arr.push([]);
    }
    return arr[index];
}
|||
|
|||
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
// Bias values threaded through the lookup functions (see traceSegmentInternal).
const LEAST_UPPER_BOUND = -1;
const GREATEST_LOWER_BOUND = 1;
// Internal-only bias: collect every matching position instead of one.
const ALL_BOUND = 0;
// The `let` bindings below are assigned their implementations inside the IIFE
// further down, so they can share private helpers via closure.
/**
 * Returns the encoded (VLQ string) form of the SourceMap's mappings field.
 */
let encodedMappings;
/**
 * Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
 */
let decodedMappings;
/**
 * A low-level API to find the segment associated with a generated line/column (think, from a
 * stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
 */
let traceSegment;
/**
 * A higher-level API to find the source/line/column associated with a generated line/column
 * (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
 * `source-map` library.
 */
let originalPositionFor;
/**
 * Finds the generated line/column position of the provided source/line/column source position.
 */
let generatedPositionFor;
/**
 * Finds all generated line/column positions of the provided source/line/column source position.
 */
let allGeneratedPositionsFor;
/**
 * Iterates each mapping in generated position order.
 */
let eachMapping;
/**
 * Retrieves the source content for a particular source, if it's found. Returns null if not.
 */
let sourceContentFor;
/**
 * A helper that skips sorting of the input map's mappings array, which can be expensive for larger
 * maps.
 */
let presortedDecodedMap;
/**
 * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
let decodedMap;
/**
 * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
let encodedMap;
|||
/**
 * Parses a source map (object or JSON string) into a queryable form. The
 * mappings field is stored in whichever representation the input provided
 * (encoded string or decoded array); the other form is computed lazily by
 * encodedMappings()/decodedMappings() and cached.
 */
class TraceMap {
    /**
     * @param map - A v3 source map object, or its JSON-serialized string form.
     * @param mapUrl - URL the map was loaded from; combined with `sourceRoot`
     *   to build `resolvedSources`.
     */
    constructor(map, mapUrl) {
        const isString = typeof map === 'string';
        // An object carrying our private memo state is already a TraceMap
        // (or compatible) instance — returning an object from a constructor
        // overrides `this`, so the existing instance is reused as-is.
        if (!isString && map._decodedMemo)
            return map;
        const parsed = (isString ? JSON.parse(map) : map);
        const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
        this.version = version;
        this.file = file;
        this.names = names;
        this.sourceRoot = sourceRoot;
        this.sources = sources;
        this.sourcesContent = sourcesContent;
        // Resolve each source against sourceRoot, itself resolved against the
        // directory portion of the map's URL.
        const from = resolve(sourceRoot || '', stripFilename(mapUrl));
        this.resolvedSources = sources.map((s) => resolve(s || '', from));
        const { mappings } = parsed;
        if (typeof mappings === 'string') {
            this._encoded = mappings;
            this._decoded = undefined;
        }
        else {
            this._encoded = undefined;
            // If we parsed the JSON ourselves we own the array and maybeSort
            // may mutate it in place; otherwise it copies before sorting.
            this._decoded = maybeSort(mappings, isString);
        }
        // Lazy lookup state: memo for generated-position searches, plus the
        // source-keyed reverse index built on first generatedPositionFor call.
        this._decodedMemo = memoizedState();
        this._bySources = undefined;
        this._bySourceMemos = undefined;
    }
}
|||
// The module-level `let` bindings above are assigned here so all public
// functions can share the private `generatedPosition` helper via closure.
(() => {
    encodedMappings = (map) => {
        var _a;
        // Encode on first request and cache on the map.
        return ((_a = map._encoded) !== null && _a !== void 0 ? _a : (map._encoded = encode(map._decoded)));
    };
    decodedMappings = (map) => {
        // Decode on first request and cache on the map.
        return (map._decoded || (map._decoded = decode(map._encoded)));
    };
    traceSegment = (map, line, column) => {
        const decoded = decodedMappings(map);
        // It's common for parent source maps to have pointers to lines that have no
        // mapping (like a "//# sourceMappingURL=") at the end of the child file.
        if (line >= decoded.length)
            return null;
        const segments = decoded[line];
        const index = traceSegmentInternal(segments, map._decodedMemo, line, column, GREATEST_LOWER_BOUND);
        return index === -1 ? null : segments[index];
    };
    originalPositionFor = (map, { line, column, bias }) => {
        // Public API is 1-based for lines; internals are 0-based.
        line--;
        if (line < 0)
            throw new Error(LINE_GTR_ZERO);
        if (column < 0)
            throw new Error(COL_GTR_EQ_ZERO);
        const decoded = decodedMappings(map);
        // It's common for parent source maps to have pointers to lines that have no
        // mapping (like a "//# sourceMappingURL=") at the end of the child file.
        if (line >= decoded.length)
            return OMapping(null, null, null, null);
        const segments = decoded[line];
        const index = traceSegmentInternal(segments, map._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
        if (index === -1)
            return OMapping(null, null, null, null);
        const segment = segments[index];
        // A 1-length segment has a generated position but no source info.
        if (segment.length === 1)
            return OMapping(null, null, null, null);
        const { names, resolvedSources } = map;
        return OMapping(resolvedSources[segment[SOURCES_INDEX]], segment[SOURCE_LINE] + 1, segment[SOURCE_COLUMN], segment.length === 5 ? names[segment[NAMES_INDEX]] : null);
    };
    allGeneratedPositionsFor = (map, { source, line, column }) => {
        return generatedPosition(map, source, line, column, ALL_BOUND);
    };
    generatedPositionFor = (map, { source, line, column, bias }) => {
        return generatedPosition(map, source, line, column, bias || GREATEST_LOWER_BOUND);
    };
    eachMapping = (map, cb) => {
        const decoded = decodedMappings(map);
        const { names, resolvedSources } = map;
        for (let i = 0; i < decoded.length; i++) {
            const line = decoded[i];
            for (let j = 0; j < line.length; j++) {
                const seg = line[j];
                const generatedLine = i + 1;
                const generatedColumn = seg[0];
                let source = null;
                let originalLine = null;
                let originalColumn = null;
                let name = null;
                if (seg.length !== 1) {
                    source = resolvedSources[seg[1]];
                    originalLine = seg[2] + 1;
                    originalColumn = seg[3];
                }
                if (seg.length === 5)
                    name = names[seg[4]];
                cb({
                    generatedLine,
                    generatedColumn,
                    source,
                    originalLine,
                    originalColumn,
                    name,
                });
            }
        }
    };
    sourceContentFor = (map, source) => {
        const { sources, resolvedSources, sourcesContent } = map;
        if (sourcesContent == null)
            return null;
        // Accept either the raw source name or its resolved URL.
        let index = sources.indexOf(source);
        if (index === -1)
            index = resolvedSources.indexOf(source);
        return index === -1 ? null : sourcesContent[index];
    };
    presortedDecodedMap = (map, mapUrl) => {
        // Clone with empty mappings so the constructor skips maybeSort, then
        // install the caller's (already sorted) decoded mappings directly.
        const tracer = new TraceMap(clone(map, []), mapUrl);
        tracer._decoded = map.mappings;
        return tracer;
    };
    decodedMap = (map) => {
        return clone(map, decodedMappings(map));
    };
    encodedMap = (map) => {
        return clone(map, encodedMappings(map));
    };
    // Shared lookup used by generatedPositionFor/allGeneratedPositionsFor.
    function generatedPosition(map, source, line, column, bias) {
        line--;
        if (line < 0)
            throw new Error(LINE_GTR_ZERO);
        if (column < 0)
            throw new Error(COL_GTR_EQ_ZERO);
        const { sources, resolvedSources } = map;
        let sourceIndex = sources.indexOf(source);
        if (sourceIndex === -1)
            sourceIndex = resolvedSources.indexOf(source);
        if (sourceIndex === -1)
            return bias === ALL_BOUND ? [] : GMapping(null, null);
        // Lazily build (and cache) the source-keyed reverse index.
        const generated = (map._bySources || (map._bySources = buildBySources(decodedMappings(map), (map._bySourceMemos = sources.map(memoizedState)))));
        const segments = generated[sourceIndex][line];
        if (segments == null)
            return bias === ALL_BOUND ? [] : GMapping(null, null);
        const memo = map._bySourceMemos[sourceIndex];
        if (bias === ALL_BOUND)
            return sliceGeneratedPositions(segments, memo, line, column);
        const index = traceSegmentInternal(segments, memo, line, column, bias);
        if (index === -1)
            return GMapping(null, null);
        const segment = segments[index];
        return GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]);
    }
})();
|||
/**
 * Builds a plain source-map object sharing every field of `map` except
 * `mappings`, which is replaced with the provided value.
 */
function clone(map, mappings) {
    const { version, file, names, sourceRoot, sources, sourcesContent } = map;
    return { version, file, names, sourceRoot, sources, sourcesContent, mappings };
}
|||
/** Shapes an original-position lookup result. */
function OMapping(source, line, column, name) {
    return {
        source,
        line,
        column,
        name,
    };
}
|||
/** Shapes a generated-position lookup result. */
function GMapping(line, column) {
    return {
        line,
        column,
    };
}
|||
/**
 * Finds the index of the segment matching generated `column`, honoring the
 * requested `bias`; returns -1 if no segment satisfies it. NOTE: reads the
 * module-level `found` flag that memoizedBinarySearch sets as a side channel.
 */
function traceSegmentInternal(segments, memo, line, column, bias) {
    let index = memoizedBinarySearch(segments, column, memo, line);
    if (found) {
        // Exact column match: widen to the last/first segment with that column.
        index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
    }
    else if (bias === LEAST_UPPER_BOUND)
        // No exact match: the search returned the lower neighbor, so step up.
        index++;
    if (index === -1 || index === segments.length)
        return -1;
    return index;
}
|||
/**
 * Returns every generated position mapped from the given source position
 * (backs allGeneratedPositionsFor): finds the lower-bound segment, widens to
 * the full run sharing the matched column, and converts each to a GMapping
 * with a 1-based generated line. NOTE: reads the module-level `found` flag
 * set by the binary search inside traceSegmentInternal.
 */
function sliceGeneratedPositions(segments, memo, line, column) {
    let min = traceSegmentInternal(segments, memo, line, column, GREATEST_LOWER_BOUND);
    if (min === -1)
        return [];
    // We may have found the segment that started at an earlier column. If this is the case, then we
    // need to slice all generated segments that match _that_ column, because all such segments span
    // to our desired column.
    const matchedColumn = found ? column : segments[min][COLUMN];
    // The binary search is not guaranteed to find the lower bound when a match wasn't found.
    if (!found)
        min = lowerBound(segments, matchedColumn, min);
    const max = upperBound(segments, matchedColumn, min);
    const result = [];
    for (; min <= max; min++) {
        const segment = segments[min];
        result.push(GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]));
    }
    return result;
}
|||
|
|||
export { AnyMap, GREATEST_LOWER_BOUND, LEAST_UPPER_BOUND, TraceMap, allGeneratedPositionsFor, decodedMap, decodedMappings, eachMapping, encodedMap, encodedMappings, generatedPositionFor, originalPositionFor, presortedDecodedMap, sourceContentFor, traceSegment }; |
|||
//# sourceMappingURL=trace-mapping.mjs.map
|
|||
File diff suppressed because one or more lines are too long
@ -0,0 +1,558 @@ |
|||
(function (global, factory) { |
|||
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('@jridgewell/sourcemap-codec'), require('@jridgewell/resolve-uri')) : |
|||
typeof define === 'function' && define.amd ? define(['exports', '@jridgewell/sourcemap-codec', '@jridgewell/resolve-uri'], factory) : |
|||
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.traceMapping = {}, global.sourcemapCodec, global.resolveURI)); |
|||
})(this, (function (exports, sourcemapCodec, resolveUri) { 'use strict'; |
|||
|
|||
function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; } |
|||
|
|||
var resolveUri__default = /*#__PURE__*/_interopDefaultLegacy(resolveUri); |
|||
|
|||
function resolve(input, base) { |
|||
// The base is always treated as a directory, if it's not empty.
|
|||
// https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
|
|||
// https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
|
|||
if (base && !base.endsWith('/')) |
|||
base += '/'; |
|||
return resolveUri__default["default"](input, base); |
|||
} |
|||
|
|||
/** |
|||
* Removes everything after the last "/", but leaves the slash. |
|||
*/ |
|||
function stripFilename(path) { |
|||
if (!path) |
|||
return ''; |
|||
const index = path.lastIndexOf('/'); |
|||
return path.slice(0, index + 1); |
|||
} |
|||
|
|||
    // Indexes into a decoded mapping segment:
    // [generatedColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
    const COLUMN = 0;
    const SOURCES_INDEX = 1;
    const SOURCE_LINE = 2;
    const SOURCE_COLUMN = 3;
    const NAMES_INDEX = 4;
    // Indexes into a reversed (source-keyed) segment built by buildBySources:
    // [sourceColumn, generatedLine, generatedColumn]
    const REV_GENERATED_LINE = 1;
    const REV_GENERATED_COLUMN = 2;
|||
|
|||
function maybeSort(mappings, owned) { |
|||
const unsortedIndex = nextUnsortedSegmentLine(mappings, 0); |
|||
if (unsortedIndex === mappings.length) |
|||
return mappings; |
|||
// If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
|
|||
// not, we do not want to modify the consumer's input array.
|
|||
if (!owned) |
|||
mappings = mappings.slice(); |
|||
for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) { |
|||
mappings[i] = sortSegments(mappings[i], owned); |
|||
} |
|||
return mappings; |
|||
} |
|||
function nextUnsortedSegmentLine(mappings, start) { |
|||
for (let i = start; i < mappings.length; i++) { |
|||
if (!isSorted(mappings[i])) |
|||
return i; |
|||
} |
|||
return mappings.length; |
|||
} |
|||
function isSorted(line) { |
|||
for (let j = 1; j < line.length; j++) { |
|||
if (line[j][COLUMN] < line[j - 1][COLUMN]) { |
|||
return false; |
|||
} |
|||
} |
|||
return true; |
|||
} |
|||
function sortSegments(line, owned) { |
|||
if (!owned) |
|||
line = line.slice(); |
|||
return line.sort(sortComparator); |
|||
} |
|||
function sortComparator(a, b) { |
|||
return a[COLUMN] - b[COLUMN]; |
|||
} |
|||
|
|||
let found = false; |
|||
/** |
|||
* A binary search implementation that returns the index if a match is found. |
|||
* If no match is found, then the left-index (the index associated with the item that comes just |
|||
* before the desired index) is returned. To maintain proper sort order, a splice would happen at |
|||
* the next index: |
|||
* |
|||
* ```js
|
|||
* const array = [1, 3]; |
|||
* const needle = 2; |
|||
* const index = binarySearch(array, needle, (item, needle) => item - needle); |
|||
* |
|||
* assert.equal(index, 0); |
|||
* array.splice(index + 1, 0, needle); |
|||
* assert.deepEqual(array, [1, 2, 3]); |
|||
* ``` |
|||
*/ |
|||
function binarySearch(haystack, needle, low, high) { |
|||
while (low <= high) { |
|||
const mid = low + ((high - low) >> 1); |
|||
const cmp = haystack[mid][COLUMN] - needle; |
|||
if (cmp === 0) { |
|||
found = true; |
|||
return mid; |
|||
} |
|||
if (cmp < 0) { |
|||
low = mid + 1; |
|||
} |
|||
else { |
|||
high = mid - 1; |
|||
} |
|||
} |
|||
found = false; |
|||
return low - 1; |
|||
} |
|||
function upperBound(haystack, needle, index) { |
|||
for (let i = index + 1; i < haystack.length; index = i++) { |
|||
if (haystack[i][COLUMN] !== needle) |
|||
break; |
|||
} |
|||
return index; |
|||
} |
|||
function lowerBound(haystack, needle, index) { |
|||
for (let i = index - 1; i >= 0; index = i--) { |
|||
if (haystack[i][COLUMN] !== needle) |
|||
break; |
|||
} |
|||
return index; |
|||
} |
|||
function memoizedState() { |
|||
return { |
|||
lastKey: -1, |
|||
lastNeedle: -1, |
|||
lastIndex: -1, |
|||
}; |
|||
} |
|||
    /**
     * This overly complicated beast is just to record the last tested line/column and the resulting
     * index, allowing us to skip a few tests if mappings are monotonically increasing.
     */
    function memoizedBinarySearch(haystack, needle, state, key) {
        const { lastKey, lastNeedle, lastIndex } = state;
        let low = 0;
        let high = haystack.length - 1;
        // The memo only applies when searching the same line (key) again.
        if (key === lastKey) {
            if (needle === lastNeedle) {
                // Exact repeat: restore the `found` side channel and return the
                // cached index without searching.
                found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
                return lastIndex;
            }
            if (needle >= lastNeedle) {
                // lastIndex may be -1 if the previous needle was not found.
                low = lastIndex === -1 ? 0 : lastIndex;
            }
            else {
                high = lastIndex;
            }
        }
        state.lastKey = key;
        state.lastNeedle = needle;
        return (state.lastIndex = binarySearch(haystack, needle, low, high));
    }
|||
|
|||
// Rebuilds the original source files, with mappings that are ordered by source line/column instead
|
|||
// of generated line/column.
|
|||
    /**
     * Builds, per source file, a per-source-line list of
     * [sourceColumn, generatedLine, generatedColumn] segments ordered by
     * source column — the reverse index behind generatedPositionFor.
     */
    function buildBySources(decoded, memos) {
        const sources = memos.map(buildNullArray);
        for (let i = 0; i < decoded.length; i++) {
            const line = decoded[i];
            for (let j = 0; j < line.length; j++) {
                const seg = line[j];
                // Segments without source info cannot be reverse-indexed.
                if (seg.length === 1)
                    continue;
                const sourceIndex = seg[SOURCES_INDEX];
                const sourceLine = seg[SOURCE_LINE];
                const sourceColumn = seg[SOURCE_COLUMN];
                const originalSource = sources[sourceIndex];
                const originalLine = (originalSource[sourceLine] || (originalSource[sourceLine] = []));
                const memo = memos[sourceIndex];
                // The binary search either found a match, or it found the left-index just before where the
                // segment should go. Either way, we want to insert after that. And there may be multiple
                // generated segments associated with an original location, so there may need to move several
                // indexes before we find where we need to insert.
                const index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine));
                insert(originalLine, (memo.lastIndex = index + 1), [sourceColumn, i, seg[COLUMN]]);
            }
        }
        return sources;
    }
|||
function insert(array, index, value) { |
|||
for (let i = array.length; i > index; i--) { |
|||
array[i] = array[i - 1]; |
|||
} |
|||
array[index] = value; |
|||
} |
|||
// Null arrays allow us to use ordered index keys without actually allocating contiguous memory like
|
|||
// a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations.
|
|||
// Numeric properties on objects are magically sorted in ascending order by the engine regardless of
|
|||
// the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending
|
|||
// order when iterating with for-in.
|
|||
function buildNullArray() { |
|||
return { __proto__: null }; |
|||
} |
|||
|
|||
    /**
     * Constructs a TraceMap from either a regular source map or a composite
     * "indexed" map containing `sections`. Sectioned maps are flattened into a
     * single presorted decoded map; plain maps go straight to the TraceMap
     * constructor. Accepts the map as an object or a JSON string.
     */
    const AnyMap = function (map, mapUrl) {
        const parsed = typeof map === 'string' ? JSON.parse(map) : map;
        if (!('sections' in parsed))
            return new TraceMap(parsed, mapUrl);
        const mappings = [];
        const sources = [];
        const sourcesContent = [];
        const names = [];
        // Flatten every section into the accumulators, with no outer bounds.
        recurse(parsed, mapUrl, mappings, sources, sourcesContent, names, 0, 0, Infinity, Infinity);
        const joined = {
            version: 3,
            file: parsed.file,
            names,
            sources,
            sourcesContent,
            mappings,
        };
        // Sections are emitted in generated order, so the joined mappings are
        // already sorted and the sort step can be skipped.
        return exports.presortedDecodedMap(joined);
    };
|||
function recurse(input, mapUrl, mappings, sources, sourcesContent, names, lineOffset, columnOffset, stopLine, stopColumn) { |
|||
const { sections } = input; |
|||
for (let i = 0; i < sections.length; i++) { |
|||
const { map, offset } = sections[i]; |
|||
let sl = stopLine; |
|||
let sc = stopColumn; |
|||
if (i + 1 < sections.length) { |
|||
const nextOffset = sections[i + 1].offset; |
|||
sl = Math.min(stopLine, lineOffset + nextOffset.line); |
|||
if (sl === stopLine) { |
|||
sc = Math.min(stopColumn, columnOffset + nextOffset.column); |
|||
} |
|||
else if (sl < stopLine) { |
|||
sc = columnOffset + nextOffset.column; |
|||
} |
|||
} |
|||
addSection(map, mapUrl, mappings, sources, sourcesContent, names, lineOffset + offset.line, columnOffset + offset.column, sl, sc); |
|||
} |
|||
} |
|||
    /**
     * Copies one section of a composite map into the flattened accumulators,
     * shifting every generated position by the section's line/column offset
     * and stopping at (stopLine, stopColumn), where the next section takes
     * over. Source/name indexes are rebased onto the shared arrays.
     */
    function addSection(input, mapUrl, mappings, sources, sourcesContent, names, lineOffset, columnOffset, stopLine, stopColumn) {
        // Nested sectioned maps are allowed; recurse with identical arguments.
        if ('sections' in input)
            return recurse(...arguments);
        const map = new TraceMap(input, mapUrl);
        // Remember where this section's sources/names begin in the shared
        // arrays, so its segment indexes can be rebased below.
        const sourcesOffset = sources.length;
        const namesOffset = names.length;
        const decoded = exports.decodedMappings(map);
        const { resolvedSources, sourcesContent: contents } = map;
        append(sources, resolvedSources);
        append(names, map.names);
        if (contents)
            append(sourcesContent, contents);
        else
            // Keep sourcesContent index-aligned with sources even when this
            // section provides no content.
            for (let i = 0; i < resolvedSources.length; i++)
                sourcesContent.push(null);
        for (let i = 0; i < decoded.length; i++) {
            const lineI = lineOffset + i;
            // We can only add so many lines before we step into the range that the next section's map
            // controls. When we get to the last line, then we'll start checking the segments to see if
            // they've crossed into the column range. But it may not have any columns that overstep, so we
            // still need to check that we don't overstep lines, too.
            if (lineI > stopLine)
                return;
            // The out line may already exist in mappings (if we're continuing the line started by a
            // previous section). Or, we may have jumped ahead several lines to start this section.
            const out = getLine(mappings, lineI);
            // On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
            // map can be multiple lines), it doesn't.
            const cOffset = i === 0 ? columnOffset : 0;
            const line = decoded[i];
            for (let j = 0; j < line.length; j++) {
                const seg = line[j];
                const column = cOffset + seg[COLUMN];
                // If this segment steps into the column range that the next section's map controls, we need
                // to stop early.
                if (lineI === stopLine && column >= stopColumn)
                    return;
                // A 1-length segment carries only a generated column.
                if (seg.length === 1) {
                    out.push([column]);
                    continue;
                }
                const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];
                const sourceLine = seg[SOURCE_LINE];
                const sourceColumn = seg[SOURCE_COLUMN];
                out.push(seg.length === 4
                    ? [column, sourcesIndex, sourceLine, sourceColumn]
                    : [column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]);
            }
        }
    }
|||
function append(arr, other) { |
|||
for (let i = 0; i < other.length; i++) |
|||
arr.push(other[i]); |
|||
} |
|||
function getLine(arr, index) { |
|||
for (let i = arr.length; i <= index; i++) |
|||
arr[i] = []; |
|||
return arr[index]; |
|||
} |
|||
|
|||
    const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
    const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
    // Bias values threaded through the lookup functions (see traceSegmentInternal).
    const LEAST_UPPER_BOUND = -1;
    const GREATEST_LOWER_BOUND = 1;
    // Internal-only bias: collect every matching position instead of one.
    const ALL_BOUND = 0;
    // The exports below are initialized to `void 0` here and assigned their
    // real implementations inside the IIFE further down.
    /**
     * Returns the encoded (VLQ string) form of the SourceMap's mappings field.
     */
    exports.encodedMappings = void 0;
    /**
     * Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
     */
    exports.decodedMappings = void 0;
    /**
     * A low-level API to find the segment associated with a generated line/column (think, from a
     * stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
     */
    exports.traceSegment = void 0;
    /**
     * A higher-level API to find the source/line/column associated with a generated line/column
     * (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
     * `source-map` library.
     */
    exports.originalPositionFor = void 0;
    /**
     * Finds the generated line/column position of the provided source/line/column source position.
     */
    exports.generatedPositionFor = void 0;
    /**
     * Finds all generated line/column positions of the provided source/line/column source position.
     */
    exports.allGeneratedPositionsFor = void 0;
    /**
     * Iterates each mapping in generated position order.
     */
    exports.eachMapping = void 0;
    /**
     * Retrieves the source content for a particular source, if it's found. Returns null if not.
     */
    exports.sourceContentFor = void 0;
    /**
     * A helper that skips sorting of the input map's mappings array, which can be expensive for larger
     * maps.
     */
    exports.presortedDecodedMap = void 0;
    /**
     * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
     * a sourcemap, or to JSON.stringify.
     */
    exports.decodedMap = void 0;
    /**
     * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
     * a sourcemap, or to JSON.stringify.
     */
    exports.encodedMap = void 0;
|||
    /**
     * Parses a source map (object or JSON string) into a queryable form. The
     * mappings field is stored in whichever representation the input provided
     * (encoded string or decoded array); the other form is computed lazily by
     * encodedMappings()/decodedMappings() and cached.
     */
    class TraceMap {
        /**
         * @param map - A v3 source map object, or its JSON-serialized string form.
         * @param mapUrl - URL the map was loaded from; combined with
         *   `sourceRoot` to build `resolvedSources`.
         */
        constructor(map, mapUrl) {
            const isString = typeof map === 'string';
            // An object carrying our private memo state is already a TraceMap
            // (or compatible) instance — returning an object from a constructor
            // overrides `this`, so the existing instance is reused as-is.
            if (!isString && map._decodedMemo)
                return map;
            const parsed = (isString ? JSON.parse(map) : map);
            const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
            this.version = version;
            this.file = file;
            this.names = names;
            this.sourceRoot = sourceRoot;
            this.sources = sources;
            this.sourcesContent = sourcesContent;
            // Resolve each source against sourceRoot, itself resolved against
            // the directory portion of the map's URL.
            const from = resolve(sourceRoot || '', stripFilename(mapUrl));
            this.resolvedSources = sources.map((s) => resolve(s || '', from));
            const { mappings } = parsed;
            if (typeof mappings === 'string') {
                this._encoded = mappings;
                this._decoded = undefined;
            }
            else {
                this._encoded = undefined;
                // If we parsed the JSON ourselves we own the array and maybeSort
                // may mutate it in place; otherwise it copies before sorting.
                this._decoded = maybeSort(mappings, isString);
            }
            // Lazy lookup state: memo for searches, plus the source-keyed
            // reverse index built on first generatedPositionFor call.
            this._decodedMemo = memoizedState();
            this._bySources = undefined;
            this._bySourceMemos = undefined;
        }
    }
|||
    // The `exports.*` stubs above are assigned here so all public functions
    // can share the private `generatedPosition` helper via closure.
    (() => {
        exports.encodedMappings = (map) => {
            var _a;
            // Encode on first request and cache on the map.
            return ((_a = map._encoded) !== null && _a !== void 0 ? _a : (map._encoded = sourcemapCodec.encode(map._decoded)));
        };
        exports.decodedMappings = (map) => {
            // Decode on first request and cache on the map.
            return (map._decoded || (map._decoded = sourcemapCodec.decode(map._encoded)));
        };
        exports.traceSegment = (map, line, column) => {
            const decoded = exports.decodedMappings(map);
            // It's common for parent source maps to have pointers to lines that have no
            // mapping (like a "//# sourceMappingURL=") at the end of the child file.
            if (line >= decoded.length)
                return null;
            const segments = decoded[line];
            const index = traceSegmentInternal(segments, map._decodedMemo, line, column, GREATEST_LOWER_BOUND);
            return index === -1 ? null : segments[index];
        };
        exports.originalPositionFor = (map, { line, column, bias }) => {
            // Public API is 1-based for lines; internals are 0-based.
            line--;
            if (line < 0)
                throw new Error(LINE_GTR_ZERO);
            if (column < 0)
                throw new Error(COL_GTR_EQ_ZERO);
            const decoded = exports.decodedMappings(map);
            // It's common for parent source maps to have pointers to lines that have no
            // mapping (like a "//# sourceMappingURL=") at the end of the child file.
            if (line >= decoded.length)
                return OMapping(null, null, null, null);
            const segments = decoded[line];
            const index = traceSegmentInternal(segments, map._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
            if (index === -1)
                return OMapping(null, null, null, null);
            const segment = segments[index];
            // A 1-length segment has a generated position but no source info.
            if (segment.length === 1)
                return OMapping(null, null, null, null);
            const { names, resolvedSources } = map;
            return OMapping(resolvedSources[segment[SOURCES_INDEX]], segment[SOURCE_LINE] + 1, segment[SOURCE_COLUMN], segment.length === 5 ? names[segment[NAMES_INDEX]] : null);
        };
        exports.allGeneratedPositionsFor = (map, { source, line, column }) => {
            return generatedPosition(map, source, line, column, ALL_BOUND);
        };
        exports.generatedPositionFor = (map, { source, line, column, bias }) => {
            return generatedPosition(map, source, line, column, bias || GREATEST_LOWER_BOUND);
        };
        exports.eachMapping = (map, cb) => {
            const decoded = exports.decodedMappings(map);
            const { names, resolvedSources } = map;
            for (let i = 0; i < decoded.length; i++) {
                const line = decoded[i];
                for (let j = 0; j < line.length; j++) {
                    const seg = line[j];
                    const generatedLine = i + 1;
                    const generatedColumn = seg[0];
                    let source = null;
                    let originalLine = null;
                    let originalColumn = null;
                    let name = null;
                    if (seg.length !== 1) {
                        source = resolvedSources[seg[1]];
                        originalLine = seg[2] + 1;
                        originalColumn = seg[3];
                    }
                    if (seg.length === 5)
                        name = names[seg[4]];
                    cb({
                        generatedLine,
                        generatedColumn,
                        source,
                        originalLine,
                        originalColumn,
                        name,
                    });
                }
            }
        };
        exports.sourceContentFor = (map, source) => {
            const { sources, resolvedSources, sourcesContent } = map;
            if (sourcesContent == null)
                return null;
            // Accept either the raw source name or its resolved URL.
            let index = sources.indexOf(source);
            if (index === -1)
                index = resolvedSources.indexOf(source);
            return index === -1 ? null : sourcesContent[index];
        };
        exports.presortedDecodedMap = (map, mapUrl) => {
            // Clone with empty mappings so the constructor skips maybeSort,
            // then install the caller's (already sorted) mappings directly.
            const tracer = new TraceMap(clone(map, []), mapUrl);
            tracer._decoded = map.mappings;
            return tracer;
        };
        exports.decodedMap = (map) => {
            return clone(map, exports.decodedMappings(map));
        };
        exports.encodedMap = (map) => {
            return clone(map, exports.encodedMappings(map));
        };
        // Shared lookup used by generatedPositionFor/allGeneratedPositionsFor.
        function generatedPosition(map, source, line, column, bias) {
            line--;
            if (line < 0)
                throw new Error(LINE_GTR_ZERO);
            if (column < 0)
                throw new Error(COL_GTR_EQ_ZERO);
            const { sources, resolvedSources } = map;
            let sourceIndex = sources.indexOf(source);
            if (sourceIndex === -1)
                sourceIndex = resolvedSources.indexOf(source);
            if (sourceIndex === -1)
                return bias === ALL_BOUND ? [] : GMapping(null, null);
            // Lazily build (and cache) the source-keyed reverse index.
            const generated = (map._bySources || (map._bySources = buildBySources(exports.decodedMappings(map), (map._bySourceMemos = sources.map(memoizedState)))));
            const segments = generated[sourceIndex][line];
            if (segments == null)
                return bias === ALL_BOUND ? [] : GMapping(null, null);
            const memo = map._bySourceMemos[sourceIndex];
            if (bias === ALL_BOUND)
                return sliceGeneratedPositions(segments, memo, line, column);
            const index = traceSegmentInternal(segments, memo, line, column, bias);
            if (index === -1)
                return GMapping(null, null);
            const segment = segments[index];
            return GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]);
        }
    })();
|||
/**
 * Shallow-copies a sourcemap's descriptive fields into a fresh object,
 * substituting `mappings` for the original's. Field values (arrays etc.)
 * are shared with the input, not deep-copied.
 */
function clone(map, mappings) {
    const { version, file, names, sourceRoot, sources, sourcesContent } = map;
    return { version, file, names, sourceRoot, sources, sourcesContent, mappings };
}
|||
/**
 * Builds the record returned by original-position lookups.
 * All four fields are null when the lookup missed.
 */
function OMapping(source, line, column, name) {
    return { source: source, line: line, column: column, name: name };
}
|||
/**
 * Builds the record returned by generated-position lookups.
 * Both fields are null when the lookup missed.
 */
function GMapping(line, column) {
    return { line: line, column: column };
}
|||
/**
 * Binary-searches `segments` for `column` and applies the requested bias,
 * returning the matched segment index or -1 when nothing qualifies.
 *
 * NOTE(review): reads the module-level `found` flag set as a side effect of
 * memoizedBinarySearch — the search call must precede the flag check.
 */
function traceSegmentInternal(segments, memo, line, column, bias) {
    let index = memoizedBinarySearch(segments, column, memo, line);
    if (found) {
        // Exact hit: walk to the outermost segment sharing this column,
        // in the direction the bias asks for.
        const refine = bias === LEAST_UPPER_BOUND ? upperBound : lowerBound;
        index = refine(segments, column, index);
    } else if (bias === LEAST_UPPER_BOUND) {
        // Missed: the search left us at the greatest lower bound, so step
        // forward one slot for a least-upper-bound lookup.
        index++;
    }
    return index === -1 || index === segments.length ? -1 : index;
}
|||
/**
 * Collects every generated position mapping to the given original
 * (line, column), as an array of GMappings. Backs the ALL_BOUND lookups.
 *
 * NOTE(review): relies on the module-level `found` flag set as a side effect
 * of the binary search inside traceSegmentInternal — do not reorder the
 * statements below.
 */
function sliceGeneratedPositions(segments, memo, line, column) {
    let min = traceSegmentInternal(segments, memo, line, column, GREATEST_LOWER_BOUND);
    // No segment at or before the requested column: nothing maps here.
    if (min === -1)
        return [];
    // We may have found the segment that started at an earlier column. If this is the case, then we
    // need to slice all generated segments that match _that_ column, because all such segments span
    // to our desired column.
    const matchedColumn = found ? column : segments[min][COLUMN];
    // The binary search is not guaranteed to find the lower bound when a match wasn't found.
    if (!found)
        min = lowerBound(segments, matchedColumn, min);
    const max = upperBound(segments, matchedColumn, min);
    // Every segment in [min, max] shares matchedColumn; emit each one's
    // generated position (line converted back to 1-based).
    const result = [];
    for (; min <= max; min++) {
        const segment = segments[min];
        result.push(GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]));
    }
    return result;
}
|||
|
|||
// Public API of this UMD bundle: the map classes and the bias constants
// accepted by the position-lookup functions.
exports.AnyMap = AnyMap;
exports.GREATEST_LOWER_BOUND = GREATEST_LOWER_BOUND;
exports.LEAST_UPPER_BOUND = LEAST_UPPER_BOUND;
exports.TraceMap = TraceMap;

// Mark the namespace as an ES module for interop with transpiled importers.
Object.defineProperty(exports, '__esModule', { value: true });
|||
|
|||
})); |
|||
//# sourceMappingURL=trace-mapping.umd.js.map
|
|||
Some files were not shown because too many files changed in this diff
Loading…
Reference in new issue