9209 lines
525 KiB
JavaScript
9209 lines
525 KiB
JavaScript
|
module.exports =
|
|||
|
/******/ (() => { // webpackBootstrap
|
|||
|
/******/ var __webpack_modules__ = ({
|
|||
|
|
|||
|
/***/ 3848:
|
|||
|
/***/ ((module) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
module.exports = JSON.parse("[[\"8740\",\"䏰䰲䘃䖦䕸𧉧䵷䖳𧲱䳢𧳅㮕䜶䝄䱇䱀𤊿𣘗𧍒𦺋𧃒䱗𪍑䝏䗚䲅𧱬䴇䪤䚡𦬣爥𥩔𡩣𣸆𣽡晍囻\"],[\"8767\",\"綕夝𨮹㷴霴𧯯寛𡵞媤㘥𩺰嫑宷峼杮薓𩥅瑡璝㡵𡵓𣚞𦀡㻬\"],[\"87a1\",\"𥣞㫵竼龗𤅡𨤍𣇪𠪊𣉞䌊蒄龖鐯䤰蘓墖靊鈘秐稲晠権袝瑌篅枂稬剏遆㓦珄𥶹瓆鿇垳䤯呌䄱𣚎堘穲𧭥讏䚮𦺈䆁𥶙箮𢒼鿈𢓁𢓉𢓌鿉蔄𣖻䂴鿊䓡𪷿拁灮鿋\"],[\"8840\",\"㇀\",4,\"𠄌㇅𠃑𠃍㇆㇇𠃋𡿨㇈𠃊㇉㇊㇋㇌𠄎㇍㇎ĀÁǍÀĒÉĚÈŌÓǑÒÊ̄ẾÊ̌ỀÊāáǎàɑēéěèīíǐìōóǒòūúǔùǖǘǚ\"],[\"88a1\",\"ǜüê̄ếê̌ềêɡ⏚⏛\"],[\"8940\",\"𪎩𡅅\"],[\"8943\",\"攊\"],[\"8946\",\"丽滝鵎釟\"],[\"894c\",\"𧜵撑会伨侨兖兴农凤务动医华发变团声处备夲头学实実岚庆总斉柾栄桥济炼电纤纬纺织经统缆缷艺苏药视设询车轧轮\"],[\"89a1\",\"琑糼緍楆竉刧\"],[\"89ab\",\"醌碸酞肼\"],[\"89b0\",\"贋胶𠧧\"],[\"89b5\",\"肟黇䳍鷉鸌䰾𩷶𧀎鸊𪄳㗁\"],[\"89c1\",\"溚舾甙\"],[\"89c5\",\"䤑马骏龙禇𨑬𡷊𠗐𢫦两亁亀亇亿仫伷㑌侽㹈倃傈㑽㒓㒥円夅凛凼刅争剹劐匧㗇厩㕑厰㕓参吣㕭㕲㚁咓咣咴咹哐哯唘唣唨㖘唿㖥㖿嗗㗅\"],[\"8a40\",\"𧶄唥\"],[\"8a43\",\"𠱂𠴕𥄫喐𢳆㧬𠍁蹆𤶸𩓥䁓𨂾睺𢰸㨴䟕𨅝𦧲𤷪擝𠵼𠾴𠳕𡃴撍蹾𠺖𠰋𠽤𢲩𨉖𤓓\"],[\"8a64\",\"𠵆𩩍𨃩䟴𤺧𢳂骲㩧𩗴㿭㔆𥋇𩟔𧣈𢵄鵮頕\"],[\"8a76\",\"䏙𦂥撴哣𢵌𢯊𡁷㧻𡁯\"],[\"8aa1\",\"𦛚𦜖𧦠擪𥁒𠱃蹨𢆡𨭌𠜱\"],[\"8aac\",\"䠋𠆩㿺塳𢶍\"],[\"8ab2\",\"𤗈𠓼𦂗𠽌𠶖啹䂻䎺\"],[\"8abb\",\"䪴𢩦𡂝膪飵𠶜捹㧾𢝵跀嚡摼㹃\"],[\"8ac9\",\"𪘁𠸉𢫏𢳉\"],[\"8ace\",\"𡃈𣧂㦒㨆𨊛㕸𥹉𢃇噒𠼱𢲲𩜠㒼氽𤸻\"],[\"8adf\",\"𧕴𢺋𢈈𪙛𨳍𠹺𠰴𦠜羓𡃏𢠃𢤹㗻𥇣𠺌𠾍𠺪㾓𠼰𠵇𡅏𠹌\"],[\"8af6\",\"𠺫𠮩𠵈𡃀𡄽㿹𢚖搲𠾭\"],[\"8b40\",\"𣏴𧘹𢯎𠵾𠵿𢱑𢱕㨘𠺘𡃇𠼮𪘲𦭐𨳒𨶙𨳊閪哌苄喹\"],[\"8b55\",\"𩻃鰦骶𧝞𢷮煀腭胬尜𦕲脴㞗卟𨂽醶𠻺𠸏𠹷𠻻㗝𤷫㘉𠳖嚯𢞵𡃉𠸐𠹸𡁸𡅈𨈇𡑕𠹹𤹐𢶤婔𡀝𡀞𡃵𡃶垜𠸑\"],[\"8ba1\",\"𧚔𨋍𠾵𠹻𥅾㜃𠾶𡆀𥋘𪊽𤧚𡠺𤅷𨉼墙剨㘚𥜽箲孨䠀䬬鼧䧧鰟鮍𥭴𣄽嗻㗲嚉丨夂𡯁屮靑𠂆乛亻㔾尣彑忄㣺扌攵歺氵氺灬爫丬犭𤣩罒礻糹罓𦉪㓁\"],[\"8bde\",\"𦍋耂肀𦘒𦥑卝衤见𧢲讠贝钅镸长门𨸏韦页风飞饣𩠐鱼鸟黄歯龜丷𠂇阝户钢\"],[\"8c40\",\"倻淾𩱳龦㷉袏𤅎灷峵䬠𥇍㕙𥴰愢𨨲辧釶熑朙玺𣊁𪄇㲋𡦀䬐磤琂冮𨜏䀉橣𪊺䈣蘏𠩯稪𩥇𨫪靕灍匤𢁾鏴盙𨧣龧矝亣俰傼丯众龨吴綋墒壐𡶶庒庙忂𢜒斋\"],[\"8ca1\",\"𣏹椙橃𣱣泿\"],[\"8ca7\",\"爀𤔅玌㻛𤨓嬕璹讃𥲤𥚕窓篬糃繬苸薗龩袐龪躹龫迏蕟駠鈡龬𨶹𡐿䁱䊢娚\"],[\"8cc9\",\"顨杫䉶圽\"],[\"8cce\",\"藖𤥻芿𧄍䲁𦵴嵻𦬕𦾾龭龮宖龯曧繛湗秊㶈䓃𣉖𢞖䎚䔶\"],[\"8ce6\",\"峕𣬚諹屸㴒𣕑嵸龲煗䕘𤃬𡸣䱷㥸㑊𠆤𦱁諌侴𠈹妿腬顖𩣺弻\"],[\"8d40\",\"𠮟\"],[\"8d42\",\"𢇁𨥭䄂䚻𩁹㼇龳𪆵䃸㟖䛷𦱆䅼𨚲𧏿䕭㣔𥒚䕡䔛䶉䱻䵶䗪㿈𤬏㙡䓞䒽䇭崾嵈嵖㷼㠏嶤嶹㠠㠸幂庽弥徃㤈㤔㤿㥍惗愽峥㦉憷憹懏㦸戬抐拥挘㧸嚱\"],[\"8da1\",\"㨃揢揻搇摚㩋擀崕嘡龟㪗斆㪽旿晓㫲暒㬢朖㭂枤栀㭘桊梄㭲㭱㭻椉楃牜楤榟榅㮼槖㯝橥橴橱檂㯬檙㯲檫檵櫔櫶殁毁毪汵沪㳋洂洆洦涁㳯涤涱渕渘温溆𨧀溻滢滚齿滨滩漤漴㵆𣽁澁澾㵪㵵熷岙㶊瀬㶑灐灔灯灿炉𠌥䏁㗱𠻘\"],[\"8e40\",\"𣻗垾𦻓焾𥟠㙎榢𨯩孴穉𥣡𩓙穥穽𥦬窻窰竂竃燑𦒍䇊竚竝竪䇯咲𥰁笋筕笩𥌎𥳾箢筯莜𥮴𦱿篐萡箒箸𥴠㶭𥱥蒒篺簆簵𥳁籄粃𤢂粦晽𤕸糉糇糦籴糳糵糎\"],[\"8ea1\",\"繧䔝𦹄絝𦻖璍綉綫焵綳緒𤁗𦀩緤㴓緵𡟹緥𨍭縝𦄡𦅚繮纒䌫鑬縧罀罁罇
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 5334:
|
|||
|
/***/ ((module) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
module.exports = JSON.parse("[[\"0\",\"\\u0000\",127,\"€\"],[\"8140\",\"丂丄丅丆丏丒丗丟丠両丣並丩丮丯丱丳丵丷丼乀乁乂乄乆乊乑乕乗乚乛乢乣乤乥乧乨乪\",5,\"乲乴\",9,\"乿\",6,\"亇亊\"],[\"8180\",\"亐亖亗亙亜亝亞亣亪亯亰亱亴亶亷亸亹亼亽亾仈仌仏仐仒仚仛仜仠仢仦仧仩仭仮仯仱仴仸仹仺仼仾伀伂\",6,\"伋伌伒\",4,\"伜伝伡伣伨伩伬伭伮伱伳伵伷伹伻伾\",4,\"佄佅佇\",5,\"佒佔佖佡佢佦佨佪佫佭佮佱佲併佷佸佹佺佽侀侁侂侅來侇侊侌侎侐侒侓侕侖侘侙侚侜侞侟価侢\"],[\"8240\",\"侤侫侭侰\",4,\"侶\",8,\"俀俁係俆俇俈俉俋俌俍俒\",4,\"俙俛俠俢俤俥俧俫俬俰俲俴俵俶俷俹俻俼俽俿\",11],[\"8280\",\"個倎倐們倓倕倖倗倛倝倞倠倢倣値倧倫倯\",10,\"倻倽倿偀偁偂偄偅偆偉偊偋偍偐\",4,\"偖偗偘偙偛偝\",7,\"偦\",5,\"偭\",8,\"偸偹偺偼偽傁傂傃傄傆傇傉傊傋傌傎\",20,\"傤傦傪傫傭\",4,\"傳\",6,\"傼\"],[\"8340\",\"傽\",17,\"僐\",5,\"僗僘僙僛\",10,\"僨僩僪僫僯僰僱僲僴僶\",4,\"僼\",9,\"儈\"],[\"8380\",\"儉儊儌\",5,\"儓\",13,\"儢\",28,\"兂兇兊兌兎兏児兒兓兗兘兙兛兝\",4,\"兣兤兦內兩兪兯兲兺兾兿冃冄円冇冊冋冎冏冐冑冓冔冘冚冝冞冟冡冣冦\",4,\"冭冮冴冸冹冺冾冿凁凂凃凅凈凊凍凎凐凒\",5],[\"8440\",\"凘凙凚凜凞凟凢凣凥\",5,\"凬凮凱凲凴凷凾刄刅刉刋刌刏刐刓刔刕刜刞刟刡刢刣別刦刧刪刬刯刱刲刴刵刼刾剄\",5,\"剋剎剏剒剓剕剗剘\"],[\"8480\",\"剙剚剛剝剟剠剢剣剤剦剨剫剬剭剮剰剱剳\",9,\"剾劀劃\",4,\"劉\",6,\"劑劒劔\",6,\"劜劤劥劦劧劮劯劰労\",9,\"勀勁勂勄勅勆勈勊勌勍勎勏勑勓勔動勗務\",5,\"勠勡勢勣勥\",10,\"勱\",7,\"勻勼勽匁匂匃匄匇匉匊匋匌匎\"],[\"8540\",\"匑匒匓匔匘匛匜匞匟匢匤匥匧匨匩匫匬匭匯\",9,\"匼匽區卂卄卆卋卌卍卐協単卙卛卝卥卨卪卬卭卲卶卹卻卼卽卾厀厁厃厇厈厊厎厏\"],[\"8580\",\"厐\",4,\"厖厗厙厛厜厞厠厡厤厧厪厫厬厭厯\",6,\"厷厸厹厺厼厽厾叀參\",4,\"収叏叐叒叓叕叚叜叝叞叡叢叧叴叺叾叿吀吂吅吇吋吔吘吙吚吜吢吤吥吪吰吳吶吷吺吽吿呁呂呄呅呇呉呌呍呎呏呑呚呝\",4,\"呣呥呧呩\",7,\"呴呹呺呾呿咁咃咅咇咈咉咊咍咑咓咗咘咜咞咟咠咡\"],[\"8640\",\"咢咥咮咰咲咵咶咷咹咺咼咾哃哅哊哋哖哘哛哠\",4,\"哫哬哯哰哱哴\",5,\"哻哾唀唂唃唄唅唈唊\",4,\"唒唓唕\",5,\"唜唝唞唟唡唥唦\"],[\"8680\",\"唨唩唫唭唲唴唵唶唸唹唺唻唽啀啂啅啇啈啋\",4,\"啑啒啓啔啗\",4,\"啝啞啟啠啢啣啨啩啫啯\",5,\"啹啺啽啿喅喆喌喍喎喐喒喓喕喖喗喚喛喞喠\",6,\"喨\",8,\"喲喴営喸喺喼喿\",4,\"嗆嗇嗈嗊嗋嗎嗏嗐嗕嗗\",4,\"嗞嗠嗢嗧嗩嗭嗮嗰嗱嗴嗶嗸\",4,\"嗿嘂嘃嘄嘅\"],[\"8740\",\"嘆嘇嘊嘋嘍嘐\",7,\"嘙嘚嘜嘝嘠嘡嘢嘥嘦嘨嘩嘪嘫嘮嘯嘰嘳嘵嘷嘸嘺嘼嘽嘾噀\",11,\"噏\",4,\"噕噖噚噛噝\",4],[\"8780\",\"噣噥噦噧噭噮噯噰噲噳噴噵噷噸噹噺噽\",7,\"嚇\",6,\"嚐嚑嚒嚔\",14,\"嚤\",10,\"嚰\",6,\"嚸嚹嚺嚻嚽\",12,\"囋\",8,\"囕囖囘囙囜団囥\",5,\"囬囮囯囲図囶囷囸囻囼圀圁圂圅圇國\",6],[\"8840\",\"園\",9,\"圝圞圠圡圢圤圥圦圧圫圱圲圴\",4,\"圼圽圿坁坃坄坅坆坈坉坋坒\",4,\"坘坙坢坣坥坧坬坮坰坱坲坴坵坸坹坺坽坾坿垀\"],[\"8880\",\"垁垇垈垉垊垍\",4,\"垔\",6,\"垜垝垞垟垥垨垪垬垯垰垱垳垵垶垷垹\",8,\"埄\",6,\"埌埍埐埑埓埖埗埛埜埞埡埢埣埥\",7,\"埮埰埱埲埳埵埶執埻埼埾埿堁堃堄堅堈堉堊堌堎堏堐堒堓堔堖堗堘堚堛堜堝堟堢堣堥\",4,\"堫\",4,\"報堲堳場堶\",7],[\"8940\",\"堾\",5,\"塅\",6,\"塎塏塐塒塓塕塖塗塙\",4,\"塟\",5,\"塦\",4,\"塭\",16,\"塿墂墄墆墇墈墊墋墌\"],[\"8980\",\"墍\",4,\"墔\",4,\"墛墜墝墠\",7,\"墪\",17,\"墽墾墿壀壂壃壄壆\",10,\"壒壓壔壖\",13,\
"壥\",5,\"壭壯壱売壴壵壷壸壺\",7,\"夃夅夆夈\",4,\"夎夐夑夒<EFBFBD>
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 4442:
|
|||
|
/***/ ((module) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
module.exports = JSON.parse("[[\"0\",\"\\u0000\",127],[\"8141\",\"갂갃갅갆갋\",4,\"갘갞갟갡갢갣갥\",6,\"갮갲갳갴\"],[\"8161\",\"갵갶갷갺갻갽갾갿걁\",9,\"걌걎\",5,\"걕\"],[\"8181\",\"걖걗걙걚걛걝\",18,\"걲걳걵걶걹걻\",4,\"겂겇겈겍겎겏겑겒겓겕\",6,\"겞겢\",5,\"겫겭겮겱\",6,\"겺겾겿곀곂곃곅곆곇곉곊곋곍\",7,\"곖곘\",7,\"곢곣곥곦곩곫곭곮곲곴곷\",4,\"곾곿괁괂괃괅괇\",4,\"괎괐괒괓\"],[\"8241\",\"괔괕괖괗괙괚괛괝괞괟괡\",7,\"괪괫괮\",5],[\"8261\",\"괶괷괹괺괻괽\",6,\"굆굈굊\",5,\"굑굒굓굕굖굗\"],[\"8281\",\"굙\",7,\"굢굤\",7,\"굮굯굱굲굷굸굹굺굾궀궃\",4,\"궊궋궍궎궏궑\",10,\"궞\",5,\"궥\",17,\"궸\",7,\"귂귃귅귆귇귉\",6,\"귒귔\",7,\"귝귞귟귡귢귣귥\",18],[\"8341\",\"귺귻귽귾긂\",5,\"긊긌긎\",5,\"긕\",7],[\"8361\",\"긝\",18,\"긲긳긵긶긹긻긼\"],[\"8381\",\"긽긾긿깂깄깇깈깉깋깏깑깒깓깕깗\",4,\"깞깢깣깤깦깧깪깫깭깮깯깱\",6,\"깺깾\",5,\"꺆\",5,\"꺍\",46,\"꺿껁껂껃껅\",6,\"껎껒\",5,\"껚껛껝\",8],[\"8441\",\"껦껧껩껪껬껮\",5,\"껵껶껷껹껺껻껽\",8],[\"8461\",\"꼆꼉꼊꼋꼌꼎꼏꼑\",18],[\"8481\",\"꼤\",7,\"꼮꼯꼱꼳꼵\",6,\"꼾꽀꽄꽅꽆꽇꽊\",5,\"꽑\",10,\"꽞\",5,\"꽦\",18,\"꽺\",5,\"꾁꾂꾃꾅꾆꾇꾉\",6,\"꾒꾓꾔꾖\",5,\"꾝\",26,\"꾺꾻꾽꾾\"],[\"8541\",\"꾿꿁\",5,\"꿊꿌꿏\",4,\"꿕\",6,\"꿝\",4],[\"8561\",\"꿢\",5,\"꿪\",5,\"꿲꿳꿵꿶꿷꿹\",6,\"뀂뀃\"],[\"8581\",\"뀅\",6,\"뀍뀎뀏뀑뀒뀓뀕\",6,\"뀞\",9,\"뀩\",26,\"끆끇끉끋끍끏끐끑끒끖끘끚끛끜끞\",29,\"끾끿낁낂낃낅\",6,\"낎낐낒\",5,\"낛낝낞낣낤\"],[\"8641\",\"낥낦낧낪낰낲낶낷낹낺낻낽\",6,\"냆냊\",5,\"냒\"],[\"8661\",\"냓냕냖냗냙\",6,\"냡냢냣냤냦\",10],[\"8681\",\"냱\",22,\"넊넍넎넏넑넔넕넖넗넚넞\",4,\"넦넧넩넪넫넭\",6,\"넶넺\",5,\"녂녃녅녆녇녉\",6,\"녒녓녖녗녙녚녛녝녞녟녡\",22,\"녺녻녽녾녿놁놃\",4,\"놊놌놎놏놐놑놕놖놗놙놚놛놝\"],[\"8741\",\"놞\",9,\"놩\",15],[\"8761\",\"놹\",18,\"뇍뇎뇏뇑뇒뇓뇕\"],[\"8781\",\"뇖\",5,\"뇞뇠\",7,\"뇪뇫뇭뇮뇯뇱\",7,\"뇺뇼뇾\",5,\"눆눇눉눊눍\",6,\"눖눘눚\",5,\"눡\",18,\"눵\",6,\"눽\",26,\"뉙뉚뉛뉝뉞뉟뉡\",6,\"뉪\",4],[\"8841\",\"뉯\",4,\"뉶\",5,\"뉽\",6,\"늆늇늈늊\",4],[\"8861\",\"늏늒늓늕늖늗늛\",4,\"늢늤늧늨늩늫늭늮늯늱늲늳늵늶늷\"],[\"8881\",\"늸\",15,\"닊닋닍닎닏닑닓\",4,\"닚닜닞닟닠닡닣닧닩닪닰닱닲닶닼닽닾댂댃댅댆댇댉\",6,\"댒댖\",5,\"댝\",54,\"덗덙덚덝덠덡덢덣\"],[\"8941\",\"덦덨덪덬덭덯덲덳덵덶덷덹\",6,\"뎂뎆\",5,\"뎍\"],[\"8961\",\"뎎뎏뎑뎒뎓뎕\",10,\"뎢\",5,\"뎩뎪뎫뎭\"],[\"8981\",\"뎮\",21,\"돆돇돉돊돍돏돑돒돓돖돘돚돜돞돟돡돢돣돥돦돧돩\",18,\"돽\",18,\"됑\",6,\"됙됚됛됝됞됟됡\",6,\"됪됬\",7,\"됵\",15],[\"8a41\",\"둅\",10,\"둒둓둕둖둗둙\",6,\"둢둤둦\"],[\"8a61\",\"둧\",4,\"둭\",18,\"뒁뒂\"],[\"8a81\",\"뒃\",4,\"뒉\",19,\"뒞\",5,\"뒥뒦뒧뒩뒪뒫뒭\",7,\"뒶뒸뒺\",5,\"듁듂듃듅듆듇듉\",6,\"듑듒듓듔듖\",5,\"듞듟듡듢듥듧\",4,\"듮듰듲\"
,5,\"듹\",26,\"딖딗딙딚딝\"],[\"8b41\",\"딞\",5,\"딦딫\",4,\"딲딳딵딶딷딹\",6,\"땂땆\"],[\"8b61\",\"땇땈땉땊땎땏땑땒땓땕\",6,\"땞땢\",8],[\"8b81\",\"땫\",52,\"떢떣떥떦떧떩떬떭떮떯떲떶\",4,\"떾떿뗁뗂뗃뗅\",6,\"뗎뗒\",5,\"뗙\",18,\"뗭\",18],[\"8c41\",\"똀\",15,\"똒똓똕똖똗똙\",4],[\"8c61\",\"똞\",6,\"똦\",5,\"똭\",6,\"똵\",5],[\"8c81\",\"똻\",12,\"뙉\",26,\"뙥뙦뙧뙩\",50,\"뚞뚟뚡뚢뚣뚥\",5,\"뚭뚮뚯뚰뚲\",16],[\"8d41\",\"뛃\",16,\"뛕\",8],[\"8d61\",\"뛞\",17,\"뛱뛲뛳뛵뛶뛷뛹뛺\"],[\"8d81\",\"뛻\",4,\"뜂뜃뜄뜆\",33,\"뜪뜫뜭뜮뜱\",6,\"뜺뜼\",7,\"띅띆띇띉띊띋띍\",6,\"띖\",9,\"띡띢띣띥띦띧띩\",6,\"띲띴띶\",5,\"띾띿랁랂랃랅\",6,\"랎랓랔랕랚랛랝랞\"],[\"8e41\",\"랟랡\",6,\"랪랮\",5,\"랶랷랹\
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 6453:
|
|||
|
/***/ ((module) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
module.exports = JSON.parse("[[\"0\",\"\\u0000\",127],[\"a140\",\" ,、。.‧;:?!︰…‥﹐﹑﹒·﹔﹕﹖﹗|–︱—︳╴︴﹏()︵︶{}︷︸〔〕︹︺【】︻︼《》︽︾〈〉︿﹀「」﹁﹂『』﹃﹄﹙﹚\"],[\"a1a1\",\"﹛﹜﹝﹞‘’“”〝〞‵′#&*※§〃○●△▲◎☆★◇◆□■▽▼㊣℅¯ ̄_ˍ﹉﹊﹍﹎﹋﹌﹟﹠﹡+-×÷±√<>=≦≧≠∞≒≡﹢\",4,\"~∩∪⊥∠∟⊿㏒㏑∫∮∵∴♀♂⊕⊙↑↓←→↖↗↙↘∥∣/\"],[\"a240\",\"\∕﹨$¥〒¢£%@℃℉﹩﹪﹫㏕㎜㎝㎞㏎㎡㎎㎏㏄°兙兛兞兝兡兣嗧瓩糎▁\",7,\"▏▎▍▌▋▊▉┼┴┬┤├▔─│▕┌┐└┘╭\"],[\"a2a1\",\"╮╰╯═╞╪╡◢◣◥◤╱╲╳0\",9,\"Ⅰ\",9,\"〡\",8,\"十卄卅A\",25,\"a\",21],[\"a340\",\"wxyzΑ\",16,\"Σ\",6,\"α\",16,\"σ\",6,\"ㄅ\",10],[\"a3a1\",\"ㄐ\",25,\"˙ˉˊˇˋ\"],[\"a3e1\",\"€\"],[\"a440\",\"一乙丁七乃九了二人儿入八几刀刁力匕十卜又三下丈上丫丸凡久么也乞于亡兀刃勺千叉口土士夕大女子孑孓寸小尢尸山川工己已巳巾干廾弋弓才\"],[\"a4a1\",\"丑丐不中丰丹之尹予云井互五亢仁什仃仆仇仍今介仄元允內六兮公冗凶分切刈勻勾勿化匹午升卅卞厄友及反壬天夫太夭孔少尤尺屯巴幻廿弔引心戈戶手扎支文斗斤方日曰月木欠止歹毋比毛氏水火爪父爻片牙牛犬王丙\"],[\"a540\",\"世丕且丘主乍乏乎以付仔仕他仗代令仙仞充兄冉冊冬凹出凸刊加功包匆北匝仟半卉卡占卯卮去可古右召叮叩叨叼司叵叫另只史叱台句叭叻四囚外\"],[\"a5a1\",\"央失奴奶孕它尼巨巧左市布平幼弁弘弗必戊打扔扒扑斥旦朮本未末札正母民氐永汁汀氾犯玄玉瓜瓦甘生用甩田由甲申疋白皮皿目矛矢石示禾穴立丞丟乒乓乩亙交亦亥仿伉伙伊伕伍伐休伏仲件任仰仳份企伋光兇兆先全\"],[\"a640\",\"共再冰列刑划刎刖劣匈匡匠印危吉吏同吊吐吁吋各向名合吃后吆吒因回囝圳地在圭圬圯圩夙多夷夸妄奸妃好她如妁字存宇守宅安寺尖屹州帆并年\"],[\"a6a1\",\"式弛忙忖戎戌戍成扣扛托收早旨旬旭曲曳有朽朴朱朵次此死氖汝汗汙江池汐汕污汛汍汎灰牟牝百竹米糸缶羊羽老考而耒耳聿肉肋肌臣自至臼舌舛舟艮色艾虫血行衣西阡串亨位住佇佗佞伴佛何估佐佑伽伺伸佃佔似但佣\"],[\"a740\",\"作你伯低伶余佝佈佚兌克免兵冶冷別判利刪刨劫助努劬匣即卵吝吭吞吾否呎吧呆呃吳呈呂君吩告吹吻吸吮吵吶吠吼呀吱含吟听囪困囤囫坊坑址坍\"],[\"a7a1\",\"均坎圾坐坏圻壯夾妝妒妨妞妣妙妖妍妤妓妊妥孝孜孚孛完宋宏尬局屁尿尾岐岑岔岌巫希序庇床廷弄弟彤形彷役忘忌志忍忱快忸忪戒我抄抗抖技扶抉扭把扼找批扳抒扯折扮投抓抑抆改攻攸旱更束李杏材村杜杖杞杉杆杠\"],[\"a840\",\"杓杗步每求汞沙沁沈沉沅沛汪決沐汰沌汨沖沒汽沃汲汾汴沆汶沍沔沘沂灶灼災灸牢牡牠狄狂玖甬甫男甸皂盯矣私秀禿究系罕肖肓肝肘肛肚育良芒\"],[\"a8a1\",\"芋芍見角言谷豆豕貝赤走足身車辛辰迂迆迅迄巡邑邢邪邦那酉釆里防阮阱阪阬並乖乳事些亞享京佯依侍佳使佬供例來侃佰併侈佩佻侖佾侏侑佺兔兒兕兩具其典冽函刻券刷刺到刮制剁劾劻卒協卓卑卦卷卸卹取叔受味呵\"],[\"a940\",\"咖呸咕咀呻呷咄咒咆呼咐呱呶和咚呢周咋命咎固垃坷坪坩坡坦坤坼夜奉奇奈奄奔妾妻委妹妮姑姆姐姍始姓姊妯妳姒姅孟孤季宗定官宜宙宛尚屈居\"],[\"a9a1\",\"屆岷岡岸岩岫岱岳帘帚帖帕帛帑幸庚店府底庖延弦弧弩往征彿彼忝忠忽念忿怏怔怯怵怖怪怕怡性怩怫怛或戕房戾所承拉拌拄抿拂抹拒招披拓拔拋拈抨抽押拐拙拇拍抵拚抱拘拖拗拆抬拎放斧於旺昔易昌昆昂明昀昏昕昊\"],[\"aa40\",\"昇服朋杭枋枕東果杳杷枇枝林杯杰板枉松析杵枚枓杼杪杲欣武歧歿<EFBFBD>
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 2653:
|
|||
|
/***/ ((module) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
module.exports = JSON.parse("[[\"0\",\"\\u0000\",127],[\"8ea1\",\"。\",62],[\"a1a1\",\" 、。,.・:;?!゛゜´`¨^ ̄_ヽヾゝゞ〃仝々〆〇ー―‐/\~∥|…‥‘’“”()〔〕[]{}〈\",9,\"+-±×÷=≠<>≦≧∞∴♂♀°′″℃¥$¢£%#&*@§☆★○●◎◇\"],[\"a2a1\",\"◆□■△▲▽▼※〒→←↑↓〓\"],[\"a2ba\",\"∈∋⊆⊇⊂⊃∪∩\"],[\"a2ca\",\"∧∨¬⇒⇔∀∃\"],[\"a2dc\",\"∠⊥⌒∂∇≡≒≪≫√∽∝∵∫∬\"],[\"a2f2\",\"ʼn♯♭♪†‡¶\"],[\"a2fe\",\"◯\"],[\"a3b0\",\"0\",9],[\"a3c1\",\"A\",25],[\"a3e1\",\"a\",25],[\"a4a1\",\"ぁ\",82],[\"a5a1\",\"ァ\",85],[\"a6a1\",\"Α\",16,\"Σ\",6],[\"a6c1\",\"α\",16,\"σ\",6],[\"a7a1\",\"А\",5,\"ЁЖ\",25],[\"a7d1\",\"а\",5,\"ёж\",25],[\"a8a1\",\"─│┌┐┘└├┬┤┴┼━┃┏┓┛┗┣┳┫┻╋┠┯┨┷┿┝┰┥┸╂\"],[\"ada1\",\"①\",19,\"Ⅰ\",9],[\"adc0\",\"㍉㌔㌢㍍㌘㌧㌃㌶㍑㍗㌍㌦㌣㌫㍊㌻㎜㎝㎞㎎㎏㏄㎡\"],[\"addf\",\"㍻〝〟№㏍℡㊤\",4,\"㈱㈲㈹㍾㍽㍼≒≡∫∮∑√⊥∠∟⊿∵∩∪\"],[\"b0a1\",\"亜唖娃阿哀愛挨姶逢葵茜穐悪握渥旭葦芦鯵梓圧斡扱宛姐虻飴絢綾鮎或粟袷安庵按暗案闇鞍杏以伊位依偉囲夷委威尉惟意慰易椅為畏異移維緯胃萎衣謂違遺医井亥域育郁磯一壱溢逸稲茨芋鰯允印咽員因姻引飲淫胤蔭\"],[\"b1a1\",\"院陰隠韻吋右宇烏羽迂雨卯鵜窺丑碓臼渦嘘唄欝蔚鰻姥厩浦瓜閏噂云運雲荏餌叡営嬰影映曳栄永泳洩瑛盈穎頴英衛詠鋭液疫益駅悦謁越閲榎厭円園堰奄宴延怨掩援沿演炎焔煙燕猿縁艶苑薗遠鉛鴛塩於汚甥凹央奥往応\"],[\"b2a1\",\"押旺横欧殴王翁襖鴬鴎黄岡沖荻億屋憶臆桶牡乙俺卸恩温穏音下化仮何伽価佳加可嘉夏嫁家寡科暇果架歌河火珂禍禾稼箇花苛茄荷華菓蝦課嘩貨迦過霞蚊俄峨我牙画臥芽蛾賀雅餓駕介会解回塊壊廻快怪悔恢懐戒拐改\"],[\"b3a1\",\"魁晦械海灰界皆絵芥蟹開階貝凱劾外咳害崖慨概涯碍蓋街該鎧骸浬馨蛙垣柿蛎鈎劃嚇各廓拡撹格核殻獲確穫覚角赫較郭閣隔革学岳楽額顎掛笠樫橿梶鰍潟割喝恰括活渇滑葛褐轄且鰹叶椛樺鞄株兜竃蒲釜鎌噛鴨栢茅萱\"],[\"b4a1\",\"粥刈苅瓦乾侃冠寒刊勘勧巻喚堪姦完官寛干幹患感慣憾換敢柑桓棺款歓汗漢澗潅環甘監看竿管簡緩缶翰肝艦莞観諌貫還鑑間閑関陥韓館舘丸含岸巌玩癌眼岩翫贋雁頑顔願企伎危喜器基奇嬉寄岐希幾忌揮机旗既期棋棄\"],[\"b5a1\",\"機帰毅気汽畿祈季稀紀徽規記貴起軌輝飢騎鬼亀偽儀妓宜戯技擬欺犠疑祇義蟻誼議掬菊鞠吉吃喫桔橘詰砧杵黍却客脚虐逆丘久仇休及吸宮弓急救朽求汲泣灸球究窮笈級糾給旧牛去居巨拒拠挙渠虚許距鋸漁禦魚亨享京\"],[\"b6a1\",\"供侠僑兇競共凶協匡卿叫喬境峡強彊怯恐恭挟教橋況狂狭矯胸脅興蕎郷鏡響饗驚仰凝尭暁業局曲極玉桐粁僅勤均巾錦斤欣欽琴禁禽筋緊芹菌衿襟謹近金吟銀九倶句区狗玖矩苦躯駆駈駒具愚虞喰空偶寓遇隅串櫛釧屑屈\"],[\"b7a1\",\"掘窟沓靴轡窪熊隈粂栗繰桑鍬勲君薫訓群軍郡卦袈祁係傾刑兄啓圭珪型契形径恵慶慧憩掲携敬景桂渓畦稽系経継繋罫茎荊蛍計詣警軽頚鶏芸迎鯨劇戟撃激隙桁傑欠決潔穴結血訣月件倹倦健兼券剣喧圏堅嫌建憲懸拳捲\"],[\"b8a1\",\"検権牽犬献研硯絹県肩見謙賢軒遣鍵険顕験鹸元原厳幻弦減源玄現絃舷言諺限乎個古呼固姑孤己庫弧戸故枯湖狐糊袴股胡菰虎誇跨鈷雇顧鼓五互伍午呉吾娯後御悟梧檎瑚碁語誤護醐乞鯉交佼侯候倖光公功効勾厚口向\"],[\"b9a1\",\"后喉坑垢好孔孝宏工巧巷幸広庚康弘恒慌抗拘控攻昂晃更杭校梗構江洪浩港溝甲皇硬稿糠紅紘絞綱耕考肯肱腔膏航荒行衡講貢購郊酵鉱砿鋼閤降項香高鴻剛劫号合壕拷濠豪轟麹克刻
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 9621:
|
|||
|
/***/ ((module) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
module.exports = JSON.parse("{\"uChars\":[128,165,169,178,184,216,226,235,238,244,248,251,253,258,276,284,300,325,329,334,364,463,465,467,469,471,473,475,477,506,594,610,712,716,730,930,938,962,970,1026,1104,1106,8209,8215,8218,8222,8231,8241,8244,8246,8252,8365,8452,8454,8458,8471,8482,8556,8570,8596,8602,8713,8720,8722,8726,8731,8737,8740,8742,8748,8751,8760,8766,8777,8781,8787,8802,8808,8816,8854,8858,8870,8896,8979,9322,9372,9548,9588,9616,9622,9634,9652,9662,9672,9676,9680,9702,9735,9738,9793,9795,11906,11909,11913,11917,11928,11944,11947,11951,11956,11960,11964,11979,12284,12292,12312,12319,12330,12351,12436,12447,12535,12543,12586,12842,12850,12964,13200,13215,13218,13253,13263,13267,13270,13384,13428,13727,13839,13851,14617,14703,14801,14816,14964,15183,15471,15585,16471,16736,17208,17325,17330,17374,17623,17997,18018,18212,18218,18301,18318,18760,18811,18814,18820,18823,18844,18848,18872,19576,19620,19738,19887,40870,59244,59336,59367,59413,59417,59423,59431,59437,59443,59452,59460,59478,59493,63789,63866,63894,63976,63986,64016,64018,64021,64025,64034,64037,64042,65074,65093,65107,65112,65127,65132,65375,65510,65536],\"gbChars\":[0,36,38,45,50,81,89,95,96,100,103,104,105,109,126,133,148,172,175,179,208,306,307,308,309,310,311,312,313,341,428,443,544,545,558,741,742,749,750,805,819,820,7922,7924,7925,7927,7934,7943,7944,7945,7950,8062,8148,8149,8152,8164,8174,8236,8240,8262,8264,8374,8380,8381,8384,8388,8390,8392,8393,8394,8396,8401,8406,8416,8419,8424,8437,8439,8445,8482,8485,8496,8521,8603,8936,8946,9046,9050,9063,9066,9076,9092,9100,9108,9111,9113,9131,9162,9164,9218,9219,11329,11331,11334,11336,11346,11361,11363,11366,11370,11372,11375,11389,11682,11686,11687,11692,11694,11714,11716,11723,11725,11730,11736,11982,11989,12102,12336,12348,12350,12384,12393,12395,12397,12510,12553,12851,12962,12973,13738,13823,13919,13933,14080,14298,14585,14698,15583,15847,16318,16434,16438,16481,16729,17102,17122,17315,17320,17402,17418,17859,17909,17911,17915,17916,17936
,17939,17961,18664,18703,18814,18962,19043,33469,33470,33471,33484,33485,33490,33497,33501,33505,33513,33520,33536,33550,37845,37921,37948,38029,38038,38064,38065,38066,38069,38075,38076,38078,39108,39109,39113,39114,39115,39116,39265,39394,189000]}");
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 7714:
|
|||
|
/***/ ((module) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
module.exports = JSON.parse("[[\"a140\",\"\",62],[\"a180\",\"\",32],[\"a240\",\"\",62],[\"a280\",\"\",32],[\"a2ab\",\"\",5],[\"a2e3\",\"€\"],[\"a2ef\",\"\"],[\"a2fd\",\"\"],[\"a340\",\"\",62],[\"a380\",\"\",31,\" \"],[\"a440\",\"\",62],[\"a480\",\"\",32],[\"a4f4\",\"\",10],[\"a540\",\"\",62],[\"a580\",\"\",32],[\"a5f7\",\"\",7],[\"a640\",\"\",62],[\"a680\",\"\",32],[\"a6b9\",\"\",7],[\"a6d9\",\"\",6],[\"a6ec\",\"\"],[\"a6f3\",\"\"],[\"a6f6\",\"\",8],[\"a740\",\"\",62],[\"a780\",\"\",32],[\"a7c2\",\"\",14],[\"a7f2\",\"\",12],[\"a896\",\"\",10],[\"a8bc\",\"ḿ\"],[\"a8bf\",\"ǹ\"],[\"a8c1\",\"\"],[\"a8ea\",\"\",20],[\"a958\",\"\"],[\"a95b\",\"\"],[\"a95d\",\"\"],[\"a989\",\"〾⿰\",11],[\"a997\",\"\",12],[\"a9f0\",\"\",14],[\"aaa1\",\"\",93],[\"aba1\",\"\",93],[\"aca1\",\"\",93],[\"ada1\",\"\",93],[\"aea1\",\"\",93],[\"afa1\",\"\",93],[\"d7fa\",\"\",4],[\"f8a1\",\"\",93],[\"f9a1\",\"\",93],[\"faa1\",\"\",93],[\"fba1\",\"\",93],[\"fca1\",\"\",93],[\"fda1\",\"\",93],[\"fe50\",\"⺁⺄㑳㑇⺈⺋㖞㘚㘎⺌⺗㥮㤘㧏㧟㩳㧐㭎㱮㳠⺧⺪䁖䅟⺮䌷⺳⺶⺷䎱䎬⺻䏝䓖䙡䙌\"],[\"fe80\",\"䜣䜩䝼䞍⻊䥇䥺䥽䦂䦃䦅䦆䦟䦛䦷䦶䲣䲟䲠䲡䱷䲢䴓\",6,\"䶮\",93],[\"8135f437\",\"\"]]");
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 5667:
|
|||
|
/***/ ((module) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
module.exports = JSON.parse("[[\"0\",\"\\u0000\",128],[\"a1\",\"。\",62],[\"8140\",\" 、。,.・:;?!゛゜´`¨^ ̄_ヽヾゝゞ〃仝々〆〇ー―‐/\~∥|…‥‘’“”()〔〕[]{}〈\",9,\"+-±×\"],[\"8180\",\"÷=≠<>≦≧∞∴♂♀°′″℃¥$¢£%#&*@§☆★○●◎◇◆□■△▲▽▼※〒→←↑↓〓\"],[\"81b8\",\"∈∋⊆⊇⊂⊃∪∩\"],[\"81c8\",\"∧∨¬⇒⇔∀∃\"],[\"81da\",\"∠⊥⌒∂∇≡≒≪≫√∽∝∵∫∬\"],[\"81f0\",\"ʼn♯♭♪†‡¶\"],[\"81fc\",\"◯\"],[\"824f\",\"0\",9],[\"8260\",\"A\",25],[\"8281\",\"a\",25],[\"829f\",\"ぁ\",82],[\"8340\",\"ァ\",62],[\"8380\",\"ム\",22],[\"839f\",\"Α\",16,\"Σ\",6],[\"83bf\",\"α\",16,\"σ\",6],[\"8440\",\"А\",5,\"ЁЖ\",25],[\"8470\",\"а\",5,\"ёж\",7],[\"8480\",\"о\",17],[\"849f\",\"─│┌┐┘└├┬┤┴┼━┃┏┓┛┗┣┳┫┻╋┠┯┨┷┿┝┰┥┸╂\"],[\"8740\",\"①\",19,\"Ⅰ\",9],[\"875f\",\"㍉㌔㌢㍍㌘㌧㌃㌶㍑㍗㌍㌦㌣㌫㍊㌻㎜㎝㎞㎎㎏㏄㎡\"],[\"877e\",\"㍻\"],[\"8780\",\"〝〟№㏍℡㊤\",4,\"㈱㈲㈹㍾㍽㍼≒≡∫∮∑√⊥∠∟⊿∵∩∪\"],[\"889f\",\"亜唖娃阿哀愛挨姶逢葵茜穐悪握渥旭葦芦鯵梓圧斡扱宛姐虻飴絢綾鮎或粟袷安庵按暗案闇鞍杏以伊位依偉囲夷委威尉惟意慰易椅為畏異移維緯胃萎衣謂違遺医井亥域育郁磯一壱溢逸稲茨芋鰯允印咽員因姻引飲淫胤蔭\"],[\"8940\",\"院陰隠韻吋右宇烏羽迂雨卯鵜窺丑碓臼渦嘘唄欝蔚鰻姥厩浦瓜閏噂云運雲荏餌叡営嬰影映曳栄永泳洩瑛盈穎頴英衛詠鋭液疫益駅悦謁越閲榎厭円\"],[\"8980\",\"園堰奄宴延怨掩援沿演炎焔煙燕猿縁艶苑薗遠鉛鴛塩於汚甥凹央奥往応押旺横欧殴王翁襖鴬鴎黄岡沖荻億屋憶臆桶牡乙俺卸恩温穏音下化仮何伽価佳加可嘉夏嫁家寡科暇果架歌河火珂禍禾稼箇花苛茄荷華菓蝦課嘩貨迦過霞蚊俄峨我牙画臥芽蛾賀雅餓駕介会解回塊壊廻快怪悔恢懐戒拐改\"],[\"8a40\",\"魁晦械海灰界皆絵芥蟹開階貝凱劾外咳害崖慨概涯碍蓋街該鎧骸浬馨蛙垣柿蛎鈎劃嚇各廓拡撹格核殻獲確穫覚角赫較郭閣隔革学岳楽額顎掛笠樫\"],[\"8a80\",\"橿梶鰍潟割喝恰括活渇滑葛褐轄且鰹叶椛樺鞄株兜竃蒲釜鎌噛鴨栢茅萱粥刈苅瓦乾侃冠寒刊勘勧巻喚堪姦完官寛干幹患感慣憾換敢柑桓棺款歓汗漢澗潅環甘監看竿管簡緩缶翰肝艦莞観諌貫還鑑間閑関陥韓館舘丸含岸巌玩癌眼岩翫贋雁頑顔願企伎危喜器基奇嬉寄岐希幾忌揮机旗既期棋棄\"],[\"8b40\",\"機帰毅気汽畿祈季稀紀徽規記貴起軌輝飢騎鬼亀偽儀妓宜戯技擬欺犠疑祇義蟻誼議掬菊鞠吉吃喫桔橘詰砧杵黍却客脚虐逆丘久仇休及吸宮弓急救\"],[\"8b80\",\"朽求汲泣灸球究窮笈級糾給旧牛去居巨拒拠挙渠虚許距鋸漁禦魚亨享京供侠僑兇競共凶協匡卿叫喬境峡強彊怯恐恭挟教橋況狂狭矯胸脅興蕎郷鏡響饗驚仰凝尭暁業局曲極玉桐粁僅勤均巾錦斤欣欽琴禁禽筋緊芹菌衿襟謹近金吟銀九倶句区狗玖矩苦躯駆駈駒具愚虞喰空偶寓遇隅串櫛釧屑屈\"],[\"8c40\",\"掘窟沓靴轡窪熊隈粂栗繰桑鍬勲君薫訓群軍郡卦袈祁係傾刑兄啓圭珪型契形径恵慶慧憩掲携敬景桂渓畦稽系経継繋罫茎荊蛍計詣警軽頚鶏芸迎鯨\"],[\"8c80\",\"劇戟撃激隙桁傑欠決潔穴結血訣月件倹倦健兼券剣喧圏堅嫌建憲懸拳捲検権牽犬献研硯絹県肩見謙賢軒遣鍵険顕験鹸元原厳幻弦減源玄現絃舷言諺限乎個古呼固姑孤己庫弧戸故枯湖狐糊袴股胡菰虎誇跨鈷雇顧鼓五互伍午呉吾娯後御悟梧檎瑚碁語誤護醐乞鯉交佼侯候倖光公功効勾厚口向\"],[\"8d40\",\"后喉坑垢好孔孝宏工巧巷幸広庚康弘恒慌抗拘控攻昂晃更杭校梗構江洪浩港溝甲皇硬稿糠紅紘絞綱耕考肯肱腔膏航荒行衡講貢購郊酵鉱砿<EFBFBD>
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 7133:
|
|||
|
/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => {
|
|||
|
|
|||
|
const core = __nccwpck_require__( 6046 );
|
|||
|
const github = __nccwpck_require__( 9971 );
|
|||
|
|
|||
|
/**
 * Reads the pull request number from the workflow event payload.
 *
 * @return {?number} The PR number, or null when the triggering event is not a
 *                   pull request (payload has no `pull_request` entry).
 */
const getPRNumber = () => {
	const { pull_request: pr } = github.context.payload;
	return pr && pr.number ? pr.number : null;
};
|
|||
|
|
|||
|
/**
 * Adds a single label to the given pull request.
 *
 * @param {Object} client   Authenticated Octokit client.
 * @param {string} label    Label name to add.
 * @param {number} prNumber Pull request number (labels use the issues API).
 */
const addLabel = async ( client, label, prNumber ) => {
	const { owner, repo } = github.context.repo;
	await client.issues.addLabels( {
		owner,
		repo,
		issue_number: prNumber,
		labels: [ label ],
	} );
};
|
|||
|
|
|||
|
/**
 * Removes a single label from the given pull request.
 *
 * @param {Object} client   Authenticated Octokit client.
 * @param {string} label    Label name to remove.
 * @param {number} prNumber Pull request number (labels use the issues API).
 */
const removeLabel = async ( client, label, prNumber ) => {
	const { owner, repo } = github.context.repo;
	await client.issues.removeLabel( {
		owner,
		repo,
		issue_number: prNumber,
		name: label,
	} );
};
|
|||
|
|
|||
|
/**
 * Entry point: adds or removes a label on the current pull request.
 *
 * Action inputs:
 * - access_token: token used to authenticate with the GitHub API (required).
 * - label:        the label name to add or remove (required).
 * - action:       either 'add' or 'remove' (required).
 *
 * Exits early (with a log message) when the triggering event is not a PR.
 * Any API or input error marks the action run as failed.
 */
async function run() {
	try {
		const prNumber = getPRNumber();

		if ( ! prNumber ) {
			console.log( 'This action only supports pull requests.' );
			return;
		}

		const token = core.getInput( 'access_token', { required: true } );
		const client = github.getOctokit( token );
		const label = core.getInput( 'label', { required: true } );
		const action = core.getInput( 'action', { required: true } );

		// Fail loudly on a misconfigured workflow instead of silently doing
		// nothing: previously any value other than 'add'/'remove' was ignored.
		if ( action !== 'add' && action !== 'remove' ) {
			core.setFailed( `Unknown action "${ action }"; expected "add" or "remove".` );
			return;
		}

		const { data: pullRequest } = await client.pulls.get( {
			owner: github.context.repo.owner,
			repo: github.context.repo.repo,
			pull_number: prNumber,
		} );

		// Only hit the API when the label state actually needs to change.
		const prHasLabel = pullRequest.labels.some( ( l ) => l.name === label );

		if ( action === 'add' && ! prHasLabel ) {
			await addLabel( client, label, prNumber );
		} else if ( action === 'remove' && prHasLabel ) {
			await removeLabel( client, label, prNumber );
		}
	} catch ( e ) {
		core.error( e );
		core.setFailed( e.message );
	}
}

run();
|
|||
|
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 3202:
|
|||
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|||
|
|
|||
|
"use strict";
|
|||
|
|
|||
|
// TypeScript interop helper: wraps a CommonJS export so it looks like an ES
// module namespace (own properties copied, original module on `default`).
var __importStar = (this && this.__importStar) || function (mod) {
    // Real ES modules pass through untouched.
    if (mod && mod.__esModule) {
        return mod;
    }
    var result = {};
    if (mod != null) {
        for (var key in mod) {
            if (Object.hasOwnProperty.call(mod, key)) {
                result[key] = mod[key];
            }
        }
    }
    result["default"] = mod;
    return result;
};
|
|||
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|||
|
const os = __importStar(__nccwpck_require__(2087));
|
|||
|
const utils_1 = __nccwpck_require__(6102);
|
|||
|
/**
|
|||
|
* Commands
|
|||
|
*
|
|||
|
* Command Format:
|
|||
|
* ::name key=value,key=value::message
|
|||
|
*
|
|||
|
* Examples:
|
|||
|
* ::warning::This is the message
|
|||
|
* ::set-env name=MY_VAR::some value
|
|||
|
*/
|
|||
|
/**
 * Serializes a workflow command and writes it to stdout on its own line
 * (the runner parses `::name key=value::message` lines from the log).
 *
 * @param command    command name, e.g. 'warning'
 * @param properties key/value properties for the command
 * @param message    command message
 */
function issueCommand(command, properties, message) {
    const cmd = new Command(command, properties, message);
    process.stdout.write(`${cmd.toString()}${os.EOL}`);
}
exports.issueCommand = issueCommand;
|
|||
|
/**
 * Emits a property-less workflow command, e.g. `::group::name`.
 * @param name command name
 * @param message optional command message (defaults to empty)
 */
function issue(name, message = '') {
    issueCommand(name, {}, message);
}
exports.issue = issue;
|
|||
|
const CMD_STRING = '::';
/**
 * A single workflow command of the form:
 *   ::name key=value,key=value::message
 */
class Command {
    constructor(command, properties, message) {
        // An empty command name still yields a parseable command string.
        this.command = command || 'missing.command';
        this.properties = properties;
        this.message = message;
    }
    toString() {
        let result = CMD_STRING + this.command;
        if (this.properties && Object.keys(this.properties).length > 0) {
            const pairs = [];
            for (const [key, value] of Object.entries(this.properties)) {
                // Falsy property values are omitted entirely.
                if (value) {
                    pairs.push(`${key}=${escapeProperty(value)}`);
                }
            }
            result += ` ${pairs.join(',')}`;
        }
        return `${result}${CMD_STRING}${escapeData(this.message)}`;
    }
}
|
|||
|
/**
 * Percent-encodes characters in a command message that would otherwise
 * terminate the command line early (% must be escaped first).
 */
function escapeData(s) {
    const substitutions = [
        [/%/g, '%25'],
        [/\r/g, '%0D'],
        [/\n/g, '%0A'],
    ];
    return substitutions.reduce(
        (acc, [pattern, replacement]) => acc.replace(pattern, replacement),
        utils_1.toCommandValue(s)
    );
}
|
|||
|
/**
 * Percent-encodes characters in a command property value; properties
 * additionally escape ':' and ',' because they delimit the key=value list
 * (% must be escaped first).
 */
function escapeProperty(s) {
    const substitutions = [
        [/%/g, '%25'],
        [/\r/g, '%0D'],
        [/\n/g, '%0A'],
        [/:/g, '%3A'],
        [/,/g, '%2C'],
    ];
    return substitutions.reduce(
        (acc, [pattern, replacement]) => acc.replace(pattern, replacement),
        utils_1.toCommandValue(s)
    );
}
|
|||
|
//# sourceMappingURL=command.js.map
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 6046:
|
|||
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|||
|
|
|||
|
"use strict";
|
|||
|
|
|||
|
// TypeScript __awaiter helper: drives a generator function as if it were an
// async function, treating each yielded value as an awaited promise.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Wrap a plain value in the promise implementation P (defaults to Promise).
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        // Resume the generator with the settled value; a synchronous throw rejects.
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        // Re-enter the generator at the yield point with the rejection reason.
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        // Either finish with the generator's return value or await the next yield.
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
|||
|
// TypeScript interop helper: wraps a CommonJS export so it looks like an ES
// module namespace (own properties copied, original module on `default`).
var __importStar = (this && this.__importStar) || function (mod) {
    // Real ES modules pass through untouched.
    if (mod && mod.__esModule) {
        return mod;
    }
    var result = {};
    if (mod != null) {
        for (var key in mod) {
            if (Object.hasOwnProperty.call(mod, key)) {
                result[key] = mod[key];
            }
        }
    }
    result["default"] = mod;
    return result;
};
|
|||
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|||
|
const command_1 = __nccwpck_require__(3202);
|
|||
|
const file_command_1 = __nccwpck_require__(7207);
|
|||
|
const utils_1 = __nccwpck_require__(6102);
|
|||
|
const os = __importStar(__nccwpck_require__(2087));
|
|||
|
const path = __importStar(__nccwpck_require__(5622));
|
|||
|
/**
 * The code to exit an action
 */
var ExitCode;
// TS-style numeric enum: also generates the reverse mapping
// (ExitCode[0] === "Success") on the same object.
(function (ExitCode) {
    /**
     * A code indicating that the action was successful
     */
    ExitCode[ExitCode["Success"] = 0] = "Success";
    /**
     * A code indicating that the action was a failure
     */
    ExitCode[ExitCode["Failure"] = 1] = "Failure";
})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));
|
|||
|
//-----------------------------------------------------------------------
|
|||
|
// Variables
|
|||
|
//-----------------------------------------------------------------------
|
|||
|
/**
 * Sets env variable for this action and future actions in the job.
 * @param name the name of the variable to set
 * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function exportVariable(name, val) {
    const stringValue = utils_1.toCommandValue(val);
    // Make the value visible to the rest of *this* action immediately.
    process.env[name] = stringValue;
    const envFile = process.env['GITHUB_ENV'] || '';
    if (!envFile) {
        // Older runners: fall back to the set-env workflow command.
        command_1.issueCommand('set-env', { name }, stringValue);
        return;
    }
    // Heredoc-style entry so multi-line values survive the GITHUB_ENV file.
    const delimiter = '_GitHubActionsFileCommandDelimeter_';
    const heredoc = [`${name}<<${delimiter}`, stringValue, delimiter].join(os.EOL);
    file_command_1.issueCommand('ENV', heredoc);
}
exports.exportVariable = exportVariable;
|
|||
|
/**
 * Registers a secret which will get masked from logs
 * @param secret value of the secret
 */
function setSecret(secret) {
    // Emits the `add-mask` workflow command; the runner redacts the value
    // from subsequent log output.
    command_1.issueCommand('add-mask', {}, secret);
}
exports.setSecret = setSecret;
|
|||
|
/**
 * Prepends inputPath to the PATH (for this action and future actions).
 * @param inputPath directory to prepend to PATH
 */
function addPath(inputPath) {
    const pathFile = process.env['GITHUB_PATH'] || '';
    if (pathFile) {
        // Newer runners: append to the GITHUB_PATH file command.
        file_command_1.issueCommand('PATH', inputPath);
    }
    else {
        // Older runners: emit the add-path workflow command.
        command_1.issueCommand('add-path', {}, inputPath);
    }
    // Also update PATH for the remainder of this action's own process.
    process.env['PATH'] = inputPath + path.delimiter + process.env['PATH'];
}
exports.addPath = addPath;
|
|||
|
/**
 * Gets the value of an input. The value is also trimmed.
 *
 * @param name name of the input to get
 * @param options optional. See InputOptions.
 * @returns string the trimmed input value ('' when unset)
 * @throws {Error} when options.required is set and the input is empty/unset
 */
function getInput(name, options) {
    // Inputs are surfaced by the runner as INPUT_* environment variables,
    // with spaces replaced by underscores and the name upper-cased.
    const envKey = `INPUT_${name.replace(/ /g, '_').toUpperCase()}`;
    const val = process.env[envKey] || '';
    if (options && options.required && !val) {
        throw new Error(`Input required and not supplied: ${name}`);
    }
    return val.trim();
}
|
|||
|
exports.getInput = getInput;
|
|||
|
/**
 * Sets the value of an output.
 *
 * @param name name of the output to set
 * @param value value to store. Non-string values will be converted to a string via JSON.stringify
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function setOutput(name, value) {
    // Emits `::set-output name=NAME::value` for the runner to pick up.
    command_1.issueCommand('set-output', { name }, value);
}
exports.setOutput = setOutput;
|
|||
|
/**
 * Enables or disables the echoing of commands into stdout for the rest of the step.
 * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.
 * @param enabled true to turn command echoing on, false to turn it off
 */
function setCommandEcho(enabled) {
    command_1.issue('echo', enabled ? 'on' : 'off');
}
exports.setCommandEcho = setCommandEcho;
|
|||
|
//-----------------------------------------------------------------------
|
|||
|
// Results
|
|||
|
//-----------------------------------------------------------------------
|
|||
|
/**
 * Sets the action status to failed.
 * When the action exits it will be with an exit code of 1
 * @param message add error issue message
 */
function setFailed(message) {
    process.exitCode = ExitCode.Failure;
    // Also surface the message as an error annotation in the log.
    error(message);
}
exports.setFailed = setFailed;
|
|||
|
//-----------------------------------------------------------------------
|
|||
|
// Logging Commands
|
|||
|
//-----------------------------------------------------------------------
|
|||
|
/**
 * Gets whether Actions Step Debug is on or not.
 * @returns {boolean} true when the runner has set RUNNER_DEBUG to '1'
 */
function isDebug() {
    const { RUNNER_DEBUG } = process.env;
    return RUNNER_DEBUG === '1';
}
|
|||
|
exports.isDebug = isDebug;
|
|||
|
/**
 * Writes debug message to user log
 * @param message debug message
 */
function debug(message) {
    // Emits a `::debug::` workflow command (see Command Format above).
    command_1.issueCommand('debug', {}, message);
}
exports.debug = debug;
|
|||
|
/**
 * Adds an error issue
 * @param message error issue message. Errors will be converted to string via toString()
 */
function error(message) {
    // Error instances are accepted as a convenience; only their string form
    // is emitted (stack included per Error.prototype.toString semantics? no —
    // toString() yields "Name: message" only — verify if stack is needed).
    command_1.issue('error', message instanceof Error ? message.toString() : message);
}
exports.error = error;
|
|||
|
/**
 * Adds an warning issue
 * @param message warning issue message. Errors will be converted to string via toString()
 */
function warning(message) {
    // Mirrors error(): Error instances are stringified before being emitted.
    command_1.issue('warning', message instanceof Error ? message.toString() : message);
}
exports.warning = warning;
|
|||
|
/**
 * Writes an info message to the log (plain stdout write, no workflow
 * command wrapping).
 * @param message info message
 */
function info(message) {
    process.stdout.write(message + os.EOL);
}
exports.info = info;
|
|||
|
/**
 * Begin an output group.
 *
 * Output until the next `groupEnd` will be foldable in this group
 *
 * @param name The name of the output group
 */
function startGroup(name) {
    // Emits `::group::name`.
    command_1.issue('group', name);
}
exports.startGroup = startGroup;
|
|||
|
/**
 * End an output group.
 */
function endGroup() {
    // Emits `::endgroup::` (message defaults to empty).
    command_1.issue('endgroup');
}
exports.endGroup = endGroup;
|
|||
|
/**
 * Wrap an asynchronous function call in a group.
 *
 * Returns the same type as the function itself.
 *
 * @param name The name of the group
 * @param fn The function to wrap in the group
 */
function group(name, fn) {
    // Compiled TS async function: __awaiter drives the generator.
    return __awaiter(this, void 0, void 0, function* () {
        startGroup(name);
        let result;
        try {
            result = yield fn();
        }
        finally {
            // Always close the group, even when fn throws or rejects.
            endGroup();
        }
        return result;
    });
}
exports.group = group;
|
|||
|
//-----------------------------------------------------------------------
|
|||
|
// Wrapper action state
|
|||
|
//-----------------------------------------------------------------------
|
|||
|
/**
 * Saves state for current action, the state can only be retrieved by this action's post job execution.
 *
 * @param name name of the state to store
 * @param value value to store. Non-string values will be converted to a string via JSON.stringify
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function saveState(name, value) {
    // Emits `::save-state name=NAME::value`; retrieved later via getState().
    command_1.issueCommand('save-state', { name }, value);
}
exports.saveState = saveState;
|
|||
|
/**
 * Gets the value of an state set by this action's main execution.
 *
 * @param name name of the state to get
 * @returns string the stored state value, or '' when unset
 */
function getState(name) {
    // State saved via saveState() is surfaced as a STATE_* env variable.
    const value = process.env['STATE_' + name];
    return value || '';
}
|
|||
|
exports.getState = getState;
|
|||
|
//# sourceMappingURL=core.js.map
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 7207:
|
|||
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|||
|
|
|||
|
"use strict";
|
|||
|
|
|||
|
// For internal use, subject to change.
|
|||
|
// TypeScript interop helper: wraps a CommonJS export so it looks like an ES
// module namespace (own properties copied, original module on `default`).
var __importStar = (this && this.__importStar) || function (mod) {
    // Real ES modules pass through untouched.
    if (mod && mod.__esModule) {
        return mod;
    }
    var result = {};
    if (mod != null) {
        for (var key in mod) {
            if (Object.hasOwnProperty.call(mod, key)) {
                result[key] = mod[key];
            }
        }
    }
    result["default"] = mod;
    return result;
};
|
|||
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|||
|
// We use any as a valid input type
|
|||
|
/* eslint-disable @typescript-eslint/no-explicit-any */
|
|||
|
const fs = __importStar(__nccwpck_require__(5747));
|
|||
|
const os = __importStar(__nccwpck_require__(2087));
|
|||
|
const utils_1 = __nccwpck_require__(6102);
|
|||
|
/**
 * Appends a file-based command to the file named by the GITHUB_<command>
 * environment variable (e.g. GITHUB_ENV, GITHUB_PATH).
 *
 * @param command file command name, e.g. 'ENV'
 * @param message value to append; sanitized via toCommandValue
 * @throws {Error} when the env var is unset or the target file is missing
 */
function issueCommand(command, message) {
    const filePath = process.env[`GITHUB_${command}`];
    if (!filePath) {
        throw new Error(`Unable to find environment variable for file command ${command}`);
    }
    if (!fs.existsSync(filePath)) {
        throw new Error(`Missing file at path: ${filePath}`);
    }
    const line = `${utils_1.toCommandValue(message)}${os.EOL}`;
    fs.appendFileSync(filePath, line, {
        encoding: 'utf8'
    });
}
exports.issueCommand = issueCommand;
|
|||
|
//# sourceMappingURL=file-command.js.map
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 6102:
/***/ ((__unused_webpack_module, exports) => {

"use strict";

// Bundled @actions/core utils module.
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|||
|
/**
 * Sanitizes an input into a string so it can be passed into issueCommand safely
 * @param input input to sanitize into a string
 */
function toCommandValue(input) {
    // null/undefined become '' rather than the literal "null"/"undefined".
    const isNil = input === null || input === undefined;
    if (isNil) {
        return '';
    }
    // Strings (including boxed String objects) pass through untouched;
    // everything else is JSON-serialized.
    const isStringLike = typeof input === 'string' || input instanceof String;
    return isStringLike ? input : JSON.stringify(input);
}
|
|||
|
// Consumed by the file-command module when writing GITHUB_* files.
exports.toCommandValue = toCommandValue;
//# sourceMappingURL=utils.js.map

/***/ }),
|
|||
|
|
|||
|
/***/ 8648:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

"use strict";

Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.Context = void 0;
const fs_1 = __nccwpck_require__(5747);
const os_1 = __nccwpck_require__(2087);
// Snapshot of the GitHub Actions runtime context, hydrated from GITHUB_*
// environment variables and the webhook event payload file.
class Context {
    /**
     * Hydrate the context from the environment
     */
    constructor() {
        this.payload = {};
        if (process.env.GITHUB_EVENT_PATH) {
            if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) {
                this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));
            }
            else {
                // Missing payload file is non-fatal: warn on stdout and continue
                // with an empty payload.
                const path = process.env.GITHUB_EVENT_PATH;
                process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`);
            }
        }
        this.eventName = process.env.GITHUB_EVENT_NAME;
        this.sha = process.env.GITHUB_SHA;
        this.ref = process.env.GITHUB_REF;
        this.workflow = process.env.GITHUB_WORKFLOW;
        this.action = process.env.GITHUB_ACTION;
        this.actor = process.env.GITHUB_ACTOR;
        this.job = process.env.GITHUB_JOB;
        // NaN when the env vars are unset (e.g. outside a workflow run).
        this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10);
        this.runId = parseInt(process.env.GITHUB_RUN_ID, 10);
    }
    // {owner, repo, number}: repo coordinates plus the number taken from the
    // payload's issue, pull_request, or the payload itself, in that order.
    get issue() {
        const payload = this.payload;
        return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number });
    }
    // {owner, repo} parsed from GITHUB_REPOSITORY, falling back to the
    // event payload's repository object; throws when neither is available.
    get repo() {
        if (process.env.GITHUB_REPOSITORY) {
            const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');
            return { owner, repo };
        }
        if (this.payload.repository) {
            return {
                owner: this.payload.repository.owner.login,
                repo: this.payload.repository.name
            };
        }
        throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'");
    }
}
exports.Context = Context;
//# sourceMappingURL=context.js.map

/***/ }),
|
|||
|
|
|||
|
/***/ 9971:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

"use strict";

// TypeScript-emitted interop helpers (re-export bindings, default tagging,
// and namespace-object construction for `import * as`).
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getOctokit = exports.context = void 0;
const Context = __importStar(__nccwpck_require__(8648));
const utils_1 = __nccwpck_require__(7506);
// Shared, eagerly-hydrated Actions context for consumers of this module.
exports.context = new Context.Context();
/**
 * Returns a hydrated octokit ready to use for GitHub Actions
 *
 * @param token the repo PAT or GITHUB_TOKEN
 * @param options other options to set
 */
function getOctokit(token, options) {
    return new utils_1.GitHub(utils_1.getOctokitOptions(token, options));
}
exports.getOctokit = getOctokit;
//# sourceMappingURL=github.js.map

/***/ }),
|
|||
|
|
|||
|
/***/ 5988:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

"use strict";

// TypeScript-emitted interop helpers (same shape as the other bundled copies).
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0;
const httpClient = __importStar(__nccwpck_require__(6952));
|
|||
|
/**
 * Builds the Octokit `auth` value from either an explicit token or a
 * caller-supplied `options.auth`. Exactly one of the two must be provided.
 *
 * @param token the repo PAT or GITHUB_TOKEN (may be empty when options.auth is set)
 * @param options octokit options, possibly carrying an `auth` field
 * @returns options.auth when it is a string, otherwise `token <token>`
 * @throws when neither or both of token and options.auth are supplied
 */
function getAuthString(token, options) {
    const hasToken = Boolean(token);
    const hasAuth = Boolean(options.auth);
    if (!hasToken && !hasAuth) {
        throw new Error('Parameter token or opts.auth is required');
    }
    if (hasToken && hasAuth) {
        throw new Error('Parameters token and opts.auth may not both be specified');
    }
    return typeof options.auth === 'string' ? options.auth : `token ${token}`;
}
|
|||
|
exports.getAuthString = getAuthString;
// Resolves the proxy-aware http(s) agent to use for destinationUrl,
// delegating to a throwaway HttpClient's agent selection.
function getProxyAgent(destinationUrl) {
    const hc = new httpClient.HttpClient();
    return hc.getAgent(destinationUrl);
}
exports.getProxyAgent = getProxyAgent;
|
|||
|
/**
 * GitHub API base URL: GITHUB_API_URL when set and non-empty,
 * otherwise the public https://api.github.com endpoint.
 *
 * @returns the API base URL string
 */
function getApiBaseUrl() {
    const fromEnv = process.env['GITHUB_API_URL'];
    return fromEnv ? fromEnv : 'https://api.github.com';
}
|
|||
|
// Used below (module 7506) to build the default Octokit baseUrl.
exports.getApiBaseUrl = getApiBaseUrl;
//# sourceMappingURL=utils.js.map

/***/ }),
|
|||
|
|
|||
|
/***/ 7506:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

"use strict";

// TypeScript-emitted interop helpers (same shape as the other bundled copies).
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getOctokitOptions = exports.GitHub = exports.context = void 0;
const Context = __importStar(__nccwpck_require__(8648));
const Utils = __importStar(__nccwpck_require__(5988));
// octokit + plugins
const core_1 = __nccwpck_require__(8009);
const plugin_rest_endpoint_methods_1 = __nccwpck_require__(4283);
const plugin_paginate_rest_1 = __nccwpck_require__(8619);
exports.context = new Context.Context();
// Octokit defaults: API base URL (honors GITHUB_API_URL) plus a
// proxy-aware agent resolved for that base URL.
const baseUrl = Utils.getApiBaseUrl();
const defaults = {
    baseUrl,
    request: {
        agent: Utils.getProxyAgent(baseUrl)
    }
};
// Octokit constructor preloaded with the REST-endpoint and pagination plugins.
exports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(defaults);
/**
 * Convenience function to correctly format Octokit Options to pass into the constructor.
 *
 * @param token the repo PAT or GITHUB_TOKEN
 * @param options other options to set
 */
function getOctokitOptions(token, options) {
    const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller
    // Auth
    const auth = Utils.getAuthString(token, opts);
    if (auth) {
        opts.auth = auth;
    }
    return opts;
}
exports.getOctokitOptions = getOctokitOptions;
//# sourceMappingURL=utils.js.map

/***/ }),
|
|||
|
|
|||
|
/***/ 6952:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

"use strict";

// Bundled @actions/http-client: a small HTTP(S) client with redirect
// following, retry-on-transient-5xx, auth-handler hooks, proxy/tunnel
// support and JSON convenience helpers.
Object.defineProperty(exports, "__esModule", ({ value: true }));
const http = __nccwpck_require__(8605);
const https = __nccwpck_require__(7211);
const pm = __nccwpck_require__(1081);
// Lazily required only when a proxy tunnel is actually needed (see _getAgent).
let tunnel;
var HttpCodes;
(function (HttpCodes) {
    HttpCodes[HttpCodes["OK"] = 200] = "OK";
    HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices";
    HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently";
    HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved";
    HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther";
    HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified";
    HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy";
    HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy";
    HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect";
    HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect";
    HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest";
    HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized";
    HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired";
    HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden";
    HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound";
    HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed";
    HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable";
    HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired";
    HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout";
    HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict";
    HttpCodes[HttpCodes["Gone"] = 410] = "Gone";
    HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests";
    HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError";
    HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented";
    HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
    HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
    HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));
var Headers;
(function (Headers) {
    Headers["Accept"] = "accept";
    Headers["ContentType"] = "content-type";
})(Headers = exports.Headers || (exports.Headers = {}));
var MediaTypes;
(function (MediaTypes) {
    MediaTypes["ApplicationJson"] = "application/json";
})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));
/**
 * Returns the proxy URL, depending upon the supplied url and proxy environment variables.
 * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
 */
function getProxyUrl(serverUrl) {
    let proxyUrl = pm.getProxyUrl(new URL(serverUrl));
    return proxyUrl ? proxyUrl.href : '';
}
exports.getProxyUrl = getProxyUrl;
// Status codes for which the client follows the Location header.
const HttpRedirectCodes = [
    HttpCodes.MovedPermanently,
    HttpCodes.ResourceMoved,
    HttpCodes.SeeOther,
    HttpCodes.TemporaryRedirect,
    HttpCodes.PermanentRedirect
];
// Transient server errors that are worth retrying (when retries are enabled).
const HttpResponseRetryCodes = [
    HttpCodes.BadGateway,
    HttpCodes.ServiceUnavailable,
    HttpCodes.GatewayTimeout
];
// Only these verbs are ever retried; other verbs may not be idempotent.
const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
const ExponentialBackoffCeiling = 10;
const ExponentialBackoffTimeSlice = 5;
// Error carrying the HTTP status code of a failed (non-2xx) response.
class HttpClientError extends Error {
    constructor(message, statusCode) {
        super(message);
        this.name = 'HttpClientError';
        this.statusCode = statusCode;
        Object.setPrototypeOf(this, HttpClientError.prototype);
    }
}
exports.HttpClientError = HttpClientError;
// Thin wrapper over the response message stream with a body buffering helper.
class HttpClientResponse {
    constructor(message) {
        this.message = message;
    }
    // Accumulates all 'data' chunks and resolves the full body as a string
    // once the message emits 'end'.
    readBody() {
        return new Promise(async (resolve, reject) => {
            let output = Buffer.alloc(0);
            this.message.on('data', (chunk) => {
                output = Buffer.concat([output, chunk]);
            });
            this.message.on('end', () => {
                resolve(output.toString());
            });
        });
    }
}
exports.HttpClientResponse = HttpClientResponse;
// True when the given URL string uses the https: scheme.
function isHttps(requestUrl) {
    let parsedUrl = new URL(requestUrl);
    return parsedUrl.protocol === 'https:';
}
exports.isHttps = isHttps;
class HttpClient {
    /**
     * @param userAgent optional user-agent header value
     * @param handlers optional auth handlers (canHandleAuthentication /
     *                 handleAuthentication / prepareRequest)
     * @param requestOptions optional settings overriding the defaults below
     */
    constructor(userAgent, handlers, requestOptions) {
        // Defaults; each may be overridden via requestOptions below.
        this._ignoreSslError = false;
        this._allowRedirects = true;
        this._allowRedirectDowngrade = false;
        this._maxRedirects = 50;
        this._allowRetries = false;
        this._maxRetries = 1;
        this._keepAlive = false;
        this._disposed = false;
        this.userAgent = userAgent;
        this.handlers = handlers || [];
        this.requestOptions = requestOptions;
        if (requestOptions) {
            if (requestOptions.ignoreSslError != null) {
                this._ignoreSslError = requestOptions.ignoreSslError;
            }
            this._socketTimeout = requestOptions.socketTimeout;
            if (requestOptions.allowRedirects != null) {
                this._allowRedirects = requestOptions.allowRedirects;
            }
            if (requestOptions.allowRedirectDowngrade != null) {
                this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;
            }
            if (requestOptions.maxRedirects != null) {
                this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);
            }
            if (requestOptions.keepAlive != null) {
                this._keepAlive = requestOptions.keepAlive;
            }
            if (requestOptions.allowRetries != null) {
                this._allowRetries = requestOptions.allowRetries;
            }
            if (requestOptions.maxRetries != null) {
                this._maxRetries = requestOptions.maxRetries;
            }
        }
    }
    // Verb-specific conveniences; all delegate to request().
    options(requestUrl, additionalHeaders) {
        return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});
    }
    get(requestUrl, additionalHeaders) {
        return this.request('GET', requestUrl, null, additionalHeaders || {});
    }
    del(requestUrl, additionalHeaders) {
        return this.request('DELETE', requestUrl, null, additionalHeaders || {});
    }
    post(requestUrl, data, additionalHeaders) {
        return this.request('POST', requestUrl, data, additionalHeaders || {});
    }
    patch(requestUrl, data, additionalHeaders) {
        return this.request('PATCH', requestUrl, data, additionalHeaders || {});
    }
    put(requestUrl, data, additionalHeaders) {
        return this.request('PUT', requestUrl, data, additionalHeaders || {});
    }
    head(requestUrl, additionalHeaders) {
        return this.request('HEAD', requestUrl, null, additionalHeaders || {});
    }
    // Sends a stream body with the given verb; data is piped into the request.
    sendStream(verb, requestUrl, stream, additionalHeaders) {
        return this.request(verb, requestUrl, stream, additionalHeaders);
    }
    /**
     * Gets a typed object from an endpoint
     * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise
     */
    async getJson(requestUrl, additionalHeaders = {}) {
        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
        let res = await this.get(requestUrl, additionalHeaders);
        return this._processResponse(res, this.requestOptions);
    }
    // JSON-in/JSON-out POST; defaults Accept and Content-Type to application/json.
    async postJson(requestUrl, obj, additionalHeaders = {}) {
        let data = JSON.stringify(obj, null, 2);
        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
        additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
        let res = await this.post(requestUrl, data, additionalHeaders);
        return this._processResponse(res, this.requestOptions);
    }
    // JSON-in/JSON-out PUT; defaults Accept and Content-Type to application/json.
    async putJson(requestUrl, obj, additionalHeaders = {}) {
        let data = JSON.stringify(obj, null, 2);
        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
        additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
        let res = await this.put(requestUrl, data, additionalHeaders);
        return this._processResponse(res, this.requestOptions);
    }
    // JSON-in/JSON-out PATCH; defaults Accept and Content-Type to application/json.
    async patchJson(requestUrl, obj, additionalHeaders = {}) {
        let data = JSON.stringify(obj, null, 2);
        additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
        additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
        let res = await this.patch(requestUrl, data, additionalHeaders);
        return this._processResponse(res, this.requestOptions);
    }
    /**
     * Makes a raw http request.
     * All other methods such as get, post, patch, and request ultimately call this.
     * Prefer get, del, post and patch
     */
    async request(verb, requestUrl, data, headers) {
        if (this._disposed) {
            throw new Error('Client has already been disposed.');
        }
        let parsedUrl = new URL(requestUrl);
        let info = this._prepareRequest(verb, parsedUrl, headers);
        // Only perform retries on reads since writes may not be idempotent.
        let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1
            ? this._maxRetries + 1
            : 1;
        let numTries = 0;
        let response;
        while (numTries < maxTries) {
            response = await this.requestRaw(info, data);
            // Check if it's an authentication challenge
            if (response &&
                response.message &&
                response.message.statusCode === HttpCodes.Unauthorized) {
                let authenticationHandler;
                for (let i = 0; i < this.handlers.length; i++) {
                    if (this.handlers[i].canHandleAuthentication(response)) {
                        authenticationHandler = this.handlers[i];
                        break;
                    }
                }
                if (authenticationHandler) {
                    return authenticationHandler.handleAuthentication(this, info, data);
                }
                else {
                    // We have received an unauthorized response but have no handlers to handle it.
                    // Let the response return to the caller.
                    return response;
                }
            }
            // Follow redirects (up to _maxRedirects) when enabled.
            let redirectsRemaining = this._maxRedirects;
            while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 &&
                this._allowRedirects &&
                redirectsRemaining > 0) {
                const redirectUrl = response.message.headers['location'];
                if (!redirectUrl) {
                    // if there's no location to redirect to, we won't
                    break;
                }
                let parsedRedirectUrl = new URL(redirectUrl);
                if (parsedUrl.protocol == 'https:' &&
                    parsedUrl.protocol != parsedRedirectUrl.protocol &&
                    !this._allowRedirectDowngrade) {
                    throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');
                }
                // we need to finish reading the response before reassigning response
                // which will leak the open socket.
                await response.readBody();
                // strip authorization header if redirected to a different hostname
                if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {
                    for (let header in headers) {
                        // header names are case insensitive
                        if (header.toLowerCase() === 'authorization') {
                            delete headers[header];
                        }
                    }
                }
                // let's make the request with the new redirectUrl
                info = this._prepareRequest(verb, parsedRedirectUrl, headers);
                response = await this.requestRaw(info, data);
                redirectsRemaining--;
            }
            if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) {
                // If not a retry code, return immediately instead of retrying
                return response;
            }
            numTries += 1;
            if (numTries < maxTries) {
                // Drain the body (to free the socket) before backing off and retrying.
                await response.readBody();
                await this._performExponentialBackoff(numTries);
            }
        }
        return response;
    }
    /**
     * Needs to be called if keepAlive is set to true in request options.
     */
    dispose() {
        if (this._agent) {
            this._agent.destroy();
        }
        this._disposed = true;
    }
    /**
     * Raw request.
     * @param info
     * @param data
     */
    requestRaw(info, data) {
        return new Promise((resolve, reject) => {
            let callbackForResult = function (err, res) {
                if (err) {
                    reject(err);
                }
                resolve(res);
            };
            this.requestRawWithCallback(info, data, callbackForResult);
        });
    }
    /**
     * Raw request with callback.
     * @param info
     * @param data
     * @param onResult
     */
    requestRawWithCallback(info, data, onResult) {
        let socket;
        if (typeof data === 'string') {
            info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');
        }
        // Guard so onResult fires at most once (error and timeout can both occur).
        let callbackCalled = false;
        let handleResult = (err, res) => {
            if (!callbackCalled) {
                callbackCalled = true;
                onResult(err, res);
            }
        };
        let req = info.httpModule.request(info.options, (msg) => {
            let res = new HttpClientResponse(msg);
            handleResult(null, res);
        });
        req.on('socket', sock => {
            socket = sock;
        });
        // If we ever get disconnected, we want the socket to timeout eventually
        req.setTimeout(this._socketTimeout || 3 * 60000, () => {
            if (socket) {
                socket.end();
            }
            handleResult(new Error('Request timeout: ' + info.options.path), null);
        });
        req.on('error', function (err) {
            // err has statusCode property
            // res should have headers
            handleResult(err, null);
        });
        if (data && typeof data === 'string') {
            req.write(data, 'utf8');
        }
        if (data && typeof data !== 'string') {
            data.on('close', function () {
                req.end();
            });
            data.pipe(req);
        }
        else {
            req.end();
        }
    }
    /**
     * Gets an http agent. This function is useful when you need an http agent that handles
     * routing through a proxy server - depending upon the url and proxy environment variables.
     * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
     */
    getAgent(serverUrl) {
        let parsedUrl = new URL(serverUrl);
        return this._getAgent(parsedUrl);
    }
    // Builds the per-request info (http/https module, host, port, path,
    // merged headers, agent) and lets handlers adjust the options.
    _prepareRequest(method, requestUrl, headers) {
        const info = {};
        info.parsedUrl = requestUrl;
        const usingSsl = info.parsedUrl.protocol === 'https:';
        info.httpModule = usingSsl ? https : http;
        const defaultPort = usingSsl ? 443 : 80;
        info.options = {};
        info.options.host = info.parsedUrl.hostname;
        info.options.port = info.parsedUrl.port
            ? parseInt(info.parsedUrl.port)
            : defaultPort;
        info.options.path =
            (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
        info.options.method = method;
        info.options.headers = this._mergeHeaders(headers);
        if (this.userAgent != null) {
            info.options.headers['user-agent'] = this.userAgent;
        }
        info.options.agent = this._getAgent(info.parsedUrl);
        // gives handlers an opportunity to participate
        if (this.handlers) {
            this.handlers.forEach(handler => {
                handler.prepareRequest(info.options);
            });
        }
        return info;
    }
    // Merges client-level headers with per-request headers (request wins);
    // keys are lowercased first so the merge is case-insensitive.
    _mergeHeaders(headers) {
        const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
        if (this.requestOptions && this.requestOptions.headers) {
            return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers));
        }
        return lowercaseKeys(headers || {});
    }
    // Resolution order: per-request header, then client-level header, then _default.
    _getExistingOrDefaultHeader(additionalHeaders, header, _default) {
        const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
        let clientHeader;
        if (this.requestOptions && this.requestOptions.headers) {
            clientHeader = lowercaseKeys(this.requestOptions.headers)[header];
        }
        return additionalHeaders[header] || clientHeader || _default;
    }
    // Picks (and, when keepAlive, caches) the agent for a URL: a tunnel agent
    // when a proxy applies, a keep-alive agent, or the appropriate global agent.
    _getAgent(parsedUrl) {
        let agent;
        let proxyUrl = pm.getProxyUrl(parsedUrl);
        let useProxy = proxyUrl && proxyUrl.hostname;
        if (this._keepAlive && useProxy) {
            agent = this._proxyAgent;
        }
        if (this._keepAlive && !useProxy) {
            agent = this._agent;
        }
        // if agent is already assigned use that agent.
        if (!!agent) {
            return agent;
        }
        const usingSsl = parsedUrl.protocol === 'https:';
        let maxSockets = 100;
        if (!!this.requestOptions) {
            maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;
        }
        if (useProxy) {
            // If using proxy, need tunnel
            if (!tunnel) {
                tunnel = __nccwpck_require__(7752);
            }
            const agentOptions = {
                maxSockets: maxSockets,
                keepAlive: this._keepAlive,
                proxy: {
                    proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`,
                    host: proxyUrl.hostname,
                    port: proxyUrl.port
                }
            };
            let tunnelAgent;
            const overHttps = proxyUrl.protocol === 'https:';
            if (usingSsl) {
                tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;
            }
            else {
                tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;
            }
            agent = tunnelAgent(agentOptions);
            this._proxyAgent = agent;
        }
        // if reusing agent across request and tunneling agent isn't assigned create a new agent
        if (this._keepAlive && !agent) {
            const options = { keepAlive: this._keepAlive, maxSockets: maxSockets };
            agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
            this._agent = agent;
        }
        // if not using private agent and tunnel agent isn't setup then use global agent
        if (!agent) {
            agent = usingSsl ? https.globalAgent : http.globalAgent;
        }
        if (usingSsl && this._ignoreSslError) {
            // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
            // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
            // we have to cast it to any and change it directly
            agent.options = Object.assign(agent.options || {}, {
                rejectUnauthorized: false
            });
        }
        return agent;
    }
    // Sleeps ExponentialBackoffTimeSlice * 2^retryNumber ms, exponent capped
    // at ExponentialBackoffCeiling.
    _performExponentialBackoff(retryNumber) {
        retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);
        const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);
        return new Promise(resolve => setTimeout(() => resolve(), ms));
    }
    // JSON.parse reviver that converts parseable date strings into Date objects.
    static dateTimeDeserializer(key, value) {
        if (typeof value === 'string') {
            let a = new Date(value);
            if (!isNaN(a.valueOf())) {
                return a;
            }
        }
        return value;
    }
    // Reads and JSON-parses the body, resolving {statusCode, result, headers};
    // rejects with HttpClientError for status > 299 (404 resolves, result null).
    async _processResponse(res, options) {
        return new Promise(async (resolve, reject) => {
            const statusCode = res.message.statusCode;
            const response = {
                statusCode: statusCode,
                result: null,
                headers: {}
            };
            // not found leads to null obj returned
            if (statusCode == HttpCodes.NotFound) {
                resolve(response);
            }
            let obj;
            let contents;
            // get the result from the body
            try {
                contents = await res.readBody();
                if (contents && contents.length > 0) {
                    if (options && options.deserializeDates) {
                        obj = JSON.parse(contents, HttpClient.dateTimeDeserializer);
                    }
                    else {
                        obj = JSON.parse(contents);
                    }
                    response.result = obj;
                }
                response.headers = res.message.headers;
            }
            catch (err) {
                // Invalid resource (contents not json); leaving result obj null
            }
            // note that 3xx redirects are handled by the http layer.
            if (statusCode > 299) {
                let msg;
                // if exception/error in body, attempt to get better error
                if (obj && obj.message) {
                    msg = obj.message;
                }
                else if (contents && contents.length > 0) {
                    // it may be the case that the exception is in the body message as string
                    msg = contents;
                }
                else {
                    msg = 'Failed request: (' + statusCode + ')';
                }
                let err = new HttpClientError(msg, statusCode);
                err.result = response.result;
                reject(err);
            }
            else {
                resolve(response);
            }
        });
    }
}
exports.HttpClient = HttpClient;

/***/ }),
|
|||
|
|
|||
|
/***/ 1081:
/***/ ((__unused_webpack_module, exports) => {

"use strict";

// Helpers for resolving HTTP(S) proxy configuration from environment variables.
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|||
|
/**
 * Resolves the proxy URL to use for reqUrl from the conventional
 * environment variables (https_proxy/HTTPS_PROXY for https requests,
 * http_proxy/HTTP_PROXY otherwise), honoring no_proxy via checkBypass.
 *
 * @param reqUrl parsed URL of the outgoing request
 * @returns a URL instance for the proxy, or undefined when no proxy applies
 */
function getProxyUrl(reqUrl) {
    // Hosts excluded via no_proxy never go through a proxy.
    if (checkBypass(reqUrl)) {
        return undefined;
    }
    const usingSsl = reqUrl.protocol === 'https:';
    const proxyVar = usingSsl
        ? process.env['https_proxy'] || process.env['HTTPS_PROXY']
        : process.env['http_proxy'] || process.env['HTTP_PROXY'];
    return proxyVar ? new URL(proxyVar) : undefined;
}
|
|||
|
exports.getProxyUrl = getProxyUrl;
|
|||
|
function checkBypass(reqUrl) {
    // A URL without a hostname can never match a NO_PROXY entry.
    if (!reqUrl.hostname) {
        return false;
    }
    const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
    if (!noProxy) {
        return false;
    }
    // Determine the effective port so "host:port" entries can match too.
    let reqPort;
    if (reqUrl.port) {
        reqPort = Number(reqUrl.port);
    }
    else if (reqUrl.protocol === 'http:') {
        reqPort = 80;
    }
    else if (reqUrl.protocol === 'https:') {
        reqPort = 443;
    }
    // Candidate forms of the request host: bare hostname, plus
    // "hostname:port" when a port could be determined. Matching is
    // case-insensitive, so everything is upper-cased.
    const upperReqHosts = [reqUrl.hostname.toUpperCase()];
    if (typeof reqPort === 'number') {
        upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);
    }
    // NO_PROXY is a comma-separated list; blank entries are ignored.
    const noProxyEntries = noProxy
        .split(',')
        .map(entry => entry.trim().toUpperCase())
        .filter(entry => entry.length > 0);
    return noProxyEntries.some(entry => upperReqHosts.some(host => host === entry));
}
|
|||
|
exports.checkBypass = checkBypass;
|
|||
|
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 9215:
|
|||
|
/***/ ((__unused_webpack_module, exports) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
|
|||
|
|
|||
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|||
|
|
|||
|
async function auth(token) {
    // Classify the token: three dot-separated segments => a JWT used by
    // GitHub Apps; a "v<digit>." prefix => an installation token;
    // anything else is treated as an OAuth token.
    const isJsonWebToken = token.split(/\./).length === 3;
    const isInstallationToken = /^v\d+\./.test(token);
    let tokenType;
    if (isJsonWebToken) {
        tokenType = "app";
    } else if (isInstallationToken) {
        tokenType = "installation";
    } else {
        tokenType = "oauth";
    }
    return {
        type: "token",
        token,
        tokenType
    };
}
|
|||
|
|
|||
|
/**
 * Prefix token for usage in the Authorization header.
 *
 * Three dot-separated segments means a JSON Web Token, which uses the
 * "bearer" scheme; everything else uses the "token" scheme.
 *
 * @param token OAuth token or JSON Web Token
 */
function withAuthorizationPrefix(token) {
    const isJsonWebToken = token.split(/\./).length === 3;
    return isJsonWebToken ? `bearer ${token}` : `token ${token}`;
}
|
|||
|
|
|||
|
async function hook(token, request, route, parameters) {
    // Resolve the route and parameters into a concrete endpoint first.
    const endpoint = request.endpoint.merge(route, parameters);
    // Attach the correctly prefixed Authorization header, then dispatch
    // through the provided request function.
    const authorization = withAuthorizationPrefix(token);
    endpoint.headers.authorization = authorization;
    return request(endpoint);
}
|
|||
|
|
|||
|
const createTokenAuth = function createTokenAuth(token) {
    // Validate the input early with the library's canonical messages.
    if (!token) {
        throw new Error("[@octokit/auth-token] No token passed to createTokenAuth");
    }
    if (typeof token !== "string") {
        throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string");
    }
    // Strip an existing "token " / "bearer " scheme prefix; the correct
    // prefix is re-applied on every request by the hook.
    const normalizedToken = token.replace(/^(token|bearer) +/i, "");
    const authenticate = auth.bind(null, normalizedToken);
    authenticate.hook = hook.bind(null, normalizedToken);
    return authenticate;
};
|
|||
|
|
|||
|
exports.createTokenAuth = createTokenAuth;
|
|||
|
//# sourceMappingURL=index.js.map
|
|||
|
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 8009:
|
|||
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
|
|||
|
|
|||
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|||
|
|
|||
|
var universalUserAgent = __nccwpck_require__(2102);
|
|||
|
var beforeAfterHook = __nccwpck_require__(526);
|
|||
|
var request = __nccwpck_require__(9826);
|
|||
|
var graphql = __nccwpck_require__(4625);
|
|||
|
var authToken = __nccwpck_require__(9215);
|
|||
|
|
|||
|
function _objectWithoutPropertiesLoose(source, excluded) {
    // Shallow-copy the own enumerable string-keyed properties of
    // `source`, skipping any key listed in `excluded`. A nullish
    // source yields an empty object.
    if (source == null) return {};
    const target = {};
    for (const key of Object.keys(source)) {
        if (excluded.indexOf(key) >= 0) continue;
        target[key] = source[key];
    }
    return target;
}
|
|||
|
|
|||
|
function _objectWithoutProperties(source, excluded) {
    if (source == null) return {};
    // Start from the string-keyed copy, then extend it with any own
    // enumerable symbol keys that are not excluded.
    const target = _objectWithoutPropertiesLoose(source, excluded);
    if (Object.getOwnPropertySymbols) {
        for (const symbolKey of Object.getOwnPropertySymbols(source)) {
            if (excluded.indexOf(symbolKey) >= 0) continue;
            if (!Object.prototype.propertyIsEnumerable.call(source, symbolKey)) continue;
            target[symbolKey] = source[symbolKey];
        }
    }
    return target;
}
|
|||
|
|
|||
|
const VERSION = "3.2.5";
|
|||
|
|
|||
|
class Octokit {
|
|||
|
constructor(options = {}) {
|
|||
|
const hook = new beforeAfterHook.Collection();
|
|||
|
const requestDefaults = {
|
|||
|
baseUrl: request.request.endpoint.DEFAULTS.baseUrl,
|
|||
|
headers: {},
|
|||
|
request: Object.assign({}, options.request, {
|
|||
|
hook: hook.bind(null, "request")
|
|||
|
}),
|
|||
|
mediaType: {
|
|||
|
previews: [],
|
|||
|
format: ""
|
|||
|
}
|
|||
|
}; // prepend default user agent with `options.userAgent` if set
|
|||
|
|
|||
|
requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" ");
|
|||
|
|
|||
|
if (options.baseUrl) {
|
|||
|
requestDefaults.baseUrl = options.baseUrl;
|
|||
|
}
|
|||
|
|
|||
|
if (options.previews) {
|
|||
|
requestDefaults.mediaType.previews = options.previews;
|
|||
|
}
|
|||
|
|
|||
|
if (options.timeZone) {
|
|||
|
requestDefaults.headers["time-zone"] = options.timeZone;
|
|||
|
}
|
|||
|
|
|||
|
this.request = request.request.defaults(requestDefaults);
|
|||
|
this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults);
|
|||
|
this.log = Object.assign({
|
|||
|
debug: () => {},
|
|||
|
info: () => {},
|
|||
|
warn: console.warn.bind(console),
|
|||
|
error: console.error.bind(console)
|
|||
|
}, options.log);
|
|||
|
this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance
|
|||
|
// is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered.
|
|||
|
// (2) If only `options.auth` is set, use the default token authentication strategy.
|
|||
|
// (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance.
|
|||
|
// TODO: type `options.auth` based on `options.authStrategy`.
|
|||
|
|
|||
|
if (!options.authStrategy) {
|
|||
|
if (!options.auth) {
|
|||
|
// (1)
|
|||
|
this.auth = async () => ({
|
|||
|
type: "unauthenticated"
|
|||
|
});
|
|||
|
} else {
|
|||
|
// (2)
|
|||
|
const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\_(ツ)_/¯
|
|||
|
|
|||
|
hook.wrap("request", auth.hook);
|
|||
|
this.auth = auth;
|
|||
|
}
|
|||
|
} else {
|
|||
|
const {
|
|||
|
authStrategy
|
|||
|
} = options,
|
|||
|
otherOptions = _objectWithoutProperties(options, ["authStrategy"]);
|
|||
|
|
|||
|
const auth = authStrategy(Object.assign({
|
|||
|
request: this.request,
|
|||
|
log: this.log,
|
|||
|
// we pass the current octokit instance as well as its constructor options
|
|||
|
// to allow for authentication strategies that return a new octokit instance
|
|||
|
// that shares the same internal state as the current one. The original
|
|||
|
// requirement for this was the "event-octokit" authentication strategy
|
|||
|
// of https://github.com/probot/octokit-auth-probot.
|
|||
|
octokit: this,
|
|||
|
octokitOptions: otherOptions
|
|||
|
}, options.auth)); // @ts-ignore ¯\_(ツ)_/¯
|
|||
|
|
|||
|
hook.wrap("request", auth.hook);
|
|||
|
this.auth = auth;
|
|||
|
} // apply plugins
|
|||
|
// https://stackoverflow.com/a/16345172
|
|||
|
|
|||
|
|
|||
|
const classConstructor = this.constructor;
|
|||
|
classConstructor.plugins.forEach(plugin => {
|
|||
|
Object.assign(this, plugin(this, options));
|
|||
|
});
|
|||
|
}
|
|||
|
|
|||
|
static defaults(defaults) {
|
|||
|
const OctokitWithDefaults = class extends this {
|
|||
|
constructor(...args) {
|
|||
|
const options = args[0] || {};
|
|||
|
|
|||
|
if (typeof defaults === "function") {
|
|||
|
super(defaults(options));
|
|||
|
return;
|
|||
|
}
|
|||
|
|
|||
|
super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? {
|
|||
|
userAgent: `${options.userAgent} ${defaults.userAgent}`
|
|||
|
} : null));
|
|||
|
}
|
|||
|
|
|||
|
};
|
|||
|
return OctokitWithDefaults;
|
|||
|
}
|
|||
|
/**
|
|||
|
* Attach a plugin (or many) to your Octokit instance.
|
|||
|
*
|
|||
|
* @example
|
|||
|
* const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)
|
|||
|
*/
|
|||
|
|
|||
|
|
|||
|
static plugin(...newPlugins) {
|
|||
|
var _a;
|
|||
|
|
|||
|
const currentPlugins = this.plugins;
|
|||
|
const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a);
|
|||
|
return NewOctokit;
|
|||
|
}
|
|||
|
|
|||
|
}
|
|||
|
Octokit.VERSION = VERSION;
|
|||
|
Octokit.plugins = [];
|
|||
|
|
|||
|
exports.Octokit = Octokit;
|
|||
|
//# sourceMappingURL=index.js.map
|
|||
|
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 6214:
|
|||
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
|
|||
|
|
|||
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|||
|
|
|||
|
var isPlainObject = __nccwpck_require__(8636);
|
|||
|
var universalUserAgent = __nccwpck_require__(2102);
|
|||
|
|
|||
|
function lowercaseKeys(object) {
    // Normalize every key to lower case (header names are matched
    // case-insensitively); a nullish input becomes an empty object.
    if (!object) {
        return {};
    }
    const normalized = {};
    for (const key of Object.keys(object)) {
        normalized[key.toLowerCase()] = object[key];
    }
    return normalized;
}
|
|||
|
|
|||
|
function mergeDeep(defaults, options) {
    // Start from a shallow copy of the defaults, then overlay the
    // options: nested plain objects are merged recursively, while any
    // other value (arrays included) simply replaces the default.
    const result = Object.assign({}, defaults);
    for (const key of Object.keys(options)) {
        if (isPlainObject.isPlainObject(options[key])) {
            if (key in defaults) {
                result[key] = mergeDeep(defaults[key], options[key]);
            } else {
                Object.assign(result, { [key]: options[key] });
            }
        } else {
            Object.assign(result, { [key]: options[key] });
        }
    }
    return result;
}
|
|||
|
|
|||
|
function removeUndefinedProperties(obj) {
    // Strip every key whose value is `undefined`, mutating `obj` in
    // place; the same object is returned so call sites can chain it.
    Object.keys(obj)
        .filter(key => obj[key] === undefined)
        .forEach(key => delete obj[key]);
    return obj;
}
|
|||
|
|
|||
|
/**
 * Merge endpoint defaults with a route and per-call options into one
 * normalized options object.
 *
 * `route` may be the "METHOD /path" shorthand string (a bare string is
 * treated as the URL alone) or a full options object.
 */
function merge(defaults, route, options) {
  if (typeof route === "string") {
    let [method, url] = route.split(" ");
    options = Object.assign(url ? {
      method,
      url
    } : {
      url: method
    }, options);
  } else {
    options = Object.assign({}, route);
  } // lowercase header names before merging with defaults to avoid duplicates


  options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging


  removeUndefinedProperties(options);
  removeUndefinedProperties(options.headers);
  const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten


  if (defaults && defaults.mediaType.previews.length) {
    // default previews come first; duplicates from options are dropped
    mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews);
  }

  // store preview names in canonical form, without the "-preview" suffix
  mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, ""));
  return mergedOptions;
}
|
|||
|
|
|||
|
function addQueryParameters(url, parameters) {
    // Append `parameters` as a query string, honouring any query the
    // URL already carries. The "q" search parameter is special-cased:
    // its "+"-separated terms are encoded individually so the literal
    // "+" separators survive encoding.
    const separator = /\?/.test(url) ? "&" : "?";
    const names = Object.keys(parameters);
    if (names.length === 0) {
        return url;
    }
    const pairs = names.map(name => {
        if (name === "q") {
            const terms = parameters.q.split("+").map(encodeURIComponent);
            return "q=" + terms.join("+");
        }
        return `${name}=${encodeURIComponent(parameters[name])}`;
    });
    return url + separator + pairs.join("&");
}
|
|||
|
|
|||
|
const urlVariableRegex = /\{[^}]+\}/g;
|
|||
|
|
|||
|
function removeNonChars(variableName) {
    // Trim non-word characters from both ends (braces, operators,
    // modifiers), then split on commas into individual variable names.
    const trimmed = variableName.replace(/^\W+|\W+$/g, "");
    return trimmed.split(/,/);
}
|
|||
|
|
|||
|
function extractUrlVariableNames(url) {
    // Collect every {expression} in the URL template and flatten the
    // comma-separated variable names inside them into a single list.
    const matches = url.match(urlVariableRegex);
    if (!matches) {
        return [];
    }
    const names = [];
    for (const match of matches) {
        names.push(...removeNonChars(match));
    }
    return names;
}
|
|||
|
|
|||
|
function omit(object, keysToOmit) {
    // Return a shallow copy of `object` without the listed keys.
    const result = {};
    for (const key of Object.keys(object)) {
        if (!keysToOmit.includes(key)) {
            result[key] = object[key];
        }
    }
    return result;
}
|
|||
|
|
|||
|
// Based on https://github.com/bramstein/url-template, licensed under BSD
|
|||
|
// TODO: create separate package.
|
|||
|
//
|
|||
|
// Copyright (c) 2012-2014, Bram Stein
|
|||
|
// All rights reserved.
|
|||
|
// Redistribution and use in source and binary forms, with or without
|
|||
|
// modification, are permitted provided that the following conditions
|
|||
|
// are met:
|
|||
|
// 1. Redistributions of source code must retain the above copyright
|
|||
|
// notice, this list of conditions and the following disclaimer.
|
|||
|
// 2. Redistributions in binary form must reproduce the above copyright
|
|||
|
// notice, this list of conditions and the following disclaimer in the
|
|||
|
// documentation and/or other materials provided with the distribution.
|
|||
|
// 3. The name of the author may not be used to endorse or promote products
|
|||
|
// derived from this software without specific prior written permission.
|
|||
|
// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED
|
|||
|
// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
|
|||
|
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
|
|||
|
// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
|
|||
|
// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
|
|||
|
// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
|||
|
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
|
|||
|
// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
|||
|
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
|
|||
|
// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|||
|
|
|||
|
/* istanbul ignore file */
|
|||
|
function encodeReserved(str) {
    // Percent-encode a template literal while leaving pre-encoded
    // "%XX" triplets and the reserved characters "[" / "]" untouched.
    const parts = str.split(/(%[0-9A-Fa-f]{2})/g);
    const encoded = parts.map(function (part) {
        if (/%[0-9A-Fa-f]/.test(part)) {
            return part;
        }
        return encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]");
    });
    return encoded.join("");
}
|
|||
|
|
|||
|
function encodeUnreserved(str) {
    // encodeURIComponent leaves !'()* alone, but RFC 6570 requires
    // them percent-encoded as well, so patch them up afterwards.
    return encodeURIComponent(str).replace(/[!'()*]/g, c =>
        "%" + c.charCodeAt(0).toString(16).toUpperCase()
    );
}
|
|||
|
|
|||
|
function encodeValue(operator, value, key) {
    // "+" and "#" expansions allow reserved characters to pass
    // through; all other operators fully percent-encode the value.
    const encoded = operator === "+" || operator === "#"
        ? encodeReserved(value)
        : encodeUnreserved(value);
    return key ? encodeUnreserved(key) + "=" + encoded : encoded;
}
|
|||
|
|
|||
|
function isDefined(value) {
    // True for anything except null/undefined (0, "" and false count).
    return !(value === undefined || value === null);
}
|
|||
|
|
|||
|
function isKeyOperator(operator) {
    // The ";", "&" and "?" operators emit "key=value" pairs.
    return [";", "&", "?"].includes(operator);
}
|
|||
|
|
|||
|
/**
 * Expand a single RFC 6570 variable into its encoded string fragments.
 *
 * @param context  object holding the variable values
 * @param operator expression operator ("", "+", "#", ".", "/", ";", "?", "&")
 * @param key      name of the variable being expanded
 * @param modifier prefix length (":n") or explode ("*") modifier, if any
 * @returns array of encoded fragments (may be empty)
 */
function getValues(context, operator, key, modifier) {
  var value = context[key],
      result = [];

  if (isDefined(value) && value !== "") {
    if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") {
      value = value.toString();

      // the ":n" prefix modifier truncates scalar values
      if (modifier && modifier !== "*") {
        value = value.substring(0, parseInt(modifier, 10));
      }

      result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : ""));
    } else {
      if (modifier === "*") {
        // explode: each array item / object entry becomes its own fragment
        if (Array.isArray(value)) {
          value.filter(isDefined).forEach(function (value) {
            result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : ""));
          });
        } else {
          Object.keys(value).forEach(function (k) {
            if (isDefined(value[k])) {
              result.push(encodeValue(operator, value[k], k));
            }
          });
        }
      } else {
        // no explode: items are joined with "," into a single fragment
        const tmp = [];

        if (Array.isArray(value)) {
          value.filter(isDefined).forEach(function (value) {
            tmp.push(encodeValue(operator, value));
          });
        } else {
          Object.keys(value).forEach(function (k) {
            if (isDefined(value[k])) {
              tmp.push(encodeUnreserved(k));
              tmp.push(encodeValue(operator, value[k].toString()));
            }
          });
        }

        if (isKeyOperator(operator)) {
          result.push(encodeUnreserved(key) + "=" + tmp.join(","));
        } else if (tmp.length !== 0) {
          result.push(tmp.join(","));
        }
      }
    }
  } else {
    // undefined / empty-string handling depends on the operator
    if (operator === ";") {
      if (isDefined(value)) {
        result.push(encodeUnreserved(key));
      }
    } else if (value === "" && (operator === "&" || operator === "?")) {
      result.push(encodeUnreserved(key) + "=");
    } else if (value === "") {
      result.push("");
    }
  }

  return result;
}
|
|||
|
|
|||
|
function parseUrl(template) {
    // Wrap the raw RFC 6570 template in an object exposing expand().
    return { expand: expand.bind(null, template) };
}
|
|||
|
|
|||
|
/**
 * Expand an RFC 6570 URI template against the values in `context`.
 * Literal text outside {...} expressions is URI-encoded as-is.
 */
function expand(template, context) {
  var operators = ["+", "#", ".", "/", ";", "?", "&"];
  return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) {
    if (expression) {
      let operator = "";
      const values = [];

      // a leading operator character changes encoding and separators
      if (operators.indexOf(expression.charAt(0)) !== -1) {
        operator = expression.charAt(0);
        expression = expression.substr(1);
      }

      // expand each comma-separated variable (with optional ":n" / "*" modifier)
      expression.split(/,/g).forEach(function (variable) {
        var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable);
        values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));
      });

      if (operator && operator !== "+") {
        var separator = ",";

        if (operator === "?") {
          separator = "&";
        } else if (operator !== "#") {
          separator = operator;
        }

        // prefix the expansion with the operator itself (e.g. "?a=1&b=2")
        return (values.length !== 0 ? operator : "") + values.join(separator);
      } else {
        return values.join(",");
      }
    } else {
      return encodeReserved(literal);
    }
  });
}
|
|||
|
|
|||
|
function parse(options) {
|
|||
|
// https://fetch.spec.whatwg.org/#methods
|
|||
|
let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible
|
|||
|
|
|||
|
let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}");
|
|||
|
let headers = Object.assign({}, options.headers);
|
|||
|
let body;
|
|||
|
let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later
|
|||
|
|
|||
|
const urlVariableNames = extractUrlVariableNames(url);
|
|||
|
url = parseUrl(url).expand(parameters);
|
|||
|
|
|||
|
if (!/^http/.test(url)) {
|
|||
|
url = options.baseUrl + url;
|
|||
|
}
|
|||
|
|
|||
|
const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl");
|
|||
|
const remainingParameters = omit(parameters, omittedParameters);
|
|||
|
const isBinaryRequest = /application\/octet-stream/i.test(headers.accept);
|
|||
|
|
|||
|
if (!isBinaryRequest) {
|
|||
|
if (options.mediaType.format) {
|
|||
|
// e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw
|
|||
|
headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(",");
|
|||
|
}
|
|||
|
|
|||
|
if (options.mediaType.previews.length) {
|
|||
|
const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || [];
|
|||
|
headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => {
|
|||
|
const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json";
|
|||
|
return `application/vnd.github.${preview}-preview${format}`;
|
|||
|
}).join(",");
|
|||
|
}
|
|||
|
} // for GET/HEAD requests, set URL query parameters from remaining parameters
|
|||
|
// for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters
|
|||
|
|
|||
|
|
|||
|
if (["GET", "HEAD"].includes(method)) {
|
|||
|
url = addQueryParameters(url, remainingParameters);
|
|||
|
} else {
|
|||
|
if ("data" in remainingParameters) {
|
|||
|
body = remainingParameters.data;
|
|||
|
} else {
|
|||
|
if (Object.keys(remainingParameters).length) {
|
|||
|
body = remainingParameters;
|
|||
|
} else {
|
|||
|
headers["content-length"] = 0;
|
|||
|
}
|
|||
|
}
|
|||
|
} // default content-type for JSON if body is set
|
|||
|
|
|||
|
|
|||
|
if (!headers["content-type"] && typeof body !== "undefined") {
|
|||
|
headers["content-type"] = "application/json; charset=utf-8";
|
|||
|
} // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body.
|
|||
|
// fetch does not allow to set `content-length` header, but we can set body to an empty string
|
|||
|
|
|||
|
|
|||
|
if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") {
|
|||
|
body = "";
|
|||
|
} // Only return body/request keys if present
|
|||
|
|
|||
|
|
|||
|
return Object.assign({
|
|||
|
method,
|
|||
|
url,
|
|||
|
headers
|
|||
|
}, typeof body !== "undefined" ? {
|
|||
|
body
|
|||
|
} : null, options.request ? {
|
|||
|
request: options.request
|
|||
|
} : null);
|
|||
|
}
|
|||
|
|
|||
|
function endpointWithDefaults(defaults, route, options) {
    // Merge route/options onto the captured defaults, then parse the
    // result into a concrete request-options object.
    const merged = merge(defaults, route, options);
    return parse(merged);
}
|
|||
|
|
|||
|
function withDefaults(oldDefaults, newDefaults) {
    // Build an endpoint function pre-seeded with the merged defaults and
    // expose the helpers callers rely on (DEFAULTS, defaults, merge, parse).
    const DEFAULTS = merge(oldDefaults, newDefaults);
    const endpoint = endpointWithDefaults.bind(null, DEFAULTS);
    endpoint.DEFAULTS = DEFAULTS;
    endpoint.defaults = withDefaults.bind(null, DEFAULTS);
    endpoint.merge = merge.bind(null, DEFAULTS);
    endpoint.parse = parse;
    return endpoint;
}
|
|||
|
|
|||
|
const VERSION = "6.0.11";
|
|||
|
|
|||
|
const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url.
|
|||
|
// So we use RequestParameters and add method as additional required property.
|
|||
|
|
|||
|
const DEFAULTS = {
|
|||
|
method: "GET",
|
|||
|
baseUrl: "https://api.github.com",
|
|||
|
headers: {
|
|||
|
accept: "application/vnd.github.v3+json",
|
|||
|
"user-agent": userAgent
|
|||
|
},
|
|||
|
mediaType: {
|
|||
|
format: "",
|
|||
|
previews: []
|
|||
|
}
|
|||
|
};
|
|||
|
|
|||
|
const endpoint = withDefaults(null, DEFAULTS);
|
|||
|
|
|||
|
exports.endpoint = endpoint;
|
|||
|
//# sourceMappingURL=index.js.map
|
|||
|
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 4625:
|
|||
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
|
|||
|
|
|||
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|||
|
|
|||
|
var request = __nccwpck_require__(9826);
|
|||
|
var universalUserAgent = __nccwpck_require__(2102);
|
|||
|
|
|||
|
const VERSION = "4.6.0";
|
|||
|
|
|||
|
class GraphqlError extends Error {
    /**
     * Error raised when a GraphQL response carries an `errors` array.
     * The first error's message becomes the Error message; the whole
     * response payload and its headers are copied onto the instance.
     */
    constructor(request, response) {
        super(response.data.errors[0].message);
        Object.assign(this, response.data);
        Object.assign(this, {
            headers: response.headers
        });
        this.name = "GraphqlError";
        this.request = request; // Maintains proper stack trace (only available on V8)

        /* istanbul ignore next */
        if (Error.captureStackTrace) {
            Error.captureStackTrace(this, this.constructor);
        }
    }
}
|
|||
|
|
|||
|
const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query", "mediaType"];
|
|||
|
const GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/;
|
|||
|
/**
 * Send a GraphQL request. `query` may be a query string (with variables
 * supplied in `options`) or a full options object containing a `query`
 * key. Resolves with `response.data.data`; rejects with GraphqlError
 * when the server reports errors.
 */
function graphql(request, query, options) {
  if (typeof query === "string" && options && "query" in options) {
    return Promise.reject(new Error(`[@octokit/graphql] "query" cannot be used as variable name`));
  }

  const parsedOptions = typeof query === "string" ? Object.assign({
    query
  }, options) : query;
  // split the options: known request options stay top-level, every
  // other key becomes a GraphQL variable
  const requestOptions = Object.keys(parsedOptions).reduce((result, key) => {
    if (NON_VARIABLE_OPTIONS.includes(key)) {
      result[key] = parsedOptions[key];
      return result;
    }

    if (!result.variables) {
      result.variables = {};
    }

    result.variables[key] = parsedOptions[key];
    return result;
  }, {}); // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix
  // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451


  const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl;

  if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {
    requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql");
  }

  return request(requestOptions).then(response => {
    if (response.data.errors) {
      // copy the headers so the thrown error carries its own snapshot
      const headers = {};

      for (const key of Object.keys(response.headers)) {
        headers[key] = response.headers[key];
      }

      throw new GraphqlError(requestOptions, {
        headers,
        data: response.data
      });
    }

    return response.data.data;
  });
}
|
|||
|
|
|||
|
function withDefaults(request$1, newDefaults) {
    // Pre-configure a request instance and return a graphql callable
    // that closes over it, plus the defaults/endpoint helpers.
    const newRequest = request$1.defaults(newDefaults);
    const newApi = (query, options) => graphql(newRequest, query, options);
    newApi.defaults = withDefaults.bind(null, newRequest);
    newApi.endpoint = request.request.endpoint;
    return newApi;
}
|
|||
|
|
|||
|
const graphql$1 = withDefaults(request.request, {
|
|||
|
headers: {
|
|||
|
"user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}`
|
|||
|
},
|
|||
|
method: "POST",
|
|||
|
url: "/graphql"
|
|||
|
});
|
|||
|
function withCustomRequest(customRequest) {
    // Same defaults as the bundled graphql entry point, but routed
    // through a caller-supplied request implementation.
    const graphqlDefaults = {
        method: "POST",
        url: "/graphql"
    };
    return withDefaults(customRequest, graphqlDefaults);
}
|
|||
|
|
|||
|
exports.graphql = graphql$1;
|
|||
|
exports.withCustomRequest = withCustomRequest;
|
|||
|
//# sourceMappingURL=index.js.map
|
|||
|
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 8619:
|
|||
|
/***/ ((__unused_webpack_module, exports) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
|
|||
|
|
|||
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|||
|
|
|||
|
const VERSION = "2.10.0";
|
|||
|
|
|||
|
/**
|
|||
|
* Some “list” response that can be paginated have a different response structure
|
|||
|
*
|
|||
|
* They have a `total_count` key in the response (search also has `incomplete_results`,
|
|||
|
* /installation/repositories also has `repository_selection`), as well as a key with
|
|||
|
* the list of the items which name varies from endpoint to endpoint.
|
|||
|
*
|
|||
|
* Octokit normalizes these responses so that paginated results are always returned following
|
|||
|
* the same structure. One challenge is that if the list response has only one page, no Link
|
|||
|
* header is provided, so this header alone is not sufficient to check wether a response is
|
|||
|
* paginated or not.
|
|||
|
*
|
|||
|
* We check if a "total_count" key is present in the response data, but also make sure that
|
|||
|
* a "url" property is not, as the "Get the combined status for a specific ref" endpoint would
|
|||
|
* otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref
|
|||
|
*/
|
|||
|
function normalizePaginatedListResponse(response) {
    // Only wrapped list payloads need rewriting: they carry a
    // "total_count" key but no "url" key.
    const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data);
    if (!responseNeedsNormalization) return response;
    // Pull the bookkeeping keys off the payload so the one remaining
    // key is the namespaced item list, then re-attach them to that
    // list so callers keep access to the same information.
    const {
        incomplete_results: incompleteResults,
        repository_selection: repositorySelection,
        total_count: totalCount
    } = response.data;
    delete response.data.incomplete_results;
    delete response.data.repository_selection;
    delete response.data.total_count;
    const namespaceKey = Object.keys(response.data)[0];
    response.data = response.data[namespaceKey];
    if (typeof incompleteResults !== "undefined") {
        response.data.incomplete_results = incompleteResults;
    }
    if (typeof repositorySelection !== "undefined") {
        response.data.repository_selection = repositorySelection;
    }
    response.data.total_count = totalCount;
    return response;
}
|
|||
|
|
|||
|
function iterator(octokit, route, parameters) {
    // Resolve the route once up front; `url` then acts as the cursor,
    // advanced from each response's Link header until exhausted.
    const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters);
    const requestMethod = typeof route === "function" ? route : octokit.request;
    const { method, headers } = options;
    let url = options.url;
    return {
        [Symbol.asyncIterator]: () => ({
            async next() {
                // A missing "next" URL terminates the iteration.
                if (!url) {
                    return { done: true };
                }
                const response = await requestMethod({ method, url, headers });
                const normalizedResponse = normalizePaginatedListResponse(response);
                // `response.headers.link` format:
                // '<https://api.github.com/users/aseemk/followers?page=2>; rel="next", <https://api.github.com/users/aseemk/followers?page=2>; rel="last"'
                // sets `url` to undefined if "next" URL is not present or `link` header is not set
                url = ((normalizedResponse.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1];
                return { value: normalizedResponse };
            }
        })
    };
}
|
|||
|
|
|||
|
function paginate(octokit, route, parameters, mapFn) {
    // Support the (route, mapFn) call shape: a function in the
    // parameters slot is actually the page mapper.
    if (typeof parameters === "function") {
        mapFn = parameters;
        parameters = undefined;
    }
    const pages = iterator(octokit, route, parameters)[Symbol.asyncIterator]();
    return gather(octokit, [], pages, mapFn);
}
|
|||
|
|
|||
|
/**
 * Recursively drains an async iterator of page responses, accumulating
 * results into a single array.
 *
 * @param octokit Octokit instance (threaded through for recursive calls)
 * @param results accumulator of results gathered so far
 * @param iterator async iterator yielding `{ value }` pages until `{ done }`
 * @param mapFn optional mapper; receives the page response and a `done`
 *        callback that aborts pagination after the current page
 * @returns promise resolving to the accumulated results
 */
function gather(octokit, results, iterator, mapFn) {
  return iterator.next().then(result => {
    // Iterator exhausted: hand back everything collected so far.
    if (result.done) {
      return results;
    }
    let stopped = false;
    const done = () => {
      stopped = true;
    };
    const pageItems = mapFn ? mapFn(result.value, done) : result.value.data;
    results = results.concat(pageItems);
    // If the map function called `done()`, stop here; otherwise recurse for
    // the next page.
    return stopped ? results : gather(octokit, results, iterator, mapFn);
  });
}
|
|||
|
|
|||
|
// Standalone `paginate` with its `iterator` companion attached, usable
// without installing the plugin on an Octokit instance.
const composePaginateRest = Object.assign(paginate, { iterator });
|
|||
|
|
|||
|
/**
 * @param octokit Octokit instance
 * @param options Options passed to Octokit constructor
 */
|
|||
|
|
|||
|
/**
 * Octokit plugin that exposes `octokit.paginate(...)` and
 * `octokit.paginate.iterator(...)`, both pre-bound to the given instance.
 *
 * @param octokit Octokit instance
 * @returns plugin API object merged into the Octokit instance
 */
function paginateRest(octokit) {
  const boundPaginate = Object.assign(paginate.bind(null, octokit), {
    iterator: iterator.bind(null, octokit)
  });
  return { paginate: boundPaginate };
}
paginateRest.VERSION = VERSION;
|
|||
|
|
|||
|
// Public API of this module.
exports.composePaginateRest = composePaginateRest;
exports.paginateRest = paginateRest;
|
|||
|
//# sourceMappingURL=index.js.map
|
|||
|
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 4283:
|
|||
|
/***/ ((__unused_webpack_module, exports) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
|
|||
|
|
|||
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|||
|
|
|||
|
const Endpoints = {
|
|||
|
actions: {
|
|||
|
addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"],
|
|||
|
cancelWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"],
|
|||
|
createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"],
|
|||
|
createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}"],
|
|||
|
createRegistrationTokenForOrg: ["POST /orgs/{org}/actions/runners/registration-token"],
|
|||
|
createRegistrationTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/registration-token"],
|
|||
|
createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"],
|
|||
|
createRemoveTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/remove-token"],
|
|||
|
createWorkflowDispatch: ["POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches"],
|
|||
|
deleteArtifact: ["DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"],
|
|||
|
deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"],
|
|||
|
deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}"],
|
|||
|
deleteSelfHostedRunnerFromOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}"],
|
|||
|
deleteSelfHostedRunnerFromRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"],
|
|||
|
deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"],
|
|||
|
deleteWorkflowRunLogs: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"],
|
|||
|
disableSelectedRepositoryGithubActionsOrganization: ["DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}"],
|
|||
|
disableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable"],
|
|||
|
downloadArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"],
|
|||
|
downloadJobLogsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs"],
|
|||
|
downloadWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"],
|
|||
|
enableSelectedRepositoryGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories/{repository_id}"],
|
|||
|
enableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable"],
|
|||
|
getAllowedActionsOrganization: ["GET /orgs/{org}/actions/permissions/selected-actions"],
|
|||
|
getAllowedActionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions/selected-actions"],
|
|||
|
getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"],
|
|||
|
getGithubActionsPermissionsOrganization: ["GET /orgs/{org}/actions/permissions"],
|
|||
|
getGithubActionsPermissionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions"],
|
|||
|
getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"],
|
|||
|
getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"],
|
|||
|
getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"],
|
|||
|
getRepoPermissions: ["GET /repos/{owner}/{repo}/actions/permissions", {}, {
|
|||
|
renamed: ["actions", "getGithubActionsPermissionsRepository"]
|
|||
|
}],
|
|||
|
getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"],
|
|||
|
getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"],
|
|||
|
getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"],
|
|||
|
getSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}"],
|
|||
|
getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"],
|
|||
|
getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"],
|
|||
|
getWorkflowRunUsage: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"],
|
|||
|
getWorkflowUsage: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"],
|
|||
|
listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"],
|
|||
|
listJobsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"],
|
|||
|
listOrgSecrets: ["GET /orgs/{org}/actions/secrets"],
|
|||
|
listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"],
|
|||
|
listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"],
|
|||
|
listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"],
|
|||
|
listRunnerApplicationsForRepo: ["GET /repos/{owner}/{repo}/actions/runners/downloads"],
|
|||
|
listSelectedReposForOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"],
|
|||
|
listSelectedRepositoriesEnabledGithubActionsOrganization: ["GET /orgs/{org}/actions/permissions/repositories"],
|
|||
|
listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"],
|
|||
|
listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"],
|
|||
|
listWorkflowRunArtifacts: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"],
|
|||
|
listWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"],
|
|||
|
listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"],
|
|||
|
reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"],
|
|||
|
removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"],
|
|||
|
setAllowedActionsOrganization: ["PUT /orgs/{org}/actions/permissions/selected-actions"],
|
|||
|
setAllowedActionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/selected-actions"],
|
|||
|
setGithubActionsPermissionsOrganization: ["PUT /orgs/{org}/actions/permissions"],
|
|||
|
setGithubActionsPermissionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions"],
|
|||
|
setSelectedReposForOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"],
|
|||
|
setSelectedRepositoriesEnabledGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories"]
|
|||
|
},
|
|||
|
activity: {
|
|||
|
checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"],
|
|||
|
deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"],
|
|||
|
deleteThreadSubscription: ["DELETE /notifications/threads/{thread_id}/subscription"],
|
|||
|
getFeeds: ["GET /feeds"],
|
|||
|
getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"],
|
|||
|
getThread: ["GET /notifications/threads/{thread_id}"],
|
|||
|
getThreadSubscriptionForAuthenticatedUser: ["GET /notifications/threads/{thread_id}/subscription"],
|
|||
|
listEventsForAuthenticatedUser: ["GET /users/{username}/events"],
|
|||
|
listNotificationsForAuthenticatedUser: ["GET /notifications"],
|
|||
|
listOrgEventsForAuthenticatedUser: ["GET /users/{username}/events/orgs/{org}"],
|
|||
|
listPublicEvents: ["GET /events"],
|
|||
|
listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"],
|
|||
|
listPublicEventsForUser: ["GET /users/{username}/events/public"],
|
|||
|
listPublicOrgEvents: ["GET /orgs/{org}/events"],
|
|||
|
listReceivedEventsForUser: ["GET /users/{username}/received_events"],
|
|||
|
listReceivedPublicEventsForUser: ["GET /users/{username}/received_events/public"],
|
|||
|
listRepoEvents: ["GET /repos/{owner}/{repo}/events"],
|
|||
|
listRepoNotificationsForAuthenticatedUser: ["GET /repos/{owner}/{repo}/notifications"],
|
|||
|
listReposStarredByAuthenticatedUser: ["GET /user/starred"],
|
|||
|
listReposStarredByUser: ["GET /users/{username}/starred"],
|
|||
|
listReposWatchedByUser: ["GET /users/{username}/subscriptions"],
|
|||
|
listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"],
|
|||
|
listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"],
|
|||
|
listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"],
|
|||
|
markNotificationsAsRead: ["PUT /notifications"],
|
|||
|
markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"],
|
|||
|
markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"],
|
|||
|
setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"],
|
|||
|
setThreadSubscription: ["PUT /notifications/threads/{thread_id}/subscription"],
|
|||
|
starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"],
|
|||
|
unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"]
|
|||
|
},
|
|||
|
apps: {
|
|||
|
addRepoToInstallation: ["PUT /user/installations/{installation_id}/repositories/{repository_id}"],
|
|||
|
checkToken: ["POST /applications/{client_id}/token"],
|
|||
|
createContentAttachment: ["POST /content_references/{content_reference_id}/attachments", {
|
|||
|
mediaType: {
|
|||
|
previews: ["corsair"]
|
|||
|
}
|
|||
|
}],
|
|||
|
createFromManifest: ["POST /app-manifests/{code}/conversions"],
|
|||
|
createInstallationAccessToken: ["POST /app/installations/{installation_id}/access_tokens"],
|
|||
|
deleteAuthorization: ["DELETE /applications/{client_id}/grant"],
|
|||
|
deleteInstallation: ["DELETE /app/installations/{installation_id}"],
|
|||
|
deleteToken: ["DELETE /applications/{client_id}/token"],
|
|||
|
getAuthenticated: ["GET /app"],
|
|||
|
getBySlug: ["GET /apps/{app_slug}"],
|
|||
|
getInstallation: ["GET /app/installations/{installation_id}"],
|
|||
|
getOrgInstallation: ["GET /orgs/{org}/installation"],
|
|||
|
getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"],
|
|||
|
getSubscriptionPlanForAccount: ["GET /marketplace_listing/accounts/{account_id}"],
|
|||
|
getSubscriptionPlanForAccountStubbed: ["GET /marketplace_listing/stubbed/accounts/{account_id}"],
|
|||
|
getUserInstallation: ["GET /users/{username}/installation"],
|
|||
|
getWebhookConfigForApp: ["GET /app/hook/config"],
|
|||
|
listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"],
|
|||
|
listAccountsForPlanStubbed: ["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"],
|
|||
|
listInstallationReposForAuthenticatedUser: ["GET /user/installations/{installation_id}/repositories"],
|
|||
|
listInstallations: ["GET /app/installations"],
|
|||
|
listInstallationsForAuthenticatedUser: ["GET /user/installations"],
|
|||
|
listPlans: ["GET /marketplace_listing/plans"],
|
|||
|
listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"],
|
|||
|
listReposAccessibleToInstallation: ["GET /installation/repositories"],
|
|||
|
listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"],
|
|||
|
listSubscriptionsForAuthenticatedUserStubbed: ["GET /user/marketplace_purchases/stubbed"],
|
|||
|
removeRepoFromInstallation: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}"],
|
|||
|
resetToken: ["PATCH /applications/{client_id}/token"],
|
|||
|
revokeInstallationAccessToken: ["DELETE /installation/token"],
|
|||
|
scopeToken: ["POST /applications/{client_id}/token/scoped"],
|
|||
|
suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"],
|
|||
|
unsuspendInstallation: ["DELETE /app/installations/{installation_id}/suspended"],
|
|||
|
updateWebhookConfigForApp: ["PATCH /app/hook/config"]
|
|||
|
},
|
|||
|
billing: {
|
|||
|
getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"],
|
|||
|
getGithubActionsBillingUser: ["GET /users/{username}/settings/billing/actions"],
|
|||
|
getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"],
|
|||
|
getGithubPackagesBillingUser: ["GET /users/{username}/settings/billing/packages"],
|
|||
|
getSharedStorageBillingOrg: ["GET /orgs/{org}/settings/billing/shared-storage"],
|
|||
|
getSharedStorageBillingUser: ["GET /users/{username}/settings/billing/shared-storage"]
|
|||
|
},
|
|||
|
checks: {
|
|||
|
create: ["POST /repos/{owner}/{repo}/check-runs"],
|
|||
|
createSuite: ["POST /repos/{owner}/{repo}/check-suites"],
|
|||
|
get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"],
|
|||
|
getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"],
|
|||
|
listAnnotations: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations"],
|
|||
|
listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"],
|
|||
|
listForSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs"],
|
|||
|
listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"],
|
|||
|
rerequestSuite: ["POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest"],
|
|||
|
setSuitesPreferences: ["PATCH /repos/{owner}/{repo}/check-suites/preferences"],
|
|||
|
update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"]
|
|||
|
},
|
|||
|
codeScanning: {
|
|||
|
deleteAnalysis: ["DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}"],
|
|||
|
getAlert: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", {}, {
|
|||
|
renamedParameters: {
|
|||
|
alert_id: "alert_number"
|
|||
|
}
|
|||
|
}],
|
|||
|
getAnalysis: ["GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}"],
|
|||
|
getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"],
|
|||
|
listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"],
|
|||
|
listAlertsInstances: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances"],
|
|||
|
listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"],
|
|||
|
updateAlert: ["PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}"],
|
|||
|
uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"]
|
|||
|
},
|
|||
|
codesOfConduct: {
|
|||
|
getAllCodesOfConduct: ["GET /codes_of_conduct", {
|
|||
|
mediaType: {
|
|||
|
previews: ["scarlet-witch"]
|
|||
|
}
|
|||
|
}],
|
|||
|
getConductCode: ["GET /codes_of_conduct/{key}", {
|
|||
|
mediaType: {
|
|||
|
previews: ["scarlet-witch"]
|
|||
|
}
|
|||
|
}],
|
|||
|
getForRepo: ["GET /repos/{owner}/{repo}/community/code_of_conduct", {
|
|||
|
mediaType: {
|
|||
|
previews: ["scarlet-witch"]
|
|||
|
}
|
|||
|
}]
|
|||
|
},
|
|||
|
emojis: {
|
|||
|
get: ["GET /emojis"]
|
|||
|
},
|
|||
|
enterpriseAdmin: {
|
|||
|
disableSelectedOrganizationGithubActionsEnterprise: ["DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"],
|
|||
|
enableSelectedOrganizationGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"],
|
|||
|
getAllowedActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/selected-actions"],
|
|||
|
getGithubActionsPermissionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions"],
|
|||
|
listSelectedOrganizationsEnabledGithubActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/organizations"],
|
|||
|
setAllowedActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/selected-actions"],
|
|||
|
setGithubActionsPermissionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions"],
|
|||
|
setSelectedOrganizationsEnabledGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations"]
|
|||
|
},
|
|||
|
gists: {
|
|||
|
checkIsStarred: ["GET /gists/{gist_id}/star"],
|
|||
|
create: ["POST /gists"],
|
|||
|
createComment: ["POST /gists/{gist_id}/comments"],
|
|||
|
delete: ["DELETE /gists/{gist_id}"],
|
|||
|
deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"],
|
|||
|
fork: ["POST /gists/{gist_id}/forks"],
|
|||
|
get: ["GET /gists/{gist_id}"],
|
|||
|
getComment: ["GET /gists/{gist_id}/comments/{comment_id}"],
|
|||
|
getRevision: ["GET /gists/{gist_id}/{sha}"],
|
|||
|
list: ["GET /gists"],
|
|||
|
listComments: ["GET /gists/{gist_id}/comments"],
|
|||
|
listCommits: ["GET /gists/{gist_id}/commits"],
|
|||
|
listForUser: ["GET /users/{username}/gists"],
|
|||
|
listForks: ["GET /gists/{gist_id}/forks"],
|
|||
|
listPublic: ["GET /gists/public"],
|
|||
|
listStarred: ["GET /gists/starred"],
|
|||
|
star: ["PUT /gists/{gist_id}/star"],
|
|||
|
unstar: ["DELETE /gists/{gist_id}/star"],
|
|||
|
update: ["PATCH /gists/{gist_id}"],
|
|||
|
updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"]
|
|||
|
},
|
|||
|
git: {
|
|||
|
createBlob: ["POST /repos/{owner}/{repo}/git/blobs"],
|
|||
|
createCommit: ["POST /repos/{owner}/{repo}/git/commits"],
|
|||
|
createRef: ["POST /repos/{owner}/{repo}/git/refs"],
|
|||
|
createTag: ["POST /repos/{owner}/{repo}/git/tags"],
|
|||
|
createTree: ["POST /repos/{owner}/{repo}/git/trees"],
|
|||
|
deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"],
|
|||
|
getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"],
|
|||
|
getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"],
|
|||
|
getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"],
|
|||
|
getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"],
|
|||
|
getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"],
|
|||
|
listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"],
|
|||
|
updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"]
|
|||
|
},
|
|||
|
gitignore: {
|
|||
|
getAllTemplates: ["GET /gitignore/templates"],
|
|||
|
getTemplate: ["GET /gitignore/templates/{name}"]
|
|||
|
},
|
|||
|
interactions: {
|
|||
|
getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"],
|
|||
|
getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"],
|
|||
|
getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"],
|
|||
|
getRestrictionsForYourPublicRepos: ["GET /user/interaction-limits", {}, {
|
|||
|
renamed: ["interactions", "getRestrictionsForAuthenticatedUser"]
|
|||
|
}],
|
|||
|
removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"],
|
|||
|
removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"],
|
|||
|
removeRestrictionsForRepo: ["DELETE /repos/{owner}/{repo}/interaction-limits"],
|
|||
|
removeRestrictionsForYourPublicRepos: ["DELETE /user/interaction-limits", {}, {
|
|||
|
renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"]
|
|||
|
}],
|
|||
|
setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"],
|
|||
|
setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"],
|
|||
|
setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"],
|
|||
|
setRestrictionsForYourPublicRepos: ["PUT /user/interaction-limits", {}, {
|
|||
|
renamed: ["interactions", "setRestrictionsForAuthenticatedUser"]
|
|||
|
}]
|
|||
|
},
|
|||
|
issues: {
|
|||
|
addAssignees: ["POST /repos/{owner}/{repo}/issues/{issue_number}/assignees"],
|
|||
|
addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"],
|
|||
|
checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"],
|
|||
|
create: ["POST /repos/{owner}/{repo}/issues"],
|
|||
|
createComment: ["POST /repos/{owner}/{repo}/issues/{issue_number}/comments"],
|
|||
|
createLabel: ["POST /repos/{owner}/{repo}/labels"],
|
|||
|
createMilestone: ["POST /repos/{owner}/{repo}/milestones"],
|
|||
|
deleteComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}"],
|
|||
|
deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"],
|
|||
|
deleteMilestone: ["DELETE /repos/{owner}/{repo}/milestones/{milestone_number}"],
|
|||
|
get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"],
|
|||
|
getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"],
|
|||
|
getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"],
|
|||
|
getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"],
|
|||
|
getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"],
|
|||
|
list: ["GET /issues"],
|
|||
|
listAssignees: ["GET /repos/{owner}/{repo}/assignees"],
|
|||
|
listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"],
|
|||
|
listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"],
|
|||
|
listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"],
|
|||
|
listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"],
|
|||
|
listEventsForTimeline: ["GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", {
|
|||
|
mediaType: {
|
|||
|
previews: ["mockingbird"]
|
|||
|
}
|
|||
|
}],
|
|||
|
listForAuthenticatedUser: ["GET /user/issues"],
|
|||
|
listForOrg: ["GET /orgs/{org}/issues"],
|
|||
|
listForRepo: ["GET /repos/{owner}/{repo}/issues"],
|
|||
|
listLabelsForMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"],
|
|||
|
listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"],
|
|||
|
listLabelsOnIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/labels"],
|
|||
|
listMilestones: ["GET /repos/{owner}/{repo}/milestones"],
|
|||
|
lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"],
|
|||
|
removeAllLabels: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels"],
|
|||
|
removeAssignees: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees"],
|
|||
|
removeLabel: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}"],
|
|||
|
setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"],
|
|||
|
unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"],
|
|||
|
update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"],
|
|||
|
updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"],
|
|||
|
updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"],
|
|||
|
updateMilestone: ["PATCH /repos/{owner}/{repo}/milestones/{milestone_number}"]
|
|||
|
},
|
|||
|
licenses: {
|
|||
|
get: ["GET /licenses/{license}"],
|
|||
|
getAllCommonlyUsed: ["GET /licenses"],
|
|||
|
getForRepo: ["GET /repos/{owner}/{repo}/license"]
|
|||
|
},
|
|||
|
markdown: {
|
|||
|
render: ["POST /markdown"],
|
|||
|
renderRaw: ["POST /markdown/raw", {
|
|||
|
headers: {
|
|||
|
"content-type": "text/plain; charset=utf-8"
|
|||
|
}
|
|||
|
}]
|
|||
|
},
|
|||
|
meta: {
|
|||
|
get: ["GET /meta"],
|
|||
|
getOctocat: ["GET /octocat"],
|
|||
|
getZen: ["GET /zen"],
|
|||
|
root: ["GET /"]
|
|||
|
},
|
|||
|
migrations: {
|
|||
|
cancelImport: ["DELETE /repos/{owner}/{repo}/import"],
|
|||
|
deleteArchiveForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/archive", {
|
|||
|
mediaType: {
|
|||
|
previews: ["wyandotte"]
|
|||
|
}
|
|||
|
}],
|
|||
|
deleteArchiveForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/archive", {
|
|||
|
mediaType: {
|
|||
|
previews: ["wyandotte"]
|
|||
|
}
|
|||
|
}],
|
|||
|
downloadArchiveForOrg: ["GET /orgs/{org}/migrations/{migration_id}/archive", {
|
|||
|
mediaType: {
|
|||
|
previews: ["wyandotte"]
|
|||
|
}
|
|||
|
}],
|
|||
|
getArchiveForAuthenticatedUser: ["GET /user/migrations/{migration_id}/archive", {
|
|||
|
mediaType: {
|
|||
|
previews: ["wyandotte"]
|
|||
|
}
|
|||
|
}],
|
|||
|
getCommitAuthors: ["GET /repos/{owner}/{repo}/import/authors"],
|
|||
|
getImportStatus: ["GET /repos/{owner}/{repo}/import"],
|
|||
|
getLargeFiles: ["GET /repos/{owner}/{repo}/import/large_files"],
|
|||
|
getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}", {
|
|||
|
mediaType: {
|
|||
|
previews: ["wyandotte"]
|
|||
|
}
|
|||
|
}],
|
|||
|
getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}", {
|
|||
|
mediaType: {
|
|||
|
previews: ["wyandotte"]
|
|||
|
}
|
|||
|
}],
|
|||
|
listForAuthenticatedUser: ["GET /user/migrations", {
|
|||
|
mediaType: {
|
|||
|
previews: ["wyandotte"]
|
|||
|
}
|
|||
|
}],
|
|||
|
listForOrg: ["GET /orgs/{org}/migrations", {
|
|||
|
mediaType: {
|
|||
|
previews: ["wyandotte"]
|
|||
|
}
|
|||
|
}],
|
|||
|
listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories", {
|
|||
|
mediaType: {
|
|||
|
previews: ["wyandotte"]
|
|||
|
}
|
|||
|
}],
|
|||
|
listReposForUser: ["GET /user/migrations/{migration_id}/repositories", {
|
|||
|
mediaType: {
|
|||
|
previews: ["wyandotte"]
|
|||
|
}
|
|||
|
}],
|
|||
|
mapCommitAuthor: ["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"],
|
|||
|
setLfsPreference: ["PATCH /repos/{owner}/{repo}/import/lfs"],
|
|||
|
startForAuthenticatedUser: ["POST /user/migrations"],
|
|||
|
startForOrg: ["POST /orgs/{org}/migrations"],
|
|||
|
startImport: ["PUT /repos/{owner}/{repo}/import"],
|
|||
|
unlockRepoForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock", {
|
|||
|
mediaType: {
|
|||
|
previews: ["wyandotte"]
|
|||
|
}
|
|||
|
}],
|
|||
|
unlockRepoForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock", {
|
|||
|
mediaType: {
|
|||
|
previews: ["wyandotte"]
|
|||
|
}
|
|||
|
}],
|
|||
|
updateImport: ["PATCH /repos/{owner}/{repo}/import"]
|
|||
|
},
|
|||
|
orgs: {
|
|||
|
blockUser: ["PUT /orgs/{org}/blocks/{username}"],
|
|||
|
cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"],
|
|||
|
checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"],
|
|||
|
checkMembershipForUser: ["GET /orgs/{org}/members/{username}"],
|
|||
|
checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"],
|
|||
|
convertMemberToOutsideCollaborator: ["PUT /orgs/{org}/outside_collaborators/{username}"],
|
|||
|
createInvitation: ["POST /orgs/{org}/invitations"],
|
|||
|
createWebhook: ["POST /orgs/{org}/hooks"],
|
|||
|
deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"],
|
|||
|
get: ["GET /orgs/{org}"],
|
|||
|
getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"],
|
|||
|
getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"],
|
|||
|
getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"],
|
|||
|
getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"],
|
|||
|
list: ["GET /organizations"],
|
|||
|
listAppInstallations: ["GET /orgs/{org}/installations"],
|
|||
|
listBlockedUsers: ["GET /orgs/{org}/blocks"],
|
|||
|
listFailedInvitations: ["GET /orgs/{org}/failed_invitations"],
|
|||
|
listForAuthenticatedUser: ["GET /user/orgs"],
|
|||
|
listForUser: ["GET /users/{username}/orgs"],
|
|||
|
listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"],
|
|||
|
listMembers: ["GET /orgs/{org}/members"],
|
|||
|
listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"],
|
|||
|
listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"],
|
|||
|
listPendingInvitations: ["GET /orgs/{org}/invitations"],
|
|||
|
listPublicMembers: ["GET /orgs/{org}/public_members"],
|
|||
|
listWebhooks: ["GET /orgs/{org}/hooks"],
|
|||
|
pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"],
|
|||
|
removeMember: ["DELETE /orgs/{org}/members/{username}"],
|
|||
|
removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"],
|
|||
|
removeOutsideCollaborator: ["DELETE /orgs/{org}/outside_collaborators/{username}"],
|
|||
|
removePublicMembershipForAuthenticatedUser: ["DELETE /orgs/{org}/public_members/{username}"],
|
|||
|
setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"],
|
|||
|
setPublicMembershipForAuthenticatedUser: ["PUT /orgs/{org}/public_members/{username}"],
|
|||
|
unblockUser: ["DELETE /orgs/{org}/blocks/{username}"],
|
|||
|
update: ["PATCH /orgs/{org}"],
|
|||
|
updateMembershipForAuthenticatedUser: ["PATCH /user/memberships/orgs/{org}"],
|
|||
|
updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"],
|
|||
|
updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"]
|
|||
|
},
|
|||
|
packages: {
|
|||
|
deletePackageForAuthenticatedUser: ["DELETE /user/packages/{package_type}/{package_name}"],
|
|||
|
deletePackageForOrg: ["DELETE /orgs/{org}/packages/{package_type}/{package_name}"],
|
|||
|
deletePackageVersionForAuthenticatedUser: ["DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}"],
|
|||
|
deletePackageVersionForOrg: ["DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"],
|
|||
|
getAllPackageVersionsForAPackageOwnedByAnOrg: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions"],
|
|||
|
getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions"],
|
|||
|
getAllPackageVersionsForPackageOwnedByUser: ["GET /users/{username}/packages/{package_type}/{package_name}/versions"],
|
|||
|
getPackageForAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}"],
|
|||
|
getPackageForOrganization: ["GET /orgs/{org}/packages/{package_type}/{package_name}"],
|
|||
|
getPackageForUser: ["GET /users/{username}/packages/{package_type}/{package_name}"],
|
|||
|
getPackageVersionForAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}"],
|
|||
|
getPackageVersionForOrganization: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"],
|
|||
|
getPackageVersionForUser: ["GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"],
|
|||
|
restorePackageForAuthenticatedUser: ["POST /user/packages/{package_type}/{package_name}/restore"],
|
|||
|
restorePackageForOrg: ["POST /orgs/{org}/packages/{package_type}/{package_name}/restore"],
|
|||
|
restorePackageVersionForAuthenticatedUser: ["POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"],
|
|||
|
restorePackageVersionForOrg: ["POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"]
|
|||
|
},
|
|||
|
projects: {
|
|||
|
addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}", {
|
|||
|
mediaType: {
|
|||
|
previews: ["inertia"]
|
|||
|
}
|
|||
|
}],
|
|||
|
createCard: ["POST /projects/columns/{column_id}/cards", {
|
|||
|
mediaType: {
|
|||
|
previews: ["inertia"]
|
|||
|
}
|
|||
|
}],
|
|||
|
createColumn: ["POST /projects/{project_id}/columns", {
|
|||
|
mediaType: {
|
|||
|
previews: ["inertia"]
|
|||
|
}
|
|||
|
}],
|
|||
|
createForAuthenticatedUser: ["POST /user/projects", {
|
|||
|
mediaType: {
|
|||
|
previews: ["inertia"]
|
|||
|
}
|
|||
|
}],
|
|||
|
createForOrg: ["POST /orgs/{org}/projects", {
|
|||
|
mediaType: {
|
|||
|
previews: ["inertia"]
|
|||
|
}
|
|||
|
}],
|
|||
|
createForRepo: ["POST /repos/{owner}/{repo}/projects", {
|
|||
|
mediaType: {
|
|||
|
previews: ["inertia"]
|
|||
|
}
|
|||
|
}],
|
|||
|
delete: ["DELETE /projects/{project_id}", {
|
|||
|
mediaType: {
|
|||
|
previews: ["inertia"]
|
|||
|
}
|
|||
|
}],
|
|||
|
deleteCard: ["DELETE /projects/columns/cards/{card_id}", {
|
|||
|
mediaType: {
|
|||
|
previews: ["inertia"]
|
|||
|
}
|
|||
|
}],
|
|||
|
deleteColumn: ["DELETE /projects/columns/{column_id}", {
|
|||
|
mediaType: {
|
|||
|
previews: ["inertia"]
|
|||
|
}
|
|||
|
}],
|
|||
|
get: ["GET /projects/{project_id}", {
|
|||
|
mediaType: {
|
|||
|
previews: ["inertia"]
|
|||
|
}
|
|||
|
}],
|
|||
|
getCard: ["GET /projects/columns/cards/{card_id}", {
|
|||
|
mediaType: {
|
|||
|
previews: ["inertia"]
|
|||
|
}
|
|||
|
}],
|
|||
|
getColumn: ["GET /projects/columns/{column_id}", {
|
|||
|
mediaType: {
|
|||
|
previews: ["inertia"]
|
|||
|
}
|
|||
|
}],
|
|||
|
getPermissionForUser: ["GET /projects/{project_id}/collaborators/{username}/permission", {
|
|||
|
mediaType: {
|
|||
|
previews: ["inertia"]
|
|||
|
}
|
|||
|
}],
|
|||
|
listCards: ["GET /projects/columns/{column_id}/cards", {
|
|||
|
mediaType: {
|
|||
|
previews: ["inertia"]
|
|||
|
}
|
|||
|
}],
|
|||
|
listCollaborators: ["GET /projects/{project_id}/collaborators", {
|
|||
|
mediaType: {
|
|||
|
previews: ["inertia"]
|
|||
|
}
|
|||
|
}],
|
|||
|
listColumns: ["GET /projects/{project_id}/columns", {
|
|||
|
mediaType: {
|
|||
|
previews: ["inertia"]
|
|||
|
}
|
|||
|
}],
|
|||
|
listForOrg: ["GET /orgs/{org}/projects", {
|
|||
|
mediaType: {
|
|||
|
previews: ["inertia"]
|
|||
|
}
|
|||
|
}],
|
|||
|
listForRepo: ["GET /repos/{owner}/{repo}/projects", {
|
|||
|
mediaType: {
|
|||
|
previews: ["inertia"]
|
|||
|
}
|
|||
|
}],
|
|||
|
listForUser: ["GET /users/{username}/projects", {
|
|||
|
mediaType: {
|
|||
|
previews: ["inertia"]
|
|||
|
}
|
|||
|
}],
|
|||
|
moveCard: ["POST /projects/columns/cards/{card_id}/moves", {
|
|||
|
mediaType: {
|
|||
|
previews: ["inertia"]
|
|||
|
}
|
|||
|
}],
|
|||
|
moveColumn: ["POST /projects/columns/{column_id}/moves", {
|
|||
|
mediaType: {
|
|||
|
previews: ["inertia"]
|
|||
|
}
|
|||
|
}],
|
|||
|
removeCollaborator: ["DELETE /projects/{project_id}/collaborators/{username}", {
|
|||
|
mediaType: {
|
|||
|
previews: ["inertia"]
|
|||
|
}
|
|||
|
}],
|
|||
|
update: ["PATCH /projects/{project_id}", {
|
|||
|
mediaType: {
|
|||
|
previews: ["inertia"]
|
|||
|
}
|
|||
|
}],
|
|||
|
updateCard: ["PATCH /projects/columns/cards/{card_id}", {
|
|||
|
mediaType: {
|
|||
|
previews: ["inertia"]
|
|||
|
}
|
|||
|
}],
|
|||
|
updateColumn: ["PATCH /projects/columns/{column_id}", {
|
|||
|
mediaType: {
|
|||
|
previews: ["inertia"]
|
|||
|
}
|
|||
|
}]
|
|||
|
},
|
|||
|
pulls: {
|
|||
|
checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"],
|
|||
|
create: ["POST /repos/{owner}/{repo}/pulls"],
|
|||
|
createReplyForReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies"],
|
|||
|
createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"],
|
|||
|
createReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments"],
|
|||
|
deletePendingReview: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"],
|
|||
|
deleteReviewComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}"],
|
|||
|
dismissReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals"],
|
|||
|
get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"],
|
|||
|
getReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"],
|
|||
|
getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"],
|
|||
|
list: ["GET /repos/{owner}/{repo}/pulls"],
|
|||
|
listCommentsForReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"],
|
|||
|
listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"],
|
|||
|
listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"],
|
|||
|
listRequestedReviewers: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"],
|
|||
|
listReviewComments: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"],
|
|||
|
listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"],
|
|||
|
listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"],
|
|||
|
merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"],
|
|||
|
removeRequestedReviewers: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"],
|
|||
|
requestReviewers: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"],
|
|||
|
submitReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events"],
|
|||
|
update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"],
|
|||
|
updateBranch: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch", {
|
|||
|
mediaType: {
|
|||
|
previews: ["lydian"]
|
|||
|
}
|
|||
|
}],
|
|||
|
updateReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"],
|
|||
|
updateReviewComment: ["PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}"]
|
|||
|
},
|
|||
|
rateLimit: {
|
|||
|
get: ["GET /rate_limit"]
|
|||
|
},
|
|||
|
reactions: {
|
|||
|
createForCommitComment: ["POST /repos/{owner}/{repo}/comments/{comment_id}/reactions", {
|
|||
|
mediaType: {
|
|||
|
previews: ["squirrel-girl"]
|
|||
|
}
|
|||
|
}],
|
|||
|
createForIssue: ["POST /repos/{owner}/{repo}/issues/{issue_number}/reactions", {
|
|||
|
mediaType: {
|
|||
|
previews: ["squirrel-girl"]
|
|||
|
}
|
|||
|
}],
|
|||
|
createForIssueComment: ["POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", {
|
|||
|
mediaType: {
|
|||
|
previews: ["squirrel-girl"]
|
|||
|
}
|
|||
|
}],
|
|||
|
createForPullRequestReviewComment: ["POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", {
|
|||
|
mediaType: {
|
|||
|
previews: ["squirrel-girl"]
|
|||
|
}
|
|||
|
}],
|
|||
|
createForTeamDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", {
|
|||
|
mediaType: {
|
|||
|
previews: ["squirrel-girl"]
|
|||
|
}
|
|||
|
}],
|
|||
|
createForTeamDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", {
|
|||
|
mediaType: {
|
|||
|
previews: ["squirrel-girl"]
|
|||
|
}
|
|||
|
}],
|
|||
|
deleteForCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}", {
|
|||
|
mediaType: {
|
|||
|
previews: ["squirrel-girl"]
|
|||
|
}
|
|||
|
}],
|
|||
|
deleteForIssue: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}", {
|
|||
|
mediaType: {
|
|||
|
previews: ["squirrel-girl"]
|
|||
|
}
|
|||
|
}],
|
|||
|
deleteForIssueComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}", {
|
|||
|
mediaType: {
|
|||
|
previews: ["squirrel-girl"]
|
|||
|
}
|
|||
|
}],
|
|||
|
deleteForPullRequestComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}", {
|
|||
|
mediaType: {
|
|||
|
previews: ["squirrel-girl"]
|
|||
|
}
|
|||
|
}],
|
|||
|
deleteForTeamDiscussion: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}", {
|
|||
|
mediaType: {
|
|||
|
previews: ["squirrel-girl"]
|
|||
|
}
|
|||
|
}],
|
|||
|
deleteForTeamDiscussionComment: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}", {
|
|||
|
mediaType: {
|
|||
|
previews: ["squirrel-girl"]
|
|||
|
}
|
|||
|
}],
|
|||
|
deleteLegacy: ["DELETE /reactions/{reaction_id}", {
|
|||
|
mediaType: {
|
|||
|
previews: ["squirrel-girl"]
|
|||
|
}
|
|||
|
}, {
|
|||
|
deprecated: "octokit.reactions.deleteLegacy() is deprecated, see https://docs.github.com/rest/reference/reactions/#delete-a-reaction-legacy"
|
|||
|
}],
|
|||
|
listForCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", {
|
|||
|
mediaType: {
|
|||
|
previews: ["squirrel-girl"]
|
|||
|
}
|
|||
|
}],
|
|||
|
listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", {
|
|||
|
mediaType: {
|
|||
|
previews: ["squirrel-girl"]
|
|||
|
}
|
|||
|
}],
|
|||
|
listForIssueComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", {
|
|||
|
mediaType: {
|
|||
|
previews: ["squirrel-girl"]
|
|||
|
}
|
|||
|
}],
|
|||
|
listForPullRequestReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", {
|
|||
|
mediaType: {
|
|||
|
previews: ["squirrel-girl"]
|
|||
|
}
|
|||
|
}],
|
|||
|
listForTeamDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", {
|
|||
|
mediaType: {
|
|||
|
previews: ["squirrel-girl"]
|
|||
|
}
|
|||
|
}],
|
|||
|
listForTeamDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", {
|
|||
|
mediaType: {
|
|||
|
previews: ["squirrel-girl"]
|
|||
|
}
|
|||
|
}]
|
|||
|
},
|
|||
|
repos: {
|
|||
|
acceptInvitation: ["PATCH /user/repository_invitations/{invitation_id}"],
|
|||
|
addAppAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, {
|
|||
|
mapToData: "apps"
|
|||
|
}],
|
|||
|
addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"],
|
|||
|
addStatusCheckContexts: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, {
|
|||
|
mapToData: "contexts"
|
|||
|
}],
|
|||
|
addTeamAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, {
|
|||
|
mapToData: "teams"
|
|||
|
}],
|
|||
|
addUserAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, {
|
|||
|
mapToData: "users"
|
|||
|
}],
|
|||
|
checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"],
|
|||
|
checkVulnerabilityAlerts: ["GET /repos/{owner}/{repo}/vulnerability-alerts", {
|
|||
|
mediaType: {
|
|||
|
previews: ["dorian"]
|
|||
|
}
|
|||
|
}],
|
|||
|
compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"],
|
|||
|
createCommitComment: ["POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"],
|
|||
|
createCommitSignatureProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", {
|
|||
|
mediaType: {
|
|||
|
previews: ["zzzax"]
|
|||
|
}
|
|||
|
}],
|
|||
|
createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"],
|
|||
|
createDeployKey: ["POST /repos/{owner}/{repo}/keys"],
|
|||
|
createDeployment: ["POST /repos/{owner}/{repo}/deployments"],
|
|||
|
createDeploymentStatus: ["POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"],
|
|||
|
createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"],
|
|||
|
createForAuthenticatedUser: ["POST /user/repos"],
|
|||
|
createFork: ["POST /repos/{owner}/{repo}/forks"],
|
|||
|
createInOrg: ["POST /orgs/{org}/repos"],
|
|||
|
createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"],
|
|||
|
createPagesSite: ["POST /repos/{owner}/{repo}/pages", {
|
|||
|
mediaType: {
|
|||
|
previews: ["switcheroo"]
|
|||
|
}
|
|||
|
}],
|
|||
|
createRelease: ["POST /repos/{owner}/{repo}/releases"],
|
|||
|
createUsingTemplate: ["POST /repos/{template_owner}/{template_repo}/generate", {
|
|||
|
mediaType: {
|
|||
|
previews: ["baptiste"]
|
|||
|
}
|
|||
|
}],
|
|||
|
createWebhook: ["POST /repos/{owner}/{repo}/hooks"],
|
|||
|
declineInvitation: ["DELETE /user/repository_invitations/{invitation_id}"],
|
|||
|
delete: ["DELETE /repos/{owner}/{repo}"],
|
|||
|
deleteAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"],
|
|||
|
deleteAdminBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"],
|
|||
|
deleteBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection"],
|
|||
|
deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"],
|
|||
|
deleteCommitSignatureProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", {
|
|||
|
mediaType: {
|
|||
|
previews: ["zzzax"]
|
|||
|
}
|
|||
|
}],
|
|||
|
deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"],
|
|||
|
deleteDeployment: ["DELETE /repos/{owner}/{repo}/deployments/{deployment_id}"],
|
|||
|
deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"],
|
|||
|
deleteInvitation: ["DELETE /repos/{owner}/{repo}/invitations/{invitation_id}"],
|
|||
|
deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages", {
|
|||
|
mediaType: {
|
|||
|
previews: ["switcheroo"]
|
|||
|
}
|
|||
|
}],
|
|||
|
deletePullRequestReviewProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"],
|
|||
|
deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"],
|
|||
|
deleteReleaseAsset: ["DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"],
|
|||
|
deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"],
|
|||
|
disableAutomatedSecurityFixes: ["DELETE /repos/{owner}/{repo}/automated-security-fixes", {
|
|||
|
mediaType: {
|
|||
|
previews: ["london"]
|
|||
|
}
|
|||
|
}],
|
|||
|
disableVulnerabilityAlerts: ["DELETE /repos/{owner}/{repo}/vulnerability-alerts", {
|
|||
|
mediaType: {
|
|||
|
previews: ["dorian"]
|
|||
|
}
|
|||
|
}],
|
|||
|
downloadArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}", {}, {
|
|||
|
renamed: ["repos", "downloadZipballArchive"]
|
|||
|
}],
|
|||
|
downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"],
|
|||
|
downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"],
|
|||
|
enableAutomatedSecurityFixes: ["PUT /repos/{owner}/{repo}/automated-security-fixes", {
|
|||
|
mediaType: {
|
|||
|
previews: ["london"]
|
|||
|
}
|
|||
|
}],
|
|||
|
enableVulnerabilityAlerts: ["PUT /repos/{owner}/{repo}/vulnerability-alerts", {
|
|||
|
mediaType: {
|
|||
|
previews: ["dorian"]
|
|||
|
}
|
|||
|
}],
|
|||
|
get: ["GET /repos/{owner}/{repo}"],
|
|||
|
getAccessRestrictions: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"],
|
|||
|
getAdminBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"],
|
|||
|
getAllStatusCheckContexts: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"],
|
|||
|
getAllTopics: ["GET /repos/{owner}/{repo}/topics", {
|
|||
|
mediaType: {
|
|||
|
previews: ["mercy"]
|
|||
|
}
|
|||
|
}],
|
|||
|
getAppsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"],
|
|||
|
getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"],
|
|||
|
getBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection"],
|
|||
|
getClones: ["GET /repos/{owner}/{repo}/traffic/clones"],
|
|||
|
getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"],
|
|||
|
getCollaboratorPermissionLevel: ["GET /repos/{owner}/{repo}/collaborators/{username}/permission"],
|
|||
|
getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"],
|
|||
|
getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"],
|
|||
|
getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"],
|
|||
|
getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"],
|
|||
|
getCommitSignatureProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", {
|
|||
|
mediaType: {
|
|||
|
previews: ["zzzax"]
|
|||
|
}
|
|||
|
}],
|
|||
|
getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"],
|
|||
|
getContent: ["GET /repos/{owner}/{repo}/contents/{path}"],
|
|||
|
getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"],
|
|||
|
getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"],
|
|||
|
getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"],
|
|||
|
getDeploymentStatus: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}"],
|
|||
|
getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"],
|
|||
|
getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"],
|
|||
|
getPages: ["GET /repos/{owner}/{repo}/pages"],
|
|||
|
getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"],
|
|||
|
getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"],
|
|||
|
getPullRequestReviewProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"],
|
|||
|
getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"],
|
|||
|
getReadme: ["GET /repos/{owner}/{repo}/readme"],
|
|||
|
getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"],
|
|||
|
getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"],
|
|||
|
getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"],
|
|||
|
getStatusChecksProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"],
|
|||
|
getTeamsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"],
|
|||
|
getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"],
|
|||
|
getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"],
|
|||
|
getUsersWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"],
|
|||
|
getViews: ["GET /repos/{owner}/{repo}/traffic/views"],
|
|||
|
getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"],
|
|||
|
getWebhookConfigForRepo: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/config"],
|
|||
|
listBranches: ["GET /repos/{owner}/{repo}/branches"],
|
|||
|
listBranchesForHeadCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head", {
|
|||
|
mediaType: {
|
|||
|
previews: ["groot"]
|
|||
|
}
|
|||
|
}],
|
|||
|
listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"],
|
|||
|
listCommentsForCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"],
|
|||
|
listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"],
|
|||
|
listCommitStatusesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/statuses"],
|
|||
|
listCommits: ["GET /repos/{owner}/{repo}/commits"],
|
|||
|
listContributors: ["GET /repos/{owner}/{repo}/contributors"],
|
|||
|
listDeployKeys: ["GET /repos/{owner}/{repo}/keys"],
|
|||
|
listDeploymentStatuses: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"],
|
|||
|
listDeployments: ["GET /repos/{owner}/{repo}/deployments"],
|
|||
|
listForAuthenticatedUser: ["GET /user/repos"],
|
|||
|
listForOrg: ["GET /orgs/{org}/repos"],
|
|||
|
listForUser: ["GET /users/{username}/repos"],
|
|||
|
listForks: ["GET /repos/{owner}/{repo}/forks"],
|
|||
|
listInvitations: ["GET /repos/{owner}/{repo}/invitations"],
|
|||
|
listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"],
|
|||
|
listLanguages: ["GET /repos/{owner}/{repo}/languages"],
|
|||
|
listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"],
|
|||
|
listPublic: ["GET /repositories"],
|
|||
|
listPullRequestsAssociatedWithCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", {
|
|||
|
mediaType: {
|
|||
|
previews: ["groot"]
|
|||
|
}
|
|||
|
}],
|
|||
|
listReleaseAssets: ["GET /repos/{owner}/{repo}/releases/{release_id}/assets"],
|
|||
|
listReleases: ["GET /repos/{owner}/{repo}/releases"],
|
|||
|
listTags: ["GET /repos/{owner}/{repo}/tags"],
|
|||
|
listTeams: ["GET /repos/{owner}/{repo}/teams"],
|
|||
|
listWebhooks: ["GET /repos/{owner}/{repo}/hooks"],
|
|||
|
merge: ["POST /repos/{owner}/{repo}/merges"],
|
|||
|
pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"],
|
|||
|
removeAppAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, {
|
|||
|
mapToData: "apps"
|
|||
|
}],
|
|||
|
removeCollaborator: ["DELETE /repos/{owner}/{repo}/collaborators/{username}"],
|
|||
|
removeStatusCheckContexts: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, {
|
|||
|
mapToData: "contexts"
|
|||
|
}],
|
|||
|
removeStatusCheckProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"],
|
|||
|
removeTeamAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, {
|
|||
|
mapToData: "teams"
|
|||
|
}],
|
|||
|
removeUserAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, {
|
|||
|
mapToData: "users"
|
|||
|
}],
|
|||
|
renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"],
|
|||
|
replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics", {
|
|||
|
mediaType: {
|
|||
|
previews: ["mercy"]
|
|||
|
}
|
|||
|
}],
|
|||
|
requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"],
|
|||
|
setAdminBranchProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"],
|
|||
|
setAppAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, {
|
|||
|
mapToData: "apps"
|
|||
|
}],
|
|||
|
setStatusCheckContexts: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, {
|
|||
|
mapToData: "contexts"
|
|||
|
}],
|
|||
|
setTeamAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, {
|
|||
|
mapToData: "teams"
|
|||
|
}],
|
|||
|
setUserAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, {
|
|||
|
mapToData: "users"
|
|||
|
}],
|
|||
|
testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"],
|
|||
|
transfer: ["POST /repos/{owner}/{repo}/transfer"],
|
|||
|
update: ["PATCH /repos/{owner}/{repo}"],
|
|||
|
updateBranchProtection: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection"],
|
|||
|
updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"],
|
|||
|
updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"],
|
|||
|
updateInvitation: ["PATCH /repos/{owner}/{repo}/invitations/{invitation_id}"],
|
|||
|
updatePullRequestReviewProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"],
|
|||
|
updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"],
|
|||
|
updateReleaseAsset: ["PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"],
|
|||
|
updateStatusCheckPotection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", {}, {
|
|||
|
renamed: ["repos", "updateStatusCheckProtection"]
|
|||
|
}],
|
|||
|
updateStatusCheckProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"],
|
|||
|
updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"],
|
|||
|
updateWebhookConfigForRepo: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config"],
|
|||
|
uploadReleaseAsset: ["POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", {
|
|||
|
baseUrl: "https://uploads.github.com"
|
|||
|
}]
|
|||
|
},
|
|||
|
search: {
|
|||
|
code: ["GET /search/code"],
|
|||
|
commits: ["GET /search/commits", {
|
|||
|
mediaType: {
|
|||
|
previews: ["cloak"]
|
|||
|
}
|
|||
|
}],
|
|||
|
issuesAndPullRequests: ["GET /search/issues"],
|
|||
|
labels: ["GET /search/labels"],
|
|||
|
repos: ["GET /search/repositories"],
|
|||
|
topics: ["GET /search/topics", {
|
|||
|
mediaType: {
|
|||
|
previews: ["mercy"]
|
|||
|
}
|
|||
|
}],
|
|||
|
users: ["GET /search/users"]
|
|||
|
},
|
|||
|
secretScanning: {
|
|||
|
getAlert: ["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"],
|
|||
|
listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"],
|
|||
|
updateAlert: ["PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"]
|
|||
|
},
|
|||
|
teams: {
|
|||
|
addOrUpdateMembershipForUserInOrg: ["PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"],
|
|||
|
addOrUpdateProjectPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}", {
|
|||
|
mediaType: {
|
|||
|
previews: ["inertia"]
|
|||
|
}
|
|||
|
}],
|
|||
|
addOrUpdateRepoPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"],
|
|||
|
checkPermissionsForProjectInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects/{project_id}", {
|
|||
|
mediaType: {
|
|||
|
previews: ["inertia"]
|
|||
|
}
|
|||
|
}],
|
|||
|
checkPermissionsForRepoInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"],
|
|||
|
create: ["POST /orgs/{org}/teams"],
|
|||
|
createDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"],
|
|||
|
createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"],
|
|||
|
deleteDiscussionCommentInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"],
|
|||
|
deleteDiscussionInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"],
|
|||
|
deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"],
|
|||
|
getByName: ["GET /orgs/{org}/teams/{team_slug}"],
|
|||
|
getDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"],
|
|||
|
getDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"],
|
|||
|
getMembershipForUserInOrg: ["GET /orgs/{org}/teams/{team_slug}/memberships/{username}"],
|
|||
|
list: ["GET /orgs/{org}/teams"],
|
|||
|
listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"],
|
|||
|
listDiscussionCommentsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"],
|
|||
|
listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"],
|
|||
|
listForAuthenticatedUser: ["GET /user/teams"],
|
|||
|
listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"],
|
|||
|
listPendingInvitationsInOrg: ["GET /orgs/{org}/teams/{team_slug}/invitations"],
|
|||
|
listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects", {
|
|||
|
mediaType: {
|
|||
|
previews: ["inertia"]
|
|||
|
}
|
|||
|
}],
|
|||
|
listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"],
|
|||
|
removeMembershipForUserInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}"],
|
|||
|
removeProjectInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}"],
|
|||
|
removeRepoInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"],
|
|||
|
updateDiscussionCommentInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"],
|
|||
|
updateDiscussionInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"],
|
|||
|
updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"]
|
|||
|
},
|
|||
|
users: {
|
|||
|
addEmailForAuthenticated: ["POST /user/emails"],
|
|||
|
block: ["PUT /user/blocks/{username}"],
|
|||
|
checkBlocked: ["GET /user/blocks/{username}"],
|
|||
|
checkFollowingForUser: ["GET /users/{username}/following/{target_user}"],
|
|||
|
checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"],
|
|||
|
createGpgKeyForAuthenticated: ["POST /user/gpg_keys"],
|
|||
|
createPublicSshKeyForAuthenticated: ["POST /user/keys"],
|
|||
|
deleteEmailForAuthenticated: ["DELETE /user/emails"],
|
|||
|
deleteGpgKeyForAuthenticated: ["DELETE /user/gpg_keys/{gpg_key_id}"],
|
|||
|
deletePublicSshKeyForAuthenticated: ["DELETE /user/keys/{key_id}"],
|
|||
|
follow: ["PUT /user/following/{username}"],
|
|||
|
getAuthenticated: ["GET /user"],
|
|||
|
getByUsername: ["GET /users/{username}"],
|
|||
|
getContextForUser: ["GET /users/{username}/hovercard"],
|
|||
|
getGpgKeyForAuthenticated: ["GET /user/gpg_keys/{gpg_key_id}"],
|
|||
|
getPublicSshKeyForAuthenticated: ["GET /user/keys/{key_id}"],
|
|||
|
list: ["GET /users"],
|
|||
|
listBlockedByAuthenticated: ["GET /user/blocks"],
|
|||
|
listEmailsForAuthenticated: ["GET /user/emails"],
|
|||
|
listFollowedByAuthenticated: ["GET /user/following"],
|
|||
|
listFollowersForAuthenticatedUser: ["GET /user/followers"],
|
|||
|
listFollowersForUser: ["GET /users/{username}/followers"],
|
|||
|
listFollowingForUser: ["GET /users/{username}/following"],
|
|||
|
listGpgKeysForAuthenticated: ["GET /user/gpg_keys"],
|
|||
|
listGpgKeysForUser: ["GET /users/{username}/gpg_keys"],
|
|||
|
listPublicEmailsForAuthenticated: ["GET /user/public_emails"],
|
|||
|
listPublicKeysForUser: ["GET /users/{username}/keys"],
|
|||
|
listPublicSshKeysForAuthenticated: ["GET /user/keys"],
|
|||
|
setPrimaryEmailVisibilityForAuthenticated: ["PATCH /user/email/visibility"],
|
|||
|
unblock: ["DELETE /user/blocks/{username}"],
|
|||
|
unfollow: ["DELETE /user/following/{username}"],
|
|||
|
updateAuthenticated: ["PATCH /user"]
|
|||
|
}
|
|||
|
};
|
|||
|
|
|||
|
// Version of the bundled plugin; exposed below as `restEndpointMethods.VERSION`.
const VERSION = "4.12.1";
|
|||
|
|
|||
|
/**
 * Expand the route map into callable request methods.
 *
 * Resulting shape: `{ scope: { methodName: requestMethod } }`, e.g.
 * `result.repos.get(...)`.
 *
 * @param {object} octokit - instance providing `request.defaults`
 * @param {object} endpointsMap - map of scope -> methodName -> [route, defaults?, decorations?]
 * @returns {object} scope-keyed map of request methods
 */
function endpointsToMethods(octokit, endpointsMap) {
  const methods = {};

  for (const scope of Object.keys(endpointsMap)) {
    const endpoints = endpointsMap[scope];

    for (const methodName of Object.keys(endpoints)) {
      const [route, defaults, decorations] = endpoints[methodName];

      // A route string is "<VERB> <url-template>", e.g. "GET /repos/{owner}/{repo}".
      const [method, url] = route.split(/ /);

      // Per-route defaults (mediaType, baseUrl, ...) layered over method + url.
      const endpointDefaults = Object.assign({
        method,
        url
      }, defaults);

      // Create the scope bucket lazily, on first method of that scope.
      const scopeMethods = methods[scope] || (methods[scope] = {});

      if (decorations) {
        // Decorated routes (mapToData / renamed / deprecated / renamedParameters)
        // get a wrapper that applies the decorations on every call.
        scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations);
      } else {
        scopeMethods[methodName] = octokit.request.defaults(endpointDefaults);
      }
    }
  }

  return methods;
}
|
|||
|
|
|||
|
/**
 * Wrap a request method so that route "decorations" are applied on each call:
 * `.mapToData` (move a named parameter into the request body), `.renamed`
 * (warn about the new method name), `.deprecated` (warn with the given
 * message), `.renamedParameters` (warn and translate old parameter names).
 *
 * @param {object} octokit - instance providing `request.defaults` and `log.warn`
 * @param {string} scope - endpoint scope, used in warning messages
 * @param {string} methodName - endpoint method name, used in warning messages
 * @param {object} defaults - endpoint defaults (method, url, ...)
 * @param {object} decorations - decoration descriptor for this route
 * @returns {Function} decorated request method
 */
function decorate(octokit, scope, methodName, defaults, decorations) {
  // Bind the endpoint defaults once; decorations run on every invocation.
  const baseRequest = octokit.request.defaults(defaults);
  /* istanbul ignore next */

  function requestWithDecorations(...args) {
    // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
    const merged = baseRequest.endpoint.merge(...args); // There are currently no other decorations than `.mapToData`

    if (decorations.mapToData) {
      const key = decorations.mapToData;
      const remapped = Object.assign({}, merged, {
        data: merged[key],
        [key]: undefined
      });
      return baseRequest(remapped);
    }

    if (decorations.renamed) {
      const [newScope, newMethodName] = decorations.renamed;
      octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`);
    }

    if (decorations.deprecated) {
      octokit.log.warn(decorations.deprecated);
    }

    if (decorations.renamedParameters) {
      // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
      const options = baseRequest.endpoint.merge(...args);

      for (const [name, alias] of Object.entries(decorations.renamedParameters)) {
        if (!(name in options)) {
          continue;
        }

        octokit.log.warn(`"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead`);

        // Do not clobber a value the caller already supplied under the new name.
        if (!(alias in options)) {
          options[alias] = options[name];
        }

        delete options[name];
      }

      return baseRequest(options);
    } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488


    return baseRequest(...args);
  }

  // Copy `.endpoint` / `.defaults` etc. from the bound request method onto the wrapper.
  return Object.assign(requestWithDecorations, baseRequest);
}
|
|||
|
|
|||
|
/**
 * Plugin entry point: expands the `Endpoints` route map defined above into
 * scope-grouped request methods (e.g. `repos.get`, `pulls.create`) bound to
 * the given Octokit instance.
 * @param {object} octokit - Octokit instance providing `request.defaults`
 * @returns {object} map of scope -> method name -> request method
 */
function restEndpointMethods(octokit) {
  return endpointsToMethods(octokit, Endpoints);
}

// Expose the bundled plugin version on the function itself.
restEndpointMethods.VERSION = VERSION;

exports.restEndpointMethods = restEndpointMethods;
//# sourceMappingURL=index.js.map
|
|||
|
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 6157:
|
|||
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
|
|||
|
|
|||
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|||
|
|
|||
|
// Unwrap a transpiled ES-module namespace object to its default export;
// anything else (primitives, null, objects without `default`) passes through.
function _interopDefault (ex) {
  const isWrappedModule = ex && (typeof ex === 'object') && 'default' in ex;
  return isWrappedModule ? ex['default'] : ex;
}
|
|||
|
|
|||
|
var deprecation = __nccwpck_require__(6885);
|
|||
|
var once = _interopDefault(__nccwpck_require__(4497));
|
|||
|
|
|||
|
// Emit a given deprecation warning at most once per process.
const logOnce = once(deprecation => console.warn(deprecation));
/**
 * Error with extra properties to help with debugging
 *
 * Thrown for non-success HTTP responses. Carries the HTTP status
 * (`error.status`), response headers, and a redacted copy of the request.
 */

class RequestError extends Error {
  /**
   * @param {string} message - error message (typically the response body/statusText)
   * @param {number} statusCode - HTTP status code, exposed as `error.status`
   * @param {object} options - must include `request`; may include `headers`
   */
  constructor(message, statusCode, options) {
    super(message); // Maintains proper stack trace (only available on V8)

    /* istanbul ignore next */

    if (Error.captureStackTrace) {
      Error.captureStackTrace(this, this.constructor);
    }

    this.name = "HttpError";
    this.status = statusCode;
    // Legacy alias for `status`: reading `error.code` warns once, then
    // returns the same status code.
    Object.defineProperty(this, "code", {
      get() {
        logOnce(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`."));
        return statusCode;
      }

    });
    this.headers = options.headers || {}; // redact request credentials without mutating original request options

    const requestCopy = Object.assign({}, options.request);

    // Strip the token portion of the Authorization header, keeping the scheme.
    if (options.request.headers.authorization) {
      requestCopy.headers = Object.assign({}, options.request.headers, {
        authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]")
      });
    }

    requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit
    // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications
    .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended
    // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header
    .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]");
    this.request = requestCopy;
  }

}
|
|||
|
|
|||
|
exports.RequestError = RequestError;
|
|||
|
//# sourceMappingURL=index.js.map
|
|||
|
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 9826:
|
|||
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
|
|||
|
|
|||
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|||
|
|
|||
|
// Unwrap a transpiled ES-module namespace object: prefer its `default`
// export when present, otherwise return the value unchanged.
function _interopDefault (ex) {
  if (ex && (typeof ex === 'object') && 'default' in ex) {
    return ex['default'];
  }
  return ex;
}
|
|||
|
|
|||
|
var endpoint = __nccwpck_require__(6214);
|
|||
|
var universalUserAgent = __nccwpck_require__(2102);
|
|||
|
var isPlainObject = __nccwpck_require__(8636);
|
|||
|
var nodeFetch = _interopDefault(__nccwpck_require__(4198));
|
|||
|
var requestError = __nccwpck_require__(6157);
|
|||
|
|
|||
|
const VERSION = "5.4.14";

/**
 * Read a fetch Response body as an ArrayBuffer.
 * @param {object} response - fetch Response-like object
 * @returns {Promise<ArrayBuffer>} promise from response.arrayBuffer()
 */
function getBufferResponse(response) {
  const buffered = response.arrayBuffer();
  return buffered;
}
|
|||
|
|
|||
|
/**
 * Dispatch parsed endpoint options with fetch (node-fetch unless a custom
 * `request.fetch` is supplied) and normalize the outcome to
 * `{ status, url, headers, data }`. 3xx/4xx/5xx responses and transport
 * failures are surfaced as RequestError.
 * @param {object} requestOptions - url, method, headers, body, redirect, request
 * @returns {Promise<{status: number, url: string, headers: object, data: *}>}
 */
function fetchWrapper(requestOptions) {
  // Plain objects and arrays are serialized to a JSON string body.
  if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {
    requestOptions.body = JSON.stringify(requestOptions.body);
  }

  // Shared across the .then() chain so the final result and error paths
  // can reference them.
  let headers = {};
  let status;
  let url;
  // Allow callers to inject a fetch implementation via request.fetch.
  const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch;
  return fetch(requestOptions.url, Object.assign({
    method: requestOptions.method,
    body: requestOptions.body,
    headers: requestOptions.headers,
    redirect: requestOptions.redirect
  }, requestOptions.request)).then(response => {
    url = response.url;
    status = response.status;

    // Copy [key, value] pairs into a plain object.
    for (const keyAndValue of response.headers) {
      headers[keyAndValue[0]] = keyAndValue[1];
    }

    // No-content responses resolve with data === undefined.
    if (status === 204 || status === 205) {
      return;
    } // GitHub API returns 200 for HEAD requests


    if (requestOptions.method === "HEAD") {
      if (status < 400) {
        return;
      }

      throw new requestError.RequestError(response.statusText, status, {
        headers,
        request: requestOptions
      });
    }

    if (status === 304) {
      throw new requestError.RequestError("Not modified", status, {
        headers,
        request: requestOptions
      });
    }

    // Error responses: use the body text as the message and, when it is
    // JSON, merge its fields (and any `errors` array) into the error.
    if (status >= 400) {
      return response.text().then(message => {
        const error = new requestError.RequestError(message, status, {
          headers,
          request: requestOptions
        });

        try {
          let responseBody = JSON.parse(error.message);
          Object.assign(error, responseBody);
          let errors = responseBody.errors; // Assumption `errors` would always be in Array format

          error.message = error.message + ": " + errors.map(JSON.stringify).join(", ");
        } catch (e) {// ignore, see octokit/rest.js#684
        }

        throw error;
      });
    }

    // Success: choose a decoder by content-type — JSON -> object,
    // text -> string, anything else -> ArrayBuffer.
    const contentType = response.headers.get("content-type");

    if (/application\/json/.test(contentType)) {
      return response.json();
    }

    if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) {
      return response.text();
    }

    return getBufferResponse(response);
  }).then(data => {
    return {
      status,
      url,
      headers,
      data
    };
  }).catch(error => {
    // RequestErrors thrown above pass through unchanged; anything else
    // (network failure, decode error) is wrapped with status 500.
    if (error instanceof requestError.RequestError) {
      throw error;
    }

    throw new requestError.RequestError(error.message, 500, {
      headers,
      request: requestOptions
    });
  });
}
|
|||
|
|
|||
|
/**
 * Build a `request` function bound to `oldEndpoint` extended with
 * `newDefaults`. The returned function exposes `.endpoint` and
 * `.defaults()` so further defaults can be layered on.
 * @param {Function} oldEndpoint - endpoint builder to extend
 * @param {object} newDefaults - defaults merged into the endpoint
 * @returns {Function} request function
 */
function withDefaults(oldEndpoint, newDefaults) {
  const endpoint = oldEndpoint.defaults(newDefaults);

  const newApi = function (route, parameters) {
    const endpointOptions = endpoint.merge(route, parameters);

    // No hook registered: dispatch directly.
    if (!endpointOptions.request || !endpointOptions.request.hook) {
      return fetchWrapper(endpoint.parse(endpointOptions));
    }

    // A hook is registered: hand it a minimal request function so it can
    // wrap/replace the dispatch.
    const request = (route, parameters) => {
      return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters)));
    };

    Object.assign(request, {
      endpoint,
      defaults: withDefaults.bind(null, endpoint)
    });
    return endpointOptions.request.hook(request, endpointOptions);
  };

  return Object.assign(newApi, {
    endpoint,
    defaults: withDefaults.bind(null, endpoint)
  });
}
|
|||
|
|
|||
|
// Default exported `request` instance, pre-configured with the library's
// user-agent header.
const request = withDefaults(endpoint.endpoint, {
  headers: {
    "user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}`
  }
});

exports.request = request;
//# sourceMappingURL=index.js.map
|
|||
|
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 526:
|
|||
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|||
|
|
|||
|
var register = __nccwpck_require__(130)
|
|||
|
var addHook = __nccwpck_require__(1197)
|
|||
|
var removeHook = __nccwpck_require__(325)
|
|||
|
|
|||
|
// bind with array of arguments: https://stackoverflow.com/a/21792913
// `bindable(fn, thisArg).apply(null, args)` is equivalent to
// `fn.bind(thisArg, ...args)` — partial application from an args array.
var bind = Function.bind
var bindable = bind.bind(bind)

// Attach the public API to a hook function: `.remove()` plus one
// registration method per kind (`before`/`error`/`after`/`wrap`), each
// pre-bound to this state. `name` is set for singular hooks and omitted
// for collections.
function bindApi (hook, state, name) {
  var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state])
  hook.api = { remove: removeHookRef }
  hook.remove = removeHookRef

  ;['before', 'error', 'after', 'wrap'].forEach(function (kind) {
    var args = name ? [state, kind, name] : [state, kind]
    hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args)
  })
}
|
|||
|
|
|||
|
/**
 * Create a "singular" hook: one unnamed hook channel, stored internally
 * under the fixed registry name "h".
 * @returns {Function} hook function with .api/.remove/.before/... attached
 */
function HookSingular () {
  var hookName = 'h'
  var hookState = {
    registry: {}
  }
  var singularHook = register.bind(null, hookState, hookName)
  bindApi(singularHook, hookState, hookName)
  return singularHook
}
|
|||
|
|
|||
|
/**
 * Create a hook collection: arbitrarily named hook channels sharing one
 * registry object.
 * @returns {Function} hook function with .api/.remove/.before/... attached
 */
function HookCollection () {
  var collectionState = {
    registry: {}
  }
  var hook = register.bind(null, collectionState)
  bindApi(hook, collectionState)
  return hook
}
|
|||
|
|
|||
|
// One-time gate so the legacy Hook() warning is printed at most once
// per process.
var collectionHookDeprecationMessageDisplayed = false

// Legacy entry point: identical to Hook.Collection(), but warns once
// about the rename.
function Hook () {
  if (!collectionHookDeprecationMessageDisplayed) {
    console.warn('[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4')
    collectionHookDeprecationMessageDisplayed = true
  }
  return HookCollection()
}
|
|||
|
|
|||
|
// .bind() with no arguments produces detached copies of the constructors.
Hook.Singular = HookSingular.bind()
Hook.Collection = HookCollection.bind()

module.exports = Hook
// expose constructors as a named property for TypeScript
module.exports.Hook = Hook
module.exports.Singular = Hook.Singular
module.exports.Collection = Hook.Collection
|
|||
|
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 1197:
|
|||
|
/***/ ((module) => {
|
|||
|
|
|||
|
module.exports = addHook;
|
|||
|
|
|||
|
/**
 * Register a hook function under `name`, wrapped according to `kind`.
 * "before"/"after"/"error" hooks are wrapped so they compose with the
 * method they decorate; "wrap" (and any other kind) is stored as-is.
 * @param {object} state - shared state holding `registry`
 * @param {string} kind - "before" | "after" | "error" | "wrap"
 * @param {string} name - hook channel name
 * @param {Function} hook - user-supplied hook function
 */
function addHook(state, kind, name, hook) {
  // Keep a reference to the unwrapped function so removeHook can match it.
  var orig = hook;

  if (!state.registry[name]) {
    state.registry[name] = [];
  }

  switch (kind) {
    case "before":
      // Run the registered fn first, then the wrapped method; both receive
      // the same `options` object.
      hook = function (method, options) {
        return Promise.resolve()
          .then(orig.bind(null, options))
          .then(method.bind(null, options));
      };
      break;

    case "after":
      // Run the method, hand its result to the registered fn, and resolve
      // with the method's result regardless of what the fn returns.
      hook = function (method, options) {
        var result;
        return Promise.resolve()
          .then(method.bind(null, options))
          .then(function (result_) {
            result = result_;
            return orig(result, options);
          })
          .then(function () {
            return result;
          });
      };
      break;

    case "error":
      // Only invoked when the method rejects; the registered fn may
      // recover (return a value) or rethrow.
      hook = function (method, options) {
        return Promise.resolve()
          .then(method.bind(null, options))
          .catch(function (error) {
            return orig(error, options);
          });
      };
      break;
  }

  state.registry[name].push({
    hook: hook,
    orig: orig,
  });
}
|
|||
|
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 130:
|
|||
|
/***/ ((module) => {
|
|||
|
|
|||
|
module.exports = register;
|
|||
|
|
|||
|
/**
 * Invoke `method(options)` wrapped by all hooks registered under `name`.
 * An array of names nests the registration, innermost name first.
 * @param {object} state - shared state holding `registry`
 * @param {string|string[]} name - hook channel name(s)
 * @param {Function} method - the function being decorated
 * @param {object} [options] - passed to every hook and to `method`
 * @returns {Promise<*>} resolution of the fully wrapped invocation
 * @throws {Error} synchronously when `method` is not a function
 */
function register(state, name, method, options) {
  if (typeof method !== "function") {
    throw new Error("method for before hook must be a function");
  }

  options = options || {};

  // Multiple names: wrap from the inside out by reducing over the
  // reversed list (reverse() intentionally mutates, matching upstream).
  if (Array.isArray(name)) {
    return name.reverse().reduce(function (callback, name) {
      return register.bind(null, state, name, callback, options);
    }, method)();
  }

  return Promise.resolve().then(function () {
    var registered = state.registry[name];

    // Nothing registered: call the method directly.
    if (!registered) {
      return method(options);
    }

    // Fold each registered wrapper around the method, then invoke.
    return registered.reduce(function (method, entry) {
      return entry.hook.bind(null, method, options);
    }, method)();
  });
}
|
|||
|
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 325:
|
|||
|
/***/ ((module) => {
|
|||
|
|
|||
|
module.exports = removeHook;
|
|||
|
|
|||
|
/**
 * Remove the first hook registered under `name` whose original (unwrapped)
 * function is `method`. No-op when the name or method is not registered.
 * @param {object} state - shared state holding `registry`
 * @param {string} name - hook channel name
 * @param {Function} method - the originally registered function
 */
function removeHook(state, name, method) {
  var hooks = state.registry[name];

  if (!hooks) {
    return;
  }

  // Match against `orig`, not the wrapped `hook`, since callers only ever
  // hold the function they registered.
  var index = -1;
  for (var i = 0; i < hooks.length; i++) {
    if (hooks[i].orig === method) {
      index = i;
      break;
    }
  }

  if (index === -1) {
    return;
  }

  hooks.splice(index, 1);
}
|
|||
|
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 6885:
|
|||
|
/***/ ((__unused_webpack_module, exports) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
|
|||
|
|
|||
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|||
|
|
|||
|
/**
 * Error subclass used to signal deprecation warnings; distinguishable
 * from other errors via `error.name === "Deprecation"`.
 */
class Deprecation extends Error {
  /**
   * @param {string} message - human-readable deprecation notice
   */
  constructor(message) {
    super(message); // Maintains proper stack trace (only available on V8)

    /* istanbul ignore next */
    if (typeof Error.captureStackTrace === "function") {
      Error.captureStackTrace(this, this.constructor);
    }

    this.name = "Deprecation";
  }
}
|
|||
|
|
|||
|
exports.Deprecation = Deprecation;
|
|||
|
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 8636:
|
|||
|
/***/ ((__unused_webpack_module, exports) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
|
|||
|
|
|||
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|||
|
|
|||
|
/*!
|
|||
|
* is-plain-object <https://github.com/jonschlinkert/is-plain-object>
|
|||
|
*
|
|||
|
* Copyright (c) 2014-2017, Jon Schlinkert.
|
|||
|
* Released under the MIT License.
|
|||
|
*/
|
|||
|
|
|||
|
/**
 * True when `o`'s internal [[Class]] tag is exactly "Object" — plain
 * object literals and Object.create(null) qualify; arrays, dates, null
 * and primitives do not.
 * @param {*} o
 * @returns {boolean}
 */
function isObject(o) {
  const tag = Object.prototype.toString.call(o);
  return tag === '[object Object]';
}
|
|||
|
|
|||
|
/**
 * True only for "plain" objects — created by `{}`/`new Object()` or with
 * no prototype at all — as opposed to class instances, arrays, etc.
 * @param {*} o
 * @returns {boolean}
 */
function isPlainObject(o) {
  const tagIsObject = (v) => Object.prototype.toString.call(v) === '[object Object]';

  if (!tagIsObject(o)) {
    return false;
  }

  // Objects with no prototype (Object.create(null)) expose no constructor.
  const ctor = o.constructor;
  if (ctor === undefined) {
    return true;
  }

  // A modified/custom prototype disqualifies the object.
  const prot = ctor.prototype;
  if (!tagIsObject(prot)) {
    return false;
  }

  // Object.prototype carries isPrototypeOf as an own property; a class
  // prototype does not.
  if (prot.hasOwnProperty('isPrototypeOf') === false) {
    return false;
  }

  // Most likely a plain Object
  return true;
}
|
|||
|
|
|||
|
exports.isPlainObject = isPlainObject;
|
|||
|
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 4198:
|
|||
|
/***/ ((module, exports, __nccwpck_require__) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
|
|||
|
|
|||
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|||
|
|
|||
|
function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
|
|||
|
|
|||
|
var Stream = _interopDefault(__nccwpck_require__(2413));
|
|||
|
var http = _interopDefault(__nccwpck_require__(8605));
|
|||
|
var Url = _interopDefault(__nccwpck_require__(8835));
|
|||
|
var https = _interopDefault(__nccwpck_require__(7211));
|
|||
|
var zlib = _interopDefault(__nccwpck_require__(8761));
|
|||
|
|
|||
|
// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js

// fix for "Readable" isn't a named export issue
const Readable = Stream.Readable;

// Symbol-keyed slots used as "private" storage for Blob's backing buffer
// and MIME type (pre-#private-field idiom).
const BUFFER = Symbol('buffer');
const TYPE = Symbol('type');
|||
|
|
|||
|
/**
 * Minimal W3C Blob implementation backed by a single Node Buffer, stored
 * on the BUFFER symbol slot; the (lowercased) MIME type lives on TYPE.
 */
class Blob {
  // constructor(blobParts?, options?) — parts may be Buffers, typed
  // arrays, ArrayBuffers, other Blobs, or values coerced to strings.
  constructor() {
    this[TYPE] = '';

    const blobParts = arguments[0];
    const options = arguments[1];

    const buffers = [];
    let size = 0;

    if (blobParts) {
      const a = blobParts;
      const length = Number(a.length);
      for (let i = 0; i < length; i++) {
        const element = a[i];
        let buffer;
        if (element instanceof Buffer) {
          buffer = element;
        } else if (ArrayBuffer.isView(element)) {
          buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);
        } else if (element instanceof ArrayBuffer) {
          buffer = Buffer.from(element);
        } else if (element instanceof Blob) {
          buffer = element[BUFFER];
        } else {
          // fallback: stringify anything else
          buffer = Buffer.from(typeof element === 'string' ? element : String(element));
        }
        size += buffer.length;
        buffers.push(buffer);
      }
    }

    this[BUFFER] = Buffer.concat(buffers);

    // Only accept an ASCII-printable type; otherwise it stays ''.
    let type = options && options.type !== undefined && String(options.type).toLowerCase();
    if (type && !/[^\u0020-\u007E]/.test(type)) {
      this[TYPE] = type;
    }
  }
  // Total byte length of the backing buffer.
  get size() {
    return this[BUFFER].length;
  }
  // Lowercased MIME type ('' when none/invalid was given).
  get type() {
    return this[TYPE];
  }
  text() {
    return Promise.resolve(this[BUFFER].toString());
  }
  arrayBuffer() {
    // Slice so the returned ArrayBuffer covers exactly this blob's bytes.
    const buf = this[BUFFER];
    const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
    return Promise.resolve(ab);
  }
  // Expose the buffer as a one-shot Readable stream.
  stream() {
    const readable = new Readable();
    readable._read = function () {};
    readable.push(this[BUFFER]);
    readable.push(null);
    return readable;
  }
  toString() {
    return '[object Blob]';
  }
  // slice(start?, end?, contentType?) with spec-style negative-index and
  // clamping semantics.
  slice() {
    const size = this.size;

    const start = arguments[0];
    const end = arguments[1];
    let relativeStart, relativeEnd;
    if (start === undefined) {
      relativeStart = 0;
    } else if (start < 0) {
      relativeStart = Math.max(size + start, 0);
    } else {
      relativeStart = Math.min(start, size);
    }
    if (end === undefined) {
      relativeEnd = size;
    } else if (end < 0) {
      relativeEnd = Math.max(size + end, 0);
    } else {
      relativeEnd = Math.min(end, size);
    }
    const span = Math.max(relativeEnd - relativeStart, 0);

    const buffer = this[BUFFER];
    const slicedBuffer = buffer.slice(relativeStart, relativeStart + span);
    // Construct an empty blob and swap in the sliced buffer to avoid a copy.
    const blob = new Blob([], { type: arguments[2] });
    blob[BUFFER] = slicedBuffer;
    return blob;
  }
}
|
|||
|
|
|||
|
// In browsers these Blob members are enumerable; mirror that here.
Object.defineProperties(Blob.prototype, {
  size: { enumerable: true },
  type: { enumerable: true },
  slice: { enumerable: true }
});

// Make Object.prototype.toString.call(blob) yield "[object Blob]".
Object.defineProperty(Blob.prototype, Symbol.toStringTag, {
  value: 'Blob',
  writable: false,
  enumerable: false,
  configurable: true
});
|
|||
|
|
|||
|
/**
|
|||
|
* fetch-error.js
|
|||
|
*
|
|||
|
* FetchError interface for operational errors
|
|||
|
*/
|
|||
|
|
|||
|
/**
|
|||
|
* Create FetchError instance
|
|||
|
*
|
|||
|
* @param String message Error message for human
|
|||
|
* @param String type Error type for machine
|
|||
|
* @param String systemError For Node.js system error
|
|||
|
* @return FetchError
|
|||
|
*/
|
|||
|
/**
 * Create FetchError instance — operational error for fetch failures.
 * Deliberately a constructor function (not a class) to match upstream.
 *
 * @param  String      message      Error message for human
 * @param  String      type         Error type for machine (e.g. 'system', 'body-timeout')
 * @param  String      systemError  For Node.js system error
 * @return FetchError
 */
function FetchError(message, type, systemError) {
  Error.call(this, message);

  this.message = message;
  this.type = type;

  // when err.type is `system`, err.code contains system error code
  if (systemError) {
    this.code = systemError.code;
    this.errno = systemError.code;
  }

  // hide custom error implementation details from end-users
  Error.captureStackTrace(this, this.constructor);
}

FetchError.prototype = Object.create(Error.prototype);
FetchError.prototype.constructor = FetchError;
FetchError.prototype.name = 'FetchError';
|
|||
|
|
|||
|
// `encoding` is an optional dependency; only textConverted() requires it,
// so a failed require is silently tolerated here and reported there.
let convert;
try {
  convert = __nccwpck_require__(7058)/* .convert */ .O;
} catch (e) {}

// Symbol-keyed slot for Body internals: { body, disturbed, error }.
const INTERNALS = Symbol('Body internals');

// fix an issue where "PassThrough" isn't a named export for node <10
const PassThrough = Stream.PassThrough;
|
|||
|
|
|||
|
/**
|
|||
|
* Body mixin
|
|||
|
*
|
|||
|
* Ref: https://fetch.spec.whatwg.org/#body
|
|||
|
*
|
|||
|
* @param Stream body Readable stream
|
|||
|
* @param Object opts Response options
|
|||
|
* @return Void
|
|||
|
*/
|
|||
|
// Body mixin constructor: normalizes the given body to null, a Buffer, a
// Blob, or a Stream, and stores it (plus bookkeeping flags) on the
// INTERNALS symbol slot. `size`/`timeout` (both default 0 = unlimited)
// are enforced later by consumeBody().
function Body(body) {
  var _this = this;

  // Transpiled `{ size = 0, timeout = 0 } = arguments[1] || {}`.
  var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},
      _ref$size = _ref.size;

  let size = _ref$size === undefined ? 0 : _ref$size;
  var _ref$timeout = _ref.timeout;
  let timeout = _ref$timeout === undefined ? 0 : _ref$timeout;

  if (body == null) {
    // body is undefined or null
    body = null;
  } else if (isURLSearchParams(body)) {
    // body is a URLSearchParams
    body = Buffer.from(body.toString());
  } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {
    // body is ArrayBuffer
    body = Buffer.from(body);
  } else if (ArrayBuffer.isView(body)) {
    // body is ArrayBufferView
    body = Buffer.from(body.buffer, body.byteOffset, body.byteLength);
  } else if (body instanceof Stream) ; else {
    // none of the above
    // coerce to string then buffer
    body = Buffer.from(String(body));
  }
  this[INTERNALS] = {
    body,
    disturbed: false,
    error: null
  };
  this.size = size;
  this.timeout = timeout;

  // Record stream errors so a later consumeBody() call rejects with them.
  if (body instanceof Stream) {
    body.on('error', function (err) {
      const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);
      _this[INTERNALS].error = error;
    });
  }
}
|
|||
|
|
|||
|
// Shared accessors/decoders mixed into Request and Response prototypes
// (see Body.mixIn below). Every decoder consumes the body exactly once.
Body.prototype = {
  // The normalized body: null, Buffer, Blob, or Stream.
  get body() {
    return this[INTERNALS].body;
  },

  // True once any decoder has started consuming the body.
  get bodyUsed() {
    return this[INTERNALS].disturbed;
  },

  /**
   * Decode response as ArrayBuffer
   *
   * @return  Promise
   */
  arrayBuffer() {
    return consumeBody.call(this).then(function (buf) {
      return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
    });
  },

  /**
   * Return raw response as Blob
   *
   * @return Promise
   */
  blob() {
    let ct = this.headers && this.headers.get('content-type') || '';
    return consumeBody.call(this).then(function (buf) {
      return Object.assign(
      // Prevent copying
      new Blob([], {
        type: ct.toLowerCase()
      }), {
        [BUFFER]: buf
      });
    });
  },

  /**
   * Decode response as json
   *
   * @return  Promise
   */
  json() {
    var _this2 = this;

    return consumeBody.call(this).then(function (buffer) {
      try {
        return JSON.parse(buffer.toString());
      } catch (err) {
        // parse failures become a typed FetchError, not a bare SyntaxError
        return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));
      }
    });
  },

  /**
   * Decode response as text
   *
   * @return  Promise
   */
  text() {
    return consumeBody.call(this).then(function (buffer) {
      return buffer.toString();
    });
  },

  /**
   * Decode response as buffer (non-spec api)
   *
   * @return  Promise
   */
  buffer() {
    return consumeBody.call(this);
  },

  /**
   * Decode response as text, while automatically detecting the encoding and
   * trying to decode to UTF-8 (non-spec api)
   *
   * @return  Promise
   */
  textConverted() {
    var _this3 = this;

    return consumeBody.call(this).then(function (buffer) {
      return convertBody(buffer, _this3.headers);
    });
  }
};
|
|||
|
|
|||
|
// In browsers, all properties are enumerable.
Object.defineProperties(Body.prototype, {
  body: { enumerable: true },
  bodyUsed: { enumerable: true },
  arrayBuffer: { enumerable: true },
  blob: { enumerable: true },
  json: { enumerable: true },
  text: { enumerable: true }
});

// Copy Body's members (including getters, via their descriptors) onto a
// target prototype, skipping anything the target already defines.
Body.mixIn = function (proto) {
  for (const name of Object.getOwnPropertyNames(Body.prototype)) {
    // istanbul ignore else: future proof
    if (!(name in proto)) {
      const desc = Object.getOwnPropertyDescriptor(Body.prototype, name);
      Object.defineProperty(proto, name, desc);
    }
  }
};
|
|||
|
|
|||
|
/**
|
|||
|
* Consume and convert an entire Body to a Buffer.
|
|||
|
*
|
|||
|
* Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body
|
|||
|
*
|
|||
|
* @return Promise
|
|||
|
*/
|
|||
|
// Consume the entire body (called with `this` bound to a Body) and resolve
// with a single Buffer. Enforces single use (`disturbed`), the optional
// byte-size limit, and the optional read timeout. Uses Body.Promise so the
// Promise implementation stays overridable.
function consumeBody() {
  var _this4 = this;

  if (this[INTERNALS].disturbed) {
    return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));
  }

  this[INTERNALS].disturbed = true;

  // A stream error recorded earlier (see Body) surfaces here.
  if (this[INTERNALS].error) {
    return Body.Promise.reject(this[INTERNALS].error);
  }

  let body = this.body;

  // body is null
  if (body === null) {
    return Body.Promise.resolve(Buffer.alloc(0));
  }

  // body is blob
  if (isBlob(body)) {
    body = body.stream();
  }

  // body is buffer
  if (Buffer.isBuffer(body)) {
    return Body.Promise.resolve(body);
  }

  // istanbul ignore if: should never happen
  if (!(body instanceof Stream)) {
    return Body.Promise.resolve(Buffer.alloc(0));
  }

  // body is stream
  // get ready to actually consume the body
  let accum = [];
  let accumBytes = 0;
  let abort = false;

  return new Body.Promise(function (resolve, reject) {
    let resTimeout;

    // allow timeout on slow response body
    if (_this4.timeout) {
      resTimeout = setTimeout(function () {
        abort = true;
        reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));
      }, _this4.timeout);
    }

    // handle stream errors
    body.on('error', function (err) {
      if (err.name === 'AbortError') {
        // if the request was aborted, reject with this Error
        abort = true;
        reject(err);
      } else {
        // other errors, such as incorrect content-encoding
        reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));
      }
    });

    body.on('data', function (chunk) {
      // `abort` keeps late chunks from growing the accumulator after a
      // rejection has already been issued.
      if (abort || chunk === null) {
        return;
      }

      if (_this4.size && accumBytes + chunk.length > _this4.size) {
        abort = true;
        reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));
        return;
      }

      accumBytes += chunk.length;
      accum.push(chunk);
    });

    body.on('end', function () {
      if (abort) {
        return;
      }

      clearTimeout(resTimeout);

      try {
        resolve(Buffer.concat(accum, accumBytes));
      } catch (err) {
        // handle streams that have accumulated too much data (issue #414)
        reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));
      }
    });
  });
}
|
|||
|
|
|||
|
/**
|
|||
|
* Detect buffer encoding and convert to target encoding
|
|||
|
* ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding
|
|||
|
*
|
|||
|
* @param Buffer buffer Incoming buffer
|
|||
|
* @param String encoding Target encoding
|
|||
|
* @return String
|
|||
|
*/
|
|||
|
// Decode a body Buffer to a UTF-8 string, sniffing the source charset
// from the Content-Type header or (failing that) from meta/xml
// declarations in the first 1024 bytes. Requires the optional `encoding`
// package (see `convert` above).
function convertBody(buffer, headers) {
  if (typeof convert !== 'function') {
    throw new Error('The package `encoding` must be installed to use the textConverted() function');
  }

  const ct = headers.get('content-type');
  let charset = 'utf-8';
  let res, str;

  // header
  if (ct) {
    res = /charset=([^;]*)/i.exec(ct);
  }

  // no charset in content type, peek at response body for at most 1024 bytes
  str = buffer.slice(0, 1024).toString();

  // html5
  if (!res && str) {
    res = /<meta.+?charset=(['"])(.+?)\1/i.exec(str);
  }

  // html4
  if (!res && str) {
    res = /<meta[\s]+?http-equiv=(['"])content-type\1[\s]+?content=(['"])(.+?)\2/i.exec(str);
    if (!res) {
      res = /<meta[\s]+?content=(['"])(.+?)\1[\s]+?http-equiv=(['"])content-type\3/i.exec(str);
      if (res) {
        res.pop(); // drop last quote
      }
    }

    // re-extract the charset= token from the matched content attribute
    if (res) {
      res = /charset=(.*)/i.exec(res.pop());
    }
  }

  // xml
  if (!res && str) {
    res = /<\?xml.+?encoding=(['"])(.+?)\1/i.exec(str);
  }

  // found charset (last capture group of whichever regex matched)
  if (res) {
    charset = res.pop();

    // prevent decode issues when sites use incorrect encoding
    // ref: https://hsivonen.fi/encoding-menu/
    if (charset === 'gb2312' || charset === 'gbk') {
      charset = 'gb18030';
    }
  }

  // turn raw buffers into a single utf-8 buffer
  return convert(buffer, 'UTF-8', charset).toString();
}
|
|||
|
|
|||
|
/**
|
|||
|
* Detect a URLSearchParams object
|
|||
|
* ref: https://github.com/bitinn/node-fetch/issues/296#issuecomment-307598143
|
|||
|
*
|
|||
|
* @param Object obj Object to detect by type or brand
|
|||
|
* @return String
|
|||
|
*/
|
|||
|
/**
 * Detect a URLSearchParams object
 * ref: https://github.com/bitinn/node-fetch/issues/296#issuecomment-307598143
 *
 * @param   {*} obj - Object to detect by type or brand
 * @returns {boolean}
 */
function isURLSearchParams(obj) {
  // Duck-typing as a necessary condition.
  const requiredMethods = ['append', 'delete', 'get', 'getAll', 'has', 'set'];
  if (typeof obj !== 'object' || requiredMethods.some((m) => typeof obj[m] !== 'function')) {
    return false;
  }

  // Brand-checking and more duck-typing as optional condition.
  return (
    obj.constructor.name === 'URLSearchParams' ||
    Object.prototype.toString.call(obj) === '[object URLSearchParams]' ||
    typeof obj.sort === 'function'
  );
}
|
|||
|
|
|||
|
/**
|
|||
|
* Check if `obj` is a W3C `Blob` object (which `File` inherits from)
|
|||
|
* @param {*} obj
|
|||
|
* @return {boolean}
|
|||
|
*/
|
|||
|
/**
 * Check if `obj` is a W3C `Blob` object (which `File` inherits from),
 * via duck-typing plus constructor-name and toStringTag brand checks.
 * @param {*} obj
 * @return {boolean}
 */
function isBlob(obj) {
  const BRAND = /^(Blob|File)$/;
  const isFn = (v) => typeof v === 'function';
  return (
    typeof obj === 'object' &&
    isFn(obj.arrayBuffer) &&
    typeof obj.type === 'string' &&
    isFn(obj.stream) &&
    isFn(obj.constructor) &&
    typeof obj.constructor.name === 'string' &&
    BRAND.test(obj.constructor.name) &&
    BRAND.test(obj[Symbol.toStringTag])
  );
}
|
|||
|
|
|||
|
/**
|
|||
|
* Clone body given Res/Req instance
|
|||
|
*
|
|||
|
* @param Mixed instance Response or Request instance
|
|||
|
* @return Mixed
|
|||
|
*/
|
|||
|
/**
 * Clone body given Res/Req instance
 *
 * Buffer/Blob/null bodies are shared as-is; a stream body is teed into two
 * PassThroughs — one replaces the instance's internal body, the other is
 * returned — so both can still be consumed independently.
 *
 * @param   Mixed  instance  Response or Request instance
 * @return  Mixed
 */
function clone(instance) {
  let p1, p2;
  let body = instance.body;

  // don't allow cloning a used body
  if (instance.bodyUsed) {
    throw new Error('cannot clone body after it is used');
  }

  // check that body is a stream and not form-data object
  // note: we can't clone the form-data object without having it as a dependency
  if (body instanceof Stream && typeof body.getBoundary !== 'function') {
    // tee instance body
    p1 = new PassThrough();
    p2 = new PassThrough();
    body.pipe(p1);
    body.pipe(p2);
    // set instance body to teed body and return the other teed body
    instance[INTERNALS].body = p1;
    body = p2;
  }

  return body;
}
|
|||
|
|
|||
|
/**
|
|||
|
* Performs the operation "extract a `Content-Type` value from |object|" as
|
|||
|
* specified in the specification:
|
|||
|
* https://fetch.spec.whatwg.org/#concept-bodyinit-extract
|
|||
|
*
|
|||
|
* This function assumes that instance.body is present.
|
|||
|
*
|
|||
|
* @param Mixed instance Any options.body input
|
|||
|
*/
|
|||
|
/**
 * Performs the operation "extract a `Content-Type` value from |object|" as
 * specified in the specification:
 * https://fetch.spec.whatwg.org/#concept-bodyinit-extract
 *
 * Branch order matters: it mirrors Body's normalization order, so each
 * body kind is matched before the catch-all string coercion at the end.
 *
 * @param   Mixed  body  Any options.body input
 * @return  String|null  Content-Type value, or null when indeterminable
 */
function extractContentType(body) {
  if (body === null) {
    // body is null
    return null;
  } else if (typeof body === 'string') {
    // body is string
    return 'text/plain;charset=UTF-8';
  } else if (isURLSearchParams(body)) {
    // body is a URLSearchParams
    return 'application/x-www-form-urlencoded;charset=UTF-8';
  } else if (isBlob(body)) {
    // body is blob
    return body.type || null;
  } else if (Buffer.isBuffer(body)) {
    // body is buffer
    return null;
  } else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {
    // body is ArrayBuffer
    return null;
  } else if (ArrayBuffer.isView(body)) {
    // body is ArrayBufferView
    return null;
  } else if (typeof body.getBoundary === 'function') {
    // detect form data input from form-data module
    return `multipart/form-data;boundary=${body.getBoundary()}`;
  } else if (body instanceof Stream) {
    // body is stream
    // can't really do much about this
    return null;
  } else {
    // Body constructor defaults other things to string
    return 'text/plain;charset=UTF-8';
  }
}
|
|||
|
|
|||
|
/**
 * The Fetch Standard treats "total bytes" as a property on the body; here it
 * is computed on demand instead.
 *
 * ref: https://fetch.spec.whatwg.org/#concept-body-total-bytes
 *
 * @param Body instance Instance of Body
 * @return Number? Number of bytes, or null if not possible
 */
function getTotalBytes(instance) {
  const body = instance.body;

  if (body === null) {
    // no body at all
    return 0;
  }
  if (isBlob(body)) {
    return body.size;
  }
  if (Buffer.isBuffer(body)) {
    return body.length;
  }
  if (body && typeof body.getLengthSync === 'function') {
    // form-data module: 1.x exposes _lengthRetrievers, 2.x hasKnownLength()
    const lengthIsKnown =
      (body._lengthRetrievers && body._lengthRetrievers.length == 0) ||
      (body.hasKnownLength && body.hasKnownLength());
    return lengthIsKnown ? body.getLengthSync() : null;
  }
  // arbitrary stream: length is unknowable up front
  return null;
}
|
|||
|
|
|||
|
/**
 * Write a Body to a Node.js WritableStream (e.g. http.Request) object.
 *
 * @param Stream dest Writable destination
 * @param Body instance Instance of Body
 * @return Void
 */
function writeToStream(dest, instance) {
  const body = instance.body;

  if (body === null) {
    // nothing to send; just finish the stream
    dest.end();
    return;
  }
  if (isBlob(body)) {
    body.stream().pipe(dest);
    return;
  }
  if (Buffer.isBuffer(body)) {
    // single-chunk write
    dest.write(body);
    dest.end();
    return;
  }
  // body is a stream
  body.pipe(dest);
}
|
|||
|
|
|||
|
// expose Promise (overridable; fetch() copies fetch.Promise into Body.Promise)
Body.Promise = global.Promise;
|
|||
|
|
|||
|
/**
 * headers.js
 *
 * Headers class offers convenient helpers
 */

// header NAME: any character outside this token set makes the name illegal
const invalidTokenRegex = /[^\^_`a-zA-Z\-0-9!#$%&'*+.|~]/;
// header VALUE: tab, printable ASCII and 0x80-0xFF are allowed; anything else is illegal
const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/;
|
|||
|
|
|||
|
/**
 * Throw a TypeError unless `name` (stringified) is a legal HTTP header name.
 */
function validateName(name) {
  name = `${name}`;
  const illegal = invalidTokenRegex.test(name) || name === '';
  if (illegal) {
    throw new TypeError(`${name} is not a legal HTTP header name`);
  }
}
|
|||
|
|
|||
|
/**
 * Throw a TypeError unless `value` (stringified) is a legal HTTP header value.
 */
function validateValue(value) {
  value = `${value}`;
  const illegal = invalidHeaderCharRegex.test(value);
  if (illegal) {
    throw new TypeError(`${value} is not a legal HTTP header value`);
  }
}
|
|||
|
|
|||
|
/**
 * Find the key in the map object given a header name, comparing
 * case-insensitively.
 *
 * Returns undefined if not found.
 *
 * @param Object map Raw header storage
 * @param String name Header name
 * @return String|Undefined
 */
function find(map, name) {
  const wanted = name.toLowerCase();
  for (const candidate in map) {
    if (candidate.toLowerCase() === wanted) {
      return candidate;
    }
  }
  return undefined;
}
|
|||
|
|
|||
|
// private key under which Headers instances store their raw header map
const MAP = Symbol('map');
|
|||
|
class Headers {
  /**
   * Headers class
   *
   * @param Object headers Response headers: another Headers instance, an
   *   iterable of [name, value] pairs, or a plain record of name -> value
   * @return Void
   */
  constructor() {
    let init = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : undefined;

    // raw storage: original-case name -> array of values (null prototype
    // avoids collisions with Object.prototype keys)
    this[MAP] = Object.create(null);

    if (init instanceof Headers) {
      // copy-construct from another Headers instance
      const rawHeaders = init.raw();
      const headerNames = Object.keys(rawHeaders);

      for (const headerName of headerNames) {
        for (const value of rawHeaders[headerName]) {
          this.append(headerName, value);
        }
      }

      return;
    }

    // We don't worry about converting prop to ByteString here as append()
    // will handle it.
    if (init == null) ; else if (typeof init === 'object') {
      const method = init[Symbol.iterator];
      if (method != null) {
        if (typeof method !== 'function') {
          throw new TypeError('Header pairs must be iterable');
        }

        // sequence<sequence<ByteString>>
        // Note: per spec we have to first exhaust the lists then process them
        const pairs = [];
        for (const pair of init) {
          if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {
            throw new TypeError('Each header pair must be iterable');
          }
          pairs.push(Array.from(pair));
        }

        for (const pair of pairs) {
          if (pair.length !== 2) {
            throw new TypeError('Each header pair must be a name/value tuple');
          }
          this.append(pair[0], pair[1]);
        }
      } else {
        // record<ByteString, ByteString>
        for (const key of Object.keys(init)) {
          const value = init[key];
          this.append(key, value);
        }
      }
    } else {
      throw new TypeError('Provided initializer must be an object');
    }
  }

  /**
   * Return combined header value given name
   *
   * @param String name Header name
   * @return Mixed null when absent, otherwise all values joined with ', '
   */
  get(name) {
    name = `${name}`;
    validateName(name);
    const key = find(this[MAP], name);
    if (key === undefined) {
      return null;
    }

    return this[MAP][key].join(', ');
  }

  /**
   * Iterate over all headers
   *
   * @param Function callback Executed for each item with parameters (value, name, thisArg)
   * @param Boolean thisArg `this` context for callback function
   * @return Void
   */
  forEach(callback) {
    let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;

    let pairs = getHeaders(this);
    let i = 0;
    while (i < pairs.length) {
      var _pairs$i = pairs[i];
      const name = _pairs$i[0],
            value = _pairs$i[1];

      callback.call(thisArg, value, name, this);
      // re-snapshot after each call so mutation by the callback is observed
      pairs = getHeaders(this);
      i++;
    }
  }

  /**
   * Overwrite header values given name
   *
   * @param String name Header name
   * @param String value Header value
   * @return Void
   */
  set(name, value) {
    name = `${name}`;
    value = `${value}`;
    validateName(name);
    validateValue(value);
    const key = find(this[MAP], name);
    // reuse the existing key's casing when present, otherwise store as given
    this[MAP][key !== undefined ? key : name] = [value];
  }

  /**
   * Append a value onto existing header
   *
   * @param String name Header name
   * @param String value Header value
   * @return Void
   */
  append(name, value) {
    name = `${name}`;
    value = `${value}`;
    validateName(name);
    validateValue(value);
    const key = find(this[MAP], name);
    if (key !== undefined) {
      this[MAP][key].push(value);
    } else {
      this[MAP][name] = [value];
    }
  }

  /**
   * Check for header name existence
   *
   * @param String name Header name
   * @return Boolean
   */
  has(name) {
    name = `${name}`;
    validateName(name);
    return find(this[MAP], name) !== undefined;
  }

  /**
   * Delete all header values given name
   *
   * @param String name Header name
   * @return Void
   */
  delete(name) {
    name = `${name}`;
    validateName(name);
    const key = find(this[MAP], name);
    if (key !== undefined) {
      delete this[MAP][key];
    }
  }

  /**
   * Return raw headers (non-spec api)
   *
   * @return Object the live internal map (name -> value array), not a copy
   */
  raw() {
    return this[MAP];
  }

  /**
   * Get an iterator on keys.
   *
   * @return Iterator
   */
  keys() {
    return createHeadersIterator(this, 'key');
  }

  /**
   * Get an iterator on values.
   *
   * @return Iterator
   */
  values() {
    return createHeadersIterator(this, 'value');
  }

  /**
   * Get an iterator on entries.
   *
   * This is the default iterator of the Headers object.
   *
   * @return Iterator
   */
  [Symbol.iterator]() {
    return createHeadersIterator(this, 'key+value');
  }
}
|
|||
|
// entries() is an alias for the default iterator
Headers.prototype.entries = Headers.prototype[Symbol.iterator];

// brand Headers instances for Object.prototype.toString
Object.defineProperty(Headers.prototype, Symbol.toStringTag, {
  value: 'Headers',
  writable: false,
  enumerable: false,
  configurable: true
});

// class methods are non-enumerable by default; make the public API enumerable
Object.defineProperties(Headers.prototype, {
  get: { enumerable: true },
  forEach: { enumerable: true },
  set: { enumerable: true },
  append: { enumerable: true },
  has: { enumerable: true },
  delete: { enumerable: true },
  keys: { enumerable: true },
  values: { enumerable: true },
  entries: { enumerable: true }
});
|
|||
|
|
|||
|
/**
 * Snapshot a Headers object's contents in sorted name order.
 *
 * @param Headers headers
 * @param String kind 'key' | 'value' | 'key+value' (default)
 * @return Array lowercased names, joined values, or [name, value] pairs
 */
function getHeaders(headers) {
  let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';

  const raw = headers[MAP];
  const sortedKeys = Object.keys(raw).sort();

  let project;
  if (kind === 'key') {
    project = function (k) {
      return k.toLowerCase();
    };
  } else if (kind === 'value') {
    project = function (k) {
      return raw[k].join(', ');
    };
  } else {
    project = function (k) {
      return [k.toLowerCase(), raw[k].join(', ')];
    };
  }
  return sortedKeys.map(project);
}
|
|||
|
|
|||
|
// private slot for iterator state (target, kind, index)
const INTERNAL = Symbol('internal');

/**
 * Create an iterator over a Headers object.
 *
 * @param Headers target Headers instance to iterate
 * @param String kind 'key' | 'value' | 'key+value'
 * @return Iterator
 */
function createHeadersIterator(target, kind) {
  const iterator = Object.create(HeadersIteratorPrototype);
  iterator[INTERNAL] = {
    target,
    kind,
    index: 0
  };
  return iterator;
}
|
|||
|
|
|||
|
// prototype for Headers iterators; inherits from %IteratorPrototype% so the
// returned objects behave like built-in iterators (e.g. Symbol.iterator works)
const HeadersIteratorPrototype = Object.setPrototypeOf({
  next() {
    // istanbul ignore if
    if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {
      throw new TypeError('Value of `this` is not a HeadersIterator');
    }

    var _INTERNAL = this[INTERNAL];
    const target = _INTERNAL.target,
          kind = _INTERNAL.kind,
          index = _INTERNAL.index;

    // re-snapshot the headers on every step so concurrent mutation is visible
    const values = getHeaders(target, kind);
    const len = values.length;
    if (index >= len) {
      return {
        value: undefined,
        done: true
      };
    }

    this[INTERNAL].index = index + 1;

    return {
      value: values[index],
      done: false
    };
  }
}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));

// brand the iterator for Object.prototype.toString
Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {
  value: 'HeadersIterator',
  writable: false,
  enumerable: false,
  configurable: true
});
|
|||
|
|
|||
|
/**
 * Export the Headers object in a form that Node.js can consume.
 *
 * @param Headers headers
 * @return Object null-prototype map of name -> value array (Host flattened)
 */
function exportNodeCompatibleHeaders(headers) {
  const raw = headers[MAP];
  const exported = Object.assign({ __proto__: null }, raw);

  // http.request() only supports a string Host header. This hack makes
  // specifying a custom Host header possible.
  const hostHeaderKey = find(raw, 'Host');
  if (hostHeaderKey !== undefined) {
    exported[hostHeaderKey] = exported[hostHeaderKey][0];
  }

  return exported;
}
|
|||
|
|
|||
|
/**
 * Create a Headers object from an object of headers, silently dropping any
 * entry that does not conform to HTTP grammar productions.
 *
 * @param Object obj Object of headers
 * @return Headers
 */
function createHeadersLenient(obj) {
  const headers = new Headers();
  for (const name of Object.keys(obj)) {
    if (invalidTokenRegex.test(name)) {
      // illegal name: skip the whole entry
      continue;
    }
    const value = obj[name];
    if (Array.isArray(value)) {
      for (const val of value) {
        if (invalidHeaderCharRegex.test(val)) {
          // illegal value: skip just this value
          continue;
        }
        if (headers[MAP][name] === undefined) {
          headers[MAP][name] = [val];
        } else {
          headers[MAP][name].push(val);
        }
      }
    } else if (!invalidHeaderCharRegex.test(value)) {
      headers[MAP][name] = [value];
    }
  }
  return headers;
}
|
|||
|
|
|||
|
// private slot for Response internals (url, status, statusText, headers, counter)
const INTERNALS$1 = Symbol('Response internals');

// fix an issue where "STATUS_CODES" aren't a named export for node <10
const STATUS_CODES = http.STATUS_CODES;
|
|||
|
|
|||
|
/**
 * Response class
 *
 * @param Stream body Readable stream
 * @param Object opts Response options
 * @return Void
 */
class Response {
  constructor() {
    let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;
    let opts = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};

    // mix in Body state (buffering, timeout, size limit)
    Body.call(this, body, opts);

    const status = opts.status || 200;
    const headers = new Headers(opts.headers);

    // derive a Content-Type from the body if none was provided
    if (body != null && !headers.has('Content-Type')) {
      const contentType = extractContentType(body);
      if (contentType) {
        headers.append('Content-Type', contentType);
      }
    }

    this[INTERNALS$1] = {
      url: opts.url,
      status,
      // fall back to the standard reason phrase for the status code
      statusText: opts.statusText || STATUS_CODES[status],
      headers,
      counter: opts.counter
    };
  }

  get url() {
    return this[INTERNALS$1].url || '';
  }

  get status() {
    return this[INTERNALS$1].status;
  }

  /**
   * Convenience property representing if the request ended normally
   * (status in the 2xx range)
   */
  get ok() {
    return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;
  }

  // true when at least one redirect was followed to produce this response
  get redirected() {
    return this[INTERNALS$1].counter > 0;
  }

  get statusText() {
    return this[INTERNALS$1].statusText;
  }

  get headers() {
    return this[INTERNALS$1].headers;
  }

  /**
   * Clone this response
   *
   * @return Response
   */
  clone() {
    return new Response(clone(this), {
      url: this.url,
      status: this.status,
      statusText: this.statusText,
      headers: this.headers,
      ok: this.ok,
      redirected: this.redirected
    });
  }
}
|
|||
|
|
|||
|
// add the Body consumption API (json, text, buffer, ...) to Response
Body.mixIn(Response.prototype);

// make the public accessors enumerable
Object.defineProperties(Response.prototype, {
  url: { enumerable: true },
  status: { enumerable: true },
  ok: { enumerable: true },
  redirected: { enumerable: true },
  statusText: { enumerable: true },
  headers: { enumerable: true },
  clone: { enumerable: true }
});

// brand Response instances for Object.prototype.toString
Object.defineProperty(Response.prototype, Symbol.toStringTag, {
  value: 'Response',
  writable: false,
  enumerable: false,
  configurable: true
});
|
|||
|
|
|||
|
// private slot for Request internals (method, redirect, headers, parsedURL, signal)
const INTERNALS$2 = Symbol('Request internals');

// fix an issue where "format", "parse" aren't a named export for node <10
const parse_url = Url.parse;
const format_url = Url.format;

// whether request body streams can be destroy()ed on abort (node >= 8)
const streamDestructionSupported = 'destroy' in Stream.Readable.prototype;
|
|||
|
|
|||
|
/**
 * Check if a value is an instance of Request.
 *
 * Detection is by the presence of the internal slot rather than instanceof,
 * so Requests from other copies of this module are recognised too.
 *
 * @param Mixed input
 * @return Boolean
 */
function isRequest(input) {
  if (typeof input !== 'object') {
    return false;
  }
  return typeof input[INTERNALS$2] === 'object';
}
|
|||
|
|
|||
|
/**
 * Duck-type check for an AbortSignal, matched by the prototype's constructor
 * name so signals from other realms or polyfills are accepted.
 *
 * @param Mixed signal
 * @return Boolean
 */
function isAbortSignal(signal) {
  if (!signal || typeof signal !== 'object') {
    return false;
  }
  const proto = Object.getPrototypeOf(signal);
  return !!(proto && proto.constructor.name === 'AbortSignal');
}
|
|||
|
|
|||
|
/**
 * Request class
 *
 * @param Mixed input Url or Request instance
 * @param Object init Custom options
 * @return Void
 */
class Request {
  constructor(input) {
    let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};

    let parsedURL;

    // normalize input
    if (!isRequest(input)) {
      if (input && input.href) {
        // in order to support Node.js' Url objects; though WHATWG's URL objects
        // will fall into this branch also (since their `toString()` will return
        // `href` property anyway)
        parsedURL = parse_url(input.href);
      } else {
        // coerce input to a string before attempting to parse
        parsedURL = parse_url(`${input}`);
      }
      // treat non-Request input as an empty options bag from here on
      input = {};
    } else {
      parsedURL = parse_url(input.url);
    }

    let method = init.method || input.method || 'GET';
    method = method.toUpperCase();

    // GET/HEAD requests must not carry a body
    if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {
      throw new TypeError('Request with GET/HEAD method cannot have body');
    }

    // init.body wins; otherwise tee the source Request's body via clone()
    let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;

    Body.call(this, inputBody, {
      timeout: init.timeout || input.timeout || 0,
      size: init.size || input.size || 0
    });

    const headers = new Headers(init.headers || input.headers || {});

    // derive a Content-Type from the body if none was provided
    if (inputBody != null && !headers.has('Content-Type')) {
      const contentType = extractContentType(inputBody);
      if (contentType) {
        headers.append('Content-Type', contentType);
      }
    }

    // an explicit init.signal (even null) overrides the source Request's signal
    let signal = isRequest(input) ? input.signal : null;
    if ('signal' in init) signal = init.signal;

    if (signal != null && !isAbortSignal(signal)) {
      throw new TypeError('Expected signal to be an instanceof AbortSignal');
    }

    this[INTERNALS$2] = {
      method,
      redirect: init.redirect || input.redirect || 'follow',
      headers,
      parsedURL,
      signal
    };

    // node-fetch-only options
    this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;
    this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true;
    this.counter = init.counter || input.counter || 0;
    this.agent = init.agent || input.agent;
  }

  get method() {
    return this[INTERNALS$2].method;
  }

  // the URL is re-serialized from the parsed form on every access
  get url() {
    return format_url(this[INTERNALS$2].parsedURL);
  }

  get headers() {
    return this[INTERNALS$2].headers;
  }

  get redirect() {
    return this[INTERNALS$2].redirect;
  }

  get signal() {
    return this[INTERNALS$2].signal;
  }

  /**
   * Clone this request
   *
   * @return Request
   */
  clone() {
    return new Request(this);
  }
}
|
|||
|
|
|||
|
// add the Body consumption API (json, text, buffer, ...) to Request
Body.mixIn(Request.prototype);

// brand Request instances for Object.prototype.toString
Object.defineProperty(Request.prototype, Symbol.toStringTag, {
  value: 'Request',
  writable: false,
  enumerable: false,
  configurable: true
});

// make the public accessors enumerable
Object.defineProperties(Request.prototype, {
  method: { enumerable: true },
  url: { enumerable: true },
  headers: { enumerable: true },
  redirect: { enumerable: true },
  clone: { enumerable: true },
  signal: { enumerable: true }
});
|
|||
|
|
|||
|
/**
 * Convert a Request to Node.js http request options.
 *
 * @param Request request A Request instance
 * @return Object The options object to be passed to http.request
 * @throws TypeError for relative or non-HTTP(S) URLs
 * @throws Error when an AbortSignal is paired with a stream body on old node
 */
function getNodeRequestOptions(request) {
  const parsedURL = request[INTERNALS$2].parsedURL;
  // copy so the mutations below don't leak into the Request's own headers
  const headers = new Headers(request[INTERNALS$2].headers);

  // fetch step 1.3
  if (!headers.has('Accept')) {
    headers.set('Accept', '*/*');
  }

  // Basic fetch
  if (!parsedURL.protocol || !parsedURL.hostname) {
    throw new TypeError('Only absolute URLs are supported');
  }

  if (!/^https?:$/.test(parsedURL.protocol)) {
    throw new TypeError('Only HTTP(S) protocols are supported');
  }

  if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {
    throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');
  }

  // HTTP-network-or-cache fetch steps 2.4-2.7
  let contentLengthValue = null;
  if (request.body == null && /^(POST|PUT)$/i.test(request.method)) {
    // bodyless POST/PUT still advertise an explicit zero length
    contentLengthValue = '0';
  }
  if (request.body != null) {
    const totalBytes = getTotalBytes(request);
    // getTotalBytes returns null when the length is unknowable (streams)
    if (typeof totalBytes === 'number') {
      contentLengthValue = String(totalBytes);
    }
  }
  if (contentLengthValue) {
    headers.set('Content-Length', contentLengthValue);
  }

  // HTTP-network-or-cache fetch step 2.11
  if (!headers.has('User-Agent')) {
    headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');
  }

  // HTTP-network-or-cache fetch step 2.15
  if (request.compress && !headers.has('Accept-Encoding')) {
    headers.set('Accept-Encoding', 'gzip,deflate');
  }

  // an agent may be supplied as a factory invoked per-URL
  let agent = request.agent;
  if (typeof agent === 'function') {
    agent = agent(parsedURL);
  }

  // without a keep-alive agent, close the connection after the response
  if (!headers.has('Connection') && !agent) {
    headers.set('Connection', 'close');
  }

  // HTTP-network fetch step 4.2
  // chunked encoding is handled by Node.js

  return Object.assign({}, parsedURL, {
    method: request.method,
    headers: exportNodeCompatibleHeaders(headers),
    agent
  });
}
|
|||
|
|
|||
|
/**
 * abort-error.js
 *
 * AbortError interface for cancelled requests
 */

/**
 * Create AbortError instance
 *
 * @param String message Error message for human
 * @return AbortError
 */
function AbortError(message) {
  Error.call(this, message);

  this.message = message;
  this.type = 'aborted';

  // hide custom error implementation details from end-users
  Error.captureStackTrace(this, this.constructor);
}

// classic prototype chain so instances satisfy `err instanceof Error`
AbortError.prototype = Object.create(Error.prototype);
Object.assign(AbortError.prototype, {
  constructor: AbortError,
  name: 'AbortError'
});
|
|||
|
|
|||
|
// fix an issue where "PassThrough", "resolve" aren't a named export for node <10
const PassThrough$1 = Stream.PassThrough;
const resolve_url = Url.resolve;
|
|||
|
|
|||
|
/**
 * Fetch function
 *
 * @param Mixed url Absolute url or Request instance
 * @param Object opts Fetch options
 * @return Promise resolves to a Response; rejects with FetchError/AbortError
 */
function fetch(url, opts) {

  // allow custom promise
  if (!fetch.Promise) {
    throw new Error('native promise missing, set fetch.Promise to your favorite alternative');
  }

  // keep Body's promise implementation in sync with the chosen one
  Body.Promise = fetch.Promise;

  // wrap http.request into fetch
  return new fetch.Promise(function (resolve, reject) {
    // build request object
    const request = new Request(url, opts);
    const options = getNodeRequestOptions(request);

    // pick the transport matching the URL scheme
    const send = (options.protocol === 'https:' ? https : http).request;
    const signal = request.signal;

    let response = null;

    // reject with AbortError and tear down body streams on both sides
    const abort = function abort() {
      let error = new AbortError('The user aborted a request.');
      reject(error);
      if (request.body && request.body instanceof Stream.Readable) {
        request.body.destroy(error);
      }
      if (!response || !response.body) return;
      response.body.emit('error', error);
    };

    // already-aborted signal: fail before sending anything
    if (signal && signal.aborted) {
      abort();
      return;
    }

    const abortAndFinalize = function abortAndFinalize() {
      abort();
      finalize();
    };

    // send request
    const req = send(options);
    let reqTimeout;

    if (signal) {
      signal.addEventListener('abort', abortAndFinalize);
    }

    // abort the in-flight request and release listeners/timers
    function finalize() {
      req.abort();
      if (signal) signal.removeEventListener('abort', abortAndFinalize);
      clearTimeout(reqTimeout);
    }

    // arm the timeout only once a socket is assigned
    if (request.timeout) {
      req.once('socket', function (socket) {
        reqTimeout = setTimeout(function () {
          reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));
          finalize();
        }, request.timeout);
      });
    }

    req.on('error', function (err) {
      reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));
      finalize();
    });

    req.on('response', function (res) {
      clearTimeout(reqTimeout);

      const headers = createHeadersLenient(res.headers);

      // HTTP fetch step 5
      if (fetch.isRedirect(res.statusCode)) {
        // HTTP fetch step 5.2
        const location = headers.get('Location');

        // HTTP fetch step 5.3
        const locationURL = location === null ? null : resolve_url(request.url, location);

        // HTTP fetch step 5.5
        switch (request.redirect) {
          case 'error':
            reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));
            finalize();
            return;
          case 'manual':
            // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.
            if (locationURL !== null) {
              // handle corrupted header
              try {
                headers.set('Location', locationURL);
              } catch (err) {
                // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request
                reject(err);
              }
            }
            break;
          case 'follow':
            // HTTP-redirect fetch step 2
            if (locationURL === null) {
              break;
            }

            // HTTP-redirect fetch step 5
            if (request.counter >= request.follow) {
              reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));
              finalize();
              return;
            }

            // HTTP-redirect fetch step 6 (counter increment)
            // Create a new Request object.
            const requestOpts = {
              headers: new Headers(request.headers),
              follow: request.follow,
              counter: request.counter + 1,
              agent: request.agent,
              compress: request.compress,
              method: request.method,
              body: request.body,
              signal: request.signal,
              timeout: request.timeout,
              size: request.size
            };

            // HTTP-redirect fetch step 9
            if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {
              reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));
              finalize();
              return;
            }

            // HTTP-redirect fetch step 11: 303 (and 301/302 after POST)
            // downgrade to a bodyless GET
            if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {
              requestOpts.method = 'GET';
              requestOpts.body = undefined;
              requestOpts.headers.delete('content-length');
            }

            // HTTP-redirect fetch step 15: recurse into fetch for the new URL
            resolve(fetch(new Request(locationURL, requestOpts)));
            finalize();
            return;
        }
      }

      // prepare response
      res.once('end', function () {
        if (signal) signal.removeEventListener('abort', abortAndFinalize);
      });
      let body = res.pipe(new PassThrough$1());

      const response_options = {
        url: request.url,
        status: res.statusCode,
        statusText: res.statusMessage,
        headers: headers,
        size: request.size,
        timeout: request.timeout,
        counter: request.counter
      };

      // HTTP-network fetch step 12.1.1.3
      const codings = headers.get('Content-Encoding');

      // HTTP-network fetch step 12.1.1.4: handle content codings

      // in following scenarios we ignore compression support
      // 1. compression support is disabled
      // 2. HEAD request
      // 3. no Content-Encoding header
      // 4. no content response (204)
      // 5. content not modified response (304)
      if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {
        response = new Response(body, response_options);
        resolve(response);
        return;
      }

      // For Node v6+
      // Be less strict when decoding compressed responses, since sometimes
      // servers send slightly invalid responses that are still accepted
      // by common browsers.
      // Always using Z_SYNC_FLUSH is what cURL does.
      const zlibOptions = {
        flush: zlib.Z_SYNC_FLUSH,
        finishFlush: zlib.Z_SYNC_FLUSH
      };

      // for gzip
      if (codings == 'gzip' || codings == 'x-gzip') {
        body = body.pipe(zlib.createGunzip(zlibOptions));
        response = new Response(body, response_options);
        resolve(response);
        return;
      }

      // for deflate
      if (codings == 'deflate' || codings == 'x-deflate') {
        // handle the infamous raw deflate response from old servers
        // a hack for old IIS and Apache servers
        const raw = res.pipe(new PassThrough$1());
        raw.once('data', function (chunk) {
          // sniff the first byte to tell zlib-wrapped from raw deflate
          // see http://stackoverflow.com/questions/37519828
          if ((chunk[0] & 0x0F) === 0x08) {
            body = body.pipe(zlib.createInflate());
          } else {
            body = body.pipe(zlib.createInflateRaw());
          }
          response = new Response(body, response_options);
          resolve(response);
        });
        return;
      }

      // for br
      if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {
        body = body.pipe(zlib.createBrotliDecompress());
        response = new Response(body, response_options);
        resolve(response);
        return;
      }

      // otherwise, use response as-is
      response = new Response(body, response_options);
      resolve(response);
    });

    // pump the request body into the outgoing request
    writeToStream(req, request);
  });
}
|
|||
|
/**
 * Redirect code matching
 *
 * @param Number code Status code
 * @return Boolean true when `code` is an HTTP redirect status
 */
fetch.isRedirect = function (code) {
	// The five redirect statuses node-fetch will follow.
	const redirectCodes = [301, 302, 303, 307, 308];
	return redirectCodes.indexOf(code) !== -1;
};
|
|||
|
|
|||
|
// expose Promise
|
|||
|
fetch.Promise = global.Promise;
|
|||
|
|
|||
|
module.exports = exports = fetch;
|
|||
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|||
|
exports.default = exports;
|
|||
|
exports.Headers = Headers;
|
|||
|
exports.Request = Request;
|
|||
|
exports.Response = Response;
|
|||
|
exports.FetchError = FetchError;
|
|||
|
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 4497:
|
|||
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|||
|
|
|||
|
var wrappy = __nccwpck_require__(5812)
|
|||
|
module.exports = wrappy(once)
|
|||
|
module.exports.strict = wrappy(onceStrict)
|
|||
|
|
|||
|
// Opt-in prototype patching: calling `once.proto()` installs
// `Function.prototype.once` / `Function.prototype.onceStrict` convenience
// accessors. The installer itself is wrapped with once() so the prototype
// is patched at most one time no matter how often `once.proto()` is called.
// NOTE(review): this extends a native prototype, which is generally
// discouraged; kept as-is for compatibility with the upstream package.
once.proto = once(function () {
  Object.defineProperty(Function.prototype, 'once', {
    value: function () {
      // `this` is the function the property is accessed on.
      return once(this)
    },
    configurable: true
  })

  Object.defineProperty(Function.prototype, 'onceStrict', {
    value: function () {
      return onceStrict(this)
    },
    configurable: true
  })
})
|
|||
|
|
|||
|
// Wrap `fn` so it runs at most once; later calls return the first result.
// The wrapper exposes `.called` (boolean) and `.value` (cached result).
function once (fn) {
  function invokeOnce () {
    if (invokeOnce.called) {
      return invokeOnce.value
    }
    invokeOnce.called = true
    invokeOnce.value = fn.apply(this, arguments)
    return invokeOnce.value
  }
  invokeOnce.called = false
  return invokeOnce
}
|
|||
|
|
|||
|
// Like once(), but a second invocation is a programming error and throws.
// The error message is precomputed on `.onceError` so callers can customize it.
function onceStrict (fn) {
  function invokeStrict () {
    if (invokeStrict.called) {
      throw new Error(invokeStrict.onceError)
    }
    invokeStrict.called = true
    invokeStrict.value = fn.apply(this, arguments)
    return invokeStrict.value
  }
  var label = fn.name || 'Function wrapped with `once`'
  invokeStrict.onceError = label + " shouldn't be called more than once"
  invokeStrict.called = false
  return invokeStrict
}
|
|||
|
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 7752:
|
|||
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|||
|
|
|||
|
module.exports = __nccwpck_require__(6341);
|
|||
|
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 6341:
|
|||
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
|
|||
|
|
|||
|
var net = __nccwpck_require__(1631);
|
|||
|
var tls = __nccwpck_require__(4016);
|
|||
|
var http = __nccwpck_require__(8605);
|
|||
|
var https = __nccwpck_require__(7211);
|
|||
|
var events = __nccwpck_require__(8614);
|
|||
|
var assert = __nccwpck_require__(2357);
|
|||
|
var util = __nccwpck_require__(1669);
|
|||
|
|
|||
|
|
|||
|
exports.httpOverHttp = httpOverHttp;
|
|||
|
exports.httpsOverHttp = httpsOverHttp;
|
|||
|
exports.httpOverHttps = httpOverHttps;
|
|||
|
exports.httpsOverHttps = httpsOverHttps;
|
|||
|
|
|||
|
|
|||
|
// Agent factory: plain HTTP requests tunneled through an HTTP proxy.
function httpOverHttp(options) {
  var tunnelAgent = new TunnelingAgent(options);
  tunnelAgent.request = http.request; // proxy is reached over plain HTTP
  return tunnelAgent;
}
|
|||
|
|
|||
|
// Agent factory: HTTPS requests tunneled through an HTTP proxy.
// The tunnel socket is upgraded to TLS via createSecureSocket.
function httpsOverHttp(options) {
  var tunnelAgent = new TunnelingAgent(options);
  tunnelAgent.request = http.request;          // proxy is reached over plain HTTP
  tunnelAgent.createSocket = createSecureSocket; // wrap tunnel in TLS
  tunnelAgent.defaultPort = 443;
  return tunnelAgent;
}
|
|||
|
|
|||
|
// Agent factory: plain HTTP requests tunneled through an HTTPS proxy.
function httpOverHttps(options) {
  var tunnelAgent = new TunnelingAgent(options);
  tunnelAgent.request = https.request; // proxy is reached over TLS
  return tunnelAgent;
}
|
|||
|
|
|||
|
// Agent factory: HTTPS requests tunneled through an HTTPS proxy.
function httpsOverHttps(options) {
  var tunnelAgent = new TunnelingAgent(options);
  tunnelAgent.request = https.request;           // proxy is reached over TLS
  tunnelAgent.createSocket = createSecureSocket; // wrap tunnel in TLS as well
  tunnelAgent.defaultPort = 443;
  return tunnelAgent;
}
|
|||
|
|
|||
|
|
|||
|
// Agent that establishes CONNECT tunnels through a proxy and pools the
// resulting sockets. Queues requests once `maxSockets` is reached.
function TunnelingAgent(options) {
  var self = this;
  self.options = options || {};
  self.proxyOptions = self.options.proxy || {};
  self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets;
  self.requests = []; // requests waiting for a free tunnel socket
  self.sockets = [];  // live sockets (and placeholders while connecting)

  // A socket was released by its request; try to hand it to a queued
  // request targeting the same origin, otherwise destroy it.
  self.on('free', function onFree(socket, host, port, localAddress) {
    var options = toOptions(host, port, localAddress);
    for (var i = 0, len = self.requests.length; i < len; ++i) {
      var pending = self.requests[i];
      if (pending.host === options.host && pending.port === options.port) {
        // Detect the request to connect same origin server,
        // reuse the connection.
        self.requests.splice(i, 1);
        pending.request.onSocket(socket);
        return;
      }
    }
    // Nobody wants this origin; close the socket and free its pool slot.
    socket.destroy();
    self.removeSocket(socket);
  });
}
util.inherits(TunnelingAgent, events.EventEmitter);
|
|||
|
|
|||
|
// Attach a client request to a tunnel socket, creating a new socket when
// under the maxSockets limit and queueing the request otherwise.
TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) {
  var self = this;
  // Merged options also carry `request` so queued entries can be resumed.
  var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress));

  if (self.sockets.length >= this.maxSockets) {
    // We are over limit so we'll add it to the queue.
    self.requests.push(options);
    return;
  }

  // If we are under maxSockets create a new one.
  self.createSocket(options, function(socket) {
    socket.on('free', onFree);
    socket.on('close', onCloseOrRemove);
    socket.on('agentRemove', onCloseOrRemove);
    req.onSocket(socket);

    // Request finished with the socket: offer it back to the pool.
    function onFree() {
      self.emit('free', socket, options);
    }

    // Socket closed or removed from the agent: drop it and detach
    // all three listeners to avoid leaks.
    function onCloseOrRemove(err) {
      self.removeSocket(socket);
      socket.removeListener('free', onFree);
      socket.removeListener('close', onCloseOrRemove);
      socket.removeListener('agentRemove', onCloseOrRemove);
    }
  });
};
|
|||
|
|
|||
|
// Issue a CONNECT request to the proxy and hand the raw tunnel socket to
// `cb`. A placeholder object reserves a slot in `this.sockets` while the
// tunnel is being established; on failure the placeholder is removed and
// an ECONNRESET-coded error is emitted on the originating request.
TunnelingAgent.prototype.createSocket = function createSocket(options, cb) {
  var self = this;
  var placeholder = {};
  self.sockets.push(placeholder);

  var connectOptions = mergeOptions({}, self.proxyOptions, {
    method: 'CONNECT',
    path: options.host + ':' + options.port,
    agent: false,
    headers: {
      host: options.host + ':' + options.port
    }
  });
  if (options.localAddress) {
    connectOptions.localAddress = options.localAddress;
  }
  if (connectOptions.proxyAuth) {
    connectOptions.headers = connectOptions.headers || {};
    // Buffer.from replaces the deprecated (and unsafe) `new Buffer(string)`
    // constructor; behavior for a string argument is identical.
    connectOptions.headers['Proxy-Authorization'] = 'Basic ' +
        Buffer.from(connectOptions.proxyAuth).toString('base64');
  }

  debug('making CONNECT request');
  var connectReq = self.request(connectOptions);
  connectReq.useChunkedEncodingByDefault = false; // for v0.6
  connectReq.once('response', onResponse); // for v0.6
  connectReq.once('upgrade', onUpgrade);   // for v0.6
  connectReq.once('connect', onConnect);   // for v0.7 or later
  connectReq.once('error', onError);
  connectReq.end();

  function onResponse(res) {
    // Very hacky. This is necessary to avoid http-parser leaks.
    res.upgrade = true;
  }

  function onUpgrade(res, socket, head) {
    // Hacky: route old-Node 'upgrade' events through onConnect.
    process.nextTick(function() {
      onConnect(res, socket, head);
    });
  }

  function onConnect(res, socket, head) {
    connectReq.removeAllListeners();
    socket.removeAllListeners();

    if (res.statusCode !== 200) {
      // Proxy refused the tunnel.
      debug('tunneling socket could not be established, statusCode=%d',
            res.statusCode);
      socket.destroy();
      var error = new Error('tunneling socket could not be established, ' +
                            'statusCode=' + res.statusCode);
      error.code = 'ECONNRESET';
      options.request.emit('error', error);
      self.removeSocket(placeholder);
      return;
    }
    if (head.length > 0) {
      // A CONNECT response must not carry a body.
      debug('got illegal response body from proxy');
      socket.destroy();
      var error = new Error('got illegal response body from proxy');
      error.code = 'ECONNRESET';
      options.request.emit('error', error);
      self.removeSocket(placeholder);
      return;
    }
    debug('tunneling connection has established');
    // Swap the placeholder for the real socket, keeping its pool slot.
    self.sockets[self.sockets.indexOf(placeholder)] = socket;
    return cb(socket);
  }

  function onError(cause) {
    connectReq.removeAllListeners();

    debug('tunneling socket could not be established, cause=%s\n',
          cause.message, cause.stack);
    var error = new Error('tunneling socket could not be established, ' +
                          'cause=' + cause.message);
    error.code = 'ECONNRESET';
    options.request.emit('error', error);
    self.removeSocket(placeholder);
  }
};
|
|||
|
|
|||
|
// Remove `socket` (or a connection placeholder) from the pool. If requests
// are queued, immediately create a replacement socket for the first one so
// the freed pool slot is reused.
TunnelingAgent.prototype.removeSocket = function removeSocket(socket) {
  var index = this.sockets.indexOf(socket);
  if (index === -1) {
    return; // not tracked by this agent
  }
  this.sockets.splice(index, 1);

  var queued = this.requests.shift();
  if (!queued) {
    return;
  }
  // If we have pending requests and a socket gets closed a new one
  // needs to be created to take over in the pool for the one that closed.
  this.createSocket(queued, function(newSocket) {
    queued.request.onSocket(newSocket);
  });
};
|
|||
|
|
|||
|
// createSocket variant used for https-over-proxy agents: establishes the
// CONNECT tunnel, then wraps the raw socket in TLS before invoking `cb`.
function createSecureSocket(options, cb) {
  var self = this;
  TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {
    // Use the request's Host header (port stripped) as the TLS server
    // name, falling back to the target host.
    var hostHeader = options.request.getHeader('host');
    var tlsOptions = mergeOptions({}, self.options, {
      socket: socket,
      servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host
    });

    // 0 is dummy port for v0.6
    var secureSocket = tls.connect(0, tlsOptions);
    // Replace the plain socket's pool slot with the TLS-wrapped one.
    self.sockets[self.sockets.indexOf(socket)] = secureSocket;
    cb(secureSocket);
  });
}
|
|||
|
|
|||
|
|
|||
|
// Normalize addRequest arguments into an options object. Node v0.10 passes
// positional (host, port, localAddress); v0.11+ passes an object as `host`.
function toOptions(host, port, localAddress) {
  if (typeof host !== 'string') {
    return host; // already an options object (v0.11 or later)
  }
  var normalized = {
    host: host,
    port: port,
    localAddress: localAddress
  };
  return normalized; // since v0.10
}
|
|||
|
|
|||
|
// Shallow-merge every subsequent argument's defined properties into
// `target` (later arguments win; `undefined` values are skipped).
// Returns the mutated `target`.
function mergeOptions(target) {
  var overridesList = Array.prototype.slice.call(arguments, 1);
  overridesList.forEach(function(overrides) {
    if (typeof overrides !== 'object') {
      return; // ignore non-object arguments
    }
    Object.keys(overrides).forEach(function(key) {
      if (overrides[key] !== undefined) {
        target[key] = overrides[key];
      }
    });
  });
  return target;
}
|
|||
|
|
|||
|
|
|||
|
// Debug logging for the tunnel agent, active only when the NODE_DEBUG
// environment variable contains the word "tunnel" at module load time.
var debug;
if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) {
  debug = function() {
    var args = Array.prototype.slice.call(arguments);
    // Prefix the output with "TUNNEL:" so it is identifiable in stderr.
    if (typeof args[0] === 'string') {
      args[0] = 'TUNNEL: ' + args[0];
    } else {
      args.unshift('TUNNEL:');
    }
    console.error.apply(console, args);
  }
} else {
  // Debugging disabled: no-op to keep call sites unconditional.
  debug = function() {};
}
exports.debug = debug; // for test
|
|||
|
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 2102:
|
|||
|
/***/ ((__unused_webpack_module, exports) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
|
|||
|
|
|||
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|||
|
|
|||
|
/**
 * Best-effort description of the current JavaScript runtime.
 *
 * @returns {string} `navigator.userAgent` when a navigator object exists
 *   (browsers; also recent Node versions that expose `navigator`),
 *   otherwise a Node.js descriptor like "Node.js/18.0.0 (linux; x64)",
 *   otherwise a placeholder string.
 */
function getUserAgent() {
  if (typeof navigator === "object" && "userAgent" in navigator) {
    return navigator.userAgent;
  }

  if (typeof process === "object" && "version" in process) {
    // slice(1) drops the leading "v" of process.version ("v18.0.0" -> "18.0.0").
    // slice replaces the deprecated String.prototype.substr used previously.
    return `Node.js/${process.version.slice(1)} (${process.platform}; ${process.arch})`;
  }

  return "<environment undetectable>";
}
|
|||
|
|
|||
|
exports.getUserAgent = getUserAgent;
|
|||
|
//# sourceMappingURL=index.js.map
|
|||
|
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 5812:
|
|||
|
/***/ ((module) => {
|
|||
|
|
|||
|
// Returns a wrapper function that returns a wrapped callback
|
|||
|
// The wrapper function should do some stuff, and return a
|
|||
|
// presumably different callback function.
|
|||
|
// This makes sure that own properties are retained, so that
|
|||
|
// decorations and such are not lost along the way.
|
|||
|
module.exports = wrappy
|
|||
|
// wrappy(fn) returns a `wrapper` that calls fn with the same arguments,
// copying fn's own properties onto the wrapper. If fn returns a function
// other than the final argument (the callback), that callback's own
// properties are copied onto the returned function so decorations survive.
// wrappy(fn, cb) is shorthand for wrappy(fn)(cb).
function wrappy (fn, cb) {
  if (fn && cb) {
    return wrappy(fn)(cb)
  }

  if (typeof fn !== 'function') {
    throw new TypeError('need wrapper function')
  }

  function wrapper () {
    var callArgs = []
    for (var idx = 0; idx < arguments.length; idx++) {
      callArgs.push(arguments[idx])
    }
    var result = fn.apply(this, callArgs)
    var lastArg = callArgs[callArgs.length - 1]
    if (typeof result === 'function' && result !== lastArg) {
      // Preserve decorations placed on the callback.
      Object.keys(lastArg).forEach(function (key) {
        result[key] = lastArg[key]
      })
    }
    return result
  }

  // Preserve decorations placed on the wrapped function itself.
  Object.keys(fn).forEach(function (key) {
    wrapper[key] = fn[key]
  })

  return wrapper
}
|
|||
|
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 7058:
|
|||
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
|
|||
|
|
|||
|
var iconvLite = __nccwpck_require__(8735);
|
|||
|
|
|||
|
// Expose to the world
|
|||
|
module.exports.O = convert;
|
|||
|
|
|||
|
/**
 * Convert encoding of an UTF-8 string or a buffer
 *
 * @param {String|Buffer} str String to be converted
 * @param {String} to Encoding to be converted to
 * @param {String} [from='UTF-8'] Encoding to be converted from
 * @return {Buffer} Encoded string
 */
function convert(str, to, from) {
    from = checkEncoding(from || 'UTF-8');
    to = checkEncoding(to || 'UTF-8');
    str = str || '';

    var result;

    // Non-UTF-8 input given as a string is treated as raw bytes.
    if (from !== 'UTF-8' && typeof str === 'string') {
        str = Buffer.from(str, 'binary');
    }

    if (from === to) {
        // Same encoding: just ensure a Buffer is returned.
        result = typeof str === 'string' ? Buffer.from(str) : str;
    } else {
        try {
            result = convertIconvLite(str, to, from);
        } catch (E) {
            // Best-effort: log the failure and fall back to the input.
            console.error(E);
            result = str;
        }
    }

    if (typeof result === 'string') {
        result = Buffer.from(result, 'utf-8');
    }

    return result;
}
|
|||
|
|
|||
|
/**
 * Convert encoding of a string with iconv-lite
 *
 * @param {String|Buffer} str String to be converted
 * @param {String} to Encoding to be converted to
 * @param {String} [from='UTF-8'] Encoding to be converted from
 * @return {Buffer} Encoded string
 */
function convertIconvLite(str, to, from) {
    // Direct decode/encode when one side is UTF-8; otherwise pivot
    // through a UTF-8 intermediate.
    if (to === 'UTF-8') {
        return iconvLite.decode(str, from);
    }
    if (from === 'UTF-8') {
        return iconvLite.encode(str, to);
    }
    return iconvLite.encode(iconvLite.decode(str, from), to);
}
|
|||
|
|
|||
|
/**
 * Converts charset name if needed
 *
 * @param {String} name Character set
 * @return {String} Normalized, upper-cased character set name
 */
function checkEncoding(name) {
    var normalized = (name || '').toString().trim();
    // Alias rules applied in order, mirroring common charset spellings.
    var aliasRules = [
        [/^latin[\-_]?(\d+)$/i, 'ISO-8859-$1'],
        [/^win(?:dows)?[\-_]?(\d+)$/i, 'WINDOWS-$1'],
        [/^utf[\-_]?(\d+)$/i, 'UTF-$1'],
        [/^ks_c_5601\-1987$/i, 'CP949'],
        [/^us[\-_]?ascii$/i, 'ASCII']
    ];
    aliasRules.forEach(function (rule) {
        normalized = normalized.replace(rule[0], rule[1]);
    });
    return normalized.toUpperCase();
}
|
|||
|
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 7898:
|
|||
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
|
|||
|
var Buffer = __nccwpck_require__(2750).Buffer;
|
|||
|
|
|||
|
// Multibyte codec. In this scheme, a character is represented by 1 or more bytes.
|
|||
|
// Our codec supports UTF-16 surrogates, extensions for GB18030 and unicode sequences.
|
|||
|
// To save memory and loading time, we read table files only when requested.
|
|||
|
|
|||
|
exports._dbcs = DBCSCodec;
|
|||
|
|
|||
|
// Sentinel values used in the decode trie and encode tables.
var UNASSIGNED = -1,              // no mapping for this byte/char
    GB18030_CODE = -2,            // marks the end of a GB18030 4-byte sequence
    SEQ_START = -10,              // values <= SEQ_START index decodeTableSeq/encodeTableSeq
    NODE_START = -1000,           // values <= NODE_START index decodeTables (next trie node)
    UNASSIGNED_NODE = new Array(0x100), // template trie node (256 slots)
    DEF_CHAR = -1;                // key in encodeTableSeq objects marking "end of sequence"

// Pre-fill the template node so new trie nodes can be cloned from it.
for (var i = 0; i < 0x100; i++)
    UNASSIGNED_NODE[i] = UNASSIGNED;
|
|||
|
|
|||
|
|
|||
|
// Class DBCSCodec reads and initializes mapping tables.
|
|||
|
// Class DBCSCodec reads and initializes mapping tables.
// `codecOptions` supplies the table loader (and optionally gb18030 ranges,
// encodeSkipVals, encodeAdd); `iconv` supplies default replacement chars.
function DBCSCodec(codecOptions, iconv) {
    // Validate BEFORE dereferencing: previously `codecOptions.encodingName`
    // was read first, so a missing codecOptions threw a TypeError instead
    // of this intended error.
    if (!codecOptions)
        throw new Error("DBCS codec is called without the data.");
    this.encodingName = codecOptions.encodingName;
    if (!codecOptions.table)
        throw new Error("Encoding '" + this.encodingName + "' has no data.");

    // Load tables.
    var mappingTable = codecOptions.table();


    // Decode tables: MBCS -> Unicode.

    // decodeTables is a trie, encoded as an array of arrays of integers. Internal arrays are trie nodes and all have len = 256.
    // Trie root is decodeTables[0].
    // Values: >= 0 -> unicode character code. can be > 0xFFFF
    //        == UNASSIGNED -> unknown/unassigned sequence.
    //        == GB18030_CODE -> this is the end of a GB18030 4-byte sequence.
    //        <= NODE_START -> index of the next node in our trie to process next byte.
    //        <= SEQ_START -> index of the start of a character code sequence, in decodeTableSeq.
    this.decodeTables = [];
    this.decodeTables[0] = UNASSIGNED_NODE.slice(0); // Create root node.

    // Sometimes a MBCS char corresponds to a sequence of unicode chars. We store them as arrays of integers here.
    this.decodeTableSeq = [];

    // Actual mapping tables consist of chunks. Use them to fill up decode tables.
    for (var i = 0; i < mappingTable.length; i++)
        this._addDecodeChunk(mappingTable[i]);

    // Load & create GB18030 tables when needed.
    if (typeof codecOptions.gb18030 === 'function') {
        this.gb18030 = codecOptions.gb18030(); // Load GB18030 ranges.

        // Add GB18030 common decode nodes.
        var commonThirdByteNodeIdx = this.decodeTables.length;
        this.decodeTables.push(UNASSIGNED_NODE.slice(0));

        var commonFourthByteNodeIdx = this.decodeTables.length;
        this.decodeTables.push(UNASSIGNED_NODE.slice(0));

        // Fill out the tree: every 4-byte GB18030 sequence is
        // (0x81-0xFE)(0x30-0x39)(0x81-0xFE)(0x30-0x39).
        var firstByteNode = this.decodeTables[0];
        for (var i = 0x81; i <= 0xFE; i++) {
            var secondByteNode = this.decodeTables[NODE_START - firstByteNode[i]];
            for (var j = 0x30; j <= 0x39; j++) {
                if (secondByteNode[j] === UNASSIGNED) {
                    secondByteNode[j] = NODE_START - commonThirdByteNodeIdx;
                } else if (secondByteNode[j] > NODE_START) {
                    throw new Error("gb18030 decode tables conflict at byte 2");
                }

                var thirdByteNode = this.decodeTables[NODE_START - secondByteNode[j]];
                for (var k = 0x81; k <= 0xFE; k++) {
                    if (thirdByteNode[k] === UNASSIGNED) {
                        thirdByteNode[k] = NODE_START - commonFourthByteNodeIdx;
                    } else if (thirdByteNode[k] === NODE_START - commonFourthByteNodeIdx) {
                        continue; // shared node already linked; nothing to fill
                    } else if (thirdByteNode[k] > NODE_START) {
                        throw new Error("gb18030 decode tables conflict at byte 3");
                    }

                    var fourthByteNode = this.decodeTables[NODE_START - thirdByteNode[k]];
                    for (var l = 0x30; l <= 0x39; l++) {
                        if (fourthByteNode[l] === UNASSIGNED)
                            fourthByteNode[l] = GB18030_CODE;
                    }
                }
            }
        }
    }

    this.defaultCharUnicode = iconv.defaultCharUnicode;


    // Encode tables: Unicode -> DBCS.

    // `encodeTable` is array mapping from unicode char to encoded char. All its values are integers for performance.
    // Because it can be sparse, it is represented as array of buckets by 256 chars each. Bucket can be null.
    // Values: >= 0 -> it is a normal char. Write the value (if <=256 then 1 byte, if <=65536 then 2 bytes, etc.).
    //        == UNASSIGNED -> no conversion found. Output a default char.
    //        <= SEQ_START -> it's an index in encodeTableSeq, see below. The character starts a sequence.
    this.encodeTable = [];

    // `encodeTableSeq` is used when a sequence of unicode characters is encoded as a single code. We use a tree of
    // objects where keys correspond to characters in sequence and leafs are the encoded dbcs values. A special DEF_CHAR key
    // means end of sequence (needed when one sequence is a strict subsequence of another).
    // Objects are kept separately from encodeTable to increase performance.
    this.encodeTableSeq = [];

    // Some chars can be decoded, but need not be encoded.
    var skipEncodeChars = {};
    if (codecOptions.encodeSkipVals)
        for (var i = 0; i < codecOptions.encodeSkipVals.length; i++) {
            var val = codecOptions.encodeSkipVals[i];
            if (typeof val === 'number')
                skipEncodeChars[val] = true;
            else
                for (var j = val.from; j <= val.to; j++)
                    skipEncodeChars[j] = true;
        }

    // Use decode trie to recursively fill out encode tables.
    this._fillEncodeTable(0, 0, skipEncodeChars);

    // Add more encoding pairs when needed.
    if (codecOptions.encodeAdd) {
        for (var uChar in codecOptions.encodeAdd)
            if (Object.prototype.hasOwnProperty.call(codecOptions.encodeAdd, uChar))
                this._setEncodeChar(uChar.charCodeAt(0), codecOptions.encodeAdd[uChar]);
    }

    // Resolve the single-byte default replacement char, falling back to '?'.
    this.defCharSB = this.encodeTable[0][iconv.defaultCharSingleByte.charCodeAt(0)];
    if (this.defCharSB === UNASSIGNED) this.defCharSB = this.encodeTable[0]['?'];
    if (this.defCharSB === UNASSIGNED) this.defCharSB = "?".charCodeAt(0);
}
|
|||
|
|
|||
|
DBCSCodec.prototype.encoder = DBCSEncoder;
|
|||
|
DBCSCodec.prototype.decoder = DBCSDecoder;
|
|||
|
|
|||
|
// Decoder helpers
|
|||
|
// Find (or create) the trie node that holds single-byte continuations of
// the multi-byte prefix `addr` (bytes packed into one integer, low byte
// last). Returns that node; throws if a leaf value already blocks the path.
DBCSCodec.prototype._getDecodeTrieNode = function(addr) {
    // Split addr into bytes, least-significant first. NOTE: this loop
    // consumes addr down to 0.
    var bytes = [];
    for (; addr > 0; addr >>>= 8)
        bytes.push(addr & 0xFF);
    if (bytes.length == 0)
        bytes.push(0);

    var node = this.decodeTables[0];
    for (var i = bytes.length-1; i > 0; i--) { // Traverse nodes deeper into the trie.
        var val = node[bytes[i]];

        if (val == UNASSIGNED) { // Create new node.
            node[bytes[i]] = NODE_START - this.decodeTables.length;
            this.decodeTables.push(node = UNASSIGNED_NODE.slice(0));
        }
        else if (val <= NODE_START) { // Existing node.
            node = this.decodeTables[NODE_START - val];
        }
        else
            // NOTE(review): addr was shifted to 0 by the loop above, so this
            // message always reports "addr: 0" rather than the original value.
            throw new Error("Overwrite byte in " + this.encodingName + ", addr: " + addr.toString(16));
    }
    return node;
}
|
|||
|
|
|||
|
|
|||
|
// Write one chunk of the mapping table into the decode trie.
// Chunk format: [hexAddrString, item, item, ...] where each item is either
// a string of unicode chars written sequentially (with surrogate pairs and
// a private 0x0FF1-0x0FFF "sequence" escape), or a number N meaning
// "N consecutive codes continuing from the previous character".
DBCSCodec.prototype._addDecodeChunk = function(chunk) {
    // First element of chunk is the hex mbcs code where we start.
    var curAddr = parseInt(chunk[0], 16);

    // Choose the decoding node where we'll write our chars.
    var writeTable = this._getDecodeTrieNode(curAddr);
    curAddr = curAddr & 0xFF; // slot index within that node

    // Write all other elements of the chunk to the table.
    for (var k = 1; k < chunk.length; k++) {
        var part = chunk[k];
        if (typeof part === "string") { // String, write as-is.
            for (var l = 0; l < part.length;) {
                var code = part.charCodeAt(l++);
                if (0xD800 <= code && code < 0xDC00) { // Decode surrogate
                    var codeTrail = part.charCodeAt(l++);
                    if (0xDC00 <= codeTrail && codeTrail < 0xE000)
                        writeTable[curAddr++] = 0x10000 + (code - 0xD800) * 0x400 + (codeTrail - 0xDC00);
                    else
                        throw new Error("Incorrect surrogate pair in " + this.encodingName + " at chunk " + chunk[0]);
                }
                else if (0x0FF0 < code && code <= 0x0FFF) { // Character sequence (our own encoding used)
                    // 0xFFF encodes a 2-char sequence, 0xFFE a 3-char one, etc.
                    var len = 0xFFF - code + 2;
                    var seq = [];
                    for (var m = 0; m < len; m++)
                        seq.push(part.charCodeAt(l++)); // Simple variation: don't support surrogates or subsequences in seq.

                    writeTable[curAddr++] = SEQ_START - this.decodeTableSeq.length;
                    this.decodeTableSeq.push(seq);
                }
                else
                    writeTable[curAddr++] = code; // Basic char
            }
        }
        else if (typeof part === "number") { // Integer, meaning increasing sequence starting with prev character.
            var charCode = writeTable[curAddr - 1] + 1;
            for (var l = 0; l < part; l++)
                writeTable[curAddr++] = charCode++;
        }
        else
            throw new Error("Incorrect type '" + typeof part + "' given in " + this.encodingName + " at chunk " + chunk[0]);
    }
    // A chunk must not spill past the 256 slots of its trie node.
    if (curAddr > 0xFF)
        throw new Error("Incorrect chunk in " + this.encodingName + " at addr " + chunk[0] + ": too long" + curAddr);
}
|
|||
|
|
|||
|
// Encoder helpers
|
|||
|
// Return the 256-entry encode bucket for `uCode`, creating it lazily.
DBCSCodec.prototype._getEncodeBucket = function(uCode) {
    // Bucket index is the high part of the code point; it can exceed 0xFF
    // for astral characters.
    var bucketIdx = uCode >> 8;
    var bucket = this.encodeTable[bucketIdx];
    if (bucket === undefined) {
        bucket = UNASSIGNED_NODE.slice(0); // Create bucket on demand.
        this.encodeTable[bucketIdx] = bucket;
    }
    return bucket;
}
|
|||
|
|
|||
|
// Record a unicode char -> dbcs code mapping without clobbering an
// existing entry (first mapping wins).
DBCSCodec.prototype._setEncodeChar = function(uCode, dbcsCode) {
    var bucket = this._getEncodeBucket(uCode);
    var low = uCode & 0xFF;
    var existing = bucket[low];
    if (existing <= SEQ_START) {
        // Slot already starts a sequence: store this mapping as the
        // sequence's single-char default leaf.
        this.encodeTableSeq[SEQ_START - existing][DEF_CHAR] = dbcsCode;
    } else if (existing == UNASSIGNED) {
        bucket[low] = dbcsCode;
    }
}
|
|||
|
|
|||
|
// Record a mapping from a multi-char unicode sequence `seq` (array of char
// codes) to a single dbcs code, building the encodeTableSeq object tree.
DBCSCodec.prototype._setEncodeSequence = function(seq, dbcsCode) {

    // Get the root of character tree according to first character of the sequence.
    var uCode = seq[0];
    var bucket = this._getEncodeBucket(uCode);
    var low = uCode & 0xFF;

    var node;
    if (bucket[low] <= SEQ_START) {
        // There's already a sequence with  - use it.
        node = this.encodeTableSeq[SEQ_START-bucket[low]];
    }
    else {
        // There was no sequence object - allocate a new one.
        node = {};
        if (bucket[low] !== UNASSIGNED) node[DEF_CHAR] = bucket[low]; // If a char was set before - make it a single-char subsequence.
        bucket[low] = SEQ_START - this.encodeTableSeq.length;
        this.encodeTableSeq.push(node);
    }

    // Traverse the character tree, allocating new nodes as needed.
    for (var j = 1; j < seq.length-1; j++) {
        var oldVal = node[uCode];
        if (typeof oldVal === 'object')
            node = oldVal;
        else {
            node = node[uCode] = {}
            // An existing leaf becomes this node's default (shorter-sequence) value.
            if (oldVal !== undefined)
                node[DEF_CHAR] = oldVal
        }
    }

    // Set the leaf to given dbcsCode.
    uCode = seq[seq.length-1];
    node[uCode] = dbcsCode;
}
|
|||
|
|
|||
|
// Recursively walk the decode trie node `nodeIdx` (whose bytes so far form
// `prefix`) and register the reverse (unicode -> dbcs) mappings, skipping
// codes listed in `skipEncodeChars`. Returns true if the subtree produced
// at least one mapping (used to prune empty gb18030 subtrees).
DBCSCodec.prototype._fillEncodeTable = function(nodeIdx, prefix, skipEncodeChars) {
    var node = this.decodeTables[nodeIdx];
    var hasValues = false;
    var subNodeEmpty = {}; // memo of child nodes known to contribute nothing
    for (var i = 0; i < 0x100; i++) {
        var uCode = node[i];
        var mbCode = prefix + i; // full multi-byte code for this slot
        if (skipEncodeChars[mbCode])
            continue;

        if (uCode >= 0) {
            // Plain character mapping.
            this._setEncodeChar(uCode, mbCode);
            hasValues = true;
        } else if (uCode <= NODE_START) {
            // Child trie node: recurse with this byte appended to the prefix.
            var subNodeIdx = NODE_START - uCode;
            if (!subNodeEmpty[subNodeIdx]) {  // Skip empty subtrees (they are too large in gb18030).
                var newPrefix = (mbCode << 8) >>> 0;  // NOTE: '>>> 0' keeps 32-bit num positive.
                if (this._fillEncodeTable(subNodeIdx, newPrefix, skipEncodeChars))
                    hasValues = true;
                else
                    subNodeEmpty[subNodeIdx] = true;
            }
        } else if (uCode <= SEQ_START) {
            // Multi-char unicode sequence mapping.
            this._setEncodeSequence(this.decodeTableSeq[SEQ_START - uCode], mbCode);
            hasValues = true;
        }
    }
    return hasValues;
}
|
|||
|
|
|||
|
|
|||
|
|
|||
|
// == Encoder ==================================================================
|
|||
|
|
|||
|
// Streaming encoder: converts unicode string chunks into DBCS bytes using
// the codec's precomputed tables. State is kept across write() calls so a
// surrogate pair or char sequence may span chunk boundaries.
function DBCSEncoder(options, codec) {
    // Encoder state
    this.leadSurrogate = -1;   // pending lead surrogate from the previous chunk, or -1
    this.seqObj = undefined;   // current position in an encodeTableSeq tree, if mid-sequence

    // Static data (shared with the codec)
    this.encodeTable = codec.encodeTable;
    this.encodeTableSeq = codec.encodeTableSeq;
    this.defaultCharSingleByte = codec.defCharSB;
    this.gb18030 = codec.gb18030;
}
|
|||
|
|
|||
|
/**
 * Encode a JS (UTF-16) string into this codec's double-byte encoding.
 *
 * Stateful between calls: a pending lead surrogate and a partially-matched
 * encode sequence are carried in this.leadSurrogate / this.seqObj and picked
 * up by the next write() or by end().
 *
 * @param {string} str  Input string (UTF-16 code units).
 * @returns {Buffer}    Encoded bytes.
 */
DBCSEncoder.prototype.write = function(str) {
    // Worst case per UTF-16 code unit: 4 bytes for the GB18030 algorithmic
    // range, otherwise at most 3 bytes (see the dbcsCode < 0x1000000 branch).
    var newBuf = Buffer.alloc(str.length * (this.gb18030 ? 4 : 3)),
        leadSurrogate = this.leadSurrogate,
        seqObj = this.seqObj, nextChar = -1,
        i = 0, j = 0;  // i - input index, j - output index.

    while (true) {
        // 0. Get next character: either a re-queued char (nextChar) or the
        //    next code unit from the input string.
        if (nextChar === -1) {
            if (i == str.length) break;
            var uCode = str.charCodeAt(i++);
        }
        else {
            var uCode = nextChar;
            nextChar = -1;
        }

        // 1. Handle surrogates: combine valid lead+trail pairs into a single
        //    code point >= 0x10000; lone halves become UNASSIGNED (-> default char).
        if (0xD800 <= uCode && uCode < 0xE000) { // Char is one of surrogates.
            if (uCode < 0xDC00) { // We've got lead surrogate.
                if (leadSurrogate === -1) {
                    leadSurrogate = uCode;
                    continue;
                } else {
                    leadSurrogate = uCode;
                    // Double lead surrogate found.
                    uCode = UNASSIGNED;
                }
            } else { // We've got trail surrogate.
                if (leadSurrogate !== -1) {
                    uCode = 0x10000 + (leadSurrogate - 0xD800) * 0x400 + (uCode - 0xDC00);
                    leadSurrogate = -1;
                } else {
                    // Incomplete surrogate pair - only trail surrogate found.
                    uCode = UNASSIGNED;
                }

            }
        }
        else if (leadSurrogate !== -1) {
            // Incomplete surrogate pair - only lead surrogate found.
            nextChar = uCode; uCode = UNASSIGNED; // Write an error, then current char.
            leadSurrogate = -1;
        }

        // 2. Convert uCode character to dbcsCode (the byte value(s) to emit).
        var dbcsCode = UNASSIGNED;
        if (seqObj !== undefined && uCode != UNASSIGNED) { // We are in the middle of the sequence
            var resCode = seqObj[uCode];
            if (typeof resCode === 'object') { // Sequence continues.
                seqObj = resCode;
                continue;

            } else if (typeof resCode == 'number') { // Sequence finished. Write it.
                dbcsCode = resCode;

            } else if (resCode == undefined) { // Current character is not part of the sequence.

                // Try default character for this sequence
                resCode = seqObj[DEF_CHAR];
                if (resCode !== undefined) {
                    dbcsCode = resCode; // Found. Write it.
                    nextChar = uCode; // Current character will be written too in the next iteration.

                } else {
                    // TODO: What if we have no default? (resCode == undefined)
                    // Then, we should write first char of the sequence as-is and try the rest recursively.
                    // Didn't do it for now because no encoding has this situation yet.
                    // Currently, just skip the sequence and write current char.
                }
            }
            seqObj = undefined;
        }
        else if (uCode >= 0) {  // Regular character
            // Two-level lookup: high byte selects a subtable, low byte the entry.
            var subtable = this.encodeTable[uCode >> 8];
            if (subtable !== undefined)
                dbcsCode = subtable[uCode & 0xFF];

            if (dbcsCode <= SEQ_START) { // Sequence start
                // Negative sentinel values <= SEQ_START index into encodeTableSeq.
                seqObj = this.encodeTableSeq[SEQ_START-dbcsCode];
                continue;
            }

            if (dbcsCode == UNASSIGNED && this.gb18030) {
                // Use GB18030 algorithm to find character(s) to write.
                // 4-byte form: base-12600/1260/10 decomposition of the linear
                // offset into the (0x81-0xFE, 0x30-0x39, 0x81-0xFE, 0x30-0x39) space.
                var idx = findIdx(this.gb18030.uChars, uCode);
                if (idx != -1) {
                    var dbcsCode = this.gb18030.gbChars[idx] + (uCode - this.gb18030.uChars[idx]);
                    newBuf[j++] = 0x81 + Math.floor(dbcsCode / 12600); dbcsCode = dbcsCode % 12600;
                    newBuf[j++] = 0x30 + Math.floor(dbcsCode / 1260); dbcsCode = dbcsCode % 1260;
                    newBuf[j++] = 0x81 + Math.floor(dbcsCode / 10); dbcsCode = dbcsCode % 10;
                    newBuf[j++] = 0x30 + dbcsCode;
                    continue;
                }
            }
        }

        // 3. Write dbcsCode character (1-4 bytes, big-endian byte order).
        if (dbcsCode === UNASSIGNED)
            dbcsCode = this.defaultCharSingleByte;

        if (dbcsCode < 0x100) {
            newBuf[j++] = dbcsCode;
        }
        else if (dbcsCode < 0x10000) {
            newBuf[j++] = dbcsCode >> 8;   // high byte
            newBuf[j++] = dbcsCode & 0xFF; // low byte
        }
        else if (dbcsCode < 0x1000000) {
            newBuf[j++] = dbcsCode >> 16;
            newBuf[j++] = (dbcsCode >> 8) & 0xFF;
            newBuf[j++] = dbcsCode & 0xFF;
        } else {
            // Unsigned shifts: dbcsCode may use the full 32 bits here.
            newBuf[j++] = dbcsCode >>> 24;
            newBuf[j++] = (dbcsCode >>> 16) & 0xFF;
            newBuf[j++] = (dbcsCode >>> 8) & 0xFF;
            newBuf[j++] = dbcsCode & 0xFF;
        }
    }

    // Persist cross-call state and return only the bytes actually written.
    this.seqObj = seqObj;
    this.leadSurrogate = leadSurrogate;
    return newBuf.slice(0, j);
}
|
|||
|
|
|||
|
/**
 * Flush encoder state at end of stream.
 * Returns undefined when nothing was pending, otherwise a Buffer with the
 * bytes produced by resolving the leftover sequence / lone lead surrogate.
 */
DBCSEncoder.prototype.end = function() {
    // Fast path: no pending sequence and no buffered lead surrogate.
    if (this.leadSurrogate === -1 && this.seqObj === undefined)
        return; // All clean. Most often case.

    var out = Buffer.alloc(10);
    var len = 0;

    if (this.seqObj) { // An encode sequence was left unfinished.
        var code = this.seqObj[DEF_CHAR];
        if (code !== undefined) { // Emit the sequence's default character.
            if (code < 0x100) {
                out[len++] = code;
            } else {
                out[len++] = code >> 8;   // high byte
                out[len++] = code & 0xFF; // low byte
            }
        }
        // else: no default char for this sequence; drop it (see TODO in write()).
        this.seqObj = undefined;
    }

    if (this.leadSurrogate !== -1) {
        // Incomplete surrogate pair - only lead surrogate found.
        out[len++] = this.defaultCharSingleByte;
        this.leadSurrogate = -1;
    }

    return out.slice(0, len);
}
|
|||
|
|
|||
|
// Export for testing: expose the GB18030 binary-search helper on the
// encoder prototype so the test suite can call it directly.
DBCSEncoder.prototype.findIdx = findIdx;
|
|||
|
|
|||
|
|
|||
|
// == Decoder ==================================================================
|
|||
|
|
|||
|
/**
 * Streaming DBCS decoder.
 * @param {Object} options  Unused here; accepted for decoder-interface parity.
 * @param {Object} codec    Parent codec holding the static decode tables.
 */
function DBCSDecoder(options, codec) {
    // Mutable per-stream state.
    this.nodeIdx = 0;    // index of the decode-trie node we are currently in
    this.prevBytes = []; // trailing bytes of the last write() that didn't finish a char

    // Static data, shared with (and owned by) the codec.
    this.decodeTables = codec.decodeTables;
    this.decodeTableSeq = codec.decodeTableSeq;
    this.defaultCharUnicode = codec.defaultCharUnicode;
    this.gb18030 = codec.gb18030;
}
|
|||
|
|
|||
|
/**
 * Decode a chunk of bytes into a JS (UTF-16) string.
 *
 * Walks the decode trie byte-by-byte. Bytes that do not complete a character
 * by the end of the chunk are stashed in this.prevBytes and re-examined on
 * the next write() (via negative indices - see curByte below) or in end().
 *
 * @param {Buffer} buf  Input bytes.
 * @returns {string}    Decoded string.
 */
DBCSDecoder.prototype.write = function(buf) {
    // Output is assembled as little-endian UCS-2 bytes, 2 per code unit.
    var newBuf = Buffer.alloc(buf.length*2),
        nodeIdx = this.nodeIdx,
        prevBytes = this.prevBytes, prevOffset = this.prevBytes.length,
        seqStart = -this.prevBytes.length, // idx of the start of current parsed sequence.
        uCode;

    for (var i = 0, j = 0; i < buf.length; i++) {
        // Negative i (after a rewind to seqStart) reads from the bytes left
        // over from the previous write().
        var curByte = (i >= 0) ? buf[i] : prevBytes[i + prevOffset];

        // Lookup in current trie node.
        var uCode = this.decodeTables[nodeIdx][curByte];

        if (uCode >= 0) {
            // Normal character, just use it.
        }
        else if (uCode === UNASSIGNED) { // Unknown char.
            // TODO: Callback with seq.
            uCode = this.defaultCharUnicode.charCodeAt(0);
            i = seqStart; // Skip one byte ('i' will be incremented by the for loop) and try to parse again.
        }
        else if (uCode === GB18030_CODE) {
            // 4-byte GB18030 form: rebuild the linear pointer from the last
            // 4 bytes (reaching back into prevBytes when they straddle chunks).
            if (i >= 3) {
                var ptr = (buf[i-3]-0x81)*12600 + (buf[i-2]-0x30)*1260 + (buf[i-1]-0x81)*10 + (curByte-0x30);
            } else {
                var ptr = (prevBytes[i-3+prevOffset]-0x81)*12600 +
                          (((i-2 >= 0) ? buf[i-2] : prevBytes[i-2+prevOffset])-0x30)*1260 +
                          (((i-1 >= 0) ? buf[i-1] : prevBytes[i-1+prevOffset])-0x81)*10 +
                          (curByte-0x30);
            }
            // Map the pointer through the uChars/gbChars range tables.
            var idx = findIdx(this.gb18030.gbChars, ptr);
            uCode = this.gb18030.uChars[idx] + ptr - this.gb18030.gbChars[idx];
        }
        else if (uCode <= NODE_START) { // Go to next trie node.
            nodeIdx = NODE_START - uCode;
            continue;
        }
        else if (uCode <= SEQ_START) { // Output a sequence of chars.
            var seq = this.decodeTableSeq[SEQ_START - uCode];
            // Write all chars of the sequence but the last; the last one falls
            // through to the common write path below.
            for (var k = 0; k < seq.length - 1; k++) {
                uCode = seq[k];
                newBuf[j++] = uCode & 0xFF;
                newBuf[j++] = uCode >> 8;
            }
            uCode = seq[seq.length-1];
        }
        else
            throw new Error("iconv-lite internal error: invalid decoding table value " + uCode + " at " + nodeIdx + "/" + curByte);

        // Write the character to buffer, handling higher planes using surrogate pair.
        if (uCode >= 0x10000) {
            uCode -= 0x10000;
            var uCodeLead = 0xD800 | (uCode >> 10);
            newBuf[j++] = uCodeLead & 0xFF;
            newBuf[j++] = uCodeLead >> 8;

            uCode = 0xDC00 | (uCode & 0x3FF);
        }
        newBuf[j++] = uCode & 0xFF;
        newBuf[j++] = uCode >> 8;

        // Reset trie node.
        nodeIdx = 0; seqStart = i+1;
    }

    this.nodeIdx = nodeIdx;
    // Carry over the unfinished tail: either a suffix of this chunk, or the
    // old prevBytes remainder plus the whole chunk when seqStart is negative.
    this.prevBytes = (seqStart >= 0)
        ? Array.prototype.slice.call(buf, seqStart)
        : prevBytes.slice(seqStart + prevOffset).concat(Array.prototype.slice.call(buf));

    return newBuf.slice(0, j).toString('ucs2');
}
|
|||
|
|
|||
|
/**
 * Flush decoder state at end of stream: any bytes still buffered could not
 * form a complete character, so the first byte is replaced with the default
 * char and the rest are re-parsed, repeating until nothing remains.
 */
DBCSDecoder.prototype.end = function() {
    var out = '';

    for (;;) {
        var pending = this.prevBytes;
        if (pending.length === 0)
            break;

        // The first pending byte never completed a character - emit the
        // replacement char for it.
        out += this.defaultCharUnicode;

        // Re-parse everything after it with a clean state; write() may
        // buffer a new (shorter) tail, which the loop then picks up.
        this.prevBytes = [];
        this.nodeIdx = 0;
        var rest = pending.slice(1);
        if (rest.length > 0)
            out += this.write(rest);
    }

    this.prevBytes = [];
    this.nodeIdx = 0;
    return out;
}
|
|||
|
|
|||
|
// Binary search for GB18030. Returns largest i such that table[i] <= val.
|
|||
|
// Binary search for GB18030 range tables: returns the largest index i such
// that table[i] <= val, or -1 when even table[0] is greater than val.
// 'table' must be sorted in ascending order.
function findIdx(table, val) {
    if (table[0] > val)
        return -1;

    var lo = 0, hi = table.length - 1, best = 0;
    while (lo <= hi) {
        var mid = (lo + hi) >> 1;
        if (table[mid] <= val) {
            best = mid;      // candidate answer; look for a larger one
            lo = mid + 1;
        } else {
            hi = mid - 1;
        }
    }
    return best;
}
|
|||
|
|
|||
|
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 8682:
|
|||
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
|
|||
|
|
|||
|
// Description of supported double byte encodings and aliases.
|
|||
|
// Tables are not require()-d until they are needed to speed up library load.
|
|||
|
// require()-s are direct to support Browserify.
|
|||
|
|
|||
|
// NOTE: entries with object values define codecs; entries with string values
// are aliases that are resolved to another key of this table by the library's
// codec lookup.
module.exports = {

    // == Japanese/ShiftJIS ====================================================
    // All japanese encodings are based on JIS X set of standards:
    // JIS X 0201 - Single-byte encoding of ASCII + ¥ + Kana chars at 0xA1-0xDF.
    // JIS X 0208 - Main set of 6879 characters, placed in 94x94 plane, to be encoded by 2 bytes.
    //              Has several variations in 1978, 1983, 1990 and 1997.
    // JIS X 0212 - Supplementary plane of 6067 chars in 94x94 plane. 1990. Effectively dead.
    // JIS X 0213 - Extension and modern replacement of 0208 and 0212. Total chars: 11233.
    //              2 planes, first is superset of 0208, second - revised 0212.
    //              Introduced in 2000, revised 2004. Some characters are in Unicode Plane 2 (0x2xxxx)

    // Byte encodings are:
    //  * Shift_JIS: Compatible with 0201, uses not defined chars in top half as lead bytes for double-byte
    //               encoding of 0208. Lead byte ranges: 0x81-0x9F, 0xE0-0xEF; Trail byte ranges: 0x40-0x7E, 0x80-0x9E, 0x9F-0xFC.
    //               Windows CP932 is a superset of Shift_JIS. Some companies added more chars, notably KDDI.
    //  * EUC-JP: Up to 3 bytes per character. Used mostly on *nixes.
    //            0x00-0x7F       - lower part of 0201
    //            0x8E, 0xA1-0xDF - upper part of 0201
    //            (0xA1-0xFE)x2   - 0208 plane (94x94).
    //            0x8F, (0xA1-0xFE)x2 - 0212 plane (94x94).
    //  * JIS X 208: 7-bit, direct encoding of 0208. Byte ranges: 0x21-0x7E (94 values). Uncommon.
    //               Used as-is in ISO2022 family.
    //  * ISO2022-JP: Stateful encoding, with escape sequences to switch between ASCII,
    //                0201-1976 Roman, 0208-1978, 0208-1983.
    //  * ISO2022-JP-1: Adds esc seq for 0212-1990.
    //  * ISO2022-JP-2: Adds esc seq for GB2313-1980, KSX1001-1992, ISO8859-1, ISO8859-7.
    //  * ISO2022-JP-3: Adds esc seq for 0201-1976 Kana set, 0213-2000 Planes 1, 2.
    //  * ISO2022-JP-2004: Adds 0213-2004 Plane 1.
    //
    // After JIS X 0213 appeared, Shift_JIS-2004, EUC-JISX0213 and ISO2022-JP-2004 followed, with just changing the planes.
    //
    // Overall, it seems that it's a mess :( http://www8.plala.or.jp/tkubota1/unicode-symbols-map2.html

    // 'table' is a lazy loader: the mapping data is only require()-d when the
    // encoding is first used (see header comment of this module).
    'shiftjis': {
        type: '_dbcs',
        table: function() { return __nccwpck_require__(5667) },
        encodeAdd: {'\u00a5': 0x5C, '\u203E': 0x7E},
        encodeSkipVals: [{from: 0xED40, to: 0xF940}],
    },
    'csshiftjis': 'shiftjis',
    'mskanji': 'shiftjis',
    'sjis': 'shiftjis',
    'windows31j': 'shiftjis',
    'ms31j': 'shiftjis',
    'xsjis': 'shiftjis',
    'windows932': 'shiftjis',
    'ms932': 'shiftjis',
    '932': 'shiftjis',
    'cp932': 'shiftjis',

    'eucjp': {
        type: '_dbcs',
        table: function() { return __nccwpck_require__(2653) },
        encodeAdd: {'\u00a5': 0x5C, '\u203E': 0x7E},
    },

    // TODO: KDDI extension to Shift_JIS
    // TODO: IBM CCSID 942 = CP932, but F0-F9 custom chars and other char changes.
    // TODO: IBM CCSID 943 = Shift_JIS = CP932 with original Shift_JIS lower 128 chars.


    // == Chinese/GBK ==========================================================
    // http://en.wikipedia.org/wiki/GBK
    // We mostly implement W3C recommendation: https://www.w3.org/TR/encoding/#gbk-encoder

    // Oldest GB2312 (1981, ~7600 chars) is a subset of CP936
    'gb2312': 'cp936',
    'gb231280': 'cp936',
    'gb23121980': 'cp936',
    'csgb2312': 'cp936',
    'csiso58gb231280': 'cp936',
    'euccn': 'cp936',

    // Microsoft's CP936 is a subset and approximation of GBK.
    'windows936': 'cp936',
    'ms936': 'cp936',
    '936': 'cp936',
    'cp936': {
        type: '_dbcs',
        table: function() { return __nccwpck_require__(5334) },
    },

    // GBK (~22000 chars) is an extension of CP936 that added user-mapped chars and some other.
    'gbk': {
        type: '_dbcs',
        table: function() { return __nccwpck_require__(5334).concat(__nccwpck_require__(7714)) },
    },
    'xgbk': 'gbk',
    'isoir58': 'gbk',

    // GB18030 is an algorithmic extension of GBK.
    // Main source: https://www.w3.org/TR/encoding/#gbk-encoder
    // http://icu-project.org/docs/papers/gb18030.html
    // http://source.icu-project.org/repos/icu/data/trunk/charset/data/xml/gb-18030-2000.xml
    // http://www.khngai.com/chinese/charmap/tblgbk.php?page=0
    'gb18030': {
        type: '_dbcs',
        table: function() { return __nccwpck_require__(5334).concat(__nccwpck_require__(7714)) },
        gb18030: function() { return __nccwpck_require__(9621) },
        encodeSkipVals: [0x80],
        encodeAdd: {'€': 0xA2E3},
    },

    'chinese': 'gb18030',


    // == Korean ===============================================================
    // EUC-KR, KS_C_5601 and KS X 1001 are exactly the same.
    'windows949': 'cp949',
    'ms949': 'cp949',
    '949': 'cp949',
    'cp949': {
        type: '_dbcs',
        table: function() { return __nccwpck_require__(4442) },
    },

    'cseuckr': 'cp949',
    'csksc56011987': 'cp949',
    'euckr': 'cp949',
    'isoir149': 'cp949',
    'korean': 'cp949',
    'ksc56011987': 'cp949',
    'ksc56011989': 'cp949',
    'ksc5601': 'cp949',


    // == Big5/Taiwan/Hong Kong ================================================
    // There are lots of tables for Big5 and cp950. Please see the following links for history:
    // http://moztw.org/docs/big5/ http://www.haible.de/bruno/charsets/conversion-tables/Big5.html
    // Variations, in roughly number of defined chars:
    //  * Windows CP 950: Microsoft variant of Big5. Canonical: http://www.unicode.org/Public/MAPPINGS/VENDORS/MICSFT/WINDOWS/CP950.TXT
    //  * Windows CP 951: Microsoft variant of Big5-HKSCS-2001. Seems to be never public. http://me.abelcheung.org/articles/research/what-is-cp951/
    //  * Big5-2003 (Taiwan standard) almost superset of cp950.
    //  * Unicode-at-on (UAO) / Mozilla 1.8. Falling out of use on the Web. Not supported by other browsers.
    //  * Big5-HKSCS (-2001, -2004, -2008). Hong Kong standard.
    //    many unicode code points moved from PUA to Supplementary plane (U+2XXXX) over the years.
    //    Plus, it has 4 combining sequences.
    //    Seems that Mozilla refused to support it for 10 yrs. https://bugzilla.mozilla.org/show_bug.cgi?id=162431 https://bugzilla.mozilla.org/show_bug.cgi?id=310299
    //    because big5-hkscs is the only encoding to include astral characters in non-algorithmic way.
    //    Implementations are not consistent within browsers; sometimes labeled as just big5.
    //    MS Internet Explorer switches from big5 to big5-hkscs when a patch applied.
    //    Great discussion & recap of what's going on https://bugzilla.mozilla.org/show_bug.cgi?id=912470#c31
    //    In the encoder, it might make sense to support encoding old PUA mappings to Big5 bytes seq-s.
    //    Official spec: http://www.ogcio.gov.hk/en/business/tech_promotion/ccli/terms/doc/2003cmp_2008.txt
    //                   http://www.ogcio.gov.hk/tc/business/tech_promotion/ccli/terms/doc/hkscs-2008-big5-iso.txt
    //
    // Current understanding of how to deal with Big5(-HKSCS) is in the Encoding Standard, http://encoding.spec.whatwg.org/#big5-encoder
    // Unicode mapping (http://www.unicode.org/Public/MAPPINGS/OBSOLETE/EASTASIA/OTHER/BIG5.TXT) is said to be wrong.

    'windows950': 'cp950',
    'ms950': 'cp950',
    '950': 'cp950',
    'cp950': {
        type: '_dbcs',
        table: function() { return __nccwpck_require__(6453) },
    },

    // Big5 has many variations and is an extension of cp950. We use Encoding Standard's as a consensus.
    'big5': 'big5hkscs',
    'big5hkscs': {
        type: '_dbcs',
        table: function() { return __nccwpck_require__(6453).concat(__nccwpck_require__(3848)) },
        encodeSkipVals: [0xa2cc],
    },

    'cnbig5': 'big5hkscs',
    'csbig5': 'big5hkscs',
    'xxbig5': 'big5hkscs',
};
|
|||
|
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 8956:
|
|||
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
|
|||
|
|
|||
|
// Update this array if you add/rename/remove files in this directory.
|
|||
|
// We support Browserify by skipping automatic module discovery and requiring modules directly.
|
|||
|
// Update this array if you add/rename/remove files in this directory.
// We support Browserify by skipping automatic module discovery and requiring modules directly.
var modules = [
    __nccwpck_require__(9557),
    __nccwpck_require__(2866),
    __nccwpck_require__(2478),
    __nccwpck_require__(244),
    __nccwpck_require__(5998),
    __nccwpck_require__(9201),
    __nccwpck_require__(4234),
    __nccwpck_require__(7898),
    __nccwpck_require__(8682),
];

// Merge every module's encoding/alias/codec definitions onto the single
// exported object (own enumerable properties only).
modules.forEach(function(defs) {
    Object.keys(defs).forEach(function(name) {
        exports[name] = defs[name];
    });
});
|
|||
|
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 9557:
|
|||
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
|
|||
|
var Buffer = __nccwpck_require__(2750).Buffer;
|
|||
|
|
|||
|
// Export Node.js internal encodings.
|
|||
|
|
|||
|
// Encodings handled natively by Node, plus the codec that wires them up.
// String values are aliases of another key in this table.
module.exports = {
    // Encodings
    utf8:   { type: "_internal", bomAware: true},
    cesu8:  { type: "_internal", bomAware: true},
    unicode11utf8: "utf8",

    ucs2:   { type: "_internal", bomAware: true},
    utf16le: "ucs2",

    binary: { type: "_internal" },
    base64: { type: "_internal" },
    hex:    { type: "_internal" },

    // Codec.
    _internal: InternalCodec,
};
|
|||
|
|
|||
|
//------------------------------------------------------------------------------
|
|||
|
|
|||
|
/**
 * Codec for encodings Node supports natively. Most of them reuse the default
 * encoder/decoder set on the prototype; base64 and cesu8 need special handling.
 * @param {Object} codecOptions  { encodingName, bomAware }.
 * @param {Object} iconv         Library object (source of defaultCharUnicode).
 */
function InternalCodec(codecOptions, iconv) {
    this.enc = codecOptions.encodingName;
    this.bomAware = codecOptions.bomAware;

    switch (this.enc) {
        case "base64":
            // base64 needs a stateful encoder to buffer incomplete quads.
            this.encoder = InternalEncoderBase64;
            break;

        case "cesu8":
            this.enc = "utf8"; // Use utf8 for decoding.
            this.encoder = InternalEncoderCesu8;

            // Add decoder for versions of Node not supporting CESU-8
            if (Buffer.from('eda0bdedb2a9', 'hex').toString() !== '💩') {
                this.decoder = InternalDecoderCesu8;
                this.defaultCharUnicode = iconv.defaultCharUnicode;
            }
            break;
    }
}
|
|||
|
|
|||
|
// Default encoder/decoder for internal encodings; the constructor overrides
// these with specialized classes for base64 and cesu8.
InternalCodec.prototype.encoder = InternalEncoder;
InternalCodec.prototype.decoder = InternalDecoder;
|
|||
|
|
|||
|
//------------------------------------------------------------------------------
|
|||
|
|
|||
|
// We use node.js internal decoder. Its signature is the same as ours.
var StringDecoder = __nccwpck_require__(4304).StringDecoder;

// Polyfill: guarantee end() exists so InternalDecoder can call it unconditionally.
if (!StringDecoder.prototype.end) // Node v0.8 doesn't have this method.
    StringDecoder.prototype.end = function() {};
|
|||
|
|
|||
|
|
|||
|
// Thin wrapper delegating all decoding to Node's built-in StringDecoder.
function InternalDecoder(options, codec) {
    this.decoder = new StringDecoder(codec.enc);
}

InternalDecoder.prototype.write = function(buf) {
    // StringDecoder expects a Buffer; coerce other inputs first.
    var input = Buffer.isBuffer(buf) ? buf : Buffer.from(buf);
    return this.decoder.write(input);
}

InternalDecoder.prototype.end = function() {
    // Flush any partial multi-byte character StringDecoder is holding.
    return this.decoder.end();
}
|
|||
|
|
|||
|
|
|||
|
//------------------------------------------------------------------------------
|
|||
|
// Encoder is mostly trivial
|
|||
|
|
|||
|
// Trivial encoder: Node's Buffer handles all internal encodings natively.
function InternalEncoder(options, codec) {
    this.enc = codec.enc;
}

InternalEncoder.prototype.write = function(str) {
    return Buffer.from(str, this.enc);
}

InternalEncoder.prototype.end = function() {
    // Stateless: nothing to flush.
}
|
|||
|
|
|||
|
|
|||
|
//------------------------------------------------------------------------------
|
|||
|
// Except base64 encoder, which must keep its state.
|
|||
|
|
|||
|
// base64 encoder must keep state: input may split a 4-char quad across writes.
function InternalEncoderBase64(options, codec) {
    // Tail of base64 chars that doesn't yet form a complete quad.
    this.prevStr = '';
}

InternalEncoderBase64.prototype.write = function(str) {
    var combined = this.prevStr + str;
    // Decode only whole 4-char quads; carry the remainder to the next call.
    var quadLen = combined.length - (combined.length % 4);
    this.prevStr = combined.slice(quadLen);
    return Buffer.from(combined.slice(0, quadLen), "base64");
}

InternalEncoderBase64.prototype.end = function() {
    // Best-effort decode of whatever partial quad is left.
    return Buffer.from(this.prevStr, "base64");
}
|
|||
|
|
|||
|
|
|||
|
//------------------------------------------------------------------------------
|
|||
|
// CESU-8 encoder is also special.
|
|||
|
|
|||
|
// CESU-8 encoder. Stateless: every UTF-16 code unit maps to an independent
// 1-3 byte sequence (surrogate halves are encoded separately, per CESU-8).
function InternalEncoderCesu8(options, codec) {
}

InternalEncoderCesu8.prototype.write = function(str) {
    // At most 3 bytes per UTF-16 code unit.
    var out = Buffer.alloc(str.length * 3);
    var pos = 0;
    for (var idx = 0; idx < str.length; idx++) {
        // Naive implementation, but it works because CESU-8 is especially easy
        // to convert from UTF-16 (which all JS strings are encoded in).
        var unit = str.charCodeAt(idx);
        if (unit < 0x80) {
            // 1 byte: plain ASCII.
            out[pos++] = unit;
        } else if (unit < 0x800) {
            // 2 bytes: 110xxxxx 10xxxxxx.
            out[pos++] = 0xC0 + (unit >>> 6);
            out[pos++] = 0x80 + (unit & 0x3f);
        } else {
            // 3 bytes: 1110xxxx 10xxxxxx 10xxxxxx (code units are < 0x10000).
            out[pos++] = 0xE0 + (unit >>> 12);
            out[pos++] = 0x80 + ((unit >>> 6) & 0x3f);
            out[pos++] = 0x80 + (unit & 0x3f);
        }
    }
    return out.slice(0, pos);
}

InternalEncoderCesu8.prototype.end = function() {
    // Stateless: nothing to flush.
}
|
|||
|
|
|||
|
//------------------------------------------------------------------------------
|
|||
|
// CESU-8 decoder is not implemented in Node v4.0+
|
|||
|
|
|||
|
// Streaming CESU-8 decoder, used when Node itself no longer decodes CESU-8.
// Decodes each 1-3 byte sequence to a single UTF-16 code unit; surrogate
// halves pass through individually, which is exactly what CESU-8 requires.
function InternalDecoderCesu8(options, codec) {
    this.acc = 0;       // bits accumulated for the code unit in progress
    this.contBytes = 0; // continuation bytes still expected
    this.accBytes = 0;  // bytes consumed so far for the current code unit
    this.defaultCharUnicode = codec.defaultCharUnicode;
}

/**
 * Decode a chunk of CESU-8 bytes.
 * Invalid input (truncated sequences, stray continuation bytes, 4+ byte
 * leads, overlong encodings) is replaced with this.defaultCharUnicode.
 * @param {Buffer} buf
 * @returns {string}
 */
InternalDecoderCesu8.prototype.write = function(buf) {
    var acc = this.acc, contBytes = this.contBytes, accBytes = this.accBytes,
        res = '';
    for (var i = 0; i < buf.length; i++) {
        var curByte = buf[i];
        if ((curByte & 0xC0) !== 0x80) { // Leading byte
            if (contBytes > 0) { // Previous code is invalid
                res += this.defaultCharUnicode;
                contBytes = 0;
            }

            if (curByte < 0x80) { // Single-byte code
                res += String.fromCharCode(curByte);
            } else if (curByte < 0xE0) { // Two-byte code
                acc = curByte & 0x1F;
                contBytes = 1; accBytes = 1;
            } else if (curByte < 0xF0) { // Three-byte code
                acc = curByte & 0x0F;
                contBytes = 2; accBytes = 1;
            } else { // Four or more are not supported for CESU-8.
                res += this.defaultCharUnicode;
            }
        } else { // Continuation byte
            if (contBytes > 0) { // We're waiting for it.
                acc = (acc << 6) | (curByte & 0x3f);
                contBytes--; accBytes++;
                if (contBytes === 0) {
                    // Check for overlong encoding, but support Modified UTF-8 (encoding NULL as C0 80)
                    if (accBytes === 2 && acc < 0x80 && acc > 0)
                        res += this.defaultCharUnicode;
                    else if (accBytes === 3 && acc < 0x800)
                        res += this.defaultCharUnicode;
                    else
                        // Actually add character.
                        res += String.fromCharCode(acc);
                }
            } else { // Unexpected continuation byte
                res += this.defaultCharUnicode;
            }
        }
    }
    this.acc = acc; this.contBytes = contBytes; this.accBytes = accBytes;
    return res;
}

/**
 * Flush decoder state at end of stream.
 * Bug fix: 'res' was previously initialized to the number 0, so a clean
 * stream returned 0 (a number, not a string) and a dangling partial
 * sequence returned "0<defaultChar>". Decoder end() must return a string.
 * @returns {string} '' normally, or the default char if bytes were pending.
 */
InternalDecoderCesu8.prototype.end = function() {
    var res = '';
    if (this.contBytes > 0)
        res += this.defaultCharUnicode;
    return res;
}
|
|||
|
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 5998:
|
|||
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
|
|||
|
var Buffer = __nccwpck_require__(2750).Buffer;
|
|||
|
|
|||
|
// Single-byte codec. Needs a 'chars' string parameter that contains 256 or 128 chars that
// correspond to encoded bytes (if 128 - then lower half is ASCII).

// Registered under the '_sbcs' type name used by the sbcs data tables.
exports._sbcs = SBCSCodec;
|
|||
|
/**
 * Single-byte codec: builds the byte<->char lookup tables once per encoding.
 * @param {Object} codecOptions  Must carry 'chars' (128 or 256 chars).
 * @param {Object} iconv         Library object (source of defaultCharSingleByte).
 * @throws {Error} when codecOptions is missing or 'chars' has a bad length.
 */
function SBCSCodec(codecOptions, iconv) {
    if (!codecOptions)
        throw new Error("SBCS codec is called without the data.")

    // Validate the decode table: either a full 256-char map, or the upper
    // 128 chars with an implied ASCII lower half.
    var chars = codecOptions.chars;
    if (!chars || (chars.length !== 128 && chars.length !== 256))
        throw new Error("Encoding '"+codecOptions.type+"' has incorrect 'chars' (must be of len 128 or 256)");

    if (chars.length === 128) {
        var ascii = "";
        for (var code = 0; code < 128; code++)
            ascii += String.fromCharCode(code);
        chars = ascii + chars;
        codecOptions.chars = chars;
    }

    // byte value -> UCS-2 char, stored as raw little-endian bytes for fast decode.
    this.decodeBuf = Buffer.from(chars, 'ucs2');

    // char code -> byte value; every unmapped char falls back to the default char.
    var encodeBuf = Buffer.alloc(65536, iconv.defaultCharSingleByte.charCodeAt(0));
    for (var b = 0; b < chars.length; b++)
        encodeBuf[chars.charCodeAt(b)] = b;

    this.encodeBuf = encodeBuf;
}
|
|||
|
|
|||
|
// Wire up the encoder/decoder classes this codec hands out.
SBCSCodec.prototype.encoder = SBCSEncoder;
SBCSCodec.prototype.decoder = SBCSDecoder;
|
|||
|
|
|||
|
|
|||
|
// Single-byte encoder: one table lookup per character.
function SBCSEncoder(options, codec) {
    // Shared 65536-entry table: char code -> encoded byte.
    this.encodeBuf = codec.encodeBuf;
}

SBCSEncoder.prototype.write = function(str) {
    // Exactly one output byte per input char.
    var out = Buffer.alloc(str.length);
    for (var pos = 0; pos < str.length; pos++)
        out[pos] = this.encodeBuf[str.charCodeAt(pos)];
    return out;
}

SBCSEncoder.prototype.end = function() {
    // Stateless: nothing to flush.
}
|
|||
|
|
|||
|
|
|||
|
// Single-byte decoder: one table lookup per byte.
function SBCSDecoder(options, codec) {
    // byte -> UCS-2 char table, as raw little-endian bytes.
    this.decodeBuf = codec.decodeBuf;
}

SBCSDecoder.prototype.write = function(buf) {
    // Strings are immutable in JS -> assemble the result as a ucs2 byte
    // buffer and convert once at the end, which is much faster.
    var table = this.decodeBuf;
    var out = Buffer.alloc(buf.length * 2);
    for (var i = 0; i < buf.length; i++) {
        var src = buf[i] * 2; // each table entry is 2 bytes
        var dst = i * 2;
        out[dst] = table[src];
        out[dst + 1] = table[src + 1];
    }
    return out.toString('ucs2');
}

SBCSDecoder.prototype.end = function() {
    // Stateless: nothing to flush.
}
|
|||
|
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 4234:
|
|||
|
/***/ ((module) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
|
|||
|
|
|||
|
// Generated data for sbcs codec. Don't edit manually. Regenerate using generation/gen-sbcs.js script.
|
|||
|
module.exports = {
|
|||
|
"437": "cp437",
|
|||
|
"737": "cp737",
|
|||
|
"775": "cp775",
|
|||
|
"850": "cp850",
|
|||
|
"852": "cp852",
|
|||
|
"855": "cp855",
|
|||
|
"856": "cp856",
|
|||
|
"857": "cp857",
|
|||
|
"858": "cp858",
|
|||
|
"860": "cp860",
|
|||
|
"861": "cp861",
|
|||
|
"862": "cp862",
|
|||
|
"863": "cp863",
|
|||
|
"864": "cp864",
|
|||
|
"865": "cp865",
|
|||
|
"866": "cp866",
|
|||
|
"869": "cp869",
|
|||
|
"874": "windows874",
|
|||
|
"922": "cp922",
|
|||
|
"1046": "cp1046",
|
|||
|
"1124": "cp1124",
|
|||
|
"1125": "cp1125",
|
|||
|
"1129": "cp1129",
|
|||
|
"1133": "cp1133",
|
|||
|
"1161": "cp1161",
|
|||
|
"1162": "cp1162",
|
|||
|
"1163": "cp1163",
|
|||
|
"1250": "windows1250",
|
|||
|
"1251": "windows1251",
|
|||
|
"1252": "windows1252",
|
|||
|
"1253": "windows1253",
|
|||
|
"1254": "windows1254",
|
|||
|
"1255": "windows1255",
|
|||
|
"1256": "windows1256",
|
|||
|
"1257": "windows1257",
|
|||
|
"1258": "windows1258",
|
|||
|
"28591": "iso88591",
|
|||
|
"28592": "iso88592",
|
|||
|
"28593": "iso88593",
|
|||
|
"28594": "iso88594",
|
|||
|
"28595": "iso88595",
|
|||
|
"28596": "iso88596",
|
|||
|
"28597": "iso88597",
|
|||
|
"28598": "iso88598",
|
|||
|
"28599": "iso88599",
|
|||
|
"28600": "iso885910",
|
|||
|
"28601": "iso885911",
|
|||
|
"28603": "iso885913",
|
|||
|
"28604": "iso885914",
|
|||
|
"28605": "iso885915",
|
|||
|
"28606": "iso885916",
|
|||
|
"windows874": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "€<><E282AC><EFBFBD><EFBFBD>…<EFBFBD><E280A6><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD>‘’“”•–—<E28093><E28094><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD> กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู<E0B8B9><E0B8BA><EFBFBD><EFBFBD>฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛<E0B99A><E0B99B><EFBFBD><EFBFBD>"
|
|||
|
},
|
|||
|
"win874": "windows874",
|
|||
|
"cp874": "windows874",
|
|||
|
"windows1250": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "€<>‚<EFBFBD>„…†‡<E280A0>‰Š‹ŚŤŽŹ<C5BD>‘’“”•–—<E28093>™š›śťžź ˇ˘Ł¤Ą¦§¨©Ş«¬®Ż°±˛ł´µ¶·¸ąş»Ľ˝ľżŔÁÂĂÄĹĆÇČÉĘËĚÍÎĎĐŃŇÓÔŐÖ×ŘŮÚŰÜÝŢßŕáâăäĺćçčéęëěíîďđńňóôőö÷řůúűüýţ˙"
|
|||
|
},
|
|||
|
"win1250": "windows1250",
|
|||
|
"cp1250": "windows1250",
|
|||
|
"windows1251": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "ЂЃ‚ѓ„…†‡€‰Љ‹ЊЌЋЏђ‘’“”•–—<E28093>™љ›њќћџ ЎўЈ¤Ґ¦§Ё©Є«¬®Ї°±Ііґµ¶·ё№є»јЅѕїАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя"
|
|||
|
},
|
|||
|
"win1251": "windows1251",
|
|||
|
"cp1251": "windows1251",
|
|||
|
"windows1252": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "€<>‚ƒ„…†‡ˆ‰Š‹Œ<E280B9>Ž<EFBFBD><C5BD>‘’“”•–—˜™š›œ<E280BA>žŸ ¡¢£¤¥¦§¨©ª«¬®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖ×ØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ"
|
|||
|
},
|
|||
|
"win1252": "windows1252",
|
|||
|
"cp1252": "windows1252",
|
|||
|
"windows1253": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "€<>‚ƒ„…†‡<E280A0>‰<EFBFBD>‹<EFBFBD><E280B9><EFBFBD><EFBFBD><EFBFBD>‘’“”•–—<E28093>™<EFBFBD>›<EFBFBD><E280BA><EFBFBD><EFBFBD> ΅Ά£¤¥¦§¨©<C2A8>«¬®―°±²³΄µ¶·ΈΉΊ»Ό½ΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡ<CEA0>ΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύώ<CF8D>"
|
|||
|
},
|
|||
|
"win1253": "windows1253",
|
|||
|
"cp1253": "windows1253",
|
|||
|
"windows1254": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "€<>‚ƒ„…†‡ˆ‰Š‹Œ<E280B9><C592><EFBFBD><EFBFBD>‘’“”•–—˜™š›œ<E280BA><C593>Ÿ ¡¢£¤¥¦§¨©ª«¬®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏĞÑÒÓÔÕÖ×ØÙÚÛÜİŞßàáâãäåæçèéêëìíîïğñòóôõö÷øùúûüışÿ"
|
|||
|
},
|
|||
|
"win1254": "windows1254",
|
|||
|
"cp1254": "windows1254",
|
|||
|
"windows1255": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "€<>‚ƒ„…†‡ˆ‰<CB86>‹<EFBFBD><E280B9><EFBFBD><EFBFBD><EFBFBD>‘’“”•–—˜™<CB9C>›<EFBFBD><E280BA><EFBFBD><EFBFBD> ¡¢£₪¥¦§¨©×«¬®¯°±²³´µ¶·¸¹÷»¼½¾¿ְֱֲֳִֵֶַָֹֺֻּֽ־ֿ׀ׁׂ׃װױײ׳״<D7B3><D7B4><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD>אבגדהוזחטיךכלםמןנסעףפץצקרשת<D7A9><D7AA><E2808E>"
|
|||
|
},
|
|||
|
"win1255": "windows1255",
|
|||
|
"cp1255": "windows1255",
|
|||
|
"windows1256": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "€پ‚ƒ„…†‡ˆ‰ٹ‹Œچژڈگ‘’“”•–—ک™ڑ›œں ،¢£¤¥¦§¨©ھ«¬®¯°±²³´µ¶·¸¹؛»¼½¾؟ہءآأؤإئابةتثجحخدذرزسشصض×طظعغـفقكàلâمنهوçèéêëىيîïًٌٍَôُِ÷ّùْûüے"
|
|||
|
},
|
|||
|
"win1256": "windows1256",
|
|||
|
"cp1256": "windows1256",
|
|||
|
"windows1257": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "€<>‚<EFBFBD>„…†‡<E280A0>‰<EFBFBD>‹<EFBFBD>¨ˇ¸<CB87>‘’“”•–—<E28093>™<EFBFBD>›<EFBFBD>¯˛<C2AF> <EFBFBD>¢£¤<C2A3>¦§Ø©Ŗ«¬®Æ°±²³´µ¶·ø¹ŗ»¼½¾æĄĮĀĆÄÅĘĒČÉŹĖĢĶĪĻŠŃŅÓŌÕÖ×ŲŁŚŪÜŻŽßąįāćäåęēčéźėģķīļšńņóōõö÷ųłśūüżž˙"
|
|||
|
},
|
|||
|
"win1257": "windows1257",
|
|||
|
"cp1257": "windows1257",
|
|||
|
"windows1258": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "€<>‚ƒ„…†‡ˆ‰<CB86>‹Œ<E280B9><C592><EFBFBD><EFBFBD>‘’“”•–—˜™<CB9C>›œ<E280BA><C593>Ÿ ¡¢£¤¥¦§¨©ª«¬®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂĂÄÅÆÇÈÉÊË̀ÍÎÏĐÑ̉ÓÔƠÖ×ØÙÚÛÜỮßàáâăäåæçèéêë́íîïđṇ̃óôơö÷øùúûüư₫ÿ"
|
|||
|
},
|
|||
|
"win1258": "windows1258",
|
|||
|
"cp1258": "windows1258",
|
|||
|
"iso88591": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "
¡¢£¤¥¦§¨©ª«¬®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖ×ØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ"
|
|||
|
},
|
|||
|
"cp28591": "iso88591",
|
|||
|
"iso88592": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "
Ą˘Ł¤ĽŚ§¨ŠŞŤŹŽŻ°ą˛ł´ľśˇ¸šşťź˝žżŔÁÂĂÄĹĆÇČÉĘËĚÍÎĎĐŃŇÓÔŐÖ×ŘŮÚŰÜÝŢßŕáâăäĺćçčéęëěíîďđńňóôőö÷řůúűüýţ˙"
|
|||
|
},
|
|||
|
"cp28592": "iso88592",
|
|||
|
"iso88593": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "
Ħ˘£¤<C2A3>Ĥ§¨İŞĞĴ<C4B4>Ż°ħ²³´µĥ·¸ışğĵ½<C4B5>żÀÁÂ<C381>ÄĊĈÇÈÉÊËÌÍÎÏ<C38E>ÑÒÓÔĠÖ×ĜÙÚÛÜŬŜßàáâ<C3A1>äċĉçèéêëìíîï<C3AE>ñòóôġö÷ĝùúûüŭŝ˙"
|
|||
|
},
|
|||
|
"cp28593": "iso88593",
|
|||
|
"iso88594": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "
ĄĸŖ¤ĨĻ§¨ŠĒĢŦŽ¯°ą˛ŗ´ĩļˇ¸šēģŧŊžŋĀÁÂÃÄÅÆĮČÉĘËĖÍÎĪĐŅŌĶÔÕÖ×ØŲÚÛÜŨŪßāáâãäåæįčéęëėíîīđņōķôõö÷øųúûüũū˙"
|
|||
|
},
|
|||
|
"cp28594": "iso88594",
|
|||
|
"iso88595": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "
ЁЂЃЄЅІЇЈЉЊЋЌЎЏАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя№ёђѓєѕіїјљњћќ§ўџ"
|
|||
|
},
|
|||
|
"cp28595": "iso88595",
|
|||
|
"iso88596": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "
<C29F><C2A0><EFBFBD>¤<EFBFBD><C2A4><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD>،<D88C><C2AD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD>؛<EFBFBD><D89B><EFBFBD>؟<EFBFBD>ءآأؤإئابةتثجحخدذرزسشصضطظعغ<D8B9><D8BA><EFBFBD><EFBFBD><EFBFBD>ـفقكلمنهوىيًٌٍَُِّْ<D991><D992><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD>"
|
|||
|
},
|
|||
|
"cp28596": "iso88596",
|
|||
|
"iso88597": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "
‘’£€₯¦§¨©ͺ«¬<C2AC>―°±²³΄΅Ά·ΈΉΊ»Ό½ΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡ<CEA0>ΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύώ<CF8D>"
|
|||
|
},
|
|||
|
"cp28597": "iso88597",
|
|||
|
"iso88598": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "
<C29F>¢£¤¥¦§¨©×«¬®¯°±²³´µ¶·¸¹÷»¼½¾<C2BD><C2BE><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD>‗אבגדהוזחטיךכלםמןנסעףפץצקרשת<D7A9><D7AA><E2808E>"
|
|||
|
},
|
|||
|
"cp28598": "iso88598",
|
|||
|
"iso88599": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "
¡¢£¤¥¦§¨©ª«¬®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏĞÑÒÓÔÕÖ×ØÙÚÛÜİŞßàáâãäåæçèéêëìíîïğñòóôõö÷øùúûüışÿ"
|
|||
|
},
|
|||
|
"cp28599": "iso88599",
|
|||
|
"iso885910": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "
ĄĒĢĪĨĶ§ĻĐŠŦŽŪŊ°ąēģīĩķ·ļđšŧž―ūŋĀÁÂÃÄÅÆĮČÉĘËĖÍÎÏÐŅŌÓÔÕÖŨØŲÚÛÜÝÞßāáâãäåæįčéęëėíîïðņōóôõöũøųúûüýþĸ"
|
|||
|
},
|
|||
|
"cp28600": "iso885910",
|
|||
|
"iso885911": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "
กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู<E0B8B9><E0B8BA><EFBFBD><EFBFBD>฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛<E0B99A><E0B99B><EFBFBD><EFBFBD>"
|
|||
|
},
|
|||
|
"cp28601": "iso885911",
|
|||
|
"iso885913": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "
”¢£¤„¦§Ø©Ŗ«¬®Æ°±²³“µ¶·ø¹ŗ»¼½¾æĄĮĀĆÄÅĘĒČÉŹĖĢĶĪĻŠŃŅÓŌÕÖ×ŲŁŚŪÜŻŽßąįāćäåęēčéźėģķīļšńņóōõö÷ųłśūüżž’"
|
|||
|
},
|
|||
|
"cp28603": "iso885913",
|
|||
|
"iso885914": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "
Ḃḃ£ĊċḊ§Ẁ©ẂḋỲ®ŸḞḟĠġṀṁ¶ṖẁṗẃṠỳẄẅṡÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏŴÑÒÓÔÕÖṪØÙÚÛÜÝŶßàáâãäåæçèéêëìíîïŵñòóôõöṫøùúûüýŷÿ"
|
|||
|
},
|
|||
|
"cp28604": "iso885914",
|
|||
|
"iso885915": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "
¡¢£€¥Š§š©ª«¬®¯°±²³Žµ¶·ž¹º»ŒœŸ¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖ×ØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ"
|
|||
|
},
|
|||
|
"cp28605": "iso885915",
|
|||
|
"iso885916": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "
ĄąŁ€„Š§š©Ș«ŹźŻ°±ČłŽ”¶·žčș»ŒœŸżÀÁÂĂÄĆÆÇÈÉÊËÌÍÎÏĐŃÒÓÔŐÖŚŰÙÚÛÜĘȚßàáâăäćæçèéêëìíîïđńòóôőöśűùúûüęțÿ"
|
|||
|
},
|
|||
|
"cp28606": "iso885916",
|
|||
|
"cp437": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "ÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜ¢£¥₧ƒáíóúñѪº¿⌐¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ "
|
|||
|
},
|
|||
|
"ibm437": "cp437",
|
|||
|
"csibm437": "cp437",
|
|||
|
"cp737": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "ΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩαβγδεζηθικλμνξοπρσςτυφχψ░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀ωάέήϊίόύϋώΆΈΉΊΌΎΏ±≥≤ΪΫ÷≈°∙·√ⁿ²■ "
|
|||
|
},
|
|||
|
"ibm737": "cp737",
|
|||
|
"csibm737": "cp737",
|
|||
|
"cp775": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "ĆüéāäģåćłēŖŗīŹÄÅÉæÆōöĢ¢ŚśÖÜø£ØפĀĪóŻżź”¦©®¬½¼Ł«»░▒▓│┤ĄČĘĖ╣║╗╝ĮŠ┐└┴┬├─┼ŲŪ╚╔╩╦╠═╬Žąčęėįšųūž┘┌█▄▌▐▀ÓßŌŃõÕµńĶķĻļņĒŅ’±“¾¶§÷„°∙·¹³²■ "
|
|||
|
},
|
|||
|
"ibm775": "cp775",
|
|||
|
"csibm775": "cp775",
|
|||
|
"cp850": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "ÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜø£Ø׃áíóúñѪº¿®¬½¼¡«»░▒▓│┤ÁÂÀ©╣║╗╝¢¥┐└┴┬├─┼ãÃ╚╔╩╦╠═╬¤ðÐÊËÈıÍÎÏ┘┌█▄¦Ì▀ÓßÔÒõÕµþÞÚÛÙýݯ´±‗¾¶§÷¸°¨·¹³²■ "
|
|||
|
},
|
|||
|
"ibm850": "cp850",
|
|||
|
"csibm850": "cp850",
|
|||
|
"cp852": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "ÇüéâäůćçłëŐőîŹÄĆÉĹĺôöĽľŚśÖÜŤťŁ×čáíóúĄąŽžĘ꬟Ⱥ«»░▒▓│┤ÁÂĚŞ╣║╗╝Żż┐└┴┬├─┼Ăă╚╔╩╦╠═╬¤đĐĎËďŇÍÎě┘┌█▄ŢŮ▀ÓßÔŃńňŠšŔÚŕŰýÝţ´˝˛ˇ˘§÷¸°¨˙űŘř■ "
|
|||
|
},
|
|||
|
"ibm852": "cp852",
|
|||
|
"csibm852": "cp852",
|
|||
|
"cp855": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "ђЂѓЃёЁєЄѕЅіІїЇјЈљЉњЊћЋќЌўЎџЏюЮъЪаАбБцЦдДеЕфФгГ«»░▒▓│┤хХиИ╣║╗╝йЙ┐└┴┬├─┼кК╚╔╩╦╠═╬¤лЛмМнНоОп┘┌█▄Пя▀ЯрРсСтТуУжЖвВьЬ№ыЫзЗшШэЭщЩчЧ§■ "
|
|||
|
},
|
|||
|
"ibm855": "cp855",
|
|||
|
"csibm855": "cp855",
|
|||
|
"cp856": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "אבגדהוזחטיךכלםמןנסעףפץצקרשת<D7A9>£<EFBFBD>×<EFBFBD><C397><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD>®¬½¼<C2BD>«»░▒▓│┤<E29482><E294A4><EFBFBD>©╣║╗╝¢¥┐└┴┬├─┼<E29480><E294BC>╚╔╩╦╠═╬¤<E295AC><C2A4><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD>┘┌█▄¦<E29684>▀<EFBFBD><E29680><EFBFBD><EFBFBD><EFBFBD><EFBFBD>µ<EFBFBD><C2B5><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD>¯´±‗¾¶§÷¸°¨·¹³²■ "
|
|||
|
},
|
|||
|
"ibm856": "cp856",
|
|||
|
"csibm856": "cp856",
|
|||
|
"cp857": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "ÇüéâäàåçêëèïîıÄÅÉæÆôöòûùİÖÜø£ØŞşáíóúñÑĞ𿮬½¼¡«»░▒▓│┤ÁÂÀ©╣║╗╝¢¥┐└┴┬├─┼ãÃ╚╔╩╦╠═╬¤ºªÊËÈ<C38B>ÍÎÏ┘┌█▄¦Ì▀ÓßÔÒõÕµ<C395>×ÚÛÙìÿ¯´±<C2AD>¾¶§÷¸°¨·¹³²■ "
|
|||
|
},
|
|||
|
"ibm857": "cp857",
|
|||
|
"csibm857": "cp857",
|
|||
|
"cp858": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "ÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜø£Ø׃áíóúñѪº¿®¬½¼¡«»░▒▓│┤ÁÂÀ©╣║╗╝¢¥┐└┴┬├─┼ãÃ╚╔╩╦╠═╬¤ðÐÊËÈ€ÍÎÏ┘┌█▄¦Ì▀ÓßÔÒõÕµþÞÚÛÙýݯ´±‗¾¶§÷¸°¨·¹³²■ "
|
|||
|
},
|
|||
|
"ibm858": "cp858",
|
|||
|
"csibm858": "cp858",
|
|||
|
"cp860": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "ÇüéâãàÁçêÊèÍÔìÃÂÉÀÈôõòÚùÌÕÜ¢£Ù₧ÓáíóúñѪº¿Ò¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ "
|
|||
|
},
|
|||
|
"ibm860": "cp860",
|
|||
|
"csibm860": "cp860",
|
|||
|
"cp861": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "ÇüéâäàåçêëèÐðÞÄÅÉæÆôöþûÝýÖÜø£Ø₧ƒáíóúÁÍÓÚ¿⌐¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ "
|
|||
|
},
|
|||
|
"ibm861": "cp861",
|
|||
|
"csibm861": "cp861",
|
|||
|
"cp862": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "אבגדהוזחטיךכלםמןנסעףפץצקרשת¢£¥₧ƒáíóúñѪº¿⌐¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ "
|
|||
|
},
|
|||
|
"ibm862": "cp862",
|
|||
|
"csibm862": "cp862",
|
|||
|
"cp863": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "ÇüéâÂà¶çêëèïî‗À§ÉÈÊôËÏûù¤ÔÜ¢£ÙÛƒ¦´óú¨¸³¯Î⌐¬½¼¾«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ "
|
|||
|
},
|
|||
|
"ibm863": "cp863",
|
|||
|
"csibm863": "cp863",
|
|||
|
"cp864": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010\u0011\u0012\u0013\u0014\u0015\u0016\u0017\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f !\"#$٪&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~°·∙√▒─│┼┤┬├┴┐┌└┘β∞φ±½¼≈«»ﻷﻸ<EFBBB7><EFBBB8>ﻻﻼ<EFBBBB> ﺂ£¤ﺄ<C2A4><EFBA84>ﺎﺏﺕﺙ،ﺝﺡﺥ٠١٢٣٤٥٦٧٨٩ﻑ؛ﺱﺵﺹ؟¢ﺀﺁﺃﺅﻊﺋﺍﺑﺓﺗﺛﺟﺣﺧﺩﺫﺭﺯﺳﺷﺻﺿﻁﻅﻋﻏ¦¬÷×ﻉـﻓﻗﻛﻟﻣﻧﻫﻭﻯﻳﺽﻌﻎﻍﻡﹽّﻥﻩﻬﻰﻲﻐﻕﻵﻶﻝﻙﻱ■<EFBBB1>"
|
|||
|
},
|
|||
|
"ibm864": "cp864",
|
|||
|
"csibm864": "cp864",
|
|||
|
"cp865": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "ÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜø£Ø₧ƒáíóúñѪº¿⌐¬½¼¡«¤░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ "
|
|||
|
},
|
|||
|
"ibm865": "cp865",
|
|||
|
"csibm865": "cp865",
|
|||
|
"cp866": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмноп░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀рстуфхцчшщъыьэюяЁёЄєЇїЎў°∙·√№¤■ "
|
|||
|
},
|
|||
|
"ibm866": "cp866",
|
|||
|
"csibm866": "cp866",
|
|||
|
"cp869": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "<22><><EFBFBD><EFBFBD><EFBFBD><EFBFBD>Ά<EFBFBD>·¬¦‘’Έ―ΉΊΪΌ<CEAA><CE8C>ΎΫ©Ώ²³ά£έήίϊΐόύΑΒΓΔΕΖΗ½ΘΙ«»░▒▓│┤ΚΛΜΝ╣║╗╝ΞΟ┐└┴┬├─┼ΠΡ╚╔╩╦╠═╬ΣΤΥΦΧΨΩαβγ┘┌█▄δε▀ζηθικλμνξοπρσςτ΄±υφχ§ψ΅°¨ωϋΰώ■ "
|
|||
|
},
|
|||
|
"ibm869": "cp869",
|
|||
|
"csibm869": "cp869",
|
|||
|
"cp922": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "
¡¢£¤¥¦§¨©ª«¬®‾°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏŠÑÒÓÔÕÖ×ØÙÚÛÜÝŽßàáâãäåæçèéêëìíîïšñòóôõö÷øùúûüýžÿ"
|
|||
|
},
|
|||
|
"ibm922": "cp922",
|
|||
|
"csibm922": "cp922",
|
|||
|
"cp1046": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "ﺈ×÷ﹱ■│─┐┌└┘ﹹﹻﹽﹿﹷﺊﻰﻳﻲﻎﻏﻐﻶﻸﻺﻼ ¤ﺋﺑﺗﺛﺟﺣ،ﺧﺳ٠١٢٣٤٥٦٧٨٩ﺷ؛ﺻﺿﻊ؟ﻋءآأؤإئابةتثجحخدذرزسشصضطﻇعغﻌﺂﺄﺎﻓـفقكلمنهوىيًٌٍَُِّْﻗﻛﻟﻵﻷﻹﻻﻣﻧﻬﻩ<EFBBAC>"
|
|||
|
},
|
|||
|
"ibm1046": "cp1046",
|
|||
|
"csibm1046": "cp1046",
|
|||
|
"cp1124": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "
ЁЂҐЄЅІЇЈЉЊЋЌЎЏАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя№ёђґєѕіїјљњћќ§ўџ"
|
|||
|
},
|
|||
|
"ibm1124": "cp1124",
|
|||
|
"csibm1124": "cp1124",
|
|||
|
"cp1125": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмноп░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀рстуфхцчшщъыьэюяЁёҐґЄєІіЇї·√№¤■ "
|
|||
|
},
|
|||
|
"ibm1125": "cp1125",
|
|||
|
"csibm1125": "cp1125",
|
|||
|
"cp1129": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "
¡¢£¤¥¦§œ©ª«¬®¯°±²³Ÿµ¶·Œ¹º»¼½¾¿ÀÁÂĂÄÅÆÇÈÉÊË̀ÍÎÏĐÑ̉ÓÔƠÖ×ØÙÚÛÜỮßàáâăäåæçèéêë́íîïđṇ̃óôơö÷øùúûüư₫ÿ"
|
|||
|
},
|
|||
|
"ibm1129": "cp1129",
|
|||
|
"csibm1129": "cp1129",
|
|||
|
"cp1133": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "
ກຂຄງຈສຊຍດຕຖທນບປຜຝພຟມຢຣລວຫອຮ<E0BAAD><E0BAAE><EFBFBD>ຯະາຳິີຶືຸູຼັົຽ<E0BABB><E0BABD><EFBFBD>ເແໂໃໄ່້໊໋໌ໍໆ<E0BB8D>ໜໝ₭<E0BB9D><E282AD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD>໐໑໒໓໔໕໖໗໘໙<E0BB98><E0BB99>¢¬¦<C2AC>"
|
|||
|
},
|
|||
|
"ibm1133": "cp1133",
|
|||
|
"csibm1133": "cp1133",
|
|||
|
"cp1161": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "<22><><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD>่กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู้๊๋€฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛¢¬¦ "
|
|||
|
},
|
|||
|
"ibm1161": "cp1161",
|
|||
|
"csibm1161": "cp1161",
|
|||
|
"cp1162": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "€…‘’“”•–— กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู<E0B8B9><E0B8BA><EFBFBD><EFBFBD>฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛<E0B99A><E0B99B><EFBFBD><EFBFBD>"
|
|||
|
},
|
|||
|
"ibm1162": "cp1162",
|
|||
|
"csibm1162": "cp1162",
|
|||
|
"cp1163": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "
¡¢£€¥¦§œ©ª«¬®¯°±²³Ÿµ¶·Œ¹º»¼½¾¿ÀÁÂĂÄÅÆÇÈÉÊË̀ÍÎÏĐÑ̉ÓÔƠÖ×ØÙÚÛÜỮßàáâăäåæçèéêë́íîïđṇ̃óôơö÷øùúûüư₫ÿ"
|
|||
|
},
|
|||
|
"ibm1163": "cp1163",
|
|||
|
"csibm1163": "cp1163",
|
|||
|
"maccroatian": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®Š™´¨≠ŽØ∞±≤≥∆µ∂∑∏š∫ªºΩžø¿¡¬√ƒ≈Ć«Č… ÀÃÕŒœĐ—“”‘’÷◊<C3B7>©⁄¤‹›Æ»–·‚„‰ÂćÁčÈÍÎÏÌÓÔđÒÚÛÙıˆ˜¯πË˚¸Êæˇ"
|
|||
|
},
|
|||
|
"maccyrillic": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯ†°¢£§•¶І®©™Ђђ≠Ѓѓ∞±≤≥іµ∂ЈЄєЇїЉљЊњјЅ¬√ƒ≈∆«»… ЋћЌќѕ–—“”‘’÷„ЎўЏџ№Ёёяабвгдежзийклмнопрстуфхцчшщъыьэю¤"
|
|||
|
},
|
|||
|
"macgreek": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "Ĺ²É³ÖÜ΅àâä΄¨çéèê룙î‰ôö¦ùûü†ΓΔΘΛΞΠß®©ΣΪ§≠°·Α±≤≥¥ΒΕΖΗΙΚΜΦΫΨΩάΝ¬ΟΡ≈Τ«»… ΥΧΆΈœ–―“”‘’÷ΉΊΌΎέήίόΏύαβψδεφγηιξκλμνοπώρστθωςχυζϊϋΐΰ<CE90>"
|
|||
|
},
|
|||
|
"maciceland": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûüÝ°¢£§•¶ß®©™´¨≠ÆØ∞±≤≥¥µ∂∑∏π∫ªºΩæø¿¡¬√ƒ≈∆«»… ÀÃÕŒœ–—“”‘’÷◊ÿŸ⁄¤ÐðÞþý·‚„‰ÂÊÁËÈÍÎÏÌÓÔ<C393>ÒÚÛÙıˆ˜¯˘˙˚¸˝˛ˇ"
|
|||
|
},
|
|||
|
"macroman": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®©™´¨≠ÆØ∞±≤≥¥µ∂∑∏π∫ªºΩæø¿¡¬√ƒ≈∆«»… ÀÃÕŒœ–—“”‘’÷◊ÿŸ⁄¤‹›fifl‡·‚„‰ÂÊÁËÈÍÎÏÌÓÔ<C393>ÒÚÛÙıˆ˜¯˘˙˚¸˝˛ˇ"
|
|||
|
},
|
|||
|
"macromania": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®©™´¨≠ĂŞ∞±≤≥¥µ∂∑∏π∫ªºΩăş¿¡¬√ƒ≈∆«»… ÀÃÕŒœ–—“”‘’÷◊ÿŸ⁄¤‹›Ţţ‡·‚„‰ÂÊÁËÈÍÎÏÌÓÔ<C393>ÒÚÛÙıˆ˜¯˘˙˚¸˝˛ˇ"
|
|||
|
},
|
|||
|
"macthai": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "«»…“”<E2809D>•‘’<E28098> กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู–—฿เแโใไๅๆ็่้๊๋์ํ™๏๐๑๒๓๔๕๖๗๘๙®©<C2AE><C2A9><EFBFBD><EFBFBD>"
|
|||
|
},
|
|||
|
"macturkish": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®©™´¨≠ÆØ∞±≤≥¥µ∂∑∏π∫ªºΩæø¿¡¬√ƒ≈∆«»… ÀÃÕŒœ–—“”‘’÷◊ÿŸĞğİıŞş‡·‚„‰ÂÊÁËÈÍÎÏÌÓÔ<C393>ÒÚÛÙ<C39B>ˆ˜¯˘˙˚¸˝˛ˇ"
|
|||
|
},
|
|||
|
"macukraine": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯ†°Ґ£§•¶І®©™Ђђ≠Ѓѓ∞±≤≥іµґЈЄєЇїЉљЊњјЅ¬√ƒ≈∆«»… ЋћЌќѕ–—“”‘’÷„ЎўЏџ№Ёёяабвгдежзийклмнопрстуфхцчшщъыьэю¤"
|
|||
|
},
|
|||
|
"koi8r": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "─│┌┐└┘├┤┬┴┼▀▄█▌▐░▒▓⌠■∙√≈≤≥ ⌡°²·÷═║╒ё╓╔╕╖╗╘╙╚╛╜╝╞╟╠╡Ё╢╣╤╥╦╧╨╩╪╫╬©юабцдефгхийклмнопярстужвьызшэщчъЮАБЦДЕФГХИЙКЛМНОПЯРСТУЖВЬЫЗШЭЩЧЪ"
|
|||
|
},
|
|||
|
"koi8u": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "─│┌┐└┘├┤┬┴┼▀▄█▌▐░▒▓⌠■∙√≈≤≥ ⌡°²·÷═║╒ёє╔ії╗╘╙╚╛ґ╝╞╟╠╡ЁЄ╣ІЇ╦╧╨╩╪Ґ╬©юабцдефгхийклмнопярстужвьызшэщчъЮАБЦДЕФГХИЙКЛМНОПЯРСТУЖВЬЫЗШЭЩЧЪ"
|
|||
|
},
|
|||
|
"koi8ru": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "─│┌┐└┘├┤┬┴┼▀▄█▌▐░▒▓⌠■∙√≈≤≥ ⌡°²·÷═║╒ёє╔ії╗╘╙╚╛ґў╞╟╠╡ЁЄ╣ІЇ╦╧╨╩╪ҐЎ©юабцдефгхийклмнопярстужвьызшэщчъЮАБЦДЕФГХИЙКЛМНОПЯРСТУЖВЬЫЗШЭЩЧЪ"
|
|||
|
},
|
|||
|
"koi8t": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "қғ‚Ғ„…†‡<E280A0>‰ҳ‹ҲҷҶ<D2B7>Қ‘’“”•–—<E28093>™<EFBFBD>›<EFBFBD><E280BA><EFBFBD><EFBFBD><EFBFBD>ӯӮё¤ӣ¦§<C2A6><C2A7><EFBFBD>«¬®<C2AD>°±²Ё<C2B2>Ӣ¶·<C2B6>№<EFBFBD>»<EFBFBD><C2BB><EFBFBD>©юабцдефгхийклмнопярстужвьызшэщчъЮАБЦДЕФГХИЙКЛМНОПЯРСТУЖВЬЫЗШЭЩЧЪ"
|
|||
|
},
|
|||
|
"armscii8": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "
<C29F>և։)(»«—.՝,-֊…՜՛՞ԱաԲբԳգԴդԵեԶզԷէԸըԹթԺժԻիԼլԽխԾծԿկՀհՁձՂղՃճՄմՅյՆնՇշՈոՉչՊպՋջՌռՍսՎվՏտՐրՑցՒւՓփՔքՕօՖֆ՚<D686>"
|
|||
|
},
|
|||
|
"rk1048": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "ЂЃ‚ѓ„…†‡€‰Љ‹ЊҚҺЏђ‘’“”•–—<E28093>™љ›њқһџ ҰұӘ¤Ө¦§Ё©Ғ«¬®Ү°±Ііөµ¶·ё№ғ»әҢңүАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя"
|
|||
|
},
|
|||
|
"tcvn": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "\u0000ÚỤ\u0003ỪỬỮ\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010ỨỰỲỶỸÝỴ\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~ÀẢÃÁẠẶẬÈẺẼÉẸỆÌỈĨÍỊÒỎÕÓỌỘỜỞỠỚỢÙỦŨ ĂÂÊÔƠƯĐăâêôơưđẶ̀̀̉̃́àảãáạẲằẳẵắẴẮẦẨẪẤỀặầẩẫấậèỂẻẽéẹềểễếệìỉỄẾỒĩíịòỔỏõóọồổỗốộờởỡớợùỖủũúụừửữứựỳỷỹýỵỐ"
|
|||
|
},
|
|||
|
"georgianacademy": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "‚ƒ„…†‡ˆ‰Š‹Œ‘’“”•–—˜™š›œŸ ¡¢£¤¥¦§¨©ª«¬®¯°±²³´µ¶·¸¹º»¼½¾¿აბგდევზთიკლმნოპჟრსტუფქღყშჩცძწჭხჯჰჱჲჳჴჵჶçèéêëìíîïðñòóôõö÷øùúûüýþÿ"
|
|||
|
},
|
|||
|
"georgianps": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "‚ƒ„…†‡ˆ‰Š‹Œ‘’“”•–—˜™š›œŸ ¡¢£¤¥¦§¨©ª«¬®¯°±²³´µ¶·¸¹º»¼½¾¿აბგდევზჱთიკლმნჲოპჟრსტჳუფქღყშჩცძწჭხჴჯჰჵæçèéêëìíîïðñòóôõö÷øùúûüýþÿ"
|
|||
|
},
|
|||
|
"pt154": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "ҖҒӮғ„…ҶҮҲүҠӢҢҚҺҸҗ‘’“”•–—ҳҷҡӣңқһҹ ЎўЈӨҘҰ§Ё©Ә«¬ӯ®Ҝ°ұІіҙө¶·ё№ә»јҪҫҝАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя"
|
|||
|
},
|
|||
|
"viscii": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "\u0000\u0001Ẳ\u0003\u0004ẴẪ\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010\u0011\u0012\u0013Ỷ\u0015\u0016\u0017\u0018Ỹ\u001a\u001b\u001c\u001dỴ\u001f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~ẠẮẰẶẤẦẨẬẼẸẾỀỂỄỆỐỒỔỖỘỢỚỜỞỊỎỌỈỦŨỤỲÕắằặấầẩậẽẹếềểễệốồổỗỠƠộờởịỰỨỪỬơớƯÀÁÂÃẢĂẳẵÈÉÊẺÌÍĨỳĐứÒÓÔạỷừửÙÚỹỵÝỡưàáâãảăữẫèéêẻìíĩỉđựòóôõỏọụùúũủýợỮ"
|
|||
|
},
|
|||
|
"iso646cn": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010\u0011\u0012\u0013\u0014\u0015\u0016\u0017\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f !\"#¥%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}‾<E280BE><7F><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD>"
|
|||
|
},
|
|||
|
"iso646jp": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010\u0011\u0012\u0013\u0014\u0015\u0016\u0017\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[¥]^_`abcdefghijklmnopqrstuvwxyz{|}‾<E280BE><7F><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD>"
|
|||
|
},
|
|||
|
"hproman8": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "
ÀÂÈÊËÎÏ´ˋˆ¨˜ÙÛ₤¯Ýý°ÇçÑñ¡¿¤£¥§ƒ¢âêôûáéóúàèòùäëöüÅîØÆåíøæÄìÖÜÉïßÔÁÃãÐðÍÌÓÒÕõŠšÚŸÿÞþ·µ¶¾—¼½ªº«■»±<C2BB>"
|
|||
|
},
|
|||
|
"macintosh": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®©™´¨≠ÆØ∞±≤≥¥µ∂∑∏π∫ªºΩæø¿¡¬√ƒ≈∆«»… ÀÃÕŒœ–—“”‘’÷◊ÿŸ⁄¤‹›fifl‡·‚„‰ÂÊÁËÈÍÎÏÌÓÔ<C393>ÒÚÛÙıˆ˜¯˘˙˚¸˝˛ˇ"
|
|||
|
},
|
|||
|
"ascii": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "<22><><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD>"
|
|||
|
},
|
|||
|
"tis620": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "<22><><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD>กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู<E0B8B9><E0B8BA><EFBFBD><EFBFBD>฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛<E0B99A><E0B99B><EFBFBD><EFBFBD>"
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 9201:
|
|||
|
/***/ ((module) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
|
|||
|
|
|||
|
// Manually added data to be used by sbcs codec in addition to generated one.
|
|||
|
|
|||
|
module.exports = {
|
|||
|
// Not supported by iconv, not sure why.
|
|||
|
"10029": "maccenteuro",
|
|||
|
"maccenteuro": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "ÄĀāÉĄÖÜáąČäčĆć鏟ĎíďĒēĖóėôöõúĚěü†°Ę£§•¶ß®©™ę¨≠ģĮįĪ≤≥īĶ∂∑łĻļĽľĹĺŅņѬ√ńŇ∆«»… ňŐÕőŌ–—“”‘’÷◊ōŔŕŘ‹›řŖŗŠ‚„šŚśÁŤťÍŽžŪÓÔūŮÚůŰűŲųÝýķŻŁżĢˇ"
|
|||
|
},
|
|||
|
|
|||
|
"808": "cp808",
|
|||
|
"ibm808": "cp808",
|
|||
|
"cp808": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмноп░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀рстуфхцчшщъыьэюяЁёЄєЇїЎў°∙·√№€■ "
|
|||
|
},
|
|||
|
|
|||
|
"mik": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя└┴┬├─┼╣║╚╔╩╦╠═╬┐░▒▓│┤№§╗╝┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ "
|
|||
|
},
|
|||
|
|
|||
|
"cp720": {
|
|||
|
"type": "_sbcs",
|
|||
|
"chars": "\x80\x81éâ\x84à\x86çêëèïî\x8d\x8e\x8f\x90\u0651\u0652ô¤ـûùءآأؤ£إئابةتثجحخدذرزسشص«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀ضطظعغفµقكلمنهوىي≡\u064b\u064c\u064d\u064e\u064f\u0650≈°∙·√ⁿ²■\u00a0"
|
|||
|
},
|
|||
|
|
|||
|
// Aliases of generated encodings.
|
|||
|
"ascii8bit": "ascii",
|
|||
|
"usascii": "ascii",
|
|||
|
"ansix34": "ascii",
|
|||
|
"ansix341968": "ascii",
|
|||
|
"ansix341986": "ascii",
|
|||
|
"csascii": "ascii",
|
|||
|
"cp367": "ascii",
|
|||
|
"ibm367": "ascii",
|
|||
|
"isoir6": "ascii",
|
|||
|
"iso646us": "ascii",
|
|||
|
"iso646irv": "ascii",
|
|||
|
"us": "ascii",
|
|||
|
|
|||
|
"latin1": "iso88591",
|
|||
|
"latin2": "iso88592",
|
|||
|
"latin3": "iso88593",
|
|||
|
"latin4": "iso88594",
|
|||
|
"latin5": "iso88599",
|
|||
|
"latin6": "iso885910",
|
|||
|
"latin7": "iso885913",
|
|||
|
"latin8": "iso885914",
|
|||
|
"latin9": "iso885915",
|
|||
|
"latin10": "iso885916",
|
|||
|
|
|||
|
"csisolatin1": "iso88591",
|
|||
|
"csisolatin2": "iso88592",
|
|||
|
"csisolatin3": "iso88593",
|
|||
|
"csisolatin4": "iso88594",
|
|||
|
"csisolatincyrillic": "iso88595",
|
|||
|
"csisolatinarabic": "iso88596",
|
|||
|
"csisolatingreek" : "iso88597",
|
|||
|
"csisolatinhebrew": "iso88598",
|
|||
|
"csisolatin5": "iso88599",
|
|||
|
"csisolatin6": "iso885910",
|
|||
|
|
|||
|
"l1": "iso88591",
|
|||
|
"l2": "iso88592",
|
|||
|
"l3": "iso88593",
|
|||
|
"l4": "iso88594",
|
|||
|
"l5": "iso88599",
|
|||
|
"l6": "iso885910",
|
|||
|
"l7": "iso885913",
|
|||
|
"l8": "iso885914",
|
|||
|
"l9": "iso885915",
|
|||
|
"l10": "iso885916",
|
|||
|
|
|||
|
"isoir14": "iso646jp",
|
|||
|
"isoir57": "iso646cn",
|
|||
|
"isoir100": "iso88591",
|
|||
|
"isoir101": "iso88592",
|
|||
|
"isoir109": "iso88593",
|
|||
|
"isoir110": "iso88594",
|
|||
|
"isoir144": "iso88595",
|
|||
|
"isoir127": "iso88596",
|
|||
|
"isoir126": "iso88597",
|
|||
|
"isoir138": "iso88598",
|
|||
|
"isoir148": "iso88599",
|
|||
|
"isoir157": "iso885910",
|
|||
|
"isoir166": "tis620",
|
|||
|
"isoir179": "iso885913",
|
|||
|
"isoir199": "iso885914",
|
|||
|
"isoir203": "iso885915",
|
|||
|
"isoir226": "iso885916",
|
|||
|
|
|||
|
"cp819": "iso88591",
|
|||
|
"ibm819": "iso88591",
|
|||
|
|
|||
|
"cyrillic": "iso88595",
|
|||
|
|
|||
|
"arabic": "iso88596",
|
|||
|
"arabic8": "iso88596",
|
|||
|
"ecma114": "iso88596",
|
|||
|
"asmo708": "iso88596",
|
|||
|
|
|||
|
"greek" : "iso88597",
|
|||
|
"greek8" : "iso88597",
|
|||
|
"ecma118" : "iso88597",
|
|||
|
"elot928" : "iso88597",
|
|||
|
|
|||
|
"hebrew": "iso88598",
|
|||
|
"hebrew8": "iso88598",
|
|||
|
|
|||
|
"turkish": "iso88599",
|
|||
|
"turkish8": "iso88599",
|
|||
|
|
|||
|
"thai": "iso885911",
|
|||
|
"thai8": "iso885911",
|
|||
|
|
|||
|
"celtic": "iso885914",
|
|||
|
"celtic8": "iso885914",
|
|||
|
"isoceltic": "iso885914",
|
|||
|
|
|||
|
"tis6200": "tis620",
|
|||
|
"tis62025291": "tis620",
|
|||
|
"tis62025330": "tis620",
|
|||
|
|
|||
|
"10000": "macroman",
|
|||
|
"10006": "macgreek",
|
|||
|
"10007": "maccyrillic",
|
|||
|
"10079": "maciceland",
|
|||
|
"10081": "macturkish",
|
|||
|
|
|||
|
"cspc8codepage437": "cp437",
|
|||
|
"cspc775baltic": "cp775",
|
|||
|
"cspc850multilingual": "cp850",
|
|||
|
"cspcp852": "cp852",
|
|||
|
"cspc862latinhebrew": "cp862",
|
|||
|
"cpgr": "cp869",
|
|||
|
|
|||
|
"msee": "cp1250",
|
|||
|
"mscyrl": "cp1251",
|
|||
|
"msansi": "cp1252",
|
|||
|
"msgreek": "cp1253",
|
|||
|
"msturk": "cp1254",
|
|||
|
"mshebr": "cp1255",
|
|||
|
"msarab": "cp1256",
|
|||
|
"winbaltrim": "cp1257",
|
|||
|
|
|||
|
"cp20866": "koi8r",
|
|||
|
"20866": "koi8r",
|
|||
|
"ibm878": "koi8r",
|
|||
|
"cskoi8r": "koi8r",
|
|||
|
|
|||
|
"cp21866": "koi8u",
|
|||
|
"21866": "koi8u",
|
|||
|
"ibm1168": "koi8u",
|
|||
|
|
|||
|
"strk10482002": "rk1048",
|
|||
|
|
|||
|
"tcvn5712": "tcvn",
|
|||
|
"tcvn57121": "tcvn",
|
|||
|
|
|||
|
"gb198880": "iso646cn",
|
|||
|
"cn": "iso646cn",
|
|||
|
|
|||
|
"csiso14jisc6220ro": "iso646jp",
|
|||
|
"jisc62201969ro": "iso646jp",
|
|||
|
"jp": "iso646jp",
|
|||
|
|
|||
|
"cshproman8": "hproman8",
|
|||
|
"r8": "hproman8",
|
|||
|
"roman8": "hproman8",
|
|||
|
"xroman8": "hproman8",
|
|||
|
"ibm1051": "hproman8",
|
|||
|
|
|||
|
"mac": "macintosh",
|
|||
|
"csmacintosh": "macintosh",
|
|||
|
};
|
|||
|
|
|||
|
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 2478:
|
|||
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
|
|||
|
var Buffer = __nccwpck_require__(2750).Buffer;
|
|||
|
|
|||
|
// Note: UTF16-LE (or UCS2) codec is Node.js native. See encodings/internal.js
|
|||
|
|
|||
|
// == UTF16-BE codec. ==========================================================
|
|||
|
|
|||
|
exports.utf16be = Utf16BECodec;
// Codec object for UTF-16 big-endian. Holds no per-stream state; that lives
// in the encoder/decoder instances created for each conversion.
function Utf16BECodec() {
}

Utf16BECodec.prototype.encoder = Utf16BEEncoder;
Utf16BECodec.prototype.decoder = Utf16BEDecoder;
// Tells the generic iconv-lite wrapper to add/strip a byte order mark.
Utf16BECodec.prototype.bomAware = true;
|
|||
|
|
|||
|
|
|||
|
// -- Encoding
|
|||
|
|
|||
|
// Encoder: converts JS strings (sequences of UTF-16 code units) into
// big-endian byte pairs. Stateless.
function Utf16BEEncoder() {
}

// Encode a chunk: every code unit becomes two bytes, high byte first.
Utf16BEEncoder.prototype.write = function(str) {
    var out = Buffer.alloc(str.length * 2);
    for (var i = 0; i < str.length; i++) {
        var unit = str.charCodeAt(i);
        out[i * 2] = unit >> 8;        // high byte first (big-endian)
        out[i * 2 + 1] = unit & 0xff;  // then low byte
    }
    return out;
}

// Nothing is buffered between chunks, so end() has nothing to flush.
Utf16BEEncoder.prototype.end = function() {
}
|
|||
|
|
|||
|
|
|||
|
// -- Decoding
|
|||
|
|
|||
|
// Streaming UTF-16BE decoder. A chunk may end in the middle of a 2-byte
// code unit; the dangling byte is kept in `overflowByte` for the next write.
function Utf16BEDecoder() {
    this.overflowByte = -1;
}

Utf16BEDecoder.prototype.write = function(buf) {
    if (buf.length == 0)
        return '';

    // Swap bytes into a little-endian scratch buffer so the native 'ucs2'
    // decoder can be reused. One spare byte covers a pending overflow byte.
    var swapped = Buffer.alloc(buf.length + 1);
    var src = 0, dst = 0;

    if (this.overflowByte !== -1) {
        // Complete the code unit started in the previous chunk.
        swapped[0] = buf[0];
        swapped[1] = this.overflowByte;
        src = 1; dst = 2;
    }

    while (src < buf.length - 1) {
        swapped[dst] = buf[src + 1];
        swapped[dst + 1] = buf[src];
        src += 2; dst += 2;
    }

    // Remember a trailing lone byte, if any.
    this.overflowByte = (src == buf.length - 1) ? buf[buf.length - 1] : -1;

    return swapped.slice(0, dst).toString('ucs2');
}

// Reset state; a dangling half code unit is dropped.
Utf16BEDecoder.prototype.end = function() {
    this.overflowByte = -1;
}
|
|||
|
|
|||
|
|
|||
|
// == UTF-16 codec =============================================================
|
|||
|
// Decoder chooses automatically from UTF-16LE and UTF-16BE using BOM and space-based heuristic.
|
|||
|
// Defaults to UTF-16LE, as it's prevalent and default in Node.
|
|||
|
// http://en.wikipedia.org/wiki/UTF-16 and http://encoding.spec.whatwg.org/#utf-16le
|
|||
|
// Decoder default can be changed: iconv.decode(buf, 'utf16', {defaultEncoding: 'utf-16be'});
|
|||
|
|
|||
|
// Encoder uses UTF-16LE and prepends BOM (which can be overridden with addBOM: false).
|
|||
|
|
|||
|
exports.utf16 = Utf16Codec;

// Meta-codec: sniffs between UTF-16LE and UTF-16BE when decoding; always
// encodes as UTF-16LE (see Utf16Encoder below).
function Utf16Codec(codecOptions, iconv) {
    this.iconv = iconv;
}

Utf16Codec.prototype.encoder = Utf16Encoder;
Utf16Codec.prototype.decoder = Utf16Decoder;
|
|||
|
|
|||
|
|
|||
|
// -- Encoding (pass-through)
|
|||
|
|
|||
|
// Pass-through encoder: 'utf16' always encodes as UTF-16LE and prepends a
// BOM unless the caller explicitly passes {addBOM: false}.
function Utf16Encoder(options, codec) {
    var opts = options || {};
    if (opts.addBOM === undefined)
        opts.addBOM = true;
    this.encoder = codec.iconv.getEncoder('utf-16le', opts);
}

Utf16Encoder.prototype.write = function(str) {
    return this.encoder.write(str);
}

Utf16Encoder.prototype.end = function() {
    return this.encoder.end();
}
|
|||
|
|
|||
|
|
|||
|
// -- Decoding
|
|||
|
|
|||
|
// Streaming decoder that sniffs the byte order of the input. Chunks are
// buffered until there is enough data (or the stream ends) to pick between
// UTF-16LE and UTF-16BE; decoding is then delegated to that codec.
function Utf16Decoder(options, codec) {
    this.decoder = null;        // Chosen once endianness is known.
    this.initialBufs = [];      // Chunks buffered before the choice.
    this.initialBufsLen = 0;

    this.options = options || {};
    this.iconv = codec.iconv;
}

// Detect endianness from the buffered chunks, create the real decoder and
// run the buffered data through it. Returns the decoded prefix.
Utf16Decoder.prototype._initDecoder = function() {
    var encoding = detectEncoding(this.initialBufs, this.options.defaultEncoding);
    this.decoder = this.iconv.getDecoder(encoding, this.options);

    var decoded = '';
    for (var k = 0; k < this.initialBufs.length; k++)
        decoded += this.decoder.write(this.initialBufs[k]);

    this.initialBufs.length = this.initialBufsLen = 0;
    return decoded;
}

Utf16Decoder.prototype.write = function(buf) {
    if (this.decoder)
        return this.decoder.write(buf);

    // Codec not chosen yet: accumulate initial bytes.
    this.initialBufs.push(buf);
    this.initialBufsLen += buf.length;

    if (this.initialBufsLen < 16) // Need more bytes for the space heuristic.
        return '';

    return this._initDecoder();
}

Utf16Decoder.prototype.end = function() {
    if (!this.decoder) {
        var res = this._initDecoder();
        var trail = this.decoder.end();
        return trail ? res + trail : res;
    }
    return this.decoder.end();
}
|
|||
|
|
|||
|
// Guess UTF-16 endianness of the buffered chunks.
// 1. A BOM in the first two bytes decides immediately.
// 2. Otherwise count code units that look like ASCII when read as LE vs BE:
//    text usually contains many U+00xx chars, while U+xx00 is uncommon.
// Examines up to 100 code units; falls back to defaultEncoding, then LE.
function detectEncoding(bufs, defaultEncoding) {
    var pair = [];
    var unitsSeen = 0;
    var asciiAsLE = 0, asciiAsBE = 0;

    outer:
    for (var i = 0; i < bufs.length; i++) {
        var buf = bufs[i];
        for (var j = 0; j < buf.length; j++) {
            pair.push(buf[j]);
            if (pair.length !== 2)
                continue;

            if (unitsSeen === 0) {
                // BOM check on the very first code unit.
                if (pair[0] === 0xFF && pair[1] === 0xFE) return 'utf-16le';
                if (pair[0] === 0xFE && pair[1] === 0xFF) return 'utf-16be';
            }

            if (pair[0] === 0 && pair[1] !== 0) asciiAsBE++;
            if (pair[0] !== 0 && pair[1] === 0) asciiAsLE++;

            pair.length = 0;
            unitsSeen++;

            if (unitsSeen >= 100)
                break outer;
        }
    }

    if (asciiAsBE > asciiAsLE) return 'utf-16be';
    if (asciiAsBE < asciiAsLE) return 'utf-16le';

    // Undecidable (likely all zeros or not enough data).
    return defaultEncoding || 'utf-16le';
}
|
|||
|
|
|||
|
|
|||
|
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 2866:
|
|||
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
|
|||
|
|
|||
|
var Buffer = __nccwpck_require__(2750).Buffer;
|
|||
|
|
|||
|
// == UTF32-LE/BE codec. ==========================================================
|
|||
|
|
|||
|
// Shared UTF-32 implementation; the concrete byte order arrives via
// codecOptions.isLE (see the utf32le/utf32be wrappers just below).
exports._utf32 = Utf32Codec;

function Utf32Codec(codecOptions, iconv) {
    this.iconv = iconv;
    this.bomAware = true;
    this.isLE = codecOptions.isLE;
}

// Concrete encodings are thin option wrappers over the shared codec.
exports.utf32le = { type: '_utf32', isLE: true };
exports.utf32be = { type: '_utf32', isLE: false };

// Aliases
exports.ucs4le = 'utf32le';
exports.ucs4be = 'utf32be';

Utf32Codec.prototype.encoder = Utf32Encoder;
Utf32Codec.prototype.decoder = Utf32Decoder;
|
|||
|
|
|||
|
// -- Encoding
|
|||
|
|
|||
|
// Streaming UTF-32 encoder. A surrogate pair may be split across write()
// calls, so an unmatched high surrogate is carried over in `highSurrogate`.
function Utf32Encoder(options, codec) {
    this.isLE = codec.isLE;
    this.highSurrogate = 0;
}

Utf32Encoder.prototype.write = function(str) {
    var src = Buffer.from(str, 'ucs2');
    var dst = Buffer.alloc(src.length * 2);
    var write32 = this.isLE ? dst.writeUInt32LE : dst.writeUInt32BE;
    var offset = 0;

    for (var i = 0; i < src.length; i += 2) {
        var code = src.readUInt16LE(i);
        var isHighSurrogate = (0xD800 <= code && code < 0xDC00);
        var isLowSurrogate = (0xDC00 <= code && code < 0xE000);

        if (this.highSurrogate) {
            if (!isHighSurrogate && isLowSurrogate) {
                // Proper pair: combine into a single supplementary code point.
                var cp = (((this.highSurrogate - 0xD800) << 10) | (code - 0xDC00)) + 0x10000;
                write32.call(dst, cp, offset);
                offset += 4;
                this.highSurrogate = 0;
                continue;
            }
            // Two highs in a row, or a high not followed by a low: emit the
            // pending high surrogate as a stand-alone semi-invalid character
            // (technically wrong, but expected by some applications, like
            // Windows file names).
            write32.call(dst, this.highSurrogate, offset);
            offset += 4;
        }

        if (isHighSurrogate) {
            this.highSurrogate = code;
        } else {
            // BMP char — or a stray low surrogate, emitted stand-alone for
            // the same reasons as an unpaired high surrogate above.
            write32.call(dst, code, offset);
            offset += 4;
            this.highSurrogate = 0;
        }
    }

    return (offset < dst.length) ? dst.slice(0, offset) : dst;
};

Utf32Encoder.prototype.end = function() {
    // Flush a dangling high surrogate as a semi-valid independent character.
    if (!this.highSurrogate)
        return;

    var buf = Buffer.alloc(4);

    if (this.isLE)
        buf.writeUInt32LE(this.highSurrogate, 0);
    else
        buf.writeUInt32BE(this.highSurrogate, 0);

    this.highSurrogate = 0;

    return buf;
};
|
|||
|
|
|||
|
// -- Decoding
|
|||
|
|
|||
|
// Streaming UTF-32 decoder. A chunk may split a 4-byte unit; the dangling
// bytes (0..3 of them) are kept in `this.overflow` between write() calls.
function Utf32Decoder(options, codec) {
    this.isLE = codec.isLE;
    this.badChar = codec.iconv.defaultCharUnicode.charCodeAt(0);
    this.overflow = [];
}

Utf32Decoder.prototype.write = function(src) {
    if (src.length === 0)
        return '';

    var i = 0;
    var codepoint = 0;
    var dst = Buffer.alloc(src.length + 4);
    var offset = 0;
    var isLE = this.isLE;
    var overflow = this.overflow;
    var badChar = this.badChar;

    if (overflow.length > 0) {
        // Finish the 4-byte unit started in the previous chunk.
        for (; i < src.length && overflow.length < 4; i++)
            overflow.push(src[i]);

        if (overflow.length === 4) {
            // NOTE: codepoint is a signed int32 and can be negative.
            // BUGFIX: read the completed unit from the start of `overflow`
            // (indices 0..3). The previous code indexed with `i` (the count
            // of bytes consumed from `src`), reading past the end of the
            // accumulator and corrupting any codepoint split across chunks.
            if (isLE) {
                codepoint = overflow[0] | (overflow[1] << 8) | (overflow[2] << 16) | (overflow[3] << 24);
            } else {
                codepoint = overflow[3] | (overflow[2] << 8) | (overflow[1] << 16) | (overflow[0] << 24);
            }
            overflow.length = 0;

            offset = _writeCodepoint(dst, offset, codepoint, badChar);
        }
    }

    // Main loop. Should be as optimized as possible.
    for (; i < src.length - 3; i += 4) {
        // NOTE: codepoint is a signed int32 and can be negative.
        if (isLE) {
            codepoint = src[i] | (src[i+1] << 8) | (src[i+2] << 16) | (src[i+3] << 24);
        } else {
            codepoint = src[i+3] | (src[i+2] << 8) | (src[i+1] << 16) | (src[i] << 24);
        }
        offset = _writeCodepoint(dst, offset, codepoint, badChar);
    }

    // Keep overflowing bytes.
    for (; i < src.length; i++) {
        overflow.push(src[i]);
    }

    return dst.slice(0, offset).toString('ucs2');
};

// Append `codepoint` to `dst` (as UTF-16LE) at `offset`; returns the new
// offset. Out-of-range code points (negative or > U+10FFFF) become badChar.
function _writeCodepoint(dst, offset, codepoint, badChar) {
    // NOTE: codepoint is signed int32 and can be negative. We keep it that way to help V8 with optimizations.
    if (codepoint < 0 || codepoint > 0x10FFFF) {
        // Not a valid Unicode codepoint
        codepoint = badChar;
    }

    // Supplementary planes: write the high surrogate first.
    if (codepoint >= 0x10000) {
        codepoint -= 0x10000;

        var high = 0xD800 | (codepoint >> 10);
        dst[offset++] = high & 0xff;
        dst[offset++] = high >> 8;

        // The low surrogate is written by the common path below.
        codepoint = 0xDC00 | (codepoint & 0x3FF);
    }

    // Write BMP char or low surrogate.
    dst[offset++] = codepoint & 0xff;
    dst[offset++] = codepoint >> 8;

    return offset;
};

Utf32Decoder.prototype.end = function() {
    // Drop any incomplete trailing unit.
    this.overflow.length = 0;
};
|
|||
|
|
|||
|
// == UTF-32 Auto codec =============================================================
|
|||
|
// Decoder chooses automatically from UTF-32LE and UTF-32BE using BOM and space-based heuristic.
|
|||
|
// Defaults to UTF-32LE. http://en.wikipedia.org/wiki/UTF-32
|
|||
|
// Encoder/decoder default can be changed: iconv.decode(buf, 'utf32', {defaultEncoding: 'utf-32be'});
|
|||
|
|
|||
|
// Encoder prepends BOM (which can be overridden with (addBOM: false}).
|
|||
|
|
|||
|
// Auto codec: chooses UTF-32LE vs UTF-32BE when decoding (BOM + heuristic,
// see detectEncoding below); encodes as UTF-32LE unless overridden.
exports.utf32 = Utf32AutoCodec;
exports.ucs4 = 'utf32';

function Utf32AutoCodec(options, iconv) {
    this.iconv = iconv;
}

Utf32AutoCodec.prototype.encoder = Utf32AutoEncoder;
Utf32AutoCodec.prototype.decoder = Utf32AutoDecoder;
|
|||
|
|
|||
|
// -- Encoding
|
|||
|
|
|||
|
// Delegates to a concrete UTF-32 encoder (LE unless options.defaultEncoding
// says otherwise), adding a BOM by default.
function Utf32AutoEncoder(options, codec) {
    var opts = options || {};

    if (opts.addBOM === undefined)
        opts.addBOM = true;

    this.encoder = codec.iconv.getEncoder(opts.defaultEncoding || 'utf-32le', opts);
}

Utf32AutoEncoder.prototype.write = function(str) {
    return this.encoder.write(str);
};

Utf32AutoEncoder.prototype.end = function() {
    return this.encoder.end();
};
|
|||
|
|
|||
|
// -- Decoding
|
|||
|
|
|||
|
// Buffers input until endianness can be detected, then delegates to the
// concrete UTF-32LE/BE decoder.
function Utf32AutoDecoder(options, codec) {
    this.decoder = null;        // Chosen once endianness is known.
    this.initialBufs = [];      // Chunks buffered before the choice.
    this.initialBufsLen = 0;
    this.options = options || {};
    this.iconv = codec.iconv;
}

// Pick the endianness, create the real decoder, and feed it the buffered
// chunks. Returns the decoded prefix.
Utf32AutoDecoder.prototype._initDecoder = function() {
    var encoding = detectEncoding(this.initialBufs, this.options.defaultEncoding);
    this.decoder = this.iconv.getDecoder(encoding, this.options);

    var decoded = '';
    for (var k = 0; k < this.initialBufs.length; k++)
        decoded += this.decoder.write(this.initialBufs[k]);

    this.initialBufs.length = this.initialBufsLen = 0;
    return decoded;
};

Utf32AutoDecoder.prototype.write = function(buf) {
    if (this.decoder)
        return this.decoder.write(buf);

    // Codec not chosen yet: accumulate initial bytes.
    this.initialBufs.push(buf);
    this.initialBufsLen += buf.length;

    if (this.initialBufsLen < 32) // Need more bytes for the heuristic.
        return '';

    return this._initDecoder();
};

Utf32AutoDecoder.prototype.end = function() {
    if (!this.decoder) {
        var res = this._initDecoder();
        var trail = this.decoder.end();
        return trail ? res + trail : res;
    }
    return this.decoder.end();
};
|
|||
|
|
|||
|
// Guess UTF-32 endianness: a BOM wins; otherwise score each byte order by
// counting plausible BMP characters and penalizing invalid units (a valid
// UTF-32 value is <= 0x0010FFFF). Examines up to 100 units.
function detectEncoding(bufs, defaultEncoding) {
    var quad = [];
    var unitsSeen = 0;
    var invalidLE = 0, invalidBE = 0;       // Invalid chars per byte order.
    var bmpCharsLE = 0, bmpCharsBE = 0;     // BMP chars per byte order.

    outer:
    for (var i = 0; i < bufs.length; i++) {
        var buf = bufs[i];
        for (var j = 0; j < buf.length; j++) {
            quad.push(buf[j]);
            if (quad.length !== 4)
                continue;

            if (unitsSeen === 0) {
                // BOM check on the very first unit.
                if (quad[0] === 0xFF && quad[1] === 0xFE && quad[2] === 0 && quad[3] === 0)
                    return 'utf-32le';
                if (quad[0] === 0 && quad[1] === 0 && quad[2] === 0xFE && quad[3] === 0xFF)
                    return 'utf-32be';
            }

            if (quad[0] !== 0 || quad[1] > 0x10) invalidBE++;
            if (quad[3] !== 0 || quad[2] > 0x10) invalidLE++;

            if (quad[0] === 0 && quad[1] === 0 && (quad[2] !== 0 || quad[3] !== 0)) bmpCharsBE++;
            if ((quad[0] !== 0 || quad[1] !== 0) && quad[2] === 0 && quad[3] === 0) bmpCharsLE++;

            quad.length = 0;
            unitsSeen++;

            if (unitsSeen >= 100)
                break outer;
        }
    }

    // Make decisions.
    if (bmpCharsBE - invalidBE > bmpCharsLE - invalidLE) return 'utf-32be';
    if (bmpCharsBE - invalidBE < bmpCharsLE - invalidLE) return 'utf-32le';

    // Undecidable (likely all zeros or not enough data).
    return defaultEncoding || 'utf-32le';
}
|
|||
|
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 244:
|
|||
|
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
|
|||
|
var Buffer = __nccwpck_require__(2750).Buffer;
|
|||
|
|
|||
|
// UTF-7 codec, according to https://tools.ietf.org/html/rfc2152
|
|||
|
// See also below a UTF-7-IMAP codec, according to http://tools.ietf.org/html/rfc3501#section-5.1.3
|
|||
|
|
|||
|
// Plain UTF-7 codec per RFC 2152; the UTF-7-IMAP variant follows below.
exports.utf7 = Utf7Codec;
exports.unicode11utf7 = 'utf7'; // Alias UNICODE-1-1-UTF-7
function Utf7Codec(codecOptions, iconv) {
    this.iconv = iconv;
};

Utf7Codec.prototype.encoder = Utf7Encoder;
Utf7Codec.prototype.decoder = Utf7Decoder;
Utf7Codec.prototype.bomAware = true;
|
|||
|
|
|||
|
|
|||
|
// -- Encoding
|
|||
|
|
|||
|
// Characters that must be base64-encoded: everything outside RFC 2152's
// direct-character set.
var nonDirectChars = /[^A-Za-z0-9'\(\),-\.\/:\? \n\r\t]+/g;

function Utf7Encoder(options, codec) {
    this.iconv = codec.iconv;
}

// Naive, non-streaming implementation: each run of non-direct characters
// becomes "+<base64 of UTF-16BE, padding stripped>-"; a lone "+" becomes "+-".
Utf7Encoder.prototype.write = function(str) {
    var self = this;
    return Buffer.from(str.replace(nonDirectChars, function(chunk) {
        if (chunk === '+')
            return "+-";
        var b64 = self.iconv.encode(chunk, 'utf16-be').toString('base64').replace(/=+$/, '');
        return "+" + b64 + "-";
    }));
}

// Stateless: nothing to flush.
Utf7Encoder.prototype.end = function() {
}
|
|||
|
|
|||
|
|
|||
|
// -- Decoding
|
|||
|
|
|||
|
// Streaming UTF-7 decoder. Tracks across chunks whether we are inside a
// "+...-" base64 section; an undecodable base64 tail is kept in base64Accum.
function Utf7Decoder(options, codec) {
    this.iconv = codec.iconv;
    this.inBase64 = false;
    this.base64Accum = '';
}

// Lookup table: is byte i a valid base64 character?
var base64Regex = /[A-Za-z0-9\/+]/;
var base64Chars = [];
for (var i = 0; i < 256; i++)
    base64Chars[i] = base64Regex.test(String.fromCharCode(i));

var plusChar = '+'.charCodeAt(0),
    minusChar = '-'.charCodeAt(0),
    andChar = '&'.charCodeAt(0);

Utf7Decoder.prototype.write = function(buf) {
    var res = "", lastI = 0,
        inBase64 = this.inBase64,
        base64Accum = this.base64Accum;

    // Walk the chunk, alternating between direct-character and base64 runs.
    for (var i = 0; i < buf.length; i++) {
        if (!inBase64) {
            // Direct mode: copy chars until a '+' switches us to base64.
            if (buf[i] == plusChar) {
                res += this.iconv.decode(buf.slice(lastI, i), "ascii");
                lastI = i + 1;
                inBase64 = true;
            }
        } else if (!base64Chars[buf[i]]) { // Base64 run ended.
            if (i == lastI && buf[i] == minusChar) {
                // "+-" is the escape for a literal "+".
                res += "+";
            } else {
                var b64str = base64Accum + this.iconv.decode(buf.slice(lastI, i), "ascii");
                res += this.iconv.decode(Buffer.from(b64str, 'base64'), "utf16-be");
            }

            if (buf[i] != minusChar) // A '-' terminator is absorbed; anything else is reprocessed.
                i--;

            lastI = i + 1;
            inBase64 = false;
            base64Accum = '';
        }
    }

    // Flush the tail of the chunk.
    if (!inBase64) {
        res += this.iconv.decode(buf.slice(lastI), "ascii");
    } else {
        var b64str = base64Accum + this.iconv.decode(buf.slice(lastI), "ascii");

        var canBeDecoded = b64str.length - (b64str.length % 8); // Minimal chunk: 2 quads -> 2x3 bytes -> 3 chars.
        base64Accum = b64str.slice(canBeDecoded); // The rest will be decoded in future.
        b64str = b64str.slice(0, canBeDecoded);

        res += this.iconv.decode(Buffer.from(b64str, 'base64'), "utf16-be");
    }

    this.inBase64 = inBase64;
    this.base64Accum = base64Accum;

    return res;
}

Utf7Decoder.prototype.end = function() {
    var res = "";
    // Decode whatever complete base64 is still pending, then reset.
    if (this.inBase64 && this.base64Accum.length > 0)
        res = this.iconv.decode(Buffer.from(this.base64Accum, 'base64'), "utf16-be");

    this.inBase64 = false;
    this.base64Accum = '';
    return res;
}
|
|||
|
|
|||
|
|
|||
|
// UTF-7-IMAP codec.
|
|||
|
// RFC3501 Sec. 5.1.3 Modified UTF-7 (http://tools.ietf.org/html/rfc3501#section-5.1.3)
|
|||
|
// Differences:
|
|||
|
// * Base64 part is started by "&" instead of "+"
|
|||
|
// * Direct characters are 0x20-0x7E, except "&" (0x26)
|
|||
|
// * In Base64, "," is used instead of "/"
|
|||
|
// * Base64 must not be used to represent direct characters.
|
|||
|
// * No implicit shift back from Base64 (should always end with '-')
|
|||
|
// * String must end in non-shifted position.
|
|||
|
// * "-&" while in base64 is not allowed.
|
|||
|
|
|||
|
|
|||
|
// Modified UTF-7 codec for IMAP mailbox names (RFC 3501, sec. 5.1.3).
exports.utf7imap = Utf7IMAPCodec;
function Utf7IMAPCodec(codecOptions, iconv) {
    this.iconv = iconv;
};

Utf7IMAPCodec.prototype.encoder = Utf7IMAPEncoder;
Utf7IMAPCodec.prototype.decoder = Utf7IMAPDecoder;
Utf7IMAPCodec.prototype.bomAware = true;
|
|||
|
|
|||
|
|
|||
|
// -- Encoding
|
|||
|
|
|||
|
// Streaming UTF-7-IMAP encoder. Non-direct characters accumulate as UTF-16BE
// bytes in base64Accum (6 bytes = a whole number of base64 quads) and are
// flushed with '/' replaced by ',' per the IMAP variant.
function Utf7IMAPEncoder(options, codec) {
    this.iconv = codec.iconv;
    this.inBase64 = false;
    this.base64Accum = Buffer.alloc(6);
    this.base64AccumIdx = 0;
}

Utf7IMAPEncoder.prototype.write = function(str) {
    var inBase64 = this.inBase64,
        base64Accum = this.base64Accum,
        base64AccumIdx = this.base64AccumIdx,
        buf = Buffer.alloc(str.length * 5 + 10),
        bufIdx = 0;

    for (var i = 0; i < str.length; i++) {
        var uChar = str.charCodeAt(i);

        if (0x20 <= uChar && uChar <= 0x7E) { // Direct character (or '&').
            if (inBase64) {
                // Close the base64 section: flush the partial accumulator
                // (padding stripped), then emit the '-' terminator.
                if (base64AccumIdx > 0) {
                    bufIdx += buf.write(base64Accum.slice(0, base64AccumIdx).toString('base64').replace(/\//g, ',').replace(/=+$/, ''), bufIdx);
                    base64AccumIdx = 0;
                }
                buf[bufIdx++] = minusChar;
                inBase64 = false;
            }

            buf[bufIdx++] = uChar; // Write direct character.
            if (uChar === andChar) // '&' itself is escaped as '&-'.
                buf[bufIdx++] = minusChar;

        } else { // Non-direct character.
            if (!inBase64) {
                buf[bufIdx++] = andChar; // Open a base64 section.
                inBase64 = true;
            }

            base64Accum[base64AccumIdx++] = uChar >> 8;
            base64Accum[base64AccumIdx++] = uChar & 0xFF;

            if (base64AccumIdx == base64Accum.length) {
                // 6 bytes -> 8 base64 chars, no padding needed.
                bufIdx += buf.write(base64Accum.toString('base64').replace(/\//g, ','), bufIdx);
                base64AccumIdx = 0;
            }
        }
    }

    this.inBase64 = inBase64;
    this.base64AccumIdx = base64AccumIdx;

    return buf.slice(0, bufIdx);
}

Utf7IMAPEncoder.prototype.end = function() {
    var buf = Buffer.alloc(10), bufIdx = 0;
    if (this.inBase64) {
        // Flush any pending base64 bytes and close the section with '-'.
        if (this.base64AccumIdx > 0) {
            bufIdx += buf.write(this.base64Accum.slice(0, this.base64AccumIdx).toString('base64').replace(/\//g, ',').replace(/=+$/, ''), bufIdx);
            this.base64AccumIdx = 0;
        }

        buf[bufIdx++] = minusChar;
        this.inBase64 = false;
    }

    return buf.slice(0, bufIdx);
}
|
|||
|
|
|||
|
|
|||
|
// -- Decoding
|
|||
|
|
|||
|
// Streaming UTF-7-IMAP decoder. Same structure as Utf7Decoder above, but
// base64 sections start with '&' and use ',' in place of '/'.
function Utf7IMAPDecoder(options, codec) {
    this.iconv = codec.iconv;
    this.inBase64 = false;    // Currently inside an "&...-" base64 section?
    this.base64Accum = '';    // Base64 tail too short to decode yet (carried across chunks).
}

// Valid base64 bytes for the IMAP variant: the standard set, plus ','.
var base64IMAPChars = base64Chars.slice();
base64IMAPChars[','.charCodeAt(0)] = true;

Utf7IMAPDecoder.prototype.write = function(buf) {
    var res = "", lastI = 0,
        inBase64 = this.inBase64,
        base64Accum = this.base64Accum;

    // The decoder is more involved as we must handle chunks in stream.
    // It is forgiving, closer to standard UTF-7 (for example, '-' is optional at the end).

    for (var i = 0; i < buf.length; i++) {
        if (!inBase64) { // We're in direct mode.
            // Write direct chars until '&'
            if (buf[i] == andChar) {
                res += this.iconv.decode(buf.slice(lastI, i), "ascii"); // Write direct chars.
                lastI = i+1;
                inBase64 = true;
            }
        } else { // We decode base64.
            if (!base64IMAPChars[buf[i]]) { // Base64 ended.
                if (i == lastI && buf[i] == minusChar) { // "&-" -> "&"
                    res += "&";
                } else {
                    // Map ',' back to '/' before standard base64 decoding.
                    var b64str = base64Accum + this.iconv.decode(buf.slice(lastI, i), "ascii").replace(/,/g, '/');
                    res += this.iconv.decode(Buffer.from(b64str, 'base64'), "utf16-be");
                }

                if (buf[i] != minusChar) // Minus may be absorbed after base64.
                    i--;

                lastI = i+1;
                inBase64 = false;
                base64Accum = '';
            }
        }
    }

    // Flush the remainder of the chunk.
    if (!inBase64) {
        res += this.iconv.decode(buf.slice(lastI), "ascii"); // Write direct chars.
    } else {
        var b64str = base64Accum + this.iconv.decode(buf.slice(lastI), "ascii").replace(/,/g, '/');

        var canBeDecoded = b64str.length - (b64str.length % 8); // Minimal chunk: 2 quads -> 2x3 bytes -> 3 chars.
        base64Accum = b64str.slice(canBeDecoded); // The rest will be decoded in future.
        b64str = b64str.slice(0, canBeDecoded);

        res += this.iconv.decode(Buffer.from(b64str, 'base64'), "utf16-be");
    }

    this.inBase64 = inBase64;
    this.base64Accum = base64Accum;

    return res;
}

Utf7IMAPDecoder.prototype.end = function() {
    var res = "";
    // Decode whatever base64 is still pending (forgiving: no '-' required),
    // then reset state for reuse.
    if (this.inBase64 && this.base64Accum.length > 0)
        res = this.iconv.decode(Buffer.from(this.base64Accum, 'base64'), "utf16-be");

    this.inBase64 = false;
    this.base64Accum = '';
    return res;
}
|
|||
|
|
|||
|
|
|||
|
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 5271:
|
|||
|
/***/ ((__unused_webpack_module, exports) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
|
|||
|
|
|||
|
var BOMChar = '\uFEFF';
|
|||
|
|
|||
|
exports.PrependBOM = PrependBOMWrapper
|
|||
|
// Encoder wrapper that injects a BOM character in front of the very first
// chunk of output; subsequent writes pass through untouched.
function PrependBOMWrapper(encoder, options) {
    this.encoder = encoder;
    this.addBOM = true;
}

PrependBOMWrapper.prototype.write = function(str) {
    if (this.addBOM) {
        str = BOMChar + str;
        this.addBOM = false; // Only the first write gets the BOM.
    }
    return this.encoder.write(str);
}

PrependBOMWrapper.prototype.end = function() {
    return this.encoder.end();
}
|
|||
|
|
|||
|
|
|||
|
//------------------------------------------------------------------------------
|
|||
|
|
|||
|
exports.StripBOM = StripBOMWrapper;
|
|||
|
// Decoder wrapper that removes a single leading BOM from the decoded stream
// and, when requested, notifies the caller via options.stripBOM().
function StripBOMWrapper(decoder, options) {
    this.decoder = decoder;
    this.pass = false;           // True once the BOM window has passed.
    this.options = options || {};
}

StripBOMWrapper.prototype.write = function(buf) {
    var res = this.decoder.write(buf);
    // Keep waiting while the decoder produces nothing (e.g. a split BOM).
    if (this.pass || !res)
        return res;

    if (res[0] === BOMChar) {
        res = res.slice(1);
        if (typeof this.options.stripBOM === 'function')
            this.options.stripBOM(); // Tell the caller a BOM was removed.
    }

    this.pass = true;
    return res;
}

StripBOMWrapper.prototype.end = function() {
    return this.decoder.end();
}
|
|||
|
|
|||
|
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 8735:
|
|||
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
|
|||
|
|
|||
|
var Buffer = __nccwpck_require__(2750).Buffer;
|
|||
|
|
|||
|
var bomHandling = __nccwpck_require__(5271),
|
|||
|
iconv = module.exports;
|
|||
|
|
|||
|
// All codecs and aliases are kept here, keyed by encoding name/alias.
|
|||
|
// They are lazy loaded in `iconv.getCodec` from `encodings/index.js`.
|
|||
|
iconv.encodings = null;
|
|||
|
|
|||
|
// Characters emitted in case of error.
|
|||
|
iconv.defaultCharUnicode = '\ufffd'; // U+FFFD REPLACEMENT CHARACTER (literal was mojibake-corrupted in this copy).
|
|||
|
iconv.defaultCharSingleByte = '?';
|
|||
|
|
|||
|
// Public API.
|
|||
|
iconv.encode = function encode(str, encoding, options) {
|
|||
|
str = "" + (str || ""); // Ensure string.
|
|||
|
|
|||
|
var encoder = iconv.getEncoder(encoding, options);
|
|||
|
|
|||
|
var res = encoder.write(str);
|
|||
|
var trail = encoder.end();
|
|||
|
|
|||
|
return (trail && trail.length > 0) ? Buffer.concat([res, trail]) : res;
|
|||
|
}
|
|||
|
|
|||
|
iconv.decode = function decode(buf, encoding, options) {
|
|||
|
if (typeof buf === 'string') {
|
|||
|
if (!iconv.skipDecodeWarning) {
|
|||
|
console.error('Iconv-lite warning: decode()-ing strings is deprecated. Refer to https://github.com/ashtuchkin/iconv-lite/wiki/Use-Buffers-when-decoding');
|
|||
|
iconv.skipDecodeWarning = true;
|
|||
|
}
|
|||
|
|
|||
|
buf = Buffer.from("" + (buf || ""), "binary"); // Ensure buffer.
|
|||
|
}
|
|||
|
|
|||
|
var decoder = iconv.getDecoder(encoding, options);
|
|||
|
|
|||
|
var res = decoder.write(buf);
|
|||
|
var trail = decoder.end();
|
|||
|
|
|||
|
return trail ? (res + trail) : res;
|
|||
|
}
|
|||
|
|
|||
|
iconv.encodingExists = function encodingExists(enc) {
|
|||
|
try {
|
|||
|
iconv.getCodec(enc);
|
|||
|
return true;
|
|||
|
} catch (e) {
|
|||
|
return false;
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
// Legacy aliases to convert functions
|
|||
|
iconv.toEncoding = iconv.encode;
|
|||
|
iconv.fromEncoding = iconv.decode;
|
|||
|
|
|||
|
// Search for a codec in iconv.encodings. Cache codec data in iconv._codecDataCache.
|
|||
|
iconv._codecDataCache = {};
|
|||
|
iconv.getCodec = function getCodec(encoding) {
|
|||
|
if (!iconv.encodings)
|
|||
|
iconv.encodings = __nccwpck_require__(8956); // Lazy load all encoding definitions.
|
|||
|
|
|||
|
// Canonicalize encoding name: strip all non-alphanumeric chars and appended year.
|
|||
|
var enc = iconv._canonicalizeEncoding(encoding);
|
|||
|
|
|||
|
// Traverse iconv.encodings to find actual codec.
|
|||
|
var codecOptions = {};
|
|||
|
while (true) {
|
|||
|
var codec = iconv._codecDataCache[enc];
|
|||
|
if (codec)
|
|||
|
return codec;
|
|||
|
|
|||
|
var codecDef = iconv.encodings[enc];
|
|||
|
|
|||
|
switch (typeof codecDef) {
|
|||
|
case "string": // Direct alias to other encoding.
|
|||
|
enc = codecDef;
|
|||
|
break;
|
|||
|
|
|||
|
case "object": // Alias with options. Can be layered.
|
|||
|
for (var key in codecDef)
|
|||
|
codecOptions[key] = codecDef[key];
|
|||
|
|
|||
|
if (!codecOptions.encodingName)
|
|||
|
codecOptions.encodingName = enc;
|
|||
|
|
|||
|
enc = codecDef.type;
|
|||
|
break;
|
|||
|
|
|||
|
case "function": // Codec itself.
|
|||
|
if (!codecOptions.encodingName)
|
|||
|
codecOptions.encodingName = enc;
|
|||
|
|
|||
|
// The codec function must load all tables and return object with .encoder and .decoder methods.
|
|||
|
// It'll be called only once (for each different options object).
|
|||
|
codec = new codecDef(codecOptions, iconv);
|
|||
|
|
|||
|
iconv._codecDataCache[codecOptions.encodingName] = codec; // Save it to be reused later.
|
|||
|
return codec;
|
|||
|
|
|||
|
default:
|
|||
|
throw new Error("Encoding not recognized: '" + encoding + "' (searched as: '"+enc+"')");
|
|||
|
}
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
// Canonicalize an encoding name: lowercase it, then strip every
// non-alphanumeric character and any appended ":<year>" suffix.
iconv._canonicalizeEncoding = function(encoding) {
    return `${encoding}`.toLowerCase().replace(/:\d{4}$|[^0-9a-z]/g, "");
}
|
|||
|
|
|||
|
// Resolve the codec for `encoding` and build a fresh encoder instance from it.
iconv.getEncoder = function getEncoder(encoding, options) {
    var codec = iconv.getCodec(encoding);
    var encoder = new codec.encoder(options, codec);

    // BOM-aware codecs can be asked to emit a BOM in front of the output.
    if (codec.bomAware && options && options.addBOM)
        encoder = new bomHandling.PrependBOM(encoder, options);

    return encoder;
}
|
|||
|
|
|||
|
// Resolve the codec for `encoding` and build a fresh decoder instance from it.
iconv.getDecoder = function getDecoder(encoding, options) {
    var codec = iconv.getCodec(encoding);
    var decoder = new codec.decoder(options, codec);

    // BOM-aware codecs strip a leading BOM by default; opt out with {stripBOM: false}.
    if (codec.bomAware && !(options && options.stripBOM === false))
        decoder = new bomHandling.StripBOM(decoder, options);

    return decoder;
}
|
|||
|
|
|||
|
// Streaming API
// NOTE: The Streaming API naturally depends on Node.js's 'stream' module, which can add
// up to 100Kb to a browser bundle. To avoid that bloat, streaming is not enabled in
// browsers by default. To enable it explicitly, add this to your app:
// > iconv.enableStreamingAPI(require('stream'));
iconv.enableStreamingAPI = function enableStreamingAPI(stream_module) {
    if (iconv.supportsStreams)
        return; // Already enabled; nothing to do.

    // Dependency-inject the stream module to create the IconvLite stream classes.
    var streams = __nccwpck_require__(8054)(stream_module);

    // Not public API yet, but expose the stream classes.
    iconv.IconvLiteEncoderStream = streams.IconvLiteEncoderStream;
    iconv.IconvLiteDecoderStream = streams.IconvLiteDecoderStream;

    // Streaming entry points.
    iconv.encodeStream = function encodeStream(encoding, options) {
        return new iconv.IconvLiteEncoderStream(iconv.getEncoder(encoding, options), options);
    };

    iconv.decodeStream = function decodeStream(encoding, options) {
        return new iconv.IconvLiteDecoderStream(iconv.getDecoder(encoding, options), options);
    };

    iconv.supportsStreams = true;
}
|
|||
|
|
|||
|
// Enable the Streaming API automatically when the 'stream' module is available
// and non-empty (true in the vast majority of environments).
var stream_module;
try {
    stream_module = __nccwpck_require__(2413);
} catch (e) {}

if (stream_module && stream_module.Transform) {
    iconv.enableStreamingAPI(stream_module);
} else {
    // In the rare environments without a usable 'stream' module, fail with a
    // helpful message instead of an obscure error.
    iconv.encodeStream = iconv.decodeStream = function() {
        throw new Error("iconv-lite Streaming API is not enabled. Use iconv.enableStreamingAPI(require('stream')); to enable it.");
    };
}

if (false) {} // Dead branch left behind by the bundler (browser-only warning removed).
|
|||
|
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 8054:
|
|||
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
|
|||
|
|
|||
|
var Buffer = __nccwpck_require__(2750).Buffer;
|
|||
|
|
|||
|
// NOTE: Due to 'stream' module being pretty large (~100Kb, significant in browser environments),
|
|||
|
// we opt to dependency-inject it instead of creating a hard dependency.
|
|||
|
module.exports = function(stream_module) {
|
|||
|
var Transform = stream_module.Transform;
|
|||
|
|
|||
|
// == Encoder stream =======================================================
|
|||
|
|
|||
|
function IconvLiteEncoderStream(conv, options) {
|
|||
|
this.conv = conv;
|
|||
|
options = options || {};
|
|||
|
options.decodeStrings = false; // We accept only strings, so we don't need to decode them.
|
|||
|
Transform.call(this, options);
|
|||
|
}
|
|||
|
|
|||
|
IconvLiteEncoderStream.prototype = Object.create(Transform.prototype, {
|
|||
|
constructor: { value: IconvLiteEncoderStream }
|
|||
|
});
|
|||
|
|
|||
|
IconvLiteEncoderStream.prototype._transform = function(chunk, encoding, done) {
|
|||
|
if (typeof chunk != 'string')
|
|||
|
return done(new Error("Iconv encoding stream needs strings as its input."));
|
|||
|
try {
|
|||
|
var res = this.conv.write(chunk);
|
|||
|
if (res && res.length) this.push(res);
|
|||
|
done();
|
|||
|
}
|
|||
|
catch (e) {
|
|||
|
done(e);
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
IconvLiteEncoderStream.prototype._flush = function(done) {
|
|||
|
try {
|
|||
|
var res = this.conv.end();
|
|||
|
if (res && res.length) this.push(res);
|
|||
|
done();
|
|||
|
}
|
|||
|
catch (e) {
|
|||
|
done(e);
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
IconvLiteEncoderStream.prototype.collect = function(cb) {
|
|||
|
var chunks = [];
|
|||
|
this.on('error', cb);
|
|||
|
this.on('data', function(chunk) { chunks.push(chunk); });
|
|||
|
this.on('end', function() {
|
|||
|
cb(null, Buffer.concat(chunks));
|
|||
|
});
|
|||
|
return this;
|
|||
|
}
|
|||
|
|
|||
|
|
|||
|
// == Decoder stream =======================================================
|
|||
|
|
|||
|
function IconvLiteDecoderStream(conv, options) {
|
|||
|
this.conv = conv;
|
|||
|
options = options || {};
|
|||
|
options.encoding = this.encoding = 'utf8'; // We output strings.
|
|||
|
Transform.call(this, options);
|
|||
|
}
|
|||
|
|
|||
|
IconvLiteDecoderStream.prototype = Object.create(Transform.prototype, {
|
|||
|
constructor: { value: IconvLiteDecoderStream }
|
|||
|
});
|
|||
|
|
|||
|
IconvLiteDecoderStream.prototype._transform = function(chunk, encoding, done) {
|
|||
|
if (!Buffer.isBuffer(chunk) && !(chunk instanceof Uint8Array))
|
|||
|
return done(new Error("Iconv decoding stream needs buffers as its input."));
|
|||
|
try {
|
|||
|
var res = this.conv.write(chunk);
|
|||
|
if (res && res.length) this.push(res, this.encoding);
|
|||
|
done();
|
|||
|
}
|
|||
|
catch (e) {
|
|||
|
done(e);
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
IconvLiteDecoderStream.prototype._flush = function(done) {
|
|||
|
try {
|
|||
|
var res = this.conv.end();
|
|||
|
if (res && res.length) this.push(res, this.encoding);
|
|||
|
done();
|
|||
|
}
|
|||
|
catch (e) {
|
|||
|
done(e);
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
IconvLiteDecoderStream.prototype.collect = function(cb) {
|
|||
|
var res = '';
|
|||
|
this.on('error', cb);
|
|||
|
this.on('data', function(chunk) { res += chunk; });
|
|||
|
this.on('end', function() {
|
|||
|
cb(null, res);
|
|||
|
});
|
|||
|
return this;
|
|||
|
}
|
|||
|
|
|||
|
return {
|
|||
|
IconvLiteEncoderStream: IconvLiteEncoderStream,
|
|||
|
IconvLiteDecoderStream: IconvLiteDecoderStream,
|
|||
|
};
|
|||
|
};
|
|||
|
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 2750:
|
|||
|
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
/* eslint-disable node/no-deprecated-api */
|
|||
|
|
|||
|
|
|||
|
|
|||
|
var buffer = __nccwpck_require__(4293)
|
|||
|
var Buffer = buffer.Buffer
|
|||
|
|
|||
|
var safer = {}
|
|||
|
|
|||
|
var key
|
|||
|
|
|||
|
for (key in buffer) {
|
|||
|
if (!buffer.hasOwnProperty(key)) continue
|
|||
|
if (key === 'SlowBuffer' || key === 'Buffer') continue
|
|||
|
safer[key] = buffer[key]
|
|||
|
}
|
|||
|
|
|||
|
var Safer = safer.Buffer = {}
|
|||
|
for (key in Buffer) {
|
|||
|
if (!Buffer.hasOwnProperty(key)) continue
|
|||
|
if (key === 'allocUnsafe' || key === 'allocUnsafeSlow') continue
|
|||
|
Safer[key] = Buffer[key]
|
|||
|
}
|
|||
|
|
|||
|
safer.Buffer.prototype = Buffer.prototype
|
|||
|
|
|||
|
if (!Safer.from || Safer.from === Uint8Array.from) {
|
|||
|
Safer.from = function (value, encodingOrOffset, length) {
|
|||
|
if (typeof value === 'number') {
|
|||
|
throw new TypeError('The "value" argument must not be of type number. Received type ' + typeof value)
|
|||
|
}
|
|||
|
if (value && typeof value.length === 'undefined') {
|
|||
|
throw new TypeError('The first argument must be one of type string, Buffer, ArrayBuffer, Array, or Array-like Object. Received type ' + typeof value)
|
|||
|
}
|
|||
|
return Buffer(value, encodingOrOffset, length)
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
if (!Safer.alloc) {
|
|||
|
Safer.alloc = function (size, fill, encoding) {
|
|||
|
if (typeof size !== 'number') {
|
|||
|
throw new TypeError('The "size" argument must be of type number. Received type ' + typeof size)
|
|||
|
}
|
|||
|
if (size < 0 || size >= 2 * (1 << 30)) {
|
|||
|
throw new RangeError('The value "' + size + '" is invalid for option "size"')
|
|||
|
}
|
|||
|
var buf = Buffer(size)
|
|||
|
if (!fill || fill.length === 0) {
|
|||
|
buf.fill(0)
|
|||
|
} else if (typeof encoding === 'string') {
|
|||
|
buf.fill(fill, encoding)
|
|||
|
} else {
|
|||
|
buf.fill(fill)
|
|||
|
}
|
|||
|
return buf
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
if (!safer.kStringMaxLength) {
|
|||
|
try {
|
|||
|
safer.kStringMaxLength = process.binding('buffer').kStringMaxLength
|
|||
|
} catch (e) {
|
|||
|
// we can't determine kStringMaxLength in environments where process.binding
|
|||
|
// is unsupported, so let's not set it
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
if (!safer.constants) {
|
|||
|
safer.constants = {
|
|||
|
MAX_LENGTH: safer.kMaxLength
|
|||
|
}
|
|||
|
if (safer.kStringMaxLength) {
|
|||
|
safer.constants.MAX_STRING_LENGTH = safer.kStringMaxLength
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
module.exports = safer
|
|||
|
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 2357:
|
|||
|
/***/ ((module) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
module.exports = require("assert");;
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 4293:
|
|||
|
/***/ ((module) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
module.exports = require("buffer");;
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 8614:
|
|||
|
/***/ ((module) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
module.exports = require("events");;
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 5747:
|
|||
|
/***/ ((module) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
module.exports = require("fs");;
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 8605:
|
|||
|
/***/ ((module) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
module.exports = require("http");;
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 7211:
|
|||
|
/***/ ((module) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
module.exports = require("https");;
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 1631:
|
|||
|
/***/ ((module) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
module.exports = require("net");;
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 2087:
|
|||
|
/***/ ((module) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
module.exports = require("os");;
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 5622:
|
|||
|
/***/ ((module) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
module.exports = require("path");;
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 2413:
|
|||
|
/***/ ((module) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
module.exports = require("stream");;
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 4304:
|
|||
|
/***/ ((module) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
module.exports = require("string_decoder");;
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 4016:
|
|||
|
/***/ ((module) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
module.exports = require("tls");;
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 8835:
|
|||
|
/***/ ((module) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
module.exports = require("url");;
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 1669:
|
|||
|
/***/ ((module) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
module.exports = require("util");;
|
|||
|
|
|||
|
/***/ }),
|
|||
|
|
|||
|
/***/ 8761:
|
|||
|
/***/ ((module) => {
|
|||
|
|
|||
|
"use strict";
|
|||
|
module.exports = require("zlib");;
|
|||
|
|
|||
|
/***/ })
|
|||
|
|
|||
|
/******/ });
|
|||
|
/************************************************************************/
|
|||
|
// Cache of instantiated modules, keyed by module id.
var __webpack_module_cache__ = {};

// The require function: instantiate (or reuse) the module with the given id.
function __nccwpck_require__(moduleId) {
    // Serve from cache when the module has already been instantiated.
    if (__webpack_module_cache__[moduleId]) {
        return __webpack_module_cache__[moduleId].exports;
    }

    // Create a new module record and register it in the cache before the
    // factory runs (no module.id / module.loaded needed here).
    var module = __webpack_module_cache__[moduleId] = {
        exports: {}
    };

    // Execute the module factory; evict the cache entry if it throws so a
    // later require can retry instead of seeing a half-initialized module.
    var threw = true;
    try {
        __webpack_modules__[moduleId].call(module.exports, module, module.exports, __nccwpck_require__);
        threw = false;
    } finally {
        if (threw) delete __webpack_module_cache__[moduleId];
    }

    // Return the exports of the module.
    return module.exports;
}
|
|||
|
/******/
|
|||
|
/************************************************************************/
|
|||
|
/******/ /* webpack/runtime/compat */
|
|||
|
/******/
|
|||
|
/******/ __nccwpck_require__.ab = __dirname + "/";/************************************************************************/
|
|||
|
/******/ // module exports must be returned from runtime so entry inlining is disabled
|
|||
|
/******/ // startup
|
|||
|
/******/ // Load entry module and return exports
|
|||
|
/******/ return __nccwpck_require__(7133);
|
|||
|
/******/ })()
|
|||
|
;
|