/**
 * @module LRUCache
 */
declare const TYPE: unique symbol;
export type PosInt = number & {
    [TYPE]: 'Positive Integer';
};
export type Index = number & {
    [TYPE]: 'LRUCache Index';
};
export type UintArray = Uint8Array | Uint16Array | Uint32Array;
export type NumberArray = UintArray | number[];
declare class ZeroArray extends Array<number> {
    constructor(size: number);
}
export type { ZeroArray };
export type { Stack };
export type StackLike = Stack | Index[];
declare class Stack {
    #private;
    heap: NumberArray;
    length: number;
    static create(max: number): StackLike;
    constructor(max: number, HeapCls: {
        new (n: number): NumberArray;
    });
    push(n: Index): void;
    pop(): Index;
}
/**
 * Promise representing an in-progress {@link LRUCache#fetch} call
 */
export type BackgroundFetch<V> = Promise<V | undefined> & {
    __returned: BackgroundFetch<V> | undefined;
    __abortController: AbortController;
    __staleWhileFetching: V | undefined;
};
export type DisposeTask<K, V> = [
    value: V,
    key: K,
    reason: LRUCache.DisposeReason
];
export declare namespace LRUCache {
    /**
     * An integer greater than 0, reflecting the calculated size of items
     */
    type Size = number;
    /**
     * Integer greater than 0, representing some number of milliseconds, or the
     * time from which a TTL started counting.
     */
    type Milliseconds = number;
    /**
     * An integer greater than 0, reflecting a number of items
     */
    type Count = number;
    /**
     * The reason why an item was removed from the cache, passed
     * to the {@link Disposer} methods.
     */
    type DisposeReason = 'evict' | 'set' | 'delete';
    /**
     * A method called upon item removal, passed as the
     * {@link OptionsBase.dispose} and/or
     * {@link OptionsBase.disposeAfter} options.
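     *
     * A small sketch (not from the upstream docs) of a disposer that releases
     * a resource held by the cached value; the value shape here is
     * illustrative:
     *
     * ```ts
     * // reason is 'evict', 'set', or 'delete'
     * const dispose: LRUCache.Disposer<string, { close(): void }> =
     *   (value, key, reason) => value.close()
     * const cache = new LRUCache<string, { close(): void }>({ max: 10, dispose })
     * ```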
     */
    type Disposer<K, V> = (value: V, key: K, reason: DisposeReason) => void;
    /**
     * A function that returns the effective calculated size
     * of an entry in the cache.
     */
    type SizeCalculator<K, V> = (value: V, key: K) => Size;
    /**
     * Options provided to the
     * {@link OptionsBase.fetchMethod} function.
     */
    interface FetcherOptions<K, V, FC = unknown> {
        signal: AbortSignal;
        options: FetcherFetchOptions<K, V, FC>;
        /**
         * Object provided in the {@link FetchOptions.context} option to
         * {@link LRUCache#fetch}
         */
        context: FC;
    }
    /**
     * Status object that may be passed to {@link LRUCache#fetch},
     * {@link LRUCache#get}, {@link LRUCache#set}, and {@link LRUCache#has}.
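     *
     * A minimal sketch (not from the upstream docs) of passing a status
     * object to observe what an operation did; the key and value here are
     * illustrative:
     *
     * ```ts
     * const cache = new LRUCache<string, number>({ max: 10 })
     * cache.set('a', 1)
     * const status: LRUCache.Status<number> = {}
     * cache.get('a', { status })
     * // status.get is 'hit'; TTL- and size-related fields are only filled
     * // in when those features are enabled on the cache.
     * ```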
     */
    interface Status<V> {
        /**
         * The status of a set() operation.
         *
         * - add: the item was not found in the cache, and was added
         * - update: the item was in the cache, with the same value provided
         * - replace: the item was in the cache, and replaced
         * - miss: the item was not added to the cache for some reason
         */
        set?: 'add' | 'update' | 'replace' | 'miss';
        /**
         * the ttl stored for the item, or undefined if ttls are not used.
         */
        ttl?: Milliseconds;
        /**
         * the start time for the item, or undefined if ttls are not used.
         */
        start?: Milliseconds;
        /**
         * The timestamp used for TTL calculation
         */
        now?: Milliseconds;
        /**
         * the remaining ttl for the item, or undefined if ttls are not used.
         */
        remainingTTL?: Milliseconds;
        /**
         * The calculated size for the item, if sizes are used.
         */
        entrySize?: Size;
        /**
         * The total calculated size of the cache, if sizes are used.
         */
        totalCalculatedSize?: Size;
        /**
         * A flag indicating that the item was not stored, due to exceeding the
         * {@link OptionsBase.maxEntrySize}
         */
        maxEntrySizeExceeded?: true;
        /**
         * The old value, specified in the case of `set:'update'` or
         * `set:'replace'`
         */
        oldValue?: V;
        /**
         * The results of a {@link LRUCache#has} operation
         *
         * - hit: the item was found in the cache
         * - stale: the item was found in the cache, but is stale
         * - miss: the item was not found in the cache
         */
        has?: 'hit' | 'stale' | 'miss';
        /**
         * The status of a {@link LRUCache#fetch} operation.
         * Note that this can change as the underlying fetch() moves through
         * various states.
         *
         * - inflight: there is another fetch() for this key which is in process
         * - get: there is no fetchMethod, so {@link LRUCache#get} was called.
         * - miss: the item is not in cache, and will be fetched.
         * - hit: the item is in the cache, and was resolved immediately.
         * - stale: the item is in the cache, but stale.
         * - refresh: the item is in the cache, and not stale, but
         *   {@link FetchOptions.forceRefresh} was specified.
         */
        fetch?: 'get' | 'inflight' | 'miss' | 'hit' | 'stale' | 'refresh';
        /**
         * The {@link OptionsBase.fetchMethod} was called
         */
        fetchDispatched?: true;
        /**
         * The cached value was updated after a successful call to
         * {@link OptionsBase.fetchMethod}
         */
        fetchUpdated?: true;
        /**
         * The reason for a fetch() rejection.  Either the error raised by the
         * {@link OptionsBase.fetchMethod}, or the reason for an
         * AbortSignal.
         */
        fetchError?: Error;
        /**
         * The fetch received an abort signal
         */
        fetchAborted?: true;
        /**
         * The abort signal received was ignored, and the fetch was allowed to
         * continue.
         */
        fetchAbortIgnored?: true;
        /**
         * The fetchMethod promise resolved successfully
         */
        fetchResolved?: true;
        /**
         * The fetchMethod promise was rejected
         */
        fetchRejected?: true;
        /**
         * The status of a {@link LRUCache#get} operation.
         *
         * - fetching: The item is currently being fetched.  If a previous value
         *   is present and allowed, that will be returned.
         * - stale: The item is in the cache, and is stale.
         * - hit: the item is in the cache
         * - miss: the item is not in the cache
         */
        get?: 'stale' | 'hit' | 'miss';
        /**
         * A fetch or get operation returned a stale value.
         */
        returnedStale?: true;
    }
    /**
     * options which override the options set in the LRUCache constructor
     * when calling {@link LRUCache#fetch}.
     *
     * This is the union of {@link GetOptions} and {@link SetOptions}, plus
     * {@link OptionsBase.noDeleteOnFetchRejection},
     * {@link OptionsBase.allowStaleOnFetchRejection},
     * {@link FetchOptions.forceRefresh}, and
     * {@link FetcherOptions.context}
     *
     * Any of these may be modified in the {@link OptionsBase.fetchMethod}
     * function, but the {@link GetOptions} fields will of course have no
     * effect, as the {@link LRUCache#get} call already happened by the time
     * the fetchMethod is called.
     */
    interface FetcherFetchOptions<K, V, FC = unknown> extends Pick<OptionsBase<K, V, FC>, 'allowStale' | 'updateAgeOnGet' | 'noDeleteOnStaleGet' | 'sizeCalculation' | 'ttl' | 'noDisposeOnSet' | 'noUpdateTTL' | 'noDeleteOnFetchRejection' | 'allowStaleOnFetchRejection' | 'ignoreFetchAbort' | 'allowStaleOnFetchAbort'> {
        status?: Status<V>;
        size?: Size;
    }
    /**
     * Options that may be passed to the {@link LRUCache#fetch} method.
     */
    interface FetchOptions<K, V, FC> extends FetcherFetchOptions<K, V, FC> {
        /**
         * Set to true to force a re-load of the existing data, even if it
         * is not yet stale.
         */
        forceRefresh?: boolean;
        /**
         * Context provided to the {@link OptionsBase.fetchMethod} as
         * the {@link FetcherOptions.context} param.
         *
         * If the FC type is specified as unknown (the default),
         * undefined or void, then this is optional.  Otherwise, it will
         * be required.
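         *
         * A sketch (not from the upstream docs) of a concrete FC type making
         * the context required; the context shape is illustrative:
         *
         * ```ts
         * const cache = new LRUCache<string, string, { user: string }>({
         *   max: 10,
         *   fetchMethod: async (key, staleValue, { context }) =>
         *     `${key} for ${context.user}`,
         * })
         * // context is required here because FC is not unknown/undefined/void
         * await cache.fetch('greeting', { context: { user: 'alice' } })
         * ```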
         */
        context?: FC;
        signal?: AbortSignal;
        status?: Status<V>;
    }
    /**
     * Options provided to {@link LRUCache#fetch} when the FC type is something
     * other than `unknown`, `undefined`, or `void`
     */
    interface FetchOptionsWithContext<K, V, FC> extends FetchOptions<K, V, FC> {
        context: FC;
    }
    /**
     * Options provided to {@link LRUCache#fetch} when the FC type is
     * `undefined` or `void`
     */
    interface FetchOptionsNoContext<K, V> extends FetchOptions<K, V, undefined> {
        context?: undefined;
    }
    /**
     * Options that may be passed to the {@link LRUCache#has} method.
     */
    interface HasOptions<K, V, FC> extends Pick<OptionsBase<K, V, FC>, 'updateAgeOnHas'> {
        status?: Status<V>;
    }
    /**
     * Options that may be passed to the {@link LRUCache#get} method.
     */
    interface GetOptions<K, V, FC> extends Pick<OptionsBase<K, V, FC>, 'allowStale' | 'updateAgeOnGet' | 'noDeleteOnStaleGet'> {
        status?: Status<V>;
    }
    /**
     * Options that may be passed to the {@link LRUCache#peek} method.
     */
    interface PeekOptions<K, V, FC> extends Pick<OptionsBase<K, V, FC>, 'allowStale'> {
    }
    /**
     * Options that may be passed to the {@link LRUCache#set} method.
     */
    interface SetOptions<K, V, FC> extends Pick<OptionsBase<K, V, FC>, 'sizeCalculation' | 'ttl' | 'noDisposeOnSet' | 'noUpdateTTL'> {
        /**
         * If size tracking is enabled, then setting an explicit size
         * in the {@link LRUCache#set} call will prevent calling the
         * {@link OptionsBase.sizeCalculation} function.
         */
        size?: Size;
        /**
         * If TTL tracking is enabled, then setting an explicit start
         * time in the {@link LRUCache#set} call will override the
         * default time from `performance.now()` or `Date.now()`.
         *
         * Note that it must be a valid value for whichever time-tracking
         * method is in use.
         */
        start?: Milliseconds;
        status?: Status<V>;
    }
    /**
     * The type signature for the {@link OptionsBase.fetchMethod} option.
     */
    type Fetcher<K, V, FC = unknown> = (key: K, staleValue: V | undefined, options: FetcherOptions<K, V, FC>) => Promise<V | undefined | void> | V | undefined | void;
    /**
     * Options which may be passed to the {@link LRUCache} constructor.
     *
     * Most of these may be overridden in the various options that use
     * them.
     *
     * Despite all being technically optional, the constructor requires that
     * a cache is at minimum limited by one or more of {@link OptionsBase.max},
     * {@link OptionsBase.ttl}, or {@link OptionsBase.maxSize}.
     *
     * If {@link OptionsBase.ttl} is used alone, then it is strongly advised
     * (and in fact required by the type definitions here) that the cache
     * also set {@link OptionsBase.ttlAutopurge}, to prevent potentially
     * unbounded storage.
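     *
     * A minimal sketch (not part of the upstream docs) of the three safe ways
     * to bound a cache; the limits and value types are illustrative, and a
     * named `LRUCache` import is assumed to match this module's export:
     *
     * ```ts
     * // Bounded by entry count:
     * const byCount = new LRUCache<string, string>({ max: 500 })
     *
     * // Bounded by TTL; ttlAutopurge is required when ttl is the only limit:
     * const byTTL = new LRUCache<string, string>({
     *   ttl: 60_000,
     *   ttlAutopurge: true,
     * })
     *
     * // Bounded by total calculated size:
     * const bySize = new LRUCache<string, string>({
     *   maxSize: 5000,
     *   sizeCalculation: value => value.length,
     * })
     * ```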
     */
    interface OptionsBase<K, V, FC> {
        /**
         * The maximum number of items to store in the cache before evicting
         * old entries. This is read-only on the {@link LRUCache} instance,
         * and may not be overridden.
         *
         * If set, then storage space will be pre-allocated at construction
         * time, and the cache will perform significantly faster.
         *
         * Note that significantly fewer items may be stored, if
         * {@link OptionsBase.maxSize} and/or {@link OptionsBase.ttl} are also
         * set.
         */
        max?: Count;
        /**
         * Max time in milliseconds for items to live in cache before they are
         * considered stale.  Note that stale items are NOT preemptively removed
         * by default, and MAY live in the cache long after they have expired.
         *
         * Also, as this cache is optimized for LRU/MRU operations, some of
         * the staleness/TTL checks will reduce performance, as they will incur
         * overhead by deleting items.
         *
         * Must be an integer number of ms. If set to 0, this indicates "no TTL"
         *
         * @default 0
         */
        ttl?: Milliseconds;
        /**
         * Minimum amount of time in ms between re-reads of the current time
         * when checking for staleness.
         * Defaults to 1, which means that the current time is checked
         * at most once per millisecond.
         *
         * Set to 0 to check the current time every time staleness is tested.
         * (This reduces performance, and is theoretically unnecessary.)
         *
         * Setting this to a higher value will improve performance somewhat
         * while using ttl tracking, albeit at the expense of keeping stale
         * items around a bit longer than their TTLs would indicate.
         *
         * @default 1
         */
        ttlResolution?: Milliseconds;
        /**
         * Preemptively remove stale items from the cache.
         * Note that this may significantly degrade performance,
         * especially if the cache is storing a large number of items.
         * It is almost always best to just leave the stale items in
         * the cache, and let them fall out as new items are added.
         *
         * Note that this means that {@link OptionsBase.allowStale} is a bit
         * pointless, as stale items will be deleted almost as soon as they
         * expire.
         *
         * @default false
         */
        ttlAutopurge?: boolean;
        /**
         * Update the age of items on {@link LRUCache#get}, renewing their TTL
         *
         * Has no effect if {@link OptionsBase.ttl} is not set.
         *
         * @default false
         */
        updateAgeOnGet?: boolean;
        /**
         * Update the age of items on {@link LRUCache#has}, renewing their TTL
         *
         * Has no effect if {@link OptionsBase.ttl} is not set.
         *
         * @default false
         */
        updateAgeOnHas?: boolean;
        /**
         * Allow {@link LRUCache#get} and {@link LRUCache#fetch} calls to return
         * stale data, if available.
         */
        allowStale?: boolean;
        /**
         * Function that is called on items when they are dropped from the cache.
         * This can be handy if you want to close file descriptors or do other
         * cleanup tasks when items are no longer accessible. Called with
         * `value, key, reason`, matching the {@link Disposer} signature.  It's
         * called before actually removing the item from the
         * internal cache, so it is *NOT* safe to re-add them.
         *
         * Use {@link OptionsBase.disposeAfter} if you wish to dispose items after
         * they have been fully removed, when it is safe to add them back to the
         * cache.
         */
        dispose?: Disposer<K, V>;
        /**
         * The same as {@link OptionsBase.dispose}, but called *after* the entry
         * is completely removed and the cache is once again in a clean state.
         * It is safe to add an item right back into the cache at this point.
         * However, note that it is *very* easy to inadvertently create infinite
         * recursion this way.
         */
        disposeAfter?: Disposer<K, V>;
        /**
         * Set to true to suppress calling the
         * {@link OptionsBase.dispose} function if the entry key is
         * still accessible within the cache.
         * This may be overridden by passing an options object to
         * {@link LRUCache#set}.
         */
        noDisposeOnSet?: boolean;
        /**
         * Boolean flag to tell the cache to not update the TTL when
         * setting a new value for an existing key (ie, when updating a value
         * rather than inserting a new value).  Note that the TTL value is
         * _always_ set (if provided) when adding a new entry into the cache.
         *
         * Has no effect if a {@link OptionsBase.ttl} is not set.
         */
        noUpdateTTL?: boolean;
        /**
         * If you wish to track item size, you must provide a maxSize.
         * Note that we will still only keep up to `max` *actual items*
         * if `max` is set, so size tracking may cause fewer than `max` items
         * to be stored.  At the extreme, a single item of maxSize size
         * will cause everything else in the cache to be dropped when it
         * is added.  Use with caution!
         *
         * Note also that size tracking can negatively impact performance,
         * though for most cases, only minimally.
         */
        maxSize?: Size;
        /**
         * The maximum allowed size for any single item in the cache.
         *
         * If a larger item is passed to {@link LRUCache#set} or returned by a
         * {@link OptionsBase.fetchMethod}, then it will not be stored in the
         * cache.
         */
        maxEntrySize?: Size;
        /**
         * A function that returns a number indicating the item's size.
         *
         * If not provided, and {@link OptionsBase.maxSize} or
         * {@link OptionsBase.maxEntrySize} are set, then all
         * {@link LRUCache#set} calls **must** provide an explicit
         * {@link SetOptions.size} or sizeCalculation param.
         */
        sizeCalculation?: SizeCalculator<K, V>;
        /**
         * Method that provides the implementation for {@link LRUCache#fetch}
         */
        fetchMethod?: Fetcher<K, V, FC>;
        /**
         * Set to true to suppress the deletion of stale data when a
         * {@link OptionsBase.fetchMethod} returns a rejected promise.
         */
        noDeleteOnFetchRejection?: boolean;
        /**
         * Do not delete stale items when they are retrieved with
         * {@link LRUCache#get}.
         *
         * Note that the `get` return value will still be `undefined`
         * unless {@link OptionsBase.allowStale} is true.
         */
        noDeleteOnStaleGet?: boolean;
        /**
         * Set to true to allow returning stale data when a
         * {@link OptionsBase.fetchMethod} throws an error or returns a rejected
         * promise.
         *
         * This differs from using {@link OptionsBase.allowStale} in that stale
         * data will ONLY be returned in the case that the
         * {@link LRUCache#fetch} fails, not any other times.
         */
        allowStaleOnFetchRejection?: boolean;
        /**
         * Set to true to return a stale value from the cache when the
         * `AbortSignal` passed to the {@link OptionsBase.fetchMethod} dispatches an `'abort'`
         * event, whether user-triggered, or due to internal cache behavior.
         *
         * Unless {@link OptionsBase.ignoreFetchAbort} is also set, the underlying
         * {@link OptionsBase.fetchMethod} will still be considered canceled, and
         * any value it returns will be ignored and not cached.
         *
         * Caveat: since fetches are aborted when a new value is explicitly
         * set in the cache, this can lead to fetch returning a stale value,
         * since that was the fallback value _at the moment the `fetch()` was
         * initiated_, even though the new updated value is now present in
         * the cache.
         *
         * For example:
         *
         * ```ts
         * const cache = new LRUCache<string, any>({
         *   ttl: 100,
         *   fetchMethod: async (url, oldValue, { signal }) =>  {
         *     const res = await fetch(url, { signal })
         *     return await res.json()
         *   }
         * })
         * cache.set('https://example.com/', { some: 'data' })
         * // 100ms go by...
         * const result = cache.fetch('https://example.com/')
         * cache.set('https://example.com/', { other: 'thing' })
         * console.log(await result) // { some: 'data' }
         * console.log(cache.get('https://example.com/')) // { other: 'thing' }
         * ```
         */
        allowStaleOnFetchAbort?: boolean;
        /**
         * Set to true to ignore the `abort` event emitted by the `AbortSignal`
         * object passed to {@link OptionsBase.fetchMethod}, and still cache the
         * resulting resolution value, as long as it is not `undefined`.
         *
         * When used on its own, this means aborted {@link LRUCache#fetch} calls are not
         * immediately resolved or rejected when they are aborted, and instead
         * take the full time to await.
         *
         * When used with {@link OptionsBase.allowStaleOnFetchAbort}, aborted
         * {@link LRUCache#fetch} calls will resolve immediately to their stale
         * cached value or `undefined`, and will continue to process and eventually
         * update the cache when they resolve, as long as the resulting value is
         * not `undefined`, thus supporting a "return stale on timeout while
         * refreshing" mechanism by passing `AbortSignal.timeout(n)` as the signal.
         *
         * **Note**: regardless of this setting, an `abort` event _is still
         * emitted on the `AbortSignal` object_, so may result in invalid results
         * when passed to other underlying APIs that use AbortSignals.
         *
         * This may be overridden in the {@link OptionsBase.fetchMethod} or the
         * call to {@link LRUCache#fetch}.
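         *
         * A sketch (not from the upstream docs) of that "return stale on
         * timeout while refreshing" pattern; the URL, key, ttl, and 50ms
         * timeout are illustrative:
         *
         * ```ts
         * const cache = new LRUCache<string, any>({
         *   max: 100,
         *   ttl: 100,
         *   ignoreFetchAbort: true,
         *   allowStaleOnFetchAbort: true,
         *   fetchMethod: async (key, staleValue, { signal }) => {
         *     const res = await fetch(`https://example.com/${key}`, { signal })
         *     return await res.json()
         *   },
         * })
         * // If a stale value is cached, resolve with it after 50ms while the
         * // fetchMethod keeps running and eventually refreshes the cache.
         * const v = await cache.fetch('resource', {
         *   signal: AbortSignal.timeout(50),
         * })
         * ```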
         */
        ignoreFetchAbort?: boolean;
    }
    interface OptionsMaxLimit<K, V, FC> extends OptionsBase<K, V, FC> {
        max: Count;
    }
    interface OptionsTTLLimit<K, V, FC> extends OptionsBase<K, V, FC> {
        ttl: Milliseconds;
        ttlAutopurge: boolean;
    }
    interface OptionsSizeLimit<K, V, FC> extends OptionsBase<K, V, FC> {
        maxSize: Size;
    }
    /**
     * The valid safe options for the {@link LRUCache} constructor
     */
    type Options<K, V, FC> = OptionsMaxLimit<K, V, FC> | OptionsSizeLimit<K, V, FC> | OptionsTTLLimit<K, V, FC>;
    /**
     * Entry objects used by {@link LRUCache#load} and {@link LRUCache#dump},
     * and returned by {@link LRUCache#info}.
     */
    interface Entry<V> {
        value: V;
        ttl?: Milliseconds;
        size?: Size;
        start?: Milliseconds;
    }
}
/**
 * The main export, the thing you're using this module to get.
 *
 * All properties from the options object (with the exception of
 * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as
 * normal public members. (`max` and `maxSize` are read-only getters.)
 * Changing any of these will alter the defaults for subsequent method calls,
 * but is otherwise safe.
 */
export declare class LRUCache<K extends {}, V extends {}, FC = unknown> implements Map<K, V> {
    #private;
    /**
     * {@link LRUCache.OptionsBase.ttl}
     */
    ttl: LRUCache.Milliseconds;
    /**
     * {@link LRUCache.OptionsBase.ttlResolution}
     */
    ttlResolution: LRUCache.Milliseconds;
    /**
     * {@link LRUCache.OptionsBase.ttlAutopurge}
     */
    ttlAutopurge: boolean;
    /**
     * {@link LRUCache.OptionsBase.updateAgeOnGet}
     */
    updateAgeOnGet: boolean;
    /**
     * {@link LRUCache.OptionsBase.updateAgeOnHas}
     */
    updateAgeOnHas: boolean;
    /**
     * {@link LRUCache.OptionsBase.allowStale}
     */
    allowStale: boolean;
    /**
     * {@link LRUCache.OptionsBase.noDisposeOnSet}
     */
    noDisposeOnSet: boolean;
    /**
     * {@link LRUCache.OptionsBase.noUpdateTTL}
     */
    noUpdateTTL: boolean;
    /**
     * {@link LRUCache.OptionsBase.maxEntrySize}
     */
    maxEntrySize: LRUCache.Size;
    /**
     * {@link LRUCache.OptionsBase.sizeCalculation}
     */
    sizeCalculation?: LRUCache.SizeCalculator<K, V>;
    /**
     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
     */
    noDeleteOnFetchRejection: boolean;
    /**
     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
     */
    noDeleteOnStaleGet: boolean;
    /**
     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
     */
    allowStaleOnFetchAbort: boolean;
    /**
     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
     */
    allowStaleOnFetchRejection: boolean;
    /**
     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
     */
    ignoreFetchAbort: boolean;
    /**
     * Do not call this method unless you need to inspect the
     * inner workings of the cache.  If anything returned by this
     * object is modified in any way, strange breakage may occur.
     *
     * These fields are private for a reason!
     *
     * @internal
     */
    static unsafeExposeInternals<K extends {}, V extends {}, FC extends unknown = unknown>(c: LRUCache<K, V, FC>): {
        starts: ZeroArray | undefined;
        ttls: ZeroArray | undefined;
        sizes: ZeroArray | undefined;
        keyMap: Map<K, number>;
        keyList: (K | undefined)[];
        valList: (V | BackgroundFetch<V> | undefined)[];
        next: NumberArray;
        prev: NumberArray;
        readonly head: Index;
        readonly tail: Index;
        free: StackLike;
        isBackgroundFetch: (p: any) => boolean;
        backgroundFetch: (k: K, index: number | undefined, options: LRUCache.FetchOptions<K, V, FC>, context: any) => BackgroundFetch<V>;
        moveToTail: (index: number) => void;
        indexes: (options?: {
            allowStale: boolean;
        }) => Generator<Index, void, unknown>;
        rindexes: (options?: {
            allowStale: boolean;
        }) => Generator<Index, void, unknown>;
        isStale: (index: number | undefined) => boolean;
    };
    /**
     * {@link LRUCache.OptionsBase.max} (read-only)
     */
    get max(): LRUCache.Count;
    /**
     * {@link LRUCache.OptionsBase.maxSize} (read-only)
     */
    get maxSize(): LRUCache.Count;
    /**
     * The total computed size of items in the cache (read-only)
     */
    get calculatedSize(): LRUCache.Size;
    /**
     * The number of items stored in the cache (read-only)
     */
    get size(): LRUCache.Count;
    /**
     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
     */
    get fetchMethod(): LRUCache.Fetcher<K, V, FC> | undefined;
    /**
     * {@link LRUCache.OptionsBase.dispose} (read-only)
     */
    get dispose(): LRUCache.Disposer<K, V> | undefined;
    /**
     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
     */
    get disposeAfter(): LRUCache.Disposer<K, V> | undefined;
    constructor(options: LRUCache.Options<K, V, FC> | LRUCache<K, V, FC>);
    /**
     * Return the remaining TTL time for a given entry key
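     *
     * A sketch (not from the upstream docs), assuming a cache constructed
     * with a ttl; the numbers are illustrative:
     *
     * ```ts
     * const cache = new LRUCache<string, number>({ max: 10, ttl: 1000 })
     * cache.set('a', 1)
     * const msLeft = cache.getRemainingTTL('a')
     * // roughly 1000 right after the set, counting down over time
     * ```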
     */
    getRemainingTTL(key: K): number;
    /**
     * Return a generator yielding `[key, value]` pairs,
     * in order from most recently used to least recently used.
     */
    entries(): Generator<[K, V], void, unknown>;
    /**
     * Inverse order version of {@link LRUCache.entries}
     *
     * Return a generator yielding `[key, value]` pairs,
     * in order from least recently used to most recently used.
     */
    rentries(): Generator<(K | V | BackgroundFetch<V> | undefined)[], void, unknown>;
    /**
     * Return a generator yielding the keys in the cache,
     * in order from most recently used to least recently used.
     */
    keys(): Generator<K, void, unknown>;
    /**
     * Inverse order version of {@link LRUCache.keys}
     *
     * Return a generator yielding the keys in the cache,
     * in order from least recently used to most recently used.
     */
    rkeys(): Generator<K, void, unknown>;
    /**
     * Return a generator yielding the values in the cache,
     * in order from most recently used to least recently used.
     */
    values(): Generator<V, void, unknown>;
    /**
     * Inverse order version of {@link LRUCache.values}
     *
     * Return a generator yielding the values in the cache,
     * in order from least recently used to most recently used.
     */
    rvalues(): Generator<V | BackgroundFetch<V> | undefined, void, unknown>;
    /**
     * Iterating over the cache itself yields the same results as
     * {@link LRUCache.entries}
     */
    [Symbol.iterator](): Generator<[K, V], void, unknown>;
    /**
     * A String value that is used in the creation of the default string description of an object.
     * Called by the built-in method Object.prototype.toString.
     */
    [Symbol.toStringTag]: string;
    /**
     * Find a value for which the supplied fn method returns a truthy value,
     * similar to Array.find().  fn is called as fn(value, key, cache).
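     *
     * A sketch (not from the upstream docs); the keys and values are
     * illustrative:
     *
     * ```ts
     * const cache = new LRUCache<string, { id: number }>({ max: 10 })
     * cache.set('a', { id: 1 })
     * cache.set('b', { id: 2 })
     * const found = cache.find(value => value.id === 2) // { id: 2 }
     * ```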
     */
    find(fn: (v: V, k: K, self: LRUCache<K, V, FC>) => boolean, getOptions?: LRUCache.GetOptions<K, V, FC>): V | undefined;
    /**
     * Call the supplied function on each item in the cache, in order from
     * most recently used to least recently used.  fn is called as
     * fn(value, key, cache).  Does not update age or recency of use.
     * Does not iterate over stale values.
     */
    forEach(fn: (v: V, k: K, self: LRUCache<K, V, FC>) => any, thisp?: any): void;
    /**
     * The same as {@link LRUCache.forEach} but items are iterated over in
     * reverse order.  (ie, less recently used items are iterated over first.)
     */
    rforEach(fn: (v: V, k: K, self: LRUCache<K, V, FC>) => any, thisp?: any): void;
    /**
     * Delete any stale entries. Returns true if anything was removed,
     * false otherwise.
     */
    purgeStale(): boolean;
    /**
     * Get the extended info about a given entry, to get its value, size, and
     * TTL info simultaneously. Like {@link LRUCache#dump}, but just for a
     * single key. Always returns stale values, if their info is found in the
     * cache, so be sure to check for expired TTLs if relevant.
     */
    info(key: K): LRUCache.Entry<V> | undefined;
    /**
     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
     * passed to cache.load()
     */
    dump(): [K, LRUCache.Entry<V>][];
    /**
     * Reset the cache and load the provided entries into it in the order listed.
     * Note that the shape of the resulting cache may be different if the
     * same options are not used in both caches.
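     *
     * A sketch (not from the upstream docs) of a dump/load round trip; the
     * keys and values are illustrative:
     *
     * ```ts
     * const src = new LRUCache<string, number>({ max: 10 })
     * src.set('a', 1)
     * src.set('b', 2)
     *
     * const copy = new LRUCache<string, number>({ max: 10 })
     * copy.load(src.dump())
     * copy.get('b') // 2
     * ```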
     */
    load(arr: [K, LRUCache.Entry<V>][]): void;
    /**
     * Add a value to the cache.
     *
     * Note: if `undefined` is specified as a value, this is an alias for
     * {@link LRUCache#delete}
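     *
     * A sketch (not from the upstream docs); the keys, values, and TTLs are
     * illustrative:
     *
     * ```ts
     * const cache = new LRUCache<string, number>({ max: 10, ttl: 1000 })
     * cache.set('a', 1)                 // add
     * cache.set('a', 2, { ttl: 5000 })  // replace, with a per-entry TTL
     * cache.set('a', undefined)         // alias for cache.delete('a')
     * ```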
     */
    set(k: K, v: V | BackgroundFetch<V> | undefined, setOptions?: LRUCache.SetOptions<K, V, FC>): this;
    /**
     * Evict the least recently used item, returning its value or
     * `undefined` if the cache is empty.
     */
    pop(): V | undefined;
    /**
     * Check if a key is in the cache, without updating the recency of use.
     * Will return false if the item is stale, even though it is technically
     * in the cache.
     *
     * Will not update item age unless
     * {@link LRUCache.OptionsBase.updateAgeOnHas} is set.
     */
    has(k: K, hasOptions?: LRUCache.HasOptions<K, V, FC>): boolean;
    /**
     * Like {@link LRUCache#get} but doesn't update recency or delete stale
     * items.
     *
     * Returns `undefined` if the item is stale, unless
     * {@link LRUCache.OptionsBase.allowStale} is set.
     */
    peek(k: K, peekOptions?: LRUCache.PeekOptions<K, V, FC>): V | undefined;
    /**
     * Make an asynchronous cached fetch using the
     * {@link LRUCache.OptionsBase.fetchMethod} function.
     *
     * If multiple fetches for the same key are issued, then they will all be
     * coalesced into a single call to fetchMethod.
     *
     * Note that this means that handling options such as
     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort},
     * {@link LRUCache.FetchOptions.signal},
     * and {@link LRUCache.OptionsBase.allowStaleOnFetchRejection} will be
     * determined by the FIRST fetch() call for a given key.
     *
     * This is a known (fixable) shortcoming which will be addressed when
     * someone complains about it, as the fix would involve added complexity and
     * may not be worth the costs for this edge case.
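     *
     * A sketch (not from the upstream docs) of the coalescing behavior; the
     * key and fetchMethod are illustrative:
     *
     * ```ts
     * let calls = 0
     * const cache = new LRUCache<string, number>({
     *   max: 10,
     *   fetchMethod: async key => {
     *     calls++
     *     return key.length
     *   },
     * })
     * const [a, b] = await Promise.all([
     *   cache.fetch('abc'),
     *   cache.fetch('abc'),
     * ])
     * // a === b === 3, and calls === 1: the second fetch() joined the first.
     * ```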
     */
    fetch(k: K, fetchOptions: unknown extends FC ? LRUCache.FetchOptions<K, V, FC> : FC extends undefined | void ? LRUCache.FetchOptionsNoContext<K, V> : LRUCache.FetchOptionsWithContext<K, V, FC>): Promise<undefined | V>;
    fetch(k: unknown extends FC ? K : FC extends undefined | void ? K : never, fetchOptions?: unknown extends FC ? LRUCache.FetchOptions<K, V, FC> : FC extends undefined | void ? LRUCache.FetchOptionsNoContext<K, V> : never): Promise<undefined | V>;
    /**
     * Return a value from the cache. Will update the recency of the cache
     * entry found.
     *
     * If the key is not found, get() will return `undefined`.
     */
    get(k: K, getOptions?: LRUCache.GetOptions<K, V, FC>): V | undefined;
    /**
     * Deletes a key out of the cache.
     * Returns true if the key was deleted, false otherwise.
     */
    delete(k: K): boolean;
    /**
     * Clear the cache entirely, throwing away all values.
     */
    clear(): void;
}
//# sourceMappingURL=index.d.ts.map