* changes: ApplyCommand: fix "no newline at end" detection ApplyCommand: handle completely empty context lines in text patches ApplyCommand: use byte arrays for text patches, not strings ApplyCommand: support binary patches ApplyCommand: add a stream to apply a delta patch ApplyCommand: add streams to read/write binary patch hunks ApplyCommand: add a base-85 codec ApplyCommand: convert to git internal format before applying patchtags/v5.12.0.202105261145-m3
#Sat Dec 20 21:21:24 CET 2008 | |||||
eclipse.preferences.version=1 | eclipse.preferences.version=1 | ||||
encoding//tst-rsrc/org/eclipse/jgit/diff/umlaut.patch=ISO-8859-1 | |||||
encoding//tst-rsrc/org/eclipse/jgit/diff/umlaut_PostImage=ISO-8859-1 | |||||
encoding//tst-rsrc/org/eclipse/jgit/patch/testGetText_BothISO88591.patch=ISO-8859-1 | encoding//tst-rsrc/org/eclipse/jgit/patch/testGetText_BothISO88591.patch=ISO-8859-1 | ||||
encoding//tst-rsrc/org/eclipse/jgit/patch/testGetText_Convert.patch=ISO-8859-1 | encoding//tst-rsrc/org/eclipse/jgit/patch/testGetText_Convert.patch=ISO-8859-1 | ||||
encoding//tst-rsrc/org/eclipse/jgit/patch/testGetText_DiffCc.patch=ISO-8859-1 | encoding//tst-rsrc/org/eclipse/jgit/patch/testGetText_DiffCc.patch=ISO-8859-1 |
*.patch -crlf | *.patch -crlf | ||||
*Image -crlf | *Image -crlf | ||||
*.out -crlf | *.out -crlf | ||||
delta* -text | |||||
literal* -text |
diff --git a/crlf b/crlf | |||||
index 9206ee6..95dd193 100644 | |||||
--- a/crlf | |||||
+++ b/crlf | |||||
@@ -1,3 +1,3 @@ | |||||
foo | |||||
-fie | |||||
+bar | |||||
fum |
diff --git a/crlf2 b/crlf2 | |||||
index 05c1c78..91e246d 100644 | |||||
--- a/crlf2 | |||||
+++ b/crlf2 | |||||
@@ -1,3 +1,3 @@ | |||||
foo | |||||
-fie | |||||
+bar | |||||
fum |
foo | |||||
bar | |||||
fum |
foo | |||||
fie | |||||
fum |
diff --git a/crlf3 b/crlf3 | |||||
index e69de29..9206ee6 100644 | |||||
--- a/crlf3 | |||||
+++ b/crlf3 | |||||
@@ -0,0 +1,3 @@ | |||||
+foo | |||||
+fie | |||||
+fum |
foo | |||||
fie | |||||
fum |
diff --git a/crlf4 b/crlf4 | |||||
new file mode 100644 | |||||
index 0000000..9206ee6 | |||||
--- /dev/null | |||||
+++ b/crlf4 | |||||
@@ -0,0 +1,3 @@ | |||||
+foo | |||||
+fie | |||||
+fum |
foo | |||||
fie | |||||
fum |
foo | |||||
bar | |||||
fum |
foo | |||||
fie | |||||
fum |
diff --git a/delta b/delta | |||||
index b4527005bf9e4da2dd1d7185b51bdac623a4218f..8b370bb5f2bc3261b6b62e80d6edd784a61ec225 100644 | |||||
GIT binary patch | |||||
delta 14 | |||||
ScmZp0Xmwa1z*+$U3j_csN(Dmz | |||||
delta 12 | |||||
TcmZp5XmD5{u!xa=5hEi28?FP4 |
diff --git a/emptyLine b/emptyLine | |||||
index 1fd3fa2..45c2c9b 100644 | |||||
--- a/emptyLine | |||||
+++ b/emptyLine | |||||
@@ -1,4 +1,4 @@ | |||||
foo | |||||
-fie | |||||
+bar | |||||
fum |
foo | |||||
bar | |||||
fum |
foo | |||||
fie | |||||
fum |
diff --git a/hello b/hello | |||||
index b6fc4c6..0abaeaa 100644 | |||||
--- a/hello | |||||
+++ b/hello | |||||
@@ -1 +1 @@ | |||||
-hello | |||||
\ No newline at end of file | |||||
+bye | |||||
\ No newline at end of file | |||||
diff --git a/yello b/yello | |||||
index 391a8cb..d1ed081 100644 | |||||
--- a/yello | |||||
+++ b/yello | |||||
@@ -1 +1 @@ | |||||
-yello | |||||
+yellow |
bye |
hello |
diff --git a/literal b/literal | |||||
index 799df8578e3cae8a5e979182391b8e9a6a3deded..74e4201af6378ac87bf9e118fbab6553c7fd355c 100644 | |||||
GIT binary patch | |||||
literal 5389 | |||||
zcmc&&X;f25x2=0gFa(g;$fO8C5fl|r2$Mhpf-;XXh+qO@qwRo*3aF3^Peq(ZoX{Y4 | |||||
zI{-=?P(d+4RGeCZATuQ301AQ)VKCt)uP*jm>#bhz`}L-^YNgIORkhFFyDIsSoS=YZ | |||||
zn$&4j0Dz{qmq!QyC=5|RQGrv{71I}RA|<%_y8&?Pi28Ue8RkfG$TD|u^R3|*xU(zB | |||||
zZ@K5P&3+xejCMcUc0Y!I$?Wj6>+rQxa)B}|OzgvWV`Bp=t=@L+unvcheY-FGWVHFi | |||||
zl+hUAG;tQLGGOrYEIbZ_wx8)lP#?yGs}Q8a$4&`AtDjvvjQwm{y=_{&ObKM)zwukV | |||||
z?ApAEov=%*pABr(4!6O;+fK5~27)yC*}z_K8|2pNXFEZO(FtixfQbDV^<ft6u(gj} | |||||
zSC|82*zP^S3I<T1uU)sNCEWAL&!)}Kw$+yaA^88K>TUBe=x-IU#ZN;Z$u`KA0EhnA | |||||
zLo!6&-=5&_Z6}_v3(9Hrv4vE{zR(;7T<!3NRBZ_#yzScjq09-#h*cXn+I(!G@U~z_ | |||||
z!ZgAPc<`rzP%@*_*SXEl9x^9fBJPDD4z!0H2ur<f+C6N<kOR@qjwlkhU?&cMscqL% | |||||
zhyS<&`_{mT?h_Y5koRY_`V)#E!ww(Ew!gJNMTF1oK6YY%A|N<J!zVCdQQ~h0sY?7N | |||||
zY=lg_C-8mjpg@QKuSpVM*8y8Y5PwM0iU^=@KG0`aBwTIrA}$L;br3g26x*Pj3By~w | |||||
ztf4<$z7B*6Uwg3+lnh_m&u#GW5DpN`f1V|*n|KcdNr+6a?SvdAx`_RujSP6CjnE6< | |||||
z47^st95Hl^IBu6bXFLc#{o$=40ioc^rK_ah|3Fo|$Tx=bzuWUJx)*5{-%L%}w*Bel | |||||
zf`iWvXdK25A&Ten@k#CHlVp=WHM)!ref;*rpnU34L+k$L)@fs>997ReQrr7yVJYLD | |||||
z`dVs3lYV+~Opo@m+<1RMi}Zelv(aO{zFtaOw^J2so#)_}Rh^3mTy5Ulm5u0bd1$sd | |||||
z%1SaNcGG8UC;ePqht{0v`kKJ63u}K_vov5v_U_lm!{_Bh8qW{?ZT<4o4jb||uFmi^ | |||||
z30ZU^a{;@{vf9?de5)Ugd$z>m!pd1kt}&nNxfQ%$Cv5tI#JL~tM!fXdm38~gug64p | |||||
zPQHzsxrEPoH~acuuiPqwgeLkX9stl@czd`lPi^UIPuqI(lTP(tdKzaJng-1~7E-it | |||||
z^}F4)Uyk$l@U7J3X7b1TZoizoMla|<)LiP!1Nk#4H#fGIR{!zvUg_6cr7kf6jdd66 | |||||
zD!Qc#F=d2LiSHOK8+x;I0C(Yj=B4!G8I;6d7oDZ^(^PBbaSI=dUyaxIy}Ywy^7z`I | |||||
zRoLoo=^Wk)|L%+9vXYI@<NWfT=E$bT^qf>|SURQQGX8NqLpNr;n9In~x$Y`9UUu4b | |||||
z=jT}O9W^IA>!s^fWqQta2@~XR4QTc%_W6`nbn@Ne3#VgO$O3|`e3XWjkL%m<?vOKe | |||||
zb2CqU`O2NWXYrpV!mV5izrz?U9?b^5xQTGb|5BQBGZWPP_web96Qm(opzdnbdm2Fg | |||||
z)o6BNV}K&e1cnnIN3Lp;Sm2+d$HALooxX)yk_}1a(1qLop<2mDTb5wA6*Nu-lN33i | |||||
zg*3>QV8j1RJ?y=7L@4SY`!CkuNp2*<-VN6L3(>ruD2jFae>PqZX-`bsIQ&+JMt{gP | |||||
zMGkCfg<q`wb?lc#E0ep22R3Da>v$FQnqcs~f~OdiY|DPg!rmxM$kF4{{`Zqm(~$&i | |||||
z+QAv2st76{Qlo6S65)=WRxC)<duc}3ce!gr+Sgk%m!7&tp9n1I15p%OBQK2gJ>yUy | |||||
z;w?RL0h<wH!iwS+^y(Xb8ec)9-(Z$tM(IRp6|$}9=bU&q3#`M_$bSBA*j>dV)Qm>* | |||||
zBES}EqfdtubT$WSPJnZ!yI~I$dFb&`ckB_IEzK!M8+oC^5DKk{7mRf$B2MJ7ftooS | |||||
z7m`<B;I}d(T$==0*KNI-t^9`2=zQ)raw`zeA{Z|H?r)t#XGL?JUfR2FTnO<42dpRG | |||||
z%BHiTl&A7So!x2lVkVxOd@ziJJdxGC&P$e>QD_Z3v!u!If;5QIFAc%E+N2j>1FKgg | |||||
z7%ZjHi<qUcGp-~we173yz&%-aHszdq76``Y@>24miQsya7RW(wcks%Keps;zu)b0A | |||||
zs25JPhFO?2aFvEEbVje2)&!nBGNI6va`ZjV^<&<;*oAIbv0|%zcTmYw4&4OpddEAL | |||||
z6i|AB&Whx=JaY`U$^x-?&ZhXh-V_q@PFA<@aasSI*_5LVydg75<c7WbOR?19tEG+` | |||||
zPB12P_t09`-dO1d{gbW&NUl888@|yt-l&!;;py_^Nu<dBKwDc3yvDD~93~QHBFPzX | |||||
z$G#|R)2KvAx@yYAiV3kYcbiuU_N%uvQMREli`W_U?ca#0ykauM*S--sF`dfW4U5wQ | |||||
zE7ub3t|hJ~raAA6P*h`7IkqQrp(Bm+fcs(YV$l>>Np#kd?j=AhtCi>#xxE@)4Jm|^ | |||||
z<YR&fD_LoV+<l#m`AP-*>s9C!Q5#be(DKW;PJqrzQr-nDtV8hy!9XcqM*>Kc{Hi)Y | |||||
zPmTmm2c(`po-3dPH8V+oB5Go4z_^Jy6~>KBLl`$QO+h7U>07{50ljGbu$HFq9!STl | |||||
zRQU5*u}TW)qryW}P=qcYIYJU%0T=Mg2!9T1opK-OQxml?Ex>xTe&mRTFc(zdmsR+t | |||||
ztR(Ivu${EFe*mQyVqfLUoaL3f?`<5A#U40Ybl`&(Ya7=bOd)MOJ2G-U=Z>H$_T|0Y | |||||
z^{W@KQn>cOguJ0?aGWf-7(JT(@|I5iBf%`}C~BFpWn|N#tKb_RgYeZTSsi<x{iU7% | |||||
zSYQK|B9*-Z&;2olq8~VrNhdZPJ_S_p^Q7--gBF}2UYx1WojVy6qgjK;A5g4od1O$6 | |||||
ztxP<e4W{F1;5+rf#t1`_{+AjO79k9*PGDoF6wx4+EVX)l4|~c~1MY}KXLC*0eD%~E | |||||
zKsN=ekXb`6{dD^z#V6)6K$`}^?x$OJULWGpyb%@&JVN)5wLN*7Y#FoC8_>w02)mW; | |||||
zvgwkPnZPcejj(JX5!D~Nk~Vjrw9#|*0yG^^1WJ@@<2`R|zz2lx1OkxK8@F%oTSx+v | |||||
zp`5$JW&137yLLy#O#u!FXR4^OH*u1oLwc>28fXL7)Um@g{(VmPZizpTP_aRKYb4TI | |||||
z*llsqd3ccr733kJ;G2m^Rr<}i{_2P}cOcP_#NKNavgOg9e~mw%r_cj!h+n+Q(e7dY | |||||
zC7;|xx2D1TQPpU?h~b)&6iQnTo2o=U9Xl>ccTK6SsO(t4G6P%VNTRCVMTO(8c(IO9 | |||||
ztdEfH%PZ8bO#&%E{E-yYlWb6<iUXkm>kB6USjj8Yqt(D$@XA3B!#=%_Dwqv+wirB8 | |||||
zL^qgwAuxFziHukE)?`+00Gm~<kLn0Bc(3P7HY4^`M<TXk_+)E!s6=B8`KWZKmPi}M | |||||
zhQE?UwF7~O3If|$g#(=o-lQ=K$P>>VH3$dQxU2S<Eo81=NtO7zo(?wp_@^p}09v=v | |||||
zRB`vB&*aK*u$dfGMdcJ!$7rmjWF-LMj|+oCj!1Opu#q*z?li#WVN3GB-n0txTIXaS | |||||
z)DZ^i%a06?X${sVk|)^^g^;D-wad+}dO4a^AV-8#Slv`tV4NEL?8H^8GAf8xM!&nf | |||||
z3phWH5cHj3>gG17fWu(#onhwJB<(sSx0V4UlzdrHG%y;vzb`345ye5k10>)4L5IP; | |||||
zI`Paa7l+a12C$+Z@l=KLP~eg`bYR1*#VVLKkmcS@$g~*cC3h|a5><(KK90MXhsr(- | |||||
zS*DXf9uTQ(;M&~LdwT{0V;*V4H`CKtJ43iN(f5JSYLWRl6-*t-_9iQ%61lUAJ!kUg | |||||
z(L+JGpp!~(Y}EpKiZ`@b=pr(v=o>StUA>||(UbKYh<pR^M0vzL`SdAA9l_mKm$>J5 | |||||
z-VuxUVj8GE-%=*eT>9*iKn*b<vDT+}^s$%Ijwp0V@xK6<@2PhcbzSeo7<LfN-!yBM | |||||
zv}W$UKrW6hQpW~I_j2te&mD#%tmsu>4#VW5Z`H%<XGMbT$XQjEZGzhwMbU3v4eT=P | |||||
zjY|H?jI$$!iSvIS8*W@6Q~?FSiC0oaNBxQ-M<Nyo)q%y?@J8uz<KfZZV<z(Tx{^sC | |||||
zcr@U2MgXT%Iy_aZ*iRlqMX4>uP25`v85fP34#l;zORy_|pRt|oo&HcS`K{o|XYD2B | |||||
zJ7zeBa7)ogsKao*=)JgoOWdFR*}770hIp{Vxa^gDK(ef_Bn(Rd&92Af^K>|x*b?pq | |||||
z^dPeNYe@*U035~{RBN)ZOyG{>0&8ibxsV2O6<<kBh_If09@u~;**t9y8LZ~Upa%ix | |||||
zTZs>*3EtogHERT`5X?k$L43Zz2vG&`zYC@zG9><*Ko|Ln#1{*GL0XV_zF;!)3W>if | |||||
zpd#f+`~!h1Qi#Mq5}?RAB(=wijm3dgvIBbgde~LPJMuNM<c44~5{0DpS*^rEz$*Cx | |||||
z-F$7VTXCFxjU=fMaFA(8{3Ss&5;}~$@ZnxUcNonNnMC)+XH(C#mVddvLu0NR)TaB# | |||||
zkIsyT_61PV_1<akF0S8cFKslFlonbi5BKCK)=_DLm31qx?#}DI87GY}<wT8nR`BO4 | |||||
zRx@5_a1GE5?KOP2bP+oTyI-K$@K8%uM5Fy!5@TP?;*!xTp2mF~HOQD>yC(Rxh+U4g | |||||
zUUkq6#}v28CoPlCukTBh>Mh`0>y4h}Tg&ESk7r!yE=O>(dSt(ALywPCZytwK9m#d$ | |||||
zUPt$*@ckG@_#W!myqe{smjppbnDdfPTRpkQP{j*IeS42pi12jTuZ6ys1T5r_(hW9$ | |||||
zdU7+-&x42VYT#sQ({uaa88Y>$n@=VcSoT;2u<YMvKF(5fkq&m{bq7fG%q7;{4ps#M | |||||
z2jqmq)pc{0a@U~^{D3?9_#bL#YMkDbh}h?_+TuG0S=?CECw19NPdE>znU|sX9<|8I | |||||
z#a`x8Ju?m%F#NNYeFlr`c&}3+utiFXL*IBnB%_2G1?n<i?eB)Y8b5lh{8^6OB&|!Z | |||||
z7gJ)$v5+0m{!Z`yz{4P$llNkGja?S8FJcC}v|LM6@lR^{FGiaZ2`S~ew_K#lMC<}A | |||||
zEn3TY#Sg(qe;cE44!G=~--S*oy%n5g?O~Rrf6&RqE&43QVxLbC?J3D^<|ca92Q=zI | |||||
z{!%oniDD7H>1oIEO?&oxik>_V5QV2zH0A7$&pmD5vGXmi?%F>v^^WO~RO=R2-$8j> | |||||
z$9#dRvai_64=G}G`Q{gD#mTbl6~b3Hm)jlbG5!s?KJG3&RG_$|YwdjQi9{E#S|Ew} | |||||
zkgR3d$ZSKM=4o=NyOt`Csy}G%=@s6PWyc6xp66bC`^7jFIWoQ3akGcgz=9JSjufwm | |||||
zP`;}AAj@X6zHDlrem}uB$*zX^8l9|B?|3VpYe?RjSh!xi9$!6|^Xyqm<F<-M=0$Xe | |||||
zYkm2~xE#eoouegx6?I;ojeoG98+^Ty(a;u=hF_kS-<?*<ogLMj+hZJyyi6@C3{+Ab | |||||
z@mUM#KelWycg$|NBVWeB?^w|tM>vH8ll0z*^LmU^ke9M!>G+BM)T9M<yiAc`1=;ka | |||||
z<Zv(3OtFezv?Xrc^FYdb!CCUSW7VjxUT*rh?EA{sHNMSE9UtXqJ<*%X%!dGPCim1> | |||||
zvbft3*&THeF9!a=qS6Gt{Qt`(rJo+ZM=+B3-uVBe{r`m2>TuR2Gvx|d^>(M0_1^{J | |||||
z&OOgkw(AystYHt|E^z)78usIKcv{%8i?zQQxwP+SKCpk(P++5}ym5b3NZWww$fh${ | |||||
z+z&czyqYIeTzcz0oSZKBoa(Z#PTn*0gDZ_->&rhr!vd6b34w3Fxi*Jyc-Y>4e@$^4 | |||||
cxguN4kni(NU3<6%{;31JJp(*$x^uGr9fGaZv;Y7A | |||||
literal 1629 | |||||
zcmV-j2BP_iP)<h;3K|Lk000e1NJLTq001BW001Be1^@s6b9#F800004b3#c}2nYxW | |||||
zd<bNS000IQNkl<ZScS!xeQcH08Nh$fdGGB9P;Q}RCD0F7u}~l_3X>XOp-ouEG7tw0 | |||||
zrz1-=F%F0OAZ*Bt%4AWe%cd6G%m`sY6u$yfhB9DOrfkevkPjQ|7bz{Y^t;geeV+Z% | |||||
z-b-(9d+9KXPx9t{&pFR|p5ODF^FHT&;Y${|kx3oD%S#{awv*l7K)tMQzWRR$@NLb_ | |||||
zw;H}v)GQIf6%3j8H~%fa;jd>h$695QRWckW+^S($6E4nXW;Y+LsHg~iVF2&*`MKhR | |||||
zHmmCQ1byyU`lD^F&Xm<pKK62+wdxsmqG|_L|6&~1bD!4kKAp&iGu%LLdCpHn>|TNV | |||||
z%8R3a_|Tzmm#*3P@Q-HSUYM1eH!<b#!H-eZVZxGBjLfoXADrNsadU=e>c3jMs;&&F | |||||
z8VCRQ^d#Seu9v{M5A2zz4}Ep>n<uhf6p_p+1vzZn`55CfJnY+hl#T1RV8b?1fPFTV | |||||
zMq{}1RLvXzNtgju#D6Av7dYQX?EYncy0!eD=hJtHNE$$UTQ~nWa)NIxE}{6=0&;V+ | |||||
zNSKLeLKv&c>a1}yj<<B4tsNF%LuSqsh<Qk$W5YWI51(ur^O}gH4%BIH@8;;=D_OL- | |||||
zgyP!@m^v+w?3^qDfgml-9f=^^DlE+!TezpC>$BEqf8?QeYhH<1JqFP0=f?}mZ~v(g | |||||
zxoJ3q`BNsdeb*C=&+znTPg(oGZ>TsNm*p6-s^<dE)uQU^{eA!w88Fs9)AywRE_>-~ | |||||
zo<z<8YO5Qmue&%PLSgx;vV_h7j7ULpIBiX|4d5%!S)|C<QMHv#>5qiMvi`c3fXjnY | |||||
z?nzI>n30qgyK(yTm1Cj+ioF{^3@0CWrSO)>rn{c}{@GoeufLXs+vZGT(^CeYib | |||||
z@v#)Z=<7IOmIA1i6V}+JBXJIJp`n@GI}ZTx-oB4$Ywcp$iqa857|UG%CfGW821q7= | |||||
zq9+APM+z`d>S`Lf|K3fsw_Ssnls$oQ<HruEuZBecMsac?omI_gbs~~-BLDz3)fc$` | |||||
z-e1wt-p#TVrMzFUg##aK;af}Q#pw!orcPa&X4EpdpGluTlGk>l&LdL&c>{lV@vUe( | |||||
zB{i9~>+T+MNs|1@lU%4xMhqz7rXF{kl4D#XI<rJf|J}hcee;cpJz~<*Mw6VBg4gGb | |||||
zooq-lR=Ch_!L_ceK5qAmI2hv|_7ZWTAd$Xz_Z*|K;WE%ypkCQ=fIu+*%<#Kh7hMP! | |||||
z=r839ZtBcdRV$IO`b<Y78%j(+fEkbhUvG%-EPj-E3uaPZcZr7cEpa(i)PL^WtF5k7 | |||||
zKc{_3+6ywu7%f6L4wOU{)g}fo*D?^Jr|{7Sp8?<)H^ESE%sf%~un+ua7rY{J06+}0 | |||||
zBy8Rps2H|^J|j+2+~rC4Fn?hY0JCP_Lh7i|L+U9?*al+&tkFFXsf0HCR$sKt{%-N* | |||||
z3RR6+FmIp}5m(&$ue@g=bLJKRFlTN7-&`_x$UWlocgCh)3Al~C9R)~fZF~*18f4VZ | |||||
z7oA&Ukw3@K+7SLg7mjhqTqW}hV+`Lhzc5b6=kLU7I5v9W<&&Sk(HI4gO@x470;o9m | |||||
zY+rTyJynfaG+&^Lp59A<M5C)XcqYbh#lf@DF@T<{7x4#s21TzuZbDB++R+zw^S<u0 | |||||
z;g3DVz_f{JftkB1C;cKAHg^r29JT@cfi7H53U1dZYR<KAsi_ldm7Uv;a`aFYDEI>1 | |||||
zbX{p6Z2g0st1+xPRr~fQk-nI>L$^#A<JHs;1&T!MQ?sAF12H_pkRrHUeIPC;nNZkA | |||||
zAlOSN><5PaL=!6;PSwBuOsthb;@vtaN3H8!fg-Rw7QQgI>Pk?49a|gzM^OzTtUX=( | |||||
z<{t+S#TiXq)6|eE-gzSI0rP_+YmYB4y}5I>Ds?yLyvmA{4domX6nRy|Tb{c@gw3Go | |||||
zSAy>Q%C;02*syDzNaO$;UaV@p5uVP-xx6jWU($8IpY(BDzOQ7j6qV%)(*@c8YURW; | |||||
zzpcp2S4#n^S%{1S+QBwkHC1k7_I_Hk`;+V09uYtc%=Ww#?-e@}G}|!}PU;OD{-Qsp | |||||
bU%LDkMDBWX&Ru`<00000NkvXXu0mjfJA@b~ | |||||
diff --git a/literal_add b/literal_add | |||||
new file mode 100644 | |||||
index 0000000000000000000000000000000000000000..799df8578e3cae8a5e979182391b8e9a6a3deded | |||||
GIT binary patch | |||||
literal 1629 | |||||
zcmV-j2BP_iP)<h;3K|Lk000e1NJLTq001BW001Be1^@s6b9#F800004b3#c}2nYxW | |||||
zd<bNS000IQNkl<ZScS!xeQcH08Nh$fdGGB9P;Q}RCD0F7u}~l_3X>XOp-ouEG7tw0 | |||||
zrz1-=F%F0OAZ*Bt%4AWe%cd6G%m`sY6u$yfhB9DOrfkevkPjQ|7bz{Y^t;geeV+Z% | |||||
z-b-(9d+9KXPx9t{&pFR|p5ODF^FHT&;Y${|kx3oD%S#{awv*l7K)tMQzWRR$@NLb_ | |||||
zw;H}v)GQIf6%3j8H~%fa;jd>h$695QRWckW+^S($6E4nXW;Y+LsHg~iVF2&*`MKhR | |||||
zHmmCQ1byyU`lD^F&Xm<pKK62+wdxsmqG|_L|6&~1bD!4kKAp&iGu%LLdCpHn>|TNV | |||||
z%8R3a_|Tzmm#*3P@Q-HSUYM1eH!<b#!H-eZVZxGBjLfoXADrNsadU=e>c3jMs;&&F | |||||
z8VCRQ^d#Seu9v{M5A2zz4}Ep>n<uhf6p_p+1vzZn`55CfJnY+hl#T1RV8b?1fPFTV | |||||
zMq{}1RLvXzNtgju#D6Av7dYQX?EYncy0!eD=hJtHNE$$UTQ~nWa)NIxE}{6=0&;V+ | |||||
zNSKLeLKv&c>a1}yj<<B4tsNF%LuSqsh<Qk$W5YWI51(ur^O}gH4%BIH@8;;=D_OL- | |||||
zgyP!@m^v+w?3^qDfgml-9f=^^DlE+!TezpC>$BEqf8?QeYhH<1JqFP0=f?}mZ~v(g | |||||
zxoJ3q`BNsdeb*C=&+znTPg(oGZ>TsNm*p6-s^<dE)uQU^{eA!w88Fs9)AywRE_>-~ | |||||
zo<z<8YO5Qmue&%PLSgx;vV_h7j7ULpIBiX|4d5%!S)|C<QMHv#>5qiMvi`c3fXjnY | |||||
z?nzI>n30qgyK(yTm1Cj+ioF{^3@0CWrSO)>rn{c}{@GoeufLXs+vZGT(^CeYib | |||||
z@v#)Z=<7IOmIA1i6V}+JBXJIJp`n@GI}ZTx-oB4$Ywcp$iqa857|UG%CfGW821q7= | |||||
zq9+APM+z`d>S`Lf|K3fsw_Ssnls$oQ<HruEuZBecMsac?omI_gbs~~-BLDz3)fc$` | |||||
z-e1wt-p#TVrMzFUg##aK;af}Q#pw!orcPa&X4EpdpGluTlGk>l&LdL&c>{lV@vUe( | |||||
zB{i9~>+T+MNs|1@lU%4xMhqz7rXF{kl4D#XI<rJf|J}hcee;cpJz~<*Mw6VBg4gGb | |||||
zooq-lR=Ch_!L_ceK5qAmI2hv|_7ZWTAd$Xz_Z*|K;WE%ypkCQ=fIu+*%<#Kh7hMP! | |||||
z=r839ZtBcdRV$IO`b<Y78%j(+fEkbhUvG%-EPj-E3uaPZcZr7cEpa(i)PL^WtF5k7 | |||||
zKc{_3+6ywu7%f6L4wOU{)g}fo*D?^Jr|{7Sp8?<)H^ESE%sf%~un+ua7rY{J06+}0 | |||||
zBy8Rps2H|^J|j+2+~rC4Fn?hY0JCP_Lh7i|L+U9?*al+&tkFFXsf0HCR$sKt{%-N* | |||||
z3RR6+FmIp}5m(&$ue@g=bLJKRFlTN7-&`_x$UWlocgCh)3Al~C9R)~fZF~*18f4VZ | |||||
z7oA&Ukw3@K+7SLg7mjhqTqW}hV+`Lhzc5b6=kLU7I5v9W<&&Sk(HI4gO@x470;o9m | |||||
zY+rTyJynfaG+&^Lp59A<M5C)XcqYbh#lf@DF@T<{7x4#s21TzuZbDB++R+zw^S<u0 | |||||
z;g3DVz_f{JftkB1C;cKAHg^r29JT@cfi7H53U1dZYR<KAsi_ldm7Uv;a`aFYDEI>1 | |||||
zbX{p6Z2g0st1+xPRr~fQk-nI>L$^#A<JHs;1&T!MQ?sAF12H_pkRrHUeIPC;nNZkA | |||||
zAlOSN><5PaL=!6;PSwBuOsthb;@vtaN3H8!fg-Rw7QQgI>Pk?49a|gzM^OzTtUX=( | |||||
z<{t+S#TiXq)6|eE-gzSI0rP_+YmYB4y}5I>Ds?yLyvmA{4domX6nRy|Tb{c@gw3Go | |||||
zSAy>Q%C;02*syDzNaO$;UaV@p5uVP-xx6jWU($8IpY(BDzOQ7j6qV%)(*@c8YURW; | |||||
zzpcp2S4#n^S%{1S+QBwkHC1k7_I_Hk`;+V09uYtc%=Ww#?-e@}G}|!}PU;OD{-Qsp | |||||
bU%LDkMDBWX&Ru`<00000NkvXXu0mjfJA@b~ | |||||
literal 0 | |||||
HcmV?d00001 |
diff --git a/smudgetest b/smudgetest | |||||
index a24d41e..762c4d0 100644 | |||||
--- a/smudgetest | |||||
+++ b/smudgetest | |||||
@@ -1,3 +1,3 @@ | |||||
PERLE | |||||
-HEBLE | |||||
+sprich | |||||
speak |
PARLA | |||||
sprich | |||||
speak |
PARLA | |||||
HABLA | |||||
speak |
diff --git a/umlaut b/umlaut | |||||
index 003a054..557f72f 100644 | |||||
--- a/umlaut | |||||
+++ b/umlaut | |||||
@@ -1 +1 @@ | |||||
-ÄÖÜ | |||||
+ÄÖÜ |
トヨワ |
ÄÖÜ |
ScmZp0Xmwa1z*+$U3j_csN(Dmz |
TcmZp5XmD5{u!xa=5hEi28?FP4 |
/* | /* | ||||
* Copyright (C) 2011, 2020 IBM Corporation and others | |||||
* Copyright (C) 2011, 2021 IBM Corporation and others | |||||
* | * | ||||
* This program and the accompanying materials are made available under the | * This program and the accompanying materials are made available under the | ||||
* terms of the Eclipse Distribution License v. 1.0 which is available at | * terms of the Eclipse Distribution License v. 1.0 which is available at | ||||
*/ | */ | ||||
package org.eclipse.jgit.api; | package org.eclipse.jgit.api; | ||||
import static org.junit.Assert.assertArrayEquals; | |||||
import static org.junit.Assert.assertEquals; | import static org.junit.Assert.assertEquals; | ||||
import static org.junit.Assert.assertFalse; | import static org.junit.Assert.assertFalse; | ||||
import static org.junit.Assert.assertTrue; | import static org.junit.Assert.assertTrue; | ||||
import java.io.File; | import java.io.File; | ||||
import java.io.IOException; | import java.io.IOException; | ||||
import java.io.InputStream; | import java.io.InputStream; | ||||
import java.io.OutputStream; | |||||
import java.nio.charset.StandardCharsets; | |||||
import java.nio.file.Files; | |||||
import org.eclipse.jgit.api.errors.PatchApplyException; | import org.eclipse.jgit.api.errors.PatchApplyException; | ||||
import org.eclipse.jgit.api.errors.PatchFormatException; | import org.eclipse.jgit.api.errors.PatchFormatException; | ||||
import org.eclipse.jgit.attributes.FilterCommand; | |||||
import org.eclipse.jgit.attributes.FilterCommandFactory; | |||||
import org.eclipse.jgit.attributes.FilterCommandRegistry; | |||||
import org.eclipse.jgit.diff.RawText; | import org.eclipse.jgit.diff.RawText; | ||||
import org.eclipse.jgit.junit.RepositoryTestCase; | import org.eclipse.jgit.junit.RepositoryTestCase; | ||||
import org.eclipse.jgit.lib.Config; | |||||
import org.eclipse.jgit.lib.ConfigConstants; | |||||
import org.eclipse.jgit.util.IO; | |||||
import org.junit.Test; | import org.junit.Test; | ||||
public class ApplyCommandTest extends RepositoryTestCase { | public class ApplyCommandTest extends RepositoryTestCase { | ||||
} | } | ||||
} | } | ||||
@Test | |||||
public void testCrLf() throws Exception { | |||||
try { | |||||
db.getConfig().setBoolean(ConfigConstants.CONFIG_CORE_SECTION, null, | |||||
ConfigConstants.CONFIG_KEY_AUTOCRLF, true); | |||||
ApplyResult result = init("crlf", true, true); | |||||
assertEquals(1, result.getUpdatedFiles().size()); | |||||
assertEquals(new File(db.getWorkTree(), "crlf"), | |||||
result.getUpdatedFiles().get(0)); | |||||
checkFile(new File(db.getWorkTree(), "crlf"), | |||||
b.getString(0, b.size(), false)); | |||||
} finally { | |||||
db.getConfig().unset(ConfigConstants.CONFIG_CORE_SECTION, null, | |||||
ConfigConstants.CONFIG_KEY_AUTOCRLF); | |||||
} | |||||
} | |||||
@Test | |||||
public void testCrLfOff() throws Exception { | |||||
try { | |||||
db.getConfig().setBoolean(ConfigConstants.CONFIG_CORE_SECTION, null, | |||||
ConfigConstants.CONFIG_KEY_AUTOCRLF, false); | |||||
ApplyResult result = init("crlf", true, true); | |||||
assertEquals(1, result.getUpdatedFiles().size()); | |||||
assertEquals(new File(db.getWorkTree(), "crlf"), | |||||
result.getUpdatedFiles().get(0)); | |||||
checkFile(new File(db.getWorkTree(), "crlf"), | |||||
b.getString(0, b.size(), false)); | |||||
} finally { | |||||
db.getConfig().unset(ConfigConstants.CONFIG_CORE_SECTION, null, | |||||
ConfigConstants.CONFIG_KEY_AUTOCRLF); | |||||
} | |||||
} | |||||
@Test | |||||
public void testCrLfEmptyCommitted() throws Exception { | |||||
try { | |||||
db.getConfig().setBoolean(ConfigConstants.CONFIG_CORE_SECTION, null, | |||||
ConfigConstants.CONFIG_KEY_AUTOCRLF, true); | |||||
ApplyResult result = init("crlf3", true, true); | |||||
assertEquals(1, result.getUpdatedFiles().size()); | |||||
assertEquals(new File(db.getWorkTree(), "crlf3"), | |||||
result.getUpdatedFiles().get(0)); | |||||
checkFile(new File(db.getWorkTree(), "crlf3"), | |||||
b.getString(0, b.size(), false)); | |||||
} finally { | |||||
db.getConfig().unset(ConfigConstants.CONFIG_CORE_SECTION, null, | |||||
ConfigConstants.CONFIG_KEY_AUTOCRLF); | |||||
} | |||||
} | |||||
@Test | |||||
public void testCrLfNewFile() throws Exception { | |||||
try { | |||||
db.getConfig().setBoolean(ConfigConstants.CONFIG_CORE_SECTION, null, | |||||
ConfigConstants.CONFIG_KEY_AUTOCRLF, true); | |||||
ApplyResult result = init("crlf4", false, true); | |||||
assertEquals(1, result.getUpdatedFiles().size()); | |||||
assertEquals(new File(db.getWorkTree(), "crlf4"), | |||||
result.getUpdatedFiles().get(0)); | |||||
checkFile(new File(db.getWorkTree(), "crlf4"), | |||||
b.getString(0, b.size(), false)); | |||||
} finally { | |||||
db.getConfig().unset(ConfigConstants.CONFIG_CORE_SECTION, null, | |||||
ConfigConstants.CONFIG_KEY_AUTOCRLF); | |||||
} | |||||
} | |||||
@Test | |||||
public void testPatchWithCrLf() throws Exception { | |||||
try { | |||||
db.getConfig().setBoolean(ConfigConstants.CONFIG_CORE_SECTION, null, | |||||
ConfigConstants.CONFIG_KEY_AUTOCRLF, false); | |||||
ApplyResult result = init("crlf2", true, true); | |||||
assertEquals(1, result.getUpdatedFiles().size()); | |||||
assertEquals(new File(db.getWorkTree(), "crlf2"), | |||||
result.getUpdatedFiles().get(0)); | |||||
checkFile(new File(db.getWorkTree(), "crlf2"), | |||||
b.getString(0, b.size(), false)); | |||||
} finally { | |||||
db.getConfig().unset(ConfigConstants.CONFIG_CORE_SECTION, null, | |||||
ConfigConstants.CONFIG_KEY_AUTOCRLF); | |||||
} | |||||
} | |||||
@Test | |||||
public void testPatchWithCrLf2() throws Exception { | |||||
String name = "crlf2"; | |||||
try (Git git = new Git(db)) { | |||||
db.getConfig().setBoolean(ConfigConstants.CONFIG_CORE_SECTION, null, | |||||
ConfigConstants.CONFIG_KEY_AUTOCRLF, false); | |||||
a = new RawText(readFile(name + "_PreImage")); | |||||
write(new File(db.getWorkTree(), name), | |||||
a.getString(0, a.size(), false)); | |||||
git.add().addFilepattern(name).call(); | |||||
git.commit().setMessage("PreImage").call(); | |||||
b = new RawText(readFile(name + "_PostImage")); | |||||
db.getConfig().setBoolean(ConfigConstants.CONFIG_CORE_SECTION, null, | |||||
ConfigConstants.CONFIG_KEY_AUTOCRLF, true); | |||||
ApplyResult result = git.apply() | |||||
.setPatch(getTestResource(name + ".patch")).call(); | |||||
assertEquals(1, result.getUpdatedFiles().size()); | |||||
assertEquals(new File(db.getWorkTree(), name), | |||||
result.getUpdatedFiles().get(0)); | |||||
checkFile(new File(db.getWorkTree(), name), | |||||
b.getString(0, b.size(), false)); | |||||
} finally { | |||||
db.getConfig().unset(ConfigConstants.CONFIG_CORE_SECTION, null, | |||||
ConfigConstants.CONFIG_KEY_AUTOCRLF); | |||||
} | |||||
} | |||||
// Clean/smudge filter for testFiltering. The smudgetest test resources were
// created with C git using a clean filter sed -e "s/A/E/g" and the smudge
// filter sed -e "s/E/A/g". To keep the test independent of the presence of
// sed, implement this with a built-in filter.
private static class ReplaceFilter extends FilterCommand {
	// Byte value to search for in the input stream.
	private final char toReplace;
	// Byte value written in place of every occurrence of toReplace.
	private final char replacement;
	ReplaceFilter(InputStream in, OutputStream out, char toReplace,
			char replacement) {
		super(in, out);
		this.toReplace = toReplace;
		this.replacement = replacement;
	}
	@Override
	public int run() throws IOException {
		// Processes a single byte per call. NOTE(review): presumably the
		// filter framework invokes run() repeatedly until it returns -1 —
		// confirm against the FilterCommand contract.
		int b = in.read();
		if (b < 0) {
			// End of input: close both streams and signal completion.
			in.close();
			out.close();
			return -1;
		}
		if ((b & 0xFF) == toReplace) {
			b = replacement;
		}
		out.write(b);
		// One byte was written in this invocation.
		return 1;
	}
}
@Test | |||||
public void testFiltering() throws Exception { | |||||
// Set up filter | |||||
FilterCommandFactory clean = (repo, in, out) -> { | |||||
return new ReplaceFilter(in, out, 'A', 'E'); | |||||
}; | |||||
FilterCommandFactory smudge = (repo, in, out) -> { | |||||
return new ReplaceFilter(in, out, 'E', 'A'); | |||||
}; | |||||
FilterCommandRegistry.register("jgit://builtin/a2e/clean", clean); | |||||
FilterCommandRegistry.register("jgit://builtin/a2e/smudge", smudge); | |||||
try (Git git = new Git(db)) { | |||||
Config config = db.getConfig(); | |||||
config.setString(ConfigConstants.CONFIG_FILTER_SECTION, "a2e", | |||||
"clean", "jgit://builtin/a2e/clean"); | |||||
config.setString(ConfigConstants.CONFIG_FILTER_SECTION, "a2e", | |||||
"smudge", "jgit://builtin/a2e/smudge"); | |||||
write(new File(db.getWorkTree(), ".gitattributes"), | |||||
"smudgetest filter=a2e"); | |||||
git.add().addFilepattern(".gitattributes").call(); | |||||
git.commit().setMessage("Attributes").call(); | |||||
ApplyResult result = init("smudgetest", true, true); | |||||
assertEquals(1, result.getUpdatedFiles().size()); | |||||
assertEquals(new File(db.getWorkTree(), "smudgetest"), | |||||
result.getUpdatedFiles().get(0)); | |||||
checkFile(new File(db.getWorkTree(), "smudgetest"), | |||||
b.getString(0, b.size(), false)); | |||||
} finally { | |||||
// Tear down filter | |||||
FilterCommandRegistry.unregister("jgit://builtin/a2e/clean"); | |||||
FilterCommandRegistry.unregister("jgit://builtin/a2e/smudge"); | |||||
} | |||||
} | |||||
// Convenience overload: applies the patch for the given test resource and
// expects exactly one updated file.
private void checkBinary(String name, boolean hasPreImage)
		throws Exception {
	checkBinary(name, hasPreImage, 1);
}
private void checkBinary(String name, boolean hasPreImage, | |||||
int numberOfFiles) throws Exception { | |||||
try (Git git = new Git(db)) { | |||||
byte[] post = IO | |||||
.readWholeStream(getTestResource(name + "_PostImage"), 0) | |||||
.array(); | |||||
File f = new File(db.getWorkTree(), name); | |||||
if (hasPreImage) { | |||||
byte[] pre = IO | |||||
.readWholeStream(getTestResource(name + "_PreImage"), 0) | |||||
.array(); | |||||
Files.write(f.toPath(), pre); | |||||
git.add().addFilepattern(name).call(); | |||||
git.commit().setMessage("PreImage").call(); | |||||
} | |||||
ApplyResult result = git.apply() | |||||
.setPatch(getTestResource(name + ".patch")).call(); | |||||
assertEquals(numberOfFiles, result.getUpdatedFiles().size()); | |||||
assertEquals(f, result.getUpdatedFiles().get(0)); | |||||
assertArrayEquals(post, Files.readAllBytes(f.toPath())); | |||||
} | |||||
} | |||||
@Test
public void testBinaryDelta() throws Exception {
	// GIT binary patch with "delta" hunks applied against a committed
	// pre-image.
	checkBinary("delta", true);
}
@Test
public void testBinaryLiteral() throws Exception {
	// GIT binary patch with "literal" hunks replacing a committed
	// pre-image wholesale.
	checkBinary("literal", true);
}
@Test
public void testBinaryLiteralAdd() throws Exception {
	// GIT binary patch with a "literal" hunk creating a brand-new file
	// (no pre-image committed).
	checkBinary("literal_add", false);
}
@Test
public void testEncodingChange() throws Exception {
	// This is a text patch that changes a file containing ÄÖÜ in UTF-8 to
	// the same characters in ISO-8859-1. The patch file itself uses mixed
	// encoding. Since checkFile() works with strings use the binary check.
	checkBinary("umlaut", true);
}
@Test
public void testEmptyLine() throws Exception {
	// C git accepts completely empty lines as empty context lines.
	// According to comments in the C git sources (apply.c), newer GNU diff
	// may produce such diffs.
	checkBinary("emptyLine", true);
}
@Test | |||||
public void testMultiFileNoNewline() throws Exception { | |||||
// This test needs two files. One is in the test resources. | |||||
try (Git git = new Git(db)) { | |||||
Files.write(db.getWorkTree().toPath().resolve("yello"), | |||||
"yello".getBytes(StandardCharsets.US_ASCII)); | |||||
git.add().addFilepattern("yello").call(); | |||||
git.commit().setMessage("yello").call(); | |||||
} | |||||
checkBinary("hello", true, 2); | |||||
} | |||||
@Test | @Test | ||||
public void testAddA1() throws Exception { | public void testAddA1() throws Exception { | ||||
ApplyResult result = init("A1", false, true); | ApplyResult result = init("A1", false, true); |
/* | |||||
* Copyright (C) 2021 Thomas Wolf <thomas.wolf@paranor.ch> and others | |||||
* | |||||
* This program and the accompanying materials are made available under the | |||||
* terms of the Eclipse Distribution License v. 1.0 which is available at | |||||
* https://www.eclipse.org/org/documents/edl-v10.php. | |||||
* | |||||
* SPDX-License-Identifier: BSD-3-Clause | |||||
*/ | |||||
package org.eclipse.jgit.util; | |||||
import static org.junit.Assert.assertArrayEquals; | |||||
import static org.junit.Assert.assertEquals; | |||||
import static org.junit.Assert.assertNotNull; | |||||
import static org.junit.Assert.assertThrows; | |||||
import static org.junit.Assert.assertTrue; | |||||
import java.nio.charset.StandardCharsets; | |||||
import org.junit.Test; | |||||
/** | |||||
* Tests for {@link Base85}. | |||||
*/ | |||||
public class Base85Test { | |||||
private static final String VALID_CHARS = "0123456789" | |||||
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" | |||||
+ "!#$%&()*+-;<=>?@^_`{|}~"; | |||||
@Test | |||||
public void testChars() { | |||||
for (int i = 0; i < 256; i++) { | |||||
byte[] testData = { '1', '2', '3', '4', (byte) i }; | |||||
if (VALID_CHARS.indexOf(i) >= 0) { | |||||
byte[] decoded = Base85.decode(testData, 4); | |||||
assertNotNull(decoded); | |||||
} else { | |||||
assertThrows(IllegalArgumentException.class, | |||||
() -> Base85.decode(testData, 4)); | |||||
} | |||||
} | |||||
} | |||||
private void roundtrip(byte[] data, int expectedLength) { | |||||
byte[] encoded = Base85.encode(data); | |||||
assertEquals(expectedLength, encoded.length); | |||||
assertArrayEquals(data, Base85.decode(encoded, data.length)); | |||||
} | |||||
private void roundtrip(String data, int expectedLength) { | |||||
roundtrip(data.getBytes(StandardCharsets.US_ASCII), expectedLength); | |||||
} | |||||
@Test | |||||
public void testPadding() { | |||||
roundtrip("", 0); | |||||
roundtrip("a", 5); | |||||
roundtrip("ab", 5); | |||||
roundtrip("abc", 5); | |||||
roundtrip("abcd", 5); | |||||
roundtrip("abcde", 10); | |||||
roundtrip("abcdef", 10); | |||||
roundtrip("abcdefg", 10); | |||||
roundtrip("abcdefgh", 10); | |||||
roundtrip("abcdefghi", 15); | |||||
} | |||||
@Test | |||||
public void testBinary() { | |||||
roundtrip(new byte[] { 1 }, 5); | |||||
roundtrip(new byte[] { 1, 2 }, 5); | |||||
roundtrip(new byte[] { 1, 2, 3 }, 5); | |||||
roundtrip(new byte[] { 1, 2, 3, 4 }, 5); | |||||
roundtrip(new byte[] { 1, 2, 3, 4, 5 }, 10); | |||||
roundtrip(new byte[] { 1, 2, 3, 4, 5, 0, 0, 0 }, 10); | |||||
roundtrip(new byte[] { 1, 2, 3, 4, 0, 0, 0, 5 }, 10); | |||||
} | |||||
@Test | |||||
public void testOverflow() { | |||||
IllegalArgumentException e = assertThrows( | |||||
IllegalArgumentException.class, | |||||
() -> Base85.decode(new byte[] { '~', '~', '~', '~', '~' }, 4)); | |||||
assertTrue(e.getMessage().contains("overflow")); | |||||
} | |||||
} |
/*
 * Copyright (C) 2021 Thomas Wolf <thomas.wolf@paranor.ch> and others
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Distribution License v. 1.0 which is available at
 * https://www.eclipse.org/org/documents/edl-v10.php.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
package org.eclipse.jgit.util.io;

import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.util.zip.InflaterInputStream;

import org.junit.Test;

/**
 * Crude tests for the {@link BinaryDeltaInputStream} using delta diffs
 * generated by C git.
 */
public class BinaryDeltaInputStreamTest {

	private InputStream getBinaryHunk(String name) {
		return this.getClass().getResourceAsStream(name);
	}

	/** Builds the 8192-byte base used by the tests: 255, 254, ..., cycling. */
	private static byte[] createBaseData() {
		byte[] base = new byte[8192];
		for (int i = 0; i < base.length; i++) {
			base[i] = (byte) (255 - (i % 256));
		}
		return base;
	}

	/**
	 * Applies the named delta resource to {@code base}, asserting the stream
	 * is fully consumed; when {@code expectedSize >= 0} also asserts the
	 * advertised result size.
	 *
	 * @return the reconstructed content
	 */
	private byte[] applyDelta(byte[] base, String resource, long expectedSize)
			throws Exception {
		try (ByteArrayOutputStream result = new ByteArrayOutputStream();
				BinaryDeltaInputStream delta = new BinaryDeltaInputStream(base,
						new InflaterInputStream(new BinaryHunkInputStream(
								getBinaryHunk(resource))))) {
			if (expectedSize >= 0) {
				assertEquals(expectedSize, delta.getExpectedResultSize());
			}
			byte[] chunk = new byte[1024];
			int read;
			while ((read = delta.read(chunk)) >= 0) {
				result.write(chunk, 0, read);
			}
			assertTrue(delta.isFullyConsumed());
			return result.toByteArray();
		}
	}

	@Test
	public void testBinaryDelta() throws Exception {
		byte[] base = createBaseData();
		// Same content, but with five 'x' bytes inserted in the middle.
		int middle = base.length / 2;
		byte[] expanded = new byte[base.length + 5];
		System.arraycopy(base, 0, expanded, 0, middle);
		for (int i = 0; i < 5; i++) {
			expanded[middle + i] = 'x';
		}
		System.arraycopy(base, middle, expanded, middle + 5, middle);
		// delta1.forward contains: COPY 0 4096, INSERT 5 xxxxx, COPY 0 4096.
		// (The way expanded was built could also be expressed as
		// COPY 0 4096, INSERT 5 xxxxx, COPY 4096 4096.)
		assertArrayEquals(expanded, applyDelta(base, "delta1.forward", -1));
		// delta1.reverse contains: COPY 0 4096, COPY 256 3840, COPY 256 256.
		// Alternatives exist, e.g. COPY 0 4096, COPY 4101 4096, or
		// COPY 0 4096, COPY 0 4096.
		assertArrayEquals(base,
				applyDelta(expanded, "delta1.reverse", base.length));
	}
}
/*
 * Copyright (C) 2021 Thomas Wolf <thomas.wolf@paranor.ch> and others
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Distribution License v. 1.0 which is available at
 * https://www.eclipse.org/org/documents/edl-v10.php.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
package org.eclipse.jgit.util.io;

import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Arrays;

import org.junit.Test;

/**
 * Tests for {@link BinaryHunkInputStream} and {@link BinaryHunkOutputStream}.
 */
public class BinaryHunkStreamTest {

	// Upper bound exercises several complete 52-byte hunk lines plus all
	// possible partial last lines.
	private static final int MAX_LENGTH = 520 + 52;

	/** Builds test content of the given length: 255, 254, ..., cycling. */
	private static byte[] testData(int length) {
		byte[] data = new byte[length];
		for (int i = 0; i < data.length; i++) {
			data[i] = (byte) (255 - (i % 256));
		}
		return data;
	}

	@Test
	public void testRoundtripWholeBuffer() throws IOException {
		for (int length = 1; length < MAX_LENGTH; length++) {
			byte[] data = testData(length);
			try (ByteArrayOutputStream encodedBytes = new ByteArrayOutputStream();
					BinaryHunkOutputStream encoder = new BinaryHunkOutputStream(
							encodedBytes)) {
				// Encode the whole buffer in one call.
				encoder.write(data);
				encoder.flush();
				byte[] encoded = encodedBytes.toByteArray();
				assertFalse(Arrays.equals(data, encoded));
				try (BinaryHunkInputStream decoder = new BinaryHunkInputStream(
						new ByteArrayInputStream(encoded))) {
					byte[] decoded = new byte[data.length];
					assertEquals(decoder.read(decoded), decoded.length);
					assertEquals(-1, decoder.read());
					assertArrayEquals(data, decoded);
				}
			}
		}
	}

	@Test
	public void testRoundtripChunks() throws IOException {
		for (int length = 1; length < MAX_LENGTH; length++) {
			byte[] data = testData(length);
			try (ByteArrayOutputStream encodedBytes = new ByteArrayOutputStream();
					BinaryHunkOutputStream encoder = new BinaryHunkOutputStream(
							encodedBytes)) {
				// Encode in two parts to exercise buffering across calls.
				int half = data.length / 2;
				encoder.write(data, 0, half);
				encoder.write(data, half, data.length - half);
				encoder.flush();
				byte[] encoded = encodedBytes.toByteArray();
				assertFalse(Arrays.equals(data, encoded));
				try (BinaryHunkInputStream decoder = new BinaryHunkInputStream(
						new ByteArrayInputStream(encoded))) {
					byte[] decoded = new byte[data.length];
					int filled = 0;
					int n;
					// Decode in small (57-byte) chunks so reads cross encoded
					// line boundaries.
					while ((n = decoder.read(decoded, filled,
							Math.min(decoded.length - filled, 57))) >= 0) {
						filled += n;
						if (filled == decoded.length) {
							break;
						}
					}
					assertEquals(filled, decoded.length);
					assertEquals(-1, decoder.read());
					assertArrayEquals(data, decoded);
				}
			}
		}
	}

	@Test
	public void testRoundtripBytes() throws IOException {
		for (int length = 1; length < MAX_LENGTH; length++) {
			byte[] data = testData(length);
			try (ByteArrayOutputStream encodedBytes = new ByteArrayOutputStream();
					BinaryHunkOutputStream encoder = new BinaryHunkOutputStream(
							encodedBytes)) {
				// Encode and decode one byte at a time.
				for (byte value : data) {
					encoder.write(value);
				}
				encoder.flush();
				byte[] encoded = encodedBytes.toByteArray();
				assertFalse(Arrays.equals(data, encoded));
				try (BinaryHunkInputStream decoder = new BinaryHunkInputStream(
						new ByteArrayInputStream(encoded))) {
					byte[] decoded = new byte[data.length];
					for (int i = 0; i < decoded.length; i++) {
						int value = decoder.read();
						assertTrue(0 <= value && value <= 255);
						decoded[i] = (byte) value;
					}
					assertEquals(-1, decoder.read());
					assertArrayEquals(data, decoded);
				}
			}
		}
	}

	@Test
	public void testRoundtripWithClose() throws IOException {
		for (int length = 1; length < MAX_LENGTH; length++) {
			byte[] data = testData(length);
			try (ByteArrayOutputStream encodedBytes = new ByteArrayOutputStream()) {
				// Rely on close() rather than flush() to finish the encoding.
				try (BinaryHunkOutputStream encoder = new BinaryHunkOutputStream(
						encodedBytes)) {
					encoder.write(data);
				}
				byte[] encoded = encodedBytes.toByteArray();
				assertFalse(Arrays.equals(data, encoded));
				try (BinaryHunkInputStream decoder = new BinaryHunkInputStream(
						new ByteArrayInputStream(encoded))) {
					byte[] decoded = new byte[data.length];
					assertEquals(decoder.read(decoded), decoded.length);
					assertEquals(-1, decoder.read());
					assertArrayEquals(data, decoded);
				}
			}
		}
	}
}
aNewObjectIdIsRequired=A NewObjectId is required. | aNewObjectIdIsRequired=A NewObjectId is required. | ||||
anExceptionOccurredWhileTryingToAddTheIdOfHEAD=An exception occurred while trying to add the Id of HEAD | anExceptionOccurredWhileTryingToAddTheIdOfHEAD=An exception occurred while trying to add the Id of HEAD | ||||
anSSHSessionHasBeenAlreadyCreated=An SSH session has been already created | anSSHSessionHasBeenAlreadyCreated=An SSH session has been already created | ||||
applyBinaryBaseOidWrong=Cannot apply binary patch; OID for file {0} does not match | |||||
applyBinaryOidTooShort=Binary patch for file {0} does not have full IDs | |||||
applyBinaryResultOidWrong=Result of binary patch for file {0} has wrong OID. | |||||
applyingCommit=Applying {0} | applyingCommit=Applying {0} | ||||
archiveFormatAlreadyAbsent=Archive format already absent: {0} | archiveFormatAlreadyAbsent=Archive format already absent: {0} | ||||
archiveFormatAlreadyRegistered=Archive format already registered with different implementation: {0} | archiveFormatAlreadyRegistered=Archive format already registered with different implementation: {0} | ||||
badSectionEntry=Bad section entry: {0} | badSectionEntry=Bad section entry: {0} | ||||
badShallowLine=Bad shallow line: {0} | badShallowLine=Bad shallow line: {0} | ||||
bareRepositoryNoWorkdirAndIndex=Bare Repository has neither a working tree, nor an index | bareRepositoryNoWorkdirAndIndex=Bare Repository has neither a working tree, nor an index | ||||
base85invalidChar=Invalid base-85 character: 0x{0} | |||||
base85length=Base-85 encoded data must have a length that is a multiple of 5 | |||||
base85overflow=Base-85 value overflow, does not fit into 32 bits: 0x{0} | |||||
base85tooLong=Extra base-85 encoded data for output size of {0} bytes | |||||
base85tooShort=Base-85 data decoded into less than {0} bytes | |||||
baseLengthIncorrect=base length incorrect | baseLengthIncorrect=base length incorrect | ||||
binaryDeltaBaseLengthMismatch=Binary delta base length does not match, expected {0}, got {1} | |||||
binaryDeltaInvalidOffset=Binary delta offset + length too large: {0} + {1} | |||||
binaryDeltaInvalidResultLength=Binary delta expected result length is negative | |||||
binaryHunkDecodeError=Binary hunk, line {0}: invalid input | |||||
binaryHunkInvalidLength=Binary hunk, line {0}: input corrupt; expected length byte, got 0x{1} | |||||
binaryHunkLineTooShort=Binary hunk, line {0}: input ended prematurely | |||||
binaryHunkMissingNewline=Binary hunk, line {0}: input line not terminated by newline | |||||
bitmapMissingObject=Bitmap at {0} is missing {1}. | bitmapMissingObject=Bitmap at {0} is missing {1}. | ||||
bitmapsMustBePrepared=Bitmaps must be prepared before they may be written. | bitmapsMustBePrepared=Bitmaps must be prepared before they may be written. | ||||
blameNotCommittedYet=Not Committed Yet | blameNotCommittedYet=Not Committed Yet |
/* | /* | ||||
* Copyright (C) 2011, 2020 IBM Corporation and others | |||||
* Copyright (C) 2011, 2021 IBM Corporation and others | |||||
* | * | ||||
* This program and the accompanying materials are made available under the | * This program and the accompanying materials are made available under the | ||||
* terms of the Eclipse Distribution License v. 1.0 which is available at | * terms of the Eclipse Distribution License v. 1.0 which is available at | ||||
*/ | */ | ||||
package org.eclipse.jgit.api; | package org.eclipse.jgit.api; | ||||
import java.io.BufferedInputStream; | |||||
import java.io.ByteArrayInputStream; | |||||
import java.io.File; | import java.io.File; | ||||
import java.io.FileInputStream; | |||||
import java.io.FileOutputStream; | |||||
import java.io.IOException; | import java.io.IOException; | ||||
import java.io.InputStream; | import java.io.InputStream; | ||||
import java.io.Writer; | |||||
import java.io.OutputStream; | |||||
import java.nio.ByteBuffer; | |||||
import java.nio.file.Files; | import java.nio.file.Files; | ||||
import java.nio.file.StandardCopyOption; | import java.nio.file.StandardCopyOption; | ||||
import java.text.MessageFormat; | import java.text.MessageFormat; | ||||
import java.util.ArrayList; | import java.util.ArrayList; | ||||
import java.util.Iterator; | import java.util.Iterator; | ||||
import java.util.List; | import java.util.List; | ||||
import java.util.zip.InflaterInputStream; | |||||
import org.eclipse.jgit.api.errors.FilterFailedException; | |||||
import org.eclipse.jgit.api.errors.GitAPIException; | import org.eclipse.jgit.api.errors.GitAPIException; | ||||
import org.eclipse.jgit.api.errors.PatchApplyException; | import org.eclipse.jgit.api.errors.PatchApplyException; | ||||
import org.eclipse.jgit.api.errors.PatchFormatException; | import org.eclipse.jgit.api.errors.PatchFormatException; | ||||
import org.eclipse.jgit.attributes.FilterCommand; | |||||
import org.eclipse.jgit.attributes.FilterCommandRegistry; | |||||
import org.eclipse.jgit.diff.DiffEntry.ChangeType; | import org.eclipse.jgit.diff.DiffEntry.ChangeType; | ||||
import org.eclipse.jgit.diff.RawText; | import org.eclipse.jgit.diff.RawText; | ||||
import org.eclipse.jgit.dircache.DirCache; | |||||
import org.eclipse.jgit.dircache.DirCacheCheckout; | |||||
import org.eclipse.jgit.dircache.DirCacheCheckout.CheckoutMetadata; | |||||
import org.eclipse.jgit.dircache.DirCacheIterator; | |||||
import org.eclipse.jgit.errors.LargeObjectException; | |||||
import org.eclipse.jgit.errors.MissingObjectException; | |||||
import org.eclipse.jgit.internal.JGitText; | import org.eclipse.jgit.internal.JGitText; | ||||
import org.eclipse.jgit.lib.Constants; | |||||
import org.eclipse.jgit.lib.CoreConfig.EolStreamType; | |||||
import org.eclipse.jgit.lib.FileMode; | import org.eclipse.jgit.lib.FileMode; | ||||
import org.eclipse.jgit.lib.ObjectId; | |||||
import org.eclipse.jgit.lib.ObjectLoader; | |||||
import org.eclipse.jgit.lib.ObjectStream; | |||||
import org.eclipse.jgit.lib.Repository; | import org.eclipse.jgit.lib.Repository; | ||||
import org.eclipse.jgit.patch.BinaryHunk; | |||||
import org.eclipse.jgit.patch.FileHeader; | import org.eclipse.jgit.patch.FileHeader; | ||||
import org.eclipse.jgit.patch.FileHeader.PatchType; | |||||
import org.eclipse.jgit.patch.HunkHeader; | import org.eclipse.jgit.patch.HunkHeader; | ||||
import org.eclipse.jgit.patch.Patch; | import org.eclipse.jgit.patch.Patch; | ||||
import org.eclipse.jgit.treewalk.FileTreeIterator; | |||||
import org.eclipse.jgit.treewalk.TreeWalk; | |||||
import org.eclipse.jgit.treewalk.TreeWalk.OperationType; | |||||
import org.eclipse.jgit.treewalk.filter.AndTreeFilter; | |||||
import org.eclipse.jgit.treewalk.filter.NotIgnoredFilter; | |||||
import org.eclipse.jgit.treewalk.filter.PathFilterGroup; | |||||
import org.eclipse.jgit.util.FS; | |||||
import org.eclipse.jgit.util.FS.ExecutionResult; | |||||
import org.eclipse.jgit.util.FileUtils; | import org.eclipse.jgit.util.FileUtils; | ||||
import org.eclipse.jgit.util.IO; | |||||
import org.eclipse.jgit.util.RawParseUtils; | |||||
import org.eclipse.jgit.util.StringUtils; | |||||
import org.eclipse.jgit.util.TemporaryBuffer; | |||||
import org.eclipse.jgit.util.TemporaryBuffer.LocalFile; | |||||
import org.eclipse.jgit.util.io.BinaryDeltaInputStream; | |||||
import org.eclipse.jgit.util.io.BinaryHunkInputStream; | |||||
import org.eclipse.jgit.util.io.EolStreamTypeUtil; | |||||
import org.eclipse.jgit.util.sha1.SHA1; | |||||
/** | /** | ||||
* Apply a patch to files and/or to the index. | * Apply a patch to files and/or to the index. | ||||
private InputStream in; | private InputStream in; | ||||
/** | /** | ||||
* Constructs the command if the patch is to be applied to the index. | |||||
* Constructs the command. | |||||
* | * | ||||
* @param repo | * @param repo | ||||
*/ | */ | ||||
public ApplyResult call() throws GitAPIException, PatchFormatException, | public ApplyResult call() throws GitAPIException, PatchFormatException, | ||||
PatchApplyException { | PatchApplyException { | ||||
checkCallable(); | checkCallable(); | ||||
setCallable(false); | |||||
ApplyResult r = new ApplyResult(); | ApplyResult r = new ApplyResult(); | ||||
try { | try { | ||||
final Patch p = new Patch(); | final Patch p = new Patch(); | ||||
} finally { | } finally { | ||||
in.close(); | in.close(); | ||||
} | } | ||||
if (!p.getErrors().isEmpty()) | |||||
if (!p.getErrors().isEmpty()) { | |||||
throw new PatchFormatException(p.getErrors()); | throw new PatchFormatException(p.getErrors()); | ||||
} | |||||
Repository repository = getRepository(); | |||||
DirCache cache = repository.readDirCache(); | |||||
for (FileHeader fh : p.getFiles()) { | for (FileHeader fh : p.getFiles()) { | ||||
ChangeType type = fh.getChangeType(); | ChangeType type = fh.getChangeType(); | ||||
File f = null; | File f = null; | ||||
switch (type) { | switch (type) { | ||||
case ADD: | case ADD: | ||||
f = getFile(fh.getNewPath(), true); | f = getFile(fh.getNewPath(), true); | ||||
apply(f, fh); | |||||
apply(repository, fh.getNewPath(), cache, f, fh); | |||||
break; | break; | ||||
case MODIFY: | case MODIFY: | ||||
f = getFile(fh.getOldPath(), false); | f = getFile(fh.getOldPath(), false); | ||||
apply(f, fh); | |||||
apply(repository, fh.getOldPath(), cache, f, fh); | |||||
break; | break; | ||||
case DELETE: | case DELETE: | ||||
f = getFile(fh.getOldPath(), false); | f = getFile(fh.getOldPath(), false); | ||||
throw new PatchApplyException(MessageFormat.format( | throw new PatchApplyException(MessageFormat.format( | ||||
JGitText.get().renameFileFailed, f, dest), e); | JGitText.get().renameFileFailed, f, dest), e); | ||||
} | } | ||||
apply(dest, fh); | |||||
apply(repository, fh.getOldPath(), cache, dest, fh); | |||||
break; | break; | ||||
case COPY: | case COPY: | ||||
f = getFile(fh.getOldPath(), false); | f = getFile(fh.getOldPath(), false); | ||||
File target = getFile(fh.getNewPath(), false); | File target = getFile(fh.getNewPath(), false); | ||||
FileUtils.mkdirs(target.getParentFile(), true); | FileUtils.mkdirs(target.getParentFile(), true); | ||||
Files.copy(f.toPath(), target.toPath()); | Files.copy(f.toPath(), target.toPath()); | ||||
apply(target, fh); | |||||
apply(repository, fh.getOldPath(), cache, target, fh); | |||||
} | } | ||||
r.addUpdatedFile(f); | r.addUpdatedFile(f); | ||||
} | } | ||||
throw new PatchApplyException(MessageFormat.format( | throw new PatchApplyException(MessageFormat.format( | ||||
JGitText.get().patchApplyException, e.getMessage()), e); | JGitText.get().patchApplyException, e.getMessage()), e); | ||||
} | } | ||||
setCallable(false); | |||||
return r; | return r; | ||||
} | } | ||||
private File getFile(String path, boolean create) | private File getFile(String path, boolean create) | ||||
throws PatchApplyException { | throws PatchApplyException { | ||||
File f = new File(getRepository().getWorkTree(), path); | File f = new File(getRepository().getWorkTree(), path); | ||||
if (create) | |||||
if (create) { | |||||
try { | try { | ||||
File parent = f.getParentFile(); | File parent = f.getParentFile(); | ||||
FileUtils.mkdirs(parent, true); | FileUtils.mkdirs(parent, true); | ||||
throw new PatchApplyException(MessageFormat.format( | throw new PatchApplyException(MessageFormat.format( | ||||
JGitText.get().createNewFileFailed, f), e); | JGitText.get().createNewFileFailed, f), e); | ||||
} | } | ||||
} | |||||
return f; | return f; | ||||
} | } | ||||
/**
 * Applies the patch for a single file, choosing text or binary application
 * and routing the result through the repository's configured filters and
 * line-ending handling.
 *
 * @param repository
 *            repository being patched
 * @param path
 *            repository-relative path of the file
 * @param cache
 *            {@link DirCache} used so the TreeWalk can resolve the proper
 *            clean/smudge filter attributes for {@code path}
 * @param f
 *            file in the working tree to modify
 * @param fh
 *            patch {@link FileHeader} describing the change
 * @throws IOException
 *             on I/O problems
 * @throws PatchApplyException
 *             if the patch cannot be applied to this file
 */
private void apply(Repository repository, String path, DirCache cache,
		File f, FileHeader fh) throws IOException, PatchApplyException {
	if (PatchType.BINARY.equals(fh.getPatchType())) {
		// Plain "Binary files differ" patch without content: nothing to do.
		return;
	}
	boolean convertCrLf = needsCrLfConversion(f, fh);
	// Use a TreeWalk with a DirCacheIterator to pick up the correct
	// clean/smudge filters. CR-LF handling is completely determined by
	// whether the file or the patch have CR-LF line endings.
	try (TreeWalk walk = new TreeWalk(repository)) {
		walk.setOperationType(OperationType.CHECKIN_OP);
		FileTreeIterator files = new FileTreeIterator(repository);
		int fileIdx = walk.addTree(files);
		int cacheIdx = walk.addTree(new DirCacheIterator(cache));
		files.setDirCacheIterator(walk, cacheIdx);
		walk.setFilter(AndTreeFilter.create(
				PathFilterGroup.createFromStrings(path),
				new NotIgnoredFilter(fileIdx)));
		walk.setRecursive(true);
		if (walk.next()) {
			// If the file on disk has no newline characters, convertCrLf
			// will be false. In that case we want to honor the normal git
			// settings.
			EolStreamType streamType = convertCrLf ? EolStreamType.TEXT_CRLF
					: walk.getEolStreamType(OperationType.CHECKOUT_OP);
			String command = walk.getFilterCommand(
					Constants.ATTR_FILTER_TYPE_SMUDGE);
			CheckoutMetadata checkOut = new CheckoutMetadata(streamType, command);
			FileTreeIterator file = walk.getTree(fileIdx,
					FileTreeIterator.class);
			if (file != null) {
				if (PatchType.GIT_BINARY.equals(fh.getPatchType())) {
					applyBinary(repository, path, f, fh,
							file::openEntryStream, file.getEntryObjectId(),
							checkOut);
				} else {
					// Text patch: run the on-disk content through the clean
					// filter first so hunks match the git-internal form.
					command = walk.getFilterCommand(
							Constants.ATTR_FILTER_TYPE_CLEAN);
					RawText raw;
					// Can't use file.openEntryStream() as it would do CR-LF
					// conversion as usual, not as wanted by us.
					try (InputStream input = filterClean(repository, path,
							new FileInputStream(f), convertCrLf, command)) {
						raw = new RawText(
								IO.readWholeStream(input, 0).array());
					}
					applyText(repository, path, raw, f, fh, checkOut);
				}
				return;
			}
		}
	}
	// File ignored? Fall back to applying without attribute-based filters.
	RawText raw;
	CheckoutMetadata checkOut;
	if (PatchType.GIT_BINARY.equals(fh.getPatchType())) {
		checkOut = new CheckoutMetadata(EolStreamType.DIRECT, null);
		applyBinary(repository, path, f, fh, () -> new FileInputStream(f),
				null, checkOut);
	} else {
		if (convertCrLf) {
			// Normalize the file to LF so the LF-only hunks match; write
			// the result back with CR-LF afterwards.
			try (InputStream input = EolStreamTypeUtil.wrapInputStream(
					new FileInputStream(f), EolStreamType.TEXT_LF)) {
				raw = new RawText(IO.readWholeStream(input, 0).array());
			}
			checkOut = new CheckoutMetadata(EolStreamType.TEXT_CRLF, null);
		} else {
			raw = new RawText(f);
			checkOut = new CheckoutMetadata(EolStreamType.DIRECT, null);
		}
		applyText(repository, path, raw, f, fh, checkOut);
	}
}
private boolean needsCrLfConversion(File f, FileHeader fileHeader) | |||||
throws IOException { | |||||
if (PatchType.GIT_BINARY.equals(fileHeader.getPatchType())) { | |||||
return false; | |||||
} | |||||
if (!hasCrLf(fileHeader)) { | |||||
try (InputStream input = new FileInputStream(f)) { | |||||
return RawText.isCrLfText(input); | |||||
} | |||||
} | |||||
return false; | |||||
} | |||||
private static boolean hasCrLf(FileHeader fileHeader) { | |||||
if (PatchType.GIT_BINARY.equals(fileHeader.getPatchType())) { | |||||
return false; | |||||
} | |||||
for (HunkHeader header : fileHeader.getHunks()) { | |||||
byte[] buf = header.getBuffer(); | |||||
int hunkEnd = header.getEndOffset(); | |||||
int lineStart = header.getStartOffset(); | |||||
while (lineStart < hunkEnd) { | |||||
int nextLineStart = RawParseUtils.nextLF(buf, lineStart); | |||||
if (nextLineStart > hunkEnd) { | |||||
nextLineStart = hunkEnd; | |||||
} | |||||
if (nextLineStart <= lineStart) { | |||||
break; | |||||
} | |||||
if (nextLineStart - lineStart > 1) { | |||||
char first = (char) (buf[lineStart] & 0xFF); | |||||
if (first == ' ' || first == '-') { | |||||
// It's an old line. Does it end in CR-LF? | |||||
if (buf[nextLineStart - 2] == '\r') { | |||||
return true; | |||||
} | |||||
} | |||||
} | |||||
lineStart = nextLineStart; | |||||
} | |||||
} | |||||
return false; | |||||
} | |||||
private InputStream filterClean(Repository repository, String path, | |||||
InputStream fromFile, boolean convertCrLf, String filterCommand) | |||||
throws IOException { | |||||
InputStream input = fromFile; | |||||
if (convertCrLf) { | |||||
input = EolStreamTypeUtil.wrapInputStream(input, | |||||
EolStreamType.TEXT_LF); | |||||
} | |||||
if (StringUtils.isEmptyOrNull(filterCommand)) { | |||||
return input; | |||||
} | |||||
if (FilterCommandRegistry.isRegistered(filterCommand)) { | |||||
LocalFile buffer = new TemporaryBuffer.LocalFile(null); | |||||
FilterCommand command = FilterCommandRegistry.createFilterCommand( | |||||
filterCommand, repository, input, buffer); | |||||
while (command.run() != -1) { | |||||
// loop as long as command.run() tells there is work to do | |||||
} | |||||
return buffer.openInputStreamWithAutoDestroy(); | |||||
} | |||||
FS fs = repository.getFS(); | |||||
ProcessBuilder filterProcessBuilder = fs.runInShell(filterCommand, | |||||
new String[0]); | |||||
filterProcessBuilder.directory(repository.getWorkTree()); | |||||
filterProcessBuilder.environment().put(Constants.GIT_DIR_KEY, | |||||
repository.getDirectory().getAbsolutePath()); | |||||
ExecutionResult result; | |||||
try { | |||||
result = fs.execute(filterProcessBuilder, in); | |||||
} catch (IOException | InterruptedException e) { | |||||
throw new IOException( | |||||
new FilterFailedException(e, filterCommand, path)); | |||||
} | |||||
int rc = result.getRc(); | |||||
if (rc != 0) { | |||||
throw new IOException(new FilterFailedException(rc, filterCommand, | |||||
path, result.getStdout().toByteArray(4096), RawParseUtils | |||||
.decode(result.getStderr().toByteArray(4096)))); | |||||
} | |||||
return result.getStdout().openInputStreamWithAutoDestroy(); | |||||
} | |||||
/** | |||||
* Something that can supply an {@link InputStream}. | |||||
*/ | |||||
private interface StreamSupplier { | |||||
InputStream load() throws IOException; | |||||
} | |||||
/** | /** | ||||
* @param f | |||||
* @param fh | |||||
* @throws IOException | |||||
* @throws PatchApplyException | |||||
* We write the patch result to a {@link TemporaryBuffer} and then use | |||||
* {@link DirCacheCheckout}.getContent() to run the result through the CR-LF | |||||
* and smudge filters. DirCacheCheckout needs an ObjectLoader, not a | |||||
* TemporaryBuffer, so this class bridges between the two, making any Stream | |||||
* provided by a {@link StreamSupplier} look like an ordinary git blob to | |||||
* DirCacheCheckout. | |||||
*/ | */ | ||||
private void apply(File f, FileHeader fh) | |||||
private static class StreamLoader extends ObjectLoader { | |||||
private StreamSupplier data; | |||||
private long size; | |||||
StreamLoader(StreamSupplier data, long length) { | |||||
this.data = data; | |||||
this.size = length; | |||||
} | |||||
@Override | |||||
public int getType() { | |||||
return Constants.OBJ_BLOB; | |||||
} | |||||
@Override | |||||
public long getSize() { | |||||
return size; | |||||
} | |||||
@Override | |||||
public boolean isLarge() { | |||||
return true; | |||||
} | |||||
@Override | |||||
public byte[] getCachedBytes() throws LargeObjectException { | |||||
throw new LargeObjectException(); | |||||
} | |||||
@Override | |||||
public ObjectStream openStream() | |||||
throws MissingObjectException, IOException { | |||||
return new ObjectStream.Filter(getType(), getSize(), | |||||
new BufferedInputStream(data.load())); | |||||
} | |||||
} | |||||
/**
 * Seeds the hash with the canonical git object header {@code "blob
 * <size>\0"}, so that the final digest equals the git object ID of the
 * content subsequently fed to {@code hash}.
 *
 * @param hash
 *            the {@link SHA1} instance to initialize
 * @param size
 *            content length in bytes, recorded in the header
 */
private void initHash(SHA1 hash, long size) {
	hash.update(Constants.encodedTypeString(Constants.OBJ_BLOB));
	hash.update((byte) ' ');
	hash.update(Constants.encodeASCII(size));
	hash.update((byte) 0);
}
private ObjectId hash(File f) throws IOException { | |||||
SHA1 hash = SHA1.newInstance(); | |||||
initHash(hash, f.length()); | |||||
try (InputStream input = new FileInputStream(f)) { | |||||
byte[] buf = new byte[8192]; | |||||
int n; | |||||
while ((n = input.read(buf)) >= 0) { | |||||
hash.update(buf, 0, n); | |||||
} | |||||
} | |||||
return hash.toObjectId(); | |||||
} | |||||
/**
 * Verifies that the current file content matches the base object ID a
 * binary patch expects to apply against.
 *
 * @param baseId
 *            base OID from the patch (the "old" ID); the zero id means the
 *            patch expects no pre-existing content
 * @param id
 *            OID of the file as known from the index/worktree walk, or
 *            {@code null} if the walk did not see the file
 * @param type
 *            change type from the file header
 * @param f
 *            the file being patched, hashed directly when {@code id} is
 *            {@code null}
 * @param path
 *            repository-relative path, used in the error message
 * @throws PatchApplyException
 *             if the base does not match
 * @throws IOException
 *             if the file cannot be read for hashing
 */
private void checkOid(ObjectId baseId, ObjectId id, ChangeType type, File f,
		String path)
		throws PatchApplyException, IOException {
	boolean hashOk = false;
	if (id != null) {
		hashOk = baseId.equals(id);
		if (!hashOk && ChangeType.ADD.equals(type)
				&& ObjectId.zeroId().equals(baseId)) {
			// We create the file first. The OID of an empty file is not the
			// zero id!
			hashOk = Constants.EMPTY_BLOB_ID.equals(id);
		}
	} else {
		// No OID known from the walk; compare against the on-disk content.
		if (ObjectId.zeroId().equals(baseId)) {
			// File empty is OK.
			hashOk = !f.exists() || f.length() == 0;
		} else {
			hashOk = baseId.equals(hash(f));
		}
	}
	if (!hashOk) {
		throw new PatchApplyException(MessageFormat
				.format(JGitText.get().applyBinaryBaseOidWrong, path));
	}
}
/**
 * Applies a git binary patch (literal or delta forward hunk) to {@code f},
 * verifying both the base and the result object IDs recorded in the patch.
 *
 * @param repository
 *            repository being patched
 * @param path
 *            repository-relative path of the file
 * @param f
 *            file to overwrite with the patch result
 * @param fh
 *            file header carrying the binary hunks and old/new OIDs
 * @param loader
 *            supplies the current (base) content of the file
 * @param id
 *            OID of the current content if known from the index walk,
 *            otherwise {@code null}
 * @param checkOut
 *            smudge/line-ending metadata to apply when writing the result
 * @throws PatchApplyException
 *             if IDs are abbreviated, the base does not match, or the
 *             result hash differs from the expected new OID
 * @throws IOException
 *             on I/O problems
 */
private void applyBinary(Repository repository, String path, File f,
		FileHeader fh, StreamSupplier loader, ObjectId id,
		CheckoutMetadata checkOut)
		throws PatchApplyException, IOException {
	if (!fh.getOldId().isComplete() || !fh.getNewId().isComplete()) {
		// Verification is impossible with abbreviated IDs.
		throw new PatchApplyException(MessageFormat
				.format(JGitText.get().applyBinaryOidTooShort, path));
	}
	BinaryHunk hunk = fh.getForwardBinaryHunk();
	// A BinaryHunk has the start at the "literal" or "delta" token. Data
	// starts on the next line.
	int start = RawParseUtils.nextLF(hunk.getBuffer(),
			hunk.getStartOffset());
	int length = hunk.getEndOffset() - start;
	SHA1 hash = SHA1.newInstance();
	// Write to a buffer and copy to the file only if everything was fine
	TemporaryBuffer buffer = new TemporaryBuffer.LocalFile(null);
	try {
		switch (hunk.getType()) {
		case LITERAL_DEFLATED:
			// This just overwrites the file. We need to check the hash of
			// the base.
			checkOid(fh.getOldId().toObjectId(), id, fh.getChangeType(), f,
					path);
			initHash(hash, hunk.getSize());
			// Decode base-85, inflate, and hash in one streaming pass while
			// DirCacheCheckout applies the checkout filters.
			try (OutputStream out = buffer;
					InputStream inflated = new SHA1InputStream(hash,
							new InflaterInputStream(
									new BinaryHunkInputStream(
											new ByteArrayInputStream(
													hunk.getBuffer(), start,
													length))))) {
				DirCacheCheckout.getContent(repository, path, checkOut,
						new StreamLoader(() -> inflated, hunk.getSize()),
						null, out);
				if (!fh.getNewId().toObjectId().equals(hash.toObjectId())) {
					throw new PatchApplyException(MessageFormat.format(
							JGitText.get().applyBinaryResultOidWrong,
							path));
				}
			}
			try (InputStream bufIn = buffer.openInputStream()) {
				Files.copy(bufIn, f.toPath(),
						StandardCopyOption.REPLACE_EXISTING);
			}
			break;
		case DELTA_DEFLATED:
			// Unfortunately delta application needs random access to the
			// base to construct the result.
			byte[] base;
			try (InputStream input = loader.load()) {
				base = IO.readWholeStream(input, 0).array();
			}
			// At least stream the result!
			try (BinaryDeltaInputStream input = new BinaryDeltaInputStream(
					base,
					new InflaterInputStream(new BinaryHunkInputStream(
							new ByteArrayInputStream(hunk.getBuffer(),
									start, length))))) {
				long finalSize = input.getExpectedResultSize();
				initHash(hash, finalSize);
				try (OutputStream out = buffer;
						SHA1InputStream hashed = new SHA1InputStream(hash,
								input)) {
					DirCacheCheckout.getContent(repository, path, checkOut,
							new StreamLoader(() -> hashed, finalSize), null,
							out);
					if (!fh.getNewId().toObjectId()
							.equals(hash.toObjectId())) {
						throw new PatchApplyException(MessageFormat.format(
								JGitText.get().applyBinaryResultOidWrong,
								path));
					}
				}
			}
			try (InputStream bufIn = buffer.openInputStream()) {
				Files.copy(bufIn, f.toPath(),
						StandardCopyOption.REPLACE_EXISTING);
			}
			break;
		default:
			// Unknown hunk type: leave the file untouched.
			break;
		}
	} finally {
		buffer.destroy();
	}
}
private void applyText(Repository repository, String path, RawText rt, | |||||
File f, FileHeader fh, CheckoutMetadata checkOut) | |||||
throws IOException, PatchApplyException { | throws IOException, PatchApplyException { | ||||
RawText rt = new RawText(f); | |||||
List<String> oldLines = new ArrayList<>(rt.size()); | |||||
for (int i = 0; i < rt.size(); i++) | |||||
oldLines.add(rt.getString(i)); | |||||
List<String> newLines = new ArrayList<>(oldLines); | |||||
List<ByteBuffer> oldLines = new ArrayList<>(rt.size()); | |||||
for (int i = 0; i < rt.size(); i++) { | |||||
oldLines.add(rt.getRawString(i)); | |||||
} | |||||
List<ByteBuffer> newLines = new ArrayList<>(oldLines); | |||||
int afterLastHunk = 0; | int afterLastHunk = 0; | ||||
int lineNumberShift = 0; | int lineNumberShift = 0; | ||||
int lastHunkNewLine = -1; | int lastHunkNewLine = -1; | ||||
b.length); | b.length); | ||||
RawText hrt = new RawText(b); | RawText hrt = new RawText(b); | ||||
List<String> hunkLines = new ArrayList<>(hrt.size()); | |||||
List<ByteBuffer> hunkLines = new ArrayList<>(hrt.size()); | |||||
for (int i = 0; i < hrt.size(); i++) { | for (int i = 0; i < hrt.size(); i++) { | ||||
hunkLines.add(hrt.getString(i)); | |||||
hunkLines.add(hrt.getRawString(i)); | |||||
} | } | ||||
if (hh.getNewStartLine() == 0) { | if (hh.getNewStartLine() == 0) { | ||||
lineNumberShift = applyAt - hh.getNewStartLine() + 1; | lineNumberShift = applyAt - hh.getNewStartLine() + 1; | ||||
int sz = hunkLines.size(); | int sz = hunkLines.size(); | ||||
for (int j = 1; j < sz; j++) { | for (int j = 1; j < sz; j++) { | ||||
String hunkLine = hunkLines.get(j); | |||||
switch (hunkLine.charAt(0)) { | |||||
ByteBuffer hunkLine = hunkLines.get(j); | |||||
if (!hunkLine.hasRemaining()) { | |||||
// Completely empty line; accept as empty context line | |||||
applyAt++; | |||||
continue; | |||||
} | |||||
switch (hunkLine.array()[hunkLine.position()]) { | |||||
case ' ': | case ' ': | ||||
applyAt++; | applyAt++; | ||||
break; | break; | ||||
newLines.remove(applyAt); | newLines.remove(applyAt); | ||||
break; | break; | ||||
case '+': | case '+': | ||||
newLines.add(applyAt++, hunkLine.substring(1)); | |||||
newLines.add(applyAt++, slice(hunkLine, 1)); | |||||
break; | break; | ||||
default: | default: | ||||
break; | break; | ||||
afterLastHunk = applyAt; | afterLastHunk = applyAt; | ||||
} | } | ||||
if (!isNoNewlineAtEndOfFile(fh)) { | if (!isNoNewlineAtEndOfFile(fh)) { | ||||
newLines.add(""); //$NON-NLS-1$ | |||||
newLines.add(null); | |||||
} | } | ||||
if (!rt.isMissingNewlineAtEnd()) { | if (!rt.isMissingNewlineAtEnd()) { | ||||
oldLines.add(""); //$NON-NLS-1$ | |||||
} | |||||
if (!isChanged(oldLines, newLines)) { | |||||
return; // Don't touch the file | |||||
} | |||||
try (Writer fw = Files.newBufferedWriter(f.toPath())) { | |||||
for (Iterator<String> l = newLines.iterator(); l.hasNext();) { | |||||
fw.write(l.next()); | |||||
if (l.hasNext()) { | |||||
// Don't bother handling line endings - if it was Windows, | |||||
// the \r is still there! | |||||
fw.write('\n'); | |||||
oldLines.add(null); | |||||
} | |||||
if (oldLines.equals(newLines)) { | |||||
return; // Unchanged; don't touch the file | |||||
} | |||||
TemporaryBuffer buffer = new TemporaryBuffer.LocalFile(null); | |||||
try { | |||||
try (OutputStream out = buffer) { | |||||
for (Iterator<ByteBuffer> l = newLines.iterator(); l | |||||
.hasNext();) { | |||||
ByteBuffer line = l.next(); | |||||
if (line == null) { | |||||
// Must be the marker for the final newline | |||||
break; | |||||
} | |||||
out.write(line.array(), line.position(), line.remaining()); | |||||
if (l.hasNext()) { | |||||
out.write('\n'); | |||||
} | |||||
} | } | ||||
} | } | ||||
try (OutputStream output = new FileOutputStream(f)) { | |||||
DirCacheCheckout.getContent(repository, path, checkOut, | |||||
new StreamLoader(buffer::openInputStream, | |||||
buffer.length()), | |||||
null, output); | |||||
} | |||||
} finally { | |||||
buffer.destroy(); | |||||
} | } | ||||
getRepository().getFS().setExecute(f, fh.getNewMode() == FileMode.EXECUTABLE_FILE); | |||||
repository.getFS().setExecute(f, | |||||
fh.getNewMode() == FileMode.EXECUTABLE_FILE); | |||||
} | } | ||||
private boolean canApplyAt(List<String> hunkLines, List<String> newLines, | |||||
int line) { | |||||
private boolean canApplyAt(List<ByteBuffer> hunkLines, | |||||
List<ByteBuffer> newLines, int line) { | |||||
int sz = hunkLines.size(); | int sz = hunkLines.size(); | ||||
int limit = newLines.size(); | int limit = newLines.size(); | ||||
int pos = line; | int pos = line; | ||||
for (int j = 1; j < sz; j++) { | for (int j = 1; j < sz; j++) { | ||||
String hunkLine = hunkLines.get(j); | |||||
switch (hunkLine.charAt(0)) { | |||||
ByteBuffer hunkLine = hunkLines.get(j); | |||||
if (!hunkLine.hasRemaining()) { | |||||
// Empty line. Accept as empty context line. | |||||
if (pos >= limit || newLines.get(pos).hasRemaining()) { | |||||
return false; | |||||
} | |||||
pos++; | |||||
continue; | |||||
} | |||||
switch (hunkLine.array()[hunkLine.position()]) { | |||||
case ' ': | case ' ': | ||||
case '-': | case '-': | ||||
if (pos >= limit | if (pos >= limit | ||||
|| !newLines.get(pos).equals(hunkLine.substring(1))) { | |||||
|| !newLines.get(pos).equals(slice(hunkLine, 1))) { | |||||
return false; | return false; | ||||
} | } | ||||
pos++; | pos++; | ||||
return true; | return true; | ||||
} | } | ||||
private static boolean isChanged(List<String> ol, List<String> nl) { | |||||
if (ol.size() != nl.size()) | |||||
return true; | |||||
for (int i = 0; i < ol.size(); i++) | |||||
if (!ol.get(i).equals(nl.get(i))) | |||||
return true; | |||||
return false; | |||||
	private ByteBuffer slice(ByteBuffer b, int off) {
		// Skip 'off' bytes from the buffer's current position and wrap the
		// remainder up to the buffer's limit; shares the backing array, no
		// copy is made.
		int newOffset = b.position() + off;
		return ByteBuffer.wrap(b.array(), newOffset, b.limit() - newOffset);
	}
private boolean isNoNewlineAtEndOfFile(FileHeader fh) { | private boolean isNoNewlineAtEndOfFile(FileHeader fh) { | ||||
return false; | return false; | ||||
} | } | ||||
HunkHeader lastHunk = hunks.get(hunks.size() - 1); | HunkHeader lastHunk = hunks.get(hunks.size() - 1); | ||||
RawText lhrt = new RawText(lastHunk.getBuffer()); | |||||
byte[] buf = new byte[lastHunk.getEndOffset() | |||||
- lastHunk.getStartOffset()]; | |||||
System.arraycopy(lastHunk.getBuffer(), lastHunk.getStartOffset(), buf, | |||||
0, buf.length); | |||||
RawText lhrt = new RawText(buf); | |||||
return lhrt.getString(lhrt.size() - 1) | return lhrt.getString(lhrt.size() - 1) | ||||
.equals("\\ No newline at end of file"); //$NON-NLS-1$ | .equals("\\ No newline at end of file"); //$NON-NLS-1$ | ||||
} | } | ||||
/** | |||||
* An {@link InputStream} that updates a {@link SHA1} on every byte read. | |||||
* The hash is supposed to have been initialized before reading starts. | |||||
*/ | |||||
private static class SHA1InputStream extends InputStream { | |||||
private final SHA1 hash; | |||||
private final InputStream in; | |||||
SHA1InputStream(SHA1 hash, InputStream in) { | |||||
this.hash = hash; | |||||
this.in = in; | |||||
} | |||||
@Override | |||||
public int read() throws IOException { | |||||
int b = in.read(); | |||||
if (b >= 0) { | |||||
hash.update((byte) b); | |||||
} | |||||
return b; | |||||
} | |||||
@Override | |||||
public int read(byte[] b, int off, int len) throws IOException { | |||||
int n = in.read(b, off, len); | |||||
if (n > 0) { | |||||
hash.update(b, off, n); | |||||
} | |||||
return n; | |||||
} | |||||
@Override | |||||
public void close() throws IOException { | |||||
in.close(); | |||||
} | |||||
} | |||||
} | } |
/* | /* | ||||
* Copyright (C) 2009, Google Inc. | * Copyright (C) 2009, Google Inc. | ||||
* Copyright (C) 2008-2009, Johannes E. Schindelin <johannes.schindelin@gmx.de> and others | |||||
* Copyright (C) 2008-2021, Johannes E. Schindelin <johannes.schindelin@gmx.de> and others | |||||
* | * | ||||
* This program and the accompanying materials are made available under the | * This program and the accompanying materials are made available under the | ||||
* terms of the Eclipse Distribution License v. 1.0 which is available at | * terms of the Eclipse Distribution License v. 1.0 which is available at | ||||
import java.io.IOException; | import java.io.IOException; | ||||
import java.io.InputStream; | import java.io.InputStream; | ||||
import java.io.OutputStream; | import java.io.OutputStream; | ||||
import java.nio.ByteBuffer; | |||||
import org.eclipse.jgit.errors.BinaryBlobException; | import org.eclipse.jgit.errors.BinaryBlobException; | ||||
import org.eclipse.jgit.errors.LargeObjectException; | import org.eclipse.jgit.errors.LargeObjectException; | ||||
return getString(i, i + 1, true); | return getString(i, i + 1, true); | ||||
} | } | ||||
/** | |||||
* Get the raw text for a single line. | |||||
* | |||||
* @param i | |||||
* index of the line to extract. Note this is 0-based, so line | |||||
* number 1 is actually index 0. | |||||
* @return the text for the line, without a trailing LF, as a | |||||
* {@link ByteBuffer} that is backed by a slice of the | |||||
* {@link #getRawContent() raw content}, with the buffer's position | |||||
* on the start of the line and the limit at the end. | |||||
* @since 5.12 | |||||
*/ | |||||
public ByteBuffer getRawString(int i) { | |||||
int s = getStart(i); | |||||
int e = getEnd(i); | |||||
if (e > 0 && content[e - 1] == '\n') { | |||||
e--; | |||||
} | |||||
return ByteBuffer.wrap(content, s, e - s); | |||||
} | |||||
/** | /** | ||||
* Get the text for a region of lines. | * Get the text for a region of lines. | ||||
* | * |
	/***/ public String aNewObjectIdIsRequired;
	/***/ public String anExceptionOccurredWhileTryingToAddTheIdOfHEAD;
	/***/ public String anSSHSessionHasBeenAlreadyCreated;
	// Message keys for binary patch application (see ApplyCommand).
	/***/ public String applyBinaryBaseOidWrong;
	/***/ public String applyBinaryOidTooShort;
	/***/ public String applyBinaryResultOidWrong;
	/***/ public String applyingCommit;
	/***/ public String archiveFormatAlreadyAbsent;
	/***/ public String archiveFormatAlreadyRegistered;
	/***/ public String badSectionEntry;
	/***/ public String badShallowLine;
	/***/ public String bareRepositoryNoWorkdirAndIndex;
	// Message keys for the base-85 codec (see org.eclipse.jgit.util.Base85).
	/***/ public String base85invalidChar;
	/***/ public String base85length;
	/***/ public String base85overflow;
	/***/ public String base85tooLong;
	/***/ public String base85tooShort;
	/***/ public String baseLengthIncorrect;
	// Message keys for the binary delta/hunk streams in
	// org.eclipse.jgit.util.io.
	/***/ public String binaryDeltaBaseLengthMismatch;
	/***/ public String binaryDeltaInvalidOffset;
	/***/ public String binaryDeltaInvalidResultLength;
	/***/ public String binaryHunkDecodeError;
	/***/ public String binaryHunkInvalidLength;
	/***/ public String binaryHunkLineTooShort;
	/***/ public String binaryHunkMissingNewline;
	/***/ public String bitmapMissingObject;
	/***/ public String bitmapsMustBePrepared;
	/***/ public String blameNotCommittedYet;
/* | |||||
* Copyright (C) 2021 Thomas Wolf <thomas.wolf@paranor.ch> and others | |||||
* | |||||
* This program and the accompanying materials are made available under the | |||||
* terms of the Eclipse Distribution License v. 1.0 which is available at | |||||
* https://www.eclipse.org/org/documents/edl-v10.php. | |||||
* | |||||
* SPDX-License-Identifier: BSD-3-Clause | |||||
*/ | |||||
package org.eclipse.jgit.util; | |||||
import java.nio.charset.StandardCharsets; | |||||
import java.text.MessageFormat; | |||||
import java.util.Arrays; | |||||
import org.eclipse.jgit.internal.JGitText; | |||||
/** | |||||
* Base-85 encoder/decoder. | |||||
* | |||||
* @since 5.12 | |||||
*/ | |||||
public final class Base85 { | |||||
private static final byte[] ENCODE = ("0123456789" //$NON-NLS-1$ | |||||
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZ" //$NON-NLS-1$ | |||||
+ "abcdefghijklmnopqrstuvwxyz" //$NON-NLS-1$ | |||||
+ "!#$%&()*+-;<=>?@^_`{|}~") //$NON-NLS-1$ | |||||
.getBytes(StandardCharsets.US_ASCII); | |||||
private static final int[] DECODE = new int[256]; | |||||
static { | |||||
Arrays.fill(DECODE, -1); | |||||
for (int i = 0; i < ENCODE.length; i++) { | |||||
DECODE[ENCODE[i]] = i; | |||||
} | |||||
} | |||||
private Base85() { | |||||
// No instantiation | |||||
} | |||||
/** | |||||
* Determines the length of the base-85 encoding for {@code rawLength} | |||||
* bytes. | |||||
* | |||||
* @param rawLength | |||||
* number of bytes to encode | |||||
* @return number of bytes needed for the base-85 encoding of | |||||
* {@code rawLength} bytes | |||||
*/ | |||||
public static int encodedLength(int rawLength) { | |||||
return (rawLength + 3) / 4 * 5; | |||||
} | |||||
/** | |||||
* Encodes the given {@code data} in Base-85. | |||||
* | |||||
* @param data | |||||
* to encode | |||||
* @return encoded data | |||||
*/ | |||||
public static byte[] encode(byte[] data) { | |||||
return encode(data, 0, data.length); | |||||
} | |||||
/** | |||||
* Encodes {@code length} bytes of {@code data} in Base-85, beginning at the | |||||
* {@code start} index. | |||||
* | |||||
* @param data | |||||
* to encode | |||||
* @param start | |||||
* index of the first byte to encode | |||||
* @param length | |||||
* number of bytes to encode | |||||
* @return encoded data | |||||
*/ | |||||
public static byte[] encode(byte[] data, int start, int length) { | |||||
byte[] result = new byte[encodedLength(length)]; | |||||
int end = start + length; | |||||
int in = start; | |||||
int out = 0; | |||||
while (in < end) { | |||||
// Accumulate remaining bytes MSB first as a 32bit value | |||||
long accumulator = ((long) (data[in++] & 0xFF)) << 24; | |||||
if (in < end) { | |||||
accumulator |= (data[in++] & 0xFF) << 16; | |||||
if (in < end) { | |||||
accumulator |= (data[in++] & 0xFF) << 8; | |||||
if (in < end) { | |||||
accumulator |= (data[in++] & 0xFF); | |||||
} | |||||
} | |||||
} | |||||
// Write the 32bit value in base-85 encoding, also MSB first | |||||
for (int i = 4; i >= 0; i--) { | |||||
result[out + i] = ENCODE[(int) (accumulator % 85)]; | |||||
accumulator /= 85; | |||||
} | |||||
out += 5; | |||||
} | |||||
return result; | |||||
} | |||||
/** | |||||
* Decodes the Base-85 {@code encoded} data into a byte array of | |||||
* {@code expectedSize} bytes. | |||||
* | |||||
* @param encoded | |||||
* Base-85 encoded data | |||||
* @param expectedSize | |||||
* of the result | |||||
* @return the decoded bytes | |||||
* @throws IllegalArgumentException | |||||
* if expectedSize doesn't match, the encoded data has a length | |||||
* that is not a multiple of 5, or there are invalid characters | |||||
* in the encoded data | |||||
*/ | |||||
public static byte[] decode(byte[] encoded, int expectedSize) { | |||||
return decode(encoded, 0, encoded.length, expectedSize); | |||||
} | |||||
/** | |||||
* Decodes {@code length} bytes of Base-85 {@code encoded} data, beginning | |||||
* at the {@code start} index, into a byte array of {@code expectedSize} | |||||
* bytes. | |||||
* | |||||
* @param encoded | |||||
* Base-85 encoded data | |||||
* @param start | |||||
* index at which the data to decode starts in {@code encoded} | |||||
* @param length | |||||
* of the Base-85 encoded data | |||||
* @param expectedSize | |||||
* of the result | |||||
* @return the decoded bytes | |||||
* @throws IllegalArgumentException | |||||
* if expectedSize doesn't match, {@code length} is not a | |||||
* multiple of 5, or there are invalid characters in the encoded | |||||
* data | |||||
*/ | |||||
public static byte[] decode(byte[] encoded, int start, int length, | |||||
int expectedSize) { | |||||
if (length % 5 != 0) { | |||||
throw new IllegalArgumentException(JGitText.get().base85length); | |||||
} | |||||
byte[] result = new byte[expectedSize]; | |||||
int end = start + length; | |||||
int in = start; | |||||
int out = 0; | |||||
while (in < end && out < expectedSize) { | |||||
// Accumulate 5 bytes, "MSB" first | |||||
long accumulator = 0; | |||||
for (int i = 4; i >= 0; i--) { | |||||
int val = DECODE[encoded[in++] & 0xFF]; | |||||
if (val < 0) { | |||||
throw new IllegalArgumentException(MessageFormat.format( | |||||
JGitText.get().base85invalidChar, | |||||
Integer.toHexString(encoded[in - 1] & 0xFF))); | |||||
} | |||||
accumulator = accumulator * 85 + val; | |||||
} | |||||
if (accumulator > 0xFFFF_FFFFL) { | |||||
throw new IllegalArgumentException( | |||||
MessageFormat.format(JGitText.get().base85overflow, | |||||
Long.toHexString(accumulator))); | |||||
} | |||||
// Write remaining bytes, MSB first | |||||
result[out++] = (byte) (accumulator >>> 24); | |||||
if (out < expectedSize) { | |||||
result[out++] = (byte) (accumulator >>> 16); | |||||
if (out < expectedSize) { | |||||
result[out++] = (byte) (accumulator >>> 8); | |||||
if (out < expectedSize) { | |||||
result[out++] = (byte) accumulator; | |||||
} | |||||
} | |||||
} | |||||
} | |||||
// Should have exhausted 'in' and filled 'out' completely | |||||
if (in < end) { | |||||
throw new IllegalArgumentException( | |||||
MessageFormat.format(JGitText.get().base85tooLong, | |||||
Integer.valueOf(expectedSize))); | |||||
} | |||||
if (out < expectedSize) { | |||||
throw new IllegalArgumentException( | |||||
MessageFormat.format(JGitText.get().base85tooShort, | |||||
Integer.valueOf(expectedSize))); | |||||
} | |||||
return result; | |||||
} | |||||
} |
/* | |||||
* Copyright (C) 2021 Thomas Wolf <thomas.wolf@paranor.ch> and others | |||||
* | |||||
* This program and the accompanying materials are made available under the | |||||
* terms of the Eclipse Distribution License v. 1.0 which is available at | |||||
* https://www.eclipse.org/org/documents/edl-v10.php. | |||||
* | |||||
* SPDX-License-Identifier: BSD-3-Clause | |||||
*/ | |||||
package org.eclipse.jgit.util.io; | |||||
import java.io.EOFException; | |||||
import java.io.IOException; | |||||
import java.io.InputStream; | |||||
import java.io.StreamCorruptedException; | |||||
import java.text.MessageFormat; | |||||
import org.eclipse.jgit.internal.JGitText; | |||||
/** | |||||
* An {@link InputStream} that applies a binary delta to a base on the fly. | |||||
* <p> | |||||
* Delta application to a base needs random access to the base data. The delta | |||||
* is expressed as a sequence of copy and insert instructions. A copy | |||||
* instruction has the form "COPY fromOffset length" and says "copy length bytes | |||||
* from the base, starting at offset fromOffset, to the result". An insert | |||||
* instruction has the form "INSERT length" followed by length bytes and says | |||||
* "copy the next length bytes from the delta to the result". | |||||
* </p> | |||||
* <p> | |||||
* These instructions are generated using a content-defined chunking algorithm | |||||
* (currently C git uses the standard Rabin variant; but there are others that | |||||
* could be used) that identifies equal chunks. It is entirely possible that a | |||||
* later copy instruction has a fromOffset that is before the fromOffset of an | |||||
* earlier copy instruction. | |||||
* </p> | |||||
* <p> | |||||
* This makes it impossible to stream the base. | |||||
* </p> | |||||
* <p> | |||||
* JGit is limited to 2GB maximum size for the base since array indices are | |||||
* signed 32bit values. | |||||
* | |||||
* @since 5.12 | |||||
*/ | |||||
public class BinaryDeltaInputStream extends InputStream {

	// Full base content; COPY instructions need random access into it.
	private final byte[] base;

	// Stream delivering the raw delta instructions and inserted data.
	private final InputStream delta;

	// Size of the fully applied result, read from the delta header.
	private long resultLength;

	// Number of result bytes still to deliver; -1 until initialized.
	private long toDeliver = -1;

	// Remaining bytes of the COPY instruction currently being executed.
	private int fromBase;

	// Remaining bytes of the INSERT instruction currently being executed.
	private int fromDelta;

	// Current read position in 'base'; -1 until the header has been parsed.
	private int baseOffset = -1;

	/**
	 * Creates a new {@link BinaryDeltaInputStream} that applies {@code delta}
	 * to {@code base}.
	 *
	 * @param base
	 *            data to apply the delta to
	 * @param delta
	 *            {@link InputStream} delivering the delta to apply
	 */
	public BinaryDeltaInputStream(byte[] base, InputStream delta) {
		this.base = base;
		this.delta = delta;
	}

	@Override
	public int read() throws IOException {
		int b = readNext();
		if (b >= 0) {
			toDeliver--;
		}
		return b;
	}

	// Parses the delta header: the base size and the result size, both
	// encoded as variable-length integers. Fails if the base size does not
	// match the supplied base.
	private void initialize() throws IOException {
		long baseSize = readVarInt(delta);
		if (baseSize > Integer.MAX_VALUE || baseSize < 0
				|| (int) baseSize != base.length) {
			throw new IOException(MessageFormat.format(
					JGitText.get().binaryDeltaBaseLengthMismatch,
					Integer.valueOf(base.length), Long.valueOf(baseSize)));
		}
		resultLength = readVarInt(delta);
		if (resultLength < 0) {
			throw new StreamCorruptedException(
					JGitText.get().binaryDeltaInvalidResultLength);
		}
		toDeliver = resultLength;
		baseOffset = 0;
	}

	// Delivers the next result byte, decoding a new COPY/INSERT instruction
	// from the delta when the current one is exhausted.
	private int readNext() throws IOException {
		if (baseOffset < 0) {
			initialize();
		}
		if (fromBase > 0) {
			// In the middle of a COPY: serve bytes from the base.
			fromBase--;
			return base[baseOffset++] & 0xFF;
		} else if (fromDelta > 0) {
			// In the middle of an INSERT: serve bytes from the delta itself.
			fromDelta--;
			return delta.read();
		}
		int command = delta.read();
		if (command < 0) {
			return -1;
		}
		if ((command & 0x80) != 0) {
			// COPY command: the low seven bits tell which offset and size
			// bytes follow, least significant first.
			long copyOffset = 0;
			for (int i = 1, shift = 0; i < 0x10; i *= 2, shift += 8) {
				if ((command & i) != 0) {
					copyOffset |= ((long) next(delta)) << shift;
				}
			}
			int copySize = 0;
			for (int i = 0x10, shift = 0; i < 0x80; i *= 2, shift += 8) {
				if ((command & i) != 0) {
					copySize |= next(delta) << shift;
				}
			}
			if (copySize == 0) {
				// A copy size of zero stands for 0x10000.
				copySize = 0x10000;
			}
			if (copyOffset > base.length - copySize) {
				throw new StreamCorruptedException(MessageFormat.format(
						JGitText.get().binaryDeltaInvalidOffset,
						Long.valueOf(copyOffset), Integer.valueOf(copySize)));
			}
			baseOffset = (int) copyOffset;
			fromBase = copySize;
			return readNext();
		} else if (command != 0) {
			// INSERT: the next 'command' bytes come from the delta
			fromDelta = command - 1;
			return delta.read();
		} else {
			// Zero is reserved
			throw new StreamCorruptedException(
					JGitText.get().unsupportedCommand0);
		}
	}

	// Reads a single byte, raising EOFException on end of stream.
	private int next(InputStream in) throws IOException {
		int b = in.read();
		if (b < 0) {
			throw new EOFException();
		}
		return b;
	}

	// Reads a variable-length integer: seven payload bits per byte, least
	// significant group first; the high bit marks continuation.
	private long readVarInt(InputStream in) throws IOException {
		long val = 0;
		int shift = 0;
		int b;
		do {
			b = next(in);
			val |= ((long) (b & 0x7f)) << shift;
			shift += 7;
		} while ((b & 0x80) != 0);
		return val;
	}

	/**
	 * Tells the expected size of the final result.
	 *
	 * @return the size
	 * @throws IOException
	 *             if the size cannot be determined from {@code delta}
	 */
	public long getExpectedResultSize() throws IOException {
		if (baseOffset < 0) {
			initialize();
		}
		return resultLength;
	}

	/**
	 * Tells whether the delta has been fully consumed, and the expected number
	 * of bytes for the combined result have been read from this
	 * {@link BinaryDeltaInputStream}.
	 *
	 * @return whether delta application was successful
	 */
	public boolean isFullyConsumed() {
		try {
			return toDeliver == 0 && delta.read() < 0;
		} catch (IOException e) {
			// Probing past the end failed; judge by the byte count alone.
			return toDeliver == 0;
		}
	}

	@Override
	public void close() throws IOException {
		delta.close();
	}
}
/* | |||||
* Copyright (C) 2021 Thomas Wolf <thomas.wolf@paranor.ch> and others | |||||
* | |||||
* This program and the accompanying materials are made available under the | |||||
* terms of the Eclipse Distribution License v. 1.0 which is available at | |||||
* https://www.eclipse.org/org/documents/edl-v10.php. | |||||
* | |||||
* SPDX-License-Identifier: BSD-3-Clause | |||||
*/ | |||||
package org.eclipse.jgit.util.io; | |||||
import java.io.EOFException; | |||||
import java.io.IOException; | |||||
import java.io.InputStream; | |||||
import java.io.StreamCorruptedException; | |||||
import java.text.MessageFormat; | |||||
import org.eclipse.jgit.internal.JGitText; | |||||
import org.eclipse.jgit.util.Base85; | |||||
/** | |||||
* A stream that decodes git binary patch data on the fly. | |||||
* | |||||
* @since 5.12 | |||||
*/ | |||||
public class BinaryHunkInputStream extends InputStream { | |||||
private final InputStream in; | |||||
private int lineNumber; | |||||
private byte[] buffer; | |||||
private int pos = 0; | |||||
/** | |||||
* Creates a new {@link BinaryHunkInputStream}. | |||||
* | |||||
* @param in | |||||
* {@link InputStream} to read the base-85 encoded patch data | |||||
* from | |||||
*/ | |||||
public BinaryHunkInputStream(InputStream in) { | |||||
this.in = in; | |||||
} | |||||
@Override | |||||
public int read() throws IOException { | |||||
if (pos < 0) { | |||||
return -1; | |||||
} | |||||
if (buffer == null || pos == buffer.length) { | |||||
fillBuffer(); | |||||
} | |||||
if (pos >= 0) { | |||||
return buffer[pos++] & 0xFF; | |||||
} | |||||
return -1; | |||||
} | |||||
@Override | |||||
public void close() throws IOException { | |||||
in.close(); | |||||
buffer = null; | |||||
} | |||||
private void fillBuffer() throws IOException { | |||||
int length = in.read(); | |||||
if (length < 0) { | |||||
pos = length; | |||||
buffer = null; | |||||
return; | |||||
} | |||||
lineNumber++; | |||||
// Length is encoded with characters, A..Z for 1..26 and a..z for 27..52 | |||||
if ('A' <= length && length <= 'Z') { | |||||
length = length - 'A' + 1; | |||||
} else if ('a' <= length && length <= 'z') { | |||||
length = length - 'a' + 27; | |||||
} else { | |||||
throw new StreamCorruptedException(MessageFormat.format( | |||||
JGitText.get().binaryHunkInvalidLength, | |||||
Integer.valueOf(lineNumber), Integer.toHexString(length))); | |||||
} | |||||
byte[] encoded = new byte[Base85.encodedLength(length)]; | |||||
for (int i = 0; i < encoded.length; i++) { | |||||
int b = in.read(); | |||||
if (b < 0 || b == '\n') { | |||||
throw new EOFException(MessageFormat.format( | |||||
JGitText.get().binaryHunkInvalidLength, | |||||
Integer.valueOf(lineNumber))); | |||||
} | |||||
encoded[i] = (byte) b; | |||||
} | |||||
// Must be followed by a newline; tolerate EOF. | |||||
int b = in.read(); | |||||
if (b >= 0 && b != '\n') { | |||||
throw new StreamCorruptedException(MessageFormat.format( | |||||
JGitText.get().binaryHunkMissingNewline, | |||||
Integer.valueOf(lineNumber))); | |||||
} | |||||
try { | |||||
buffer = Base85.decode(encoded, length); | |||||
} catch (IllegalArgumentException e) { | |||||
StreamCorruptedException ex = new StreamCorruptedException( | |||||
MessageFormat.format(JGitText.get().binaryHunkDecodeError, | |||||
Integer.valueOf(lineNumber))); | |||||
ex.initCause(e); | |||||
throw ex; | |||||
} | |||||
pos = 0; | |||||
} | |||||
} |
/* | |||||
* Copyright (C) 2021 Thomas Wolf <thomas.wolf@paranor.ch> and others | |||||
* | |||||
* This program and the accompanying materials are made available under the | |||||
* terms of the Eclipse Distribution License v. 1.0 which is available at | |||||
* https://www.eclipse.org/org/documents/edl-v10.php. | |||||
* | |||||
* SPDX-License-Identifier: BSD-3-Clause | |||||
*/ | |||||
package org.eclipse.jgit.util.io; | |||||
import java.io.IOException; | |||||
import java.io.OutputStream; | |||||
import org.eclipse.jgit.util.Base85; | |||||
/** | |||||
* An {@link OutputStream} that encodes data for a git binary patch. | |||||
* | |||||
* @since 5.12 | |||||
*/ | |||||
public class BinaryHunkOutputStream extends OutputStream { | |||||
private static final int MAX_BYTES = 52; | |||||
private final OutputStream out; | |||||
private final byte[] buffer = new byte[MAX_BYTES]; | |||||
private int pos; | |||||
/** | |||||
* Creates a new {@link BinaryHunkOutputStream}. | |||||
* | |||||
* @param out | |||||
* {@link OutputStream} to write the encoded data to | |||||
*/ | |||||
public BinaryHunkOutputStream(OutputStream out) { | |||||
this.out = out; | |||||
} | |||||
/** | |||||
* Flushes and closes this stream, and closes the underlying | |||||
* {@link OutputStream}. | |||||
*/ | |||||
@Override | |||||
public void close() throws IOException { | |||||
flush(); | |||||
out.close(); | |||||
} | |||||
/** | |||||
* Writes any buffered output as a binary patch line to the underlying | |||||
* {@link OutputStream} and flushes that stream, too. | |||||
*/ | |||||
@Override | |||||
public void flush() throws IOException { | |||||
if (pos > 0) { | |||||
encode(buffer, 0, pos); | |||||
pos = 0; | |||||
} | |||||
out.flush(); | |||||
} | |||||
@Override | |||||
public void write(int b) throws IOException { | |||||
buffer[pos++] = (byte) b; | |||||
if (pos == buffer.length) { | |||||
encode(buffer, 0, pos); | |||||
pos = 0; | |||||
} | |||||
} | |||||
@Override | |||||
public void write(byte[] b, int off, int len) throws IOException { | |||||
if (len == 0) { | |||||
return; | |||||
} | |||||
int toCopy = len; | |||||
int in = off; | |||||
if (pos > 0) { | |||||
// Fill the buffer | |||||
int chunk = Math.min(toCopy, buffer.length - pos); | |||||
System.arraycopy(b, in, buffer, pos, chunk); | |||||
in += chunk; | |||||
pos += chunk; | |||||
toCopy -= chunk; | |||||
if (pos == buffer.length) { | |||||
encode(buffer, 0, pos); | |||||
pos = 0; | |||||
} | |||||
if (toCopy == 0) { | |||||
return; | |||||
} | |||||
} | |||||
while (toCopy >= MAX_BYTES) { | |||||
encode(b, in, MAX_BYTES); | |||||
toCopy -= MAX_BYTES; | |||||
in += MAX_BYTES; | |||||
} | |||||
if (toCopy > 0) { | |||||
System.arraycopy(b, in, buffer, 0, toCopy); | |||||
pos = toCopy; | |||||
} | |||||
} | |||||
private void encode(byte[] data, int off, int length) throws IOException { | |||||
if (length <= 26) { | |||||
out.write('A' + length - 1); | |||||
} else { | |||||
out.write('a' + length - 27); | |||||
} | |||||
out.write(Base85.encode(data, off, length)); | |||||
out.write('\n'); | |||||
} | |||||
} |