From d2dc3c024f31a6f0653851e10dff7c8528b7e5b4 Mon Sep 17 00:00:00 2001 From: Dominik Przybysz <132913826+sfc-gh-dprzybysz@users.noreply.github.com> Date: Wed, 13 Nov 2024 07:02:28 +0100 Subject: [PATCH 01/13] SNOW-1734031: Bump google dependencies (#1918) --- FIPS/pom.xml | 47 ++++++++++--------------------------------- parent-pom.xml | 40 ++++++++++++++++++------------------ pom.xml | 49 ++++++++++----------------------------------- thin_public_pom.xml | 25 +++++++++-------------- 4 files changed, 51 insertions(+), 110 deletions(-) diff --git a/FIPS/pom.xml b/FIPS/pom.xml index ba1929bd2..a572eb653 100644 --- a/FIPS/pom.xml +++ b/FIPS/pom.xml @@ -398,8 +398,8 @@ ${shadeBase}.google - google.geo - ${shadeBase}.google.geo + google + ${shadeBase}.google org.joda @@ -445,18 +445,6 @@ com.carrotsearch ${shadeBase}.com.carrotsearch - - google.type - ${shadeBase}.google.type - - - google.rpc - ${shadeBase}.google.rpc - - - google.iam - ${shadeBase}.google.iam - io.opencensus ${shadeBase}.opencensus @@ -465,33 +453,13 @@ org.threeten ${shadeBase}.threeten - - google.protobuf - ${shadeBase}.google.protobuf - - - google.api - ${shadeBase}.google.api - - - google.storage - ${shadeBase}.google.storage - io.grpc ${shadeBase}.grpc - google.longrunning - ${shadeBase}.google.longrunning - - - google.cloud - ${shadeBase}.google.cloud - - - google.logging - ${shadeBase}.google.logging + io.opentelemetry + ${shadeBase}.io.opentelemetry org.checkerframework @@ -594,17 +562,24 @@ + + + + + + + diff --git a/parent-pom.xml b/parent-pom.xml index 7f165f376..17b6103f1 100644 --- a/parent-pom.xml +++ b/parent-pom.xml @@ -31,30 +31,31 @@ 1.0.7 1.14.17 1.1 - 3.33.0 + 3.48.2 1.2 - 1.17.0 + 1.17.1 1.4 2.17.0 1.2 1.5.4 0.9.5.4 - 2.22.0 - 1.19.0 - 2.21.0 - 2.22.6 - 2.10.1 - 2.18.0 + 2.48.0 + 1.29.0 + 2.47.0 + 2.44.1 + 2.11.0 + 2.35.1 24.3.25 - 2.31.0 - 32.1.1-jre - 1.43.3 + 2.57.0 + 33.3.1-jre + 1.45.0 + 3.0.0 3.0.2 - 3.25.5 - 1.60.0 + 4.28.2 + 1.68.1 2.2 2.4.3 - 2.17.2 + 2.18.1 true 3.1.0 5.13.0 @@ -77,7 +78,6 @@ 2.0.13 5.1.4 net.snowflake.client.category.AllTestCategory - 1.6.9 2.4.1 1.9 3.6.3 @@ -200,6 +200,11 @@ guava ${google.guava.version} + + com.google.j2objc + j2objc-annotations + ${google.j2objc-annotations.version} + com.microsoft.azure azure-storage @@ -474,11 +479,6 @@ ${bouncycastle.bcpkixfips.version} provided - - org.threeten - threetenbp - ${threeten.version} - org.tukaani xz diff --git a/pom.xml b/pom.xml index 69ff5b183..e80b0c75f 100644 --- a/pom.xml +++ b/pom.xml @@ -824,12 +824,8 @@ ${shadeBase}.google - google.geo - ${shadeBase}.google.geo - - - google.storage - ${shadeBase}.google.storage + google + ${shadeBase}.google org.joda @@ -876,20 +872,12 @@ ${shadeBase}.io.netty - com.carrotsearch - ${shadeBase}.com.carrotsearch + io.opentelemetry + ${shadeBase}.io.opentelemetry - google.type - ${shadeBase}.google.type - - - google.rpc - ${shadeBase}.google.rpc - - - google.iam - ${shadeBase}.google.iam + com.carrotsearch + ${shadeBase}.com.carrotsearch io.opencensus @@ -899,30 +887,10 @@ org.threeten ${shadeBase}.threeten - - google.protobuf - ${shadeBase}.google.protobuf - - - google.api - ${shadeBase}.google.api - io.grpc ${shadeBase}.grpc - - google.longrunning - ${shadeBase}.google.longrunning - - - google.cloud - ${shadeBase}.google.cloud - - - google.logging - ${shadeBase}.google.logging - org.checkerframework ${shadeBase}.org.checkerframework @@ -1031,6 +999,7 @@ + @@ -1040,12 +1009,16 @@ + + + + diff --git a/thin_public_pom.xml b/thin_public_pom.xml index 460ceaec2..1d3d26fdd 100644 --- 
a/thin_public_pom.xml +++ b/thin_public_pom.xml @@ -41,17 +41,17 @@ 1.17.0 2.17.0 1.2 - 2.21.0 - 2.22.6 + 1.29.0 + 2.47.0 + 2.44.1 24.3.25 - 1.19.0 - 2.31.0 - 32.1.1-jre - 1.43.3 + 2.57.0 + 33.3.1-jre + 1.45.0 3.0.2 - 3.25.5 - 1.60.0 - 2.17.2 + 4.28.2 + 1.68.1 + 2.18.1 3.1.0 5.13.0 2.8.1 @@ -63,7 +63,6 @@ UTF-8 UTF-8 2.0.13 - 1.6.9 1.5.6-5 @@ -156,12 +155,6 @@ gax ${google.gax.version} - - - org.threeten - threetenbp - ${threeten.version} - com.google.auth google-auth-library-oauth2-http From 2314d35d73b7cccc6512d47a05066236f3268e51 Mon Sep 17 00:00:00 2001 From: Dominik Przybysz <132913826+sfc-gh-dprzybysz@users.noreply.github.com> Date: Wed, 13 Nov 2024 09:57:54 +0100 Subject: [PATCH 02/13] SNOW-1799642: Add array bind supported log for prepared statements (#1960) --- .../client/jdbc/SnowflakePreparedStatementV1.java | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakePreparedStatementV1.java b/src/main/java/net/snowflake/client/jdbc/SnowflakePreparedStatementV1.java index 000d4634d..cb293690d 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakePreparedStatementV1.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakePreparedStatementV1.java @@ -58,14 +58,19 @@ class SnowflakePreparedStatementV1 extends SnowflakeStatementV1 implements PreparedStatement, SnowflakePreparedStatement { private static final SFLogger logger = SFLoggerFactory.getLogger(SnowflakePreparedStatementV1.class); + /** Error code returned when describing a statement that is binding table name */ private static final Integer ERROR_CODE_TABLE_BIND_VARIABLE_NOT_SET = 2128; + /** Error code when preparing statement with binding object names */ private static final Integer ERROR_CODE_OBJECT_BIND_NOT_SET = 2129; + /** Error code returned when describing a ddl command */ private static final Integer ERROR_CODE_STATEMENT_CANNOT_BE_PREPARED = 7; + /** snow-44393 Workaround for compiler cannot prepare to_timestamp(?, 3) */ private static final Integer ERROR_CODE_FORMAT_ARGUMENT_NOT_STRING = 1026; + /** A hash set that contains the error code that will not lead to exception in describe mode */ private static final Set errorCodesIgnoredInDescribeMode = new HashSet<>( @@ -88,10 +93,12 @@ class SnowflakePreparedStatementV1 extends SnowflakeStatementV1 *

Currently, bind name is just value index */ private Map parameterBindings = new HashMap<>(); + /** map of bind values for batch query executions */ private Map batchParameterBindings = new HashMap<>(); private Map wasPrevValueNull = new HashMap<>(); + /** Counter for batch size if we are executing a statement with array bind supported */ private int batchSize = 0; @@ -133,6 +140,12 @@ private void describeSqlIfNotTried() throws SQLException { if (!alreadyDescribed) { try { this.preparedStatementMetaData = sfBaseStatement.describe(sql); + if (preparedStatementMetaData != null + && !preparedStatementMetaData.isArrayBindSupported()) { + logger.debug( + "Array bind is not supported - each batch entry will be executed as a single request for query: {}", + sql); + } } catch (SFException e) { throw new SnowflakeSQLLoggedException(connection.getSFBaseSession(), e); } catch (SnowflakeSQLException e) { From be85cf7092b22a9b77cf1e41c7afacbc33ac59c0 Mon Sep 17 00:00:00 2001 From: Dominik Przybysz <132913826+sfc-gh-dprzybysz@users.noreply.github.com> Date: Thu, 14 Nov 2024 13:06:26 +0100 Subject: [PATCH 03/13] SNOW-1803984: Fix shading of google properties (#1965) --- FIPS/pom.xml | 61 ++++++++++++++++++++++++++++++++++++++++++++++------ pom.xml | 61 ++++++++++++++++++++++++++++++++++++++++++++++------ 2 files changed, 110 insertions(+), 12 deletions(-) diff --git a/FIPS/pom.xml b/FIPS/pom.xml index a572eb653..13e7db8f4 100644 --- a/FIPS/pom.xml +++ b/FIPS/pom.xml @@ -397,9 +397,58 @@ com.google ${shadeBase}.google + - google - ${shadeBase}.google + google.api + ${shadeBase}.google.api + + + google.apps + ${shadeBase}.google.apps + + + google.cloud + ${shadeBase}.google.cloud + + + google.geo + ${shadeBase}.google.geo + + + google.iam + ${shadeBase}.google.iam + + + google.logging + ${shadeBase}.google.logging + + + google.longrunning + ${shadeBase}.google.longrunning + + + google.monitoring + ${shadeBase}.google.monitoring + + + google.protobuf + ${shadeBase}.google.protobuf + + + google.rpc + ${shadeBase}.google.rpc + + + google.shopping + ${shadeBase}.google.shopping + + + google.storage + ${shadeBase}.google.storage + + + google.type + ${shadeBase}.google.type org.joda @@ -449,6 +498,10 @@ io.opencensus ${shadeBase}.opencensus + + io.opentelemetry + ${shadeBase}.opentelemetry + org.threeten ${shadeBase}.threeten @@ -457,10 +510,6 @@ io.grpc ${shadeBase}.grpc - - io.opentelemetry - ${shadeBase}.io.opentelemetry - org.checkerframework ${shadeBase}.org.checkerframework diff --git a/pom.xml b/pom.xml index e80b0c75f..96f2a65b6 100644 --- a/pom.xml +++ b/pom.xml @@ -823,9 +823,58 @@ com.google ${shadeBase}.google + - google - ${shadeBase}.google + google.api + ${shadeBase}.google.api + + + google.apps + ${shadeBase}.google.apps + + + google.cloud + ${shadeBase}.google.cloud + + + google.geo + ${shadeBase}.google.geo + + + google.iam + ${shadeBase}.google.iam + + + google.logging + ${shadeBase}.google.logging + + + google.longrunning + ${shadeBase}.google.longrunning + + + google.monitoring + ${shadeBase}.google.monitoring + + + google.protobuf + ${shadeBase}.google.protobuf + + + google.rpc + ${shadeBase}.google.rpc + + + google.shopping + ${shadeBase}.google.shopping + + + google.storage + ${shadeBase}.google.storage + + + google.type + ${shadeBase}.google.type org.joda @@ -871,10 +920,6 @@ io.netty ${shadeBase}.io.netty - - io.opentelemetry - ${shadeBase}.io.opentelemetry - com.carrotsearch ${shadeBase}.com.carrotsearch @@ -883,6 +928,10 @@ io.opencensus ${shadeBase}.opencensus + + io.opentelemetry + 
${shadeBase}.opentelemetry + org.threeten ${shadeBase}.threeten From 4364399bd454b290851927bc927e1cbd9d36823c Mon Sep 17 00:00:00 2001 From: Dominik Przybysz <132913826+sfc-gh-dprzybysz@users.noreply.github.com> Date: Mon, 18 Nov 2024 18:43:51 +0100 Subject: [PATCH 04/13] SNOW-1800995: Merge io.netty.versions.properties during shade (#1967) --- FIPS/pom.xml | 4 +++- pom.xml | 3 +++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/FIPS/pom.xml b/FIPS/pom.xml index 13e7db8f4..0b874551d 100644 --- a/FIPS/pom.xml +++ b/FIPS/pom.xml @@ -589,7 +589,9 @@ - + + + META-INF/io.netty.versions.properties diff --git a/pom.xml b/pom.xml index 96f2a65b6..2a1cf4ea6 100644 --- a/pom.xml +++ b/pom.xml @@ -1023,6 +1023,9 @@ + + META-INF/io.netty.versions.properties + From e57b1ccf50163fda0dd01d24578900cf37f7878e Mon Sep 17 00:00:00 2001 From: John Yun <140559986+sfc-gh-ext-simba-jy@users.noreply.github.com> Date: Mon, 18 Nov 2024 22:18:35 -0800 Subject: [PATCH 05/13] SNOW-1787996: Uncontrolled logging in snowflake-jdbc (#1966) --- dependencies/Readme.md | 2 +- dependencies/arrow-format-17.0.0.jar | Bin 118095 -> 118063 bytes dependencies/arrow-memory-core-17.0.0.jar | Bin 122805 -> 122795 bytes ...arrow-memory-netty-buffer-patch-17.0.0.jar | Bin 35441 -> 35409 bytes dependencies/arrow-memory-unsafe-17.0.0.jar | Bin 10746 -> 10714 bytes dependencies/arrow-vector-17.0.0.jar | Bin 2063599 -> 2063567 bytes 6 files changed, 1 insertion(+), 1 deletion(-) diff --git a/dependencies/Readme.md b/dependencies/Readme.md index 5abaea2ae..28afe4031 100644 --- a/dependencies/Readme.md +++ b/dependencies/Readme.md @@ -1,2 +1,2 @@ -Arrow dependencies are built from internal branch `upgradeTo17.0.0-v2`. This build was applied the AIX fix and the customer logger instead of slf4j logger. +Arrow dependencies are built from internal branch `upgradeTo17.0.0-v3`. This build was applied the AIX fix and the customer logger instead of slf4j logger. diff --git a/dependencies/arrow-format-17.0.0.jar b/dependencies/arrow-format-17.0.0.jar index b4a34e86febd233cb6b3595c7e1bd9ddbea83687..103c9c00de6d6deee775ea57bab4a58c1ab3b82d 100644 GIT binary patch delta 2184 zcmY*bc~n$o6!*Tz;$aYG6d`dGLZQHb5g{x`Ku}}L0*-)$x#cm12(w59vZJC0Ad z3aD9b!!nF846@oNiZX=a(M;)3+9p<{m1!_y_q~S`&L12}6DM1n94J zTyfhq_Roh!DIb^iTSeN1F7IvbR`z{WkaG{`9yb}> z7RC&8Z8It7s&9GTEB5P%Z(oyB`%y{B?l(8}&fc6-^0VKk85V|&z3Y-rW|w<>oF7>5 zSm(81^5jE4>(=FlFWdX|b{B)aN9M+^p{{{}=?p(3c#(d-vz^W6OVVu(|QYddU+_Q~oc}`@8dJ{(8G~Uc!vNa=XD2yf$?0 zL2TAdY1)xR-bt4y9=$RD!Ry9viUZ2B>l@W;T6@a%tEy6iGqU1-O;CCj?MOF;W$#)O zzF227R8SqO+L2H@KRm)NWbn7L@GW!0d~3sQp9zu64KmxQMPd0BZ4G~Wcb4|{$NMh6 zqS+VGvu#y(6c!&$zH{WQ`FW9StR%xSW@%-1j$0w*eDba_y<2`jvF%cyc1g7)^~IW! 
zn~5*DENJ0^=Ty`cPA}^S-1{~L`)HzfPMN;6q0hzoulE+?zk`;!j(SJm6VEIhPh9 z#WXbqR8&zwStw894^>nx=`{iSYe<@V1Yut>uu?;T+9)F2IJ;;oyr-eHSj6BQEx2pM;EY1~u zFjCB_4|p$Lt^oxEAk;!Kqs}I3D(2PFw(rj+Jc--6bqs!2OHKT?4%_1}Tt|%_ZRc%N z=^&KZRVes=E|iH!v|cr`QKX8za;&TaTTY`IUg(E)w0+-mPd!SeexHDs>cJ8vdTJP4 z`jiM@5|45*Rgtc3Hsi32yijoLSb|?3=|F$V8)82F~IpL^*(T%>VO0$-x{j z1~UwxU zi1cS?A&mXQPcVwxK*@S2MaPzNAj40UKo3wGc(Hy#W_m1^27$fU;!Yd2xE3FJ9IZ`k!USuN9Ll9e+C>pklXy^DB4C|8VtIPm4Y(cWE7 tBh7N;e$g1+Nu{oripD#gR7!IQO`R|Uzv={Un!dqKSYV|)LVr3&{RbfvM|A)I delta 2218 zcmY*bYgAKL7G~dzs30!|!59$47orjc6byz()q*%6&?2HOVrn(MsxFG6gN#M1gA6vk z(ruyEPHW4<00F`q50w7KfKPC#B(cj|K+*X1!J0m2v!qqE4-Ny|Jz&F>ZQK z!L_io)azjF%X(tFY(&4MS=nCQoAm$vvaWW>)n)>P`vBdlhiCOZA_4ujiA@ z&$|44BXMGd+ov8U6|bJwsrEKLNvm`cA~StwqLaZaqt~&z*AaeEk2+xN&W9MzY%RqaEFAWb;)wzXu)3-`oY;mK0UpOjz}<_m!d)~xCnEl8jO-L&vNm#bvi^-4=D)lzoi zX3@%^78KlrEh3IBBU_a_gt8r8SY1Xr8M}n&G${_3!D4RcBf@Ti^KMpL_;~MVGspHC zqo^$h8N7>q<*8B zG1iu4&yFZwJ-X30HASVERS6S>Z1u>4zEertXLjmMB#`^*w1|VNU<^K}q=t!kZv;PX z|M$YeVpq(mBK^PlB7Rs!CjE*D%P_K<3^kP!_7HkYEtrW^-DndhbYiqfXNg0`?2IaL z2F}*NIE<*FvNpAE1b^X7}A@Lk)jKZ9S~z zZ0`t3ebaDG12z1)hp-2xHBiINeS|&nZUZ%x_hXVCq&T~gZk7R)8X*c!Vp$_3aFR#H z&~)_H(|YL(BZ=pJ_tN<5g17Z_bK{1Lk1OmPVs3@YukCP0C*7*<*Wx~88o(C{(O`gg z`7Cn~+@mG<*#)q|5_1?i!gUuw!h4K0#pHaxJtKO1kg!egasWH)|6hP*4yFWajB5fJ zKh>Ej-!;K9_!j$`AcmhelWA+3VHGc(Z6XseXf`;q!UN6JT<`hY*f{jLNHq`0nEZ!} z@D^|D!$|K%2;^4=GP1E1>~U8h(EFza=JGKLlROa*D!`WY=xCuH&w@wATtjGFrnJ(M z>~NDD!7p5GA`aNyN){sGnBv(63jSt1BM;+g>x}O(%-KL+zi$FdK7Rv?tx=Jgo;Fy* zdnb-k{M)HmfXbvyX3oyHle0_PncTab42xyY?V+;9cOzf5hs8eY0CNoNptAC0lj6(o zOEnR5?C$_ye$T;CO4dPO?=ReNi5hW-M#WqX(GZB;`ZIoWwu2cg%jO8Mp~mCK$c|Sz z4OQ+jbnB#1m0zaWch6+x)MdI9QEEoEsKEwR6|`WCF!B(blVFU+ac6Rjzta#qX_~iv o$;dz_l^V=rWO)~rx_TD7yTAvFyI?*n#XDUPX>sEm{Vg&3A3Lu@$p8QV diff --git a/dependencies/arrow-memory-core-17.0.0.jar b/dependencies/arrow-memory-core-17.0.0.jar index 12d9c9116dc1242937aaa40993259eb331662c47..916e38238080a99b7c33949baee5cce45043fa4e 100644 GIT binary patch delta 4857 zcmZ9PbzD^2*Ty-61JWSfAc)k^DJ7kf(ukzAq@*yEq~rjHMx_;LL0|ysj-gvXMG!?o zE~1FPgS?k}?>qm@=lQN@uf6tKd**Y_t9INM?YPxBlt{+R+7HD57Sn-pj|y-aFl(BW_WEw3Ye?NqBytS_640(4jz}sYyc60xIyOiZ)CQ zPFzj6vVWBB;+M07qd}}-vx3G*Z;eC;?UG&#=lGKb6dpx>uZ+e@OIh#NPX$zram0o7#@} zrY+hn3$^6rXAC7a?5J!84sQ+3Wr*fBHkUiMEk18>cq(Qn+yycWQ6SLijgn6o4z52b5?eY`0q?qW~wj7IAAov zjC>2*%_4J^2n}p~0$;eERRuEa>SG`*%5lg8!;mKsGqzQIw~D~EHhaP3I2X8 ztH*-5gW_qHpd4SNRy>=aRnrzT?t$AK@c1WOR}=dZG{ei6v4(0Kjd!oQ?e0JmEkWf_ z(;?-xy97v;f8SK+g5%E>hnl2eTd~Qz1JMEri|p*4@26ONbK~K-tGbtk$z1XM_wK16 zPajKvWv+6^x+lR#$OA@ASZ*A49?D00e*_0b?&X`>qy2QA*QOtgIq#q;=hzAH4;-gj zoot(wGnFEC2(yWrh1I^{P0A^Kxl=ZYwnS} zi-zod_U-1^zmf&GCvYi0iHRf}@Z{iMR?x3M{o%|ZkjQ*`L*1P3K7N>b+Rk@7{x>*n z%xfb1R5F!;Y7!b+Y)O)G)*N#iue+A)cZ5kYRprR{@c!F~sIYCGy5}@)xOg}_g(2nX$}qdTP=GpWOJ$+{mhnVZ5!X|eaWgz84cro^fyg4D25o1_F{yoT zofi>fn8B^9dy|4Q-sVnR+~9<^LJj4TNj)!LnHuF$@Dcy*T`6}CcaE(bvG4DamYRl| z-pYKG8fxVqYYW-D9BTL-#MSi7i(BTFT~#Tim%0B9*<90;AWI?}vn}JX8iilKu0rzH zD#zBcMHtCimk?SxuuPW-8AT-1x zut3+d9hd0!DE4((t`S*r+2Zv{riVBM>3bojAvd$f)DHqk+Cp)QkDuY*?F4!DNSQKL z*tnTqBD*yhbUnu-cvWO*x@^Ne)Ux{wA1cec1yv&`78d3kYHYVRDiaUG5cud0w;#l< z*Gl@>5Kn!C=#naobZZc3namia$3@|il3`jm=v-nw6R#keMDYV9>M&{~*E+f2lX2{d zDrO!E7&QwWht#O>9&+PZ2IP*}4(DC+1l36wB;~NHw53%*63jU0s%yU~<~1gZc1JvQ zIS@Has(1xEutHTw#Xh&dr#SlAl&hXV%H#S?wjXbjG0(1}V)JY?hTz+L(r)SY@jR6z z-kJ|YUX5;!Dcx9ykr}PJT7<+(E_WoTyEJoW<9Fkd&t$@u)q~$~Qf*G7JY6iPs7%(0 zkfn2C3v+%>n5mLex89ozB25Z9DneZ<(zi5A7W+AO=R+w+d`K`hn9fLp3A}AIgfas*LM zrAJ;XeOfPD9b71zdgQ*CgL!(X^@}@BA(H(ClMKVklfEv&fvjWSNpOtHiP&6LCYd)9SXm%xc$DC8sHQ>omEfbU5`+;r>QfX|I(@ 
zET(*Flr~_0XvslGkdxg^Tl6JXE4RZ?iY6giv7`#ybJ99>bjU5Q($&_aSgBWwNd@`+ zB-ZO452K)>eyI6WQOaQV1Q(^$ZHwM+ao^3F$N6I6KRl2o?wTt4sg}Mg4Y!!U`#YRt z9nUwP3A*6e9N*~vO#O9Z*pxZ_$qTOM^gNezyP95S`%ap2~}*HJvNIq}U! z?P~Fwl*Dsu`UV$&lFHOI?+NLuHQwBp)>swtb(#{oT?GDmU4DBq`<~MiyOns^AItRj zf{EnbXwrqmFgtx0w9MGJPG{L6NGUhHD~x`=_VN3pxVz5Z)3r{`y4-ioiQ2ma;Ygwe z(aarudleNMJ-zj#?}}VaS4Ry`^tuXm9Vn5QU1NsHUGbAUU2?l)6VcV;`SPN0aZk&3 z{fAYh;TVeLS5soHaT8kIG{O&h%4hRqdN`?Xb&sH>il>4HG?=XSH-7SHI28W6Bh{i* zYw$}~RY%nr_fDhi_yDtx$h8))Yx_tg!|4?b*_V{YAGxWA?b7$U*|u2gtg3x7NC>wE#8lE=2v(zm(PEiD-{Xj~ z-Ar$`ccr^65Uf1Z&Tq5WFh~>>ct$mPX zEV?GDVT*HRubwHPltV_QFV_CKW29r!R{3q>4%(Dp^20{Hbf-s>-UO)B^_=nyCc)wD zYDeRcz{5y^8au=L!+9p^MLsM@Cb7!oCn^0lMTSK}zsk|{G^zl-suio8M214g;&(3H z>$8B|BL=VK%yx9d>xP-n?brwNa@20FWABEU%5veO2MG#$GQ_F{KLv7wwX1lhKJI5c z`Z0+9@U+dVitdv^>&`e0x5}Dij79;Ha6$g|G4Yd6WVamzzgUyE=Y3&FL)IO3JTj;u zOmGY<$QIojzE|;Xuwp58^G)EJ&v`lt$3|~(F6{^A$Qy8(4Fe3K2@ks7RH`Hvac_j^%S{odJ-H}z$NJ4@kyb<`u2hqIAbyG{a^y!iSo43WVUT$Oa03H?GU%^O-_$ zb62j+`>GwJTrv5P;fMwbt;S-O-rz2S603LweA11JhIPNCR7<`90T=$vsfG$4R_ENT zE4GS4z>PiLrbBy^{4Hp1zq)X6Z}zUPeM*^E;!tjxx1{;DMWqq*NWr9ZRFmlxb^YLS{j9+Ydfj;X zfC;9RCUN~2Ik&zTku_)V!!X+4j?5R15noOMz7$=T^+-f+ZwYg4k8KASSdLwT%Y0ne zoS(|=K@Zd~E5O~0uQpl>-sjgJcHwI1)_099p1Y!1bFG8`(L*)4eX=mZLnRPQ&_w(3 z;MSFIKWj~NFQ++ABK&r}(aw(MLH z2aE_gqaO4lRfCxRdL^CEl~nlDoq|C8#_WwTpAnxFjPg&98e|Die2=K{a#EeQcjc~Z z#cIX2SJvAHa#Ugg;A>)ArvmR4nv6YpUuUX>)|R}%`(Q2e^exFq-i)KcZh8`d$H63~ zr@MlTp{kjyNwt2V+|2BKrj~CCeNJgKcUc2ZumU7pk;mn6PUN;kvN!Ut#4yR*oZ{Xw$yt(~@YdgQV2VU+&U# zx2>L}?;v6M&w}!!PG;?K@;|U0a^6LadHxEJMOd;B@YfPD7Sk6zeZX(3#oL#~o7kx& zYHw5WQiOFZR-;9DP99rSr~#Fdr+GZj8N@ivU8(C?yw5zWXQmdlTcdiKgi4B6JT~)d z{(3o&-lwf~Mb!m7-VQ!|sbllOa;o;=l96oGBn9%bs-)`EW#WauYTK^X?lp zG=~g?LzHagzF&T0ojt9B*4+SI1P4ze2M<6C;{Wqj2C8=g67(0(^3aljC8=7 zUVsO@*#`&!zMy9x-~@YX^oOAYjEO*8=vXLGi+|V7XmOs`!Ga8w9)nK(s2`vM9Kk33 zP`*CP^LkhY0C8A<7#KDPTn9tX#6sk`SRDYQVW&xd1WGfR^iO+0`#~rN2N*gCn7~3? 
z&Vuyd;UFLb!)ZSkIz!MHT01WU9P}6laKKkXfDo*v>;GetVW>AOz5m5xFo(H+2%V?R=rg9EDR%RnCo-7=lwbq==K@avvJOr`)=+_SLOKm-!_>vk#Tx+t z4~&?GhKZ2)7ulEwlwnv>V9hLaGqq-*>2b)Pi;5ZO7UU|P3;bC?9u}Z{F4~l#-}2c7 z1=DAt`bWU(S-=cdd;2`fGY4I|IrDSDXaQMs=b)m*EiVWW_-zh4W4`@4QJ#l(9dbAq zq>j)j8t0(}HqIA>03=!f1YsukF9<&9v;d`u4LB#G0Z=i63(%^b;B)dj1b~AiApix) zw+P|pkPA!=CZ7?`M;8PRt}a4dRfs$53wbIw91M*Es6eeH$R_sq0wym3H-RP)vjp8& zzLX0Lc}c*a6o49ZUWOKEr~f-b45F8zI?3`bpiUliK$;cEi7fm-hy%72LeTvT`n6GperDH`(Kwy;yLBf}m2I-JSV5LJsSlA&1K@f{tx{+8wq)R%L zMnD9`;y=m{-uFMxKF{p;bI!fz+;h*&bLVvnVO0xZSr#3|dI!5W4uHW}&cR@`Fi^J* zphp$!n>tS;XhTH5C@>yM7pXPlh>1)j`Ixv#D~nh@D3>R?x|Y*=J7%F2WPNKY`v^j> zFXK zTbKd7sx#pGLEx{Wkg85;&BV*1vbnWN5xJ;yypG;<$u147%@Imj(+xIm9+v05wFET8 z9{e)vMl0Z%n;eJgnzp%5+HA?CuJo5=cDK(pYo$ zjMO&2qHJ&_lv~HG+i$Lp!(N5!eewPEQC_6STRUzo@IcslJhSVCWVMa_tB)hfrfp`C2bgWarHhFdnD9OwY`rv#=IMAoqwPZU`Q@cVE9N$7y4CjLuP_m;9YGa2uZZHqCz-&f7wQ4Z1d*k05^tn;x>Bb| zmpfC*_T$3vJoq9qh$Mgd?Z*Mmr}aj<37&Y3eaq2o;+NYr+GwO7K_9^^)5WNWNsb%acU;P!+8S%d*Gr*p z<8JDz#}hLg^Lx8p7s2YJ|9<4d!V{sMG5dNVVxqd=-G#?Pi7~a+qK^T70r#DZ{neNc zKjLP->F@NrmZTQEkvAKWNe%1-W=WSdQ-c(&4zcY%U|pA-C40zD8(~E#;~*i4xC?LpSjPCbbujZWXXX+GWhvI`WW_#)|z>q58a`zgQ z0}qdq^Pb&`ofslR{VpJEQmps>pW|+had+RH{$HQ2&KrLU-tRS~WP_JcAK$aNTrys!-yVHu%{LD<5E;TCS%+BY zl&BpiCoFk=UADw;nP7&mDal|lzg9g;N^^(aQJ$x-csOY0X0nc&SC)8(w9Hod`_$~|9 z8fKQ(?@0tTF@IeQS-+Nxva*oCEF;N(%ywJ%Cg4f)5mBn|mqn@4j`|VEgthj~vTZg< zx^JzUY|aGFV?9K%l9)pwJ}+ZJM}jwxqkxgLr_aISYRO_^&)ASZO1w#>HGZWXA9Kim zOP0n;zdx?C?^ADgZpM}bflVP}3K0`lo|`GnGeqwurRvMcFvSZ@j#pF?2FoVxFQQ|& ztu?0W?mqEr>j@KfnqgeTOgU`0cp+%k<4Bc^LMsB+@yERxqMO~Uxi*v0=2xh=&xiZdJM-5}%-c}JyVYp09y$Bwb;s2cWY5#Cdhz!;kKzqpxsLxnF}ZN z+m5td;F8!ssLzPDRqa$3i}kpS6!^^3OMM1B7;VRi!o>?b&2@b-cJY-=2o-{M6;-j6 zlrQD`)vljkL{;UB5x&lg?ku9fefEi&6rJnKsE`yu8XD?JVTf}RuXtYC774V;PG+`G z&fl`ll8}3~Ox4VFpM3<&{M^~!tV?Q7nC-b$T9o_dHxz~BraxR~!(Ss1>fm_s`Lq?d z8D9cdUvA<>J((PwmrS+g>qJV+XtF2Z3O?KO1p_v&2p_U=^K9eJP#N=%uiil%3Y;9W z^wFYEasv_t$Q4HSs6z^tyS~LHf_rh|r5pa_rOw}8CQ9vIVN>W575}N%RcibBy-Y~> z{KhvnbCltrt!emriKV@_I6d8^kR*jvwBVvyJBQCtV$}F90~-prDf2t3W%C>1Z{y!o zhn-hml&taevw(3s2?m^*PaEi`qi(8a#xmLz=7}H3&|n!qK8>wS^Xp1$5H)_wtu+^f zGCp#tY;nGeQTGjWDQ;ro=c{!)5Z<~54U_rT}C1HG|h-VkaNN^~gJ@Le{r z{@3iPM`VKv-It$FK4Q1xJ#3q&bM2h&f0&hj?((ZNK4VWUa6*7lC_63BihHs(Mlb7^ zrfipUc$JRZEkU^WOVgDoF8nS%K9BSWx#%eBx97tV&$ajJEDdc06NYrJ^fRb!riwhU zzUl6Q`Xo18E||^OVv^`seXY(7Zd%Ay`t%l0hAkHdht5n!R@%dy3_YRWy_;{pb}fEw z+u{~lg?WjNxou_`URjjTu(=-VKNZudkIu`!GF!utz_ADff49?2x-2$R4dsZEb1}bwt34 zd=nJnBNP>eKH?wb^n)uzvfi6DIu0Q8bWIpj!1J0^pkZ?vSn9PKlqP+=#1_63%$Nwg z&-b8^zUJhicwlgjP(UuNHB=*so^?>JWC1WpXl>#cGoJ_6e&~9&L{7auG%_eVxQ^R1 zLWS8^wyFl3cvM%}XNQUu;TO$a9ZfF9D+*UAeH)vq`>kc_2d5LeqAUH#C*cQ%CYrAF z={Yp-!uh3ZSe3HLJ4j4p_Ir?{(~6H>NYC>TE@DHC>`1ZAtR3DgF;`r;>PZu~E%}Jn zZ=2s+XKBBztW#GUsa;E?`$hY9tL*Iw%K+5sjfm;=Xwtz_?Mdg>s(7nT+eWWi_w2~` zsc4~vx_$rQ`;>t~)Ai?tqJy?mKbw$%{ zSNFF|(pLu@vsbYnN-aH`P}Dw^DE5--{xp^niji~)24iM%I&p`#AB@LwV>EYId(R>D zR~7}1a^Jl!&!RIc(Xh}>!++Z2_)20ZS7w^UplVigpT&gI@*nz&hbDbX_6y8TKEfNG zY&!KhU$=cmH&iqm(jrP$SrjX2eg)%i2SLAp?E+qTkHpRODn6E8)MkK<`GB(WwY$@6 zlf#+H%eGT)Ps612lTFL0YA}2&{Cw3fawndKSwAF+6D~J1y5+*v)SoodXdUvd=24a> zg#{PzMCxH3PVr^c%smzk$(sF;#{7##&X(3O#}X#=i@xFo6HB(Ym`6ib5Nbh(L>^hsH4qn=f}J+uNa8cTT=Fa?YmWBWK^^4_Bb<3zx{q$ ze8mP%40SY{KEGsKARmXr(|%PIp;NY8;qYmG-N5%%kNIfz2dZ+TDN!2t`F6zNoZ)UX zxDJ~cIEQ-B6l`#Wyw~`I!2{FJVM8phmbAl7nMC{3puIBIssr_TM6FeaJI0iF<-$Ya zaW2BW>;4L>ULmidztcO3i}UxiEc&&mb4LFROkPHMQfd3gu)_LWhz)kL)kRjmQEVec zxnE65J-_yFJltsIjYv_Bjb}%dud{2EiP20#6#ZSdr@|9YSHLjguf{71s@8|i!S0&L zp}zH`{$Za2s6RhMy}Rt6NjZ8K%R1osD?q-%oRd_voa~yWl)Un$xc~x7%Zo*j&{UhsrQb_jmU{`>AEn7->QdI@B5*QQGsnUcB+G78%m~whMRT$fTz2 
zeoQ~Pw2u3i;i*f_D(*HvmW`T854Pl@J`#F{g*CnSz*jqhL`|FT(Ye06R&QY+)1*Oe zVROADj%HUSPqE%1)Ir#)J}k0=@NdO&0xC%EtHZEOYa-5VqqEH_$b-5C{rx6~n|Jr#X5MMWdFb3aDzNfUk%4e3xf z*6ebMli@GQTj9q7HOB&gBuMh_Cm5*E21xUt9n_)Evh&b^{q!mbjbMPmT-|MbT4nNI;Jnu>)SWbXv1z`AyT z8IEE5YXP^~0WQ{m&&V(s&uKj9UJz=+dEy2gnPuTbf+r#s62JyH*a6&z+be)W9RLfa z6Cg(aRwM%Taex@STk*e;0<6P9xiH;0g=%m$rPKdZ;G+R(6%RY1ocOg)DKXkYix!;k zgoat_oIy~u3y^|8&^rTY4nqnsvkQ;}*1@4Jzy{81_OI=Z*?G{p8=B$Xol{5v)^-C( z_>lQufc|Yx4*CrNROs4(>T|CJ4ahUZ_mTlb~)Nz>02}qy|xa&@>W# zP@^EgTYZ2ryleTdJ6d~%1N_hjT!vp*IR!GHb3atH-)pCU9USNf5CA=905F61e+Gd8 zKoCv3PY&My(@cc}(5w?+>mTFD!QU~MA%GH1ark%JsX+Td$iWB<9fY>06AwfV0nCtt z0>B0;4gpqhBj&#dY#IWT;H>N?B4iji4~h%};_!8jGolLL=LADX09MfXB0vF(asmIV z5Ag_8pKBM-Y_untJ?8YK5{$JRRkW)K8_&o$F-(M%pG?cy= zh@FO(s}%<(&jF0!!VEwO%FaOH732Ry(*BV4q%)!jNP}FnfFs-}?Tk@?kJ6w=rgj#} zlqLHF|C|Nn;Ia89q9Pxl`D3O6edeIZA>gAqzy!~r8bIsdae)-`zzsa2rZWM(edYmX Lf}$qqg$w&X@u9JQ diff --git a/dependencies/arrow-memory-netty-buffer-patch-17.0.0.jar b/dependencies/arrow-memory-netty-buffer-patch-17.0.0.jar index 72d3742479bcabb1b0d0002a4feed11dec4771ee..63f032a2b99ef3275eca1c100c651cae6465f4bf 100644 GIT binary patch delta 646 zcmex3h3VoHrVV#R>t$zbdb*U6f#EhY1A_j#B+}_^&+`Fec=}j`-cIDJ1rCy;=-=}7m&wj_|#pjfC zZ_h=WmuK6nKM1HcslD=fr@PLswNU4__};35$_@Yi?mnm#8u;s6$Deu|mz57SKTI<^ z6?QzUadYXcSrYWm?W4Xe#~4)|Fmg*^T#LE-=@D1Oxkp?d*@5ZmoXj_CM=R! zmh@PJ>*nGL?*Hp{w>R1+*}iLK*1p@TwxzGlu`jC)ejRS3HMOhYk8^!*mPhOsu7B<8gC=%`Z+7$W z4J+ga}7GqzuXYNo;zpLtXmQ4@_u_K+dHn_ z+G9B9l=+mn>Gpc-e7cnl6L!9oXaAfVJN@Tx*3HMnU30k*F*;eaHHdN9{oq z@`Uyprk?>o0gK6Q9R^GjV}MM}$qRwZbxD(7B^iK?74DQ`axI_SSZ+Pp2WU$bklRr+ bd2O@OWZf>6$#*(+nRpwZTsyXt4It|QcoiqI delta 697 zcmcaOh3VrIrVV#R>t7y`Gdar0z#z@Sz#ziFz>ru}lwYo!o>`(-P?TSgT2zvmTD&$m zI{UVPNS*!{dx>urYdrQH<>ZJm(~wl=-m*seiR-4m+}uqfC7HA9?XAnUZ#m1>Xe*Ie zJALu{7ccJeW^6dq-0{P1*F?L23A@(Chbi4#alxau@}fuoj?`UQne{hJr*VF0J0s{; zai}STY3s4dUtg>K{;b=2%)-0RP~UpN4wse%Eb5shq7!p=%j|1$nSFbAa$Z4#V2O{G zm(iT(-L8>)%1cGfb9wyj$~HaL`L}VuR0m&Xa^muDE6g*P-KW01HzV^}`2UnB*;@Wz z616XQmt6c};j^@d`T4~3vkBMgd85@b{O=!Pzn@v1vgtu|^6?#-jum!C&Mw>Gxi2;? zbIZ?)?@q>ga_6RaItK4K;k6>jW7DLmm4O?Xf(}iVc`mbRjowF@pui8|R!1zhfB2wN z-^*1nb*jv~j}acPzt<-pUC00C|G~EhEUz)FY7Y8R_V=ls-l-D5t`}8bb(5~mI=-Ub zY+JJ3s;8GdpGG{?c^NtPk3Vzyo3f-wd~+AxW0Uwlb>)m%^F)($4IDpmwm55l%|GFk zZljQ#`rjaML)W%@@9&gZ&i-q8?o8(A2<>meEc0b=U5OXktk9<1Z}<1huFRsnA8WUY zPxk9FQJpTnpCQ1TnFScf92^X%H)ez{nG~;Ev3aMsQ7#uE5lsHt62y3HvPWwOQ?vZ! zdU@r^?HvM>kEsf91Jec=ENQ$k`AusCmYfV1bX2`VB6U>xr4`lp5 zxuCsdH43Mcgxe>@bmo)iQk^$J*&mB@sspXRs%dIEt0Bwotlww*@ cGr6}}X)OO21*V>V&1&bJn7Acd+-n?f+p%r?k%o9g>fv~ z7rlCLd9eQdyTKw=QTon^{5(Xd;VJNzGRjuRYJszLnwpSMeBM2NYntXVj&=mWc9e5k zv45&2cZ4^G{j@4h9v+`Ab8~`7VU*U1AHfeMdwS0P&iZ5rgDLcQk`ZVZ#y2c4%Qatv z@#K5*AY5S2O9V4*Qyy z)&AA>QhnMGZlqZ)TF@J%*+%tE%h;ARoPW2vscOF4sldmT}0*x1wVJIP!5GYRq>lu?_C~5*K9Ft)vIFp_n29paZDgvS(lb|U& UlW{360U48@DOCo+BLDyZ0MB3c1^@s6 delta 562 zcmV-20?qx}Q~Fb|2NeqG#2z%o0RR9S1d|aJ8h@=*TWi}e6n-!CKN$8ntVq7a2?$y? 
zC>bLQw5%KCMY4`Z*v#(Dbndj0u+wZ=G0jxjGgptwYY zpnn7>X>=8lY`Yx3ghZR$yJK!}U=egoDoQe(=Zi&IymxgKH*E}0K6bL>EdH^-8i)>Q zV_~QEs5fW>N0sT`m}%Bm|72PneGmE=edrCO(eyY>rHljTk!Q_d)(u-GXiwk52j6LX zWU}B}W5>8DL3lpI&8E0XzFlT%wE1}VLVq(Z9?q0WK~ucQN~lsuvXYf~QLzG2!jl~5 z9IC7?@f=c7@K-v-IJfZdEPss(aFvxDp7B^n>-T?S#jX$R|H19RIo1HGgHrT%{pCI` z%5+YO=z8@nV%D0+s5Z7^KC0!@N#$7KEa_RD{7(aS>~><}4xORi1{nX9shFCc6-Z(& zFhTJNghMO!Z^%MtJRoCb|1eRoiniYG+;%ye{W;FeY4cbs_7eo39op1i60;zNBac4) z^tx$sz43ju7n4qkG$of8zW}q)7Ge+z>BJs1#Q^{S8w8U)CsG0MlaD7p0<9O5K`0@U z@F!0K3mTI_C~5*q9FsvPIFo)H29xV3DgweElYl8YlTj%x0XdU@DOCpHBLDyZ07Vo8 AO8@`> diff --git a/dependencies/arrow-vector-17.0.0.jar b/dependencies/arrow-vector-17.0.0.jar index 29055d60315db4241acc51a1368993fd4860257f..73061da73f30d1a512c3dc365c0b06537d709bc4 100644 GIT binary patch delta 14386 zcmZ9T2Urx>7RTMYuyj~DyRb8M1x1PtrHi1TBG?rX5DQhnf`}qw?+xYHOKh>1B-U6^ z<1?CA5_`dtn1tA{cYXiayADg<_s#SB|IWGRwsXr2o2T!J&c7?#bxLEYTe9cD21Tt} zH~reQ!5$)w+&*9XBb;+gWw$WM_4V+BI9)a_^-xztx@N@chl< z%+brAsA6o}4XpU_Vo>F;)5cb*8%%8YZ|5i8J3^-9HNEM2xG;CjuFszimDlYV|Nh+c zk3sD>-}9_0^FPz;e;Fm;jas&Be%!c<;PIKu{_>xjZ&IGWXhgr=#akMVp4@ucr|-RD z1z~lw5hEscOj&eghFEUbaBShZ&*$C`GMnW2vS6Won(L|7O?}R`Oq%rU&#|xRANzrO z%4Z#@)KvEGIDh^(9vcV#;beJzU4hk?9fv0W-FfN7$pH`VFOTfqymE`}hh=p6^W|6F zif*Wft?%pI?}Gb=>rt<|oV`}oaz*jJgPSvs+}yHv==QA8{G#p;dk1+fo|*eGqIgb5 z$Hea~KTq4)ZNbdm-$iwdwhjC6Xhp{f?IV1?i@1M0%)_JH-n#zch{;<|9(d{f%kqjR zy?hdX+p#42=JcT#J5kxH{{O5`h?*3m>}Hj38lSwaxTNlEMak5(v$+>NR{BmqU%4x3 zr&U(tJg0Job1T@Hpae!sCp`1&=EpH#{0V zT0Bx+9(Z*7cDf6#8XF}IJ&h2$SFYSXxq{ zrPu9+Fcw^oo<#~mDb;f!?{qxgoZB88#sLmFq5QG!)za=MOI zM+si+ZH7z(S#kjn7V?d4sa?1gC3b+vpKHHD9k1b12h{N50@~nFTKlpkQG!s9+H?dw zeaQgTG&dD(Ss=m43cywrH&{@YM&4Ij*E0NXeTnORSnARARKcZm>?>nSmsgUdblWRy zvW`Tcvr2`rd8LvTbP}|5Jz5B2t4+9gWunkgYz$f)WX{Q=7$Hc}m@dW$gIO&{i3wB{ zBUn&UC&8CJaF@ssc1)E1^`z;^f(r$7MvJQ&OT?XWIt!iIm8P<=qrW?&;6EXpgoY|a zeYYsJ3rd}ACu3`B6op_v>TwT_-Fqrr`S@vL;k}_J$J=8CTS|%*{MggpGKpa~6J%jS zq5TyqUbJ;r6m69z*|e11Rp`iirgO11U7_JV?@#YIjx!Ia+4QfPT!56-JG6|?6Vd3kgbTC>1qGKpsP zt7KtKce@F`ip6B#T^PhNx5@@ns@kM5qm|vIN%`Ighy9>%;)&$Y1MOvdxvgUlp&fgE zk_(TY6&AF!hY-w8otB9mFV(0gO8Nc9abiy)mNmMMn*_zNBvA5KslvF`mreKYD=uf>7 znoPb@C-RL{s-&S;Q(_;OezpB-GN5GaieghRrU?U3I3SPP=I1eiS5+=y z^(Yxz@zttL0)Ab@ao9X&K?zA%JWZw)HB2KpdNIq*GO?oU&CFVAs24l2OD>9akb+t(4;Yb8DX6uo+=z5Nj)>BZN>)GB z7>U~_YkH~Z=Yd#OkAIdhvhWOZlu^ z?9Bt&W=|o5&{*-QOe|@_Q+U6P64kVE5CWc~@pO`)0 zruGd+^zY3{kqO7ggRzy(ww8qz^&f&}hq!TKJyfuw)ow~d+pi2kRnGKmh|r5|Z^T_v zha$p+CS3e)D28xqYe{%e?KB~RHEb&jdz#l4t4@Ddew&6y>Es+7VFb)vaqDg479&ql0+KO)Fh>$*AFsK;j?53Co-Rb z(!cIBHdBaH*i(6?(3?HU;jCqr5Xx>%;iCCebp7T;%!${$8jbllmpjxRCa9=usZ!tM zVD@}rP3*w;nBr_yetfBH!)2lC*DZ0lCawlKwYN*RU**>9GZmV7oTEvaG{dNpf1 z#Sa(iQss7~hH`Szs)QUAe(^oWOLK&lEMOlOW&4zF+((-WpPw#q+$UEEXDx4Yaqb?b zW9eN6L?Xy`xDbv^jvFpaVWy8b(~dwk z^sACYF#Xm?bfJACP`%>0L8HvCOUeWh5-u5hnHj1YVJVO^My7_lQ2DM zLvx40%VspZYDMaiKjlQ4IPkSPUgD{-dhSQRj23*AKP@xFs$C#NC||8Ks1F5#BP}Zs zf|X|9R-*{T6$%|ANC;E3pxs4iv_I7uk89eG7Yc*=8G(DzQ-js99Ov4AD-z>Cg#B~F|ecjBFbNA;p8u0j;!K3$cc<6_v z#cT}SPIEdoTWCh(XCqIwbab|m&Pv;I_NlG8n!6UvfordL347AvIk;12^^%1x-RWhn zqU?V$n4Rasao}`ryFJI;mR8Tj1+8F?Oq@tP597M_|1X>u!zS7pn z>-tBJhGa7-g-^<2 zjysp4l^3>iai|mpx}P$LQ(p^2E<*mS30g->KInrhYE;s?qS`k~+2Dm<`P1p8 zNSlRL6uts$XU#Iy5xfi&>QrAQQS4fPBz!2Yo0W>rZ4eq##BvPhgLoMm8nqgfZ-G3r3D)xj;zP-60;s8p^;2_uq9!#FsDu%g!T$k zn!iC9&m4zKhStg(k7UivJ?zec={`4n>Zx&k8um?7V%x$n4bB6)h zER0dUb+)D5pKSc;$!4Lwa#%Biy4}oHLld?LUdrF14Cu}lAx1ecsajd$R;4kg+^tw2 z4h3KNR(O3ozgk(+hhx}B=5Ir)57_*bZxf=G|CU!P3mURrXs0Z@@fB^s_XNwEXv78S zB(vIq(&Jwk88hltz8#hKwDYHP-8E)p)4C5d~#1ckLDG*46`#{)c<4~3iTd_LUAWBOXH^6+0nnd@W$I^D&_7L{OQnCI}cvE 
z$8HR466Nd`MzihvC95xK4%pS@)|fq*pPw&sT((Du!RLj0dxQ+-g2#50cTClg3Lo0p z)0FRomdajL)ksbICSiIXI;oA<%_<4qa3mHk!o!D0Qt;wneahjN%Vqk3PZd4F_o_ zS9{~TijLwA2zl%~^yUD@R`j(ubj0x>Vuf_%IO!l-V3H__CX|_Iua(x97n?GQlSiZM zo%oC-96~9xxf~Ch3#>oAPXQ}V(!E2tSO!r1k2tr#|D8LzU;5fI%fl#}{}0C#jtOSe z=P+*BjtU35=kMr4#R>;!UdwZMDlcoQxv9KxYE$MB7}xk48-F|^1Sx$x86%&gK+0qT z%1K6OeM^rYMd{y0e&yv@G_wk94ddJ97>xOIjEysoVXcO4tVXyX9K-F%=4dqvSFEAk z<-%}f;iGD3O35E_FeM)sS}40H9HlNcr^5efNnR)6P?c&>7o?!A(FywTB)pCl7{d6DaWtGWO@9KPF-OmRjCO=?H#!h={ z#MH@e z<^=}6tiZ|MkT4S8J>S6FoDl+*`5W}A2~})xGIZwj8F>AJZ2(c5koqjfbP3gpSL z^v4;{u7QT zTtGmN3qmX90bEMUQ(c-6P>53DRjo{@N^5|up^Y3(eBHF7v)?=u`gz>hUD_R44Zvz z*%qeQK)WsrIqa`ks&fUeU+F_7TQfRv1>=2doFp1kSdpvY?C5$G<^NbLV{2M{6^=u8 z7!kW`Anc?}RFrknRm)Q<|F)~~;n5Md976>vdVCEB#OYT?B;h(p2qyIb-bspY;k(-T z>-c!S%i4(4wsBLF?QI-618<;hmu-w}k+yDjwCg5p4eZ>E>wRNu{&fl;$w_dn|Ek5fG{P)$7NiYkq$3?(hX%Opk z;oZQ^o<}-<7oBR~kmJn_-P}30Xk24hZZ_f$VU67!X!~Q_GG=+Y*%|#2tiPx1U`l`7 z!>#VNml27%50coFlWq6W#v9&TsQ(wz^1-~G$?CZqrYiw zjl9PGg_@rP$|RVL2$qE@sl(k=)ctR1o1%&PyPG1UnL5Uiu|ubaV7G}vcrGz(@L~>UC008{{JF5 zTgGs_ECvY|Qi19IyOW!p(dW$lWe@S!$TnR_@esG6MGrB%hhw>TFNx4NE|MQX9F6DE z>UVQ<<}LmE5n3AEo#P$dQAy}yETZ3ga1zkdO(SKQ7aP)xle4|tTqPehc|JkWx4o&P z3hzZ@o?y{m?aRfZCzz(nL@xS2Men*KNunhkNy03wdW+JJpQ3cFff8;@`2&&6*#GUx zYfz13{*Wp;(ET@@N1@9R8OurV z8=;-zFim)a6Zd`*Z}zrg?4{ycq+*9+nYc-_;>A`>;N-_n3FhR3nlz)6I;q4ozw9RJ33B0TmWb`>#XM2b9||y(YG# zryqm>rkYt3yYL|!_Ys5OH>;-2lvd9|R_SN;k>s#o_J19|%*Hh69r}HO!@)WKb@=uZ zy680Tzu5INegzRo@t<)&>b`(m_b)&m>SLLFL99UwIgb8<2>GR4?3Bc`MO-{u^tHhM z7P}e0xGY^9FQ&7XW!$YVVNL2+OvaKL`5m-`J9JTq5sKBcKp|$bXUn8TD$uUAnBw|O z?8wa5$i$5b*I+4JFu`Ym$!lxG>a|w(5=g5Qz1Y!jWa3Co*CEkoNqF%_Bm9q2Y{BBT z$i$p#Z%1Q$n}~kw$TlOq%S4P}qjwn*I66=e!{1S2OhqkQ`W-zp6B|%7GtrV3n~J`y zV4o3rZ7Q~AXO0_@0V*++_5I0+oKuNySpN$$>B9zJk%a>tH4}Xm`>4tcLwoBUH%u@W zBiRpCQnT>d_Yy-^X)Xq_jn9opv<1S>d?^!mDze1*{cM3&KYzt>vZZKER+ezQ_eLfi z*tK`EupnP6QANS;u|DA zI>t2aG|vugDX~VZDJC2rwMGx}Ra}TR5NYOI6icEnzGX_K@7SQVVb)yO*@@<~(OP3i zan6XBYKwU7ZH;WW2e?t~2CywsYmDz`L)5ZYJ2t?9i`!24(o*APJEmmIAEMo%N)gA#c0ZNL~Z>WlfqGKLZ(h)AXaR+6PSBbUha|;#^9|t z7XzAU@X^K@DgMGoCg!xm87()XXU<|ad)b^Da$BK)1%2_&?1F=aPLK^MRs(p<$5X5NHmE!03FsBRz}fJK({zX!-IL>sIxKqc-dqIl!i_pqKwoSqR}hD= zSN-?^CM0N_`4lt~QTR)LjMSpD23M(8&X9bbn8qy7fmz1PS(Rr zM2x8;TC&5L+$Lsyow1X3QGQRhgd34rJuyJBmcr^GOTWpJn6LEt$Ci)In|g5lVT6Re zDXc!md+#VN#C%l6_oe#q`FXU2@t)s6Y|DNg!-a1lzAC}To|ZKbHSFeCj_)==oNGl~ zgpU6jXJ|vj`KwsM^=VT>bn6QK9@-dvv}}ZFDWAkeI)q{;t!*R@P%JBJEDm7PXGoTM z^!E((P~W7>vudQkCLGhA2x>MP`;Fd#X4h~?$1&}Rv*65ZG+4jE4fe#c9#0cI#bm`? zy61`M--_ea6V+sSiDB%-BC69A!ncWNLYKXetn=5&q%AwRgNu1h#FmV9b5YquY^B&i zwVNX5aVo(-?C;;XN#%_Q4gQpbK*fJ*MEWrVGd;^&8vX|yCqG5&g}1pm=|@u7{c%lAB|ReDKE!o$Vx(HtbIqqjHybD>#nulLLH>I0in2Q~)F&F3YLz&h{;;PnS0MicPA}&CTVkv2|MMZ6!pumj)6j(e{!U8>= zskNu3=}3c?ZGfN5qxd#BjFtFZjATw+sCD4xmy-De{>T-G1sK>?Y^j(;d2MlYrLE!Y z{2Hw_|BLs`^;+XJ=ucsR@LRK)`&|pf!rZ@&3voLN#RQ@Er}lB;yb`|`f3N33$cY~#K=e#UzsYeJ#a`7cjwuhkd-B+kQ z6kCn?k}OYt|+lIaWep;bITAu&KmqlU+N}ln%kbR6a6k$tL3bmdq7_ z?haJY4ujD?i2Nc!)&#lR(vk=?<$gO+L zMYYF{Ha%7n4QWGr41+R}i_Z{>=J-*iIFNl#m6#v>g#Tk#hH&8-1+gogi#d{*p2fv8 zNes&2qGtz)u;E;6AMS2O$2y?-x>20yI>P(`FM?A15=mSs&PwE z+tH0UoUYlsxnFD?%0^L19LkFOICF_d+4l#yD3pYFn2QPsMOU)#hDmOGg4TA&By8-4 z7WO*Dg(V2v_A?j5AQaiOraNN$UgT`$Ma)HT5A-nM5+}=gph)mlEXL6y6fwWe z*{PmzczlNouU-&i{@~4-BZ=6*xTumu<4P{N_l97PxES^bbsp}GI&VMWMAHZ6Jn{#x z>7G=Qh}8YFzu25TXvRf!BF^C}G$Bz;W4!};1kC_k-ui}d5f|lQLGuTowg-_iaU}N? 
zB%L-1jZf{w@wrYOj#Q9@5vhvfBq1I}Z%O1!FHSOgd)P|9w^CEjWOQ&e9P z`6%a+ib%PIoZKu#>+lM#re&#UwC^~Mmyh$%O53;>8#95EeuFR|_}Tm*RCH)2XAK97 zZSc3k3L{fdQTbK(#1TqZeyYmu^XVXq_E-WV)sED?--=h@bcdeNBOzKIet1EwQe5Cg~=#g zU4GBij@B8UCy($ayb-vc?J44p%|?phiaC@!671(Gls6K&&r}? zH6PGUb83Xs&5<2Usc4SQjE+d4!61ZP=Ie} z_3ukUOKU2TWoct@sW|YrOz=zIF=9IwQz;X7`d7Lr9(-7%g|i=Whg~0auJrDqPXD80 zHC-#jjP&~`+ZvPYI24W=ixs4%l(D#azr&B}#)@gm8O;Ui8i*f}WRDZQl@5V=6-3wJ Mui6$U;8%|S2SsO7qW}N^ delta 14403 zcmZ9T2Urx>7RTMYuyk0O>@F|^78FE^1*HiVELadLiU=qwSkWi~ieT@eq8wwzUSdzM zCW>9gh?25ks>|KW?@B8NY{=akXx$WFC!@dh2b6$ST*?CfH`S`fd=!BwL zHCK~@i&|aowyyC*ZUi_@@P8AbdDnDn+e0TT%zFpb)z-B7WvlD`sNM614cqNBI&E2Q zL}*^n$$@f?Yv;bJ%B>T}Fe4-mH|7UhouV2TGac(lqI;L61?t8vpcOBNvuubcqyz$+pCaKCc ziyz-yl2+RDTS{;D9J^1NoJTh6F1=~mVf`HCKMMw(ODeUA_ezbqb=2%u+PIY6x8wW& zzR$aPX0zkxHtcJ6C}CjQFCR1Cx5#biC!Al@u6fsA|A^kyrS0y8i?d>Pu`Wjo-2QRf zveoaETbJ1Ko*~DZ?kz9(&7EtRzId_Q%2#o1pT5uSe`34cvu{V9mjstq+cLS!n~aJF z&HPS}?lkLB_S^dXN|*k=DQHjsW?LTouib;VJHC&5t@sk79Q!Pz-(A}kt8c0_-xhCP zvUH_$+kW)}n!mD`(z4#$5r4E86z0)?z_))RRtyXQ=h?FrysY46Vjas)7bc&Od+)C)h)E))NH+59nN{D0$HR)26AQX(P zS)FWJ3u@{UCFt1Z2tB33C?QyJlrBaIgP3a?XQx^VRt1r1?v&eF@L`%EG7V*UnLL=V z#LD3$ml1)b) zV}!PBOd1!n(-d0nb6xWB8qKk~vk=OvWpPn4Ug1n-Lor#aI>YCm@iKO$^3E8zgR?jZ zibY=iHk*r>E`ljtovqOEzAnwLG8J3%jl90Z3Qbu2Qkk@4fh%QUO(k7~07Wsq>LR4D z*+0n!Q@XfOVMaM!rAa9^!XCv6C!R>JyP~}{_HbL>ZbA#@aDt03e<&;{znc)w9{wQ{ zJ6_7HJ4(g=$#Hmhp%ZIU%0)scCgV_dba~KSPIB%l>hlgZ?}1XI9?Dopb(1lf?O#{v zcfxD#P}W0;VXfcE!hwdr!)zt>MC{YmsCj=uxA-=z!$X(#MCiRH->DO+qm(LX=+zV+ z2h(%g?x)E8CCRC;>6%1Bfkuj-|u&Hb>fY)(rU zS$LW`@^ZD2n2LK8*`6#LV?%#U@O~d zEek6O8;EB2)o}7`pkPJ08l|D_dj_Ja#&l_*(1V?;!(CbqLWH7vTx=YKA>7bJ5^ja5 zLQB><S-(4A&dpb zaB(9IOVzHcEG()0V6?tvqD1P@z(l2?&nM7VW}htK1Qydz`q!1>hX_%MKw3RS=*eu; zIddK=M6fSYxp16@jvt(a8L`Z-(ipdVy+h$}K}G%rN_~sNnL|Mp>cIDx#9^rV?vLM1 zxFy7VAI?*f>DY?zrbq>5Z#WtgxLHCvvfHS%q(j5eqo1}>;3%O6o!X|ts;w%o2bi;T z+*;<}k)0hWcO)`!!##<#q!%NF=E&iIQNmPK`x$4SMj;Qf%Ow#`yW_;_L_eT<&({(W z3NYq^O_ivn;vI=u!U&wW`B_3g zHuMvDWZ~?e{mI0QSFp~+)Ht{In9!X(%x+$`5W=c?$- z+vllq80Oe`jz`3s29Ty5I@e}A@J*z=6dBk>8ovNmGn13@@wmZRWpU9tM~GB(qXju= zbQe-gz~$`k@!XU?&eW0?PY|Nm#~e<+PC%Q2Cvp*=i+rs;O%~P^GtE@RZvpLBRq18I zD#@!p4ID0Ls8=3Z_i7y{p<^*RCqNVv>2{vbmo2T}0ryp)*;x~%{rejyJ55yf)a#q6 zHBD%Oz3L=m)l+aIAopw%zx7!qWZ_DPmG+U+S*tI}qym0DM(G($-J=`eyd6U&9 zl?(UlJr%{fm2kXjDmKZhSGlOTiVnpbKowroFaYzZ`7|uON3SJo9kTjYj^Iaq=O9r- zXw@7cjr|#U|F^SYId~0q-Wyj17qt@ncbK~XP>+edZU-OZEf4|WK@|ce; zcZmfxUnJC}Uo9+Z@jf4(kJ?Ankg+WVb+Ixe)cp_( zoQRPeeCc_Ng(1}(76LcNy-dQr7b4XT<#F*a&%%&u_a&c+i#hf!Kr73)aj~ob1%5bb z5GTJEsJ#f;=TDJ~gyC%Ezml~+l`X==yINb4qClueQHzC8MJY{OEDTYG2U}8LEh}%* z6vCxA)}SUxYQ#Xj@}s2#Ee*NhoNH;EgmrQa9tnhvbq^WT8A%mxHK_VO;+}ABtEFK^ zhAcsPojGA_OuvWG3e`DU@T!cV`u}& zW4SPzP5V)DX-KD*qf+bLT!ila?i#lO^<@7p;X0(QZf!!VR$yh_?k;iHwlZ7v$1#1xL^lH4_d2W+LBkHsU_ktNTe~pmAD#|#kf79BKnf=!y`tWyB zK!4i47PrHbwls1*#L9KpTYT1mjH@G)ZfswqEX=9idZCq~Hl?o@Ca{L1BtsLbvq5OZ zrs8#33bJ#yjX)&_@B(#agP>)v)^a?5qm7DOH)22sY?O%;-IR`xEgR9IdfQ}dP2Y;K zfx2u0PRIMH6c**JY4|3*U%O+RHw%ra(*qkr=9+HC++jdA3t37{bz53cVbg%lZx&i9 z=lL1bZ+^B~O4=g$C||cWpu<~)Xyv5DN@a-)mDZd(|Ah6?AoDx_30{B9uT++FqXhd% z`c|a+gw5aiR-v8J@@S>9pw`=j7Ro)>zoRYqi16e(8nIJ4$DVFO>6QN)8zZ(0QOYQ5 zJE`>nv{zDlYuVB2?sg4mdsnR)J+EbF*q!2bptMi8!PqO@&M>p*CBCk^p10!_y&Gm{ zyr|8G8wxcUjzR&)FiU-=+1b(Qop{Y1HjO&(5*pC5X?AYBbfaAu*j$R)C5&a~_DR+N z`nb=o2Dkd`#{4|K#Bs)MAzIOdj_ei&D|bD&qu3Ia7xjK>XHNru78)xv$}5qYXfmev zjFZ}UU6wgf;2sQYeze+@RL*LV)T=O3$-khI^6EymEx+J(dx5`9tf<6aZ5WyGVt5aU zmTg)?f=Sq~vyAPf1Pf3spnyF>I&)kp8T_f)%I{%b?ScP-JsgMaMbdUT!wVPf#T9JC zB}rgs+J|px%Py%68%_8=l=|>kwzXv0i~{##k6z2{4aaCb7klHIN;n1V7e=!Gp*Q<6 
zw%`2i4ITM+KVtQ5%W=d3w4hdkBTJje|?DRueZ=BHRy3a5t)dxDqv2l+c1B!YJj^XO+;DBER5Z ziaaVbQVvr%N?mM7D-@2^spKd&2~(W`c^pFp#~bKXd*1zh$Iy%!?e(ZJSssUjI>n%N zNBk@b~ z4P1L#2vruY*Q5biJ;(6>Hz1*A)DZl3|QvL&7Ix7rg>Fv1n;yJ;DhMvP(dXy}aW^8SSB!a2kc_Bz~ zk@C(9>56*v>bx+N<)4#Gz9d}0LN7VzY)wTMu<#0BaGZ1z0l!=jf|S>ACtcmYdOZSa zru6iwR3>!sA{?GY8&IQ5c>mql(V#ANscu1~O+*d-dI_DoA7jjI{>0R8>Qag9_$w3r z30Kp;dLBqW&Bc3Mg@o4i*P|xoD3xY7{E?lMwz$Nz)|%Jg-WC`h^QMRmSu z-@c5_T3o?}YQ<%22|t_Bg1-bGiunuWZkf3lvdiKx*aAIdTcqLy&Hqan!BjD%D8<{? z%t4aPkA5u0fPWb;iMr&G<6<~G{I8(AvQWm>lzRn^)3zItmlCOQTqY{&aNI@5bE-#~ zi}C4^iffLc0u`ORiWB0|TO$&B4WzFXy}FNAlElC9HEqW=d@{diT}cXUG-`VBH;$YZ z*U_}MHkBr-ZL6`P`8QxQwbK|^>tbi*by2M`rx-Z!Fy1#1rbu1Y!9@z=!|FO1;qn`} zJ{3F4qzy~0E(?6r`Wx>8M_puMNgd1Z1)+hClTBs#g7C8|7j`%CQNZ7wi*D{3N1h9X zH?bOuY1d8c{H;B>b>}T?@f~i#|B9zf!dY@nE;P5LF|5UfO>K=mkF@4CI+fzZ@k}p` zE60!PR2i1fb-05^U5x|HeU4kk5O0m0(O@{bnFgpbrn8F#OE$ZjxQ&3?xKyK z>T^+k7jH>({AFQIvuw52gtv?!78GcNuiZoL9B;r$v-=p~xJI%tr{eoUh~g=g-$$mV zH{phohgi-(Hu>JwO%Kr3_f3s#9>K7c-$WW@gjDJ8j1VKQ_=l+3HcTertRP$#rc@rO zQBlKxq1HLG_hZcsx}Mid)UF8<~~A_d~8|OW(J<@nBKBZTwIq#VrMQQpF!M;<>5@aXqWdIX)MW7jMKy7hT#gr`UN5ld-}mC;>YL>&GV5yuRvGO^Q#aEEO}G`8H~AE{iyPzigJ?`t%8^Dr*X55suq zKbq}-4WHw~IsW<@FUvoq%fgJ(|HW!>9mPrHD2@Irikja3i(Oz4so!8KPLC%0H@Ho% zd4spT#u<|Eq4cr%gqxV5vEj8e9g9@)eT6rJ=&`bcJ-7Y%7A;83l(7lT%tX2Jx0r;{ zS(N-9r{?l34PM!#PN%$s_rz=o2h#RzY^Eje;JAJqC+_cs7K$=TdXFQQP2kO*lZ(yt z+k52Vpj??~q*?J{zvglBzYo}>N+)s=Fa?c|{D^dXHOYuu_rhra|uzQUQnSbN}lwqXJ!Ql>cA+z5>5|h^N4>xFG$o zfLoU=KpyI2-T#VMQxZMkbDAy%vc+Lw|%`VIMxq!h+ORqKe!}B2!Hl$OR_a$tvBI#l{I=$ zsN&+ABxaa%k!S;vg-@AM>BBZ?ZIU$?FKtD0nr5xFqX1{bYiWykDYiy7Tmv+PwP6~q z)*9c?rm1D87Hon87r!~-J5PcgDqH1bAa-I4#eqV#IEby$NXVB?YqU0W-UcCet5I8? zPQpI)K`n-}zHVI13cv@ObbHKO2U=~9+#ce=4NV>3HK_&{n`&sC_$Zxlz$i_o+YVy7 zf+*Gz)s3%9cO1le^uSRJ#d7s@0t@x!1%GkE5NQ0kXy&KICmUzvw_|`z%xRu8T3&}P zIg7)XQzLHZ9EASG^}-jkoeo+Rp8`!#m2vS2l3g{FB%+t~npSZLZ^|9ox`@qK-4F`Y zihk7LfsGZN^2D0>7+S^KDvUcc4%6C`e=US-p~0ZN4d+;Frd8`_XRAbSMM%`24*A() zkJn)Iex(4dI6|?BinQWLwlhkyH>S2abboytF22sUwW6MusQQ`?F}Ampa6`J-UTZ?W zu3`{d-CiaRe6;tw!f|6qj?LW=Y)K3k_}FF$HY>JDKbFTz4!(5DO$=pIx>BrwUgPtd z4Uag;9TC&IbG*wPOTJ@IF5GwGIxPz5OMU7rhy$5(JRiT%c&#&^0W%SW#RQH!ibxQz zL@qicYViXJ5mOtK%t@bQOtyYnI(VQ|tNs$MLFFD;27~)+)rQ2BY^zdaTPs#=AQ!?w zq+Gch=3%@ieBKNq&zhna9r8rCo}_VMJ^1@vof(3NKAxf_D;vUX-$3*ewyp+>|2a&e zb?BcOVpGKt@~DX%Jvvfi0n(=*TRu2fYQpueA0%9#JZfRMOUH2WZ47GSn^G?$enJC0~?MEz$r zb{oCJ*f~`kX5xtU#!0Yc4qB|=-lDv*tXEQ!x0s~ZPDi{k{bz8rdZU^SK4K)hw}=$J z5bAoO2^ISwS$D3LNeH{Xor@9m#KvszE-p^h6N41z3w;stE+yk1W_Fod9{M6c)O|?^ zlz3k&((-1Q=ppr`;eWt!jUYt@t?SWosn!epU5_MDz1-})YK2y>!plqky#A9 z_rBH|AN8l`h#xkWQ}1Ly_rmvxtoQBxsmj>kkL>zpp?-I|=8r?x>Z=iH69Ceg(VZY^ z>DJJh(JujF14b$%a!#egUm*gq7}OR;$T)YTr&95K_AS;ye627mq`-wqKi$$ zrYvM27XeMh)@)*`Y*CS}9ts?2iUP&6BrMQ{Svq^NPD2_v2Ls#Ar@&yG#5EUkbNE7? 
z12VlBmn;N+T7#MzD_oi+bk_uzFp<1`pg9-;6n+06a+g<@e| z-O9zcttjLZhVDPy%ZbfCohRMuj|R2cuQNW7d+g^9Is0`sJcX)-d&=JpwJ5 z3aofYMG;~OJ9dj(pWV`_Y4#{&d)G)5y7@&KC|$vA6BXr0BKS2s@@$SJ`#cg!wwgSe zi{036Ux|6p%jTjfEo+W^^zxTUW40!gi$kHV4iwh{gOL(O_E9L17v^e9nJv+jqb*R` z`3R1`w!p+@wd7)8OI$}>+Hi5aCGL*zqb1=rNS(YOsn(kL$Gj&e1lqBdxL_!v$q zB(p1C_N4g9Z6V%eb8$ivS8}-UZU^ymo+JY3Rvx10cZgGyU5)4e#$@jBvK_{6|1>VT zMne?L5($dW(| z{IL!g_+3jmw(BTWw}OkqE8sJ(BT9`Z;^az4bfncC!ohy2x&WabDe3Z<8zLJDL#m8Zf{-mVUy|6xtM!1?vKUNE1&rCTn zSGn2oQLotxqu$Lz!v2(N;b!=8=2T#$eo-%MNj21xwJAmQ#zIMT=C0d%>QH}|93P$ z8Jip%MhlX0!g}{ZPsNcu^t^uP)zl0wc4na5mwt#eKbw<-*=QYJpVgGzAC2xjp5t-j z-E`7E?!$h}<0K>n1G0rirl6u5vp7=?5Q7yBC~|-}fGr|wJ^-UwLg?J>0Vug+87E-_ zF+XQkNk#l`e2-?2h_A{Fe3k@cK4#|(*NNkbqJb^KWOQ3iIcmA z4h}(cnTt&9$w!Gg>(Y<@V(p z;TL0YJsG%=la3ikzPlT^sMz4{z<>GqaATFQA2xD_Zy9)53Emzs_u#IOu1 z7HsfinYhwv>8iN&X_Xdkeaao?e{pxAYfs(vzcyCWzHH1$_7~Y!mtKrRVXtvmK>^f! s9B$v*+O%_=n5x{|P$2(M{Dh?Ac(J~+L8xAZ(f;wGT6H#5z%LyC59LI000000 From 6dd6ea0e3145c45ce47f5050f027a6f38d038ac4 Mon Sep 17 00:00:00 2001 From: sfc-gh-snowflakedb-snyk-sa <130097897+sfc-gh-snowflakedb-snyk-sa@users.noreply.github.com> Date: Mon, 18 Nov 2024 22:18:56 -0800 Subject: [PATCH 06/13] SNOW-1813471: CVE-2024-47535 [Snyk] Fix for 1 vulnerabilities (#1962) Co-authored-by: snyk-bot Co-authored-by: Dominik Przybysz --- TestOnly/pom.xml | 4 ++-- parent-pom.xml | 2 +- thin_public_pom.xml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/TestOnly/pom.xml b/TestOnly/pom.xml index 0b8143b5f..e03f87ef1 100644 --- a/TestOnly/pom.xml +++ b/TestOnly/pom.xml @@ -27,12 +27,12 @@ io.netty netty-common - 4.1.111.Final + 4.1.115.Final io.netty netty-buffer - 4.1.111.Final + 4.1.115.Final org.apache.maven.plugins diff --git a/parent-pom.xml b/parent-pom.xml index 17b6103f1..d69c49cbf 100644 --- a/parent-pom.xml +++ b/parent-pom.xml @@ -66,7 +66,7 @@ 1.3.6 2.2.0 4.11.0 - 4.1.111.Final + 4.1.115.Final 9.37.3 0.31.1 1.0-alpha-9-stable-1 diff --git a/thin_public_pom.xml b/thin_public_pom.xml index 1d3d26fdd..09c6bf079 100644 --- a/thin_public_pom.xml +++ b/thin_public_pom.xml @@ -58,7 +58,7 @@ 2.4.9 1.15.3 2.2.0 - 4.1.111.Final + 4.1.115.Final 9.37.3 UTF-8 UTF-8 From 1286a3eba65442573afa55037093c791f57c317e Mon Sep 17 00:00:00 2001 From: Dominik Przybysz <132913826+sfc-gh-dprzybysz@users.noreply.github.com> Date: Tue, 19 Nov 2024 07:28:35 +0100 Subject: [PATCH 07/13] SNOW-1800995: Merge io.netty.versions.properties during shading thin (#1969) --- pom.xml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pom.xml b/pom.xml index 2a1cf4ea6..45e57505b 100644 --- a/pom.xml +++ b/pom.xml @@ -724,6 +724,9 @@ + + META-INF/io.netty.versions.properties + From 938d8468653a1ff8641417c6700cf727bbc435b5 Mon Sep 17 00:00:00 2001 From: Dominik Przybysz <132913826+sfc-gh-dprzybysz@users.noreply.github.com> Date: Mon, 25 Nov 2024 15:02:14 +0100 Subject: [PATCH 08/13] SNOW-1789749: Support regional GCS endpoints (#1972) --- .../jdbc/SnowflakeFileTransferAgent.java | 24 +- .../cloud/storage/SnowflakeGCSClient.java | 9 +- .../client/jdbc/cloud/storage/StageInfo.java | 46 ++- .../cloud/storage/StorageClientFactory.java | 5 +- .../client/jdbc/FileUploaderPrep.java | 273 +++--------------- .../jdbc/FileUploaderSessionlessTest.java | 56 +++- .../StageInfoGcsCustomEndpointTest.java | 57 ++++ .../FileUploaderPrep/exampleAzure.json | 51 ++++ .../FileUploaderPrep/exampleGCS.json | 47 +++ .../exampleGCSWithEndpoint.json | 47 +++ .../exampleGCSWithUseRegionalUrl.json | 49 ++++ .../resources/FileUploaderPrep/exampleS3.json | 60 ++++ 
.../exampleS3WithStageEndpoint.json | 59 ++++ 13 files changed, 525 insertions(+), 258 deletions(-) create mode 100644 src/test/java/net/snowflake/client/jdbc/cloud/storage/StageInfoGcsCustomEndpointTest.java create mode 100644 src/test/resources/FileUploaderPrep/exampleAzure.json create mode 100644 src/test/resources/FileUploaderPrep/exampleGCS.json create mode 100644 src/test/resources/FileUploaderPrep/exampleGCSWithEndpoint.json create mode 100644 src/test/resources/FileUploaderPrep/exampleGCSWithUseRegionalUrl.json create mode 100644 src/test/resources/FileUploaderPrep/exampleS3.json create mode 100644 src/test/resources/FileUploaderPrep/exampleS3WithStageEndpoint.json diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeFileTransferAgent.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeFileTransferAgent.java index 2b660eb08..4213b33b0 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeFileTransferAgent.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeFileTransferAgent.java @@ -1111,8 +1111,16 @@ static StageInfo getStageInfo(JsonNode jsonNode, SFSession session) throws Snowf // specifically // for FIPS or VPCE S3 endpoint. SNOW-652696 String endPoint = null; - if ("AZURE".equalsIgnoreCase(stageLocationType) || "S3".equalsIgnoreCase(stageLocationType)) { + if ("AZURE".equalsIgnoreCase(stageLocationType) + || "S3".equalsIgnoreCase(stageLocationType) + || "GCS".equalsIgnoreCase(stageLocationType)) { endPoint = jsonNode.path("data").path("stageInfo").findValue("endPoint").asText(); + if ("GCS".equalsIgnoreCase(stageLocationType) + && endPoint != null + && (endPoint.trim().isEmpty() || "null".equals(endPoint))) { + // setting to null to preserve previous behaviour for GCS + endPoint = null; + } } String stgAcct = null; @@ -1179,6 +1187,8 @@ static StageInfo getStageInfo(JsonNode jsonNode, SFSession session) throws Snowf } } + setupUseRegionalUrl(jsonNode, stageInfo); + if (stageInfo.getStageType() == StageInfo.StageType.S3) { if (session == null) { // This node's value is set if PUT is used without Session. (For Snowpipe Streaming, we rely @@ -1200,6 +1210,18 @@ static StageInfo getStageInfo(JsonNode jsonNode, SFSession session) throws Snowf return stageInfo; } + private static void setupUseRegionalUrl(JsonNode jsonNode, StageInfo stageInfo) { + if (stageInfo.getStageType() != StageInfo.StageType.GCS + && stageInfo.getStageType() != StageInfo.StageType.S3) { + return; + } + JsonNode useRegionalURLNode = jsonNode.path("data").path("stageInfo").path("useRegionalUrl"); + if (!useRegionalURLNode.isMissingNode()) { + boolean useRegionalURL = useRegionalURLNode.asBoolean(false); + stageInfo.setUseRegionalUrl(useRegionalURL); + } + } + /** * A helper method to verify if the local file path from GS matches what's parsed locally. This is * for security purpose as documented in SNOW-15153. 
diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeGCSClient.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeGCSClient.java index 188ba40d4..d6bf6ba84 100644 --- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeGCSClient.java +++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeGCSClient.java @@ -18,9 +18,11 @@ import com.google.api.gax.rpc.FixedHeaderProvider; import com.google.auth.oauth2.AccessToken; import com.google.auth.oauth2.GoogleCredentials; +import com.google.cloud.NoCredentials; import com.google.cloud.storage.Blob; import com.google.cloud.storage.BlobId; import com.google.cloud.storage.BlobInfo; +import com.google.cloud.storage.HttpStorageOptions; import com.google.cloud.storage.Storage; import com.google.cloud.storage.Storage.BlobListOption; import com.google.cloud.storage.StorageException; @@ -1312,6 +1314,8 @@ private void setupGCSClient( if (accessToken != null) { // We are authenticated with an oauth access token. StorageOptions.Builder builder = StorageOptions.newBuilder(); + stage.gcsCustomEndpoint().ifPresent(builder::setHost); + if (areDisabledGcsDefaultCredentials(session)) { logger.debug( "Adding explicit credentials to avoid default credential lookup by the GCS client"); @@ -1329,7 +1333,10 @@ private void setupGCSClient( .getService(); } else { // Use anonymous authentication. - this.gcsClient = StorageOptions.getUnauthenticatedInstance().getService(); + HttpStorageOptions.Builder builder = + HttpStorageOptions.newBuilder().setCredentials(NoCredentials.getInstance()); + stage.gcsCustomEndpoint().ifPresent(builder::setHost); + this.gcsClient = builder.build().getService(); } if (encMat != null) { diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/StageInfo.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/StageInfo.java index 7a8bf4d36..3a14b8fa0 100644 --- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/StageInfo.java +++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/StageInfo.java @@ -2,10 +2,17 @@ import java.io.Serializable; import java.util.Map; +import java.util.Optional; import java.util.Properties; +import net.snowflake.client.core.SnowflakeJdbcInternalApi; -/** Encapsulates all the required stage properties used by GET/PUT for Azure and S3 stages */ +/** Encapsulates all the required stage properties used by GET/PUT for Azure, GCS and S3 stages */ public class StageInfo implements Serializable { + + // me-central2 GCS region always use regional urls + // TODO SNOW-1818804: the value is hardcoded now, but it should be server driven + private static final String GCS_REGION_ME_CENTRAL_2 = "me-central2"; + public enum StageType { S3, AZURE, @@ -17,12 +24,18 @@ public enum StageType { private StageType stageType; // The stage type private String location; // The container or bucket private Map credentials; // the credentials required for the stage - private String region; // AWS/S3/GCS region (S3/GCS only) - private String endPoint; // The Azure Storage endpoint (Azure only) + private String region; // S3/GCS region + // An endpoint (Azure, AWS FIPS and GCS custom endpoint override) + private String endPoint; private String storageAccount; // The Azure Storage account (Azure only) private String presignedUrl; // GCS gives us back a presigned URL instead of a cred private boolean isClientSideEncrypted; // whether to encrypt/decrypt files on the stage - private boolean useS3RegionalUrl; // whether to use s3 regional URL (AWS Only) + // 
whether to use s3 regional URL (AWS Only) + // TODO SNOW-1818804: this field will be deprecated when the server returns {@link + // #useRegionalUrl} + private boolean useS3RegionalUrl; + // whether to use regional URL (AWS and GCS only) + private boolean useRegionalUrl; private Properties proxyProperties; /* @@ -166,6 +179,16 @@ public boolean getUseS3RegionalUrl() { return useS3RegionalUrl; } + @SnowflakeJdbcInternalApi + public void setUseRegionalUrl(boolean useRegionalUrl) { + this.useRegionalUrl = useRegionalUrl; + } + + @SnowflakeJdbcInternalApi + public boolean getUseRegionalUrl() { + return useRegionalUrl; + } + private static boolean isSpecified(String arg) { return !(arg == null || arg.equalsIgnoreCase("")); } @@ -173,9 +196,22 @@ private static boolean isSpecified(String arg) { public void setProxyProperties(Properties proxyProperties) { this.proxyProperties = proxyProperties; } - ; public Properties getProxyProperties() { return proxyProperties; } + + @SnowflakeJdbcInternalApi + public Optional gcsCustomEndpoint() { + if (stageType != StageType.GCS) { + return Optional.empty(); + } + if (endPoint != null && !endPoint.trim().isEmpty() && !"null".equals(endPoint)) { + return Optional.of(endPoint); + } + if (GCS_REGION_ME_CENTRAL_2.equalsIgnoreCase(region) || useRegionalUrl) { + return Optional.of(String.format("storage.%s.rep.googleapis.com", region.toLowerCase())); + } + return Optional.empty(); + } } diff --git a/src/main/java/net/snowflake/client/jdbc/cloud/storage/StorageClientFactory.java b/src/main/java/net/snowflake/client/jdbc/cloud/storage/StorageClientFactory.java index a321b6ebd..69d56e195 100644 --- a/src/main/java/net/snowflake/client/jdbc/cloud/storage/StorageClientFactory.java +++ b/src/main/java/net/snowflake/client/jdbc/cloud/storage/StorageClientFactory.java @@ -59,8 +59,9 @@ public SnowflakeStorageClient createClient( switch (stage.getStageType()) { case S3: boolean useS3RegionalUrl = - (stage.getUseS3RegionalUrl() - || (session != null && session.getUseRegionalS3EndpointsForPresignedURL())); + stage.getUseS3RegionalUrl() + || stage.getUseRegionalUrl() + || session != null && session.getUseRegionalS3EndpointsForPresignedURL(); return createS3Client( stage.getCredentials(), parallel, diff --git a/src/test/java/net/snowflake/client/jdbc/FileUploaderPrep.java b/src/test/java/net/snowflake/client/jdbc/FileUploaderPrep.java index d8aa8143f..d801a00ac 100644 --- a/src/test/java/net/snowflake/client/jdbc/FileUploaderPrep.java +++ b/src/test/java/net/snowflake/client/jdbc/FileUploaderPrep.java @@ -6,254 +6,49 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; +import java.io.IOException; +import java.io.InputStream; import java.util.Arrays; import java.util.List; -import org.junit.Before; +import org.junit.BeforeClass; import org.junit.Rule; import org.junit.rules.TemporaryFolder; /** File uploader test prep reused by IT/connection tests and sessionless tests */ abstract class FileUploaderPrep extends BaseJDBCTest { @Rule public TemporaryFolder folder = new TemporaryFolder(); - private ObjectMapper mapper = new ObjectMapper(); - private final String exampleS3JsonStringWithStageEndpoint = - "{\n" - + " \"data\": {\n" - + " \"uploadInfo\": {\n" - + " \"locationType\": \"S3\",\n" - + " \"location\": \"example/location\",\n" - + " \"path\": \"tables/19805757505/\",\n" - + " \"region\": \"us-west-2\",\n" - + " \"storageAccount\": null,\n" - + " \"isClientSideEncrypted\": true,\n" - + " \"creds\": {\n" - + " \"AWS_KEY_ID\": 
\"EXAMPLE_AWS_KEY_ID\",\n" - + " \"AWS_SECRET_KEY\": \"EXAMPLE_AWS_SECRET_KEY\",\n" - + " \"AWS_TOKEN\": \"EXAMPLE_AWS_TOKEN\",\n" - + " \"AWS_ID\": \"EXAMPLE_AWS_ID\",\n" - + " \"AWS_KEY\": \"EXAMPLE_AWS_KEY\"\n" - + " },\n" - + " \"presignedUrl\": null,\n" - + " \"endPoint\": null\n" - + " },\n" - + " \"src_locations\": [\n" - + " \"/tmp/files/orders_100.csv\"\n" - + " ],\n" - + " \"parallel\": 4,\n" - + " \"threshold\": 209715200,\n" - + " \"autoCompress\": true,\n" - + " \"overwrite\": false,\n" - + " \"sourceCompression\": \"auto_detect\",\n" - + " \"clientShowEncryptionParameter\": true,\n" - + " \"queryId\": \"EXAMPLE_QUERY_ID\",\n" - + " \"encryptionMaterial\": {\n" - + " \"queryStageMasterKey\": \"EXAMPLE_QUERY_STAGE_MASTER_KEY\",\n" - + " \"queryId\": \"EXAMPLE_QUERY_ID\",\n" - + " \"smkId\": 123\n" - + " },\n" - + " \"stageInfo\": {\n" - + " \"locationType\": \"S3\",\n" - + " \"location\": \"stage/location/foo/\",\n" - + " \"path\": \"tables/19805757505/\",\n" - + " \"region\": \"us-west-2\",\n" - + " \"storageAccount\": null,\n" - + " \"isClientSideEncrypted\": true,\n" - + " \"creds\": {\n" - + " \"AWS_KEY_ID\": \"EXAMPLE_AWS_KEY_ID\",\n" - + " \"AWS_SECRET_KEY\": \"EXAMPLE_AWS_SECRET_KEY\",\n" - + " \"AWS_TOKEN\": \"EXAMPLE_AWS_TOKEN\",\n" - + " \"AWS_ID\": \"EXAMPLE_AWS_ID\",\n" - + " \"AWS_KEY\": \"EXAMPLE_AWS_KEY\"\n" - + " },\n" - + " \"presignedUrl\": null,\n" - + " \"endPoint\": \"s3-fips.us-east-1.amazonaws.com\"\n" - + " },\n" - + " \"command\": \"UPLOAD\",\n" - + " \"kind\": null,\n" - + " \"operation\": \"Node\"\n" - + " },\n" - + " \"code\": null,\n" - + " \"message\": null,\n" - + " \"success\": true\n" - + "}"; - - private final String exampleS3JsonString = - "{\n" - + " \"data\": {\n" - + " \"uploadInfo\": {\n" - + " \"locationType\": \"S3\",\n" - + " \"location\": \"example/location\",\n" - + " \"path\": \"tables/19805757505/\",\n" - + " \"region\": \"us-west-2\",\n" - + " \"storageAccount\": null,\n" - + " \"isClientSideEncrypted\": true,\n" - + " \"creds\": {\n" - + " \"AWS_KEY_ID\": \"EXAMPLE_AWS_KEY_ID\",\n" - + " \"AWS_SECRET_KEY\": \"EXAMPLE_AWS_SECRET_KEY\",\n" - + " \"AWS_TOKEN\": \"EXAMPLE_AWS_TOKEN\",\n" - + " \"AWS_ID\": \"EXAMPLE_AWS_ID\",\n" - + " \"AWS_KEY\": \"EXAMPLE_AWS_KEY\"\n" - + " },\n" - + " \"presignedUrl\": null,\n" - + " \"endPoint\": null\n" - + " },\n" - + " \"src_locations\": [\n" - + " \"/tmp/files/orders_100.csv\"\n" - + " ],\n" - + " \"parallel\": 4,\n" - + " \"threshold\": 209715200,\n" - + " \"autoCompress\": true,\n" - + " \"overwrite\": false,\n" - + " \"sourceCompression\": \"auto_detect\",\n" - + " \"clientShowEncryptionParameter\": true,\n" - + " \"queryId\": \"EXAMPLE_QUERY_ID\",\n" - + " \"encryptionMaterial\": {\n" - + " \"queryStageMasterKey\": \"EXAMPLE_QUERY_STAGE_MASTER_KEY\",\n" - + " \"queryId\": \"EXAMPLE_QUERY_ID\",\n" - + " \"smkId\": 123\n" - + " },\n" - + " \"stageInfo\": {\n" - + " \"locationType\": \"S3\",\n" - + " \"location\": \"stage/location/foo/\",\n" - + " \"path\": \"tables/19805757505/\",\n" - + " \"region\": \"us-west-2\",\n" - + " \"storageAccount\": null,\n" - + " \"isClientSideEncrypted\": true,\n" - + " \"useS3RegionalUrl\": true,\n" - + " \"creds\": {\n" - + " \"AWS_KEY_ID\": \"EXAMPLE_AWS_KEY_ID\",\n" - + " \"AWS_SECRET_KEY\": \"EXAMPLE_AWS_SECRET_KEY\",\n" - + " \"AWS_TOKEN\": \"EXAMPLE_AWS_TOKEN\",\n" - + " \"AWS_ID\": \"EXAMPLE_AWS_ID\",\n" - + " \"AWS_KEY\": \"EXAMPLE_AWS_KEY\"\n" - + " },\n" - + " \"presignedUrl\": null,\n" - + " \"endPoint\": null\n" - + " },\n" - + " \"command\": 
\"UPLOAD\",\n" - + " \"kind\": null,\n" - + " \"operation\": \"Node\"\n" - + " },\n" - + " \"code\": null,\n" - + " \"message\": null,\n" - + " \"success\": true\n" - + "}"; - - private final String exampleAzureJsonString = - "{\n" - + " \"data\": {\n" - + " \"uploadInfo\": {\n" - + " \"locationType\": \"AZURE\",\n" - + " \"location\": \"EXAMPLE_LOCATION/\",\n" - + " \"path\": \"EXAMPLE_PATH/\",\n" - + " \"region\": \"westus\",\n" - + " \"storageAccount\": \"sfcdev2stage\",\n" - + " \"isClientSideEncrypted\": true,\n" - + " \"creds\": {\n" - + " \"AZURE_SAS_TOKEN\": \"EXAMPLE_AZURE_SAS_TOKEN\"\n" - + " },\n" - + " \"presignedUrl\": null,\n" - + " \"endPoint\": \"blob.core.windows.net\"\n" - + " },\n" - + " \"src_locations\": [\n" - + " \"/foo/orders_100.csv\"\n" - + " ],\n" - + " \"parallel\": 4,\n" - + " \"threshold\": 209715200,\n" - + " \"autoCompress\": true,\n" - + " \"overwrite\": false,\n" - + " \"sourceCompression\": \"auto_detect\",\n" - + " \"clientShowEncryptionParameter\": false,\n" - + " \"queryId\": \"EXAMPLE_QUERY_ID\",\n" - + " \"encryptionMaterial\": {\n" - + " \"queryStageMasterKey\": \"EXAMPLE_QUERY_STAGE_MASTER_KEY\",\n" - + " \"queryId\": \"EXAMPLE_QUERY_ID\",\n" - + " \"smkId\": 123\n" - + " },\n" - + " \"stageInfo\": {\n" - + " \"locationType\": \"AZURE\",\n" - + " \"location\": \"EXAMPLE_LOCATION/\",\n" - + " \"path\": \"EXAMPLE_PATH/\",\n" - + " \"region\": \"westus\",\n" - + " \"storageAccount\": \"EXAMPLE_STORAGE_ACCOUNT\",\n" - + " \"isClientSideEncrypted\": true,\n" - + " \"creds\": {\n" - + " \"AZURE_SAS_TOKEN\": \"EXAMPLE_AZURE_SAS_TOKEN\"\n" - + " },\n" - + " \"presignedUrl\": null,\n" - + " \"endPoint\": \"blob.core.windows.net\"\n" - + " },\n" - + " \"command\": \"UPLOAD\",\n" - + " \"kind\": null,\n" - + " \"operation\": \"Node\"\n" - + " },\n" - + " \"code\": null,\n" - + " \"message\": null,\n" - + " \"success\": true\n" - + "}"; - - private final String exampleGCSJsonString = - "{\n" - + " \"data\": {\n" - + " \"uploadInfo\": {\n" - + " \"locationType\": \"GCS\",\n" - + " \"location\": \"foo/tables/9224/\",\n" - + " \"path\": \"tables/9224/\",\n" - + " \"region\": \"US-WEST1\",\n" - + " \"storageAccount\": \"\",\n" - + " \"isClientSideEncrypted\": true,\n" - + " \"creds\": {},\n" - + " \"presignedUrl\": \"EXAMPLE_PRESIGNED_URL\",\n" - + " \"endPoint\": \"\"\n" - + " },\n" - + " \"src_locations\": [\n" - + " \"/foo/bart/orders_100.csv\"\n" - + " ],\n" - + " \"parallel\": 4,\n" - + " \"threshold\": 209715200,\n" - + " \"autoCompress\": true,\n" - + " \"overwrite\": false,\n" - + " \"sourceCompression\": \"auto_detect\",\n" - + " \"clientShowEncryptionParameter\": false,\n" - + " \"queryId\": \"EXAMPLE_QUERY_ID\",\n" - + " \"encryptionMaterial\": {\n" - + " \"queryStageMasterKey\": \"EXAMPLE_QUERY_STAGE_MASTER_KEY\",\n" - + " \"queryId\": \"EXAMPLE_QUERY_ID\",\n" - + " \"smkId\": 123\n" - + " },\n" - + " \"stageInfo\": {\n" - + " \"locationType\": \"GCS\",\n" - + " \"location\": \"foo/tables/9224/\",\n" - + " \"path\": \"tables/9224/\",\n" - + " \"region\": \"US-WEST1\",\n" - + " \"storageAccount\": \"\",\n" - + " \"isClientSideEncrypted\": true,\n" - + " \"creds\": {},\n" - + " \"presignedUrl\": \"EXAMPLE_PRESIGNED_URL\",\n" - + " \"endPoint\": \"\"\n" - + " },\n" - + " \"command\": \"UPLOAD\",\n" - + " \"kind\": null,\n" - + " \"operation\": \"Node\"\n" - + " },\n" - + " \"code\": null,\n" - + " \"message\": null,\n" - + " \"success\": true\n" - + "}"; - - protected JsonNode exampleS3JsonNode; - protected JsonNode exampleS3StageEndpointJsonNode; - 
protected JsonNode exampleAzureJsonNode; - protected JsonNode exampleGCSJsonNode; - protected List exampleNodes; + private static final ObjectMapper mapper = new ObjectMapper(); + + static JsonNode exampleS3JsonNode; + static JsonNode exampleS3StageEndpointJsonNode; + static JsonNode exampleAzureJsonNode; + static JsonNode exampleGCSJsonNode; + static JsonNode exampleGCSJsonNodeWithUseRegionalUrl; + static JsonNode exampleGCSJsonNodeWithEndPoint; + static List exampleNodes; + + private static JsonNode readJsonFromFile(String name) throws IOException { + try (InputStream is = + FileUploaderPrep.class.getResourceAsStream("/FileUploaderPrep/" + name + ".json")) { + return mapper.readTree(is); + } + } - @Before - public void setup() throws Exception { - exampleS3JsonNode = mapper.readTree(exampleS3JsonString); - exampleS3StageEndpointJsonNode = mapper.readTree(exampleS3JsonStringWithStageEndpoint); - exampleAzureJsonNode = mapper.readTree(exampleAzureJsonString); - exampleGCSJsonNode = mapper.readTree(exampleGCSJsonString); - exampleNodes = Arrays.asList(exampleS3JsonNode, exampleAzureJsonNode, exampleGCSJsonNode); + @BeforeClass + public static void setup() throws Exception { + exampleS3JsonNode = readJsonFromFile("exampleS3"); + exampleS3StageEndpointJsonNode = readJsonFromFile("exampleS3WithStageEndpoint"); + exampleAzureJsonNode = readJsonFromFile("exampleAzure"); + exampleGCSJsonNode = readJsonFromFile("exampleGCS"); + exampleGCSJsonNodeWithUseRegionalUrl = readJsonFromFile("exampleGCSWithUseRegionalUrl"); + exampleGCSJsonNodeWithEndPoint = readJsonFromFile("exampleGCSWithEndpoint"); + exampleNodes = + Arrays.asList( + exampleS3JsonNode, + exampleAzureJsonNode, + exampleGCSJsonNode, + exampleGCSJsonNodeWithUseRegionalUrl, + exampleGCSJsonNodeWithEndPoint); } } diff --git a/src/test/java/net/snowflake/client/jdbc/FileUploaderSessionlessTest.java b/src/test/java/net/snowflake/client/jdbc/FileUploaderSessionlessTest.java index e23800e4e..f5fb7f719 100644 --- a/src/test/java/net/snowflake/client/jdbc/FileUploaderSessionlessTest.java +++ b/src/test/java/net/snowflake/client/jdbc/FileUploaderSessionlessTest.java @@ -3,6 +3,11 @@ */ package net.snowflake.client.jdbc; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -11,6 +16,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Optional; import net.snowflake.client.jdbc.cloud.storage.StageInfo; import net.snowflake.common.core.RemoteStoreFileEncryptionMaterial; import org.junit.Assert; @@ -265,6 +271,7 @@ public void testGetFileTransferMetadatasGCS() throws Exception { Assert.assertEquals(null, stageInfo.getEndPoint()); Assert.assertEquals(null, stageInfo.getStorageAccount()); Assert.assertEquals(true, stageInfo.getIsClientSideEncrypted()); + assertEquals(Optional.empty(), stageInfo.gcsCustomEndpoint()); // EncryptionMaterial check Assert.assertEquals("EXAMPLE_QUERY_ID", metadata.getEncryptionMaterial().getQueryId()); @@ -279,12 +286,42 @@ public void testGetFileTransferMetadatasGCS() throws Exception { Assert.assertEquals("orders_100.csv", metadata.getPresignedUrlFileName()); } + @Test + public void testGetFileTransferMetadataGCSWithUseRegionalUrl() throws Exception { + List metadataList = + 
SnowflakeFileTransferAgent.getFileTransferMetadatas(exampleGCSJsonNodeWithUseRegionalUrl); + Assert.assertEquals(1, metadataList.size()); + + SnowflakeFileTransferMetadataV1 metadata = + (SnowflakeFileTransferMetadataV1) metadataList.get(0); + + StageInfo stageInfo = metadata.getStageInfo(); + + assertTrue(stageInfo.getUseRegionalUrl()); + assertEquals(Optional.of("storage.us-west1.rep.googleapis.com"), stageInfo.gcsCustomEndpoint()); + } + + @Test + public void testGetFileTransferMetadataGCSWithEndPoint() throws Exception { + List metadataList = + SnowflakeFileTransferAgent.getFileTransferMetadatas(exampleGCSJsonNodeWithEndPoint); + Assert.assertEquals(1, metadataList.size()); + + SnowflakeFileTransferMetadataV1 metadata = + (SnowflakeFileTransferMetadataV1) metadataList.get(0); + + StageInfo stageInfo = metadata.getStageInfo(); + + assertFalse(stageInfo.getUseRegionalUrl()); + assertEquals(Optional.of("example.com"), stageInfo.gcsCustomEndpoint()); + } + @Test public void testGetFileTransferMetadatasUploadError() throws Exception { JsonNode downloadNode = mapper.readTree("{\"data\": {\"command\": \"DOWNLOAD\"}}"); try { SnowflakeFileTransferAgent.getFileTransferMetadatas(downloadNode); - Assert.assertTrue(false); + fail(); } catch (SnowflakeSQLException err) { Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); Assert.assertEquals( @@ -297,10 +334,10 @@ public void testGetFileTransferMetadatasEncryptionMaterialError() throws Excepti JsonNode garbageNode = mapper.readTree("{\"data\": {\"src_locations\": [1, 2]}}"); try { SnowflakeFileTransferAgent.getFileTransferMetadatas(garbageNode); - Assert.assertTrue(false); + fail(); } catch (SnowflakeSQLException err) { Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); - Assert.assertTrue( + assertTrue( err.getMessage().contains("JDBC driver internal error: Failed to parse the credentials")); } } @@ -312,11 +349,10 @@ public void testGetFileTransferMetadatasUnsupportedLocationError() throws Except foo.put("locationType", "LOCAL_FS"); try { SnowflakeFileTransferAgent.getFileTransferMetadatas(modifiedNode); - Assert.assertTrue(false); + fail(); } catch (SnowflakeSQLException err) { Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); - Assert.assertTrue( - err.getMessage().contains("JDBC driver internal error: This API only supports")); + assertTrue(err.getMessage().contains("JDBC driver internal error: This API only supports")); } } @@ -325,10 +361,10 @@ public void testGetFileTransferMetadatasSrcLocationsArrayError() throws JsonProc JsonNode garbageNode = mapper.readTree("{\"data\": {\"src_locations\": \"abc\"}}"); try { SnowflakeFileTransferAgent.getFileTransferMetadatas(garbageNode); - Assert.assertTrue(false); + fail(); } catch (SnowflakeSQLException err) { Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); - Assert.assertTrue( + assertTrue( err.getMessage().contains("JDBC driver internal error: src_locations must be an array")); } } @@ -340,10 +376,10 @@ public void testGetFileMetadatasEncryptionMaterialsException() { foo.put("encryptionMaterial", "[1, 2, 3]]"); try { SnowflakeFileTransferAgent.getFileTransferMetadatas(modifiedNode); - Assert.assertTrue(false); + fail(); } catch (SnowflakeSQLException err) { Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); - Assert.assertTrue(err.getMessage().contains("Failed to parse encryptionMaterial")); + 
assertTrue(err.getMessage().contains("Failed to parse encryptionMaterial")); } } } diff --git a/src/test/java/net/snowflake/client/jdbc/cloud/storage/StageInfoGcsCustomEndpointTest.java b/src/test/java/net/snowflake/client/jdbc/cloud/storage/StageInfoGcsCustomEndpointTest.java new file mode 100644 index 000000000..f8e00d7eb --- /dev/null +++ b/src/test/java/net/snowflake/client/jdbc/cloud/storage/StageInfoGcsCustomEndpointTest.java @@ -0,0 +1,57 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.jdbc.cloud.storage; + +import static org.junit.Assert.assertEquals; + +import java.util.HashMap; +import java.util.Optional; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +@RunWith(Parameterized.class) +public class StageInfoGcsCustomEndpointTest { + private final String region; + private final boolean useRegionalUrl; + private final String endPoint; + private final Optional expectedHost; + + public StageInfoGcsCustomEndpointTest( + String region, boolean useRegionalUrl, String endPoint, Optional expectedHost) { + this.region = region; + this.useRegionalUrl = useRegionalUrl; + this.endPoint = endPoint; + this.expectedHost = expectedHost; + } + + @Test + public void shouldReturnEmptyGCSRegionalUrlWhenNotMeCentral1AndNotUseRegionalUrl() { + StageInfo stageInfo = + StageInfo.createStageInfo("GCS", "bla", new HashMap<>(), region, endPoint, "account", true); + stageInfo.setUseRegionalUrl(useRegionalUrl); + assertEquals(expectedHost, stageInfo.gcsCustomEndpoint()); + } + + @Parameterized.Parameters() + public static Object[][] data() { + return new Object[][] { + {"US-CENTRAL1", false, null, Optional.empty()}, + {"US-CENTRAL1", false, "", Optional.empty()}, + {"US-CENTRAL1", false, "null", Optional.empty()}, + {"US-CENTRAL1", false, " ", Optional.empty()}, + {"US-CENTRAL1", false, "example.com", Optional.of("example.com")}, + {"ME-CENTRAL2", false, null, Optional.of("storage.me-central2.rep.googleapis.com")}, + {"ME-CENTRAL2", true, null, Optional.of("storage.me-central2.rep.googleapis.com")}, + {"ME-CENTRAL2", true, "", Optional.of("storage.me-central2.rep.googleapis.com")}, + {"ME-CENTRAL2", true, " ", Optional.of("storage.me-central2.rep.googleapis.com")}, + {"ME-CENTRAL2", true, "example.com", Optional.of("example.com")}, + {"US-CENTRAL1", true, null, Optional.of("storage.us-central1.rep.googleapis.com")}, + {"US-CENTRAL1", true, "", Optional.of("storage.us-central1.rep.googleapis.com")}, + {"US-CENTRAL1", true, " ", Optional.of("storage.us-central1.rep.googleapis.com")}, + {"US-CENTRAL1", true, "null", Optional.of("storage.us-central1.rep.googleapis.com")}, + {"US-CENTRAL1", true, "example.com", Optional.of("example.com")}, + }; + } +} diff --git a/src/test/resources/FileUploaderPrep/exampleAzure.json b/src/test/resources/FileUploaderPrep/exampleAzure.json new file mode 100644 index 000000000..a2b1835c3 --- /dev/null +++ b/src/test/resources/FileUploaderPrep/exampleAzure.json @@ -0,0 +1,51 @@ +{ + "data": { + "uploadInfo": { + "locationType": "AZURE", + "location": "EXAMPLE_LOCATION/", + "path": "EXAMPLE_PATH/", + "region": "westus", + "storageAccount": "sfcdev2stage", + "isClientSideEncrypted": true, + "creds": { + "AZURE_SAS_TOKEN": "EXAMPLE_AZURE_SAS_TOKEN" + }, + "presignedUrl": null, + "endPoint": "blob.core.windows.net" + }, + "src_locations": [ + "/foo/orders_100.csv" + ], + "parallel": 4, + "threshold": 209715200, + "autoCompress": true, + "overwrite": false, + 
"sourceCompression": "auto_detect", + "clientShowEncryptionParameter": false, + "queryId": "EXAMPLE_QUERY_ID", + "encryptionMaterial": { + "queryStageMasterKey": "EXAMPLE_QUERY_STAGE_MASTER_KEY", + "queryId": "EXAMPLE_QUERY_ID", + "smkId": 123 + }, + "stageInfo": { + "locationType": "AZURE", + "location": "EXAMPLE_LOCATION/", + "path": "EXAMPLE_PATH/", + "region": "westus", + "storageAccount": "EXAMPLE_STORAGE_ACCOUNT", + "isClientSideEncrypted": true, + "creds": { + "AZURE_SAS_TOKEN": "EXAMPLE_AZURE_SAS_TOKEN" + }, + "presignedUrl": null, + "endPoint": "blob.core.windows.net" + }, + "command": "UPLOAD", + "kind": null, + "operation": "Node" + }, + "code": null, + "message": null, + "success": true +} \ No newline at end of file diff --git a/src/test/resources/FileUploaderPrep/exampleGCS.json b/src/test/resources/FileUploaderPrep/exampleGCS.json new file mode 100644 index 000000000..8cd605f1c --- /dev/null +++ b/src/test/resources/FileUploaderPrep/exampleGCS.json @@ -0,0 +1,47 @@ +{ + "data": { + "uploadInfo": { + "locationType": "GCS", + "location": "foo/tables/9224/", + "path": "tables/9224/", + "region": "US-WEST1", + "storageAccount": "", + "isClientSideEncrypted": true, + "creds": {}, + "presignedUrl": "EXAMPLE_PRESIGNED_URL", + "endPoint": "" + }, + "src_locations": [ + "/foo/bart/orders_100.csv" + ], + "parallel": 4, + "threshold": 209715200, + "autoCompress": true, + "overwrite": false, + "sourceCompression": "auto_detect", + "clientShowEncryptionParameter": false, + "queryId": "EXAMPLE_QUERY_ID", + "encryptionMaterial": { + "queryStageMasterKey": "EXAMPLE_QUERY_STAGE_MASTER_KEY", + "queryId": "EXAMPLE_QUERY_ID", + "smkId": 123 + }, + "stageInfo": { + "locationType": "GCS", + "location": "foo/tables/9224/", + "path": "tables/9224/", + "region": "US-WEST1", + "storageAccount": "", + "isClientSideEncrypted": true, + "creds": {}, + "presignedUrl": "EXAMPLE_PRESIGNED_URL", + "endPoint": "" + }, + "command": "UPLOAD", + "kind": null, + "operation": "Node" + }, + "code": null, + "message": null, + "success": true +} \ No newline at end of file diff --git a/src/test/resources/FileUploaderPrep/exampleGCSWithEndpoint.json b/src/test/resources/FileUploaderPrep/exampleGCSWithEndpoint.json new file mode 100644 index 000000000..8ba946c76 --- /dev/null +++ b/src/test/resources/FileUploaderPrep/exampleGCSWithEndpoint.json @@ -0,0 +1,47 @@ +{ + "data": { + "uploadInfo": { + "locationType": "GCS", + "location": "foo/tables/9224/", + "path": "tables/9224/", + "region": "US-WEST1", + "storageAccount": "", + "isClientSideEncrypted": true, + "creds": {}, + "presignedUrl": "EXAMPLE_PRESIGNED_URL", + "endPoint": "example.com" + }, + "src_locations": [ + "/foo/bart/orders_100.csv" + ], + "parallel": 4, + "threshold": 209715200, + "autoCompress": true, + "overwrite": false, + "sourceCompression": "auto_detect", + "clientShowEncryptionParameter": false, + "queryId": "EXAMPLE_QUERY_ID", + "encryptionMaterial": { + "queryStageMasterKey": "EXAMPLE_QUERY_STAGE_MASTER_KEY", + "queryId": "EXAMPLE_QUERY_ID", + "smkId": 123 + }, + "stageInfo": { + "locationType": "GCS", + "location": "foo/tables/9224/", + "path": "tables/9224/", + "region": "US-WEST1", + "storageAccount": "", + "isClientSideEncrypted": true, + "creds": {}, + "presignedUrl": "EXAMPLE_PRESIGNED_URL", + "endPoint": "example.com" + }, + "command": "UPLOAD", + "kind": null, + "operation": "Node" + }, + "code": null, + "message": null, + "success": true +} \ No newline at end of file diff --git 
a/src/test/resources/FileUploaderPrep/exampleGCSWithUseRegionalUrl.json b/src/test/resources/FileUploaderPrep/exampleGCSWithUseRegionalUrl.json new file mode 100644 index 000000000..79f4dc678 --- /dev/null +++ b/src/test/resources/FileUploaderPrep/exampleGCSWithUseRegionalUrl.json @@ -0,0 +1,49 @@ +{ + "data": { + "uploadInfo": { + "locationType": "GCS", + "useRegionalUrl": true, + "location": "foo/tables/9224/", + "path": "tables/9224/", + "region": "US-WEST1", + "storageAccount": "", + "isClientSideEncrypted": true, + "creds": {}, + "presignedUrl": "EXAMPLE_PRESIGNED_URL", + "endPoint": "" + }, + "src_locations": [ + "/foo/bart/orders_100.csv" + ], + "parallel": 4, + "threshold": 209715200, + "autoCompress": true, + "overwrite": false, + "sourceCompression": "auto_detect", + "clientShowEncryptionParameter": false, + "queryId": "EXAMPLE_QUERY_ID", + "encryptionMaterial": { + "queryStageMasterKey": "EXAMPLE_QUERY_STAGE_MASTER_KEY", + "queryId": "EXAMPLE_QUERY_ID", + "smkId": 123 + }, + "stageInfo": { + "locationType": "GCS", + "useRegionalUrl": true, + "location": "foo/tables/9224/", + "path": "tables/9224/", + "region": "US-WEST1", + "storageAccount": "", + "isClientSideEncrypted": true, + "creds": {}, + "presignedUrl": "EXAMPLE_PRESIGNED_URL", + "endPoint": "" + }, + "command": "UPLOAD", + "kind": null, + "operation": "Node" + }, + "code": null, + "message": null, + "success": true +} \ No newline at end of file diff --git a/src/test/resources/FileUploaderPrep/exampleS3.json b/src/test/resources/FileUploaderPrep/exampleS3.json new file mode 100644 index 000000000..eadc166d8 --- /dev/null +++ b/src/test/resources/FileUploaderPrep/exampleS3.json @@ -0,0 +1,60 @@ +{ + "data": { + "uploadInfo": { + "locationType": "S3", + "location": "example/location", + "path": "tables/19805757505/", + "region": "us-west-2", + "storageAccount": null, + "isClientSideEncrypted": true, + "creds": { + "AWS_KEY_ID": "EXAMPLE_AWS_KEY_ID", + "AWS_SECRET_KEY": "EXAMPLE_AWS_SECRET_KEY", + "AWS_TOKEN": "EXAMPLE_AWS_TOKEN", + "AWS_ID": "EXAMPLE_AWS_ID", + "AWS_KEY": "EXAMPLE_AWS_KEY" + }, + "presignedUrl": null, + "endPoint": null + }, + "src_locations": [ + "/tmp/files/orders_100.csv" + ], + "parallel": 4, + "threshold": 209715200, + "autoCompress": true, + "overwrite": false, + "sourceCompression": "auto_detect", + "clientShowEncryptionParameter": true, + "queryId": "EXAMPLE_QUERY_ID", + "encryptionMaterial": { + "queryStageMasterKey": "EXAMPLE_QUERY_STAGE_MASTER_KEY", + "queryId": "EXAMPLE_QUERY_ID", + "smkId": 123 + }, + "stageInfo": { + "locationType": "S3", + "location": "stage/location/foo/", + "path": "tables/19805757505/", + "region": "us-west-2", + "storageAccount": null, + "isClientSideEncrypted": true, + "useS3RegionalUrl": true, + "creds": { + "AWS_KEY_ID": "EXAMPLE_AWS_KEY_ID", + "AWS_SECRET_KEY": "EXAMPLE_AWS_SECRET_KEY", + "AWS_TOKEN": "EXAMPLE_AWS_TOKEN", + "AWS_ID": "EXAMPLE_AWS_ID", + "AWS_KEY": "EXAMPLE_AWS_KEY" + }, + "presignedUrl": null, + "endPoint": null + }, + "command": "UPLOAD", + "kind": null, + "operation": "Node" + }, + "code": null, + "message": null, + "success": true +} \ No newline at end of file diff --git a/src/test/resources/FileUploaderPrep/exampleS3WithStageEndpoint.json b/src/test/resources/FileUploaderPrep/exampleS3WithStageEndpoint.json new file mode 100644 index 000000000..32b8a66a1 --- /dev/null +++ b/src/test/resources/FileUploaderPrep/exampleS3WithStageEndpoint.json @@ -0,0 +1,59 @@ +{ + "data": { + "uploadInfo": { + "locationType": "S3", + "location": 
"example/location", + "path": "tables/19805757505/", + "region": "us-west-2", + "storageAccount": null, + "isClientSideEncrypted": true, + "creds": { + "AWS_KEY_ID": "EXAMPLE_AWS_KEY_ID", + "AWS_SECRET_KEY": "EXAMPLE_AWS_SECRET_KEY", + "AWS_TOKEN": "EXAMPLE_AWS_TOKEN", + "AWS_ID": "EXAMPLE_AWS_ID", + "AWS_KEY": "EXAMPLE_AWS_KEY" + }, + "presignedUrl": null, + "endPoint": null + }, + "src_locations": [ + "/tmp/files/orders_100.csv" + ], + "parallel": 4, + "threshold": 209715200, + "autoCompress": true, + "overwrite": false, + "sourceCompression": "auto_detect", + "clientShowEncryptionParameter": true, + "queryId": "EXAMPLE_QUERY_ID", + "encryptionMaterial": { + "queryStageMasterKey": "EXAMPLE_QUERY_STAGE_MASTER_KEY", + "queryId": "EXAMPLE_QUERY_ID", + "smkId": 123 + }, + "stageInfo": { + "locationType": "S3", + "location": "stage/location/foo/", + "path": "tables/19805757505/", + "region": "us-west-2", + "storageAccount": null, + "isClientSideEncrypted": true, + "creds": { + "AWS_KEY_ID": "EXAMPLE_AWS_KEY_ID", + "AWS_SECRET_KEY": "EXAMPLE_AWS_SECRET_KEY", + "AWS_TOKEN": "EXAMPLE_AWS_TOKEN", + "AWS_ID": "EXAMPLE_AWS_ID", + "AWS_KEY": "EXAMPLE_AWS_KEY" + }, + "presignedUrl": null, + "endPoint": "s3-fips.us-east-1.amazonaws.com" + }, + "command": "UPLOAD", + "kind": null, + "operation": "Node" + }, + "code": null, + "message": null, + "success": true +} \ No newline at end of file From 930f419cc95ce8c7d616d5476b1a87bd45678dfb Mon Sep 17 00:00:00 2001 From: Dominik Przybysz <132913826+sfc-gh-dprzybysz@users.noreply.github.com> Date: Mon, 25 Nov 2024 16:19:58 +0100 Subject: [PATCH 09/13] SNOW-1787626: Fix flaky test ConnectionLatestIT.testAsyncQueryOpenAndCloseConnection (#1974) --- .../client/jdbc/ConnectionLatestIT.java | 30 ++++++------------- 1 file changed, 9 insertions(+), 21 deletions(-) diff --git a/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java index 4dbbcb021..30ff6728f 100644 --- a/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java @@ -274,7 +274,6 @@ public void testAsyncQueryOpenAndCloseConnection() throws SQLException, IOException, InterruptedException { // open connection and run asynchronous query String queryID = null; - QueryStatusV2 statusV2 = null; try (Connection con = getConnection(); Statement statement = con.createStatement(); ResultSet rs1 = @@ -288,7 +287,7 @@ public void testAsyncQueryOpenAndCloseConnection() await() .atMost(Duration.ofSeconds(5)) .until(() -> sfrs.getStatusV2().getStatus(), not(equalTo(QueryStatus.NO_DATA))); - statusV2 = sfrs.getStatusV2(); + QueryStatusV2 statusV2 = sfrs.getStatusV2(); // Query should take 60 seconds so should be running assertEquals(QueryStatus.RUNNING, statusV2.getStatus()); assertEquals(QueryStatus.RUNNING.name(), statusV2.getName()); @@ -305,7 +304,7 @@ public void testAsyncQueryOpenAndCloseConnection() assertEquals(SqlState.INVALID_PARAMETER_VALUE, e.getSQLState()); } try (ResultSet rs = con.unwrap(SnowflakeConnection.class).createResultSet(queryID)) { - statusV2 = rs.unwrap(SnowflakeResultSet.class).getStatusV2(); + QueryStatusV2 statusV2 = rs.unwrap(SnowflakeResultSet.class).getStatusV2(); // Assert status of query is a success assertEquals(QueryStatus.SUCCESS, statusV2.getStatus()); assertEquals("No error reported", statusV2.getErrorMessage()); @@ -318,27 +317,16 @@ public void testAsyncQueryOpenAndCloseConnection() .unwrap(SnowflakeStatement.class) 
.executeAsyncQuery("select * from nonexistentTable")) { Thread.sleep(100); - statusV2 = rs1.unwrap(SnowflakeResultSet.class).getStatusV2(); - // when GS response is slow, allow up to 1 second of retries to get final query status SnowflakeResultSet sfrs1 = rs1.unwrap(SnowflakeResultSet.class); await() .atMost(Duration.ofSeconds(10)) - .until( - () -> { - QueryStatus qs = sfrs1.getStatusV2().getStatus(); - return !(qs == QueryStatus.NO_DATA || qs == QueryStatus.RUNNING); - }); - // If GS response is too slow to return data, do nothing to avoid flaky test failure. If - // response has returned, - // assert it is the error message that we are expecting. - if (statusV2.getStatus() != QueryStatus.NO_DATA) { - assertEquals(QueryStatus.FAILED_WITH_ERROR, statusV2.getStatus()); - assertEquals(2003, statusV2.getErrorCode()); - assertEquals( - "SQL compilation error:\n" - + "Object 'NONEXISTENTTABLE' does not exist or not authorized.", - statusV2.getErrorMessage()); - } + .until(() -> sfrs1.getStatusV2().getStatus() == QueryStatus.FAILED_WITH_ERROR); + statusV2 = sfrs1.getStatusV2(); + assertEquals(2003, statusV2.getErrorCode()); + assertEquals( + "SQL compilation error:\n" + + "Object 'NONEXISTENTTABLE' does not exist or not authorized.", + statusV2.getErrorMessage()); } } } From ecccc3613f35dc229f9cfbb670bded1c0db6ea30 Mon Sep 17 00:00:00 2001 From: Dominik Przybysz <132913826+sfc-gh-dprzybysz@users.noreply.github.com> Date: Tue, 26 Nov 2024 07:56:29 +0100 Subject: [PATCH 10/13] SNOW-1825712: Update GH actions versions (#1975) --- .github/workflows/build-test.yml | 10 +++++----- .github/workflows/check-style.yml | 2 +- .github/workflows/jira_close.yml | 2 +- .github/workflows/jira_issue.yml | 2 +- .github/workflows/snyk-issue.yml | 2 +- .github/workflows/snyk-pr.yml | 4 ++-- 6 files changed, 11 insertions(+), 11 deletions(-) diff --git a/.github/workflows/build-test.yml b/.github/workflows/build-test.yml index 2607c5d46..ef331f720 100644 --- a/.github/workflows/build-test.yml +++ b/.github/workflows/build-test.yml @@ -29,7 +29,7 @@ jobs: name: Build runs-on: ubuntu-20.04 steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v4 - name: Build shell: bash env: @@ -53,7 +53,7 @@ jobs: java-version: ${{ matrix.runConfig.javaVersion }} distribution: 'temurin' cache: maven - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: '3.7' architecture: 'x64' @@ -83,7 +83,7 @@ jobs: java-version: ${{ matrix.runConfig.javaVersion }} distribution: 'temurin' cache: maven - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: '3.7' - name: Install Homebrew Bash @@ -110,7 +110,7 @@ jobs: category: ['TestCategoryResultSet,TestCategoryStatement,TestCategoryLoader', 'TestCategoryOthers', 'TestCategoryArrow,TestCategoryConnection,TestCategoryCore,TestCategoryDiagnostic', 'TestCategoryFips'] additionalMavenProfile: ['', '-Dthin-jar'] steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v4 - name: Tests shell: bash env: @@ -132,7 +132,7 @@ jobs: category: ['TestCategoryOthers', 'TestCategoryConnection,TestCategoryStatement', 'TestCategoryCore,TestCategoryLoader,TestCategoryResultSet'] is_old_driver: ['true'] steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v4 - name: Tests shell: bash env: diff --git a/.github/workflows/check-style.yml b/.github/workflows/check-style.yml index 221651298..d26f41865 100644 --- a/.github/workflows/check-style.yml +++ b/.github/workflows/check-style.yml @@ -9,7 +9,7 @@ jobs: name: Check 
Style runs-on: ubuntu-20.04 steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v4 - name: Check Style shell: bash run: mvn clean validate --batch-mode --show-version -P check-style diff --git a/.github/workflows/jira_close.yml b/.github/workflows/jira_close.yml index dfcb8bc73..0dacf7fab 100644 --- a/.github/workflows/jira_close.yml +++ b/.github/workflows/jira_close.yml @@ -9,7 +9,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v4 with: repository: snowflakedb/gh-actions ref: jira_v1 diff --git a/.github/workflows/jira_issue.yml b/.github/workflows/jira_issue.yml index 943ad70aa..92501da8f 100644 --- a/.github/workflows/jira_issue.yml +++ b/.github/workflows/jira_issue.yml @@ -14,7 +14,7 @@ jobs: if: ((github.event_name == 'issue_comment' && github.event.comment.body == 'recreate jira' && github.event.comment.user.login == 'sfc-gh-mkeller') || (github.event_name == 'issues' && github.event.pull_request.user.login != 'whitesource-for-github-com[bot]')) steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v4 with: repository: snowflakedb/gh-actions ref: jira_v1 diff --git a/.github/workflows/snyk-issue.yml b/.github/workflows/snyk-issue.yml index 7b58bb12a..1e36dae35 100644 --- a/.github/workflows/snyk-issue.yml +++ b/.github/workflows/snyk-issue.yml @@ -16,7 +16,7 @@ jobs: runs-on: ubuntu-latest steps: - name: checkout action - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: repository: snowflakedb/whitesource-actions token: ${{ secrets.WHITESOURCE_ACTION_TOKEN }} diff --git a/.github/workflows/snyk-pr.yml b/.github/workflows/snyk-pr.yml index 5fc21951b..0c101e391 100644 --- a/.github/workflows/snyk-pr.yml +++ b/.github/workflows/snyk-pr.yml @@ -15,13 +15,13 @@ jobs: if: ${{ github.event.pull_request.user.login == 'sfc-gh-snyk-sca-sa' }} steps: - name: checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ref: ${{ github.event.pull_request.head.ref }} fetch-depth: 0 - name: checkout action - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: repository: snowflakedb/whitesource-actions token: ${{ secrets.WHITESOURCE_ACTION_TOKEN }} From f1038e6e42918b0bd044662f76be814b7852e473 Mon Sep 17 00:00:00 2001 From: Laurent Goujon Date: Tue, 26 Nov 2024 01:09:03 -0800 Subject: [PATCH 11/13] SNOW-1747516: Fix native libraries relocation (#1927) --- FIPS/pom.xml | 28 ++++++++++++++++++++++++++++ parent-pom.xml | 1 + pom.xml | 28 ++++++++++++++++++++++++++++ 3 files changed, 57 insertions(+) diff --git a/FIPS/pom.xml b/FIPS/pom.xml index 0b874551d..b8efd8c05 100644 --- a/FIPS/pom.xml +++ b/FIPS/pom.xml @@ -510,6 +510,22 @@ io.grpc ${shadeBase}.grpc + + META-INF.native.io_grpc_netty_shaded_netty_tcnative + META-INF.native.${shadeNativeBase}_grpc_netty_shaded_netty_tcnative + + + META-INF.native.libio_grpc_netty_shaded_netty_tcnative + META-INF.native.lib${shadeNativeBase}_grpc_netty_shaded_netty_tcnative + + + META-INF.native.io_grpc_netty_shaded_netty_transport_native_epoll + META-INF.native.${shadeNativeBase}_grpc_netty_shaded_netty_transport_native_epoll + + + META-INF.native.libio_grpc_netty_shaded_netty_transport_native_epoll + META-INF.native.lib${shadeNativeBase}_grpc_netty_shaded_netty_transport_native_epoll + org.checkerframework ${shadeBase}.org.checkerframework @@ -526,6 +542,18 @@ org.conscrypt ${shadeBase}.org.conscrypt + + conscrypt_openjdk_jni + ${shadeNativeBase}_conscrypt_openjdk_jni + + + META-INF.native.conscrypt_openjdk_jni + 
META-INF.native.${shadeNativeBase}_conscrypt_openjdk_jni + + + META-INF.native.libconscrypt_openjdk_jni + META-INF.native.lib${shadeNativeBase}_conscrypt_openjdk_jni + opencensus ${shadeBase}.opencensus diff --git a/parent-pom.xml b/parent-pom.xml index d69c49cbf..1e5aac29c 100644 --- a/parent-pom.xml +++ b/parent-pom.xml @@ -75,6 +75,7 @@ UTF-8 net/snowflake/client/jdbc/internal net.snowflake.client.jdbc.internal + net_snowflake_client_jdbc_internal 2.0.13 5.1.4 net.snowflake.client.category.AllTestCategory diff --git a/pom.xml b/pom.xml index 45e57505b..0f34e2953 100644 --- a/pom.xml +++ b/pom.xml @@ -943,6 +943,22 @@ io.grpc ${shadeBase}.grpc + + META-INF.native.io_grpc_netty_shaded_netty_tcnative + META-INF.native.${shadeNativeBase}_grpc_netty_shaded_netty_tcnative + + + META-INF.native.libio_grpc_netty_shaded_netty_tcnative + META-INF.native.lib${shadeNativeBase}_grpc_netty_shaded_netty_tcnative + + + META-INF.native.io_grpc_netty_shaded_netty_transport_native_epoll + META-INF.native.${shadeNativeBase}_grpc_netty_shaded_netty_transport_native_epoll + + + META-INF.native.libio_grpc_netty_shaded_netty_transport_native_epoll + META-INF.native.lib${shadeNativeBase}_grpc_netty_shaded_netty_transport_native_epoll + org.checkerframework ${shadeBase}.org.checkerframework @@ -959,6 +975,18 @@ org.conscrypt ${shadeBase}.org.conscrypt + + conscrypt_openjdk_jni + ${shadeNativeBase}_conscrypt_openjdk_jni + + + META-INF.native.conscrypt_openjdk_jni + META-INF.native.${shadeNativeBase}_conscrypt_openjdk_jni + + + META-INF.native.libconscrypt_openjdk_jni + META-INF.native.lib${shadeNativeBase}_conscrypt_openjdk_jni + opencensus ${shadeBase}.opencensus From 7fd70cdc5fbb31798b56b1335de87ed3b56b3725 Mon Sep 17 00:00:00 2001 From: Przemyslaw Motacki Date: Tue, 26 Nov 2024 10:25:02 +0100 Subject: [PATCH 12/13] SNOW-1689931 Adding flag to skip token file permission verification (#1959) --- .../config/SFConnectionConfigParser.java | 86 +++++++++++-------- .../snowflake/client/jdbc/SnowflakeUtil.java | 16 ++++ .../config/SFConnectionConfigParserTest.java | 69 ++++++++++++++- 3 files changed, 132 insertions(+), 39 deletions(-) diff --git a/src/main/java/net/snowflake/client/config/SFConnectionConfigParser.java b/src/main/java/net/snowflake/client/config/SFConnectionConfigParser.java index 35698c557..1da9f766a 100644 --- a/src/main/java/net/snowflake/client/config/SFConnectionConfigParser.java +++ b/src/main/java/net/snowflake/client/config/SFConnectionConfigParser.java @@ -1,5 +1,6 @@ package net.snowflake.client.config; +import static net.snowflake.client.jdbc.SnowflakeUtil.convertSystemGetEnvToBooleanValue; import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetEnv; import com.fasterxml.jackson.dataformat.toml.TomlMapper; @@ -34,6 +35,53 @@ public class SFConnectionConfigParser { "SNOWFLAKE_DEFAULT_CONNECTION_NAME"; public static final String DEFAULT = "default"; public static final String SNOWFLAKE_TOKEN_FILE_PATH = "/snowflake/session/token"; + public static final String SKIP_TOKEN_FILE_PERMISSIONS_VERIFICATION = + "SKIP_TOKEN_FILE_PERMISSIONS_VERIFICATION"; + + public static ConnectionParameters buildConnectionParameters() throws SnowflakeSQLException { + String defaultConnectionName = + Optional.ofNullable(systemGetEnv(SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY)).orElse(DEFAULT); + Map fileConnectionConfiguration = + loadDefaultConnectionConfiguration(defaultConnectionName); + + if (fileConnectionConfiguration != null && !fileConnectionConfiguration.isEmpty()) { + Properties connectionProperties = 
new Properties(); + connectionProperties.putAll(fileConnectionConfiguration); + + String url = createUrl(fileConnectionConfiguration); + logger.debug("Url created using parameters from connection configuration file: {}", url); + + if ("oauth".equals(fileConnectionConfiguration.get("authenticator")) + && fileConnectionConfiguration.get("token") == null) { + Path path = + Paths.get( + Optional.ofNullable(fileConnectionConfiguration.get("token_file_path")) + .orElse(SNOWFLAKE_TOKEN_FILE_PATH)); + logger.debug("Token used in connect is read from file: {}", path); + try { + boolean shouldSkipTokenFilePermissionsVerification = + convertSystemGetEnvToBooleanValue(SKIP_TOKEN_FILE_PERMISSIONS_VERIFICATION, false); + if (!shouldSkipTokenFilePermissionsVerification) { + verifyFilePermissionSecure(path); + } else { + logger.debug("Skip token file permissions verification"); + } + String token = new String(Files.readAllBytes(path), Charset.defaultCharset()); + if (!token.isEmpty()) { + putPropertyIfNotNull(connectionProperties, "token", token.trim()); + } else { + throw new SnowflakeSQLException( + "Non-empty token must be set when the authenticator type is OAUTH"); + } + } catch (Exception ex) { + throw new SnowflakeSQLException(ex, "There is a problem during reading token from file"); + } + } + return new ConnectionParameters(url, connectionProperties); + } else { + return null; + } + } private static Map loadDefaultConnectionConfiguration( String defaultConnectionName) throws SnowflakeSQLException { @@ -88,44 +136,6 @@ private static void verifyFilePermissionSecure(Path configFilePath) } } - public static ConnectionParameters buildConnectionParameters() throws SnowflakeSQLException { - String defaultConnectionName = - Optional.ofNullable(systemGetEnv(SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY)).orElse(DEFAULT); - Map fileConnectionConfiguration = - loadDefaultConnectionConfiguration(defaultConnectionName); - - if (fileConnectionConfiguration != null && !fileConnectionConfiguration.isEmpty()) { - Properties conectionProperties = new Properties(); - conectionProperties.putAll(fileConnectionConfiguration); - - String url = createUrl(fileConnectionConfiguration); - logger.debug("Url created using parameters from connection configuration file: {}", url); - - if ("oauth".equals(fileConnectionConfiguration.get("authenticator")) - && fileConnectionConfiguration.get("token") == null) { - Path path = - Paths.get( - Optional.ofNullable(fileConnectionConfiguration.get("token_file_path")) - .orElse(SNOWFLAKE_TOKEN_FILE_PATH)); - logger.debug("Token used in connect is read from file: {}", path); - try { - verifyFilePermissionSecure(path); - String token = new String(Files.readAllBytes(path), Charset.defaultCharset()); - if (!token.isEmpty()) { - putPropertyIfNotNull(conectionProperties, "token", token.trim()); - } else { - logger.warn("The token has empty value"); - } - } catch (Exception ex) { - throw new SnowflakeSQLException(ex, "There is a problem during reading token from file"); - } - } - return new ConnectionParameters(url, conectionProperties); - } else { - return null; - } - } - private static String createUrl(Map fileConnectionConfiguration) throws SnowflakeSQLException { Optional maybeAccount = Optional.ofNullable(fileConnectionConfiguration.get("account")); diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java index 635384972..8e9a683a0 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java +++ 
b/src/main/java/net/snowflake/client/jdbc/SnowflakeUtil.java @@ -837,6 +837,22 @@ public static boolean convertSystemPropertyToBooleanValue( } return defaultValue; } + /** + * Helper function to convert environment variable to boolean + * + * @param envVariableKey property name of the environment variable + * @param defaultValue default value used + * @return the value of the environment variable as boolean, else the default value + */ + @SnowflakeJdbcInternalApi + public static boolean convertSystemGetEnvToBooleanValue( + String envVariableKey, boolean defaultValue) { + String environmentVariableValue = systemGetEnv(envVariableKey); + if (environmentVariableValue != null) { + return Boolean.parseBoolean(environmentVariableValue); + } + return defaultValue; + } @SnowflakeJdbcInternalApi public static T mapSFExceptionToSQLException(ThrowingCallable action) diff --git a/src/test/java/net/snowflake/client/config/SFConnectionConfigParserTest.java b/src/test/java/net/snowflake/client/config/SFConnectionConfigParserTest.java index 01da714e5..bfb30f645 100644 --- a/src/test/java/net/snowflake/client/config/SFConnectionConfigParserTest.java +++ b/src/test/java/net/snowflake/client/config/SFConnectionConfigParserTest.java @@ -1,5 +1,6 @@ package net.snowflake.client.config; +import static net.snowflake.client.config.SFConnectionConfigParser.SKIP_TOKEN_FILE_PERMISSIONS_VERIFICATION; import static net.snowflake.client.config.SFConnectionConfigParser.SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY; import static net.snowflake.client.config.SFConnectionConfigParser.SNOWFLAKE_HOME_KEY; import static org.junit.Assert.assertEquals; @@ -17,8 +18,11 @@ import java.nio.file.attribute.FileAttribute; import java.nio.file.attribute.PosixFilePermission; import java.nio.file.attribute.PosixFilePermissions; +import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.Set; import net.snowflake.client.RunningNotOnLinuxMac; @@ -32,20 +36,36 @@ public class SFConnectionConfigParserTest { + private static final List ENV_VARIABLES_KEYS = + new ArrayList<>( + Arrays.asList( + SNOWFLAKE_HOME_KEY, + SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY, + SKIP_TOKEN_FILE_PERMISSIONS_VERIFICATION)); private Path tempPath = null; private TomlMapper tomlMapper = new TomlMapper(); + private Map envVariables = new HashMap(); @Before public void setUp() throws IOException { tempPath = Files.createTempDirectory(".snowflake"); + ENV_VARIABLES_KEYS.stream() + .forEach( + key -> { + if (SnowflakeUtil.systemGetEnv(key) != null) { + envVariables.put(key, SnowflakeUtil.systemGetEnv(key)); + } + }); } @After public void close() throws IOException { SnowflakeUtil.systemUnsetEnv(SNOWFLAKE_HOME_KEY); SnowflakeUtil.systemUnsetEnv(SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY); + SnowflakeUtil.systemUnsetEnv(SKIP_TOKEN_FILE_PERMISSIONS_VERIFICATION); Files.walk(tempPath).map(Path::toFile).forEach(File::delete); Files.delete(tempPath); + envVariables.forEach((key, value) -> SnowflakeUtil.systemSetEnv(key, value)); } @Test @@ -103,6 +123,21 @@ public void testThrowErrorWhenWrongPermissionsForTokenFile() throws IOException SnowflakeSQLException.class, () -> SFConnectionConfigParser.buildConnectionParameters()); } + @Test + public void testNoThrowErrorWhenWrongPermissionsForTokenFileButSkippingFlagIsEnabled() + throws SnowflakeSQLException, IOException { + SnowflakeUtil.systemSetEnv(SNOWFLAKE_HOME_KEY, tempPath.toString()); + 
SnowflakeUtil.systemSetEnv(SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY, "default"); + SnowflakeUtil.systemSetEnv(SKIP_TOKEN_FILE_PERMISSIONS_VERIFICATION, "true"); + File tokenFile = new File(Paths.get(tempPath.toString(), "token").toUri()); + prepareConnectionConfigurationTomlFile( + Collections.singletonMap("token_file_path", tokenFile.toString()), true, false); + + ConnectionParameters data = SFConnectionConfigParser.buildConnectionParameters(); + assertNotNull(data); + assertEquals(tokenFile.toString(), data.getParams().get("token_file_path")); + } + @Test public void testLoadSFConnectionConfigWithHostConfigured() throws SnowflakeSQLException, IOException { @@ -133,6 +168,19 @@ public void shouldThrowExceptionIfNoneOfHostAndAccountIsSet() throws IOException SnowflakeSQLException.class, () -> SFConnectionConfigParser.buildConnectionParameters()); } + @Test + public void shouldThrowExceptionIfTokenIsNotSetForOauth() throws IOException { + SnowflakeUtil.systemSetEnv(SNOWFLAKE_HOME_KEY, tempPath.toString()); + SnowflakeUtil.systemSetEnv(SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY, "default"); + SnowflakeUtil.systemSetEnv(SKIP_TOKEN_FILE_PERMISSIONS_VERIFICATION, "true"); + File tokenFile = new File(Paths.get(tempPath.toString(), "token").toUri()); + prepareConnectionConfigurationTomlFile( + Collections.singletonMap("token_file_path", tokenFile.toString()), true, false, ""); + + Assert.assertThrows( + SnowflakeSQLException.class, () -> SFConnectionConfigParser.buildConnectionParameters()); + } + private void prepareConnectionConfigurationTomlFile() throws IOException { prepareConnectionConfigurationTomlFile(null, true, true); } @@ -144,6 +192,16 @@ private void prepareConnectionConfigurationTomlFile(Map moreParameters) throws I private void prepareConnectionConfigurationTomlFile( Map moreParameters, boolean onlyUserPermissionConnection, boolean onlyUserPermissionToken) throws IOException { + prepareConnectionConfigurationTomlFile( + moreParameters, onlyUserPermissionConnection, onlyUserPermissionToken, "token_from_file"); + } + + private void prepareConnectionConfigurationTomlFile( + Map moreParameters, + boolean onlyUserPermissionConnection, + boolean onlyUserPermissionToken, + String token) + throws IOException { Path path = Paths.get(tempPath.toString(), "connections.toml"); Path filePath = createFilePathWithPermission(path, onlyUserPermissionConnection); File file = filePath.toFile(); @@ -166,7 +224,16 @@ private void prepareConnectionConfigurationTomlFile( createFilePathWithPermission( Paths.get(configurationParams.get("token_file_path").toString()), onlyUserPermissionToken); - Files.write(tokenFilePath, "token_from_file".getBytes()); + Files.write(tokenFilePath, token.getBytes()); + Path emptyTokenFilePath = + createFilePathWithPermission( + Paths.get( + configurationParams + .get("token_file_path") + .toString() + .replaceAll("token", "emptytoken")), + onlyUserPermissionToken); + Files.write(emptyTokenFilePath, "".getBytes()); } } From a20f2cff732c68b176351d7566f4743e79a65379 Mon Sep 17 00:00:00 2001 From: Antoni Stachowski Date: Wed, 27 Nov 2024 15:13:34 +0100 Subject: [PATCH 13/13] SNOW-1693588 Upgrade to JUnit5 (#1909) --- .github/workflows/build-test.yml | 35 +- FIPS/pom.xml | 14 + .../snowflake/client/AbstractDriverIT.java | 2 - .../client/ConditionalIgnoreRule.java | 125 ----- .../net/snowflake/client/DontRunOnGCP.java | 15 + .../client/DontRunOnGithubActions.java | 15 + .../net/snowflake/client/RunningOnGCP.java | 12 - .../client/RunningOnGithubActions.java | 11 - 
.../net/snowflake/client/RunningOnWinMac.java | 0 .../java/net/snowflake/client/TestUtil.java | 4 +- .../client/category/FipsTestSuite.java | 22 + .../client/category/TestCategoryFips.java | 3 - .../client/jdbc/ConnectionFipsIT.java | 33 +- TestOnly/pom.xml | 117 +++- ci/container/test_component.sh | 63 +-- ci/log_analyze_setup.sh | 4 +- ci/test.sh | 6 +- ci/test_windows.bat | 80 ++- parent-pom.xml | 131 ++++- pom.xml | 115 ++-- .../client/jdbc/SnowflakeDriverIT.java | 10 +- .../snowflake/client/AbstractDriverIT.java | 3 - .../net/snowflake/client/AssumptionUtils.java | 36 ++ .../client/ConditionalIgnoreRule.java | 125 ----- .../net/snowflake/client/RunningNotOnAWS.java | 12 - .../snowflake/client/RunningNotOnAzure.java | 12 - .../net/snowflake/client/RunningNotOnGCP.java | 12 - .../client/RunningNotOnGithubActionsMac.java | 16 - .../snowflake/client/RunningNotOnJava21.java | 13 - .../snowflake/client/RunningNotOnJava8.java | 13 - .../snowflake/client/RunningNotOnLinux.java | 9 - .../client/RunningNotOnLinuxMac.java | 13 - .../client/RunningNotOnTestaccount.java | 10 - .../net/snowflake/client/RunningNotOnWin.java | 9 - .../snowflake/client/RunningNotOnWinMac.java | 9 - .../client/RunningOnGithubAction.java | 15 - .../client/RunningOnTestaccount.java | 7 - .../net/snowflake/client/RunningOnWin.java | 9 - .../net/snowflake/client/SkipOnThinJar.java | 12 - .../java/net/snowflake/client/TestUtil.java | 16 +- .../annotations/DontRunOnGithubActions.java | 15 + .../client/annotations/DontRunOnJava21.java | 16 + .../client/annotations/DontRunOnJava8.java | 16 + .../annotations/DontRunOnTestaccount.java | 15 + .../client/annotations/DontRunOnThinJar.java | 15 + .../client/annotations/DontRunOnWindows.java | 16 + .../client/annotations/RunOnAWS.java | 15 + .../client/annotations/RunOnAzure.java | 15 + .../client/annotations/RunOnGCP.java | 15 + .../annotations/RunOnGithubActionsNotMac.java | 18 + .../client/annotations/RunOnLinux.java | 16 + .../client/annotations/RunOnLinuxOrMac.java | 16 + .../client/annotations/RunOnMac.java | 16 + .../RunOnTestaccountNotOnGithubActions.java | 17 + .../client/annotations/RunOnWindows.java | 16 + .../client/annotations/RunOnWindowsOrMac.java | 16 + .../client/category/TestCategoryArrow.java | 3 - .../category/TestCategoryConnection.java | 3 - .../client/category/TestCategoryCore.java | 3 - .../category/TestCategoryDiagnostic.java | 3 - .../client/category/TestCategoryLoader.java | 4 - .../client/category/TestCategoryOthers.java | 3 - .../category/TestCategoryResultSet.java | 3 - .../category/TestCategoryStatement.java | 3 - .../snowflake/client/category/TestTags.java | 17 + .../config/SFClientConfigParserTest.java | 14 +- .../config/SFConnectionConfigParserTest.java | 30 +- .../client/config/SFPermissionsTest.java | 86 ++- .../core/CoreUtilsMiscellaneousTest.java | 15 +- .../client/core/EventHandlerTest.java | 19 +- .../net/snowflake/client/core/EventTest.java | 27 +- .../core/ExecTimeTelemetryDataTest.java | 8 +- .../client/core/HttpUtilLatestIT.java | 16 +- .../client/core/IncidentUtilLatestIT.java | 38 +- .../client/core/OCSPCacheServerTest.java | 145 +++-- .../client/core/ObjectMapperTest.java | 72 +-- .../client/core/PrivateLinkDetectorTest.java | 56 +- .../client/core/QueryContextCacheTest.java | 4 +- .../client/core/SFArrowResultSetIT.java | 66 ++- .../client/core/SFLoginInputTest.java | 4 +- .../client/core/SFSessionPropertyTest.java | 10 +- .../client/core/SFTrustManagerIT.java | 99 ++-- .../SFTrustManagerMockitoMockLatestIT.java | 20 +- 
.../client/core/SFTrustManagerTest.java | 16 +- .../client/core/SQLInputOutputTest.java | 2 +- .../client/core/SecureStorageManagerTest.java | 18 +- .../core/SessionUtilExternalBrowserTest.java | 24 +- .../client/core/SessionUtilLatestIT.java | 16 +- .../client/core/SessionUtilTest.java | 24 +- .../client/core/SnowflakeMFACacheTest.java | 14 +- .../core/SqlInputTimestampUtilTest.java | 12 +- .../snowflake/client/core/StmtUtilTest.java | 8 +- .../snowflake/client/core/URLUtilTest.java | 8 +- .../core/arrow/ArrowResultUtilTest.java | 89 +-- .../client/core/arrow/BaseConverterTest.java | 16 +- .../arrow/BigIntToFixedConverterTest.java | 6 +- .../core/arrow/BigIntToTimeConverterTest.java | 36 +- .../BigIntToTimestampLTZConverterTest.java | 33 +- .../BigIntToTimestampNTZConverterTest.java | 65 +-- .../core/arrow/BitToBooleanConverterTest.java | 6 +- .../client/core/arrow/DateConverterTest.java | 56 +- .../core/arrow/DoubleToRealConverterTest.java | 6 +- .../core/arrow/IntToFixedConverterTest.java | 8 +- .../core/arrow/IntToTimeConverterTest.java | 42 +- .../arrow/SmallIntToFixedConverterTest.java | 8 +- ...FieldStructToTimestampTZConverterTest.java | 125 +++-- .../arrow/TinyIntToFixedConverterTest.java | 8 +- ...ieldStructToTimestampLTZConverterTest.java | 114 ++-- ...ieldStructToTimestampNTZConverterTest.java | 152 +++--- ...FieldStructToTimestampTZConverterTest.java | 36 +- .../arrow/VarBinaryToBinaryConverterTest.java | 6 +- .../core/arrow/VarCharConverterTest.java | 6 +- .../client/core/bind/BindExceptionTest.java | 4 +- .../core/json/BooleanConverterTest.java | 9 +- .../client/core/json/BytesConverterTest.java | 4 +- .../core/json/DateTimeConverterTest.java | 6 +- .../client/core/json/NumberConverterTest.java | 2 +- .../client/core/json/StringConverterTest.java | 8 +- .../client/jdbc/ArrowResultChunkTest.java | 2 +- .../snowflake/client/jdbc/BaseJDBCTest.java | 6 +- .../jdbc/BaseJDBCWithSharedConnectionIT.java | 15 +- .../client/jdbc/BaseWiremockTest.java | 38 +- .../snowflake/client/jdbc/BindUploaderIT.java | 32 +- .../client/jdbc/BindUploaderLatestIT.java | 28 +- ...ngAndInsertingStructuredTypesLatestIT.java | 131 +++-- .../snowflake/client/jdbc/BindingDataIT.java | 127 +++-- .../client/jdbc/BindingDataLatestIT.java | 21 +- .../client/jdbc/CallableStatementIT.java | 76 +-- .../client/jdbc/CallableStatementITBase.java | 48 ++ .../jdbc/CallableStatementLatestIT.java | 28 +- .../ChunkDownloaderS3RetryUrlLatestIT.java | 14 +- .../jdbc/ClientMemoryLimitParallelIT.java | 27 +- .../jdbc/CompressedStreamFactoryTest.java | 6 +- .../client/jdbc/ConnectStringParseTest.java | 4 +- .../jdbc/ConnectionAlreadyClosedIT.java | 8 +- .../jdbc/ConnectionFeatureNotSupportedIT.java | 8 +- .../snowflake/client/jdbc/ConnectionIT.java | 66 ++- .../client/jdbc/ConnectionLatestIT.java | 123 +++-- .../client/jdbc/ConnectionManual.java | 2 +- .../client/jdbc/ConnectionPoolingIT.java | 18 +- .../client/jdbc/ConnectionWithOCSPModeIT.java | 31 +- .../client/jdbc/CustomProxyLatestIT.java | 80 ++- .../client/jdbc/DatabaseMetaDataIT.java | 27 +- .../jdbc/DatabaseMetaDataInternalIT.java | 44 +- .../DatabaseMetaDataInternalLatestIT.java | 29 +- .../client/jdbc/DatabaseMetaDataLatestIT.java | 51 +- .../DatabaseMetaDataResultSetLatestIT.java | 21 +- .../jdbc/DatabaseMetaDataResultsetIT.java | 14 +- .../client/jdbc/DellBoomiCloudIT.java | 14 +- .../FileConnectionConfigurationLatestIT.java | 17 +- .../jdbc/FileUploaderExpandFileNamesTest.java | 33 +- .../client/jdbc/FileUploaderLatestIT.java | 92 ++-- 
...UploaderMimeTypeToCompressionTypeTest.java | 66 +-- .../client/jdbc/FileUploaderPrep.java | 7 +- .../jdbc/FileUploaderSessionlessTest.java | 210 +++---- .../snowflake/client/jdbc/GCPLargeResult.java | 34 +- .../jdbc/GitRepositoryDownloadLatestIT.java | 25 +- .../client/jdbc/HeartbeatAsyncLatestIT.java | 23 +- .../snowflake/client/jdbc/HeartbeatIT.java | 36 +- .../client/jdbc/LobSizeLatestIT.java | 95 ++-- .../client/jdbc/MaxLobSizeLatestIT.java | 17 +- .../client/jdbc/MockConnectionTest.java | 19 +- .../client/jdbc/MultiStatementArrowIT.java | 6 +- .../client/jdbc/MultiStatementIT.java | 30 +- .../client/jdbc/MultiStatementLatestIT.java | 22 +- .../client/jdbc/OpenGroupCLIFuncIT.java | 16 +- .../client/jdbc/OpenGroupCLIFuncLatestIT.java | 8 +- .../client/jdbc/PreparedMultiStmtIT.java | 66 ++- .../client/jdbc/PreparedStatement0IT.java | 18 +- .../client/jdbc/PreparedStatement1IT.java | 212 ++++---- .../jdbc/PreparedStatement1LatestIT.java | 118 ++-- .../client/jdbc/PreparedStatement2IT.java | 226 ++++---- .../jdbc/PreparedStatement2LatestIT.java | 114 ++-- .../jdbc/PreparedStatementArrow1IT.java | 15 - .../jdbc/PreparedStatementArrow1LatestIT.java | 18 - .../jdbc/PreparedStatementArrow2IT.java | 15 - .../jdbc/PreparedStatementArrow2LatestIT.java | 18 - ...reparedStatementFeatureNotSupportedIT.java | 8 +- .../PreparedStatementLargeUpdateLatestIT.java | 17 +- .../snowflake/client/jdbc/ProxyLatestIT.java | 20 +- .../jdbc/PutFileWithSpaceIncludedIT.java | 24 +- .../client/jdbc/PutUnescapeBackslashIT.java | 12 +- .../client/jdbc/RestRequestTest.java | 57 +- .../jdbc/RestRequestWiremockLatestIT.java | 8 +- .../client/jdbc/ResultJsonParserV2Test.java | 6 +- .../snowflake/client/jdbc/ResultSet0IT.java | 38 +- .../client/jdbc/ResultSetAlreadyClosedIT.java | 12 +- .../ResultSetArrowForce0MultiTimeZone.java | 56 +- ...ResultSetArrowForceLTZMultiTimeZoneIT.java | 70 +-- .../ResultSetArrowForceTZMultiTimeZoneIT.java | 61 +-- .../client/jdbc/ResultSetArrowIT.java | 14 - .../client/jdbc/ResultSetArrowLatestIT.java | 18 - .../client/jdbc/ResultSetAsyncIT.java | 26 +- .../client/jdbc/ResultSetAsyncLatestIT.java | 10 +- .../jdbc/ResultSetFeatureNotSupportedIT.java | 8 +- .../snowflake/client/jdbc/ResultSetIT.java | 317 ++++++----- .../client/jdbc/ResultSetJsonVsArrowIT.java | 287 +++++----- .../jdbc/ResultSetJsonVsArrowMultiTZIT.java | 138 +++-- .../client/jdbc/ResultSetLatestIT.java | 293 +++++----- .../client/jdbc/ResultSetMultiTimeZoneIT.java | 213 +++++--- .../jdbc/ResultSetMultiTimeZoneLatestIT.java | 147 ++--- .../client/jdbc/ResultSetVectorLatestIT.java | 120 ++-- .../client/jdbc/SSOConnectionTest.java | 4 +- .../client/jdbc/ServiceNameTest.java | 2 +- .../client/jdbc/SessionUtilTest.java | 31 +- .../client/jdbc/SessionVariablesIT.java | 12 +- ...akeAzureClientHandleExceptionLatestIT.java | 176 +++--- .../jdbc/SnowflakeBasicDataSourceTest.java | 4 +- .../SnowflakeChunkDownloaderLatestIT.java | 18 +- .../client/jdbc/SnowflakeClobTest.java | 8 +- .../jdbc/SnowflakeConnectionV1Test.java | 2 +- .../SnowflakeDriverConnectionStressTest.java | 2 +- .../client/jdbc/SnowflakeDriverIT.java | 431 +++++++-------- .../client/jdbc/SnowflakeDriverLatestIT.java | 276 ++++++---- .../client/jdbc/SnowflakeDriverTest.java | 28 +- ...flakeGcsClientHandleExceptionLatestIT.java | 154 +++--- ...SnowflakeResultSetSerializableArrowIT.java | 12 - .../SnowflakeResultSetSerializableIT.java | 202 +++---- ...wflakeS3ClientHandleExceptionLatestIT.java | 173 +++--- .../jdbc/SnowflakeSerializableTest.java | 10 +- 
.../SnowflakeTimestampWithTimezoneTest.java | 87 ++- .../client/jdbc/SnowflakeTypeTest.java | 11 +- .../client/jdbc/SnowflakeUtilTest.java | 16 +- .../SqlFeatureNotSupportedTelemetryTest.java | 4 +- .../client/jdbc/StatementAlreadyClosedIT.java | 12 +- .../client/jdbc/StatementArrowIT.java | 6 +- .../jdbc/StatementFeatureNotSupportedIT.java | 8 +- .../snowflake/client/jdbc/StatementIT.java | 39 +- .../client/jdbc/StatementLargeUpdateIT.java | 10 +- .../client/jdbc/StatementLatestIT.java | 33 +- .../client/jdbc/StatementNoOpLatestIT.java | 12 +- .../net/snowflake/client/jdbc/StreamIT.java | 21 +- .../snowflake/client/jdbc/StreamLatestIT.java | 43 +- .../storage/CloudStorageClientLatestIT.java | 14 +- .../cloud/storage/EncryptionProviderTest.java | 8 +- .../storage/GcmEncryptionProviderTest.java | 46 +- .../storage/SnowflakeAzureClientLatestIT.java | 21 +- .../storage/SnowflakeAzureClientTest.java | 4 +- .../storage/SnowflakeS3ClientLatestIT.java | 29 +- .../cloud/storage/SnowflakeS3ClientTest.java | 4 +- .../StageInfoGcsCustomEndpointTest.java | 78 +-- .../diagnostic/DiagnosticContextLatestIT.java | 50 +- .../diagnostic/SnowflakeEndpointTest.java | 8 +- .../ResultSetStructuredTypesLatestIT.java | 512 ++++++++++-------- ...ypesGetStringArrowJsonCompatibilityIT.java | 228 ++++---- .../StructuredTypesGetStringBaseIT.java | 13 +- .../client/jdbc/telemetry/TelemetryIT.java | 48 +- .../client/jdbc/telemetry/TelemetryTest.java | 4 +- .../jdbc/telemetryOOB/TelemetryServiceIT.java | 63 ++- .../telemetryOOB/TelemetryServiceTest.java | 10 +- .../loader/FlatfileReadMultithreadIT.java | 16 +- .../snowflake/client/loader/LoaderBase.java | 8 +- .../net/snowflake/client/loader/LoaderIT.java | 16 +- .../client/loader/LoaderLatestIT.java | 12 +- .../client/loader/LoaderMultipleBatchIT.java | 10 +- .../client/loader/LoaderTimestampIT.java | 10 +- .../snowflake/client/loader/OnErrorTest.java | 2 +- .../client/log/AbstractLoggerIT.java | 30 +- .../client/log/JDK14JCLWrapperLatestIT.java | 22 +- .../client/log/JDK14LoggerLatestIT.java | 22 +- .../snowflake/client/log/JDK14LoggerTest.java | 8 +- .../log/JDK14LoggerWithClientLatestIT.java | 90 +-- .../snowflake/client/log/SFFormatterTest.java | 12 +- .../snowflake/client/log/SFLogLevelTest.java | 4 +- .../client/log/SFLoggerFactoryTest.java | 4 +- .../client/log/SFToJavaLogMapperTest.java | 14 +- .../client/log/SLF4JJJCLWrapperLatestIT.java | 22 +- .../client/log/SLF4JLoggerLatestIT.java | 23 +- .../ConnectionPoolingDataSourceIT.java | 10 +- ...ogicalConnectionAlreadyClosedLatestIT.java | 8 +- ...ConnectionFeatureNotSupportedLatestIT.java | 8 +- .../pooling/LogicalConnectionLatestIT.java | 20 +- .../client/providers/BooleanProvider.java | 16 + .../client/providers/ProvidersUtil.java | 37 ++ .../providers/ResultFormatProvider.java | 20 + .../client/providers/ScaleProvider.java | 20 + .../providers/SimpleResultFormatProvider.java | 27 + .../providers/SnowflakeArgumentsProvider.java | 19 + .../client/providers/TimezoneProvider.java | 36 ++ .../client/suites/ArrowTestSuite.java | 11 + .../client/suites/BaseTestSuite.java | 23 + .../suites/ConnectionOldDriverTestSuite.java | 10 + .../client/suites/ConnectionTestSuite.java | 11 + .../client/suites/CoreOldDriverTestSuite.java | 10 + .../client/suites/CoreTestSuite.java | 11 + .../suites/DiagnosticOldDriverTestSuite.java | 10 + .../client/suites/DiagnosticTestSuite.java | 11 + .../suites/LoaderOldDriverTestSuite.java | 10 + .../client/suites/LoaderTestSuite.java | 11 + .../client/suites/OldDriverTestSuite.java | 23 + 
.../suites/OthersOldDriverTestSuite.java | 10 + .../client/suites/OthersTestSuite.java | 11 + .../suites/ResultSetOldDriverTestSuite.java | 10 + .../client/suites/ResultSetTestSuite.java | 11 + .../suites/StatementOldDriverTestSuite.java | 10 + .../client/suites/StatementTestSuite.java | 11 + .../client/suites/UnitOldDriverTestSuite.java | 19 + .../client/suites/UnitTestSuite.java | 22 + .../client/util/SecretDetectorTest.java | 4 +- .../snowflake/client/util/StopwatchTest.java | 12 +- 300 files changed, 6261 insertions(+), 5413 deletions(-) delete mode 100644 FIPS/src/test/java/net/snowflake/client/ConditionalIgnoreRule.java create mode 100644 FIPS/src/test/java/net/snowflake/client/DontRunOnGCP.java create mode 100644 FIPS/src/test/java/net/snowflake/client/DontRunOnGithubActions.java delete mode 100644 FIPS/src/test/java/net/snowflake/client/RunningOnGCP.java delete mode 100644 FIPS/src/test/java/net/snowflake/client/RunningOnGithubActions.java delete mode 100644 FIPS/src/test/java/net/snowflake/client/RunningOnWinMac.java create mode 100644 FIPS/src/test/java/net/snowflake/client/category/FipsTestSuite.java delete mode 100644 FIPS/src/test/java/net/snowflake/client/category/TestCategoryFips.java create mode 100644 src/test/java/net/snowflake/client/AssumptionUtils.java delete mode 100644 src/test/java/net/snowflake/client/ConditionalIgnoreRule.java delete mode 100644 src/test/java/net/snowflake/client/RunningNotOnAWS.java delete mode 100644 src/test/java/net/snowflake/client/RunningNotOnAzure.java delete mode 100644 src/test/java/net/snowflake/client/RunningNotOnGCP.java delete mode 100644 src/test/java/net/snowflake/client/RunningNotOnGithubActionsMac.java delete mode 100644 src/test/java/net/snowflake/client/RunningNotOnJava21.java delete mode 100644 src/test/java/net/snowflake/client/RunningNotOnJava8.java delete mode 100644 src/test/java/net/snowflake/client/RunningNotOnLinux.java delete mode 100644 src/test/java/net/snowflake/client/RunningNotOnLinuxMac.java delete mode 100644 src/test/java/net/snowflake/client/RunningNotOnTestaccount.java delete mode 100644 src/test/java/net/snowflake/client/RunningNotOnWin.java delete mode 100644 src/test/java/net/snowflake/client/RunningNotOnWinMac.java delete mode 100644 src/test/java/net/snowflake/client/RunningOnGithubAction.java delete mode 100644 src/test/java/net/snowflake/client/RunningOnTestaccount.java delete mode 100644 src/test/java/net/snowflake/client/RunningOnWin.java delete mode 100644 src/test/java/net/snowflake/client/SkipOnThinJar.java create mode 100644 src/test/java/net/snowflake/client/annotations/DontRunOnGithubActions.java create mode 100644 src/test/java/net/snowflake/client/annotations/DontRunOnJava21.java create mode 100644 src/test/java/net/snowflake/client/annotations/DontRunOnJava8.java create mode 100644 src/test/java/net/snowflake/client/annotations/DontRunOnTestaccount.java create mode 100644 src/test/java/net/snowflake/client/annotations/DontRunOnThinJar.java create mode 100644 src/test/java/net/snowflake/client/annotations/DontRunOnWindows.java create mode 100644 src/test/java/net/snowflake/client/annotations/RunOnAWS.java create mode 100644 src/test/java/net/snowflake/client/annotations/RunOnAzure.java create mode 100644 src/test/java/net/snowflake/client/annotations/RunOnGCP.java create mode 100644 src/test/java/net/snowflake/client/annotations/RunOnGithubActionsNotMac.java create mode 100644 src/test/java/net/snowflake/client/annotations/RunOnLinux.java create mode 100644 
src/test/java/net/snowflake/client/annotations/RunOnLinuxOrMac.java create mode 100644 src/test/java/net/snowflake/client/annotations/RunOnMac.java create mode 100644 src/test/java/net/snowflake/client/annotations/RunOnTestaccountNotOnGithubActions.java create mode 100644 src/test/java/net/snowflake/client/annotations/RunOnWindows.java create mode 100644 src/test/java/net/snowflake/client/annotations/RunOnWindowsOrMac.java delete mode 100644 src/test/java/net/snowflake/client/category/TestCategoryArrow.java delete mode 100644 src/test/java/net/snowflake/client/category/TestCategoryConnection.java delete mode 100644 src/test/java/net/snowflake/client/category/TestCategoryCore.java delete mode 100644 src/test/java/net/snowflake/client/category/TestCategoryDiagnostic.java delete mode 100644 src/test/java/net/snowflake/client/category/TestCategoryLoader.java delete mode 100644 src/test/java/net/snowflake/client/category/TestCategoryOthers.java delete mode 100644 src/test/java/net/snowflake/client/category/TestCategoryResultSet.java delete mode 100644 src/test/java/net/snowflake/client/category/TestCategoryStatement.java create mode 100644 src/test/java/net/snowflake/client/category/TestTags.java create mode 100644 src/test/java/net/snowflake/client/jdbc/CallableStatementITBase.java delete mode 100644 src/test/java/net/snowflake/client/jdbc/PreparedStatementArrow1IT.java delete mode 100644 src/test/java/net/snowflake/client/jdbc/PreparedStatementArrow1LatestIT.java delete mode 100644 src/test/java/net/snowflake/client/jdbc/PreparedStatementArrow2IT.java delete mode 100644 src/test/java/net/snowflake/client/jdbc/PreparedStatementArrow2LatestIT.java delete mode 100644 src/test/java/net/snowflake/client/jdbc/ResultSetArrowIT.java delete mode 100644 src/test/java/net/snowflake/client/jdbc/ResultSetArrowLatestIT.java delete mode 100644 src/test/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializableArrowIT.java create mode 100644 src/test/java/net/snowflake/client/providers/BooleanProvider.java create mode 100644 src/test/java/net/snowflake/client/providers/ProvidersUtil.java create mode 100644 src/test/java/net/snowflake/client/providers/ResultFormatProvider.java create mode 100644 src/test/java/net/snowflake/client/providers/ScaleProvider.java create mode 100644 src/test/java/net/snowflake/client/providers/SimpleResultFormatProvider.java create mode 100644 src/test/java/net/snowflake/client/providers/SnowflakeArgumentsProvider.java create mode 100644 src/test/java/net/snowflake/client/providers/TimezoneProvider.java create mode 100644 src/test/java/net/snowflake/client/suites/ArrowTestSuite.java create mode 100644 src/test/java/net/snowflake/client/suites/BaseTestSuite.java create mode 100644 src/test/java/net/snowflake/client/suites/ConnectionOldDriverTestSuite.java create mode 100644 src/test/java/net/snowflake/client/suites/ConnectionTestSuite.java create mode 100644 src/test/java/net/snowflake/client/suites/CoreOldDriverTestSuite.java create mode 100644 src/test/java/net/snowflake/client/suites/CoreTestSuite.java create mode 100644 src/test/java/net/snowflake/client/suites/DiagnosticOldDriverTestSuite.java create mode 100644 src/test/java/net/snowflake/client/suites/DiagnosticTestSuite.java create mode 100644 src/test/java/net/snowflake/client/suites/LoaderOldDriverTestSuite.java create mode 100644 src/test/java/net/snowflake/client/suites/LoaderTestSuite.java create mode 100644 src/test/java/net/snowflake/client/suites/OldDriverTestSuite.java create mode 100644 
src/test/java/net/snowflake/client/suites/OthersOldDriverTestSuite.java create mode 100644 src/test/java/net/snowflake/client/suites/OthersTestSuite.java create mode 100644 src/test/java/net/snowflake/client/suites/ResultSetOldDriverTestSuite.java create mode 100644 src/test/java/net/snowflake/client/suites/ResultSetTestSuite.java create mode 100644 src/test/java/net/snowflake/client/suites/StatementOldDriverTestSuite.java create mode 100644 src/test/java/net/snowflake/client/suites/StatementTestSuite.java create mode 100644 src/test/java/net/snowflake/client/suites/UnitOldDriverTestSuite.java create mode 100644 src/test/java/net/snowflake/client/suites/UnitTestSuite.java diff --git a/.github/workflows/build-test.yml b/.github/workflows/build-test.yml index ef331f720..c93f081f0 100644 --- a/.github/workflows/build-test.yml +++ b/.github/workflows/build-test.yml @@ -38,13 +38,16 @@ jobs: test-windows: needs: build - name: ${{ matrix.runConfig.cloud }} Windows java ${{ matrix.runConfig.javaVersion }} JDBC${{ matrix.additionalMavenProfile }} ${{ matrix.category }} + name: ${{ matrix.runConfig.cloud }} Windows java ${{ matrix.runConfig.javaVersion }} JDBC${{ matrix.additionalMavenProfile }} ${{ matrix.category.name }} runs-on: windows-latest strategy: fail-fast: false matrix: runConfig: [ {cloud: 'AWS', javaVersion: '8'}, {cloud: 'GCP', javaVersion: '11'}, {cloud: 'AZURE', javaVersion: '17'}, {cloud: 'AWS', javaVersion: '21'}] - category: ['TestCategoryResultSet,TestCategoryStatement,TestCategoryLoader', 'TestCategoryOthers', 'TestCategoryArrow,TestCategoryConnection,TestCategoryCore,TestCategoryDiagnostic', 'TestCategoryFips'] + category: [{suites: 'ResultSetTestSuite,StatementTestSuite,LoaderTestSuite', name: 'TestCategoryResultSet,TestCategoryStatement,TestCategoryLoader'}, + {suites: 'OthersTestSuite', name: 'TestCategoryOthers'}, + {suites: 'ArrowTestSuite,ConnectionTestSuite,CoreTestSuite,DiagnosticTestSuite', name: 'TestCategoryArrow,TestCategoryConnection,TestCategoryCore,TestCategoryDiagnostic'}, + {suites: 'FipsTestSuite', name: "TestCategoryFips"}] additionalMavenProfile: [''] steps: - uses: actions/checkout@v4 @@ -62,19 +65,22 @@ jobs: env: PARAMETERS_SECRET: ${{ secrets.PARAMETERS_SECRET }} CLOUD_PROVIDER: ${{ matrix.runConfig.cloud }} - JDBC_TEST_CATEGORY: ${{ matrix.category }} + JDBC_TEST_SUITES: ${{ matrix.category.suites }} ADDITIONAL_MAVEN_PROFILE: ${{ matrix.additionalMavenProfile }} run: ci\\test_windows.bat test-mac: needs: build - name: ${{ matrix.runConfig.cloud }} Mac java ${{ matrix.runConfig.javaVersion }} JDBC${{ matrix.additionalMavenProfile }} ${{ matrix.category }} + name: ${{ matrix.runConfig.cloud }} Mac java ${{ matrix.runConfig.javaVersion }} JDBC${{ matrix.additionalMavenProfile }} ${{ matrix.category.name }} runs-on: macos-13 strategy: fail-fast: false matrix: runConfig: [ {cloud: 'AWS', javaVersion: '8'}, {cloud: 'GCP', javaVersion: '11'}, {cloud: 'AZURE', javaVersion: '17'}, {cloud: 'AWS', javaVersion: '21'}] - category: ['TestCategoryResultSet,TestCategoryStatement,TestCategoryLoader', 'TestCategoryOthers', 'TestCategoryArrow,TestCategoryConnection,TestCategoryCore,TestCategoryDiagnostic', 'TestCategoryFips'] + category: [{suites: 'ResultSetTestSuite,StatementTestSuite,LoaderTestSuite', name: 'TestCategoryResultSet,TestCategoryStatement,TestCategoryLoader'}, + {suites: 'OthersTestSuite', name: 'TestCategoryOthers'}, + {suites: 'ArrowTestSuite,ConnectionTestSuite,CoreTestSuite,DiagnosticTestSuite', name: 
'TestCategoryArrow,TestCategoryConnection,TestCategoryCore,TestCategoryDiagnostic'}, + {suites: 'FipsTestSuite', name: "TestCategoryFips"}] additionalMavenProfile: [''] steps: - uses: actions/checkout@v4 @@ -94,20 +100,23 @@ jobs: env: PARAMETERS_SECRET: ${{ secrets.PARAMETERS_SECRET }} CLOUD_PROVIDER: ${{ matrix.runConfig.cloud }} - JDBC_TEST_CATEGORY: ${{ matrix.category }} + JDBC_TEST_SUITES: ${{ matrix.category.suites }} ADDITIONAL_MAVEN_PROFILE: ${{ matrix.additionalMavenProfile }} run: /usr/local/bin/bash ./ci/test_mac.sh test-linux: needs: build - name: ${{ matrix.cloud }} Linux java on ${{ matrix.image }} JDBC${{ matrix.additionalMavenProfile }} ${{ matrix.category }} + name: ${{ matrix.cloud }} Linux java on ${{ matrix.image }} JDBC${{ matrix.additionalMavenProfile }} ${{ matrix.category.name }} runs-on: ubuntu-latest strategy: fail-fast: false matrix: image: [ 'jdbc-centos7-openjdk8', 'jdbc-centos7-openjdk11', 'jdbc-centos7-openjdk17', 'jdbc-centos7-openjdk21' ] cloud: [ 'AWS', 'AZURE', 'GCP' ] - category: ['TestCategoryResultSet,TestCategoryStatement,TestCategoryLoader', 'TestCategoryOthers', 'TestCategoryArrow,TestCategoryConnection,TestCategoryCore,TestCategoryDiagnostic', 'TestCategoryFips'] + category: [{suites: 'ResultSetTestSuite,StatementTestSuite,LoaderTestSuite', name: 'TestCategoryResultSet,TestCategoryStatement,TestCategoryLoader'}, + {suites: 'OthersTestSuite', name: 'TestCategoryOthers'}, + {suites: 'ArrowTestSuite,ConnectionTestSuite,CoreTestSuite,DiagnosticTestSuite', name: 'TestCategoryArrow,TestCategoryConnection,TestCategoryCore,TestCategoryDiagnostic'}, + {suites: 'FipsTestSuite', name: "TestCategoryFips"}] additionalMavenProfile: ['', '-Dthin-jar'] steps: - uses: actions/checkout@v4 @@ -117,19 +126,21 @@ jobs: PARAMETERS_SECRET: ${{ secrets.PARAMETERS_SECRET }} CLOUD_PROVIDER: ${{ matrix.cloud }} TARGET_DOCKER_TEST_IMAGE: ${{ matrix.image }} - JDBC_TEST_CATEGORY: ${{ matrix.category }} + JDBC_TEST_SUITES: ${{ matrix.category.suites }} ADDITIONAL_MAVEN_PROFILE: ${{ matrix.additionalMavenProfile }} run: ./ci/test.sh test-linux-old-driver: - name: Old JDBC ${{ matrix.category }} on ${{ matrix.image }} + name: Old JDBC ${{ matrix.category.name }} on ${{ matrix.image }} runs-on: ubuntu-latest strategy: fail-fast: false matrix: image: [ 'jdbc-centos7-openjdk8' ] cloud: [ 'AWS' ] - category: ['TestCategoryOthers', 'TestCategoryConnection,TestCategoryStatement', 'TestCategoryCore,TestCategoryLoader,TestCategoryResultSet'] + category: [{suites: 'OthersOldDriverTestSuite', name: 'TestCategoryOthers'}, + {suites: 'ConnectionOldDriverTestSuite,StatementOldDriverTestSuite', name: 'TestCategoryConnection,TestCategoryStatement'}, + {suites: 'LoaderOldDriverTestSuite,ResultSetOldDriverTestSuite', name: 'TestCategoryLoader,TestCategoryResultSet'}] is_old_driver: ['true'] steps: - uses: actions/checkout@v4 @@ -139,6 +150,6 @@ jobs: PARAMETERS_SECRET: ${{ secrets.PARAMETERS_SECRET }} CLOUD_PROVIDER: ${{ matrix.cloud }} TARGET_DOCKER_TEST_IMAGE: ${{ matrix.image }} - JDBC_TEST_CATEGORY: ${{ matrix.category }} + JDBC_TEST_SUITES: ${{ matrix.category.suites }} is_old_driver: ${{ matrix.is_old_driver }} run: ./ci/test.sh diff --git a/FIPS/pom.xml b/FIPS/pom.xml index b8efd8c05..78e83700d 100644 --- a/FIPS/pom.xml +++ b/FIPS/pom.xml @@ -725,6 +725,13 @@ maven-failsafe-plugin + + + org.apache.maven.surefire + surefire-junit-platform + ${version.plugin.surefire} + + ${version.plugin.failsafe} @@ -769,6 +776,13 @@ org.apache.maven.plugins maven-failsafe-plugin + + + 
org.apache.maven.surefire + surefire-junit-platform + ${version.plugin.surefire} + + ${version.plugin.failsafe} diff --git a/FIPS/src/test/java/net/snowflake/client/AbstractDriverIT.java b/FIPS/src/test/java/net/snowflake/client/AbstractDriverIT.java index 05c389208..360a1fcbb 100644 --- a/FIPS/src/test/java/net/snowflake/client/AbstractDriverIT.java +++ b/FIPS/src/test/java/net/snowflake/client/AbstractDriverIT.java @@ -21,12 +21,10 @@ import java.util.TimeZone; import java.util.logging.Level; import java.util.logging.Logger; -import org.junit.Rule; /** Base test class with common constants, data structures and methods */ public class AbstractDriverIT { // This is required to use ConditionalIgnore annotation. - @Rule public ConditionalIgnoreRule rule = new ConditionalIgnoreRule(); public static final String DRIVER_CLASS = "net.snowflake.client.jdbc.SnowflakeDriver"; public static final String DRIVER_CLASS_COM = "com.snowflake.client.jdbc.SnowflakeDriver"; diff --git a/FIPS/src/test/java/net/snowflake/client/ConditionalIgnoreRule.java b/FIPS/src/test/java/net/snowflake/client/ConditionalIgnoreRule.java deleted file mode 100644 index fe20883db..000000000 --- a/FIPS/src/test/java/net/snowflake/client/ConditionalIgnoreRule.java +++ /dev/null @@ -1,125 +0,0 @@ -package net.snowflake.client; - -/* - * Created by hyu on 1/22/18. - */ - -/* -Copyright (c) 2013,2014 Rüdiger Herrmann -All rights reserved. This program and the accompanying materials -are made available under the terms of the Eclipse Public License v1.0 -which accompanies this distribution, and is available at -http://www.eclipse.org/legal/epl-v10.html - -Contributors: -Rüdiger Herrmann - initial API and implementation -Matt Morrissette - allow to use non-static inner IgnoreConditions -*/ - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; -import java.lang.reflect.Modifier; -import org.junit.Assume; -import org.junit.rules.MethodRule; -import org.junit.runners.model.FrameworkMethod; -import org.junit.runners.model.Statement; - -public class ConditionalIgnoreRule implements MethodRule { - - public interface IgnoreCondition { - boolean isSatisfied(); - } - - @Retention(RetentionPolicy.RUNTIME) - @Target({ElementType.METHOD}) - public @interface ConditionalIgnore { - Class condition(); - } - - @Override - public Statement apply(Statement base, FrameworkMethod method, Object target) { - Statement result = base; - if (hasConditionalIgnoreAnnotation(method)) { - IgnoreCondition condition = getIgnoreCondition(target, method); - if (condition.isSatisfied()) { - result = new IgnoreStatement(condition); - } - } - return result; - } - - private static boolean hasConditionalIgnoreAnnotation(FrameworkMethod method) { - return method.getAnnotation(ConditionalIgnore.class) != null; - } - - private static IgnoreCondition getIgnoreCondition(Object target, FrameworkMethod method) { - ConditionalIgnore annotation = method.getAnnotation(ConditionalIgnore.class); - return new IgnoreConditionCreator(target, annotation).create(); - } - - private static class IgnoreConditionCreator { - private final Object target; - private final Class conditionType; - - IgnoreConditionCreator(Object target, ConditionalIgnore annotation) { - this.target = target; - this.conditionType = annotation.condition(); - } - - IgnoreCondition create() { - checkConditionType(); - try { - return createCondition(); - } catch (RuntimeException re) { - throw re; - } catch 
(Exception e) { - throw new RuntimeException(e); - } - } - - private IgnoreCondition createCondition() throws Exception { - IgnoreCondition result; - if (isConditionTypeStandalone()) { - result = conditionType.newInstance(); - } else { - result = conditionType.getDeclaredConstructor(target.getClass()).newInstance(target); - } - return result; - } - - private void checkConditionType() { - if (!isConditionTypeStandalone() && !isConditionTypeDeclaredInTarget()) { - String msg = - "Conditional class '%s' is a member class " - + "but was not declared inside the test case using it.\n" - + "Either make this class a static class, " - + "standalone class (by declaring it in it's own file) " - + "or move it inside the test case using it"; - throw new IllegalArgumentException(String.format(msg, conditionType.getName())); - } - } - - private boolean isConditionTypeStandalone() { - return !conditionType.isMemberClass() || Modifier.isStatic(conditionType.getModifiers()); - } - - private boolean isConditionTypeDeclaredInTarget() { - return target.getClass().isAssignableFrom(conditionType.getDeclaringClass()); - } - } - - private static class IgnoreStatement extends Statement { - private final IgnoreCondition condition; - - IgnoreStatement(IgnoreCondition condition) { - this.condition = condition; - } - - @Override - public void evaluate() { - Assume.assumeTrue("Ignored by " + condition.getClass().getSimpleName(), false); - } - } -} diff --git a/FIPS/src/test/java/net/snowflake/client/DontRunOnGCP.java b/FIPS/src/test/java/net/snowflake/client/DontRunOnGCP.java new file mode 100644 index 000000000..ccdf83206 --- /dev/null +++ b/FIPS/src/test/java/net/snowflake/client/DontRunOnGCP.java @@ -0,0 +1,15 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.junit.jupiter.api.condition.DisabledIfEnvironmentVariable; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +@DisabledIfEnvironmentVariable(named = "CLOUD_PROVIDER", matches = "(?i)GCP(?-i)") +public @interface DontRunOnGCP {} \ No newline at end of file diff --git a/FIPS/src/test/java/net/snowflake/client/DontRunOnGithubActions.java b/FIPS/src/test/java/net/snowflake/client/DontRunOnGithubActions.java new file mode 100644 index 000000000..98232e097 --- /dev/null +++ b/FIPS/src/test/java/net/snowflake/client/DontRunOnGithubActions.java @@ -0,0 +1,15 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.junit.jupiter.api.condition.DisabledIfEnvironmentVariable; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +@DisabledIfEnvironmentVariable(named = "GITHUB_ACTIONS", matches = ".*") +public @interface DontRunOnGithubActions {} diff --git a/FIPS/src/test/java/net/snowflake/client/RunningOnGCP.java b/FIPS/src/test/java/net/snowflake/client/RunningOnGCP.java deleted file mode 100644 index c902dc5f9..000000000 --- a/FIPS/src/test/java/net/snowflake/client/RunningOnGCP.java +++ /dev/null @@ -1,12 +0,0 @@ -/* - * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved. 
- */ -package net.snowflake.client; - -/** Run tests only on specified cloud provider or ignore */ -public class RunningOnGCP implements ConditionalIgnoreRule.IgnoreCondition { - public boolean isSatisfied() { - String cloudProvider = TestUtil.systemGetEnv("CLOUD_PROVIDER"); - return cloudProvider != null && cloudProvider.equalsIgnoreCase("GCP"); - } -} diff --git a/FIPS/src/test/java/net/snowflake/client/RunningOnGithubActions.java b/FIPS/src/test/java/net/snowflake/client/RunningOnGithubActions.java deleted file mode 100644 index d717b65dc..000000000 --- a/FIPS/src/test/java/net/snowflake/client/RunningOnGithubActions.java +++ /dev/null @@ -1,11 +0,0 @@ -/* - * Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. - */ -package net.snowflake.client; - -/** Run tests on CI */ -public class RunningOnGithubActions implements ConditionalIgnoreRule.IgnoreCondition { - public boolean isSatisfied() { - return TestUtil.systemGetEnv("GITHUB_ACTIONS") != null; - } -} diff --git a/FIPS/src/test/java/net/snowflake/client/RunningOnWinMac.java b/FIPS/src/test/java/net/snowflake/client/RunningOnWinMac.java deleted file mode 100644 index e69de29bb..000000000 diff --git a/FIPS/src/test/java/net/snowflake/client/TestUtil.java b/FIPS/src/test/java/net/snowflake/client/TestUtil.java index 703d59953..8bec5498f 100644 --- a/FIPS/src/test/java/net/snowflake/client/TestUtil.java +++ b/FIPS/src/test/java/net/snowflake/client/TestUtil.java @@ -9,7 +9,7 @@ import net.snowflake.client.core.SFException; import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; -import org.junit.Assert; +import org.junit.jupiter.api.Assertions; public class TestUtil { private static final SFLogger logger = SFLoggerFactory.getLogger(TestUtil.class); @@ -22,7 +22,7 @@ public class TestUtil { public static void assertSFException(int errorCode, TestRunInterface testCode) { try { testCode.run(); - Assert.fail(); + Assertions.fail(); } catch (SFException e) { assertThat(e.getVendorCode(), is(errorCode)); } diff --git a/FIPS/src/test/java/net/snowflake/client/category/FipsTestSuite.java b/FIPS/src/test/java/net/snowflake/client/category/FipsTestSuite.java new file mode 100644 index 000000000..d61ce2a83 --- /dev/null +++ b/FIPS/src/test/java/net/snowflake/client/category/FipsTestSuite.java @@ -0,0 +1,22 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. 
+ */ +package net.snowflake.client.category; + +import org.junit.platform.suite.api.IncludeTags; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.junit.platform.suite.api.ExcludePackages; +import org.junit.platform.suite.api.IncludeClassNamePatterns; +import org.junit.platform.suite.api.SelectPackages; +import org.junit.platform.suite.api.Suite; +import org.junit.platform.suite.api.SuiteDisplayName; + +@Suite +@SelectPackages("net.snowflake.client") +@ExcludePackages("net.snowflake.client.suites") +@IncludeClassNamePatterns(".+") +public class FipsTestSuite { +} diff --git a/FIPS/src/test/java/net/snowflake/client/category/TestCategoryFips.java b/FIPS/src/test/java/net/snowflake/client/category/TestCategoryFips.java deleted file mode 100644 index 06ae9faad..000000000 --- a/FIPS/src/test/java/net/snowflake/client/category/TestCategoryFips.java +++ /dev/null @@ -1,3 +0,0 @@ -package net.snowflake.client.category; - -public interface TestCategoryFips {} diff --git a/FIPS/src/test/java/net/snowflake/client/jdbc/ConnectionFipsIT.java b/FIPS/src/test/java/net/snowflake/client/jdbc/ConnectionFipsIT.java index c1509a6a8..0204e9a5d 100644 --- a/FIPS/src/test/java/net/snowflake/client/jdbc/ConnectionFipsIT.java +++ b/FIPS/src/test/java/net/snowflake/client/jdbc/ConnectionFipsIT.java @@ -3,7 +3,7 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; import java.net.URL; import java.nio.file.Files; @@ -20,21 +20,20 @@ import java.util.Properties; import javax.net.ssl.HttpsURLConnection; import net.snowflake.client.AbstractDriverIT; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGCP; -import net.snowflake.client.RunningOnGithubActions; -import net.snowflake.client.category.TestCategoryFips; +import net.snowflake.client.DontRunOnGCP; +import net.snowflake.client.DontRunOnGithubActions; import net.snowflake.client.core.SecurityUtil; import org.apache.commons.codec.binary.Base64; import org.bouncycastle.crypto.CryptoServicesRegistrar; import org.bouncycastle.crypto.fips.FipsStatus; import org.bouncycastle.jcajce.provider.BouncyCastleFipsProvider; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryFips.class) + +@Tag("fips") public class ConnectionFipsIT extends AbstractDriverIT { private static final String JCE_PROVIDER_BOUNCY_CASTLE_FIPS = "BCFIPS"; private static final String JCE_PROVIDER_SUN_JCE = "SunJCE"; @@ -106,7 +105,7 @@ public class ConnectionFipsIT extends AbstractDriverIT { private static int JCE_PROVIDER_SUN_JCE_PROVIDER_POSITION; private static int JCE_PROVIDER_SUN_RSA_SIGN_PROVIDER_POSITION; - @BeforeClass + @BeforeAll public static void setup() throws Exception { System.setProperty("javax.net.debug", "ssl"); // get keystore types for BouncyCastle libraries @@ -166,7 +165,7 @@ public static void setup() throws Exception { // connectToGoogle(); } - @AfterClass + @AfterAll public static void teardown() throws Exception { // Remove BouncyCastle FIPS Provider Security.removeProvider(JCE_PROVIDER_BOUNCY_CASTLE_FIPS); @@ -227,7 +226,7 @@ public void connectWithFips() throws SQLException { } @Test - 
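Note on the change above: the new FipsTestSuite and the @Tag("fips") marker replace the JUnit 4 @Category/TestCategoryFips pairing with the JUnit Platform suite API. As a minimal sketch only (class and method names are illustrative and not part of this patch; the patch's own suite selects by package, while this variant filters by tag):

import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.junit.platform.suite.api.IncludeTags;
import org.junit.platform.suite.api.SelectPackages;
import org.junit.platform.suite.api.Suite;

// Illustrative test class carrying the tag.
@Tag("fips")
class ExampleFipsIT {
  @Test
  void connectsWithFips() {
    // test body elided
  }
}

// Illustrative suite that discovers tests in the package and keeps only those tagged "fips".
@Suite
@SelectPackages("net.snowflake.client")
@IncludeTags("fips")
class ExampleFipsSuite {}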
@ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubActions.class) + @DontRunOnGithubActions public void connectWithFipsKeyPair() throws Exception { Map parameters = getConnectionParameters(); String testUser = parameters.get("user"); @@ -256,7 +255,7 @@ public void connectWithFipsKeyPair() throws Exception { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubActions.class) + @DontRunOnGithubActions public void testConnectUsingKeyPair() throws Exception { Map parameters = getConnectionParameters(); String testUser = parameters.get("user"); @@ -295,7 +294,7 @@ public void testConnectUsingKeyPair() throws Exception { * Currently ignored execution on GCP due to exception thrown "SSlException Could not generate XDH keypair" */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGCP.class) + @DontRunOnGCP public void connectWithFipsAndQuery() throws SQLException { try (Connection con = getConnection()) { Statement statement = con.createStatement(); @@ -329,7 +328,7 @@ public void connectWithFipsAndPut() throws Exception { /** Added in > 3.15.1 */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubActions.class) + @DontRunOnGithubActions public void connectWithFipsKeyPairWithBouncyCastle() throws Exception { System.setProperty(SecurityUtil.ENABLE_BOUNCYCASTLE_PROVIDER_JVM, "true"); connectWithFipsKeyPair(); @@ -337,7 +336,7 @@ public void connectWithFipsKeyPairWithBouncyCastle() throws Exception { /** Added in > 3.15.1 */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubActions.class) + @DontRunOnGithubActions public void testConnectUsingKeyPairWithBouncyCastle() throws Exception { System.setProperty(SecurityUtil.ENABLE_BOUNCYCASTLE_PROVIDER_JVM, "true"); testConnectUsingKeyPair(); diff --git a/TestOnly/pom.xml b/TestOnly/pom.xml index e03f87ef1..509cb8925 100644 --- a/TestOnly/pom.xml +++ b/TestOnly/pom.xml @@ -18,9 +18,10 @@ 0.8.4 true 5.13.0 + 5.11.1 + 3.5.1 3.5.6 net.snowflake.client.jdbc.internal - net.snowflake.client.category.AllTestCategory @@ -38,13 +39,61 @@ org.apache.maven.plugins maven-failsafe-plugin 3.0.0-M1 + test - junit - junit - 4.13.1 - jar + org.junit.jupiter + junit-jupiter-api + ${junit.version} + test + + + org.junit.jupiter + junit-jupiter-params + ${junit.version} + test + + + org.junit.jupiter + junit-jupiter-engine + ${junit.version} + test + + + org.junit.platform + junit-platform-suite + 1.11.1 + test + + + org.junit.platform + junit-platform-engine + 1.11.1 + test + + + org.junit.platform + junit-platform-runner + 1.11.1 + test + + + org.junit.platform + junit-platform-suite-api + 1.11.1 + test + + + org.junit.platform + junit-platform-suite-engine + 1.11.1 + test + + + org.junit.platform + junit-platform-launcher + 1.11.1 test @@ -371,7 +420,26 @@ org.apache.maven.plugins maven-surefire-plugin - 3.0.0-M5 + + + org.apache.maven.surefire + surefire-junit-platform + ${surefire.version} + + + ${surefire.version} + + + org.apache.maven.plugins + maven-failsafe-plugin + + + org.apache.maven.surefire + surefire-junit-platform + ${surefire.version} + + + ${surefire.version} @@ -387,35 +455,40 @@ org.apache.maven.plugins - maven-failsafe-plugin + maven-surefire-plugin - ${testCategory} + false + + + test + + + + + + org.apache.maven.plugins + maven-failsafe-plugin + + + + verify + + DefaultIT integration-test - - **/DellBoomiCloudIT.java - - - net.snowflake.client.log.JDK14Logger - - - ${basedir}/../src/test/resources/logging.properties - + 
net.snowflake.client.log.JDK14Logger + ${basedir}/src/test/resources/logging.properties + ${integrationTestSuites} - - - verify - - diff --git a/ci/container/test_component.sh b/ci/container/test_component.sh index da245a627..65efed88d 100755 --- a/ci/container/test_component.sh +++ b/ci/container/test_component.sh @@ -68,9 +68,6 @@ echo "[INFO] Running Hang Web Server" kill -9 $(ps -ewf | grep hang_webserver | grep -v grep | awk '{print $2}') || true python3 $THIS_DIR/hang_webserver.py 12345& -IFS=',' -read -ra CATEGORY <<< "$JDBC_TEST_CATEGORY" - # Avoid connection timeouts export MAVEN_OPTS="$MAVEN_OPTS -Dhttp.keepAlive=false -Dmaven.wagon.http.pool=false -Dmaven.wagon.http.retryHandler.class=standard -Dmaven.wagon.http.retryHandler.count=3 -Dmaven.wagon.httpconnectionManager.ttlSeconds=120" @@ -79,41 +76,39 @@ cd $SOURCE_ROOT # Avoid connection timeout on plugin dependency fetch or fail-fast when dependency cannot be fetched $MVNW_EXE --batch-mode --show-version dependency:go-offline -for c in "${CATEGORY[@]}"; do - c=$(echo $c | sed 's/ *$//g') - if [[ "$is_old_driver" == "true" ]]; then - pushd TestOnly >& /dev/null - JDBC_VERSION=$($MVNW_EXE org.apache.maven.plugins:maven-help-plugin:2.1.1:evaluate -Dexpression=project.version --batch-mode | grep -v "[INFO]") - echo "[INFO] Run JDBC $JDBC_VERSION tests" - $MVNW_EXE -DjenkinsIT \ - -Djava.io.tmpdir=$WORKSPACE \ - -Djacoco.skip.instrument=false \ - -DtestCategory=net.snowflake.client.category.$c \ - -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn \ - verify \ - --batch-mode --show-version - popd >& /dev/null - elif [[ "$c" == "TestCategoryFips" ]]; then - pushd FIPS >& /dev/null - echo "[INFO] Run Fips tests" - $MVNW_EXE -DjenkinsIT \ - -Djava.io.tmpdir=$WORKSPACE \ - -Djacoco.skip.instrument=false \ - -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn \ - -Dnot-self-contained-jar \ - verify \ - --batch-mode --show-version - popd >& /dev/null - else - echo "[INFO] Run $c tests" +if [[ "$is_old_driver" == "true" ]]; then + pushd TestOnly >& /dev/null + JDBC_VERSION=$($MVNW_EXE org.apache.maven.plugins:maven-help-plugin:2.1.1:evaluate -Dexpression=project.version --batch-mode | grep -v "[INFO]") + echo "[INFO] Run JDBC $JDBC_VERSION tests" $MVNW_EXE -DjenkinsIT \ -Djava.io.tmpdir=$WORKSPACE \ -Djacoco.skip.instrument=false \ - -DtestCategory=net.snowflake.client.category.$c \ + -DintegrationTestSuites="$JDBC_TEST_SUITES" \ -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn \ - -Dnot-self-contained-jar $ADDITIONAL_MAVEN_PROFILE \ verify \ --batch-mode --show-version - fi -done + popd >& /dev/null +elif [[ "$JDBC_TEST_SUITES" == "FipsTestSuite" ]]; then + pushd FIPS >& /dev/null + echo "[INFO] Run Fips tests" + $MVNW_EXE -DjenkinsIT \ + -Djava.io.tmpdir=$WORKSPACE \ + -Djacoco.skip.instrument=false \ + -DintegrationTestSuites=FipsTestSuite \ + -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn \ + -Dnot-self-contained-jar \ + verify \ + --batch-mode --show-version + popd >& /dev/null +else + echo "[INFO] Run $JDBC_TEST_SUITES tests" + $MVNW_EXE -DjenkinsIT \ + -Djava.io.tmpdir=$WORKSPACE \ + -Djacoco.skip.instrument=false \ + -DintegrationTestSuites="$JDBC_TEST_SUITES" \ + -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn \ + -Dnot-self-contained-jar $ADDITIONAL_MAVEN_PROFILE \ + verify \ + --batch-mode --show-version +fi IFS=' ' diff 
--git a/ci/log_analyze_setup.sh b/ci/log_analyze_setup.sh index fd573d194..63303964e 100755 --- a/ci/log_analyze_setup.sh +++ b/ci/log_analyze_setup.sh @@ -36,7 +36,7 @@ LOG_PROPERTY_FILE=$(cd "$(dirname "${BASH_SOURCE[0]}")/.."; pwd)/src/test/resour export CLIENT_DRIVER_NAME=JDBC function setup_log_env() { - if ["$WORKSPACE" == "/mnt/workspace"]; then + if [[ "$WORKSPACE" == "/mnt/workspace" ]]; then CLIENT_LOG_DIR_PATH=$LOCAL_CLIENT_LOG_DIR_PATH_DOCKER CLIENT_LOG_FILE_PATH=$CLIENT_LOG_FILE_PATH_DOCKER CLIENT_KNOWN_SSM_FILE_PATH=$CLIENT_KNOWN_SSM_FILE_PATH_DOCKER @@ -53,7 +53,7 @@ function setup_log_env() { sed -i'' -e "s|^java.util.logging.FileHandler.pattern.*|java.util.logging.FileHandler.pattern = $CLIENT_LOG_FILE_PATH|" ${LOG_PROPERTY_FILE} if [[ ! -d ${CLIENT_LOG_DIR_PATH} ]]; then - echo "[INFO] create clien log directory $CLIENT_LOG_DIR_PATH" + echo "[INFO] create client log directory $CLIENT_LOG_DIR_PATH" mkdir -p ${CLIENT_LOG_DIR_PATH} fi diff --git a/ci/test.sh b/ci/test.sh index 03c66c502..125e91d1f 100755 --- a/ci/test.sh +++ b/ci/test.sh @@ -30,8 +30,8 @@ else exit 2 fi -if [[ -z "$JDBC_TEST_CATEGORY" ]]; then - echo "[ERROR] Set JDBC_TEST_CATEGORY to the JDBC test category." +if [[ -z "$JDBC_TEST_SUITES" ]]; then + echo "[ERROR] Set JDBC_TEST_SUITES to the JDBC test category." find $THIS_DIR/../src/test/java -type f -exec grep -E "^import net.snowflake.client.category" {} \; | sort | uniq | awk -F. '{print $NF}' | awk -F\; '{print $1}' exit 2 fi @@ -56,7 +56,7 @@ for name in "${!TARGET_TEST_IMAGES[@]}"; do -e RUNNER_TRACKING_ID \ -e JOB_NAME \ -e BUILD_NUMBER \ - -e JDBC_TEST_CATEGORY \ + -e JDBC_TEST_SUITES \ -e ADDITIONAL_MAVEN_PROFILE \ -e CLOUD_PROVIDER \ -e is_old_driver \ diff --git a/ci/test_windows.bat b/ci/test_windows.bat index 4a5a8ebe3..0234b105c 100644 --- a/ci/test_windows.bat +++ b/ci/test_windows.bat @@ -111,47 +111,45 @@ echo "MAVEN OPTIONS %MAVEN_OPTS%" REM Avoid connection timeout on plugin dependency fetch or fail-fast when dependency cannot be fetched cmd /c %MVNW_EXE% --batch-mode --show-version dependency:go-offline -echo list = "%JDBC_TEST_CATEGORY%" -for %%a in ("%JDBC_TEST_CATEGORY:,=" "%") do ( - echo "Current category to execute" %%a - if /i %%a=="TestCategoryFips" ( - pushd FIPS - echo "[INFO] Run Fips tests" - cmd /c %MVNW_EXE% -B -DjenkinsIT ^ - -Djava.io.tmpdir=%GITHUB_WORKSPACE% ^ - -Djacoco.skip.instrument=false ^ - -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn ^ - -Dnot-self-contained-jar ^ - verify ^ - --batch-mode --show-version > log.txt & type log.txt - echo "[INFO] Check for test execution status" - find /i /c "BUILD FAILURE" log.txt > NUL - set isfound=!errorlevel! - if !isfound! equ 0 ( - echo [ERROR] Failed run %%a test - exit /b 1 - ) else ( - echo [INFO] Success run %%a test - ) - popd ) else ( - echo "[INFO] Run %%a tests" - cmd /c %MVNW_EXE% -B -DjenkinsIT ^ - -Djava.io.tmpdir=%GITHUB_WORKSPACE% ^ - -Djacoco.skip.instrument=false ^ - -DtestCategory=net.snowflake.client.category.%%a ^ - -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn ^ - -Dnot-self-contained-jar %ADDITIONAL_MAVEN_PROFILE% ^ - verify ^ - --batch-mode --show-version > log.txt & type log.txt - echo "[INFO] Check for test execution status" - find /i /c "BUILD FAILURE" log.txt > NUL - set isfound=!errorlevel! - if !isfound! 
equ 0 ( - echo [ERROR] Failed run %%a test - exit /b 1 - ) else ( - echo [INFO] Success run %%a test - ) +if "%JDBC_TEST_SUITES%"=="FipsTestSuite" ( + pushd FIPS + echo "[INFO] Run Fips tests" + cmd /c %MVNW_EXE% -B -DjenkinsIT ^ + -Djava.io.tmpdir=%GITHUB_WORKSPACE% ^ + -Djacoco.skip.instrument=false ^ + -DintegrationTestSuites=FipsTestSuite ^ + -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn ^ + -Dnot-self-contained-jar ^ + verify ^ + --batch-mode --show-version > log.txt & type log.txt + echo "[INFO] Check for test execution status" + find /i /c "BUILD FAILURE" log.txt > NUL + set isfound=!errorlevel! + if !isfound! equ 0 ( + echo [ERROR] Failed run %%a test + exit /b 1 + ) else ( + echo [INFO] Success run %%a test + ) + popd +) else ( + echo "[INFO] Run %JDBC_TEST_SUITES% tests" + cmd /c %MVNW_EXE% -B -DjenkinsIT ^ + -Djava.io.tmpdir=%GITHUB_WORKSPACE% ^ + -Djacoco.skip.instrument=false ^ + -DintegrationTestSuites="%JDBC_TEST_SUITES%" ^ + -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn ^ + -Dnot-self-contained-jar %ADDITIONAL_MAVEN_PROFILE% ^ + verify ^ + --batch-mode --show-version > log.txt & type log.txt + echo "[INFO] Check for test execution status" + find /i /c "BUILD FAILURE" log.txt > NUL + set isfound=!errorlevel! + if !isfound! equ 0 ( + echo [ERROR] Failed run %%a test + exit /b 1 + ) else ( + echo [INFO] Success run %%a test ) ) diff --git a/parent-pom.xml b/parent-pom.xml index 1e5aac29c..b1742d64e 100644 --- a/parent-pom.xml +++ b/parent-pom.xml @@ -61,7 +61,9 @@ 5.13.0 2.8.1 2.4.9 - 4.13.2 + 4.13.2 + 5.11.1 + 1.11.1 1.15.3 1.3.6 2.2.0 @@ -78,7 +80,7 @@ net_snowflake_client_jdbc_internal 2.0.13 5.1.4 - net.snowflake.client.category.AllTestCategory + UnitTestSuite 2.4.1 1.9 3.6.3 @@ -91,7 +93,7 @@ 3.1.1 3.0.0-M3 3.1.0 - 3.0.0 + 3.5.1 2.19 3.0.1 3.1.1 @@ -103,7 +105,7 @@ 3.6.0 3.0.1 3.2.1 - 3.0.0 + 3.5.1 3.8.0 @@ -267,9 +269,69 @@ junit junit + ${junit4.version} + test + + + org.junit.jupiter + junit-jupiter + ${junit.version} + test + + + org.junit.jupiter + junit-jupiter-api + ${junit.version} + test + + + org.junit.jupiter + junit-jupiter-engine + ${junit.version} + test + + + org.junit.jupiter + junit-jupiter-params ${junit.version} test + + org.junit.platform + junit-platform-suite + ${junit.platform.version} + test + + + org.junit.platform + junit-platform-engine + ${junit.platform.version} + test + + + org.junit.platform + junit-platform-runner + ${junit.platform.version} + test + + + org.junit.platform + junit-platform-suite-api + ${junit.platform.version} + test + + + org.junit.platform + junit-platform-suite-engine + ${junit.platform.version} + test + + + org.junit.platform + junit-platform-launcher + ${junit.platform.version} + test + org.apache.avro avro @@ -509,18 +571,6 @@ ${awaitility.version} test - - org.apache.maven.surefire - surefire-junit4 - ${version.plugin.surefire} - test - - - org.apache.maven.surefire - common-junit48 - ${version.plugin.surefire} - test - org.wiremock wiremock-standalone @@ -740,6 +790,46 @@ junit junit + + org.junit.jupiter + junit-jupiter + + + org.junit.jupiter + junit-jupiter-api + + + org.junit.jupiter + junit-jupiter-engine + + + org.junit.jupiter + junit-jupiter-params + + + org.junit.platform + junit-platform-suite + + + org.junit.platform + junit-platform-engine + + + org.junit.platform + junit-platform-runner + + + org.junit.platform + junit-platform-suite-api + + + org.junit.platform + junit-platform-suite-engine + + + 
org.junit.platform + junit-platform-launcher + org.apache.avro avro @@ -772,15 +862,6 @@ org.awaitility awaitility - - - org.apache.maven.surefire - surefire-junit4 - - - org.apache.maven.surefire - common-junit48 - org.wiremock wiremock-standalone diff --git a/pom.xml b/pom.xml index 0f34e2953..2cfb0425e 100644 --- a/pom.xml +++ b/pom.xml @@ -106,6 +106,13 @@ org.apache.maven.plugins maven-failsafe-plugin ${version.plugin.failsafe} + + + org.apache.maven.surefire + surefire-junit-platform + ${version.plugin.surefire} + + org.apache.maven.plugins @@ -146,6 +153,13 @@ org.apache.maven.plugins maven-surefire-plugin ${version.plugin.surefire} + + + org.apache.maven.surefire + surefire-junit-platform + ${version.plugin.surefire} + + org.codehaus.mojo @@ -1197,10 +1211,28 @@ org.apache.maven.plugins - maven-failsafe-plugin + maven-surefire-plugin - ${testCategory} + UnitTestSuite + + + org.apache.maven.surefire + surefire-junit-platform + ${version.plugin.surefire} + + + + + + test + + + + + + org.apache.maven.plugins + maven-failsafe-plugin @@ -1213,13 +1245,11 @@ integration-test - - **/DellBoomiCloudIT.java - net.snowflake.client.log.JDK14Logger ${basedir}/src/test/resources/logging.properties + ${integrationTestSuites} @@ -1338,27 +1368,24 @@ org.apache.maven.plugins maven-failsafe-plugin + + + **/*IT.java + + + + + org.apache.maven.surefire + surefire-junit-platform + ${version.plugin.surefire} + + verify - - ClientTelemetryIT - - integration-test - - - - **/ConnectionIT.java - **/SFTrustManagerIT.java - - - ${basedir}/src/test/resources/logback-test.xml - - - @@ -1376,21 +1403,24 @@ org.apache.maven.plugins maven-failsafe-plugin + + + **/*IT.java + + + + + org.apache.maven.surefire + surefire-junit-platform + ${version.plugin.surefire} + + verify - - DellBoomiIT - - integration-test - - - DellBoomiCloudIT.java - - @@ -1408,27 +1438,24 @@ org.apache.maven.plugins maven-failsafe-plugin + + + **/*IT.java + + + + + org.apache.maven.surefire + surefire-junit-platform + ${version.plugin.surefire} + + verify - - ClientTelemetryIT - - integration-test - - - - **/ConnectionIT.java - **/SFTrustManagerIT.java - - - ${basedir}/src/test/resources/logback-test.xml - - - diff --git a/src/test/java/com/snowflake/client/jdbc/SnowflakeDriverIT.java b/src/test/java/com/snowflake/client/jdbc/SnowflakeDriverIT.java index d2a7246f8..f4f226fa9 100644 --- a/src/test/java/com/snowflake/client/jdbc/SnowflakeDriverIT.java +++ b/src/test/java/com/snowflake/client/jdbc/SnowflakeDriverIT.java @@ -1,15 +1,15 @@ package com.snowflake.client.jdbc; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Connection; import java.sql.SQLException; import net.snowflake.client.AbstractDriverIT; -import net.snowflake.client.category.TestCategoryConnection; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryConnection.class) +@Tag(TestTags.CONNECTION) public class SnowflakeDriverIT extends AbstractDriverIT { @Test diff --git a/src/test/java/net/snowflake/client/AbstractDriverIT.java b/src/test/java/net/snowflake/client/AbstractDriverIT.java index 4a3acea23..3104ce7e9 100644 --- a/src/test/java/net/snowflake/client/AbstractDriverIT.java +++ b/src/test/java/net/snowflake/client/AbstractDriverIT.java @@ -24,12 +24,9 @@ import java.util.logging.Level; import java.util.logging.Logger; import 
javax.annotation.Nullable; -import org.junit.Rule; /** Base test class with common constants, data structures and methods */ public class AbstractDriverIT { - // This is required to use ConditionalIgnore annotation. - @Rule public ConditionalIgnoreRule rule = new ConditionalIgnoreRule(); public static final String DRIVER_CLASS = "net.snowflake.client.jdbc.SnowflakeDriver"; public static final String DRIVER_CLASS_COM = "com.snowflake.client.jdbc.SnowflakeDriver"; diff --git a/src/test/java/net/snowflake/client/AssumptionUtils.java b/src/test/java/net/snowflake/client/AssumptionUtils.java new file mode 100644 index 000000000..73ae13fbb --- /dev/null +++ b/src/test/java/net/snowflake/client/AssumptionUtils.java @@ -0,0 +1,36 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client; + +import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty; +import static org.junit.jupiter.api.Assumptions.assumeFalse; +import static org.junit.jupiter.api.Assumptions.assumeTrue; + +import net.snowflake.client.core.Constants; + +public class AssumptionUtils { + public static void assumeNotRunningOnGithubActionsMac() { + assumeFalse(isRunningOnGithubActions() && Constants.getOS() == Constants.OS.MAC); + } + + public static void assumeNotRunningOnJava8() { + assumeFalse(systemGetProperty("java.version").startsWith("1.8.0")); + } + + public static void assumeNotRunningOnJava21() { + assumeFalse(systemGetProperty("java.version").startsWith("21.")); + } + + public static void assumeRunningOnGithubActions() { + assumeTrue(isRunningOnGithubActions()); + } + + public static boolean isRunningOnGithubActions() { + return TestUtil.systemGetEnv("GITHUB_ACTIONS") != null; + } + + public static void assumeRunningOnLinuxMac() { + assumeTrue(Constants.getOS() == Constants.OS.LINUX || Constants.getOS() == Constants.OS.MAC); + } +} diff --git a/src/test/java/net/snowflake/client/ConditionalIgnoreRule.java b/src/test/java/net/snowflake/client/ConditionalIgnoreRule.java deleted file mode 100644 index fe20883db..000000000 --- a/src/test/java/net/snowflake/client/ConditionalIgnoreRule.java +++ /dev/null @@ -1,125 +0,0 @@ -package net.snowflake.client; - -/* - * Created by hyu on 1/22/18. - */ - -/* -Copyright (c) 2013,2014 Rüdiger Herrmann -All rights reserved. 
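Note on the change above: AssumptionUtils replaces the rule-based ConditionalIgnoreRule skipping with plain JUnit 5 assumptions, which abort (report as skipped) rather than fail a test when a precondition does not hold. A minimal usage sketch, assuming an illustrative test class name:

import static net.snowflake.client.AssumptionUtils.assumeNotRunningOnJava8;
import static net.snowflake.client.AssumptionUtils.assumeRunningOnGithubActions;

import org.junit.jupiter.api.Test;

class ExampleAssumptionIT {
  @Test
  void runsOnlyOnGithubActionsAndNotOnJava8() {
    // Each call aborts the test (skipped, not failed) when its condition is violated.
    assumeRunningOnGithubActions();
    assumeNotRunningOnJava8();
    // actual test body elided
  }
}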
This program and the accompanying materials -are made available under the terms of the Eclipse Public License v1.0 -which accompanies this distribution, and is available at -http://www.eclipse.org/legal/epl-v10.html - -Contributors: -Rüdiger Herrmann - initial API and implementation -Matt Morrissette - allow to use non-static inner IgnoreConditions -*/ - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; -import java.lang.reflect.Modifier; -import org.junit.Assume; -import org.junit.rules.MethodRule; -import org.junit.runners.model.FrameworkMethod; -import org.junit.runners.model.Statement; - -public class ConditionalIgnoreRule implements MethodRule { - - public interface IgnoreCondition { - boolean isSatisfied(); - } - - @Retention(RetentionPolicy.RUNTIME) - @Target({ElementType.METHOD}) - public @interface ConditionalIgnore { - Class condition(); - } - - @Override - public Statement apply(Statement base, FrameworkMethod method, Object target) { - Statement result = base; - if (hasConditionalIgnoreAnnotation(method)) { - IgnoreCondition condition = getIgnoreCondition(target, method); - if (condition.isSatisfied()) { - result = new IgnoreStatement(condition); - } - } - return result; - } - - private static boolean hasConditionalIgnoreAnnotation(FrameworkMethod method) { - return method.getAnnotation(ConditionalIgnore.class) != null; - } - - private static IgnoreCondition getIgnoreCondition(Object target, FrameworkMethod method) { - ConditionalIgnore annotation = method.getAnnotation(ConditionalIgnore.class); - return new IgnoreConditionCreator(target, annotation).create(); - } - - private static class IgnoreConditionCreator { - private final Object target; - private final Class conditionType; - - IgnoreConditionCreator(Object target, ConditionalIgnore annotation) { - this.target = target; - this.conditionType = annotation.condition(); - } - - IgnoreCondition create() { - checkConditionType(); - try { - return createCondition(); - } catch (RuntimeException re) { - throw re; - } catch (Exception e) { - throw new RuntimeException(e); - } - } - - private IgnoreCondition createCondition() throws Exception { - IgnoreCondition result; - if (isConditionTypeStandalone()) { - result = conditionType.newInstance(); - } else { - result = conditionType.getDeclaredConstructor(target.getClass()).newInstance(target); - } - return result; - } - - private void checkConditionType() { - if (!isConditionTypeStandalone() && !isConditionTypeDeclaredInTarget()) { - String msg = - "Conditional class '%s' is a member class " - + "but was not declared inside the test case using it.\n" - + "Either make this class a static class, " - + "standalone class (by declaring it in it's own file) " - + "or move it inside the test case using it"; - throw new IllegalArgumentException(String.format(msg, conditionType.getName())); - } - } - - private boolean isConditionTypeStandalone() { - return !conditionType.isMemberClass() || Modifier.isStatic(conditionType.getModifiers()); - } - - private boolean isConditionTypeDeclaredInTarget() { - return target.getClass().isAssignableFrom(conditionType.getDeclaringClass()); - } - } - - private static class IgnoreStatement extends Statement { - private final IgnoreCondition condition; - - IgnoreStatement(IgnoreCondition condition) { - this.condition = condition; - } - - @Override - public void evaluate() { - Assume.assumeTrue("Ignored by " + condition.getClass().getSimpleName(), 
false); - } - } -} diff --git a/src/test/java/net/snowflake/client/RunningNotOnAWS.java b/src/test/java/net/snowflake/client/RunningNotOnAWS.java deleted file mode 100644 index 70f54ab8f..000000000 --- a/src/test/java/net/snowflake/client/RunningNotOnAWS.java +++ /dev/null @@ -1,12 +0,0 @@ -/* - * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved. - */ -package net.snowflake.client; - -/** Run tests only on specified cloud provider or ignore */ -public class RunningNotOnAWS implements ConditionalIgnoreRule.IgnoreCondition { - public boolean isSatisfied() { - String cloudProvider = TestUtil.systemGetEnv("CLOUD_PROVIDER"); - return cloudProvider != null && !cloudProvider.equalsIgnoreCase("AWS"); - } -} diff --git a/src/test/java/net/snowflake/client/RunningNotOnAzure.java b/src/test/java/net/snowflake/client/RunningNotOnAzure.java deleted file mode 100644 index e2a00966c..000000000 --- a/src/test/java/net/snowflake/client/RunningNotOnAzure.java +++ /dev/null @@ -1,12 +0,0 @@ -/* - * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved. - */ -package net.snowflake.client; - -/** Run tests only on specified cloud provider or ignore */ -public class RunningNotOnAzure implements ConditionalIgnoreRule.IgnoreCondition { - public boolean isSatisfied() { - String cloudProvider = TestUtil.systemGetEnv("CLOUD_PROVIDER"); - return cloudProvider != null && !cloudProvider.equalsIgnoreCase("Azure"); - } -} diff --git a/src/test/java/net/snowflake/client/RunningNotOnGCP.java b/src/test/java/net/snowflake/client/RunningNotOnGCP.java deleted file mode 100644 index 7a5c7aafb..000000000 --- a/src/test/java/net/snowflake/client/RunningNotOnGCP.java +++ /dev/null @@ -1,12 +0,0 @@ -/* - * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved. - */ -package net.snowflake.client; - -/** Run tests only on specified cloud provider or ignore */ -public class RunningNotOnGCP implements ConditionalIgnoreRule.IgnoreCondition { - public boolean isSatisfied() { - String cloudProvider = TestUtil.systemGetEnv("CLOUD_PROVIDER"); - return cloudProvider != null && !cloudProvider.equalsIgnoreCase("GCP"); - } -} diff --git a/src/test/java/net/snowflake/client/RunningNotOnGithubActionsMac.java b/src/test/java/net/snowflake/client/RunningNotOnGithubActionsMac.java deleted file mode 100644 index 9b872fc8b..000000000 --- a/src/test/java/net/snowflake/client/RunningNotOnGithubActionsMac.java +++ /dev/null @@ -1,16 +0,0 @@ -/* - * Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. 
- */ -package net.snowflake.client; - -import net.snowflake.client.core.Constants; - -public class RunningNotOnGithubActionsMac implements ConditionalIgnoreRule.IgnoreCondition { - public boolean isSatisfied() { - return isRunningOnGithubActionsMac(); - } - - public static boolean isRunningOnGithubActionsMac() { - return TestUtil.systemGetEnv("GITHUB_ACTIONS") != null && Constants.getOS() == Constants.OS.MAC; - } -} diff --git a/src/test/java/net/snowflake/client/RunningNotOnJava21.java b/src/test/java/net/snowflake/client/RunningNotOnJava21.java deleted file mode 100644 index 4e2e3e03c..000000000 --- a/src/test/java/net/snowflake/client/RunningNotOnJava21.java +++ /dev/null @@ -1,13 +0,0 @@ -package net.snowflake.client; - -import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty; - -public class RunningNotOnJava21 implements ConditionalIgnoreRule.IgnoreCondition { - public boolean isSatisfied() { - return isRunningOnJava21(); - } - - public static boolean isRunningOnJava21() { - return systemGetProperty("java.version").startsWith("21."); - } -} diff --git a/src/test/java/net/snowflake/client/RunningNotOnJava8.java b/src/test/java/net/snowflake/client/RunningNotOnJava8.java deleted file mode 100644 index 8ee4b3e40..000000000 --- a/src/test/java/net/snowflake/client/RunningNotOnJava8.java +++ /dev/null @@ -1,13 +0,0 @@ -package net.snowflake.client; - -import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty; - -public class RunningNotOnJava8 implements ConditionalIgnoreRule.IgnoreCondition { - public boolean isSatisfied() { - return isRunningOnJava8(); - } - - public static boolean isRunningOnJava8() { - return systemGetProperty("java.version").startsWith("1.8.0"); - } -} diff --git a/src/test/java/net/snowflake/client/RunningNotOnLinux.java b/src/test/java/net/snowflake/client/RunningNotOnLinux.java deleted file mode 100644 index 3cbaf1339..000000000 --- a/src/test/java/net/snowflake/client/RunningNotOnLinux.java +++ /dev/null @@ -1,9 +0,0 @@ -package net.snowflake.client; - -import net.snowflake.client.core.Constants; - -public class RunningNotOnLinux implements ConditionalIgnoreRule.IgnoreCondition { - public boolean isSatisfied() { - return Constants.getOS() != Constants.OS.LINUX; - } -} diff --git a/src/test/java/net/snowflake/client/RunningNotOnLinuxMac.java b/src/test/java/net/snowflake/client/RunningNotOnLinuxMac.java deleted file mode 100644 index a99eaa3b7..000000000 --- a/src/test/java/net/snowflake/client/RunningNotOnLinuxMac.java +++ /dev/null @@ -1,13 +0,0 @@ -package net.snowflake.client; - -import net.snowflake.client.core.Constants; - -public class RunningNotOnLinuxMac implements ConditionalIgnoreRule.IgnoreCondition { - public boolean isSatisfied() { - return Constants.getOS() != Constants.OS.LINUX && Constants.getOS() != Constants.OS.MAC; - } - - public static boolean isNotRunningOnLinuxMac() { - return Constants.getOS() != Constants.OS.LINUX && Constants.getOS() != Constants.OS.MAC; - } -} diff --git a/src/test/java/net/snowflake/client/RunningNotOnTestaccount.java b/src/test/java/net/snowflake/client/RunningNotOnTestaccount.java deleted file mode 100644 index 596f5ca55..000000000 --- a/src/test/java/net/snowflake/client/RunningNotOnTestaccount.java +++ /dev/null @@ -1,10 +0,0 @@ -package net.snowflake.client; - -import static net.snowflake.client.RunningOnGithubAction.isRunningOnGithubAction; - -public class RunningNotOnTestaccount implements ConditionalIgnoreRule.IgnoreCondition { - public boolean isSatisfied() { - return 
(!("testaccount".equals(TestUtil.systemGetEnv("SNOWFLAKE_TEST_ACCOUNT"))) - || isRunningOnGithubAction()); - } -} diff --git a/src/test/java/net/snowflake/client/RunningNotOnWin.java b/src/test/java/net/snowflake/client/RunningNotOnWin.java deleted file mode 100644 index ce5cdf7d1..000000000 --- a/src/test/java/net/snowflake/client/RunningNotOnWin.java +++ /dev/null @@ -1,9 +0,0 @@ -package net.snowflake.client; - -import net.snowflake.client.core.Constants; - -public class RunningNotOnWin implements ConditionalIgnoreRule.IgnoreCondition { - public boolean isSatisfied() { - return Constants.getOS() != Constants.OS.WINDOWS; - } -} diff --git a/src/test/java/net/snowflake/client/RunningNotOnWinMac.java b/src/test/java/net/snowflake/client/RunningNotOnWinMac.java deleted file mode 100644 index 9d1c32bdc..000000000 --- a/src/test/java/net/snowflake/client/RunningNotOnWinMac.java +++ /dev/null @@ -1,9 +0,0 @@ -package net.snowflake.client; - -import net.snowflake.client.core.Constants; - -public class RunningNotOnWinMac implements ConditionalIgnoreRule.IgnoreCondition { - public boolean isSatisfied() { - return Constants.getOS() != Constants.OS.MAC && Constants.getOS() != Constants.OS.WINDOWS; - } -} diff --git a/src/test/java/net/snowflake/client/RunningOnGithubAction.java b/src/test/java/net/snowflake/client/RunningOnGithubAction.java deleted file mode 100644 index 0326c4fca..000000000 --- a/src/test/java/net/snowflake/client/RunningOnGithubAction.java +++ /dev/null @@ -1,15 +0,0 @@ -/* - * Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. - */ -package net.snowflake.client; - -/** Run tests on CI */ -public class RunningOnGithubAction implements ConditionalIgnoreRule.IgnoreCondition { - public boolean isSatisfied() { - return TestUtil.systemGetEnv("GITHUB_ACTIONS") != null; - } - - public static boolean isRunningOnGithubAction() { - return TestUtil.systemGetEnv("GITHUB_ACTIONS") != null; - } -} diff --git a/src/test/java/net/snowflake/client/RunningOnTestaccount.java b/src/test/java/net/snowflake/client/RunningOnTestaccount.java deleted file mode 100644 index 186496977..000000000 --- a/src/test/java/net/snowflake/client/RunningOnTestaccount.java +++ /dev/null @@ -1,7 +0,0 @@ -package net.snowflake.client; - -public class RunningOnTestaccount implements ConditionalIgnoreRule.IgnoreCondition { - public boolean isSatisfied() { - return TestUtil.systemGetEnv("SNOWFLAKE_TEST_ACCOUNT").contains("testaccount"); - } -} diff --git a/src/test/java/net/snowflake/client/RunningOnWin.java b/src/test/java/net/snowflake/client/RunningOnWin.java deleted file mode 100644 index 025ab1e04..000000000 --- a/src/test/java/net/snowflake/client/RunningOnWin.java +++ /dev/null @@ -1,9 +0,0 @@ -package net.snowflake.client; - -import net.snowflake.client.core.Constants; - -public class RunningOnWin implements ConditionalIgnoreRule.IgnoreCondition { - public boolean isSatisfied() { - return Constants.getOS() == Constants.OS.WINDOWS; - } -} diff --git a/src/test/java/net/snowflake/client/SkipOnThinJar.java b/src/test/java/net/snowflake/client/SkipOnThinJar.java deleted file mode 100644 index d02d104dd..000000000 --- a/src/test/java/net/snowflake/client/SkipOnThinJar.java +++ /dev/null @@ -1,12 +0,0 @@ -/* - * Copyright (c) 2012-2024 Snowflake Computing Inc. All right reserved. 
- */ -package net.snowflake.client; - -/** Skip tests on CI when thin jar is tested */ -public class SkipOnThinJar implements ConditionalIgnoreRule.IgnoreCondition { - @Override - public boolean isSatisfied() { - return "-Dthin-jar".equals(TestUtil.systemGetEnv("ADDITIONAL_MAVEN_PROFILE")); - } -} diff --git a/src/test/java/net/snowflake/client/TestUtil.java b/src/test/java/net/snowflake/client/TestUtil.java index ba73dbb01..7f4b8d90a 100644 --- a/src/test/java/net/snowflake/client/TestUtil.java +++ b/src/test/java/net/snowflake/client/TestUtil.java @@ -5,10 +5,10 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.hamcrest.Matchers.matchesPattern; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.fail; import java.sql.SQLException; import java.sql.Statement; @@ -19,7 +19,7 @@ import net.snowflake.client.jdbc.SnowflakeUtil; import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; -import org.junit.Assert; +import org.hamcrest.MatcherAssert; public class TestUtil { private static final SFLogger logger = SFLoggerFactory.getLogger(TestUtil.class); @@ -53,7 +53,7 @@ public static boolean isSchemaGeneratedInTests(String schema) { public static void assertSFException(int errorCode, TestRunInterface testCode) { try { testCode.run(); - Assert.fail(); + fail(); } catch (SFException e) { assertThat(e.getVendorCode(), is(errorCode)); } @@ -91,8 +91,8 @@ public static String systemGetEnv(String env) { public static void assertValidQueryId(String queryId) { assertNotNull(queryId); - assertTrue( - "Expecting " + queryId + " is a valid UUID", QUERY_ID_REGEX.matcher(queryId).matches()); + MatcherAssert.assertThat( + "Expecting " + queryId + " is a valid UUID", queryId, matchesPattern(QUERY_ID_REGEX)); } /** diff --git a/src/test/java/net/snowflake/client/annotations/DontRunOnGithubActions.java b/src/test/java/net/snowflake/client/annotations/DontRunOnGithubActions.java new file mode 100644 index 000000000..993d9d6ad --- /dev/null +++ b/src/test/java/net/snowflake/client/annotations/DontRunOnGithubActions.java @@ -0,0 +1,15 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.junit.jupiter.api.condition.DisabledIfEnvironmentVariable; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +@DisabledIfEnvironmentVariable(named = "GITHUB_ACTIONS", matches = ".*") +public @interface DontRunOnGithubActions {} diff --git a/src/test/java/net/snowflake/client/annotations/DontRunOnJava21.java b/src/test/java/net/snowflake/client/annotations/DontRunOnJava21.java new file mode 100644 index 000000000..29374b837 --- /dev/null +++ b/src/test/java/net/snowflake/client/annotations/DontRunOnJava21.java @@ -0,0 +1,16 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. 
+ */ +package net.snowflake.client.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.junit.jupiter.api.condition.DisabledOnJre; +import org.junit.jupiter.api.condition.JRE; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +@DisabledOnJre(JRE.JAVA_21) +public @interface DontRunOnJava21 {} diff --git a/src/test/java/net/snowflake/client/annotations/DontRunOnJava8.java b/src/test/java/net/snowflake/client/annotations/DontRunOnJava8.java new file mode 100644 index 000000000..81a3a0c03 --- /dev/null +++ b/src/test/java/net/snowflake/client/annotations/DontRunOnJava8.java @@ -0,0 +1,16 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.junit.jupiter.api.condition.DisabledOnJre; +import org.junit.jupiter.api.condition.JRE; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +@DisabledOnJre(JRE.JAVA_8) +public @interface DontRunOnJava8 {} diff --git a/src/test/java/net/snowflake/client/annotations/DontRunOnTestaccount.java b/src/test/java/net/snowflake/client/annotations/DontRunOnTestaccount.java new file mode 100644 index 000000000..5c9fff944 --- /dev/null +++ b/src/test/java/net/snowflake/client/annotations/DontRunOnTestaccount.java @@ -0,0 +1,15 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.junit.jupiter.api.condition.DisabledIfEnvironmentVariable; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +@DisabledIfEnvironmentVariable(named = "SNOWFLAKE_TEST_ACCOUNT", matches = "testaccount") +public @interface DontRunOnTestaccount {} diff --git a/src/test/java/net/snowflake/client/annotations/DontRunOnThinJar.java b/src/test/java/net/snowflake/client/annotations/DontRunOnThinJar.java new file mode 100644 index 000000000..bb254a2c4 --- /dev/null +++ b/src/test/java/net/snowflake/client/annotations/DontRunOnThinJar.java @@ -0,0 +1,15 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.junit.jupiter.api.condition.DisabledIfEnvironmentVariable; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +@DisabledIfEnvironmentVariable(named = "ADDITIONAL_MAVEN_PROFILE", matches = "-Dthin-jar") +public @interface DontRunOnThinJar {} diff --git a/src/test/java/net/snowflake/client/annotations/DontRunOnWindows.java b/src/test/java/net/snowflake/client/annotations/DontRunOnWindows.java new file mode 100644 index 000000000..140f0d752 --- /dev/null +++ b/src/test/java/net/snowflake/client/annotations/DontRunOnWindows.java @@ -0,0 +1,16 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. 
+ */ +package net.snowflake.client.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.junit.jupiter.api.condition.DisabledOnOs; +import org.junit.jupiter.api.condition.OS; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +@DisabledOnOs(OS.WINDOWS) +public @interface DontRunOnWindows {} diff --git a/src/test/java/net/snowflake/client/annotations/RunOnAWS.java b/src/test/java/net/snowflake/client/annotations/RunOnAWS.java new file mode 100644 index 000000000..fd3acc546 --- /dev/null +++ b/src/test/java/net/snowflake/client/annotations/RunOnAWS.java @@ -0,0 +1,15 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +@EnabledIfEnvironmentVariable(named = "CLOUD_PROVIDER", matches = "(?i)AWS(?-i)") +public @interface RunOnAWS {} diff --git a/src/test/java/net/snowflake/client/annotations/RunOnAzure.java b/src/test/java/net/snowflake/client/annotations/RunOnAzure.java new file mode 100644 index 000000000..13c8379b3 --- /dev/null +++ b/src/test/java/net/snowflake/client/annotations/RunOnAzure.java @@ -0,0 +1,15 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +@EnabledIfEnvironmentVariable(named = "CLOUD_PROVIDER", matches = "(?i)Azure(?-i)") +public @interface RunOnAzure {} diff --git a/src/test/java/net/snowflake/client/annotations/RunOnGCP.java b/src/test/java/net/snowflake/client/annotations/RunOnGCP.java new file mode 100644 index 000000000..e361aa808 --- /dev/null +++ b/src/test/java/net/snowflake/client/annotations/RunOnGCP.java @@ -0,0 +1,15 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +@EnabledIfEnvironmentVariable(named = "CLOUD_PROVIDER", matches = "(?i)GCP(?-i)") +public @interface RunOnGCP {} diff --git a/src/test/java/net/snowflake/client/annotations/RunOnGithubActionsNotMac.java b/src/test/java/net/snowflake/client/annotations/RunOnGithubActionsNotMac.java new file mode 100644 index 000000000..f133022e3 --- /dev/null +++ b/src/test/java/net/snowflake/client/annotations/RunOnGithubActionsNotMac.java @@ -0,0 +1,18 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. 
+ */ +package net.snowflake.client.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.junit.jupiter.api.condition.DisabledOnOs; +import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable; +import org.junit.jupiter.api.condition.OS; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +@EnabledIfEnvironmentVariable(named = "GITHUB_ACTIONS", matches = ".*") +@DisabledOnOs(OS.MAC) +public @interface RunOnGithubActionsNotMac {} diff --git a/src/test/java/net/snowflake/client/annotations/RunOnLinux.java b/src/test/java/net/snowflake/client/annotations/RunOnLinux.java new file mode 100644 index 000000000..33231effe --- /dev/null +++ b/src/test/java/net/snowflake/client/annotations/RunOnLinux.java @@ -0,0 +1,16 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.junit.jupiter.api.condition.EnabledOnOs; +import org.junit.jupiter.api.condition.OS; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +@EnabledOnOs({OS.LINUX, OS.AIX}) +public @interface RunOnLinux {} diff --git a/src/test/java/net/snowflake/client/annotations/RunOnLinuxOrMac.java b/src/test/java/net/snowflake/client/annotations/RunOnLinuxOrMac.java new file mode 100644 index 000000000..6c6013154 --- /dev/null +++ b/src/test/java/net/snowflake/client/annotations/RunOnLinuxOrMac.java @@ -0,0 +1,16 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.junit.jupiter.api.condition.EnabledOnOs; +import org.junit.jupiter.api.condition.OS; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +@EnabledOnOs({OS.MAC, OS.LINUX, OS.AIX}) +public @interface RunOnLinuxOrMac {} diff --git a/src/test/java/net/snowflake/client/annotations/RunOnMac.java b/src/test/java/net/snowflake/client/annotations/RunOnMac.java new file mode 100644 index 000000000..a5f18a345 --- /dev/null +++ b/src/test/java/net/snowflake/client/annotations/RunOnMac.java @@ -0,0 +1,16 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.junit.jupiter.api.condition.EnabledOnOs; +import org.junit.jupiter.api.condition.OS; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +@EnabledOnOs(OS.MAC) +public @interface RunOnMac {} diff --git a/src/test/java/net/snowflake/client/annotations/RunOnTestaccountNotOnGithubActions.java b/src/test/java/net/snowflake/client/annotations/RunOnTestaccountNotOnGithubActions.java new file mode 100644 index 000000000..6dacdb993 --- /dev/null +++ b/src/test/java/net/snowflake/client/annotations/RunOnTestaccountNotOnGithubActions.java @@ -0,0 +1,17 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. 
+ */ +package net.snowflake.client.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.junit.jupiter.api.condition.DisabledIfEnvironmentVariable; +import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +@EnabledIfEnvironmentVariable(named = "SNOWFLAKE_TEST_ACCOUNT", matches = "testaccount") +@DisabledIfEnvironmentVariable(named = "GITHUB_ACTIONS", matches = ".*") +public @interface RunOnTestaccountNotOnGithubActions {} diff --git a/src/test/java/net/snowflake/client/annotations/RunOnWindows.java b/src/test/java/net/snowflake/client/annotations/RunOnWindows.java new file mode 100644 index 000000000..69a2ee7ff --- /dev/null +++ b/src/test/java/net/snowflake/client/annotations/RunOnWindows.java @@ -0,0 +1,16 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.junit.jupiter.api.condition.EnabledOnOs; +import org.junit.jupiter.api.condition.OS; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +@EnabledOnOs(OS.WINDOWS) +public @interface RunOnWindows {} diff --git a/src/test/java/net/snowflake/client/annotations/RunOnWindowsOrMac.java b/src/test/java/net/snowflake/client/annotations/RunOnWindowsOrMac.java new file mode 100644 index 000000000..77d50109c --- /dev/null +++ b/src/test/java/net/snowflake/client/annotations/RunOnWindowsOrMac.java @@ -0,0 +1,16 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. 
+ */ +package net.snowflake.client.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import org.junit.jupiter.api.condition.EnabledOnOs; +import org.junit.jupiter.api.condition.OS; + +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.RUNTIME) +@EnabledOnOs({OS.WINDOWS, OS.MAC}) +public @interface RunOnWindowsOrMac {} diff --git a/src/test/java/net/snowflake/client/category/TestCategoryArrow.java b/src/test/java/net/snowflake/client/category/TestCategoryArrow.java deleted file mode 100644 index 59a8396cd..000000000 --- a/src/test/java/net/snowflake/client/category/TestCategoryArrow.java +++ /dev/null @@ -1,3 +0,0 @@ -package net.snowflake.client.category; - -public interface TestCategoryArrow {} diff --git a/src/test/java/net/snowflake/client/category/TestCategoryConnection.java b/src/test/java/net/snowflake/client/category/TestCategoryConnection.java deleted file mode 100644 index cfa5bfd30..000000000 --- a/src/test/java/net/snowflake/client/category/TestCategoryConnection.java +++ /dev/null @@ -1,3 +0,0 @@ -package net.snowflake.client.category; - -public interface TestCategoryConnection {} diff --git a/src/test/java/net/snowflake/client/category/TestCategoryCore.java b/src/test/java/net/snowflake/client/category/TestCategoryCore.java deleted file mode 100644 index 7c97c58ef..000000000 --- a/src/test/java/net/snowflake/client/category/TestCategoryCore.java +++ /dev/null @@ -1,3 +0,0 @@ -package net.snowflake.client.category; - -public interface TestCategoryCore {} diff --git a/src/test/java/net/snowflake/client/category/TestCategoryDiagnostic.java b/src/test/java/net/snowflake/client/category/TestCategoryDiagnostic.java deleted file mode 100644 index ecb5c0509..000000000 --- a/src/test/java/net/snowflake/client/category/TestCategoryDiagnostic.java +++ /dev/null @@ -1,3 +0,0 @@ -package net.snowflake.client.category; - -public interface TestCategoryDiagnostic {} diff --git a/src/test/java/net/snowflake/client/category/TestCategoryLoader.java b/src/test/java/net/snowflake/client/category/TestCategoryLoader.java deleted file mode 100644 index eac9e7bef..000000000 --- a/src/test/java/net/snowflake/client/category/TestCategoryLoader.java +++ /dev/null @@ -1,4 +0,0 @@ -package net.snowflake.client.category; - -/** Test category Loader */ -public interface TestCategoryLoader {} diff --git a/src/test/java/net/snowflake/client/category/TestCategoryOthers.java b/src/test/java/net/snowflake/client/category/TestCategoryOthers.java deleted file mode 100644 index 7f11baaa9..000000000 --- a/src/test/java/net/snowflake/client/category/TestCategoryOthers.java +++ /dev/null @@ -1,3 +0,0 @@ -package net.snowflake.client.category; - -public interface TestCategoryOthers {} diff --git a/src/test/java/net/snowflake/client/category/TestCategoryResultSet.java b/src/test/java/net/snowflake/client/category/TestCategoryResultSet.java deleted file mode 100644 index 7d9824823..000000000 --- a/src/test/java/net/snowflake/client/category/TestCategoryResultSet.java +++ /dev/null @@ -1,3 +0,0 @@ -package net.snowflake.client.category; - -public interface TestCategoryResultSet {} diff --git a/src/test/java/net/snowflake/client/category/TestCategoryStatement.java b/src/test/java/net/snowflake/client/category/TestCategoryStatement.java deleted file mode 100644 index 5381cbb00..000000000 --- a/src/test/java/net/snowflake/client/category/TestCategoryStatement.java +++ /dev/null @@ -1,3 +0,0 
@@ -package net.snowflake.client.category; - -public interface TestCategoryStatement {} diff --git a/src/test/java/net/snowflake/client/category/TestTags.java b/src/test/java/net/snowflake/client/category/TestTags.java new file mode 100644 index 000000000..92cd7ce3b --- /dev/null +++ b/src/test/java/net/snowflake/client/category/TestTags.java @@ -0,0 +1,17 @@ +/* + * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved. + */ +package net.snowflake.client.category; + +public class TestTags { + private TestTags() {} + + public static final String ARROW = "arrow"; + public static final String CONNECTION = "connection"; + public static final String CORE = "core"; + public static final String DIAGNOSTIC = "diagnostic"; + public static final String LOADER = "loader"; + public static final String OTHERS = "others"; + public static final String RESULT_SET = "resultSet"; + public static final String STATEMENT = "statement"; +} diff --git a/src/test/java/net/snowflake/client/config/SFClientConfigParserTest.java b/src/test/java/net/snowflake/client/config/SFClientConfigParserTest.java index a00784f68..f570cfb7f 100644 --- a/src/test/java/net/snowflake/client/config/SFClientConfigParserTest.java +++ b/src/test/java/net/snowflake/client/config/SFClientConfigParserTest.java @@ -7,10 +7,10 @@ import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty; import static net.snowflake.client.jdbc.SnowflakeUtil.systemSetEnv; import static net.snowflake.client.jdbc.SnowflakeUtil.systemUnsetEnv; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.Mockito.mockStatic; import java.io.IOException; @@ -18,8 +18,8 @@ import java.nio.file.Path; import java.nio.file.Paths; import net.snowflake.client.jdbc.SnowflakeUtil; -import org.junit.After; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; import org.mockito.MockedStatic; public class SFClientConfigParserTest { @@ -30,7 +30,7 @@ public class SFClientConfigParserTest { private Path configFilePath; - @After + @AfterEach public void cleanup() throws IOException { if (configFilePath != null) { Files.deleteIfExists(configFilePath); diff --git a/src/test/java/net/snowflake/client/config/SFConnectionConfigParserTest.java b/src/test/java/net/snowflake/client/config/SFConnectionConfigParserTest.java index bfb30f645..50dd75ff2 100644 --- a/src/test/java/net/snowflake/client/config/SFConnectionConfigParserTest.java +++ b/src/test/java/net/snowflake/client/config/SFConnectionConfigParserTest.java @@ -1,13 +1,13 @@ package net.snowflake.client.config; +import static net.snowflake.client.AssumptionUtils.assumeRunningOnLinuxMac; import static net.snowflake.client.config.SFConnectionConfigParser.SKIP_TOKEN_FILE_PERMISSIONS_VERIFICATION; import static net.snowflake.client.config.SFConnectionConfigParser.SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY; import static net.snowflake.client.config.SFConnectionConfigParser.SNOWFLAKE_HOME_KEY; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertThrows; -import static 
org.junit.Assume.assumeFalse; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; import com.fasterxml.jackson.dataformat.toml.TomlMapper; import java.io.File; @@ -25,14 +25,12 @@ import java.util.List; import java.util.Map; import java.util.Set; -import net.snowflake.client.RunningNotOnLinuxMac; import net.snowflake.client.core.Constants; import net.snowflake.client.jdbc.SnowflakeSQLException; import net.snowflake.client.jdbc.SnowflakeUtil; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; public class SFConnectionConfigParserTest { @@ -46,7 +44,7 @@ public class SFConnectionConfigParserTest { private TomlMapper tomlMapper = new TomlMapper(); private Map envVariables = new HashMap(); - @Before + @BeforeEach public void setUp() throws IOException { tempPath = Files.createTempDirectory(".snowflake"); ENV_VARIABLES_KEYS.stream() @@ -58,7 +56,7 @@ public void setUp() throws IOException { }); } - @After + @AfterEach public void close() throws IOException { SnowflakeUtil.systemUnsetEnv(SNOWFLAKE_HOME_KEY); SnowflakeUtil.systemUnsetEnv(SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY); @@ -107,7 +105,7 @@ public void testThrowErrorWhenWrongPermissionsForConnectionConfigurationFile() File tokenFile = new File(Paths.get(tempPath.toString(), "token").toUri()); prepareConnectionConfigurationTomlFile( Collections.singletonMap("token_file_path", tokenFile.toString()), false, false); - assumeFalse(RunningNotOnLinuxMac.isNotRunningOnLinuxMac()); + assumeRunningOnLinuxMac(); assertThrows( SnowflakeSQLException.class, () -> SFConnectionConfigParser.buildConnectionParameters()); } @@ -118,7 +116,7 @@ public void testThrowErrorWhenWrongPermissionsForTokenFile() throws IOException File tokenFile = new File(Paths.get(tempPath.toString(), "token").toUri()); prepareConnectionConfigurationTomlFile( Collections.singletonMap("token_file_path", tokenFile.toString()), true, false); - assumeFalse(RunningNotOnLinuxMac.isNotRunningOnLinuxMac()); + assumeRunningOnLinuxMac(); assertThrows( SnowflakeSQLException.class, () -> SFConnectionConfigParser.buildConnectionParameters()); } @@ -164,7 +162,7 @@ public void shouldThrowExceptionIfNoneOfHostAndAccountIsSet() throws IOException extraparams.put("host", null); extraparams.put("account", null); prepareConnectionConfigurationTomlFile(extraparams); - Assert.assertThrows( + assertThrows( SnowflakeSQLException.class, () -> SFConnectionConfigParser.buildConnectionParameters()); } @@ -177,7 +175,7 @@ public void shouldThrowExceptionIfTokenIsNotSetForOauth() throws IOException { prepareConnectionConfigurationTomlFile( Collections.singletonMap("token_file_path", tokenFile.toString()), true, false, ""); - Assert.assertThrows( + assertThrows( SnowflakeSQLException.class, () -> SFConnectionConfigParser.buildConnectionParameters()); } diff --git a/src/test/java/net/snowflake/client/config/SFPermissionsTest.java b/src/test/java/net/snowflake/client/config/SFPermissionsTest.java index 92ec8a624..f5e41e260 100644 --- a/src/test/java/net/snowflake/client/config/SFPermissionsTest.java +++ b/src/test/java/net/snowflake/client/config/SFPermissionsTest.java @@ -1,79 +1,55 @@ package net.snowflake.client.config; -import static 
org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.attribute.PosixFilePermissions; -import java.util.HashMap; -import java.util.Map; -import java.util.Set; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnWin; -import org.junit.After; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import net.snowflake.client.annotations.DontRunOnWindows; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.CsvSource; -@RunWith(Parameterized.class) public class SFPermissionsTest { - @Rule public ConditionalIgnoreRule rule = new ConditionalIgnoreRule(); - - @Parameterized.Parameters(name = "permission={0}") - public static Set<Map.Entry<String, Boolean>> data() { - Map<String, Boolean> testConfigFilePermissions = - new HashMap<String, Boolean>() { - { - put("rwx------", false); - put("rw-------", false); - put("r-x------", false); - put("r--------", false); - put("rwxrwx---", true); - put("rwxrw----", true); - put("rwxr-x---", false); - put("rwxr-----", false); - put("rwx-wx---", true); - put("rwx-w----", true); - put("rwx--x---", false); - put("rwx---rwx", true); - put("rwx---rw-", true); - put("rwx---r-x", false); - put("rwx---r--", false); - put("rwx----wx", true); - put("rwx----w-", true); - put("rwx-----x", false); - } - }; - return testConfigFilePermissions.entrySet(); - } - Path configFilePath = Paths.get("config.json"); String configJson = "{\"common\":{\"log_level\":\"debug\",\"log_path\":\"logs\"}}"; - String permission; - Boolean isSucceed; - - public SFPermissionsTest(Map.Entry<String, Boolean> permission) { - this.permission = permission.getKey(); - this.isSucceed = permission.getValue(); - } - @Before + @BeforeEach public void createConfigFile() throws IOException { Files.write(configFilePath, configJson.getBytes()); } - @After + @AfterEach public void cleanupConfigFile() throws IOException { Files.deleteIfExists(configFilePath); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnWin.class) - public void testLogDirectoryPermissions() throws IOException { + @ParameterizedTest + @CsvSource({ + "rwx------,false", + "rw-------,false", + "r-x------,false", + "r--------,false", + "rwxrwx---,true", + "rwxrw----,true", + "rwxr-x---,false", + "rwxr-----,false", + "rwx-wx---,true", + "rwx-w----,true", + "rwx--x---,false", + "rwx---rwx,true", + "rwx---rw-,true", + "rwx---r-x,false", + "rwx---r--,false", + "rwx----wx,true", + "rwx----w-,true", + "rwx-----x,false" + }) + @DontRunOnWindows + public void testLogDirectoryPermissions(String permission, boolean isSucceed) throws IOException { // TODO: SNOW-1503722 Change to check for thrown exceptions // Don't run on Windows Files.setPosixFilePermissions(configFilePath, PosixFilePermissions.fromString(permission)); diff --git a/src/test/java/net/snowflake/client/core/CoreUtilsMiscellaneousTest.java b/src/test/java/net/snowflake/client/core/CoreUtilsMiscellaneousTest.java index f11614c8b..beb0ad292 100644 --- a/src/test/java/net/snowflake/client/core/CoreUtilsMiscellaneousTest.java +++ b/src/test/java/net/snowflake/client/core/CoreUtilsMiscellaneousTest.java @@ -4,10 +4,10 @@ package net.snowflake.client.core; import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty; -import static 
org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import com.amazonaws.ClientConfiguration; import com.amazonaws.Protocol; @@ -16,13 +16,12 @@ import java.net.Proxy; import java.util.HashMap; import java.util.Properties; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; +import net.snowflake.client.annotations.DontRunOnGithubActions; import net.snowflake.client.jdbc.ErrorCode; import net.snowflake.client.jdbc.SnowflakeSQLException; import net.snowflake.client.jdbc.SnowflakeUtil; import net.snowflake.client.jdbc.cloud.storage.S3HttpUtil; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class CoreUtilsMiscellaneousTest { @@ -41,7 +40,7 @@ public void testSnowflakeAssertTrue() { /** Test that Constants.getOS function is working as expected */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testgetOS() { Constants.clearOSForTesting(); String originalOS = systemGetProperty("os.name"); diff --git a/src/test/java/net/snowflake/client/core/EventHandlerTest.java b/src/test/java/net/snowflake/client/core/EventHandlerTest.java index eb930f7c6..56b48b987 100644 --- a/src/test/java/net/snowflake/client/core/EventHandlerTest.java +++ b/src/test/java/net/snowflake/client/core/EventHandlerTest.java @@ -3,8 +3,8 @@ */ package net.snowflake.client.core; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.File; import java.io.IOException; @@ -14,18 +14,17 @@ import java.util.logging.LogRecord; import java.util.zip.GZIPInputStream; import org.apache.commons.io.IOUtils; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; public class EventHandlerTest { - @Rule public TemporaryFolder tmpFolder = new TemporaryFolder(); + @TempDir private File tmpFolder; - @Before + @BeforeEach public void setUp() throws IOException { - tmpFolder.newFolder("snowflake_dumps"); - System.setProperty("snowflake.dump_path", tmpFolder.getRoot().getCanonicalPath()); + new File(tmpFolder, "snowflake_dumps").mkdirs(); + System.setProperty("snowflake.dump_path", tmpFolder.getCanonicalPath()); } @Test diff --git a/src/test/java/net/snowflake/client/core/EventTest.java b/src/test/java/net/snowflake/client/core/EventTest.java index e9ee978e5..7ca041744 100644 --- a/src/test/java/net/snowflake/client/core/EventTest.java +++ b/src/test/java/net/snowflake/client/core/EventTest.java @@ -5,8 +5,8 @@ package net.snowflake.client.core; import static net.snowflake.client.core.EventUtil.DUMP_PATH_PROP; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.File; import java.io.IOException; @@ -14,24 +14,25 @@ import 
java.nio.file.Files; import java.util.zip.GZIPInputStream; import org.apache.commons.io.IOUtils; -import org.junit.After; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; public class EventTest { - @Rule public TemporaryFolder tmpFolder = new TemporaryFolder(); + @TempDir private File tmpFolder; private File homeDirectory; private File dmpDirectory; - @Before + @BeforeEach public void setUp() throws IOException { - homeDirectory = tmpFolder.newFolder("homedir"); - dmpDirectory = tmpFolder.newFolder("homedir", "snowflake_dumps"); + homeDirectory = new File(tmpFolder, "homedir"); + homeDirectory.mkdirs(); + dmpDirectory = new File(homeDirectory, "snowflake_dumps"); + dmpDirectory.mkdirs(); } - @After + @AfterEach public void tearDown() { dmpDirectory.delete(); } @@ -58,7 +59,7 @@ public void testWriteEventDumpLine() throws IOException { // created String dmpPath1 = EventUtil.getDumpPathPrefix(); String dmpPath2 = dmpDirectory.getCanonicalPath(); - assertEquals("dump path is: " + EventUtil.getDumpPathPrefix(), dmpPath2, dmpPath1); + assertEquals(dmpPath2, dmpPath1, "dump path is: " + EventUtil.getDumpPathPrefix()); File dumpFile = new File( EventUtil.getDumpPathPrefix() diff --git a/src/test/java/net/snowflake/client/core/ExecTimeTelemetryDataTest.java b/src/test/java/net/snowflake/client/core/ExecTimeTelemetryDataTest.java index f7ad06b46..04cec29fb 100644 --- a/src/test/java/net/snowflake/client/core/ExecTimeTelemetryDataTest.java +++ b/src/test/java/net/snowflake/client/core/ExecTimeTelemetryDataTest.java @@ -1,14 +1,14 @@ package net.snowflake.client.core; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; import net.minidev.json.JSONObject; import net.minidev.json.parser.JSONParser; import net.minidev.json.parser.ParseException; import net.snowflake.client.jdbc.telemetryOOB.TelemetryService; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class ExecTimeTelemetryDataTest { diff --git a/src/test/java/net/snowflake/client/core/HttpUtilLatestIT.java b/src/test/java/net/snowflake/client/core/HttpUtilLatestIT.java index 34892843c..00c318227 100644 --- a/src/test/java/net/snowflake/client/core/HttpUtilLatestIT.java +++ b/src/test/java/net/snowflake/client/core/HttpUtilLatestIT.java @@ -3,21 +3,22 @@ */ package net.snowflake.client.core; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; import java.net.SocketTimeoutException; import java.time.Duration; -import net.snowflake.client.category.TestCategoryCore; +import net.snowflake.client.category.TestTags; import org.apache.http.client.methods.HttpGet; import org.apache.http.impl.client.CloseableHttpClient; import org.hamcrest.CoreMatchers; import org.hamcrest.MatcherAssert; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; 
-@Category(TestCategoryCore.class) +@Tag(TestTags.CORE) public class HttpUtilLatestIT { private static final String HANG_WEBSERVER_ADDRESS = "http://localhost:12345/hang"; @@ -30,7 +31,8 @@ public void shouldGetDefaultConnectionAndSocketTimeouts() { } /** Added in > 3.14.5 */ - @Test(timeout = 1000L) + @Test + @Timeout(1) public void shouldOverrideConnectionAndSocketTimeouts() { // it's hard to test connection timeout so there is only a test for socket timeout HttpUtil.setConnectionTimeout(100); diff --git a/src/test/java/net/snowflake/client/core/IncidentUtilLatestIT.java b/src/test/java/net/snowflake/client/core/IncidentUtilLatestIT.java index cd2e89806..5ffe7c5d3 100644 --- a/src/test/java/net/snowflake/client/core/IncidentUtilLatestIT.java +++ b/src/test/java/net/snowflake/client/core/IncidentUtilLatestIT.java @@ -6,23 +6,23 @@ import static net.snowflake.client.core.IncidentUtil.INC_DUMP_FILE_EXT; import static net.snowflake.client.core.IncidentUtil.INC_DUMP_FILE_NAME; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.StringWriter; import java.util.zip.GZIPInputStream; -import net.snowflake.client.category.TestCategoryCore; +import net.snowflake.client.category.TestTags; import net.snowflake.client.jdbc.BaseJDBCTest; import org.apache.commons.io.IOUtils; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; -@Category(TestCategoryCore.class) +@Tag(TestTags.CORE) public class IncidentUtilLatestIT extends BaseJDBCTest { - @Rule public TemporaryFolder tmpFolder = new TemporaryFolder(); + @TempDir private File tmpFolder; private static final String FILE_NAME = "sf_incident_123456.dmp.gz"; @Test @@ -34,7 +34,9 @@ public void testOneLinerDescription() { /** Tests dumping JVM metrics for the current process */ @Test public void testDumpVmMetrics() throws IOException { - String dumpPath = tmpFolder.newFolder().getCanonicalPath(); + File dumpDir = new File(tmpFolder, "dump"); + dumpDir.mkdirs(); + String dumpPath = dumpDir.getCanonicalPath(); System.setProperty("snowflake.dump_path", dumpPath); String incidentId = "123456"; @@ -47,13 +49,15 @@ public void testDumpVmMetrics() throws IOException { EventUtil.getDumpPathPrefix() + "/" + INC_DUMP_FILE_NAME + incidentId + INC_DUMP_FILE_EXT; // Read back the file contents - GZIPInputStream gzip = new GZIPInputStream(new FileInputStream(targetVMFileLocation)); - StringWriter sWriter = new StringWriter(); - IOUtils.copy(gzip, sWriter, "UTF-8"); - String output = sWriter.toString(); - assertEquals( - "\n\n\n--------------------------- METRICS " + "---------------------------\n\n", - output.substring(0, 69)); - sWriter.close(); + try (FileInputStream fis = new FileInputStream(targetVMFileLocation); + GZIPInputStream gzip = new GZIPInputStream(fis)) { + StringWriter sWriter = new StringWriter(); + IOUtils.copy(gzip, sWriter, "UTF-8"); + String output = sWriter.toString(); + assertEquals( + "\n\n\n--------------------------- METRICS " + "---------------------------\n\n", + output.substring(0, 69)); + sWriter.close(); + } } } diff --git a/src/test/java/net/snowflake/client/core/OCSPCacheServerTest.java b/src/test/java/net/snowflake/client/core/OCSPCacheServerTest.java index 9a5af03b2..37bfea5c6 100644 --- 
a/src/test/java/net/snowflake/client/core/OCSPCacheServerTest.java +++ b/src/test/java/net/snowflake/client/core/OCSPCacheServerTest.java @@ -1,93 +1,76 @@ package net.snowflake.client.core; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import java.util.stream.Stream; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsProvider; +import org.junit.jupiter.params.provider.ArgumentsSource; -@RunWith(Parameterized.class) public class OCSPCacheServerTest { - @Parameterized.Parameters( - name = "For host {0} cache server fetch url should be {1} and retry url {2}") - public static Object[][] data() { - return new Object[][] { - { - "bla-12345.global.snowflakecomputing.com", - "https://ocspssd-12345.global.snowflakecomputing.com/ocsp/fetch", - "https://ocspssd-12345.global.snowflakecomputing.com/ocsp/retry" - }, - { - "bla-12345.global.snowflakecomputing.cn", - "https://ocspssd-12345.global.snowflakecomputing.cn/ocsp/fetch", - "https://ocspssd-12345.global.snowflakecomputing.cn/ocsp/retry" - }, - { - "bla-12345.global.snowflakecomputing.xyz", - "https://ocspssd-12345.global.snowflakecomputing.xyz/ocsp/fetch", - "https://ocspssd-12345.global.snowflakecomputing.xyz/ocsp/retry" - }, - { - "bla-12345.GLOBAL.snowflakecomputing.xyz", - "https://ocspssd-12345.GLOBAL.snowflakecomputing.xyz/ocsp/fetch", - "https://ocspssd-12345.GLOBAL.snowflakecomputing.xyz/ocsp/retry" - }, - { - "bla-12345.snowflakecomputing.com", - "https://ocspssd.snowflakecomputing.com/ocsp/fetch", - "https://ocspssd.snowflakecomputing.com/ocsp/retry" - }, - { - "bla-12345.snowflakecomputing.cn", - "https://ocspssd.snowflakecomputing.cn/ocsp/fetch", - "https://ocspssd.snowflakecomputing.cn/ocsp/retry" - }, - { - "bla-12345.snowflakecomputing.xyz", - "https://ocspssd.snowflakecomputing.xyz/ocsp/fetch", - "https://ocspssd.snowflakecomputing.xyz/ocsp/retry" - }, - { - "bla-12345.SNOWFLAKEcomputing.xyz", - "https://ocspssd.SNOWFLAKEcomputing.xyz/ocsp/fetch", - "https://ocspssd.SNOWFLAKEcomputing.xyz/ocsp/retry" - }, - { - "s3.amazoncomaws.com", - "https://ocspssd.snowflakecomputing.com/ocsp/fetch", - "https://ocspssd.snowflakecomputing.com/ocsp/retry" - }, - { - "s3.amazoncomaws.COM", - "https://ocspssd.snowflakecomputing.COM/ocsp/fetch", - "https://ocspssd.snowflakecomputing.COM/ocsp/retry" - }, - { - "s3.amazoncomaws.com.cn", - "https://ocspssd.snowflakecomputing.cn/ocsp/fetch", - "https://ocspssd.snowflakecomputing.cn/ocsp/retry" - }, - { - "S3.AMAZONCOMAWS.COM.CN", - "https://ocspssd.snowflakecomputing.CN/ocsp/fetch", - "https://ocspssd.snowflakecomputing.CN/ocsp/retry" - }, - }; - } - - private final String host; - private final String expectedFetchUrl; - private final String expectedRetryUrl; + static class URLProvider implements ArgumentsProvider { - public OCSPCacheServerTest(String host, String expectedFetchUrl, String expectedRetryUrl) { - this.host = host; - this.expectedFetchUrl = expectedFetchUrl; - this.expectedRetryUrl = expectedRetryUrl; + @Override + public Stream provideArguments(ExtensionContext context) throws Exception { + return Stream.of( + Arguments.of( + "bla-12345.global.snowflakecomputing.com", + "https://ocspssd-12345.global.snowflakecomputing.com/ocsp/fetch", + 
"https://ocspssd-12345.global.snowflakecomputing.com/ocsp/retry"), + Arguments.of( + "bla-12345.global.snowflakecomputing.cn", + "https://ocspssd-12345.global.snowflakecomputing.cn/ocsp/fetch", + "https://ocspssd-12345.global.snowflakecomputing.cn/ocsp/retry"), + Arguments.of( + "bla-12345.global.snowflakecomputing.xyz", + "https://ocspssd-12345.global.snowflakecomputing.xyz/ocsp/fetch", + "https://ocspssd-12345.global.snowflakecomputing.xyz/ocsp/retry"), + Arguments.of( + "bla-12345.GLOBAL.snowflakecomputing.xyz", + "https://ocspssd-12345.GLOBAL.snowflakecomputing.xyz/ocsp/fetch", + "https://ocspssd-12345.GLOBAL.snowflakecomputing.xyz/ocsp/retry"), + Arguments.of( + "bla-12345.snowflakecomputing.com", + "https://ocspssd.snowflakecomputing.com/ocsp/fetch", + "https://ocspssd.snowflakecomputing.com/ocsp/retry"), + Arguments.of( + "bla-12345.snowflakecomputing.cn", + "https://ocspssd.snowflakecomputing.cn/ocsp/fetch", + "https://ocspssd.snowflakecomputing.cn/ocsp/retry"), + Arguments.of( + "bla-12345.snowflakecomputing.xyz", + "https://ocspssd.snowflakecomputing.xyz/ocsp/fetch", + "https://ocspssd.snowflakecomputing.xyz/ocsp/retry"), + Arguments.of( + "bla-12345.SNOWFLAKEcomputing.xyz", + "https://ocspssd.SNOWFLAKEcomputing.xyz/ocsp/fetch", + "https://ocspssd.SNOWFLAKEcomputing.xyz/ocsp/retry"), + Arguments.of( + "s3.amazoncomaws.com", + "https://ocspssd.snowflakecomputing.com/ocsp/fetch", + "https://ocspssd.snowflakecomputing.com/ocsp/retry"), + Arguments.of( + "s3.amazoncomaws.COM", + "https://ocspssd.snowflakecomputing.COM/ocsp/fetch", + "https://ocspssd.snowflakecomputing.COM/ocsp/retry"), + Arguments.of( + "s3.amazoncomaws.com.cn", + "https://ocspssd.snowflakecomputing.cn/ocsp/fetch", + "https://ocspssd.snowflakecomputing.cn/ocsp/retry"), + Arguments.of( + "S3.AMAZONCOMAWS.COM.CN", + "https://ocspssd.snowflakecomputing.CN/ocsp/fetch", + "https://ocspssd.snowflakecomputing.CN/ocsp/retry")); + } } - @Test - public void shouldChooseOcspCacheServerUrls() { + @ParameterizedTest(name = "For host {0} cache server fetch url should be {1} and retry url {2}") + @ArgumentsSource(URLProvider.class) + public void shouldChooseOcspCacheServerUrls( + String host, String expectedFetchUrl, String expectedRetryUrl) { SFTrustManager.OCSPCacheServer ocspCacheServer = new SFTrustManager.OCSPCacheServer(); ocspCacheServer.resetOCSPResponseCacheServer(host); diff --git a/src/test/java/net/snowflake/client/core/ObjectMapperTest.java b/src/test/java/net/snowflake/client/core/ObjectMapperTest.java index 6868d186e..e0a9e11ab 100644 --- a/src/test/java/net/snowflake/client/core/ObjectMapperTest.java +++ b/src/test/java/net/snowflake/client/core/ObjectMapperTest.java @@ -4,52 +4,58 @@ package net.snowflake.client.core; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import java.nio.charset.StandardCharsets; import java.sql.SQLException; -import java.util.ArrayList; import java.util.Base64; -import java.util.Collection; -import java.util.List; +import java.util.stream.Stream; import net.snowflake.client.jdbc.SnowflakeUtil; -import org.junit.After; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import 
org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsProvider; +import org.junit.jupiter.params.provider.ArgumentsSource; -@RunWith(Parameterized.class) public class ObjectMapperTest { private static final int jacksonDefaultMaxStringLength = 20_000_000; + static String originalLogger; - @Parameterized.Parameters(name = "lobSizeInMB={0}, maxJsonStringLength={1}") - public static Collection data() { - int[] lobSizeInMB = new int[] {16, 16, 32, 64, 128}; - // maxJsonStringLength to be set for the corresponding LOB size - int[] maxJsonStringLengths = - new int[] {jacksonDefaultMaxStringLength, 23_000_000, 45_000_000, 90_000_000, 180_000_000}; - List ret = new ArrayList<>(); - for (int i = 0; i < lobSizeInMB.length; i++) { - ret.add(new Object[] {lobSizeInMB[i], maxJsonStringLengths[i]}); + static class DataProvider implements ArgumentsProvider { + @Override + public Stream provideArguments(ExtensionContext context) throws Exception { + return Stream.of( + Arguments.of(16 * 1024 * 1024, jacksonDefaultMaxStringLength), + Arguments.of(16 * 1024 * 1024, 23_000_000), + Arguments.of(32 * 1024 * 1024, 45_000_000), + Arguments.of(64 * 1024 * 1024, 90_000_000), + Arguments.of(128 * 1024 * 1024, 180_000_000)); } - return ret; } - private final int lobSizeInBytes; - private final int maxJsonStringLength; + @BeforeAll + public static void setProperty() { + originalLogger = System.getProperty("net.snowflake.jdbc.loggerImpl"); + System.setProperty("net.snowflake.jdbc.loggerImpl", "net.snowflake.client.log.JDK14Logger"); + } - @After - public void clearProperty() { + @AfterAll + public static void clearProperty() { + if (originalLogger != null) { + System.setProperty("net.snowflake.jdbc.loggerImpl", originalLogger); + } else { + System.clearProperty("net.snowflake.jdbc.loggerImpl"); + } System.clearProperty(ObjectMapperFactory.MAX_JSON_STRING_LENGTH_JVM); } - public ObjectMapperTest(int lobSizeInMB, int maxJsonStringLength) { - // convert LOB size from MB to bytes - this.lobSizeInBytes = lobSizeInMB * 1024 * 1024; - this.maxJsonStringLength = maxJsonStringLength; + private static void setJacksonDefaultMaxStringLength(int maxJsonStringLength) { System.setProperty( ObjectMapperFactory.MAX_JSON_STRING_LENGTH_JVM, Integer.toString(maxJsonStringLength)); } @@ -61,15 +67,17 @@ public void testInvalidMaxJsonStringLength() throws SQLException { // default maxJsonStringLength value will be used ObjectMapper mapper = ObjectMapperFactory.getObjectMapper(); int stringLengthInMapper = mapper.getFactory().streamReadConstraints().getMaxStringLength(); - Assert.assertEquals(ObjectMapperFactory.DEFAULT_MAX_JSON_STRING_LEN, stringLengthInMapper); + assertEquals(ObjectMapperFactory.DEFAULT_MAX_JSON_STRING_LEN, stringLengthInMapper); } - @Test - public void testObjectMapperWithLargeJsonString() { + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + public void testObjectMapperWithLargeJsonString(int lobSizeInBytes, int maxJsonStringLength) { + setJacksonDefaultMaxStringLength(maxJsonStringLength); ObjectMapper mapper = ObjectMapperFactory.getObjectMapper(); try { JsonNode jsonNode = mapper.readTree(generateBase64EncodedJsonString(lobSizeInBytes)); - Assert.assertNotNull(jsonNode); + assertNotNull(jsonNode); } catch (Exception e) { // exception is expected when jackson's default maxStringLength value is used while retrieving // 16M string data diff --git 
a/src/test/java/net/snowflake/client/core/PrivateLinkDetectorTest.java b/src/test/java/net/snowflake/client/core/PrivateLinkDetectorTest.java index b3af68011..d5afb1af5 100644 --- a/src/test/java/net/snowflake/client/core/PrivateLinkDetectorTest.java +++ b/src/test/java/net/snowflake/client/core/PrivateLinkDetectorTest.java @@ -1,42 +1,38 @@ package net.snowflake.client.core; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import java.util.stream.Stream; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsProvider; +import org.junit.jupiter.params.provider.ArgumentsSource; -@RunWith(Parameterized.class) public class PrivateLinkDetectorTest { + static class DataProvider implements ArgumentsProvider { - @Parameterized.Parameters(name = "Host {0} is private link: {1}") - public static Object[][] data() { - return new Object[][] { - {"snowhouse.snowflakecomputing.com", false}, - {"snowhouse.privatelink.snowflakecomputing.com", true}, - {"snowhouse.PRIVATELINK.snowflakecomputing.com", true}, - {"snowhouse.snowflakecomputing.cn", false}, - {"snowhouse.privatelink.snowflakecomputing.cn", true}, - {"snowhouse.PRIVATELINK.snowflakecomputing.cn", true}, - {"snowhouse.snowflakecomputing.xyz", false}, - {"snowhouse.privatelink.snowflakecomputing.xyz", true}, - {"snowhouse.PRIVATELINK.snowflakecomputing.xyz", true}, - }; + @Override + public Stream<? extends Arguments> provideArguments(ExtensionContext context) throws Exception { + return Stream.of( + Arguments.of("snowhouse.snowflakecomputing.com", false), + Arguments.of("snowhouse.privatelink.snowflakecomputing.com", true), + Arguments.of("snowhouse.PRIVATELINK.snowflakecomputing.com", true), + Arguments.of("snowhouse.snowflakecomputing.cn", false), + Arguments.of("snowhouse.privatelink.snowflakecomputing.cn", true), + Arguments.of("snowhouse.PRIVATELINK.snowflakecomputing.cn", true), + Arguments.of("snowhouse.snowflakecomputing.xyz", false), + Arguments.of("snowhouse.privatelink.snowflakecomputing.xyz", true), + Arguments.of("snowhouse.PRIVATELINK.snowflakecomputing.xyz", true)); + } } - private final String host; - private final boolean expectedToBePrivateLink; - - public PrivateLinkDetectorTest(String host, boolean expectedToBePrivateLink) { - this.host = host; - this.expectedToBePrivateLink = expectedToBePrivateLink; - } - - @Test - public void shouldDetectPrivateLinkHost() { + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + public void shouldDetectPrivateLinkHost(String host, boolean expectedToBePrivateLink) { assertEquals( - String.format("Expecting %s to be private link: %s", host, expectedToBePrivateLink), expectedToBePrivateLink, - PrivateLinkDetector.isPrivateLink(host)); + PrivateLinkDetector.isPrivateLink(host), + String.format("Expecting %s to be private link: %s", host, expectedToBePrivateLink)); } } diff --git a/src/test/java/net/snowflake/client/core/QueryContextCacheTest.java b/src/test/java/net/snowflake/client/core/QueryContextCacheTest.java index 862dd1c40..e13ecd673 100644 --- a/src/test/java/net/snowflake/client/core/QueryContextCacheTest.java +++ b/src/test/java/net/snowflake/client/core/QueryContextCacheTest.java @@ -5,12 +5,12 @@ package net.snowflake.client.core; import static org.hamcrest.MatcherAssert.assertThat; 
-import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class QueryContextCacheTest { private QueryContextCache qcc = null; diff --git a/src/test/java/net/snowflake/client/core/SFArrowResultSetIT.java b/src/test/java/net/snowflake/client/core/SFArrowResultSetIT.java index af6ac5219..8be8fd471 100644 --- a/src/test/java/net/snowflake/client/core/SFArrowResultSetIT.java +++ b/src/test/java/net/snowflake/client/core/SFArrowResultSetIT.java @@ -6,8 +6,8 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; import java.io.File; import java.io.FileInputStream; @@ -26,9 +26,8 @@ import java.util.List; import java.util.Map; import java.util.Random; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.SkipOnThinJar; -import net.snowflake.client.category.TestCategoryArrow; +import net.snowflake.client.annotations.DontRunOnThinJar; +import net.snowflake.client.category.TestTags; import net.snowflake.client.jdbc.ArrowResultChunk; import net.snowflake.client.jdbc.BaseJDBCWithSharedConnectionIT; import net.snowflake.client.jdbc.ErrorCode; @@ -63,17 +62,12 @@ import org.apache.arrow.vector.types.pojo.Schema; import org.apache.arrow.vector.util.Text; import org.apache.commons.lang3.RandomStringUtils; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; -@Category(TestCategoryArrow.class) +@Tag(TestTags.ARROW) public class SFArrowResultSetIT extends BaseJDBCWithSharedConnectionIT { - - /** Necessary to conditional ignore tests */ - @Rule public ConditionalIgnoreRule rule = new ConditionalIgnoreRule(); - private Random random = new Random(); /** @@ -83,11 +77,11 @@ public class SFArrowResultSetIT extends BaseJDBCWithSharedConnectionIT { protected BufferAllocator allocator = new RootAllocator(Long.MAX_VALUE); /** temporary folder to store result files */ - @Rule public TemporaryFolder resultFolder = new TemporaryFolder(); + @TempDir private File tempDir; /** Test the case that all results are returned in first chunk */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = SkipOnThinJar.class) + @DontRunOnThinJar public void testNoOfflineData() throws Throwable { List fieldList = new ArrayList<>(); Map customFieldMeta = new HashMap<>(); @@ -103,8 +97,9 @@ public void testNoOfflineData() throws Throwable { int dataSize = (int) file.length(); byte[] dataBytes = new byte[dataSize]; - InputStream is = new FileInputStream(file); - is.read(dataBytes, 0, dataSize); + try (InputStream is = new FileInputStream(file)) { + is.read(dataBytes, 0, dataSize); + } SnowflakeResultSetSerializableV1 resultSetSerializable = new SnowflakeResultSetSerializableV1(); resultSetSerializable.setRootAllocator(new RootAllocator(Long.MAX_VALUE)); @@ -149,7 +144,7 @@ public void testEmptyResultSet() throws Throwable { /** Testing the case that all data comes from chunk 
downloader */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = SkipOnThinJar.class) + @DontRunOnThinJar public void testOnlyOfflineData() throws Throwable { final int colCount = 2; final int chunkCount = 10; @@ -199,7 +194,7 @@ public void testOnlyOfflineData() throws Throwable { /** Testing the case that all data comes from chunk downloader */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = SkipOnThinJar.class) + @DontRunOnThinJar public void testFirstResponseAndOfflineData() throws Throwable { final int colCount = 2; final int chunkCount = 10; @@ -229,8 +224,9 @@ public void testFirstResponseAndOfflineData() throws Throwable { int dataSize = (int) arrowFile.length(); byte[] dataBytes = new byte[dataSize]; - InputStream is = new FileInputStream(arrowFile); - is.read(dataBytes, 0, dataSize); + try (InputStream is = new FileInputStream(arrowFile)) { + is.read(dataBytes, 0, dataSize); + } SnowflakeResultSetSerializableV1 resultSetSerializable = new SnowflakeResultSetSerializableV1(); resultSetSerializable.setFirstChunkStringData(Base64.getEncoder().encodeToString(dataBytes)); @@ -280,8 +276,7 @@ private class MockChunkDownloader implements ChunkDownloader { public SnowflakeResultChunk getNextChunkToConsume() throws SnowflakeSQLException { if (currentFileIndex < resultFileNames.size()) { ArrowResultChunk resultChunk = new ArrowResultChunk("", 0, 0, 0, rootAllocator, null); - try { - InputStream is = new FileInputStream(resultFileNames.get(currentFileIndex)); + try (InputStream is = new FileInputStream(resultFileNames.get(currentFileIndex))) { resultChunk.readArrowStream(is); currentFileIndex++; @@ -380,12 +375,13 @@ Object[][] generateData(Schema schema, int rowCount) { File createArrowFile(String fileName, Schema schema, Object[][] data, int rowsPerRecordBatch) throws IOException { - File file = resultFolder.newFile(fileName); + File file = new File(tempDir, fileName); + file.createNewFile(); VectorSchemaRoot root = VectorSchemaRoot.create(schema, allocator); - try (ArrowWriter writer = - new ArrowStreamWriter( - root, new DictionaryProvider.MapDictionaryProvider(), new FileOutputStream(file))) { + try (FileOutputStream fos = new FileOutputStream(file); + ArrowWriter writer = + new ArrowStreamWriter(root, new DictionaryProvider.MapDictionaryProvider(), fos)) { writer.start(); for (int i = 0; i < data[0].length; ) { @@ -592,7 +588,7 @@ private void writeTimestampStructToField( /** Test that first chunk containing struct vectors (used for timestamps) can be sorted */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = SkipOnThinJar.class) + @DontRunOnThinJar public void testSortedResultChunkWithStructVectors() throws Throwable { try (Statement statement = connection.createStatement()) { statement.execute("create or replace table teststructtimestamp (t1 timestamp_ltz)"); @@ -638,8 +634,9 @@ public void testSortedResultChunkWithStructVectors() throws Throwable { int dataSize = (int) file.length(); byte[] dataBytes = new byte[dataSize]; - InputStream is = new FileInputStream(file); - is.read(dataBytes, 0, dataSize); + try (InputStream is = new FileInputStream(file)) { + is.read(dataBytes, 0, dataSize); + } resultSetSerializable.setRootAllocator(new RootAllocator(Long.MAX_VALUE)); resultSetSerializable.setFirstChunkStringData( @@ -663,7 +660,7 @@ public void testSortedResultChunkWithStructVectors() throws Throwable { /** Test that the first chunk can be sorted */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = SkipOnThinJar.class) + @DontRunOnThinJar 
public void testSortedResultChunk() throws Throwable { try (Statement statement = connection.createStatement()) { statement.execute( @@ -725,8 +722,9 @@ public void testSortedResultChunk() throws Throwable { int dataSize = (int) file.length(); byte[] dataBytes = new byte[dataSize]; - InputStream is = new FileInputStream(file); - is.read(dataBytes, 0, dataSize); + try (InputStream is = new FileInputStream(file)) { + is.read(dataBytes, 0, dataSize); + } resultSetSerializable.setRootAllocator(new RootAllocator(Long.MAX_VALUE)); resultSetSerializable.setFirstChunkStringData( diff --git a/src/test/java/net/snowflake/client/core/SFLoginInputTest.java b/src/test/java/net/snowflake/client/core/SFLoginInputTest.java index 7d8a5b67b..b34eebc02 100644 --- a/src/test/java/net/snowflake/client/core/SFLoginInputTest.java +++ b/src/test/java/net/snowflake/client/core/SFLoginInputTest.java @@ -1,8 +1,8 @@ package net.snowflake.client.core; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class SFLoginInputTest { diff --git a/src/test/java/net/snowflake/client/core/SFSessionPropertyTest.java b/src/test/java/net/snowflake/client/core/SFSessionPropertyTest.java index 8c7a6fb1f..142f92217 100644 --- a/src/test/java/net/snowflake/client/core/SFSessionPropertyTest.java +++ b/src/test/java/net/snowflake/client/core/SFSessionPropertyTest.java @@ -7,10 +7,10 @@ import static org.hamcrest.CoreMatchers.endsWith; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.jupiter.api.Assertions.fail; import net.snowflake.client.jdbc.ErrorCode; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class SFSessionPropertyTest { @Test @@ -28,7 +28,7 @@ public void testCheckApplicationName() throws SFException { for (String invalid : invalidApplicationName) { try { SFSessionProperty.checkPropertyValue(SFSessionProperty.APPLICATION, invalid); - Assert.fail(); + fail(); } catch (SFException e) { assertThat(e.getVendorCode(), is(ErrorCode.INVALID_PARAMETER_VALUE.getMessageCode())); } @@ -48,7 +48,7 @@ public void testCustomSuffixForUserAgentHeaders() { public void testInvalidMaxRetries() { try { SFSessionProperty.checkPropertyValue(SFSessionProperty.MAX_HTTP_RETRIES, "invalidValue"); - Assert.fail("testInvalidMaxRetries"); + fail("testInvalidMaxRetries"); } catch (SFException e) { assertThat(e.getVendorCode(), is(ErrorCode.INVALID_PARAMETER_VALUE.getMessageCode())); } @@ -67,7 +67,7 @@ public void testvalidMaxRetries() throws SFException { public void testInvalidPutGetMaxRetries() { try { SFSessionProperty.checkPropertyValue(SFSessionProperty.PUT_GET_MAX_RETRIES, "invalidValue"); - Assert.fail("testInvalidMaxRetries"); + fail("testInvalidMaxRetries"); } catch (SFException e) { assertThat(e.getVendorCode(), is(ErrorCode.INVALID_PARAMETER_VALUE.getMessageCode())); } diff --git a/src/test/java/net/snowflake/client/core/SFTrustManagerIT.java b/src/test/java/net/snowflake/client/core/SFTrustManagerIT.java index f30cd88e1..2645277b1 100644 --- a/src/test/java/net/snowflake/client/core/SFTrustManagerIT.java +++ b/src/test/java/net/snowflake/client/core/SFTrustManagerIT.java @@ -20,8 +20,9 @@ import java.util.ArrayList; import java.util.List; import java.util.concurrent.TimeUnit; +import java.util.stream.Stream; import javax.net.ssl.SSLHandshakeException; -import 
net.snowflake.client.category.TestCategoryCore; +import net.snowflake.client.category.TestTags; import net.snowflake.client.jdbc.BaseJDBCTest; import net.snowflake.client.jdbc.telemetryOOB.TelemetryService; import net.snowflake.client.log.SFLogger; @@ -29,45 +30,41 @@ import org.apache.http.HttpResponse; import org.apache.http.client.HttpClient; import org.apache.http.client.methods.HttpGet; -import org.junit.After; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TemporaryFolder; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.api.io.TempDir; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsProvider; +import org.junit.jupiter.params.provider.ArgumentsSource; -@RunWith(Parameterized.class) -@Category(TestCategoryCore.class) +@Tag(TestTags.CORE) public class SFTrustManagerIT extends BaseJDBCTest { private static final SFLogger logger = SFLoggerFactory.getLogger(SFTrustManagerIT.class); - public SFTrustManagerIT(String host) { - this.host = host; - } - - @Parameterized.Parameters(name = "host={0}") - public static Object[][] data() { - return new Object[][] { - // this host generates many "SSLHandshake Certificate Revocation - // check failed. Could not retrieve OCSP Response." when running in parallel CI builds - // {"storage.googleapis.com"}, - {"ocspssd.us-east-1.snowflakecomputing.com/ocsp/fetch"}, - {"sfcsupport.snowflakecomputing.com"}, - {"sfcsupport.us-east-1.snowflakecomputing.com"}, - {"sfcsupport.eu-central-1.snowflakecomputing.com"}, - {"sfc-dev1-regression.s3.amazonaws.com"}, - {"sfc-ds2-customer-stage.s3.amazonaws.com"}, - {"snowflake.okta.com"}, - {"sfcdev2.blob.core.windows.net"} - }; + private static class HostProvider implements ArgumentsProvider { + @Override + public Stream provideArguments(ExtensionContext context) throws Exception { + return Stream.of( + // this host generates many "SSLHandshake Certificate Revocation + // check failed. Could not retrieve OCSP Response." 
when running in parallel CI builds + // Arguments.of("storage.googleapis.com"), + Arguments.of("ocspssd.us-east-1.snowflakecomputing.com/ocsp/fetch"), + Arguments.of("sfcsupport.snowflakecomputing.com"), + Arguments.of("sfcsupport.us-east-1.snowflakecomputing.com"), + Arguments.of("sfcsupport.eu-central-1.snowflakecomputing.com"), + Arguments.of("sfc-dev1-regression.s3.amazonaws.com"), + Arguments.of("sfc-ds2-customer-stage.s3.amazonaws.com"), + Arguments.of("snowflake.okta.com"), + Arguments.of("sfcdev2.blob.core.windows.net")); + } } private boolean defaultState; - private final String host; - @Before + @BeforeEach public void setUp() { TelemetryService service = TelemetryService.getInstance(); service.updateContextForIT(getConnectionParameters()); @@ -76,7 +73,7 @@ public void setUp() { service.enable(); } - @After + @AfterEach public void tearDown() throws InterruptedException { TelemetryService service = TelemetryService.getInstance(); // wait 5 seconds while the service is flushing @@ -90,15 +87,16 @@ public void tearDown() throws InterruptedException { System.clearProperty(SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_URL); } - @Rule public TemporaryFolder tmpFolder = new TemporaryFolder(); + @TempDir File tmpFolder; /** * OCSP tests for the Snowflake and AWS S3 HTTPS connections. * *

Whatever the default method is used. */ - @Test - public void testOcsp() throws Throwable { + @ParameterizedTest + @ArgumentsSource(HostProvider.class) + public void testOcsp(String host) throws Throwable { System.setProperty( SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_ENABLED, Boolean.TRUE.toString()); HttpClient client = @@ -115,11 +113,13 @@ public void testOcsp() throws Throwable { * *

Specifying an non-existing file will force to fetch OCSP response. */ - @Test - public void testOcspWithFileCache() throws Throwable { + @ParameterizedTest + @ArgumentsSource(HostProvider.class) + public void testOcspWithFileCache(String host) throws Throwable { System.setProperty( SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_ENABLED, Boolean.FALSE.toString()); - File ocspCacheFile = tmpFolder.newFile(); + File ocspCacheFile = new File(tmpFolder, "ocsp-cache"); + ocspCacheFile.createNewFile(); HttpClient client = HttpUtil.buildHttpClient( new HttpClientSettingsKey(OCSPMode.FAIL_CLOSED), @@ -130,11 +130,13 @@ public void testOcspWithFileCache() throws Throwable { } /** OCSP tests for the Snowflake and AWS S3 HTTPS connections using the server cache. */ - @Test - public void testOcspWithServerCache() throws Throwable { + @ParameterizedTest + @ArgumentsSource(HostProvider.class) + public void testOcspWithServerCache(String host) throws Throwable { System.setProperty( SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_ENABLED, Boolean.TRUE.toString()); - File ocspCacheFile = tmpFolder.newFile(); + File ocspCacheFile = new File(tmpFolder, "ocsp-cache"); + ocspCacheFile.createNewFile(); HttpClient client = HttpUtil.buildHttpClient( new HttpClientSettingsKey(OCSPMode.FAIL_CLOSED), @@ -148,11 +150,13 @@ public void testOcspWithServerCache() throws Throwable { * OCSP tests for the Snowflake and AWS S3 HTTPS connections without using the server cache. This * test should always pass - even with OCSP Outage. */ - @Test - public void testOcspWithoutServerCache() throws Throwable { + @ParameterizedTest + @ArgumentsSource(HostProvider.class) + public void testOcspWithoutServerCache(String host) throws Throwable { System.setProperty( SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_ENABLED, Boolean.FALSE.toString()); - File ocspCacheFile = tmpFolder.newFile(); + File ocspCacheFile = new File(tmpFolder, "ocsp-cache"); + ocspCacheFile.createNewFile(); HttpClient client = HttpUtil.buildHttpClient( new HttpClientSettingsKey(OCSPMode.FAIL_OPEN), @@ -163,8 +167,9 @@ public void testOcspWithoutServerCache() throws Throwable { } /** OCSP tests for the Snowflake and AWS S3 HTTPS connections using the server cache. */ - @Test - public void testInvalidCacheFile() throws Throwable { + @ParameterizedTest + @ArgumentsSource(HostProvider.class) + public void testInvalidCacheFile(String host) throws Throwable { System.setProperty( SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_ENABLED, Boolean.TRUE.toString()); // a file under never exists. 
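The SFTrustManagerIT changes above follow the same JUnit 4 to JUnit 5 migration pattern used throughout this patch: the @RunWith(Parameterized.class) constructor injection is replaced by a custom ArgumentsProvider referenced from @ArgumentsSource on each @ParameterizedTest, and the TemporaryFolder @Rule becomes a @TempDir field. A minimal, self-contained sketch of that pattern is shown below; the class, method, and host names are illustrative placeholders, not code from this repository.

// Sketch only: ArgumentsProvider replaces the static @Parameterized.Parameters data() method,
// and @TempDir replaces the JUnit 4 TemporaryFolder rule. Names here are assumptions.
import java.io.File;
import java.util.stream.Stream;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.jupiter.api.io.TempDir;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.ArgumentsProvider;
import org.junit.jupiter.params.provider.ArgumentsSource;

class HostProviderSketchTest {
  // Supplies the values that the JUnit 4 parameterized constructor used to inject.
  static class HostProvider implements ArgumentsProvider {
    @Override
    public Stream<? extends Arguments> provideArguments(ExtensionContext context) {
      return Stream.of(Arguments.of("example-host-1"), Arguments.of("example-host-2"));
    }
  }

  // Injected per test instead of creating files through a TemporaryFolder @Rule.
  @TempDir File tmpFolder;

  @ParameterizedTest
  @ArgumentsSource(HostProvider.class)
  void testWithHost(String host) throws Exception {
    // The host arrives as a method argument rather than a constructor field.
    File cacheFile = new File(tmpFolder, "cache");
    cacheFile.createNewFile();
    // ... exercise the code under test with `host` and `cacheFile` ...
  }
}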
diff --git a/src/test/java/net/snowflake/client/core/SFTrustManagerMockitoMockLatestIT.java b/src/test/java/net/snowflake/client/core/SFTrustManagerMockitoMockLatestIT.java index 862f4867e..077ec6829 100644 --- a/src/test/java/net/snowflake/client/core/SFTrustManagerMockitoMockLatestIT.java +++ b/src/test/java/net/snowflake/client/core/SFTrustManagerMockitoMockLatestIT.java @@ -14,32 +14,32 @@ import javax.net.ssl.TrustManager; import javax.net.ssl.TrustManagerFactory; import net.snowflake.client.TestUtil; -import net.snowflake.client.category.TestCategoryCore; +import net.snowflake.client.category.TestTags; import net.snowflake.client.jdbc.SnowflakeUtil; -import org.junit.Ignore; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; import org.mockito.MockedStatic; -@Category(TestCategoryCore.class) +@Tag(TestTags.CORE) public class SFTrustManagerMockitoMockLatestIT { - @Rule public TemporaryFolder tmpFolder = new TemporaryFolder(); + @TempDir private File tmpFolder; /* * Test SF_OCSP_RESPONSE_CACHE_DIR environment variable changes the * location of the OCSP cache directory. */ @Test - @Ignore("static initialization block of SFTrustManager class doesn't run sometimes") + @Disabled("static initialization block of SFTrustManager class doesn't run sometimes") public void testUnitOCSPWithCustomCacheDirectory() throws IOException { try (MockedStatic mockedTrustManagerFactory = mockStatic(TrustManagerFactory.class); MockedStatic mockedSnowflakeUtil = mockStatic(SnowflakeUtil.class)) { - File cacheFolder = tmpFolder.newFolder(); + File cacheFolder = new File(tmpFolder, "cache"); + cacheFolder.mkdirs(); mockedSnowflakeUtil .when(() -> TestUtil.systemGetEnv("SF_OCSP_RESPONSE_CACHE_DIR")) .thenReturn(cacheFolder.getCanonicalPath()); diff --git a/src/test/java/net/snowflake/client/core/SFTrustManagerTest.java b/src/test/java/net/snowflake/client/core/SFTrustManagerTest.java index 6a55b2cd4..77a06cb2a 100644 --- a/src/test/java/net/snowflake/client/core/SFTrustManagerTest.java +++ b/src/test/java/net/snowflake/client/core/SFTrustManagerTest.java @@ -11,10 +11,24 @@ import java.util.Properties; import net.snowflake.client.jdbc.SnowflakeResultSetSerializable; import net.snowflake.client.jdbc.SnowflakeResultSetSerializableV1; -import org.junit.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; public class SFTrustManagerTest { /** Test building OCSP retry URL */ + static String originalRetryUrlPattern; + + @BeforeAll + public static void saveStaticValues() { + originalRetryUrlPattern = SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN; + } + + @AfterAll + public static void restoreStaticValues() { + SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN = originalRetryUrlPattern; + } + @Test public void testBuildRetryURL() throws Exception { // private link diff --git a/src/test/java/net/snowflake/client/core/SQLInputOutputTest.java b/src/test/java/net/snowflake/client/core/SQLInputOutputTest.java index 346d43c34..f8224a8eb 100644 --- a/src/test/java/net/snowflake/client/core/SQLInputOutputTest.java +++ b/src/test/java/net/snowflake/client/core/SQLInputOutputTest.java @@ -4,7 +4,7 @@ import static org.mockito.Mockito.mock; import java.sql.SQLData; -import org.junit.Test; 
+import org.junit.jupiter.api.Test; public class SQLInputOutputTest { diff --git a/src/test/java/net/snowflake/client/core/SecureStorageManagerTest.java b/src/test/java/net/snowflake/client/core/SecureStorageManagerTest.java index b6f8a16ac..b79875038 100644 --- a/src/test/java/net/snowflake/client/core/SecureStorageManagerTest.java +++ b/src/test/java/net/snowflake/client/core/SecureStorageManagerTest.java @@ -16,11 +16,11 @@ import java.util.HashMap; import java.util.Iterator; import java.util.Map; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningNotOnLinux; -import net.snowflake.client.RunningNotOnWinMac; -import org.junit.Rule; -import org.junit.Test; +import net.snowflake.client.annotations.RunOnLinux; +import net.snowflake.client.annotations.RunOnMac; +import net.snowflake.client.annotations.RunOnWindows; +import net.snowflake.client.annotations.RunOnWindowsOrMac; +import org.junit.jupiter.api.Test; class MockAdvapi32Lib implements SecureStorageWindowsManager.Advapi32Lib { @Override @@ -213,8 +213,6 @@ Pointer getPointer() { } public class SecureStorageManagerTest { - // This is required to use ConditionalIgnore annotation - @Rule public ConditionalIgnoreRule rule = new ConditionalIgnoreRule(); private static final String host = "fakeHost"; private static final String user = "fakeUser"; @@ -227,7 +225,7 @@ public class SecureStorageManagerTest { private static final String MFA_TOKEN = "MFATOKEN"; @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningNotOnWinMac.class) + @RunOnWindowsOrMac public void testLoadNativeLibrary() { // Only run on Mac or Windows. Make sure the loading of native platform library won't break. if (Constants.getOS() == Constants.OS.MAC) { @@ -240,6 +238,7 @@ public void testLoadNativeLibrary() { } @Test + @RunOnWindows public void testWindowsManager() { SecureStorageWindowsManager.Advapi32LibManager.setInstance(new MockAdvapi32Lib()); SecureStorageManager manager = SecureStorageWindowsManager.builder(); @@ -249,6 +248,7 @@ public void testWindowsManager() { } @Test + @RunOnMac public void testMacManager() { SecureStorageAppleManager.SecurityLibManager.setInstance(new MockSecurityLib()); SecureStorageManager manager = SecureStorageAppleManager.builder(); @@ -258,7 +258,7 @@ public void testMacManager() { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningNotOnLinux.class) + @RunOnLinux public void testLinuxManager() { SecureStorageManager manager = SecureStorageLinuxManager.getInstance(); diff --git a/src/test/java/net/snowflake/client/core/SessionUtilExternalBrowserTest.java b/src/test/java/net/snowflake/client/core/SessionUtilExternalBrowserTest.java index 2ba00f378..02f6193d6 100644 --- a/src/test/java/net/snowflake/client/core/SessionUtilExternalBrowserTest.java +++ b/src/test/java/net/snowflake/client/core/SessionUtilExternalBrowserTest.java @@ -5,10 +5,9 @@ package net.snowflake.client.core; import static org.hamcrest.CoreMatchers.equalTo; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.mockStatic; import static org.mockito.Mockito.when; @@ -32,8 +31,9 @@ import net.snowflake.common.core.ClientAuthnDTO; import 
org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpRequestBase; -import org.junit.Ignore; -import org.junit.Test; +import org.hamcrest.MatcherAssert; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; import org.mockito.MockedStatic; import org.mockito.Mockito; @@ -164,11 +164,13 @@ public void testSessionUtilExternalBrowser() throws Throwable { SessionUtilExternalBrowser sub = FakeSessionUtilExternalBrowser.createInstance(loginInput, false); sub.authenticate(); - assertThat("", sub.getToken(), equalTo(FakeSessionUtilExternalBrowser.MOCK_SAML_TOKEN)); + MatcherAssert.assertThat( + "", sub.getToken(), equalTo(FakeSessionUtilExternalBrowser.MOCK_SAML_TOKEN)); sub = FakeSessionUtilExternalBrowser.createInstance(loginInput, true); sub.authenticate(); - assertThat("", sub.getToken(), equalTo(FakeSessionUtilExternalBrowser.MOCK_SAML_TOKEN)); + MatcherAssert.assertThat( + "", sub.getToken(), equalTo(FakeSessionUtilExternalBrowser.MOCK_SAML_TOKEN)); } } @@ -200,7 +202,7 @@ public void testSessionUtilExternalBrowserFail() throws Throwable { sub.authenticate(); fail("should have failed with an exception."); } catch (SnowflakeSQLException ex) { - assertThat("Error is expected", ex.getErrorCode(), equalTo(123456)); + MatcherAssert.assertThat("Error is expected", ex.getErrorCode(), equalTo(123456)); } } } @@ -248,7 +250,7 @@ private SFLoginInput initMockLoginInput() { // Run this test manually to test disabling storing temporary credetials with external browser // auth. This is valid for versions after 3.18.0. @Test - @Ignore + @Disabled public void testEnableClientStoreTemporaryCredential() throws Exception { Map params = AbstractDriverIT.getConnectionParameters(); SnowflakeBasicDataSource ds = new SnowflakeBasicDataSource(); @@ -270,7 +272,7 @@ public void testEnableClientStoreTemporaryCredential() throws Exception { // open a browser window for authentication, close the window, and you should get the expected // error message within the set timeout. Valid for driver versions after 3.18.0. 
@Test - @Ignore + @Disabled public void testExternalBrowserTimeout() throws Exception { Map params = AbstractDriverIT.getConnectionParameters(); SnowflakeBasicDataSource ds = new SnowflakeBasicDataSource(); diff --git a/src/test/java/net/snowflake/client/core/SessionUtilLatestIT.java b/src/test/java/net/snowflake/client/core/SessionUtilLatestIT.java index be6c03b01..57dde2a7b 100644 --- a/src/test/java/net/snowflake/client/core/SessionUtilLatestIT.java +++ b/src/test/java/net/snowflake/client/core/SessionUtilLatestIT.java @@ -5,8 +5,8 @@ package net.snowflake.client.core; import static net.snowflake.client.TestUtil.systemGetEnv; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.mockStatic; @@ -21,7 +21,7 @@ import java.util.Map.Entry; import java.util.UUID; import java.util.concurrent.atomic.AtomicBoolean; -import net.snowflake.client.category.TestCategoryCore; +import net.snowflake.client.category.TestTags; import net.snowflake.client.jdbc.BaseJDBCTest; import net.snowflake.client.jdbc.ErrorCode; import net.snowflake.client.jdbc.SnowflakeSQLException; @@ -33,14 +33,14 @@ import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpRequestBase; -import org.junit.Ignore; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.mockito.MockedStatic; import org.mockito.MockedStatic.Verification; import org.mockito.Mockito; -@Category(TestCategoryCore.class) +@Tag(TestTags.CORE) public class SessionUtilLatestIT extends BaseJDBCTest { /** @@ -50,7 +50,7 @@ public class SessionUtilLatestIT extends BaseJDBCTest { * @throws SFException * @throws SnowflakeSQLException */ - @Ignore + @Disabled @Test public void testJwtAuthTimeoutRetry() throws SFException, SnowflakeSQLException { final SFLoginInput loginInput = initMockLoginInput(); diff --git a/src/test/java/net/snowflake/client/core/SessionUtilTest.java b/src/test/java/net/snowflake/client/core/SessionUtilTest.java index cab5fb68f..86819dc5b 100644 --- a/src/test/java/net/snowflake/client/core/SessionUtilTest.java +++ b/src/test/java/net/snowflake/client/core/SessionUtilTest.java @@ -5,9 +5,9 @@ package net.snowflake.client.core; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import com.fasterxml.jackson.databind.node.BooleanNode; import java.io.IOException; @@ -20,9 +20,25 @@ import net.snowflake.client.jdbc.MockConnectionTest; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.utils.URIBuilder; -import org.junit.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; public class SessionUtilTest { + private static String originalUrlValue; + private static String originalRetryUrlPattern; + + @BeforeAll + public static void saveStaticValues() { + 
originalUrlValue = SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE; + originalRetryUrlPattern = SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN; + } + + @AfterAll + public static void restoreStaticValues() { + SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_URL_VALUE = originalUrlValue; + SFTrustManager.SF_OCSP_RESPONSE_CACHE_SERVER_RETRY_URL_PATTERN = originalRetryUrlPattern; + } /** Test isPrefixEqual */ @Test diff --git a/src/test/java/net/snowflake/client/core/SnowflakeMFACacheTest.java b/src/test/java/net/snowflake/client/core/SnowflakeMFACacheTest.java index f1f0a3e73..0524ab6b8 100644 --- a/src/test/java/net/snowflake/client/core/SnowflakeMFACacheTest.java +++ b/src/test/java/net/snowflake/client/core/SnowflakeMFACacheTest.java @@ -4,8 +4,9 @@ package net.snowflake.client.core; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyInt; @@ -29,9 +30,8 @@ import net.snowflake.client.jdbc.SnowflakeSQLException; import org.apache.commons.io.IOUtils; import org.apache.http.client.methods.HttpPost; -import org.junit.Assert; -import org.junit.Ignore; -import org.junit.Test; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; import org.mockito.MockedStatic; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; @@ -216,7 +216,7 @@ public String answer(InvocationOnMock invocation) throws Throwable { // This connection would receive an exception and then should clean up the mfa cache try { Connection con3 = DriverManager.getConnection(url, prop); - Assert.fail(); + fail(); } catch (SnowflakeSQLException ex) { // An exception is forced to happen by mocking. Do nothing. } @@ -336,7 +336,7 @@ public void testUnavailableLocalSecureStorage() throws SQLException { // Run this test manually to test disabling the client request MFA token. Use an MFA // authentication enabled user. This is valid for versions after 3.18.0. 
@Test - @Ignore + @Disabled public void testEnableClientRequestMfaToken() throws SQLException { Map params = AbstractDriverIT.getConnectionParameters(); SnowflakeBasicDataSource ds = new SnowflakeBasicDataSource(); diff --git a/src/test/java/net/snowflake/client/core/SqlInputTimestampUtilTest.java b/src/test/java/net/snowflake/client/core/SqlInputTimestampUtilTest.java index 752229fc9..305f5563d 100644 --- a/src/test/java/net/snowflake/client/core/SqlInputTimestampUtilTest.java +++ b/src/test/java/net/snowflake/client/core/SqlInputTimestampUtilTest.java @@ -1,6 +1,6 @@ package net.snowflake.client.core; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.sql.Timestamp; import java.time.LocalDateTime; @@ -8,12 +8,12 @@ import java.util.Map; import java.util.TimeZone; import net.snowflake.client.jdbc.SnowflakeUtil; -import org.junit.BeforeClass; -import org.junit.Ignore; -import org.junit.Test; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; -@Ignore +@Disabled public class SqlInputTimestampUtilTest { private static final String TIMESTAMP_IN_FORMAT_1 = "2021-12-22 09:43:44.000 +0100"; @@ -24,7 +24,7 @@ public class SqlInputTimestampUtilTest { private static SFBaseSession mockSession; - @BeforeClass + @BeforeAll public static void setup() { CONNECTION_PARAMS.put("TIMESTAMP_OUTPUT_FORMAT", "YYYY-MM-DD HH24:MI:SS.FF3 TZHTZM"); CONNECTION_PARAMS.put("TIMESTAMP_TZ_OUTPUT_FORMAT", "DY, DD MON YYYY HH24:MI:SS TZHTZM"); diff --git a/src/test/java/net/snowflake/client/core/StmtUtilTest.java b/src/test/java/net/snowflake/client/core/StmtUtilTest.java index 75daa9a03..7075416e4 100644 --- a/src/test/java/net/snowflake/client/core/StmtUtilTest.java +++ b/src/test/java/net/snowflake/client/core/StmtUtilTest.java @@ -13,17 +13,17 @@ import java.util.Map.Entry; import java.util.UUID; import java.util.concurrent.atomic.AtomicBoolean; -import net.snowflake.client.category.TestCategoryCore; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.StmtUtil.StmtInput; import net.snowflake.client.jdbc.BaseJDBCTest; import org.apache.http.Header; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.mockito.MockedStatic; import org.mockito.MockedStatic.Verification; import org.mockito.Mockito; -@Category(TestCategoryCore.class) +@Tag(TestTags.CORE) public class StmtUtilTest extends BaseJDBCTest { /** SNOW-862760 Verify that additional headers are added to request */ diff --git a/src/test/java/net/snowflake/client/core/URLUtilTest.java b/src/test/java/net/snowflake/client/core/URLUtilTest.java index b61324eee..d2903b2c5 100644 --- a/src/test/java/net/snowflake/client/core/URLUtilTest.java +++ b/src/test/java/net/snowflake/client/core/URLUtilTest.java @@ -3,11 +3,11 @@ */ package net.snowflake.client.core; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class URLUtilTest { diff --git a/src/test/java/net/snowflake/client/core/arrow/ArrowResultUtilTest.java 
b/src/test/java/net/snowflake/client/core/arrow/ArrowResultUtilTest.java index 75b24cc07..4dc6855b1 100644 --- a/src/test/java/net/snowflake/client/core/arrow/ArrowResultUtilTest.java +++ b/src/test/java/net/snowflake/client/core/arrow/ArrowResultUtilTest.java @@ -4,43 +4,42 @@ package net.snowflake.client.core.arrow; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.sql.Timestamp; +import java.util.ArrayList; +import java.util.List; import java.util.Random; import java.util.TimeZone; +import java.util.stream.Stream; import net.snowflake.client.core.ResultUtil; import net.snowflake.client.core.SFException; import net.snowflake.client.core.SFSession; -import org.junit.After; -import org.junit.Ignore; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import net.snowflake.client.providers.TimezoneProvider; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsProvider; +import org.junit.jupiter.params.provider.ArgumentsSource; -@RunWith(Parameterized.class) public class ArrowResultUtilTest { - // test on multiple time zones - @Parameterized.Parameters - public static Object[][] data() { - return new Object[][] { - {"UTC"}, {"America/Los_Angeles"}, {"America/New_York"}, {"Asia/Singapore"}, {"MEZ"}, - }; - } - - @After - public void clearTimeZone() { + @AfterAll + public static void clearTimeZone() { System.clearProperty("user.timezone"); } - public ArrowResultUtilTest(String tz) { - System.setProperty("user.timezone", tz); + public static void setTimeZone(String string) { + System.setProperty("user.timezone", string); } - @Test - @Ignore + @ParameterizedTest(name = "Timezone = {0}") + @ArgumentsSource(TimezoneProvider.class) + @Disabled /** This is to show we can have 30X improvement using new API */ - public void testGetDatePerformance() throws SFException { + public void testGetDatePerformance(String timezone) throws SFException { + setTimeZone(timezone); Random random = new Random(); int dateBound = 50000; int times = 100000; @@ -71,17 +70,43 @@ public void testGetDatePerformance() throws SFException { System.out.println(duration1 + " " + duration2 + " " + duration3); } - @Test - public void testToJavaTimestamp() { + private static class testCasesProvider implements ArgumentsProvider { + @Override + public Stream provideArguments(ExtensionContext context) throws Exception { + List timezones = + new ArrayList() { + { + add("UTC"); + add("America/Los_Angeles"); + add("America/New_York"); + add("Asia/Singapore"); + add("MEZ"); + } + }; + + long[] cases = {-1123456789, -123456789, 123456789, 123123456789L, -123123456789L}; + long[] millisecs = {-1124, -124, 123, 123123, -123124}; + int[] nanos = {876543211, 876543211, 123456789, 123456789, 876543211}; + + List args = new ArrayList<>(); + for (String timezone : timezones) { + for (int i = 0; i < cases.length; i++) { + args.add(Arguments.of(timezone, cases[i], millisecs[i], nanos[i])); + } + } + + return args.stream(); + } + } + + @ParameterizedTest + @ArgumentsSource(testCasesProvider.class) + public void testToJavaTimestamp(String timezone, long cas, long millisecs, int nanos) { // ex: -1.123456789, -0.123456789, 0.123456789, 123.123456789, -123.123456789 - long[] cases = {-1123456789, 
-123456789, 123456789, 123123456789l, -123123456789l}; - long[] millisecs = {-1124, -124, 123, 123123, -123124}; - int[] nanos = {876543211, 876543211, 123456789, 123456789, 876543211}; + setTimeZone(timezone); int scale = 9; - for (int i = 0; i < cases.length; i++) { - Timestamp ts = ArrowResultUtil.toJavaTimestamp(cases[i], scale); - assertEquals(millisecs[i], ts.getTime()); - assertEquals(nanos[i], ts.getNanos()); - } + Timestamp ts = ArrowResultUtil.toJavaTimestamp(cas, scale); + assertEquals(millisecs, ts.getTime()); + assertEquals(nanos, ts.getNanos()); } } diff --git a/src/test/java/net/snowflake/client/core/arrow/BaseConverterTest.java b/src/test/java/net/snowflake/client/core/arrow/BaseConverterTest.java index e669ac006..a738676fb 100644 --- a/src/test/java/net/snowflake/client/core/arrow/BaseConverterTest.java +++ b/src/test/java/net/snowflake/client/core/arrow/BaseConverterTest.java @@ -10,9 +10,9 @@ import net.snowflake.client.jdbc.ErrorCode; import net.snowflake.common.core.SFBinaryFormat; import net.snowflake.common.core.SnowflakeDateTimeFormat; -import org.junit.After; -import org.junit.Assume; -import org.junit.Before; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assumptions; +import org.junit.jupiter.api.BeforeEach; public class BaseConverterTest implements DataConversionContext { private SnowflakeDateTimeFormat dateTimeFormat = @@ -30,16 +30,16 @@ public class BaseConverterTest implements DataConversionContext { private boolean honorClientTZForTimestampNTZ; protected final int invalidConversionErrorCode = ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(); - @After + @AfterEach public void clearTimeZone() { System.clearProperty("user.timezone"); } - @Before + @BeforeEach public void assumeLittleEndian() { - Assume.assumeTrue( - "Arrow doesn't support cross endianness", - ByteOrder.nativeOrder().equals(ByteOrder.LITTLE_ENDIAN)); + Assumptions.assumeTrue( + ByteOrder.nativeOrder().equals(ByteOrder.LITTLE_ENDIAN), + "Arrow doesn't support cross endianness"); } @Override diff --git a/src/test/java/net/snowflake/client/core/arrow/BigIntToFixedConverterTest.java b/src/test/java/net/snowflake/client/core/arrow/BigIntToFixedConverterTest.java index 74eabad29..230288f4a 100644 --- a/src/test/java/net/snowflake/client/core/arrow/BigIntToFixedConverterTest.java +++ b/src/test/java/net/snowflake/client/core/arrow/BigIntToFixedConverterTest.java @@ -7,8 +7,8 @@ import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.math.BigDecimal; import java.nio.ByteBuffer; @@ -27,7 +27,7 @@ import org.apache.arrow.vector.BigIntVector; import org.apache.arrow.vector.types.Types; import org.apache.arrow.vector.types.pojo.FieldType; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class BigIntToFixedConverterTest extends BaseConverterTest { /** allocator for arrow */ diff --git a/src/test/java/net/snowflake/client/core/arrow/BigIntToTimeConverterTest.java b/src/test/java/net/snowflake/client/core/arrow/BigIntToTimeConverterTest.java index 9248440bb..b2be8f8cd 100644 --- a/src/test/java/net/snowflake/client/core/arrow/BigIntToTimeConverterTest.java +++ b/src/test/java/net/snowflake/client/core/arrow/BigIntToTimeConverterTest.java @@ -7,8 +7,8 
@@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Time; import java.util.HashMap; @@ -20,32 +20,24 @@ import net.snowflake.client.core.ResultUtil; import net.snowflake.client.core.SFException; import net.snowflake.client.core.SFSession; +import net.snowflake.client.providers.TimezoneProvider; import org.apache.arrow.memory.BufferAllocator; import org.apache.arrow.memory.RootAllocator; import org.apache.arrow.vector.BigIntVector; import org.apache.arrow.vector.types.Types; import org.apache.arrow.vector.types.pojo.FieldType; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; -@RunWith(Parameterized.class) public class BigIntToTimeConverterTest extends BaseConverterTest { - @Parameterized.Parameters - public static Object[][] data() { - return new Object[][] { - {"UTC"}, - {"America/Los_Angeles"}, - {"America/New_York"}, - {"Pacific/Honolulu"}, - {"Asia/Singapore"}, - {"MEZ"}, - {"MESZ"} - }; + public void setTimezone(String tz) { + System.setProperty("user.timezone", tz); } - public BigIntToTimeConverterTest(String tz) { - System.setProperty("user.timezone", tz); + @AfterAll + public static void clearTimezone() { + System.clearProperty("user.timezone"); } /** allocator for arrow */ @@ -55,8 +47,10 @@ public BigIntToTimeConverterTest(String tz) { private int scale = 9; - @Test - public void testTime() throws SFException { + @ParameterizedTest(name = "{0}") + @ArgumentsSource(TimezoneProvider.class) + public void testTime(String tz) throws SFException { + setTimezone(tz); // test old and new dates long[] testTimesInt64 = {12345678000000L}; diff --git a/src/test/java/net/snowflake/client/core/arrow/BigIntToTimestampLTZConverterTest.java b/src/test/java/net/snowflake/client/core/arrow/BigIntToTimestampLTZConverterTest.java index 26fdbc052..298bf443b 100644 --- a/src/test/java/net/snowflake/client/core/arrow/BigIntToTimestampLTZConverterTest.java +++ b/src/test/java/net/snowflake/client/core/arrow/BigIntToTimestampLTZConverterTest.java @@ -8,8 +8,8 @@ import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Date; import java.sql.Time; @@ -23,34 +23,17 @@ import net.snowflake.client.core.ResultUtil; import net.snowflake.client.core.SFException; import net.snowflake.client.jdbc.SnowflakeUtil; +import net.snowflake.client.providers.TimezoneProvider; import net.snowflake.common.core.SFTimestamp; import org.apache.arrow.memory.BufferAllocator; import org.apache.arrow.memory.RootAllocator; import org.apache.arrow.vector.BigIntVector; import org.apache.arrow.vector.types.Types; import org.apache.arrow.vector.types.pojo.FieldType; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import 
org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; -@RunWith(Parameterized.class) public class BigIntToTimestampLTZConverterTest extends BaseConverterTest { - @Parameterized.Parameters - public static Object[][] data() { - return new Object[][] { - {"UTC"}, - {"America/Los_Angeles"}, - {"America/New_York"}, - {"Pacific/Honolulu"}, - {"Asia/Singapore"}, - {"MEZ"}, - {"MESZ"} - }; - } - - public BigIntToTimestampLTZConverterTest(String tz) { - System.setProperty("user.timezone", tz); - } /** allocator for arrow */ private BufferAllocator allocator = new RootAllocator(Long.MAX_VALUE); @@ -59,8 +42,10 @@ public BigIntToTimestampLTZConverterTest(String tz) { private int oldScale = 9; - @Test - public void testTimestampLTZ() throws SFException { + @ParameterizedTest + @ArgumentsSource(TimezoneProvider.class) + public void testTimestampLTZ(String timezone) throws SFException { + System.setProperty("user.timezone", timezone); // test old and new dates long[] testTimestampsInt64 = { 1546391837, diff --git a/src/test/java/net/snowflake/client/core/arrow/BigIntToTimestampNTZConverterTest.java b/src/test/java/net/snowflake/client/core/arrow/BigIntToTimestampNTZConverterTest.java index df4370641..6f2c0420d 100644 --- a/src/test/java/net/snowflake/client/core/arrow/BigIntToTimestampNTZConverterTest.java +++ b/src/test/java/net/snowflake/client/core/arrow/BigIntToTimestampNTZConverterTest.java @@ -4,18 +4,21 @@ package net.snowflake.client.core.arrow; +import static net.snowflake.client.providers.ProvidersUtil.cartesianProduct; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Date; import java.sql.Time; import java.sql.Timestamp; +import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.Random; import java.util.Set; @@ -23,33 +26,32 @@ import net.snowflake.client.TestUtil; import net.snowflake.client.core.ResultUtil; import net.snowflake.client.core.SFException; +import net.snowflake.client.providers.SnowflakeArgumentsProvider; +import net.snowflake.client.providers.TimezoneProvider; import net.snowflake.common.core.SFTimestamp; import org.apache.arrow.memory.BufferAllocator; import org.apache.arrow.memory.RootAllocator; import org.apache.arrow.vector.BigIntVector; import org.apache.arrow.vector.types.Types; import org.apache.arrow.vector.types.pojo.FieldType; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsSource; -@RunWith(Parameterized.class) public class BigIntToTimestampNTZConverterTest extends BaseConverterTest { - @Parameterized.Parameters - public static Object[][] data() { - return new Object[][] { - {"UTC"}, - {"America/Los_Angeles"}, - {"America/New_York"}, - {"Pacific/Honolulu"}, - {"Asia/Singapore"}, - {"MEZ"}, - {"MESZ"} - }; + static class FlagProvider extends SnowflakeArgumentsProvider { + @Override + protected List 
rawArguments(ExtensionContext context) { + return Arrays.asList(Arguments.of(true), Arguments.of(false)); + } } - public BigIntToTimestampNTZConverterTest(String tz) { - System.setProperty("user.timezone", tz); + static class DataProvider extends SnowflakeArgumentsProvider { + @Override + protected List rawArguments(ExtensionContext context) { + return cartesianProduct(context, new TimezoneProvider(), new FlagProvider()); + } } /** allocator for arrow */ @@ -59,25 +61,18 @@ public BigIntToTimestampNTZConverterTest(String tz) { private int oldScale = 9; - @Test - public void testHonorClientTZForTimestampNTZDisabled() throws SFException { - this.setHonorClientTZForTimestampNTZ(false); - testTimestampNTZ(); - } - - @Test - public void testHonorClientTZForTimestampNTZEnabled() throws SFException { - this.setHonorClientTZForTimestampNTZ(true); - testTimestampNTZ(); - } - - @Test - public void testWithNullTimezone() throws SFException { + @ParameterizedTest + @ArgumentsSource(TimezoneProvider.class) + public void testWithNullTimezone(String tz) throws SFException { + System.setProperty("user.timezone", tz); testTimestampNTZ(null); } - @Test - public void testTimestampNTZ() throws SFException { + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + public void testTimestampNTZ(String tz, boolean flag) throws SFException { + this.setHonorClientTZForTimestampNTZ(flag); + System.setProperty("user.timezone", tz); testTimestampNTZ(TimeZone.getDefault()); } diff --git a/src/test/java/net/snowflake/client/core/arrow/BitToBooleanConverterTest.java b/src/test/java/net/snowflake/client/core/arrow/BitToBooleanConverterTest.java index e5091d6fc..c30bbd0e6 100644 --- a/src/test/java/net/snowflake/client/core/arrow/BitToBooleanConverterTest.java +++ b/src/test/java/net/snowflake/client/core/arrow/BitToBooleanConverterTest.java @@ -3,8 +3,8 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.ArrayList; import java.util.HashMap; @@ -19,7 +19,7 @@ import org.apache.arrow.vector.BitVector; import org.apache.arrow.vector.types.Types; import org.apache.arrow.vector.types.pojo.FieldType; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class BitToBooleanConverterTest extends BaseConverterTest { /** allocator for arrow */ diff --git a/src/test/java/net/snowflake/client/core/arrow/DateConverterTest.java b/src/test/java/net/snowflake/client/core/arrow/DateConverterTest.java index b63ae9a2d..6857394fc 100644 --- a/src/test/java/net/snowflake/client/core/arrow/DateConverterTest.java +++ b/src/test/java/net/snowflake/client/core/arrow/DateConverterTest.java @@ -3,8 +3,8 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Date; import java.util.Arrays; @@ -18,33 +18,20 @@ import net.snowflake.client.TestUtil; import net.snowflake.client.core.SFException; import net.snowflake.client.core.json.DateTimeConverter; +import 
net.snowflake.client.providers.TimezoneProvider; import org.apache.arrow.memory.BufferAllocator; import org.apache.arrow.memory.RootAllocator; import org.apache.arrow.vector.DateDayVector; import org.apache.arrow.vector.types.Types; import org.apache.arrow.vector.types.pojo.FieldType; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; -@RunWith(Parameterized.class) public class DateConverterTest extends BaseConverterTest { - @Parameterized.Parameters - public static Object[][] data() { - return new Object[][] { - {"UTC"}, - {"America/Los_Angeles"}, - {"America/New_York"}, - {"Pacific/Honolulu"}, - {"Asia/Singapore"}, - {"MEZ"}, - {"MESZ"} - }; - } - public DateConverterTest(String tz) { + private static void setTimeZone(String tz) { System.setProperty("user.timezone", tz); } @@ -78,26 +65,28 @@ public DateConverterTest(String tz) { put("America/New_York", Arrays.asList("2016-04-20", -4)); put("Pacific/Honolulu", Arrays.asList("2016-04-20", -10)); put("Asia/Singapore", Arrays.asList("2016-04-19", 8)); - put("MEZ", Arrays.asList("2016-04-20", 0)); - put("MESZ", Arrays.asList("2016-04-20", 0)); + put("CET", Arrays.asList("2016-04-19", 2)); // because of daylight savings + put("GMT+0200", Arrays.asList("2016-04-19", 2)); } }; public static final int MILLIS_IN_ONE_HOUR = 3600000; private TimeZone defaultTimeZone; - @Before + @BeforeEach public void getDefaultTimeZone() { this.defaultTimeZone = TimeZone.getDefault(); } - @After + @AfterEach public void restoreDefaultTimeZone() { TimeZone.setDefault(defaultTimeZone); } - @Test - public void testDate() throws SFException { + @ParameterizedTest + @ArgumentsSource(TimezoneProvider.class) + public void testDate(String tz) throws SFException { + setTimeZone(tz); Map customFieldMeta = new HashMap<>(); customFieldMeta.put("logicalType", "DATE"); Set nullValIndex = new HashSet<>(); @@ -153,8 +142,10 @@ public void testDate() throws SFException { vector.clear(); } - @Test - public void testRandomDates() throws SFException { + @ParameterizedTest + @ArgumentsSource(TimezoneProvider.class) + public void testRandomDates(String tz) throws SFException { + setTimeZone(tz); int dateBound = 50000; int rowCount = 50000; Map customFieldMeta = new HashMap<>(); @@ -196,8 +187,10 @@ public void testRandomDates() throws SFException { } } - @Test - public void testTimezoneDates() throws SFException { + @ParameterizedTest + @ArgumentsSource(TimezoneProvider.class) + public void testTimezoneDates(String tz) throws SFException { + setTimeZone(tz); int testDay = 16911; Map customFieldMeta = new HashMap<>(); customFieldMeta.put("logicalType", "DATE"); @@ -211,7 +204,6 @@ public void testTimezoneDates() throws SFException { // Test JDBC_FORMAT_DATE_WITH_TIMEZONE=TRUE with different session timezones TimeZone.setDefault(TimeZone.getTimeZone("UTC")); - String tz = System.getProperty("user.timezone"); ArrowVectorConverter converter = new DateConverter(vector, 0, this, true); converter.setUseSessionTimezone(true); converter.setSessionTimeZone(TimeZone.getTimeZone(tz)); diff --git a/src/test/java/net/snowflake/client/core/arrow/DoubleToRealConverterTest.java b/src/test/java/net/snowflake/client/core/arrow/DoubleToRealConverterTest.java index b242a2be8..718daa69c 100644 --- 
a/src/test/java/net/snowflake/client/core/arrow/DoubleToRealConverterTest.java +++ b/src/test/java/net/snowflake/client/core/arrow/DoubleToRealConverterTest.java @@ -6,8 +6,8 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.nio.ByteBuffer; import java.util.ArrayList; @@ -25,7 +25,7 @@ import org.apache.arrow.vector.Float8Vector; import org.apache.arrow.vector.types.Types; import org.apache.arrow.vector.types.pojo.FieldType; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class DoubleToRealConverterTest extends BaseConverterTest { /** allocator for arrow */ diff --git a/src/test/java/net/snowflake/client/core/arrow/IntToFixedConverterTest.java b/src/test/java/net/snowflake/client/core/arrow/IntToFixedConverterTest.java index c11d8275d..fc4db1875 100644 --- a/src/test/java/net/snowflake/client/core/arrow/IntToFixedConverterTest.java +++ b/src/test/java/net/snowflake/client/core/arrow/IntToFixedConverterTest.java @@ -7,9 +7,9 @@ import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.math.BigDecimal; import java.nio.ByteBuffer; @@ -29,7 +29,7 @@ import org.apache.arrow.vector.IntVector; import org.apache.arrow.vector.types.Types; import org.apache.arrow.vector.types.pojo.FieldType; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class IntToFixedConverterTest extends BaseConverterTest { /** allocator for arrow */ diff --git a/src/test/java/net/snowflake/client/core/arrow/IntToTimeConverterTest.java b/src/test/java/net/snowflake/client/core/arrow/IntToTimeConverterTest.java index 92c560db3..1f4bd955f 100644 --- a/src/test/java/net/snowflake/client/core/arrow/IntToTimeConverterTest.java +++ b/src/test/java/net/snowflake/client/core/arrow/IntToTimeConverterTest.java @@ -8,10 +8,9 @@ import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; -import java.nio.ByteBuffer; import java.sql.Time; import java.util.HashMap; import java.util.HashSet; @@ -22,46 +21,31 @@ import net.snowflake.client.core.ResultUtil; import net.snowflake.client.core.SFException; import net.snowflake.client.core.SFSession; +import net.snowflake.client.providers.TimezoneProvider; import org.apache.arrow.memory.BufferAllocator; import org.apache.arrow.memory.RootAllocator; import org.apache.arrow.vector.IntVector; import org.apache.arrow.vector.types.Types; import org.apache.arrow.vector.types.pojo.FieldType; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import 
org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; -@RunWith(Parameterized.class) public class IntToTimeConverterTest extends BaseConverterTest { - @Parameterized.Parameters - public static Object[][] data() { - return new Object[][] { - {"UTC"}, - {"America/Los_Angeles"}, - {"America/New_York"}, - {"Pacific/Honolulu"}, - {"Asia/Singapore"}, - {"MEZ"}, - {"MESZ"} - }; - } - - private ByteBuffer bb; - - public IntToTimeConverterTest(String tz) { - System.setProperty("user.timezone", tz); - this.setScale(scale); - } - /** allocator for arrow */ private BufferAllocator allocator = new RootAllocator(Long.MAX_VALUE); private Random random = new Random(); + public IntToTimeConverterTest() { + this.setScale(scale); + } + private int scale = 3; - @Test - public void testTime() throws SFException { + @ParameterizedTest + @ArgumentsSource(TimezoneProvider.class) + public void testTime(String timezone) throws SFException { + System.setProperty("user.timezone", timezone); // test old and new dates int[] testTimesInt = {12345678}; diff --git a/src/test/java/net/snowflake/client/core/arrow/SmallIntToFixedConverterTest.java b/src/test/java/net/snowflake/client/core/arrow/SmallIntToFixedConverterTest.java index 5513a420b..d37b005b1 100644 --- a/src/test/java/net/snowflake/client/core/arrow/SmallIntToFixedConverterTest.java +++ b/src/test/java/net/snowflake/client/core/arrow/SmallIntToFixedConverterTest.java @@ -7,9 +7,9 @@ import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.math.BigDecimal; import java.nio.ByteBuffer; @@ -29,7 +29,7 @@ import org.apache.arrow.vector.SmallIntVector; import org.apache.arrow.vector.types.Types; import org.apache.arrow.vector.types.pojo.FieldType; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class SmallIntToFixedConverterTest extends BaseConverterTest { /** allocator for arrow */ diff --git a/src/test/java/net/snowflake/client/core/arrow/ThreeFieldStructToTimestampTZConverterTest.java b/src/test/java/net/snowflake/client/core/arrow/ThreeFieldStructToTimestampTZConverterTest.java index 10721fbc1..09cd4a587 100644 --- a/src/test/java/net/snowflake/client/core/arrow/ThreeFieldStructToTimestampTZConverterTest.java +++ b/src/test/java/net/snowflake/client/core/arrow/ThreeFieldStructToTimestampTZConverterTest.java @@ -4,15 +4,17 @@ package net.snowflake.client.core.arrow; +import static java.util.stream.Stream.concat; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Date; import java.sql.Time; import java.sql.Timestamp; +import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; @@ -20,6 +22,7 @@ import java.util.Map; import java.util.Random; import java.util.Set; +import java.util.stream.Stream; import 
net.snowflake.client.TestUtil; import net.snowflake.client.core.ResultUtil; import net.snowflake.client.core.SFException; @@ -33,29 +36,73 @@ import org.apache.arrow.vector.types.Types; import org.apache.arrow.vector.types.pojo.Field; import org.apache.arrow.vector.types.pojo.FieldType; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsProvider; +import org.junit.jupiter.params.provider.ArgumentsSource; -@RunWith(Parameterized.class) public class ThreeFieldStructToTimestampTZConverterTest extends BaseConverterTest { - @Parameterized.Parameters - public static Object[][] data() { - return new Object[][] { - {"UTC"}, - {"America/Los_Angeles"}, - {"America/New_York"}, - {"Pacific/Honolulu"}, - {"Asia/Singapore"}, - {"MEZ"}, - {"MESZ"} - }; + private static class TimezoneProvider implements ArgumentsProvider { + @Override + public Stream provideArguments(ExtensionContext context) throws Exception { + List timezones = + new ArrayList() { + { + add("America/Los_Angeles"); + add("America/New_York"); + add("Pacific/Honolulu"); + add("Asia/Singapore"); + add("MESZ"); + add("MEZ"); + add("UTC"); + } + }; + + Stream args = Stream.empty(); + + for (String timezone : timezones) { + args = + concat( + args, + Stream.of( + Arguments.argumentSet( + timezone, + timezone, + new long[] {1546391837, 1546391837, 0, 123, -12346, -12345}, + new int[] {0, 10, 100, 456, 876543211, 0}, + new int[] {960, 1440, 960, 960, 1440, 1440}, + new String[] { + "1546391837.000000000 960", + "1546391837.000000010 1440", + "0.000000100 960", + "123.000000456 960", + "-12345.123456789 1440", + "-12345.000000000 1440" + }), + Arguments.argumentSet( + timezone + " Overflow", + timezone, + new long[] {1546391837}, + new int[] {0}, + new int[] {960}, + new String[] {"1546391837.000000000 960"}))); + } + + return args; + } } - public ThreeFieldStructToTimestampTZConverterTest(String tz) { + private static void setTimezone(String tz) { System.setProperty("user.timezone", tz); } + @AfterAll + public static void clearTimezone() { + System.clearProperty("user.timezone"); + } + /** allocator for arrow */ private BufferAllocator allocator = new RootAllocator(Long.MAX_VALUE); @@ -63,42 +110,16 @@ public ThreeFieldStructToTimestampTZConverterTest(String tz) { private int oldScale = 9; - @Test - public void simpleTest() throws SFException { - // test old and new dates - long[] testSecondsInt64 = {1546391837, 1546391837, 0, 123, -12346, -12345}; - - int[] testNanos = {0, 10, 100, 456, 876543211, 0}; - - int[] testTimeZoneIndices = {960, 1440, 960, 960, 1440, 1440}; - - String[] testTimesJson = { - "1546391837.000000000 960", - "1546391837.000000010 1440", - "0.000000100 960", - "123.000000456 960", - "-12345.123456789 1440", - "-12345.000000000 1440" - }; - testTimestampTZ(testSecondsInt64, testNanos, testTimeZoneIndices, testTimesJson); - } - - @Test - public void timestampOverflowTest() throws SFException { - // test old and new dates - long[] testSecondsInt64 = {1546391837}; - - int[] testNanos = {0}; - - int[] testTimeZoneIndices = {960}; - - String[] testTimesJson = {"1546391837.000000000 960"}; - testTimestampTZ(testSecondsInt64, testNanos, testTimeZoneIndices, testTimesJson); - } - + @ParameterizedTest + 
@ArgumentsSource(TimezoneProvider.class) public void testTimestampTZ( - long[] testSecondsInt64, int[] testNanos, int[] testTimeZoneIndices, String[] testTimesJson) + String tz, + long[] testSecondsInt64, + int[] testNanos, + int[] testTimeZoneIndices, + String[] testTimesJson) throws SFException { + setTimezone(tz); Map customFieldMeta = new HashMap<>(); customFieldMeta.put("logicalType", "TIMESTAMP"); diff --git a/src/test/java/net/snowflake/client/core/arrow/TinyIntToFixedConverterTest.java b/src/test/java/net/snowflake/client/core/arrow/TinyIntToFixedConverterTest.java index 8000ec885..8a1e9b359 100644 --- a/src/test/java/net/snowflake/client/core/arrow/TinyIntToFixedConverterTest.java +++ b/src/test/java/net/snowflake/client/core/arrow/TinyIntToFixedConverterTest.java @@ -6,9 +6,9 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.math.BigDecimal; import java.util.ArrayList; @@ -27,7 +27,7 @@ import org.apache.arrow.vector.TinyIntVector; import org.apache.arrow.vector.types.Types; import org.apache.arrow.vector.types.pojo.FieldType; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class TinyIntToFixedConverterTest extends BaseConverterTest { /** allocator for arrow */ diff --git a/src/test/java/net/snowflake/client/core/arrow/TwoFieldStructToTimestampLTZConverterTest.java b/src/test/java/net/snowflake/client/core/arrow/TwoFieldStructToTimestampLTZConverterTest.java index 8ce93fb6a..4fd4f07f3 100644 --- a/src/test/java/net/snowflake/client/core/arrow/TwoFieldStructToTimestampLTZConverterTest.java +++ b/src/test/java/net/snowflake/client/core/arrow/TwoFieldStructToTimestampLTZConverterTest.java @@ -4,15 +4,17 @@ package net.snowflake.client.core.arrow; +import static java.util.stream.Stream.concat; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Date; import java.sql.Time; import java.sql.Timestamp; +import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; @@ -20,6 +22,7 @@ import java.util.Map; import java.util.Random; import java.util.Set; +import java.util.stream.Stream; import net.snowflake.client.TestUtil; import net.snowflake.client.core.ResultUtil; import net.snowflake.client.core.SFException; @@ -33,29 +36,70 @@ import org.apache.arrow.vector.types.Types; import org.apache.arrow.vector.types.pojo.Field; import org.apache.arrow.vector.types.pojo.FieldType; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsProvider; +import org.junit.jupiter.params.provider.ArgumentsSource; 
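For orientation, a minimal sketch (not part of the patch) of the JUnit 5 parameterized-test shape these hunks migrate to: the JUnit 4 @RunWith(Parameterized.class) runner with constructor injection is replaced by @ParameterizedTest plus an ArgumentsProvider. The class name, provider name, and timezone values below are illustrative only, not taken from the driver sources.

import java.util.stream.Stream;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.ArgumentsProvider;
import org.junit.jupiter.params.provider.ArgumentsSource;

public class ExampleTimezoneConverterTest {

  // Replaces the JUnit 4 @Parameterized.Parameters factory: each Arguments entry
  // produces one invocation of the annotated test method.
  static class TimezoneProvider implements ArgumentsProvider {
    @Override
    public Stream<? extends Arguments> provideArguments(ExtensionContext context) {
      return Stream.of("UTC", "America/Los_Angeles", "Asia/Singapore").map(Arguments::of);
    }
  }

  // Replaces the JUnit 4 runner's constructor parameter: the timezone now arrives
  // as a method argument on every parameterized invocation.
  @ParameterizedTest
  @ArgumentsSource(TimezoneProvider.class)
  void convertsInEveryTimezone(String timezone) {
    System.setProperty("user.timezone", timezone);
    // ... exercise the converter under test here ...
  }
}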
-@RunWith(Parameterized.class) public class TwoFieldStructToTimestampLTZConverterTest extends BaseConverterTest { - @Parameterized.Parameters - public static Object[][] data() { - return new Object[][] { - {"UTC"}, - {"America/Los_Angeles"}, - {"America/New_York"}, - {"Pacific/Honolulu"}, - {"Asia/Singapore"}, - {"MEZ"}, - {"MESZ"} - }; + + static class DataProvider implements ArgumentsProvider { + @Override + public Stream provideArguments(ExtensionContext context) throws Exception { + List timezones = + new ArrayList() { + { + add("America/Los_Angeles"); + add("America/New_York"); + add("Pacific/Honolulu"); + add("Asia/Singapore"); + add("MESZ"); + add("MEZ"); + add("UTC"); + } + }; + + Stream args = Stream.empty(); + + for (String timezone : timezones) { + args = + concat( + args, + Stream.of( + Arguments.argumentSet( + timezone, + timezone, + new long[] {1546391837, 0, -1546391838, -1546391838, -1546391838}, + new int[] {0, 1, 999999990, 876543211, 1}, + new String[] { + "1546391837.000000000", + "0.000000001", + "-1546391837.000000010", + "-1546391837.123456789", + "-1546391837.999999999" + }), + Arguments.argumentSet( + timezone + " Overflow", + timezone, + new long[] {154639183700000L}, + new int[] {0}, + new String[] {"154639183700000.000000000"}))); + } + return args; + } } - public TwoFieldStructToTimestampLTZConverterTest(String tz) { + private static void setTimezone(String tz) { System.setProperty("user.timezone", tz); } + @AfterAll + public static void clearTimezone() { + System.clearProperty("user.timezone"); + } + /** allocator for arrow */ private BufferAllocator allocator = new RootAllocator(Long.MAX_VALUE); @@ -63,37 +107,13 @@ public TwoFieldStructToTimestampLTZConverterTest(String tz) { private int oldScale = 9; - @Test - public void simpleTests() throws SFException { - // test old and new dates - long[] testSecondsInt64 = {1546391837, 0, -1546391838, -1546391838, -1546391838}; - - int[] testNanoSecs = {0, 1, 999999990, 876543211, 1}; - - String[] testTimesJson = { - "1546391837.000000000", - "0.000000001", - "-1546391837.000000010", - "-1546391837.123456789", - "-1546391837.999999999" - }; - testTimestampLTZ(testSecondsInt64, testNanoSecs, testTimesJson); - } - - @Test - public void timestampOverflowTests() throws SFException { - // test old and new dates - long[] testSecondsInt64 = {154639183700000l}; - - int[] testNanoSecs = {0}; - - String[] testTimesJson = {"154639183700000.000000000"}; - testTimestampLTZ(testSecondsInt64, testNanoSecs, testTimesJson); - } - - public void testTimestampLTZ(long[] testSecondsInt64, int[] testNanoSecs, String[] testTimesJson) + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + public void testTimestampLTZ( + String timezone, long[] testSecondsInt64, int[] testNanoSecs, String[] testTimesJson) throws SFException { + setTimezone(timezone); Map customFieldMeta = new HashMap<>(); customFieldMeta.put("logicalType", "TIMESTAMP"); Set nullValIndex = new HashSet<>(); diff --git a/src/test/java/net/snowflake/client/core/arrow/TwoFieldStructToTimestampNTZConverterTest.java b/src/test/java/net/snowflake/client/core/arrow/TwoFieldStructToTimestampNTZConverterTest.java index 2b5bf0e16..3b84176e4 100644 --- a/src/test/java/net/snowflake/client/core/arrow/TwoFieldStructToTimestampNTZConverterTest.java +++ b/src/test/java/net/snowflake/client/core/arrow/TwoFieldStructToTimestampNTZConverterTest.java @@ -4,15 +4,17 @@ package net.snowflake.client.core.arrow; +import static java.util.stream.Stream.concat; import static 
org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Date; import java.sql.Time; import java.sql.Timestamp; +import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; @@ -21,6 +23,7 @@ import java.util.Random; import java.util.Set; import java.util.TimeZone; +import java.util.stream.Stream; import net.snowflake.client.TestUtil; import net.snowflake.client.core.ResultUtil; import net.snowflake.client.core.SFException; @@ -33,29 +36,24 @@ import org.apache.arrow.vector.types.Types; import org.apache.arrow.vector.types.pojo.Field; import org.apache.arrow.vector.types.pojo.FieldType; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsProvider; +import org.junit.jupiter.params.provider.ArgumentsSource; -@RunWith(Parameterized.class) public class TwoFieldStructToTimestampNTZConverterTest extends BaseConverterTest { - @Parameterized.Parameters - public static Object[][] data() { - return new Object[][] { - {"UTC"}, - {"America/Los_Angeles"}, - {"America/New_York"}, - {"Pacific/Honolulu"}, - {"Asia/Singapore"}, - {"MEZ"}, - {"MESZ"} - }; - } - public TwoFieldStructToTimestampNTZConverterTest(String tz) { + private static void setTimezone(String tz) { System.setProperty("user.timezone", tz); } + @AfterAll + public static void clearTimezone() { + System.clearProperty("user.timezone"); + } + /** allocator for arrow */ private BufferAllocator allocator = new RootAllocator(Long.MAX_VALUE); @@ -63,56 +61,80 @@ public TwoFieldStructToTimestampNTZConverterTest(String tz) { private int oldScale = 9; - @Test - public void timestampOverflowTest() throws SFException { - // test old and new dates - long[] testSecondsInt64 = {154639183700000l}; - - int[] testNanoSecs = {0}; - - String[] testTimesJson = {"154639183700000.000000000"}; - this.setHonorClientTZForTimestampNTZ(false); - testTimestampNTZ(testSecondsInt64, testNanoSecs, testTimesJson); - } - - @Test - public void testHonorClientTZForTimestampNTZDisabled() throws SFException { - // test old and new dates - long[] testSecondsInt64 = {1546391837, 0, -1546391838, -1546391838, -1546391838}; - - int[] testNanoSecs = {0, 1, 999999990, 876543211, 1}; - - String[] testTimesJson = { - "1546391837.000000000", - "0.000000001", - "-1546391837.000000010", - "-1546391837.123456789", - "-1546391837.999999999" - }; - this.setHonorClientTZForTimestampNTZ(false); - testTimestampNTZ(testSecondsInt64, testNanoSecs, testTimesJson); - } + static class DataProvider implements ArgumentsProvider { + + @Override + public Stream provideArguments(ExtensionContext context) throws Exception { + List timezones = + new ArrayList() { + { + add("America/Los_Angeles"); + add("America/New_York"); + add("Pacific/Honolulu"); + add("Asia/Singapore"); + add("MESZ"); + add("MEZ"); + add("UTC"); + } + }; + + Stream args = Stream.empty(); + + for (String timezone : timezones) { + args = + concat( + args, + Stream.of( + Arguments.argumentSet( + 
timezone + " Overflow", + timezone, + false, + new long[] {154639183700000L}, + new int[] {0}, + new String[] {"154639183700000.000000000"}), + Arguments.argumentSet( + timezone + " HonorClientTZForTimestampNTZ Disabled", + timezone, + false, + new long[] {1546391837, 0, -1546391838, -1546391838, -1546391838}, + new int[] {0, 1, 999999990, 876543211, 1}, + new String[] { + "1546391837.000000000", + "0.000000001", + "-1546391837.000000010", + "-1546391837.123456789", + "-1546391837.999999999" + }), + Arguments.argumentSet( + timezone + " HonorClientTZForTimestampNTZ Enabled", + timezone, + true, + new long[] {1546391837, 1546391837, 1546391837, 1546391837, 1546391837}, + new int[] {0, 1, 10, 100, 999999999}, + new String[] { + "1546391837.000000000", + "1546391837.000000001", + "1546391837.000000010", + "1546391837.000000100", + "1546391837.999999999" + }))); + } - @Test - public void testHonorClientTZForTimestampNTZEnabled() throws SFException { - // test old and new dates - long[] testSecondsInt64 = {1546391837, 1546391837, 1546391837, 1546391837, 1546391837}; - - int[] testNanoSecs = {0, 1, 10, 100, 999999999}; - - String[] testTimesJson = { - "1546391837.000000000", - "1546391837.000000001", - "1546391837.000000010", - "1546391837.000000100", - "1546391837.999999999" - }; - this.setHonorClientTZForTimestampNTZ(true); - testTimestampNTZ(testSecondsInt64, testNanoSecs, testTimesJson); + return args; + } } - public void testTimestampNTZ(long[] testSecondsInt64, int[] testNanoSecs, String[] testTimesJson) + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + public void testTimestampNTZ( + String timezone, + boolean honorClientTZForTimestampNTZ, + long[] testSecondsInt64, + int[] testNanoSecs, + String[] testTimesJson) throws SFException { + this.setHonorClientTZForTimestampNTZ(honorClientTZForTimestampNTZ); + setTimezone(timezone); Map customFieldMeta = new HashMap<>(); customFieldMeta.put("logicalType", "TIMESTAMP"); diff --git a/src/test/java/net/snowflake/client/core/arrow/TwoFieldStructToTimestampTZConverterTest.java b/src/test/java/net/snowflake/client/core/arrow/TwoFieldStructToTimestampTZConverterTest.java index 742b82751..767938d06 100644 --- a/src/test/java/net/snowflake/client/core/arrow/TwoFieldStructToTimestampTZConverterTest.java +++ b/src/test/java/net/snowflake/client/core/arrow/TwoFieldStructToTimestampTZConverterTest.java @@ -7,8 +7,8 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Date; import java.sql.Time; @@ -24,6 +24,7 @@ import net.snowflake.client.core.ResultUtil; import net.snowflake.client.core.SFException; import net.snowflake.client.jdbc.SnowflakeUtil; +import net.snowflake.client.providers.TimezoneProvider; import net.snowflake.common.core.SFTimestamp; import org.apache.arrow.memory.BufferAllocator; import org.apache.arrow.memory.RootAllocator; @@ -33,27 +34,18 @@ import org.apache.arrow.vector.types.Types; import org.apache.arrow.vector.types.pojo.Field; import org.apache.arrow.vector.types.pojo.FieldType; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.params.ParameterizedTest; +import 
org.junit.jupiter.params.provider.ArgumentsSource; -@RunWith(Parameterized.class) public class TwoFieldStructToTimestampTZConverterTest extends BaseConverterTest { - @Parameterized.Parameters - public static Object[][] data() { - return new Object[][] { - {"UTC"}, - {"America/Los_Angeles"}, - {"America/New_York"}, - {"Pacific/Honolulu"}, - {"Asia/Singapore"}, - {"MEZ"}, - {"MESZ"} - }; + public static void setTimezone(String tz) { + System.setProperty("user.timezone", tz); } - public TwoFieldStructToTimestampTZConverterTest(String tz) { - System.setProperty("user.timezone", tz); + @AfterAll + public static void clearTimezone() { + System.clearProperty("user.timezone"); } /** allocator for arrow */ @@ -63,8 +55,10 @@ public TwoFieldStructToTimestampTZConverterTest(String tz) { private int oldScale = 9; - @Test - public void testTimestampTZ() throws SFException { + @ParameterizedTest + @ArgumentsSource(TimezoneProvider.class) + public void testTimestampTZ(String tz) throws SFException { + setTimezone(tz); // test old and new dates long[] testEpochesInt64 = {1546391837, 1546391837, 0, 123, -12345, -12345678}; diff --git a/src/test/java/net/snowflake/client/core/arrow/VarBinaryToBinaryConverterTest.java b/src/test/java/net/snowflake/client/core/arrow/VarBinaryToBinaryConverterTest.java index b6ea49f05..231df247c 100644 --- a/src/test/java/net/snowflake/client/core/arrow/VarBinaryToBinaryConverterTest.java +++ b/src/test/java/net/snowflake/client/core/arrow/VarBinaryToBinaryConverterTest.java @@ -6,8 +6,8 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.ArrayList; import java.util.Base64; @@ -25,7 +25,7 @@ import org.apache.arrow.vector.types.Types; import org.apache.arrow.vector.types.pojo.FieldType; import org.apache.commons.lang3.RandomStringUtils; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class VarBinaryToBinaryConverterTest extends BaseConverterTest { /** allocator for arrow */ diff --git a/src/test/java/net/snowflake/client/core/arrow/VarCharConverterTest.java b/src/test/java/net/snowflake/client/core/arrow/VarCharConverterTest.java index 6569c0309..692e171d0 100644 --- a/src/test/java/net/snowflake/client/core/arrow/VarCharConverterTest.java +++ b/src/test/java/net/snowflake/client/core/arrow/VarCharConverterTest.java @@ -6,8 +6,8 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.nio.charset.StandardCharsets; import java.sql.Date; @@ -27,7 +27,7 @@ import org.apache.arrow.vector.types.Types; import org.apache.arrow.vector.types.pojo.FieldType; import org.apache.commons.lang3.RandomStringUtils; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class VarCharConverterTest extends BaseConverterTest { /** allocator for arrow */ diff --git a/src/test/java/net/snowflake/client/core/bind/BindExceptionTest.java b/src/test/java/net/snowflake/client/core/bind/BindExceptionTest.java index f3ae88eee..d50118ff8 100644 
--- a/src/test/java/net/snowflake/client/core/bind/BindExceptionTest.java +++ b/src/test/java/net/snowflake/client/core/bind/BindExceptionTest.java @@ -1,9 +1,9 @@ package net.snowflake.client.core.bind; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import net.snowflake.client.jdbc.telemetry.TelemetryField; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class BindExceptionTest { diff --git a/src/test/java/net/snowflake/client/core/json/BooleanConverterTest.java b/src/test/java/net/snowflake/client/core/json/BooleanConverterTest.java index 2162d651a..292c3862f 100644 --- a/src/test/java/net/snowflake/client/core/json/BooleanConverterTest.java +++ b/src/test/java/net/snowflake/client/core/json/BooleanConverterTest.java @@ -2,10 +2,11 @@ import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; +import static org.junit.jupiter.api.Assertions.assertThrows; import java.sql.Types; import net.snowflake.client.core.SFException; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class BooleanConverterTest { private final BooleanConverter booleanConverter = new BooleanConverter(); @@ -44,8 +45,8 @@ public void testConvertString() throws SFException { assertThat(booleanConverter.getBoolean("FALSE", Types.CHAR), equalTo(false)); } - @Test(expected = SFException.class) - public void testConvertOtherType() throws SFException { - booleanConverter.getBoolean("1", Types.BINARY); + @Test + public void testConvertOtherType() { + assertThrows(SFException.class, () -> booleanConverter.getBoolean("1", Types.BINARY)); } } diff --git a/src/test/java/net/snowflake/client/core/json/BytesConverterTest.java b/src/test/java/net/snowflake/client/core/json/BytesConverterTest.java index 47e898486..3f7956ad7 100644 --- a/src/test/java/net/snowflake/client/core/json/BytesConverterTest.java +++ b/src/test/java/net/snowflake/client/core/json/BytesConverterTest.java @@ -1,6 +1,6 @@ package net.snowflake.client.core.json; -import static org.junit.Assert.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; import java.math.BigInteger; import java.nio.ByteBuffer; @@ -8,7 +8,7 @@ import net.snowflake.client.core.SFException; import net.snowflake.client.core.SFSession; import org.apache.arrow.vector.Float8Vector; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class BytesConverterTest { private final Converters converters = diff --git a/src/test/java/net/snowflake/client/core/json/DateTimeConverterTest.java b/src/test/java/net/snowflake/client/core/json/DateTimeConverterTest.java index 985264f3e..21fe82043 100644 --- a/src/test/java/net/snowflake/client/core/json/DateTimeConverterTest.java +++ b/src/test/java/net/snowflake/client/core/json/DateTimeConverterTest.java @@ -1,7 +1,7 @@ package net.snowflake.client.core.json; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; import java.sql.Date; import java.sql.Time; @@ -15,7 +15,7 @@ import net.snowflake.client.core.SFException; import net.snowflake.client.core.SFSession; import net.snowflake.client.jdbc.SnowflakeUtil; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class DateTimeConverterTest { private final TimeZone honoluluTimeZone = diff --git a/src/test/java/net/snowflake/client/core/json/NumberConverterTest.java 
b/src/test/java/net/snowflake/client/core/json/NumberConverterTest.java index c37573b72..41f6460b4 100644 --- a/src/test/java/net/snowflake/client/core/json/NumberConverterTest.java +++ b/src/test/java/net/snowflake/client/core/json/NumberConverterTest.java @@ -6,7 +6,7 @@ import java.math.BigDecimal; import java.sql.Types; import net.snowflake.client.core.SFException; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class NumberConverterTest { private final NumberConverter numberConverter = new NumberConverter(); diff --git a/src/test/java/net/snowflake/client/core/json/StringConverterTest.java b/src/test/java/net/snowflake/client/core/json/StringConverterTest.java index 5fe3dd2cb..d2ddb3eee 100644 --- a/src/test/java/net/snowflake/client/core/json/StringConverterTest.java +++ b/src/test/java/net/snowflake/client/core/json/StringConverterTest.java @@ -1,6 +1,6 @@ package net.snowflake.client.core.json; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.mockito.Mockito.mock; import java.sql.Types; @@ -12,8 +12,8 @@ import net.snowflake.client.jdbc.SnowflakeUtil; import net.snowflake.common.core.SFBinaryFormat; import net.snowflake.common.core.SnowflakeDateTimeFormat; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; public class StringConverterTest { private final TimeZone honoluluTimeZone = @@ -24,7 +24,7 @@ public class StringConverterTest { private StringConverter stringConverter; - @Before + @BeforeEach public void init() { SnowflakeDateTimeFormat timestampNTZFormatter = SnowflakeDateTimeFormat.fromSqlFormat("YYYY-MM-DD HH24:MI:SS.FF3"); diff --git a/src/test/java/net/snowflake/client/jdbc/ArrowResultChunkTest.java b/src/test/java/net/snowflake/client/jdbc/ArrowResultChunkTest.java index 2c37ddf5d..59e2b30a2 100644 --- a/src/test/java/net/snowflake/client/jdbc/ArrowResultChunkTest.java +++ b/src/test/java/net/snowflake/client/jdbc/ArrowResultChunkTest.java @@ -6,7 +6,7 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class ArrowResultChunkTest { @Test diff --git a/src/test/java/net/snowflake/client/jdbc/BaseJDBCTest.java b/src/test/java/net/snowflake/client/jdbc/BaseJDBCTest.java index a326dea12..c1abedf68 100644 --- a/src/test/java/net/snowflake/client/jdbc/BaseJDBCTest.java +++ b/src/test/java/net/snowflake/client/jdbc/BaseJDBCTest.java @@ -3,9 +3,9 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; import java.io.InputStream; diff --git a/src/test/java/net/snowflake/client/jdbc/BaseJDBCWithSharedConnectionIT.java b/src/test/java/net/snowflake/client/jdbc/BaseJDBCWithSharedConnectionIT.java index 5602bffca..f05d45afe 100644 --- a/src/test/java/net/snowflake/client/jdbc/BaseJDBCWithSharedConnectionIT.java +++ b/src/test/java/net/snowflake/client/jdbc/BaseJDBCWithSharedConnectionIT.java @@ -2,22 +2,29 @@ import java.sql.Connection; import java.sql.SQLException; -import org.junit.AfterClass; -import org.junit.BeforeClass; +import java.sql.Statement; +import 
org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; public class BaseJDBCWithSharedConnectionIT extends BaseJDBCTest { protected static Connection connection; - @BeforeClass + @BeforeAll public static void setUpConnection() throws SQLException { connection = getConnection(); } - @AfterClass + @AfterAll public static void closeConnection() throws SQLException { if (connection != null && !connection.isClosed()) { connection.close(); } } + + public Statement createStatement(String queryResultFormat) throws SQLException { + Statement stmt = connection.createStatement(); + stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); + return stmt; + } } diff --git a/src/test/java/net/snowflake/client/jdbc/BaseWiremockTest.java b/src/test/java/net/snowflake/client/jdbc/BaseWiremockTest.java index 5a2fe8e96..08069b95c 100644 --- a/src/test/java/net/snowflake/client/jdbc/BaseWiremockTest.java +++ b/src/test/java/net/snowflake/client/jdbc/BaseWiremockTest.java @@ -1,12 +1,12 @@ package net.snowflake.client.jdbc; -import static junit.framework.TestCase.assertEquals; import static net.snowflake.client.AbstractDriverIT.getConnectionParameters; +import static net.snowflake.client.AssumptionUtils.assumeNotRunningOnGithubActionsMac; +import static net.snowflake.client.AssumptionUtils.assumeNotRunningOnJava21; +import static net.snowflake.client.AssumptionUtils.assumeNotRunningOnJava8; import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty; import static org.awaitility.Awaitility.await; -import static org.junit.Assume.assumeFalse; -import static org.junit.Assume.assumeNoException; -import static org.junit.Assume.assumeTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.File; import java.io.IOException; @@ -16,9 +16,6 @@ import java.time.Duration; import java.util.Map; import java.util.Properties; -import net.snowflake.client.RunningNotOnGithubActionsMac; -import net.snowflake.client.RunningNotOnJava21; -import net.snowflake.client.RunningNotOnJava8; import net.snowflake.client.core.HttpUtil; import net.snowflake.client.log.SFLogger; import net.snowflake.client.log.SFLoggerFactory; @@ -28,11 +25,12 @@ import org.apache.http.entity.StringEntity; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.BeforeClass; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assumptions; +import org.junit.jupiter.api.BeforeAll; -public abstract class BaseWiremockTest { +abstract class BaseWiremockTest { protected static final SFLogger logger = SFLoggerFactory.getLogger(BaseWiremockTest.class); protected static final String WIREMOCK_HOME_DIR = ".wiremock"; @@ -45,27 +43,25 @@ public abstract class BaseWiremockTest { private static String originalTrustStorePath; protected static Process wiremockStandalone; - @BeforeClass + @BeforeAll public static void setUpClass() { - assumeFalse(RunningNotOnJava8.isRunningOnJava8()); - assumeFalse(RunningNotOnJava21.isRunningOnJava21()); - assumeFalse( - RunningNotOnGithubActionsMac - .isRunningOnGithubActionsMac()); // disabled until issue with access to localhost + assumeNotRunningOnJava8(); + assumeNotRunningOnJava21(); + assumeNotRunningOnGithubActionsMac(); // disabled until issue with access to localhost // (https://github.com/snowflakedb/snowflake-jdbc/pull/1807#discussion_r1686229430) is fixed on // 
github actions mac image. Ticket to enable when fixed: SNOW-1555950 originalTrustStorePath = systemGetProperty(TRUST_STORE_PROPERTY); startWiremockStandAlone(); } - @After + @AfterEach public void tearDown() { restoreTrustStorePathProperty(); resetWiremock(); HttpUtil.httpClient.clear(); } - @AfterClass + @AfterAll public static void tearDownClass() { stopWiremockStandAlone(); } @@ -225,10 +221,10 @@ protected void importMapping(String mappingImport) { HttpPost request = createWiremockPostRequest(mappingImport, "/__admin/mappings/import"); try (CloseableHttpClient httpClient = HttpClients.createDefault(); CloseableHttpResponse response = httpClient.execute(request)) { - assumeTrue(response.getStatusLine().getStatusCode() == 200); + Assumptions.assumeTrue(response.getStatusLine().getStatusCode() == 200); } catch (Exception e) { logger.error("Importing mapping failed", e); - assumeNoException(e); + Assumptions.abort("Importing mapping failed"); } } diff --git a/src/test/java/net/snowflake/client/jdbc/BindUploaderIT.java b/src/test/java/net/snowflake/client/jdbc/BindUploaderIT.java index dec8bd6aa..80bd20724 100644 --- a/src/test/java/net/snowflake/client/jdbc/BindUploaderIT.java +++ b/src/test/java/net/snowflake/client/jdbc/BindUploaderIT.java @@ -4,9 +4,9 @@ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.math.BigDecimal; import java.sql.Connection; @@ -20,18 +20,18 @@ import java.util.List; import java.util.Map; import java.util.TimeZone; -import net.snowflake.client.category.TestCategoryOthers; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.ParameterBindingDTO; import net.snowflake.client.core.SFSession; import net.snowflake.client.core.bind.BindUploader; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; -import org.junit.experimental.categories.Category; - -@Category(TestCategoryOthers.class) +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; + +@Tag(TestTags.OTHERS) public class BindUploaderIT extends BaseJDBCTest { BindUploader bindUploader; Connection conn; @@ -86,21 +86,21 @@ public class BindUploaderIT extends BaseJDBCTest { + STAGE_DIR + "' ORDER BY $1 ASC"; - @BeforeClass + @BeforeAll public static void classSetUp() throws Exception { Connection connection = getConnection(); connection.createStatement().execute(createTableSQL); connection.close(); } - @AfterClass + @AfterAll public static void classTearDown() throws Exception { Connection connection = getConnection(); connection.createStatement().execute(deleteTableSQL); connection.close(); } - @Before + @BeforeEach public void setUp() throws Exception { conn = getConnection(); session = conn.unwrap(SnowflakeConnectionV1.class).getSfSession(); @@ -109,7 +109,7 @@ public void setUp() throws Exception { TimeZone.setDefault(TimeZone.getTimeZone("UTC")); } - @After + @AfterEach public void tearDown() throws SQLException { conn.close(); bindUploader.close(); diff --git 
a/src/test/java/net/snowflake/client/jdbc/BindUploaderLatestIT.java b/src/test/java/net/snowflake/client/jdbc/BindUploaderLatestIT.java index 41c409d8b..badd3fee0 100644 --- a/src/test/java/net/snowflake/client/jdbc/BindUploaderLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/BindUploaderLatestIT.java @@ -12,8 +12,8 @@ import static net.snowflake.client.jdbc.BindUploaderIT.getBindings; import static net.snowflake.client.jdbc.BindUploaderIT.parseRow; import static net.snowflake.client.jdbc.BindUploaderIT.row1; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; import java.sql.Connection; import java.sql.ResultSet; @@ -21,16 +21,16 @@ import java.sql.Statement; import java.util.Map; import java.util.TimeZone; -import net.snowflake.client.category.TestCategoryOthers; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.ParameterBindingDTO; import net.snowflake.client.core.SFSession; import net.snowflake.client.core.bind.BindUploader; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Bind Uploader tests for the latest JDBC driver. This doesn't work for the oldest supported @@ -38,24 +38,24 @@ * tests still is not applicable. If it is applicable, move tests to BindUploaderIT so that both the * latest and oldest supported driver run the tests. 
*/ -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class BindUploaderLatestIT extends BaseJDBCTest { BindUploader bindUploader; Connection conn; SFSession session; TimeZone prevTimeZone; // store last time zone and restore after tests - @BeforeClass + @BeforeAll public static void classSetUp() throws Exception { BindUploaderIT.classSetUp(); } - @AfterClass + @AfterAll public static void classTearDown() throws Exception { BindUploaderIT.classTearDown(); } - @Before + @BeforeEach public void setUp() throws Exception { conn = getConnection(); session = conn.unwrap(SnowflakeConnectionV1.class).getSfSession(); @@ -64,7 +64,7 @@ public void setUp() throws Exception { TimeZone.setDefault(TimeZone.getTimeZone("UTC")); } - @After + @AfterEach public void tearDown() throws SQLException { conn.close(); bindUploader.close(); diff --git a/src/test/java/net/snowflake/client/jdbc/BindingAndInsertingStructuredTypesLatestIT.java b/src/test/java/net/snowflake/client/jdbc/BindingAndInsertingStructuredTypesLatestIT.java index a408e5d5a..55cdf9996 100644 --- a/src/test/java/net/snowflake/client/jdbc/BindingAndInsertingStructuredTypesLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/BindingAndInsertingStructuredTypesLatestIT.java @@ -3,11 +3,11 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.math.BigDecimal; import java.nio.charset.StandardCharsets; @@ -30,40 +30,22 @@ import java.util.TimeZone; import java.util.stream.Collectors; import java.util.stream.Stream; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryResultSet; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.structs.SnowflakeObjectTypeFactories; import net.snowflake.client.jdbc.structuredtypes.sqldata.AllTypesClass; import net.snowflake.client.jdbc.structuredtypes.sqldata.SimpleClass; -import org.junit.After; -import org.junit.Assume; -import org.junit.Before; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; - -@RunWith(Parameterized.class) -@Category(TestCategoryResultSet.class) +import net.snowflake.client.providers.ResultFormatProvider; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assumptions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; + +@Tag(TestTags.RESULT_SET) public class BindingAndInsertingStructuredTypesLatestIT extends BaseJDBCTest { - - @Parameterized.Parameters(name = "format={0}") - public static Object[][] data() { - return new Object[][] { - {ResultSetFormatType.JSON}, - {ResultSetFormatType.ARROW_WITH_JSON_STRUCTURED_TYPES}, - {ResultSetFormatType.NATIVE_ARROW} - }; - } - - private final 
ResultSetFormatType queryResultFormat; - - public BindingAndInsertingStructuredTypesLatestIT(ResultSetFormatType queryResultFormat) { - this.queryResultFormat = queryResultFormat; - } - - public Connection init() throws SQLException { + public Connection init(ResultSetFormatType queryResultFormat) throws SQLException { Connection conn = BaseJDBCTest.getConnection(BaseJDBCTest.DONT_INJECT_SOCKET_TIMEOUT); try (Statement stmt = conn.createStatement()) { stmt.execute("alter session set ENABLE_STRUCTURED_TYPES_IN_CLIENT_RESPONSE = true"); @@ -84,25 +66,26 @@ public Connection init() throws SQLException { return conn; } - @Before + @BeforeEach public void setup() { SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); SnowflakeObjectTypeFactories.register(AllTypesClass.class, AllTypesClass::new); } - @After + @AfterEach public void clean() { SnowflakeObjectTypeFactories.unregister(SimpleClass.class); SnowflakeObjectTypeFactories.unregister(AllTypesClass.class); } // TODO Structured types feature exists only on QA environments - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testWriteObject() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testWriteObject(ResultSetFormatType queryResultFormat) throws SQLException { SimpleClass sc = new SimpleClass("text1", 2); SimpleClass sc2 = new SimpleClass("text2", 3); - try (Connection connection = init()) { + try (Connection connection = init(queryResultFormat)) { Statement statement = connection.createStatement(); statement.execute( "CREATE OR REPLACE TABLE test_table (ob OBJECT(string varchar, intValue NUMBER))"); @@ -133,11 +116,12 @@ public void testWriteObject() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testWriteNullObject() throws SQLException { - Assume.assumeTrue(queryResultFormat != ResultSetFormatType.NATIVE_ARROW); - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testWriteNullObject(ResultSetFormatType queryResultFormat) throws SQLException { + Assumptions.assumeTrue(queryResultFormat != ResultSetFormatType.NATIVE_ARROW); + try (Connection connection = init(queryResultFormat); Statement statement = connection.createStatement(); SnowflakePreparedStatementV1 stmtement2 = (SnowflakePreparedStatementV1) @@ -158,10 +142,12 @@ public void testWriteNullObject() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testWriteObjectBindingNull() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testWriteObjectBindingNull(ResultSetFormatType queryResultFormat) + throws SQLException { + try (Connection connection = init(queryResultFormat); Statement statement = connection.createStatement(); SnowflakePreparedStatementV1 stmt = (SnowflakePreparedStatementV1) @@ -181,11 +167,12 @@ public void testWriteObjectBindingNull() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testWriteObjectAllTypes() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void 
testWriteObjectAllTypes(ResultSetFormatType queryResultFormat) throws SQLException { TimeZone.setDefault(TimeZone.getTimeZone(ZoneOffset.UTC)); - try (Connection connection = init(); + try (Connection connection = init(queryResultFormat); Statement statement = connection.createStatement(); SnowflakePreparedStatementV1 stmt = (SnowflakePreparedStatementV1) @@ -271,10 +258,11 @@ public static Timestamp toTimestamp(ZonedDateTime dateTime) { return new Timestamp(dateTime.toInstant().getEpochSecond() * 1000L); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testWriteArray() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testWriteArray(ResultSetFormatType queryResultFormat) throws SQLException { + try (Connection connection = init(queryResultFormat); Statement statement = connection.createStatement(); SnowflakePreparedStatementV1 stmt = (SnowflakePreparedStatementV1) @@ -298,10 +286,11 @@ public void testWriteArray() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testWriteArrayNoBinds() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testWriteArrayNoBinds(ResultSetFormatType queryResultFormat) throws SQLException { + try (Connection connection = init(queryResultFormat); Statement statement = connection.createStatement(); SnowflakePreparedStatementV1 stmt = (SnowflakePreparedStatementV1) @@ -322,10 +311,11 @@ public void testWriteArrayNoBinds() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testWriteMapOfSqlData() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testWriteMapOfSqlData(ResultSetFormatType queryResultFormat) throws SQLException { + try (Connection connection = init(queryResultFormat); Statement statement = connection.createStatement(); SnowflakePreparedStatementV1 stmt = (SnowflakePreparedStatementV1) @@ -358,10 +348,11 @@ public void testWriteMapOfSqlData() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testWriteMapOfInteger() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testWriteMapOfInteger(ResultSetFormatType queryResultFormat) throws SQLException { + try (Connection connection = init(queryResultFormat); Statement statement = connection.createStatement(); SnowflakePreparedStatementV1 stmt = (SnowflakePreparedStatementV1) diff --git a/src/test/java/net/snowflake/client/jdbc/BindingDataIT.java b/src/test/java/net/snowflake/client/jdbc/BindingDataIT.java index c2a8bc3ee..86a3b4613 100644 --- a/src/test/java/net/snowflake/client/jdbc/BindingDataIT.java +++ b/src/test/java/net/snowflake/client/jdbc/BindingDataIT.java @@ -6,8 +6,8 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; +import static 
org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; import java.sql.Date; import java.sql.PreparedStatement; @@ -18,21 +18,37 @@ import java.sql.Types; import java.util.Calendar; import java.util.TimeZone; -import net.snowflake.client.category.TestCategoryOthers; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.experimental.theories.DataPoints; -import org.junit.experimental.theories.Theories; -import org.junit.experimental.theories.Theory; -import org.junit.runner.RunWith; +import java.util.stream.Stream; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsProvider; +import org.junit.jupiter.params.provider.ArgumentsSource; +import org.junit.jupiter.params.provider.ValueSource; /** Integration tests for binding variable */ -@RunWith(Theories.class) -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class BindingDataIT extends BaseJDBCWithSharedConnectionIT { - @DataPoints public static short[] shortValues = {0, 1, -1, Short.MIN_VALUE, Short.MAX_VALUE}; + static TimeZone timeZone; - @Theory + @BeforeAll + public static void setTimeZone() { + timeZone = TimeZone.getDefault(); + TimeZone.setDefault(TimeZone.getTimeZone("UTC")); + } + + @AfterAll + public static void resetTimeZone() { + TimeZone.setDefault(timeZone); + } + + @ParameterizedTest + @ValueSource(shorts = {0, 1, -1, Short.MIN_VALUE, Short.MAX_VALUE}) public void testBindShort(short shortValue) throws SQLException { try (Statement statement = connection.createStatement()) { try { @@ -58,7 +74,8 @@ public void testBindShort(short shortValue) throws SQLException { } } - @Theory + @ParameterizedTest + @ValueSource(shorts = {0, 1, -1, Short.MIN_VALUE, Short.MAX_VALUE}) public void testBindShortViaSetObject(short shortValue) throws SQLException { try (Statement statement = connection.createStatement()) { try { @@ -84,9 +101,8 @@ public void testBindShortViaSetObject(short shortValue) throws SQLException { } } - @DataPoints public static int[] intValues = {0, 1, -1, Integer.MAX_VALUE, Integer.MIN_VALUE}; - - @Theory + @ParameterizedTest + @ValueSource(ints = {0, 1, -1, Integer.MIN_VALUE, Integer.MAX_VALUE}) public void testBindInt(int intValue) throws SQLException { try (Statement statement = connection.createStatement()) { try { @@ -113,9 +129,8 @@ public void testBindInt(int intValue) throws SQLException { } } - @DataPoints public static byte[] byteValues = {0, 1, -1, Byte.MAX_VALUE, Byte.MIN_VALUE}; - - @Theory + @ParameterizedTest + @ValueSource(bytes = {0, 1, -1, Byte.MAX_VALUE, Byte.MIN_VALUE}) public void testBindByte(byte byteValue) throws SQLException { try (Statement statement = connection.createStatement()) { try { @@ -184,18 +199,21 @@ public void testBindNull() throws SQLException { } } - @DataPoints - public static Time[] timeValues = { - Time.valueOf("00:00:00"), - Time.valueOf("12:34:56"), - Time.valueOf("12:00:00"), - Time.valueOf("11:59:59"), - Time.valueOf("15:30:00"), - Time.valueOf("13:01:01"), - Time.valueOf("12:00:00"), - }; - - @Theory + static class TimeProvider implements ArgumentsProvider { + @Override + public Stream 
provideArguments(ExtensionContext context) throws Exception { + return Stream.of( + Arguments.of(Time.valueOf("00:00:00")), + Arguments.of(Time.valueOf("12:34:56")), + Arguments.of(Time.valueOf("12:00:00")), + Arguments.of(Time.valueOf("11:59:59")), + Arguments.of(Time.valueOf("15:30:00")), + Arguments.of(Time.valueOf("13:01:01"))); + } + } + + @ParameterizedTest + @ArgumentsSource(TimeProvider.class) public void testBindTime(Time timeVal) throws SQLException { try (Statement statement = connection.createStatement()) { try { @@ -225,7 +243,8 @@ public void testBindTime(Time timeVal) throws SQLException { * Bind time with calendar is not supported now. Everything is in UTC, need to revisit in the * future */ - @Theory + @ParameterizedTest + @ArgumentsSource(TimeProvider.class) public void testBindTimeWithCalendar(Time timeVal) throws SQLException { Calendar utcCal = Calendar.getInstance(TimeZone.getTimeZone("UTC")); Calendar laCal = Calendar.getInstance(TimeZone.getTimeZone("PST")); @@ -256,7 +275,8 @@ public void testBindTimeWithCalendar(Time timeVal) throws SQLException { } } - @Theory + @ParameterizedTest + @ArgumentsSource(TimeProvider.class) public void testBindTimeViaSetObject(Time timeVal) throws SQLException { try (Statement statement = connection.createStatement()) { try { @@ -282,7 +302,8 @@ public void testBindTimeViaSetObject(Time timeVal) throws SQLException { } } - @Theory + @ParameterizedTest + @ArgumentsSource(TimeProvider.class) public void testBindTimeViaSetObjectCast(Time timeVal) throws SQLException { try (Statement statement = connection.createStatement()) { try { @@ -308,18 +329,22 @@ public void testBindTimeViaSetObjectCast(Time timeVal) throws SQLException { } } - @DataPoints - public static Date[] dateValues = { - Date.valueOf("2000-01-01"), - Date.valueOf("3000-01-01"), - Date.valueOf("1970-01-01"), - Date.valueOf("1969-01-01"), - Date.valueOf("1500-01-01"), - Date.valueOf("1400-01-01"), - Date.valueOf("1000-01-01") - }; - - @Theory + static class DateProvider implements ArgumentsProvider { + @Override + public Stream provideArguments(ExtensionContext context) throws Exception { + return Stream.of( + Arguments.of(Date.valueOf("2000-01-01")), + Arguments.of(Date.valueOf("3000-01-01")), + Arguments.of(Date.valueOf("1970-01-01")), + Arguments.of(Date.valueOf("1969-01-01")), + Arguments.of(Date.valueOf("1500-01-01")), + Arguments.of(Date.valueOf("1400-01-01")), + Arguments.of(Date.valueOf("1000-01-01"))); + } + } + + @ParameterizedTest + @ArgumentsSource(DateProvider.class) public void testBindDate(Date dateValue) throws SQLException { try (Statement statement = connection.createStatement()) { try { @@ -346,7 +371,8 @@ public void testBindDate(Date dateValue) throws SQLException { } } - @Theory + @ParameterizedTest + @ArgumentsSource(DateProvider.class) public void testBindDateWithCalendar(Date dateValue) throws SQLException { Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC")); @@ -375,7 +401,8 @@ public void testBindDateWithCalendar(Date dateValue) throws SQLException { } } - @Theory + @ParameterizedTest + @ValueSource(ints = {0, 1, -1, Integer.MIN_VALUE, Integer.MAX_VALUE}) public void testBindObjectWithScaleZero(int intValue) throws SQLException { try (Statement statement = connection.createStatement()) { try { @@ -429,7 +456,7 @@ public void testBindNullForAllTypes() throws Throwable { while (result.next()) { String testType = result.getString(1); for (int i = 2; i <= 13; ++i) { - assertNull(String.format("Java Type: %s is not null", testType), 
result.getString(i)); + assertNull(result.getString(i), String.format("Java Type: %s is not null", testType)); } } } diff --git a/src/test/java/net/snowflake/client/jdbc/BindingDataLatestIT.java b/src/test/java/net/snowflake/client/jdbc/BindingDataLatestIT.java index 71c556686..58298df8a 100644 --- a/src/test/java/net/snowflake/client/jdbc/BindingDataLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/BindingDataLatestIT.java @@ -5,8 +5,8 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Connection; import java.sql.PreparedStatement; @@ -17,11 +17,10 @@ import java.util.Calendar; import java.util.TimeZone; import net.snowflake.client.AbstractDriverIT; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryOthers; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Binding Data integration tests for the latest JDBC driver. This doesn't work for the oldest @@ -29,7 +28,7 @@ * to examine if the tests still are not applicable. If it is applicable, move tests to * BindingDataIT so that both the latest and oldest supported driver run the tests. */ -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class BindingDataLatestIT extends AbstractDriverIT { TimeZone origTz = TimeZone.getDefault(); TimeZone tokyoTz = TimeZone.getTimeZone("Asia/Tokyo"); @@ -67,7 +66,7 @@ public void testBindTimestampTZ() throws SQLException { * @throws SQLException */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testTimestampBindingWithNTZType() throws SQLException { TimeZone.setDefault(tokyoTz); try (Connection connection = getConnection(); @@ -124,7 +123,7 @@ public void testTimestampBindingWithNTZType() throws SQLException { * @throws SQLException */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testTimestampBindingWithLTZType() throws SQLException { TimeZone.setDefault(tokyoTz); try (Connection connection = getConnection(); @@ -188,7 +187,7 @@ public void testTimestampBindingWithLTZType() throws SQLException { * @throws SQLException */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testTimestampBindingWithLTZTypeForDayLightSavingTimeZone() throws SQLException { Calendar australia = Calendar.getInstance(australiaTz); TimeZone.setDefault(australiaTz); diff --git a/src/test/java/net/snowflake/client/jdbc/CallableStatementIT.java b/src/test/java/net/snowflake/client/jdbc/CallableStatementIT.java index d6536dc93..1c2900958 100644 --- a/src/test/java/net/snowflake/client/jdbc/CallableStatementIT.java +++ b/src/test/java/net/snowflake/client/jdbc/CallableStatementIT.java @@ -5,8 +5,8 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; 
+import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.math.BigDecimal; import java.net.URL; @@ -15,74 +15,26 @@ import java.sql.Date; import java.sql.ResultSet; import java.sql.SQLException; -import java.sql.Statement; import java.sql.Time; import java.sql.Timestamp; import java.sql.Types; import java.util.Calendar; import java.util.HashMap; -import net.snowflake.client.category.TestCategoryStatement; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import net.snowflake.client.category.TestTags; +import net.snowflake.client.providers.SimpleResultFormatProvider; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; -@RunWith(Parameterized.class) -@Category(TestCategoryStatement.class) -public class CallableStatementIT extends BaseJDBCTest { - @Parameterized.Parameters - public static Object[][] data() { - // all tests in this class need to run for both query result formats json and arrow - return new Object[][] {{"JSON"}, {"arrow"}}; - } - - private static String queryResultFormat; - - public CallableStatementIT(String format) { - queryResultFormat = format; - } - - public static Connection getConnection() throws SQLException { - Connection conn = BaseJDBCTest.getConnection(); - try (Statement stmt = conn.createStatement()) { - stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); - } - return conn; - } - - private final String createStoredProcedure = - "create or replace procedure square_it(num FLOAT) returns float not " - + "null language javascript as $$ return NUM * NUM; $$"; - private final String createSecondStoredProcedure = - "create or replace procedure add_nums(x DOUBLE, y DOUBLE) " - + "returns double not null language javascript as $$ return X + Y; $$"; - private final String deleteStoredProcedure = "drop procedure if exists square_it(FLOAT)"; - private final String deleteSecondStoredProcedure = "drop procedure if exists add_nums(INT, INT)"; +@Tag(TestTags.STATEMENT) +public class CallableStatementIT extends CallableStatementITBase { - @Before - public void setUp() throws SQLException { - try (Connection con = getConnection(); - Statement statement = con.createStatement()) { - statement.execute(createStoredProcedure); - statement.execute(createSecondStoredProcedure); - } - } - - @After - public void tearDown() throws SQLException { - try (Connection con = getConnection(); - Statement statement = con.createStatement()) { - statement.execute(deleteStoredProcedure); - statement.execute(deleteSecondStoredProcedure); - } - } - - @Test - public void testPrepareCall() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testPrepareCall(String queryResultFormat) throws SQLException { // test CallableStatement with no binding parameters - try (Connection connection = getConnection()) { + try (Connection connection = getConnection(queryResultFormat)) { try (CallableStatement callableStatement = connection.prepareCall("call square_it(5)")) { assertThat(callableStatement.getParameterMetaData().getParameterCount(), is(0)); } diff --git a/src/test/java/net/snowflake/client/jdbc/CallableStatementITBase.java 
b/src/test/java/net/snowflake/client/jdbc/CallableStatementITBase.java new file mode 100644 index 000000000..8635d4246 --- /dev/null +++ b/src/test/java/net/snowflake/client/jdbc/CallableStatementITBase.java @@ -0,0 +1,48 @@ +package net.snowflake.client.jdbc; + +import java.sql.Connection; +import java.sql.SQLException; +import java.sql.Statement; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; + +public class CallableStatementITBase extends BaseJDBCTest { + public static Connection getConnection() throws SQLException { + return BaseJDBCTest.getConnection(); + } + + public static Connection getConnection(String queryResultFormat) throws SQLException { + Connection conn = BaseJDBCTest.getConnection(); + try (Statement stmt = conn.createStatement()) { + stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); + } + return conn; + } + + private final String createStoredProcedure = + "create or replace procedure square_it(num FLOAT) returns float not " + + "null language javascript as $$ return NUM * NUM; $$"; + private final String createSecondStoredProcedure = + "create or replace procedure add_nums(x DOUBLE, y DOUBLE) " + + "returns double not null language javascript as $$ return X + Y; $$"; + private final String deleteStoredProcedure = "drop procedure if exists square_it(FLOAT)"; + private final String deleteSecondStoredProcedure = "drop procedure if exists add_nums(INT, INT)"; + + @BeforeEach + public void setUp() throws SQLException { + try (Connection con = getConnection(); + Statement statement = con.createStatement()) { + statement.execute(createStoredProcedure); + statement.execute(createSecondStoredProcedure); + } + } + + @AfterEach + public void tearDown() throws SQLException { + try (Connection con = getConnection(); + Statement statement = con.createStatement()) { + statement.execute(deleteStoredProcedure); + statement.execute(deleteSecondStoredProcedure); + } + } +} diff --git a/src/test/java/net/snowflake/client/jdbc/CallableStatementLatestIT.java b/src/test/java/net/snowflake/client/jdbc/CallableStatementLatestIT.java index a4aaea709..af33e102c 100644 --- a/src/test/java/net/snowflake/client/jdbc/CallableStatementLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/CallableStatementLatestIT.java @@ -2,24 +2,23 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.CallableStatement; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Types; -import net.snowflake.client.category.TestCategoryStatement; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import net.snowflake.client.providers.SimpleResultFormatProvider; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; -@Category(TestCategoryStatement.class) -public class CallableStatementLatestIT extends CallableStatementIT { - - public CallableStatementLatestIT(String format) { - super(format); - } +@Tag(TestTags.STATEMENT) +public class CallableStatementLatestIT extends CallableStatementITBase { /** * Test that function that removes 
curly brackets from outside of call statements works properly @@ -44,10 +43,11 @@ public void testParseSqlEscapeSyntaxFunction() { * * @throws SQLException */ - @Test - public void testPrepareCallWithCurlyBracketSyntax() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testPrepareCallWithCurlyBracketSyntax(String queryResultFormat) throws SQLException { // test CallableStatement with no binding parameters - try (Connection connection = getConnection()) { + try (Connection connection = getConnection(queryResultFormat)) { try (CallableStatement callableStatement = connection.prepareCall("{call square_it(5)}")) { assertThat(callableStatement.getParameterMetaData().getParameterCount(), is(0)); } diff --git a/src/test/java/net/snowflake/client/jdbc/ChunkDownloaderS3RetryUrlLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ChunkDownloaderS3RetryUrlLatestIT.java index cfb8e086d..7824c9a01 100644 --- a/src/test/java/net/snowflake/client/jdbc/ChunkDownloaderS3RetryUrlLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ChunkDownloaderS3RetryUrlLatestIT.java @@ -3,7 +3,7 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertFalse; +import static org.junit.jupiter.api.Assertions.assertFalse; import java.sql.Connection; import java.sql.ResultSet; @@ -12,7 +12,7 @@ import java.util.List; import java.util.Map; import net.snowflake.client.AbstractDriverIT; -import net.snowflake.client.category.TestCategoryOthers; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.ExecTimeTelemetryData; import net.snowflake.client.core.HttpUtil; import net.snowflake.client.core.SFBaseSession; @@ -20,18 +20,18 @@ import org.apache.http.client.methods.HttpGet; import org.apache.http.client.utils.URIBuilder; import org.apache.http.impl.client.CloseableHttpClient; -import org.junit.Before; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class ChunkDownloaderS3RetryUrlLatestIT extends AbstractDriverIT { private SFStatement sfStatement; private SFBaseSession sfBaseSession; private ChunkDownloadContext sfContext; - @Before + @BeforeEach public void setup() throws SQLException, InterruptedException { try (Connection connection = getConnection(); Statement statement = connection.createStatement()) { diff --git a/src/test/java/net/snowflake/client/jdbc/ClientMemoryLimitParallelIT.java b/src/test/java/net/snowflake/client/jdbc/ClientMemoryLimitParallelIT.java index 56d954653..b90868b39 100644 --- a/src/test/java/net/snowflake/client/jdbc/ClientMemoryLimitParallelIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ClientMemoryLimitParallelIT.java @@ -1,26 +1,26 @@ package net.snowflake.client.jdbc; import static net.snowflake.client.AbstractDriverIT.getConnection; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.Properties; -import net.snowflake.client.category.TestCategoryOthers; -import org.junit.After; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import 
org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * @author azhan attempts to test the CLIENT_MEMORY_LIMIT working in multi-threading */ -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class ClientMemoryLimitParallelIT extends BaseJDBCWithSharedConnectionIT { private static Logger LOGGER = LoggerFactory.getLogger(ClientMemoryLimitParallelIT.class.getName()); @@ -62,14 +62,14 @@ public class ClientMemoryLimitParallelIT extends BaseJDBCWithSharedConnectionIT + rowCount + "));"; - @Before + @BeforeEach public void setUp() throws SQLException { try (Statement statement = connection.createStatement()) { statement.execute(createTestTableSQL); } } - @After + @AfterEach public void tearDown() throws SQLException { try (Statement statement = connection.createStatement()) { statement.execute("drop table if exists testtable_cml"); @@ -81,8 +81,8 @@ public void tearDown() throws SQLException { * in multi-threading */ @Test - @Ignore("Long term high memory usage test") - public void testParallelQueries() throws Exception { + @Disabled("Long term high memory usage test") + void testParallelQueries() throws Exception { Runnable testQuery = new Runnable() { public void run() { @@ -122,8 +122,7 @@ public void run() { * make sure there is no hanging */ @Test - public void testQueryNotHanging() throws SQLException { - Properties paramProperties = new Properties(); + void testQueryNotHanging() throws SQLException { try (Statement statement = connection.createStatement()) { queryRows(statement, 100, 160); } diff --git a/src/test/java/net/snowflake/client/jdbc/CompressedStreamFactoryTest.java b/src/test/java/net/snowflake/client/jdbc/CompressedStreamFactoryTest.java index 86eb5764a..0c3f69470 100644 --- a/src/test/java/net/snowflake/client/jdbc/CompressedStreamFactoryTest.java +++ b/src/test/java/net/snowflake/client/jdbc/CompressedStreamFactoryTest.java @@ -1,7 +1,7 @@ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import com.github.luben.zstd.ZstdInputStream; import com.github.luben.zstd.ZstdOutputStream; @@ -14,7 +14,7 @@ import org.apache.commons.io.IOUtils; import org.apache.http.Header; import org.apache.http.message.BasicHeader; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class CompressedStreamFactoryTest { diff --git a/src/test/java/net/snowflake/client/jdbc/ConnectStringParseTest.java b/src/test/java/net/snowflake/client/jdbc/ConnectStringParseTest.java index 871a6cfcd..c2f7eeb5c 100644 --- a/src/test/java/net/snowflake/client/jdbc/ConnectStringParseTest.java +++ b/src/test/java/net/snowflake/client/jdbc/ConnectStringParseTest.java @@ -2,11 +2,11 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.util.Properties; import net.snowflake.client.core.SFSessionProperty; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class ConnectStringParseTest { @Test diff --git a/src/test/java/net/snowflake/client/jdbc/ConnectionAlreadyClosedIT.java 
b/src/test/java/net/snowflake/client/jdbc/ConnectionAlreadyClosedIT.java index fd0b69488..98826eaa8 100644 --- a/src/test/java/net/snowflake/client/jdbc/ConnectionAlreadyClosedIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ConnectionAlreadyClosedIT.java @@ -5,11 +5,11 @@ import java.sql.Connection; import java.util.Properties; -import net.snowflake.client.category.TestCategoryConnection; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryConnection.class) +@Tag(TestTags.CONNECTION) public class ConnectionAlreadyClosedIT extends BaseJDBCTest { @Test diff --git a/src/test/java/net/snowflake/client/jdbc/ConnectionFeatureNotSupportedIT.java b/src/test/java/net/snowflake/client/jdbc/ConnectionFeatureNotSupportedIT.java index f91eee092..b0b120683 100644 --- a/src/test/java/net/snowflake/client/jdbc/ConnectionFeatureNotSupportedIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ConnectionFeatureNotSupportedIT.java @@ -8,11 +8,11 @@ import java.sql.SQLException; import java.sql.Savepoint; import java.util.HashMap; -import net.snowflake.client.category.TestCategoryConnection; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryConnection.class) +@Tag(TestTags.CONNECTION) public class ConnectionFeatureNotSupportedIT extends BaseJDBCTest { @Test public void testFeatureNotSupportedException() throws Throwable { diff --git a/src/test/java/net/snowflake/client/jdbc/ConnectionIT.java b/src/test/java/net/snowflake/client/jdbc/ConnectionIT.java index 00656e305..9d99e01a1 100644 --- a/src/test/java/net/snowflake/client/jdbc/ConnectionIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ConnectionIT.java @@ -3,16 +3,16 @@ */ package net.snowflake.client.jdbc; +import static net.snowflake.client.AssumptionUtils.assumeRunningOnGithubActions; import static net.snowflake.client.core.SessionUtil.CLIENT_SESSION_KEEP_ALIVE_HEARTBEAT_FREQUENCY; import static org.hamcrest.CoreMatchers.anyOf; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; -import static org.junit.Assume.assumeTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.File; import java.io.FileInputStream; @@ -42,23 +42,20 @@ import java.util.Properties; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; -import net.snowflake.client.ConditionalIgnoreRule.ConditionalIgnore; -import net.snowflake.client.RunningNotOnTestaccount; -import net.snowflake.client.RunningOnGithubAction; import net.snowflake.client.TestUtil; -import net.snowflake.client.category.TestCategoryConnection; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.annotations.RunOnTestaccountNotOnGithubActions; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.SFSession; import 
net.snowflake.common.core.SqlState; import org.apache.commons.codec.binary.Base64; -import org.junit.Assert; -import org.junit.Ignore; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; /** Connection integration tests */ -@Category(TestCategoryConnection.class) +@Tag(TestTags.CONNECTION) public class ConnectionIT extends BaseJDBCWithSharedConnectionIT { // create a local constant for this code for testing purposes (already defined in GS) public static final int INVALID_CONNECTION_INFO_CODE = 390100; @@ -70,7 +67,7 @@ public class ConnectionIT extends BaseJDBCWithSharedConnectionIT { String errorMessage = null; - @Rule public TemporaryFolder tmpFolder = new TemporaryFolder(); + @TempDir private File tmpFolder; @Test public void testSimpleConnection() throws SQLException { @@ -86,7 +83,7 @@ public void testSimpleConnection() throws SQLException { } @Test - @Ignore + @Disabled public void test300ConnectionsWithSingleClientInstance() throws SQLException { // concurrent testing int size = 300; @@ -214,7 +211,7 @@ public void testDataCompletenessInLowMemory() throws Exception { } @Test - @ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testConnectionGetAndSetDBAndSchema() throws SQLException { final String SECOND_DATABASE = "SECOND_DATABASE"; final String SECOND_SCHEMA = "SECOND_SCHEMA"; @@ -350,7 +347,7 @@ public void testConnectViaDataSource() throws SQLException { } @Test - @Ignore + @Disabled public void testDataSourceOktaSerialization() throws Exception { // test with username/password authentication // set up DataSource object and ensure connection works @@ -368,7 +365,8 @@ public void testDataSourceOktaSerialization() throws Exception { ResultSet resultSet = statement.executeQuery("select 1")) { resultSet.next(); assertThat("select 1", resultSet.getInt(1), equalTo(1)); - File serializedFile = tmpFolder.newFile("serializedStuff.ser"); + File serializedFile = new File(tmpFolder, "serializedStuff.ser"); + serializedFile.createNewFile(); // serialize datasource object into a file try (FileOutputStream outputFile = new FileOutputStream(serializedFile); ObjectOutputStream out = new ObjectOutputStream(outputFile)) { @@ -391,7 +389,7 @@ public void testDataSourceOktaSerialization() throws Exception { } @Test - @ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testConnectUsingKeyPair() throws Exception { Map parameters = getConnectionParameters(); String testUser = parameters.get("user"); @@ -449,7 +447,7 @@ public void testConnectUsingKeyPair() throws Exception { DriverManager.getConnection(uri, properties); fail(); } catch (SQLException e) { - Assert.assertEquals(390144, e.getErrorCode()); + assertEquals(390144, e.getErrorCode()); } // test multiple key pair try (Connection connection = getConnection(); @@ -506,7 +504,7 @@ public void testBadPrivateKey() throws Exception { } @Test - @ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testDifferentKeyLength() throws Exception { Map parameters = getConnectionParameters(); String testUser = parameters.get("user"); @@ -840,7 +838,7 @@ public void testResultSetsClosedByStatement() throws SQLException { } @Test - @ConditionalIgnore(condition = 
RunningNotOnTestaccount.class) + @RunOnTestaccountNotOnGithubActions public void testOKTAConnection() throws Throwable { Map params = getConnectionParameters(); Properties properties = new Properties(); @@ -857,7 +855,7 @@ public void testOKTAConnection() throws Throwable { } @Test - @ConditionalIgnore(condition = RunningNotOnTestaccount.class) + @RunOnTestaccountNotOnGithubActions public void testOKTAConnectionWithOktauserParam() throws Throwable { Map params = getConnectionParameters(); Properties properties = new Properties(); @@ -888,7 +886,7 @@ public void testValidateDefaultParameters() throws Throwable { fail("should fail"); } catch (SQLException ex) { assertEquals( - "error code", ex.getErrorCode(), SESSION_CREATION_OBJECT_DOES_NOT_EXIST_NOT_AUTHORIZED); + ex.getErrorCode(), SESSION_CREATION_OBJECT_DOES_NOT_EXIST_NOT_AUTHORIZED, "error code"); } // schema is invalid @@ -899,7 +897,7 @@ public void testValidateDefaultParameters() throws Throwable { fail("should fail"); } catch (SQLException ex) { assertEquals( - "error code", ex.getErrorCode(), SESSION_CREATION_OBJECT_DOES_NOT_EXIST_NOT_AUTHORIZED); + ex.getErrorCode(), SESSION_CREATION_OBJECT_DOES_NOT_EXIST_NOT_AUTHORIZED, "error code"); } // warehouse is invalid @@ -910,7 +908,7 @@ public void testValidateDefaultParameters() throws Throwable { fail("should fail"); } catch (SQLException ex) { assertEquals( - "error code", ex.getErrorCode(), SESSION_CREATION_OBJECT_DOES_NOT_EXIST_NOT_AUTHORIZED); + ex.getErrorCode(), SESSION_CREATION_OBJECT_DOES_NOT_EXIST_NOT_AUTHORIZED, "error code"); } // role is invalid @@ -920,7 +918,7 @@ public void testValidateDefaultParameters() throws Throwable { DriverManager.getConnection(params.get("uri"), props); fail("should fail"); } catch (SQLException ex) { - assertEquals("error code", ex.getErrorCode(), ROLE_IN_CONNECT_STRING_DOES_NOT_EXIST); + assertEquals(ex.getErrorCode(), ROLE_IN_CONNECT_STRING_DOES_NOT_EXIST, "error code"); } } @@ -950,7 +948,7 @@ public void testNoValidateDefaultParameters() throws Throwable { DriverManager.getConnection(params.get("uri"), props); fail("should fail"); } catch (SQLException ex) { - assertEquals("error code", ex.getErrorCode(), ROLE_IN_CONNECT_STRING_DOES_NOT_EXIST); + assertEquals(ex.getErrorCode(), ROLE_IN_CONNECT_STRING_DOES_NOT_EXIST, "error code"); } } @@ -961,7 +959,7 @@ public void testNoValidateDefaultParameters() throws Throwable { * * @throws SQLException */ - @Ignore + @Disabled @Test public void testOrgAccountUrl() throws SQLException { Properties props = new Properties(); @@ -987,7 +985,7 @@ public void testOrgAccountUrl() throws SQLException { * @throws SQLException * @throws NoSuchAlgorithmException */ - @Ignore + @Disabled @Test public void testOrgAccountUrlWithKeyPair() throws SQLException, NoSuchAlgorithmException { @@ -1042,7 +1040,7 @@ private Properties setCommonConnectionParameters(boolean validateDefaultParamete @Test public void testFailOverOrgAccount() throws SQLException { // only when set_git_info.sh picks up a SOURCE_PARAMETER_FILE - assumeTrue(RunningOnGithubAction.isRunningOnGithubAction()); + assumeRunningOnGithubActions(); Map kvParams = getConnectionParameters(null, "ORG"); Properties connProps = kvMap2Properties(kvParams, false); diff --git a/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java index 30ff6728f..68cd101bf 100644 --- a/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java +++ 
b/src/test/java/net/snowflake/client/jdbc/ConnectionLatestIT.java @@ -14,13 +14,13 @@ import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.core.AnyOf.anyOf; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -51,11 +51,10 @@ import java.util.Properties; import java.util.concurrent.TimeUnit; import javax.net.ssl.SSLHandshakeException; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningNotOnAWS; -import net.snowflake.client.RunningOnGithubAction; import net.snowflake.client.TestUtil; -import net.snowflake.client.category.TestCategoryConnection; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.annotations.RunOnAWS; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.HttpClientSettingsKey; import net.snowflake.client.core.HttpUtil; import net.snowflake.client.core.ObjectMapperFactory; @@ -73,13 +72,12 @@ import org.apache.http.client.methods.HttpPost; import org.apache.http.client.utils.URIBuilder; import org.apache.http.entity.StringEntity; -import org.junit.After; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; /** * Connection integration tests for the latest JDBC driver. This doesn't work for the oldest @@ -87,14 +85,14 @@ * if the tests still is not applicable. If it is applicable, move tests to ConnectionIT so that * both the latest and oldest supported driver run the tests. 
*/ -@Category(TestCategoryConnection.class) +@Tag(TestTags.CONNECTION) public class ConnectionLatestIT extends BaseJDBCTest { - @Rule public TemporaryFolder tmpFolder = new TemporaryFolder(); + @TempDir private File tmpFolder; private static final SFLogger logger = SFLoggerFactory.getLogger(ConnectionLatestIT.class); private boolean defaultState; - @Before + @BeforeEach public void setUp() { TelemetryService service = TelemetryService.getInstance(); service.updateContextForIT(getConnectionParameters()); @@ -103,7 +101,7 @@ public void setUp() { TelemetryService.enable(); } - @After + @AfterEach public void tearDown() throws InterruptedException { TelemetryService service = TelemetryService.getInstance(); // wait 5 seconds while the service is flushing @@ -193,12 +191,13 @@ public void testHeartbeatFrequencyTooSmall() throws Exception { * @throws Throwable */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void putGetStatementsHaveQueryID() throws Throwable { try (Connection con = getConnection(); Statement statement = con.createStatement()) { String sourceFilePath = getFullPathFileInResource(TEST_DATA_FILE); - File destFolder = tmpFolder.newFolder(); + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); statement.execute("CREATE OR REPLACE STAGE testPutGet_stage"); SnowflakeStatement snowflakeStatement = statement.unwrap(SnowflakeStatement.class); @@ -210,7 +209,7 @@ public void putGetStatementsHaveQueryID() throws Throwable { String statementPutQueryId = snowflakeStatement.getQueryID(); TestUtil.assertValidQueryId(statementPutQueryId); assertNotEquals( - "create query id is override by put query id", createStageQueryId, statementPutQueryId); + createStageQueryId, statementPutQueryId, "create query id is override by put query id"); resultSetPutQueryId = resultSet.unwrap(SnowflakeResultSet.class).getQueryID(); TestUtil.assertValidQueryId(resultSetPutQueryId); assertEquals(resultSetPutQueryId, statementPutQueryId); @@ -222,7 +221,7 @@ public void putGetStatementsHaveQueryID() throws Throwable { String resultSetGetQueryId = resultSet.unwrap(SnowflakeResultSet.class).getQueryID(); TestUtil.assertValidQueryId(resultSetGetQueryId); assertNotEquals( - "put and get query id should be different", resultSetGetQueryId, resultSetPutQueryId); + resultSetGetQueryId, resultSetPutQueryId, "put and get query id should be different"); assertEquals(resultSetGetQueryId, statementGetQueryId); } } @@ -230,12 +229,13 @@ public void putGetStatementsHaveQueryID() throws Throwable { /** Added in > 3.14.4 */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void putGetStatementsHaveQueryIDEvenWhenFail() throws Throwable { try (Connection con = getConnection(); Statement statement = con.createStatement()) { String sourceFilePath = getFullPathFileInResource(TEST_DATA_FILE); - File destFolder = tmpFolder.newFolder(); + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); SnowflakeStatement snowflakeStatement = statement.unwrap(SnowflakeStatement.class); try { @@ -255,7 +255,7 @@ public void putGetStatementsHaveQueryIDEvenWhenFail() throws Throwable { assertEquals(snowflakeStatement.getQueryID(), e.getQueryId()); } String getQueryId = snowflakeStatement.getQueryID(); - assertNotEquals("put and get query id should 
be different", putQueryId, getQueryId); + assertNotEquals(putQueryId, getQueryId, "put and get query id should be different"); String stageName = "stage_" + SnowflakeUtil.randomAlphaNumeric(10); statement.execute("CREATE OR REPLACE STAGE " + stageName); TestUtil.assertValidQueryId(snowflakeStatement.getQueryID()); @@ -707,7 +707,7 @@ public void testHttpsLoginTimeoutWithSSL() throws InterruptedException { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testKeyPairFileDataSourceSerialization() throws Exception { // test with key/pair authentication where key is in file // set up DataSource object and ensure connection works @@ -727,7 +727,8 @@ public void testKeyPairFileDataSourceSerialization() throws Exception { connectAndExecuteSelect1(ds); - File serializedFile = tmpFolder.newFile("serializedStuff.ser"); + File serializedFile = new File(tmpFolder, "serializedStuff.ser"); + serializedFile.createNewFile(); // serialize datasource object into a file try (FileOutputStream outputFile = new FileOutputStream(serializedFile); ObjectOutputStream out = new ObjectOutputStream(outputFile)) { @@ -752,7 +753,7 @@ private static String readPrivateKeyFileToBase64Content(String fileName) throws /** Works in > 3.18.0 */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testKeyPairBase64DataSourceSerialization() throws Exception { // test with key/pair authentication where key is passed as a Base64 string value // set up DataSource object and ensure connection works @@ -772,7 +773,8 @@ public void testKeyPairBase64DataSourceSerialization() throws Exception { connectAndExecuteSelect1(ds); - File serializedFile = tmpFolder.newFile("serializedStuff.ser"); + File serializedFile = new File(tmpFolder, "serializedStuff.ser"); + serializedFile.createNewFile(); // serialize datasource object into a file try (FileOutputStream outputFile = new FileOutputStream(serializedFile); ObjectOutputStream out = new ObjectOutputStream(outputFile)) { @@ -795,7 +797,7 @@ public void testKeyPairBase64DataSourceSerialization() throws Exception { * executions */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testPrivateKeyInConnectionString() throws SQLException, IOException { Map parameters = getConnectionParameters(); String testUser = parameters.get("user"); @@ -898,7 +900,7 @@ private static void unsetPublicKey(String testUser) throws SQLException { // This will only work with JDBC driver versions higher than 3.15.1 @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testPrivateKeyInConnectionStringWithBouncyCastle() throws SQLException, IOException { System.setProperty(SecurityUtil.ENABLE_BOUNCYCASTLE_PROVIDER_JVM, "true"); testPrivateKeyInConnectionString(); @@ -911,7 +913,7 @@ public void testPrivateKeyInConnectionStringWithBouncyCastle() throws SQLExcepti * executions */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testPrivateKeyBase64InConnectionString() throws SQLException, IOException { Map parameters = getConnectionParameters(); String testUser = parameters.get("user"); @@ -999,7 +1001,7 @@ private static void connectExpectingInvalidOrUnsupportedPrivateKey( /** Works in > 3.18.0 */ @Test - 
@ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testPrivateKeyBase64InConnectionStringWithBouncyCastle() throws SQLException, IOException { System.setProperty(SecurityUtil.ENABLE_BOUNCYCASTLE_PROVIDER_JVM, "true"); @@ -1007,7 +1009,7 @@ public void testPrivateKeyBase64InConnectionStringWithBouncyCastle() } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testBasicDataSourceSerialization() throws Exception { // test with username/password authentication // set up DataSource object and ensure connection works @@ -1022,7 +1024,8 @@ public void testBasicDataSourceSerialization() throws Exception { connectAndExecuteSelect1(ds); - File serializedFile = tmpFolder.newFile("serializedStuff.ser"); + File serializedFile = new File(tmpFolder, "serializedStuff.ser"); + serializedFile.createNewFile(); // serialize datasource object into a file try (FileOutputStream outputFile = new FileOutputStream(serializedFile); ObjectOutputStream out = new ObjectOutputStream(outputFile)) { @@ -1233,7 +1236,7 @@ public void testGetChildQueryIdsNegativeTestQueryFailed() throws Exception { * likely not having the test account we used here. */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testAuthenticatorEndpointWithDashInAccountName() throws Exception { Map params = getConnectionParameters(); String serverUrl = @@ -1292,7 +1295,7 @@ public void testReadOnly() throws Throwable { * the error code is ErrorCode.S3_OPERATION_ERROR so only runs on AWS. */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningNotOnAWS.class) + @RunOnAWS public void testDownloadStreamWithFileNotFoundException() throws SQLException { try (Connection connection = getConnection(); Statement statement = connection.createStatement()) { @@ -1354,19 +1357,19 @@ private Boolean isPbes2KeySupported() throws SQLException, IOException, Security String passphrase = System.getenv(passphraseEnv); assertNotNull( + passphrase, privateKeyFileNameEnv + " environment variable can't be empty. " - + "Please provide the filename for your private key located in the resource folder", - passphrase); + + "Please provide the filename for your private key located in the resource folder"); assertNotNull( + passphrase, publicKeyFileNameEnv + " environment variable can't be empty. 
" - + "Please provide the filename for your public key located in the resource folder", - passphrase); + + "Please provide the filename for your public key located in the resource folder"); assertNotNull( - passphraseEnv + " environment variable is required to decrypt private key.", passphrase); + passphrase, passphraseEnv + " environment variable is required to decrypt private key."); Map parameters = getConnectionParameters(); String testUser = parameters.get("user"); Properties properties = new Properties(); @@ -1424,8 +1427,8 @@ private Boolean isPbes2KeySupported() throws SQLException, IOException, Security * @throws IOException */ @Test - @Ignore - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @Disabled + @DontRunOnGithubActions public void testPbes2Support() throws SQLException, IOException { System.clearProperty(SecurityUtil.ENABLE_BOUNCYCASTLE_PROVIDER_JVM); boolean pbes2Supported = isPbes2KeySupported(); @@ -1438,7 +1441,7 @@ public void testPbes2Support() throws SQLException, IOException { String failureMessage = "The failure means that the JDK version can decrypt a private key generated by OpenSSL v3 and " + "BouncyCastle shouldn't be needed anymore"; - assertFalse(failureMessage, pbes2Supported); + assertFalse(pbes2Supported, failureMessage); // The expectation is that this is going to pass once we add Bouncy Castle in the list of // providers @@ -1448,12 +1451,12 @@ public void testPbes2Support() throws SQLException, IOException { "Bouncy Castle Provider should have been loaded with the -D" + SecurityUtil.ENABLE_BOUNCYCASTLE_PROVIDER_JVM + "JVM argument and this should have decrypted the private key generated by OpenSSL v3"; - assertTrue(failureMessage, pbes2Supported); + assertTrue(pbes2Supported, failureMessage); } // Test for regenerating okta one-time token for versions > 3.15.1 @Test - @Ignore + @Disabled public void testDataSourceOktaGenerates429StatusCode() throws Exception { // test with username/password authentication // set up DataSource object and ensure connection works @@ -1551,26 +1554,26 @@ public void shouldGetDifferentTimestampLtzConsistentBetweenFormats() throws Exce arrowResultSet.getTimestamp(column).getTimezoneOffset(), arrowResultSet.getTimestamp(column).getClass()); assertEquals( + jsonResultSet.getString(column), + arrowResultSet.getString(column), "Expecting that string representation are the same for row " + rowIdx + " and column " - + column, - jsonResultSet.getString(column), - arrowResultSet.getString(column)); + + column); assertEquals( + jsonResultSet.getTimestamp(column).toString(), + arrowResultSet.getTimestamp(column).toString(), "Expecting that string representation (via toString) are the same for row " + rowIdx + " and column " - + column, - jsonResultSet.getTimestamp(column).toString(), - arrowResultSet.getTimestamp(column).toString()); + + column); assertEquals( + jsonResultSet.getTimestamp(column), + arrowResultSet.getTimestamp(column), "Expecting that timestamps are the same for row " + rowIdx + " and column " - + column, - jsonResultSet.getTimestamp(column), - arrowResultSet.getTimestamp(column)); + + column); } rowIdx++; } diff --git a/src/test/java/net/snowflake/client/jdbc/ConnectionManual.java b/src/test/java/net/snowflake/client/jdbc/ConnectionManual.java index 91d5f7bc8..4b7d569d5 100644 --- a/src/test/java/net/snowflake/client/jdbc/ConnectionManual.java +++ b/src/test/java/net/snowflake/client/jdbc/ConnectionManual.java @@ -1,6 +1,6 @@ package net.snowflake.client.jdbc; -import static 
org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Connection; import java.sql.DriverManager; diff --git a/src/test/java/net/snowflake/client/jdbc/ConnectionPoolingIT.java b/src/test/java/net/snowflake/client/jdbc/ConnectionPoolingIT.java index 770acda0a..a539dc7f9 100644 --- a/src/test/java/net/snowflake/client/jdbc/ConnectionPoolingIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ConnectionPoolingIT.java @@ -3,7 +3,7 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import com.mchange.v2.c3p0.ComboPooledDataSource; import com.zaxxer.hikari.HikariConfig; @@ -15,17 +15,17 @@ import java.sql.Statement; import java.util.Map; import java.util.Properties; -import net.snowflake.client.category.TestCategoryConnection; +import net.snowflake.client.category.TestTags; import org.apache.commons.dbcp.BasicDataSource; import org.apache.commons.dbcp.PoolingDataSource; import org.apache.commons.pool.impl.GenericObjectPool; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** Connection pool interface test */ -@Category(TestCategoryConnection.class) +@Tag(TestTags.CONNECTION) public class ConnectionPoolingIT { private BasicDataSource bds = null; private ComboPooledDataSource cpds = null; @@ -48,7 +48,7 @@ public ConnectionPoolingIT() { ssl = params.get("ssl"); } - @Before + @BeforeEach public void setUp() throws SQLException { try (Connection connection = BaseJDBCTest.getConnection(); Statement statement = connection.createStatement()) { @@ -57,7 +57,7 @@ public void setUp() throws SQLException { } } - @After + @AfterEach public void tearDown() throws SQLException { try (Connection connection = BaseJDBCTest.getConnection(); Statement statement = connection.createStatement(); ) { diff --git a/src/test/java/net/snowflake/client/jdbc/ConnectionWithOCSPModeIT.java b/src/test/java/net/snowflake/client/jdbc/ConnectionWithOCSPModeIT.java index 49c6c6d10..96b896247 100644 --- a/src/test/java/net/snowflake/client/jdbc/ConnectionWithOCSPModeIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ConnectionWithOCSPModeIT.java @@ -9,8 +9,8 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.IsInstanceOf.instanceOf; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.fail; import java.net.SocketTimeoutException; import java.security.cert.CertificateExpiredException; @@ -19,17 +19,16 @@ import java.util.Properties; import javax.net.ssl.SSLHandshakeException; import javax.net.ssl.SSLPeerUnverifiedException; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryConnection; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.SFOCSPException; import net.snowflake.client.core.SFTrustManager; import org.hamcrest.Matcher; -import org.junit.After; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Test; -import 
org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Tests for connection with OCSP mode mainly negative cases by injecting errors. @@ -38,7 +37,7 @@ * *

hang_webserver.py 12345 */ -@Category(TestCategoryConnection.class) +@Tag(TestTags.CONNECTION) public class ConnectionWithOCSPModeIT extends BaseJDBCTest { private final String testUser = "fakeuser"; private final String testPassword = "testpassword"; @@ -46,12 +45,12 @@ public class ConnectionWithOCSPModeIT extends BaseJDBCTest { private static int nameCounter = 0; - @Before + @BeforeEach public void setUp() { SFTrustManager.deleteCache(); } - @After + @AfterEach public void tearDown() { SFTrustManager.cleanTestSystemParameters(); } @@ -340,7 +339,7 @@ public void testOCSPResponderTimeoutFailOpen() { /** Test OCSP Responder hang and timeout. SocketTimeoutException exception should be raised. */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testOCSPResponderTimeoutFailClosed() { System.setProperty(SFTrustManager.SF_OCSP_TEST_OCSP_RESPONDER_TIMEOUT, "1000"); System.setProperty(SFTrustManager.SF_OCSP_TEST_RESPONDER_URL, "http://localhost:12345/hang"); @@ -380,7 +379,7 @@ public void testOCSPResponder403FailOpen() { * is invalid. */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testOCSPResponder403FailClosed() { System.setProperty(SFTrustManager.SF_OCSP_TEST_RESPONDER_URL, "http://localhost:12345/403"); System.setProperty( @@ -397,7 +396,7 @@ public void testOCSPResponder403FailClosed() { /** Test Certificate Expired. Will fail in both FAIL_OPEN and FAIL_CLOSED. */ @Test - @Ignore("Issuer of root CA expired") + @Disabled("Issuer of root CA expired") // https://support.sectigo.com/articles/Knowledge/Sectigo-AddTrust-External-CA-Root-Expiring-May-30-2020 public void testExpiredCert() { try { diff --git a/src/test/java/net/snowflake/client/jdbc/CustomProxyLatestIT.java b/src/test/java/net/snowflake/client/jdbc/CustomProxyLatestIT.java index c6fb29bf4..2673d543c 100644 --- a/src/test/java/net/snowflake/client/jdbc/CustomProxyLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/CustomProxyLatestIT.java @@ -1,13 +1,13 @@ package net.snowflake.client.jdbc; -import static junit.framework.TestCase.assertEquals; -import static junit.framework.TestCase.fail; import static net.snowflake.client.AbstractDriverIT.getFullPathFileInResource; import static net.snowflake.client.jdbc.SnowflakeDriverIT.findFile; import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.File; import java.net.Authenticator; @@ -18,17 +18,16 @@ import java.sql.SQLException; import java.sql.Statement; import java.util.Properties; -import net.snowflake.client.category.TestCategoryOthers; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.HttpClientSettingsKey; import net.snowflake.client.core.HttpProtocol; import net.snowflake.client.core.HttpUtil; import net.snowflake.client.core.SFSession; import net.snowflake.common.core.SqlState; -import org.junit.Ignore; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import 
org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; // To run these tests, you must: // 1.) Start up a proxy connection. The simplest ways are via Squid or BurpSuite. Confluence doc on @@ -37,9 +36,9 @@ // 2.) Enter your own username and password for the account you're connecting to // 3.) Adjust parameters like role, database, schema, etc to match with account accordingly -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class CustomProxyLatestIT { - @Rule public TemporaryFolder tmpFolder = new TemporaryFolder(); + @TempDir private File tmpFolder; /** * Before running this test, change the user and password to appropriate values. Set up 2 @@ -51,7 +50,7 @@ public class CustomProxyLatestIT { * @throws SQLException */ @Test - @Ignore + @Disabled public void test2ProxiesWithSameJVM() throws SQLException { Properties props = new Properties(); props.put("user", "USER"); @@ -107,7 +106,7 @@ public void test2ProxiesWithSameJVM() throws SQLException { * @throws SQLException */ @Test - @Ignore + @Disabled public void testTLSIssue() throws SQLException { Properties props = new Properties(); props.put("user", "USER"); @@ -149,7 +148,7 @@ public void testTLSIssue() throws SQLException { * http instead of https proxy parameters for non-TLS proxy */ @Test - @Ignore + @Disabled public void testJVMParamsWithNonProxyHostsHonored() throws SQLException { Properties props = new Properties(); props.put("user", "USER"); @@ -172,7 +171,7 @@ public void testJVMParamsWithNonProxyHostsHonored() throws SQLException { /** Test TLS issue against S3 client to ensure proxy works with PUT/GET statements */ @Test - @Ignore + @Disabled public void testTLSIssueWithConnectionStringAgainstS3() throws ClassNotFoundException, SQLException { @@ -193,7 +192,7 @@ public void testTLSIssueWithConnectionStringAgainstS3() * @throws SQLException */ @Test - @Ignore + @Disabled public void testNonProxyHostAltering() throws SQLException { Properties props = new Properties(); props.put("user", "USER"); @@ -243,7 +242,7 @@ public void testNonProxyHostAltering() throws SQLException { * @throws SQLException */ @Test - @Ignore + @Disabled public void testSizeOfHttpClientNoProxies() throws SQLException { Properties props = new Properties(); props.put("user", "USER"); @@ -279,7 +278,7 @@ public void testSizeOfHttpClientNoProxies() throws SQLException { } @Test - @Ignore + @Disabled public void testCorrectProxySettingFromConnectionString() throws ClassNotFoundException, SQLException { String connectionUrl = @@ -299,7 +298,7 @@ public void testCorrectProxySettingFromConnectionString() } @Test - @Ignore + @Disabled public void testWrongProxyPortSettingFromConnectionString() throws ClassNotFoundException, SQLException { @@ -313,7 +312,7 @@ public void testWrongProxyPortSettingFromConnectionString() } @Test - @Ignore + @Disabled public void testWrongProxyPasswordSettingFromConnectionString() throws ClassNotFoundException, SQLException { @@ -334,7 +333,7 @@ public void testWrongProxyPasswordSettingFromConnectionString() } @Test - @Ignore + @Disabled public void testInvalidProxyPortFromConnectionString() throws ClassNotFoundException, SQLException { @@ -355,7 +354,7 @@ public void testInvalidProxyPortFromConnectionString() } @Test - @Ignore + @Disabled public void testNonProxyHostsFromConnectionString() throws ClassNotFoundException, SQLException { String connectionUrl = @@ -368,7 +367,7 @@ public void testNonProxyHostsFromConnectionString() throws ClassNotFoundExceptio } @Test - @Ignore + @Disabled public void 
testWrongNonProxyHostsFromConnectionString() throws ClassNotFoundException, SQLException { @@ -383,7 +382,7 @@ public void testWrongNonProxyHostsFromConnectionString() } @Test - @Ignore + @Disabled public void testUnsetJvmPropertiesForInvalidSettings() throws SQLException { Properties props = new Properties(); props.put("user", "USER"); @@ -435,11 +434,9 @@ public PasswordAuthentication getPasswordAuthentication() { stmt.execute("use warehouse TINY_WAREHOUSE"); stmt.execute("CREATE OR REPLACE STAGE testPutGet_stage"); assertTrue( - "Failed to put a file", stmt.execute( - "PUT file://" - + getFullPathFileInResource("orders_100.csv") - + " @testPutGet_stage")); + "PUT file://" + getFullPathFileInResource("orders_100.csv") + " @testPutGet_stage"), + "Failed to put a file"); String sql = "select $1 from values(1),(3),(5),(7)"; try (ResultSet res = stmt.executeQuery(sql)) { while (res.next()) { @@ -454,7 +451,7 @@ public PasswordAuthentication getPasswordAuthentication() { } @Test - @Ignore + @Disabled public void testProxyConnectionWithAzure() throws ClassNotFoundException, SQLException { String connectionUrl = "jdbc:snowflake://aztestaccount.east-us-2.azure.snowflakecomputing.com/?tracing=ALL"; @@ -463,7 +460,7 @@ public void testProxyConnectionWithAzure() throws ClassNotFoundException, SQLExc } @Test - @Ignore + @Disabled public void testProxyConnectionWithAzureWithConnectionString() throws ClassNotFoundException, SQLException { String connectionUrl = @@ -476,7 +473,7 @@ public void testProxyConnectionWithAzureWithConnectionString() } @Test - @Ignore + @Disabled public void testProxyConnectionWithoutProxyPortOrHost() throws ClassNotFoundException, SQLException { // proxyPort is empty @@ -553,7 +550,7 @@ public void testProxyConnectionWithoutProxyPortOrHost() * @throws SQLException */ @Test - @Ignore + @Disabled public void testProxyConnectionWithJVMParameters() throws SQLException, ClassNotFoundException { String connectionUrl = "jdbc:snowflake://aztestaccount.east-us-2.azure.snowflakecomputing.com/?tracing=ALL"; @@ -571,7 +568,7 @@ public void testProxyConnectionWithJVMParameters() throws SQLException, ClassNot } @Test - @Ignore + @Disabled public void testProxyConnectionWithAzureWithWrongConnectionString() throws ClassNotFoundException { String connectionUrl = @@ -598,7 +595,7 @@ public void testProxyConnectionWithAzureWithWrongConnectionString() * is specified. Set up a http proxy and change the settings below. */ @Test - @Ignore + @Disabled public void testSetJVMProxyHttp() throws SQLException { Properties props = new Properties(); props.put("user", "USER"); @@ -624,7 +621,7 @@ public void testSetJVMProxyHttp() throws SQLException { * below. */ @Test - @Ignore + @Disabled public void testSetJVMProxyHttps() throws SQLException { Properties props = new Properties(); props.put("user", "USER"); @@ -649,7 +646,7 @@ public void testSetJVMProxyHttps() throws SQLException { * https proxy and change the settings below. 
*/ @Test - @Ignore + @Disabled public void testSetJVMProxyDefaultHttps() throws SQLException { Properties props = new Properties(); props.put("user", "USER"); @@ -725,19 +722,20 @@ public PasswordAuthentication getPasswordAuthentication() { String TEST_DATA_FILE = "orders_100.csv"; String sourceFilePath = getFullPathFileInResource(TEST_DATA_FILE); - File destFolder = tmpFolder.newFolder(); + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); String destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; assertTrue( - "Failed to put a file", - stmt.execute("PUT file://" + sourceFilePath + " @testPutGet_stage")); + stmt.execute("PUT file://" + sourceFilePath + " @testPutGet_stage"), + "Failed to put a file"); findFile(stmt, "ls @testPutGet_stage/"); // download the file we just uploaded to stage assertTrue( - "Failed to get a file", stmt.execute( - "GET @testPutGet_stage 'file://" + destFolderCanonicalPath + "' parallel=8")); + "GET @testPutGet_stage 'file://" + destFolderCanonicalPath + "' parallel=8"), + "Failed to get a file"); // Make sure that the downloaded file exists, it should be gzip compressed File downloaded = new File(destFolderCanonicalPathWithSeparator + TEST_DATA_FILE + ".gz"); diff --git a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataIT.java b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataIT.java index ce3130761..8f1f5b964 100644 --- a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataIT.java +++ b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataIT.java @@ -5,15 +5,15 @@ import static java.sql.DatabaseMetaData.procedureReturnsResult; import static java.sql.ResultSetMetaData.columnNullableUnknown; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasItem; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import com.google.common.base.Strings; import java.sql.Connection; @@ -28,15 +28,14 @@ import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; import net.snowflake.client.TestUtil; -import net.snowflake.client.category.TestCategoryOthers; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** Database Metadata IT */ -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class DatabaseMetaDataIT extends BaseJDBCWithSharedConnectionIT { private static final Pattern VERSION_PATTERN = Pattern.compile("^(\\d+)\\.(\\d+)(?:\\.\\d+)+\\s*.*"); @@ -205,7 +204,7 @@ public void testGetTableTypes() throws Throwable { } @Test - 
@ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGetTables() throws Throwable { Set tables = null; try (Statement statement = connection.createStatement()) { @@ -564,7 +563,7 @@ public void testProcedure() throws Throwable { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGetTablePrivileges() throws Exception { try (Statement statement = connection.createStatement()) { String database = connection.getCatalog(); diff --git a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalIT.java b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalIT.java index ec590b066..00838fd1b 100644 --- a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalIT.java +++ b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalIT.java @@ -5,9 +5,9 @@ import static net.snowflake.client.jdbc.DatabaseMetaDataIT.EXPECTED_MAX_BINARY_LENGTH; import static net.snowflake.client.jdbc.DatabaseMetaDataIT.verifyResultSetMetaDataColumns; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Connection; import java.sql.DatabaseMetaData; @@ -15,24 +15,23 @@ import java.sql.SQLException; import java.sql.Statement; import java.sql.Types; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryOthers; -import org.junit.After; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** Database Metadata IT */ -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class DatabaseMetaDataInternalIT extends BaseJDBCTest { private Connection connection; private Statement statement; private DatabaseMetaData databaseMetaData; private ResultSet resultSet; - @Before + @BeforeEach public void setUp() throws SQLException { try (Connection con = getConnection()) { initMetaData(con); @@ -68,7 +67,7 @@ static void initMetaData(Connection con) throws SQLException { } } - @After + @AfterEach public void tearDown() throws SQLException { try (Connection con = getConnection()) { endMetaData(con); @@ -83,7 +82,8 @@ static void endMetaData(Connection con) throws SQLException { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @Disabled // TODO: SNOW-1805299 + @DontRunOnGithubActions public void testGetColumn() throws SQLException { String getAllColumnsCount = "select count(*) from db.information_schema.columns"; connection = getConnection(); @@ -166,7 +166,7 @@ public void testGetColumn() throws SQLException { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGetFunctions() throws SQLException { connection = getConnection(); statement = connection.createStatement(); @@ 
-241,7 +241,8 @@ public void testGetFunctions() throws SQLException { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @Disabled // TODO: SNOW-1805299 + @DontRunOnGithubActions public void testGetSchema() throws SQLException { String getSchemaCount = "select count(*) from db.information_schema.schemata"; connection = getConnection(); @@ -290,9 +291,9 @@ public void testGetSchema() throws SQLException { * getTables() function Author: Andong Zhan Created on 09/28/2018 */ @Test - @Ignore // SNOW-85084 detected this is a flaky test, so ignore it here. + @Disabled // SNOW-85084 detected this is a flaky test, so ignore it here. // We have other regression tests to cover it - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGetTablesReusingCachedResults() throws SQLException { Connection snowflakeConnection = getSnowflakeAdminConnection(); Statement snowflake = snowflakeConnection.createStatement(); @@ -449,7 +450,8 @@ private long getAccountId(Statement stmt, String accountName) throws SQLExceptio } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @Disabled // TODO: SNOW-1805299 + @DontRunOnGithubActions public void testGetTables() throws SQLException { String getAllTable = "select count(*) from db.information_schema.tables"; String getAllBaseTable = @@ -579,7 +581,7 @@ public void testGetTables() throws SQLException { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGetMetaDataUseConnectionCtx() throws SQLException { try (Connection connection = getConnection(); Statement statement = connection.createStatement()) { diff --git a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalLatestIT.java b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalLatestIT.java index 15701ca17..622f94a0a 100644 --- a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataInternalLatestIT.java @@ -2,8 +2,8 @@ import static net.snowflake.client.jdbc.DatabaseMetaDataInternalIT.endMetaData; import static net.snowflake.client.jdbc.DatabaseMetaDataInternalIT.initMetaData; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; import java.sql.Connection; import java.sql.DatabaseMetaData; @@ -17,13 +17,12 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryOthers; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Database Metadata tests for the latest JDBC driver. This doesn't work for the oldest supported @@ -31,17 +30,17 @@ * tests still is not applicable. 
If it is applicable, move tests to DatabaseMetaDataIT so that both * the latest and oldest supported driver run the tests. */ -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class DatabaseMetaDataInternalLatestIT extends BaseJDBCTest { - @Before + @BeforeEach public void setUp() throws Exception { try (Connection con = getConnection()) { initMetaData(con); } } - @After + @AfterEach public void tearDown() throws Exception { try (Connection con = getConnection()) { endMetaData(con); @@ -49,7 +48,7 @@ public void tearDown() throws Exception { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGetMetaDataUseConnectionCtx() throws SQLException { try (Connection connection = getConnection(); Statement statement = connection.createStatement()) { @@ -79,7 +78,7 @@ public void testGetMetaDataUseConnectionCtx() throws SQLException { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGetFunctionColumns() throws SQLException { try (Connection connection = getConnection(); Statement statement = connection.createStatement()) { @@ -253,7 +252,7 @@ public void testGetFunctionColumns() throws SQLException { /** Tests that calling getTables() concurrently doesn't cause data race condition. */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGetTablesRaceCondition() throws SQLException, ExecutionException, InterruptedException { try (Connection connection = getConnection()) { diff --git a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataLatestIT.java b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataLatestIT.java index 082907502..c038be49e 100644 --- a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataLatestIT.java @@ -9,13 +9,13 @@ import static net.snowflake.client.jdbc.SnowflakeDatabaseMetaData.NumericFunctionsSupported; import static net.snowflake.client.jdbc.SnowflakeDatabaseMetaData.StringFunctionsSupported; import static net.snowflake.client.jdbc.SnowflakeDatabaseMetaData.SystemFunctionsSupported; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.greaterThanOrEqualTo; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.lang.reflect.Field; import java.sql.Connection; @@ -30,16 +30,15 @@ import java.util.Map; import java.util.Properties; import java.util.Set; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; import net.snowflake.client.TestUtil; -import net.snowflake.client.category.TestCategoryOthers; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.SFBaseSession; import net.snowflake.client.core.SFSessionProperty; -import org.junit.Before; -import 
org.junit.Ignore; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * DatabaseMetaData test for the latest JDBC driver. This doesn't work for the oldest supported @@ -47,7 +46,7 @@ * tests still is not applicable. If it is applicable, move tests to DatabaseMetaDataIT so that both * the latest and oldest supported driver run the tests. */ -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class DatabaseMetaDataLatestIT extends BaseJDBCWithSharedConnectionIT { private static final String TEST_PROC = "create or replace procedure testproc(param1 float, param2 string)\n" @@ -99,7 +98,7 @@ public void createDoubleQuotedSchemaAndCatalog(Statement statement) throws SQLEx statement.execute("create or replace schema \"dbwith\"\"quotes\".\"schemawith\"\"quotes\""); } - @Before + @BeforeEach public void setUp() throws SQLException { try (Statement stmt = connection.createStatement()) { stmt.execute("USE DATABASE " + startingDatabase); @@ -272,7 +271,7 @@ public void testDoubleQuotedDatabaseAndSchema() throws Exception { * This tests the ability to have quotes inside a database or schema within getSchemas() function. */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testDoubleQuotedDatabaseInGetSchemas() throws SQLException { try (Statement statement = connection.createStatement()) { // Create a database with double quotes inside the database name @@ -300,7 +299,7 @@ public void testDoubleQuotedDatabaseInGetSchemas() throws SQLException { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testDoubleQuotedDatabaseInGetTables() throws SQLException { try (Statement statement = connection.createStatement()) { // Create a database with double quotes inside the database name @@ -316,7 +315,7 @@ public void testDoubleQuotedDatabaseInGetTables() throws SQLException { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testDoubleQuotedDatabaseInGetColumns() throws SQLException { try (Statement statement = connection.createStatement()) { // Create a database and schema with double quotes inside the database name @@ -332,7 +331,7 @@ public void testDoubleQuotedDatabaseInGetColumns() throws SQLException { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testDoubleQuotedDatabaseforGetPrimaryKeysAndForeignKeys() throws SQLException { try (Statement statement = connection.createStatement()) { // Create a database and schema with double quotes inside the database name @@ -361,7 +360,7 @@ public void testDoubleQuotedDatabaseforGetPrimaryKeysAndForeignKeys() throws SQL * getPrimaryKeys and getImportedKeys functions by setting enablePatternSearch = false. 
*/ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testDoubleQuotedDatabaseforGetPrimaryKeysAndForeignKeysWithPatternSearchDisabled() throws SQLException { Properties properties = new Properties(); @@ -390,7 +389,7 @@ public void testDoubleQuotedDatabaseforGetPrimaryKeysAndForeignKeysWithPatternSe } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testDoubleQuotedDatabaseInGetProcedures() throws SQLException { try (Statement statement = connection.createStatement()) { // Create a database and schema with double quotes inside the database name @@ -407,7 +406,7 @@ public void testDoubleQuotedDatabaseInGetProcedures() throws SQLException { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testDoubleQuotedDatabaseInGetTablePrivileges() throws SQLException { try (Statement statement = connection.createStatement()) { // Create a database and schema with double quotes inside the database name @@ -589,7 +588,7 @@ public void testGetColumnsNullable() throws Throwable { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testSessionDatabaseParameter() throws Throwable { String altdb = "ALTERNATEDB"; String altschema1 = "ALTERNATESCHEMA1"; @@ -756,7 +755,7 @@ public void testSessionDatabaseParameter() throws Throwable { * returns 1 row per return value and 1 row per input parameter. */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGetFunctionColumns() throws Exception { try (Statement statement = connection.createStatement()) { String database = startingDatabase; @@ -1667,7 +1666,7 @@ public void testGetStreams() throws SQLException { * This tests that an empty resultset will be returned for getProcedures when using a reader account. 
*/ @Test - @Ignore + @Disabled public void testGetProceduresWithReaderAccount() throws SQLException { DatabaseMetaData metadata = connection.getMetaData(); try (ResultSet rs = metadata.getProcedures(null, null, null)) { @@ -1676,7 +1675,7 @@ public void testGetProceduresWithReaderAccount() throws SQLException { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGetProcedureColumns() throws Exception { try (Statement statement = connection.createStatement()) { String database = startingDatabase; diff --git a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataResultSetLatestIT.java b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataResultSetLatestIT.java index f69260a69..2a62be5a2 100644 --- a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataResultSetLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataResultSetLatestIT.java @@ -3,9 +3,10 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Connection; import java.sql.DatabaseMetaData; @@ -15,14 +16,14 @@ import java.sql.Types; import java.util.Arrays; import java.util.List; -import net.snowflake.client.category.TestCategoryResultSet; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryResultSet.class) +@Tag(TestTags.RESULT_SET) public class DatabaseMetaDataResultSetLatestIT extends BaseJDBCTest { - @Test(expected = SnowflakeLoggedFeatureNotSupportedException.class) + @Test public void testGetObjectNotSupported() throws SQLException { try (Connection con = getConnection(); Statement st = con.createStatement()) { @@ -34,7 +35,9 @@ public void testGetObjectNotSupported() throws SQLException { new SnowflakeDatabaseMetaDataResultSet( columnNames, columnTypeNames, columnTypes, rows, st)) { resultSet.next(); - assertEquals(1.2F, resultSet.getObject(1)); + assertThrows( + SnowflakeLoggedFeatureNotSupportedException.class, + () -> assertEquals(1.2F, resultSet.getObject(1))); } } } diff --git a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataResultsetIT.java b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataResultsetIT.java index ccc984e3c..605ca3698 100644 --- a/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataResultsetIT.java +++ b/src/test/java/net/snowflake/client/jdbc/DatabaseMetaDataResultsetIT.java @@ -3,9 +3,9 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.math.BigDecimal; import java.sql.Date; @@ -17,11 +17,11 @@ import java.sql.Types; import java.util.Arrays; import java.util.List; -import net.snowflake.client.category.TestCategoryOthers; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import 
net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class DatabaseMetaDataResultsetIT extends BaseJDBCWithSharedConnectionIT { private static final int columnCount = 9; private static final int INT_DATA = 1; diff --git a/src/test/java/net/snowflake/client/jdbc/DellBoomiCloudIT.java b/src/test/java/net/snowflake/client/jdbc/DellBoomiCloudIT.java index 794af78df..bd2680be1 100644 --- a/src/test/java/net/snowflake/client/jdbc/DellBoomiCloudIT.java +++ b/src/test/java/net/snowflake/client/jdbc/DellBoomiCloudIT.java @@ -7,15 +7,16 @@ import java.sql.SQLException; import java.sql.Statement; import net.snowflake.client.AbstractDriverIT; -import net.snowflake.client.category.TestCategoryOthers; -import org.junit.Before; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** A simple run on fetch result under boomi cloud environment's policy file */ -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class DellBoomiCloudIT extends AbstractDriverIT { - @Before + @BeforeEach public void setup() { File file = new File(DellBoomiCloudIT.class.getResource("boomi.policy").getFile()); @@ -25,6 +26,7 @@ public void setup() { } @Test + @Disabled // TODO: SNOW-1805239 public void testSelectLargeResultSet() throws SQLException { try (Connection connection = getConnection(); Statement statement = connection.createStatement(); diff --git a/src/test/java/net/snowflake/client/jdbc/FileConnectionConfigurationLatestIT.java b/src/test/java/net/snowflake/client/jdbc/FileConnectionConfigurationLatestIT.java index 734446c92..5474488b3 100644 --- a/src/test/java/net/snowflake/client/jdbc/FileConnectionConfigurationLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/FileConnectionConfigurationLatestIT.java @@ -4,22 +4,23 @@ package net.snowflake.client.jdbc; import static net.snowflake.client.config.SFConnectionConfigParser.SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Connection; import java.sql.DriverManager; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; -import org.junit.After; -import org.junit.Assert; -import org.junit.Ignore; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; /** This test could be run only on environment where file connection.toml is configured */ -@Ignore +@Disabled public class FileConnectionConfigurationLatestIT { - @After + @AfterEach public void cleanUp() { SnowflakeUtil.systemUnsetEnv(SNOWFLAKE_DEFAULT_CONNECTION_NAME_KEY); } @@ -27,7 +28,7 @@ public void cleanUp() { @Test public void testThrowExceptionIfConfigurationDoesNotExist() { SnowflakeUtil.systemSetEnv("SNOWFLAKE_DEFAULT_CONNECTION_NAME", "non-existent"); - Assert.assertThrows(SnowflakeSQLException.class, () -> SnowflakeDriver.INSTANCE.connect()); + assertThrows(SnowflakeSQLException.class, () -> SnowflakeDriver.INSTANCE.connect()); } @Test @@ -46,7 +47,7 @@ private static void verifyConnetionToSnowflake(String connectionName) throws SQL DriverManager.getConnection(SnowflakeDriver.AUTO_CONNECTION_STRING_PREFIX, 
null); Statement statement = con.createStatement(); ResultSet resultSet = statement.executeQuery("show parameters")) { - Assert.assertTrue(resultSet.next()); + assertTrue(resultSet.next()); } } } diff --git a/src/test/java/net/snowflake/client/jdbc/FileUploaderExpandFileNamesTest.java b/src/test/java/net/snowflake/client/jdbc/FileUploaderExpandFileNamesTest.java index c874b3e33..8545ca998 100644 --- a/src/test/java/net/snowflake/client/jdbc/FileUploaderExpandFileNamesTest.java +++ b/src/test/java/net/snowflake/client/jdbc/FileUploaderExpandFileNamesTest.java @@ -4,9 +4,9 @@ package net.snowflake.client.jdbc; import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.File; import java.io.IOException; @@ -23,23 +23,20 @@ import java.util.concurrent.TimeUnit; import java.util.stream.IntStream; import net.snowflake.client.core.OCSPMode; -import org.junit.Assert; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; /** Tests for SnowflakeFileTransferAgent.expandFileNames */ public class FileUploaderExpandFileNamesTest { - @Rule public TemporaryFolder folder = new TemporaryFolder(); - @Rule public TemporaryFolder secondFolder = new TemporaryFolder(); + @TempDir private File folder; private String localFSFileSep = systemGetProperty("file.separator"); @Test public void testProcessFileNames() throws Exception { - folder.newFile("TestFileA"); - folder.newFile("TestFileB"); + new File(folder, "TestFileA").createNewFile(); + new File(folder, "TestFileB").createNewFile(); - String folderName = folder.getRoot().getCanonicalPath(); + String folderName = folder.getCanonicalPath(); String originalUserDir = System.getProperty("user.dir"); String originalUserHome = System.getProperty("user.home"); System.setProperty("user.dir", folderName); @@ -82,8 +79,8 @@ public void testProcessFileNamesException() { try { SnowflakeFileTransferAgent.expandFileNames(locations, null); } catch (SnowflakeSQLException err) { - Assert.assertEquals(200007, err.getErrorCode()); - Assert.assertEquals("22000", err.getSQLState()); + assertEquals(200007, err.getErrorCode()); + assertEquals("22000", err.getSQLState()); } SnowflakeFileTransferAgent.setInjectedFileTransferException(null); } @@ -163,8 +160,8 @@ public int read() throws IOException { */ @Test public void testFileListingDoesNotFailOnMissingFilesOfAnotherPattern() throws Exception { - folder.newFolder("TestFiles"); - String folderName = folder.getRoot().getCanonicalPath(); + new File(folder, "TestFiles").mkdirs(); + String folderName = folder.getCanonicalPath(); int filePatterns = 10; int filesPerPattern = 100; @@ -224,8 +221,8 @@ public void testFileListingDoesNotFailOnMissingFilesOfAnotherPattern() throws Ex @Test public void testFileListingDoesNotFailOnNotExistingDirectory() throws Exception { - folder.newFolder("TestFiles"); - String folderName = folder.getRoot().getCanonicalPath(); + new File(folder, "TestFiles").mkdirs(); + String folderName = folder.getCanonicalPath(); String[] locations = { folderName + localFSFileSep + "foo*", }; diff --git a/src/test/java/net/snowflake/client/jdbc/FileUploaderLatestIT.java 
b/src/test/java/net/snowflake/client/jdbc/FileUploaderLatestIT.java index 995832362..a116a794b 100644 --- a/src/test/java/net/snowflake/client/jdbc/FileUploaderLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/FileUploaderLatestIT.java @@ -5,10 +5,10 @@ import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty; import static org.hamcrest.CoreMatchers.instanceOf; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import com.amazonaws.services.s3.model.ObjectMetadata; import com.fasterxml.jackson.databind.ObjectMapper; @@ -32,9 +32,8 @@ import java.util.List; import java.util.Map; import java.util.Properties; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryOthers; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.OCSPMode; import net.snowflake.client.core.SFSession; import net.snowflake.client.core.SFStatement; @@ -47,12 +46,11 @@ import net.snowflake.client.jdbc.cloud.storage.StorageProviderException; import net.snowflake.common.core.RemoteStoreFileEncryptionMaterial; import org.apache.commons.io.FileUtils; -import org.junit.Assert; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** Tests for SnowflakeFileTransferAgent that require an active connection */ -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class FileUploaderLatestIT extends FileUploaderPrep { private static final String OBJ_META_STAGE = "testObjMeta"; private ObjectMapper mapper = new ObjectMapper(); @@ -65,7 +63,7 @@ public class FileUploaderLatestIT extends FileUploaderPrep { * @throws SQLException */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGetS3StageDataWithS3Session() throws SQLException { try (Connection con = getConnection("s3testaccount")) { SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); @@ -74,16 +72,16 @@ public void testGetS3StageDataWithS3Session() throws SQLException { // Get sample stage info with session StageInfo stageInfo = SnowflakeFileTransferAgent.getStageInfo(exampleS3JsonNode, sfSession); - Assert.assertEquals(StageInfo.StageType.S3, stageInfo.getStageType()); + assertEquals(StageInfo.StageType.S3, stageInfo.getStageType()); // Assert that true value from session is reflected in StageInfo - Assert.assertEquals(true, stageInfo.getUseS3RegionalUrl()); + assertEquals(true, stageInfo.getUseS3RegionalUrl()); // Set UseRegionalS3EndpointsForPresignedURL to false in session sfSession.setUseRegionalS3EndpointsForPresignedURL(false); stageInfo = SnowflakeFileTransferAgent.getStageInfo(exampleS3JsonNode, sfSession); - Assert.assertEquals(StageInfo.StageType.S3, stageInfo.getStageType()); + assertEquals(StageInfo.StageType.S3, stageInfo.getStageType()); // Assert that false value from session is reflected in StageInfo - Assert.assertEquals(false, stageInfo.getUseS3RegionalUrl()); + assertEquals(false, 
stageInfo.getUseS3RegionalUrl()); } } @@ -94,7 +92,7 @@ public void testGetS3StageDataWithS3Session() throws SQLException { * @throws SQLException */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGetS3StageDataWithAzureSession() throws SQLException { try (Connection con = getConnection("azureaccount")) { SFSession sfSession = con.unwrap(SnowflakeConnectionV1.class).getSfSession(); @@ -106,18 +104,18 @@ public void testGetS3StageDataWithAzureSession() throws SQLException { // Get sample stage info with session StageInfo stageInfo = SnowflakeFileTransferAgent.getStageInfo(exampleAzureJsonNode, sfSession); - Assert.assertEquals(StageInfo.StageType.AZURE, stageInfo.getStageType()); - Assert.assertEquals("EXAMPLE_LOCATION/", stageInfo.getLocation()); + assertEquals(StageInfo.StageType.AZURE, stageInfo.getStageType()); + assertEquals("EXAMPLE_LOCATION/", stageInfo.getLocation()); // Assert that UseRegionalS3EndpointsForPresignedURL is false in StageInfo even if it was set // to // true. // The value should always be false for non-S3 accounts - Assert.assertEquals(false, stageInfo.getUseS3RegionalUrl()); + assertEquals(false, stageInfo.getUseS3RegionalUrl()); } } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGetObjectMetadataWithGCS() throws Exception { Properties paramProperties = new Properties(); paramProperties.put("GCS_USE_DOWNSCOPED_CREDENTIAL", true); @@ -143,7 +141,7 @@ public void testGetObjectMetadataWithGCS() throws Exception { String remoteStageLocation = location.substring(0, idx); String path = location.substring(idx + 1) + TEST_DATA_FILE + ".gz"; StorageObjectMetadata metadata = client.getObjectMetadata(remoteStageLocation, path); - Assert.assertEquals("gzip", metadata.getContentEncoding()); + assertEquals("gzip", metadata.getContentEncoding()); } finally { statement.execute("DROP STAGE if exists " + OBJ_META_STAGE); } @@ -151,7 +149,7 @@ public void testGetObjectMetadataWithGCS() throws Exception { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGetObjectMetadataFileNotFoundWithGCS() throws Exception { Properties paramProperties = new Properties(); paramProperties.put("GCS_USE_DOWNSCOPED_CREDENTIAL", true); @@ -180,8 +178,8 @@ public void testGetObjectMetadataFileNotFoundWithGCS() throws Exception { fail("should raise exception"); } catch (Exception ex) { assertTrue( - "Wrong type of exception. Message: " + ex.getMessage(), - ex instanceof StorageProviderException); + ex instanceof StorageProviderException, + "Wrong type of exception. Message: " + ex.getMessage()); assertTrue(ex.getMessage().matches(".*Blob.*not found in bucket.*")); } finally { statement.execute("DROP STAGE if exists " + OBJ_META_STAGE); @@ -190,7 +188,7 @@ public void testGetObjectMetadataFileNotFoundWithGCS() throws Exception { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGetObjectMetadataStorageExceptionWithGCS() throws Exception { Properties paramProperties = new Properties(); paramProperties.put("GCS_USE_DOWNSCOPED_CREDENTIAL", true); @@ -218,8 +216,8 @@ public void testGetObjectMetadataStorageExceptionWithGCS() throws Exception { fail("should raise exception"); } catch (Exception ex) { assertTrue( - "Wrong type of exception. 
Message: " + ex.getMessage(), - ex instanceof StorageProviderException); + ex instanceof StorageProviderException, + "Wrong type of exception. Message: " + ex.getMessage()); assertTrue(ex.getMessage().matches(".*Permission.*denied.*")); } finally { statement.execute("DROP STAGE if exists " + OBJ_META_STAGE); @@ -253,8 +251,8 @@ public void testNullCommand() throws SQLException { SnowflakeFileTransferAgent sfAgent = new SnowflakeFileTransferAgent(null, sfSession, new SFStatement(sfSession)); } catch (SnowflakeSQLException err) { - Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); - Assert.assertTrue( + assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); + assertTrue( err.getMessage() .contains("JDBC driver internal error: Missing sql for statement execution")); } finally { @@ -294,8 +292,8 @@ public void testCompressStreamWithGzipException() throws Exception { .setCommand(PUT_COMMAND) .build()); } catch (SnowflakeSQLException err) { - Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); - Assert.assertTrue( + assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); + assertTrue( err.getMessage() .contains("JDBC driver internal error: error encountered for compression")); } finally { @@ -338,8 +336,8 @@ public void testCompressStreamWithGzipNoDigestException() throws Exception { .setCommand(PUT_COMMAND) .build()); } catch (SnowflakeSQLException err) { - Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); - Assert.assertTrue( + assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); + assertTrue( err.getMessage() .contains("JDBC driver internal error: error encountered for compression")); } finally { @@ -382,7 +380,7 @@ public void testUploadWithoutConnectionException() throws Exception { .setCommand(PUT_COMMAND) .build()); } catch (Exception err) { - Assert.assertTrue( + assertTrue( err.getMessage() .contains( "Exception encountered during file upload: failed to push to remote store")); @@ -405,7 +403,7 @@ public void testInitFileMetadataFileNotFound() throws Exception { sfAgent.execute(); } catch (SnowflakeSQLException err) { - Assert.assertEquals(200008, err.getErrorCode()); + assertEquals(200008, err.getErrorCode()); } finally { statement.execute("DROP STAGE if exists testStage"); } @@ -426,7 +424,7 @@ public void testInitFileMetadataFileIsDirectory() throws Exception { new SnowflakeFileTransferAgent(command, sfSession, new SFStatement(sfSession)); sfAgent.execute(); } catch (SnowflakeSQLException err) { - Assert.assertEquals(200009, err.getErrorCode()); + assertEquals(200009, err.getErrorCode()); } finally { statement.execute("DROP STAGE if exists testStage"); } @@ -449,8 +447,8 @@ public void testCompareAndSkipFilesException() throws Exception { sfAgent.execute(); } catch (SnowflakeSQLException err) { - Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); - Assert.assertTrue(err.getMessage().contains("Error reading:")); + assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); + assertTrue(err.getMessage().contains("Error reading:")); } finally { statement.execute("DROP STAGE if exists testStage"); } @@ -472,8 +470,8 @@ public void testParseCommandException() throws SQLException { new SnowflakeFileTransferAgent(PUT_COMMAND, sfSession, new SFStatement(sfSession)); } catch (SnowflakeSQLException err) { - 
Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); - Assert.assertTrue(err.getMessage().contains("Failed to parse the locations")); + assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); + assertTrue(err.getMessage().contains("Failed to parse the locations")); } finally { statement.execute("DROP STAGE if exists testStage"); } @@ -534,8 +532,8 @@ public void testListObjectsStorageException() throws Exception { sfAgent.execute(); } catch (SnowflakeSQLException err) { - Assert.assertEquals(200016, err.getErrorCode()); - Assert.assertTrue(err.getMessage().contains("Encountered exception during listObjects")); + assertEquals(200016, err.getErrorCode()); + assertTrue(err.getMessage().contains("Encountered exception during listObjects")); } finally { statement.execute("DROP STAGE if exists testStage"); } @@ -563,7 +561,7 @@ public void testUploadStreamInterruptedException() throws IOException, SQLExcept "~", DEST_PREFIX, outputStream.asByteSource().openStream(), "hello.txt", false); } catch (SnowflakeSQLLoggedException err) { - Assert.assertEquals(200003, err.getErrorCode()); + assertEquals(200003, err.getErrorCode()); } finally { statement.execute("rm @~/" + DEST_PREFIX); } @@ -666,7 +664,7 @@ public void testUploadFileStreamWithNoOverwrite() throws Exception { assertEquals(expectedValue, actualValue); } } catch (Exception e) { - Assert.fail("testUploadFileStreamWithNoOverwrite failed " + e.getMessage()); + fail("testUploadFileStreamWithNoOverwrite failed " + e.getMessage()); } finally { statement.execute("DROP STAGE if exists testStage"); } @@ -696,7 +694,7 @@ public void testUploadFileStreamWithOverwrite() throws Exception { assertFalse(expectedValue.equals(actualValue)); } } catch (Exception e) { - Assert.fail("testUploadFileStreamWithNoOverwrite failed " + e.getMessage()); + fail("testUploadFileStreamWithNoOverwrite failed " + e.getMessage()); } finally { statement.execute("DROP STAGE if exists testStage"); } @@ -704,7 +702,7 @@ public void testUploadFileStreamWithOverwrite() throws Exception { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGetS3StorageObjectMetadata() throws Throwable { try (Connection connection = getConnection("s3testaccount"); Statement statement = connection.createStatement()) { diff --git a/src/test/java/net/snowflake/client/jdbc/FileUploaderMimeTypeToCompressionTypeTest.java b/src/test/java/net/snowflake/client/jdbc/FileUploaderMimeTypeToCompressionTypeTest.java index d9ad6d2c1..418c70a05 100644 --- a/src/test/java/net/snowflake/client/jdbc/FileUploaderMimeTypeToCompressionTypeTest.java +++ b/src/test/java/net/snowflake/client/jdbc/FileUploaderMimeTypeToCompressionTypeTest.java @@ -3,55 +3,49 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import java.util.Arrays; -import java.util.Collection; import java.util.Optional; +import java.util.stream.Stream; import net.snowflake.common.core.FileCompressionType; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsProvider; +import org.junit.jupiter.params.provider.ArgumentsSource; /** * Tests for 
SnowflakeFileTransferAgent.mimeTypeToCompressionType See * https://github.com/apache/tika/blob/master/tika-core/src/main/resources/org/apache/tika/mime/tika-mimetypes.xml * for test cases */ -@RunWith(Parameterized.class) public class FileUploaderMimeTypeToCompressionTypeTest { - private final String mimeType; - private final FileCompressionType mimeSubType; - public FileUploaderMimeTypeToCompressionTypeTest( - String mimeType, FileCompressionType mimeSubType) { - this.mimeType = mimeType; - this.mimeSubType = mimeSubType; - } - - @Parameterized.Parameters(name = "mimeType={0}, mimeSubType={1}") - public static Collection<Object[]> primeNumbers() { - return Arrays.asList( - new Object[][] { - {"text/", null}, - {"text/csv", null}, - {"snowflake/orc", FileCompressionType.ORC}, - {"snowflake/orc;p=1", FileCompressionType.ORC}, - {"snowflake/parquet", FileCompressionType.PARQUET}, - {"application/zlib", FileCompressionType.DEFLATE}, - {"application/x-bzip2", FileCompressionType.BZIP2}, - {"application/zstd", FileCompressionType.ZSTD}, - {"application/x-brotli", FileCompressionType.BROTLI}, - {"application/x-lzip", FileCompressionType.LZIP}, - {"application/x-lzma", FileCompressionType.LZMA}, - {"application/x-xz", FileCompressionType.XZ}, - {"application/x-compress", FileCompressionType.COMPRESS}, - {"application/x-gzip", FileCompressionType.GZIP} - }); + static class MimeTypesProvider implements ArgumentsProvider { + @Override + public Stream provideArguments(ExtensionContext context) throws Exception { + return Stream.of( + Arguments.of("text/", null), + Arguments.of("text/csv", null), + Arguments.of("snowflake/orc", FileCompressionType.ORC), + Arguments.of("snowflake/orc;p=1", FileCompressionType.ORC), + Arguments.of("snowflake/parquet", FileCompressionType.PARQUET), + Arguments.of("application/zlib", FileCompressionType.DEFLATE), + Arguments.of("application/x-bzip2", FileCompressionType.BZIP2), + Arguments.of("application/zstd", FileCompressionType.ZSTD), + Arguments.of("application/x-brotli", FileCompressionType.BROTLI), + Arguments.of("application/x-lzip", FileCompressionType.LZIP), + Arguments.of("application/x-lzma", FileCompressionType.LZMA), + Arguments.of("application/x-xz", FileCompressionType.XZ), + Arguments.of("application/x-compress", FileCompressionType.COMPRESS), + Arguments.of("application/x-gzip", FileCompressionType.GZIP)); + } } - @Test - public void testMimeTypeToCompressionType() throws Throwable { + @ParameterizedTest + @ArgumentsSource(MimeTypesProvider.class) + public void testMimeTypeToCompressionType(String mimeType, FileCompressionType mimeSubType) + throws Throwable { Optional<FileCompressionType> foundCompType = SnowflakeFileTransferAgent.mimeTypeToCompressionType(mimeType); if (foundCompType.isPresent()) { diff --git a/src/test/java/net/snowflake/client/jdbc/FileUploaderPrep.java b/src/test/java/net/snowflake/client/jdbc/FileUploaderPrep.java index d801a00ac..276eb0234 100644 --- a/src/test/java/net/snowflake/client/jdbc/FileUploaderPrep.java +++ b/src/test/java/net/snowflake/client/jdbc/FileUploaderPrep.java @@ -10,13 +10,10 @@ import java.io.InputStream; import java.util.Arrays; import java.util.List; -import org.junit.BeforeClass; -import org.junit.Rule; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.BeforeAll; /** File uploader test prep reused by IT/connection tests and sessionless tests */ abstract class FileUploaderPrep extends BaseJDBCTest { - @Rule public TemporaryFolder folder = new TemporaryFolder(); private static final ObjectMapper mapper = new
ObjectMapper(); @@ -35,7 +32,7 @@ private static JsonNode readJsonFromFile(String name) throws IOException { } } - @BeforeClass + @BeforeAll public static void setup() throws Exception { exampleS3JsonNode = readJsonFromFile("exampleS3"); exampleS3StageEndpointJsonNode = readJsonFromFile("exampleS3WithStageEndpoint"); diff --git a/src/test/java/net/snowflake/client/jdbc/FileUploaderSessionlessTest.java b/src/test/java/net/snowflake/client/jdbc/FileUploaderSessionlessTest.java index f5fb7f719..1c74869f0 100644 --- a/src/test/java/net/snowflake/client/jdbc/FileUploaderSessionlessTest.java +++ b/src/test/java/net/snowflake/client/jdbc/FileUploaderSessionlessTest.java @@ -3,10 +3,11 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; @@ -19,8 +20,7 @@ import java.util.Optional; import net.snowflake.client.jdbc.cloud.storage.StageInfo; import net.snowflake.common.core.RemoteStoreFileEncryptionMaterial; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** Tests for SnowflakeFileTransferAgent.expandFileNames. */ public class FileUploaderSessionlessTest extends FileUploaderPrep { @@ -37,8 +37,8 @@ public void testGetEncryptionMaterialMissing() throws Exception { SnowflakeFileTransferAgent.getEncryptionMaterial( SFBaseFileTransferAgent.CommandType.UPLOAD, modifiedNode); - Assert.assertEquals(1, encryptionMaterials.size()); - Assert.assertNull(encryptionMaterials.get(0)); + assertEquals(1, encryptionMaterials.size()); + assertNull(encryptionMaterials.get(0)); } @Test @@ -54,12 +54,12 @@ public void testGetEncryptionMaterial() throws Exception { SnowflakeFileTransferAgent.getEncryptionMaterial( SFBaseFileTransferAgent.CommandType.UPLOAD, exampleNode); - Assert.assertEquals(1, encryptionMaterials.size()); - Assert.assertEquals( + assertEquals(1, encryptionMaterials.size()); + assertEquals( expected.get(0).getQueryStageMasterKey(), encryptionMaterials.get(0).getQueryStageMasterKey()); - Assert.assertEquals(expected.get(0).getQueryId(), encryptionMaterials.get(0).getQueryId()); - Assert.assertEquals(expected.get(0).getSmkId(), encryptionMaterials.get(0).getSmkId()); + assertEquals(expected.get(0).getQueryId(), encryptionMaterials.get(0).getQueryId()); + assertEquals(expected.get(0).getSmkId(), encryptionMaterials.get(0).getSmkId()); } } @@ -73,14 +73,14 @@ public void testGetS3StageData() throws Exception { expectedCreds.put("AWS_SECRET_KEY", "EXAMPLE_AWS_SECRET_KEY"); expectedCreds.put("AWS_TOKEN", "EXAMPLE_AWS_TOKEN"); - Assert.assertEquals(StageInfo.StageType.S3, stageInfo.getStageType()); - Assert.assertEquals("stage/location/foo/", stageInfo.getLocation()); - Assert.assertEquals(expectedCreds, stageInfo.getCredentials()); - Assert.assertEquals("us-west-2", stageInfo.getRegion()); - Assert.assertEquals("null", stageInfo.getEndPoint()); - Assert.assertEquals(null, stageInfo.getStorageAccount()); - Assert.assertEquals(true, stageInfo.getIsClientSideEncrypted()); - Assert.assertEquals(true, 
stageInfo.getUseS3RegionalUrl()); + assertEquals(StageInfo.StageType.S3, stageInfo.getStageType()); + assertEquals("stage/location/foo/", stageInfo.getLocation()); + assertEquals(expectedCreds, stageInfo.getCredentials()); + assertEquals("us-west-2", stageInfo.getRegion()); + assertEquals("null", stageInfo.getEndPoint()); + assertEquals(null, stageInfo.getStorageAccount()); + assertEquals(true, stageInfo.getIsClientSideEncrypted()); + assertEquals(true, stageInfo.getUseS3RegionalUrl()); } @Test @@ -94,13 +94,13 @@ public void testGetS3StageDataWithStageEndpoint() throws Exception { expectedCreds.put("AWS_SECRET_KEY", "EXAMPLE_AWS_SECRET_KEY"); expectedCreds.put("AWS_TOKEN", "EXAMPLE_AWS_TOKEN"); - Assert.assertEquals(StageInfo.StageType.S3, stageInfo.getStageType()); - Assert.assertEquals("stage/location/foo/", stageInfo.getLocation()); - Assert.assertEquals(expectedCreds, stageInfo.getCredentials()); - Assert.assertEquals("us-west-2", stageInfo.getRegion()); - Assert.assertEquals("s3-fips.us-east-1.amazonaws.com", stageInfo.getEndPoint()); - Assert.assertEquals(null, stageInfo.getStorageAccount()); - Assert.assertEquals(true, stageInfo.getIsClientSideEncrypted()); + assertEquals(StageInfo.StageType.S3, stageInfo.getStageType()); + assertEquals("stage/location/foo/", stageInfo.getLocation()); + assertEquals(expectedCreds, stageInfo.getCredentials()); + assertEquals("us-west-2", stageInfo.getRegion()); + assertEquals("s3-fips.us-east-1.amazonaws.com", stageInfo.getEndPoint()); + assertEquals(null, stageInfo.getStorageAccount()); + assertEquals(true, stageInfo.getIsClientSideEncrypted()); } @Test @@ -109,13 +109,13 @@ public void testGetAzureStageData() throws Exception { Map expectedCreds = new HashMap<>(); expectedCreds.put("AZURE_SAS_TOKEN", "EXAMPLE_AZURE_SAS_TOKEN"); - Assert.assertEquals(StageInfo.StageType.AZURE, stageInfo.getStageType()); - Assert.assertEquals("EXAMPLE_LOCATION/", stageInfo.getLocation()); - Assert.assertEquals(expectedCreds, stageInfo.getCredentials()); - Assert.assertEquals("westus", stageInfo.getRegion()); - Assert.assertEquals("blob.core.windows.net", stageInfo.getEndPoint()); - Assert.assertEquals("EXAMPLE_STORAGE_ACCOUNT", stageInfo.getStorageAccount()); - Assert.assertEquals(true, stageInfo.getIsClientSideEncrypted()); + assertEquals(StageInfo.StageType.AZURE, stageInfo.getStageType()); + assertEquals("EXAMPLE_LOCATION/", stageInfo.getLocation()); + assertEquals(expectedCreds, stageInfo.getCredentials()); + assertEquals("westus", stageInfo.getRegion()); + assertEquals("blob.core.windows.net", stageInfo.getEndPoint()); + assertEquals("EXAMPLE_STORAGE_ACCOUNT", stageInfo.getStorageAccount()); + assertEquals(true, stageInfo.getIsClientSideEncrypted()); } @Test @@ -123,20 +123,20 @@ public void testGetGCSStageData() throws Exception { StageInfo stageInfo = SnowflakeFileTransferAgent.getStageInfo(exampleGCSJsonNode, null); Map expectedCreds = new HashMap<>(); - Assert.assertEquals(StageInfo.StageType.GCS, stageInfo.getStageType()); - Assert.assertEquals("foo/tables/9224/", stageInfo.getLocation()); - Assert.assertEquals(expectedCreds, stageInfo.getCredentials()); - Assert.assertEquals("US-WEST1", stageInfo.getRegion()); - Assert.assertEquals(null, stageInfo.getEndPoint()); - Assert.assertEquals(null, stageInfo.getStorageAccount()); - Assert.assertEquals(true, stageInfo.getIsClientSideEncrypted()); + assertEquals(StageInfo.StageType.GCS, stageInfo.getStageType()); + assertEquals("foo/tables/9224/", stageInfo.getLocation()); + assertEquals(expectedCreds, 
stageInfo.getCredentials()); + assertEquals("US-WEST1", stageInfo.getRegion()); + assertEquals(null, stageInfo.getEndPoint()); + assertEquals(null, stageInfo.getStorageAccount()); + assertEquals(true, stageInfo.getIsClientSideEncrypted()); } @Test public void testGetFileTransferMetadatasS3() throws Exception { List metadataList = SnowflakeFileTransferAgent.getFileTransferMetadatas(exampleS3JsonNode); - Assert.assertEquals(1, metadataList.size()); + assertEquals(1, metadataList.size()); SnowflakeFileTransferMetadataV1 metadata = (SnowflakeFileTransferMetadataV1) metadataList.get(0); @@ -151,25 +151,25 @@ public void testGetFileTransferMetadatasS3() throws Exception { expectedCreds.put("AWS_SECRET_KEY", "EXAMPLE_AWS_SECRET_KEY"); expectedCreds.put("AWS_TOKEN", "EXAMPLE_AWS_TOKEN"); - Assert.assertEquals(StageInfo.StageType.S3, stageInfo.getStageType()); - Assert.assertEquals("stage/location/foo/", stageInfo.getLocation()); - Assert.assertEquals(expectedCreds, stageInfo.getCredentials()); - Assert.assertEquals("us-west-2", stageInfo.getRegion()); - Assert.assertEquals("null", stageInfo.getEndPoint()); - Assert.assertEquals(null, stageInfo.getStorageAccount()); - Assert.assertEquals(true, stageInfo.getIsClientSideEncrypted()); + assertEquals(StageInfo.StageType.S3, stageInfo.getStageType()); + assertEquals("stage/location/foo/", stageInfo.getLocation()); + assertEquals(expectedCreds, stageInfo.getCredentials()); + assertEquals("us-west-2", stageInfo.getRegion()); + assertEquals("null", stageInfo.getEndPoint()); + assertEquals(null, stageInfo.getStorageAccount()); + assertEquals(true, stageInfo.getIsClientSideEncrypted()); // EncryptionMaterial check - Assert.assertEquals("EXAMPLE_QUERY_ID", metadata.getEncryptionMaterial().getQueryId()); - Assert.assertEquals( + assertEquals("EXAMPLE_QUERY_ID", metadata.getEncryptionMaterial().getQueryId()); + assertEquals( "EXAMPLE_QUERY_STAGE_MASTER_KEY", metadata.getEncryptionMaterial().getQueryStageMasterKey()); - Assert.assertEquals(123L, (long) metadata.getEncryptionMaterial().getSmkId()); + assertEquals(123L, (long) metadata.getEncryptionMaterial().getSmkId()); // Misc check - Assert.assertEquals(SFBaseFileTransferAgent.CommandType.UPLOAD, metadata.getCommandType()); - Assert.assertNull(metadata.getPresignedUrl()); - Assert.assertEquals("orders_100.csv", metadata.getPresignedUrlFileName()); + assertEquals(SFBaseFileTransferAgent.CommandType.UPLOAD, metadata.getCommandType()); + assertNull(metadata.getPresignedUrl()); + assertEquals("orders_100.csv", metadata.getPresignedUrlFileName()); } @Test @@ -180,7 +180,7 @@ public void testGetFileTransferMetadatasS3MissingEncryption() throws Exception { List metadataList = SnowflakeFileTransferAgent.getFileTransferMetadatas(modifiedNode); - Assert.assertEquals(1, metadataList.size()); + assertEquals(1, metadataList.size()); SnowflakeFileTransferMetadataV1 metadata = (SnowflakeFileTransferMetadataV1) metadataList.get(0); @@ -195,30 +195,30 @@ public void testGetFileTransferMetadatasS3MissingEncryption() throws Exception { expectedCreds.put("AWS_SECRET_KEY", "EXAMPLE_AWS_SECRET_KEY"); expectedCreds.put("AWS_TOKEN", "EXAMPLE_AWS_TOKEN"); - Assert.assertEquals(StageInfo.StageType.S3, stageInfo.getStageType()); - Assert.assertEquals("stage/location/foo/", stageInfo.getLocation()); - Assert.assertEquals(expectedCreds, stageInfo.getCredentials()); - Assert.assertEquals("us-west-2", stageInfo.getRegion()); - Assert.assertEquals("null", stageInfo.getEndPoint()); - Assert.assertEquals(null, stageInfo.getStorageAccount()); - 
Assert.assertEquals(true, stageInfo.getIsClientSideEncrypted()); + assertEquals(StageInfo.StageType.S3, stageInfo.getStageType()); + assertEquals("stage/location/foo/", stageInfo.getLocation()); + assertEquals(expectedCreds, stageInfo.getCredentials()); + assertEquals("us-west-2", stageInfo.getRegion()); + assertEquals("null", stageInfo.getEndPoint()); + assertEquals(null, stageInfo.getStorageAccount()); + assertEquals(true, stageInfo.getIsClientSideEncrypted()); // EncryptionMaterial check - Assert.assertNull(metadata.getEncryptionMaterial().getQueryId()); - Assert.assertNull(metadata.getEncryptionMaterial().getQueryStageMasterKey()); - Assert.assertNull(metadata.getEncryptionMaterial().getSmkId()); + assertNull(metadata.getEncryptionMaterial().getQueryId()); + assertNull(metadata.getEncryptionMaterial().getQueryStageMasterKey()); + assertNull(metadata.getEncryptionMaterial().getSmkId()); // Misc check - Assert.assertEquals(SFBaseFileTransferAgent.CommandType.UPLOAD, metadata.getCommandType()); - Assert.assertNull(metadata.getPresignedUrl()); - Assert.assertEquals("orders_100.csv", metadata.getPresignedUrlFileName()); + assertEquals(SFBaseFileTransferAgent.CommandType.UPLOAD, metadata.getCommandType()); + assertNull(metadata.getPresignedUrl()); + assertEquals("orders_100.csv", metadata.getPresignedUrlFileName()); } @Test public void testGetFileTransferMetadatasAzure() throws Exception { List metadataList = SnowflakeFileTransferAgent.getFileTransferMetadatas(exampleAzureJsonNode); - Assert.assertEquals(1, metadataList.size()); + assertEquals(1, metadataList.size()); SnowflakeFileTransferMetadataV1 metadata = (SnowflakeFileTransferMetadataV1) metadataList.get(0); @@ -229,32 +229,32 @@ public void testGetFileTransferMetadatasAzure() throws Exception { Map expectedCreds = new HashMap<>(); expectedCreds.put("AZURE_SAS_TOKEN", "EXAMPLE_AZURE_SAS_TOKEN"); - Assert.assertEquals(StageInfo.StageType.AZURE, stageInfo.getStageType()); - Assert.assertEquals("EXAMPLE_LOCATION/", stageInfo.getLocation()); - Assert.assertEquals(expectedCreds, stageInfo.getCredentials()); - Assert.assertEquals("westus", stageInfo.getRegion()); - Assert.assertEquals("blob.core.windows.net", stageInfo.getEndPoint()); - Assert.assertEquals("EXAMPLE_STORAGE_ACCOUNT", stageInfo.getStorageAccount()); - Assert.assertEquals(true, stageInfo.getIsClientSideEncrypted()); + assertEquals(StageInfo.StageType.AZURE, stageInfo.getStageType()); + assertEquals("EXAMPLE_LOCATION/", stageInfo.getLocation()); + assertEquals(expectedCreds, stageInfo.getCredentials()); + assertEquals("westus", stageInfo.getRegion()); + assertEquals("blob.core.windows.net", stageInfo.getEndPoint()); + assertEquals("EXAMPLE_STORAGE_ACCOUNT", stageInfo.getStorageAccount()); + assertEquals(true, stageInfo.getIsClientSideEncrypted()); // EncryptionMaterial check - Assert.assertEquals("EXAMPLE_QUERY_ID", metadata.getEncryptionMaterial().getQueryId()); - Assert.assertEquals( + assertEquals("EXAMPLE_QUERY_ID", metadata.getEncryptionMaterial().getQueryId()); + assertEquals( "EXAMPLE_QUERY_STAGE_MASTER_KEY", metadata.getEncryptionMaterial().getQueryStageMasterKey()); - Assert.assertEquals(123L, (long) metadata.getEncryptionMaterial().getSmkId()); + assertEquals(123L, (long) metadata.getEncryptionMaterial().getSmkId()); // Misc check - Assert.assertEquals(SFBaseFileTransferAgent.CommandType.UPLOAD, metadata.getCommandType()); - Assert.assertNull(metadata.getPresignedUrl()); - Assert.assertEquals("orders_100.csv", metadata.getPresignedUrlFileName()); + 
assertEquals(SFBaseFileTransferAgent.CommandType.UPLOAD, metadata.getCommandType()); + assertNull(metadata.getPresignedUrl()); + assertEquals("orders_100.csv", metadata.getPresignedUrlFileName()); } @Test public void testGetFileTransferMetadatasGCS() throws Exception { List metadataList = SnowflakeFileTransferAgent.getFileTransferMetadatas(exampleGCSJsonNode); - Assert.assertEquals(1, metadataList.size()); + assertEquals(1, metadataList.size()); SnowflakeFileTransferMetadataV1 metadata = (SnowflakeFileTransferMetadataV1) metadataList.get(0); @@ -264,33 +264,33 @@ public void testGetFileTransferMetadatasGCS() throws Exception { Map expectedCreds = new HashMap<>(); - Assert.assertEquals(StageInfo.StageType.GCS, stageInfo.getStageType()); - Assert.assertEquals("foo/tables/9224/", stageInfo.getLocation()); - Assert.assertEquals(expectedCreds, stageInfo.getCredentials()); - Assert.assertEquals("US-WEST1", stageInfo.getRegion()); - Assert.assertEquals(null, stageInfo.getEndPoint()); - Assert.assertEquals(null, stageInfo.getStorageAccount()); - Assert.assertEquals(true, stageInfo.getIsClientSideEncrypted()); + assertEquals(StageInfo.StageType.GCS, stageInfo.getStageType()); + assertEquals("foo/tables/9224/", stageInfo.getLocation()); + assertEquals(expectedCreds, stageInfo.getCredentials()); + assertEquals("US-WEST1", stageInfo.getRegion()); + assertEquals(null, stageInfo.getEndPoint()); + assertEquals(null, stageInfo.getStorageAccount()); + assertEquals(true, stageInfo.getIsClientSideEncrypted()); assertEquals(Optional.empty(), stageInfo.gcsCustomEndpoint()); // EncryptionMaterial check - Assert.assertEquals("EXAMPLE_QUERY_ID", metadata.getEncryptionMaterial().getQueryId()); - Assert.assertEquals( + assertEquals("EXAMPLE_QUERY_ID", metadata.getEncryptionMaterial().getQueryId()); + assertEquals( "EXAMPLE_QUERY_STAGE_MASTER_KEY", metadata.getEncryptionMaterial().getQueryStageMasterKey()); - Assert.assertEquals(123L, (long) metadata.getEncryptionMaterial().getSmkId()); + assertEquals(123L, (long) metadata.getEncryptionMaterial().getSmkId()); // Misc check - Assert.assertEquals(SFBaseFileTransferAgent.CommandType.UPLOAD, metadata.getCommandType()); - Assert.assertEquals("EXAMPLE_PRESIGNED_URL", metadata.getPresignedUrl()); - Assert.assertEquals("orders_100.csv", metadata.getPresignedUrlFileName()); + assertEquals(SFBaseFileTransferAgent.CommandType.UPLOAD, metadata.getCommandType()); + assertEquals("EXAMPLE_PRESIGNED_URL", metadata.getPresignedUrl()); + assertEquals("orders_100.csv", metadata.getPresignedUrlFileName()); } @Test public void testGetFileTransferMetadataGCSWithUseRegionalUrl() throws Exception { List metadataList = SnowflakeFileTransferAgent.getFileTransferMetadatas(exampleGCSJsonNodeWithUseRegionalUrl); - Assert.assertEquals(1, metadataList.size()); + assertEquals(1, metadataList.size()); SnowflakeFileTransferMetadataV1 metadata = (SnowflakeFileTransferMetadataV1) metadataList.get(0); @@ -305,7 +305,7 @@ public void testGetFileTransferMetadataGCSWithUseRegionalUrl() throws Exception public void testGetFileTransferMetadataGCSWithEndPoint() throws Exception { List metadataList = SnowflakeFileTransferAgent.getFileTransferMetadatas(exampleGCSJsonNodeWithEndPoint); - Assert.assertEquals(1, metadataList.size()); + assertEquals(1, metadataList.size()); SnowflakeFileTransferMetadataV1 metadata = (SnowflakeFileTransferMetadataV1) metadataList.get(0); @@ -323,8 +323,8 @@ public void testGetFileTransferMetadatasUploadError() throws Exception { 
SnowflakeFileTransferAgent.getFileTransferMetadatas(downloadNode); fail(); } catch (SnowflakeSQLException err) { - Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); - Assert.assertEquals( + assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); + assertEquals( "JDBC driver internal error: This API only supports PUT commands.", err.getMessage()); } } @@ -336,7 +336,7 @@ public void testGetFileTransferMetadatasEncryptionMaterialError() throws Excepti SnowflakeFileTransferAgent.getFileTransferMetadatas(garbageNode); fail(); } catch (SnowflakeSQLException err) { - Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); + assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); assertTrue( err.getMessage().contains("JDBC driver internal error: Failed to parse the credentials")); } @@ -351,7 +351,7 @@ public void testGetFileTransferMetadatasUnsupportedLocationError() throws Except SnowflakeFileTransferAgent.getFileTransferMetadatas(modifiedNode); fail(); } catch (SnowflakeSQLException err) { - Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); + assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); assertTrue(err.getMessage().contains("JDBC driver internal error: This API only supports")); } } @@ -363,7 +363,7 @@ public void testGetFileTransferMetadatasSrcLocationsArrayError() throws JsonProc SnowflakeFileTransferAgent.getFileTransferMetadatas(garbageNode); fail(); } catch (SnowflakeSQLException err) { - Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); + assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); assertTrue( err.getMessage().contains("JDBC driver internal error: src_locations must be an array")); } @@ -378,7 +378,7 @@ public void testGetFileMetadatasEncryptionMaterialsException() { SnowflakeFileTransferAgent.getFileTransferMetadatas(modifiedNode); fail(); } catch (SnowflakeSQLException err) { - Assert.assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); + assertEquals((long) ErrorCode.INTERNAL_ERROR.getMessageCode(), err.getErrorCode()); assertTrue(err.getMessage().contains("Failed to parse encryptionMaterial")); } } diff --git a/src/test/java/net/snowflake/client/jdbc/GCPLargeResult.java b/src/test/java/net/snowflake/client/jdbc/GCPLargeResult.java index b2c316d50..44f9b7a48 100644 --- a/src/test/java/net/snowflake/client/jdbc/GCPLargeResult.java +++ b/src/test/java/net/snowflake/client/jdbc/GCPLargeResult.java @@ -3,41 +3,37 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; +import net.snowflake.client.providers.SimpleResultFormatProvider; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; -@RunWith(Parameterized.class) +@Tag(TestTags.RESULT_SET) public class GCPLargeResult extends BaseJDBCTest { - private final String queryResultFormat; - 
@Parameterized.Parameters(name = "format={0}") - public static Object[][] data() { - return new Object[][] {{"JSON"}, {"ARROW"}}; - } - - public GCPLargeResult(String queryResultFormat) { - this.queryResultFormat = queryResultFormat; - } - - Connection init() throws SQLException { + Connection init(String queryResultFormat) throws SQLException { Connection conn = BaseJDBCTest.getConnection("gcpaccount"); + System.out.println("Connected"); try (Statement stmt = conn.createStatement()) { stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); } return conn; } - @Test - public void testLargeResultSetGCP() throws Throwable { - try (Connection con = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testLargeResultSetGCP(String queryResultFormat) throws Throwable { + try (Connection con = init(queryResultFormat); PreparedStatement stmt = con.prepareStatement( "select seq8(), randstr(1000, random()) from table(generator(rowcount=>1000))")) { diff --git a/src/test/java/net/snowflake/client/jdbc/GitRepositoryDownloadLatestIT.java b/src/test/java/net/snowflake/client/jdbc/GitRepositoryDownloadLatestIT.java index b720591de..975f8ebb7 100644 --- a/src/test/java/net/snowflake/client/jdbc/GitRepositoryDownloadLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/GitRepositoryDownloadLatestIT.java @@ -3,9 +3,9 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.io.InputStream; @@ -17,14 +17,13 @@ import java.sql.SQLException; import java.sql.Statement; import java.util.List; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryOthers; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; import org.apache.commons.io.IOUtils; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class GitRepositoryDownloadLatestIT extends BaseJDBCTest { /** @@ -32,7 +31,7 @@ public class GitRepositoryDownloadLatestIT extends BaseJDBCTest { * accountadmin role. 
Added in > 3.19.0 */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void shouldDownloadFileAndStreamFromGitRepository() throws Exception { try (Connection connection = getConnection()) { prepareJdbcRepoInSnowflake(connection); @@ -48,8 +47,8 @@ public void shouldDownloadFileAndStreamFromGitRepository() throws Exception { List fetchedStreamContent = getContentFromStream(connection, stageName, filePathInGitRepo); - assertFalse("File content cannot be empty", fetchedFileContent.isEmpty()); - assertFalse("Stream content cannot be empty", fetchedStreamContent.isEmpty()); + assertFalse(fetchedFileContent.isEmpty(), "File content cannot be empty"); + assertFalse(fetchedStreamContent.isEmpty(), "Stream content cannot be empty"); assertEquals(fetchedFileContent, fetchedStreamContent); } } @@ -80,7 +79,7 @@ private static List getContentFromFile( try (Statement statement = connection.createStatement(); ResultSet rs = statement.executeQuery(command); ) { // then - assertTrue("has result", rs.next()); + assertTrue(rs.next(), "has result"); return Files.readAllLines(downloadedFile); } finally { Files.delete(downloadedFile); diff --git a/src/test/java/net/snowflake/client/jdbc/HeartbeatAsyncLatestIT.java b/src/test/java/net/snowflake/client/jdbc/HeartbeatAsyncLatestIT.java index e7217f695..5dda21d55 100644 --- a/src/test/java/net/snowflake/client/jdbc/HeartbeatAsyncLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/HeartbeatAsyncLatestIT.java @@ -1,9 +1,9 @@ package net.snowflake.client.jdbc; import static org.awaitility.Awaitility.await; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Connection; import java.sql.ResultSet; @@ -12,18 +12,17 @@ import java.time.Duration; import java.util.Properties; import java.util.logging.Logger; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryOthers; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.QueryStatus; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Test class for using heartbeat with asynchronous querying. This is a "Latest" class because old * driver versions do not contain the asynchronous querying API. 
*/ -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class HeartbeatAsyncLatestIT extends HeartbeatIT { private static Logger logger = Logger.getLogger(HeartbeatAsyncLatestIT.class.getName()); @@ -69,20 +68,20 @@ protected void submitQuery(boolean useKeepAliveSession, int queryIdx) } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testAsynchronousQuerySuccess() throws Exception { testSuccess(); } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testAsynchronousQueryFailure() throws Exception { testFailure(); } /** Test that isValid() function returns false when session is expired */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testIsValidWithInvalidSession() throws Exception { try (Connection connection = getConnection()) { // assert that connection starts out valid diff --git a/src/test/java/net/snowflake/client/jdbc/HeartbeatIT.java b/src/test/java/net/snowflake/client/jdbc/HeartbeatIT.java index eb41ce76f..5f6d7867b 100644 --- a/src/test/java/net/snowflake/client/jdbc/HeartbeatIT.java +++ b/src/test/java/net/snowflake/client/jdbc/HeartbeatIT.java @@ -3,12 +3,13 @@ */ package net.snowflake.client.jdbc; +import static net.snowflake.client.AssumptionUtils.isRunningOnGithubActions; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.sql.Connection; import java.sql.ResultSet; @@ -24,16 +25,15 @@ import java.util.concurrent.Future; import java.util.logging.Logger; import net.snowflake.client.AbstractDriverIT; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryOthers; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** This test assumes that GS has been set up */ -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class HeartbeatIT extends AbstractDriverIT { private static Logger logger = Logger.getLogger(HeartbeatIT.class.getName()); @@ -43,9 +43,9 @@ public class HeartbeatIT extends AbstractDriverIT { *

change the master token validity to 10 seconds change the session token validity to 5 * seconds change the SESSION_RECORD_ACCESS_INTERVAL_SECS to 1 second */ - @BeforeClass + @BeforeAll public static void setUpClass() throws Exception { - if (!RunningOnGithubAction.isRunningOnGithubAction()) { + if (!isRunningOnGithubActions()) { try (Connection connection = getSnowflakeAdminConnection(); Statement statement = connection.createStatement()) { statement.execute( @@ -61,9 +61,9 @@ public static void setUpClass() throws Exception { * Reset master_token_validity, session_token_validity, SESSION_RECORD_ACCESS_INTERVAL_SECS to * default. */ - @AfterClass + @AfterAll public static void tearDownClass() throws Exception { - if (!RunningOnGithubAction.isRunningOnGithubAction()) { + if (!isRunningOnGithubActions()) { try (Connection connection = getSnowflakeAdminConnection(); Statement statement = connection.createStatement()) { statement.execute( @@ -115,7 +115,7 @@ protected void submitQuery(boolean useKeepAliveSession, int queryIdx) * master token validity and issue a query to make sure the query succeeds. */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testSuccess() throws Exception { int concurrency = 10; ExecutorService executorService = Executors.newFixedThreadPool(10); @@ -146,7 +146,7 @@ public void testSuccess() throws Exception { * master token validity and issue a query to make sure the query fails. */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testFailure() throws Exception { ExecutorService executorService = Executors.newFixedThreadPool(1); try { diff --git a/src/test/java/net/snowflake/client/jdbc/LobSizeLatestIT.java b/src/test/java/net/snowflake/client/jdbc/LobSizeLatestIT.java index 56f02c6d5..eb2bdfeb1 100644 --- a/src/test/java/net/snowflake/client/jdbc/LobSizeLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/LobSizeLatestIT.java @@ -3,8 +3,8 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.File; import java.io.IOException; @@ -17,25 +17,26 @@ import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; -import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; -import net.snowflake.client.category.TestCategoryStatement; +import java.util.stream.Stream; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.ObjectMapperFactory; import net.snowflake.client.core.UUIDUtils; import org.apache.commons.text.RandomStringGenerator; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; - -@RunWith(Parameterized.class) -@Category(TestCategoryStatement.class) +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsProvider; 
+import org.junit.jupiter.params.provider.ArgumentsSource; + +@Tag(TestTags.STATEMENT) public class LobSizeLatestIT extends BaseJDBCTest { private static final Logger logger = Logger.getLogger(SnowflakeDriverIT.class.getName()); @@ -48,15 +49,16 @@ public class LobSizeLatestIT extends BaseJDBCTest { private static int smallLobSize = 16; private static int originLobSize = 16 * 1024 * 1024; - @BeforeClass + @BeforeAll public static void setUp() throws SQLException { - System.setProperty( - // the max json string should be ~1.33 for Arrow response so let's use 1.5 to be sure - ObjectMapperFactory.MAX_JSON_STRING_LENGTH_JVM, Integer.toString((int) (maxLobSize * 1.5))); try (Connection con = BaseJDBCTest.getConnection()) { // get max LOB size from session maxLobSize = con.getMetaData().getMaxCharLiteralLength(); logger.log(Level.INFO, "Using max lob size: " + maxLobSize); + System.setProperty( + // the max json string should be ~1.33 for Arrow response so let's use 1.5 to be sure + ObjectMapperFactory.MAX_JSON_STRING_LENGTH_JVM, + Integer.toString((int) (maxLobSize * 1.5))); LobSizeStringValues.put(smallLobSize, generateRandomString(smallLobSize)); LobSizeStringValues.put(originLobSize, generateRandomString(originLobSize)); LobSizeStringValues.put(mediumLobSize, generateRandomString(mediumLobSize)); @@ -65,31 +67,20 @@ public static void setUp() throws SQLException { } } - @Parameterized.Parameters(name = "lobSize={0}, resultFormat={1}") - public static Collection data() { - int[] lobSizes = - new int[] {smallLobSize, originLobSize, mediumLobSize, largeLobSize, maxLobSize}; - String[] resultFormats = new String[] {"Arrow", "JSON"}; - List ret = new ArrayList<>(); - for (int i = 0; i < lobSizes.length; i++) { - for (int j = 0; j < resultFormats.length; j++) { - ret.add(new Object[] {lobSizes[i], resultFormats[j]}); + static class DataProvider implements ArgumentsProvider { + + @Override + public Stream provideArguments(ExtensionContext context) throws Exception { + int[] lobSizes = + new int[] {smallLobSize, originLobSize, mediumLobSize, largeLobSize, maxLobSize}; + String[] resultFormats = new String[] {"Arrow", "JSON"}; + List ret = new ArrayList<>(); + for (int size : lobSizes) { + for (String format : resultFormats) { + ret.add(Arguments.of(size, format)); + } } - } - return ret; - } - - private final int lobSize; - - private final String resultFormat; - - public LobSizeLatestIT(int lobSize, String resultFormat) throws SQLException { - this.lobSize = lobSize; - this.resultFormat = resultFormat; - - try (Connection con = BaseJDBCTest.getConnection(); - Statement stmt = con.createStatement()) { - createTable(lobSize, stmt); + return ret.stream(); } } @@ -134,7 +125,7 @@ private void preparedInsertQuery(String varCharValue, String uuidValue, Connecti } } - @AfterClass + @AfterAll public static void tearDown() throws SQLException { try (Connection con = BaseJDBCTest.getConnection(); Statement stmt = con.createStatement()) { @@ -142,10 +133,13 @@ public static void tearDown() throws SQLException { } } - @Test - public void testStandardInsertAndSelectWithMaxLobSizeEnabled() throws SQLException { + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + public void testStandardInsertAndSelectWithMaxLobSizeEnabled(int lobSize, String resultFormat) + throws SQLException { try (Connection con = BaseJDBCTest.getConnection(); Statement stmt = con.createStatement()) { + createTable(lobSize, stmt); setResultFormat(stmt, resultFormat); String varCharValue = LobSizeStringValues.get(lobSize); @@ -161,10 
+155,13 @@ public void testStandardInsertAndSelectWithMaxLobSizeEnabled() throws SQLExcepti } } - @Test - public void testPreparedInsertWithMaxLobSizeEnabled() throws SQLException { + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + public void testPreparedInsertWithMaxLobSizeEnabled(int lobSize, String resultFormat) + throws SQLException { try (Connection con = BaseJDBCTest.getConnection(); Statement stmt = con.createStatement()) { + createTable(lobSize, stmt); setResultFormat(stmt, resultFormat); String maxVarCharValue = LobSizeStringValues.get(lobSize); @@ -180,8 +177,9 @@ public void testPreparedInsertWithMaxLobSizeEnabled() throws SQLException { } } - @Test - public void testPutAndGet() throws IOException, SQLException { + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + public void testPutAndGet(int lobSize, String resultFormat) throws IOException, SQLException { File tempFile = File.createTempFile("LobSizeTest", ".csv"); // Delete file when JVM shuts down tempFile.deleteOnExit(); @@ -201,6 +199,7 @@ public void testPutAndGet() throws IOException, SQLException { try (Connection con = BaseJDBCTest.getConnection(); Statement stmt = con.createStatement()) { + createTable(lobSize, stmt); setResultFormat(stmt, resultFormat); if (lobSize > originLobSize) { // for increased LOB size (16MB < lobSize < 128MB) stmt.execute("alter session set ALLOW_LARGE_LOBS_IN_EXTERNAL_SCAN = true"); diff --git a/src/test/java/net/snowflake/client/jdbc/MaxLobSizeLatestIT.java b/src/test/java/net/snowflake/client/jdbc/MaxLobSizeLatestIT.java index fd2957528..17afeeb53 100644 --- a/src/test/java/net/snowflake/client/jdbc/MaxLobSizeLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/MaxLobSizeLatestIT.java @@ -4,20 +4,19 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.text.IsEmptyString.emptyOrNullString; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryStatement; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; import org.hamcrest.CoreMatchers; -import org.junit.Assert; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryStatement.class) +@Tag(TestTags.STATEMENT) public class MaxLobSizeLatestIT extends BaseJDBCTest { /** @@ -26,7 +25,7 @@ public class MaxLobSizeLatestIT extends BaseJDBCTest { * @throws SQLException */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testIncreasedMaxLobSize() throws SQLException { try (Connection con = BaseJDBCTest.getConnection(); Statement stmt = con.createStatement()) { @@ -41,7 +40,7 @@ public void testIncreasedMaxLobSize() throws SQLException { stmt.execute("alter session set ENABLE_LARGE_VARCHAR_AND_BINARY_IN_RESULT=true"); try (ResultSet resultSet = stmt.executeQuery("select randstr(20000000, random()) as large_str")) { - Assert.assertTrue(resultSet.next()); + assertTrue(resultSet.next()); assertThat(resultSet.getString(1), is(not(emptyOrNullString()))); } finally { stmt.execute("alter session unset ENABLE_LARGE_VARCHAR_AND_BINARY_IN_RESULT"); diff 
--git a/src/test/java/net/snowflake/client/jdbc/MockConnectionTest.java b/src/test/java/net/snowflake/client/jdbc/MockConnectionTest.java index c763606fe..65118cec6 100644 --- a/src/test/java/net/snowflake/client/jdbc/MockConnectionTest.java +++ b/src/test/java/net/snowflake/client/jdbc/MockConnectionTest.java @@ -1,8 +1,8 @@ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; @@ -31,7 +31,6 @@ import java.util.concurrent.Future; import java.util.stream.Collectors; import java.util.stream.IntStream; -import net.snowflake.client.category.TestCategoryConnection; import net.snowflake.client.core.ExecTimeTelemetryData; import net.snowflake.client.core.ParameterBindingDTO; import net.snowflake.client.core.QueryContextDTO; @@ -52,15 +51,15 @@ import net.snowflake.client.jdbc.telemetry.TelemetryData; import net.snowflake.common.core.SFBinaryFormat; import net.snowflake.common.core.SnowflakeDateTimeFormat; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Test; /** * IT test for testing the "pluggable" implementation of SnowflakeConnection, SnowflakeStatement, * and ResultSet. These tests will query Snowflake normally, retrieve the JSON result, and replay it * back using a custom implementation of these objects that simply echoes a given JSON response. */ -@Category(TestCategoryConnection.class) +// TODO: SNOW-1821554 +// @Tag(TestTags.CONNECTION) public class MockConnectionTest extends BaseJDBCTest { // Simple pair class container for the error test. 
@@ -277,7 +276,7 @@ public void testMockResponse() throws SQLException, JsonProcessingException { mockConnection.prepareStatement("select count(*) from " + testTableName).executeQuery(); fakeResultSet.next(); String val = fakeResultSet.getString(1); - assertEquals("colA value from the mock connection was not what was expected", "rowOne", val); + assertEquals("rowOne", val, "colA value from the mock connection was not what was expected"); mockConnection.close(); } @@ -411,7 +410,7 @@ public void testMockTransferAgent() throws SQLException, IOException { InputStream downloadStream1 = mockConnection.downloadStream("@fakeStage", "file1", false); byte[] outputBytes1 = new byte[downloadStream1.available()]; downloadStream1.read(outputBytes1); - assertArrayEquals("downloaded bytes not what was expected", outputBytes1, inputBytes1); + assertArrayEquals(outputBytes1, inputBytes1, "downloaded bytes not what was expected"); } private JsonNode createDummyResponseWithRows(List> rows, List dataTypes) { @@ -540,7 +539,7 @@ private void compareResultSets( resultSetRows++; } - assertEquals("row-count was not what was expected", numRows, resultSetRows); + assertEquals(numRows, resultSetRows, "row-count was not what was expected"); } // DataTypes supported with mock responses in test: diff --git a/src/test/java/net/snowflake/client/jdbc/MultiStatementArrowIT.java b/src/test/java/net/snowflake/client/jdbc/MultiStatementArrowIT.java index 0a1fb9ce2..5ea3b3f27 100644 --- a/src/test/java/net/snowflake/client/jdbc/MultiStatementArrowIT.java +++ b/src/test/java/net/snowflake/client/jdbc/MultiStatementArrowIT.java @@ -1,9 +1,9 @@ package net.snowflake.client.jdbc; -import net.snowflake.client.category.TestCategoryArrow; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; -@Category(TestCategoryArrow.class) +@Tag(TestTags.ARROW) public class MultiStatementArrowIT extends MultiStatementIT { public MultiStatementArrowIT() { diff --git a/src/test/java/net/snowflake/client/jdbc/MultiStatementIT.java b/src/test/java/net/snowflake/client/jdbc/MultiStatementIT.java index 06ccc4196..f4e9da56d 100644 --- a/src/test/java/net/snowflake/client/jdbc/MultiStatementIT.java +++ b/src/test/java/net/snowflake/client/jdbc/MultiStatementIT.java @@ -3,33 +3,31 @@ */ package net.snowflake.client.jdbc; -import static net.snowflake.client.ConditionalIgnoreRule.ConditionalIgnore; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryStatement; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.SFSession; import net.snowflake.common.core.SqlState; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import 
org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** Multi Statement tests */ -@Category(TestCategoryStatement.class) +@Tag(TestTags.STATEMENT) public class MultiStatementIT extends BaseJDBCWithSharedConnectionIT { protected static String queryResultFormat = "json"; - @Before + @BeforeEach public void setQueryResultFormat() throws SQLException { try (Statement stmt = connection.createStatement()) { stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); @@ -418,7 +416,7 @@ public void testMultiStmtCountNotMatch() throws SQLException { } @Test - @ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testInvalidParameterCount() throws SQLException { String userName = null; String accountName = null; @@ -455,7 +453,7 @@ public void testInvalidParameterCount() throws SQLException { for (int i = 0; i < testSuites.length; i++) { try { statement.execute(testSuites[i]); - Assert.fail(); + fail(); } catch (SQLException e) { assertThat(e.getErrorCode(), is(expectedErrorCodes[i])); } diff --git a/src/test/java/net/snowflake/client/jdbc/MultiStatementLatestIT.java b/src/test/java/net/snowflake/client/jdbc/MultiStatementLatestIT.java index eedf56114..849742ef9 100644 --- a/src/test/java/net/snowflake/client/jdbc/MultiStatementLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/MultiStatementLatestIT.java @@ -3,19 +3,19 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; -import net.snowflake.client.category.TestCategoryStatement; -import org.junit.Before; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * MultiStatement integration tests for the latest JDBC driver. This doesn't work for the oldest @@ -23,11 +23,11 @@ * if the tests still is not applicable. If it is applicable, move tests to MultiStatementIT so that * both the latest and oldest supported driver run the tests. 
*/ -@Category(TestCategoryStatement.class) +@Tag(TestTags.STATEMENT) public class MultiStatementLatestIT extends BaseJDBCWithSharedConnectionIT { protected static String queryResultFormat = "json"; - @Before + @BeforeEach public void setQueryResultFormat() throws SQLException { try (Statement stmt = connection.createStatement()) { stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); diff --git a/src/test/java/net/snowflake/client/jdbc/OpenGroupCLIFuncIT.java b/src/test/java/net/snowflake/client/jdbc/OpenGroupCLIFuncIT.java index d68dc8fc5..f8ca2f48a 100644 --- a/src/test/java/net/snowflake/client/jdbc/OpenGroupCLIFuncIT.java +++ b/src/test/java/net/snowflake/client/jdbc/OpenGroupCLIFuncIT.java @@ -3,24 +3,24 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import net.snowflake.client.TestUtil; -import net.snowflake.client.category.TestCategoryOthers; -import org.junit.BeforeClass; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** Test OpenGroup CLI */ -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class OpenGroupCLIFuncIT extends BaseJDBCWithSharedConnectionIT { - @BeforeClass + @BeforeAll public static void setSessionTimezone() throws SQLException { try (Statement statement = connection.createStatement()) { statement.execute( diff --git a/src/test/java/net/snowflake/client/jdbc/OpenGroupCLIFuncLatestIT.java b/src/test/java/net/snowflake/client/jdbc/OpenGroupCLIFuncLatestIT.java index 4f7004004..3b79dc616 100644 --- a/src/test/java/net/snowflake/client/jdbc/OpenGroupCLIFuncLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/OpenGroupCLIFuncLatestIT.java @@ -7,9 +7,9 @@ import java.sql.Connection; import java.sql.SQLException; -import net.snowflake.client.category.TestCategoryOthers; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Open Group CLI function integration tests for the latest JDBC driver. This doesn't work for the @@ -17,7 +17,7 @@ * examine if the tests still are not applicable. If it is applicable, move tests to * OpenGroupCLIFuncIT so that both the latest and oldest supported driver run the tests. 
*/ -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class OpenGroupCLIFuncLatestIT extends BaseJDBCTest { /** * Numeric function tests diff --git a/src/test/java/net/snowflake/client/jdbc/PreparedMultiStmtIT.java b/src/test/java/net/snowflake/client/jdbc/PreparedMultiStmtIT.java index 224c538d0..adb92036d 100644 --- a/src/test/java/net/snowflake/client/jdbc/PreparedMultiStmtIT.java +++ b/src/test/java/net/snowflake/client/jdbc/PreparedMultiStmtIT.java @@ -3,47 +3,37 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; -import net.snowflake.client.category.TestCategoryStatement; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; - -@RunWith(Parameterized.class) -@Category(TestCategoryStatement.class) -public class PreparedMultiStmtIT extends BaseJDBCWithSharedConnectionIT { - - @Parameterized.Parameters(name = "format={0}") - public static Object[][] data() { - // all tests in this class need to run for both query result formats json and arrow - return new Object[][] {{"JSON"}, {"Arrow"}}; - } +import net.snowflake.client.category.TestTags; +import net.snowflake.client.providers.SimpleResultFormatProvider; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; - protected String queryResultFormat; +@Tag(TestTags.STATEMENT) +public class PreparedMultiStmtIT extends BaseJDBCWithSharedConnectionIT { private static SnowflakeConnectionV1 sfConnectionV1; - public PreparedMultiStmtIT(String queryResultFormat) { - this.queryResultFormat = queryResultFormat; + public PreparedMultiStmtIT() { this.sfConnectionV1 = (SnowflakeConnectionV1) connection; } - @Before - public void setSessionResultFormat() throws SQLException { + public void setSessionResultFormat(String queryResultFormat) throws SQLException { try (Statement stmt = connection.createStatement()) { stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); } } - @Test - public void testExecuteUpdateCount() throws Exception { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testExecuteUpdateCount(String queryResultFormat) throws Exception { + setSessionResultFormat(queryResultFormat); try (Statement statement = sfConnectionV1.createStatement()) { try { statement.execute("alter session set MULTI_STATEMENT_COUNT=0"); @@ -87,8 +77,10 @@ public void testExecuteUpdateCount() throws Exception { } /** Less bindings than expected in statement */ - @Test - public void testExecuteLessBindings() throws Exception { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testExecuteLessBindings(String queryResultFormat) throws Exception { + setSessionResultFormat(queryResultFormat); try (Statement statement = sfConnectionV1.createStatement()) { try { statement.execute("alter session set MULTI_STATEMENT_COUNT=0"); @@ -107,7 +99,7 @@ public void testExecuteLessBindings() throws Exception { // first statement try { 
preparedStatement.executeUpdate(); - Assert.fail(); + fail(); } catch (SQLException e) { // error code comes from xp, which is js execution failed. assertThat(e.getErrorCode(), is(100132)); @@ -119,8 +111,10 @@ public void testExecuteLessBindings() throws Exception { } } - @Test - public void testExecuteMoreBindings() throws Exception { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testExecuteMoreBindings(String queryResultFormat) throws Exception { + setSessionResultFormat(queryResultFormat); try (Statement statement = sfConnectionV1.createStatement()) { try { statement.execute("alter session set MULTI_STATEMENT_COUNT=0"); @@ -165,8 +159,10 @@ public void testExecuteMoreBindings() throws Exception { } } - @Test - public void testExecuteQueryBindings() throws Exception { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testExecuteQueryBindings(String queryResultFormat) throws Exception { + setSessionResultFormat(queryResultFormat); try (Statement statement = sfConnectionV1.createStatement()) { statement.execute("alter session set MULTI_STATEMENT_COUNT=0"); @@ -207,8 +203,10 @@ public void testExecuteQueryBindings() throws Exception { } } - @Test - public void testExecuteQueryNoBindings() throws Exception { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testExecuteQueryNoBindings(String queryResultFormat) throws Exception { + setSessionResultFormat(queryResultFormat); try (Statement statement = sfConnectionV1.createStatement()) { statement.execute("alter session set MULTI_STATEMENT_COUNT=0"); diff --git a/src/test/java/net/snowflake/client/jdbc/PreparedStatement0IT.java b/src/test/java/net/snowflake/client/jdbc/PreparedStatement0IT.java index 7c05163dc..aa9a90859 100644 --- a/src/test/java/net/snowflake/client/jdbc/PreparedStatement0IT.java +++ b/src/test/java/net/snowflake/client/jdbc/PreparedStatement0IT.java @@ -6,14 +6,16 @@ import java.sql.Connection; import java.sql.SQLException; import java.sql.Statement; -import org.junit.After; -import org.junit.Before; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; /** Prepared statement integration tests */ abstract class PreparedStatement0IT extends BaseJDBCTest { - private final String queryResultFormat; - Connection init() throws SQLException { + return BaseJDBCTest.getConnection(); + } + + protected Connection getConn(String queryResultFormat) throws SQLException { Connection conn = BaseJDBCTest.getConnection(); try (Statement stmt = conn.createStatement()) { stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); @@ -34,21 +36,17 @@ Connection init() throws SQLException { final String enableCacheReuse = "alter session set USE_CACHED_RESULT=true"; final String tableFuncSQL = "select 1 from table(generator(rowCount => ?))"; - @Before + @BeforeEach public void setUp() throws SQLException { try (Connection con = init()) { con.createStatement().execute(createTableSQL); } } - @After + @AfterEach public void tearDown() throws SQLException { try (Connection con = init()) { con.createStatement().execute(deleteTableSQL); } } - - PreparedStatement0IT(String queryResultFormat) { - this.queryResultFormat = queryResultFormat; - } } diff --git a/src/test/java/net/snowflake/client/jdbc/PreparedStatement1IT.java b/src/test/java/net/snowflake/client/jdbc/PreparedStatement1IT.java index 56bef419f..d0074230d 100644 --- 
a/src/test/java/net/snowflake/client/jdbc/PreparedStatement1IT.java +++ b/src/test/java/net/snowflake/client/jdbc/PreparedStatement1IT.java @@ -6,12 +6,12 @@ import static net.snowflake.client.jdbc.ErrorCode.NUMERIC_VALUE_OUT_OF_RANGE; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.sql.Connection; import java.sql.DriverManager; @@ -25,26 +25,22 @@ import java.sql.Types; import java.util.Map; import java.util.Properties; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryStatement; -import org.junit.Ignore; -import org.junit.Test; -import org.junit.experimental.categories.Category; - -@Category(TestCategoryStatement.class) +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; +import net.snowflake.client.providers.SimpleResultFormatProvider; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; + +@Tag(TestTags.STATEMENT) public class PreparedStatement1IT extends PreparedStatement0IT { - public PreparedStatement1IT() { - super("json"); - } - - PreparedStatement1IT(String queryFormat) { - super(queryFormat); - } - @Test - public void testGetParameterMetaData() throws SQLException { - try (Connection connection = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetParameterMetaData(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat)) { try (PreparedStatement preparedStatement = connection.prepareStatement(updateSQL)) { /* All binding parameters are of type text and have null precision and scale and are not nullable. Since every binding parameter currently has identical properties, testing is minimal until this changes. 
@@ -83,9 +79,10 @@ public void testGetParameterMetaData() throws SQLException { } /** Trigger default stage array binding threshold so that it can be run on travis */ - @Test - public void testInsertStageArrayBind() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testInsertStageArrayBind(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { connection .createStatement() @@ -122,9 +119,10 @@ static void bindOneParamSet( prepst.setShort(6, colE); } - @Test - public void testPrepareStatementWithKeys() throws SQLException { - try (Connection connection = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testPrepareStatementWithKeys(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat)) { connection.createStatement().execute(createTableSQL); try (PreparedStatement prepStatement = connection.prepareStatement(insertSQL, Statement.NO_GENERATED_KEYS)) { @@ -138,11 +136,12 @@ public void testPrepareStatementWithKeys() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testInsertBatch() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testInsertBatch(String queryResultFormat) throws SQLException { int[] countResult; - try (Connection connection = init()) { + try (Connection connection = getConn(queryResultFormat)) { connection .createStatement() .execute( @@ -164,11 +163,12 @@ public void testInsertBatch() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testInsertBatchStage() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testInsertBatchStage(String queryResultFormat) throws SQLException { int[] countResult; - try (Connection connection = init()) { + try (Connection connection = getConn(queryResultFormat)) { connection .createStatement() .execute( @@ -188,12 +188,13 @@ public void testInsertBatchStage() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testInsertBatchStageMultipleTimes() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testInsertBatchStageMultipleTimes(String queryResultFormat) throws SQLException { // using the same statement to run a query multiple times shouldn't result in duplicates int[] countResult; - try (Connection connection = init()) { + try (Connection connection = getConn(queryResultFormat)) { connection .createStatement() .execute( @@ -223,10 +224,11 @@ public void testInsertBatchStageMultipleTimes() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testStageBatchNull() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testStageBatchNull(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat); Statement statement = 
connection.createStatement()) { int[] thresholds = {0, 6}; // disabled, enabled @@ -253,26 +255,27 @@ public void testStageBatchNull() throws SQLException { String errorMessage = "Column should be null (" + (threshold > 0 ? "stage" : "non-stage") + ")"; resultSet.getInt(1); - assertTrue(errorMessage, resultSet.wasNull()); + assertTrue(resultSet.wasNull(), errorMessage); resultSet.getDouble(2); - assertTrue(errorMessage, resultSet.wasNull()); + assertTrue(resultSet.wasNull(), errorMessage); resultSet.getFloat(3); - assertTrue(errorMessage, resultSet.wasNull()); + assertTrue(resultSet.wasNull(), errorMessage); resultSet.getString(4); - assertTrue(errorMessage, resultSet.wasNull()); + assertTrue(resultSet.wasNull(), errorMessage); resultSet.getLong(5); - assertTrue(errorMessage, resultSet.wasNull()); + assertTrue(resultSet.wasNull(), errorMessage); resultSet.getShort(6); - assertTrue(errorMessage, resultSet.wasNull()); + assertTrue(resultSet.wasNull(), errorMessage); } } } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testStageString() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testStageString(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { int[] thresholds = {0, 6}; // disabled, enabled String[] rows = { @@ -297,7 +300,7 @@ public void testStageString() throws SQLException { "Strings should match (" + (threshold > 0 ? "stage" : "non-stage") + ")"; for (String row : rows) { assertTrue(resultSet.next()); - assertEquals(errorMessage, row, resultSet.getString(1)); + assertEquals(row, resultSet.getString(1), errorMessage); } } } @@ -305,10 +308,11 @@ public void testStageString() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testIncorrectTypes() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testIncorrectTypes(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { int[] thresholds = {0, 6}; // disabled, enabled @@ -338,10 +342,11 @@ public void testIncorrectTypes() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testStageBatchTimestamps() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testStageBatchTimestamps(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { Timestamp tsEpoch = new Timestamp(0L); Timestamp tsEpochMinusOneSec = new Timestamp(-1000L); // negative epoch no fraction of seconds @@ -409,11 +414,11 @@ public void testStageBatchTimestamps() throws SQLException { for (int i = 0; i < timestamps.length; i++) { assertEquals( + nonStageResult[i], + stageResult[i], "Stage binding timestamp should match non-stage binding timestamp (" + tsType - + ")", - nonStageResult[i], - stageResult[i]); + + ")"); } } } @@ -424,10 +429,11 @@ public void 
testStageBatchTimestamps() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testStageBatchTimes() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testStageBatchTimes(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { Time tMidnight = new Time(0); Time tNeg = new Time(-1); @@ -487,9 +493,9 @@ public void testStageBatchTimes() throws SQLException { for (int i = 0; i < times.length; i++) { assertEquals( - "Stage binding time should match non-stage binding time", nonStageResult[i], - stageResult[i]); + stageResult[i], + "Stage binding time should match non-stage binding time"); } } } @@ -499,9 +505,10 @@ public void testStageBatchTimes() throws SQLException { } } - @Test - public void testClearParameters() throws SQLException { - try (Connection connection = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testClearParameters(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat)) { try (PreparedStatement prepStatement = connection.prepareStatement(insertSQL)) { bindOneParamSet(prepStatement, 1, 1.22222, (float) 1.2, "test", 12121212121L, (short) 12); prepStatement.clearParameters(); @@ -522,9 +529,10 @@ public void testClearParameters() throws SQLException { } } - @Test - public void testClearBatch() throws SQLException { - try (Connection connection = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testClearBatch(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat)) { try (PreparedStatement prepStatement = connection.prepareStatement(insertSQL)) { bindOneParamSet(prepStatement, 1, 1.22222, (float) 1.2, "test", 12121212121L, (short) 12); prepStatement.addBatch(); @@ -555,9 +563,10 @@ public void testClearBatch() throws SQLException { } } - @Test - public void testInsertOneRow() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testInsertOneRow(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { statement.execute("CREATE OR REPLACE TABLE test_prepst_date (id INTEGER, d DATE)"); try (PreparedStatement prepStatement = connection.prepareStatement(insertSQL)) { @@ -576,9 +585,10 @@ public void testInsertOneRow() throws SQLException { } } - @Test - public void testUpdateOneRow() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testUpdateOneRow(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { statement.execute("CREATE OR REPLACE TABLE test_prepst_date (id INTEGER, d DATE)"); try (PreparedStatement prepStatement = connection.prepareStatement(insertSQL)) { @@ -611,9 +621,10 @@ public void testUpdateOneRow() throws SQLException { } } - @Test - public void testDeleteOneRow() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + 
@ArgumentsSource(SimpleResultFormatProvider.class) + public void testDeleteOneRow(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { statement.execute("CREATE OR REPLACE TABLE test_prepst_date (id INTEGER, d DATE)"); try (PreparedStatement prepStatement = connection.prepareStatement(insertSQL)) { @@ -654,9 +665,10 @@ public void testDeleteOneRow() throws SQLException { } } - @Test - public void testSelectOneRow() throws SQLException { - try (Connection connection = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testSelectOneRow(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat)) { try (PreparedStatement prepStatement = connection.prepareStatement(insertSQL)) { bindOneParamSet(prepStatement, 1, 1.22222, (float) 1.2, "test", 12121212121L, (short) 12); prepStatement.addBatch(); @@ -680,9 +692,10 @@ public void testSelectOneRow() throws SQLException { } } - @Test - public void testUpdateBatch() throws SQLException { - try (Connection connection = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testUpdateBatch(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat)) { try (PreparedStatement prepStatement = connection.prepareStatement(insertSQL)) { bindOneParamSet(prepStatement, 1, 1.22222, (float) 1.2, "test", 12121212121L, (short) 12); prepStatement.addBatch(); @@ -715,10 +728,11 @@ public void testUpdateBatch() throws SQLException { } } - @Test - public void testBatchInsertWithCacheEnabled() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testBatchInsertWithCacheEnabled(String queryResultFormat) throws SQLException { int[] countResult; - try (Connection connection = init(); + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { // ensure enable the cache result use statement.execute(enableCacheReuse); @@ -764,7 +778,7 @@ public void testBatchInsertWithCacheEnabled() throws SQLException { * @throws SQLException arises if any exception occurs */ @Test - @Ignore + @Disabled public void manualTestForPreparedStatementLogging() throws SQLException { Map params = getConnectionParameters(); Properties props = new Properties(); diff --git a/src/test/java/net/snowflake/client/jdbc/PreparedStatement1LatestIT.java b/src/test/java/net/snowflake/client/jdbc/PreparedStatement1LatestIT.java index 872c8aab6..9c316edba 100644 --- a/src/test/java/net/snowflake/client/jdbc/PreparedStatement1LatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/PreparedStatement1LatestIT.java @@ -4,10 +4,10 @@ package net.snowflake.client.jdbc; import static net.snowflake.client.jdbc.PreparedStatement1IT.bindOneParamSet; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.math.BigInteger; import java.sql.Connection; @@ -17,12 +17,14 @@ import java.sql.Statement; import java.sql.Time; import java.sql.Timestamp; -import 
net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryStatement; -import org.junit.Ignore; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import java.util.TimeZone; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; +import net.snowflake.client.providers.SimpleResultFormatProvider; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; /** * PreparedStatement integration tests for the latest JDBC driver. This doesn't work for the oldest @@ -30,19 +32,13 @@ * if the tests still are not applicable. If it is applicable, move tests to PreparedStatement1IT so * that both the latest and oldest supported driver run the tests. */ -@Category(TestCategoryStatement.class) +@Tag(TestTags.STATEMENT) public class PreparedStatement1LatestIT extends PreparedStatement0IT { - public PreparedStatement1LatestIT() { - super("json"); - } - - PreparedStatement1LatestIT(String queryResultFormat) { - super(queryResultFormat); - } - @Test - public void testPrepStWithCacheEnabled() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testPrepStWithCacheEnabled(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { // ensure enable the cache result use statement.execute(enableCacheReuse); @@ -107,10 +103,13 @@ public void testPrepStWithCacheEnabled() throws SQLException { * * @throws SQLException arises if any exception occurs */ - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testInsertStageArrayBindWithTime() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testInsertStageArrayBindWithTime(String queryResultFormat) throws SQLException { + TimeZone originalTimeZone = TimeZone.getDefault(); + TimeZone.setDefault(TimeZone.getTimeZone("UTC")); + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { try { statement.execute("alter session set CLIENT_STAGE_ARRAY_BINDING_THRESHOLD=2"); @@ -140,6 +139,7 @@ public void testInsertStageArrayBindWithTime() throws SQLException { } finally { statement.execute("drop table if exists testStageBindTime"); statement.execute("alter session unset CLIENT_STAGE_ARRAY_BINDING_THRESHOLD"); + TimeZone.setDefault(originalTimeZone); } } } @@ -154,10 +154,11 @@ public void testInsertStageArrayBindWithTime() throws SQLException { * * @throws SQLException */ - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testSetObjectForTimestampTypes() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testSetObjectForTimestampTypes(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { // set timestamp mapping to default value try { @@ -210,14 +211,15 @@ public void 
testSetObjectForTimestampTypes() throws SQLException { * * @throws SQLException arises if any exception occurs */ - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testExecuteEmptyBatch() throws SQLException { - try (Connection connection = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testExecuteEmptyBatch(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat)) { try (PreparedStatement prepStatement = connection.prepareStatement(insertSQL)) { // executeBatch shouldn't throw exceptions assertEquals( - "For empty batch, we should return int[0].", 0, prepStatement.executeBatch().length); + 0, prepStatement.executeBatch().length, "For empty batch, we should return int[0]."); } connection @@ -228,7 +230,7 @@ public void testExecuteEmptyBatch() throws SQLException { try (PreparedStatement prepStatement = connection.prepareStatement(insertSQL)) { // executeBatch shouldn't throw exceptions assertEquals( - "For empty batch, we should return int[0].", 0, prepStatement.executeBatch().length); + 0, prepStatement.executeBatch().length, "For empty batch, we should return int[0]."); } } } @@ -238,9 +240,10 @@ public void testExecuteEmptyBatch() throws SQLException { * * @throws SQLException */ - @Test - public void testSetObjectMethodWithVarbinaryColumn() throws SQLException { - try (Connection connection = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testSetObjectMethodWithVarbinaryColumn(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat)) { connection.createStatement().execute("create or replace table test_binary(b VARBINARY)"); try (PreparedStatement prepStatement = @@ -251,16 +254,17 @@ public void testSetObjectMethodWithVarbinaryColumn() throws SQLException { } } - @Test - public void testSetObjectMethodWithBigIntegerColumn() { - try (Connection connection = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testSetObjectMethodWithBigIntegerColumn(String queryResultFormat) { + try (Connection connection = getConn(queryResultFormat)) { connection.createStatement().execute("create or replace table test_bigint(id NUMBER)"); try (PreparedStatement prepStatement = connection.prepareStatement("insert into test_bigint(id) values(?)")) { prepStatement.setObject(1, BigInteger.valueOf(9999)); int rows = prepStatement.executeUpdate(); - assertTrue("Row count doesn't match", rows == 1); + assertTrue(rows == 1, "Row count doesn't match"); } } catch (SQLException e) { e.printStackTrace(); @@ -270,9 +274,10 @@ public void testSetObjectMethodWithBigIntegerColumn() { } } - @Test - public void testSetObjectMethodWithLargeBigIntegerColumn() { - try (Connection connection = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testSetObjectMethodWithLargeBigIntegerColumn(String queryResultFormat) { + try (Connection connection = getConn(queryResultFormat)) { connection.createStatement().execute("create or replace table test_bigint(id NUMBER)"); try (PreparedStatement prepStatement = @@ -280,7 +285,7 @@ public void testSetObjectMethodWithLargeBigIntegerColumn() { BigInteger largeBigInt = BigInteger.valueOf(Long.MAX_VALUE).add(BigInteger.TEN); prepStatement.setObject(1, largeBigInt); int rows = prepStatement.executeUpdate(); - assertTrue("Row 
count doesn't match", rows == 1); + assertTrue(rows == 1, "Row count doesn't match"); } } catch (SQLException e) { e.printStackTrace(); @@ -290,9 +295,13 @@ public void testSetObjectMethodWithLargeBigIntegerColumn() { } } - @Test - public void testBatchInsertWithTimestampInputFormatSet() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testBatchInsertWithTimestampInputFormatSet(String queryResultFormat) + throws SQLException { + TimeZone originalTimeZone = TimeZone.getDefault(); + TimeZone.setDefault(TimeZone.getTimeZone("UTC")); + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { try { statement.execute("alter session set TIMESTAMP_INPUT_FORMAT='YYYY-MM-DD HH24:MI:SS.FFTZH'"); @@ -315,6 +324,8 @@ public void testBatchInsertWithTimestampInputFormatSet() throws SQLException { statement.execute("drop table if exists testStageBindTypes"); statement.execute("alter session unset TIMESTAMP_INPUT_FORMAT"); } + } finally { + TimeZone.setDefault(originalTimeZone); } } @@ -324,10 +335,11 @@ public void testBatchInsertWithTimestampInputFormatSet() throws SQLException { * * @throws SQLException */ - @Test - @Ignore - public void testCallStatement() throws SQLException { - try (Connection connection = getConnection(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @Disabled + public void testCallStatement(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { try { statement.executeQuery( diff --git a/src/test/java/net/snowflake/client/jdbc/PreparedStatement2IT.java b/src/test/java/net/snowflake/client/jdbc/PreparedStatement2IT.java index efb8ef944..96765131a 100644 --- a/src/test/java/net/snowflake/client/jdbc/PreparedStatement2IT.java +++ b/src/test/java/net/snowflake/client/jdbc/PreparedStatement2IT.java @@ -7,12 +7,12 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import com.google.common.collect.Sets; import java.math.BigDecimal; @@ -28,27 +28,21 @@ import java.sql.Timestamp; import java.util.Calendar; import java.util.Set; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryStatement; -import org.junit.Assert; -import org.junit.Test; -import org.junit.experimental.categories.Category; - -@Category(TestCategoryStatement.class) +import java.util.TimeZone; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; +import net.snowflake.client.providers.SimpleResultFormatProvider; +import org.junit.jupiter.api.Tag; +import 
org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; + +@Tag(TestTags.STATEMENT) public class PreparedStatement2IT extends PreparedStatement0IT { - public PreparedStatement2IT() { - super("json"); - } - - PreparedStatement2IT(String queryFormat) { - super(queryFormat); - } - - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testStageBatchDates() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testStageBatchDates(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { Date dEpoch = new Date(0); Date dAfterEpoch = new Date(24 * 60 * 60 * 1000); @@ -111,9 +105,9 @@ public void testStageBatchDates() throws SQLException { for (int i = 0; i < dates.length; i++) { assertEquals( - "Stage binding date should match non-stage binding date", nonStageResult[i], - stageResult[i]); + stageResult[i], + "Stage binding date should match non-stage binding date"); } } } @@ -123,9 +117,10 @@ public void testStageBatchDates() throws SQLException { } } - @Test - public void testBindWithNullValue() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testBindWithNullValue(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { statement.execute( "create or replace table testBindNull(cola date, colb time, colc timestamp, cold number)"); @@ -183,9 +178,10 @@ public void testBindWithNullValue() throws SQLException { } } - @Test - public void testPrepareDDL() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testPrepareDDL(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { try { try (PreparedStatement prepStatement = @@ -203,9 +199,10 @@ public void testPrepareDDL() throws SQLException { } } - @Test - public void testPrepareSCL() throws SQLException { - try (Connection connection = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testPrepareSCL(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat)) { try (PreparedStatement prepStatement = connection.prepareStatement("use SCHEMA PUBLIC")) { prepStatement.execute(); } @@ -217,9 +214,10 @@ public void testPrepareSCL() throws SQLException { } } - @Test - public void testPrepareTCL() throws SQLException { - try (Connection connection = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testPrepareTCL(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat)) { connection.setAutoCommit(false); String[] testCases = {"BEGIN", "COMMIT"}; @@ -234,9 +232,10 @@ public void testPrepareTCL() throws SQLException { } } - @Test - public void testPrepareShowCommand() throws SQLException { - try (Connection connection = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testPrepareShowCommand(String 
queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat)) { try (PreparedStatement prepStatement = connection.prepareStatement("show databases")) { try (ResultSet resultSet = prepStatement.executeQuery()) { assertTrue(resultSet.next()); @@ -253,14 +252,16 @@ public void testPrepareShowCommand() throws SQLException { * @throws SQLException Will be thrown if any of driver calls fail * @throws InterruptedException Will be thrown if the sleep is interrupted */ - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testPrepareTimeout() throws SQLException, InterruptedException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testPrepareTimeout(String queryResultFormat) + throws SQLException, InterruptedException { try (Connection adminCon = getSnowflakeAdminConnection(); Statement adminStatement = adminCon.createStatement()) { adminStatement.execute("alter system set enable_combined_describe=true"); try { - try (Connection connection = init(); + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { statement.execute("create or replace table t(c1 string) as select 1"); statement.execute("alter session set jdbc_enable_combined_describe=true"); @@ -281,11 +282,12 @@ public void testPrepareTimeout() throws SQLException, InterruptedException { } /** Test case to make sure 2 non null bind refs was not constant folded into one */ - @Test - public void testSnow36284() throws Exception { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testSnow36284(String queryResultFormat) throws Exception { String query = "select * from (values ('a'), ('b')) x where x.COLUMN1 in (?,?);"; - try (Connection connection = init(); + try (Connection connection = getConn(queryResultFormat); PreparedStatement preparedStatement = connection.prepareStatement(query)) { preparedStatement.setString(1, "a"); preparedStatement.setString(2, "b"); @@ -296,17 +298,18 @@ public void testSnow36284() throws Exception { rowcount++; valuesReturned.add(rs.getString(1)); } - assertEquals("Should get back 2 rows", 2, rowcount); - assertEquals("", valuesReturned, Sets.newHashSet("a", "b")); + assertEquals(2, rowcount, "Should get back 2 rows"); + assertEquals(valuesReturned, Sets.newHashSet("a", "b"), ""); } } } /** Test for coalesce with bind and null arguments in a prepared statement */ - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testSnow35923() throws Exception { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testSnow35923(String queryResultFormat) throws Exception { + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { statement.execute( "alter session set " + "optimizer_eliminate_scans_for_constant_select=false"); @@ -325,14 +328,15 @@ public void testSnow35923() throws Exception { * Tests binding of object literals, including binding with object names as well as binding with * object IDs */ - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testBindObjectLiteral() throws Exception { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void 
testBindObjectLiteral(String queryResultFormat) throws Exception { long t1Id = 0; long t2Id = 0; String t1 = null; - try (Connection conn = init(); + try (Connection conn = getConn(queryResultFormat); Statement stmt = conn.createStatement()) { String sqlText = "create or replace table identifier(?) (c1 number)"; @@ -480,9 +484,10 @@ public void testBindObjectLiteral() throws Exception { } } - @Test - public void testBindTimestampTZViaString() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testBindTimestampTZViaString(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { try { statement.execute( @@ -509,16 +514,23 @@ public void testBindTimestampTZViaString() throws SQLException { * Ensures binding a string type with TIMESTAMP_TZ works. The customer has to use the specific * timestamp format: YYYY-MM-DD HH24:MI:SS.FF9 TZH:TZM */ - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testBindTimestampTZViaStringBatch() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testBindTimestampTZViaStringBatch(String queryResultFormat) throws SQLException { + TimeZone originalTimeZone = TimeZone.getDefault(); + TimeZone.setDefault(TimeZone.getTimeZone("UTC")); + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { try { statement.execute( "ALTER SESSION SET CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 1"); // enable stage bind statement.execute( "create or replace table testbindtstz(cola timestamp_tz, colb timestamp_ntz)"); + statement.execute( + "ALTER SESSION SET TIMESTAMP_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'"); + statement.execute( + "ALTER SESSION SET TIMESTAMP_NTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'"); try (PreparedStatement preparedStatement = connection.prepareStatement("insert into testbindtstz values(?,?)")) { @@ -546,6 +558,8 @@ public void testBindTimestampTZViaStringBatch() throws SQLException { } finally { statement.execute("drop table if exists testbindtstz"); } + } finally { + TimeZone.setDefault(originalTimeZone); } } @@ -555,9 +569,10 @@ public void testBindTimestampTZViaStringBatch() throws SQLException { * * @throws Exception raises if any error occurs */ - @Test - public void testSnow41620() throws Exception { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testSnow41620(String queryResultFormat) throws Exception { + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { // Create a table and insert 3 records statement.execute("CREATE or REPLACE TABLE SNOW41620 (c1 varchar(20)," + "c2 int" + " )"); @@ -592,9 +607,10 @@ public void testSnow41620() throws Exception { } } - @Test - public void testSnow50141() throws Exception { - try (Connection connection = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testSnow50141(String queryResultFormat) throws Exception { + try (Connection connection = getConn(queryResultFormat)) { try (PreparedStatement prepStatement = connection.prepareStatement("select 1 where true=?")) { prepStatement.setObject(1, 
true); try (ResultSet resultSet = prepStatement.executeQuery()) { @@ -617,9 +633,9 @@ public void testSnow50141() throws Exception { private void checkResultSetEqual(ResultSet rs1, ResultSet rs2) throws SQLException { int columns = rs1.getMetaData().getColumnCount(); assertEquals( - "Resultsets do not match in the number of columns returned", columns, - rs2.getMetaData().getColumnCount()); + rs2.getMetaData().getColumnCount(), + "Resultsets do not match in the number of columns returned"); while (rs1.next() && rs2.next()) { for (int columnIndex = 1; columnIndex <= columns; columnIndex++) { @@ -627,19 +643,20 @@ private void checkResultSetEqual(ResultSet rs1, ResultSet rs2) throws SQLExcepti final Object res2 = rs2.getObject(columnIndex); assertEquals( - String.format("%s and %s are not equal values at column %d", res1, res2, columnIndex), res1, - res2); + res2, + String.format("%s and %s are not equal values at column %d", res1, res2, columnIndex)); } assertEquals( - "Number of records returned by the results does not match", rs1.isLast(), rs2.isLast()); + rs1.isLast(), rs2.isLast(), "Number of records returned by the results does not match"); } } - @Test - public void testPreparedStatementWithSkipParsing() throws Exception { - try (Connection con = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testPreparedStatementWithSkipParsing(String queryResultFormat) throws Exception { + try (Connection con = getConn(queryResultFormat)) { PreparedStatement stmt = con.unwrap(SnowflakeConnectionV1.class).prepareStatement("select 1", true); try (ResultSet rs = stmt.executeQuery()) { @@ -649,9 +666,11 @@ public void testPreparedStatementWithSkipParsing() throws Exception { } } - @Test - public void testPreparedStatementWithSkipParsingAndBinding() throws Exception { - try (Connection con = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testPreparedStatementWithSkipParsingAndBinding(String queryResultFormat) + throws Exception { + try (Connection con = getConn(queryResultFormat); Statement statement = con.createStatement()) { statement.execute("create or replace table t(c1 int)"); try { @@ -679,9 +698,10 @@ public void testPreparedStatementWithSkipParsingAndBinding() throws Exception { * workaround is added. 
More specifically, ErrorCode returned for this statement is caught in * SnowflakePreparedStatementV1 so that execution can continue */ - @Test - public void testSnow44393() throws Exception { - try (Connection con = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testSnow44393(String queryResultFormat) throws Exception { + try (Connection con = getConn(queryResultFormat)) { assertFalse( con.createStatement() .execute("alter session set timestamp_ntz_output_format='YYYY-MM-DD HH24:MI:SS'")); @@ -697,10 +717,11 @@ public void testSnow44393() throws Exception { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testAddBatchNumericNullFloatMixed() throws Exception { - try (Connection connection = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testAddBatchNumericNullFloatMixed(String queryResultFormat) throws Exception { + try (Connection connection = getConn(queryResultFormat)) { for (int threshold = 0; threshold < 2; ++threshold) { connection .createStatement() @@ -776,9 +797,10 @@ public void testAddBatchNumericNullFloatMixed() throws Exception { } } - @Test - public void testInvalidUsageOfApi() throws Exception { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testInvalidUsageOfApi(String queryResultFormat) throws Exception { + try (Connection connection = getConn(queryResultFormat); PreparedStatement preparedStatement = connection.prepareStatement("select 1")) { final int expectedCode = ErrorCode.UNSUPPORTED_STATEMENT_TYPE_IN_EXECUTION_API.getMessageCode(); @@ -815,7 +837,7 @@ public void run() throws SQLException { private void assertException(RunnableWithSQLException runnable, int expectedCode) { try { runnable.run(); - Assert.fail(); + fail(); } catch (SQLException e) { assertThat(e.getErrorCode(), is(expectedCode)); } @@ -825,9 +847,10 @@ private interface RunnableWithSQLException { void run() throws SQLException; } - @Test - public void testCreatePreparedStatementWithParameters() throws Throwable { - try (Connection connection = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testCreatePreparedStatementWithParameters(String queryResultFormat) throws Throwable { + try (Connection connection = getConn(queryResultFormat)) { connection.prepareStatement( "select 1", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); try { @@ -855,9 +878,10 @@ public void testCreatePreparedStatementWithParameters() throws Throwable { } } - @Test - public void testPrepareAndGetMeta() throws SQLException { - try (Connection con = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testPrepareAndGetMeta(String queryResultFormat) throws SQLException { + try (Connection con = getConn(queryResultFormat)) { try (PreparedStatement prepStatement = con.prepareStatement("select 1 where 1 > ?")) { ResultSetMetaData meta = prepStatement.getMetaData(); assertThat(meta.getColumnCount(), is(1)); diff --git a/src/test/java/net/snowflake/client/jdbc/PreparedStatement2LatestIT.java b/src/test/java/net/snowflake/client/jdbc/PreparedStatement2LatestIT.java index f7ca395de..563406d23 100644 --- a/src/test/java/net/snowflake/client/jdbc/PreparedStatement2LatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/PreparedStatement2LatestIT.java @@ -6,22 +6,23 @@ import 
static net.snowflake.client.jdbc.PreparedStatement1IT.bindOneParamSet; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryStatement; -import org.junit.Assert; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; +import net.snowflake.client.providers.SimpleResultFormatProvider; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; /** * PreparedStatement integration tests for the latest JDBC driver. This doesn't work for the oldest @@ -29,19 +30,13 @@ * if the tests still are not applicable. If it is applicable, move tests to PreparedStatement2IT so * that both the latest and oldest supported driver run the tests. */ -@Category(TestCategoryStatement.class) +@Tag(TestTags.STATEMENT) public class PreparedStatement2LatestIT extends PreparedStatement0IT { - public PreparedStatement2LatestIT() { - super("json"); - } - - PreparedStatement2LatestIT(String queryFormat) { - super(queryFormat); - } - @Test - public void testPrepareUDTF() throws Exception { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testPrepareUDTF(String queryResultFormat) throws Exception { + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { try { statement.execute("create or replace table employee(id number, address text)"); @@ -77,10 +72,10 @@ public void testPrepareUDTF() throws Exception { // second argument is invalid prepStatement.setInt(1, 1); prepStatement.execute(); - Assert.fail(); + fail(); } catch (SQLException e) { // failed because argument type did not match - Assert.assertThat(e.getErrorCode(), is(1044)); + assertThat(e.getErrorCode(), is(1044)); } // create a udf with same name but different arguments and return type @@ -110,9 +105,10 @@ public void testPrepareUDTF() throws Exception { * SNOW-88426: skip bind parameter index check if prepare fails and defer the error checks to * execute */ - @Test - public void testSelectWithBinding() throws Throwable { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testSelectWithBinding(String queryResultFormat) throws Throwable { + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { try { statement.execute("create or replace table TESTNULL(created_time timestamp_ntz, mid int)"); @@ -144,9 +140,10 @@ public void testSelectWithBinding() throws Throwable { } } - 
@Test - public void testLimitBind() throws SQLException { - try (Connection connection = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testLimitBind(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat)) { String stmtStr = "select seq4() from table(generator(rowcount=>100)) limit ?"; try (PreparedStatement prepStatement = connection.prepareStatement(stmtStr)) { prepStatement.setInt(1, 10); @@ -156,9 +153,10 @@ public void testLimitBind() throws SQLException { } /** SNOW-31746 */ - @Test - public void testConstOptLimitBind() throws SQLException { - try (Connection connection = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testConstOptLimitBind(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat)) { String stmtStr = "select 1 limit ? offset ?"; try (PreparedStatement prepStatement = connection.prepareStatement(stmtStr)) { prepStatement.setInt(1, 10); @@ -172,10 +170,11 @@ public void testConstOptLimitBind() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testTableFuncBindInput() throws SQLException { - try (Connection connection = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testTableFuncBindInput(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat)) { try (PreparedStatement prepStatement = connection.prepareStatement(tableFuncSQL)) { prepStatement.setInt(1, 2); try (ResultSet resultSet = prepStatement.executeQuery()) { @@ -185,9 +184,10 @@ public void testTableFuncBindInput() throws SQLException { } } - @Test - public void testExecuteLargeBatch() throws SQLException { - try (Connection con = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testExecuteLargeBatch(String queryResultFormat) throws SQLException { + try (Connection con = getConn(queryResultFormat); Statement statement = con.createStatement()) { try { statement.execute("create or replace table mytab(id int)"); @@ -212,11 +212,12 @@ public void testExecuteLargeBatch() throws SQLException { } } - @Test - public void testRemoveExtraDescribeCalls() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testRemoveExtraDescribeCalls(String queryResultFormat) throws SQLException { String queryId1 = null; String queryId2 = null; - try (Connection connection = init(); + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { try { statement.execute("create or replace table test_uuid_with_bind(c1 number)"); @@ -264,10 +265,12 @@ public void testRemoveExtraDescribeCalls() throws SQLException { } } - @Test - public void testRemoveExtraDescribeCallsSanityCheck() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testRemoveExtraDescribeCallsSanityCheck(String queryResultFormat) + throws SQLException { String queryId1; - try (Connection connection = init()) { + try (Connection connection = getConn(queryResultFormat)) { try (PreparedStatement preparedStatement = connection.prepareStatement( "create or replace table test_uuid_with_bind(c1 number, c2 string)")) { @@ -307,9 +310,10 @@ public void 
testRemoveExtraDescribeCallsSanityCheck() throws SQLException { } } - @Test - public void testAlreadyDescribedMultipleResults() throws SQLException { - try (Connection connection = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testAlreadyDescribedMultipleResults(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat)) { try (PreparedStatement prepStatement = connection.prepareStatement(insertSQL)) { bindOneParamSet(prepStatement, 1, 1.22222, (float) 1.2, "test", 12121212121L, (short) 12); prepStatement.execute(); @@ -342,9 +346,10 @@ public void testAlreadyDescribedMultipleResults() throws SQLException { * * @throws Exception */ - @Test - public void testConsecutiveBatchInsertError() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testConsecutiveBatchInsertError(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat); Statement statement = connection.createStatement()) { try { statement.execute("create or replace table testStageArrayBind(c1 integer, c2 string)"); @@ -381,9 +386,10 @@ public void testConsecutiveBatchInsertError() throws SQLException { } } - @Test - public void testToString() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testToString(String queryResultFormat) throws SQLException { + try (Connection connection = getConn(queryResultFormat); PreparedStatement prepStatement = connection.prepareStatement("select current_version() --testing toString()")) { diff --git a/src/test/java/net/snowflake/client/jdbc/PreparedStatementArrow1IT.java b/src/test/java/net/snowflake/client/jdbc/PreparedStatementArrow1IT.java deleted file mode 100644 index 379a471dd..000000000 --- a/src/test/java/net/snowflake/client/jdbc/PreparedStatementArrow1IT.java +++ /dev/null @@ -1,15 +0,0 @@ -/* - * Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. - */ -package net.snowflake.client.jdbc; - -import net.snowflake.client.category.TestCategoryArrow; -import org.junit.experimental.categories.Category; - -/** Test PreparedStatement in ARROW format 2/2 */ -@Category(TestCategoryArrow.class) -public class PreparedStatementArrow1IT extends PreparedStatement1IT { - public PreparedStatementArrow1IT() { - super("arrow"); - } -} diff --git a/src/test/java/net/snowflake/client/jdbc/PreparedStatementArrow1LatestIT.java b/src/test/java/net/snowflake/client/jdbc/PreparedStatementArrow1LatestIT.java deleted file mode 100644 index 5c68c198b..000000000 --- a/src/test/java/net/snowflake/client/jdbc/PreparedStatementArrow1LatestIT.java +++ /dev/null @@ -1,18 +0,0 @@ -/* - * Copyright (c) 2012-2020 Snowflake Computing Inc. All right reserved. - */ -package net.snowflake.client.jdbc; - -import net.snowflake.client.category.TestCategoryStatement; -import org.junit.experimental.categories.Category; - -/** - * PreparedStatement integration tests for the latest JDBC driver. This doesn't work for the oldest - * supported driver. Drop this file when PrepareStatement1IT is dropped. 
- */ -@Category(TestCategoryStatement.class) -public class PreparedStatementArrow1LatestIT extends PreparedStatement1LatestIT { - public PreparedStatementArrow1LatestIT() { - super("arrow"); - } -} diff --git a/src/test/java/net/snowflake/client/jdbc/PreparedStatementArrow2IT.java b/src/test/java/net/snowflake/client/jdbc/PreparedStatementArrow2IT.java deleted file mode 100644 index d2b7b9f85..000000000 --- a/src/test/java/net/snowflake/client/jdbc/PreparedStatementArrow2IT.java +++ /dev/null @@ -1,15 +0,0 @@ -/* - * Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. - */ -package net.snowflake.client.jdbc; - -import net.snowflake.client.category.TestCategoryArrow; -import org.junit.experimental.categories.Category; - -/** Test PreparedStatement in ARROW format 2/2 */ -@Category(TestCategoryArrow.class) -public class PreparedStatementArrow2IT extends PreparedStatement2IT { - public PreparedStatementArrow2IT() { - super("arrow"); - } -} diff --git a/src/test/java/net/snowflake/client/jdbc/PreparedStatementArrow2LatestIT.java b/src/test/java/net/snowflake/client/jdbc/PreparedStatementArrow2LatestIT.java deleted file mode 100644 index 9c3922de4..000000000 --- a/src/test/java/net/snowflake/client/jdbc/PreparedStatementArrow2LatestIT.java +++ /dev/null @@ -1,18 +0,0 @@ -/* - * Copyright (c) 2012-2020 Snowflake Computing Inc. All right reserved. - */ -package net.snowflake.client.jdbc; - -import net.snowflake.client.category.TestCategoryArrow; -import org.junit.experimental.categories.Category; - -/** - * PreparedStatement integration tests for the latest JDBC driver. This doesn't work for the oldest - * supported driver. Drop this file when PrepareStatement2IT is dropped. - */ -@Category(TestCategoryArrow.class) -public class PreparedStatementArrow2LatestIT extends PreparedStatement2LatestIT { - public PreparedStatementArrow2LatestIT() { - super("arrow"); - } -} diff --git a/src/test/java/net/snowflake/client/jdbc/PreparedStatementFeatureNotSupportedIT.java b/src/test/java/net/snowflake/client/jdbc/PreparedStatementFeatureNotSupportedIT.java index f80a00528..be20395a5 100644 --- a/src/test/java/net/snowflake/client/jdbc/PreparedStatementFeatureNotSupportedIT.java +++ b/src/test/java/net/snowflake/client/jdbc/PreparedStatementFeatureNotSupportedIT.java @@ -6,11 +6,11 @@ import java.net.URL; import java.sql.Connection; import java.sql.PreparedStatement; -import net.snowflake.client.category.TestCategoryStatement; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryStatement.class) +@Tag(TestTags.STATEMENT) public class PreparedStatementFeatureNotSupportedIT extends BaseJDBCTest { @Test public void testFeatureNotSupportedException() throws Throwable { diff --git a/src/test/java/net/snowflake/client/jdbc/PreparedStatementLargeUpdateLatestIT.java b/src/test/java/net/snowflake/client/jdbc/PreparedStatementLargeUpdateLatestIT.java index 883fe0c4d..c12242af8 100644 --- a/src/test/java/net/snowflake/client/jdbc/PreparedStatementLargeUpdateLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/PreparedStatementLargeUpdateLatestIT.java @@ -3,7 +3,7 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.mockito.Mockito.spy; import java.sql.Connection; @@ -11,15 +11,14 @@ import java.sql.SQLException; import 
java.sql.Statement; import java.util.Map; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryStatement; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.ExecTimeTelemetryData; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; -@Category(TestCategoryStatement.class) +@Tag(TestTags.STATEMENT) public class PreparedStatementLargeUpdateLatestIT extends BaseJDBCTest { /** @@ -28,7 +27,7 @@ public class PreparedStatementLargeUpdateLatestIT extends BaseJDBCTest { * @throws Throwable */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testLargeUpdate() throws Throwable { try (Connection con = getConnection(); Statement statement = con.createStatement()) { @@ -64,7 +63,7 @@ public void testLargeUpdate() throws Throwable { * @throws SQLException */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testExecuteLargeBatchOverIntMax() throws SQLException { try (Connection connection = getConnection(); Statement statement = connection.createStatement()) { diff --git a/src/test/java/net/snowflake/client/jdbc/ProxyLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ProxyLatestIT.java index 4fca52d1c..85f1e1a28 100644 --- a/src/test/java/net/snowflake/client/jdbc/ProxyLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ProxyLatestIT.java @@ -1,7 +1,7 @@ package net.snowflake.client.jdbc; -import static junit.framework.TestCase.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -13,20 +13,20 @@ import java.sql.Statement; import java.util.Objects; import java.util.Properties; -import net.snowflake.client.category.TestCategoryOthers; +import net.snowflake.client.category.TestTags; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpPost; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; import org.apache.http.util.EntityUtils; -import org.junit.After; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class ProxyLatestIT extends BaseWiremockTest { - @After + @AfterEach public void tearDown() { super.tearDown(); unsetJvmProperties(); @@ -122,9 +122,9 @@ private void verifyRequestToProxy(String pathPattern, int expectedCount) { ObjectMapper mapper = new ObjectMapper(); JsonNode json = mapper.readTree(responseString); assertEquals( - "expected request count not matched for pattern: " + pathPattern, expectedCount, - json.get("count").asInt()); + json.get("count").asInt(), + "expected request count not matched for pattern: " + pathPattern); } catch (IOException e) { throw new RuntimeException(e); } diff --git a/src/test/java/net/snowflake/client/jdbc/PutFileWithSpaceIncludedIT.java 
b/src/test/java/net/snowflake/client/jdbc/PutFileWithSpaceIncludedIT.java index 5cd03355c..940ab44e2 100644 --- a/src/test/java/net/snowflake/client/jdbc/PutFileWithSpaceIncludedIT.java +++ b/src/test/java/net/snowflake/client/jdbc/PutFileWithSpaceIncludedIT.java @@ -3,8 +3,8 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; import java.io.File; import java.io.FileInputStream; @@ -13,23 +13,22 @@ import java.sql.ResultSet; import java.sql.Statement; import net.snowflake.client.TestUtil; -import net.snowflake.client.category.TestCategoryOthers; +import net.snowflake.client.category.TestTags; import org.apache.commons.compress.archivers.tar.TarArchiveEntry; import org.apache.commons.compress.archivers.tar.TarArchiveInputStream; import org.apache.commons.io.IOUtils; -import org.junit.Ignore; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class PutFileWithSpaceIncludedIT extends BaseJDBCTest { - @Rule public TemporaryFolder tmpFolder = new TemporaryFolder(); + @TempDir private File tmpFolder; /** Test PUT command to send a data file, which file name contains a space. */ @Test - @Ignore + @Disabled public void putFileWithSpaceIncluded() throws Exception { String AWS_SECRET_KEY = TestUtil.systemGetEnv("AWS_SECRET_ACCESS_KEY"); String AWS_KEY_ID = TestUtil.systemGetEnv("AWS_ACCESS_KEY_ID"); @@ -43,7 +42,8 @@ public void putFileWithSpaceIncluded() throws Exception { assertNotNull(AWS_SECRET_KEY); assertNotNull(AWS_KEY_ID); - File dataFolder = tmpFolder.newFolder(); + File dataFolder = new File(tmpFolder, "data"); + dataFolder.mkdirs(); String tarFile = getFullPathFileInResource("snow-13400.tar"); FileInputStream fis = new FileInputStream(tarFile); TarArchiveInputStream tis = new TarArchiveInputStream(fis); diff --git a/src/test/java/net/snowflake/client/jdbc/PutUnescapeBackslashIT.java b/src/test/java/net/snowflake/client/jdbc/PutUnescapeBackslashIT.java index f9579636d..5de4ec5bf 100644 --- a/src/test/java/net/snowflake/client/jdbc/PutUnescapeBackslashIT.java +++ b/src/test/java/net/snowflake/client/jdbc/PutUnescapeBackslashIT.java @@ -18,15 +18,15 @@ import java.sql.ResultSet; import java.sql.Statement; import net.snowflake.client.AbstractDriverIT; -import net.snowflake.client.category.TestCategoryOthers; +import net.snowflake.client.category.TestTags; import org.apache.commons.io.FileUtils; -import org.junit.BeforeClass; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class PutUnescapeBackslashIT extends AbstractDriverIT { - @BeforeClass + @BeforeAll public static void setUpClass() throws Exception {} /** diff --git a/src/test/java/net/snowflake/client/jdbc/RestRequestTest.java b/src/test/java/net/snowflake/client/jdbc/RestRequestTest.java index 6e1a26428..7fdaab9bb 100644 --- a/src/test/java/net/snowflake/client/jdbc/RestRequestTest.java +++ 
b/src/test/java/net/snowflake/client/jdbc/RestRequestTest.java @@ -3,14 +3,14 @@ */ package net.snowflake.client.jdbc; +import static net.snowflake.client.AssumptionUtils.assumeRunningOnLinuxMac; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertThrows; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; -import static org.junit.Assume.assumeFalse; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.Mockito.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -21,7 +21,6 @@ import java.util.ArrayList; import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; -import net.snowflake.client.RunningNotOnLinuxMac; import net.snowflake.client.core.ExecTimeTelemetryData; import net.snowflake.client.core.HttpUtil; import net.snowflake.client.jdbc.telemetryOOB.TelemetryService; @@ -32,7 +31,7 @@ import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpUriRequest; import org.apache.http.impl.client.CloseableHttpClient; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; @@ -337,18 +336,16 @@ class TestCase { for (TestCase t : testCases) { if (t.result) { assertTrue( + RestRequest.isNonRetryableHTTPCode(anyStatusCodeResponse(t.statusCode), t.retryHTTP403), String.format( "Result must be true but false: HTTP Code: %d, RetryHTTP403: %s", - t.statusCode, t.retryHTTP403), - RestRequest.isNonRetryableHTTPCode( - anyStatusCodeResponse(t.statusCode), t.retryHTTP403)); + t.statusCode, t.retryHTTP403)); } else { assertFalse( + RestRequest.isNonRetryableHTTPCode(anyStatusCodeResponse(t.statusCode), t.retryHTTP403), String.format( "Result must be false but true: HTTP Code: %d, RetryHTTP403: %s", - t.statusCode, t.retryHTTP403), - RestRequest.isNonRetryableHTTPCode( - anyStatusCodeResponse(t.statusCode), t.retryHTTP403)); + t.statusCode, t.retryHTTP403)); } } } @@ -459,8 +456,8 @@ public CloseableHttpResponse answer(InvocationOnMock invocation) throws Throwabl execute(client, "fakeurl.com/?requestId=abcd-1234", 0, 0, 0, true, false); } - @Test(expected = SnowflakeSQLException.class) - public void testMaxRetriesExceeded() throws IOException, SnowflakeSQLException { + @Test + public void testMaxRetriesExceeded() throws IOException { boolean telemetryEnabled = TelemetryService.getInstance().isEnabled(); CloseableHttpClient client = mock(CloseableHttpClient.class); @@ -482,8 +479,9 @@ public CloseableHttpResponse answer(InvocationOnMock invocation) throws Throwabl try { TelemetryService.disable(); - execute(client, "fakeurl.com/?requestId=abcd-1234", 0, 0, 0, true, false, 1); - fail("testMaxRetries"); + assertThrows( + SnowflakeSQLException.class, + () -> execute(client, "fakeurl.com/?requestId=abcd-1234", 0, 0, 0, true, false, 1)); } finally { if (telemetryEnabled) { TelemetryService.enable(); @@ -516,8 +514,8 @@ public CloseableHttpResponse answer(InvocationOnMock invocationOnMock) execute(client, "fakeurl.com/?requestId=abcd-1234", 0, 0, 0, true, false, 1); } - 
@Test(expected = SnowflakeSQLException.class) - public void testLoginMaxRetries() throws IOException, SnowflakeSQLException { + @Test + public void testLoginMaxRetries() throws IOException { boolean telemetryEnabled = TelemetryService.getInstance().isEnabled(); CloseableHttpClient client = mock(CloseableHttpClient.class); @@ -539,8 +537,9 @@ public CloseableHttpResponse answer(InvocationOnMock invocation) throws Throwabl try { TelemetryService.disable(); - execute(client, "/session/v1/login-request", 0, 0, 0, true, false, 1); - fail("testMaxRetries"); + assertThrows( + SnowflakeSQLException.class, + () -> execute(client, "/session/v1/login-request", 0, 0, 0, true, false, 1)); } finally { if (telemetryEnabled) { TelemetryService.enable(); @@ -552,7 +551,7 @@ public CloseableHttpResponse answer(InvocationOnMock invocation) throws Throwabl @Test public void testLoginTimeout() throws IOException { - assumeFalse(RunningNotOnLinuxMac.isNotRunningOnLinuxMac()); + assumeRunningOnLinuxMac(); boolean telemetryEnabled = TelemetryService.getInstance().isEnabled(); CloseableHttpClient client = mock(CloseableHttpClient.class); @@ -643,18 +642,18 @@ public void shouldGenerateBackoffInRangeExceptTheLastBackoff() { elapsedMilliForTransientIssues); assertTrue( - "Backoff should be lower or equal to max backoff limit", - backoffInMilli <= maxBackoffInMilli); + backoffInMilli <= maxBackoffInMilli, + "Backoff should be lower or equal to max backoff limit"); if (elapsedMilliForTransientIssues + backoffInMilli >= retryTimeoutInMilli) { assertEquals( - "Backoff should fill time till retry timeout", retryTimeoutInMilli - elapsedMilliForTransientIssues, - backoffInMilli); + backoffInMilli, + "Backoff should fill time till retry timeout"); break; } else { assertTrue( - "Backoff should be higher or equal to min backoff limit", - backoffInMilli >= minBackoffInMilli); + backoffInMilli >= minBackoffInMilli, + "Backoff should be higher or equal to min backoff limit"); } elapsedMilliForTransientIssues += backoffInMilli; } diff --git a/src/test/java/net/snowflake/client/jdbc/RestRequestWiremockLatestIT.java b/src/test/java/net/snowflake/client/jdbc/RestRequestWiremockLatestIT.java index 76856b985..505af0b53 100644 --- a/src/test/java/net/snowflake/client/jdbc/RestRequestWiremockLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/RestRequestWiremockLatestIT.java @@ -1,16 +1,16 @@ package net.snowflake.client.jdbc; import java.util.concurrent.atomic.AtomicBoolean; -import net.snowflake.client.category.TestCategoryOthers; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.ExecTimeTelemetryData; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpGet; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClientBuilder; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class RestRequestWiremockLatestIT extends BaseWiremockTest { String connectionResetByPeerScenario = diff --git a/src/test/java/net/snowflake/client/jdbc/ResultJsonParserV2Test.java b/src/test/java/net/snowflake/client/jdbc/ResultJsonParserV2Test.java index 7349a26f2..0675b9758 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultJsonParserV2Test.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultJsonParserV2Test.java @@ -3,14 +3,14 @@ */ package 
net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import net.snowflake.client.core.SFSession; import org.apache.commons.text.StringEscapeUtils; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** This is the unit tests for ResultJsonParserV2 */ public class ResultJsonParserV2Test { diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSet0IT.java b/src/test/java/net/snowflake/client/jdbc/ResultSet0IT.java index 90cc98aa6..bebe0a54e 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSet0IT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSet0IT.java @@ -3,24 +3,22 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.Properties; -import net.snowflake.client.category.TestCategoryResultSet; -import org.junit.Before; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; /** Result set test base class. */ -@Category(TestCategoryResultSet.class) +@Tag(TestTags.RESULT_SET) public class ResultSet0IT extends BaseJDBCWithSharedConnectionIT { - private final String queryResultFormat; - - public Connection init(Properties paramProperties) throws SQLException { + public Connection init(Properties paramProperties, String queryResultFormat) throws SQLException { Connection conn = BaseJDBCTest.getConnection(DONT_INJECT_SOCKET_TIMEOUT, paramProperties, false, false); try (Statement stmt = conn.createStatement()) { @@ -29,11 +27,9 @@ public Connection init(Properties paramProperties) throws SQLException { return conn; } - @Before + @BeforeEach public void setUp() throws SQLException { try (Statement statement = connection.createStatement()) { - - statement.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); // TEST_RS statement.execute("create or replace table test_rs (colA string)"); statement.execute("insert into test_rs values('rowOne')"); @@ -50,22 +46,22 @@ public void setUp() throws SQLException { + "error_on_column_count_mismatch=false)"); // put files assertTrue( - "Failed to put a file", statement.execute( - "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @%orders_jdbc")); + "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @%orders_jdbc"), + "Failed to put a file"); assertTrue( - "Failed to put a file", statement.execute( - "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE_2) + " @%orders_jdbc")); + "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE_2) + " @%orders_jdbc"), + "Failed to put a file"); int numRows = statement.executeUpdate("copy into orders_jdbc"); - assertEquals("Unexpected number of rows copied: " + numRows, 73, numRows); + assertEquals(73, numRows, "Unexpected number of rows copied: " + numRows); } } - ResultSet numberCrossTesting() throws SQLException { - Statement statement = connection.createStatement(); + ResultSet numberCrossTesting(String queryResultFormat) throws 
SQLException { + Statement statement = createStatement(queryResultFormat); statement.execute( "create or replace table test_types(c1 number, c2 integer, c3 float, c4 boolean," + "c5 char, c6 varchar, c7 date, c8 datetime, c9 time, c10 timestamp_ltz, " @@ -80,8 +76,4 @@ ResultSet numberCrossTesting() throws SQLException { statement.execute("insert into test_types (c5, c6) values('h', 'hello')"); return statement.executeQuery("select * from test_types"); } - - ResultSet0IT(String queryResultFormat) { - this.queryResultFormat = queryResultFormat; - } } diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetAlreadyClosedIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetAlreadyClosedIT.java index 82f3c3244..091c7928e 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetAlreadyClosedIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetAlreadyClosedIT.java @@ -3,8 +3,8 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.math.BigDecimal; import java.sql.DatabaseMetaData; @@ -12,11 +12,11 @@ import java.sql.SQLException; import java.sql.Statement; import java.util.Calendar; -import net.snowflake.client.category.TestCategoryResultSet; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryResultSet.class) +@Tag(TestTags.RESULT_SET) public class ResultSetAlreadyClosedIT extends BaseJDBCWithSharedConnectionIT { @Test diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForce0MultiTimeZone.java b/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForce0MultiTimeZone.java index c6edc67fb..69c6031f3 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForce0MultiTimeZone.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForce0MultiTimeZone.java @@ -6,46 +6,52 @@ import java.sql.Connection; import java.sql.SQLException; import java.sql.Statement; -import java.util.ArrayList; import java.util.List; import java.util.TimeZone; -import org.junit.After; -import org.junit.Before; +import net.snowflake.client.providers.ProvidersUtil; +import net.snowflake.client.providers.ScaleProvider; +import net.snowflake.client.providers.SimpleResultFormatProvider; +import net.snowflake.client.providers.SnowflakeArgumentsProvider; +import net.snowflake.client.providers.TimezoneProvider; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.provider.Arguments; abstract class ResultSetArrowForce0MultiTimeZone extends BaseJDBCTest { - static List testData() { - String[] timeZones = new String[] {"UTC", "America/New_York", "MEZ"}; - String[] queryFormats = new String[] {"json", "arrow"}; - List ret = new ArrayList<>(); - for (String queryFormat : queryFormats) { - for (String timeZone : timeZones) { - ret.add(new Object[] {queryFormat, timeZone}); - } + protected static class DataProvider extends SnowflakeArgumentsProvider { + @Override + protected List rawArguments(ExtensionContext context) { + return ProvidersUtil.cartesianProduct( + context, new SimpleResultFormatProvider(), new TimezoneProvider(3)); } - return ret; } - protected final String queryResultFormat; - 
protected final String tz; - private TimeZone origTz; - - ResultSetArrowForce0MultiTimeZone(String queryResultFormat, String timeZone) { - this.queryResultFormat = queryResultFormat; - this.tz = timeZone; + protected static class DataWithScaleProvider extends SnowflakeArgumentsProvider { + @Override + protected List rawArguments(ExtensionContext context) { + return ProvidersUtil.cartesianProduct(context, new DataProvider(), new ScaleProvider()); + } } - @Before - public void setUp() { + private static TimeZone origTz; + + @BeforeAll + public static void setUp() { origTz = TimeZone.getDefault(); - TimeZone.setDefault(TimeZone.getTimeZone(this.tz)); } - @After - public void tearDown() { + @AfterAll + public static void tearDown() { TimeZone.setDefault(origTz); } - Connection init(String table, String column, String values) throws SQLException { + protected static void setTimezone(String tz) { + TimeZone.setDefault(TimeZone.getTimeZone(tz)); + } + + Connection init(String table, String column, String values, String queryResultFormat) + throws SQLException { Connection con = BaseJDBCTest.getConnection(); try (Statement statement = con.createStatement()) { diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForceLTZMultiTimeZoneIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForceLTZMultiTimeZoneIT.java index f998fb5d4..a612870a5 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForceLTZMultiTimeZoneIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForceLTZMultiTimeZoneIT.java @@ -3,45 +3,31 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.text.SimpleDateFormat; -import java.util.Collection; import java.util.TimeZone; -import net.snowflake.client.category.TestCategoryArrow; +import net.snowflake.client.category.TestTags; import org.apache.commons.lang3.StringUtils; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; /** Compare json and arrow resultSet behaviors 1/2 */ -@RunWith(Parameterized.class) -@Category(TestCategoryArrow.class) +@Tag(TestTags.ARROW) public class ResultSetArrowForceLTZMultiTimeZoneIT extends ResultSetArrowForce0MultiTimeZone { - @Parameterized.Parameters(name = "format={0}, tz={1}") - public static Collection data() { - return ResultSetArrowForce0MultiTimeZone.testData(); - } - - public ResultSetArrowForceLTZMultiTimeZoneIT(String queryResultFormat, String timeZone) { - super(queryResultFormat, timeZone); - } - - @Test - public void testTimestampLTZ() throws SQLException { - for (int scale = 0; scale <= 9; scale++) { - testTimestampLTZWithScale(scale); - } - } - private void testTimestampLTZWithScale(int scale) throws SQLException { + @ParameterizedTest + @ArgumentsSource(DataWithScaleProvider.class) + public void testTimestampLTZWithScale(String queryResultFormat, String tz, int scale) + throws SQLException { + setTimezone(tz); String[] cases 
= { "2017-01-01 12:00:00 Z", "2014-01-02 16:00:00 Z", @@ -72,7 +58,7 @@ private void testTimestampLTZWithScale(int scale) throws SQLException { String column = "(a timestamp_ltz(" + scale + "))"; String values = "('" + StringUtils.join(cases, "'),('") + "'), (null)"; - Connection con = init(table, column, values); + Connection con = init(table, column, values, queryResultFormat); ResultSet rs = con.createStatement().executeQuery("select * from " + table); int i = 0; while (i < cases.length) { @@ -85,8 +71,11 @@ private void testTimestampLTZWithScale(int scale) throws SQLException { finish(table, con); } - @Test - public void testTimestampLTZOutputFormat() throws SQLException { + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + public void testTimestampLTZOutputFormat(String queryResultFormat, String tz) + throws SQLException { + setTimezone(tz); String[] cases = {"2017-01-01 12:00:00 Z", "2014-01-02 16:00:00 Z", "2014-01-02 12:34:56 Z"}; long[] times = {1483272000000L, 1388678400000L, 1388666096000L}; @@ -99,7 +88,7 @@ public void testTimestampLTZOutputFormat() throws SQLException { String column = "(a timestamp_ltz)"; String values = "('" + StringUtils.join(cases, "'),('") + "')"; - try (Connection con = init(table, column, values); + try (Connection con = init(table, column, values, queryResultFormat); Statement statement = con.createStatement()) { try { // use initialized ltz output format @@ -146,13 +135,14 @@ public void testTimestampLTZOutputFormat() throws SQLException { } } finally { statement.execute("drop table " + table); - System.clearProperty("user.timezone"); } } } - @Test - public void testTimestampLTZWithNulls() throws SQLException { + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + public void testTimestampLTZWithNulls(String queryResultFormat, String tz) throws SQLException { + setTimezone(tz); String[] cases = { "2017-01-01 12:00:00 Z", "2014-01-02 16:00:00 Z", @@ -183,7 +173,7 @@ public void testTimestampLTZWithNulls() throws SQLException { String column = "(a timestamp_ltz)"; String values = "('" + StringUtils.join(cases, "'), (null),('") + "')"; - try (Connection con = init(table, column, values); + try (Connection con = init(table, column, values, queryResultFormat); Statement statement = con.createStatement(); ResultSet rs = statement.executeQuery("select * from " + table)) { try { @@ -200,13 +190,14 @@ public void testTimestampLTZWithNulls() throws SQLException { } } finally { statement.execute("drop table " + table); - System.clearProperty("user.timezone"); } } } - @Test - public void testTimestampLTZWithNanos() throws SQLException { + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + public void testTimestampLTZWithNanos(String queryResultFormat, String tz) throws SQLException { + setTimezone(tz); String[] cases = { "2017-01-01 12:00:00.123456789", "2014-01-02 16:00:00.000000001", @@ -229,7 +220,7 @@ public void testTimestampLTZWithNanos() throws SQLException { String column = "(a timestamp_ltz)"; String values = "('" + StringUtils.join(cases, " Z'),('") + " Z'), (null)"; - try (Connection con = init(table, column, values); + try (Connection con = init(table, column, values, queryResultFormat); Statement statement = con.createStatement(); ResultSet rs = statement.executeQuery("select * from " + table)) { try { @@ -243,7 +234,6 @@ public void testTimestampLTZWithNanos() throws SQLException { assertNull(rs.getString(1)); } finally { statement.execute("drop table " + table); - System.clearProperty("user.timezone"); } } } diff --git 
a/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForceTZMultiTimeZoneIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForceTZMultiTimeZoneIT.java index e073bfccf..db0984081 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForceTZMultiTimeZoneIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetArrowForceTZMultiTimeZoneIT.java @@ -3,43 +3,29 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; -import java.util.Collection; -import net.snowflake.client.category.TestCategoryArrow; +import net.snowflake.client.category.TestTags; import org.apache.commons.lang3.StringUtils; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; /** Compare json and arrow resultSet behaviors 2/2 */ -@RunWith(Parameterized.class) -@Category(TestCategoryArrow.class) +@Tag(TestTags.ARROW) public class ResultSetArrowForceTZMultiTimeZoneIT extends ResultSetArrowForce0MultiTimeZone { - @Parameterized.Parameters(name = "format={0}, tz={1}") - public static Collection data() { - return ResultSetArrowForce0MultiTimeZone.testData(); - } - - public ResultSetArrowForceTZMultiTimeZoneIT(String queryResultFormat, String timeZone) { - super(queryResultFormat, timeZone); - } - - @Test - public void testTimestampTZ() throws SQLException { - for (int scale = 0; scale <= 9; scale++) { - testTimestampTZWithScale(scale); - } - } - private void testTimestampTZWithScale(int scale) throws SQLException { + @ParameterizedTest + @ArgumentsSource(DataWithScaleProvider.class) + public void testTimestampTZWithScale(String queryResultFormat, String tz, int scale) + throws SQLException { + setTimezone(tz); String[] cases = { "2017-01-01 12:00:00 Z", "2014-01-02 16:00:00 Z", @@ -67,7 +53,7 @@ private void testTimestampTZWithScale(int scale) throws SQLException { String column = "(a timestamp_tz(" + scale + "))"; String values = "('" + StringUtils.join(cases, "'),('") + "'), (null)"; - try (Connection con = init(table, column, values); + try (Connection con = init(table, column, values, queryResultFormat); Statement statement = con.createStatement(); ResultSet rs = statement.executeQuery("select * from " + table)) { try { @@ -81,13 +67,14 @@ private void testTimestampTZWithScale(int scale) throws SQLException { assertNull(rs.getString(1)); } finally { statement.execute("drop table " + table); - System.clearProperty("user.timezone"); } } } - @Test - public void testTimestampTZWithNanos() throws SQLException { + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + public void testTimestampTZWithNanos(String queryResultFormat, String tz) throws SQLException { + setTimezone(tz); String[] cases = { "2017-01-01 12:00:00.1", "2014-01-02 16:00:00.123456789", @@ -119,7 +106,7 @@ public void testTimestampTZWithNanos() throws SQLException { String column = "(a timestamp_tz)"; String values = "('" + StringUtils.join(cases, " Z'),('") + " Z'), (null)"; - 
try (Connection con = init(table, column, values); + try (Connection con = init(table, column, values, queryResultFormat); Statement statement = con.createStatement(); ResultSet rs = statement.executeQuery("select * from " + table)) { try { @@ -138,13 +125,14 @@ public void testTimestampTZWithNanos() throws SQLException { assertNull(rs.getString(1)); } finally { statement.execute("drop table " + table); - System.clearProperty("user.timezone"); } } } - @Test - public void testTimestampTZWithMicros() throws SQLException { + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + public void testTimestampTZWithMicros(String queryResultFormat, String tz) throws SQLException { + setTimezone(tz); String[] cases = { "2017-01-01 12:00:00.1", "2014-01-02 16:00:00.123456", @@ -178,7 +166,7 @@ public void testTimestampTZWithMicros() throws SQLException { String column = "(a timestamp_tz(6))"; String values = "('" + StringUtils.join(cases, " Z'),('") + " Z'), (null)"; - try (Connection con = init(table, column, values); + try (Connection con = init(table, column, values, queryResultFormat); Statement statement = con.createStatement(); ResultSet rs = statement.executeQuery("select * from " + table)) { try { @@ -197,7 +185,6 @@ public void testTimestampTZWithMicros() throws SQLException { assertNull(rs.getString(1)); } finally { statement.execute("drop table " + table); - System.clearProperty("user.timezone"); } } } diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetArrowIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetArrowIT.java deleted file mode 100644 index a7e982024..000000000 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetArrowIT.java +++ /dev/null @@ -1,14 +0,0 @@ -/* - * Copyright (c) 2012-2020 Snowflake Computing Inc. All right reserved. - */ -package net.snowflake.client.jdbc; - -import net.snowflake.client.category.TestCategoryArrow; -import org.junit.experimental.categories.Category; - -@Category(TestCategoryArrow.class) -public class ResultSetArrowIT extends ResultSetIT { - public ResultSetArrowIT() { - super("arrow"); - } -} diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetArrowLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetArrowLatestIT.java deleted file mode 100644 index 4ea7f7d8f..000000000 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetArrowLatestIT.java +++ /dev/null @@ -1,18 +0,0 @@ -/* - * Copyright (c) 2012-2020 Snowflake Computing Inc. All right reserved. - */ -package net.snowflake.client.jdbc; - -import net.snowflake.client.category.TestCategoryArrow; -import org.junit.experimental.categories.Category; - -/** - * ResultSet integration tests for the latest JDBC driver. This doesn't work for the oldest - * supported driver. Drop this file when ResultSetLatestIT is dropped. 
- */ -@Category(TestCategoryArrow.class) -public class ResultSetArrowLatestIT extends ResultSetLatestIT { - public ResultSetArrowLatestIT() { - super("arrow"); - } -} diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetAsyncIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetAsyncIT.java index a6a63a65d..b86a65c95 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetAsyncIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetAsyncIT.java @@ -4,11 +4,11 @@ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.Reader; import java.math.BigDecimal; @@ -28,13 +28,13 @@ import java.util.List; import java.util.Map; import net.snowflake.client.TestUtil; -import net.snowflake.client.category.TestCategoryResultSet; +import net.snowflake.client.category.TestTags; import net.snowflake.common.core.SqlState; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** Test AsyncResultSet */ -@Category(TestCategoryResultSet.class) +@Tag(TestTags.RESULT_SET) public class ResultSetAsyncIT extends BaseJDBCWithSharedConnectionIT { @Test @@ -155,11 +155,11 @@ public void testOrderAndClosureFunctions() throws SQLException { statement.unwrap(SnowflakeStatement.class).executeAsyncQuery("select * from test_rsmd"); // test isFirst, isBeforeFirst - assertTrue("should be before the first", resultSet.isBeforeFirst()); - assertFalse("should not be the first", resultSet.isFirst()); + assertTrue(resultSet.isBeforeFirst(), "should be before the first"); + assertFalse(resultSet.isFirst(), "should not be the first"); resultSet.next(); - assertFalse("should not be before the first", resultSet.isBeforeFirst()); - assertTrue("should be the first", resultSet.isFirst()); + assertFalse(resultSet.isBeforeFirst(), "should not be before the first"); + assertTrue(resultSet.isFirst(), "should be the first"); // test isClosed functions queryID = resultSet.unwrap(SnowflakeResultSet.class).getQueryID(); diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetAsyncLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetAsyncLatestIT.java index dd534d469..9bcbd83b4 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetAsyncLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetAsyncLatestIT.java @@ -4,19 +4,19 @@ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.sql.Connection; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.Statement; -import net.snowflake.client.category.TestCategoryResultSet; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** Test AsyncResultSet */ -@Category(TestCategoryResultSet.class) +@Tag(TestTags.RESULT_SET) public class ResultSetAsyncLatestIT 
extends BaseJDBCTest { @Test public void testAsyncResultSet() throws SQLException { diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetFeatureNotSupportedIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetFeatureNotSupportedIT.java index 423661c77..8f9da34e1 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetFeatureNotSupportedIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetFeatureNotSupportedIT.java @@ -9,11 +9,11 @@ import java.sql.Time; import java.sql.Timestamp; import java.util.Collections; -import net.snowflake.client.category.TestCategoryResultSet; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryResultSet.class) +@Tag(TestTags.RESULT_SET) public class ResultSetFeatureNotSupportedIT extends BaseJDBCWithSharedConnectionIT { @Test public void testQueryResultSetNotSupportedException() throws Throwable { diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetIT.java index 193246368..760d83a75 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetIT.java @@ -7,13 +7,13 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.InputStream; import java.io.InputStreamReader; @@ -30,14 +30,15 @@ import java.sql.Statement; import java.sql.Types; import java.util.Properties; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryResultSet; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; +import net.snowflake.client.providers.SimpleResultFormatProvider; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; /** Test ResultSet */ -@Category(TestCategoryResultSet.class) +@Tag(TestTags.RESULT_SET) public class ResultSetIT extends ResultSet0IT { private final String selectAllSQL = "select * from test_rs"; @@ -47,25 +48,19 @@ public class ResultSetIT extends ResultSet0IT { (byte) 0x00, (byte) 0xFF, (byte) 0x42, (byte) 0x01 }; - public ResultSetIT() { - this("json"); - } - - ResultSetIT(String queryResultFormat) { - super(queryResultFormat); - } - - @Test - public void testFindColumn() throws SQLException { - try (Statement statement = connection.createStatement(); + @ParameterizedTest + 
@ArgumentsSource(SimpleResultFormatProvider.class) + public void testFindColumn(String queryResultFormat) throws SQLException { + try (Statement statement = createStatement(queryResultFormat); ResultSet resultSet = statement.executeQuery(selectAllSQL)) { assertEquals(1, resultSet.findColumn("COLA")); } } - @Test - public void testGetColumnClassNameForBinary() throws Throwable { - try (Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetColumnClassNameForBinary(String queryResultFormat) throws Throwable { + try (Statement statement = createStatement(queryResultFormat)) { try { statement.execute("create or replace table bintable (b binary)"); statement.execute("insert into bintable values ('00f1f2')"); @@ -88,8 +83,9 @@ public void testGetColumnClassNameForBinary() throws Throwable { } } - @Test - public void testGetMethod() throws Throwable { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetMethod(String queryResultFormat) throws Throwable { String prepInsertString = "insert into test_get values(?, ?, ?, ?, ?, ?, ?, ?)"; int bigInt = Integer.MAX_VALUE; long bigLong = Long.MAX_VALUE; @@ -100,7 +96,7 @@ public void testGetMethod() throws Throwable { Clob clob = connection.createClob(); clob.setString(1, "hello world"); - try (Statement statement = connection.createStatement()) { + try (Statement statement = createStatement(queryResultFormat)) { try { statement.execute( "create or replace table test_get(colA integer, colB number, colC number, " @@ -153,8 +149,11 @@ public void testGetMethod() throws Throwable { } } - @Test - public void testGetObjectOnDatabaseMetadataResultSet() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetObjectOnDatabaseMetadataResultSet(String queryResultFormat) + throws SQLException { + try (Statement statement = createStatement(queryResultFormat)) {} DatabaseMetaData databaseMetaData = connection.getMetaData(); try (ResultSet resultSet = databaseMetaData.getTypeInfo()) { assertTrue(resultSet.next()); @@ -163,9 +162,10 @@ public void testGetObjectOnDatabaseMetadataResultSet() throws SQLException { } } - @Test - public void testGetShort() throws SQLException { - try (ResultSet resultSet = numberCrossTesting()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetShort(String queryResultFormat) throws SQLException { + try (ResultSet resultSet = numberCrossTesting(queryResultFormat)) { assertTrue(resultSet.next()); // assert that 0 is returned for null values for every type of value for (int i = 1; i < 13; i++) { @@ -205,9 +205,10 @@ public void testGetShort() throws SQLException { } } - @Test - public void testGetInt() throws SQLException { - try (ResultSet resultSet = numberCrossTesting()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetInt(String queryResultFormat) throws SQLException { + try (ResultSet resultSet = numberCrossTesting(queryResultFormat)) { assertTrue(resultSet.next()); // assert that 0 is returned for null values for every type of value for (int i = 1; i < 13; i++) { @@ -246,9 +247,10 @@ public void testGetInt() throws SQLException { } } - @Test - public void testGetLong() throws SQLException { - try (ResultSet resultSet = numberCrossTesting()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetLong(String
queryResultFormat) throws SQLException { + try (ResultSet resultSet = numberCrossTesting(queryResultFormat)) { assertTrue(resultSet.next()); // assert that 0 is returned for null values for every type of value for (int i = 1; i < 13; i++) { @@ -287,9 +289,10 @@ public void testGetLong() throws SQLException { } } - @Test - public void testGetFloat() throws SQLException { - try (ResultSet resultSet = numberCrossTesting()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetFloat(String queryResultFormat) throws SQLException { + try (ResultSet resultSet = numberCrossTesting(queryResultFormat)) { assertTrue(resultSet.next()); // assert that 0 is returned for null values for every type of value for (int i = 1; i < 13; i++) { @@ -328,9 +331,10 @@ public void testGetFloat() throws SQLException { } } - @Test - public void testGetDouble() throws SQLException { - try (ResultSet resultSet = numberCrossTesting()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetDouble(String queryResultFormat) throws SQLException { + try (ResultSet resultSet = numberCrossTesting(queryResultFormat)) { assertTrue(resultSet.next()); // assert that 0 is returned for null values for every type of value for (int i = 1; i < 13; i++) { @@ -369,9 +373,10 @@ public void testGetDouble() throws SQLException { } } - @Test - public void testGetBigDecimal() throws SQLException { - try (Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetBigDecimal(String queryResultFormat) throws SQLException { + try (Statement statement = createStatement(queryResultFormat)) { statement.execute("create or replace table test_get(colA number(38,9))"); try (PreparedStatement preparedStatement = connection.prepareStatement("insert into test_get values(?)")) { @@ -393,7 +398,7 @@ public void testGetBigDecimal() throws SQLException { statement.execute("drop table if exists test_get"); } - try (ResultSet resultSet = numberCrossTesting()) { + try (ResultSet resultSet = numberCrossTesting(queryResultFormat)) { assertTrue(resultSet.next()); for (int i = 1; i < 13; i++) { assertNull(resultSet.getBigDecimal(i)); @@ -426,9 +431,10 @@ public void testGetBigDecimal() throws SQLException { } } - @Test - public void testGetBigDecimalNegative() throws SQLException { - try (Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetBigDecimalNegative(String queryResultFormat) throws SQLException { + try (Statement statement = createStatement(queryResultFormat)) { try { statement.execute("create or replace table test_dec(colA time)"); try (PreparedStatement preparedStatement = @@ -454,9 +460,10 @@ public void testGetBigDecimalNegative() throws SQLException { } } - @Test - public void testCursorPosition() throws SQLException { - try (Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testCursorPosition(String queryResultFormat) throws SQLException { + try (Statement statement = createStatement(queryResultFormat)) { statement.execute(selectAllSQL); try (ResultSet resultSet = statement.getResultSet()) { assertTrue(resultSet.next()); @@ -480,10 +487,11 @@ public void testCursorPosition() throws SQLException { * * @throws SQLException arises if any exception occurs. 
*/ - @Test - public void testGetBytes() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetBytes(String queryResultFormat) throws SQLException { Properties props = new Properties(); - try (Connection connection = init(props); + try (Connection connection = init(props, queryResultFormat); Statement statement = connection.createStatement()) { try { ingestBinaryTestData(connection); @@ -530,11 +538,12 @@ private void ingestBinaryTestData(Connection connection) throws SQLException { * * @throws Exception arises if any error occurs */ - @Test - public void testGetBytesInBase64() throws Exception { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetBytesInBase64(String queryResultFormat) throws Exception { Properties props = new Properties(); props.setProperty("binary_output_format", "BAse64"); - try (Connection connection = init(props); + try (Connection connection = init(props, queryResultFormat); Statement statement = connection.createStatement()) { try { ingestBinaryTestData(connection); @@ -557,9 +566,10 @@ public void testGetBytesInBase64() throws Exception { } // SNOW-31647 - @Test - public void testColumnMetaWithZeroPrecision() throws SQLException { - try (Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testColumnMetaWithZeroPrecision(String queryResultFormat) throws SQLException { + try (Statement statement = createStatement(queryResultFormat)) { try { statement.execute( "create or replace table testColDecimal(cola number(38, 0), " + "colb number(17, 5))"); @@ -578,9 +588,10 @@ public void testColumnMetaWithZeroPrecision() throws SQLException { } } - @Test - public void testGetObjectOnFixedView() throws Exception { - try (Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetObjectOnFixedView(String queryResultFormat) throws Exception { + try (Statement statement = createStatement(queryResultFormat)) { try { statement.execute( "create or replace table testFixedView" @@ -592,9 +603,9 @@ public void testGetObjectOnFixedView() throws Exception { // put files assertTrue( - "Failed to put a file", statement.execute( - "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @%testFixedView")); + "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @%testFixedView"), + "Failed to put a file"); try (ResultSet resultSet = statement.executeQuery( @@ -613,11 +624,12 @@ public void testGetObjectOnFixedView() throws Exception { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testGetColumnDisplaySizeAndPrecision() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testGetColumnDisplaySizeAndPrecision(String queryResultFormat) throws SQLException { ResultSetMetaData resultSetMetaData = null; - try (Statement statement = connection.createStatement()) { + try (Statement statement = createStatement(queryResultFormat)) { try (ResultSet resultSet = statement.executeQuery("select cast(1 as char)")) { resultSetMetaData = resultSet.getMetaData(); @@ -665,9 +677,10 @@ public void testGetColumnDisplaySizeAndPrecision() throws SQLException { } } - @Test - public void testGetBoolean() throws SQLException { - try (Statement statement = 
connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetBoolean(String queryResultFormat) throws SQLException { + try (Statement statement = createStatement(queryResultFormat)) { statement.execute("create or replace table testBoolean(cola boolean)"); statement.execute("insert into testBoolean values(false)"); try (ResultSet resultSet = statement.executeQuery("select * from testBoolean")) { @@ -733,9 +746,10 @@ public void testGetBoolean() throws SQLException { } } - @Test - public void testGetClob() throws Throwable { - try (Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetClob(String queryResultFormat) throws Throwable { + try (Statement statement = createStatement(queryResultFormat)) { statement.execute("create or replace table testClob(cola text)"); statement.execute("insert into testClob values('hello world')"); statement.execute("insert into testClob values('hello world1')"); @@ -772,9 +786,10 @@ public void testGetClob() throws Throwable { } } - @Test - public void testFetchOnClosedResultSet() throws SQLException { - try (Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testFetchOnClosedResultSet(String queryResultFormat) throws SQLException { + try (Statement statement = createStatement(queryResultFormat)) { ResultSet resultSet = statement.executeQuery(selectAllSQL); assertFalse(resultSet.isClosed()); resultSet.close(); @@ -783,11 +798,13 @@ public void testFetchOnClosedResultSet() throws SQLException { } } - @Test - public void testReleaseDownloaderCurrentMemoryUsage() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testReleaseDownloaderCurrentMemoryUsage(String queryResultFormat) + throws SQLException { final long initialMemoryUsage = SnowflakeChunkDownloader.getCurrentMemoryUsage(); - try (Statement statement = connection.createStatement()) { + try (Statement statement = createStatement(queryResultFormat)) { statement.executeQuery( "select current_date(), true,2345234, 2343.0, 'testrgint\\n\\t' from table(generator(rowcount=>1000000))"); @@ -802,21 +819,25 @@ public void testReleaseDownloaderCurrentMemoryUsage() throws SQLException { equalTo(initialMemoryUsage)); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testResultColumnSearchCaseSensitiveOld() throws Exception { - subTestResultColumnSearchCaseSensitive("JDBC_RS_COLUMN_CASE_INSENSITIVE"); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testResultColumnSearchCaseSensitiveOld(String queryResultFormat) throws Exception { + subTestResultColumnSearchCaseSensitive("JDBC_RS_COLUMN_CASE_INSENSITIVE", queryResultFormat); } - @Test - public void testResultColumnSearchCaseSensitive() throws Exception { - subTestResultColumnSearchCaseSensitive("CLIENT_RESULT_COLUMN_CASE_INSENSITIVE"); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testResultColumnSearchCaseSensitive(String queryResultFormat) throws Exception { + subTestResultColumnSearchCaseSensitive( + "CLIENT_RESULT_COLUMN_CASE_INSENSITIVE", queryResultFormat); } - private void subTestResultColumnSearchCaseSensitive(String parameterName) throws Exception { + private void 
subTestResultColumnSearchCaseSensitive( + String parameterName, String queryResultFormat) throws Exception { Properties prop = new Properties(); prop.put("tracing", "FINEST"); - try (Connection connection = init(prop); + try (Connection connection = init(prop, queryResultFormat); Statement statement = connection.createStatement()) { try (ResultSet resultSet = statement.executeQuery("select 1 AS TESTCOL")) { @@ -847,9 +868,10 @@ private void subTestResultColumnSearchCaseSensitive(String parameterName) throws } } - @Test - public void testInvalidColumnIndex() throws SQLException { - try (Statement statement = connection.createStatement(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testInvalidColumnIndex(String queryResultFormat) throws SQLException { + try (Statement statement = createStatement(queryResultFormat); ResultSet resultSet = statement.executeQuery(selectAllSQL)) { assertTrue(resultSet.next()); @@ -869,11 +891,11 @@ public void testInvalidColumnIndex() throws SQLException { } /** SNOW-28882: wasNull was not set properly */ - @Test - public void testWasNull() throws Exception { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testWasNull(String queryResultFormat) throws Exception { try (ResultSet ret = - connection - .createStatement() + createStatement(queryResultFormat) .executeQuery( "select cast(1/nullif(0,0) as double)," + "cast(1/nullif(0,0) as int), 100, " @@ -891,9 +913,10 @@ public void testWasNull() throws Exception { } /** SNOW-28390 */ - @Test - public void testParseInfAndNaNNumber() throws Exception { - try (Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testParseInfAndNaNNumber(String queryResultFormat) throws Exception { + try (Statement statement = createStatement(queryResultFormat)) { try (ResultSet ret = statement.executeQuery("select to_double('inf'), to_double('-inf')")) { assertTrue(ret.next()); assertThat("Positive Infinite Number", ret.getDouble(1), equalTo(Double.POSITIVE_INFINITY)); @@ -910,10 +933,11 @@ public void testParseInfAndNaNNumber() throws Exception { } /** SNOW-33227 */ - @Test - public void testTreatDecimalAsInt() throws Exception { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testTreatDecimalAsInt(String queryResultFormat) throws Exception { ResultSetMetaData metaData; - try (Statement statement = connection.createStatement()) { + try (Statement statement = createStatement(queryResultFormat)) { try (ResultSet ret = statement.executeQuery("select 1")) { metaData = ret.getMetaData(); @@ -929,60 +953,62 @@ public void testTreatDecimalAsInt() throws Exception { } } - @Test - public void testIsLast() throws Exception { - try (Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testIsLast(String queryResultFormat) throws Exception { + try (Statement statement = createStatement(queryResultFormat)) { try (ResultSet ret = statement.executeQuery("select * from orders_jdbc")) { - assertTrue("should be before the first", ret.isBeforeFirst()); - assertFalse("should not be the first", ret.isFirst()); + assertTrue(ret.isBeforeFirst(), "should be before the first"); + assertFalse(ret.isFirst(), "should not be the first"); assertTrue(ret.next()); - assertFalse("should not be before the first", ret.isBeforeFirst()); - assertTrue("should be the first", 
ret.isFirst()); + assertFalse(ret.isBeforeFirst(), "should not be before the first"); + assertTrue(ret.isFirst(), "should be the first"); int cnt = 0; while (ret.next()) { cnt++; if (cnt == 72) { - assertTrue("should be the last", ret.isLast()); - assertFalse("should not be after the last", ret.isAfterLast()); + assertTrue(ret.isLast(), "should be the last"); + assertFalse(ret.isAfterLast(), "should not be after the last"); } } assertEquals(72, cnt); assertFalse(ret.next()); - assertFalse("should not be the last", ret.isLast()); - assertTrue("should be afterthe last", ret.isAfterLast()); + assertFalse(ret.isLast(), "should not be the last"); + assertTrue(ret.isAfterLast(), "should be after the last"); } // PUT one file try (ResultSet ret = statement.executeQuery( "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @~")) { - assertTrue("should be before the first", ret.isBeforeFirst()); - assertFalse("should not be the first", ret.isFirst()); + assertTrue(ret.isBeforeFirst(), "should be before the first"); + assertFalse(ret.isFirst(), "should not be the first"); assertTrue(ret.next()); - assertFalse("should not be before the first", ret.isBeforeFirst()); - assertTrue("should be the first", ret.isFirst()); + assertFalse(ret.isBeforeFirst(), "should not be before the first"); + assertTrue(ret.isFirst(), "should be the first"); - assertTrue("should be the last", ret.isLast()); - assertFalse("should not be after the last", ret.isAfterLast()); + assertTrue(ret.isLast(), "should be the last"); + assertFalse(ret.isAfterLast(), "should not be after the last"); assertFalse(ret.next()); - assertFalse("should not be the last", ret.isLast()); - assertTrue("should be after the last", ret.isAfterLast()); + assertFalse(ret.isLast(), "should not be the last"); + assertTrue(ret.isAfterLast(), "should be after the last"); } } } - @Test - public void testUpdateCountOnCopyCmd() throws Exception { - try (Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testUpdateCountOnCopyCmd(String queryResultFormat) throws Exception { + try (Statement statement = createStatement(queryResultFormat)) { try { statement.execute("create or replace table testcopy(cola string)"); @@ -1001,16 +1027,18 @@ public void testUpdateCountOnCopyCmd() throws Exception { } } - @Test - public void testGetTimeNullTimestampAndTimestampNullTime() throws Throwable { - try (Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetTimeNullTimestampAndTimestampNullTime(String queryResultFormat) + throws Throwable { + try (Statement statement = createStatement(queryResultFormat)) { try { statement.execute("create or replace table testnullts(c1 timestamp, c2 time)"); statement.execute("insert into testnullts(c1, c2) values(null, null)"); try (ResultSet rs = statement.executeQuery("select * from testnullts")) { - assertTrue("should return result", rs.next()); - assertNull("return value must be null", rs.getTime(1)); - assertNull("return value must be null", rs.getTimestamp(2)); + assertTrue(rs.next(), "should return result"); + assertNull(rs.getTime(1), "return value must be null"); + assertNull(rs.getTimestamp(2), "return value must be null"); } } finally { statement.execute("drop table if exists testnullts"); @@ -1018,9 +1046,10 @@ public void testGetTimeNullTimestampAndTimestampNullTime() throws Throwable { } } - @Test - public void testNextNegative() throws
SQLException { - try (ResultSet rs = connection.createStatement().executeQuery("select 1")) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testNextNegative(String queryResultFormat) throws SQLException { + try (ResultSet rs = createStatement(queryResultFormat).executeQuery("select 1")) { assertTrue(rs.next()); System.setProperty("snowflake.enable_incident_test2", "true"); try { @@ -1034,9 +1063,11 @@ public void testNextNegative() throws SQLException { } /** SNOW-1416051; Added in > 3.16.0 */ - @Test - public void shouldSerializeArrayAndObjectAsStringOnGetObject() throws SQLException { - try (Statement statement = connection.createStatement(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void shouldSerializeArrayAndObjectAsStringOnGetObject(String queryResultFormat) + throws SQLException { + try (Statement statement = createStatement(queryResultFormat); ResultSet resultSet = statement.executeQuery( "select ARRAY_CONSTRUCT(1,2,3), OBJECT_CONSTRUCT('a', 4, 'b', 'test')")) { diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetJsonVsArrowIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetJsonVsArrowIT.java index 65cc27242..d8e3d111a 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetJsonVsArrowIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetJsonVsArrowIT.java @@ -3,15 +3,15 @@ */ package net.snowflake.client.jdbc; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.IsInstanceOf.instanceOf; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.math.BigDecimal; import java.nio.ByteBuffer; @@ -26,34 +26,21 @@ import java.util.List; import java.util.TimeZone; import java.util.stream.Collectors; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryArrow; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; +import net.snowflake.client.providers.SimpleResultFormatProvider; import org.apache.arrow.vector.BigIntVector; import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.StringUtils; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; /** Completely compare json and arrow resultSet behaviors */ -@RunWith(Parameterized.class) -@Category(TestCategoryArrow.class) +@Tag(TestTags.ARROW) public class ResultSetJsonVsArrowIT extends BaseJDBCTest { - @Parameterized.Parameters(name = "format={0}") - 
public static Object[][] data() { - // all tests in this class need to run for both query result formats json and arrow - return new Object[][] {{"JSON"}, {"Arrow"}}; - } - - protected String queryResultFormat; - - public ResultSetJsonVsArrowIT(String queryResultFormat) { - this.queryResultFormat = queryResultFormat; - } - public Connection init() throws SQLException { + public Connection init(String queryResultFormat) throws SQLException { Connection conn = getConnection(BaseJDBCTest.DONT_INJECT_SOCKET_TIMEOUT); try (Statement stmt = conn.createStatement()) { stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); @@ -61,9 +48,10 @@ public Connection init() throws SQLException { return conn; } - @Test - public void testGSResult() throws SQLException { - try (Connection con = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGSResult(String queryResultFormat) throws SQLException { + try (Connection con = init(queryResultFormat); Statement statement = con.createStatement(); ResultSet rs = statement.executeQuery( @@ -89,9 +77,10 @@ public void testGSResult() throws SQLException { } } - @Test - public void testGSResultReal() throws SQLException { - try (Connection con = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGSResultReal(String queryResultFormat) throws SQLException { + try (Connection con = init(queryResultFormat); Statement statement = con.createStatement()) { try { statement.execute("create or replace table t (a real)"); @@ -106,10 +95,11 @@ public void testGSResultReal() throws SQLException { } } - @Test - public void testGSResultScan() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGSResultScan(String queryResultFormat) throws SQLException { String queryId = null; - try (Connection con = init(); + try (Connection con = init(queryResultFormat); Statement statement = con.createStatement()) { try { statement.execute("create or replace table t (a text)"); @@ -130,9 +120,10 @@ public void testGSResultScan() throws SQLException { } } - @Test - public void testGSResultForEmptyAndSmallTable() throws SQLException { - try (Connection con = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGSResultForEmptyAndSmallTable(String queryResultFormat) throws SQLException { + try (Connection con = init(queryResultFormat); Statement statement = con.createStatement()) { try { statement.execute("create or replace table t (a int)"); @@ -150,9 +141,10 @@ public void testGSResultForEmptyAndSmallTable() throws SQLException { } } - @Test - public void testSNOW89737() throws SQLException { - try (Connection con = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testSNOW89737(String queryResultFormat) throws SQLException { + try (Connection con = init(queryResultFormat); Statement statement = con.createStatement()) { try { statement.execute( @@ -203,9 +195,10 @@ public void testSNOW89737() throws SQLException { * * @throws SQLException */ - @Test - public void testSemiStructuredData() throws SQLException { - try (Connection con = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testSemiStructuredData(String queryResultFormat) throws SQLException { + try (Connection con = init(queryResultFormat); Statement statement = con.createStatement(); ResultSet rs = 
statement.executeQuery( @@ -240,10 +233,11 @@ public void testSemiStructuredData() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testStructuredTypes() throws SQLException { - try (Connection con = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testStructuredTypes(String queryResultFormat) throws SQLException { + try (Connection con = init(queryResultFormat); Statement stmt = con.createStatement()) { stmt.execute("alter session set feature_structured_types = 'ENABLED';"); @@ -263,8 +257,9 @@ public void testStructuredTypes() throws SQLException { } } - private Connection init(String table, String column, String values) throws SQLException { - Connection con = init(); + private Connection init(String queryResultFormat, String table, String column, String values) + throws SQLException { + Connection con = init(queryResultFormat); try (Statement statement = con.createStatement()) { statement.execute("create or replace table " + table + " " + column); statement.execute("insert into " + table + " values " + values); @@ -272,7 +267,7 @@ private Connection init(String table, String column, String values) throws SQLEx return con; } - private boolean isJSON() { + private boolean isJSON(String queryResultFormat) { return queryResultFormat.equalsIgnoreCase("json"); } @@ -287,13 +282,14 @@ private boolean isJSON() { * * @throws SQLException */ - @Test - public void testTinyInt() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testTinyInt(String queryResultFormat) throws SQLException { int[] cases = {0, 1, -1, 127, -128}; String table = "test_arrow_tiny_int"; String column = "(a int)"; String values = "(" + StringUtils.join(ArrayUtils.toObject(cases), "),(") + "), (NULL)"; - try (Connection con = init(table, column, values); + try (Connection con = init(queryResultFormat, table, column, values); Statement statement = con.createStatement(); ResultSet rs = statement.executeQuery("select * from " + table)) { try { @@ -349,13 +345,14 @@ public void testTinyInt() throws SQLException { * * @throws SQLException */ - @Test - public void testScaledTinyInt() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testScaledTinyInt(String queryResultFormat) throws SQLException { float[] cases = {0.0f, 0.11f, -0.11f, 1.27f, -1.28f}; String table = "test_arrow_tiny_int"; String column = "(a number(3,2))"; String values = "(" + StringUtils.join(ArrayUtils.toObject(cases), "),(") + "), (null)"; - try (Connection con = init(table, column, values); + try (Connection con = init(queryResultFormat, table, column, values); Statement statement = con.createStatement(); ResultSet rs = con.createStatement().executeQuery("select * from test_arrow_tiny_int")) { try { @@ -396,7 +393,7 @@ public void testScaledTinyInt() throws SQLException { assertEquals(val, rs.getDouble(1), delta); assertEquals(new BigDecimal(rs.getString(1)), rs.getBigDecimal(1)); assertEquals(rs.getBigDecimal(1), rs.getObject(1)); - if (isJSON()) { + if (isJSON(queryResultFormat)) { try { rs.getByte(1); fail(); @@ -408,7 +405,7 @@ public void testScaledTinyInt() throws SQLException { assertEquals(((byte) (cases[i] * 100)), rs.getByte(1)); } - if (!isJSON()) { + if (!isJSON(queryResultFormat)) { byte[] bytes = new byte[1]; bytes[0] = rs.getByte(1); assertArrayEquals(bytes, rs.getBytes(1)); 
@@ -446,13 +443,14 @@ public void testScaledTinyInt() throws SQLException { * * @throws SQLException */ - @Test - public void testSmallInt() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testSmallInt(String queryResultFormat) throws SQLException { short[] cases = {0, 1, -1, 127, -128, 128, -129, 32767, -32768}; String table = "test_arrow_small_int"; String column = "(a int)"; String values = "(" + StringUtils.join(ArrayUtils.toObject(cases), "),(") + "), (NULL)"; - try (Connection con = init(table, column, values); + try (Connection con = init(queryResultFormat, table, column, values); Statement statement = con.createStatement(); ResultSet rs = statement.executeQuery("select * from " + table)) { try { @@ -477,7 +475,7 @@ public void testSmallInt() throws SQLException { rs.getByte(1); fail(); } catch (Exception e) { - if (isJSON()) { + if (isJSON(queryResultFormat)) { // Note: not caught by SQLException! assertTrue(e.toString().contains("NumberFormatException")); } else { @@ -490,7 +488,7 @@ public void testSmallInt() throws SQLException { } ByteBuffer bb = ByteBuffer.allocate(2); bb.putShort(cases[i]); - if (isJSON()) { + if (isJSON(queryResultFormat)) { byte[] res = rs.getBytes(1); for (int j = res.length - 1; j >= 0; j--) { assertEquals(bb.array()[2 - res.length + j], res[j]); @@ -531,14 +529,15 @@ public void testSmallInt() throws SQLException { * * @throws SQLException */ - @Test - public void testScaledSmallInt() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testScaledSmallInt(String queryResultFormat) throws SQLException { float[] cases = {0, 2.0f, -2.0f, 32.767f, -32.768f}; short[] shortCompact = {0, 2000, -2000, 32767, -32768}; String table = "test_arrow_small_int"; String column = "(a number(5,3))"; String values = "(" + StringUtils.join(ArrayUtils.toObject(cases), "),(") + "), (null)"; - try (Connection con = init(table, column, values); + try (Connection con = init(queryResultFormat, table, column, values); Statement statement = con.createStatement(); ResultSet rs = con.createStatement().executeQuery("select * from test_arrow_small_int")) { try { @@ -583,7 +582,7 @@ public void testScaledSmallInt() throws SQLException { rs.getByte(1); fail(); } catch (Exception e) { - if (isJSON()) { + if (isJSON(queryResultFormat)) { // Note: not caught by SQLException! 
assertTrue(e.toString().contains("NumberFormatException")); } else { @@ -598,7 +597,7 @@ public void testScaledSmallInt() throws SQLException { byteBuffer.putShort(shortCompact[i]); assertArrayEquals(byteBuffer.array(), rs.getBytes(1)); } catch (Exception e) { - if (isJSON()) { + if (isJSON(queryResultFormat)) { SQLException se = (SQLException) e; assertEquals( (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); @@ -639,15 +638,16 @@ public void testScaledSmallInt() throws SQLException { * * @throws SQLException */ - @Test - public void testInt() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testInt(String queryResultFormat) throws SQLException { int[] cases = { 0, 1, -1, 127, -128, 128, -129, 32767, -32768, 32768, -32769, 2147483647, -2147483648 }; String table = "test_arrow_int"; String column = "(a int)"; String values = "(" + StringUtils.join(ArrayUtils.toObject(cases), "),(") + "), (NULL)"; - try (Connection con = init(table, column, values); + try (Connection con = init(queryResultFormat, table, column, values); Statement statement = con.createStatement(); ResultSet rs = con.createStatement().executeQuery("select * from " + table)) { try { @@ -686,7 +686,7 @@ public void testInt() throws SQLException { rs.getByte(1); fail(); } catch (Exception e) { - if (isJSON()) { + if (isJSON(queryResultFormat)) { // Note: not caught by SQLException! assertTrue(e.toString().contains("NumberFormatException")); } else { @@ -699,7 +699,7 @@ public void testInt() throws SQLException { } ByteBuffer bb = ByteBuffer.allocate(4); bb.putInt(cases[i]); - if (isJSON()) { + if (isJSON(queryResultFormat)) { byte[] res = rs.getBytes(1); for (int j = res.length - 1; j >= 0; j--) { assertEquals(bb.array()[4 - res.length + j], res[j]); @@ -740,8 +740,9 @@ public void testInt() throws SQLException { * * @throws SQLException */ - @Test - public void testScaledInt() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testScaledInt(String queryResultFormat) throws SQLException { int scale = 9; int[] intCompacts = {0, 123456789, -123456789, 2147483647, -2147483647}; List caseList = @@ -755,7 +756,7 @@ public void testScaledInt() throws SQLException { String column = String.format("(a number(10,%d))", scale); String values = "(" + StringUtils.join(cases, "),(") + "), (null)"; - try (Connection con = init(table, column, values); + try (Connection con = init(queryResultFormat, table, column, values); Statement statement = con.createStatement(); ResultSet rs = con.createStatement().executeQuery("select * from test_arrow_int")) { try { @@ -800,7 +801,7 @@ public void testScaledInt() throws SQLException { rs.getByte(1); fail(); } catch (Exception e) { - if (isJSON()) { + if (isJSON(queryResultFormat)) { // Note: not caught by SQLException! 
assertTrue(e.toString().contains("NumberFormatException")); } else { @@ -815,7 +816,7 @@ public void testScaledInt() throws SQLException { byteBuffer.putInt(intCompacts[i]); assertArrayEquals(byteBuffer.array(), rs.getBytes(1)); } catch (Exception e) { - if (isJSON()) { + if (isJSON(queryResultFormat)) { SQLException se = (SQLException) e; assertEquals( (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); @@ -856,8 +857,9 @@ public void testScaledInt() throws SQLException { * * @throws SQLException */ - @Test - public void testBigInt() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testBigInt(String queryResultFormat) throws SQLException { long[] cases = { 0, 1, @@ -880,7 +882,7 @@ public void testBigInt() throws SQLException { String table = "test_arrow_big_int"; String column = "(a int)"; String values = "(" + StringUtils.join(ArrayUtils.toObject(cases), "),(") + "), (NULL)"; - try (Connection con = init(table, column, values); + try (Connection con = init(queryResultFormat, table, column, values); Statement statement = con.createStatement(); ResultSet rs = statement.executeQuery("select * from " + table)) { try { @@ -934,7 +936,7 @@ public void testBigInt() throws SQLException { rs.getByte(1); fail(); } catch (Exception e) { - if (isJSON()) { + if (isJSON(queryResultFormat)) { // Note: not caught by SQLException! assertTrue(e.toString().contains("NumberFormatException")); } else { @@ -984,8 +986,9 @@ public void testBigInt() throws SQLException { * * @throws SQLException */ - @Test - public void testScaledBigInt() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testScaledBigInt(String queryResultFormat) throws SQLException { int scale = 18; long[] longCompacts = { 0, 123456789, -123456789, 2147483647, -2147483647, Long.MIN_VALUE, Long.MAX_VALUE @@ -1001,7 +1004,7 @@ public void testScaledBigInt() throws SQLException { String column = String.format("(a number(38,%d))", scale); String values = "(" + StringUtils.join(cases, "),(") + "), (null)"; - try (Connection con = init(table, column, values); + try (Connection con = init(queryResultFormat, table, column, values); Statement statement = con.createStatement(); ResultSet rs = statement.executeQuery("select * from " + table)) { try { @@ -1046,7 +1049,7 @@ public void testScaledBigInt() throws SQLException { rs.getByte(1); fail(); } catch (Exception e) { - if (isJSON()) { + if (isJSON(queryResultFormat)) { // Note: not caught by SQLException! 
assertTrue(e.toString().contains("NumberFormatException")); } else { @@ -1061,7 +1064,7 @@ public void testScaledBigInt() throws SQLException { byteBuffer.putLong(longCompacts[i]); assertArrayEquals(byteBuffer.array(), rs.getBytes(1)); } catch (Exception e) { - if (isJSON()) { + if (isJSON(queryResultFormat)) { SQLException se = (SQLException) e; assertEquals( (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); @@ -1103,8 +1106,9 @@ public void testScaledBigInt() throws SQLException { * * @throws SQLException */ - @Test - public void testDecimalNoScale() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testDecimalNoScale(String queryResultFormat) throws SQLException { int scale = 0; String[] longCompacts = { "10000000000000000000000000000000000000", @@ -1120,7 +1124,7 @@ public void testDecimalNoScale() throws SQLException { String column = String.format("(a number(38,%d))", scale); String values = "(" + StringUtils.join(cases, "),(") + "), (null)"; - try (Connection con = init(table, column, values); + try (Connection con = init(queryResultFormat, table, column, values); Statement statement = con.createStatement(); ResultSet rs = statement.executeQuery("select * from " + table)) { try { @@ -1166,7 +1170,7 @@ public void testDecimalNoScale() throws SQLException { rs.getByte(1); fail(); } catch (Exception e) { - if (isJSON()) { + if (isJSON(queryResultFormat)) { // Note: not caught by SQLException! assertTrue(e.toString().contains("NumberFormatException")); } else { @@ -1212,8 +1216,9 @@ public void testDecimalNoScale() throws SQLException { * * @throws SQLException */ - @Test - public void testDecimalWithLargeScale() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testDecimalWithLargeScale(String queryResultFormat) throws SQLException { int scale = 37; String[] longCompacts = { "1.0000000000000000000000000000000000000", @@ -1229,7 +1234,7 @@ public void testDecimalWithLargeScale() throws SQLException { String column = String.format("(a number(38,%d))", scale); String values = "(" + StringUtils.join(cases, "),(") + "), (null)"; - try (Connection con = init(table, column, values); + try (Connection con = init(queryResultFormat, table, column, values); Statement statement = con.createStatement(); ResultSet rs = statement.executeQuery("select * from " + table)) { try { @@ -1274,7 +1279,7 @@ public void testDecimalWithLargeScale() throws SQLException { rs.getByte(1); fail(); } catch (Exception e) { - if (isJSON()) { + if (isJSON(queryResultFormat)) { // Note: not caught by SQLException! 
assertTrue(e.toString().contains("NumberFormatException")); } else { @@ -1287,7 +1292,7 @@ public void testDecimalWithLargeScale() throws SQLException { try { assertArrayEquals(cases[i].toBigInteger().toByteArray(), rs.getBytes(1)); } catch (Exception e) { - if (isJSON()) { + if (isJSON(queryResultFormat)) { SQLException se = (SQLException) e; assertEquals( (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); @@ -1329,9 +1334,10 @@ public void testDecimalWithLargeScale() throws SQLException { * * @throws SQLException */ - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testDecimal() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testDecimal(String queryResultFormat) throws SQLException { int scale = 37; long[] longCompacts = { 0, 123456789, -123456789, 2147483647, -2147483647, Long.MIN_VALUE, Long.MAX_VALUE @@ -1347,7 +1353,7 @@ public void testDecimal() throws SQLException { String column = String.format("(a number(38,%d))", scale); String values = "(" + StringUtils.join(cases, "),(") + "), (null)"; - try (Connection con = init(table, column, values); + try (Connection con = init(queryResultFormat, table, column, values); Statement statement = con.createStatement(); ResultSet rs = con.createStatement().executeQuery("select * from " + table)) { try { @@ -1393,7 +1399,7 @@ public void testDecimal() throws SQLException { rs.getByte(1); fail(); } catch (Exception e) { - if (isJSON()) { + if (isJSON(queryResultFormat)) { // Note: not caught by SQLException! assertTrue(e.toString().contains("NumberFormatException")); } else { @@ -1406,7 +1412,7 @@ public void testDecimal() throws SQLException { try { assertArrayEquals(byteBuf.putLong(0, longCompacts[i]).array(), rs.getBytes(1)); } catch (Exception e) { - if (isJSON()) { + if (isJSON(queryResultFormat)) { SQLException se = (SQLException) e; assertEquals( (int) ErrorCode.INVALID_VALUE_CONVERT.getMessageCode(), se.getErrorCode()); @@ -1440,8 +1446,9 @@ public void testDecimal() throws SQLException { * * @throws SQLException */ - @Test - public void testDoublePrecision() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testDoublePrecision(String queryResultFormat) throws SQLException { String[] cases = { // SNOW-31249 "-86.6426540296895", @@ -1470,12 +1477,12 @@ public void testDoublePrecision() throws SQLException { String column = "(a double)"; String values = "(" + StringUtils.join(cases, "),(") + ")"; - try (Connection con = init(table, column, values); + try (Connection con = init(queryResultFormat, table, column, values); Statement statement = con.createStatement(); ResultSet rs = statement.executeQuery("select * from " + table)) { try { int i = 0; - if (isJSON()) { + if (isJSON(queryResultFormat)) { while (rs.next()) { assertEquals(json_results[i++], Double.toString(rs.getDouble(1))); } @@ -1491,12 +1498,13 @@ public void testDoublePrecision() throws SQLException { } } - @Test - public void testBoolean() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testBoolean(String queryResultFormat) throws SQLException { String table = "test_arrow_boolean"; String column = "(a boolean)"; String values = "(true),(null),(false)"; - try (Connection conn = init(table, column, values); + try (Connection conn = init(queryResultFormat, table, column, values); Statement 
statement = conn.createStatement(); ResultSet rs = statement.executeQuery("select * from " + table)) { assertTrue(rs.next()); @@ -1512,12 +1520,13 @@ public void testBoolean() throws SQLException { } } - @Test - public void testClientSideSorting() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testClientSideSorting(String queryResultFormat) throws SQLException { String table = "test_arrow_sort_on"; String column = "( a int, b double, c string)"; String values = "(1,2.0,'test'),(0,2.0, 'test'),(1,2.0,'abc')"; - try (Connection conn = init(table, column, values); + try (Connection conn = init(queryResultFormat, table, column, values); Statement statement = conn.createStatement()) { try { // turn on sorting mode @@ -1537,9 +1546,10 @@ public void testClientSideSorting() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testClientSideSortingOnBatchedChunk() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testClientSideSortingOnBatchedChunk(String queryResultFormat) throws SQLException { // in this test, the first chunk contains multiple batches when the format is Arrow String[] queries = { "set-sf-property sort on", @@ -1557,7 +1567,7 @@ public void testClientSideSortingOnBatchedChunk() throws SQLException { "insert into T values (3);", }; - try (Connection conn = init(); + try (Connection conn = init(queryResultFormat); Statement stat = conn.createStatement()) { try { for (String q : queries) { @@ -1580,9 +1590,10 @@ public void testClientSideSortingOnBatchedChunk() throws SQLException { } } - @Test - public void testTimestampNTZAreAllNulls() throws SQLException { - try (Connection con = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testTimestampNTZAreAllNulls(String queryResultFormat) throws SQLException { + try (Connection con = init(queryResultFormat); Statement statement = con.createStatement()) { try { statement.executeQuery( @@ -1600,10 +1611,11 @@ public void testTimestampNTZAreAllNulls() throws SQLException { } } - @Test - public void TestArrowStringRoundTrip() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void TestArrowStringRoundTrip(String queryResultFormat) throws SQLException { String big_number = "11111111112222222222333333333344444444"; - try (Connection con = init(); + try (Connection con = init(queryResultFormat); Statement st = con.createStatement()) { try { for (int i = 0; i < 38; i++) { @@ -1625,10 +1637,11 @@ public void TestArrowStringRoundTrip() throws SQLException { } } - @Test - public void TestArrowFloatRoundTrip() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void TestArrowFloatRoundTrip(String queryResultFormat) throws SQLException { float[] cases = {Float.MAX_VALUE, Float.MIN_VALUE}; - try (Connection con = init(); + try (Connection con = init(queryResultFormat); Statement st = con.createStatement()) { try { for (float f : cases) { @@ -1645,12 +1658,13 @@ public void TestArrowFloatRoundTrip() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void TestTimestampNTZWithDLS() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + 
public void TestTimestampNTZWithDLS(String queryResultFormat) throws SQLException { TimeZone origTz = TimeZone.getDefault(); String[] timeZones = new String[] {"America/New_York", "America/Los_Angeles"}; - try (Connection con = init(); + try (Connection con = init(queryResultFormat); Statement st = con.createStatement()) { for (String timeZone : timeZones) { TimeZone.setDefault(TimeZone.getTimeZone(timeZone)); @@ -1751,10 +1765,11 @@ public void TestTimestampNTZWithDLS() throws SQLException { } } - @Test - public void TestTimestampNTZBinding() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void TestTimestampNTZBinding(String queryResultFormat) throws SQLException { TimeZone origTz = TimeZone.getDefault(); - try (Connection con = init()) { + try (Connection con = init(queryResultFormat)) { TimeZone.setDefault(TimeZone.getTimeZone("PST")); try (Statement st = con.createStatement()) { st.execute("alter session set CLIENT_TIMESTAMP_TYPE_MAPPING=TIMESTAMP_NTZ"); diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetJsonVsArrowMultiTZIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetJsonVsArrowMultiTZIT.java index f62e7701c..d89cebfc6 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetJsonVsArrowMultiTZIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetJsonVsArrowMultiTZIT.java @@ -3,46 +3,51 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; -import java.util.ArrayList; -import java.util.Collection; import java.util.List; -import net.snowflake.client.category.TestCategoryArrow; +import net.snowflake.client.category.TestTags; +import net.snowflake.client.providers.ProvidersUtil; +import net.snowflake.client.providers.ScaleProvider; +import net.snowflake.client.providers.SimpleResultFormatProvider; +import net.snowflake.client.providers.SnowflakeArgumentsProvider; +import net.snowflake.client.providers.TimezoneProvider; import org.apache.commons.lang3.StringUtils; -import org.junit.Before; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsSource; /** Completely compare json and arrow resultSet behaviors */ -@RunWith(Parameterized.class) -@Category(TestCategoryArrow.class) +@Tag(TestTags.ARROW) public class ResultSetJsonVsArrowMultiTZIT extends BaseJDBCWithSharedConnectionIT { - @Parameterized.Parameters(name = "format={0}, tz={1}") - public static Collection data() { - // all tests in this class need to run for both query result formats json and arrow - String[] timeZones = new String[] {"UTC", "America/New_York", "Asia/Singapore"}; - String[] queryFormats = new String[] {"json", "arrow"}; - List ret = new ArrayList<>(); - for (String queryFormat : 
queryFormats) { - for (String timeZone : timeZones) { - ret.add(new Object[] {queryFormat, timeZone}); - } + static String originalTz; + + static class DataProvider extends SnowflakeArgumentsProvider { + @Override + protected List rawArguments(ExtensionContext context) { + return ProvidersUtil.cartesianProduct( + context, new SimpleResultFormatProvider(), new TimezoneProvider(3)); } - return ret; } - private final String queryResultFormat; - private final String tz; + static class DataWithScaleProvider extends SnowflakeArgumentsProvider { + @Override + protected List rawArguments(ExtensionContext context) { + return ProvidersUtil.cartesianProduct(context, new DataProvider(), new ScaleProvider()); + } + } - @Before + @BeforeEach public void setSessionTimezone() throws SQLException { try (Statement statement = connection.createStatement()) { statement.execute( @@ -56,13 +61,26 @@ public void setSessionTimezone() throws SQLException { } } - public ResultSetJsonVsArrowMultiTZIT(String queryResultFormat, String timeZone) { - this.queryResultFormat = queryResultFormat; - System.setProperty("user.timezone", timeZone); - tz = timeZone; + private static void setTimezone(String tz) { + System.setProperty("user.timezone", tz); + } + + @BeforeAll + public static void saveTimezone() { + originalTz = System.getProperty("user.timezone"); + } + + @AfterAll + public static void restoreTimezone() { + if (originalTz != null) { + System.setProperty("user.timezone", originalTz); + } else { + System.clearProperty("user.timezone"); + } } - private void init(String table, String column, String values) throws SQLException { + private void init(String table, String column, String values, String queryResultFormat) + throws SQLException { try (Statement statement = connection.createStatement()) { statement.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); statement.execute("create or replace table " + table + " " + column); @@ -70,8 +88,10 @@ private void init(String table, String column, String values) throws SQLExceptio } } - @Test - public void testTime() throws SQLException { + @ParameterizedTest + @ArgumentsSource(DataWithScaleProvider.class) + public void testTime(String queryResultFormat, String tz, int scale) throws SQLException { + setTimezone(tz); String[] times = { "00:01:23", "00:01:23.1", @@ -84,13 +104,13 @@ public void testTime() throws SQLException { "00:01:23.12345678", "00:01:23.123456789" }; - for (int scale = 0; scale <= 9; scale++) { - testTimeWithScale(times, scale); - } + testTimeWithScale(times, scale, queryResultFormat); } - @Test - public void testDate() throws Exception { + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + public void testDate(String queryResultFormat, String tz) throws Exception { + setTimezone(tz); String[] cases = { "2017-01-01", "2014-01-02", @@ -108,8 +128,8 @@ public void testDate() throws Exception { String column = "(a date)"; String values = "('" + StringUtils.join(cases, "'),('") + "'), (null)"; - init(table, column, values); - try (Statement statement = connection.createStatement()) { + init(table, column, values, queryResultFormat); + try (Statement statement = createStatement(queryResultFormat)) { try (ResultSet rs = statement.executeQuery("select * from " + table)) { int i = 0; while (i < cases.length) { @@ -129,12 +149,13 @@ public void testDate() throws Exception { } } - public void testTimeWithScale(String[] times, int scale) throws SQLException { + public void testTimeWithScale(String[] times, int scale, String 
queryResultFormat) + throws SQLException { String table = "test_arrow_time"; String column = "(a time(" + scale + "))"; String values = "('" + StringUtils.join(times, "'),('") + "'), (null)"; - init(table, column, values); - try (Statement statement = connection.createStatement(); + init(table, column, values, queryResultFormat); + try (Statement statement = createStatement(queryResultFormat); ResultSet rs = statement.executeQuery("select * from " + table)) { for (int i = 0; i < times.length; i++) { assertTrue(rs.next()); @@ -146,14 +167,11 @@ public void testTimeWithScale(String[] times, int scale) throws SQLException { } } - @Test - public void testTimestampNTZ() throws SQLException { - for (int scale = 0; scale <= 9; scale++) { - testTimestampNTZWithScale(scale); - } - } - - public void testTimestampNTZWithScale(int scale) throws SQLException { + @ParameterizedTest + @ArgumentsSource(DataWithScaleProvider.class) + public void testTimestampNTZWithScale(String queryResultFormat, String tz, int scale) + throws SQLException { + setTimezone(tz); String[] cases = { "2017-01-01 12:00:00", "2014-01-02 16:00:00", @@ -181,8 +199,8 @@ public void testTimestampNTZWithScale(int scale) throws SQLException { String column = "(a timestamp_ntz(" + scale + "))"; String values = "('" + StringUtils.join(cases, "'),('") + "'), (null)"; - init(table, column, values); - try (Statement statement = connection.createStatement()) { + init(table, column, values, queryResultFormat); + try (Statement statement = createStatement(queryResultFormat)) { try (ResultSet rs = statement.executeQuery("select * from " + table)) { int i = 0; while (i < cases.length) { @@ -193,12 +211,13 @@ public void testTimestampNTZWithScale(int scale) throws SQLException { assertNull(rs.getString(1)); } statement.execute("drop table " + table); - System.clearProperty("user.timezone"); } } - @Test - public void testTimestampNTZWithNanos() throws SQLException { + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + public void testTimestampNTZWithNanos(String queryResultFormat, String tz) throws SQLException { + setTimezone(tz); String[] cases = { "2017-01-01 12:00:00.123456789", "2014-01-02 16:00:00.0123", @@ -215,8 +234,8 @@ public void testTimestampNTZWithNanos() throws SQLException { String column = "(a timestamp_ntz)"; String values = "('" + StringUtils.join(cases, "'),('") + "'), (null)"; - init(table, column, values); - try (Statement statement = connection.createStatement()) { + init(table, column, values, queryResultFormat); + try (Statement statement = createStatement(queryResultFormat)) { try (ResultSet rs = statement.executeQuery("select * from " + table)) { int i = 0; while (i < cases.length) { @@ -227,7 +246,6 @@ public void testTimestampNTZWithNanos() throws SQLException { assertNull(rs.getString(1)); } finally { statement.execute("drop table " + table); - System.clearProperty("user.timezone"); } } } diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetLatestIT.java index dc16d5dcf..ab7a6e081 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetLatestIT.java @@ -6,12 +6,12 @@ import static net.snowflake.client.TestUtil.expectSnowflakeLoggedFeatureNotSupportedException; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import 
static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import com.fasterxml.jackson.databind.JsonNode; import java.lang.reflect.Field; @@ -43,10 +43,9 @@ import java.util.TimeZone; import java.util.concurrent.ExecutionException; import java.util.regex.Pattern; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; import net.snowflake.client.TestUtil; -import net.snowflake.client.category.TestCategoryResultSet; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.ObjectMapperFactory; import net.snowflake.client.core.SFBaseSession; import net.snowflake.client.core.SessionUtil; @@ -55,10 +54,14 @@ import net.snowflake.client.jdbc.telemetry.TelemetryData; import net.snowflake.client.jdbc.telemetry.TelemetryField; import net.snowflake.client.jdbc.telemetry.TelemetryUtil; +import net.snowflake.client.providers.SimpleResultFormatProvider; import net.snowflake.common.core.SFBinary; import org.apache.arrow.vector.Float8Vector; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; /** * ResultSet integration tests for the latest JDBC driver. This doesn't work for the oldest @@ -66,15 +69,11 @@ * if the tests still is not applicable. If it is applicable, move tests to ResultSetIT so that both * the latest and oldest supported driver run the tests. */ -@Category(TestCategoryResultSet.class) +@Tag(TestTags.RESULT_SET) public class ResultSetLatestIT extends ResultSet0IT { - - public ResultSetLatestIT() { - this("json"); - } - - ResultSetLatestIT(String queryResultFormat) { - super(queryResultFormat); + private static void setQueryResultFormat(Statement stmt, String queryResultFormat) + throws SQLException { + stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); } private String createTableSql = @@ -91,10 +90,10 @@ public ResultSetLatestIT() { * * @throws Throwable */ - @Test - public void testMemoryClearingAfterInterrupt() throws Throwable { - try (Connection connection = getConnection(); - Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testMemoryClearingAfterInterrupt(String queryResultFormat) throws Throwable { + try (Statement statement = createStatement(queryResultFormat)) { final long initialMemoryUsage = SnowflakeChunkDownloader.getCurrentMemoryUsage(); try { // Inject an InterruptedException into the SnowflakeChunkDownloader.terminate() function @@ -128,12 +127,12 @@ public void testMemoryClearingAfterInterrupt() throws Throwable { * multiple statements concurrently uses a lot of memory. This checks that chunks download even * when there is not enough memory available for concurrent prefetching. 
*/ - @Test - public void testChunkDownloaderNoHang() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testChunkDownloaderNoHang(String queryResultFormat) throws SQLException { int stmtCount = 30; int rowCount = 170000; - try (Connection connection = getConnection(); - Statement stmt = connection.createStatement()) { + try (Statement stmt = createStatement(queryResultFormat)) { List rsList = new ArrayList<>(); // Set memory limit to low number connection @@ -165,12 +164,12 @@ public void testChunkDownloaderNoHang() throws SQLException { } /** This tests that the SnowflakeChunkDownloader doesn't hang when memory limits are low. */ - @Test - public void testChunkDownloaderSetRetry() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testChunkDownloaderSetRetry(String queryResultFormat) throws SQLException { int stmtCount = 3; int rowCount = 170000; - try (Connection connection = getConnection(); - Statement stmt = connection.createStatement()) { + try (Statement stmt = createStatement(queryResultFormat)) { connection .unwrap(SnowflakeConnectionV1.class) .getSFBaseSession() @@ -214,9 +213,12 @@ public void testChunkDownloaderSetRetry() throws SQLException { * @throws ExecutionException arises if error occurred when sending telemetry events * @throws InterruptedException arises if error occurred when sending telemetry events */ - @Test - public void testMetadataAPIMetricCollection() + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testMetadataAPIMetricCollection(String queryResultFormat) throws SQLException, ExecutionException, InterruptedException { + Statement stmt = createStatement(queryResultFormat); + stmt.close(); Telemetry telemetry = connection.unwrap(SnowflakeConnectionV1.class).getSfSession().getTelemetryClient(); DatabaseMetaData metadata = connection.getMetaData(); @@ -276,9 +278,10 @@ public void testMetadataAPIMetricCollection() * * @throws SQLException */ - @Test - public void testGetCharacterStreamNull() throws SQLException { - try (Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetCharacterStreamNull(String queryResultFormat) throws SQLException { + try (Statement statement = createStatement(queryResultFormat)) { statement.execute("create or replace table JDBC_NULL_CHARSTREAM (col1 varchar(16))"); statement.execute("insert into JDBC_NULL_CHARSTREAM values(NULL)"); try (ResultSet rs = statement.executeQuery("select * from JDBC_NULL_CHARSTREAM")) { @@ -293,9 +296,10 @@ public void testGetCharacterStreamNull() throws SQLException { * * @throws SQLException arises if any exception occurs */ - @Test - public void testMultipleChunks() throws Exception { - try (Statement statement = connection.createStatement(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testMultipleChunks(String queryResultFormat) throws Exception { + try (Statement statement = createStatement(queryResultFormat); // 10000 rows should be enough to force result into multiple chunks ResultSet resultSet = @@ -345,10 +349,11 @@ public void testMultipleChunks() throws Exception { * * @throws SQLException arises if any exception occurs */ - @Test - public void testResultSetMetadata() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testResultSetMetadata(String 
queryResultFormat) throws SQLException { final Map params = getConnectionParameters(); - try (Statement statement = connection.createStatement()) { + try (Statement statement = createStatement(queryResultFormat)) { try { statement.execute("create or replace table test_rsmd(colA number(20, 5), colB string)"); statement.execute("insert into test_rsmd values(1.00, 'str'),(2.00, 'str2')"); @@ -396,9 +401,10 @@ public void testResultSetMetadata() throws SQLException { * * @throws SQLException */ - @Test - public void testEmptyResultSet() throws SQLException { - try (Statement statement = connection.createStatement(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testEmptyResultSet(String queryResultFormat) throws SQLException { + try (Statement statement = createStatement(queryResultFormat); // the only function that returns ResultSetV1.emptyResultSet() ResultSet rs = statement.getGeneratedKeys()) { assertFalse(rs.next()); @@ -505,9 +511,10 @@ public void testEmptyResultSet() throws SQLException { * * @throws Exception arises if any exception occurs. */ - @Test - public void testBytesCrossTypeTests() throws Exception { - try (ResultSet resultSet = numberCrossTesting()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testBytesCrossTypeTests(String queryResultFormat) throws Exception { + try (ResultSet resultSet = numberCrossTesting(queryResultFormat)) { assertTrue(resultSet.next()); // assert that 0 is returned for null values for every type of value for (int i = 1; i < 13; i++) { @@ -538,9 +545,11 @@ public void testBytesCrossTypeTests() throws Exception { // SNOW-204185 // 30s for timeout. This test usually finishes in around 10s. - @Test(timeout = 30000) - public void testResultChunkDownloaderException() throws SQLException { - try (Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @Timeout(30) + public void testResultChunkDownloaderException(String queryResultFormat) throws SQLException { + try (Statement statement = createStatement(queryResultFormat)) { // The generated resultSet must be big enough for triggering result chunk downloader String query = @@ -578,8 +587,7 @@ public void testResultChunkDownloaderException() throws SQLException { */ @Test public void testGetObjectWithBigInt() throws SQLException { - try (Statement statement = connection.createStatement()) { - statement.execute("alter session set jdbc_query_result_format ='json'"); + try (Statement statement = createStatement("json")) { // test with greatest possible number and greatest negative possible number String[] extremeNumbers = { "99999999999999999999999999999999999999", "-99999999999999999999999999999999999999" @@ -608,9 +616,10 @@ private byte[] floatToByteArray(float i) { * * @throws SQLException */ - @Test - public void testGetBigDecimalWithScale() throws SQLException { - try (Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetBigDecimalWithScale(String queryResultFormat) throws SQLException { + try (Statement statement = createStatement(queryResultFormat)) { statement.execute("create or replace table test_get(colA number(38,9))"); try (PreparedStatement preparedStatement = connection.prepareStatement("insert into test_get values(?)")) { @@ -634,11 +643,13 @@ public void testGetBigDecimalWithScale() throws SQLException { } } - @Test - public void 
testGetDataTypeWithTimestampTz() throws Exception { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetDataTypeWithTimestampTz(String queryResultFormat) throws Exception { try (Connection connection = getConnection()) { ResultSetMetaData resultSetMetaData = null; try (Statement statement = connection.createStatement()) { + setQueryResultFormat(statement, queryResultFormat); statement.executeQuery("create or replace table ts_test(ts timestamp_tz)"); try (ResultSet resultSet = statement.executeQuery("select * from ts_test")) { resultSetMetaData = resultSet.getMetaData(); @@ -669,13 +680,14 @@ public void testGetDataTypeWithTimestampTz() throws Exception { * * @throws SQLException */ - @Test - public void testGetEmptyOrNullClob() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetEmptyOrNullClob(String queryResultFormat) throws SQLException { Clob clob = connection.createClob(); clob.setString(1, "hello world"); Clob emptyClob = connection.createClob(); emptyClob.setString(1, ""); - try (Statement statement = connection.createStatement()) { + try (Statement statement = createStatement(queryResultFormat)) { statement.execute( "create or replace table test_get_clob(colA varchar, colNull varchar, colEmpty text)"); try (PreparedStatement preparedStatement = @@ -703,10 +715,11 @@ public void testGetEmptyOrNullClob() throws SQLException { * * @throws SQLException */ - @Test - public void testSetNullClob() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testSetNullClob(String queryResultFormat) throws SQLException { Clob clob = null; - try (Statement statement = connection.createStatement()) { + try (Statement statement = createStatement(queryResultFormat)) { statement.execute("create or replace table test_set_clob(colNull varchar)"); try (PreparedStatement preparedStatement = connection.prepareStatement("insert into test_set_clob values(?)")) { @@ -722,12 +735,14 @@ public void testSetNullClob() throws SQLException { } } - @Test - public void testCallStatementType() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testCallStatementType(String queryResultFormat) throws SQLException { Properties props = new Properties(); props.put("USE_STATEMENT_TYPE_CALL_FOR_STORED_PROC_CALLS", "true"); try (Connection connection = getConnection(props); Statement statement = connection.createStatement()) { + setQueryResultFormat(statement, queryResultFormat); try { String sp = "CREATE OR REPLACE PROCEDURE \"SP_ZSDLEADTIME_ARCHIVE_DAILY\"()\n" @@ -793,9 +808,10 @@ public void testCallStatementType() throws SQLException { * Test that new query error message function for checking async query error messages is not * implemented for synchronous queries * */ - @Test - public void testNewFeaturesNotSupportedExeceptions() throws SQLException { - try (Statement statement = connection.createStatement(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testNewFeaturesNotSupportedExeceptions(String queryResultFormat) throws SQLException { + try (Statement statement = createStatement(queryResultFormat); ResultSet rs = statement.executeQuery("select 1")) { expectSnowflakeLoggedFeatureNotSupportedException( rs.unwrap(SnowflakeResultSet.class)::getQueryErrorMessage); @@ -841,9 +857,10 @@ public void testNewFeaturesNotSupportedExeceptions() throws SQLException { } } 
- @Test - public void testInvalidUnWrap() throws SQLException { - try (ResultSet rs = connection.createStatement().executeQuery("select 1")) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testInvalidUnWrap(String queryResultFormat) throws SQLException { + try (ResultSet rs = createStatement(queryResultFormat).executeQuery("select 1")) { try { rs.unwrap(SnowflakeUtil.class); } catch (SQLException ex) { @@ -856,9 +873,8 @@ public void testInvalidUnWrap() throws SQLException { @Test public void testGetObjectJsonResult() throws SQLException { - try (Statement statement = connection.createStatement()) { + try (Statement statement = createStatement("json")) { try { - statement.execute("alter session set jdbc_query_result_format ='json'"); statement.execute("create or replace table testObj (colA double, colB boolean)"); try (PreparedStatement preparedStatement = @@ -878,9 +894,10 @@ public void testGetObjectJsonResult() throws SQLException { } } - @Test - public void testMetadataIsCaseSensitive() throws SQLException { - try (Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testMetadataIsCaseSensitive(String queryResultFormat) throws SQLException { + try (Statement statement = createStatement(queryResultFormat)) { String sampleCreateTableWithAllColTypes = "CREATE or replace TABLE case_sensitive (" @@ -929,14 +946,14 @@ public void testMetadataIsCaseSensitive() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testAutoIncrementJsonResult() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testAutoIncrementResult(String queryResultFormat) throws SQLException { Properties paramProperties = new Properties(); paramProperties.put("ENABLE_FIX_759900", true); - try (Connection connection = init(paramProperties); + try (Connection connection = init(paramProperties, queryResultFormat); Statement statement = connection.createStatement()) { - statement.execute("alter session set jdbc_query_result_format ='json'"); statement.execute( "create or replace table auto_inc(id int autoincrement, name varchar(10), another_col int autoincrement)"); @@ -953,34 +970,11 @@ public void testAutoIncrementJsonResult() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testAutoIncrementArrowResult() throws SQLException { - Properties paramProperties = new Properties(); - paramProperties.put("ENABLE_FIX_759900", true); - try (Connection connection = init(paramProperties); - Statement statement = connection.createStatement()) { - statement.execute("alter session set jdbc_query_result_format ='arrow'"); - - statement.execute( - "create or replace table auto_inc(id int autoincrement, name varchar(10), another_col int autoincrement)"); - statement.execute("insert into auto_inc(name) values('test1')"); - - try (ResultSet resultSet = statement.executeQuery("select * from auto_inc")) { - assertTrue(resultSet.next()); - - ResultSetMetaData metaData = resultSet.getMetaData(); - assertTrue(metaData.isAutoIncrement(1)); - assertFalse(metaData.isAutoIncrement(2)); - assertTrue(metaData.isAutoIncrement(3)); - } - } - } - - @Test - public void testGranularTimeFunctionsInSessionTimezone() throws SQLException { - try (Connection connection = getConnection(); - 
Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGranularTimeFunctionsInSessionTimezone(String queryResultFormat) + throws SQLException { + try (Statement statement = createStatement(queryResultFormat)) { try { statement.execute("create or replace table testGranularTime(t time)"); statement.execute("insert into testGranularTime values ('10:10:10')"); @@ -997,39 +991,43 @@ public void testGranularTimeFunctionsInSessionTimezone() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testGranularTimeFunctionsInUTC() throws SQLException { - try (Connection connection = getConnection()) { - TimeZone origTz = TimeZone.getDefault(); - try (Statement statement = connection.createStatement()) { - try { - TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles")); - statement.execute("alter session set JDBC_USE_SESSION_TIMEZONE=false"); - statement.execute("create or replace table testGranularTime(t time)"); - statement.execute("insert into testGranularTime values ('10:10:10')"); - try (ResultSet resultSet = statement.executeQuery("select * from testGranularTime")) { - assertTrue(resultSet.next()); - assertEquals(Time.valueOf("02:10:10"), resultSet.getTime(1)); - assertEquals(02, resultSet.getTime(1).getHours()); - assertEquals(10, resultSet.getTime(1).getMinutes()); - assertEquals(10, resultSet.getTime(1).getSeconds()); - } - } finally { - TimeZone.setDefault(origTz); - statement.execute("drop table if exists testGranularTime"); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testGranularTimeFunctionsInUTC(String queryResultFormat) throws SQLException { + TimeZone origTz = TimeZone.getDefault(); + try (Statement statement = createStatement(queryResultFormat)) { + try { + TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles")); + statement.execute("alter session set JDBC_USE_SESSION_TIMEZONE=false"); + statement.execute("create or replace table testGranularTime(t time)"); + statement.execute("insert into testGranularTime values ('10:10:10')"); + try (ResultSet resultSet = statement.executeQuery("select * from testGranularTime")) { + assertTrue(resultSet.next()); + assertEquals(Time.valueOf("02:10:10"), resultSet.getTime(1)); + assertEquals(02, resultSet.getTime(1).getHours()); + assertEquals(10, resultSet.getTime(1).getMinutes()); + assertEquals(10, resultSet.getTime(1).getSeconds()); } + } finally { + TimeZone.setDefault(origTz); + statement.execute("drop table if exists testGranularTime"); } } } /** Added in > 3.14.5 */ - @Test - public void testLargeStringRetrieval() throws SQLException { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testLargeStringRetrieval(String queryResultFormat) throws SQLException { + String originalMaxJsonStringLength = + System.getProperty(ObjectMapperFactory.MAX_JSON_STRING_LENGTH_JVM); + System.clearProperty(ObjectMapperFactory.MAX_JSON_STRING_LENGTH_JVM); String tableName = "maxJsonStringLength_table"; int colLength = 16777216; try (Connection con = getConnection(); Statement statement = con.createStatement()) { + setQueryResultFormat(statement, queryResultFormat); SFBaseSession session = con.unwrap(SnowflakeConnectionV1.class).getSFBaseSession(); Integer maxVarcharSize = (Integer) session.getOtherParameter("VARCHAR_AND_BINARY_MAX_SIZE_IN_RESULT"); @@ -1039,7 +1037,6 @@ public 
void testLargeStringRetrieval() throws SQLException { statement.execute("create or replace table " + tableName + " (c1 string(" + colLength + "))"); statement.execute( "insert into " + tableName + " select randstr(" + colLength + ", random())"); - assertNull(System.getProperty(ObjectMapperFactory.MAX_JSON_STRING_LENGTH_JVM)); try (ResultSet rs = statement.executeQuery("select * from " + tableName)) { assertTrue(rs.next()); assertEquals(colLength, rs.getString(1).length()); @@ -1047,25 +1044,30 @@ public void testLargeStringRetrieval() throws SQLException { } } catch (Exception e) { fail("executeQuery should not fail"); + } finally { + if (originalMaxJsonStringLength != null) { + System.setProperty( + ObjectMapperFactory.MAX_JSON_STRING_LENGTH_JVM, originalMaxJsonStringLength); + } } } private static void assertAllColumnsAreLongButBigIntIsBigDecimal(ResultSet rs) throws SQLException { while (rs.next()) { - assertEquals(java.lang.Long.class, rs.getObject(1).getClass()); - assertEquals(java.math.BigDecimal.class, rs.getObject(2).getClass()); - assertEquals(java.lang.Long.class, rs.getObject(3).getClass()); - assertEquals(java.lang.Long.class, rs.getObject(4).getClass()); + assertEquals(Long.class, rs.getObject(1).getClass()); + assertEquals(BigDecimal.class, rs.getObject(2).getClass()); + assertEquals(Long.class, rs.getObject(3).getClass()); + assertEquals(Long.class, rs.getObject(4).getClass()); } } private static void assertAllColumnsAreBigDecimal(ResultSet rs) throws SQLException { while (rs.next()) { - assertEquals(java.math.BigDecimal.class, rs.getObject(1).getClass()); - assertEquals(java.math.BigDecimal.class, rs.getObject(2).getClass()); - assertEquals(java.math.BigDecimal.class, rs.getObject(3).getClass()); - assertEquals(java.math.BigDecimal.class, rs.getObject(4).getClass()); + assertEquals(BigDecimal.class, rs.getObject(1).getClass()); + assertEquals(BigDecimal.class, rs.getObject(2).getClass()); + assertEquals(BigDecimal.class, rs.getObject(3).getClass()); + assertEquals(BigDecimal.class, rs.getObject(4).getClass()); } } @@ -1140,9 +1142,10 @@ public void testGetObjectForJSONResultFormatUsingJDBCDecimalAsInt() throws SQLEx } } - @Test - public void testGetObjectWithType() throws SQLException { - try (Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetObjectWithType(String queryResultFormat) throws SQLException { + try (Statement statement = createStatement(queryResultFormat)) { statement.execute( " CREATE OR REPLACE TABLE test_all_types (" + " string VARCHAR, " diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetMultiTimeZoneIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetMultiTimeZoneIT.java index c0a494613..93266290c 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetMultiTimeZoneIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetMultiTimeZoneIT.java @@ -5,11 +5,11 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import 
static org.junit.jupiter.api.Assertions.fail; import java.sql.Connection; import java.sql.Date; @@ -22,46 +22,74 @@ import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Calendar; -import java.util.Collection; import java.util.List; -import java.util.Properties; import java.util.TimeZone; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryResultSet; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import java.util.stream.Stream; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsProvider; +import org.junit.jupiter.params.provider.ArgumentsSource; /** Test ResultSet */ -@RunWith(Parameterized.class) -@Category(TestCategoryResultSet.class) +@Tag(TestTags.RESULT_SET) public class ResultSetMultiTimeZoneIT extends BaseJDBCTest { - @Parameterized.Parameters(name = "format={0}, tz={1}") - public static Collection data() { - // all tests in this class need to run for both query result formats json and arrow - String[] timeZones = new String[] {"UTC", "Asia/Singapore", "MEZ"}; - String[] queryFormats = new String[] {"json", "arrow"}; - List ret = new ArrayList<>(); - for (String queryFormat : queryFormats) { - for (String timeZone : timeZones) { - ret.add(new Object[] {queryFormat, timeZone}); + static TimeZone ogTz; + + static class DataProvider implements ArgumentsProvider { + @Override + public Stream provideArguments(ExtensionContext context) throws Exception { + List timezones = + new ArrayList() { + { + add("UTC"); + add("Asia/Singapore"); + add("CET"); + } + }; + List queryFormats = + new ArrayList() { + { + add("json"); + add("arrow"); + } + }; + + List args = new ArrayList<>(); + for (String timeZone : timezones) { + for (String queryFormat : queryFormats) { + args.add(Arguments.argumentSet(timeZone + " " + queryFormat, timeZone, queryFormat)); + } } + + return args.stream(); } - return ret; } - private final String queryResultFormat; + @BeforeAll + public static void setDefaultTimezone() { + ogTz = TimeZone.getDefault(); + TimeZone.setDefault(TimeZone.getTimeZone("UTC")); + } - public ResultSetMultiTimeZoneIT(String queryResultFormat, String timeZone) { - this.queryResultFormat = queryResultFormat; + private static void setTimezone(String timeZone) { System.setProperty("user.timezone", timeZone); } - public Connection init() throws SQLException { + @AfterAll + public static void clearTimezone() { + TimeZone.setDefault(ogTz); + System.clearProperty("user.timezone"); + } + + public Connection init(String queryResultFormat) throws SQLException { Connection connection = BaseJDBCTest.getConnection(); try (Statement statement = connection.createStatement()) { @@ -78,15 +106,23 @@ public Connection init() throws SQLException { return connection; } - public Connection init(Properties paramProperties) throws SQLException { - Connection conn = 
getConnection(DONT_INJECT_SOCKET_TIMEOUT, paramProperties, false, false); - try (Statement stmt = conn.createStatement()) { - stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); + public Connection init() throws SQLException { + Connection connection = BaseJDBCTest.getConnection(); + + try (Statement statement = connection.createStatement()) { + statement.execute( + "alter session set " + + "TIMEZONE='America/Los_Angeles'," + + "TIMESTAMP_TYPE_MAPPING='TIMESTAMP_LTZ'," + + "TIMESTAMP_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + + "TIMESTAMP_TZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + + "TIMESTAMP_LTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'," + + "TIMESTAMP_NTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'"); } - return conn; + return connection; } - @Before + @BeforeEach public void setUp() throws SQLException { try (Connection con = init(); Statement statement = con.createStatement()) { @@ -107,21 +143,21 @@ public void setUp() throws SQLException { + "error_on_column_count_mismatch=false)"); // put files assertTrue( - "Failed to put a file", statement.execute( - "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @%orders_jdbc")); + "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @%orders_jdbc"), + "Failed to put a file"); assertTrue( - "Failed to put a file", statement.execute( - "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE_2) + " @%orders_jdbc")); + "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE_2) + " @%orders_jdbc"), + "Failed to put a file"); int numRows = statement.executeUpdate("copy into orders_jdbc"); - assertEquals("Unexpected number of rows copied: " + numRows, 73, numRows); + assertEquals(73, numRows, "Unexpected number of rows copied: " + numRows); } } - @After + @AfterEach public void tearDown() throws SQLException { System.clearProperty("user.timezone"); try (Connection con = init(); @@ -131,10 +167,12 @@ public void tearDown() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testGetDateAndTime() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + @DontRunOnGithubActions + public void testGetDateAndTime(String tz, String queryResultFormat) throws SQLException { + setTimezone(tz); + try (Connection connection = init(queryResultFormat); Statement statement = connection.createStatement()) { try { statement.execute("create or replace table dateTime(colA Date, colB Timestamp, colC Time)"); @@ -189,11 +227,13 @@ public void testGetDateAndTime() throws SQLException { } // SNOW-25029: The driver should reduce Time milliseconds mod 24h. 
- @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testTimeRange() throws SQLException { + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + @DontRunOnGithubActions + public void testTimeRange(String tz, String queryResultFormat) throws SQLException { + setTimezone(tz); final String insertTime = "insert into timeTest values (?), (?), (?), (?)"; - try (Connection connection = init(); + try (Connection connection = init(queryResultFormat); Statement statement = connection.createStatement()) { try { statement.execute("create or replace table timeTest (c1 time)"); @@ -243,11 +283,13 @@ public void testTimeRange() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testCurrentTime() throws SQLException { + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + @DontRunOnGithubActions + public void testCurrentTime(String tz, String queryResultFormat) throws SQLException { + setTimezone(tz); final String insertTime = "insert into datetime values (?, ?, ?)"; - try (Connection connection = init()) { + try (Connection connection = init(queryResultFormat)) { assertFalse(connection.createStatement().execute("alter session set TIMEZONE='UTC'")); @@ -285,10 +327,12 @@ public void testCurrentTime() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testBindTimestampTZ() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + @DontRunOnGithubActions + public void testBindTimestampTZ(String tz, String queryResultFormat) throws SQLException { + setTimezone(tz); + try (Connection connection = init(queryResultFormat); Statement statement = connection.createStatement()) { try { statement.execute( @@ -315,10 +359,12 @@ public void testBindTimestampTZ() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testGetOldDate() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + @DontRunOnGithubActions + public void testGetOldDate(String tz, String queryResultFormat) throws SQLException { + setTimezone(tz); + try (Connection connection = init(queryResultFormat); Statement statement = connection.createStatement()) { try { statement.execute("create or replace table testOldDate(d date)"); @@ -353,9 +399,11 @@ public void testGetOldDate() throws SQLException { } } - @Test - public void testGetStringForDates() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + public void testGetStringForDates(String tz, String queryResultFormat) throws SQLException { + setTimezone(tz); + try (Connection connection = init(queryResultFormat); Statement statement = connection.createStatement()) { String expectedDate1 = "2020-08-01"; String expectedDate2 = "1920-11-11"; @@ -370,10 +418,13 @@ public void testGetStringForDates() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testDateTimeRelatedTypeConversion() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + @DontRunOnGithubActions + public void testDateTimeRelatedTypeConversion(String tz, String queryResultFormat) + throws 
SQLException { + setTimezone(tz); + try (Connection connection = init(queryResultFormat); Statement statement = connection.createStatement()) { try { statement.execute( @@ -437,10 +488,12 @@ public void testDateTimeRelatedTypeConversion() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testGetOldTimestamp() throws SQLException { - try (Connection con = init(); + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + @DontRunOnGithubActions + public void testGetOldTimestamp(String tz, String queryResultFormat) throws SQLException { + setTimezone(tz); + try (Connection con = init(queryResultFormat); Statement statement = con.createStatement()) { try { statement.execute("create or replace table testOldTs(cola timestamp_ntz)"); @@ -464,12 +517,14 @@ public void testGetOldTimestamp() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testPrepareOldTimestamp() throws SQLException { + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + @DontRunOnGithubActions + public void testPrepareOldTimestamp(String tz, String queryResultFormat) throws SQLException { + setTimezone(tz); TimeZone origTz = TimeZone.getDefault(); TimeZone.setDefault(TimeZone.getTimeZone("UTC")); - try (Connection con = init(); + try (Connection con = init(queryResultFormat); Statement statement = con.createStatement()) { try { statement.execute("create or replace table testPrepOldTs(cola timestamp_ntz, colb date)"); diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetMultiTimeZoneLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetMultiTimeZoneLatestIT.java index e03dc35df..2d0bbd6b6 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetMultiTimeZoneLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetMultiTimeZoneLatestIT.java @@ -1,7 +1,7 @@ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Date; import java.sql.PreparedStatement; @@ -10,51 +10,67 @@ import java.sql.Statement; import java.sql.Timestamp; import java.text.SimpleDateFormat; -import java.util.ArrayList; import java.util.Calendar; -import java.util.Collection; import java.util.List; import java.util.TimeZone; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryResultSet; -import org.junit.Before; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; +import net.snowflake.client.providers.BooleanProvider; +import net.snowflake.client.providers.ProvidersUtil; +import net.snowflake.client.providers.SimpleResultFormatProvider; +import net.snowflake.client.providers.SnowflakeArgumentsProvider; +import net.snowflake.client.providers.TimezoneProvider; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import 
org.junit.jupiter.params.provider.ArgumentsSource; /** * ResultSet multi timezone tests for the latest JDBC driver. This cannot run for the old driver. */ -@RunWith(Parameterized.class) -@Category(TestCategoryResultSet.class) +@Tag(TestTags.RESULT_SET) public class ResultSetMultiTimeZoneLatestIT extends BaseJDBCWithSharedConnectionIT { - @Parameterized.Parameters(name = "format={0}, tz={1}") - public static Collection data() { - // all tests in this class need to run for both query result formats json and arrow - // UTC and Europe/London have different offsets during daylight savings time so it is important - // to test both to ensure daylight savings time is correct - String[] timeZones = new String[] {"UTC", "Asia/Singapore", "MEZ", "Europe/London"}; - String[] queryFormats = new String[] {"json", "arrow"}; - List ret = new ArrayList<>(); - for (String queryFormat : queryFormats) { - for (String timeZone : timeZones) { - ret.add(new Object[] {queryFormat, timeZone}); - } + + private static String originalTz; + + private static class DataProvider extends SnowflakeArgumentsProvider { + @Override + protected List rawArguments(ExtensionContext context) { + return ProvidersUtil.cartesianProduct( + context, new SimpleResultFormatProvider(), new TimezoneProvider(4)); } - return ret; } - private final String queryResultFormat; + private static class DataWithFlagProvider extends SnowflakeArgumentsProvider { + @Override + protected List rawArguments(ExtensionContext context) { + return ProvidersUtil.cartesianProduct(context, new DataProvider(), new BooleanProvider()); + } + } + + @BeforeAll + public static void saveTimezone() { + originalTz = System.getProperty("user.timezone"); + } + + @AfterAll + public static void restoreTimezone() { + if (originalTz != null) { + System.setProperty("user.timezone", originalTz); + } else { + System.clearProperty("user.timezone"); + } + } - public ResultSetMultiTimeZoneLatestIT(String queryResultFormat, String timeZone) { - this.queryResultFormat = queryResultFormat; - System.setProperty("user.timezone", timeZone); + private static void setTimezone(String tz) { + System.setProperty("user.timezone", tz); } - @Before - public void init() throws SQLException { + public void init(String queryResultFormat, String tz) throws SQLException { + setTimezone(tz); try (Statement statement = connection.createStatement()) { statement.execute( "alter session set " @@ -74,9 +90,11 @@ public void init() throws SQLException { * * @throws SQLException */ - @Test - public void testTimesWithGetTimestamp() throws SQLException { - try (Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + public void testTimesWithGetTimestamp(String queryResultFormat, String tz) throws SQLException { + init(queryResultFormat, tz); + try (Statement statement = createStatement(queryResultFormat)) { String timeStringValue = "10:30:50.123456789"; String timestampStringValue = "1970-01-01 " + timeStringValue; int length = timestampStringValue.length(); @@ -108,9 +126,12 @@ public void testTimesWithGetTimestamp() throws SQLException { * * @throws SQLException */ - @Test - public void testTimestampNTZWithDaylightSavings() throws SQLException { - try (Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + public void testTimestampNTZWithDaylightSavings(String queryResultFormat, String tz) + throws SQLException { + init(queryResultFormat, tz); + try (Statement statement = 
createStatement(queryResultFormat)) { statement.execute( "alter session set TIMESTAMP_TYPE_MAPPING='TIMESTAMP_NTZ'," + "TIMEZONE='Europe/London'"); try (ResultSet rs = statement.executeQuery("select TIMESTAMP '2011-09-04 00:00:00'")) { @@ -125,13 +146,18 @@ public void testTimestampNTZWithDaylightSavings() throws SQLException { * Test for getDate(int columnIndex, Calendar cal) function to ensure it matches values with * getTimestamp function */ - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testDateAndTimestampWithTimezone() throws SQLException { + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + @DontRunOnGithubActions + public void testDateAndTimestampWithTimezone(String queryResultFormat, String tz) + throws SQLException { + init(queryResultFormat, tz); Calendar cal = null; SimpleDateFormat sdf = null; - - try (Statement statement = connection.createStatement()) { + // The following line allows for the tests to work locally. This should be removed when the + // tests are properly fixed. + TimeZone.setDefault(TimeZone.getTimeZone("UTC")); + try (Statement statement = createStatement(queryResultFormat)) { statement.execute("alter session set JDBC_FORMAT_DATE_WITH_TIMEZONE=true"); try (ResultSet rs = statement.executeQuery( @@ -186,31 +212,6 @@ public void testDateAndTimestampWithTimezone() throws SQLException { } } - /** - * Tests that formats are correct when JDBC_USE_SESSION_TIMEZONE=true and other related time/date - * formatting parameters are at their default values - * - * @throws SQLException - */ - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testSessionTimezoneUsage() throws SQLException { - testUseSessionTimeZoneHelper(true); - } - - /** - * Tests that the new param overrides previous time/date/timestamp formatting parameters such as - * JDBC_TREAT_TIMESTAMP_NTZ_AS_UTC, CLIENT_HONOR_CLIENT_TZ_FOR_TIMESTAMP_NTZ, and - * JDBC_FORMAT_DATE_WITH_TIMEZONE. - * - * @throws SQLException - */ - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testUseSessionTimeZoneOverrides() throws SQLException { - testUseSessionTimeZoneHelper(false); - } - /** * Helper function to test behavior of parameter JDBC_USE_SESSION_TIMEZONE. 
When * JDBC_USE_SESSION_TIMEZONE=true, time/date/timestamp values are displayed using the session @@ -226,8 +227,12 @@ public void testUseSessionTimeZoneOverrides() throws SQLException { * parameters * @throws SQLException */ - private void testUseSessionTimeZoneHelper(boolean useDefaultParamSettings) throws SQLException { - try (Statement statement = connection.createStatement()) { + @ParameterizedTest + @ArgumentsSource(DataWithFlagProvider.class) + public void testUseSessionTimeZoneHelper( + String queryResultFormat, String tz, boolean useDefaultParamSettings) throws SQLException { + init(queryResultFormat, tz); + try (Statement statement = createStatement(queryResultFormat)) { try { // create table with all timestamp types, time, and date statement.execute( diff --git a/src/test/java/net/snowflake/client/jdbc/ResultSetVectorLatestIT.java b/src/test/java/net/snowflake/client/jdbc/ResultSetVectorLatestIT.java index bc553b2f6..23cdc5b6b 100644 --- a/src/test/java/net/snowflake/client/jdbc/ResultSetVectorLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/ResultSetVectorLatestIT.java @@ -1,9 +1,10 @@ package net.snowflake.client.jdbc; import static net.snowflake.client.jdbc.SnowflakeUtil.EXTRA_TYPES_VECTOR; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.ResultSet; import java.sql.ResultSetMetaData; @@ -12,11 +13,11 @@ import java.sql.Types; import java.util.Arrays; import java.util.List; -import net.snowflake.client.category.TestCategoryResultSet; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import net.snowflake.client.category.TestTags; +import net.snowflake.client.providers.SimpleResultFormatProvider; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; /** * ResultSet integration tests for the latest JDBC driver. This doesn't work for the oldest @@ -25,69 +26,60 @@ * If it is applicable, move tests to ResultSetVectorIT so that both the latest and oldest supported * driver run the tests. 
*/ -@Category(TestCategoryResultSet.class) -@RunWith(Parameterized.class) +@Tag(TestTags.RESULT_SET) public class ResultSetVectorLatestIT extends ResultSet0IT { - private final String queryResultFormat; - - public ResultSetVectorLatestIT(String queryResultFormat) { - super(queryResultFormat); - this.queryResultFormat = queryResultFormat; - } - - @Parameterized.Parameters(name = "format={0}") - public static List queryResultFormats() { - return Arrays.asList("json", "arrow"); - } - - @Test - public void testGetIntVectorAsIntArray() throws SQLException { - try (Statement stmt = connection.createStatement()) { - enforceQueryResultFormat(stmt); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetIntVectorAsIntArray(String queryResultFormat) throws SQLException { + try (Statement stmt = createStatement(queryResultFormat)) { + enforceQueryResultFormat(stmt, queryResultFormat); Integer[] vector = {-1, 5}; try (ResultSet resultSet = stmt.executeQuery("select " + vectorToString(vector, "int"))) { assertTrue(resultSet.next()); Integer[] result = resultSet.unwrap(SnowflakeBaseResultSet.class).getArray(1, Integer.class); - assertEquals(vector, result); + assertArrayEquals(vector, result); assertVectorMetadata(resultSet, 1, Types.INTEGER, 1); } } } - @Test - public void testGetIntVectorAsLongArray() throws SQLException { - try (Statement stmt = connection.createStatement()) { - enforceQueryResultFormat(stmt); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetIntVectorAsLongArray(String queryResultFormat) throws SQLException { + try (Statement stmt = createStatement(queryResultFormat)) { + enforceQueryResultFormat(stmt, queryResultFormat); Long[] vector = {-1L, 5L}; try (ResultSet resultSet = stmt.executeQuery("select " + vectorToString(vector, "int"))) { assertTrue(resultSet.next()); Long[] result = resultSet.unwrap(SnowflakeBaseResultSet.class).getArray(1, Long.class); - assertEquals(vector, result); + assertArrayEquals(vector, result); assertVectorMetadata(resultSet, 1, Types.INTEGER, 1); } } } - @Test - public void testGetFloatVectorAsFloatArray() throws SQLException { - try (Statement stmt = connection.createStatement()) { - enforceQueryResultFormat(stmt); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetFloatVectorAsFloatArray(String queryResultFormat) throws SQLException { + try (Statement stmt = createStatement(queryResultFormat)) { + enforceQueryResultFormat(stmt, queryResultFormat); Float[] vector = {-1.2f, 5.1f, 15.87f}; try (ResultSet resultSet = stmt.executeQuery("select " + vectorToString(vector, "float"))) { assertTrue(resultSet.next()); Float[] result = resultSet.unwrap(SnowflakeBaseResultSet.class).getArray(1, Float.class); - assertEquals(vector, result); + assertArrayEquals(vector, result); assertVectorMetadata(resultSet, 1, Types.FLOAT, 1); } } } - @Test - public void testGetNullAsIntVector() throws SQLException { - try (Statement stmt = connection.createStatement()) { - enforceQueryResultFormat(stmt); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetNullAsIntVector(String queryResultFormat) throws SQLException { + try (Statement stmt = createStatement(queryResultFormat)) { + enforceQueryResultFormat(stmt, queryResultFormat); try (ResultSet resultSet = stmt.executeQuery("select null::vector(int, 2)")) { assertTrue(resultSet.next()); Integer[] result = @@ -98,10 +90,11 @@ public void testGetNullAsIntVector() 
throws SQLException { } } - @Test - public void testGetNullAsFloatVector() throws SQLException { - try (Statement stmt = connection.createStatement()) { - enforceQueryResultFormat(stmt); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetNullAsFloatVector(String queryResultFormat) throws SQLException { + try (Statement stmt = createStatement(queryResultFormat)) { + enforceQueryResultFormat(stmt, queryResultFormat); try (ResultSet resultSet = stmt.executeQuery("select null::vector(float, 2)")) { assertTrue(resultSet.next()); Integer[] result = @@ -112,42 +105,46 @@ public void testGetNullAsFloatVector() throws SQLException { } } - @Test - public void testGetIntVectorFromTable() throws SQLException { - try (Statement stmt = connection.createStatement()) { - enforceQueryResultFormat(stmt); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetIntVectorFromTable(String queryResultFormat) throws SQLException { + try (Statement stmt = createStatement(queryResultFormat)) { + enforceQueryResultFormat(stmt, queryResultFormat); stmt.execute("create or replace table test_vector_int(x vector(int, 2), y int)"); stmt.execute("insert into test_vector_int select [3, 7]::vector(int, 2), 15"); try (ResultSet resultSet = stmt.executeQuery("select x, y from test_vector_int")) { assertTrue(resultSet.next()); Integer[] result = resultSet.unwrap(SnowflakeBaseResultSet.class).getArray(1, Integer.class); - assertEquals(new Integer[] {3, 7}, result); + assertArrayEquals(new Integer[] {3, 7}, result); assertVectorMetadata(resultSet, 1, Types.INTEGER, 2); } } } - @Test - public void testGetFloatVectorFromTable() throws SQLException { - try (Statement stmt = connection.createStatement()) { - enforceQueryResultFormat(stmt); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetFloatVectorFromTable(String queryResultFormat) throws SQLException { + try (Statement stmt = createStatement(queryResultFormat)) { + enforceQueryResultFormat(stmt, queryResultFormat); stmt.execute("create or replace table test_vector_float(x vector(float, 2), y float)"); stmt.execute("insert into test_vector_float select [-3, 7.1]::vector(float, 2), 20.3"); try (ResultSet resultSet = stmt.executeQuery("select x, y from test_vector_float")) { assertTrue(resultSet.next()); Float[] result = resultSet.unwrap(SnowflakeBaseResultSet.class).getArray(1, Float.class); - assertEquals(new Float[] {-3f, 7.1f}, result); + assertArrayEquals(new Float[] {-3f, 7.1f}, result); assertVectorMetadata(resultSet, 1, Types.FLOAT, 2); } } } /** Added in > 3.16.1 */ - @Test - public void testGetVectorViaGetStringIsEqualToTheGetObject() throws SQLException { - try (Statement stmt = connection.createStatement()) { - enforceQueryResultFormat(stmt); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + public void testGetVectorViaGetStringIsEqualToTheGetObject(String queryResultFormat) + throws SQLException { + try (Statement stmt = createStatement(queryResultFormat)) { + enforceQueryResultFormat(stmt, queryResultFormat); Integer[] intVector = {-1, 5}; Float[] floatVector = {-1.2f, 5.1f, 15.87f}; try (ResultSet resultSet = @@ -164,7 +161,7 @@ public void testGetVectorViaGetStringIsEqualToTheGetObject() throws SQLException assertTrue(resultSet.next()); assertGetObjectAndGetStringBeTheSame(resultSet, "[-1,5]", 1); String floatArrayRepresentation = - "json".equals(queryResultFormat) + "json".equalsIgnoreCase(queryResultFormat) // in 
json we have slightly different format that we accept in the result ? "[-1.200000,5.100000,15.870000]" : "[-1.2,5.1,15.87]"; @@ -195,7 +192,8 @@ private String nullVectorToString(String vectorType) { return "null::vector(" + vectorType + ", 2)"; } - private void enforceQueryResultFormat(Statement stmt) throws SQLException { + private void enforceQueryResultFormat(Statement stmt, String queryResultFormat) + throws SQLException { String sql = String.format( "alter session set jdbc_query_result_format = '%s'", queryResultFormat.toUpperCase()); diff --git a/src/test/java/net/snowflake/client/jdbc/SSOConnectionTest.java b/src/test/java/net/snowflake/client/jdbc/SSOConnectionTest.java index 51c9179b4..6fbadd92f 100644 --- a/src/test/java/net/snowflake/client/jdbc/SSOConnectionTest.java +++ b/src/test/java/net/snowflake/client/jdbc/SSOConnectionTest.java @@ -6,7 +6,7 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.any; import static org.mockito.Mockito.anyInt; import static org.mockito.Mockito.mock; @@ -38,7 +38,7 @@ import net.snowflake.common.core.ClientAuthnDTO; import org.apache.commons.io.IOUtils; import org.apache.http.client.methods.HttpPost; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.mockito.MockedStatic; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; diff --git a/src/test/java/net/snowflake/client/jdbc/ServiceNameTest.java b/src/test/java/net/snowflake/client/jdbc/ServiceNameTest.java index bd51ef533..737cc1ffe 100644 --- a/src/test/java/net/snowflake/client/jdbc/ServiceNameTest.java +++ b/src/test/java/net/snowflake/client/jdbc/ServiceNameTest.java @@ -13,7 +13,7 @@ import net.snowflake.client.core.HttpUtil; import net.snowflake.client.core.SFSessionProperty; import org.apache.http.client.methods.HttpRequestBase; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.mockito.MockedStatic; import org.mockito.Mockito; diff --git a/src/test/java/net/snowflake/client/jdbc/SessionUtilTest.java b/src/test/java/net/snowflake/client/jdbc/SessionUtilTest.java index a91fa4a89..4056dda1b 100644 --- a/src/test/java/net/snowflake/client/jdbc/SessionUtilTest.java +++ b/src/test/java/net/snowflake/client/jdbc/SessionUtilTest.java @@ -1,11 +1,14 @@ package net.snowflake.client.jdbc; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + import com.fasterxml.jackson.databind.ObjectMapper; import java.util.Map; import net.snowflake.client.core.ObjectMapperFactory; import net.snowflake.client.core.SessionUtil; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class SessionUtilTest { @Test @@ -16,56 +19,56 @@ public void testGetCommonParams() throws Exception { Map result = SessionUtil.getCommonParams( mapper.readTree("[{\"name\": \"testParam\", \"value\": true}]")); - Assert.assertTrue((boolean) result.get("testParam")); + assertTrue((boolean) result.get("testParam")); result = SessionUtil.getCommonParams( mapper.readTree("[{\"name\": \"testParam\", \"value\": false}]")); - Assert.assertFalse((boolean) result.get("testParam")); + assertFalse((boolean) result.get("testParam")); result = SessionUtil.getCommonParams(mapper.readTree("[{\"name\": \"testParam\", \"value\": 
0}]")); - Assert.assertEquals(0, (int) result.get("testParam")); + assertEquals(0, (int) result.get("testParam")); result = SessionUtil.getCommonParams( mapper.readTree("[{\"name\": \"testParam\", \"value\": 1000}]")); - Assert.assertEquals(1000, (int) result.get("testParam")); + assertEquals(1000, (int) result.get("testParam")); result = SessionUtil.getCommonParams( mapper.readTree("[{\"name\": \"testParam\", \"value\": \"\"}]")); - Assert.assertEquals("", result.get("testParam")); + assertEquals("", result.get("testParam")); result = SessionUtil.getCommonParams( mapper.readTree("[{\"name\": \"testParam\", \"value\": \"value\"}]")); - Assert.assertEquals("value", result.get("testParam")); + assertEquals("value", result.get("testParam")); // Test known param name result = SessionUtil.getCommonParams( mapper.readTree("[{\"name\": \"CLIENT_DISABLE_INCIDENTS\", \"value\": true}]")); - Assert.assertTrue((boolean) result.get("CLIENT_DISABLE_INCIDENTS")); + assertTrue((boolean) result.get("CLIENT_DISABLE_INCIDENTS")); result = SessionUtil.getCommonParams( mapper.readTree("[{\"name\": \"CLIENT_DISABLE_INCIDENTS\", \"value\": false}]")); - Assert.assertFalse((boolean) result.get("CLIENT_DISABLE_INCIDENTS")); + assertFalse((boolean) result.get("CLIENT_DISABLE_INCIDENTS")); result = SessionUtil.getCommonParams( mapper.readTree( "[{\"name\": \"CLIENT_STAGE_ARRAY_BINDING_THRESHOLD\", \"value\": 0}]")); - Assert.assertEquals(0, (int) result.get("CLIENT_STAGE_ARRAY_BINDING_THRESHOLD")); + assertEquals(0, (int) result.get("CLIENT_STAGE_ARRAY_BINDING_THRESHOLD")); result = SessionUtil.getCommonParams( mapper.readTree( "[{\"name\": \"CLIENT_STAGE_ARRAY_BINDING_THRESHOLD\", \"value\": 1000}]")); - Assert.assertEquals(1000, (int) result.get("CLIENT_STAGE_ARRAY_BINDING_THRESHOLD")); + assertEquals(1000, (int) result.get("CLIENT_STAGE_ARRAY_BINDING_THRESHOLD")); result = SessionUtil.getCommonParams(mapper.readTree("[{\"name\": \"TIMEZONE\", \"value\": \"\"}]")); - Assert.assertEquals("", result.get("TIMEZONE")); + assertEquals("", result.get("TIMEZONE")); result = SessionUtil.getCommonParams( mapper.readTree("[{\"name\": \"TIMEZONE\", \"value\": \"value\"}]")); - Assert.assertEquals("value", result.get("TIMEZONE")); + assertEquals("value", result.get("TIMEZONE")); } } diff --git a/src/test/java/net/snowflake/client/jdbc/SessionVariablesIT.java b/src/test/java/net/snowflake/client/jdbc/SessionVariablesIT.java index 5a8d28922..c5f3c8a1f 100644 --- a/src/test/java/net/snowflake/client/jdbc/SessionVariablesIT.java +++ b/src/test/java/net/snowflake/client/jdbc/SessionVariablesIT.java @@ -3,8 +3,8 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; import java.sql.Connection; import java.sql.ResultSet; @@ -14,11 +14,11 @@ import java.util.Map; import java.util.Properties; import net.snowflake.client.AbstractDriverIT; -import net.snowflake.client.category.TestCategoryOthers; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public final class SessionVariablesIT extends AbstractDriverIT { @Test public void testSettingSessionVariablesInConnectionProperties() throws SQLException { diff --git 
a/src/test/java/net/snowflake/client/jdbc/SnowflakeAzureClientHandleExceptionLatestIT.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeAzureClientHandleExceptionLatestIT.java index c0c5dc18d..b57bdc86b 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeAzureClientHandleExceptionLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeAzureClientHandleExceptionLatestIT.java @@ -3,6 +3,10 @@ */ package net.snowflake.client.jdbc; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.fail; + import com.microsoft.azure.storage.StorageException; import com.microsoft.azure.storage.StorageExtendedErrorInformation; import java.io.File; @@ -13,26 +17,23 @@ import java.sql.SQLException; import java.sql.Statement; import net.snowflake.client.AbstractDriverIT; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryOthers; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.Constants; import net.snowflake.client.core.SFSession; import net.snowflake.client.core.SFStatement; import net.snowflake.client.jdbc.cloud.storage.SnowflakeAzureClient; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; import org.mockito.Mockito; /** Test for SnowflakeAzureClient handle exception function */ -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class SnowflakeAzureClientHandleExceptionLatestIT extends AbstractDriverIT { - @Rule public TemporaryFolder tmpFolder = new TemporaryFolder(); + @TempDir private File tmpFolder; private Connection connection; private SFStatement sfStatement; private SFSession sfSession; @@ -41,7 +42,7 @@ public class SnowflakeAzureClientHandleExceptionLatestIT extends AbstractDriverI private int overMaxRetry; private int maxRetry; - @Before + @BeforeEach public void setup() throws SQLException { connection = getConnection("azureaccount"); sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); @@ -60,7 +61,7 @@ public void setup() throws SQLException { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void error403RenewExpired() throws SQLException, InterruptedException { // Unauthenticated, renew is called. 
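[Editor's note] The surrounding hunks apply three mechanical conversions: JUnit 4's org.junit.Assert.* calls become org.junit.jupiter.api.Assertions.* with the message moved from the first to the last argument, @Rule TemporaryFolder becomes a @TempDir-injected java.io.File, and @Test(expected = ...) becomes an explicit assertThrows. A minimal sketch of those replacements follows; ExampleTempDirTest and its method are hypothetical and not part of the patch.

    // Sketch of the @TempDir and assertThrows replacements used in these tests (hypothetical names).
    import static org.junit.jupiter.api.Assertions.assertEquals;
    import static org.junit.jupiter.api.Assertions.assertThrows;

    import java.io.File;
    import org.junit.jupiter.api.Test;
    import org.junit.jupiter.api.io.TempDir;

    class ExampleTempDirTest {
      // Replaces: @Rule public TemporaryFolder tmpFolder = new TemporaryFolder();
      @TempDir File tmpFolder;

      @Test
      void createsFolderAndExpectsException() {
        // TemporaryFolder.newFolder("dest") becomes a plain java.io.File plus mkdirs().
        File dest = new File(tmpFolder, "dest");
        dest.mkdirs();

        // Replaces @Test(expected = IllegalStateException.class); the thrown
        // exception is returned so it can be inspected, and the assertion
        // message now goes last instead of first.
        IllegalStateException e =
            assertThrows(
                IllegalStateException.class,
                () -> {
                  throw new IllegalStateException("boom");
                });
        assertEquals("boom", e.getMessage(), "unexpected exception message");
      }
    }
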
spyingClient.handleStorageException( @@ -101,99 +102,130 @@ public void run() { thread.start(); thread.interrupt(); thread.join(); - Assert.assertNull("Exception must not have been thrown in here", exceptionContainer[0]); + assertNull(exceptionContainer[0], "Exception must not have been thrown in here"); Mockito.verify(spyingClient, Mockito.times(4)).renew(Mockito.anyMap()); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void error403OverMaxRetryThrow() throws SQLException { - spyingClient.handleStorageException( - new StorageException( - "403", "Unauthenticated", 403, new StorageExtendedErrorInformation(), new Exception()), - overMaxRetry, - "upload", - sfSession, - command, - null); + @Test + @DontRunOnGithubActions + public void error403OverMaxRetryThrow() { + assertThrows( + SnowflakeSQLException.class, + () -> + spyingClient.handleStorageException( + new StorageException( + "403", + "Unauthenticated", + 403, + new StorageExtendedErrorInformation(), + new Exception()), + overMaxRetry, + "upload", + sfSession, + command, + null)); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void error403NullSession() throws SQLException { - spyingClient.handleStorageException( - new StorageException( - "403", "Unauthenticated", 403, new StorageExtendedErrorInformation(), new Exception()), - 0, - "upload", - null, - command, - null); + @Test + @DontRunOnGithubActions + public void error403NullSession() { + assertThrows( + SnowflakeSQLException.class, + () -> + spyingClient.handleStorageException( + new StorageException( + "403", + "Unauthenticated", + 403, + new StorageExtendedErrorInformation(), + new Exception()), + 0, + "upload", + null, + command, + null)); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void errorInvalidKey() throws SQLException { - spyingClient.handleStorageException( - new Exception(new InvalidKeyException()), 0, "upload", sfSession, command, null); + @Test + @DontRunOnGithubActions + public void errorInvalidKey() { + assertThrows( + SnowflakeSQLException.class, + () -> + spyingClient.handleStorageException( + new Exception(new InvalidKeyException()), 0, "upload", sfSession, command, null)); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @Test + @DontRunOnGithubActions public void errorInterruptedException() throws SQLException { // Can still retry, no error thrown try { spyingClient.handleStorageException( new InterruptedException(), 0, "upload", sfSession, command, null); } catch (Exception e) { - Assert.fail("Should not have exception here"); + fail("Should not have exception here"); } Mockito.verify(spyingClient, Mockito.never()).renew(Mockito.anyMap()); - spyingClient.handleStorageException( - new InterruptedException(), 26, "upload", sfSession, command, null); + assertThrows( + SnowflakeSQLException.class, + () -> + spyingClient.handleStorageException( + new InterruptedException(), 26, "upload", sfSession, command, null)); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void errorSocketTimeoutException() throws SQLException { + @Test + @DontRunOnGithubActions + public void errorSocketTimeoutException() throws 
SnowflakeSQLException { // Can still retry, no error thrown try { spyingClient.handleStorageException( new SocketTimeoutException(), 0, "upload", sfSession, command, null); } catch (Exception e) { - Assert.fail("Should not have exception here"); + fail("Should not have exception here"); } Mockito.verify(spyingClient, Mockito.never()).renew(Mockito.anyMap()); - spyingClient.handleStorageException( - new SocketTimeoutException(), 26, "upload", sfSession, command, null); + assertThrows( + SnowflakeSQLException.class, + () -> + spyingClient.handleStorageException( + new SocketTimeoutException(), 26, "upload", sfSession, command, null)); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void errorUnknownException() throws SQLException { - spyingClient.handleStorageException(new Exception(), 0, "upload", sfSession, command, null); + @Test + @DontRunOnGithubActions + public void errorUnknownException() { + assertThrows( + SnowflakeSQLException.class, + () -> + spyingClient.handleStorageException( + new Exception(), 0, "upload", sfSession, command, null)); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void errorNoSpaceLeftOnDevice() throws SQLException, IOException { - File destFolder = tmpFolder.newFolder(); + @Test + @DontRunOnGithubActions + public void errorNoSpaceLeftOnDevice() throws IOException { + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); String getCommand = "get @testPutGet_stage/" + TEST_DATA_FILE + " 'file://" + destFolderCanonicalPath + "'"; - spyingClient.handleStorageException( - new StorageException( - "", - Constants.NO_SPACE_LEFT_ON_DEVICE_ERR, - new IOException(Constants.NO_SPACE_LEFT_ON_DEVICE_ERR)), - 0, - "download", - null, - getCommand, - null); + assertThrows( + SnowflakeSQLException.class, + () -> + spyingClient.handleStorageException( + new StorageException( + "", + Constants.NO_SPACE_LEFT_ON_DEVICE_ERR, + new IOException(Constants.NO_SPACE_LEFT_ON_DEVICE_ERR)), + 0, + "download", + null, + getCommand, + null)); } - @After + @AfterEach public void cleanUp() throws SQLException { sfStatement.close(); connection.close(); diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeBasicDataSourceTest.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeBasicDataSourceTest.java index 8df351889..a2f4638b1 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeBasicDataSourceTest.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeBasicDataSourceTest.java @@ -5,12 +5,12 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.sql.SQLException; import java.util.Properties; import net.snowflake.client.core.SFSessionProperty; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** Data source unit test */ public class SnowflakeBasicDataSourceTest { diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeChunkDownloaderLatestIT.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeChunkDownloaderLatestIT.java index 7dca1028e..0251f3984 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeChunkDownloaderLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeChunkDownloaderLatestIT.java @@ 
-3,7 +3,7 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Connection; import java.sql.ResultSet; @@ -11,20 +11,20 @@ import java.sql.Statement; import java.util.List; import java.util.Properties; -import net.snowflake.client.category.TestCategoryCore; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; -@Category(TestCategoryCore.class) +@Tag(TestTags.CORE) public class SnowflakeChunkDownloaderLatestIT extends BaseJDBCTest { private static String originalProxyHost; private static String originalProxyPort; private static String originalNonProxyHosts; - @BeforeClass + @BeforeAll public static void setUp() throws Exception { originalProxyHost = System.getProperty("https.proxyHost"); originalProxyPort = System.getProperty("https.proxyPort"); @@ -39,7 +39,7 @@ private static void restoreProperty(String key, String value) { } } - @AfterClass + @AfterAll public static void tearDown() throws Exception { restoreProperty("https.proxyHost", originalProxyHost); restoreProperty("https.proxyPort", originalProxyPort); diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeClobTest.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeClobTest.java index fa3d4de6e..b08221a41 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeClobTest.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeClobTest.java @@ -3,9 +3,9 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; import java.io.InputStream; @@ -13,7 +13,7 @@ import java.io.Reader; import java.nio.charset.StandardCharsets; import java.sql.SQLException; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class SnowflakeClobTest extends BaseJDBCTest { diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeConnectionV1Test.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeConnectionV1Test.java index a17a89b15..de6c4fb70 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeConnectionV1Test.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeConnectionV1Test.java @@ -6,7 +6,7 @@ import java.util.Map; import java.util.Properties; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** Created by hyu on 2/2/18. 
*/ public class SnowflakeConnectionV1Test { diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverConnectionStressTest.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverConnectionStressTest.java index 161e9c939..b50388d5d 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverConnectionStressTest.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverConnectionStressTest.java @@ -4,7 +4,7 @@ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNotNull; import java.sql.Connection; import java.sql.ResultSet; diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverIT.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverIT.java index a540adcec..b245f8c0b 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverIT.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverIT.java @@ -5,13 +5,14 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -24,6 +25,7 @@ import java.nio.channels.FileChannel; import java.sql.Connection; import java.sql.DatabaseMetaData; +import java.sql.Date; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.ResultSetMetaData; @@ -49,23 +51,21 @@ import java.util.logging.Level; import java.util.logging.Logger; import net.snowflake.client.AbstractDriverIT; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.RunningOnTestaccount; -import net.snowflake.client.category.TestCategoryOthers; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.annotations.DontRunOnTestaccount; +import net.snowflake.client.category.TestTags; import net.snowflake.common.core.ClientAuthnDTO; import net.snowflake.common.core.SqlState; import org.apache.commons.io.FileUtils; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Ignore; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; /** General integration tests */ -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class SnowflakeDriverIT extends BaseJDBCTest { private static final int MAX_CONCURRENT_QUERIES_PER_USER = 50; private static 
final String getCurrenTransactionStmt = "SELECT CURRENT_TRANSACTION()"; @@ -73,15 +73,15 @@ public class SnowflakeDriverIT extends BaseJDBCTest { private static String ORDERS_JDBC = "ORDERS_JDBC"; - @Rule public TemporaryFolder tmpFolder = new TemporaryFolder(); + @TempDir private File tmpFolder; private ObjectMapper mapper = new ObjectMapper(); - @Rule public TemporaryFolder tmpFolder2 = new TemporaryFolder(); + @TempDir public File tmpFolder2; public String testStageName = String.format("test_stage_%s", UUID.randomUUID().toString()).replaceAll("-", "_"); - @BeforeClass + @BeforeAll public static void setUp() throws Throwable { try (Connection connection = getConnection()) { try (Statement statement = connection.createStatement()) { @@ -99,22 +99,22 @@ public static void setUp() throws Throwable { // put files assertTrue( - "Failed to put a file", statement.execute( - "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @%orders_jdbc")); + "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @%orders_jdbc"), + "Failed to put a file"); assertTrue( - "Failed to put a file", statement.execute( - "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE_2) + " @%orders_jdbc")); + "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE_2) + " @%orders_jdbc"), + "Failed to put a file"); int numRows = statement.executeUpdate("copy into orders_jdbc"); - assertEquals("Unexpected number of rows copied: " + numRows, 73, numRows); + assertEquals(73, numRows, "Unexpected number of rows copied: " + numRows); } } } - @AfterClass + @AfterAll public static void tearDown() throws SQLException { try (Connection connection = getConnection(); Statement statement = connection.createStatement()) { @@ -145,7 +145,7 @@ public static Connection getConnection() throws SQLException { /** Test connection to database using Snowflake Oauth instead of username/pw * */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testOauthConnection() throws SQLException { Map params = getConnectionParameters(); String role = null; @@ -182,7 +182,7 @@ public void testOauthConnection() throws SQLException { } } - @Ignore + @Disabled @Test public void testConnections() throws Throwable { ExecutorService executorService = Executors.newFixedThreadPool(MAX_CONCURRENT_QUERIES_PER_USER); @@ -237,7 +237,7 @@ public void testShowColumns() throws Throwable { try (Connection connection = getConnection(paramProperties); Statement statement = connection.createStatement(); ResultSet resultSet = statement.executeQuery("show columns in clustered_jdbc")) { - assertEquals("number of columns", 2, countRows(resultSet)); + assertEquals(2, countRows(resultSet), "number of columns"); } } @@ -259,7 +259,7 @@ public void testRowsPerResultset() throws Throwable { ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); int numColumns = resultSetMetaData.getColumnCount(); assertEquals(9, numColumns); - assertEquals("number of columns", 73, countRows(resultSet)); + assertEquals(73, countRows(resultSet), "number of columns"); } } } @@ -356,18 +356,18 @@ private void assertConstraintResults( // primary key for testConstraintsP1 should contain two rows for (int i = 0; i < numRows; i++) { - assertTrue("get constraint result row count", resultSet.next()); + assertTrue(resultSet.next(), "get constraint result row count"); if (pkTableName != null) { assertTrue( - "get constraint result primary table name", - pkTableName.equalsIgnoreCase(resultSet.getString(3))); + 
pkTableName.equalsIgnoreCase(resultSet.getString(3)), + "get constraint result primary table name"); } if (fkTableName != null) { assertTrue( - "get constraint result foreign table name", - fkTableName.equalsIgnoreCase(resultSet.getString(7))); + fkTableName.equalsIgnoreCase(resultSet.getString(7)), + "get constraint result foreign table name"); } } } @@ -513,8 +513,8 @@ public void testConstraints() throws Throwable { null, null, "TESTCONSTRAINTSP2", null, null, "TESTCONSTRAINTSF1"); assertFalse( - "cross reference from testConstraintsP2 to " + "testConstraintsF2 should be empty", - manualResultSet.next()); + manualResultSet.next(), + "cross reference from testConstraintsP2 to " + "testConstraintsF2 should be empty"); manualResultSet.close(); assertFalse(manualResultSet.next()); } finally { @@ -543,7 +543,7 @@ public void testQueryWithMaxRows() throws Throwable { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testCancelQueryBySystemFunction() throws Throwable { try (Connection connection = getConnection(); Statement getSessionIdStmt = connection.createStatement()) { @@ -579,7 +579,7 @@ public void run() { fail("should raise an exception"); } catch (SQLException ex) { // assert the sqlstate is what we expect (QUERY CANCELLED) - assertEquals("sqlstate mismatch", SqlState.QUERY_CANCELED, ex.getSQLState()); + assertEquals(SqlState.QUERY_CANCELED, ex.getSQLState(), "sqlstate mismatch"); } } @@ -595,34 +595,34 @@ public void testDBMetadata() throws Throwable { // the following will issue try (ResultSet databaseSet = metaData.getCatalogs()) { - assertTrue("databases shouldn't be empty", databaseSet.next()); + assertTrue(databaseSet.next(), "databases shouldn't be empty"); // "show schemas in [databaseName]" ResultSet schemaSet = metaData.getSchemas(connection.getCatalog(), connection.getSchema()); - assertTrue("schemas shouldn't be empty", schemaSet.next()); + assertTrue(schemaSet.next(), "schemas shouldn't be empty"); assertTrue( - "database should be " + connection.getCatalog(), - connection.getCatalog().equalsIgnoreCase(schemaSet.getString(2))); + connection.getCatalog().equalsIgnoreCase(schemaSet.getString(2)), + "database should be " + connection.getCatalog()); assertTrue( - "schema should be " + connection.getSchema(), - connection.getSchema().equalsIgnoreCase(schemaSet.getString(1))); + connection.getSchema().equalsIgnoreCase(schemaSet.getString(1)), + "schema should be " + connection.getSchema()); // snow tables in a schema try (ResultSet tableSet = metaData.getTables( connection.getCatalog(), connection.getSchema(), ORDERS_JDBC, null)) { // types assertTrue( + tableSet.next(), String.format( "table %s should exists in db: %s, schema: %s", - ORDERS_JDBC, connection.getCatalog(), connection.getSchema()), - tableSet.next()); + ORDERS_JDBC, connection.getCatalog(), connection.getSchema())); assertTrue( - "database should be " + connection.getCatalog(), - connection.getCatalog().equalsIgnoreCase(schemaSet.getString(2))); + connection.getCatalog().equalsIgnoreCase(schemaSet.getString(2)), + "database should be " + connection.getCatalog()); assertTrue( - "schema should be " + connection.getSchema(), - connection.getSchema().equalsIgnoreCase(schemaSet.getString(1))); + connection.getSchema().equalsIgnoreCase(schemaSet.getString(1)), + "schema should be " + connection.getSchema()); assertTrue( - "table should be orders_jdbc", ORDERS_JDBC.equalsIgnoreCase(tableSet.getString(3))); + 
ORDERS_JDBC.equalsIgnoreCase(tableSet.getString(3)), "table should be orders_jdbc"); } } @@ -643,7 +643,7 @@ public void testDBMetadata() throws Throwable { assertTrue(ORDERS_JDBC.equalsIgnoreCase(tableMetaDataResultSet.getString(3))); ++cnt; } - assertEquals("number of tables", 1, cnt); + assertEquals(1, cnt, "number of tables"); } // test pattern try (ResultSet tableMetaDataResultSet = @@ -667,7 +667,7 @@ public void testDBMetadata() throws Throwable { break; } } - assertTrue("orders_jdbc not found", found); + assertTrue(found, "orders_jdbc not found"); } // get column metadata @@ -734,7 +734,7 @@ public void testDBMetadata() throws Throwable { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testPutWithWildcardGCP() throws Throwable { Properties _connectionProperties = new Properties(); _connectionProperties.put("inject_wait_in_put", 5); @@ -748,21 +748,22 @@ public void testPutWithWildcardGCP() throws Throwable { // replace file name with wildcard character sourceFilePath = sourceFilePath.replace("orders_100.csv", "orders_10*.csv"); - File destFolder = tmpFolder.newFolder(); + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); String destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; statement.execute("alter session set ENABLE_GCP_PUT_EXCEPTION_FOR_OLD_DRIVERS=false"); statement.execute("CREATE OR REPLACE STAGE wildcard_stage"); assertTrue( - "Failed to put a file", - statement.execute("PUT file://" + sourceFilePath + " @wildcard_stage")); + statement.execute("PUT file://" + sourceFilePath + " @wildcard_stage"), + "Failed to put a file"); findFile(statement, "ls @wildcard_stage/"); assertTrue( - "Failed to get files", statement.execute( - "GET @wildcard_stage 'file://" + destFolderCanonicalPath + "' parallel=8")); + "GET @wildcard_stage 'file://" + destFolderCanonicalPath + "' parallel=8"), + "Failed to get files"); File downloaded; // download the files we just uploaded to stage @@ -808,23 +809,26 @@ private void copyContentFrom(File file1, File file2) throws Exception { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testPutGetLargeFileGCP() throws Throwable { try (Connection connection = getConnection("gcpaccount"); Statement statement = connection.createStatement()) { try { - File destFolder = tmpFolder.newFolder(); + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); String destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; - File largeTempFile = tmpFolder.newFile("largeFile.csv"); + File largeTempFile = new File(tmpFolder, "largeFile.csv"); + largeTempFile.createNewFile(); try (BufferedWriter bw = new BufferedWriter(new FileWriter(largeTempFile))) { bw.write("Creating large test file for GCP PUT/GET test"); bw.write(System.lineSeparator()); bw.write("Creating large test file for GCP PUT/GET test"); bw.write(System.lineSeparator()); } - File largeTempFile2 = tmpFolder.newFile("largeFile2.csv"); + File largeTempFile2 = new File(tmpFolder, "largeFile2.csv"); + largeTempFile2.createNewFile(); String sourceFilePath = largeTempFile.getCanonicalPath(); @@ -840,8 +844,8 @@ public void testPutGetLargeFileGCP() throws Throwable { // create a stage to put the file in statement.execute("CREATE OR REPLACE STAGE 
largefile_stage"); assertTrue( - "Failed to put a file", - statement.execute("PUT file://" + sourceFilePath + " @largefile_stage")); + statement.execute("PUT file://" + sourceFilePath + " @largefile_stage"), + "Failed to put a file"); // check that file exists in stage after PUT findFile(statement, "ls @largefile_stage/"); @@ -856,9 +860,9 @@ public void testPutGetLargeFileGCP() throws Throwable { // get file from new stage assertTrue( - "Failed to get files", statement.execute( - "GET @extra_stage 'file://" + destFolderCanonicalPath + "' parallel=8")); + "GET @extra_stage 'file://" + destFolderCanonicalPath + "' parallel=8"), + "Failed to get files"); // Make sure that the downloaded file exists; it should be gzip compressed File downloaded = new File(destFolderCanonicalPathWithSeparator + "bigFile.csv.gz"); @@ -885,15 +889,17 @@ public void testPutGetLargeFileGCP() throws Throwable { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testPutOverwrite() throws Throwable { // create 2 files: an original, and one that will overwrite the original - File file1 = tmpFolder.newFile("testfile.csv"); + File file1 = new File(tmpFolder, "testfile.csv"); + file1.createNewFile(); try (BufferedWriter bw = new BufferedWriter(new FileWriter(file1))) { bw.write("Writing original file content. This should get overwritten."); } - File file2 = tmpFolder2.newFile("testfile.csv"); + File file2 = new File(tmpFolder2, "testfile.csv"); + file2.createNewFile(); try (BufferedWriter bw = new BufferedWriter(new FileWriter(file2))) { bw.write("This is all new! This should be the result of the overwriting."); } @@ -901,7 +907,8 @@ public void testPutOverwrite() throws Throwable { String sourceFilePathOriginal = file1.getCanonicalPath(); String sourceFilePathOverwrite = file2.getCanonicalPath(); - File destFolder = tmpFolder.newFolder(); + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); String destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; @@ -915,25 +922,25 @@ public void testPutOverwrite() throws Throwable { // create a stage to put the file in statement.execute("CREATE OR REPLACE STAGE testing_stage"); assertTrue( - "Failed to put a file", - statement.execute("PUT file://" + sourceFilePathOriginal + " @testing_stage")); + statement.execute("PUT file://" + sourceFilePathOriginal + " @testing_stage"), + "Failed to put a file"); // check that file exists in stage after PUT findFile(statement, "ls @testing_stage/"); // put another file in same stage with same filename with overwrite = true assertTrue( - "Failed to put a file", statement.execute( - "PUT file://" + sourceFilePathOverwrite + " @testing_stage overwrite=true")); + "PUT file://" + sourceFilePathOverwrite + " @testing_stage overwrite=true"), + "Failed to put a file"); // check that file exists in stage after PUT findFile(statement, "ls @testing_stage/"); // get file from new stage assertTrue( - "Failed to get files", statement.execute( - "GET @testing_stage 'file://" + destFolderCanonicalPath + "' parallel=8")); + "GET @testing_stage 'file://" + destFolderCanonicalPath + "' parallel=8"), + "Failed to get files"); // Make sure that the downloaded file exists; it should be gzip compressed File downloaded = new File(destFolderCanonicalPathWithSeparator + "testfile.csv.gz"); @@ -955,7 +962,7 @@ public void testPutOverwrite() throws Throwable { } @Test - 
@ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testPut() throws Throwable { List accounts = Arrays.asList(null, "s3testaccount", "azureaccount", "gcpaccount"); @@ -971,11 +978,11 @@ public void testPut() throws Throwable { // put files assertTrue( - "Failed to put a file", statement.execute( "PUT file://" + getFullPathFileInResource(TEST_DATA_FILE) - + " @%testLoadToLocalFS/orders parallel=10")); + + " @%testLoadToLocalFS/orders parallel=10"), + "Failed to put a file"); try (ResultSet resultSet = statement.getResultSet()) { @@ -1042,16 +1049,16 @@ static void findFile(Statement statement, String checkSQL) throws Throwable { } // give enough time for s3 eventual consistency for US region Thread.sleep(1000); - assertTrue("Could not find a file", fileFound); + assertTrue(fileFound, "Could not find a file"); // assert the first column not null - assertNotNull("Null result", resultSet.getString(1)); + assertNotNull(resultSet.getString(1), "Null result"); } } } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testSQLError42S02() throws SQLException { try (Connection connection = getConnection(); @@ -1061,13 +1068,13 @@ public void testSQLError42S02() throws SQLException { fail("SQL exception not raised"); } catch (SQLException ex1) { // assert the sqlstate "42S02" which means BASE_TABLE_OR_VIEW_NOT_FOUND - assertEquals("sqlstate mismatch", "42S02", ex1.getSQLState()); + assertEquals("42S02", ex1.getSQLState(), "sqlstate mismatch"); } } } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testExplainPlan() throws Throwable { try (Connection connection = getConnection(); Statement statement = connection.createStatement(); @@ -1077,8 +1084,8 @@ public void testExplainPlan() throws Throwable { statement.executeQuery("EXPLAIN PLAN FOR SELECT c1 FROM orders_jdbc")) { ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); - assertTrue("must return more than 4 columns", resultSetMetaData.getColumnCount() >= 4); - assertTrue("must return more than 3 rows", countRows(resultSet) > 3); + assertTrue(resultSetMetaData.getColumnCount() >= 4, "must return more than 4 columns"); + assertTrue(countRows(resultSet) > 3, "must return more than 3 rows"); } } @@ -1175,7 +1182,7 @@ public void testUpdateCount() throws Throwable { int numRows = statement.executeUpdate("INSERT INTO testUpdateCount values (1, 'a'), (2, 'b')"); - assertEquals("Unexpected number of rows inserted: " + numRows, 2, numRows); + assertEquals(2, numRows, "Unexpected number of rows inserted: " + numRows); } finally { statement.execute("DROP TABLE if exists testUpdateCount"); } @@ -1202,7 +1209,7 @@ public void testSnow4245() throws Throwable { "insert into testSnow4245 values(NULL,NULL,NULL)," + "('2013-06-04 01:00:04','2013-06-04 01:00:04','2013-06-04 01:00:04')," + "('2013-06-05 23:00:05','2013-06-05 23:00:05','2013-06-05 23:00:05')"); - assertEquals("Unexpected number of rows inserted: " + numRows, 3, numRows); + assertEquals(3, numRows, "Unexpected number of rows inserted: " + numRows); // query the data try (ResultSet resultSet = @@ -1251,7 +1258,7 @@ public void testSnow4394() throws Throwable { int numRows = statement.executeUpdate( String.format("INSERT INTO %s(str) values('%s')", tableName, data)); - assertEquals("Unexpected number of rows inserted: " + numRows, 1, numRows); + assertEquals(1, numRows, 
"Unexpected number of rows inserted: " + numRows); try (ResultSet rset = statement.executeQuery(String.format("SELECT str FROM %s", tableName))) { @@ -1259,7 +1266,7 @@ public void testSnow4394() throws Throwable { while (rset.next()) { ret = rset.getString(1); } - assertEquals("Unexpected string value: " + ret, data, ret); + assertEquals(data, ret, "Unexpected string value: " + ret); } } finally { statement.execute(String.format("DROP TABLE if exists %s", tableName)); @@ -1318,8 +1325,8 @@ public void testBind() throws Throwable { // assert we get 1 rows assertTrue(resultSet.next()); - assertEquals("integer", 1, resultSet.getInt(1)); - assertEquals("string", "hello", resultSet.getString(2)); + assertEquals(1, resultSet.getInt(1), "integer"); + assertEquals("hello", resultSet.getString(2), "string"); } // bind float preparedStatement.setDouble(1, 1.2); @@ -1332,8 +1339,8 @@ public void testBind() throws Throwable { // assert we get 1 rows assertTrue(resultSet.next()); - assertEquals("double", 1.2, resultSet.getDouble(1), 0); - assertEquals("string", "hello", resultSet.getString(2)); + assertEquals(1.2, resultSet.getDouble(1), 0, "double"); + assertEquals("hello", resultSet.getString(2), "string"); } // bind string preparedStatement.setString(1, "hello"); @@ -1346,8 +1353,8 @@ public void testBind() throws Throwable { // assert we get 1 rows assertTrue(resultSet.next()); - assertEquals("string1", "hello", resultSet.getString(1)); - assertEquals("string2", "hello", resultSet.getString(2)); + assertEquals("hello", resultSet.getString(1), "string1"); + assertEquals("hello", resultSet.getString(2), "string2"); } // bind date sqlDate = java.sql.Date.valueOf("2014-08-26"); @@ -1361,8 +1368,8 @@ public void testBind() throws Throwable { // assert we get 1 rows assertTrue(resultSet.next()); - assertEquals("string", "2014-08-26", resultSet.getString(1)); - assertEquals("string", "hello", resultSet.getString(2)); + assertEquals("2014-08-26", resultSet.getString(1), "string"); + assertEquals("hello", resultSet.getString(2), "string"); } // bind timestamp ts = buildTimestamp(2014, 7, 26, 3, 52, 0, 0); @@ -1378,8 +1385,8 @@ public void testBind() throws Throwable { // assert we get 1 rows assertTrue(resultSet.next()); assertEquals( - "Incorrect timestamp", "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(1)); - assertEquals("string", "hello", resultSet.getString(2)); + "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(1), "Incorrect timestamp"); + assertEquals("hello", resultSet.getString(2), "string"); } // bind time tm = new Time(12345678); // 03:25:45.678 @@ -1393,8 +1400,8 @@ public void testBind() throws Throwable { // assert we get 1 rows assertTrue(resultSet.next()); - assertEquals("Incorrect time", "03:25:45", resultSet.getString(1)); - assertEquals("string", "hello", resultSet.getString(2)); + assertEquals("03:25:45", resultSet.getString(1), "Incorrect time"); + assertEquals("hello", resultSet.getString(2), "string"); } } // bind in where clause @@ -1412,8 +1419,8 @@ public void testBind() throws Throwable { // assert we get 1 rows assertTrue(resultSet.next()); - assertEquals("c1", "100", resultSet.getString(1)); - assertEquals("c2", "147004", resultSet.getString(2)); + assertEquals("100", resultSet.getString(1), "c1"); + assertEquals("147004", resultSet.getString(2), "c2"); } } @@ -1437,20 +1444,20 @@ public void testBind() throws Throwable { int rowCount = preparedStatement.executeUpdate(); // update count should be 1 - assertEquals("update count", 1, rowCount); + assertEquals(1, 
rowCount, "update count"); // test the inserted rows try (ResultSet resultSet = regularStatement.executeQuery("select * from testBind")) { // assert we get 1 rows assertTrue(resultSet.next()); - assertEquals("int", 1, resultSet.getInt(1)); - assertEquals("string", "hello", resultSet.getString(2)); - assertEquals("double", 1.2, resultSet.getDouble(3), 0); - assertEquals("date", "2014-08-26", resultSet.getString(4)); - assertEquals("timestamp", "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(5)); - assertEquals("time", "03:25:45", resultSet.getString(6)); - assertNull("date", resultSet.getString(7)); + assertEquals(1, resultSet.getInt(1), "int"); + assertEquals("hello", resultSet.getString(2), "string"); + assertEquals(1.2, resultSet.getDouble(3), 0, "double"); + assertEquals("2014-08-26", resultSet.getString(4), "date"); + assertEquals("Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(5), "timestamp"); + assertEquals("03:25:45", resultSet.getString(6), "time"); + assertNull(resultSet.getString(7), "date"); } } // bind in update statement @@ -1465,13 +1472,13 @@ public void testBind() throws Throwable { try (ResultSet resultSet = regularStatement.executeQuery("select * from testBind")) { // assert we get 1 rows assertTrue(resultSet.next()); - assertEquals("int", 1, resultSet.getInt(1)); - assertEquals("string", "world", resultSet.getString(2)); - assertEquals("double", 1.2, resultSet.getDouble(3), 0); - assertEquals("date", "2014-08-26", resultSet.getString(4)); - assertEquals("timestamp", "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(5)); - assertEquals("time", "03:25:45", resultSet.getString(6)); - assertNull("date", resultSet.getString(7)); + assertEquals(1, resultSet.getInt(1), "int"); + assertEquals("world", resultSet.getString(2), "string"); + assertEquals(1.2, resultSet.getDouble(3), 0, "double"); + assertEquals("2014-08-26", resultSet.getString(4), "date"); + assertEquals("Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(5), "timestamp"); + assertEquals("03:25:45", resultSet.getString(6), "time"); + assertNull(resultSet.getString(7), "date"); } // array bind for insert try (PreparedStatement preparedStatement = @@ -1499,11 +1506,11 @@ public void testBind() throws Throwable { // GS optimizes this into one insert execution, but we expand the // return count into an array - assertEquals("Number of update counts", 2, updateCounts.length); + assertEquals(2, updateCounts.length, "Number of update counts"); // update count should be 1 for each - assertEquals("update count", 1, updateCounts[0]); - assertEquals("update count", 1, updateCounts[1]); + assertEquals(1, updateCounts[0], "update count"); + assertEquals(1, updateCounts[1], "update count"); } // test the inserted rows try (ResultSet resultSet = @@ -1511,12 +1518,12 @@ public void testBind() throws Throwable { // assert we get 1 rows assertTrue(resultSet.next()); - assertEquals("int", 2, resultSet.getInt(1)); - assertEquals("string", "hello", resultSet.getString(2)); - assertEquals("double", 1.2, resultSet.getDouble(3), 0); - assertEquals("date", "2014-08-26", resultSet.getString(4)); - assertEquals("timestamp", "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(5)); - assertEquals("time", "03:25:45", resultSet.getString(6)); + assertEquals(2, resultSet.getInt(1), "int"); + assertEquals("hello", resultSet.getString(2), "string"); + assertEquals(1.2, resultSet.getDouble(3), 0, "double"); + assertEquals("2014-08-26", resultSet.getString(4), "date"); + assertEquals("Mon, 25 Aug 2014 20:52:00 -0700", 
resultSet.getString(5), "timestamp"); + assertEquals("03:25:45", resultSet.getString(6), "time"); } try (ResultSet resultSet = @@ -1524,12 +1531,12 @@ public void testBind() throws Throwable { // assert we get 1 rows assertTrue(resultSet.next()); - assertEquals("int", 3, resultSet.getInt(1)); - assertEquals("string", "hello", resultSet.getString(2)); - assertEquals("double", 1.2, resultSet.getDouble(3), 0); - assertEquals("date", "2014-08-26", resultSet.getString(4)); - assertEquals("timestamp", "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(5)); - assertEquals("time", "03:25:45", resultSet.getString(6)); + assertEquals(3, resultSet.getInt(1), "int"); + assertEquals("hello", resultSet.getString(2), "string"); + assertEquals(1.2, resultSet.getDouble(3), 0, "double"); + assertEquals("2014-08-26", resultSet.getString(4), "date"); + assertEquals("Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(5), "timestamp"); + assertEquals("03:25:45", resultSet.getString(6), "time"); } // describe mode @@ -1620,10 +1627,10 @@ public void testBind() throws Throwable { updateCounts = preparedStatement.executeBatch(); // GS optimizes this into one insert execution - assertEquals("Number of update counts", 16, updateCounts.length); + assertEquals(16, updateCounts.length, "Number of update counts"); for (int idx = 0; idx < 16; idx++) { - assertEquals("update count", 1, updateCounts[idx]); + assertEquals(1, updateCounts[idx], "update count"); } } } @@ -1798,7 +1805,7 @@ public void testBindTimestampNTZ() throws Throwable { int updateCount = preparedStatement.executeUpdate(); // update count should be 1 - assertEquals("update count", 1, updateCount); + assertEquals(1, updateCount, "update count"); // test the inserted rows try (ResultSet resultSet = @@ -1806,7 +1813,7 @@ public void testBindTimestampNTZ() throws Throwable { // assert we get 1 rows assertTrue(resultSet.next()); - assertEquals("timestamp", "Tue, 26 Aug 2014 03:52:00 Z", resultSet.getString(1)); + assertEquals("Tue, 26 Aug 2014 03:52:00 Z", resultSet.getString(1), "timestamp"); regularStatement.executeUpdate("truncate table testBindTimestampNTZ"); @@ -1816,7 +1823,7 @@ public void testBindTimestampNTZ() throws Throwable { updateCount = preparedStatement.executeUpdate(); // update count should be 1 - assertEquals("update count", 1, updateCount); + assertEquals(1, updateCount, "update count"); } // test the inserted rows try (ResultSet resultSet = @@ -1852,11 +1859,11 @@ public void testNullBind() throws Throwable { int[] updateCounts = preparedStatement.executeBatch(); // GS optimizes this into one insert execution - assertEquals("Number of update counts", 2, updateCounts.length); + assertEquals(2, updateCounts.length, "Number of update counts"); // update count should be 1 - assertEquals("update count", 1, updateCounts[0]); - assertEquals("update count", 1, updateCounts[1]); + assertEquals(1, updateCounts[0], "update count"); + assertEquals(1, updateCounts[1], "update count"); preparedStatement.clearBatch(); @@ -1869,11 +1876,11 @@ public void testNullBind() throws Throwable { updateCounts = preparedStatement.executeBatch(); // GS optimizes this into one insert execution - assertEquals("Number of update counts", 2, updateCounts.length); + assertEquals(2, updateCounts.length, "Number of update counts"); // update count should be 1 - assertEquals("update count", 1, updateCounts[0]); - assertEquals("update count", 1, updateCounts[1]); + assertEquals(1, updateCounts[0], "update count"); + assertEquals(1, updateCounts[1], "update count"); 
preparedStatement.clearBatch(); @@ -1883,10 +1890,10 @@ public void testNullBind() throws Throwable { updateCounts = preparedStatement.executeBatch(); // GS optimizes this into one insert execution - assertEquals("Number of update counts", 1, updateCounts.length); + assertEquals(1, updateCounts.length, "Number of update counts"); // update count should be 1 - assertEquals("update count", 1, updateCounts[0]); + assertEquals(1, updateCounts[0], "update count"); preparedStatement.clearBatch(); @@ -1956,12 +1963,12 @@ public void testSnow12603() throws Throwable { // assert we get 1 rows assertTrue(resultSet.next()); - assertEquals("integer", 1, resultSet.getInt(1)); - assertEquals("string", "hello", resultSet.getString(2)); - assertEquals("decimal", new BigDecimal("1.3"), resultSet.getBigDecimal(3)); - assertEquals("double", 1.3, resultSet.getDouble(4), 0); - assertEquals("date", "2014-08-26", resultSet.getString(5)); - assertEquals("timestamp", "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(6)); + assertEquals(1, resultSet.getInt(1), "integer"); + assertEquals("hello", resultSet.getString(2), "string"); + assertEquals(new BigDecimal("1.3"), resultSet.getBigDecimal(3), "decimal"); + assertEquals(1.3, resultSet.getDouble(4), 0, "double"); + assertEquals("2014-08-26", resultSet.getString(5), "date"); + assertEquals("Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(6), "timestamp"); preparedStatement.setObject(1, 1, Types.INTEGER); preparedStatement.setObject(2, "hello", Types.VARCHAR); @@ -1986,12 +1993,12 @@ public void testSnow12603() throws Throwable { // assert we get 1 rows assertTrue(resultSet.next()); - assertEquals("integer", 1, resultSet.getInt(1)); - assertEquals("string", "hello", resultSet.getString(2)); - assertEquals("decimal", new BigDecimal("1.3"), resultSet.getBigDecimal(3)); - assertEquals("double", 1.3, resultSet.getDouble(4), 0); - assertEquals("date", "2014-08-26", resultSet.getString(5)); - assertEquals("timestamp", "Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(6)); + assertEquals(1, resultSet.getInt(1), "integer"); + assertEquals("hello", resultSet.getString(2), "string"); + assertEquals(new BigDecimal("1.3"), resultSet.getBigDecimal(3), "decimal"); + assertEquals(1.3, resultSet.getDouble(4), 0, "double"); + assertEquals("2014-08-26", resultSet.getString(5), "date"); + assertEquals("Mon, 25 Aug 2014 20:52:00 -0700", resultSet.getString(6), "timestamp"); } } } @@ -2016,11 +2023,11 @@ public void testSnow6290() throws Throwable { ResultSet res = statement.executeQuery("select ts from testSnow6290"); - assertTrue("expect a row", res.next()); + assertTrue(res.next(), "expect a row"); Timestamp tsFromDB = res.getTimestamp(1); - assertEquals("timestamp mismatch", ts.getTime(), tsFromDB.getTime()); + assertEquals(ts.getTime(), tsFromDB.getTime(), "timestamp mismatch"); } finally { statement.execute("DROP TABLE if exists testSnow6290"); } @@ -2056,28 +2063,28 @@ public void testGetObject() throws Throwable { resultSetMetaData = resultSet.getMetaData(); assertEquals( - "column class name=BigDecimal", Long.class.getName(), - resultSetMetaData.getColumnClassName(1)); + resultSetMetaData.getColumnClassName(1), + "column class name=BigDecimal"); // assert we get 1 rows assertTrue(resultSet.next()); - assertTrue("integer", resultSet.getObject(1) instanceof Long); + assertTrue(resultSet.getObject(1) instanceof Long, "integer"); } preparedStatement.setString(1, "hello"); try (ResultSet resultSet = preparedStatement.executeQuery()) { resultSetMetaData = 
resultSet.getMetaData(); assertEquals( - "column class name=String", String.class.getName(), - resultSetMetaData.getColumnClassName(1)); + resultSetMetaData.getColumnClassName(1), + "column class name=String"); // assert we get 1 rows assertTrue(resultSet.next()); - assertTrue("string", resultSet.getObject(1) instanceof String); + assertTrue(resultSet.getObject(1) instanceof String, "string"); } preparedStatement.setDouble(1, 1.2); @@ -2086,14 +2093,14 @@ public void testGetObject() throws Throwable { resultSetMetaData = resultSet.getMetaData(); assertEquals( - "column class name=Double", Double.class.getName(), - resultSetMetaData.getColumnClassName(1)); + resultSetMetaData.getColumnClassName(1), + "column class name=Double"); // assert we get 1 rows assertTrue(resultSet.next()); - assertTrue("double", resultSet.getObject(1) instanceof Double); + assertTrue(resultSet.getObject(1) instanceof Double, "double"); } preparedStatement.setTimestamp(1, new Timestamp(0)); @@ -2102,14 +2109,14 @@ public void testGetObject() throws Throwable { resultSetMetaData = resultSet.getMetaData(); assertEquals( - "column class name=Timestamp", Timestamp.class.getName(), - resultSetMetaData.getColumnClassName(1)); + resultSetMetaData.getColumnClassName(1), + "column class name=Timestamp"); // assert we get 1 rows assertTrue(resultSet.next()); - assertTrue("timestamp", resultSet.getObject(1) instanceof Timestamp); + assertTrue(resultSet.getObject(1) instanceof Timestamp, "timestamp"); } preparedStatement.setDate(1, new java.sql.Date(0)); @@ -2117,14 +2124,14 @@ public void testGetObject() throws Throwable { resultSetMetaData = resultSet.getMetaData(); assertEquals( - "column class name=Date", - java.sql.Date.class.getName(), - resultSetMetaData.getColumnClassName(1)); + Date.class.getName(), + resultSetMetaData.getColumnClassName(1), + "column class name=Date"); // assert we get 1 rows assertTrue(resultSet.next()); - assertTrue("date", resultSet.getObject(1) instanceof java.sql.Date); + assertTrue(resultSet.getObject(1) instanceof Date, "date"); } } } @@ -2135,7 +2142,7 @@ public void testGetDoubleForNull() throws Throwable { Statement stmt = connection.createStatement(); ResultSet resultSet = stmt.executeQuery("select cast(null as int) as null_int")) { assertTrue(resultSet.next()); - assertEquals("0 for null", 0, resultSet.getDouble(1), 0.0001); + assertEquals(0, resultSet.getDouble(1), 0.0001, "0 for null"); } } @@ -2182,7 +2189,7 @@ public void testPutViaExecuteQuery() throws Throwable { } } - @Ignore("takes 7 min. enable this for long running tests") + @Disabled("takes 7 min. 
enable this for long running tests") @Test public void testSnow16332() throws Throwable { // use v1 query request API and inject 200ms socket timeout for first @@ -2292,7 +2299,7 @@ public void run() { fail("should be canceled"); } catch (SQLException ex) { // assert the sqlstate is what we expect (QUERY CANCELLED) - assertEquals("sqlstate mismatch", SqlState.QUERY_CANCELED, ex.getSQLState()); + assertEquals(SqlState.QUERY_CANCELED, ex.getSQLState(), "sqlstate mismatch"); } } } @@ -2329,7 +2336,7 @@ public void testSnow14774() throws Throwable { tsStrInLA = sdf.format(tsInLA); // the timestamp in LA and in UTC should be the same - assertEquals("timestamp values not equal", tsStrInUTC, tsStrInLA); + assertEquals(tsStrInUTC, tsStrInLA, "timestamp values not equal"); } // 30 minutes before daylight saving change try (ResultSet res = statement.executeQuery("select '2015-03-08 01:30:00'::timestamp_ntz")) { @@ -2351,7 +2358,7 @@ public void testSnow14774() throws Throwable { tsStrInLA = sdf.format(tsInLA); // the timestamp in LA and in UTC should be the same - assertEquals("timestamp values not equal", tsStrInUTC, tsStrInLA); + assertEquals(tsStrInUTC, tsStrInLA, "timestamp values not equal"); } } } @@ -2416,7 +2423,7 @@ public void testSnow19819() throws Throwable { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnTestaccount.class) + @DontRunOnTestaccount public void testClientInfo() throws Throwable { System.setProperty( "snowflake.client.info", @@ -2425,22 +2432,22 @@ public void testClientInfo() throws Throwable { Statement statement = connection.createStatement(); ResultSet res = statement.executeQuery("select current_session_client_info()")) { - assertTrue("result expected", res.next()); + assertTrue(res.next(), "result expected"); String clientInfoJSONStr = res.getString(1); JsonNode clientInfoJSON = mapper.readTree(clientInfoJSONStr); // assert that spark version and spark app are found - assertEquals("spark version mismatch", "3.0.0", clientInfoJSON.get("spark.version").asText()); + assertEquals("3.0.0", clientInfoJSON.get("spark.version").asText(), "spark version mismatch"); assertEquals( - "snowflakedb version mismatch", "2.8.5", - clientInfoJSON.get("spark.snowflakedb.version").asText()); + clientInfoJSON.get("spark.snowflakedb.version").asText(), + "snowflakedb version mismatch"); assertEquals( - "spark app mismatch", "SnowflakeSourceSuite", - clientInfoJSON.get("spark.app.name").asText()); + clientInfoJSON.get("spark.app.name").asText(), + "spark app mismatch"); closeSQLObjects(res, statement, connection); } @@ -2466,7 +2473,7 @@ public void testLargeResultSet() throws Throwable { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testSnow26503() throws Throwable { ResultSetMetaData resultSetMetaData; String queryId = null; @@ -2630,7 +2637,7 @@ public void testSnow31104() throws Throwable { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testPutGet() throws Throwable { List accounts = Arrays.asList(null, "s3testaccount", "azureaccount", "gcpaccount"); @@ -2640,7 +2647,8 @@ public void testPutGet() throws Throwable { try { String sourceFilePath = getFullPathFileInResource(TEST_DATA_FILE); - File destFolder = tmpFolder.newFolder(); + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); String 
destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; @@ -2648,16 +2656,16 @@ public void testPutGet() throws Throwable { statement.execute("CREATE OR REPLACE STAGE testPutGet_stage"); assertTrue( - "Failed to put a file", - statement.execute("PUT file://" + sourceFilePath + " @testPutGet_stage")); + statement.execute("PUT file://" + sourceFilePath + " @testPutGet_stage"), + "Failed to put a file"); findFile(statement, "ls @testPutGet_stage/"); // download the file we just uploaded to stage assertTrue( - "Failed to get a file", statement.execute( - "GET @testPutGet_stage 'file://" + destFolderCanonicalPath + "' parallel=8")); + "GET @testPutGet_stage 'file://" + destFolderCanonicalPath + "' parallel=8"), + "Failed to get a file"); // Make sure that the downloaded file exists, it should be gzip compressed File downloaded = new File(destFolderCanonicalPathWithSeparator + TEST_DATA_FILE + ".gz"); @@ -2685,7 +2693,7 @@ public void testPutGet() throws Throwable { * @throws Throwable */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testPutGetToUnencryptedStage() throws Throwable { List accounts = Arrays.asList(null, "s3testaccount", "azureaccount", "gcpaccount"); @@ -2695,7 +2703,8 @@ public void testPutGetToUnencryptedStage() throws Throwable { try { String sourceFilePath = getFullPathFileInResource(TEST_DATA_FILE); - File destFolder = tmpFolder.newFolder(); + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); String destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; @@ -2705,18 +2714,16 @@ public void testPutGetToUnencryptedStage() throws Throwable { "CREATE OR REPLACE STAGE testPutGet_unencstage encryption=(TYPE='SNOWFLAKE_SSE')"); assertTrue( - "Failed to put a file", - statement.execute("PUT file://" + sourceFilePath + " @testPutGet_unencstage")); + statement.execute("PUT file://" + sourceFilePath + " @testPutGet_unencstage"), + "Failed to put a file"); findFile(statement, "ls @testPutGet_unencstage/"); // download the file we just uploaded to stage assertTrue( - "Failed to get a file", statement.execute( - "GET @testPutGet_unencstage 'file://" - + destFolderCanonicalPath - + "' parallel=8")); + "GET @testPutGet_unencstage 'file://" + destFolderCanonicalPath + "' parallel=8"), + "Failed to get a file"); // Make sure that the downloaded file exists, it should be gzip compressed File downloaded = new File(destFolderCanonicalPathWithSeparator + TEST_DATA_FILE + ".gz"); @@ -2738,15 +2745,15 @@ public void testPutGetToUnencryptedStage() throws Throwable { } /** Prepare statement will fail if the connection is already closed. 
*/ - @Test(expected = SQLException.class) - public void testNotClosedSession() throws Throwable { + @Test + public void testNotClosedSession() throws SQLException { Connection connection = getConnection(); connection.close(); - connection.prepareStatement("select 1"); + assertThrows(SnowflakeSQLException.class, () -> connection.prepareStatement("select 1")); } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testToTimestampNullBind() throws Throwable { try (Connection connection = getConnection(); PreparedStatement preparedStatement = diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverLatestIT.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverLatestIT.java index 91052fd7c..fde744f15 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverLatestIT.java @@ -7,10 +7,11 @@ import static net.snowflake.client.jdbc.SnowflakeDriver.implementVersion; import static net.snowflake.client.jdbc.SnowflakeDriverIT.findFile; import static net.snowflake.client.jdbc.SnowflakeResultSetSerializableV1.mapper; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import com.fasterxml.jackson.databind.JsonNode; import com.google.cloud.storage.StorageException; @@ -39,11 +40,10 @@ import java.util.Properties; import java.util.UUID; import java.util.zip.GZIPInputStream; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.RunningOnTestaccount; import net.snowflake.client.TestUtil; -import net.snowflake.client.category.TestCategoryOthers; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.annotations.DontRunOnTestaccount; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.Constants; import net.snowflake.client.core.OCSPMode; import net.snowflake.client.core.SFSession; @@ -58,11 +58,10 @@ import net.snowflake.common.core.SqlState; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; -import org.junit.Ignore; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; /** * General JDBC tests for the latest JDBC driver. This doesn't work for the oldest supported driver. @@ -70,10 +69,10 @@ * is not applicable. If it is applicable, move tests to SnowflakeDriverIT so that both the latest * and oldest supported driver run the tests. 
*/ -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class SnowflakeDriverLatestIT extends BaseJDBCTest { - @Rule public TemporaryFolder tmpFolder = new TemporaryFolder(); - @Rule public TemporaryFolder tmpFolder2 = new TemporaryFolder(); + @TempDir private File tmpFolder; + @TempDir private File tmpFolder2; public String testStageName = String.format("test_stage_%s", UUID.randomUUID().toString()).replaceAll("-", "_"); @@ -105,7 +104,7 @@ public void testStaticVersionMatchesManifest() { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnTestaccount.class) + @DontRunOnTestaccount public void testClientInfoConnectionProperty() throws Throwable { String clientInfoJSONStr = null; JsonNode clientInfoJSON = null; @@ -122,11 +121,11 @@ public void testClientInfoConnectionProperty() throws Throwable { clientInfoJSONStr = res.getString(1); clientInfoJSON = mapper.readTree(clientInfoJSONStr); // assert that spart version and spark app are found - assertEquals("spark version mismatch", "3.0.0", clientInfoJSON.get("spark.version").asText()); + assertEquals("3.0.0", clientInfoJSON.get("spark.version").asText(), "spark version mismatch"); assertEquals( - "spark app mismatch", "SnowflakeSourceSuite", - clientInfoJSON.get("spark.app.name").asText()); + clientInfoJSON.get("spark.app.name").asText(), + "spark app mismatch"); } // Test that when session property is set, connection parameter overrides it @@ -142,11 +141,11 @@ public void testClientInfoConnectionProperty() throws Throwable { clientInfoJSONStr = res.getString(1); clientInfoJSON = mapper.readTree(clientInfoJSONStr); // assert that spart version and spark app are found - assertEquals("spark version mismatch", "3.0.0", clientInfoJSON.get("spark.version").asText()); + assertEquals("3.0.0", clientInfoJSON.get("spark.version").asText(), "spark version mismatch"); assertEquals( - "spark app mismatch", "SnowflakeSourceSuite", - clientInfoJSON.get("spark.app.name").asText()); + clientInfoJSON.get("spark.app.name").asText(), + "spark app mismatch"); } System.clearProperty("snowflake.client.info"); } @@ -163,7 +162,7 @@ public void testGetSessionID() throws Throwable { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testPutThreshold() throws SQLException { try (Connection connection = getConnection()) { // assert that threshold equals default 200 from server side @@ -202,9 +201,10 @@ public void testPutThreshold() throws SQLException { /** Test API for Spark connector for FileTransferMetadata */ @Test - @Ignore + @Disabled public void testGCPFileTransferMetadataWithOneFile() throws Throwable { - File destFolder = tmpFolder.newFolder(); + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); try (Connection connection = getConnection("gcpaccount"); @@ -266,9 +266,9 @@ public void testGCPFileTransferMetadataWithOneFile() throws Throwable { // Download two files and verify their content. 
assertTrue( - "Failed to get files", statement.execute( - "GET @" + testStageName + " 'file://" + destFolderCanonicalPath + "/' parallel=8")); + "GET @" + testStageName + " 'file://" + destFolderCanonicalPath + "/' parallel=8"), + "Failed to get files"); // Make sure that the downloaded files are EQUAL, // they should be gzip compressed @@ -284,9 +284,10 @@ public void testGCPFileTransferMetadataWithOneFile() throws Throwable { /** Test API for Kafka connector for FileTransferMetadata */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testAzureS3FileTransferMetadataWithOneFile() throws Throwable { - File destFolder = tmpFolder.newFolder(); + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); List supportedAccounts = Arrays.asList("s3testaccount", "azureaccount"); @@ -353,13 +354,13 @@ public void testAzureS3FileTransferMetadataWithOneFile() throws Throwable { // Download two files and verify their content. assertTrue( - "Failed to get files", statement.execute( "GET @" + testStageName + " 'file://" + destFolderCanonicalPath - + "/' parallel=8")); + + "/' parallel=8"), + "Failed to get files"); // Make sure that the downloaded files are EQUAL, // they should be gzip compressed @@ -376,7 +377,7 @@ public void testAzureS3FileTransferMetadataWithOneFile() throws Throwable { /** Negative test for FileTransferMetadata. It is only supported for PUT. */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGCPFileTransferMetadataNegativeOnlySupportPut() throws Throwable { int expectExceptionCount = 1; int actualExceptionCount = -1; @@ -392,7 +393,8 @@ public void testGCPFileTransferMetadataNegativeOnlySupportPut() throws Throwable SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); - File destFolder = tmpFolder.newFolder(); + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); String getCommand = "get @" + testStageName + " file://" + destFolderCanonicalPath; @@ -487,23 +489,26 @@ public void testGetPropertyInfo() throws SQLException { * @throws Throwable */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testPutOverwriteFalseNoDigest() throws Throwable { // create 2 files: an original, and one that will overwrite the original - File file1 = tmpFolder.newFile("testfile.csv"); + File file1 = new File(tmpFolder, "testfile.csv"); + file1.createNewFile(); try (BufferedWriter bw = new BufferedWriter(new FileWriter(file1))) { bw.write("Writing original file content. This should get overwritten."); } - File file2 = tmpFolder2.newFile("testfile.csv"); + File file2 = new File(tmpFolder2, "testfile.csv"); + file2.createNewFile(); try (BufferedWriter bw = new BufferedWriter(new FileWriter(file2))) { bw.write("This is all new! 
This should be the result of the overwriting."); } String sourceFilePathOriginal = file1.getCanonicalPath(); String sourceFilePathOverwrite = file2.getCanonicalPath(); - File destFolder = tmpFolder.newFolder(); + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); String destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; @@ -518,25 +523,25 @@ public void testPutOverwriteFalseNoDigest() throws Throwable { // create a stage to put the file in statement.execute("CREATE OR REPLACE STAGE testing_stage"); assertTrue( - "Failed to put a file", - statement.execute("PUT file://" + sourceFilePathOriginal + " @testing_stage")); + statement.execute("PUT file://" + sourceFilePathOriginal + " @testing_stage"), + "Failed to put a file"); // check that file exists in stage after PUT findFile(statement, "ls @testing_stage/"); // put another file in same stage with same filename with overwrite = true assertTrue( - "Failed to put a file", statement.execute( - "PUT file://" + sourceFilePathOverwrite + " @testing_stage overwrite=false")); + "PUT file://" + sourceFilePathOverwrite + " @testing_stage overwrite=false"), + "Failed to put a file"); // check that file exists in stage after PUT findFile(statement, "ls @testing_stage/"); // get file from new stage assertTrue( - "Failed to get files", statement.execute( - "GET @testing_stage 'file://" + destFolderCanonicalPath + "' parallel=8")); + "GET @testing_stage 'file://" + destFolderCanonicalPath + "' parallel=8"), + "Failed to get files"); // Make sure that the downloaded file exists; it should be gzip compressed File downloaded = new File(destFolderCanonicalPathWithSeparator + "testfile.csv.gz"); @@ -564,11 +569,12 @@ public void testPutOverwriteFalseNoDigest() throws Throwable { * @throws Throwable */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testPutDisable() throws Throwable { // create a file - File file = tmpFolder.newFile("testfile99.csv"); + File file = new File(tmpFolder, "testfile99.csv"); + file.createNewFile(); try (BufferedWriter bw = new BufferedWriter(new FileWriter(file))) { bw.write("This content won't be uploaded as PUT is disabled."); } @@ -584,7 +590,7 @@ public void testPutDisable() throws Throwable { Statement statement = connection.createStatement()) { statement.execute("PUT file://" + sourceFilePathOriginal + " @testPutGet_disable_stage"); - assertTrue("Shouldn't come here", false); + assertTrue(false, "Shouldn't come here"); } catch (Exception ex) { // Expected assertTrue(ex.getMessage().equalsIgnoreCase("File transfers have been disabled.")); @@ -598,11 +604,12 @@ public void testPutDisable() throws Throwable { * @throws Throwable */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGetDisable() throws Throwable { // create a folder - File destFolder = tmpFolder.newFolder(); + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); Properties paramProperties = new Properties(); @@ -616,7 +623,7 @@ public void testGetDisable() throws Throwable { statement.execute( "GET @testPutGet_disable_stage 'file://" + destFolderCanonicalPath + "' parallel=8"); - assertTrue("Shouldn't come here", false); + assertTrue(false, "Shouldn't come here"); } catch (Exception ex) { // Expected 
assertTrue(ex.getMessage().equalsIgnoreCase("File transfers have been disabled.")); @@ -798,7 +805,7 @@ public void testSnow76376() throws Throwable { * @throws Throwable */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGeoOutputTypes() throws Throwable { Properties paramProperties = new Properties(); @@ -862,7 +869,7 @@ private void testGeoOutputTypeSingle( } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGeoMetadata() throws Throwable { Properties paramProperties = new Properties(); @@ -913,7 +920,7 @@ private void testGeoMetadataSingle( } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGeometryOutputTypes() throws Throwable { Properties paramProperties = new Properties(); @@ -967,7 +974,7 @@ private void testGeometryOutputTypeSingle( } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testGeometryMetadata() throws Throwable { Properties paramProperties = new Properties(); @@ -1015,7 +1022,7 @@ private void testGeometryMetadataSingle( * @throws Throwable */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testPutGetGcsDownscopedCredential() throws Throwable { Properties paramProperties = new Properties(); paramProperties.put("GCS_USE_DOWNSCOPED_CREDENTIAL", true); @@ -1027,7 +1034,7 @@ public void testPutGetGcsDownscopedCredential() throws Throwable { /** Added in > 3.15.0 */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testPutGetGcsDownscopedCredentialWithDisabledDefaultCredentials() throws Throwable { Properties paramProperties = new Properties(); paramProperties.put("GCS_USE_DOWNSCOPED_CREDENTIAL", true); @@ -1041,7 +1048,8 @@ public void testPutGetGcsDownscopedCredentialWithDisabledDefaultCredentials() th private void putAndGetFile(Statement statement) throws Throwable { String sourceFilePath = getFullPathFileInResource(TEST_DATA_FILE_2); - File destFolder = tmpFolder.newFolder(); + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); String destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; @@ -1049,16 +1057,16 @@ private void putAndGetFile(Statement statement) throws Throwable { statement.execute("CREATE OR REPLACE STAGE testPutGet_stage"); assertTrue( - "Failed to put a file", - statement.execute("PUT file://" + sourceFilePath + " @testPutGet_stage")); + statement.execute("PUT file://" + sourceFilePath + " @testPutGet_stage"), + "Failed to put a file"); findFile(statement, "ls @testPutGet_stage/"); // download the file we just uploaded to stage assertTrue( - "Failed to get a file", statement.execute( - "GET @testPutGet_stage 'file://" + destFolderCanonicalPath + "' parallel=8")); + "GET @testPutGet_stage 'file://" + destFolderCanonicalPath + "' parallel=8"), + "Failed to get a file"); // Make sure that the downloaded file exists, it should be gzip compressed File downloaded = new File(destFolderCanonicalPathWithSeparator + TEST_DATA_FILE_2 + ".gz"); @@ -1088,25 +1096,28 @@ private void putAndGetFile(Statement statement) throws Throwable { * @throws Throwable */ @Test - 
@ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testPutGetLargeFileGCSDownscopedCredential() throws Throwable { Properties paramProperties = new Properties(); paramProperties.put("GCS_USE_DOWNSCOPED_CREDENTIAL", true); try (Connection connection = getConnection("gcpaccount", paramProperties); Statement statement = connection.createStatement()) { try { - File destFolder = tmpFolder.newFolder(); + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); String destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; - File largeTempFile = tmpFolder.newFile("largeFile.csv"); + File largeTempFile = new File(tmpFolder, "largeFile.csv"); + largeTempFile.createNewFile(); try (BufferedWriter bw = new BufferedWriter(new FileWriter(largeTempFile))) { bw.write("Creating large test file for GCP PUT/GET test"); bw.write(System.lineSeparator()); bw.write("Creating large test file for GCP PUT/GET test"); bw.write(System.lineSeparator()); } - File largeTempFile2 = tmpFolder.newFile("largeFile2.csv"); + File largeTempFile2 = new File(tmpFolder, "largeFile2.csv"); + largeTempFile2.createNewFile(); String sourceFilePath = largeTempFile.getCanonicalPath(); @@ -1120,8 +1131,8 @@ public void testPutGetLargeFileGCSDownscopedCredential() throws Throwable { // create a stage to put the file in statement.execute("CREATE OR REPLACE STAGE largefile_stage"); assertTrue( - "Failed to put a file", - statement.execute("PUT file://" + sourceFilePath + " @largefile_stage")); + statement.execute("PUT file://" + sourceFilePath + " @largefile_stage"), + "Failed to put a file"); // check that file exists in stage after PUT findFile(statement, "ls @largefile_stage/"); @@ -1136,9 +1147,9 @@ public void testPutGetLargeFileGCSDownscopedCredential() throws Throwable { // get file from new stage assertTrue( - "Failed to get files", statement.execute( - "GET @extra_stage 'file://" + destFolderCanonicalPath + "' parallel=8")); + "GET @extra_stage 'file://" + destFolderCanonicalPath + "' parallel=8"), + "Failed to get files"); // Make sure that the downloaded file exists; it should be gzip compressed File downloaded = new File(destFolderCanonicalPathWithSeparator + "bigFile.csv.gz"); @@ -1165,24 +1176,27 @@ public void testPutGetLargeFileGCSDownscopedCredential() throws Throwable { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testPutGetLargeFileAzure() throws Throwable { Properties paramProperties = new Properties(); try (Connection connection = getConnection("azureaccount", paramProperties); Statement statement = connection.createStatement()) { try { - File destFolder = tmpFolder.newFolder(); + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); String destFolderCanonicalPathWithSeparator = destFolderCanonicalPath + File.separator; - File largeTempFile = tmpFolder.newFile("largeFile.csv"); + File largeTempFile = new File(tmpFolder, "largeFile.csv"); + largeTempFile.createNewFile(); try (BufferedWriter bw = new BufferedWriter(new FileWriter(largeTempFile))) { bw.write("Creating large test file for Azure PUT/GET test"); bw.write(System.lineSeparator()); bw.write("Creating large test file for Azure PUT/GET test"); bw.write(System.lineSeparator()); } - File largeTempFile2 = 
tmpFolder.newFile("largeFile2.csv"); + File largeTempFile2 = new File(tmpFolder, "largeFile2.csv"); + largeTempFile2.createNewFile(); String sourceFilePath = largeTempFile.getCanonicalPath(); @@ -1196,8 +1210,8 @@ public void testPutGetLargeFileAzure() throws Throwable { // create a stage to put the file in statement.execute("CREATE OR REPLACE STAGE largefile_stage"); assertTrue( - "Failed to put a file", - statement.execute("PUT file://" + sourceFilePath + " @largefile_stage")); + statement.execute("PUT file://" + sourceFilePath + " @largefile_stage"), + "Failed to put a file"); // check that file exists in stage after PUT findFile(statement, "ls @largefile_stage/"); @@ -1212,9 +1226,9 @@ public void testPutGetLargeFileAzure() throws Throwable { // get file from new stage assertTrue( - "Failed to get files", statement.execute( - "GET @extra_stage 'file://" + destFolderCanonicalPath + "' parallel=8")); + "GET @extra_stage 'file://" + destFolderCanonicalPath + "' parallel=8"), + "Failed to get files"); // Make sure that the downloaded file exists; it should be gzip compressed File downloaded = new File(destFolderCanonicalPathWithSeparator + "bigFile.csv.gz"); @@ -1259,9 +1273,10 @@ private void copyContentFrom(File file1, File file2) throws Exception { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testPutS3RegionalUrl() throws Throwable { - File destFolder = tmpFolder.newFolder(); + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); List supportedAccounts = Arrays.asList("s3testaccount", "azureaccount"); @@ -1344,13 +1359,13 @@ public void testPutS3RegionalUrl() throws Throwable { // Download two files and verify their content. 
assertTrue( - "Failed to get files", statement.execute( "GET @" + testStageName + " 'file://" + destFolderCanonicalPath - + "/' parallel=8")); + + "/' parallel=8"), + "Failed to get files"); // Make sure that the downloaded files are EQUAL, // they should be gzip compressed @@ -1370,7 +1385,7 @@ public void testPutS3RegionalUrl() throws Throwable { * and Azure */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testAzureS3UploadStreamingIngestFileMetadata() throws Throwable { String clientName = "clientName"; String clientKey = "clientKey"; @@ -1433,7 +1448,8 @@ public void testAzureS3UploadStreamingIngestFileMetadata() throws Throwable { } } - @Test(expected = SnowflakeSQLException.class) + @Test + @DontRunOnGithubActions public void testNoSpaceLeftOnDeviceException() throws SQLException { List supportedAccounts = Arrays.asList("gcpaccount", "s3testaccount", "azureaccount"); for (String accountName : supportedAccounts) { @@ -1452,16 +1468,19 @@ public void testNoSpaceLeftOnDeviceException() throws SQLException { SnowflakeStorageClient client = StorageClientFactory.getFactory().createClient(info, 1, null, /* session= */ null); - client.handleStorageException( - new StorageException( - client.getMaxRetries(), - Constants.NO_SPACE_LEFT_ON_DEVICE_ERR, - new IOException(Constants.NO_SPACE_LEFT_ON_DEVICE_ERR)), - client.getMaxRetries(), - "download", - null, - command, - null); + assertThrows( + SnowflakeSQLException.class, + () -> + client.handleStorageException( + new StorageException( + client.getMaxRetries(), + Constants.NO_SPACE_LEFT_ON_DEVICE_ERR, + new IOException(Constants.NO_SPACE_LEFT_ON_DEVICE_ERR)), + client.getMaxRetries(), + "download", + null, + command, + null)); } finally { statement.execute("DROP STAGE if exists testPutGet_stage"); } @@ -1471,14 +1490,63 @@ public void testNoSpaceLeftOnDeviceException() throws SQLException { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @Disabled // TODO: ignored until SNOW-1616480 is resolved + public void testUploadWithGCSPresignedUrlWithoutConnection() throws Throwable { + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); + String destFolderCanonicalPath = destFolder.getCanonicalPath(); + // set parameter for presignedUrl upload instead of downscoped token + Properties paramProperties = new Properties(); + paramProperties.put("GCS_USE_DOWNSCOPED_CREDENTIAL", false); + try (Connection connection = getConnection("gcpaccount", paramProperties); + Statement statement = connection.createStatement()) { + try { + // create a stage to put the file in + statement.execute("CREATE OR REPLACE STAGE " + testStageName); + + SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); + + // Test put file with internal compression + String putCommand = "put file:///dummy/path/file1.gz @" + testStageName; + SnowflakeFileTransferAgent sfAgent = + new SnowflakeFileTransferAgent(putCommand, sfSession, new SFStatement(sfSession)); + List metadata = sfAgent.getFileTransferMetadatas(); + + String srcPath = getFullPathFileInResource(TEST_DATA_FILE); + for (SnowflakeFileTransferMetadata oneMetadata : metadata) { + InputStream inputStream = new FileInputStream(srcPath); + + assertTrue(oneMetadata.isForOneFile()); + SnowflakeFileTransferAgent.uploadWithoutConnection( + SnowflakeFileTransferConfig.Builder.newInstance() + .setSnowflakeFileTransferMetadata(oneMetadata) + .setUploadStream(inputStream) 
+ .setRequireCompress(true) + .setNetworkTimeoutInMilli(0) + .setOcspMode(OCSPMode.FAIL_OPEN) + .build()); + } + + assertTrue( + statement.execute( + "GET @" + testStageName + " 'file://" + destFolderCanonicalPath + "/' parallel=8"), + "Failed to get files"); + assertTrue(isFileContentEqual(srcPath, false, destFolderCanonicalPath + "/file1.gz", true)); + } finally { + statement.execute("DROP STAGE if exists " + testStageName); + } + } + } + + @Test + @DontRunOnGithubActions public void testUploadWithGCSDownscopedCredentialWithoutConnection() throws Throwable { uploadWithGCSDownscopedCredentialWithoutConnection(); } /** Added in > 3.15.0 */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testUploadWithGCSDownscopedCredentialAndDisabledGcsDefaultCredentialsWithoutConnection() throws Throwable { @@ -1491,7 +1559,8 @@ public void testUploadWithGCSDownscopedCredentialWithoutConnection() throws Thro } private void uploadWithGCSDownscopedCredentialWithoutConnection() throws Throwable { - File destFolder = tmpFolder.newFolder(); + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); Properties paramProperties = new Properties(); paramProperties.put("GCS_USE_DOWNSCOPED_CREDENTIAL", true); @@ -1529,9 +1598,9 @@ private void uploadWithGCSDownscopedCredentialWithoutConnection() throws Throwab .setOcspMode(OCSPMode.FAIL_OPEN) .build()); assertTrue( - "Failed to get files with down-scoped token", statement.execute( - "GET @" + testStageName + " 'file://" + destFolderCanonicalPath + "/'")); + "GET @" + testStageName + " 'file://" + destFolderCanonicalPath + "/'"), + "Failed to get files with down-scoped token"); assertTrue( isFileContentEqual( srcPath, false, destFolderCanonicalPath + "/" + targetFileName, true)); @@ -1551,7 +1620,7 @@ private void uploadWithGCSDownscopedCredentialWithoutConnection() throws Throwab * @throws SQLException */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testHTAPOptimizations() throws SQLException { try { // Set the HTAP test parameter to true @@ -1623,7 +1692,7 @@ public void testHTAPOptimizations() throws SQLException { * @throws SQLException */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testHTAPStatementParameterCaching() throws SQLException { // Set the HTAP test parameter to true try (Connection con = getSnowflakeAdminConnection()) { @@ -1682,9 +1751,10 @@ public void testHTAPStatementParameterCaching() throws SQLException { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testS3PutInGS() throws Throwable { - File destFolder = tmpFolder.newFolder(); + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); Properties paramProperties = new Properties(); try (Connection connection = getConnection("s3testaccount", paramProperties); @@ -1711,7 +1781,7 @@ public void testS3PutInGS() throws Throwable { new FileInputStream(destFolderCanonicalPath + "/" + fileName); String downloadedFile = IOUtils.toString(downloadedFileStream, StandardCharsets.UTF_8); assertTrue( - "downloaded content does not equal uploaded content", content.equals(downloadedFile)); + content.equals(downloadedFile), "downloaded 
content does not equal uploaded content"); } finally { statement.execute("DROP STAGE if exists " + testStageName); } diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverTest.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverTest.java index 8b42be5c4..f268577e1 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverTest.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeDriverTest.java @@ -3,13 +3,13 @@ */ package net.snowflake.client.jdbc; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.greaterThanOrEqualTo; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.sql.SQLException; import java.util.ArrayList; @@ -19,7 +19,7 @@ import java.util.Locale; import java.util.Map; import java.util.Properties; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** Driver unit test */ public class SnowflakeDriverTest { @@ -52,16 +52,16 @@ void match(String url, SnowflakeConnectString sc) { int port = sc.getPort(); Map parameters = sc.getParameters(); - assertEquals("URL scheme: " + url, this.scheme, scheme); - assertEquals("URL scheme: " + url, this.host, host); - assertEquals("URL scheme: " + url, this.port, port); - assertEquals("URL scheme: " + url, this.parameters.size(), parameters.size()); - assertEquals("URL scheme. " + url, this.account, account); + assertEquals(this.scheme, scheme, "URL scheme: " + url); + assertEquals(this.host, host, "URL scheme: " + url); + assertEquals(this.port, port, "URL scheme: " + url); + assertEquals(this.parameters.size(), parameters.size(), "URL scheme: " + url); + assertEquals(this.account, account, "URL scheme. 
" + url); for (Map.Entry entry : this.parameters.entrySet()) { String k = entry.getKey().toUpperCase(Locale.US); Object v = parameters.get(k); - assertEquals("URL scheme: " + url + ", key: " + k, entry.getValue(), v); + assertEquals(entry.getValue(), v, "URL scheme: " + url + ", key: " + k); } } } @@ -355,7 +355,7 @@ public void testAcceptUrls() throws Exception { expectedParameters)); for (TestCase t : testCases) { - assertTrue("URL is not valid: " + t.url, snowflakeDriver.acceptsURL(t.url)); + assertTrue(snowflakeDriver.acceptsURL(t.url), "URL is not valid: " + t.url); t.match(t.url, SnowflakeConnectString.parse(t.url, SnowflakeDriver.EMPTY_PROPERTIES)); } diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeGcsClientHandleExceptionLatestIT.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeGcsClientHandleExceptionLatestIT.java index 22c26465d..fd6ee0d81 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeGcsClientHandleExceptionLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeGcsClientHandleExceptionLatestIT.java @@ -1,5 +1,9 @@ package net.snowflake.client.jdbc; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.fail; + import com.google.cloud.storage.StorageException; import java.io.File; import java.io.IOException; @@ -10,26 +14,23 @@ import java.sql.Statement; import java.util.Properties; import net.snowflake.client.AbstractDriverIT; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryOthers; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.Constants; import net.snowflake.client.core.SFSession; import net.snowflake.client.core.SFStatement; import net.snowflake.client.jdbc.cloud.storage.SnowflakeGCSClient; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; import org.mockito.Mockito; /** Test for SnowflakeGcsClient handle exception function, only work with latest driver */ -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class SnowflakeGcsClientHandleExceptionLatestIT extends AbstractDriverIT { - @Rule public TemporaryFolder tmpFolder = new TemporaryFolder(); + @TempDir private File tmpFolder; private Connection connection; private SFStatement sfStatement; private SFSession sfSession; @@ -38,7 +39,7 @@ public class SnowflakeGcsClientHandleExceptionLatestIT extends AbstractDriverIT private int overMaxRetry; private int maxRetry; - @Before + @BeforeEach public void setup() throws SQLException { Properties paramProperties = new Properties(); paramProperties.put("GCS_USE_DOWNSCOPED_CREDENTIAL", true); @@ -59,7 +60,7 @@ public void setup() throws SQLException { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void error401RenewExpired() throws SQLException, InterruptedException { // Unauthenticated, renew is called. 
spyingClient.handleStorageException( @@ -94,94 +95,117 @@ public void run() { thread.start(); thread.interrupt(); thread.join(); - Assert.assertNull("Exception must not have been thrown in here", exceptionContainer[0]); + assertNull(exceptionContainer[0], "Exception must not have been thrown in here"); Mockito.verify(spyingClient, Mockito.times(2)).renew(Mockito.anyMap()); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void error401OverMaxRetryThrow() throws SQLException { - spyingClient.handleStorageException( - new StorageException(401, "Unauthenticated"), - overMaxRetry, - "upload", - sfSession, - command, - null); + @Test + @DontRunOnGithubActions + public void error401OverMaxRetryThrow() { + assertThrows( + SnowflakeSQLException.class, + () -> + spyingClient.handleStorageException( + new StorageException(401, "Unauthenticated"), + overMaxRetry, + "upload", + sfSession, + command, + null)); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void errorInvalidKey() throws SQLException { + @Test + @DontRunOnGithubActions + public void errorInvalidKey() { // Unauthenticated, renew is called. - spyingClient.handleStorageException( - new Exception(new InvalidKeyException()), 0, "upload", sfSession, command, null); + assertThrows( + SnowflakeSQLException.class, + () -> + spyingClient.handleStorageException( + new Exception(new InvalidKeyException()), 0, "upload", sfSession, command, null)); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @Test + @DontRunOnGithubActions public void errorInterruptedException() throws SQLException { // Can still retry, no error thrown try { spyingClient.handleStorageException( new InterruptedException(), 0, "upload", sfSession, command, null); } catch (Exception e) { - Assert.fail("Should not have exception here"); + fail("Should not have exception here"); } Mockito.verify(spyingClient, Mockito.never()).renew(Mockito.anyMap()); - spyingClient.handleStorageException( - new InterruptedException(), 26, "upload", sfSession, command, null); + assertThrows( + SnowflakeSQLException.class, + () -> + spyingClient.handleStorageException( + new InterruptedException(), 26, "upload", sfSession, command, null)); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void errorSocketTimeoutException() throws SQLException { + @Test + @DontRunOnGithubActions + public void errorSocketTimeoutException() throws SnowflakeSQLException { // Can still retry, no error thrown try { spyingClient.handleStorageException( new SocketTimeoutException(), 0, "upload", sfSession, command, null); } catch (Exception e) { - Assert.fail("Should not have exception here"); + fail("Should not have exception here"); } Mockito.verify(spyingClient, Mockito.never()).renew(Mockito.anyMap()); - spyingClient.handleStorageException( - new SocketTimeoutException(), 26, "upload", sfSession, command, null); + assertThrows( + SnowflakeSQLException.class, + () -> + spyingClient.handleStorageException( + new SocketTimeoutException(), 26, "upload", sfSession, command, null)); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void errorUnknownException() throws 
SQLException { + @Test + @DontRunOnGithubActions + public void errorUnknownException() { // Unauthenticated, renew is called. - spyingClient.handleStorageException(new Exception(), 0, "upload", sfSession, command, null); + assertThrows( + SnowflakeSQLException.class, + () -> + spyingClient.handleStorageException( + new Exception(), 0, "upload", sfSession, command, null)); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void errorWithNullSession() throws SQLException { - spyingClient.handleStorageException( - new StorageException(401, "Unauthenticated"), 0, "upload", null, command, null); + @Test + @DontRunOnGithubActions + public void errorWithNullSession() { + assertThrows( + SnowflakeSQLException.class, + () -> + spyingClient.handleStorageException( + new StorageException(401, "Unauthenticated"), 0, "upload", null, command, null)); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void errorNoSpaceLeftOnDevice() throws SQLException, IOException { - File destFolder = tmpFolder.newFolder(); + @Test + @DontRunOnGithubActions + public void errorNoSpaceLeftOnDevice() throws IOException { + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); String getCommand = "get @testPutGet_stage/" + TEST_DATA_FILE + " 'file://" + destFolderCanonicalPath + "'"; - spyingClient.handleStorageException( - new StorageException( - maxRetry, - Constants.NO_SPACE_LEFT_ON_DEVICE_ERR, - new IOException(Constants.NO_SPACE_LEFT_ON_DEVICE_ERR)), - 0, - "download", - null, - getCommand, - null); + assertThrows( + SQLException.class, + () -> + spyingClient.handleStorageException( + new StorageException( + maxRetry, + Constants.NO_SPACE_LEFT_ON_DEVICE_ERR, + new IOException(Constants.NO_SPACE_LEFT_ON_DEVICE_ERR)), + 0, + "download", + null, + getCommand, + null)); } - @After + @AfterEach public void cleanUp() throws SQLException { sfStatement.close(); connection.close(); diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializableArrowIT.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializableArrowIT.java deleted file mode 100644 index d9cb057d2..000000000 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializableArrowIT.java +++ /dev/null @@ -1,12 +0,0 @@ -package net.snowflake.client.jdbc; - -import net.snowflake.client.category.TestCategoryArrow; -import org.junit.experimental.categories.Category; - -/** Test SnowflakeResultSetSerializable for Arrow */ -@Category(TestCategoryArrow.class) -public class SnowflakeResultSetSerializableArrowIT extends SnowflakeResultSetSerializableIT { - public SnowflakeResultSetSerializableArrowIT() { - super("arrow"); - } -} diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializableIT.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializableIT.java index f9c2bb66d..3b6206f55 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializableIT.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeResultSetSerializableIT.java @@ -1,10 +1,10 @@ package net.snowflake.client.jdbc; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.greaterThan; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThat; -import static 
org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.File; import java.io.FileInputStream; @@ -20,41 +20,33 @@ import java.util.List; import java.util.Properties; import javax.annotation.Nullable; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryResultSet; -import org.junit.Ignore; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TemporaryFolder; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; +import net.snowflake.client.providers.SimpleResultFormatProvider; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; /** SnowflakeResultSetSerializable tests */ -@Category(TestCategoryResultSet.class) +@Tag(TestTags.RESULT_SET) public class SnowflakeResultSetSerializableIT extends BaseJDBCTest { - @Rule public TemporaryFolder tmpFolder = new TemporaryFolder(); + @TempDir private File tmpFolder; private static boolean developPrint = false; - private String queryResultFormat; - // sfFullURL is used to support private link URL. // This test case is not for private link env, so just use a valid URL for testing purpose. private String sfFullURL = "https://sfctest0.snowflakecomputing.com"; - public SnowflakeResultSetSerializableIT() { - this("json"); - } - - SnowflakeResultSetSerializableIT(String format) { - queryResultFormat = format; + public Connection init(String queryResultFormat) throws SQLException { + return init(null, queryResultFormat); } - public Connection init() throws SQLException { - return init(null); - } - - public Connection init(@Nullable Properties properties) throws SQLException { + public Connection init(@Nullable Properties properties, String queryResultFormat) + throws SQLException { Connection conn = BaseJDBCTest.getConnection(properties); try (Statement stmt = conn.createStatement()) { stmt.execute("alter session set jdbc_query_result_format = '" + queryResultFormat + "'"); @@ -121,7 +113,7 @@ private List serializeResultSet( SnowflakeResultSetSerializable entry = resultSetChunks.get(i); // Write object to file - String tmpFileName = tmpFolder.getRoot().getPath() + "_result_" + i + "." + fileNameAppendix; + String tmpFileName = tmpFolder.getPath() + "_result_" + i + "." + fileNameAppendix; try (FileOutputStream fo = new FileOutputStream(tmpFileName); ObjectOutputStream so = new ObjectOutputStream(fo)) { so.writeObject(entry); @@ -238,11 +230,16 @@ private String deserializeResultSetWithProperties(List files, Properties * @throws Throwable If any error happens. 
*/ private void testBasicTableHarness( - int rowCount, long maxSizeInBytes, String whereClause, boolean needSetupTable, boolean async) + int rowCount, + long maxSizeInBytes, + String whereClause, + boolean needSetupTable, + boolean async, + String queryResultFormat) throws Throwable { List fileNameList = null; String originalResultCSVString = null; - try (Connection connection = init()) { + try (Connection connection = init(queryResultFormat)) { Statement statement = connection.createStatement(); if (developPrint) { @@ -289,57 +286,61 @@ private void testBasicTableHarness( assertEquals(chunkResultString, originalResultCSVString); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testBasicTableWithEmptyResult() throws Throwable { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testBasicTableWithEmptyResult(String queryResultFormat) throws Throwable { // Use complex WHERE clause in order to test both ARROW and JSON. // It looks GS only generates JSON format result. - testBasicTableHarness(10, 1024, "where int_c * int_c = 2", true, false); + testBasicTableHarness(10, 1024, "where int_c * int_c = 2", true, false, queryResultFormat); // Test Async mode - testBasicTableHarness(10, 1024, "where int_c * int_c = 2", true, true); + testBasicTableHarness(10, 1024, "where int_c * int_c = 2", true, true, queryResultFormat); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testBasicTableWithOnlyFirstChunk() throws Throwable { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testBasicTableWithOnlyFirstChunk(String queryResultFormat) throws Throwable { // Result only includes first data chunk, test maxSize is small. - testBasicTableHarness(1, 1, "", true, false); + testBasicTableHarness(1, 1, "", true, false, queryResultFormat); // Test Async mode - testBasicTableHarness(1, 1, "", true, true); + testBasicTableHarness(1, 1, "", true, true, queryResultFormat); // Result only includes first data chunk, test maxSize is big. - testBasicTableHarness(1, 1024 * 1024, "", false, false); + testBasicTableHarness(1, 1024 * 1024, "", false, false, queryResultFormat); // Test async mode - testBasicTableHarness(1, 1024 * 1024, "", false, true); + testBasicTableHarness(1, 1024 * 1024, "", false, true, queryResultFormat); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testBasicTableWithOneFileChunk() throws Throwable { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testBasicTableWithOneFileChunk(String queryResultFormat) throws Throwable { // Result only includes first data chunk, test maxSize is small. - testBasicTableHarness(300, 1, "", true, false); + testBasicTableHarness(300, 1, "", true, false, queryResultFormat); // Test Async mode - testBasicTableHarness(300, 1, "", true, true); + testBasicTableHarness(300, 1, "", true, true, queryResultFormat); // Result only includes first data chunk, test maxSize is big. 
- testBasicTableHarness(300, 1024 * 1024, "", false, false); + testBasicTableHarness(300, 1024 * 1024, "", false, false, queryResultFormat); // Test Async mode - testBasicTableHarness(300, 1024 * 1024, "", false, true); + testBasicTableHarness(300, 1024 * 1024, "", false, true, queryResultFormat); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testBasicTableWithSomeFileChunks() throws Throwable { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testBasicTableWithSomeFileChunks(String queryResultFormat) throws Throwable { // Result only includes first data chunk, test maxSize is small. - testBasicTableHarness(90000, 1, "", true, false); + testBasicTableHarness(90000, 1, "", true, false, queryResultFormat); // Test Async mode - testBasicTableHarness(90000, 1, "", true, true); + testBasicTableHarness(90000, 1, "", true, true, queryResultFormat); // Result only includes first data chunk, test maxSize is median. - testBasicTableHarness(90000, 3 * 1024 * 1024, "", false, false); + testBasicTableHarness(90000, 3 * 1024 * 1024, "", false, false, queryResultFormat); // Test Async mode - testBasicTableHarness(90000, 3 * 1024 * 1024, "", false, true); + testBasicTableHarness(90000, 3 * 1024 * 1024, "", false, true, queryResultFormat); // Result only includes first data chunk, test maxSize is big. - testBasicTableHarness(90000, 100 * 1024 * 1024, "", false, false); + testBasicTableHarness(90000, 100 * 1024 * 1024, "", false, false, queryResultFormat); // Test Async mode - testBasicTableHarness(90000, 100 * 1024 * 1024, "", false, true); + testBasicTableHarness(90000, 100 * 1024 * 1024, "", false, true, queryResultFormat); } /** @@ -365,11 +366,12 @@ private void testTimestampHarness( String format_ntz, String format_ltz, String format_tz, - String timezone) + String timezone, + String queryResultFormat) throws Throwable { List fileNameList = null; String originalResultCSVString = null; - try (Connection connection = init(); + try (Connection connection = init(queryResultFormat); Statement statement = connection.createStatement()) { statement.execute("alter session set DATE_OUTPUT_FORMAT = '" + format_date + "'"); statement.execute("alter session set TIME_OUTPUT_FORMAT = '" + format_time + "'"); @@ -419,9 +421,10 @@ private void testTimestampHarness( assertEquals(chunkResultString, originalResultCSVString); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testTimestamp() throws Throwable { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testTimestamp(String queryResultFormat) throws Throwable { String[] dateFormats = {"YYYY-MM-DD", "DD-MON-YYYY", "MM/DD/YYYY"}; String[] timeFormats = {"HH24:MI:SS.FFTZH:TZM", "HH24:MI:SS.FF", "HH24:MI:SS"}; String[] timestampFormats = { @@ -441,16 +444,19 @@ public void testTimestamp() throws Throwable { timestampFormats[i], timestampFormats[i], timestampFormats[i], - timezones[i]); + timezones[i], + queryResultFormat); } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testBasicTableWithSerializeObjectsAfterReadResultSet() throws Throwable { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testBasicTableWithSerializeObjectsAfterReadResultSet(String queryResultFormat) + throws Throwable { List 
fileNameList = null; String originalResultCSVString = null; - try (Connection connection = init(); + try (Connection connection = init(queryResultFormat); Statement statement = connection.createStatement()) { statement.execute("create or replace schema testschema"); @@ -528,13 +534,14 @@ private synchronized List splitResultSetSerializables( return resultFileList; } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testSplitResultSetSerializable() throws Throwable { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testSplitResultSetSerializable(String queryResultFormat) throws Throwable { List fileNameList = null; String originalResultCSVString = null; int rowCount = 90000; - try (Connection connection = init(); + try (Connection connection = init(queryResultFormat); Statement statement = connection.createStatement()) { statement.execute( @@ -594,10 +601,11 @@ private void hackToSetupWrongURL(List resultSetS } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testCloseUnconsumedResultSet() throws Throwable { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testCloseUnconsumedResultSet(String queryResultFormat) throws Throwable { + try (Connection connection = init(queryResultFormat); Statement statement = connection.createStatement()) { try { statement.execute( @@ -624,13 +632,14 @@ public void testCloseUnconsumedResultSet() throws Throwable { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testNegativeWithChunkFileNotExist() throws Throwable { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testNegativeWithChunkFileNotExist(String queryResultFormat) throws Throwable { // This test takes about (download worker retry times * networkTimeout) long to finish Properties properties = new Properties(); properties.put("networkTimeout", 10000); // 10000 millisec - try (Connection connection = init(properties)) { + try (Connection connection = init(properties, queryResultFormat)) { try (Statement statement = connection.createStatement()) { statement.execute( "create or replace table table_basic " + " (int_c int, string_c string(128))"); @@ -678,10 +687,11 @@ public void testNegativeWithChunkFileNotExist() throws Throwable { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testNegativeWithClosedResultSet() throws Throwable { - try (Connection connection = init()) { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testNegativeWithClosedResultSet(String queryResultFormat) throws Throwable { + try (Connection connection = init(queryResultFormat)) { Statement statement = connection.createStatement(); statement.execute( @@ -730,15 +740,16 @@ public void testNegativeWithClosedResultSet() throws Throwable { * * @throws Throwable */ - @Test - @Ignore - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testCustomProxyWithFiles() throws Throwable { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @Disabled + @DontRunOnGithubActions + public void testCustomProxyWithFiles(String queryResultFormat) throws Throwable 
{ boolean generateFiles = false; boolean correctProxy = false; if (generateFiles) { - generateTestFiles(); + generateTestFiles(queryResultFormat); fail("This is generate test file."); } @@ -775,8 +786,8 @@ public void testCustomProxyWithFiles() throws Throwable { } } - private void generateTestFiles() throws Throwable { - try (Connection connection = init(); + private void generateTestFiles(String queryResultFormat) throws Throwable { + try (Connection connection = init(queryResultFormat); Statement statement = connection.createStatement()) { statement.execute( @@ -800,15 +811,16 @@ private void generateTestFiles() throws Throwable { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testRetrieveMetadata() throws Throwable { + @ParameterizedTest + @ArgumentsSource(SimpleResultFormatProvider.class) + @DontRunOnGithubActions + public void testRetrieveMetadata(String queryResultFormat) throws Throwable { List fileNameList; int rowCount = 90000; long expectedTotalRowCount = 0; long expectedTotalCompressedSize = 0; long expectedTotalUncompressedSize = 0; - try (Connection connection = init(); + try (Connection connection = init(queryResultFormat); Statement statement = connection.createStatement()) { statement.execute( diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeS3ClientHandleExceptionLatestIT.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeS3ClientHandleExceptionLatestIT.java index aed4d1f39..e104abc66 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeS3ClientHandleExceptionLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeS3ClientHandleExceptionLatestIT.java @@ -3,6 +3,10 @@ */ package net.snowflake.client.jdbc; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.fail; + import com.amazonaws.AmazonClientException; import com.amazonaws.AmazonServiceException; import com.amazonaws.ClientConfiguration; @@ -16,27 +20,24 @@ import java.sql.SQLException; import java.sql.Statement; import net.snowflake.client.AbstractDriverIT; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryOthers; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.Constants; import net.snowflake.client.core.SFSession; import net.snowflake.client.core.SFStatement; import net.snowflake.client.jdbc.cloud.storage.SnowflakeS3Client; import net.snowflake.client.jdbc.cloud.storage.StageInfo; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; import org.mockito.Mockito; /** Test for SnowflakeS3Client handle exception function */ -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class SnowflakeS3ClientHandleExceptionLatestIT extends AbstractDriverIT { - @Rule public TemporaryFolder tmpFolder = new TemporaryFolder(); + @TempDir private File tmpFolder; private Connection connection; private SFStatement sfStatement; private SFSession sfSession; 
@@ -46,7 +47,7 @@ public class SnowflakeS3ClientHandleExceptionLatestIT extends AbstractDriverIT { private int maxRetry; private static final String EXPIRED_AWS_TOKEN_ERROR_CODE = "ExpiredToken"; - @Before + @BeforeEach public void setup() throws SQLException { connection = getConnection("s3testaccount"); sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); @@ -75,7 +76,7 @@ public void setup() throws SQLException { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void errorRenewExpired() throws SQLException, InterruptedException { AmazonS3Exception ex = new AmazonS3Exception("unauthenticated"); ex.setErrorCode(EXPIRED_AWS_TOKEN_ERROR_CODE); @@ -100,19 +101,27 @@ public void run() { thread.start(); thread.interrupt(); thread.join(); - Assert.assertNull("Exception must not have been thrown in here", exceptionContainer[0]); + assertNull(exceptionContainer[0], "Exception must not have been thrown in here"); Mockito.verify(spyingClient, Mockito.times(2)).renew(Mockito.anyMap()); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void errorNotFound() throws SQLException { - spyingClient.handleStorageException( - new AmazonS3Exception("Not found"), overMaxRetry, "upload", sfSession, command, null); + @Test + @DontRunOnGithubActions + public void errorNotFound() { + assertThrows( + SnowflakeSQLException.class, + () -> + spyingClient.handleStorageException( + new AmazonS3Exception("Not found"), + overMaxRetry, + "upload", + sfSession, + command, + null)); } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void errorBadRequestTokenExpired() throws SQLException { AmazonServiceException ex = new AmazonServiceException("Bad Request"); ex.setServiceName("Amazon S3"); @@ -126,91 +135,113 @@ public void errorBadRequestTokenExpired() throws SQLException { Mockito.verify(spyingClient, Mockito.times(1)).renew(Mockito.anyMap()); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void errorClientUnknown() throws SQLException { - spyingClient.handleStorageException( - new AmazonClientException("Not found", new IOException()), - overMaxRetry, - "upload", - sfSession, - command, - null); + @Test + @DontRunOnGithubActions + public void errorClientUnknown() { + assertThrows( + SnowflakeSQLException.class, + () -> + spyingClient.handleStorageException( + new AmazonClientException("Not found", new IOException()), + overMaxRetry, + "upload", + sfSession, + command, + null)); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void errorInvalidKey() throws SQLException { + @Test + @DontRunOnGithubActions + public void errorInvalidKey() { // Unauthenticated, renew is called. 
- spyingClient.handleStorageException( - new Exception(new InvalidKeyException()), 0, "upload", sfSession, command, null); + assertThrows( + SnowflakeSQLException.class, + () -> + spyingClient.handleStorageException( + new Exception(new InvalidKeyException()), 0, "upload", sfSession, command, null)); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void errorInterruptedException() throws SQLException { + @Test + @DontRunOnGithubActions + public void errorInterruptedException() throws SnowflakeSQLException { // Can still retry, no error thrown try { spyingClient.handleStorageException( new InterruptedException(), 0, "upload", sfSession, command, null); } catch (Exception e) { - Assert.fail("Should not have exception here"); + fail("Should not have exception here"); } Mockito.verify(spyingClient, Mockito.never()).renew(Mockito.anyMap()); - spyingClient.handleStorageException( - new InterruptedException(), 26, "upload", sfSession, command, null); + assertThrows( + SnowflakeSQLException.class, + () -> + spyingClient.handleStorageException( + new InterruptedException(), 26, "upload", sfSession, command, null)); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void errorSocketTimeoutException() throws SQLException { + @Test + @DontRunOnGithubActions + public void errorSocketTimeoutException() throws SnowflakeSQLException { // Can still retry, no error thrown try { spyingClient.handleStorageException( new SocketTimeoutException(), 0, "upload", sfSession, command, null); } catch (Exception e) { - Assert.fail("Should not have exception here"); + fail("Should not have exception here"); } Mockito.verify(spyingClient, Mockito.never()).renew(Mockito.anyMap()); - spyingClient.handleStorageException( - new SocketTimeoutException(), 26, "upload", sfSession, command, null); + assertThrows( + SnowflakeSQLException.class, + () -> + spyingClient.handleStorageException( + new SocketTimeoutException(), 26, "upload", sfSession, command, null)); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void errorUnknownException() throws SQLException { - spyingClient.handleStorageException(new Exception(), 0, "upload", sfSession, command, null); + @Test + @DontRunOnGithubActions + public void errorUnknownException() { + assertThrows( + SnowflakeSQLException.class, + () -> + spyingClient.handleStorageException( + new Exception(), 0, "upload", sfSession, command, null)); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void errorRenewExpiredNullSession() throws SQLException { + @Test + @DontRunOnGithubActions + public void errorRenewExpiredNullSession() { // Unauthenticated, renew is called. 
AmazonS3Exception ex = new AmazonS3Exception("unauthenticated"); ex.setErrorCode(EXPIRED_AWS_TOKEN_ERROR_CODE); - spyingClient.handleStorageException(ex, 0, "upload", null, command, null); + assertThrows( + SnowflakeSQLException.class, + () -> spyingClient.handleStorageException(ex, 0, "upload", null, command, null)); } - @Test(expected = SnowflakeSQLException.class) - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void errorNoSpaceLeftOnDevice() throws SQLException, IOException { - File destFolder = tmpFolder.newFolder(); + @Test + @DontRunOnGithubActions + public void errorNoSpaceLeftOnDevice() throws IOException { + File destFolder = new File(tmpFolder, "dest"); + destFolder.mkdirs(); String destFolderCanonicalPath = destFolder.getCanonicalPath(); String getCommand = "get @testPutGet_stage/" + TEST_DATA_FILE + " 'file://" + destFolderCanonicalPath + "'"; - spyingClient.handleStorageException( - new StorageException( - maxRetry, - Constants.NO_SPACE_LEFT_ON_DEVICE_ERR, - new IOException(Constants.NO_SPACE_LEFT_ON_DEVICE_ERR)), - 0, - "download", - null, - getCommand, - null); + assertThrows( + SnowflakeSQLException.class, + () -> + spyingClient.handleStorageException( + new StorageException( + maxRetry, + Constants.NO_SPACE_LEFT_ON_DEVICE_ERR, + new IOException(Constants.NO_SPACE_LEFT_ON_DEVICE_ERR)), + 0, + "download", + null, + getCommand, + null)); } - @After + @AfterEach public void cleanUp() throws SQLException { sfStatement.close(); connection.close(); diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeSerializableTest.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeSerializableTest.java index 4cd2fa7e8..92d00affc 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeSerializableTest.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeSerializableTest.java @@ -2,10 +2,10 @@ import static net.snowflake.client.jdbc.SnowflakeChunkDownloader.NoOpChunkDownloader; import static net.snowflake.client.jdbc.SnowflakeResultSetSerializableV1.ChunkFileMetadata; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; @@ -16,7 +16,7 @@ import net.snowflake.client.core.SFBaseSession; import net.snowflake.client.core.SFBaseStatement; import net.snowflake.client.core.SFStatementType; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class SnowflakeSerializableTest { diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeTimestampWithTimezoneTest.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeTimestampWithTimezoneTest.java index ebf32dcef..41bee6e85 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeTimestampWithTimezoneTest.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeTimestampWithTimezoneTest.java @@ -3,88 +3,81 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.sql.Timestamp; import java.time.LocalDateTime; import java.time.ZoneOffset; import java.time.ZonedDateTime; import 
java.util.ArrayList; -import java.util.Collection; import java.util.List; import java.util.TimeZone; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import java.util.stream.Stream; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsProvider; +import org.junit.jupiter.params.provider.ArgumentsSource; /** * Tests SnowflakeTimestampWithTimezone to ensure the output is not impacted by Day Light Saving * Time. Not this test case is not thread safe, because TimeZone.setDefault is called. */ -@RunWith(Parameterized.class) public class SnowflakeTimestampWithTimezoneTest extends BaseJDBCTest { private static TimeZone orgTimeZone; - private final String timeZone; - private final String inputTimestamp; - private final String outputTimestamp; + static class Params implements ArgumentsProvider { + public Stream provideArguments(ExtensionContext context) { + String[] timeZoneList = {"PST", "America/New_York", "UTC", "Asia/Singapore"}; - public SnowflakeTimestampWithTimezoneTest( - String timeZone, String inputTimestamp, String outputTimestamp) { - this.timeZone = timeZone; - this.inputTimestamp = inputTimestamp; - this.outputTimestamp = outputTimestamp; - } - - @Parameterized.Parameters(name = "tz={0}, input={1}, output={2}") - public static Collection convert() { - String[] timeZoneList = {"PST", "America/New_York", "UTC", "Asia/Singapore"}; + String[] dateTimeList = { + "2018-03-11 01:10:34.0123456", + "2018-03-11 02:10:34.0123456", + "2018-03-11 03:10:34.0123456", + "2018-11-04 01:10:34.123", + "2018-11-04 02:10:34.123", + "2018-11-04 03:10:34.123", + "2020-03-11 01:10:34.456", + "2020-03-11 02:10:34.456", + "2020-03-11 03:10:34.456", + "2020-11-01 01:10:34.123", + "2020-11-01 02:10:34.123", + "2020-11-01 03:10:34.123" + }; - String[] dateTimeList = { - "2018-03-11 01:10:34.0123456", - "2018-03-11 02:10:34.0123456", - "2018-03-11 03:10:34.0123456", - "2018-11-04 01:10:34.123", - "2018-11-04 02:10:34.123", - "2018-11-04 03:10:34.123", - "2020-03-11 01:10:34.456", - "2020-03-11 02:10:34.456", - "2020-03-11 03:10:34.456", - "2020-11-01 01:10:34.123", - "2020-11-01 02:10:34.123", - "2020-11-01 03:10:34.123" - }; - - List testCases = new ArrayList<>(); - for (String timeZone : timeZoneList) { - for (String dateTime : dateTimeList) { - testCases.add(new Object[] {timeZone, dateTime, dateTime}); + List testCases = new ArrayList<>(); + for (String timeZone : timeZoneList) { + for (String dateTime : dateTimeList) { + testCases.add(Arguments.of(timeZone, dateTime, dateTime)); + } } + return testCases.stream(); } - return testCases; } /** Records the original TimeZone */ - @BeforeClass + @BeforeAll public static void keepOriginalTimeZone() { orgTimeZone = TimeZone.getDefault(); } - @AfterClass + @AfterAll public static void restoreTimeZone() { TimeZone.setDefault(orgTimeZone); } - @Test - public void testTimestampNTZ() throws Throwable { + @ParameterizedTest(name = "{index}: {1} {0}") + @ArgumentsSource(Params.class) + public void testTimestampNTZ(String timeZone, String inputTimestamp, String outputTimestamp) { TimeZone.setDefault(TimeZone.getTimeZone(timeZone)); - LocalDateTime dt = parseTimestampNTZ(this.inputTimestamp); + 
LocalDateTime dt = parseTimestampNTZ(inputTimestamp); SnowflakeTimestampWithTimezone stn = new SnowflakeTimestampWithTimezone( dt.toEpochSecond(ZoneOffset.UTC) * 1000, dt.getNano(), TimeZone.getTimeZone("UTC")); - assertEquals(this.outputTimestamp, stn.toString()); + assertEquals(outputTimestamp, stn.toString()); } @Test diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeTypeTest.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeTypeTest.java index 29c58b787..b24825c96 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeTypeTest.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeTypeTest.java @@ -2,15 +2,16 @@ import static net.snowflake.client.jdbc.SnowflakeType.convertStringToType; import static net.snowflake.client.jdbc.SnowflakeType.getJavaType; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertThrows; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; import java.math.BigDecimal; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; +import java.sql.Time; import java.sql.Types; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class SnowflakeTypeTest { @@ -97,7 +98,7 @@ public void testJavaTypeToSFType() throws SnowflakeSQLException { @Test public void testJavaTypeToClassName() throws SQLException { assertEquals(SnowflakeType.javaTypeToClassName(Types.DECIMAL), BigDecimal.class.getName()); - assertEquals(SnowflakeType.javaTypeToClassName(Types.TIME), java.sql.Time.class.getName()); + assertEquals(SnowflakeType.javaTypeToClassName(Types.TIME), Time.class.getName()); assertEquals(SnowflakeType.javaTypeToClassName(Types.BOOLEAN), Boolean.class.getName()); assertThrows( SQLFeatureNotSupportedException.class, diff --git a/src/test/java/net/snowflake/client/jdbc/SnowflakeUtilTest.java b/src/test/java/net/snowflake/client/jdbc/SnowflakeUtilTest.java index 703e55b7c..054aef9fe 100644 --- a/src/test/java/net/snowflake/client/jdbc/SnowflakeUtilTest.java +++ b/src/test/java/net/snowflake/client/jdbc/SnowflakeUtilTest.java @@ -6,10 +6,10 @@ import static net.snowflake.client.jdbc.SnowflakeUtil.createCaseInsensitiveMap; import static net.snowflake.client.jdbc.SnowflakeUtil.extractColumnMetadata; import static net.snowflake.client.jdbc.SnowflakeUtil.getSnowflakeType; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -21,14 +21,14 @@ import java.util.HashMap; import java.util.Map; import java.util.TreeMap; -import net.snowflake.client.category.TestCategoryCore; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.ObjectMapperFactory; import org.apache.http.Header; import org.apache.http.message.BasicHeader; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryCore.class) +@Tag(TestTags.CORE) public 
class SnowflakeUtilTest extends BaseJDBCTest { private static final ObjectMapper OBJECT_MAPPER = ObjectMapperFactory.getObjectMapper(); diff --git a/src/test/java/net/snowflake/client/jdbc/SqlFeatureNotSupportedTelemetryTest.java b/src/test/java/net/snowflake/client/jdbc/SqlFeatureNotSupportedTelemetryTest.java index 37819457c..03fa47418 100644 --- a/src/test/java/net/snowflake/client/jdbc/SqlFeatureNotSupportedTelemetryTest.java +++ b/src/test/java/net/snowflake/client/jdbc/SqlFeatureNotSupportedTelemetryTest.java @@ -1,10 +1,10 @@ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import com.fasterxml.jackson.databind.node.ObjectNode; import net.minidev.json.JSONObject; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class SqlFeatureNotSupportedTelemetryTest { diff --git a/src/test/java/net/snowflake/client/jdbc/StatementAlreadyClosedIT.java b/src/test/java/net/snowflake/client/jdbc/StatementAlreadyClosedIT.java index 08bf2ed72..268173e92 100644 --- a/src/test/java/net/snowflake/client/jdbc/StatementAlreadyClosedIT.java +++ b/src/test/java/net/snowflake/client/jdbc/StatementAlreadyClosedIT.java @@ -3,17 +3,17 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Connection; import java.sql.ResultSet; import java.sql.Statement; -import net.snowflake.client.category.TestCategoryStatement; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryStatement.class) +@Tag(TestTags.STATEMENT) public class StatementAlreadyClosedIT extends BaseJDBCTest { @Test public void testStatementAlreadyClosed() throws Throwable { diff --git a/src/test/java/net/snowflake/client/jdbc/StatementArrowIT.java b/src/test/java/net/snowflake/client/jdbc/StatementArrowIT.java index 061edb528..f66bbb7c2 100644 --- a/src/test/java/net/snowflake/client/jdbc/StatementArrowIT.java +++ b/src/test/java/net/snowflake/client/jdbc/StatementArrowIT.java @@ -1,9 +1,9 @@ package net.snowflake.client.jdbc; -import net.snowflake.client.category.TestCategoryArrow; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; -@Category(TestCategoryArrow.class) +@Tag(TestTags.ARROW) public class StatementArrowIT extends StatementIT { public StatementArrowIT() { super(); diff --git a/src/test/java/net/snowflake/client/jdbc/StatementFeatureNotSupportedIT.java b/src/test/java/net/snowflake/client/jdbc/StatementFeatureNotSupportedIT.java index 01be27150..b6c62ddc1 100644 --- a/src/test/java/net/snowflake/client/jdbc/StatementFeatureNotSupportedIT.java +++ b/src/test/java/net/snowflake/client/jdbc/StatementFeatureNotSupportedIT.java @@ -6,11 +6,11 @@ import java.sql.Connection; import java.sql.ResultSet; import java.sql.Statement; -import net.snowflake.client.category.TestCategoryStatement; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryStatement.class) +@Tag(TestTags.STATEMENT) public class StatementFeatureNotSupportedIT 
extends BaseJDBCTest { @Test public void testFeatureNotSupportedException() throws Throwable { diff --git a/src/test/java/net/snowflake/client/jdbc/StatementIT.java b/src/test/java/net/snowflake/client/jdbc/StatementIT.java index 2fa713308..075889834 100644 --- a/src/test/java/net/snowflake/client/jdbc/StatementIT.java +++ b/src/test/java/net/snowflake/client/jdbc/StatementIT.java @@ -6,13 +6,13 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.File; import java.sql.BatchUpdateException; @@ -24,21 +24,19 @@ import java.time.Duration; import java.util.List; import net.snowflake.client.AbstractDriverIT; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryStatement; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; import net.snowflake.client.jdbc.telemetry.Telemetry; import net.snowflake.client.jdbc.telemetry.TelemetryClient; import net.snowflake.common.core.SqlState; import org.awaitility.Awaitility; -import org.junit.Ignore; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; /** Statement tests */ -@Category(TestCategoryStatement.class) +@Tag(TestTags.STATEMENT) public class StatementIT extends BaseJDBCWithSharedConnectionIT { protected static String queryResultFormat = "json"; @@ -50,7 +48,7 @@ public static Connection getConnection() throws SQLException { return conn; } - @Rule public TemporaryFolder tmpFolder = new TemporaryFolder(); + @TempDir private File tmpFolder; @Test public void testFetchDirection() throws SQLException { @@ -64,7 +62,7 @@ public void testFetchDirection() throws SQLException { } } - @Ignore("Not working for setFetchSize") + @Disabled("Not working for setFetchSize") @Test public void testFetchSize() throws SQLException { try (Statement statement = connection.createStatement()) { @@ -362,7 +360,8 @@ public void testExecuteBatch() throws Exception { "put file://" + getFullPathFileInResource(TEST_DATA_FILE) + " @%test_batch auto_compress=false"); - File tempFolder = tmpFolder.newFolder("test_downloads_folder"); + File tempFolder = new File(tmpFolder, "test_downloads_folder"); + tempFolder.mkdirs(); statement.addBatch("get @%test_batch file://" + tempFolder.getCanonicalPath()); rowCounts = statement.executeBatch(); @@ -423,7 +422,7 @@ public void testExecuteLargeBatch() throws SQLException { * @throws SQLException if any error occurs 
*/ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testExecuteUpdateZeroCount() throws SQLException { try (Connection connection = getConnection()) { String[] testCommands = { diff --git a/src/test/java/net/snowflake/client/jdbc/StatementLargeUpdateIT.java b/src/test/java/net/snowflake/client/jdbc/StatementLargeUpdateIT.java index d041b1694..b0eefd096 100644 --- a/src/test/java/net/snowflake/client/jdbc/StatementLargeUpdateIT.java +++ b/src/test/java/net/snowflake/client/jdbc/StatementLargeUpdateIT.java @@ -1,15 +1,15 @@ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.sql.Connection; import java.sql.Statement; -import net.snowflake.client.category.TestCategoryStatement; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** Large update test. No JSON/ARROW specific test case is required. */ -@Category(TestCategoryStatement.class) +@Tag(TestTags.STATEMENT) public class StatementLargeUpdateIT extends BaseJDBCTest { @Test public void testLargeUpdate() throws Throwable { diff --git a/src/test/java/net/snowflake/client/jdbc/StatementLatestIT.java b/src/test/java/net/snowflake/client/jdbc/StatementLatestIT.java index 9d96f44ea..e2f030464 100644 --- a/src/test/java/net/snowflake/client/jdbc/StatementLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/StatementLatestIT.java @@ -6,11 +6,11 @@ import static net.snowflake.client.jdbc.ErrorCode.ROW_DOES_NOT_EXIST; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.File; import java.net.URL; @@ -22,17 +22,15 @@ import java.util.Arrays; import java.util.List; import java.util.Map; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; import net.snowflake.client.TestUtil; -import net.snowflake.client.category.TestCategoryStatement; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.ParameterBindingDTO; import net.snowflake.client.core.SFSession; import net.snowflake.client.core.bind.BindUploader; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; /** * Statement integration tests for the latest JDBC driver. This doesn't work for the oldest @@ -40,7 +38,7 @@ * if the tests still is not applicable. If it is applicable, move tests to StatementIT so that both * the latest and oldest supported driver run the tests. 
*/ -@Category(TestCategoryStatement.class) +@Tag(TestTags.STATEMENT) public class StatementLatestIT extends BaseJDBCWithSharedConnectionIT { protected static String queryResultFormat = "json"; @@ -52,7 +50,7 @@ public static Connection getConnection() throws SQLException { return conn; } - @Rule public TemporaryFolder tmpFolder = new TemporaryFolder(); + @TempDir private File tmpFolder; @Test public void testExecuteCreateAndDrop() throws SQLException { @@ -83,9 +81,10 @@ public void testExecuteCreateAndDrop() throws SQLException { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testCopyAndUpload() throws Exception { - File tempFolder = tmpFolder.newFolder("test_downloads_folder"); + File tempFolder = new File(tmpFolder, "test_downloads_folder"); + tempFolder.mkdirs(); List accounts = Arrays.asList(null, "s3testaccount", "azureaccount", "gcpaccount"); for (int i = 0; i < accounts.size(); i++) { String fileName = "test_copy.csv"; @@ -198,7 +197,7 @@ public void testExecuteOpenResultSets() throws SQLException { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testPreparedStatementLogging() throws SQLException { try (Connection con = getConnection(); Statement stmt = con.createStatement()) { diff --git a/src/test/java/net/snowflake/client/jdbc/StatementNoOpLatestIT.java b/src/test/java/net/snowflake/client/jdbc/StatementNoOpLatestIT.java index 12aa69882..22b58584d 100644 --- a/src/test/java/net/snowflake/client/jdbc/StatementNoOpLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/StatementNoOpLatestIT.java @@ -4,17 +4,17 @@ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; -import net.snowflake.client.category.TestCategoryStatement; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryStatement.class) +@Tag(TestTags.STATEMENT) public class StatementNoOpLatestIT { @Test public void testSnowflakeNoOpStatement() throws SQLException { diff --git a/src/test/java/net/snowflake/client/jdbc/StreamIT.java b/src/test/java/net/snowflake/client/jdbc/StreamIT.java index d1762904d..e6407c16c 100644 --- a/src/test/java/net/snowflake/client/jdbc/StreamIT.java +++ b/src/test/java/net/snowflake/client/jdbc/StreamIT.java @@ -3,8 +3,8 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.InputStream; import java.io.StringWriter; @@ -14,15 +14,14 @@ import java.sql.Statement; import java.util.Arrays; import java.util.List; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryOthers; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; import org.apache.commons.io.IOUtils; -import org.junit.Test; -import org.junit.experimental.categories.Category; 
+import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** Stream interface tests. Snowflake JDBC specific API */ -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class StreamIT extends BaseJDBCTest { /** * Test Upload Stream @@ -53,7 +52,7 @@ public void testUploadStream() throws Throwable { while (rset.next()) { ret = rset.getString(1); } - assertEquals("Unexpected string value: " + ret + " expect: hello", "hello", ret); + assertEquals("hello", ret, "Unexpected string value: " + ret + " expect: hello"); } } finally { statement.execute("rm @~/" + DEST_PREFIX); @@ -69,7 +68,7 @@ public void testUploadStream() throws Throwable { * @throws Throwable if any error occurs. */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testDownloadStream() throws Throwable { final String DEST_PREFIX = TEST_UUID + "/testUploadStream"; List supportedAccounts = Arrays.asList("s3testaccount", "azureaccount"); @@ -132,7 +131,7 @@ public void testCompressAndUploadStream() throws Throwable { while (rset.next()) { ret = rset.getString(1); } - assertEquals("Unexpected string value: " + ret + " expect: hello", "hello", ret); + assertEquals("hello", ret, "Unexpected string value: " + ret + " expect: hello"); } } finally { diff --git a/src/test/java/net/snowflake/client/jdbc/StreamLatestIT.java b/src/test/java/net/snowflake/client/jdbc/StreamLatestIT.java index 093c2de27..af7c8eea3 100644 --- a/src/test/java/net/snowflake/client/jdbc/StreamLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/StreamLatestIT.java @@ -3,9 +3,9 @@ */ package net.snowflake.client.jdbc; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.BufferedWriter; import java.io.File; @@ -19,15 +19,13 @@ import java.sql.SQLException; import java.sql.Statement; import java.util.Properties; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryOthers; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; import org.apache.commons.io.IOUtils; -import org.junit.Ignore; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TemporaryFolder; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; /** * Stream API tests for the latest JDBC driver. This doesn't work for the oldest supported driver. @@ -35,10 +33,10 @@ * is not applicable. If it is applicable, move tests to StreamIT so that both the latest and oldest * supported driver run the tests. 
*/ -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class StreamLatestIT extends BaseJDBCTest { - @Rule public TemporaryFolder tmpFolder = new TemporaryFolder(); + @TempDir private File tmpFolder; /** * Test Upload Stream with atypical stage names @@ -72,7 +70,7 @@ public void testUnusualStageName() throws Throwable { while (rset.next()) { ret = rset.getString(1); } - assertEquals("Unexpected string value: " + ret + " expect: hello", "hello", ret); + assertEquals("hello", ret, "Unexpected string value: " + ret + " expect: hello"); } statement.execute("CREATE or replace TABLE \"ice cream (nice)\" (types STRING)"); @@ -92,7 +90,7 @@ public void testUnusualStageName() throws Throwable { while (rset.next()) { ret = rset.getString(1); } - assertEquals("Unexpected string value: " + ret + " expect: hello", "hello", ret); + assertEquals("hello", ret, "Unexpected string value: " + ret + " expect: hello"); } } finally { statement.execute("DROP TABLE IF EXISTS \"ice cream (nice)\""); @@ -101,7 +99,7 @@ public void testUnusualStageName() throws Throwable { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testDownloadToStreamBlobNotFoundGCS() throws SQLException { final String DEST_PREFIX = TEST_UUID + "/testUploadStream"; Properties paramProperties = new Properties(); @@ -118,8 +116,8 @@ public void testDownloadToStreamBlobNotFoundGCS() throws SQLException { } catch (Exception ex) { assertTrue(ex instanceof SQLException); assertTrue( - "Wrong exception message: " + ex.getMessage(), - ex.getMessage().contains("File not found")); + ex.getMessage().contains("File not found"), + "Wrong exception message: " + ex.getMessage()); } finally { statement.execute("rm @~/" + DEST_PREFIX); } @@ -127,7 +125,7 @@ public void testDownloadToStreamBlobNotFoundGCS() throws SQLException { } @Test - @Ignore + @Disabled public void testDownloadToStreamGCSPresignedUrl() throws SQLException, IOException { final String DEST_PREFIX = "testUploadStream"; @@ -141,7 +139,7 @@ public void testDownloadToStreamGCSPresignedUrl() throws SQLException, IOExcepti + " @testgcpstage/" + DEST_PREFIX)) { assertTrue(rset.next()); - assertEquals("Error message:" + rset.getString(8), "UPLOADED", rset.getString(7)); + assertEquals("UPLOADED", rset.getString(7), "Error message:" + rset.getString(8)); InputStream out = connection @@ -162,7 +160,7 @@ public void testDownloadToStreamGCSPresignedUrl() throws SQLException, IOExcepti } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testDownloadToStreamGCS() throws SQLException, IOException { final String DEST_PREFIX = TEST_UUID + "/testUploadStream"; Properties paramProperties = new Properties(); @@ -202,7 +200,8 @@ public void testSpecialCharactersInFileName() throws SQLException, IOException { Statement statement = connection.createStatement()) { try { // Create a temporary file with special characters in the name and write to it - File specialCharFile = tmpFolder.newFile("(special char@).txt"); + File specialCharFile = new File(tmpFolder, "(special char@).txt"); + specialCharFile.createNewFile(); try (BufferedWriter bw = new BufferedWriter(new FileWriter(specialCharFile))) { bw.write("Creating test file for downloadStream test"); } diff --git a/src/test/java/net/snowflake/client/jdbc/cloud/storage/CloudStorageClientLatestIT.java b/src/test/java/net/snowflake/client/jdbc/cloud/storage/CloudStorageClientLatestIT.java index 
20a070a02..d09182860 100644 --- a/src/test/java/net/snowflake/client/jdbc/cloud/storage/CloudStorageClientLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/cloud/storage/CloudStorageClientLatestIT.java @@ -2,26 +2,28 @@ import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.fail; import java.io.InputStream; import java.sql.Connection; import java.sql.SQLException; import java.sql.Statement; import java.util.UUID; -import net.snowflake.client.category.TestCategoryOthers; +import net.snowflake.client.category.TestTags; import net.snowflake.client.jdbc.BaseJDBCTest; import net.snowflake.client.jdbc.SnowflakeConnection; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class CloudStorageClientLatestIT extends BaseJDBCTest { /** * Test for SNOW-565154 - it was waiting for ~5 minutes so the test is waiting much shorter time */ - @Test(timeout = 30000L) + @Test + @Timeout(30) public void testDownloadStreamShouldFailFastOnNotExistingFile() throws Throwable { String stageName = "testDownloadStream_stage_" + UUID.randomUUID().toString().replaceAll("-", "_"); diff --git a/src/test/java/net/snowflake/client/jdbc/cloud/storage/EncryptionProviderTest.java b/src/test/java/net/snowflake/client/jdbc/cloud/storage/EncryptionProviderTest.java index a560d1f81..61cd07769 100644 --- a/src/test/java/net/snowflake/client/jdbc/cloud/storage/EncryptionProviderTest.java +++ b/src/test/java/net/snowflake/client/jdbc/cloud/storage/EncryptionProviderTest.java @@ -1,6 +1,6 @@ package net.snowflake.client.jdbc.cloud.storage; -import static org.junit.Assert.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; @@ -16,8 +16,8 @@ import net.snowflake.common.core.RemoteStoreFileEncryptionMaterial; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.mockito.ArgumentCaptor; public class EncryptionProviderTest { @@ -41,7 +41,7 @@ public class EncryptionProviderTest { byte[] plainText = "the quick brown fox jumps over the lazy dog".getBytes(StandardCharsets.UTF_8); - @Before + @BeforeEach public void setUp() { encMat.setQueryStageMasterKey(queryStageMasterKey); encMat.setSmkId(123); diff --git a/src/test/java/net/snowflake/client/jdbc/cloud/storage/GcmEncryptionProviderTest.java b/src/test/java/net/snowflake/client/jdbc/cloud/storage/GcmEncryptionProviderTest.java index b853ef639..f883324d3 100644 --- a/src/test/java/net/snowflake/client/jdbc/cloud/storage/GcmEncryptionProviderTest.java +++ b/src/test/java/net/snowflake/client/jdbc/cloud/storage/GcmEncryptionProviderTest.java @@ -1,8 +1,8 @@ package net.snowflake.client.jdbc.cloud.storage; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThrows; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; import static org.mockito.Mockito.mock; import 
static org.mockito.Mockito.verify; @@ -25,8 +25,8 @@ import net.snowflake.common.core.RemoteStoreFileEncryptionMaterial; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.mockito.ArgumentCaptor; public class GcmEncryptionProviderTest { @@ -58,7 +58,7 @@ public class GcmEncryptionProviderTest { byte[] dataAad = "data aad".getBytes(StandardCharsets.UTF_8); byte[] keyAad = "key aad".getBytes(StandardCharsets.UTF_8); - @Before + @BeforeEach public void setUp() { encMat.setQueryStageMasterKey(queryStageMasterKey); encMat.setSmkId(123); @@ -138,21 +138,21 @@ public void testDecryptStreamWithInvalidKey() throws Exception { InputStream plainTextStream = new ByteArrayInputStream(plainText); byte[] cipherText = encryptStream(plainTextStream, dataAad, keyAad); + + byte[] encryptedKey = encKeyArgumentCaptor.getValue(); + encryptedKey[0] = (byte) ((encryptedKey[0] + 1) % 255); assertThrows( AEADBadTagException.class, - () -> { - byte[] encryptedKey = encKeyArgumentCaptor.getValue(); - encryptedKey[0] = (byte) ((encryptedKey[0] + 1) % 255); - IOUtils.toByteArray( - GcmEncryptionProvider.decryptStream( - new ByteArrayInputStream(cipherText), - Base64.getEncoder().encodeToString(encryptedKey), - Base64.getEncoder().encodeToString(dataIvDataArgumentCaptor.getValue()), - Base64.getEncoder().encodeToString(keyIvDataArgumentCaptor.getValue()), - encMat, - dataAad == null ? "" : Base64.getEncoder().encodeToString(dataAad), - keyAad == null ? "" : Base64.getEncoder().encodeToString(keyAad))); - }); + () -> + IOUtils.toByteArray( + GcmEncryptionProvider.decryptStream( + new ByteArrayInputStream(cipherText), + Base64.getEncoder().encodeToString(encryptedKey), + Base64.getEncoder().encodeToString(dataIvDataArgumentCaptor.getValue()), + Base64.getEncoder().encodeToString(keyIvDataArgumentCaptor.getValue()), + encMat, + dataAad == null ? "" : Base64.getEncoder().encodeToString(dataAad), + keyAad == null ? 
"" : Base64.getEncoder().encodeToString(keyAad)))); } @Test @@ -160,12 +160,12 @@ public void testDecryptStreamWithInvalidDataIV() throws Exception { InputStream plainTextStream = new ByteArrayInputStream(plainText); byte[] cipherText = encryptStream(plainTextStream, dataAad, keyAad); + byte[] dataIvBase64 = dataIvDataArgumentCaptor.getValue(); + dataIvBase64[0] = (byte) ((dataIvBase64[0] + 1) % 255); IOException ioException = assertThrows( IOException.class, () -> { - byte[] dataIvBase64 = dataIvDataArgumentCaptor.getValue(); - dataIvBase64[0] = (byte) ((dataIvBase64[0] + 1) % 255); IOUtils.toByteArray( GcmEncryptionProvider.decryptStream( new ByteArrayInputStream(cipherText), @@ -184,11 +184,11 @@ public void testDecryptStreamWithInvalidKeyIV() throws Exception { InputStream plainTextStream = new ByteArrayInputStream(plainText); byte[] cipherText = encryptStream(plainTextStream, dataAad, keyAad); + byte[] keyIvBase64 = keyIvDataArgumentCaptor.getValue(); + keyIvBase64[0] = (byte) ((keyIvBase64[0] + 1) % 255); assertThrows( AEADBadTagException.class, () -> { - byte[] keyIvBase64 = keyIvDataArgumentCaptor.getValue(); - keyIvBase64[0] = (byte) ((keyIvBase64[0] + 1) % 255); IOUtils.toByteArray( GcmEncryptionProvider.decryptStream( new ByteArrayInputStream(cipherText), diff --git a/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClientLatestIT.java b/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClientLatestIT.java index 05050b669..4bca15b3a 100644 --- a/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClientLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClientLatestIT.java @@ -1,9 +1,9 @@ package net.snowflake.client.jdbc.cloud.storage; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThrows; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.Mockito.spy; import com.amazonaws.services.kms.model.UnsupportedOperationException; @@ -11,9 +11,8 @@ import java.sql.Connection; import java.sql.SQLException; import java.util.ArrayList; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryOthers; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.SFSession; import net.snowflake.client.core.SFStatement; import net.snowflake.client.jdbc.BaseJDBCTest; @@ -21,13 +20,13 @@ import net.snowflake.client.jdbc.SnowflakeFileTransferAgent; import net.snowflake.client.jdbc.SnowflakeSQLLoggedException; import net.snowflake.common.core.RemoteStoreFileEncryptionMaterial; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class SnowflakeAzureClientLatestIT extends BaseJDBCTest { @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testAzureClientSetupInvalidEncryptionKeySize() throws SQLException { try (Connection connection = getConnection("azureaccount")) { SFSession 
sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); diff --git a/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClientTest.java b/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClientTest.java index f0ba5b3d4..efc49f41f 100644 --- a/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClientTest.java +++ b/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeAzureClientTest.java @@ -4,11 +4,11 @@ package net.snowflake.client.jdbc.cloud.storage; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import com.microsoft.azure.storage.StorageExtendedErrorInformation; import java.util.LinkedHashMap; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class SnowflakeAzureClientTest { @Test diff --git a/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3ClientLatestIT.java b/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3ClientLatestIT.java index d9e1821c2..b46064778 100644 --- a/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3ClientLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3ClientLatestIT.java @@ -3,8 +3,9 @@ */ package net.snowflake.client.jdbc.cloud.storage; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import com.amazonaws.AmazonServiceException; import com.amazonaws.ClientConfiguration; @@ -13,26 +14,24 @@ import java.sql.SQLException; import java.sql.Statement; import java.util.Properties; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryOthers; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.SFSession; import net.snowflake.client.core.SFStatement; import net.snowflake.client.jdbc.BaseJDBCTest; import net.snowflake.client.jdbc.SnowflakeConnectionV1; import net.snowflake.client.jdbc.SnowflakeFileTransferAgent; import net.snowflake.common.core.RemoteStoreFileEncryptionMaterial; -import org.junit.Assert; -import org.junit.Ignore; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; -@Category(TestCategoryOthers.class) +@Tag(TestTags.OTHERS) public class SnowflakeS3ClientLatestIT extends BaseJDBCTest { @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testS3Client256Encryption() throws SQLException { try (Connection connection = getConnection("s3testaccount")) { SFSession sfSession = connection.unwrap(SnowflakeConnectionV1.class).getSfSession(); @@ -73,7 +72,7 @@ public void testS3Client256Encryption() throws SQLException { * @throws SQLException */ @Test - @Ignore + @Disabled public void testS3ConnectionWithProxyEnvVariablesSet() throws SQLException { String testStageName = "s3TestStage"; @@ -103,7 +102,7 @@ public void testS3ConnectionWithProxyEnvVariablesSet() throws SQLException { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + 
@DontRunOnGithubActions public void testIsClientException400Or404() throws SQLException { AmazonServiceException servEx = new AmazonServiceException("S3 operation failed"); servEx.setServiceName("Amazon S3"); @@ -138,7 +137,7 @@ public void testIsClientException400Or404() throws SQLException { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testPutGetMaxRetries() throws SQLException { Properties props = new Properties(); props.put("putGetMaxRetries", 1); @@ -170,7 +169,7 @@ public void testPutGetMaxRetries() throws SQLException { spy.handleStorageException( new InterruptedException(), 0, "download", sfSession, command, null); } catch (Exception e) { - Assert.fail("Should not have exception here"); + fail("Should not have exception here"); } Mockito.verify(spy, Mockito.never()).renew(Mockito.anyMap()); spy.handleStorageException( diff --git a/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3ClientTest.java b/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3ClientTest.java index 3daddf3df..91366cbaf 100644 --- a/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3ClientTest.java +++ b/src/test/java/net/snowflake/client/jdbc/cloud/storage/SnowflakeS3ClientTest.java @@ -3,9 +3,9 @@ */ package net.snowflake.client.jdbc.cloud.storage; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class SnowflakeS3ClientTest { diff --git a/src/test/java/net/snowflake/client/jdbc/cloud/storage/StageInfoGcsCustomEndpointTest.java b/src/test/java/net/snowflake/client/jdbc/cloud/storage/StageInfoGcsCustomEndpointTest.java index f8e00d7eb..6b27066c6 100644 --- a/src/test/java/net/snowflake/client/jdbc/cloud/storage/StageInfoGcsCustomEndpointTest.java +++ b/src/test/java/net/snowflake/client/jdbc/cloud/storage/StageInfoGcsCustomEndpointTest.java @@ -3,55 +3,57 @@ */ package net.snowflake.client.jdbc.cloud.storage; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import java.util.Arrays; import java.util.HashMap; +import java.util.List; import java.util.Optional; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import net.snowflake.client.providers.SnowflakeArgumentsProvider; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsSource; -@RunWith(Parameterized.class) public class StageInfoGcsCustomEndpointTest { - private final String region; - private final boolean useRegionalUrl; - private final String endPoint; - private final Optional expectedHost; - public StageInfoGcsCustomEndpointTest( - String region, boolean useRegionalUrl, String endPoint, Optional expectedHost) { - this.region = region; - this.useRegionalUrl = useRegionalUrl; - this.endPoint = endPoint; - this.expectedHost = expectedHost; + private static class DataProvider extends SnowflakeArgumentsProvider { + @Override + protected List rawArguments(ExtensionContext context) { + return Arrays.asList( + Arguments.of("US-CENTRAL1", false, null, Optional.empty()), + Arguments.of("US-CENTRAL1", false, "", Optional.empty()), + Arguments.of("US-CENTRAL1", false, "null", Optional.empty()), + Arguments.of("US-CENTRAL1", false, " ", 
Optional.empty()), + Arguments.of("US-CENTRAL1", false, "example.com", Optional.of("example.com")), + Arguments.of( + "ME-CENTRAL2", false, null, Optional.of("storage.me-central2.rep.googleapis.com")), + Arguments.of( + "ME-CENTRAL2", true, null, Optional.of("storage.me-central2.rep.googleapis.com")), + Arguments.of( + "ME-CENTRAL2", true, "", Optional.of("storage.me-central2.rep.googleapis.com")), + Arguments.of( + "ME-CENTRAL2", true, " ", Optional.of("storage.me-central2.rep.googleapis.com")), + Arguments.of("ME-CENTRAL2", true, "example.com", Optional.of("example.com")), + Arguments.of( + "US-CENTRAL1", true, null, Optional.of("storage.us-central1.rep.googleapis.com")), + Arguments.of( + "US-CENTRAL1", true, "", Optional.of("storage.us-central1.rep.googleapis.com")), + Arguments.of( + "US-CENTRAL1", true, " ", Optional.of("storage.us-central1.rep.googleapis.com")), + Arguments.of( + "US-CENTRAL1", true, "null", Optional.of("storage.us-central1.rep.googleapis.com")), + Arguments.of("US-CENTRAL1", true, "example.com", Optional.of("example.com"))); + } } - @Test - public void shouldReturnEmptyGCSRegionalUrlWhenNotMeCentral1AndNotUseRegionalUrl() { + @ParameterizedTest + @ArgumentsSource(DataProvider.class) + public void shouldReturnEmptyGCSRegionalUrlWhenNotMeCentral1AndNotUseRegionalUrl( + String region, boolean useRegionalUrl, String endPoint, Optional expectedHost) { StageInfo stageInfo = StageInfo.createStageInfo("GCS", "bla", new HashMap<>(), region, endPoint, "account", true); stageInfo.setUseRegionalUrl(useRegionalUrl); assertEquals(expectedHost, stageInfo.gcsCustomEndpoint()); } - - @Parameterized.Parameters() - public static Object[][] data() { - return new Object[][] { - {"US-CENTRAL1", false, null, Optional.empty()}, - {"US-CENTRAL1", false, "", Optional.empty()}, - {"US-CENTRAL1", false, "null", Optional.empty()}, - {"US-CENTRAL1", false, " ", Optional.empty()}, - {"US-CENTRAL1", false, "example.com", Optional.of("example.com")}, - {"ME-CENTRAL2", false, null, Optional.of("storage.me-central2.rep.googleapis.com")}, - {"ME-CENTRAL2", true, null, Optional.of("storage.me-central2.rep.googleapis.com")}, - {"ME-CENTRAL2", true, "", Optional.of("storage.me-central2.rep.googleapis.com")}, - {"ME-CENTRAL2", true, " ", Optional.of("storage.me-central2.rep.googleapis.com")}, - {"ME-CENTRAL2", true, "example.com", Optional.of("example.com")}, - {"US-CENTRAL1", true, null, Optional.of("storage.us-central1.rep.googleapis.com")}, - {"US-CENTRAL1", true, "", Optional.of("storage.us-central1.rep.googleapis.com")}, - {"US-CENTRAL1", true, " ", Optional.of("storage.us-central1.rep.googleapis.com")}, - {"US-CENTRAL1", true, "null", Optional.of("storage.us-central1.rep.googleapis.com")}, - {"US-CENTRAL1", true, "example.com", Optional.of("example.com")}, - }; - } } diff --git a/src/test/java/net/snowflake/client/jdbc/diagnostic/DiagnosticContextLatestIT.java b/src/test/java/net/snowflake/client/jdbc/diagnostic/DiagnosticContextLatestIT.java index 042c6b0f4..8df4f988e 100644 --- a/src/test/java/net/snowflake/client/jdbc/diagnostic/DiagnosticContextLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/diagnostic/DiagnosticContextLatestIT.java @@ -1,8 +1,8 @@ package net.snowflake.client.jdbc.diagnostic; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static 
org.junit.jupiter.api.Assertions.assertTrue; import java.io.File; import java.net.InetSocketAddress; @@ -11,15 +11,15 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import net.snowflake.client.category.TestCategoryDiagnostic; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.SFSessionProperty; -import org.junit.After; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryDiagnostic.class) +@Tag(TestTags.DIAGNOSTIC) public class DiagnosticContextLatestIT { private static final String HTTP_NON_PROXY_HOSTS = "http.nonProxyHosts"; @@ -34,7 +34,7 @@ public class DiagnosticContextLatestIT { private static String oldJvmHttpsProxyHost; private static String oldJvmHttpsProxyPort; - @BeforeClass + @BeforeAll public static void init() { oldJvmNonProxyHosts = System.getProperty(HTTP_NON_PROXY_HOSTS); oldJvmHttpProxyHost = System.getProperty(HTTP_PROXY_HOST); @@ -43,7 +43,7 @@ public static void init() { oldJvmHttpsProxyPort = System.getProperty(HTTPS_PROXY_PORT); } - @Before + @BeforeEach public void clearJvmProperties() { System.clearProperty(HTTP_NON_PROXY_HOSTS); System.clearProperty(HTTP_PROXY_HOST); @@ -97,7 +97,7 @@ public void parseAllowListFileTest() { String testFailedMessage = "The lists of SnowflakeEndpoints in mockEndpoints and endpointsFromTestFile should be identical"; - assertTrue(testFailedMessage, endpointsFromTestFile.containsAll(mockEndpoints)); + assertTrue(endpointsFromTestFile.containsAll(mockEndpoints), testFailedMessage); } /** @@ -112,24 +112,24 @@ public void testEmptyProxyConfig() { DiagnosticContext diagnosticContext = new DiagnosticContext(connectionPropertiesMap); - assertFalse("Proxy configurations should be empty", diagnosticContext.isProxyEnabled()); + assertFalse(diagnosticContext.isProxyEnabled(), "Proxy configurations should be empty"); assertTrue( - "getHttpProxyHost() must return an empty string in the absence of proxy configuration", - diagnosticContext.getHttpProxyHost().isEmpty()); + diagnosticContext.getHttpProxyHost().isEmpty(), + "getHttpProxyHost() must return an empty string in the absence of proxy configuration"); assertEquals( - "getHttpProxyPort() must return -1 in the absence of proxy configuration", -1, - diagnosticContext.getHttpProxyPort()); + diagnosticContext.getHttpProxyPort(), + "getHttpProxyPort() must return -1 in the absence of proxy configuration"); assertTrue( - "getHttpsProxyHost() must return an empty string in the absence of proxy configuration", - diagnosticContext.getHttpsProxyHost().isEmpty()); + diagnosticContext.getHttpsProxyHost().isEmpty(), + "getHttpsProxyHost() must return an empty string in the absence of proxy configuration"); assertEquals( - "getHttpsProxyPort() must return -1 in the absence of proxy configuration", -1, - diagnosticContext.getHttpsProxyPort()); + diagnosticContext.getHttpsProxyPort(), + "getHttpsProxyPort() must return -1 in the absence of proxy configuration"); assertTrue( - "getHttpNonProxyHosts() must return an empty string in the absence of proxy configuration", - diagnosticContext.getHttpNonProxyHosts().isEmpty()); + diagnosticContext.getHttpNonProxyHosts().isEmpty(), + "getHttpNonProxyHosts() must return an empty string in the absence of proxy 
configuration"); } /** Test added in version > 3.16.1 */ @@ -329,7 +329,7 @@ public void testgetNoProxyAfterOverridingJvm() { assertEquals(noProxy, diagnosticContext.getProxy(host4)); } - @After + @AfterEach public void restoreJvmArguments() { System.clearProperty(HTTP_NON_PROXY_HOSTS); System.clearProperty(HTTP_PROXY_HOST); diff --git a/src/test/java/net/snowflake/client/jdbc/diagnostic/SnowflakeEndpointTest.java b/src/test/java/net/snowflake/client/jdbc/diagnostic/SnowflakeEndpointTest.java index a926a649e..6072c3453 100644 --- a/src/test/java/net/snowflake/client/jdbc/diagnostic/SnowflakeEndpointTest.java +++ b/src/test/java/net/snowflake/client/jdbc/diagnostic/SnowflakeEndpointTest.java @@ -1,10 +1,10 @@ package net.snowflake.client.jdbc.diagnostic; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.util.HashMap; import java.util.Map; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class SnowflakeEndpointTest { @@ -20,9 +20,9 @@ public void shouldDetectPrivateLinkEndpoint() { (host, expectedToBePrivateLink) -> { SnowflakeEndpoint endpoint = new SnowflakeEndpoint("SNOWFLAKE_DEPLOYMENT", host, 443); assertEquals( - String.format("Expecting %s to be private link: %s", host, expectedToBePrivateLink), expectedToBePrivateLink, - endpoint.isPrivateLink()); + endpoint.isPrivateLink(), + String.format("Expecting %s to be private link: %s", host, expectedToBePrivateLink)); }); } } diff --git a/src/test/java/net/snowflake/client/jdbc/structuredtypes/ResultSetStructuredTypesLatestIT.java b/src/test/java/net/snowflake/client/jdbc/structuredtypes/ResultSetStructuredTypesLatestIT.java index 2857634f8..1660ea57d 100644 --- a/src/test/java/net/snowflake/client/jdbc/structuredtypes/ResultSetStructuredTypesLatestIT.java +++ b/src/test/java/net/snowflake/client/jdbc/structuredtypes/ResultSetStructuredTypesLatestIT.java @@ -3,11 +3,11 @@ */ package net.snowflake.client.jdbc.structuredtypes; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertThrows; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.math.BigDecimal; import java.sql.Connection; @@ -24,11 +24,10 @@ import java.time.ZoneId; import java.util.List; import java.util.Map; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; import net.snowflake.client.TestUtil; import net.snowflake.client.ThrowingConsumer; -import net.snowflake.client.category.TestCategoryResultSet; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.structs.SnowflakeObjectTypeFactories; import net.snowflake.client.jdbc.BaseJDBCTest; import net.snowflake.client.jdbc.ResultSetFormatType; @@ -39,34 +38,17 @@ import net.snowflake.client.jdbc.structuredtypes.sqldata.NullableFieldsSqlData; import net.snowflake.client.jdbc.structuredtypes.sqldata.SimpleClass; import net.snowflake.client.jdbc.structuredtypes.sqldata.StringClass; -import org.junit.After; -import org.junit.Assume; -import org.junit.Before; -import 
org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; - -@RunWith(Parameterized.class) -@Category(TestCategoryResultSet.class) +import net.snowflake.client.providers.ResultFormatProvider; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assumptions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ArgumentsSource; + +@Tag(TestTags.RESULT_SET) public class ResultSetStructuredTypesLatestIT extends BaseJDBCTest { - - @Parameterized.Parameters(name = "format={0}") - public static Object[][] data() { - return new Object[][] { - {ResultSetFormatType.JSON}, - {ResultSetFormatType.ARROW_WITH_JSON_STRUCTURED_TYPES}, - {ResultSetFormatType.NATIVE_ARROW} - }; - } - - private final ResultSetFormatType queryResultFormat; - - public ResultSetStructuredTypesLatestIT(ResultSetFormatType queryResultFormat) { - this.queryResultFormat = queryResultFormat; - } - - @Before + @BeforeEach public void setup() { SnowflakeObjectTypeFactories.register(StringClass.class, StringClass::new); SnowflakeObjectTypeFactories.register(SimpleClass.class, SimpleClass::new); @@ -74,7 +56,7 @@ public void setup() { SnowflakeObjectTypeFactories.register(NullableFieldsSqlData.class, NullableFieldsSqlData::new); } - @After + @AfterEach public void clean() { SnowflakeObjectTypeFactories.unregister(StringClass.class); SnowflakeObjectTypeFactories.unregister(SimpleClass.class); @@ -82,7 +64,7 @@ public void clean() { SnowflakeObjectTypeFactories.unregister(NullableFieldsSqlData.class); } - public Connection init() throws SQLException { + public Connection init(ResultSetFormatType format) throws SQLException { Connection conn = BaseJDBCTest.getConnection(BaseJDBCTest.DONT_INJECT_SOCKET_TIMEOUT); try (Statement stmt = conn.createStatement()) { stmt.execute("alter session set ENABLE_STRUCTURED_TYPES_IN_CLIENT_RESPONSE = true"); @@ -90,9 +72,9 @@ public Connection init() throws SQLException { stmt.execute("ALTER SESSION SET TIMEZONE = 'Europe/Warsaw'"); stmt.execute( "alter session set jdbc_query_result_format = '" - + queryResultFormat.sessionParameterTypeValue + + format.sessionParameterTypeValue + "'"); - if (queryResultFormat == ResultSetFormatType.NATIVE_ARROW) { + if (format == ResultSetFormatType.NATIVE_ARROW) { stmt.execute("alter session set ENABLE_STRUCTURED_TYPES_NATIVE_ARROW_FORMAT = true"); stmt.execute("alter session set FORCE_ENABLE_STRUCTURED_TYPES_NATIVE_ARROW_FORMAT = true"); } @@ -100,20 +82,23 @@ public Connection init() throws SQLException { return conn; } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testMapStructToObjectWithFactory() throws SQLException { - testMapJson(true); + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testMapStructToObjectWithFactory(ResultSetFormatType format) throws SQLException { + testMapJson(true, format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testMapStructToObjectWithReflection() throws SQLException { - testMapJson(false); - testMapJson(true); + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testMapStructToObjectWithReflection(ResultSetFormatType format) throws SQLException { + testMapJson(false, format); + 
testMapJson(true, format); } - private void testMapJson(boolean registerFactory) throws SQLException { + private void testMapJson(boolean registerFactory, ResultSetFormatType format) + throws SQLException { if (registerFactory) { SnowflakeObjectTypeFactories.register(StringClass.class, StringClass::new); } else { @@ -124,25 +109,29 @@ private void testMapJson(boolean registerFactory) throws SQLException { (resultSet) -> { StringClass object = resultSet.getObject(1, StringClass.class); assertEquals("a", object.getString()); - }); + }, + format); SnowflakeObjectTypeFactories.register(StringClass.class, StringClass::new); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testMapNullStruct() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testMapNullStruct(ResultSetFormatType format) throws SQLException { withFirstRow( "select null::OBJECT(string VARCHAR)", (resultSet) -> { StringClass object = resultSet.getObject(1, StringClass.class); assertNull(object); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testMapStructAllTypes() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testMapStructAllTypes(ResultSetFormatType format) throws SQLException { + try (Connection connection = init(format); Statement statement = connection.createStatement()) { statement.execute("ALTER SESSION SET TIMEZONE = 'Europe/Warsaw'"); try (ResultSet resultSet = statement.executeQuery(AllTypesClass.ALL_TYPES_QUERY); ) { @@ -179,7 +168,7 @@ public void testMapStructAllTypes() throws SQLException { assertEquals("b", object.getSimpleClass().getString()); assertEquals(Integer.valueOf(2), object.getSimpleClass().getIntValue()); - if (queryResultFormat == ResultSetFormatType.NATIVE_ARROW) { + if (format == ResultSetFormatType.NATIVE_ARROW) { // Only verify getString for Arrow since JSON representations have difficulties with // floating point toString conversion (3.300000000000000e+00 vs 3.3 in native arrow) String expectedArrowGetStringResult = @@ -190,11 +179,13 @@ public void testMapStructAllTypes() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testReturnStructAsStringIfTypeWasNotIndicated() throws SQLException { - Assume.assumeTrue(queryResultFormat != ResultSetFormatType.NATIVE_ARROW); - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testReturnStructAsStringIfTypeWasNotIndicated(ResultSetFormatType format) + throws SQLException { + Assumptions.assumeTrue(format != ResultSetFormatType.NATIVE_ARROW); + try (Connection connection = init(format); Statement statement = connection.createStatement()) { statement.execute( "alter session set " @@ -237,26 +228,30 @@ public void testReturnStructAsStringIfTypeWasNotIndicated() throws SQLException } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testThrowingGettingObjectIfTypeWasNotIndicatedAndFormatNativeArrow() - throws SQLException { - Assume.assumeTrue(queryResultFormat == ResultSetFormatType.NATIVE_ARROW); + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + 
public void testThrowingGettingObjectIfTypeWasNotIndicatedAndFormatNativeArrow( + ResultSetFormatType format) throws SQLException { + Assumptions.assumeTrue(format == ResultSetFormatType.NATIVE_ARROW); withFirstRow( "select {'string':'a'}::OBJECT(string VARCHAR)", (resultSet) -> { assertThrows(SQLException.class, () -> resultSet.getObject(1)); - }); + }, + format); withFirstRow( "select {'x':{'string':'one'},'y':{'string':'two'},'z':{'string':'three'}}::MAP(VARCHAR, OBJECT(string VARCHAR));", (resultSet) -> { assertThrows(SQLException.class, () -> resultSet.getObject(1, Map.class)); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testReturnAsArrayOfSqlData() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testReturnAsArrayOfSqlData(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT ARRAY_CONSTRUCT({'string':'one'}, {'string':'two'}, {'string':'three'})::ARRAY(OBJECT(string VARCHAR))", (resultSet) -> { @@ -265,12 +260,15 @@ public void testReturnAsArrayOfSqlData() throws SQLException { assertEquals("one", resultArray[0].getString()); assertEquals("two", resultArray[1].getString()); assertEquals("three", resultArray[2].getString()); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testReturnAsArrayOfNullableFieldsInSqlData() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testReturnAsArrayOfNullableFieldsInSqlData(ResultSetFormatType format) + throws SQLException { withFirstRow( "SELECT OBJECT_CONSTRUCT_KEEP_NULL('string', null, 'nullableIntValue', null, 'nullableLongValue', null, " + "'date', null, 'bd', null, 'bytes', null, 'longValue', null)" @@ -287,13 +285,15 @@ public void testReturnAsArrayOfNullableFieldsInSqlData() throws SQLException { assertNull(result.getBd()); assertNull(result.getBytes()); assertEquals(Long.valueOf(0), result.getLongValue()); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testReturnNullsForAllTpesInSqlData() throws SQLException { - try (Connection connection = init(); + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testReturnNullsForAllTpesInSqlData(ResultSetFormatType format) throws SQLException { + try (Connection connection = init(format); Statement statement = connection.createStatement()) { statement.execute("ALTER SESSION SET TIMEZONE = 'Europe/Warsaw'"); try (ResultSet resultSet = @@ -324,9 +324,10 @@ public void testReturnNullsForAllTpesInSqlData() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testReturnAsArrayOfString() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testReturnAsArrayOfString(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT ARRAY_CONSTRUCT('one', 'two','three')::ARRAY(VARCHAR)", (resultSet) -> { @@ -335,13 +336,15 @@ public void testReturnAsArrayOfString() throws SQLException { assertEquals("one", resultArray[0]); assertEquals("two", resultArray[1]); assertEquals("three", resultArray[2]); - }); + }, + format); } - @Test - 
@ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testReturnAsArrayOfNullableString() throws SQLException { - Assume.assumeTrue(queryResultFormat == ResultSetFormatType.NATIVE_ARROW); + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testReturnAsArrayOfNullableString(ResultSetFormatType format) throws SQLException { + Assumptions.assumeTrue(format == ResultSetFormatType.NATIVE_ARROW); withFirstRow( "SELECT ARRAY_CONSTRUCT('one', 'two', null)::ARRAY(VARCHAR)", (resultSet) -> { @@ -350,24 +353,28 @@ public void testReturnAsArrayOfNullableString() throws SQLException { assertEquals("one", resultArray[0]); assertEquals("two", resultArray[1]); assertNull(resultArray[2]); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testReturnNullAsArray() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testReturnNullAsArray(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT null::ARRAY(VARCHAR)", (resultSet) -> { String[] resultArray = resultSet.unwrap(SnowflakeBaseResultSet.class).getArray(1, String.class); assertNull(resultArray); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testReturnAsListOfIntegers() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testReturnAsListOfIntegers(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT ARRAY_CONSTRUCT(1,2,3)::ARRAY(INTEGER)", (resultSet) -> { @@ -376,12 +383,14 @@ public void testReturnAsListOfIntegers() throws SQLException { assertEquals(Integer.valueOf(1), resultList.get(0)); assertEquals(Integer.valueOf(2), resultList.get(1)); assertEquals(Integer.valueOf(3), resultList.get(2)); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testReturnAsListOfFloat() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testReturnAsListOfFloat(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT ARRAY_CONSTRUCT(1.1,2.2,3.3)::ARRAY(FLOAT)", (resultSet) -> { @@ -390,12 +399,14 @@ public void testReturnAsListOfFloat() throws SQLException { assertEquals(Float.valueOf(1.1f), resultList[0]); assertEquals(Float.valueOf(2.2f), resultList[1]); assertEquals(Float.valueOf(3.3f), resultList[2]); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testReturnAsListOfDouble() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testReturnAsListOfDouble(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT ARRAY_CONSTRUCT(1.1,2.2,3.3)::ARRAY(DOUBLE)", (resultSet) -> { @@ -404,12 +415,14 @@ public void testReturnAsListOfDouble() throws SQLException { assertEquals(Double.valueOf(1.1), resultList.get(0)); assertEquals(Double.valueOf(2.2), resultList.get(1)); assertEquals(Double.valueOf(3.3), resultList.get(2)); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testReturnAsMap() throws SQLException { + 
@ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testReturnAsMap(ResultSetFormatType format) throws SQLException { withFirstRow( "select {'x':{'string':'one'},'y':{'string':'two'},'z':{'string':'three'}}::MAP(VARCHAR, OBJECT(string VARCHAR));", (resultSet) -> { @@ -418,13 +431,15 @@ public void testReturnAsMap() throws SQLException { assertEquals("one", map.get("x").getString()); assertEquals("two", map.get("y").getString()); assertEquals("three", map.get("z").getString()); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testReturnAsMapByGetObject() throws SQLException { - Assume.assumeTrue(queryResultFormat != ResultSetFormatType.NATIVE_ARROW); + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testReturnAsMapByGetObject(ResultSetFormatType format) throws SQLException { + Assumptions.assumeTrue(format != ResultSetFormatType.NATIVE_ARROW); withFirstRow( "select {'x':{'string':'one'},'y':{'string':'two'},'z':{'string':'three'}}::MAP(VARCHAR, OBJECT(string VARCHAR));", (resultSet) -> { @@ -432,12 +447,14 @@ public void testReturnAsMapByGetObject() throws SQLException { assertEquals("one", map.get("x").get("string")); assertEquals("two", map.get("y").get("string")); assertEquals("three", map.get("z").get("string")); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testReturnAsMapWithNullableValues() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testReturnAsMapWithNullableValues(ResultSetFormatType format) throws SQLException { withFirstRow( "select {'x':{'string':'one'},'y':null,'z':{'string':'three'}}::MAP(VARCHAR, OBJECT(string VARCHAR));", (resultSet) -> { @@ -446,36 +463,42 @@ public void testReturnAsMapWithNullableValues() throws SQLException { assertEquals("one", map.get("x").getString()); assertNull(map.get("y")); assertEquals("three", map.get("z").getString()); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testReturnNullAsObjectOfTypeMap() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testReturnNullAsObjectOfTypeMap(ResultSetFormatType format) throws SQLException { withFirstRow( "select null::MAP(VARCHAR, OBJECT(string VARCHAR));", (resultSet) -> { Map map = resultSet.unwrap(SnowflakeBaseResultSet.class).getObject(1, Map.class); assertNull(map); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testReturnNullAsMap() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testReturnNullAsMap(ResultSetFormatType format) throws SQLException { withFirstRow( "select null::MAP(VARCHAR, OBJECT(string VARCHAR));", (resultSet) -> { Map map = resultSet.unwrap(SnowflakeBaseResultSet.class).getMap(1, StringClass.class); assertNull(map); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testReturnAsMapOfTimestampsNtz() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void 
testReturnAsMapOfTimestampsNtz(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT {'x': TO_TIMESTAMP_NTZ('2021-12-23 09:44:44'), 'y': TO_TIMESTAMP_NTZ('2021-12-24 09:55:55')}::MAP(VARCHAR, TIMESTAMP)", (resultSet) -> { @@ -491,12 +514,14 @@ public void testReturnAsMapOfTimestampsNtz() throws SQLException { .atZone(ZoneId.of("Europe/Warsaw")) .toInstant(), map.get("y").toInstant()); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testReturnAsMapOfTimestampsLtz() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testReturnAsMapOfTimestampsLtz(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT {'x': TO_TIMESTAMP_LTZ('2021-12-23 09:44:44'), 'y': TO_TIMESTAMP_LTZ('2021-12-24 09:55:55')}::MAP(VARCHAR, TIMESTAMP_LTZ)", (resultSet) -> { @@ -512,12 +537,14 @@ public void testReturnAsMapOfTimestampsLtz() throws SQLException { .atZone(ZoneId.of("Europe/Warsaw")) .toInstant(), map.get("y").toInstant()); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testReturnAsMapOfLong() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testReturnAsMapOfLong(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT {'x':1, 'y':2, 'z':3}::MAP(VARCHAR, BIGINT)", (resultSet) -> { @@ -526,12 +553,14 @@ public void testReturnAsMapOfLong() throws SQLException { assertEquals(Long.valueOf(1), map.get("x")); assertEquals(Long.valueOf(2), map.get("y")); assertEquals(Long.valueOf(3), map.get("z")); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testReturnAsMapOfDate() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testReturnAsMapOfDate(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT {'x':'2023-12-24', 'y':'2023-12-25'}::MAP(VARCHAR, DATE)", (resultSet) -> { @@ -541,12 +570,14 @@ public void testReturnAsMapOfDate() throws SQLException { Date.valueOf(LocalDate.of(2023, 12, 24)).toString(), map.get("x").toString()); assertEquals( Date.valueOf(LocalDate.of(2023, 12, 25)).toString(), map.get("y").toString()); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testReturnAsMapOfTime() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testReturnAsMapOfTime(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT {'x':'12:34:56', 'y':'12:34:58'}::MAP(VARCHAR, TIME)", (resultSet) -> { @@ -554,12 +585,14 @@ public void testReturnAsMapOfTime() throws SQLException { resultSet.unwrap(SnowflakeBaseResultSet.class).getMap(1, Time.class); assertEquals(Time.valueOf(LocalTime.of(12, 34, 56)), map.get("x")); assertEquals(Time.valueOf(LocalTime.of(12, 34, 58)), map.get("y")); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testReturnAsMapOfBoolean() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testReturnAsMapOfBoolean(ResultSetFormatType format) throws 
SQLException { withFirstRow( "SELECT {'x':'true', 'y':0}::MAP(VARCHAR, BOOLEAN)", (resultSet) -> { @@ -567,12 +600,14 @@ public void testReturnAsMapOfBoolean() throws SQLException { resultSet.unwrap(SnowflakeBaseResultSet.class).getMap(1, Boolean.class); assertEquals(Boolean.TRUE, map.get("x")); assertEquals(Boolean.FALSE, map.get("y")); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testReturnAsList() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testReturnAsList(ResultSetFormatType format) throws SQLException { withFirstRow( "select [{'string':'one'},{'string': 'two'}]::ARRAY(OBJECT(string varchar))", (resultSet) -> { @@ -580,12 +615,14 @@ public void testReturnAsList() throws SQLException { resultSet.unwrap(SnowflakeBaseResultSet.class).getList(1, StringClass.class); assertEquals("one", map.get(0).getString()); assertEquals("two", map.get(1).getString()); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testMapStructsFromChunks() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testMapStructsFromChunks(ResultSetFormatType format) throws SQLException { withFirstRow( "select {'string':'a'}::OBJECT(string VARCHAR) FROM TABLE(GENERATOR(ROWCOUNT=>30000))", (resultSet) -> { @@ -593,12 +630,14 @@ public void testMapStructsFromChunks() throws SQLException { StringClass object = resultSet.getObject(1, StringClass.class); assertEquals("a", object.getString()); } - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testMapIntegerArray() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testMapIntegerArray(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT ARRAY_CONSTRUCT(10, 20, 30)::ARRAY(INTEGER)", (resultSet) -> { @@ -606,12 +645,14 @@ public void testMapIntegerArray() throws SQLException { assertEquals(Long.valueOf(10), resultArray[0]); assertEquals(Long.valueOf(20), resultArray[1]); assertEquals(Long.valueOf(30), resultArray[2]); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testMapFixedToLongArray() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testMapFixedToLongArray(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT ARRAY_CONSTRUCT(10, 20, 30)::ARRAY(SMALLINT)", (resultSet) -> { @@ -619,14 +660,16 @@ public void testMapFixedToLongArray() throws SQLException { assertEquals(Long.valueOf("10"), resultArray[0]); assertEquals(Long.valueOf("20"), resultArray[1]); assertEquals(Long.valueOf("30"), resultArray[2]); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testMapDecimalArray() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testMapDecimalArray(ResultSetFormatType format) throws SQLException { // when: jdbc_treat_decimal_as_int=true scale=0 - try (Connection connection = init(); + try (Connection connection = init(format); Statement statement = 
connection.createStatement(); ResultSet resultSet = statement.executeQuery( @@ -639,7 +682,7 @@ public void testMapDecimalArray() throws SQLException { } // when: jdbc_treat_decimal_as_int=true scale=2 - try (Connection connection = init(); + try (Connection connection = init(format); Statement statement = connection.createStatement(); ResultSet resultSet = statement.executeQuery( @@ -652,7 +695,7 @@ public void testMapDecimalArray() throws SQLException { } // when: jdbc_treat_decimal_as_int=false scale=0 - try (Connection connection = init(); + try (Connection connection = init(format); Statement statement = connection.createStatement(); ) { statement.execute("alter session set jdbc_treat_decimal_as_int = false"); try (ResultSet resultSet = @@ -666,9 +709,10 @@ public void testMapDecimalArray() throws SQLException { } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testMapVarcharArray() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testMapVarcharArray(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT 'text', ARRAY_CONSTRUCT('10', '20','30')::ARRAY(VARCHAR)", (resultSet) -> { @@ -677,12 +721,14 @@ public void testMapVarcharArray() throws SQLException { assertEquals("10", resultArray[0]); assertEquals("20", resultArray[1]); assertEquals("30", resultArray[2]); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testMapDatesArray() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testMapDatesArray(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT ARRAY_CONSTRUCT(to_date('2023-12-24', 'YYYY-MM-DD'), to_date('2023-12-25', 'YYYY-MM-DD'))::ARRAY(DATE)", (resultSet) -> { @@ -691,12 +737,14 @@ public void testMapDatesArray() throws SQLException { Date.valueOf(LocalDate.of(2023, 12, 24)).toString(), resultArray[0].toString()); assertEquals( Date.valueOf(LocalDate.of(2023, 12, 25)).toString(), resultArray[1].toString()); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testMapTimeArray() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testMapTimeArray(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT ARRAY_CONSTRUCT(to_time('15:39:20.123'), to_time('09:12:20.123'))::ARRAY(TIME)", (resultSet) -> { @@ -704,12 +752,14 @@ public void testMapTimeArray() throws SQLException { assertEquals( Time.valueOf(LocalTime.of(15, 39, 20)).toString(), resultArray[0].toString()); assertEquals(Time.valueOf(LocalTime.of(9, 12, 20)).toString(), resultArray[1].toString()); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testMapTimestampArray() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testMapTimestampArray(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT ARRAY_CONSTRUCT(TO_TIMESTAMP_NTZ('2021-12-23 09:44:44'), TO_TIMESTAMP_NTZ('2021-12-24 09:55:55'))::ARRAY(TIMESTAMP)", (resultSet) -> { @@ -724,36 +774,42 @@ public void testMapTimestampArray() throws SQLException { .atZone(ZoneId.of("Europe/Warsaw")) 
.toInstant(), resultArray[1].toInstant()); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testMapBooleanArray() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testMapBooleanArray(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT ARRAY_CONSTRUCT(true,false)::ARRAY(BOOLEAN)", (resultSet) -> { Boolean[] resultArray = (Boolean[]) resultSet.getArray(1).getArray(); assertEquals(true, resultArray[0]); assertEquals(false, resultArray[1]); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testMapBinaryArray() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testMapBinaryArray(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT ARRAY_CONSTRUCT(TO_BINARY('616263', 'HEX'),TO_BINARY('616263', 'HEX'))::ARRAY(BINARY)", (resultSet) -> { Byte[][] resultArray = (Byte[][]) resultSet.getArray(1).getArray(); assertArrayEquals(new Byte[] {'a', 'b', 'c'}, resultArray[0]); assertArrayEquals(new Byte[] {'a', 'b', 'c'}, resultArray[1]); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testMapArrayOfStructToMap() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testMapArrayOfStructToMap(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT ARRAY_CONSTRUCT({'x': 'abc', 'y': 1}, {'x': 'def', 'y': 2} )::ARRAY(OBJECT(x VARCHAR, y INTEGER))", (resultSet) -> { @@ -764,12 +820,14 @@ public void testMapArrayOfStructToMap() throws SQLException { assertEquals(firstEntry.get("y").toString(), "1"); assertEquals(secondEntry.get("x").toString(), "def"); assertEquals(secondEntry.get("y").toString(), "2"); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testMapArrayOfArrays() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testMapArrayOfArrays(ResultSetFormatType format) throws SQLException { withFirstRow( "SELECT ARRAY_CONSTRUCT(ARRAY_CONSTRUCT({'x': 'abc', 'y': 1}, {'x': 'def', 'y': 2}) )::ARRAY(ARRAY(OBJECT(x VARCHAR, y INTEGER)))", (resultSet) -> { @@ -780,12 +838,14 @@ public void testMapArrayOfArrays() throws SQLException { assertEquals(firstEntry.get("y").toString(), "1"); assertEquals(secondEntry.get("x").toString(), "def"); assertEquals(secondEntry.get("y").toString(), "2"); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testMapNestedStructures() throws SQLException { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testMapNestedStructures(ResultSetFormatType format) throws SQLException { String structSelectStatement = "SELECT {'simpleClass': {'string': 'a', 'intValue': 2}, " + "'simpleClasses': ARRAY_CONSTRUCT({'string': 'a', 'intValue': 2}, {'string': 'b', 'intValue': 2}), " @@ -850,22 +910,28 @@ public void testMapNestedStructures() throws SQLException { assertEquals(Integer.valueOf(3), nestedStructSqlData.getMapOfIntegers().get("x")); assertEquals(Integer.valueOf(4), 
nestedStructSqlData.getMapOfIntegers().get("y")); TestUtil.assertEqualsIgnoringWhitespace(expectedQueryResult, resultSet.getString(1)); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testColumnTypeWhenStructureTypeIsDisabled() throws Exception { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testColumnTypeWhenStructureTypeIsDisabled(ResultSetFormatType format) + throws Exception { withFirstRow( "SELECT {'string':'a'}", resultSet -> { assertEquals(Types.VARCHAR, resultSet.getMetaData().getColumnType(1)); - }); + }, + format); } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testColumnTypeAndFieldsWhenStructureTypeIsReturned() throws Exception { + @ParameterizedTest + @ArgumentsSource(ResultFormatProvider.class) + @DontRunOnGithubActions + public void testColumnTypeAndFieldsWhenStructureTypeIsReturned(ResultSetFormatType format) + throws Exception { withFirstRow( "SELECT {'string':'a'}::OBJECT(string VARCHAR)", resultSet -> { @@ -893,12 +959,16 @@ public void testColumnTypeAndFieldsWhenStructureTypeIsReturned() throws Exceptio .getColumnFields(1) .get(0) .getName()); - }); + }, + format); } - private void withFirstRow(String sqlText, ThrowingConsumer consumer) + private void withFirstRow( + String sqlText, + ThrowingConsumer consumer, + ResultSetFormatType format) throws SQLException { - try (Connection connection = init(); + try (Connection connection = init(format); Statement statement = connection.createStatement(); ResultSet rs = statement.executeQuery(sqlText); ) { assertTrue(rs.next()); diff --git a/src/test/java/net/snowflake/client/jdbc/structuredtypes/StructuredTypesGetStringArrowJsonCompatibilityIT.java b/src/test/java/net/snowflake/client/jdbc/structuredtypes/StructuredTypesGetStringArrowJsonCompatibilityIT.java index 352d2b1a4..c3ae5fdd8 100644 --- a/src/test/java/net/snowflake/client/jdbc/structuredtypes/StructuredTypesGetStringArrowJsonCompatibilityIT.java +++ b/src/test/java/net/snowflake/client/jdbc/structuredtypes/StructuredTypesGetStringArrowJsonCompatibilityIT.java @@ -2,137 +2,161 @@ import java.sql.Connection; import java.sql.SQLException; -import java.util.ArrayList; -import java.util.Collection; import java.util.HashMap; -import java.util.LinkedHashMap; +import java.util.LinkedList; +import java.util.List; import java.util.Map; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryResultSet; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; import net.snowflake.client.jdbc.ResultSetFormatType; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import net.snowflake.client.providers.ProvidersUtil; +import net.snowflake.client.providers.ResultFormatProvider; +import net.snowflake.client.providers.SnowflakeArgumentsProvider; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.ArgumentsSource; 
-@RunWith(Parameterized.class) -@Category(TestCategoryResultSet.class) +@Tag(TestTags.RESULT_SET) public class StructuredTypesGetStringArrowJsonCompatibilityIT extends StructuredTypesGetStringBaseIT { - - private final String expectedStructureTypeRepresentation; - private final String selectSql; private static Map connections = new HashMap<>(); - public StructuredTypesGetStringArrowJsonCompatibilityIT( - ResultSetFormatType queryResultFormat, - String selectSql, - String expectedStructureTypeRepresentation) { - super(queryResultFormat); - this.selectSql = selectSql; - this.expectedStructureTypeRepresentation = expectedStructureTypeRepresentation; - } - - @Before - public void setUpConnection() throws SQLException { + @BeforeAll + public static void setUpConnections() throws SQLException { // We initialize connection here since we need to set server properties that cannot be set in GH // actions and before class is running even when all the tests have conditional ignore of tests - Connection connection = connections.get(queryResultFormat); - if (connection == null) { + for (ResultSetFormatType queryResultFormat : ResultSetFormatType.values()) { connections.put(queryResultFormat, initConnection(queryResultFormat)); } } - @AfterClass + @AfterAll public static void closeConnections() throws SQLException { for (Connection connection : connections.values()) { connection.close(); } } - @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) - public void testRunAsGetString() throws SQLException { + @ParameterizedTest + @DontRunOnGithubActions + @ArgumentsSource(DataProvider.class) + public void testRunAsGetString( + ResultSetFormatType queryResultFormat, + String selectSql, + String expectedStructureTypeRepresentation) + throws SQLException { withFirstRow( connections.get(queryResultFormat), selectSql, (resultSet) -> assertGetStringIsCompatible(resultSet, expectedStructureTypeRepresentation)); } - @Parameterized.Parameters(name = "format={0},sql={1}") - public static Collection data() { - Map samples = new LinkedHashMap<>(); - samples.put("select {'a':3}::map(text, int);", "{\"a\":3}"); - samples.put( - "select {'a':'zażółć gęślą jaźń'}::map(text, text);", "{\"a\":\"zażółć gęślą jaźń\"}"); - samples.put("select {'a':'bla'}::map(text, text);", "{\"a\":\"bla\"}"); - samples.put("select {'1':'bla'}::map(int, text);", "{\"1\":\"bla\"}"); - samples.put("select {'1':[1,2,3]}::map(int, ARRAY(int));", "{\"1\":[1,2,3]}"); - samples.put( - "select {'1':{'string':'a'}}::map(int, OBJECT(string VARCHAR));", - "{\"1\":{\"string\":\"a\"}}"); - samples.put( - "select {'1':{'string':'a'}}::map(int, map(string, string));", - "{\"1\":{\"string\":\"a\"}}"); - samples.put( - "select {'1':[{'string':'a'},{'bla':'ble'}]}::map(int, array(map(string, string)));", - "{\"1\":[{\"string\":\"a\"},{\"bla\":\"ble\"}]}"); - samples.put("select [1,2,3]::array(int)", "[1,2,3]"); - samples.put( - "select [{'a':'a'}, {'b':'b'}]::array(map(string, string))", - "[{\"a\":\"a\"}, {\"b\":\"b\"}]"); - samples.put( - "select [{'a':true}, {'b':false}]::array(map(string, boolean))", - "[{\"a\":true}, {\"b\":false}]"); - samples.put( - "select [{'string':'a'}, {'string':'b'}]::array(object(string varchar))", - "[{\"string\":\"a\"}, {\"string\":\"b\"}]"); - samples.put("select {'string':'a'}::object(string varchar)", "{\"string\":\"a\"}"); - samples.put( - "select {'x':'a','b':'a','c':'a','d':'a','e':'a'}::object(x varchar,b varchar,c varchar,d varchar,e varchar)", - 
"{\"x\":\"a\",\"b\":\"a\",\"c\":\"a\",\"d\":\"a\",\"e\":\"a\"}"); - samples.put("select {'string':[1,2,3]}::object(string array(int))", "{\"string\":[1,2,3]}"); - samples.put( - "select {'string':{'a':15}}::object(string object(a int))", "{\"string\":{\"a\":15}}"); - samples.put( - "select {'string':{'a':15}}::object(string map(string,int))", "{\"string\":{\"a\":15}}"); - samples.put( - "select {'string':{'a':{'b':15}}}::object(string object(a map(string, int)))", - "{\"string\":{\"a\":{\"b\":15}}}"); + public static class SampleProvider extends SnowflakeArgumentsProvider { + @Override + protected List rawArguments(ExtensionContext context) { + List samples = new LinkedList<>(); + samples.add(Arguments.of("select {'a':3}::map(text, int);", "{\"a\":3}")); + samples.add( + Arguments.of( + "select {'a':'zażółć gęślą jaźń'}::map(text, text);", + "{\"a\":\"zażółć gęślą jaźń\"}")); + samples.add(Arguments.of("select {'a':'bla'}::map(text, text);", "{\"a\":\"bla\"}")); + samples.add(Arguments.of("select {'1':'bla'}::map(int, text);", "{\"1\":\"bla\"}")); + samples.add(Arguments.of("select {'1':[1,2,3]}::map(int, ARRAY(int));", "{\"1\":[1,2,3]}")); + samples.add( + Arguments.of( + "select {'1':{'string':'a'}}::map(int, OBJECT(string VARCHAR));", + "{\"1\":{\"string\":\"a\"}}")); + samples.add( + Arguments.of( + "select {'1':{'string':'a'}}::map(int, map(string, string));", + "{\"1\":{\"string\":\"a\"}}")); + samples.add( + Arguments.of( + "select {'1':[{'string':'a'},{'bla':'ble'}]}::map(int, array(map(string, string)));", + "{\"1\":[{\"string\":\"a\"},{\"bla\":\"ble\"}]}")); + samples.add(Arguments.of("select [1,2,3]::array(int)", "[1,2,3]")); + samples.add( + Arguments.of( + "select [{'a':'a'}, {'b':'b'}]::array(map(string, string))", + "[{\"a\":\"a\"}, {\"b\":\"b\"}]")); + samples.add( + Arguments.of( + "select [{'a':true}, {'b':false}]::array(map(string, boolean))", + "[{\"a\":true}, {\"b\":false}]")); + samples.add( + Arguments.of( + "select [{'string':'a'}, {'string':'b'}]::array(object(string varchar))", + "[{\"string\":\"a\"}, {\"string\":\"b\"}]")); + samples.add( + Arguments.of("select {'string':'a'}::object(string varchar)", "{\"string\":\"a\"}")); + samples.add( + Arguments.of( + "select {'x':'a','b':'a','c':'a','d':'a','e':'a'}::object(x varchar,b varchar,c varchar,d varchar,e varchar)", + "{\"x\":\"a\",\"b\":\"a\",\"c\":\"a\",\"d\":\"a\",\"e\":\"a\"}")); + samples.add( + Arguments.of( + "select {'string':[1,2,3]}::object(string array(int))", "{\"string\":[1,2,3]}")); + samples.add( + Arguments.of( + "select {'string':{'a':15}}::object(string object(a int))", + "{\"string\":{\"a\":15}}")); + samples.add( + Arguments.of( + "select {'string':{'a':15}}::object(string map(string,int))", + "{\"string\":{\"a\":15}}")); + samples.add( + Arguments.of( + "select {'string':{'a':{'b':15}}}::object(string object(a map(string, int)))", + "{\"string\":{\"a\":{\"b\":15}}}")); - samples.put( - "select {'string':{'a':{'b':[{'c': 15}]}}}::object(string map(string, object(b array(object(c int)))))", - "{\"string\":{\"a\":{\"b\":[{\"c\":15}]}}}"); - // DY, DD MON YYYY HH24:MI:SS TZHTZM - samples.put( - "select {'ltz': '2024-05-20 11:22:33'::TIMESTAMP_LTZ}::object(ltz TIMESTAMP_LTZ)", - "{\"ltz\":\"Mon, 20 May 2024 11:22:33 +0200\"}"); - samples.put( - "select {'ntz': '2024-05-20 11:22:33'::TIMESTAMP_NTZ}::object(ntz TIMESTAMP_NTZ)", - "{\"ntz\":\"Mon, 20 May 2024 11:22:33 Z\"}"); - samples.put( - "select {'tz': '2024-05-20 11:22:33+0800'::TIMESTAMP_TZ}::object(tz TIMESTAMP_TZ)", - "{\"tz\":\"Mon, 20 May 
2024 11:22:33 +0800\"}"); - samples.put( - "select {'date': '2024-05-20'::DATE}::object(date DATE)", "{\"date\":\"2024-05-20\"}"); - samples.put("select {'time': '22:14:55'::TIME}::object(time TIME)", "{\"time\":\"22:14:55\"}"); - samples.put("select {'bool': TRUE}::object(bool BOOLEAN)", "{\"bool\":true}"); - samples.put("select {'bool': 'y'}::object(bool BOOLEAN)", "{\"bool\":true}"); - samples.put( - "select {'binary': TO_BINARY('616263', 'HEX')}::object(binary BINARY)", - "{\"binary\":\"616263\"}"); - samples.put("select [1,2,3]::VECTOR(INT, 3)", "[1,2,3]"); - samples.put("select ['a','b','c']::ARRAY(varchar)", "[\"a\",\"b\",\"c\"]"); + samples.add( + Arguments.of( + "select {'string':{'a':{'b':[{'c': 15}]}}}::object(string map(string, object(b array(object(c int)))))", + "{\"string\":{\"a\":{\"b\":[{\"c\":15}]}}}")); + // DY, DD MON YYYY HH24:MI:SS TZHTZM + samples.add( + Arguments.of( + "select {'ltz': '2024-05-20 11:22:33'::TIMESTAMP_LTZ}::object(ltz TIMESTAMP_LTZ)", + "{\"ltz\":\"Mon, 20 May 2024 11:22:33 +0200\"}")); + samples.add( + Arguments.of( + "select {'ntz': '2024-05-20 11:22:33'::TIMESTAMP_NTZ}::object(ntz TIMESTAMP_NTZ)", + "{\"ntz\":\"Mon, 20 May 2024 11:22:33 Z\"}")); + samples.add( + Arguments.of( + "select {'tz': '2024-05-20 11:22:33+0800'::TIMESTAMP_TZ}::object(tz TIMESTAMP_TZ)", + "{\"tz\":\"Mon, 20 May 2024 11:22:33 +0800\"}")); + samples.add( + Arguments.of( + "select {'date': '2024-05-20'::DATE}::object(date DATE)", + "{\"date\":\"2024-05-20\"}")); + samples.add( + Arguments.of( + "select {'time': '22:14:55'::TIME}::object(time TIME)", "{\"time\":\"22:14:55\"}")); + samples.add(Arguments.of("select {'bool': TRUE}::object(bool BOOLEAN)", "{\"bool\":true}")); + samples.add(Arguments.of("select {'bool': 'y'}::object(bool BOOLEAN)", "{\"bool\":true}")); + samples.add( + Arguments.of( + "select {'binary': TO_BINARY('616263', 'HEX')}::object(binary BINARY)", + "{\"binary\":\"616263\"}")); + samples.add(Arguments.of("select [1,2,3]::VECTOR(INT, 3)", "[1,2,3]")); + samples.add(Arguments.of("select ['a','b','c']::ARRAY(varchar)", "[\"a\",\"b\",\"c\"]")); - Collection parameters = new ArrayList<>(); - for (ResultSetFormatType resultSetFormatType : ResultSetFormatType.values()) { - samples.forEach( - (sql, expected) -> parameters.add(new Object[] {resultSetFormatType, sql, expected})); + return samples; } + } - return parameters; + private static class DataProvider extends SnowflakeArgumentsProvider { + + @Override + protected List rawArguments(ExtensionContext context) { + return ProvidersUtil.cartesianProduct( + context, new ResultFormatProvider(), new SampleProvider()); + } } } diff --git a/src/test/java/net/snowflake/client/jdbc/structuredtypes/StructuredTypesGetStringBaseIT.java b/src/test/java/net/snowflake/client/jdbc/structuredtypes/StructuredTypesGetStringBaseIT.java index d9d5c15e2..35d10c4b1 100644 --- a/src/test/java/net/snowflake/client/jdbc/structuredtypes/StructuredTypesGetStringBaseIT.java +++ b/src/test/java/net/snowflake/client/jdbc/structuredtypes/StructuredTypesGetStringBaseIT.java @@ -1,6 +1,6 @@ package net.snowflake.client.jdbc.structuredtypes; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.Connection; import java.sql.ResultSet; @@ -12,15 +12,10 @@ import net.snowflake.client.jdbc.ResultSetFormatType; abstract class StructuredTypesGetStringBaseIT extends BaseJDBCTest { + public StructuredTypesGetStringBaseIT() {} - protected final ResultSetFormatType queryResultFormat; - - public 
StructuredTypesGetStringBaseIT(ResultSetFormatType queryResultFormat) { - this.queryResultFormat = queryResultFormat; - } - - protected Connection init() throws SQLException { - return initConnection(this.queryResultFormat); + protected Connection init(ResultSetFormatType queryResultFormat) throws SQLException { + return initConnection(queryResultFormat); } protected static Connection initConnection(ResultSetFormatType queryResultFormat) diff --git a/src/test/java/net/snowflake/client/jdbc/telemetry/TelemetryIT.java b/src/test/java/net/snowflake/client/jdbc/telemetry/TelemetryIT.java index e100534e7..302146801 100644 --- a/src/test/java/net/snowflake/client/jdbc/telemetry/TelemetryIT.java +++ b/src/test/java/net/snowflake/client/jdbc/telemetry/TelemetryIT.java @@ -3,9 +3,9 @@ */ package net.snowflake.client.jdbc.telemetry; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ObjectNode; @@ -18,25 +18,23 @@ import java.sql.Statement; import java.util.Map; import net.snowflake.client.AbstractDriverIT; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningOnGithubAction; -import net.snowflake.client.category.TestCategoryCore; +import net.snowflake.client.annotations.DontRunOnGithubActions; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.HttpUtil; import net.snowflake.client.core.SFException; import net.snowflake.client.core.SessionUtil; import org.apache.http.impl.client.CloseableHttpClient; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryCore.class) +@Tag(TestTags.CORE) public class TelemetryIT extends AbstractDriverIT { private Connection connection = null; private static final ObjectMapper mapper = new ObjectMapper(); - @Before + @BeforeEach public void init() throws SQLException, IOException { this.connection = getConnection(); } @@ -47,23 +45,23 @@ public void testTelemetry() throws Exception { testTelemetryInternal(telemetry); } - @Ignore + @Disabled @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testSessionlessTelemetry() throws Exception, SFException { testTelemetryInternal(createSessionlessTelemetry()); } - @Ignore + @Disabled @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testJWTSessionlessTelemetry() throws Exception, SFException { testTelemetryInternal(createJWTSessionlessTelemetry()); } - @Ignore + @Disabled @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testOAuthSessionlessTelemetry() throws Exception, SFException { testTelemetryInternal(createOAuthSessionlessTelemetry()); } @@ -143,13 +141,13 @@ public void testDisableTelemetry() throws Exception { } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + 
@DontRunOnGithubActions public void testDisableJWTSessionlessTelemetry() throws Exception, SFException { testDisableTelemetryInternal(createJWTSessionlessTelemetry()); } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testDisableOAuthSessionlessTelemetry() throws Exception, SFException { testDisableTelemetryInternal(createOAuthSessionlessTelemetry()); } @@ -181,7 +179,7 @@ public void testDisableTelemetryInternal(TelemetryClient telemetry) throws Excep } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testClosedJWTSessionlessTelemetry() throws Exception, SFException { TelemetryClient telemetry = createJWTSessionlessTelemetry(); telemetry.close(); @@ -189,11 +187,11 @@ public void testClosedJWTSessionlessTelemetry() throws Exception, SFException { node.put("type", "query"); node.put("query_id", "sdasdasdasdasds"); telemetry.addLogToBatch(node, 1234567); - Assert.assertFalse(telemetry.sendBatchAsync().get()); + assertFalse(telemetry.sendBatchAsync().get()); } @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnGithubAction.class) + @DontRunOnGithubActions public void testClosedOAuthSessionlessTelemetry() throws Exception, SFException { TelemetryClient telemetry = createOAuthSessionlessTelemetry(); telemetry.close(); @@ -201,7 +199,7 @@ public void testClosedOAuthSessionlessTelemetry() throws Exception, SFException node.put("type", "query"); node.put("query_id", "sdasdasdasdasds"); telemetry.addLogToBatch(node, 1234567); - Assert.assertFalse(telemetry.sendBatchAsync().get()); + assertFalse(telemetry.sendBatchAsync().get()); } // Helper function to create a sessionless telemetry diff --git a/src/test/java/net/snowflake/client/jdbc/telemetry/TelemetryTest.java b/src/test/java/net/snowflake/client/jdbc/telemetry/TelemetryTest.java index 6fc0c86d6..3d1471eb7 100644 --- a/src/test/java/net/snowflake/client/jdbc/telemetry/TelemetryTest.java +++ b/src/test/java/net/snowflake/client/jdbc/telemetry/TelemetryTest.java @@ -3,13 +3,13 @@ */ package net.snowflake.client.jdbc.telemetry; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.ObjectNode; import java.util.LinkedList; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** Telemetry unit tests */ public class TelemetryTest { diff --git a/src/test/java/net/snowflake/client/jdbc/telemetryOOB/TelemetryServiceIT.java b/src/test/java/net/snowflake/client/jdbc/telemetryOOB/TelemetryServiceIT.java index 1d8ec8c9e..347bc97e3 100644 --- a/src/test/java/net/snowflake/client/jdbc/telemetryOOB/TelemetryServiceIT.java +++ b/src/test/java/net/snowflake/client/jdbc/telemetryOOB/TelemetryServiceIT.java @@ -2,9 +2,10 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.sql.Connection; import java.sql.SQLException; @@ -13,9 +14,8 @@ import java.util.Map; import 
java.util.Properties; import java.util.concurrent.TimeUnit; -import net.snowflake.client.ConditionalIgnoreRule; -import net.snowflake.client.RunningNotOnTestaccount; -import net.snowflake.client.category.TestCategoryCore; +import net.snowflake.client.annotations.RunOnTestaccountNotOnGithubActions; +import net.snowflake.client.category.TestTags; import net.snowflake.client.core.SFSession; import net.snowflake.client.jdbc.BaseJDBCTest; import net.snowflake.client.jdbc.SnowflakeConnectionV1; @@ -23,19 +23,19 @@ import net.snowflake.client.jdbc.SnowflakeSQLLoggedException; import net.snowflake.common.core.SqlState; import org.apache.commons.lang3.time.StopWatch; -import org.junit.After; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** Standalone test cases for the out of band telemetry service */ -@Category(TestCategoryCore.class) +@Tag(TestTags.CORE) public class TelemetryServiceIT extends BaseJDBCTest { private static final int WAIT_FOR_TELEMETRY_REPORT_IN_MILLISECS = 5000; private boolean defaultState; - @Before + @BeforeEach public void setUp() { TelemetryService service = TelemetryService.getInstance(); Map connectionParams = getConnectionParameters(); @@ -45,7 +45,7 @@ public void setUp() { service.enable(); } - @After + @AfterEach public void tearDown() throws InterruptedException { // wait 5 seconds while the service is flushing TimeUnit.SECONDS.sleep(5); @@ -58,7 +58,7 @@ public void tearDown() throws InterruptedException { } @SuppressWarnings("divzero") - @Ignore + @Disabled @Test public void testCreateException() { TelemetryService service = TelemetryService.getInstance(); @@ -82,7 +82,7 @@ public void testCreateException() { } /** test wrong server url. 
*/ - @Ignore + @Disabled @Test public void testWrongServerURL() throws InterruptedException { TelemetryService service = TelemetryService.getInstance(); @@ -102,7 +102,7 @@ public void testWrongServerURL() throws InterruptedException { assertThat("WrongServerURL do not block.", service.getEventCount() > count); } - @Ignore + @Disabled @Test public void testCreateLog() { // this log will be delivered to snowflake @@ -114,7 +114,7 @@ public void testCreateLog() { service.report(log); } - @Ignore + @Disabled @Test public void testCreateLogWithAWSSecret() { // this log will be delivered to snowflake @@ -135,7 +135,7 @@ public void testCreateLogWithAWSSecret() { service.report(log); } - @Ignore + @Disabled @Test public void stressTestCreateLog() { // this log will be delivered to snowflake @@ -161,7 +161,7 @@ public void stressTestCreateLog() { sw.stop(); } - @Ignore + @Disabled @Test public void testCreateLogInBlackList() { // this log will be delivered to snowflake @@ -172,7 +172,7 @@ public void testCreateLogInBlackList() { service.report(log); } - @Ignore + @Disabled @Test public void testCreateUrgentEvent() { // this log will be delivered to snowflake @@ -184,7 +184,7 @@ public void testCreateUrgentEvent() { service.report(log); } - @Ignore + @Disabled @Test public void stressTestCreateUrgentEvent() { // this log will be delivered to snowflake @@ -229,7 +229,7 @@ private int generateSQLFeatureNotSupportedException() throws SQLFeatureNotSuppor * @throws SQLException */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningNotOnTestaccount.class) + @RunOnTestaccountNotOnGithubActions public void testSnowflakeSQLLoggedExceptionOOBTelemetry() throws SQLException, InterruptedException { // make a connection to initialize telemetry instance @@ -264,7 +264,7 @@ public void testSnowflakeSQLLoggedExceptionOOBTelemetry() * @throws SQLException */ @Test - @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningNotOnTestaccount.class) + @RunOnTestaccountNotOnGithubActions public void testSQLFeatureNotSupportedOOBTelemetry() throws InterruptedException { // with null session, OOB telemetry will be thrown try { @@ -290,7 +290,7 @@ public void testSQLFeatureNotSupportedOOBTelemetry() throws InterruptedException * * @throws SQLException */ - @Ignore + @Disabled @Test public void testHTAPTelemetry() throws SQLException { Properties properties = new Properties(); @@ -317,7 +317,7 @@ public void testHTAPTelemetry() throws SQLException { * Requires part 2 of SNOW-844477. Make sure CLIENT_OUT_OF_BAND_TELEMETRY_ENABLED is true at * account level. Tests connection property CLIENT_OUT_OF_BAND_TELEMETRY_ENABLED=true */ - @Ignore + @Disabled @Test public void testOOBTelemetryEnabled() throws SQLException { Properties properties = new Properties(); @@ -334,7 +334,7 @@ public void testOOBTelemetryEnabled() throws SQLException { * Requires part 2 of SNOW-844477. Make sure CLIENT_OUT_OF_BAND_TELEMETRY_ENABLED is false at * account level. Tests connection property CLIENT_OUT_OF_BAND_TELEMETRY_ENABLED=false */ - @Ignore + @Disabled @Test public void testOOBTelemetryDisabled() throws SQLException { Properties properties = new Properties(); @@ -352,7 +352,7 @@ public void testOOBTelemetryDisabled() throws SQLException { * account level. 
Tests connection property CLIENT_OUT_OF_BAND_TELEMETRY_ENABLED=false but * CLIENT_OUT_OF_BAND_TELEMETRY_ENABLED is enabled on account level */ - @Ignore + @Disabled @Test public void testOOBTelemetryEnabledOnServerDisabledOnClient() throws SQLException { Properties properties = new Properties(); @@ -392,16 +392,15 @@ public void testSnowflakeSQLLoggedExceptionIBTelemetry() throws SQLException { * telemetry should be used. * *
After running test, check for telemetry message in client_telemetry_v table. - * - * @throws SQLException */ - @Test(expected = SQLFeatureNotSupportedException.class) + @Test public void testSqlFeatureNotSupportedExceptionIBTelemetry() throws SQLException { // make a connection to initialize telemetry instance try (Connection con = getConnection()) { Statement statement = con.createStatement(); // try to execute a statement that throws a SQLFeatureNotSupportedException - statement.execute("select 1", new int[] {}); + assertThrows( + SQLFeatureNotSupportedException.class, () -> statement.execute("select 1", new int[] {})); } } } diff --git a/src/test/java/net/snowflake/client/jdbc/telemetryOOB/TelemetryServiceTest.java b/src/test/java/net/snowflake/client/jdbc/telemetryOOB/TelemetryServiceTest.java index 5103348fa..fe359b0ec 100644 --- a/src/test/java/net/snowflake/client/jdbc/telemetryOOB/TelemetryServiceTest.java +++ b/src/test/java/net/snowflake/client/jdbc/telemetryOOB/TelemetryServiceTest.java @@ -6,21 +6,21 @@ import java.util.HashMap; import java.util.Map; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; public class TelemetryServiceTest { private boolean defaultState; - @Before + @BeforeEach public void setUp() { TelemetryService service = TelemetryService.getInstance(); defaultState = service.isEnabled(); service.enable(); } - @After + @AfterEach public void tearDown() throws InterruptedException { TelemetryService service = TelemetryService.getInstance(); if (defaultState) { diff --git a/src/test/java/net/snowflake/client/loader/FlatfileReadMultithreadIT.java b/src/test/java/net/snowflake/client/loader/FlatfileReadMultithreadIT.java index 86f8caf5a..dae7fc196 100644 --- a/src/test/java/net/snowflake/client/loader/FlatfileReadMultithreadIT.java +++ b/src/test/java/net/snowflake/client/loader/FlatfileReadMultithreadIT.java @@ -16,13 +16,13 @@ import java.util.Random; import java.util.concurrent.atomic.AtomicInteger; import net.snowflake.client.AbstractDriverIT; -import net.snowflake.client.category.TestCategoryLoader; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryLoader.class) +@Tag(TestTags.LOADER) public class FlatfileReadMultithreadIT { private final int NUM_RECORDS = 100000; @@ -30,7 +30,7 @@ public class FlatfileReadMultithreadIT { private static String TARGET_SCHEMA; private static String TARGET_DB; - @BeforeClass + @BeforeAll public static void setUpClass() throws Throwable { try (Connection testConnection = AbstractDriverIT.getConnection(); // NOTE: the stage object must be created right after the connection @@ -43,7 +43,7 @@ public static void setUpClass() throws Throwable { } } - @AfterClass + @AfterAll public static void tearDownClass() throws Throwable { try (Connection testConnection = AbstractDriverIT.getConnection(); Statement statement = testConnection.createStatement()) { diff --git a/src/test/java/net/snowflake/client/loader/LoaderBase.java b/src/test/java/net/snowflake/client/loader/LoaderBase.java index ea0c29fdf..853955862 100644 --- a/src/test/java/net/snowflake/client/loader/LoaderBase.java +++ 
b/src/test/java/net/snowflake/client/loader/LoaderBase.java @@ -6,8 +6,8 @@ import java.sql.Connection; import java.sql.SQLException; import net.snowflake.client.AbstractDriverIT; -import org.junit.AfterClass; -import org.junit.BeforeClass; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; public class LoaderBase { static final String TARGET_TABLE_NAME = "LOADER_test_TABLE"; @@ -16,7 +16,7 @@ public class LoaderBase { static Connection putConnection; static String SCHEMA_NAME; - @BeforeClass + @BeforeAll public static void setUpClass() throws Throwable { testConnection = AbstractDriverIT.getConnection(); putConnection = AbstractDriverIT.getConnection(); @@ -40,7 +40,7 @@ public static void setUpClass() throws Throwable { .execute("alter session set JDBC_QUERY_RESULT_FORMAT='ARROW', QUERY_RESULT_FORMAT='ARROW'"); } - @AfterClass + @AfterAll public static void tearDownClass() throws SQLException { testConnection .createStatement() diff --git a/src/test/java/net/snowflake/client/loader/LoaderIT.java b/src/test/java/net/snowflake/client/loader/LoaderIT.java index 00fea060f..7f4e3ee97 100644 --- a/src/test/java/net/snowflake/client/loader/LoaderIT.java +++ b/src/test/java/net/snowflake/client/loader/LoaderIT.java @@ -9,8 +9,8 @@ import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.sql.ResultSet; import java.sql.SQLException; @@ -22,13 +22,13 @@ import java.util.Date; import java.util.Random; import java.util.TimeZone; -import net.snowflake.client.category.TestCategoryLoader; -import org.junit.Ignore; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** Loader IT */ -@Category(TestCategoryLoader.class) +@Tag(TestTags.LOADER) public class LoaderIT extends LoaderBase { @Test public void testInjectBadStagedFileInsert() throws Exception { @@ -93,7 +93,7 @@ public void testExecuteBeforeAfterSQLError() throws Exception { * * @throws Exception raises an exception if any error occurs. 
*/ - @Ignore("Performance test") + @Disabled("Performance test") @Test public void testLoaderLargeInsert() throws Exception { new TestDataConfigBuilder(testConnection, putConnection) diff --git a/src/test/java/net/snowflake/client/loader/LoaderLatestIT.java b/src/test/java/net/snowflake/client/loader/LoaderLatestIT.java index e10a606d4..72212171b 100644 --- a/src/test/java/net/snowflake/client/loader/LoaderLatestIT.java +++ b/src/test/java/net/snowflake/client/loader/LoaderLatestIT.java @@ -4,8 +4,8 @@ import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.sql.PreparedStatement; import java.sql.ResultSet; @@ -13,9 +13,9 @@ import java.util.Arrays; import java.util.Collections; import java.util.Date; -import net.snowflake.client.category.TestCategoryLoader; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Loader API tests for the latest JDBC driver. This doesn't work for the oldest supported driver. @@ -23,7 +23,7 @@ * is not applicable. If it is applicable, move tests to LoaderIT so that both the latest and oldest * supported driver run the tests. */ -@Category(TestCategoryLoader.class) +@Tag(TestTags.LOADER) public class LoaderLatestIT extends LoaderBase { @Test public void testLoaderUpsert() throws Exception { diff --git a/src/test/java/net/snowflake/client/loader/LoaderMultipleBatchIT.java b/src/test/java/net/snowflake/client/loader/LoaderMultipleBatchIT.java index 859533686..a01dfffa5 100644 --- a/src/test/java/net/snowflake/client/loader/LoaderMultipleBatchIT.java +++ b/src/test/java/net/snowflake/client/loader/LoaderMultipleBatchIT.java @@ -5,16 +5,16 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.ResultSet; import java.sql.Statement; import java.util.List; -import net.snowflake.client.category.TestCategoryLoader; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryLoader.class) +@Tag(TestTags.LOADER) public class LoaderMultipleBatchIT extends LoaderBase { @Test public void testLoaderMultipleBatch() throws Exception { diff --git a/src/test/java/net/snowflake/client/loader/LoaderTimestampIT.java b/src/test/java/net/snowflake/client/loader/LoaderTimestampIT.java index 790249e96..9c418c421 100644 --- a/src/test/java/net/snowflake/client/loader/LoaderTimestampIT.java +++ b/src/test/java/net/snowflake/client/loader/LoaderTimestampIT.java @@ -5,7 +5,7 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.sql.ResultSet; import java.sql.Statement; @@ -14,11 +14,11 @@ import java.util.Arrays; import java.util.Date; import java.util.TimeZone; -import net.snowflake.client.category.TestCategoryLoader; -import org.junit.Test; -import 
org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryLoader.class) +@Tag(TestTags.LOADER) public class LoaderTimestampIT extends LoaderBase { @Test public void testLoadTimestamp() throws Exception { diff --git a/src/test/java/net/snowflake/client/loader/OnErrorTest.java b/src/test/java/net/snowflake/client/loader/OnErrorTest.java index db31b59b5..062621051 100644 --- a/src/test/java/net/snowflake/client/loader/OnErrorTest.java +++ b/src/test/java/net/snowflake/client/loader/OnErrorTest.java @@ -6,7 +6,7 @@ import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.Is.is; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class OnErrorTest { @Test diff --git a/src/test/java/net/snowflake/client/log/AbstractLoggerIT.java b/src/test/java/net/snowflake/client/log/AbstractLoggerIT.java index 006574d66..15ee56e6a 100644 --- a/src/test/java/net/snowflake/client/log/AbstractLoggerIT.java +++ b/src/test/java/net/snowflake/client/log/AbstractLoggerIT.java @@ -3,21 +3,21 @@ */ package net.snowflake.client.log; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; -import net.snowflake.client.category.TestCategoryCore; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** A base class for testing implementations of {@link SFLogger} */ -@Category(TestCategoryCore.class) +@Tag(TestTags.CORE) public abstract class AbstractLoggerIT { public static final String fakeCreds = "credentials=(aws_key_id='abc123' aws_secret_key='rtyuiop')"; - @Before + @BeforeEach void setUp() { setLogLevel(LogLevel.TRACE); } @@ -36,7 +36,7 @@ public void TestLambdaIsNotEvaluatedIfMsgIsNotLogged() { "Value: {}", (ArgSupplier) () -> { - Assert.fail("Lambda expression evaluated even though message " + "is not logged"); + fail("Lambda expression evaluated even though message " + "is not logged"); return 0; }); } @@ -103,19 +103,19 @@ private void logAndVerifyAtEachLogLevel(String expectedLogMsg, String msg, Objec String loggedMsg = getLoggedMessage(); assertEquals( + expectedLogMsg, + loggedMsg, String.format( "Message logged did not match expected value. " + "expected=%s actual=%s", - expectedLogMsg, loggedMsg), - expectedLogMsg, - loggedMsg); + expectedLogMsg, loggedMsg)); LogLevel loggedMsgLevel = getLoggedMessageLevel(); assertEquals( + level, + loggedMsgLevel, String.format( "Message was not logged at expected log level. 
" + "expected=%s actual=%s", - level.toString(), loggedMsgLevel.toString()), - level, - loggedMsgLevel); + level.toString(), loggedMsgLevel.toString())); } } diff --git a/src/test/java/net/snowflake/client/log/JDK14JCLWrapperLatestIT.java b/src/test/java/net/snowflake/client/log/JDK14JCLWrapperLatestIT.java index 033a15457..b8c2b63e5 100644 --- a/src/test/java/net/snowflake/client/log/JDK14JCLWrapperLatestIT.java +++ b/src/test/java/net/snowflake/client/log/JDK14JCLWrapperLatestIT.java @@ -3,21 +3,21 @@ */ package net.snowflake.client.log; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.logging.Formatter; import java.util.logging.Handler; import java.util.logging.Level; import java.util.logging.LogRecord; -import net.snowflake.client.category.TestCategoryCore; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category(TestCategoryCore.class) +@Tag(TestTags.CORE) public class JDK14JCLWrapperLatestIT { JDK14JCLWrapper wrapper = new JDK14JCLWrapper(JDK14JCLWrapperLatestIT.class.getName()); JDK14Logger logger = (JDK14Logger) wrapper.getLogger(); @@ -66,7 +66,7 @@ private enum LogLevel { private TestJDK14LogHandler handler = new TestJDK14LogHandler(new SFFormatter()); - @Before + @BeforeEach public void setUp() { logLevelToRestore = logger.getLevel(); // Set debug level to lowest so that all possible messages can be sent. @@ -75,7 +75,7 @@ public void setUp() { logger.setUseParentHandlers(false); } - @After + @AfterEach public void tearDown() { logger.setUseParentHandlers(true); logger.setLevel(logLevelToRestore); diff --git a/src/test/java/net/snowflake/client/log/JDK14LoggerLatestIT.java b/src/test/java/net/snowflake/client/log/JDK14LoggerLatestIT.java index 7bcfaa216..54d21f4e6 100644 --- a/src/test/java/net/snowflake/client/log/JDK14LoggerLatestIT.java +++ b/src/test/java/net/snowflake/client/log/JDK14LoggerLatestIT.java @@ -8,15 +8,15 @@ import java.util.logging.Level; import java.util.logging.LogRecord; import java.util.logging.Logger; -import net.snowflake.client.category.TestCategoryCore; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.experimental.categories.Category; +import net.snowflake.client.category.TestTags; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; /** A class for testing {@link JDK14Logger} */ -@Category(TestCategoryCore.class) +@Tag(TestTags.CORE) public class JDK14LoggerLatestIT extends AbstractLoggerIT { /** {@link JDK14Logger} instance that will be tested in this class */ private static final JDK14Logger LOGGER = new JDK14Logger(JDK14LoggerLatestIT.class.getName()); @@ -53,7 +53,7 @@ public class JDK14LoggerLatestIT extends AbstractLoggerIT { /** Level at which last message was logged using JDK14Logger. 
 */
  private Level lastLogMessageLevel = null;

-  @BeforeClass
+  @BeforeAll
   public static void oneTimeSetUp() {
     logLevelToRestore = internalLogger.getLevel();
     useParentHandlersToRestore = internalLogger.getUseParentHandlers();
@@ -61,19 +61,19 @@ public static void oneTimeSetUp() {
     internalLogger.setUseParentHandlers(false);
   }
 
-  @AfterClass
+  @AfterAll
   public static void oneTimeTearDown() {
     internalLogger.setLevel(logLevelToRestore);
     internalLogger.setUseParentHandlers(useParentHandlersToRestore);
   }
 
-  @Before
+  @BeforeEach
   public void setUp() {
     super.setUp();
     internalLogger.addHandler(this.handler);
   }
 
-  @After
+  @AfterEach
   public void tearDown() {
     internalLogger.removeHandler(this.handler);
   }
diff --git a/src/test/java/net/snowflake/client/log/JDK14LoggerTest.java b/src/test/java/net/snowflake/client/log/JDK14LoggerTest.java
index e4aadfb14..101c5f9c8 100644
--- a/src/test/java/net/snowflake/client/log/JDK14LoggerTest.java
+++ b/src/test/java/net/snowflake/client/log/JDK14LoggerTest.java
@@ -4,17 +4,19 @@
 package net.snowflake.client.log;
 
 import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import java.io.IOException;
 import java.nio.file.Paths;
 import java.util.logging.Level;
-import org.junit.Test;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
 
 public class JDK14LoggerTest {
   @Test
+  @Disabled
   public void testLegacyLoggerInit() throws IOException {
     System.setProperty("snowflake.jdbc.log.size", "100000");
     System.setProperty("snowflake.jdbc.log.count", "3");
diff --git a/src/test/java/net/snowflake/client/log/JDK14LoggerWithClientLatestIT.java b/src/test/java/net/snowflake/client/log/JDK14LoggerWithClientLatestIT.java
index 5c11cdf22..c1f9df5df 100644
--- a/src/test/java/net/snowflake/client/log/JDK14LoggerWithClientLatestIT.java
+++ b/src/test/java/net/snowflake/client/log/JDK14LoggerWithClientLatestIT.java
@@ -1,9 +1,9 @@
 package net.snowflake.client.log;
 
 import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty;
-import static org.junit.Assert.assertThrows;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
 
 import java.io.File;
 import java.io.IOException;
@@ -18,30 +18,43 @@
 import java.util.Properties;
 import java.util.logging.Level;
 import net.snowflake.client.AbstractDriverIT;
-import net.snowflake.client.ConditionalIgnoreRule;
-import net.snowflake.client.RunningOnWin;
-import net.snowflake.client.category.TestCategoryOthers;
+import net.snowflake.client.annotations.DontRunOnWindows;
+import net.snowflake.client.category.TestTags;
+import net.snowflake.client.jdbc.SnowflakeSQLException;
 import net.snowflake.client.jdbc.SnowflakeSQLLoggedException;
 import org.apache.commons.io.FileUtils;
-import org.junit.Ignore;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.rules.TemporaryFolder;
-
-@Category(TestCategoryOthers.class)
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.io.TempDir;
+
+@Tag(TestTags.OTHERS)
 public class JDK14LoggerWithClientLatestIT extends AbstractDriverIT {
-  @Rule public TemporaryFolder tmpFolder = new TemporaryFolder();
-
+  @TempDir public File tmpFolder;
   String homePath = systemGetProperty("user.home");
+  private static Level originalLevel;
+
+  @BeforeAll
+  static void saveLevel() {
+    originalLevel = JDK14Logger.getLevel();
+  }
+
+  @AfterAll
+  static void restoreLevel() {
+    JDK14Logger.setLevel(originalLevel);
+  }
 
   @Test
-  @Ignore
+  @Disabled
   public void testJDK14LoggingWithClientConfig() throws IOException {
-    File configFile = tmpFolder.newFile("config.json");
+    File configFile = new File(tmpFolder, "config.json");
+    configFile.createNewFile();
     Path configFilePath = configFile.toPath();
-    File logFolder = tmpFolder.newFolder("logs");
+    File logFolder = new File(tmpFolder, "logs");
+    logFolder.createNewFile();
     Path logFolderPath = logFolder.toPath();
     String configJson =
         "{\"common\":{\"log_level\":\"debug\",\"log_path\":\"" + logFolderPath + "\"}}";
@@ -63,31 +76,37 @@ public void testJDK14LoggingWithClientConfig() throws IOException {
     }
   }
 
-  @Test(expected = SQLException.class)
-  public void testJDK14LoggingWithClientConfigInvalidConfigFilePath() throws SQLException {
+  @Test
+  public void testJDK14LoggingWithClientConfigInvalidConfigFilePath() {
     Path configFilePath = Paths.get("invalid.json");
     Properties properties = new Properties();
     properties.put("client_config_file", configFilePath.toString());
-    try (Connection connection = getConnection(properties)) {
-      connection.createStatement().executeQuery("select 1");
-    }
+    assertThrows(
+        SnowflakeSQLException.class,
+        () -> {
+          try (Connection connection = getConnection(properties)) {
+            connection.createStatement().executeQuery("select 1");
+          }
+        });
   }
 
   @Test
-  @Ignore
-  @ConditionalIgnoreRule.ConditionalIgnore(condition = RunningOnWin.class)
+  @Disabled
+  @DontRunOnWindows
  public void testJDK14LoggingWithClientConfigPermissionError() throws IOException {
-    File configFile = tmpFolder.newFile("config.json");
+    File configFile = new File(tmpFolder, "config.json");
+    configFile.createNewFile();
     Path configFilePath = configFile.toPath();
-    File directory = tmpFolder.newFolder("logs");
-    Path directoryPath = directory.toPath();
+    File logFolder = new File(tmpFolder, "logs");
+    logFolder.createNewFile();
+    Path logFolderPath = logFolder.toPath();
     String configJson =
-        "{\"common\":{\"log_level\":\"debug\",\"log_path\":\"" + directoryPath + "\"}}";
+        "{\"common\":{\"log_level\":\"debug\",\"log_path\":\"" + logFolderPath + "\"}}";
     HashSet<PosixFilePermission> perms = new HashSet<>();
     perms.add(PosixFilePermission.OWNER_READ);
     perms.add(PosixFilePermission.GROUP_READ);
     perms.add(PosixFilePermission.OTHERS_READ);
-    Files.setPosixFilePermissions(directoryPath, perms);
+    Files.setPosixFilePermissions(logFolderPath, perms);
 
     Files.write(configFilePath, configJson.getBytes());
     Properties properties = new Properties();
@@ -112,12 +131,13 @@ public void testJDK14LoggerWithQuotesInMessage() {
   }
 
   @Test
-  @Ignore
+  @Disabled
   public void testJDK14LoggingWithMissingLogPathClientConfig() throws Exception {
-    File configFile = tmpFolder.newFile("config.json");
+    File configFile = new File(tmpFolder, "config.json");
+    configFile.createNewFile();
     Path configFilePath = configFile.toPath();
     String configJson = "{\"common\":{\"log_level\":\"debug\"}}";
-    Path home = tmpFolder.getRoot().toPath();
+    Path home = tmpFolder.toPath();
     System.setProperty("user.home", home.toString());
 
     Path homeLogPath = Paths.get(home.toString(), "jdbc");
@@ -142,11 +162,11 @@ public void testJDK14LoggingWithMissingLogPathClientConfig() throws Exception {
   }
 
   @Test
-  @Ignore
+  @Disabled
   public void testJDK14LoggingWithMissingLogPathNoHomeDirClientConfig() throws Exception {
     System.clearProperty("user.home");
 
-    File configFile = tmpFolder.newFile("config.json");
+    File configFile = new File(tmpFolder, "config.json");
     Path configFilePath = configFile.toPath();
     String configJson = "{\"common\":{\"log_level\":\"debug\"}}";
     Files.write(configFilePath, configJson.getBytes());
diff --git a/src/test/java/net/snowflake/client/log/SFFormatterTest.java b/src/test/java/net/snowflake/client/log/SFFormatterTest.java
index 3255a7357..04ef08c02 100644
--- a/src/test/java/net/snowflake/client/log/SFFormatterTest.java
+++ b/src/test/java/net/snowflake/client/log/SFFormatterTest.java
@@ -4,7 +4,7 @@
 
 package net.snowflake.client.log;
 
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import java.text.DateFormat;
 import java.text.ParseException;
@@ -15,8 +15,8 @@
 import java.util.logging.Formatter;
 import java.util.logging.Level;
 import java.util.logging.LogRecord;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 public class SFFormatterTest {
   // Change these numbers if necessary
@@ -28,7 +28,7 @@ public class SFFormatterTest {
   /** Log record generator */
   private LRGenerator recordGenerator;
 
-  @Before
+  @BeforeEach
   public void setUp() {
     recordGenerator = new LRGenerator(SFFormatter.CLASS_NAME_PREFIX + "TestClass", "TestMethod");
     recordGenerator.setFormatter(new SFFormatter());
@@ -56,8 +56,8 @@ public void testUTCTimeStampSimple() throws ParseException {
       Date date = extractDate(record);
       long nowInMs = Calendar.getInstance(TimeZone.getTimeZone("UTC")).getTimeInMillis();
       assertTrue(
-          "Time difference boundary should be less than " + TIME_DIFFERENCE_BOUNDARY + "ms",
-          nowInMs - date.getTime() < TIME_DIFFERENCE_BOUNDARY);
+          nowInMs - date.getTime() < TIME_DIFFERENCE_BOUNDARY,
+          "Time difference boundary should be less than " + TIME_DIFFERENCE_BOUNDARY + "ms");
     } finally {
       TimeZone.setDefault(originalTz);
     }
diff --git a/src/test/java/net/snowflake/client/log/SFLogLevelTest.java b/src/test/java/net/snowflake/client/log/SFLogLevelTest.java
index 5604fa013..e12271639 100644
--- a/src/test/java/net/snowflake/client/log/SFLogLevelTest.java
+++ b/src/test/java/net/snowflake/client/log/SFLogLevelTest.java
@@ -1,8 +1,8 @@
 package net.snowflake.client.log;
 
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 public class SFLogLevelTest {
 
diff --git a/src/test/java/net/snowflake/client/log/SFLoggerFactoryTest.java b/src/test/java/net/snowflake/client/log/SFLoggerFactoryTest.java
index cd3f73898..a79e25de8 100644
--- a/src/test/java/net/snowflake/client/log/SFLoggerFactoryTest.java
+++ b/src/test/java/net/snowflake/client/log/SFLoggerFactoryTest.java
@@ -3,9 +3,9 @@
  */
 package net.snowflake.client.log;
 
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 public class SFLoggerFactoryTest {
 
diff --git a/src/test/java/net/snowflake/client/log/SFToJavaLogMapperTest.java b/src/test/java/net/snowflake/client/log/SFToJavaLogMapperTest.java
index 49ee89d60..16280b1aa 100644
--- a/src/test/java/net/snowflake/client/log/SFToJavaLogMapperTest.java
+++ b/src/test/java/net/snowflake/client/log/SFToJavaLogMapperTest.java
@@ -1,20 +1,20 @@
 package net.snowflake.client.log;
 
 import static net.snowflake.client.log.SFToJavaLogMapper.toJavaUtilLoggingLevel;
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 import java.util.logging.Level;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 public class SFToJavaLogMapperTest {
 
   @Test
   public void testToJavaUtilLoggingLevel() {
-    assertEquals(toJavaUtilLoggingLevel(SFLogLevel.OFF), java.util.logging.Level.OFF);
-    assertEquals(toJavaUtilLoggingLevel(SFLogLevel.ERROR), java.util.logging.Level.SEVERE);
-    assertEquals(toJavaUtilLoggingLevel(SFLogLevel.WARN), java.util.logging.Level.WARNING);
-    assertEquals(toJavaUtilLoggingLevel(SFLogLevel.INFO), java.util.logging.Level.INFO);
+    assertEquals(toJavaUtilLoggingLevel(SFLogLevel.OFF), Level.OFF);
+    assertEquals(toJavaUtilLoggingLevel(SFLogLevel.ERROR), Level.SEVERE);
+    assertEquals(toJavaUtilLoggingLevel(SFLogLevel.WARN), Level.WARNING);
+    assertEquals(toJavaUtilLoggingLevel(SFLogLevel.INFO), Level.INFO);
     assertEquals(toJavaUtilLoggingLevel(SFLogLevel.DEBUG), Level.FINE);
-    assertEquals(toJavaUtilLoggingLevel(SFLogLevel.TRACE), java.util.logging.Level.FINEST);
+    assertEquals(toJavaUtilLoggingLevel(SFLogLevel.TRACE), Level.FINEST);
   }
 }
diff --git a/src/test/java/net/snowflake/client/log/SLF4JJJCLWrapperLatestIT.java b/src/test/java/net/snowflake/client/log/SLF4JJJCLWrapperLatestIT.java
index 0b7d55a3c..008f356a0 100644
--- a/src/test/java/net/snowflake/client/log/SLF4JJJCLWrapperLatestIT.java
+++ b/src/test/java/net/snowflake/client/log/SLF4JJJCLWrapperLatestIT.java
@@ -3,22 +3,22 @@
  */
 package net.snowflake.client.log;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import ch.qos.logback.classic.Level;
 import ch.qos.logback.classic.Logger;
 import ch.qos.logback.classic.spi.ILoggingEvent;
 import ch.qos.logback.core.Appender;
 import ch.qos.logback.core.AppenderBase;
-import net.snowflake.client.category.TestCategoryCore;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import net.snowflake.client.category.TestTags;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
 
-@Category(TestCategoryCore.class)
+@Tag(TestTags.CORE)
 public class SLF4JJJCLWrapperLatestIT {
 
   /** Message last logged using SLF4JLogger. */
@@ -55,7 +55,7 @@ private enum LogLevel {
   Logger logger = (Logger) wrapper.getLogger();
   private final Appender<ILoggingEvent> testAppender = new TestAppender();
 
-  @Before
+  @BeforeEach
   public void setUp() {
     levelToRestore = logger.getLevel();
     if (!testAppender.isStarted()) {
@@ -66,7 +66,7 @@ public void setUp() {
     logger.addAppender(testAppender);
   }
 
-  @After
+  @AfterEach
   public void tearDown() {
     logger.setLevel(levelToRestore);
     logger.detachAppender(testAppender);
diff --git a/src/test/java/net/snowflake/client/log/SLF4JLoggerLatestIT.java b/src/test/java/net/snowflake/client/log/SLF4JLoggerLatestIT.java
index 79e9829f7..9e515b03a 100644
--- a/src/test/java/net/snowflake/client/log/SLF4JLoggerLatestIT.java
+++ b/src/test/java/net/snowflake/client/log/SLF4JLoggerLatestIT.java
@@ -11,16 +11,16 @@
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
-import net.snowflake.client.category.TestCategoryCore;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.experimental.categories.Category;
+import net.snowflake.client.category.TestTags;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Tag;
 import org.slf4j.LoggerFactory;
 
 /** A class for testing {@link SLF4JLogger} */
-@Category(TestCategoryCore.class)
+@Tag(TestTags.CORE)
 public class SLF4JLoggerLatestIT extends AbstractLoggerIT {
   /** {@link SLF4JLogger} instance that will be tested in this class */
   private static final SLF4JLogger LOGGER = new SLF4JLogger(SLF4JLoggerLatestIT.class);
@@ -65,7 +65,7 @@ public class SLF4JLoggerLatestIT extends AbstractLoggerIT {
   /** Level at which last message was logged using SLF4JLogger. */
   private Level lastLogMessageLevel = null;
 
-  @BeforeClass
+  @BeforeAll
   public static void oneTimeSetUp() {
     logLevelToRestore = internalLogger.getLevel();
     additivityToRestore = internalLogger.isAdditive();
@@ -85,7 +85,7 @@ public static void oneTimeSetUp() {
     internalLogger.setAdditive(false);
   }
 
-  @AfterClass
+  @AfterAll
   public static void oneTimeTearDown() {
     // Restore original configuration
     internalLogger.setLevel(logLevelToRestore);
@@ -96,10 +96,9 @@ public static void oneTimeTearDown() {
     appendersToRestore.forEach(internalLogger::addAppender);
   }
 
-  @Before
+  @BeforeEach
   public void setUp() {
     super.setUp();
-
     if (!testAppender.isStarted()) {
       testAppender.start();
     }
@@ -107,7 +106,7 @@ public void setUp() {
     internalLogger.addAppender(testAppender);
   }
 
-  @After
+  @AfterEach
   public void tearDown() {
     internalLogger.detachAppender(testAppender);
   }
diff --git a/src/test/java/net/snowflake/client/pooling/ConnectionPoolingDataSourceIT.java b/src/test/java/net/snowflake/client/pooling/ConnectionPoolingDataSourceIT.java
index eadd984cc..09ffe213a 100644
--- a/src/test/java/net/snowflake/client/pooling/ConnectionPoolingDataSourceIT.java
+++ b/src/test/java/net/snowflake/client/pooling/ConnectionPoolingDataSourceIT.java
@@ -8,7 +8,7 @@
 import static org.hamcrest.CoreMatchers.nullValue;
 import static org.hamcrest.CoreMatchers.sameInstance;
 import static org.hamcrest.MatcherAssert.assertThat;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.fail;
 
 import java.sql.Connection;
 import java.sql.SQLException;
@@ -20,11 +20,11 @@
 import javax.sql.ConnectionEventListener;
 import javax.sql.PooledConnection;
 import net.snowflake.client.AbstractDriverIT;
-import net.snowflake.client.category.TestCategoryConnection;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import net.snowflake.client.category.TestTags;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
 
-@Category(TestCategoryConnection.class)
+@Tag(TestTags.CONNECTION)
 public class ConnectionPoolingDataSourceIT extends AbstractDriverIT {
   @Test
   public void testPooledConnection() throws SQLException {
diff --git a/src/test/java/net/snowflake/client/pooling/LogicalConnectionAlreadyClosedLatestIT.java b/src/test/java/net/snowflake/client/pooling/LogicalConnectionAlreadyClosedLatestIT.java
index ce93928ac..268989657 100644
--- a/src/test/java/net/snowflake/client/pooling/LogicalConnectionAlreadyClosedLatestIT.java
+++ b/src/test/java/net/snowflake/client/pooling/LogicalConnectionAlreadyClosedLatestIT.java
@@ -7,12 +7,12 @@
 import java.sql.SQLException;
 import java.util.Map;
 import javax.sql.PooledConnection;
-import net.snowflake.client.category.TestCategoryConnection;
+import net.snowflake.client.category.TestTags;
 import net.snowflake.client.jdbc.BaseJDBCTest;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
 
-@Category(TestCategoryConnection.class)
+@Tag(TestTags.CONNECTION)
 public class LogicalConnectionAlreadyClosedLatestIT extends BaseJDBCTest {
 
   @Test
diff --git a/src/test/java/net/snowflake/client/pooling/LogicalConnectionFeatureNotSupportedLatestIT.java b/src/test/java/net/snowflake/client/pooling/LogicalConnectionFeatureNotSupportedLatestIT.java
index 39df72aa2..d3d19c8cf 100644
--- a/src/test/java/net/snowflake/client/pooling/LogicalConnectionFeatureNotSupportedLatestIT.java
+++ b/src/test/java/net/snowflake/client/pooling/LogicalConnectionFeatureNotSupportedLatestIT.java
@@ -11,12 +11,12 @@
 import java.util.HashMap;
 import java.util.Map;
 import javax.sql.PooledConnection;
-import net.snowflake.client.category.TestCategoryConnection;
+import net.snowflake.client.category.TestTags;
 import net.snowflake.client.jdbc.BaseJDBCTest;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
 
-@Category(TestCategoryConnection.class)
+@Tag(TestTags.CONNECTION)
 public class LogicalConnectionFeatureNotSupportedLatestIT extends BaseJDBCTest {
 
   @Test
diff --git a/src/test/java/net/snowflake/client/pooling/LogicalConnectionLatestIT.java b/src/test/java/net/snowflake/client/pooling/LogicalConnectionLatestIT.java
index d25cdb485..70afaf2bc 100644
--- a/src/test/java/net/snowflake/client/pooling/LogicalConnectionLatestIT.java
+++ b/src/test/java/net/snowflake/client/pooling/LogicalConnectionLatestIT.java
@@ -3,12 +3,12 @@
  */
 package net.snowflake.client.pooling;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertThrows;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
 import static org.mockito.Mockito.doThrow;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.times;
@@ -27,14 +27,14 @@
 import java.util.Map;
 import java.util.Properties;
 import javax.sql.PooledConnection;
-import net.snowflake.client.category.TestCategoryConnection;
+import net.snowflake.client.category.TestTags;
 import net.snowflake.client.jdbc.BaseJDBCTest;
 import net.snowflake.client.jdbc.SnowflakeConnectionV1;
 import net.snowflake.client.jdbc.SnowflakeDriver;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
 
-@Category(TestCategoryConnection.class)
+@Tag(TestTags.CONNECTION)
 public class LogicalConnectionLatestIT extends BaseJDBCTest {
   Map<String, String> properties = getConnectionParameters();
 
diff --git a/src/test/java/net/snowflake/client/providers/BooleanProvider.java b/src/test/java/net/snowflake/client/providers/BooleanProvider.java
new file mode 100644
index 000000000..24d2a09d3
--- /dev/null
+++ b/src/test/java/net/snowflake/client/providers/BooleanProvider.java
@@ -0,0 +1,16 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.providers;
+
+import java.util.Arrays;
+import java.util.List;
+import org.junit.jupiter.api.extension.ExtensionContext;
+import org.junit.jupiter.params.provider.Arguments;
+
+public class BooleanProvider extends SnowflakeArgumentsProvider {
+  @Override
+  protected List<Arguments> rawArguments(ExtensionContext context) {
+    return Arrays.asList(Arguments.of(true), Arguments.of(false));
+  }
+}
diff --git a/src/test/java/net/snowflake/client/providers/ProvidersUtil.java b/src/test/java/net/snowflake/client/providers/ProvidersUtil.java
new file mode 100644
index 000000000..05b4f64c4
--- /dev/null
+++ b/src/test/java/net/snowflake/client/providers/ProvidersUtil.java
@@ -0,0 +1,37 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.providers;
+
+import java.util.ArrayList;
+import java.util.List;
+import org.apache.commons.lang3.ArrayUtils;
+import org.junit.jupiter.api.extension.ExtensionContext;
+import org.junit.jupiter.params.provider.Arguments;
+
+public class ProvidersUtil {
+  private ProvidersUtil() {}
+
+  private static List<Arguments> cartesianProduct(
+      ExtensionContext context, List<Arguments> a, SnowflakeArgumentsProvider b) {
+    List<Arguments> argsB = b.rawArguments(context);
+    List<Arguments> result = new ArrayList<>();
+    for (Arguments args : a) {
+      for (Arguments args2 : argsB) {
+        result.add(Arguments.of(ArrayUtils.addAll(args.get(), args2.get())));
+      }
+    }
+    return result;
+  }
+
+  public static List<Arguments> cartesianProduct(
+      ExtensionContext context,
+      SnowflakeArgumentsProvider provider,
+      SnowflakeArgumentsProvider... providers) {
+    List<Arguments> args = provider.rawArguments(context);
+    for (SnowflakeArgumentsProvider argProvider : providers) {
+      args = cartesianProduct(context, args, argProvider);
+    }
+    return args;
+  }
+}
diff --git a/src/test/java/net/snowflake/client/providers/ResultFormatProvider.java b/src/test/java/net/snowflake/client/providers/ResultFormatProvider.java
new file mode 100644
index 000000000..8f7ffbac4
--- /dev/null
+++ b/src/test/java/net/snowflake/client/providers/ResultFormatProvider.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.providers;
+
+import java.util.Arrays;
+import java.util.List;
+import net.snowflake.client.jdbc.ResultSetFormatType;
+import org.junit.jupiter.api.extension.ExtensionContext;
+import org.junit.jupiter.params.provider.Arguments;
+
+public class ResultFormatProvider extends SnowflakeArgumentsProvider {
+  @Override
+  protected List<Arguments> rawArguments(ExtensionContext context) {
+    return Arrays.asList(
+        Arguments.of(ResultSetFormatType.JSON),
+        Arguments.of(ResultSetFormatType.ARROW_WITH_JSON_STRUCTURED_TYPES),
+        Arguments.of(ResultSetFormatType.NATIVE_ARROW));
+  }
+}
diff --git a/src/test/java/net/snowflake/client/providers/ScaleProvider.java b/src/test/java/net/snowflake/client/providers/ScaleProvider.java
new file mode 100644
index 000000000..e94421cb0
--- /dev/null
+++ b/src/test/java/net/snowflake/client/providers/ScaleProvider.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.providers;
+
+import java.util.ArrayList;
+import java.util.List;
+import org.junit.jupiter.api.extension.ExtensionContext;
+import org.junit.jupiter.params.provider.Arguments;
+
+public class ScaleProvider extends SnowflakeArgumentsProvider {
+  @Override
+  protected List<Arguments> rawArguments(ExtensionContext context) {
+    ArrayList<Arguments> scales = new ArrayList<>();
+    for (int scale = 0; scale < 10; scale++) {
+      scales.add(Arguments.of(scale));
+    }
+    return scales;
+  }
+}
diff --git a/src/test/java/net/snowflake/client/providers/SimpleResultFormatProvider.java b/src/test/java/net/snowflake/client/providers/SimpleResultFormatProvider.java
new file mode 100644
index 000000000..1b973f966
--- /dev/null
+++ b/src/test/java/net/snowflake/client/providers/SimpleResultFormatProvider.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.providers;
+
+import java.util.Arrays;
+import java.util.List;
+import org.junit.jupiter.api.extension.ExtensionContext;
+import org.junit.jupiter.params.provider.Arguments;
+
+public class SimpleResultFormatProvider extends SnowflakeArgumentsProvider {
+  private static List<Arguments> arguments =
+      Arrays.asList(Arguments.of("JSON"), Arguments.of("ARROW"));
+
+  public static void setSupportedFormats(List<Arguments> supportedFormats) {
+    arguments = supportedFormats;
+  }
+
+  public static void resetSupportedFormats() {
+    setSupportedFormats(Arrays.asList(Arguments.of("JSON"), Arguments.of("ARROW")));
+  }
+
+  @Override
+  protected List<Arguments> rawArguments(ExtensionContext context) {
+    return arguments;
+  }
+}
diff --git a/src/test/java/net/snowflake/client/providers/SnowflakeArgumentsProvider.java b/src/test/java/net/snowflake/client/providers/SnowflakeArgumentsProvider.java
new file mode 100644
index 000000000..28d9d48d7
--- /dev/null
+++ b/src/test/java/net/snowflake/client/providers/SnowflakeArgumentsProvider.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.providers;
+
+import java.util.List;
+import java.util.stream.Stream;
+import org.junit.jupiter.api.extension.ExtensionContext;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.ArgumentsProvider;
+
+public abstract class SnowflakeArgumentsProvider implements ArgumentsProvider {
+  protected abstract List<Arguments> rawArguments(ExtensionContext context);
+
+  @Override
+  public Stream<? extends Arguments> provideArguments(ExtensionContext context) {
+    return rawArguments(context).stream();
+  }
+}
diff --git a/src/test/java/net/snowflake/client/providers/TimezoneProvider.java b/src/test/java/net/snowflake/client/providers/TimezoneProvider.java
new file mode 100644
index 000000000..163b982c7
--- /dev/null
+++ b/src/test/java/net/snowflake/client/providers/TimezoneProvider.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.providers;
+
+import java.util.Arrays;
+import java.util.List;
+import org.junit.jupiter.api.extension.ExtensionContext;
+import org.junit.jupiter.params.provider.Arguments;
+
+public class TimezoneProvider extends SnowflakeArgumentsProvider {
+  private int length;
+
+  private static List<Arguments> timeZones =
+      Arrays.asList(
+          Arguments.of("UTC"),
+          Arguments.of("America/Los_Angeles"),
+          Arguments.of("America/New_York"),
+          Arguments.of("Pacific/Honolulu"),
+          Arguments.of("Asia/Singapore"),
+          Arguments.of("CET"),
+          Arguments.of("GMT+0200"));
+
+  public TimezoneProvider(int length) {
+    this.length = length;
+  }
+
+  public TimezoneProvider() {
+    this.length = timeZones.size();
+  }
+
+  @Override
+  protected List<Arguments> rawArguments(ExtensionContext context) {
+    return timeZones.subList(0, length);
+  }
+}
diff --git a/src/test/java/net/snowflake/client/suites/ArrowTestSuite.java b/src/test/java/net/snowflake/client/suites/ArrowTestSuite.java
new file mode 100644
index 000000000..b0bfa532a
--- /dev/null
+++ b/src/test/java/net/snowflake/client/suites/ArrowTestSuite.java
@@ -0,0 +1,11 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.suites;
+
+import net.snowflake.client.category.TestTags;
+import org.junit.platform.suite.api.IncludeTags;
+
+@BaseTestSuite
+@IncludeTags(TestTags.ARROW)
+public class ArrowTestSuite {}
diff --git a/src/test/java/net/snowflake/client/suites/BaseTestSuite.java b/src/test/java/net/snowflake/client/suites/BaseTestSuite.java
new file mode 100644
index 000000000..42b3d9a53
--- /dev/null
+++ b/src/test/java/net/snowflake/client/suites/BaseTestSuite.java
@@ -0,0 +1,23 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.suites;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import org.junit.platform.suite.api.ExcludePackages;
+import org.junit.platform.suite.api.IncludeClassNamePatterns;
+import org.junit.platform.suite.api.SelectPackages;
+import org.junit.platform.suite.api.Suite;
+import org.junit.platform.suite.api.SuiteDisplayName;
+
+@Target(ElementType.TYPE)
+@Retention(RetentionPolicy.RUNTIME)
+@Suite
+@SuiteDisplayName("Testowanie")
+@SelectPackages("net.snowflake.client")
+@ExcludePackages("net.snowflake.client.suites")
+@IncludeClassNamePatterns(".+")
+public @interface BaseTestSuite {}
diff --git a/src/test/java/net/snowflake/client/suites/ConnectionOldDriverTestSuite.java b/src/test/java/net/snowflake/client/suites/ConnectionOldDriverTestSuite.java
new file mode 100644
index 000000000..6dc07481d
--- /dev/null
+++ b/src/test/java/net/snowflake/client/suites/ConnectionOldDriverTestSuite.java
@@ -0,0 +1,10 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.suites;
+
+import net.snowflake.client.category.TestTags;
+import org.junit.platform.suite.api.IncludeTags;
+
+@IncludeTags(TestTags.CONNECTION)
+public class ConnectionOldDriverTestSuite extends OldDriverTestSuite {}
diff --git a/src/test/java/net/snowflake/client/suites/ConnectionTestSuite.java b/src/test/java/net/snowflake/client/suites/ConnectionTestSuite.java
new file mode 100644
index 000000000..6ebbd1237
--- /dev/null
+++ b/src/test/java/net/snowflake/client/suites/ConnectionTestSuite.java
@@ -0,0 +1,11 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.suites;
+
+import net.snowflake.client.category.TestTags;
+import org.junit.platform.suite.api.IncludeTags;
+
+@BaseTestSuite
+@IncludeTags(TestTags.CONNECTION)
+public class ConnectionTestSuite {}
diff --git a/src/test/java/net/snowflake/client/suites/CoreOldDriverTestSuite.java b/src/test/java/net/snowflake/client/suites/CoreOldDriverTestSuite.java
new file mode 100644
index 000000000..be0763f55
--- /dev/null
+++ b/src/test/java/net/snowflake/client/suites/CoreOldDriverTestSuite.java
@@ -0,0 +1,10 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.suites;
+
+import net.snowflake.client.category.TestTags;
+import org.junit.platform.suite.api.IncludeTags;
+
+@IncludeTags(TestTags.CORE)
+public class CoreOldDriverTestSuite extends OldDriverTestSuite {}
diff --git a/src/test/java/net/snowflake/client/suites/CoreTestSuite.java b/src/test/java/net/snowflake/client/suites/CoreTestSuite.java
new file mode 100644
index 000000000..3e7a15db1
--- /dev/null
+++ b/src/test/java/net/snowflake/client/suites/CoreTestSuite.java
@@ -0,0 +1,11 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.suites;
+
+import net.snowflake.client.category.TestTags;
+import org.junit.platform.suite.api.IncludeTags;
+
+@BaseTestSuite
+@IncludeTags(TestTags.CORE)
+public class CoreTestSuite {}
diff --git a/src/test/java/net/snowflake/client/suites/DiagnosticOldDriverTestSuite.java b/src/test/java/net/snowflake/client/suites/DiagnosticOldDriverTestSuite.java
new file mode 100644
index 000000000..cdc925ecb
--- /dev/null
+++ b/src/test/java/net/snowflake/client/suites/DiagnosticOldDriverTestSuite.java
@@ -0,0 +1,10 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.suites;
+
+import net.snowflake.client.category.TestTags;
+import org.junit.platform.suite.api.IncludeTags;
+
+@IncludeTags(TestTags.DIAGNOSTIC)
+public class DiagnosticOldDriverTestSuite extends OldDriverTestSuite {}
diff --git a/src/test/java/net/snowflake/client/suites/DiagnosticTestSuite.java b/src/test/java/net/snowflake/client/suites/DiagnosticTestSuite.java
new file mode 100644
index 000000000..18a53668c
--- /dev/null
+++ b/src/test/java/net/snowflake/client/suites/DiagnosticTestSuite.java
@@ -0,0 +1,11 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.suites;
+
+import net.snowflake.client.category.TestTags;
+import org.junit.platform.suite.api.IncludeTags;
+
+@BaseTestSuite
+@IncludeTags(TestTags.DIAGNOSTIC)
+public class DiagnosticTestSuite {}
diff --git a/src/test/java/net/snowflake/client/suites/LoaderOldDriverTestSuite.java b/src/test/java/net/snowflake/client/suites/LoaderOldDriverTestSuite.java
new file mode 100644
index 000000000..897613378
--- /dev/null
+++ b/src/test/java/net/snowflake/client/suites/LoaderOldDriverTestSuite.java
@@ -0,0 +1,10 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.suites;
+
+import net.snowflake.client.category.TestTags;
+import org.junit.platform.suite.api.IncludeTags;
+
+@IncludeTags(TestTags.LOADER)
+public class LoaderOldDriverTestSuite extends OldDriverTestSuite {}
diff --git a/src/test/java/net/snowflake/client/suites/LoaderTestSuite.java b/src/test/java/net/snowflake/client/suites/LoaderTestSuite.java
new file mode 100644
index 000000000..7d4952e57
--- /dev/null
+++ b/src/test/java/net/snowflake/client/suites/LoaderTestSuite.java
@@ -0,0 +1,11 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.suites;
+
+import net.snowflake.client.category.TestTags;
+import org.junit.platform.suite.api.IncludeTags;
+
+@BaseTestSuite
+@IncludeTags(TestTags.LOADER)
+public class LoaderTestSuite {}
diff --git a/src/test/java/net/snowflake/client/suites/OldDriverTestSuite.java b/src/test/java/net/snowflake/client/suites/OldDriverTestSuite.java
new file mode 100644
index 000000000..363ad3d2a
--- /dev/null
+++ b/src/test/java/net/snowflake/client/suites/OldDriverTestSuite.java
@@ -0,0 +1,23 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.suites;
+
+import java.util.Arrays;
+import net.snowflake.client.providers.SimpleResultFormatProvider;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.platform.suite.api.AfterSuite;
+import org.junit.platform.suite.api.BeforeSuite;
+
+@BaseTestSuite
+public abstract class OldDriverTestSuite {
+  @BeforeSuite
+  public static void beforeAll() {
+    SimpleResultFormatProvider.setSupportedFormats(Arrays.asList(Arguments.of("JSON")));
+  }
+
+  @AfterSuite
+  public static void afterAll() {
+    SimpleResultFormatProvider.resetSupportedFormats();
+  }
+}
diff --git a/src/test/java/net/snowflake/client/suites/OthersOldDriverTestSuite.java b/src/test/java/net/snowflake/client/suites/OthersOldDriverTestSuite.java
new file mode 100644
index 000000000..3562d9c0e
--- /dev/null
+++ b/src/test/java/net/snowflake/client/suites/OthersOldDriverTestSuite.java
@@ -0,0 +1,10 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.suites;
+
+import net.snowflake.client.category.TestTags;
+import org.junit.platform.suite.api.IncludeTags;
+
+@IncludeTags(TestTags.OTHERS)
+public class OthersOldDriverTestSuite extends OldDriverTestSuite {}
diff --git a/src/test/java/net/snowflake/client/suites/OthersTestSuite.java b/src/test/java/net/snowflake/client/suites/OthersTestSuite.java
new file mode 100644
index 000000000..02f9f3630
--- /dev/null
+++ b/src/test/java/net/snowflake/client/suites/OthersTestSuite.java
@@ -0,0 +1,11 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.suites;
+
+import net.snowflake.client.category.TestTags;
+import org.junit.platform.suite.api.IncludeTags;
+
+@BaseTestSuite
+@IncludeTags(TestTags.OTHERS)
+public class OthersTestSuite {}
diff --git a/src/test/java/net/snowflake/client/suites/ResultSetOldDriverTestSuite.java b/src/test/java/net/snowflake/client/suites/ResultSetOldDriverTestSuite.java
new file mode 100644
index 000000000..a57873e80
--- /dev/null
+++ b/src/test/java/net/snowflake/client/suites/ResultSetOldDriverTestSuite.java
@@ -0,0 +1,10 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.suites;
+
+import net.snowflake.client.category.TestTags;
+import org.junit.platform.suite.api.IncludeTags;
+
+@IncludeTags(TestTags.RESULT_SET)
+public class ResultSetOldDriverTestSuite extends OldDriverTestSuite {}
diff --git a/src/test/java/net/snowflake/client/suites/ResultSetTestSuite.java b/src/test/java/net/snowflake/client/suites/ResultSetTestSuite.java
new file mode 100644
index 000000000..0032593c2
--- /dev/null
+++ b/src/test/java/net/snowflake/client/suites/ResultSetTestSuite.java
@@ -0,0 +1,11 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.suites;
+
+import net.snowflake.client.category.TestTags;
+import org.junit.platform.suite.api.IncludeTags;
+
+@BaseTestSuite
+@IncludeTags(TestTags.RESULT_SET)
+public class ResultSetTestSuite {}
diff --git a/src/test/java/net/snowflake/client/suites/StatementOldDriverTestSuite.java b/src/test/java/net/snowflake/client/suites/StatementOldDriverTestSuite.java
new file mode 100644
index 000000000..62ece4cec
--- /dev/null
+++ b/src/test/java/net/snowflake/client/suites/StatementOldDriverTestSuite.java
@@ -0,0 +1,10 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.suites;
+
+import net.snowflake.client.category.TestTags;
+import org.junit.platform.suite.api.IncludeTags;
+
+@IncludeTags(TestTags.STATEMENT)
+public class StatementOldDriverTestSuite extends OldDriverTestSuite {}
diff --git a/src/test/java/net/snowflake/client/suites/StatementTestSuite.java b/src/test/java/net/snowflake/client/suites/StatementTestSuite.java
new file mode 100644
index 000000000..19b96cf34
--- /dev/null
+++ b/src/test/java/net/snowflake/client/suites/StatementTestSuite.java
@@ -0,0 +1,11 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.suites;
+
+import net.snowflake.client.category.TestTags;
+import org.junit.platform.suite.api.IncludeTags;
+
+@BaseTestSuite
+@IncludeTags(TestTags.STATEMENT)
+public class StatementTestSuite {}
diff --git a/src/test/java/net/snowflake/client/suites/UnitOldDriverTestSuite.java b/src/test/java/net/snowflake/client/suites/UnitOldDriverTestSuite.java
new file mode 100644
index 000000000..8c9a9f470
--- /dev/null
+++ b/src/test/java/net/snowflake/client/suites/UnitOldDriverTestSuite.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.suites;
+
+import net.snowflake.client.category.TestTags;
+import org.junit.platform.suite.api.ExcludeTags;
+
+@ExcludeTags({
+  TestTags.CORE,
+  TestTags.ARROW,
+  TestTags.DIAGNOSTIC,
+  TestTags.CONNECTION,
+  TestTags.LOADER,
+  TestTags.OTHERS,
+  TestTags.RESULT_SET,
+  TestTags.STATEMENT
+})
+public class UnitOldDriverTestSuite extends OldDriverTestSuite {}
diff --git a/src/test/java/net/snowflake/client/suites/UnitTestSuite.java b/src/test/java/net/snowflake/client/suites/UnitTestSuite.java
new file mode 100644
index 000000000..5bd5904fe
--- /dev/null
+++ b/src/test/java/net/snowflake/client/suites/UnitTestSuite.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright (c) 2024 Snowflake Computing Inc. All rights reserved.
+ */
+package net.snowflake.client.suites;
+
+import net.snowflake.client.category.TestTags;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.platform.suite.api.ExcludeTags;
+
+@BaseTestSuite
+@DisplayName("Unit tests")
+@ExcludeTags({
+  TestTags.CORE,
+  TestTags.ARROW,
+  TestTags.DIAGNOSTIC,
+  TestTags.CONNECTION,
+  TestTags.LOADER,
+  TestTags.OTHERS,
+  TestTags.RESULT_SET,
+  TestTags.STATEMENT
+})
+public class UnitTestSuite {}
diff --git a/src/test/java/net/snowflake/client/util/SecretDetectorTest.java b/src/test/java/net/snowflake/client/util/SecretDetectorTest.java
index aa3339309..1b936b929 100644
--- a/src/test/java/net/snowflake/client/util/SecretDetectorTest.java
+++ b/src/test/java/net/snowflake/client/util/SecretDetectorTest.java
@@ -1,7 +1,7 @@
 package net.snowflake.client.util;
 
 import static org.hamcrest.MatcherAssert.assertThat;
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ArrayNode;
@@ -12,7 +12,7 @@
 import net.minidev.json.JSONObject;
 import net.snowflake.client.core.ObjectMapperFactory;
 import org.apache.commons.lang3.RandomStringUtils;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 public class SecretDetectorTest {
   @Test
diff --git a/src/test/java/net/snowflake/client/util/StopwatchTest.java b/src/test/java/net/snowflake/client/util/StopwatchTest.java
index ed5f5d743..066b450fa 100644
--- a/src/test/java/net/snowflake/client/util/StopwatchTest.java
+++ b/src/test/java/net/snowflake/client/util/StopwatchTest.java
@@ -7,18 +7,18 @@
 import static org.hamcrest.Matchers.allOf;
 import static org.hamcrest.Matchers.greaterThanOrEqualTo;
 import static org.hamcrest.Matchers.lessThanOrEqualTo;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertThrows;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import java.util.concurrent.TimeUnit;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 public class StopwatchTest {
   Stopwatch stopwatch = new Stopwatch();
 
-  @Before
+  @BeforeEach
   public void before() {
     stopwatch = new Stopwatch();
   }