Skip to content

Commit

Permalink
Merge pull request #242 from datafuselabs/fix/uploadstream-2gi
Browse files Browse the repository at this point in the history
fix: uploadStream over 2Gi failed
  • Loading branch information
hantmac authored Aug 1, 2024
2 parents a8ab0b4 + ba4c50a commit fb24d5a
Show file tree
Hide file tree
Showing 4 changed files with 47 additions and 4 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,7 @@ private static String stringColumnEquals(String columnName, String value) {

private static String stringColumnLike(String columnName, String pattern) {
StringBuilder filter = new StringBuilder();
filter.append(columnName).append(" LIKE ");
filter.append(columnName).append(" = ");
quoteStringLiteral(filter, pattern);

return filter.toString();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -231,13 +231,13 @@ public MediaType contentType() {
}

@Override
public long contentLength() throws IOException {
return inputStream.available() == 0 ? -1 : inputStream.available();
public long contentLength() {
return fileSize; // return the actual file size
// return inputStream.available() == 0 ? -1 : inputStream.available();
}

@Override
public void writeTo(@NonNull BufferedSink sink) throws IOException {

try (Source source = Okio.source(inputStream)) {
sink.writeAll(source);
} catch (IOException e) {
Expand Down
20 changes: 20 additions & 0 deletions databend-jdbc/src/test/java/com/databend/jdbc/TestBasicDriver.java
Original file line number Diff line number Diff line change
Expand Up @@ -82,6 +82,26 @@ public void testExecuteInvalidSql() {
});
}

@Test
public void testSchema() {
    // Verifies that pagination options fall back to their documented defaults and
    // that an information_schema query for the 'default' schema executes cleanly.
    // @Test was missing, so this test was silently skipped by the runner;
    // the catch block also swallowed failures — now it fails the test instead.
    try (Connection connection = createConnection()) {
        PaginationOptions p = connection.unwrap(DatabendConnection.class).getPaginationOptions();
        Assert.assertEquals(p.getWaitTimeSecs(), PaginationOptions.getDefaultWaitTimeSec());
        Assert.assertEquals(p.getMaxRowsInBuffer(), PaginationOptions.getDefaultMaxRowsInBuffer());
        Assert.assertEquals(p.getMaxRowsPerPage(), PaginationOptions.getDefaultMaxRowsPerPage());
        DatabendStatement statement = (DatabendStatement) connection.createStatement();
        statement.execute("set global timezone='Asia/Shanghai';");
        statement.execute("SELEcT schema_name as TABLE_SCHEM, catalog_name as TABLE_CATALOG FROM information_schema.schemata where schema_name = 'default' order by catalog_name, schema_name");
        ResultSet r = statement.getResultSet();

        while (r.next()) {
            System.out.println(r.getString(1));
        }
        // no explicit connection.close(): try-with-resources already closes it,
        // the old extra close() was redundant
    } catch (SQLException e) {
        // Surface the failure instead of printStackTrace(), which let a broken
        // query pass the test unnoticed.
        Assert.fail("testSchema failed: " + e.getMessage());
    }
}

@Test
public void testCreateUserFunction() throws SQLException {
String s = "create or replace function add_plus(int,int)\n" +
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,7 @@ private Connection createConnection()
throws SQLException {
String url = "jdbc:databend://localhost:8000/default";
return DriverManager.getConnection(url, "databend", "databend");

}

private Connection createConnection(boolean presignDisabled) throws SQLException {
Expand Down Expand Up @@ -84,6 +85,28 @@ private String generateRandomCSV(int lines) {
return csvPath;
}

private String generateLargeCSV() {
    // Generates a CSV of at least 2 GiB under java.io.tmpdir (rows of the form
    // "a,b,c,<random int>") for exercising uploadStream with files over 2Gi.
    // Returns the path of the generated file.
    String tmpDir = System.getProperty("java.io.tmpdir");
    String csvPath = tmpDir + "/large_test.csv";
    File f = new File(csvPath);
    // try-with-resources: the original leaked the FileWriter if any write threw,
    // since close() was only reached on the success path.
    try (FileWriter writer = new FileWriter(f)) {
        long fileSizeInBytes = 0;
        while (fileSizeInBytes < 2L * 1024 * 1024 * 1024) { // 2GB
            for (int i = 0; i < 1000; i++) { // write 1000 lines at a time
                int num = (int) (Math.random() * 1000);
                writer.write("a,b,c," + num + "\n");
            }
            writer.flush(); // flush before sizing so f.length() reflects written bytes
            fileSizeInBytes = f.length();
        }
    } catch (Exception e) {
        // Preserve the cause and say which file failed.
        throw new RuntimeException("failed to generate large CSV at " + csvPath, e);
    }
    return csvPath;
}

private String generateRandomCSVComplex(int lines) {
if (lines <= 0) {
return "";
Expand Down

0 comments on commit fb24d5a

Please sign in to comment.