java/code/src/com/redhat/rhn/common/db/datasource/CachedStatement.java | 30 ++++++++--
java/code/src/com/redhat/rhn/common/db/datasource/test/AdvDataSourceTest.java | 21 -------
java/code/src/com/redhat/rhn/frontend/action/CSVDownloadAction.java | 12 ++++
java/code/src/com/redhat/rhn/frontend/taglibs/list/CSVTag.java | 26 +++++++-
4 files changed, 61 insertions(+), 28 deletions(-)
New commits:
commit fb05a2cc003088aba405718f551915ea92ab6eef
Author: Simon Lukasik <slukasik(a)redhat.com>
Date: Tue Aug 28 16:45:48 2012 +0200
851480 - Do not elaborate objects twice in a row.
This fixes bug 453477 more properly.
diff --git a/java/code/src/com/redhat/rhn/common/db/datasource/CachedStatement.java b/java/code/src/com/redhat/rhn/common/db/datasource/CachedStatement.java
index 477bcc1..1b2cfa4 100644
--- a/java/code/src/com/redhat/rhn/common/db/datasource/CachedStatement.java
+++ b/java/code/src/com/redhat/rhn/common/db/datasource/CachedStatement.java
@@ -76,6 +76,7 @@ public class CachedStatement {
// This is only set if the current CachedStatement is a duplicate of an
// existing one with the %s expanded out.
private CachedStatement parentStatement;
+ private List restartData = null;
// We could (and probably should) cache the ResultSet metadata here as
// well. There is no reason that the first call to each statement
@@ -437,6 +438,7 @@ public class CachedStatement {
// returning an Object and letting the caller do the casting for us.
private Object execute(String sql, Map parameterMap,
Map parameters, Mode mode, List dr) {
+ storeForRestart(sql, parameterMap, parameters, mode, dr);
PreparedStatement ps = null;
try {
Connection conn = stealConnection();
@@ -861,5 +863,28 @@ public class CachedStatement {
conn = session.connection();
return conn;
}
+
+ private void storeForRestart(String sql, Map parameterMap,
+ Map parameters, Mode mode, List dr) {
+ restartData = new ArrayList();
+ restartData.add(sql);
+ restartData.add(parameterMap);
+ restartData.add(parameters);
+ restartData.add(mode);
+ restartData.add(dr);
+ }
+
+ /**
+ * Restart the latest query.
+ * @return the result of re-running the previous query, or null if no
+ * query has been executed yet.
+ */
+ public Object restartQuery() {
+ return restartData == null ? null :
+ execute((String) restartData.get(0),
+ (Map) restartData.get(1),
+ (Map) restartData.get(2),
+ (Mode) restartData.get(3),
+ (List) restartData.get(4));
+ }
}
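For context, the hunk above simply records the arguments of the most recent execute() call and replays them on demand. A minimal sketch of that store-and-restart pattern follows; the class, method, and field names are illustrative only and are not part of the commit.

    import java.util.Map;

    // Sketch of the store-and-restart pattern: remember the last call's
    // arguments, then replay them when asked to restart.
    class RestartableQuery {
        private Object[] lastCall;                 // arguments of the most recent run()

        Object run(String sql, Map params) {
            lastCall = new Object[] {sql, params}; // remember how we were called
            return doExecute(sql, params);
        }

        Object restart() {
            // Replay the previous call, or return null if run() was never invoked.
            return lastCall == null ? null
                    : doExecute((String) lastCall[0], (Map) lastCall[1]);
        }

        private Object doExecute(String sql, Map params) {
            return sql + " with " + params;        // stand-in for the real JDBC work
        }
    }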
diff --git a/java/code/src/com/redhat/rhn/frontend/action/CSVDownloadAction.java b/java/code/src/com/redhat/rhn/frontend/action/CSVDownloadAction.java
index 6cd56e0..3b20f09 100644
--- a/java/code/src/com/redhat/rhn/frontend/action/CSVDownloadAction.java
+++ b/java/code/src/com/redhat/rhn/frontend/action/CSVDownloadAction.java
@@ -14,6 +14,8 @@
*/
package com.redhat.rhn.frontend.action;
+import com.redhat.rhn.common.db.datasource.CachedStatement;
+import com.redhat.rhn.common.db.datasource.DataResult;
import com.redhat.rhn.common.db.datasource.Elaborator;
import com.redhat.rhn.common.util.CSVWriter;
import com.redhat.rhn.common.util.download.ByteArrayStreamInfo;
@@ -47,6 +49,7 @@ import javax.servlet.http.HttpSession;
public class CSVDownloadAction extends DownloadAction {
public static final String EXPORT_COLUMNS = "__CSV__exportColumnsParam";
public static final String PAGE_LIST_DATA = "___CSV_pageListData";
+ public static final String QUERY_DATA = "__CSV_queryMode";
public static final String UNIQUE_NAME = "__CSV_uniqueName";
public static final String HEADER_NAME = "__CSV_headerName";
@@ -106,6 +109,15 @@ public class CSVDownloadAction extends DownloadAction {
*/
protected List getPageData(HttpServletRequest request, HttpSession session)
throws Exception {
+ String paramQuery = request.getParameter(QUERY_DATA);
+ if (paramQuery != null) {
+ CachedStatement query = (CachedStatement) session.getAttribute(paramQuery);
+ if (query == null) {
+ throw new Exception("Missing request parameter, " + QUERY_DATA);
+ }
+ return (DataResult) query.restartQuery();
+ }
+
String paramPageData = request.getParameter(PAGE_LIST_DATA);
if (null == paramPageData) {
throw new Exception("Missing request parameter, " + EXPORT_COLUMNS);
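Combined with the CSVTag change below, the download path now re-runs the original query instead of reusing a possibly elaborated page list. Roughly, inside getPageData() (a sketch, not the full action code; the "query_systemList" attribute name and the surrounding request/session variables are assumptions):

    // Resolve the session key named by the request parameter
    String paramQuery = request.getParameter(CSVDownloadAction.QUERY_DATA); // e.g. "query_systemList"
    CachedStatement query = (CachedStatement) session.getAttribute(paramQuery);
    // Re-run the original, unelaborated query and hand the rows to the CSV writer
    DataResult rows = (DataResult) query.restartQuery();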
diff --git a/java/code/src/com/redhat/rhn/frontend/taglibs/list/CSVTag.java b/java/code/src/com/redhat/rhn/frontend/taglibs/list/CSVTag.java
index 30570ce..5d737a9 100644
--- a/java/code/src/com/redhat/rhn/frontend/taglibs/list/CSVTag.java
+++ b/java/code/src/com/redhat/rhn/frontend/taglibs/list/CSVTag.java
@@ -14,6 +14,7 @@
*/
package com.redhat.rhn.frontend.taglibs.list;
+import com.redhat.rhn.common.db.datasource.DataResult;
import com.redhat.rhn.common.localization.LocalizationService;
import com.redhat.rhn.frontend.action.CSVDownloadAction;
import com.redhat.rhn.frontend.taglibs.list.helper.ListHelper;
@@ -208,7 +209,6 @@ public class CSVTag extends BodyTagSupport {
*/
public String makeCSVRequestParams() {
String paramExportColumns = "exportColumns_" + getUniqueName();
- String paramPageList = "pageList_" + getUniqueName();
String paramHeader = "header_" + getUniqueName();
HttpServletRequest request = (HttpServletRequest) pageContext
.getRequest();
@@ -216,11 +216,10 @@ public class CSVTag extends BodyTagSupport {
// exportColumns and pageData __must__ be in session context
// so CSVDownloadAction is able to retrieve them.
session.setAttribute(paramExportColumns, exportColumns);
- session.setAttribute(paramPageList, pageData);
String csvKey =
CSVDownloadAction.EXPORT_COLUMNS + "=" + paramExportColumns +
- "&" + CSVDownloadAction.PAGE_LIST_DATA + "=" + paramPageList +
+ "&" + exportDataToSession(session) +
"&" + CSVDownloadAction.UNIQUE_NAME + "=" + getUniqueName();
if (header != null) {
@@ -231,4 +230,25 @@ public class CSVTag extends BodyTagSupport {
return csvKey;
}
+ private String exportDataToSession(HttpSession session) {
+ if (pageData != null && pageData instanceof DataResult &&
+ ((DataResult)pageData).getMode() != null &&
+ ((DataResult)pageData).getMode().getQuery() != null) {
+ /* Better not to export the pageList; keep the query instead.
+ * 1) The query is usually smaller than the data, and since it never gets
+ * removed from the session, session memory does not grow as rapidly.
+ * 2) Part of the pageList might already be elaborated and we cannot tell
+ * which part (consider filters, alphabar, pagination, and sorting).
+ * Repeated elaboration is not a good thing; see bugs 453477, 851480, 445895.
+ */
+ String paramQuery = "query_" + getUniqueName();
+ session.setAttribute(paramQuery, ((DataResult)pageData).getMode().getQuery());
+ return CSVDownloadAction.QUERY_DATA + "=" + paramQuery;
+ }
+ else {
+ String paramPageList = "pageList_" + getUniqueName();
+ session.setAttribute(paramPageList, pageData);
+ return CSVDownloadAction.PAGE_LIST_DATA + "=" + paramPageList;
+ }
+ }
}
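Putting the constants together: for a hypothetical list whose uniqueName is "systemList" and whose pageData is a DataResult with a query, the parameters produced by makeCSVRequestParams() would come out roughly as follows (values are illustrative):

    // Session attributes written by the tag (illustrative):
    //   exportColumns_systemList -> the exportColumns string
    //   query_systemList         -> the DataResult's CachedStatement
    // csvKey appended to the download URL:
    String csvKey =
        CSVDownloadAction.EXPORT_COLUMNS + "=exportColumns_systemList" +   // __CSV__exportColumnsParam=...
        "&" + CSVDownloadAction.QUERY_DATA + "=query_systemList" +         // __CSV_queryMode=...
        "&" + CSVDownloadAction.UNIQUE_NAME + "=systemList";               // __CSV_uniqueName=systemList
    // The header branch (not shown in this hunk) presumably appends
    // CSVDownloadAction.HEADER_NAME in the same fashion.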
commit 1f887b97fd27f1ac770a11fea7e445f0a8755e77
Author: Simon Lukasik <slukasik(a)redhat.com>
Date: Wed Aug 29 17:36:37 2012 +0200
851480 - Revert "bz: 453477: duplicated entries in CSV download for some fields"
This reverts commit a0200a462868a25ea36790c7dd6725b8c047ac6c.
Conflicts:
java/code/src/com/redhat/rhn/common/db/datasource/test/AdvDataSourceTest.java
diff --git a/java/code/src/com/redhat/rhn/common/db/datasource/CachedStatement.java b/java/code/src/com/redhat/rhn/common/db/datasource/CachedStatement.java
index a5d3852..477bcc1 100644
--- a/java/code/src/com/redhat/rhn/common/db/datasource/CachedStatement.java
+++ b/java/code/src/com/redhat/rhn/common/db/datasource/CachedStatement.java
@@ -736,10 +736,7 @@ public class CachedStatement {
if (c == null) {
c = new ArrayList();
}
- Object item = getObject(rs, columnName);
- if (!c.contains(item)) {
- c.add(item);
- }
+ c.add(getObject(rs, columnName));
MethodUtil.callMethod(obj, setName, c);
continue;
}
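Reverting the contains() guard restores the old elaboration behaviour, so elaborating the same DataResult twice will once again duplicate collection-valued columns; that is why the first commit re-runs the query for CSV export rather than reusing a possibly elaborated pageList. Illustratively, reusing names from the removed test below:

    SelectMode m = ModeFactory.getMode("test_queries", "withClass");
    DataResult<TableData> dr = m.execute(Collections.EMPTY_MAP);
    dr.elaborate();   // e.g. row.getColumnName() -> [id, name]
    dr.elaborate();   // -> [id, name, id, name]: entries are duplicated again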
diff --git a/java/code/src/com/redhat/rhn/common/db/datasource/test/AdvDataSourceTest.java b/java/code/src/com/redhat/rhn/common/db/datasource/test/AdvDataSourceTest.java
index d3ff272..c740c1f 100644
--- a/java/code/src/com/redhat/rhn/common/db/datasource/test/AdvDataSourceTest.java
+++ b/java/code/src/com/redhat/rhn/common/db/datasource/test/AdvDataSourceTest.java
@@ -26,7 +26,6 @@ import com.redhat.rhn.common.hibernate.HibernateHelper;
import com.redhat.rhn.testing.RhnBaseTestCase;
import com.redhat.rhn.testing.TestUtils;
-import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.hibernate.Session;
@@ -292,26 +291,6 @@ public class AdvDataSourceTest extends RhnBaseTestCase {
}
}
- public void testDoubleElaboration() throws Exception {
- if (ConfigDefaults.get().isOracle()) {
- SelectMode m = ModeFactory.getMode("test_queries", "withClass");
- DataResult<TableData> dr = m.execute(Collections.EMPTY_MAP);
- assertTrue(dr.size() >= 1);
- dr.elaborate();
- TableData rowA = dr.get(0);
- String tableNameA = rowA.getTableName();
- String columnNameA = StringUtils.join(rowA.getColumnName().iterator(), ",");
- // Elaborate 2nd time
- dr.elaborate();
- TableData rowB = dr.get(0);
- String tableNameB = rowB.getTableName();
- String columnNameB = StringUtils.join(rowB.getColumnName().iterator(), ",");
-
- assertEquals(tableNameA, tableNameB);
- assertEquals(columnNameA, columnNameB);
- }
- }
-
public void testMaxRowsWithElaboration() throws Exception {
int startId = 1000;
int endId = startId + 50;