bugfix: testapi is killed because of oom_kill 11/39411/3
authorSerenaFeng <feng.xiaowei@zte.com.cn>
Wed, 16 Aug 2017 09:12:14 +0000 (17:12 +0800)
committerSerenaFeng <feng.xiaowei@zte.com.cn>
Wed, 16 Aug 2017 09:53:22 +0000 (17:53 +0800)
if page is not provided, default to page 1 to avoid large memory consumption,
which can otherwise get testapi killed:

Out of memory: Kill process 30842 (opnfv-testapi) score 519 or sacrifice child
Killed process 30842 (opnfv-testapi) total-vm:4739716kB, anon-rss:3974612kB, file-rss:0kB

JIRA: RELENG-291

Change-Id: Id0e144c030aee390cc41cff8a48e3c997de4ad84
Signed-off-by: SerenaFeng <feng.xiaowei@zte.com.cn>
utils/test/testapi/opnfv_testapi/resources/handlers.py
utils/test/testapi/opnfv_testapi/resources/result_handlers.py

index 8a3a2db..474a203 100644 (file)
@@ -106,16 +106,22 @@ class GenericApiHandler(web.RequestHandler):
         per_page = kwargs.get('per_page', 0)
         if query is None:
             query = {}
+        pipelines = list()
+        pipelines.append({'$match': query})
 
         total_pages = 0
         if page > 0:
             cursor = dbapi.db_list(self.table, query)
             records_count = yield cursor.count()
-            total_pages = self._calc_total_pages(records_count,
-                                                 last,
-                                                 page,
-                                                 per_page)
-        pipelines = self._set_pipelines(query, sort, last, page, per_page)
+            total_pages, return_nr = self._calc_total_pages(records_count,
+                                                            last,
+                                                            page,
+                                                            per_page)
+            pipelines = self._set_pipelines(pipelines,
+                                            sort,
+                                            return_nr,
+                                            page,
+                                            per_page)
         cursor = dbapi.db_aggregate(self.table, pipelines)
         data = list()
         while (yield cursor.fetch_next):
@@ -145,21 +151,17 @@ class GenericApiHandler(web.RequestHandler):
         if page > 1 and page > total_pages:
             raises.BadRequest(
                 'Request page > total_pages [{}]'.format(total_pages))
-        return total_pages
+        return total_pages, records_nr
 
     @staticmethod
-    def _set_pipelines(query, sort, last, page, per_page):
-        pipelines = list()
-        if query:
-            pipelines.append({'$match': query})
+    def _set_pipelines(pipelines, sort, return_nr, page, per_page):
         if sort:
             pipelines.append({'$sort': sort})
 
-        if page > 0:
-            pipelines.append({'$skip': (page - 1) * per_page})
-            pipelines.append({'$limit': per_page})
-        elif last > 0:
-            pipelines.append({'$limit': last})
+        over = (page - 1) * per_page
+        left = return_nr - over
+        pipelines.append({'$skip': over})
+        pipelines.append({'$limit': per_page if per_page < left else left})
 
         return pipelines
 
index 2bf1792..9389d26 100644 (file)
@@ -155,7 +155,7 @@ class ResultsCLHandler(GenericResultHandler):
             @type last: L{string}
             @in last: query
             @required last: False
-            @param page: which page to list
+            @param page: which page to list, default to 1
             @type page: L{int}
             @in page: query
             @required page: False
@@ -180,7 +180,7 @@ class ResultsCLHandler(GenericResultHandler):
             return self.get_int('last', self.get_query_argument('last', 0))
 
         def page_limit():
-            return self.get_int('page', self.get_query_argument('page', 0))
+            return self.get_int('page', self.get_query_argument('page', 1))
 
         limitations = {
             'sort': {'_id': descend_limit()},