@@ -440,7 +440,7 @@
                           dest="bodhi_request_limit",
                           help="Maximum number of updates to request at "
                                "once from Bodhi, default: %default",
-                          default=25,
+                          default=50,
                           type="int")

         (self.options, args) = parser.parse_args()
@@ -484,7 +484,7 @@
                         retries=self.options.retries)
         self.bc = bc
         # Bodhi is too slow for our queries, therefore wait longer
-        bc.timeout = 300
+        bc.timeout = 120
         pkghelper = PkgHelper()

         if not self.options.releasever:
@@ -689,7 +689,7 @@
         single list of updates."""

         query_args = {"release": release,
-                      "limit": self.options.bodhi_request_limit,
+                      "rows_per_page": self.options.bodhi_request_limit,
                       }
         if pending:
             query_args["request"] = "testing"
The default Bodhi server timeout seems to be 60 seconds anyway, so there is no point in waiting 300 seconds on the client side. Requesting more results per page makes querying Bodhi a little faster by reducing the number of round trips and their per-request overhead. The page-size query argument is also renamed from "limit" to "rows_per_page" to match the parameter name the current Bodhi API expects.