Bug fix (in some cases the provided kwargs contain explicit None values while we want to fall back to the alternative; the original code intended this, but the implementation was buggy)

stamparm 2013-04-16 14:17:41 +02:00
parent 840ee26a14
commit 6fed1921ed


@@ -192,22 +192,22 @@ class Connect(object):
         kb.requestCounter += 1
         threadData.lastRequestUID = kb.requestCounter
-        url = kwargs.get("url", conf.url)
+        url = kwargs.get("url", None) or conf.url
         get = kwargs.get("get", None)
         post = kwargs.get("post", None)
         method = kwargs.get("method", None)
         cookie = kwargs.get("cookie", None)
-        ua = kwargs.get("ua", None)
-        referer = kwargs.get("referer", None)
-        host = kwargs.get("host", conf.host)
+        ua = kwargs.get("ua", None) or conf.agent
+        referer = kwargs.get("referer", None) or conf.referer
+        host = kwargs.get("host", None) or conf.host
         direct_ = kwargs.get("direct", False)
         multipart = kwargs.get("multipart", False)
         silent = kwargs.get("silent", False)
         raise404 = kwargs.get("raise404", True)
-        timeout = kwargs.get("timeout", conf.timeout)
+        timeout = kwargs.get("timeout", None) or conf.timeout
         auxHeaders = kwargs.get("auxHeaders", None)
         response = kwargs.get("response", False)
-        ignoreTimeout = kwargs.get("ignoreTimeout", kb.ignoreTimeout)
+        ignoreTimeout = kwargs.get("ignoreTimeout", False) or kb.ignoreTimeout
         refreshing = kwargs.get("refreshing", False)
         retrying = kwargs.get("retrying", False)
         crawling = kwargs.get("crawling", False)
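The root of the bug is that dict.get(key, default) returns the stored value even when that value is an explicit None, so the conf.* fallback was never reached for callers passing e.g. url=None. A minimal sketch of the difference, using a hypothetical stand-in conf object rather than sqlmap's real configuration:

# Hypothetical stand-in for sqlmap's conf object, for illustration only.
class _Conf(object):
    url = "http://www.target.com/vuln.php?id=1"
    timeout = 30

conf = _Conf()

def old_lookup(**kwargs):
    # Old code: the default is used only when the key is missing entirely,
    # so an explicit url=None is returned as None.
    return kwargs.get("url", conf.url)

def new_lookup(**kwargs):
    # Fixed code: a falsy value (None, "") also falls back to conf.url.
    return kwargs.get("url", None) or conf.url

print(old_lookup(url=None))   # None
print(new_lookup(url=None))   # http://www.target.com/vuln.php?id=1

Note that the or-based pattern also treats other falsy values (empty string, 0, False) as missing, which is why boolean options such as direct and silent keep the plain kwargs.get(key, False) form in the diff above.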