Dataset schema:

| Column | Type |
|---|---|
| identifier | string (length 1–155) |
| parameters | string (length 2–6.09k) |
| docstring | string (length 11–63.4k) |
| docstring_summary | string (length 0–63.4k) |
| function | string (length 29–99.8k) |
| function_tokens | sequence |
| start_point | sequence |
| end_point | sequence |
| language | string (1 class) |
| docstring_language | string (length 2–7) |
| docstring_language_predictions | string (length 18–23) |
| is_langid_reliable | string (2 values) |
**`HttpRequest._process_response(self, resp, content)`** · Process the response from a single chunk upload.

```python
def _process_response(self, resp, content):
  """Process the response from a single chunk upload.

  Args:
    resp: httplib2.Response, the response object.
    content: string, the content of the response.

  Returns:
    (status, body): (ResumableMediaStatus, object)
       The body will be None until the resumable media is fully uploaded.

  Raises:
    googleapiclient.errors.HttpError if the response was not a 2xx or a 308.
  """
  if resp.status in [200, 201]:
    self._in_error_state = False
    return None, self.postproc(resp, content)
  elif resp.status == 308:
    self._in_error_state = False
    # A "308 Resume Incomplete" indicates we are not done.
    try:
      self.resumable_progress = int(resp['range'].split('-')[1]) + 1
    except KeyError:
      # If resp doesn't contain range header, resumable progress is 0
      self.resumable_progress = 0
    if 'location' in resp:
      self.resumable_uri = resp['location']
  else:
    self._in_error_state = True
    raise HttpError(resp, content, uri=self.uri)

  return (MediaUploadProgress(self.resumable_progress, self.resumable.size()),
          None)
```
"def",
"_process_response",
"(",
"self",
",",
"resp",
",",
"content",
")",
":",
"if",
"resp",
".",
"status",
"in",
"[",
"200",
",",
"201",
"]",
":",
"self",
".",
"_in_error_state",
"=",
"False",
"return",
"None",
",",
"self",
".",
"postproc",
"(",
"resp",
",",
"content",
")",
"elif",
"resp",
".",
"status",
"==",
"308",
":",
"self",
".",
"_in_error_state",
"=",
"False",
"# A \"308 Resume Incomplete\" indicates we are not done.",
"try",
":",
"self",
".",
"resumable_progress",
"=",
"int",
"(",
"resp",
"[",
"'range'",
"]",
".",
"split",
"(",
"'-'",
")",
"[",
"1",
"]",
")",
"+",
"1",
"except",
"KeyError",
":",
"# If resp doesn't contain range header, resumable progress is 0",
"self",
".",
"resumable_progress",
"=",
"0",
"if",
"'location'",
"in",
"resp",
":",
"self",
".",
"resumable_uri",
"=",
"resp",
"[",
"'location'",
"]",
"else",
":",
"self",
".",
"_in_error_state",
"=",
"True",
"raise",
"HttpError",
"(",
"resp",
",",
"content",
",",
"uri",
"=",
"self",
".",
"uri",
")",
"return",
"(",
"MediaUploadProgress",
"(",
"self",
".",
"resumable_progress",
",",
"self",
".",
"resumable",
".",
"size",
"(",
")",
")",
",",
"None",
")"
] | [
*Span (980, 2)–(1012, 17) · language: python · docstring language: en · predictions: ['en', 'en', 'en'] · langid reliable: True*
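To make the 308 branch concrete, here is a minimal, self-contained sketch (not a row from the dataset) of the `Range`-header arithmetic: the server reports the last byte it has stored, and the upload resumes one byte later. The header value is invented for illustration.

```python
# Hypothetical headers from a "308 Resume Incomplete" response: the server
# confirms it has stored bytes 0-262143 of the upload so far.
resp = {'range': 'bytes=0-262143'}

# Mirrors _process_response: the resume offset is the upper bound of the
# confirmed range plus one; a missing Range header means nothing is stored.
try:
    resumable_progress = int(resp['range'].split('-')[1]) + 1
except KeyError:
    resumable_progress = 0

print(resumable_progress)  # 262144
```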
**`HttpRequest.to_json(self)`** · Returns a JSON representation of the HttpRequest.

```python
def to_json(self):
  """Returns a JSON representation of the HttpRequest."""
  d = copy.copy(self.__dict__)
  if d['resumable'] is not None:
    d['resumable'] = self.resumable.to_json()
  del d['http']
  del d['postproc']
  del d['_sleep']
  del d['_rand']

  return json.dumps(d)
```
"def",
"to_json",
"(",
"self",
")",
":",
"d",
"=",
"copy",
".",
"copy",
"(",
"self",
".",
"__dict__",
")",
"if",
"d",
"[",
"'resumable'",
"]",
"is",
"not",
"None",
":",
"d",
"[",
"'resumable'",
"]",
"=",
"self",
".",
"resumable",
".",
"to_json",
"(",
")",
"del",
"d",
"[",
"'http'",
"]",
"del",
"d",
"[",
"'postproc'",
"]",
"del",
"d",
"[",
"'_sleep'",
"]",
"del",
"d",
"[",
"'_rand'",
"]",
"return",
"json",
".",
"dumps",
"(",
"d",
")"
] | [
*Span (1014, 2)–(1024, 24) · language: python · docstring language: en · predictions: ['en', 'en', 'en'] · langid reliable: True*
**`HttpRequest.from_json(s, http, postproc)`** · Returns an HttpRequest populated with info from a JSON object.

```python
def from_json(s, http, postproc):
  """Returns an HttpRequest populated with info from a JSON object."""
  d = json.loads(s)
  if d['resumable'] is not None:
    d['resumable'] = MediaUpload.new_from_json(d['resumable'])
  return HttpRequest(
      http,
      postproc,
      uri=d['uri'],
      method=d['method'],
      body=d['body'],
      headers=d['headers'],
      methodId=d['methodId'],
      resumable=d['resumable'])
```
"def",
"from_json",
"(",
"s",
",",
"http",
",",
"postproc",
")",
":",
"d",
"=",
"json",
".",
"loads",
"(",
"s",
")",
"if",
"d",
"[",
"'resumable'",
"]",
"is",
"not",
"None",
":",
"d",
"[",
"'resumable'",
"]",
"=",
"MediaUpload",
".",
"new_from_json",
"(",
"d",
"[",
"'resumable'",
"]",
")",
"return",
"HttpRequest",
"(",
"http",
",",
"postproc",
",",
"uri",
"=",
"d",
"[",
"'uri'",
"]",
",",
"method",
"=",
"d",
"[",
"'method'",
"]",
",",
"body",
"=",
"d",
"[",
"'body'",
"]",
",",
"headers",
"=",
"d",
"[",
"'headers'",
"]",
",",
"methodId",
"=",
"d",
"[",
"'methodId'",
"]",
",",
"resumable",
"=",
"d",
"[",
"'resumable'",
"]",
")"
] | [
*Span (1027, 2)–(1040, 33) · language: python · docstring language: en · predictions: ['en', 'en', 'en'] · langid reliable: True*
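A sketch of round-tripping a request through these two methods. Because `to_json()` strips the non-serializable `http`, `postproc`, `_sleep`, and `_rand` attributes, the caller must supply the transport and post-processor again when rehydrating; the URI and post-processor below are invented stand-ins.

```python
import json

import httplib2
from googleapiclient.http import HttpRequest

def postproc(resp, content):
    # Minimal stand-in for model.JsonModel.response().
    return json.loads(content or '{}')

http = httplib2.Http()
request = HttpRequest(http, postproc, 'https://www.googleapis.com/example',
                      method='GET', body=None, headers={})

s = request.to_json()                                # persistable string
restored = HttpRequest.from_json(s, http, postproc)  # rehydrated request
```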
**`BatchHttpRequest.__init__(self, callback=None, batch_uri=None)`** · Constructor for a BatchHttpRequest.

```python
def __init__(self, callback=None, batch_uri=None):
  """Constructor for a BatchHttpRequest.

  Args:
    callback: callable, A callback to be called for each response, of the
      form callback(id, response, exception). The first parameter is the
      request id, and the second is the deserialized response object. The
      third is a googleapiclient.errors.HttpError exception object if an
      HTTP error occurred while processing the request, or None if no error
      occurred.
    batch_uri: string, URI to send batch requests to.
  """
  if batch_uri is None:
    batch_uri = 'https://www.googleapis.com/batch'
  self._batch_uri = batch_uri

  # Global callback to be called for each individual response in the batch.
  self._callback = callback

  # A map from id to request.
  self._requests = {}

  # A map from id to callback.
  self._callbacks = {}

  # List of request ids, in the order in which they were added.
  self._order = []

  # The last auto generated id.
  self._last_auto_id = 0

  # Unique ID on which to base the Content-ID headers.
  self._base_id = None

  # A map from request id to (httplib2.Response, content) response pairs
  self._responses = {}

  # A map of id(Credentials) that have been refreshed.
  self._refreshed_credentials = {}
```
"def",
"__init__",
"(",
"self",
",",
"callback",
"=",
"None",
",",
"batch_uri",
"=",
"None",
")",
":",
"if",
"batch_uri",
"is",
"None",
":",
"batch_uri",
"=",
"'https://www.googleapis.com/batch'",
"self",
".",
"_batch_uri",
"=",
"batch_uri",
"# Global callback to be called for each individual response in the batch.",
"self",
".",
"_callback",
"=",
"callback",
"# A map from id to request.",
"self",
".",
"_requests",
"=",
"{",
"}",
"# A map from id to callback.",
"self",
".",
"_callbacks",
"=",
"{",
"}",
"# List of request ids, in the order in which they were added.",
"self",
".",
"_order",
"=",
"[",
"]",
"# The last auto generated id.",
"self",
".",
"_last_auto_id",
"=",
"0",
"# Unique ID on which to base the Content-ID headers.",
"self",
".",
"_base_id",
"=",
"None",
"# A map from request id to (httplib2.Response, content) response pairs",
"self",
".",
"_responses",
"=",
"{",
"}",
"# A map of id(Credentials) that have been refreshed.",
"self",
".",
"_refreshed_credentials",
"=",
"{",
"}"
] | [
*Span (1077, 2)–(1114, 36) · language: python · docstring language: en · predictions: ['en', 'en', 'en'] · langid reliable: True*
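A short construction sketch: the global callback receives every sub-response once the batch runs.

```python
from googleapiclient.http import BatchHttpRequest

def on_response(request_id, response, exception):
    # exception is a googleapiclient.errors.HttpError for failed
    # sub-requests; otherwise it is None and response holds the parsed body.
    if exception is not None:
        print('request %s failed: %s' % (request_id, exception))
    else:
        print('request %s succeeded' % request_id)

batch = BatchHttpRequest(callback=on_response)
```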
**`BatchHttpRequest._refresh_and_apply_credentials(self, request, http)`** · Refresh the credentials and apply to the request.

```python
def _refresh_and_apply_credentials(self, request, http):
  """Refresh the credentials and apply to the request.

  Args:
    request: HttpRequest, the request.
    http: httplib2.Http, the global http object for the batch.
  """
  # For the credentials to refresh, but only once per refresh_token
  # If there is no http per the request then refresh the http passed in
  # via execute()
  creds = None
  if request.http is not None and hasattr(request.http.request,
      'credentials'):
    creds = request.http.request.credentials
  elif http is not None and hasattr(http.request, 'credentials'):
    creds = http.request.credentials
  if creds is not None:
    if id(creds) not in self._refreshed_credentials:
      creds.refresh(http)
      self._refreshed_credentials[id(creds)] = 1

    # Only apply the credentials if we are using the http object passed in,
    # otherwise apply() will get called during _serialize_request().
    if request.http is None or not hasattr(request.http.request,
        'credentials'):
      creds.apply(request.headers)
```
"def",
"_refresh_and_apply_credentials",
"(",
"self",
",",
"request",
",",
"http",
")",
":",
"# For the credentials to refresh, but only once per refresh_token",
"# If there is no http per the request then refresh the http passed in",
"# via execute()",
"creds",
"=",
"None",
"if",
"request",
".",
"http",
"is",
"not",
"None",
"and",
"hasattr",
"(",
"request",
".",
"http",
".",
"request",
",",
"'credentials'",
")",
":",
"creds",
"=",
"request",
".",
"http",
".",
"request",
".",
"credentials",
"elif",
"http",
"is",
"not",
"None",
"and",
"hasattr",
"(",
"http",
".",
"request",
",",
"'credentials'",
")",
":",
"creds",
"=",
"http",
".",
"request",
".",
"credentials",
"if",
"creds",
"is",
"not",
"None",
":",
"if",
"id",
"(",
"creds",
")",
"not",
"in",
"self",
".",
"_refreshed_credentials",
":",
"creds",
".",
"refresh",
"(",
"http",
")",
"self",
".",
"_refreshed_credentials",
"[",
"id",
"(",
"creds",
")",
"]",
"=",
"1",
"# Only apply the credentials if we are using the http object passed in,",
"# otherwise apply() will get called during _serialize_request().",
"if",
"request",
".",
"http",
"is",
"None",
"or",
"not",
"hasattr",
"(",
"request",
".",
"http",
".",
"request",
",",
"'credentials'",
")",
":",
"creds",
".",
"apply",
"(",
"request",
".",
"headers",
")"
] | [
*Span (1116, 2)–(1141, 34) · language: python · docstring language: en · predictions: ['en', 'en', 'en'] · langid reliable: True*
**`BatchHttpRequest._id_to_header(self, id_)`** · Convert an id to a Content-ID header value.

```python
def _id_to_header(self, id_):
  """Convert an id to a Content-ID header value.

  Args:
    id_: string, identifier of individual request.

  Returns:
    A Content-ID header with the id_ encoded into it. A UUID is prepended to
    the value because Content-ID headers are supposed to be universally
    unique.
  """
  if self._base_id is None:
    self._base_id = uuid.uuid4()

  return '<%s+%s>' % (self._base_id, quote(id_))
```
"def",
"_id_to_header",
"(",
"self",
",",
"id_",
")",
":",
"if",
"self",
".",
"_base_id",
"is",
"None",
":",
"self",
".",
"_base_id",
"=",
"uuid",
".",
"uuid4",
"(",
")",
"return",
"'<%s+%s>'",
"%",
"(",
"self",
".",
"_base_id",
",",
"quote",
"(",
"id_",
")",
")"
] | [
*Span (1143, 2)–(1157, 50) · language: python · docstring language: en · predictions: ['en', 'en', 'en'] · langid reliable: True*
**`BatchHttpRequest._header_to_id(self, header)`** · Convert a Content-ID header value to an id.

```python
def _header_to_id(self, header):
  """Convert a Content-ID header value to an id.

  Presumes the Content-ID header conforms to the format that
  _id_to_header() returns.

  Args:
    header: string, Content-ID header value.

  Returns:
    The extracted id value.

  Raises:
    BatchError if the header is not in the expected format.
  """
  if header[0] != '<' or header[-1] != '>':
    raise BatchError("Invalid value for Content-ID: %s" % header)
  if '+' not in header:
    raise BatchError("Invalid value for Content-ID: %s" % header)
  base, id_ = header[1:-1].rsplit('+', 1)

  return unquote(id_)
```
"def",
"_header_to_id",
"(",
"self",
",",
"header",
")",
":",
"if",
"header",
"[",
"0",
"]",
"!=",
"'<'",
"or",
"header",
"[",
"-",
"1",
"]",
"!=",
"'>'",
":",
"raise",
"BatchError",
"(",
"\"Invalid value for Content-ID: %s\"",
"%",
"header",
")",
"if",
"'+'",
"not",
"in",
"header",
":",
"raise",
"BatchError",
"(",
"\"Invalid value for Content-ID: %s\"",
"%",
"header",
")",
"base",
",",
"id_",
"=",
"header",
"[",
"1",
":",
"-",
"1",
"]",
".",
"rsplit",
"(",
"'+'",
",",
"1",
")",
"return",
"unquote",
"(",
"id_",
")"
] | [
*Span (1159, 2)–(1180, 23) · language: python · docstring language: en · predictions: ['en', 'en', 'en'] · langid reliable: True*
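The encode/decode pair is easy to exercise in isolation. This sketch reproduces the same scheme outside the class to show the round trip; the module-level `base_id` stands in for `self._base_id`.

```python
import uuid
from urllib.parse import quote, unquote

base_id = uuid.uuid4()

def id_to_header(id_):
    # Same format as _id_to_header: <uuid+percent-quoted-id>.
    return '<%s+%s>' % (base_id, quote(id_))

def header_to_id(header):
    # Inverse: drop the angle brackets, split on the last '+', unquote.
    base, id_ = header[1:-1].rsplit('+', 1)
    return unquote(id_)

assert header_to_id(id_to_header('item 42')) == 'item 42'
```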
**`BatchHttpRequest._serialize_request(self, request)`** · Convert an HttpRequest object into a string.

```python
def _serialize_request(self, request):
  """Convert an HttpRequest object into a string.

  Args:
    request: HttpRequest, the request to serialize.

  Returns:
    The request as a string in application/http format.
  """
  # Construct status line
  parsed = urlparse(request.uri)
  request_line = urlunparse(
      ('', '', parsed.path, parsed.params, parsed.query, '')
  )
  status_line = request.method + ' ' + request_line + ' HTTP/1.1\n'
  major, minor = request.headers.get('content-type', 'application/json').split('/')
  msg = MIMENonMultipart(major, minor)
  headers = request.headers.copy()

  if request.http is not None and hasattr(request.http.request,
      'credentials'):
    request.http.request.credentials.apply(headers)

  # MIMENonMultipart adds its own Content-Type header.
  if 'content-type' in headers:
    del headers['content-type']

  for key, value in six.iteritems(headers):
    msg[key] = value
  msg['Host'] = parsed.netloc
  msg.set_unixfrom(None)

  if request.body is not None:
    msg.set_payload(request.body)
    msg['content-length'] = str(len(request.body))

  # Serialize the mime message.
  fp = StringIO()
  # maxheaderlen=0 means don't line wrap headers.
  g = Generator(fp, maxheaderlen=0)
  g.flatten(msg, unixfrom=False)
  body = fp.getvalue()

  return status_line + body
```
"def",
"_serialize_request",
"(",
"self",
",",
"request",
")",
":",
"# Construct status line",
"parsed",
"=",
"urlparse",
"(",
"request",
".",
"uri",
")",
"request_line",
"=",
"urlunparse",
"(",
"(",
"''",
",",
"''",
",",
"parsed",
".",
"path",
",",
"parsed",
".",
"params",
",",
"parsed",
".",
"query",
",",
"''",
")",
")",
"status_line",
"=",
"request",
".",
"method",
"+",
"' '",
"+",
"request_line",
"+",
"' HTTP/1.1\\n'",
"major",
",",
"minor",
"=",
"request",
".",
"headers",
".",
"get",
"(",
"'content-type'",
",",
"'application/json'",
")",
".",
"split",
"(",
"'/'",
")",
"msg",
"=",
"MIMENonMultipart",
"(",
"major",
",",
"minor",
")",
"headers",
"=",
"request",
".",
"headers",
".",
"copy",
"(",
")",
"if",
"request",
".",
"http",
"is",
"not",
"None",
"and",
"hasattr",
"(",
"request",
".",
"http",
".",
"request",
",",
"'credentials'",
")",
":",
"request",
".",
"http",
".",
"request",
".",
"credentials",
".",
"apply",
"(",
"headers",
")",
"# MIMENonMultipart adds its own Content-Type header.",
"if",
"'content-type'",
"in",
"headers",
":",
"del",
"headers",
"[",
"'content-type'",
"]",
"for",
"key",
",",
"value",
"in",
"six",
".",
"iteritems",
"(",
"headers",
")",
":",
"msg",
"[",
"key",
"]",
"=",
"value",
"msg",
"[",
"'Host'",
"]",
"=",
"parsed",
".",
"netloc",
"msg",
".",
"set_unixfrom",
"(",
"None",
")",
"if",
"request",
".",
"body",
"is",
"not",
"None",
":",
"msg",
".",
"set_payload",
"(",
"request",
".",
"body",
")",
"msg",
"[",
"'content-length'",
"]",
"=",
"str",
"(",
"len",
"(",
"request",
".",
"body",
")",
")",
"# Serialize the mime message.",
"fp",
"=",
"StringIO",
"(",
")",
"# maxheaderlen=0 means don't line wrap headers.",
"g",
"=",
"Generator",
"(",
"fp",
",",
"maxheaderlen",
"=",
"0",
")",
"g",
".",
"flatten",
"(",
"msg",
",",
"unixfrom",
"=",
"False",
")",
"body",
"=",
"fp",
".",
"getvalue",
"(",
")",
"return",
"status_line",
"+",
"body"
] | [
*Span (1182, 2)–(1225, 29) · language: python · docstring language: en · predictions: ['en', 'lb', 'en'] · langid reliable: True*
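To illustrate the `application/http` request line the method builds, here is a self-contained sketch of the same URI handling; the URI is invented.

```python
from urllib.parse import urlparse, urlunparse

uri = 'https://www.googleapis.com/example/v1/items?pageSize=10'
parsed = urlparse(uri)

# Only path, params and query are kept: the batch endpoint supplies the
# host, so each part's request line is origin-relative.
request_line = urlunparse(('', '', parsed.path, parsed.params, parsed.query, ''))
status_line = 'GET' + ' ' + request_line + ' HTTP/1.1\n'

print(status_line)  # GET /example/v1/items?pageSize=10 HTTP/1.1
```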
**`BatchHttpRequest._deserialize_response(self, payload)`** · Convert string into httplib2 response and content.

```python
def _deserialize_response(self, payload):
  """Convert string into httplib2 response and content.

  Args:
    payload: string, headers and body as a string.

  Returns:
    A pair (resp, content), such as would be returned from httplib2.request.
  """
  # Strip off the status line
  status_line, payload = payload.split('\n', 1)
  protocol, status, reason = status_line.split(' ', 2)

  # Parse the rest of the response
  parser = FeedParser()
  parser.feed(payload)
  msg = parser.close()
  msg['status'] = status

  # Create httplib2.Response from the parsed headers.
  resp = httplib2.Response(msg)
  resp.reason = reason
  resp.version = int(protocol.split('/', 1)[1].replace('.', ''))

  content = payload.split('\r\n\r\n', 1)[1]

  return resp, content
```
"def",
"_deserialize_response",
"(",
"self",
",",
"payload",
")",
":",
"# Strip off the status line",
"status_line",
",",
"payload",
"=",
"payload",
".",
"split",
"(",
"'\\n'",
",",
"1",
")",
"protocol",
",",
"status",
",",
"reason",
"=",
"status_line",
".",
"split",
"(",
"' '",
",",
"2",
")",
"# Parse the rest of the response",
"parser",
"=",
"FeedParser",
"(",
")",
"parser",
".",
"feed",
"(",
"payload",
")",
"msg",
"=",
"parser",
".",
"close",
"(",
")",
"msg",
"[",
"'status'",
"]",
"=",
"status",
"# Create httplib2.Response from the parsed headers.",
"resp",
"=",
"httplib2",
".",
"Response",
"(",
"msg",
")",
"resp",
".",
"reason",
"=",
"reason",
"resp",
".",
"version",
"=",
"int",
"(",
"protocol",
".",
"split",
"(",
"'/'",
",",
"1",
")",
"[",
"1",
"]",
".",
"replace",
"(",
"'.'",
",",
"''",
")",
")",
"content",
"=",
"payload",
".",
"split",
"(",
"'\\r\\n\\r\\n'",
",",
"1",
")",
"[",
"1",
"]",
"return",
"resp",
",",
"content"
] | [
*Span (1227, 2)–(1253, 24) · language: python · docstring language: en · predictions: ['en', 'en', 'en'] · langid reliable: True*
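A quick sketch of the same parsing steps on a hand-written payload (the payload is invented):

```python
payload = ('HTTP/1.1 200 OK\n'
           'Content-Type: application/json\r\n'
           '\r\n'
           '{"id": "42"}')

# Strip off the status line, exactly as the method does.
status_line, rest = payload.split('\n', 1)
protocol, status, reason = status_line.split(' ', 2)

# The body follows the blank line that ends the headers.
content = rest.split('\r\n\r\n', 1)[1]

print(protocol, status, reason)  # HTTP/1.1 200 OK
print(content)                   # {"id": "42"}
```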
**`BatchHttpRequest._new_id(self)`** · Create a new id.

```python
def _new_id(self):
  """Create a new id.

  Auto incrementing number that avoids conflicts with ids already used.

  Returns:
     string, a new unique id.
  """
  self._last_auto_id += 1
  while str(self._last_auto_id) in self._requests:
    self._last_auto_id += 1

  return str(self._last_auto_id)
```
"def",
"_new_id",
"(",
"self",
")",
":",
"self",
".",
"_last_auto_id",
"+=",
"1",
"while",
"str",
"(",
"self",
".",
"_last_auto_id",
")",
"in",
"self",
".",
"_requests",
":",
"self",
".",
"_last_auto_id",
"+=",
"1",
"return",
"str",
"(",
"self",
".",
"_last_auto_id",
")"
] | [
*Span (1255, 2)–(1266, 34) · language: python · docstring language: en · predictions: ['en', 'ig', 'en'] · langid reliable: True*
**`BatchHttpRequest.add(self, request, callback=None, request_id=None)`** · Add a new request.

```python
def add(self, request, callback=None, request_id=None):
  """Add a new request.

  Every callback added will be paired with a unique id, the request_id.
  That unique id will be passed back to the callback when the response
  comes back from the server. The default behavior is to have the library
  generate its own unique id. If the caller passes in a request_id then
  they must ensure uniqueness for each request_id; if they do not, an
  exception is raised. Callers should either supply all request_ids or
  never supply a request id, to avoid such an error.

  Args:
    request: HttpRequest, Request to add to the batch.
    callback: callable, A callback to be called for this response, of the
      form callback(id, response, exception). The first parameter is the
      request id, and the second is the deserialized response object. The
      third is a googleapiclient.errors.HttpError exception object if an
      HTTP error occurred while processing the request, or None if no
      errors occurred.
    request_id: string, A unique id for the request. The id will be passed
      to the callback with the response.

  Returns:
    None

  Raises:
    BatchError if a media request is added to a batch.
    KeyError if the request_id is not unique.
  """
  if request_id is None:
    request_id = self._new_id()
  if request.resumable is not None:
    raise BatchError("Media requests cannot be used in a batch request.")
  if request_id in self._requests:
    raise KeyError("A request with this ID already exists: %s" % request_id)
  self._requests[request_id] = request
  self._callbacks[request_id] = callback
  self._order.append(request_id)
```
"def",
"add",
"(",
"self",
",",
"request",
",",
"callback",
"=",
"None",
",",
"request_id",
"=",
"None",
")",
":",
"if",
"request_id",
"is",
"None",
":",
"request_id",
"=",
"self",
".",
"_new_id",
"(",
")",
"if",
"request",
".",
"resumable",
"is",
"not",
"None",
":",
"raise",
"BatchError",
"(",
"\"Media requests cannot be used in a batch request.\"",
")",
"if",
"request_id",
"in",
"self",
".",
"_requests",
":",
"raise",
"KeyError",
"(",
"\"A request with this ID already exists: %s\"",
"%",
"request_id",
")",
"self",
".",
"_requests",
"[",
"request_id",
"]",
"=",
"request",
"self",
".",
"_callbacks",
"[",
"request_id",
"]",
"=",
"callback",
"self",
".",
"_order",
".",
"append",
"(",
"request_id",
")"
] | [
*Span (1269, 2)–(1305, 34) · language: python · docstring language: en · predictions: ['en', 'en', 'en'] · langid reliable: True*
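A usage sketch, reusing the `batch` and `on_response` from the constructor sketch above; the `service` object and its collection/method names are invented placeholders for a discovery-built API.

```python
# The docstring advises supplying either all request_ids or none; both
# styles appear here only to show the two forms of the call.
batch.add(service.items().get(id='1'), request_id='first')
batch.add(service.items().get(id='2'), callback=on_response)
```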
**`BatchHttpRequest._execute(self, http, order, requests)`** · Serialize batch request, send to server, process response.

```python
def _execute(self, http, order, requests):
  """Serialize batch request, send to server, process response.

  Args:
    http: httplib2.Http, an http object to be used to make the request with.
    order: list, list of request ids in the order they were added to the
      batch.
    requests: list, list of request objects to send.

  Raises:
    httplib2.HttpLib2Error if a transport error has occurred.
    googleapiclient.errors.BatchError if the response is the wrong format.
  """
  message = MIMEMultipart('mixed')
  # Message should not write out its own headers.
  setattr(message, '_write_headers', lambda self: None)

  # Add all the individual requests.
  for request_id in order:
    request = requests[request_id]

    msg = MIMENonMultipart('application', 'http')
    msg['Content-Transfer-Encoding'] = 'binary'
    msg['Content-ID'] = self._id_to_header(request_id)

    body = self._serialize_request(request)
    msg.set_payload(body)
    message.attach(msg)

  # encode the body: note that we can't use `as_string`, because
  # it plays games with `From ` lines.
  fp = StringIO()
  g = Generator(fp, mangle_from_=False)
  g.flatten(message, unixfrom=False)
  body = fp.getvalue()

  headers = {}
  headers['content-type'] = ('multipart/mixed; '
                             'boundary="%s"') % message.get_boundary()

  resp, content = http.request(self._batch_uri, method='POST', body=body,
                               headers=headers)

  if resp.status >= 300:
    raise HttpError(resp, content, uri=self._batch_uri)

  # Prepend with a content-type header so FeedParser can handle it.
  header = 'content-type: %s\r\n\r\n' % resp['content-type']
  # PY3's FeedParser only accepts unicode. So we should decode content
  # here, and encode each payload again.
  if six.PY3:
    content = content.decode('utf-8')
  for_parser = header + content

  parser = FeedParser()
  parser.feed(for_parser)
  mime_response = parser.close()

  if not mime_response.is_multipart():
    raise BatchError("Response not in multipart/mixed format.", resp=resp,
                     content=content)

  for part in mime_response.get_payload():
    request_id = self._header_to_id(part['Content-ID'])
    response, content = self._deserialize_response(part.get_payload())
    # We encode content here to emulate normal http response.
    if isinstance(content, six.text_type):
      content = content.encode('utf-8')
    self._responses[request_id] = (response, content)
```
"def",
"_execute",
"(",
"self",
",",
"http",
",",
"order",
",",
"requests",
")",
":",
"message",
"=",
"MIMEMultipart",
"(",
"'mixed'",
")",
"# Message should not write out it's own headers.",
"setattr",
"(",
"message",
",",
"'_write_headers'",
",",
"lambda",
"self",
":",
"None",
")",
"# Add all the individual requests.",
"for",
"request_id",
"in",
"order",
":",
"request",
"=",
"requests",
"[",
"request_id",
"]",
"msg",
"=",
"MIMENonMultipart",
"(",
"'application'",
",",
"'http'",
")",
"msg",
"[",
"'Content-Transfer-Encoding'",
"]",
"=",
"'binary'",
"msg",
"[",
"'Content-ID'",
"]",
"=",
"self",
".",
"_id_to_header",
"(",
"request_id",
")",
"body",
"=",
"self",
".",
"_serialize_request",
"(",
"request",
")",
"msg",
".",
"set_payload",
"(",
"body",
")",
"message",
".",
"attach",
"(",
"msg",
")",
"# encode the body: note that we can't use `as_string`, because",
"# it plays games with `From ` lines.",
"fp",
"=",
"StringIO",
"(",
")",
"g",
"=",
"Generator",
"(",
"fp",
",",
"mangle_from_",
"=",
"False",
")",
"g",
".",
"flatten",
"(",
"message",
",",
"unixfrom",
"=",
"False",
")",
"body",
"=",
"fp",
".",
"getvalue",
"(",
")",
"headers",
"=",
"{",
"}",
"headers",
"[",
"'content-type'",
"]",
"=",
"(",
"'multipart/mixed; '",
"'boundary=\"%s\"'",
")",
"%",
"message",
".",
"get_boundary",
"(",
")",
"resp",
",",
"content",
"=",
"http",
".",
"request",
"(",
"self",
".",
"_batch_uri",
",",
"method",
"=",
"'POST'",
",",
"body",
"=",
"body",
",",
"headers",
"=",
"headers",
")",
"if",
"resp",
".",
"status",
">=",
"300",
":",
"raise",
"HttpError",
"(",
"resp",
",",
"content",
",",
"uri",
"=",
"self",
".",
"_batch_uri",
")",
"# Prepend with a content-type header so FeedParser can handle it.",
"header",
"=",
"'content-type: %s\\r\\n\\r\\n'",
"%",
"resp",
"[",
"'content-type'",
"]",
"# PY3's FeedParser only accepts unicode. So we should decode content",
"# here, and encode each payload again.",
"if",
"six",
".",
"PY3",
":",
"content",
"=",
"content",
".",
"decode",
"(",
"'utf-8'",
")",
"for_parser",
"=",
"header",
"+",
"content",
"parser",
"=",
"FeedParser",
"(",
")",
"parser",
".",
"feed",
"(",
"for_parser",
")",
"mime_response",
"=",
"parser",
".",
"close",
"(",
")",
"if",
"not",
"mime_response",
".",
"is_multipart",
"(",
")",
":",
"raise",
"BatchError",
"(",
"\"Response not in multipart/mixed format.\"",
",",
"resp",
"=",
"resp",
",",
"content",
"=",
"content",
")",
"for",
"part",
"in",
"mime_response",
".",
"get_payload",
"(",
")",
":",
"request_id",
"=",
"self",
".",
"_header_to_id",
"(",
"part",
"[",
"'Content-ID'",
"]",
")",
"response",
",",
"content",
"=",
"self",
".",
"_deserialize_response",
"(",
"part",
".",
"get_payload",
"(",
")",
")",
"# We encode content here to emulate normal http response.",
"if",
"isinstance",
"(",
"content",
",",
"six",
".",
"text_type",
")",
":",
"content",
"=",
"content",
".",
"encode",
"(",
"'utf-8'",
")",
"self",
".",
"_responses",
"[",
"request_id",
"]",
"=",
"(",
"response",
",",
"content",
")"
] | [
*Span (1307, 2)–(1375, 55) · language: python · docstring language: en · predictions: ['en', 'pt', 'en'] · langid reliable: True*
**`BatchHttpRequest.execute(self, http=None)`** · Execute all the requests as a single batched HTTP request.

```python
def execute(self, http=None):
  """Execute all the requests as a single batched HTTP request.

  Args:
    http: httplib2.Http, an http object to be used in place of the one the
      HttpRequest request object was constructed with. If one isn't supplied
      then use an http object from the requests in this batch.

  Returns:
    None

  Raises:
    httplib2.HttpLib2Error if a transport error has occurred.
    googleapiclient.errors.BatchError if the response is the wrong format.
  """
  # If we have no requests return
  if len(self._order) == 0:
    return None

  # If http is not supplied use the first valid one given in the requests.
  if http is None:
    for request_id in self._order:
      request = self._requests[request_id]
      if request is not None:
        http = request.http
        break

  if http is None:
    raise ValueError("Missing a valid http object.")

  # Special case for OAuth2Credentials-style objects which have not yet been
  # refreshed with an initial access_token.
  if getattr(http.request, 'credentials', None) is not None:
    creds = http.request.credentials
    if not getattr(creds, 'access_token', None):
      LOGGER.info('Attempting refresh to obtain initial access_token')
      creds.refresh(http)

  self._execute(http, self._order, self._requests)

  # Loop over all the requests and check for 401s. For each 401 request the
  # credentials should be refreshed and then sent again in a separate batch.
  redo_requests = {}
  redo_order = []

  for request_id in self._order:
    resp, content = self._responses[request_id]
    if resp['status'] == '401':
      redo_order.append(request_id)
      request = self._requests[request_id]
      self._refresh_and_apply_credentials(request, http)
      redo_requests[request_id] = request

  if redo_requests:
    self._execute(http, redo_order, redo_requests)

  # Now process all callbacks that are erroring, and raise an exception for
  # ones that return a non-2xx response? Or add extra parameter to callback
  # that contains an HttpError?
  for request_id in self._order:
    resp, content = self._responses[request_id]

    request = self._requests[request_id]
    callback = self._callbacks[request_id]

    response = None
    exception = None
    try:
      if resp.status >= 300:
        raise HttpError(resp, content, uri=request.uri)
      response = request.postproc(resp, content)
    except HttpError as e:
      exception = e

    if callback is not None:
      callback(request_id, response, exception)
    if self._callback is not None:
      self._callback(request_id, response, exception)
```
"def",
"execute",
"(",
"self",
",",
"http",
"=",
"None",
")",
":",
"# If we have no requests return",
"if",
"len",
"(",
"self",
".",
"_order",
")",
"==",
"0",
":",
"return",
"None",
"# If http is not supplied use the first valid one given in the requests.",
"if",
"http",
"is",
"None",
":",
"for",
"request_id",
"in",
"self",
".",
"_order",
":",
"request",
"=",
"self",
".",
"_requests",
"[",
"request_id",
"]",
"if",
"request",
"is",
"not",
"None",
":",
"http",
"=",
"request",
".",
"http",
"break",
"if",
"http",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"\"Missing a valid http object.\"",
")",
"# Special case for OAuth2Credentials-style objects which have not yet been",
"# refreshed with an initial access_token.",
"if",
"getattr",
"(",
"http",
".",
"request",
",",
"'credentials'",
",",
"None",
")",
"is",
"not",
"None",
":",
"creds",
"=",
"http",
".",
"request",
".",
"credentials",
"if",
"not",
"getattr",
"(",
"creds",
",",
"'access_token'",
",",
"None",
")",
":",
"LOGGER",
".",
"info",
"(",
"'Attempting refresh to obtain initial access_token'",
")",
"creds",
".",
"refresh",
"(",
"http",
")",
"self",
".",
"_execute",
"(",
"http",
",",
"self",
".",
"_order",
",",
"self",
".",
"_requests",
")",
"# Loop over all the requests and check for 401s. For each 401 request the",
"# credentials should be refreshed and then sent again in a separate batch.",
"redo_requests",
"=",
"{",
"}",
"redo_order",
"=",
"[",
"]",
"for",
"request_id",
"in",
"self",
".",
"_order",
":",
"resp",
",",
"content",
"=",
"self",
".",
"_responses",
"[",
"request_id",
"]",
"if",
"resp",
"[",
"'status'",
"]",
"==",
"'401'",
":",
"redo_order",
".",
"append",
"(",
"request_id",
")",
"request",
"=",
"self",
".",
"_requests",
"[",
"request_id",
"]",
"self",
".",
"_refresh_and_apply_credentials",
"(",
"request",
",",
"http",
")",
"redo_requests",
"[",
"request_id",
"]",
"=",
"request",
"if",
"redo_requests",
":",
"self",
".",
"_execute",
"(",
"http",
",",
"redo_order",
",",
"redo_requests",
")",
"# Now process all callbacks that are erroring, and raise an exception for",
"# ones that return a non-2xx response? Or add extra parameter to callback",
"# that contains an HttpError?",
"for",
"request_id",
"in",
"self",
".",
"_order",
":",
"resp",
",",
"content",
"=",
"self",
".",
"_responses",
"[",
"request_id",
"]",
"request",
"=",
"self",
".",
"_requests",
"[",
"request_id",
"]",
"callback",
"=",
"self",
".",
"_callbacks",
"[",
"request_id",
"]",
"response",
"=",
"None",
"exception",
"=",
"None",
"try",
":",
"if",
"resp",
".",
"status",
">=",
"300",
":",
"raise",
"HttpError",
"(",
"resp",
",",
"content",
",",
"uri",
"=",
"request",
".",
"uri",
")",
"response",
"=",
"request",
".",
"postproc",
"(",
"resp",
",",
"content",
")",
"except",
"HttpError",
"as",
"e",
":",
"exception",
"=",
"e",
"if",
"callback",
"is",
"not",
"None",
":",
"callback",
"(",
"request_id",
",",
"response",
",",
"exception",
")",
"if",
"self",
".",
"_callback",
"is",
"not",
"None",
":",
"self",
".",
"_callback",
"(",
"request_id",
",",
"response",
",",
"exception",
")"
] | [
*Span (1378, 2)–(1456, 55) · language: python · docstring language: en · predictions: ['en', 'en', 'en'] · langid reliable: True*
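Running the batch is then a single call; a sketch continuing from the snippets above:

```python
import httplib2

# One POST for the whole batch; sub-requests that come back 401 are retried
# once automatically in a second internal batch after a credential refresh.
batch.execute(http=httplib2.Http())
```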
**`HttpRequestMock.__init__(self, resp, content, postproc)`** · Constructor for HttpRequestMock.

```python
def __init__(self, resp, content, postproc):
  """Constructor for HttpRequestMock

  Args:
    resp: httplib2.Response, the response to emulate coming from the request
    content: string, the response body
    postproc: callable, the post processing function usually supplied by
      the model class. See model.JsonModel.response() as an example.
  """
  self.resp = resp
  self.content = content
  self.postproc = postproc
  if resp is None:
    self.resp = httplib2.Response({'status': 200, 'reason': 'OK'})
  if 'reason' in self.resp:
    self.resp.reason = self.resp['reason']
```
"def",
"__init__",
"(",
"self",
",",
"resp",
",",
"content",
",",
"postproc",
")",
":",
"self",
".",
"resp",
"=",
"resp",
"self",
".",
"content",
"=",
"content",
"self",
".",
"postproc",
"=",
"postproc",
"if",
"resp",
"is",
"None",
":",
"self",
".",
"resp",
"=",
"httplib2",
".",
"Response",
"(",
"{",
"'status'",
":",
"200",
",",
"'reason'",
":",
"'OK'",
"}",
")",
"if",
"'reason'",
"in",
"self",
".",
"resp",
":",
"self",
".",
"resp",
".",
"reason",
"=",
"self",
".",
"resp",
"[",
"'reason'",
"]"
] | [
*Span (1465, 2)–(1480, 44) · language: python · docstring language: en · predictions: ['da', 'en', 'en'] · langid reliable: True*
**`HttpRequestMock.execute(self, http=None)`** · Execute the request.

```python
def execute(self, http=None):
  """Execute the request.

  Same behavior as HttpRequest.execute(), but the response is
  mocked and not really from an HTTP request/response.
  """
  return self.postproc(self.resp, self.content)
```
"def",
"execute",
"(",
"self",
",",
"http",
"=",
"None",
")",
":",
"return",
"self",
".",
"postproc",
"(",
"self",
".",
"resp",
",",
"self",
".",
"content",
")"
] | [
*Span (1482, 2)–(1488, 49) · language: python · docstring language: en · predictions: ['en', 'en', 'en'] · langid reliable: True*
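A self-contained sketch of the mock in use; the post-processor is a stand-in for `model.JsonModel.response()` and the canned body is invented.

```python
import json

from googleapiclient.http import HttpRequestMock

def postproc(resp, content):
    # Parse the canned body the same way a model post-processor would.
    return json.loads(content)

mock = HttpRequestMock(None, '{"kind": "example"}', postproc)
result = mock.execute()  # {'kind': 'example'}, with a synthesized 200 OK
```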
**`RequestMockBuilder.__init__(self, responses, check_unexpected=False)`** · Constructor for RequestMockBuilder.

```python
def __init__(self, responses, check_unexpected=False):
  """Constructor for RequestMockBuilder

  The constructed object should be a callable object
  that can replace the class HttpResponse.

  responses - A dictionary that maps methodIds into tuples
              of (httplib2.Response, content). The methodId
              comes from the 'rpcName' field in the discovery
              document.
  check_unexpected - A boolean setting whether or not UnexpectedMethodError
                     should be raised on unsupplied method.
  """
  self.responses = responses
  self.check_unexpected = check_unexpected
```
"def",
"__init__",
"(",
"self",
",",
"responses",
",",
"check_unexpected",
"=",
"False",
")",
":",
"self",
".",
"responses",
"=",
"responses",
"self",
".",
"check_unexpected",
"=",
"check_unexpected"
] | [
*Span (1518, 2)–(1532, 44) · language: python · docstring language: da · predictions: ['da', 'da', 'en'] · langid reliable: True*
**`RequestMockBuilder.__call__(self, http, postproc, uri, method='GET', body=None, headers=None, methodId=None, resumable=None)`** · Implements the callable interface that discovery.build() expects of requestBuilder.

```python
def __call__(self, http, postproc, uri, method='GET', body=None,
             headers=None, methodId=None, resumable=None):
  """Implements the callable interface that discovery.build() expects
  of requestBuilder, which is to build an object compatible with
  HttpRequest.execute(). See that method for the description of the
  parameters and the expected response.
  """
  if methodId in self.responses:
    response = self.responses[methodId]
    resp, content = response[:2]
    if len(response) > 2:
      # Test the body against the supplied expected_body.
      expected_body = response[2]
      if bool(expected_body) != bool(body):
        # Not expecting a body and provided one
        # or expecting a body and not provided one.
        raise UnexpectedBodyError(expected_body, body)
      if isinstance(expected_body, str):
        expected_body = json.loads(expected_body)
      body = json.loads(body)
      if body != expected_body:
        raise UnexpectedBodyError(expected_body, body)
    return HttpRequestMock(resp, content, postproc)
  elif self.check_unexpected:
    raise UnexpectedMethodError(methodId=methodId)
  else:
    model = JsonModel(False)
    return HttpRequestMock(None, '{}', model.response)
```
"def",
"__call__",
"(",
"self",
",",
"http",
",",
"postproc",
",",
"uri",
",",
"method",
"=",
"'GET'",
",",
"body",
"=",
"None",
",",
"headers",
"=",
"None",
",",
"methodId",
"=",
"None",
",",
"resumable",
"=",
"None",
")",
":",
"if",
"methodId",
"in",
"self",
".",
"responses",
":",
"response",
"=",
"self",
".",
"responses",
"[",
"methodId",
"]",
"resp",
",",
"content",
"=",
"response",
"[",
":",
"2",
"]",
"if",
"len",
"(",
"response",
")",
">",
"2",
":",
"# Test the body against the supplied expected_body.",
"expected_body",
"=",
"response",
"[",
"2",
"]",
"if",
"bool",
"(",
"expected_body",
")",
"!=",
"bool",
"(",
"body",
")",
":",
"# Not expecting a body and provided one",
"# or expecting a body and not provided one.",
"raise",
"UnexpectedBodyError",
"(",
"expected_body",
",",
"body",
")",
"if",
"isinstance",
"(",
"expected_body",
",",
"str",
")",
":",
"expected_body",
"=",
"json",
".",
"loads",
"(",
"expected_body",
")",
"body",
"=",
"json",
".",
"loads",
"(",
"body",
")",
"if",
"body",
"!=",
"expected_body",
":",
"raise",
"UnexpectedBodyError",
"(",
"expected_body",
",",
"body",
")",
"return",
"HttpRequestMock",
"(",
"resp",
",",
"content",
",",
"postproc",
")",
"elif",
"self",
".",
"check_unexpected",
":",
"raise",
"UnexpectedMethodError",
"(",
"methodId",
"=",
"methodId",
")",
"else",
":",
"model",
"=",
"JsonModel",
"(",
"False",
")",
"return",
"HttpRequestMock",
"(",
"None",
",",
"'{}'",
",",
"model",
".",
"response",
")"
] | [
*Span (1534, 2)–(1561, 56) · language: python · docstring language: en · predictions: ['en', 'en', 'en'] · langid reliable: True*
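In tests this object is handed to `discovery.build()` as `requestBuilder`; a sketch with an invented rpcName:

```python
from googleapiclient.http import RequestMockBuilder

# Canned responses keyed by each method's rpcName from the discovery
# document; None means "synthesize a default 200 OK response object".
request_builder = RequestMockBuilder({
    'example.items.get': (None, '{"id": "1"}'),  # hypothetical rpcName
})
```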
**`HttpMock.__init__(self, filename=None, headers=None)`**

```python
def __init__(self, filename=None, headers=None):
  """
  Args:
    filename: string, absolute filename to read response from
    headers: dict, header to return with response
  """
  if headers is None:
    headers = {'status': '200'}
  if filename:
    f = open(filename, 'rb')
    self.data = f.read()
    f.close()
  else:
    self.data = None
  self.response_headers = headers
  self.headers = None
  self.uri = None
  self.method = None
  self.body = None
  self.headers = None
```
"def",
"__init__",
"(",
"self",
",",
"filename",
"=",
"None",
",",
"headers",
"=",
"None",
")",
":",
"if",
"headers",
"is",
"None",
":",
"headers",
"=",
"{",
"'status'",
":",
"'200'",
"}",
"if",
"filename",
":",
"f",
"=",
"open",
"(",
"filename",
",",
"'rb'",
")",
"self",
".",
"data",
"=",
"f",
".",
"read",
"(",
")",
"f",
".",
"close",
"(",
")",
"else",
":",
"self",
".",
"data",
"=",
"None",
"self",
".",
"response_headers",
"=",
"headers",
"self",
".",
"headers",
"=",
"None",
"self",
".",
"uri",
"=",
"None",
"self",
".",
"method",
"=",
"None",
"self",
".",
"body",
"=",
"None",
"self",
".",
"headers",
"=",
"None"
] | [
*Span (1567, 2)–(1586, 23) · language: python · docstring language: en · predictions: ['en', 'error', 'th'] · langid reliable: False*
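A sketch that combines `HttpMock` with the `request_builder` from the previous snippet to build a fully offline service object; the discovery-document filename and API name are invented.

```python
from googleapiclient.discovery import build
from googleapiclient.http import HttpMock

# Serve a locally stored discovery document instead of hitting the network.
http = HttpMock('example-discovery.json', {'status': '200'})
service = build('example', 'v1', http=http, requestBuilder=request_builder)
```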
**`HttpMockSequence.__init__(self, iterable)`**

```python
def __init__(self, iterable):
  """
  Args:
    iterable: iterable, a sequence of pairs of (headers, body)
  """
  self._iterable = iterable
  self.follow_redirects = True
```
"def",
"__init__",
"(",
"self",
",",
"iterable",
")",
":",
"self",
".",
"_iterable",
"=",
"iterable",
"self",
".",
"follow_redirects",
"=",
"True"
] | [
*Span (1626, 2)–(1632, 32) · language: python · docstring language: en · predictions: ['en', 'error', 'th'] · langid reliable: False*
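`HttpMockSequence` plays back one canned `(headers, body)` pair per request, which makes retry paths easy to exercise; a sketch:

```python
from googleapiclient.http import HttpMockSequence

# The first request sees a 401, the retry sees a 200: useful for testing
# credential-refresh logic.
http = HttpMockSequence([
    ({'status': '401'}, ''),
    ({'status': '200'}, '{"data": "hello"}'),
])
```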
**`Filter.__init__(self, source, encoding)`** · Creates a Filter.

```python
def __init__(self, source, encoding):
    """Creates a Filter

    :arg source: the source token stream

    :arg encoding: the encoding to set

    """
    base.Filter.__init__(self, source)
    self.encoding = encoding
```
"def",
"__init__",
"(",
"self",
",",
"source",
",",
"encoding",
")",
":",
"base",
".",
"Filter",
".",
"__init__",
"(",
"self",
",",
"source",
")",
"self",
".",
"encoding",
"=",
"encoding"
] | [
*Span (7, 4)–(16, 32) · language: python · docstring language: en · predictions: ['en', 'gl', 'en'] · langid reliable: True*
**`Node.__init__(self, children=None, connector=None, negated=False)`** · Construct a new Node. If no connector is given, use the default.

```python
def __init__(self, children=None, connector=None, negated=False):
    """Construct a new Node. If no connector is given, use the default."""
    self.children = children[:] if children else []
    self.connector = connector or self.default
    self.negated = negated
```
"def",
"__init__",
"(",
"self",
",",
"children",
"=",
"None",
",",
"connector",
"=",
"None",
",",
"negated",
"=",
"False",
")",
":",
"self",
".",
"children",
"=",
"children",
"[",
":",
"]",
"if",
"children",
"else",
"[",
"]",
"self",
".",
"connector",
"=",
"connector",
"or",
"self",
".",
"default",
"self",
".",
"negated",
"=",
"negated"
] | [
*Span (20, 4)–(24, 30) · language: python · docstring language: en · predictions: ['en', 'en', 'en'] · langid reliable: True*
**`Node._new_instance(cls, children=None, connector=None, negated=False)`** · Create a new instance of this class when new Nodes (or subclasses) are needed in the internal code in this class.

```python
def _new_instance(cls, children=None, connector=None, negated=False):
    """
    Create a new instance of this class when new Nodes (or subclasses) are
    needed in the internal code in this class. Normally, it just shadows
    __init__(). However, subclasses with an __init__ signature that aren't
    an extension of Node.__init__ might need to implement this method to
    allow a Node to create a new instance of them (if they have any extra
    setting up to do).
    """
    obj = Node(children, connector, negated)
    obj.__class__ = cls
    return obj
```
"def",
"_new_instance",
"(",
"cls",
",",
"children",
"=",
"None",
",",
"connector",
"=",
"None",
",",
"negated",
"=",
"False",
")",
":",
"obj",
"=",
"Node",
"(",
"children",
",",
"connector",
",",
"negated",
")",
"obj",
".",
"__class__",
"=",
"cls",
"return",
"obj"
] | [
*Span (29, 4)–(40, 18) · language: python · docstring language: en · predictions: ['en', 'error', 'th'] · langid reliable: False*
**`Node.__len__(self)`** · Return the number of children this node has.

```python
def __len__(self):
    """Return the number of children this node has."""
    return len(self.children)
```
"def",
"__len__",
"(",
"self",
")",
":",
"return",
"len",
"(",
"self",
".",
"children",
")"
] | [
*Span (55, 4)–(57, 33) · language: python · docstring language: en · predictions: ['en', 'en', 'en'] · langid reliable: True*
**`Node.__bool__(self)`** · Return whether or not this node has children.

```python
def __bool__(self):
    """Return whether or not this node has children."""
    return bool(self.children)
```
"def",
"__bool__",
"(",
"self",
")",
":",
"return",
"bool",
"(",
"self",
".",
"children",
")"
] | [
*Span (59, 4)–(61, 34) · language: python · docstring language: en · predictions: ['en', 'en', 'en'] · langid reliable: True*
**`Node.__contains__(self, other)`** · Return True if 'other' is a direct child of this instance.

```python
def __contains__(self, other):
    """Return True if 'other' is a direct child of this instance."""
    return other in self.children
```
"def",
"__contains__",
"(",
"self",
",",
"other",
")",
":",
"return",
"other",
"in",
"self",
".",
"children"
] | [
*Span (63, 4)–(65, 37) · language: python · docstring language: en · predictions: ['en', 'en', 'en'] · langid reliable: True*
**`Node.add(self, data, conn_type, squash=True)`** · Combine this tree and the data represented by data using the connector conn_type.

```python
def add(self, data, conn_type, squash=True):
    """
    Combine this tree and the data represented by data using the
    connector conn_type. The combine is done by squashing the node other
    away if possible.

    This tree (self) will never be pushed to a child node of the
    combined tree, nor will the connector or negated properties change.

    Return a node which can be used in place of data regardless if the
    node other got squashed or not.

    If `squash` is False the data is prepared and added as a child to
    this tree without further logic.
    """
    if self.connector == conn_type and data in self.children:
        return data
    if not squash:
        self.children.append(data)
        return data
    if self.connector == conn_type:
        # We can reuse self.children to append or squash the node other.
        if (isinstance(data, Node) and not data.negated and
                (data.connector == conn_type or len(data) == 1)):
            # We can squash the other node's children directly into this
            # node. We are just doing (AB)(CD) == (ABCD) here, with the
            # addition that if the length of the other node is 1 the
            # connector doesn't matter. However, for the len(self) == 1
            # case we don't want to do the squashing, as it would alter
            # self.connector.
            self.children.extend(data.children)
            return self
        else:
            # We could use perhaps additional logic here to see if some
            # children could be used for pushdown here.
            self.children.append(data)
            return data
    else:
        obj = self._new_instance(self.children, self.connector,
                                 self.negated)
        self.connector = conn_type
        self.children = [obj, data]
        return data
```
"def",
"add",
"(",
"self",
",",
"data",
",",
"conn_type",
",",
"squash",
"=",
"True",
")",
":",
"if",
"self",
".",
"connector",
"==",
"conn_type",
"and",
"data",
"in",
"self",
".",
"children",
":",
"return",
"data",
"if",
"not",
"squash",
":",
"self",
".",
"children",
".",
"append",
"(",
"data",
")",
"return",
"data",
"if",
"self",
".",
"connector",
"==",
"conn_type",
":",
"# We can reuse self.children to append or squash the node other.",
"if",
"(",
"isinstance",
"(",
"data",
",",
"Node",
")",
"and",
"not",
"data",
".",
"negated",
"and",
"(",
"data",
".",
"connector",
"==",
"conn_type",
"or",
"len",
"(",
"data",
")",
"==",
"1",
")",
")",
":",
"# We can squash the other node's children directly into this",
"# node. We are just doing (AB)(CD) == (ABCD) here, with the",
"# addition that if the length of the other node is 1 the",
"# connector doesn't matter. However, for the len(self) == 1",
"# case we don't want to do the squashing, as it would alter",
"# self.connector.",
"self",
".",
"children",
".",
"extend",
"(",
"data",
".",
"children",
")",
"return",
"self",
"else",
":",
"# We could use perhaps additional logic here to see if some",
"# children could be used for pushdown here.",
"self",
".",
"children",
".",
"append",
"(",
"data",
")",
"return",
"data",
"else",
":",
"obj",
"=",
"self",
".",
"_new_instance",
"(",
"self",
".",
"children",
",",
"self",
".",
"connector",
",",
"self",
".",
"negated",
")",
"self",
".",
"connector",
"=",
"conn_type",
"self",
".",
"children",
"=",
"[",
"obj",
",",
"data",
"]",
"return",
"data"
] | [
*Span (77, 4)–(119, 23) · language: python · docstring language: en · predictions: ['en', 'error', 'th'] · langid reliable: False*
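A minimal sketch of the squashing behavior, assuming the `Node` class above is in scope and using a small subclass to supply the `default` connector the constructor expects (Django's `Q` objects play this role in practice):

```python
class AndNode(Node):
    default = 'AND'

root = AndNode(children=['a'])

# Same connector, plain data: appended in place.
root.add('b', 'AND')

# Same connector, non-negated node: its children are squashed directly
# into root, i.e. (AB)(CD) == (ABCD).
root.add(AndNode(children=['c', 'd']), 'AND')

print(root.children)  # ['a', 'b', 'c', 'd']
```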
**`Node.negate(self)`** · Negate the sense of the root connector.

```python
def negate(self):
    """Negate the sense of the root connector."""
    self.negated = not self.negated
```
"def",
"negate",
"(",
"self",
")",
":",
"self",
".",
"negated",
"=",
"not",
"self",
".",
"negated"
] | [
*Span (121, 4)–(123, 39) · language: python · docstring language: en · predictions: ['en', 'en', 'en'] · langid reliable: True*
ItemAttributeKNN.__init__ | (self, train_file=None, test_file=None, output_file=None, metadata_file=None, similarity_file=None,
k_neighbors=30, rank_length=10, as_binary=False, as_similar_first=True, metadata_as_binary=False,
metadata_similarity_sep='\t', similarity_metric="cosine", sep='\t', output_sep='\t') |
Item Attribute KNN for Item Recommendation
This algorithm predicts a rank for each user based on the similar items that he/her consumed,
using a metadata or similarity pre-computed file
Usage::
>> ItemAttributeKNN(train, test, similarity_file=sim_matrix, as_similar_first=True).compute()
>> ItemAttributeKNN(train, test, metadata_file=metadata, as_similar_first=True).compute()
:param train_file: File which contains the train set. This file needs to have at least 3 columns
(user item feedback_value).
:type train_file: str
:param test_file: File which contains the test set. This file needs to have at least 3 columns
(user item feedback_value).
:type test_file: str, default None
:param output_file: File with dir to write the final predictions
:type output_file: str, default None
:param metadata_file: File which contains the metadata set. This file needs to have at least 2 columns
(item metadata).
:type metadata_file: str, default None
:param similarity_file: File which contains the similarity set. This file needs to have at least 3 columns
(item item similarity).
:type similarity_file: str, default None
:param k_neighbors: Number of neighbors to use. If None, k_neighbor = int(sqrt(n_users))
:type k_neighbors: int, default None
:param rank_length: Size of the rank that must be generated by the predictions of the recommender algorithm
:type rank_length: int, default 10
:param as_binary: If True, the explicit feedback will be transform to binary
:type as_binary: bool, default False
:param as_similar_first: If True, for each unknown item, which will be predicted, we first look for its k
most similar users and then take the intersection with the users that
seen that item.
:type as_similar_first: bool, default True
:param metadata_as_binary: f True, the explicit value will be transform to binary
:type metadata_as_binary: bool, default False
:param metadata_similarity_sep: Delimiter for similarity or metadata file
:type metadata_similarity_sep: str, default '\t'
:param similarity_metric: Pairwise metric to compute the similarity between the items. Reference about
distances: http://docs.scipy.org/doc/scipy-0.14.0/reference/generated/scipy.spatial.distance.pdist.html
:type similarity_metric: str, default cosine
:param sep: Delimiter for input files file
:type sep: str, default '\t'
:param output_sep: Delimiter for output file
:type output_sep: str, default '\t'
|
Item Attribute KNN for Item Recommendation | def __init__(self, train_file=None, test_file=None, output_file=None, metadata_file=None, similarity_file=None,
k_neighbors=30, rank_length=10, as_binary=False, as_similar_first=True, metadata_as_binary=False,
metadata_similarity_sep='\t', similarity_metric="cosine", sep='\t', output_sep='\t'):
"""
Item Attribute KNN for Item Recommendation
This algorithm predicts a rank for each user based on the similar items that he/her consumed,
using a metadata or similarity pre-computed file
Usage::
>> ItemAttributeKNN(train, test, similarity_file=sim_matrix, as_similar_first=True).compute()
>> ItemAttributeKNN(train, test, metadata_file=metadata, as_similar_first=True).compute()
:param train_file: File which contains the train set. This file needs to have at least 3 columns
(user item feedback_value).
:type train_file: str
:param test_file: File which contains the test set. This file needs to have at least 3 columns
(user item feedback_value).
:type test_file: str, default None
:param output_file: File with dir to write the final predictions
:type output_file: str, default None
:param metadata_file: File which contains the metadata set. This file needs to have at least 2 columns
(item metadata).
:type metadata_file: str, default None
:param similarity_file: File which contains the similarity set. This file needs to have at least 3 columns
(item item similarity).
:type similarity_file: str, default None
:param k_neighbors: Number of neighbors to use. If None, k_neighbor = int(sqrt(n_users))
:type k_neighbors: int, default None
:param rank_length: Size of the rank that must be generated by the predictions of the recommender algorithm
:type rank_length: int, default 10
:param as_binary: If True, the explicit feedback will be transformed to binary
:type as_binary: bool, default False
:param as_similar_first: If True, for each unknown item to be predicted, we first look for its k
most similar items and then take the intersection with the users that
have seen that item.
:type as_similar_first: bool, default True
:param metadata_as_binary: If True, the explicit value will be transformed to binary
:type metadata_as_binary: bool, default False
:param metadata_similarity_sep: Delimiter for similarity or metadata file
:type metadata_similarity_sep: str, default '\t'
:param similarity_metric: Pairwise metric to compute the similarity between the items. Reference about
distances: http://docs.scipy.org/doc/scipy-0.14.0/reference/generated/scipy.spatial.distance.pdist.html
:type similarity_metric: str, default cosine
:param sep: Delimiter for input files
:type sep: str, default '\t'
:param output_sep: Delimiter for output file
:type output_sep: str, default '\t'
"""
super(ItemAttributeKNN, self).__init__(train_file=train_file, test_file=test_file, output_file=output_file,
k_neighbors=k_neighbors, rank_length=rank_length, as_binary=as_binary,
as_similar_first=as_similar_first, similarity_metric=similarity_metric,
sep=sep, output_sep=output_sep)
self.recommender_name = 'Item Attribute KNN Algorithm'
self.metadata_file = metadata_file
self.similarity_file = similarity_file
self.metadata_as_binary = metadata_as_binary
self.metadata_similarity_sep = metadata_similarity_sep | [23, 4] | [96, 62] | python | en | ['en', 'error', 'th'] | False
ItemAttributeKNN.init_model | (self) |
Method to fit the model. Creates a similarity matrix from the metadata file or loads a pre-computed
similarity matrix
|
Method to fit the model. Creates a similarity matrix from the metadata file or loads a pre-computed
similarity matrix | def init_model(self):
"""
Method to fit the model. Creates a similarity matrix from the metadata file or loads a pre-computed
similarity matrix
"""
self.similar_items = defaultdict(list)
# Set the value for k
if self.k_neighbors is None:
self.k_neighbors = int(np.sqrt(len(self.items)))
if self.metadata_file is not None:
metadata = ReadFile(self.metadata_file, sep=self.metadata_similarity_sep, as_binary=self.metadata_as_binary
).read_metadata_or_similarity()
self.matrix = np.zeros((len(self.items), len(metadata['col_2'])))
meta_to_meta_id = {}
for m, data in enumerate(metadata['col_2']):
meta_to_meta_id[data] = m
for item in metadata['col_1']:
for m in metadata['dict'][item]:
self.matrix[self.item_to_item_id[item], meta_to_meta_id[m]] = metadata['dict'][item][m]
# create header info for metadata
sparsity = (1 - (metadata['number_interactions'] / (len(metadata['col_1']) * len(metadata['col_2'])))) * 100
self.extra_info_header = ">> metadata:: %d items and %d metadata (%d interactions) | sparsity:: %.2f%%" % \
(len(metadata['col_1']), len(metadata['col_2']), metadata['number_interactions'],
sparsity)
# Create similarity matrix based on metadata or similarity file. Transpose=False, because it is an
# item x metadata matrix
self.si_matrix = self.compute_similarity(transpose=False)
elif self.similarity_file is not None:
similarity = ReadFile(self.similarity_file, sep=self.metadata_similarity_sep, as_binary=False
).read_metadata_or_similarity()
self.si_matrix = np.zeros((len(self.items), len(self.items)))
# Fill similarity matrix
for i in similarity['col_1']:
for i_j in similarity['dict'][i]:
self.si_matrix[self.item_to_item_id[i], self.item_to_item_id[int(i_j)]] = similarity['dict'][i][i_j]
# Remove NaNs
self.si_matrix[np.isnan(self.si_matrix)] = 0.0
else:
raise ValueError("This algorithm needs a similarity matrix or a metadata file!")
# Create original matrix user x item for prediction process
self.create_matrix()
for i_id, item in enumerate(self.items):
self.similar_items[i_id] = sorted(range(len(self.si_matrix[i_id])),
key=lambda k: -self.si_matrix[i_id][k])[1:self.k_neighbors + 1] | [98, 4] | [158, 109] | python | en | ['en', 'error', 'th'] | False
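
A minimal sketch (numpy assumed, toy 3x3 similarity matrix) of the neighbor-selection step at the end of init_model: each row of the item-item similarity matrix is sorted by descending similarity and the top k indices are kept, skipping rank 0 because every item is most similar to itself.

import numpy as np

si_matrix = np.array([[1.0, 0.8, 0.1],
                      [0.8, 1.0, 0.3],
                      [0.1, 0.3, 1.0]])
k_neighbors = 2

similar_items = {}
for i_id in range(si_matrix.shape[0]):
    # Indices sorted by descending similarity; drop position 0 (the item itself).
    order = sorted(range(len(si_matrix[i_id])), key=lambda k: -si_matrix[i_id][k])
    similar_items[i_id] = order[1:k_neighbors + 1]

print(similar_items)  # {0: [1, 2], 1: [0, 2], 2: [1, 0]}
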
run | (argv=None) | The main function which creates the pipeline and runs it. | The main function which creates the pipeline and runs it. | def run(argv=None):
"""The main function which creates the pipeline and runs it."""
parser = argparse.ArgumentParser()
# Here we add some specific command line arguments we expect. Specifically
# we have the input file to load and the output table to write to.
parser.add_argument(
'--input', dest='input', required=False,
help='Input file to read. This can be a local file or '
'a file in a Google Storage Bucket.',
# This example file contains a total of only 10 lines.
# It is useful for developing on a small set of data
default='gs://python-dataflow-example/data_files/head_usa_names.csv')
# This defaults to the temp dataset in your BigQuery project. You'll have
# to create the temp dataset yourself using bq mk temp
parser.add_argument('--output', dest='output', required=False,
help='Output BQ table to write results to.',
default='lake.usa_names_transformed')
# Parse arguments from the command line.
known_args, pipeline_args = parser.parse_known_args(argv)
# DataTransformation is a class we built in this script to hold the logic for
# transforming the file into a BigQuery table.
data_ingestion = DataTransformation()
# Initiate the pipeline using the pipeline arguments passed in from the
# command line. This includes information like where Dataflow should
# store temp files, and what the project id is.
p = beam.Pipeline(options=PipelineOptions(pipeline_args))
schema = parse_table_schema_from_json(data_ingestion.schema_str)
(p
# Read the file. This is the source of the pipeline. All further
# processing starts with lines read from the file. We use the input
# argument from the command line. We also skip the first line which is a
# header row.
| 'Read From Text' >> beam.io.ReadFromText(known_args.input,
skip_header_lines=1)
# This stage of the pipeline translates from a CSV file single row
# input as a string, to a dictionary object consumable by BigQuery.
# It refers to a function we have written. This function will
# be run in parallel on different workers using input from the
# previous stage of the pipeline.
| 'String to BigQuery Row' >> beam.Map(lambda s:
data_ingestion.parse_method(s))
| 'Write to BigQuery' >> beam.io.Write(
beam.io.BigQuerySink(
# The table name is a required argument for the BigQuery sink.
# In this case we use the value passed in from the command line.
known_args.output,
# Here we use the JSON schema read in from a JSON file.
# Specifying the schema allows the API to create the table correctly if it does not yet exist.
schema=schema,
# Creates the table in BigQuery if it does not yet exist.
create_disposition=beam.io.BigQueryDisposition.CREATE_IF_NEEDED,
# Deletes all data in the BigQuery table before writing.
write_disposition=beam.io.BigQueryDisposition.WRITE_TRUNCATE)))
p.run().wait_until_finish() | [108, 0] | [164, 31] | python | en | ['en', 'en', 'en'] | True
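
A small illustration (hypothetical flag values) of how parse_known_args splits the command line in run above: arguments this script declares land in known_args, and everything else is passed through to Dataflow's PipelineOptions.

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--input', dest='input', default='gs://bucket/head_usa_names.csv')
parser.add_argument('--output', dest='output', default='lake.usa_names_transformed')

argv = ['--input', 'gs://bucket/data.csv',
        '--runner', 'DataflowRunner', '--project', 'my-project']
known_args, pipeline_args = parser.parse_known_args(argv)

print(known_args.input)  # gs://bucket/data.csv
print(pipeline_args)     # ['--runner', 'DataflowRunner', '--project', 'my-project']
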
DataTransformation.parse_method | (self, string_input) | This method translates a single line of comma separated values to a
dictionary which can be loaded into BigQuery.
Args:
string_input: A comma separated list of values in the form of
state_abbreviation,gender,year,name,count_of_babies,dataset_created_date
example string_input: KS,F,1923,Dorothy,654,11/28/2016
Returns:
A dict mapping BigQuery column names as keys to the corresponding value
parsed from string_input. In this example, the data is not transformed, and
remains in the same format as the CSV. There are no date format transformations.
example output:
{'state': 'KS',
'gender': 'F',
'year': '1923-01-01', <- This is the BigQuery date format.
'name': 'Dorothy',
'number': '654',
'created_date': '11/28/2016'
}
| This method translates a single line of comma separated values to a
dictionary which can be loaded into BigQuery. | def parse_method(self, string_input):
"""This method translates a single line of comma separated values to a
dictionary which can be loaded into BigQuery.
Args:
string_input: A comma separated list of values in the form of
state_abbreviation,gender,year,name,count_of_babies,dataset_created_date
example string_input: KS,F,1923,Dorothy,654,11/28/2016
Returns:
A dict mapping BigQuery column names as keys to the corresponding value
parsed from string_input. In this example, the data is not transformed, and
remains in the same format as the CSV. There are no date format transformations.
example output:
{'state': 'KS',
'gender': 'F',
'year': '1923-01-01', <- This is the BigQuery date format.
'name': 'Dorothy',
'number': '654',
'created_date': '11/28/2016'
}
"""
# Strip out return characters and quote characters.
schema = parse_table_schema_from_json(self.schema_str)
field_map = [f for f in schema.fields]
# Use a CSV Reader which can handle quoted strings etc.
reader = csv.reader(string_input.split('\n'))
for csv_row in reader:
if (sys.version_info.major < 3.0):
values = [x.decode('utf8') for x in csv_row]
else:
values = csv_row
# Our source data only contains year, so default January 1st as the
# month and day.
month = u'01'
day = u'01'
# The year comes from our source data.
year = values[2]
row = {}
i = 0
# Iterate over the values from our csv file, applying any transformation logic.
for value in values:
# If the schema indicates this field is a date format, we must
# transform the date from the source data into a format that
# BigQuery can understand.
if field_map[i].type == 'DATE':
# Format the date to YYYY-MM-DD format which BigQuery
# accepts.
value = u'-'.join((year, month, day))
row[field_map[i].name] = value
i += 1
return row | [48, 4] | [105, 22] | python | en | ['en', 'en', 'en'] | True
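
A self-contained sketch of the same row-to-dict mapping with a hard-coded toy schema (the real field names and types come from the JSON schema file): the DATE field is rebuilt as YYYY-01-01 from the year column, exactly as parse_method does.

import csv

field_names = ['state', 'gender', 'year', 'name', 'number', 'created_date']
field_types = ['STRING', 'STRING', 'DATE', 'STRING', 'STRING', 'STRING']

line = 'KS,F,1923,Dorothy,654,11/28/2016'
for values in csv.reader(line.split('\n')):
    year = values[2]
    row = {}
    for i, value in enumerate(values):
        if field_types[i] == 'DATE':
            # Default January 1st, since the source only carries a year.
            value = '-'.join((year, '01', '01'))
        row[field_names[i]] = value

print(row)
# {'state': 'KS', 'gender': 'F', 'year': '1923-01-01',
#  'name': 'Dorothy', 'number': '654', 'created_date': '11/28/2016'}
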
compute_giou_loss | (box_target, wh_weight, pred_wh, mode="diou", reduce="sum") |
Computes the giou or diou loss (and, in the future, the ciou loss) for ttfnet.
:param box_target: ground truth bounding boxes
:param wh_weight: weight of heatmap
:param pred_wh: prediction of 4 values (offsets to left upper and right bottom corner)
:param mode: giou or diou or ciou, defaults to "diou"
:param reduce: "sum" to sum the batch loss, "mean" to average it, defaults to "sum"
:return: the computed giou loss.
|
Computes the giou or diou loss (and, in the future, the ciou loss) for ttfnet.
:param box_target: ground truth bounding boxes
:param wh_weight: weight of heatmap
:param pred_wh: prediction of 4 values (offsets to left upper and right bottom corner)
:param mode: giou or diou or ciou, defaults to "diou"
:param reduce: "sum" to sum the batch loss, "mean" to average it, defaults to "sum"
:return: the computed giou loss.
| def compute_giou_loss(box_target, wh_weight, pred_wh, mode="diou", reduce="sum"):
"""
Computes the giou or diou loss (and, in the future, the ciou loss) for ttfnet.
:param box_target: ground truth bounding boxes
:param wh_weight: weight of heatmap
:param pred_wh: prediction of 4 values (offsets to left upper and right bottom corner)
:param mode: giou or diou or ciou, defaults to "diou"
:param reduce: "sum" to sum the batch loss, "mean" to average it, defaults to "sum"
:return: the computed giou loss.
"""
base_step = 1
b = tf.shape(wh_weight)[0]
h = tf.shape(wh_weight)[1]
w = tf.shape(wh_weight)[2]
mask = tf.reshape(wh_weight, (b, h, w))
avg_factor = tf.reduce_sum(mask)
pos_mask = mask > 0.0
weight = tf.cast(mask[pos_mask], tf.float32)
shifts_x = tf.range(0, (w - 1) * base_step + 1, base_step, dtype=tf.float32)
shifts_y = tf.range(0, (h - 1) * base_step + 1, base_step, dtype=tf.float32)
shift_y, shift_x = tf.meshgrid(shifts_y, shifts_x, indexing="ij")
base_loc = tf.stack((shift_y, shift_x), axis=0)
base_loc = tf.cast(base_loc, tf.float32)
y1 = base_loc[0] - pred_wh[:, :, :, 0]
x1 = base_loc[1] - pred_wh[:, :, :, 1]
y2 = base_loc[0] + pred_wh[:, :, :, 2]
x2 = base_loc[1] + pred_wh[:, :, :, 3]
pred_wh = tf.stack((y1, x1, y2, x2), axis=3)
bboxes2 = pred_wh[pos_mask]
bboxes1 = box_target[pos_mask]
bboxes_num_per_sample = tf.math.reduce_sum(tf.cast(pos_mask, dtype=tf.int32), axis=[1, 2])
losses = _giou_loss(bboxes1, bboxes2, mode=mode)
if reduce == "mean":
return tf.math.reduce_mean(losses * weight) / avg_factor
elif reduce == "sum":
return tf.math.reduce_sum(losses * weight) / avg_factor
return (losses * weight) / avg_factor, bboxes_num_per_sample | [32, 0] | [76, 64] | python | en | ['en', 'error', 'th'] | False
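
A minimal sketch (TensorFlow 2 eager mode assumed, toy shapes) of the box-decoding step inside compute_giou_loss: each feature-map cell predicts four non-negative offsets, and subtracting/adding them from the cell's (y, x) grid position yields [y1, x1, y2, x2] corners.

import tensorflow as tf

h, w = 4, 4
pred_wh = tf.ones((1, h, w, 4))  # toy prediction: 1-pixel offset in every direction

shifts_y = tf.range(0, h, dtype=tf.float32)
shifts_x = tf.range(0, w, dtype=tf.float32)
shift_y, shift_x = tf.meshgrid(shifts_y, shifts_x, indexing="ij")

y1 = shift_y - pred_wh[:, :, :, 0]
x1 = shift_x - pred_wh[:, :, :, 1]
y2 = shift_y + pred_wh[:, :, :, 2]
x2 = shift_x + pred_wh[:, :, :, 3]
boxes = tf.stack((y1, x1, y2, x2), axis=3)

print(boxes[0, 2, 3].numpy())  # cell (y=2, x=3) decodes to [1. 2. 3. 4.]
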
focal_loss | (hm_true, hm_pred) |
Computes focal loss for heatmap.
This function was taken from:
https://github.com/MioChiu/TF_CenterNet/blob/master/loss.py
:param hm_true: gt heatmap
:param hm_pred: predicted heatmap
:return: loss value
|
Computes focal loss for heatmap. | def focal_loss(hm_true, hm_pred):
"""
Computes focal loss for heatmap.
This function was taken from:
https://github.com/MioChiu/TF_CenterNet/blob/master/loss.py
:param hm_true: gt heatmap
:param hm_pred: predicted heatmap
:return: loss value
"""
pos_mask = tf.cast(tf.equal(hm_true, 1.0), dtype=tf.float32)
neg_mask = tf.cast(tf.less(hm_true, 1.0), dtype=tf.float32)
neg_weights = tf.pow(1.0 - hm_true, 4)
pos_loss = -tf.math.log(tf.clip_by_value(hm_pred, 1e-5, 1.0 - 1e-5)) * tf.math.pow(1.0 - hm_pred, 2.0) * pos_mask
neg_loss = (
-tf.math.log(tf.clip_by_value(1.0 - hm_pred, 1e-5, 1.0 - 1e-5))
* tf.math.pow(hm_pred, 2.0)
* neg_weights
* neg_mask
)
num_pos = tf.reduce_sum(pos_mask)
pos_loss = tf.reduce_sum(pos_loss)
neg_loss = tf.reduce_sum(neg_loss)
loss = tf.cond(tf.greater(num_pos, 0), lambda: (pos_loss + neg_loss) / num_pos, lambda: neg_loss)
return loss | [80, 0] | [108, 15] | python | en | ['en', 'error', 'th'] | False
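
A quick numeric check (TensorFlow 2 eager mode assumed, toy 1x2 heatmap) of the behaviour above: a confident prediction at the positive cell contributes almost nothing, while the negative cell is down-weighted by hm_pred^2 and the (1 - hm_true)^4 factor.

import tensorflow as tf

hm_true = tf.constant([[1.0, 0.0]])
hm_pred = tf.constant([[0.9, 0.2]])

# Positive cell: -log(0.9) * (1 - 0.9)^2          ~= 0.00105
# Negative cell: -log(0.8) * 0.2^2 * (1 - 0.0)^4  ~= 0.00893
# num_pos = 1, so the loss is their sum           ~= 0.00998
print(focal_loss(hm_true, hm_pred).numpy())
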
reg_l1_loss | (y_true, y_pred, indices, mask) |
This function was taken from:
https://github.com/MioChiu/TF_CenterNet/blob/master/loss.py
:param y_true: (batch, max_objects, 2)
:param y_pred: (batch, heatmap_height, heatmap_width, max_objects)
:param indices: (batch, max_objects)
:param mask: (batch, max_objects)
:return: l1 loss (single float value) for given predictions and ground truth
|
This function was taken from:
https://github.com/MioChiu/TF_CenterNet/blob/master/loss.py | def reg_l1_loss(y_true, y_pred, indices, mask):
"""
This function was taken from:
https://github.com/MioChiu/TF_CenterNet/blob/master/loss.py
:param y_true: (batch, max_objects, 2)
:param y_pred: (batch, heatmap_height, heatmap_width, max_objects)
:param indices: (batch, max_objects)
:param mask: (batch, max_objects)
:return: l1 loss (single float value) for given predictions and ground truth
"""
batch_dim = tf.shape(y_pred)[0]
channel_dim = tf.shape(y_pred)[-1]
y_pred = tf.reshape(y_pred, (batch_dim, -1, channel_dim))
indices = tf.cast(indices, tf.int32)
y_pred = tf.gather(y_pred, indices, batch_dims=1)
mask = tf.tile(tf.expand_dims(mask, axis=-1), (1, 1, 2))
total_loss = tf.reduce_sum(tf.abs(y_true * mask - y_pred * mask))
loss = total_loss / (tf.reduce_sum(mask) + 1e-5)
return loss | [112, 0] | [131, 15] | python | en | ['en', 'error', 'th'] | False
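
A quick illustration (toy tensors) of the gather step in reg_l1_loss: after the spatial dimensions are flattened, tf.gather with batch_dims=1 picks, for every sample in the batch, the predictions at that sample's object indices.

import tensorflow as tf

# Batch of 1; a 2x2 feature map with 2 channels, already flattened to (1, 4, 2).
y_pred = tf.constant([[[0., 0.], [1., 1.], [2., 2.], [3., 3.]]])
indices = tf.constant([[3, 1]])  # this sample's objects sit at flat cells 3 and 1

picked = tf.gather(y_pred, indices, batch_dims=1)
print(picked.numpy())  # [[[3. 3.] [1. 1.]]] -- shape (1, 2, 2)
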
_giou_loss | (b1, b2, mode) |
Args:
b1: bounding box. The coordinates of each bounding box are
encoded as [y_min, x_min, y_max, x_max].
b2: the other bounding box. The coordinates of each bounding box
are encoded as [y_min, x_min, y_max, x_max].
mode: one of ['iou', 'ciou', 'diou', 'giou']; decides whether to calculate IoU, CIoU, DIoU, or GIoU.
Returns:
IoU loss float `Tensor`.
|
Args:
b1: bounding box. The coordinates of each bounding box are
encoded as [y_min, x_min, y_max, x_max].
b2: the other bounding box. The coordinates of each bounding box
are encoded as [y_min, x_min, y_max, x_max].
mode: one of ['iou', 'ciou', 'diou', 'giou']; decides whether to calculate IoU, CIoU, DIoU, or GIoU.
Returns:
IoU loss float `Tensor`.
| def _giou_loss(b1, b2, mode):
"""
Args:
b1: bounding box. The coordinates of each bounding box are
encoded as [y_min, x_min, y_max, x_max].
b2: the other bounding box. The coordinates of each bounding box
are encoded as [y_min, x_min, y_max, x_max].
mode: one of ['iou', 'ciou', 'diou', 'giou']; decides whether to calculate IoU, CIoU, DIoU, or GIoU.
Returns:
IoU loss float `Tensor`.
"""
zero = 0.0
b1_ymin, b1_xmin, b1_ymax, b1_xmax = tf.unstack(b1, 4, axis=-1)
b2_ymin, b2_xmin, b2_ymax, b2_xmax = tf.unstack(b2, 4, axis=-1)
b1_width = tf.maximum(zero, b1_xmax - b1_xmin)
b1_height = tf.maximum(zero, b1_ymax - b1_ymin)
b2_width = tf.maximum(zero, b2_xmax - b2_xmin)
b2_height = tf.maximum(zero, b2_ymax - b2_ymin)
b1_area = b1_width * b1_height
b2_area = b2_width * b2_height
intersect_ymin = tf.maximum(b1_ymin, b2_ymin)
intersect_xmin = tf.maximum(b1_xmin, b2_xmin)
intersect_ymax = tf.minimum(b1_ymax, b2_ymax)
intersect_xmax = tf.minimum(b1_xmax, b2_xmax)
intersect_width = tf.maximum(zero, intersect_xmax - intersect_xmin)
intersect_height = tf.maximum(zero, intersect_ymax - intersect_ymin)
intersect_area = intersect_width * intersect_height
union_area = b1_area + b2_area - intersect_area
iou = tf.math.divide_no_nan(intersect_area, union_area)
if mode == "iou":
return 1.0 - iou
elif mode in ["diou"]:
enclose_ymin = tf.minimum(b1_ymin, b2_ymin)
enclose_xmin = tf.minimum(b1_xmin, b2_xmin)
enclose_ymax = tf.maximum(b1_ymax, b2_ymax)
enclose_xmax = tf.maximum(b1_xmax, b2_xmax)
b1_center = tf.stack([(b1_ymin + b1_ymax) / 2, (b1_xmin + b1_xmax) / 2])
b2_center = tf.stack([(b2_ymin + b2_ymax) / 2, (b2_xmin + b2_xmax) / 2])
euclidean = tf.linalg.norm(b2_center - b1_center)
diag_length = tf.linalg.norm([enclose_ymax - enclose_ymin, enclose_xmax - enclose_xmin])
diou = iou - (euclidean ** 2) / (diag_length ** 2)
return 1.0 - diou
elif mode == "giou":
enclose_ymin = tf.minimum(b1_ymin, b2_ymin)
enclose_xmin = tf.minimum(b1_xmin, b2_xmin)
enclose_ymax = tf.maximum(b1_ymax, b2_ymax)
enclose_xmax = tf.maximum(b1_xmax, b2_xmax)
enclose_width = tf.maximum(zero, enclose_xmax - enclose_xmin)
enclose_height = tf.maximum(zero, enclose_ymax - enclose_ymin)
enclose_area = enclose_width * enclose_height
giou = iou - tf.math.divide_no_nan((enclose_area - union_area), enclose_area)
return 1.0 - giou
else:
raise ValueError("Value of mode should be one of ['iou','giou','diou']") | [134, 0] | [192, 80] | python | en | ['en', 'error', 'th'] | False
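
A worked example in plain Python following the same formulas: for the disjoint boxes [0, 0, 1, 1] and [0, 2, 1, 3], IoU is 0 (flat gradient), but the enclosing box has area 3 against a union of 2, so GIoU = 0 - (3 - 2)/3 = -1/3 and the loss 1 - GIoU ~= 1.33 still carries a signal.

def iou_and_giou(b1, b2):
    # Boxes are [y_min, x_min, y_max, x_max].
    inter_h = max(0.0, min(b1[2], b2[2]) - max(b1[0], b2[0]))
    inter_w = max(0.0, min(b1[3], b2[3]) - max(b1[1], b2[1]))
    inter = inter_h * inter_w
    area1 = (b1[2] - b1[0]) * (b1[3] - b1[1])
    area2 = (b2[2] - b2[0]) * (b2[3] - b2[1])
    union = area1 + area2 - inter
    iou = inter / union if union else 0.0
    enclose = (max(b1[2], b2[2]) - min(b1[0], b2[0])) * \
              (max(b1[3], b2[3]) - min(b1[1], b2[1]))
    giou = iou - (enclose - union) / enclose
    return iou, giou

print(iou_and_giou([0, 0, 1, 1], [0, 2, 1, 3]))  # (0.0, -0.333...)
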
_fd | (f) | Get a filedescriptor from something which could be a file or an fd. | Get a filedescriptor from something which could be a file or an fd. | def _fd(f):
"""Get a filedescriptor from something which could be a file or an fd."""
return f.fileno() if hasattr(f, 'fileno') else f | [23, 0] | [25, 52] | python | en | ['en', 'en', 'en'] | True
_unique_everseen | (iterable, key=None) | List unique elements, preserving order. Remember all elements ever seen. | List unique elements, preserving order. Remember all elements ever seen. | def _unique_everseen(iterable, key=None):
"List unique elements, preserving order. Remember all elements ever seen."
# unique_everseen('AAAABBBCCDAABBB') --> A B C D
# unique_everseen('ABBCcAD', str.lower) --> A B C D
seen = set()
seen_add = seen.add
if key is None:
for element in filterfalse(seen.__contains__, iterable):
seen_add(element)
yield element
else:
for element in iterable:
k = key(element)
if k not in seen:
seen_add(k)
yield element | [238, 0] | [253, 29] | python | ca | ['ca', 'ca', 'en'] | True
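
The doctest-style comments in the function can be exercised directly (assuming filterfalse has been imported from itertools, as the code requires):

print(list(_unique_everseen('AAAABBBCCDAABBB')))     # ['A', 'B', 'C', 'D']
print(list(_unique_everseen('ABBCcAD', str.lower)))  # ['A', 'B', 'C', 'D']
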
build_py.run | (self) | Build modules, packages, and copy data files to build directory | Build modules, packages, and copy data files to build directory | def run(self):
"""Build modules, packages, and copy data files to build directory"""
if not self.py_modules and not self.packages:
return
if self.py_modules:
self.build_modules()
if self.packages:
self.build_packages()
self.build_package_data()
self.run_2to3(self.__updated_files, False)
self.run_2to3(self.__updated_files, True)
self.run_2to3(self.__doctests_2to3, True)
# Only compile actual .py files, using our base class' idea of what our
# output files are.
self.byte_compile(orig.build_py.get_outputs(self, include_bytecode=0)) | [42, 4] | [60, 78] | python | en | ['en', 'en', 'en'] | True
build_py.__getattr__ | (self, attr) | lazily compute data files | lazily compute data files | def __getattr__(self, attr):
"lazily compute data files"
if attr == 'data_files':
self.data_files = self._get_data_files()
return self.data_files
return orig.build_py.__getattr__(self, attr) | [62, 4] | [67, 52] | python | it | ['it', 'it', 'it'] | True
build_py._get_data_files | (self) | Generate list of '(package,src_dir,build_dir,filenames)' tuples | Generate list of '(package,src_dir,build_dir,filenames)' tuples | def _get_data_files(self):
"""Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
self.analyze_manifest()
return list(map(self._get_pkg_data_files, self.packages or ())) | [79, 4] | [82, 71] | python | en | ['en', 'af', 'en'] | True
build_py.find_data_files | (self, package, src_dir) | Return filenames for package's data files in 'src_dir' | Return filenames for package's data files in 'src_dir' | def find_data_files(self, package, src_dir):
"""Return filenames for package's data files in 'src_dir'"""
patterns = self._get_platform_patterns(
self.package_data,
package,
src_dir,
)
globs_expanded = map(glob, patterns)
# flatten the expanded globs into an iterable of matches
globs_matches = itertools.chain.from_iterable(globs_expanded)
glob_files = filter(os.path.isfile, globs_matches)
files = itertools.chain(
self.manifest_files.get(package, []),
glob_files,
)
return self.exclude_data_files(package, src_dir, files) | [98, 4] | [113, 63] | python | en | ['en', 'no', 'en'] | True
build_py.build_package_data | (self) | Copy data files into build directory | Copy data files into build directory | def build_package_data(self):
"""Copy data files into build directory"""
for package, src_dir, build_dir, filenames in self.data_files:
for filename in filenames:
target = os.path.join(build_dir, filename)
self.mkpath(os.path.dirname(target))
srcfile = os.path.join(src_dir, filename)
outf, copied = self.copy_file(srcfile, target)
srcfile = os.path.abspath(srcfile)
if (copied and
srcfile in self.distribution.convert_2to3_doctests):
self.__doctests_2to3.append(outf) | [115, 4] | [126, 53] | python | en | ['en', 'en', 'en'] | True
build_py.check_package | (self, package, package_dir) | Check namespace packages' __init__ for declare_namespace | Check namespace packages' __init__ for declare_namespace | def check_package(self, package, package_dir):
"""Check namespace packages' __init__ for declare_namespace"""
try:
return self.packages_checked[package]
except KeyError:
pass
init_py = orig.build_py.check_package(self, package, package_dir)
self.packages_checked[package] = init_py
if not init_py or not self.distribution.namespace_packages:
return init_py
for pkg in self.distribution.namespace_packages:
if pkg == package or pkg.startswith(package + '.'):
break
else:
return init_py
with io.open(init_py, 'rb') as f:
contents = f.read()
if b'declare_namespace' not in contents:
raise distutils.errors.DistutilsError(
"Namespace package problem: %s is a namespace package, but "
"its\n__init__.py does not call declare_namespace()! Please "
'fix it.\n(See the setuptools manual under '
'"Namespace Packages" for details.)\n"' % (package,)
)
return init_py | [155, 4] | [183, 22] | python | en | ['es', 'en', 'en'] | True
build_py.exclude_data_files | (self, package, src_dir, files) | Filter filenames for package's data files in 'src_dir' | Filter filenames for package's data files in 'src_dir' | def exclude_data_files(self, package, src_dir, files):
"""Filter filenames for package's data files in 'src_dir'"""
files = list(files)
patterns = self._get_platform_patterns(
self.exclude_package_data,
package,
src_dir,
)
match_groups = (
fnmatch.filter(files, pattern)
for pattern in patterns
)
# flatten the groups of matches into an iterable of matches
matches = itertools.chain.from_iterable(match_groups)
bad = set(matches)
keepers = (
fn
for fn in files
if fn not in bad
)
# ditch dupes
return list(_unique_everseen(keepers)) | [195, 4] | [216, 46] | python | en | ['en', 'en', 'en'] | True
build_py._get_platform_patterns | (spec, package, src_dir) |
yield platform-specific path patterns (suitable for glob
or fn_match) from a glob-based spec (such as
self.package_data or self.exclude_package_data)
matching package in src_dir.
|
yield platform-specific path patterns (suitable for glob
or fn_match) from a glob-based spec (such as
self.package_data or self.exclude_package_data)
matching package in src_dir.
| def _get_platform_patterns(spec, package, src_dir):
"""
yield platform-specific path patterns (suitable for glob
or fn_match) from a glob-based spec (such as
self.package_data or self.exclude_package_data)
matching package in src_dir.
"""
raw_patterns = itertools.chain(
spec.get('', []),
spec.get(package, []),
)
return (
# Each pattern has to be converted to a platform-specific path
os.path.join(src_dir, convert_path(pattern))
for pattern in raw_patterns
) | [219, 4] | [234, 9] | python | en | ['en', 'error', 'th'] | False
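
A small sketch (toy spec) of what _get_platform_patterns yields: the catch-all '' key is merged with the per-package key, and each glob is joined onto src_dir as a platform-specific path.

import itertools
import os
from distutils.util import convert_path

spec = {'': ['*.txt'], 'mypkg': ['data/*.json']}
package, src_dir = 'mypkg', 'src/mypkg'

raw_patterns = itertools.chain(spec.get('', []), spec.get(package, []))
patterns = [os.path.join(src_dir, convert_path(p)) for p in raw_patterns]
print(patterns)  # ['src/mypkg/*.txt', 'src/mypkg/data/*.json'] on POSIX
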
ExpBuffer.__init__ | (self, max_size=10000, min_size=5000) | Initializes the maximum size of the buffer.
Args:
max_size: Maximum number of experiences kept in the buffer
| Initializes the maximum size of the buffer.
Args:
max_size: Maximum number of experiences kept in the buffer
| def __init__(self, max_size=10000, min_size=5000):
""" Initializes the maximum size of the buffer.
Args:
max_size: Maximum number of experiences kept in the buffer
"""
self.buffer = deque()
self.max_size = max_size
self.min_size = min_size | [39, 2] | [46, 28] | python | en | ['en', 'en', 'en'] | True
ExpBuffer.add_exp | (self, exp) | Adds an experience to the buffer.
| Adds an experience to the buffer. | def add_exp(self, exp):
""" Adds an experience to the buffer.
"""
if len(self.buffer) > self.max_size:
self.buffer.popleft()
self.buffer.append(exp) | [48, 2] | [54, 27] | python | en | ['en', 'en', 'en'] | True
ExpBuffer.sample_experiences | (self, batch_size=128) | Samples experiences from the buffer.
Returns: Sampled array from the experience buffer
| Samples experiences from the buffer. | def sample_experiences(self, batch_size=128):
""" Samples experiences from the buffer.
Returns: Sampled array from the experience buffer
"""
sampled_buffer = random.sample(self.buffer, batch_size)
state, next_state, reward, action, done = zip(*sampled_buffer)
state, next_state = np.array(state), np.array(next_state)
done, action = np.array(done), np.array(action)
return (state, next_state, reward, action, done) | [56, 2] | [67, 52] | python | en | ['en', 'en', 'en'] | True
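
A minimal usage sketch (toy transitions) tying the three ExpBuffer methods together; each experience is a (state, next_state, reward, action, done) tuple, matching the unpacking in sample_experiences.

import numpy as np

buf = ExpBuffer(max_size=100, min_size=4)
for t in range(10):
    state = np.full(4, float(t))
    buf.add_exp((state, state + 1.0, 1.0, 0, False))

if len(buf.buffer) >= buf.min_size:
    state, next_state, reward, action, done = buf.sample_experiences(batch_size=4)
    print(state.shape, action.shape)  # (4, 4) (4,)
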
get_architectures | () |
get all of the model architectures
|
get all of the model architectures
| def get_architectures():
"""
get all of the model architectures
"""
names = []
for k, v in architectures.__dict__.items():
if isinstance(v, (types.FunctionType, six.class_types)):
names.append(k)
return names | [21, 0] | [29, 16] | python | en | ['en', 'error', 'th'] | False
similar_architectures | (name='', names=[], thresh=0.1, topk=10) |
infer similar architectures from a given name
|
infer similar architectures from a given name
| def similar_architectures(name='', names=[], thresh=0.1, topk=10):
"""
infer similar architectures from a given name
"""
scores = []
for idx, n in enumerate(names):
if n.startswith('__'):
continue
score = SequenceMatcher(None, n.lower(), name.lower()).quick_ratio()
if score > thresh:
scores.append((idx, score))
scores.sort(key=lambda x: x[1], reverse=True)
similar_names = [names[s[0]] for s in scores[:min(topk, len(scores))]]
return similar_names | [32, 0] | [45, 24] | python | en | ['en', 'error', 'th'] | False
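
A quick usage sketch (hypothetical architecture names): a misspelled model name is scored against the registry with SequenceMatcher.quick_ratio, and the closest candidates come back first.

names = ['ResNet50', 'ResNet101', 'MobileNetV2', 'SE_ResNet50_vd']
print(similar_architectures('resnet_50', names, thresh=0.1, topk=3))
# 'ResNet50' ranks first; the remaining entries follow by score.
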
ensure_local_file | (input_file) |
Ensure the training ratings file is stored locally.
|
Ensure the training ratings file is stored locally.
| def ensure_local_file(input_file):
"""
Ensure the training ratings file is stored locally.
"""
if input_file.startswith('gs:/'):
input_path = os.path.join('/tmp/', str(uuid.uuid4()))
os.makedirs(input_path)
tmp_input_file = os.path.join(input_path, os.path.basename(input_file))
sh.gsutil("cp", "-r", input_file, tmp_input_file)
return tmp_input_file
else:
return input_file | [22, 0] | [33, 21] | python | en | ['en', 'error', 'th'] | False
write_hptuning_metric | (args, metric) |
Write a summary containing the tuning loss metric, as required by hyperparam tuning.
|
Write a summary containing the tuning loss metric, as required by hyperparam tuning.
| def write_hptuning_metric(args, metric):
"""
Write a summary containing the tuning loss metric, as required by hyperparam tuning.
"""
summary = Summary(value=[Summary.Value(tag='training/hptuning/metric', simple_value=metric)])
# for hyperparam tuning, we write a summary log to a directory 'eval' below the job directory
eval_path = os.path.join(args['output_dir'], 'eval')
summary_writer = tf.summary.FileWriter(eval_path)
# Note: adding the summary to the writer is enough for hyperparam tuning.
# The ml engine system is looking for any summary added with the hyperparam metric tag.
summary_writer.add_summary(summary)
summary_writer.flush() | [
"def",
"write_hptuning_metric",
"(",
"args",
",",
"metric",
")",
":",
"summary",
"=",
"Summary",
"(",
"value",
"=",
"[",
"Summary",
".",
"Value",
"(",
"tag",
"=",
"'training/hptuning/metric'",
",",
"simple_value",
"=",
"metric",
")",
"]",
")",
"# for hyperparam tuning, we write a summary log to a directory 'eval' below the job directory",
"eval_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"args",
"[",
"'output_dir'",
"]",
",",
"'eval'",
")",
"summary_writer",
"=",
"tf",
".",
"summary",
".",
"FileWriter",
"(",
"eval_path",
")",
"# Note: adding the summary to the writer is enough for hyperparam tuning.",
"# The ml engine system is looking for any summary added with the hyperparam metric tag.",
"summary_writer",
".",
"add_summary",
"(",
"summary",
")",
"summary_writer",
".",
"flush",
"(",
")"
] | [
36,
0
] | [
49,
24
] | python | en | ['en', 'error', 'th'] | False |
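A sketch of reporting a tuning metric after an evaluation pass, using write_hptuning_metric above; the job directory and RMSE value are illustrative.

args = {'output_dir': 'gs://my-bucket/jobs/run-42'}  # hypothetical job directory
write_hptuning_metric(args, metric=0.87)             # illustrative evaluation RMSE
# The tuning service scans <output_dir>/eval for summaries tagged
# 'training/hptuning/metric'.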
add_srs_entry | (srs, auth_name='EPSG', auth_srid=None, ref_sys_name=None,
database=None) |
Take a GDAL SpatialReference system and add its information to the
`spatial_ref_sys` table of the spatial backend. Doing this enables
database-level spatial transformations for the backend. Thus, this utility
is useful for adding spatial reference systems not included by default with
the backend:
>>> from django.contrib.gis.utils import add_srs_entry
>>> add_srs_entry(3857)
Keyword Arguments:
auth_name:
This keyword may be customized with the value of the `auth_name` field.
Defaults to 'EPSG'.
auth_srid:
This keyword may be customized with the value of the `auth_srid` field.
Defaults to the SRID determined by GDAL.
ref_sys_name:
For SpatiaLite users only, sets the value of the `ref_sys_name` field.
Defaults to the name determined by GDAL.
database:
The name of the database connection to use; the default is the value
of `django.db.DEFAULT_DB_ALIAS` (at the time of this writing, its value
is 'default').
|
Take a GDAL SpatialReference system and add its information to the
`spatial_ref_sys` table of the spatial backend. Doing this enables
database-level spatial transformations for the backend. Thus, this utility
is useful for adding spatial reference systems not included by default with
the backend: | def add_srs_entry(srs, auth_name='EPSG', auth_srid=None, ref_sys_name=None,
database=None):
"""
Take a GDAL SpatialReference system and add its information to the
`spatial_ref_sys` table of the spatial backend. Doing this enables
database-level spatial transformations for the backend. Thus, this utility
is useful for adding spatial reference systems not included by default with
the backend:
>>> from django.contrib.gis.utils import add_srs_entry
>>> add_srs_entry(3857)
Keyword Arguments:
auth_name:
This keyword may be customized with the value of the `auth_name` field.
Defaults to 'EPSG'.
auth_srid:
This keyword may be customized with the value of the `auth_srid` field.
Defaults to the SRID determined by GDAL.
ref_sys_name:
For SpatiaLite users only, sets the value of the `ref_sys_name` field.
Defaults to the name determined by GDAL.
database:
The name of the database connection to use; the default is the value
of `django.db.DEFAULT_DB_ALIAS` (at the time of this writing, its value
is 'default').
"""
database = database or DEFAULT_DB_ALIAS
connection = connections[database]
if not hasattr(connection.ops, 'spatial_version'):
raise Exception('The `add_srs_entry` utility only works '
'with spatial backends.')
if not connection.features.supports_add_srs_entry:
raise Exception('This utility does not support your database backend.')
SpatialRefSys = connection.ops.spatial_ref_sys()
# If argument is not a `SpatialReference` instance, use it as parameter
# to construct a `SpatialReference` instance.
if not isinstance(srs, SpatialReference):
srs = SpatialReference(srs)
if srs.srid is None:
raise Exception('Spatial reference requires an SRID to be '
'compatible with the spatial backend.')
# Initializing the keyword arguments dictionary for both PostGIS
# and SpatiaLite.
kwargs = {
'srid': srs.srid,
'auth_name': auth_name,
'auth_srid': auth_srid or srs.srid,
'proj4text': srs.proj4,
}
# Backend-specific fields for the SpatialRefSys model.
srs_field_names = {f.name for f in SpatialRefSys._meta.get_fields()}
if 'srtext' in srs_field_names:
kwargs['srtext'] = srs.wkt
if 'ref_sys_name' in srs_field_names:
# SpatiaLite specific
kwargs['ref_sys_name'] = ref_sys_name or srs.name
# Creating the spatial_ref_sys model.
try:
# Try getting via SRID only, because using all kwargs may
# differ from exact wkt/proj in database.
SpatialRefSys.objects.using(database).get(srid=srs.srid)
except SpatialRefSys.DoesNotExist:
SpatialRefSys.objects.using(database).create(**kwargs) | [
"def",
"add_srs_entry",
"(",
"srs",
",",
"auth_name",
"=",
"'EPSG'",
",",
"auth_srid",
"=",
"None",
",",
"ref_sys_name",
"=",
"None",
",",
"database",
"=",
"None",
")",
":",
"database",
"=",
"database",
"or",
"DEFAULT_DB_ALIAS",
"connection",
"=",
"connections",
"[",
"database",
"]",
"if",
"not",
"hasattr",
"(",
"connection",
".",
"ops",
",",
"'spatial_version'",
")",
":",
"raise",
"Exception",
"(",
"'The `add_srs_entry` utility only works '",
"'with spatial backends.'",
")",
"if",
"not",
"connection",
".",
"features",
".",
"supports_add_srs_entry",
":",
"raise",
"Exception",
"(",
"'This utility does not support your database backend.'",
")",
"SpatialRefSys",
"=",
"connection",
".",
"ops",
".",
"spatial_ref_sys",
"(",
")",
"# If argument is not a `SpatialReference` instance, use it as parameter",
"# to construct a `SpatialReference` instance.",
"if",
"not",
"isinstance",
"(",
"srs",
",",
"SpatialReference",
")",
":",
"srs",
"=",
"SpatialReference",
"(",
"srs",
")",
"if",
"srs",
".",
"srid",
"is",
"None",
":",
"raise",
"Exception",
"(",
"'Spatial reference requires an SRID to be '",
"'compatible with the spatial backend.'",
")",
"# Initializing the keyword arguments dictionary for both PostGIS",
"# and SpatiaLite.",
"kwargs",
"=",
"{",
"'srid'",
":",
"srs",
".",
"srid",
",",
"'auth_name'",
":",
"auth_name",
",",
"'auth_srid'",
":",
"auth_srid",
"or",
"srs",
".",
"srid",
",",
"'proj4text'",
":",
"srs",
".",
"proj4",
",",
"}",
"# Backend-specific fields for the SpatialRefSys model.",
"srs_field_names",
"=",
"{",
"f",
".",
"name",
"for",
"f",
"in",
"SpatialRefSys",
".",
"_meta",
".",
"get_fields",
"(",
")",
"}",
"if",
"'srtext'",
"in",
"srs_field_names",
":",
"kwargs",
"[",
"'srtext'",
"]",
"=",
"srs",
".",
"wkt",
"if",
"'ref_sys_name'",
"in",
"srs_field_names",
":",
"# SpatiaLite specific",
"kwargs",
"[",
"'ref_sys_name'",
"]",
"=",
"ref_sys_name",
"or",
"srs",
".",
"name",
"# Creating the spatial_ref_sys model.",
"try",
":",
"# Try getting via SRID only, because using all kwargs may",
"# differ from exact wkt/proj in database.",
"SpatialRefSys",
".",
"objects",
".",
"using",
"(",
"database",
")",
".",
"get",
"(",
"srid",
"=",
"srs",
".",
"srid",
")",
"except",
"SpatialRefSys",
".",
"DoesNotExist",
":",
"SpatialRefSys",
".",
"objects",
".",
"using",
"(",
"database",
")",
".",
"create",
"(",
"*",
"*",
"kwargs",
")"
] | [
4,
0
] | [
75,
62
] | python | en | ['en', 'error', 'th'] | False |
OneHotPreprocCore.preproc | (self, frame) |
Args:
frame (B, H, W)
Returns:
processed frame (B, C, H, W)
|
Args:
frame (B, H, W)
Returns:
processed frame (B, C, H, W)
| def preproc(self, frame):
"""
Args:
frame (B, H, W)
Returns:
processed frame (B, C, H, W)
"""
return self._image_colors_to_onehot(frame) | [
"def",
"preproc",
"(",
"self",
",",
"frame",
")",
":",
"return",
"self",
".",
"_image_colors_to_onehot",
"(",
"frame",
")"
] | [
35,
4
] | [
42,
50
] | python | en | ['en', 'error', 'th'] | False |
OneHotPreprocCore.unpreproc_after_loss | (cls, proc_frame_for_loss) |
Args:
frame (B, C, H, W)
Returns:
processed frame (B, H, W)
|
Args:
frame (B, C, H, W)
Returns:
processed frame (B, H, W)
| def unpreproc_after_loss(cls, proc_frame_for_loss):
"""
Args:
frame (B, C, H, W)
Returns:
processed frame (B, H, W)
"""
return torch.argmax(proc_frame_for_loss, axis=1) | [
"def",
"unpreproc_after_loss",
"(",
"cls",
",",
"proc_frame_for_loss",
")",
":",
"return",
"torch",
".",
"argmax",
"(",
"proc_frame_for_loss",
",",
"axis",
"=",
"1",
")"
] | [
49,
4
] | [
56,
56
] | python | en | ['en', 'error', 'th'] | False |
OneHotPtNetPreprocCore.preproc | (self, frame) |
Args:
frame (B, H, W)
Returns:
processed frame (B, C, H, W)
|
Args:
frame (B, H, W)
Returns:
processed frame (B, C, H, W)
| def preproc(self, frame):
"""
Args:
frame (B, H, W)
Returns:
processed frame (B, C, H, W)
"""
# First make it 1-hot rep, then add the XY locations
frame_onehot = self.one_hot_preproc.preproc(frame)
# Make space for the X, Y
frame_onehot_rep = frame_onehot.repeat_interleave(3, dim=1)
# Compute the XY grid
loc_x, loc_y = torch.meshgrid(torch.arange(frame.shape[-2]),
torch.arange(frame.shape[-1]))
frame_onehot_rep[:, 0::3, :, :] = loc_x
frame_onehot_rep[:, 1::3, :, :] = loc_y
return frame_onehot_rep | [
"def",
"preproc",
"(",
"self",
",",
"frame",
")",
":",
"# First make it 1-hot rep, then add the XY locations",
"frame_onehot",
"=",
"self",
".",
"one_hot_preproc",
".",
"preproc",
"(",
"frame",
")",
"# Make space for the X, Y",
"frame_onehot_rep",
"=",
"frame_onehot",
".",
"repeat_interleave",
"(",
"3",
",",
"dim",
"=",
"1",
")",
"# Compute the XY grid",
"loc_x",
",",
"loc_y",
"=",
"torch",
".",
"meshgrid",
"(",
"torch",
".",
"arange",
"(",
"frame",
".",
"shape",
"[",
"-",
"2",
"]",
")",
",",
"torch",
".",
"arange",
"(",
"frame",
".",
"shape",
"[",
"-",
"1",
"]",
")",
")",
"frame_onehot_rep",
"[",
":",
",",
"0",
":",
":",
"3",
",",
":",
",",
":",
"]",
"=",
"loc_x",
"frame_onehot_rep",
"[",
":",
",",
"1",
":",
":",
"3",
",",
":",
",",
":",
"]",
"=",
"loc_y",
"return",
"frame_onehot_rep"
] | [
66,
4
] | [
82,
31
] | python | en | ['en', 'error', 'th'] | False |
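A shape sketch for the interleaved layout produced by preproc above. Two assumptions: OneHotPtNetPreprocCore is constructible with no arguments, and 7 stands in for phyre.NUM_COLORS.

import torch

B, H, W, C = 2, 8, 8, 7                 # C stands in for phyre.NUM_COLORS
frame = torch.randint(0, C, (B, H, W))
core = OneHotPtNetPreprocCore()         # constructor signature assumed
out = core.preproc(frame)
assert out.shape == (B, 3 * C, H, W)
# Channels 0::3 carry the X grid, 1::3 the Y grid, 2::3 the one-hot mask.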
OneHotPtNetPreprocCore.unpreproc_for_loss | (cls, proc_frame) | Generate a 1-hot pixel-level output to incur the loss. | Generate a 1-hot pixel-level output to incur the loss. | def unpreproc_for_loss(cls, proc_frame):
"""Generate a 1-hot pixel-level output to incur the loss."""
proc_frame_ch = torch.chunk(proc_frame, phyre.NUM_COLORS, 1)
all_feat = []
for channel in proc_frame_ch:
index = channel[:, :2, ...]
index[:, 0, ...] = 2 * (index[:, 0, ...] / index.shape[-2]) - 1
index[:, 1, ...] = 2 * (index[:, 1, ...] / index.shape[-1]) - 1
feat = channel[:, 2:, ...] # B, 1 (typically), H, W
all_feat.append(
nn.functional.grid_sample(feat,
index.permute(0, 3, 2, 1),
mode='bilinear',
align_corners=True))
return torch.cat(all_feat, dim=1) | [
"def",
"unpreproc_for_loss",
"(",
"cls",
",",
"proc_frame",
")",
":",
"proc_frame_ch",
"=",
"torch",
".",
"chunk",
"(",
"proc_frame",
",",
"phyre",
".",
"NUM_COLORS",
",",
"1",
")",
"all_feat",
"=",
"[",
"]",
"for",
"channel",
"in",
"proc_frame_ch",
":",
"index",
"=",
"channel",
"[",
":",
",",
":",
"2",
",",
"...",
"]",
"index",
"[",
":",
",",
"0",
",",
"...",
"]",
"=",
"2",
"*",
"(",
"index",
"[",
":",
",",
"0",
",",
"...",
"]",
"/",
"index",
".",
"shape",
"[",
"-",
"2",
"]",
")",
"-",
"1",
"index",
"[",
":",
",",
"1",
",",
"...",
"]",
"=",
"2",
"*",
"(",
"index",
"[",
":",
",",
"1",
",",
"...",
"]",
"/",
"index",
".",
"shape",
"[",
"-",
"1",
"]",
")",
"-",
"1",
"feat",
"=",
"channel",
"[",
":",
",",
"2",
":",
",",
"...",
"]",
"# B, 1 (typically), H, W",
"all_feat",
".",
"append",
"(",
"nn",
".",
"functional",
".",
"grid_sample",
"(",
"feat",
",",
"index",
".",
"permute",
"(",
"0",
",",
"3",
",",
"2",
",",
"1",
")",
",",
"mode",
"=",
"'bilinear'",
",",
"align_corners",
"=",
"True",
")",
")",
"return",
"torch",
".",
"cat",
"(",
"all_feat",
",",
"dim",
"=",
"1",
")"
] | [
85,
4
] | [
99,
41
] | python | en | ['en', 'en', 'en'] | True |
OneHotPtNetPreprocCore.unpreproc_after_loss | (self, proc_frame_for_loss) |
Args:
proc_frame (B, C, H, W)
Returns:
frame (B, H, W)
|
Args:
proc_frame (B, C, H, W)
Returns:
frame (B, H, W)
| def unpreproc_after_loss(self, proc_frame_for_loss):
"""
Args:
proc_frame (B, C, H, W)
Returns:
frame (B, H, W)
"""
return self.one_hot_preproc.unpreproc_after_loss(proc_frame_for_loss) | [
"def",
"unpreproc_after_loss",
"(",
"self",
",",
"proc_frame_for_loss",
")",
":",
"return",
"self",
".",
"one_hot_preproc",
".",
"unpreproc_after_loss",
"(",
"proc_frame_for_loss",
")"
] | [
101,
4
] | [
108,
77
] | python | en | ['en', 'error', 'th'] | False |
VideoPreprocessor._apply_each_obj | (cls, func, frame) | Apply a function to each obj in the frame.
Args:
func
frame: B, Nobj, ...
| Apply a function to each obj in the frame.
Args:
func
frame: B, Nobj, ...
| def _apply_each_obj(cls, func, frame):
"""Apply a function to each obj in the frame.
Args:
func
frame: B, Nobj, ...
"""
frame_flat = torch.flatten(frame, 0, 1)
frame_flat_proc = func(frame_flat)
frame_proc = frame_flat_proc.view(frame.shape[:2] +
frame_flat_proc.shape[1:])
return frame_proc | [
"def",
"_apply_each_obj",
"(",
"cls",
",",
"func",
",",
"frame",
")",
":",
"frame_flat",
"=",
"torch",
".",
"flatten",
"(",
"frame",
",",
"0",
",",
"1",
")",
"frame_flat_proc",
"=",
"func",
"(",
"frame_flat",
")",
"frame_proc",
"=",
"frame_flat_proc",
".",
"view",
"(",
"frame",
".",
"shape",
"[",
":",
"2",
"]",
"+",
"frame_flat_proc",
".",
"shape",
"[",
"1",
":",
"]",
")",
"return",
"frame_proc"
] | [
119,
4
] | [
129,
25
] | python | en | ['en', 'en', 'en'] | True |
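The flatten-apply-reshape pattern behind _apply_each_obj, shown standalone; any per-frame callable works as func.

import torch

def apply_each_obj(func, frame):
    flat = torch.flatten(frame, 0, 1)        # fold batch and object dims: (B*Nobj, ...)
    flat_out = func(flat)                    # run the per-frame op once
    return flat_out.view(frame.shape[:2] + flat_out.shape[1:])  # unfold

x = torch.zeros(4, 3, 8, 8)                  # B=4, Nobj=3
y = apply_each_obj(lambda f: f.unsqueeze(1), x)
assert y.shape == (4, 3, 1, 8, 8)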
VideoPreprocessor.preproc_frame | (self, frame) | Process a frame from the vid.
Args:
frame: (B,Nobj,H,W)
Returns:
Processed frame: (B,Nobj,C,H,W)
| Process a frame from the vid.
Args:
frame: (B,Nobj,H,W)
Returns:
Processed frame: (B,Nobj,C,H,W)
| def preproc_frame(self, frame):
"""Process a frame from the vid.
Args:
frame: (B,Nobj,H,W)
Returns:
Processed frame: (B,Nobj,C,H,W)
"""
if frame is None:
return None
assert len(frame.shape) == 4
return self._apply_each_obj(self.preproc_core.preproc, frame) | [
"def",
"preproc_frame",
"(",
"self",
",",
"frame",
")",
":",
"if",
"frame",
"is",
"None",
":",
"return",
"None",
"assert",
"len",
"(",
"frame",
".",
"shape",
")",
"==",
"4",
"return",
"self",
".",
"_apply_each_obj",
"(",
"self",
".",
"preproc_core",
".",
"preproc",
",",
"frame",
")"
] | [
131,
4
] | [
141,
69
] | python | en | ['en', 'en', 'en'] | True |
VideoPreprocessor.unpreprocess_frame_after_loss | (self, proc_frame) | Unprocess a frame from the vid, that has already been unprocessed
for loss using the unprocess_frame_for_loss function.
Note that the decoder automatically handles objects, so no obj here
Args:
processed frame: (B,Nobj,C,H,W)
Returns:
frame: (B, Nobj, H, W)
| Unprocess a frame from the vid, that has already been unprocessed
for loss using the unprocess_frame_for_loss function.
Note that the decoder automatically handles objects, so no obj here
Args:
processed frame: (B,Nobj,C,H,W)
Returns:
frame: (B, Nobj, H, W)
| def unpreprocess_frame_after_loss(self, proc_frame):
"""Unprocess a frame from the vid, that has already been unprocessed
for loss using the unprocess_frame_for_loss function.
Note that the decoder automatically handles objects, so no obj here
Args:
processed frame: (B,Nobj,C,H,W)
Returns:
frame: (B, Nobj, H, W)
"""
if proc_frame is None:
return None
assert len(proc_frame.shape) == 5
return self._apply_each_obj(self.preproc_core.unpreproc_after_loss,
proc_frame) | [
"def",
"unpreprocess_frame_after_loss",
"(",
"self",
",",
"proc_frame",
")",
":",
"if",
"proc_frame",
"is",
"None",
":",
"return",
"None",
"assert",
"len",
"(",
"proc_frame",
".",
"shape",
")",
"==",
"5",
"return",
"self",
".",
"_apply_each_obj",
"(",
"self",
".",
"preproc_core",
".",
"unpreproc_after_loss",
",",
"proc_frame",
")"
] | [
143,
4
] | [
156,
47
] | python | en | ['en', 'en', 'en'] | True |
VideoPreprocessor.unpreprocess_frame_for_loss | (self, proc_frame) | Unprocess a frame from the vid, for loss.
Args:
processed frame: (B,Nobj,C,H,W)
Returns:
frame: (B, Nobj, C, H, W)
| Unprocess a frame from the vid, for loss.
Args:
processed frame: (B,Nobj,C,H,W)
Returns:
frame: (B, Nobj, C, H, W)
| def unpreprocess_frame_for_loss(self, proc_frame):
"""Unprocess a frame from the vid, for loss.
Args:
processed frame: (B,Nobj,C,H,W)
Returns:
frame: (B, Nobj, C, H, W)
"""
if proc_frame is None:
return proc_frame
assert len(proc_frame.shape) == 5
return self._apply_each_obj(self.preproc_core.unpreproc_for_loss,
proc_frame) | [
"def",
"unpreprocess_frame_for_loss",
"(",
"self",
",",
"proc_frame",
")",
":",
"if",
"proc_frame",
"is",
"None",
":",
"return",
"proc_frame",
"assert",
"len",
"(",
"proc_frame",
".",
"shape",
")",
"==",
"5",
"return",
"self",
".",
"_apply_each_obj",
"(",
"self",
".",
"preproc_core",
".",
"unpreproc_for_loss",
",",
"proc_frame",
")"
] | [
158,
4
] | [
169,
47
] | python | en | ['en', 'en', 'en'] | True |
VideoPreprocessor.preprocess_vid | (self, vid) |
Args:
vid (B, T, Nobj, H, W)
Returns:
res (B, T, Nobj, C, H, W): Basically the 1-hot representation
|
Args:
vid (B, T, Nobj, H, W)
Returns:
res (B, T, Nobj, C, H, W): Basically the 1-hot representation
| def preprocess_vid(self, vid):
"""
Args:
vid (B, T, Nobj, H, W)
Returns:
res (B, T, Nobj, C, H, W): Basically the 1-hot representation
"""
assert len(vid.shape) == 5
vid_flat = torch.flatten(vid, 0, 1)
vid_flat_onehot = self.preproc_frame(vid_flat)
return torch.reshape(vid_flat_onehot,
vid.shape[:2] + vid_flat_onehot.shape[1:]) | [
"def",
"preprocess_vid",
"(",
"self",
",",
"vid",
")",
":",
"assert",
"len",
"(",
"vid",
".",
"shape",
")",
"==",
"5",
"vid_flat",
"=",
"torch",
".",
"flatten",
"(",
"vid",
",",
"0",
",",
"1",
")",
"vid_flat_onehot",
"=",
"self",
".",
"preproc_frame",
"(",
"vid_flat",
")",
"return",
"torch",
".",
"reshape",
"(",
"vid_flat_onehot",
",",
"vid",
".",
"shape",
"[",
":",
"2",
"]",
"+",
"vid_flat_onehot",
".",
"shape",
"[",
"1",
":",
"]",
")"
] | [
171,
4
] | [
182,
71
] | python | en | ['en', 'error', 'th'] | False |
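A shape walkthrough for preprocess_vid above; the no-argument VideoPreprocessor constructor is an assumption, as is using 7 for phyre.NUM_COLORS and a plain one-hot core.

import torch

B, T, Nobj, H, W = 2, 5, 3, 32, 32
vid = torch.randint(0, 7, (B, T, Nobj, H, W))
prep = VideoPreprocessor()                      # constructor signature assumed
one_hot = prep.preprocess_vid(vid)
assert one_hot.shape == (B, T, Nobj, 7, H, W)   # with a one-hot preproc core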
get_capability_token | () | Respond to incoming requests. | Respond to incoming requests. | def get_capability_token():
"""Respond to incoming requests."""
# Find these values at twilio.com/console
# To set up environmental variables, see http://twil.io/secure
account_sid = os.environ['TWILIO_ACCOUNT_SID']
auth_token = os.environ['TWILIO_AUTH_TOKEN']
capability = ClientCapabilityToken(account_sid, auth_token)
# Twilio Application Sid
application_sid = 'APXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
capability.allow_client_outgoing(application_sid)
capability.allow_client_incoming(request.form["ClientName"])
token = capability.to_jwt()
return Response(token, mimetype='application/jwt') | [
"def",
"get_capability_token",
"(",
")",
":",
"# Find these values at twilio.com/console",
"# To set up environmental variables, see http://twil.io/secure",
"account_sid",
"=",
"os",
".",
"environ",
"[",
"'TWILIO_ACCOUNT_SID'",
"]",
"auth_token",
"=",
"os",
".",
"environ",
"[",
"'TWILIO_AUTH_TOKEN'",
"]",
"capability",
"=",
"ClientCapabilityToken",
"(",
"account_sid",
",",
"auth_token",
")",
"# Twilio Application Sid",
"application_sid",
"=",
"'APXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'",
"capability",
".",
"allow_client_outgoing",
"(",
"application_sid",
")",
"capability",
".",
"allow_client_incoming",
"(",
"request",
".",
"form",
"[",
"\"ClientName\"",
"]",
")",
"token",
"=",
"capability",
".",
"to_jwt",
"(",
")",
"return",
"Response",
"(",
"token",
",",
"mimetype",
"=",
"'application/jwt'",
")"
] | [
8,
0
] | [
24,
54
] | python | en | ['en', 'en', 'en'] | True |
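A sketch of exposing get_capability_token from a Flask app; the route path is illustrative, and TWILIO_ACCOUNT_SID / TWILIO_AUTH_TOKEN must be set in the environment.

from flask import Flask

app = Flask(__name__)
app.add_url_rule('/token', 'token', get_capability_token, methods=['POST'])
# e.g.  curl -X POST -d 'ClientName=alice' http://localhost:5000/token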
JpegImageFile.load_read | (self, read_bytes) |
internal: read more image data
For premature EOF and LOAD_TRUNCATED_IMAGES adds EOI marker
so libjpeg can finish decoding
|
internal: read more image data
For premature EOF and LOAD_TRUNCATED_IMAGES adds EOI marker
so libjpeg can finish decoding
| def load_read(self, read_bytes):
"""
internal: read more image data
For premature EOF and LOAD_TRUNCATED_IMAGES adds EOI marker
so libjpeg can finish decoding
"""
s = self.fp.read(read_bytes)
if not s and ImageFile.LOAD_TRUNCATED_IMAGES:
# Premature EOF.
# Pretend file is finished adding EOI marker
return b"\xFF\xD9"
return s | [
"def",
"load_read",
"(",
"self",
",",
"read_bytes",
")",
":",
"s",
"=",
"self",
".",
"fp",
".",
"read",
"(",
"read_bytes",
")",
"if",
"not",
"s",
"and",
"ImageFile",
".",
"LOAD_TRUNCATED_IMAGES",
":",
"# Premature EOF.",
"# Pretend file is finished adding EOI marker",
"return",
"b\"\\xFF\\xD9\"",
"return",
"s"
] | [
395,
4
] | [
408,
16
] | python | en | ['en', 'error', 'th'] | False |
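The EOI-patching branch above only fires when truncated loading is enabled, which callers opt into globally; a brief sketch (the file name is hypothetical):

from PIL import Image, ImageFile

ImageFile.LOAD_TRUNCATED_IMAGES = True   # lets load_read fake the EOI marker
with Image.open('partial.jpg') as im:    # hypothetical truncated file
    im.load()                            # decodes instead of raising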
JpegImageFile.getxmp | (self) |
Returns a dictionary containing the XMP tags.
Requires defusedxml to be installed.
:returns: XMP tags in a dictionary.
|
Returns a dictionary containing the XMP tags.
Requires defusedxml to be installed.
:returns: XMP tags in a dictionary.
| def getxmp(self):
"""
Returns a dictionary containing the XMP tags.
Requires defusedxml to be installed.
:returns: XMP tags in a dictionary.
"""
for segment, content in self.applist:
if segment == "APP1":
marker, xmp_tags = content.rsplit(b"\x00", 1)
if marker == b"http://ns.adobe.com/xap/1.0/":
return self._getxmp(xmp_tags)
return {} | [
"def",
"getxmp",
"(",
"self",
")",
":",
"for",
"segment",
",",
"content",
"in",
"self",
".",
"applist",
":",
"if",
"segment",
"==",
"\"APP1\"",
":",
"marker",
",",
"xmp_tags",
"=",
"content",
".",
"rsplit",
"(",
"b\"\\x00\"",
",",
"1",
")",
"if",
"marker",
"==",
"b\"http://ns.adobe.com/xap/1.0/\"",
":",
"return",
"self",
".",
"_getxmp",
"(",
"xmp_tags",
")",
"return",
"{",
"}"
] | [
479,
4
] | [
491,
17
] | python | en | ['en', 'error', 'th'] | False |
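A usage sketch for getxmp, which requires defusedxml to be installed; the file name is hypothetical, and the 'xmpmeta' top-level key reflects Pillow's parsed layout (an assumption worth verifying against your Pillow version).

from PIL import Image

with Image.open('photo.jpg') as im:
    xmp = im.getxmp()           # {} when no APP1 XMP segment exists
    print(xmp.get('xmpmeta'))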
DashboardsServiceGrpcTransport.__init__ | (
self,
*,
host: str = "monitoring.googleapis.com",
credentials: credentials.Credentials = None,
credentials_file: str = None,
scopes: Sequence[str] = None,
channel: grpc.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id: Optional[str] = None
) | Instantiate the transport.
Args:
host (Optional[str]): The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional(Sequence[str])): A list of scopes. This argument is
ignored if ``channel`` is provided.
channel (Optional[grpc.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If
provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A
callback to provide client SSL certificate bytes and private key
bytes, both in PEM format. It is ignored if ``api_mtls_endpoint``
is None.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
| Instantiate the transport. | def __init__(
self,
*,
host: str = "monitoring.googleapis.com",
credentials: credentials.Credentials = None,
credentials_file: str = None,
scopes: Sequence[str] = None,
channel: grpc.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id: Optional[str] = None
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]): The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional(Sequence[str])): A list of scopes. This argument is
ignored if ``channel`` is provided.
channel (Optional[grpc.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If
provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A
callback to provide client SSL certificate bytes and private key
bytes, both in PEM format. It is ignored if ``api_mtls_endpoint``
is None.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
if channel:
# Sanity check: Ensure that channel and credentials are not both
# provided.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
elif api_mtls_endpoint:
host = (
api_mtls_endpoint
if ":" in api_mtls_endpoint
else api_mtls_endpoint + ":443"
)
if credentials is None:
credentials, _ = auth.default(
scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
)
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
ssl_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
ssl_credentials = SslCredentials().ssl_credentials
# create a new channel. The provided one is ignored.
self._grpc_channel = type(self).create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
ssl_credentials=ssl_credentials,
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
)
self._stubs = {} # type: Dict[str, Callable]
# Run the base constructor.
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
) | [
"def",
"__init__",
"(",
"self",
",",
"*",
",",
"host",
":",
"str",
"=",
"\"monitoring.googleapis.com\"",
",",
"credentials",
":",
"credentials",
".",
"Credentials",
"=",
"None",
",",
"credentials_file",
":",
"str",
"=",
"None",
",",
"scopes",
":",
"Sequence",
"[",
"str",
"]",
"=",
"None",
",",
"channel",
":",
"grpc",
".",
"Channel",
"=",
"None",
",",
"api_mtls_endpoint",
":",
"str",
"=",
"None",
",",
"client_cert_source",
":",
"Callable",
"[",
"[",
"]",
",",
"Tuple",
"[",
"bytes",
",",
"bytes",
"]",
"]",
"=",
"None",
",",
"quota_project_id",
":",
"Optional",
"[",
"str",
"]",
"=",
"None",
")",
"->",
"None",
":",
"if",
"channel",
":",
"# Sanity check: Ensure that channel and credentials are not both",
"# provided.",
"credentials",
"=",
"False",
"# If a channel was explicitly provided, set it.",
"self",
".",
"_grpc_channel",
"=",
"channel",
"elif",
"api_mtls_endpoint",
":",
"host",
"=",
"(",
"api_mtls_endpoint",
"if",
"\":\"",
"in",
"api_mtls_endpoint",
"else",
"api_mtls_endpoint",
"+",
"\":443\"",
")",
"if",
"credentials",
"is",
"None",
":",
"credentials",
",",
"_",
"=",
"auth",
".",
"default",
"(",
"scopes",
"=",
"self",
".",
"AUTH_SCOPES",
",",
"quota_project_id",
"=",
"quota_project_id",
")",
"# Create SSL credentials with client_cert_source or application",
"# default SSL credentials.",
"if",
"client_cert_source",
":",
"cert",
",",
"key",
"=",
"client_cert_source",
"(",
")",
"ssl_credentials",
"=",
"grpc",
".",
"ssl_channel_credentials",
"(",
"certificate_chain",
"=",
"cert",
",",
"private_key",
"=",
"key",
")",
"else",
":",
"ssl_credentials",
"=",
"SslCredentials",
"(",
")",
".",
"ssl_credentials",
"# create a new channel. The provided one is ignored.",
"self",
".",
"_grpc_channel",
"=",
"type",
"(",
"self",
")",
".",
"create_channel",
"(",
"host",
",",
"credentials",
"=",
"credentials",
",",
"credentials_file",
"=",
"credentials_file",
",",
"ssl_credentials",
"=",
"ssl_credentials",
",",
"scopes",
"=",
"scopes",
"or",
"self",
".",
"AUTH_SCOPES",
",",
"quota_project_id",
"=",
"quota_project_id",
",",
")",
"self",
".",
"_stubs",
"=",
"{",
"}",
"# type: Dict[str, Callable]",
"# Run the base constructor.",
"super",
"(",
")",
".",
"__init__",
"(",
"host",
"=",
"host",
",",
"credentials",
"=",
"credentials",
",",
"credentials_file",
"=",
"credentials_file",
",",
"scopes",
"=",
"scopes",
"or",
"self",
".",
"AUTH_SCOPES",
",",
"quota_project_id",
"=",
"quota_project_id",
",",
")"
] | [
50,
4
] | [
144,
9
] | python | en | ['en', 'en', 'en'] | True |
DashboardsServiceGrpcTransport.create_channel | (
cls,
host: str = "monitoring.googleapis.com",
credentials: credentials.Credentials = None,
credentials_file: str = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
**kwargs
) | Create and return a gRPC channel object.
Args:
address (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is mutually exclusive with credentials.
scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
grpc.Channel: A gRPC channel object.
Raises:
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
| Create and return a gRPC channel object.
Args:
address (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is mutually exclusive with credentials.
scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
grpc.Channel: A gRPC channel object. | def create_channel(
cls,
host: str = "monitoring.googleapis.com",
credentials: credentials.Credentials = None,
credentials_file: str = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
**kwargs
) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
address (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is mutually exclusive with credentials.
scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
grpc.Channel: A gRPC channel object.
Raises:
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
scopes = scopes or cls.AUTH_SCOPES
return grpc_helpers.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
**kwargs
) | [
"def",
"create_channel",
"(",
"cls",
",",
"host",
":",
"str",
"=",
"\"monitoring.googleapis.com\"",
",",
"credentials",
":",
"credentials",
".",
"Credentials",
"=",
"None",
",",
"credentials_file",
":",
"str",
"=",
"None",
",",
"scopes",
":",
"Optional",
"[",
"Sequence",
"[",
"str",
"]",
"]",
"=",
"None",
",",
"quota_project_id",
":",
"Optional",
"[",
"str",
"]",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
"->",
"grpc",
".",
"Channel",
":",
"scopes",
"=",
"scopes",
"or",
"cls",
".",
"AUTH_SCOPES",
"return",
"grpc_helpers",
".",
"create_channel",
"(",
"host",
",",
"credentials",
"=",
"credentials",
",",
"credentials_file",
"=",
"credentials_file",
",",
"scopes",
"=",
"scopes",
",",
"quota_project_id",
"=",
"quota_project_id",
",",
"*",
"*",
"kwargs",
")"
] | [
147,
4
] | [
189,
9
] | python | en | ['en', 'en', 'en'] | True |
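A sketch of the two construction paths the transport supports, assuming application default credentials are configured:

# Path 1: let the transport build its own channel lazily (see grpc_channel below).
transport = DashboardsServiceGrpcTransport()

# Path 2: hand in a prebuilt channel; credential arguments are then ignored.
channel = DashboardsServiceGrpcTransport.create_channel('monitoring.googleapis.com')
transport = DashboardsServiceGrpcTransport(channel=channel)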
DashboardsServiceGrpcTransport.grpc_channel | (self) | Create the channel designed to connect to this service.
This property caches on the instance; repeated calls return
the same channel.
| Create the channel designed to connect to this service. | def grpc_channel(self) -> grpc.Channel:
"""Create the channel designed to connect to this service.
This property caches on the instance; repeated calls return
the same channel.
"""
# Sanity check: Only create a new channel if we do not already
# have one.
if not hasattr(self, "_grpc_channel"):
self._grpc_channel = self.create_channel(
self._host, credentials=self._credentials,
)
# Return the channel from cache.
return self._grpc_channel | [
"def",
"grpc_channel",
"(",
"self",
")",
"->",
"grpc",
".",
"Channel",
":",
"# Sanity check: Only create a new channel if we do not already",
"# have one.",
"if",
"not",
"hasattr",
"(",
"self",
",",
"\"_grpc_channel\"",
")",
":",
"self",
".",
"_grpc_channel",
"=",
"self",
".",
"create_channel",
"(",
"self",
".",
"_host",
",",
"credentials",
"=",
"self",
".",
"_credentials",
",",
")",
"# Return the channel from cache.",
"return",
"self",
".",
"_grpc_channel"
] | [
192,
4
] | [
206,
33
] | python | en | ['en', 'en', 'en'] | True |
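The same cache-on-first-access property pattern in isolation; a minimal sketch:

class LazyChannel:
    def _create(self):
        return object()            # stands in for create_channel(...)

    @property
    def channel(self):
        if not hasattr(self, '_channel'):
            self._channel = self._create()   # built once, on first access
        return self._channel

c = LazyChannel()
assert c.channel is c.channel      # repeated access reuses the cached object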
DashboardsServiceGrpcTransport.create_dashboard | (
self,
) | r"""Return a callable for the create dashboard method over gRPC.
Creates a new custom dashboard.
This method requires the ``monitoring.dashboards.create``
permission on the specified project. For more information, see
`Google Cloud IAM <https://cloud.google.com/iam>`__.
Returns:
Callable[[~.CreateDashboardRequest],
~.Dashboard]:
A function that, when called, will call the underlying RPC
on the server.
| r"""Return a callable for the create dashboard method over gRPC. | def create_dashboard(
self,
) -> Callable[[dashboards_service.CreateDashboardRequest], dashboard.Dashboard]:
r"""Return a callable for the create dashboard method over gRPC.
Creates a new custom dashboard.
This method requires the ``monitoring.dashboards.create``
permission on the specified project. For more information, see
`Google Cloud IAM <https://cloud.google.com/iam>`__.
Returns:
Callable[[~.CreateDashboardRequest],
~.Dashboard]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_dashboard" not in self._stubs:
self._stubs["create_dashboard"] = self.grpc_channel.unary_unary(
"/google.monitoring.dashboard.v1.DashboardsService/CreateDashboard",
request_serializer=dashboards_service.CreateDashboardRequest.serialize,
response_deserializer=dashboard.Dashboard.deserialize,
)
return self._stubs["create_dashboard"] | [
"def",
"create_dashboard",
"(",
"self",
",",
")",
"->",
"Callable",
"[",
"[",
"dashboards_service",
".",
"CreateDashboardRequest",
"]",
",",
"dashboard",
".",
"Dashboard",
"]",
":",
"# Generate a \"stub function\" on-the-fly which will actually make",
"# the request.",
"# gRPC handles serialization and deserialization, so we just need",
"# to pass in the functions for each.",
"if",
"\"create_dashboard\"",
"not",
"in",
"self",
".",
"_stubs",
":",
"self",
".",
"_stubs",
"[",
"\"create_dashboard\"",
"]",
"=",
"self",
".",
"grpc_channel",
".",
"unary_unary",
"(",
"\"/google.monitoring.dashboard.v1.DashboardsService/CreateDashboard\"",
",",
"request_serializer",
"=",
"dashboards_service",
".",
"CreateDashboardRequest",
".",
"serialize",
",",
"response_deserializer",
"=",
"dashboard",
".",
"Dashboard",
".",
"deserialize",
",",
")",
"return",
"self",
".",
"_stubs",
"[",
"\"create_dashboard\"",
"]"
] | [
209,
4
] | [
236,
46
] | python | en | ['en', 'en', 'en'] | True |
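Each accessor hands back a callable stub. In the generated google-cloud transports these accessors are properties (the decorator is stripped in this listing), so a hedged usage sketch, with a hypothetical resource name, looks like:

request = dashboards_service.CreateDashboardRequest(
    parent='projects/my-project')                   # hypothetical project
dashboard_pb = transport.create_dashboard(request)  # cached stub invoked with the proto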
DashboardsServiceGrpcTransport.list_dashboards | (
self,
) | r"""Return a callable for the list dashboards method over gRPC.
Lists the existing dashboards.
This method requires the ``monitoring.dashboards.list``
permission on the specified project. For more information, see
`Google Cloud IAM <https://cloud.google.com/iam>`__.
Returns:
Callable[[~.ListDashboardsRequest],
~.ListDashboardsResponse]:
A function that, when called, will call the underlying RPC
on the server.
| r"""Return a callable for the list dashboards method over gRPC. | def list_dashboards(
self,
) -> Callable[
[dashboards_service.ListDashboardsRequest],
dashboards_service.ListDashboardsResponse,
]:
r"""Return a callable for the list dashboards method over gRPC.
Lists the existing dashboards.
This method requires the ``monitoring.dashboards.list``
permission on the specified project. For more information, see
`Google Cloud IAM <https://cloud.google.com/iam>`__.
Returns:
Callable[[~.ListDashboardsRequest],
~.ListDashboardsResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_dashboards" not in self._stubs:
self._stubs["list_dashboards"] = self.grpc_channel.unary_unary(
"/google.monitoring.dashboard.v1.DashboardsService/ListDashboards",
request_serializer=dashboards_service.ListDashboardsRequest.serialize,
response_deserializer=dashboards_service.ListDashboardsResponse.deserialize,
)
return self._stubs["list_dashboards"] | [
"def",
"list_dashboards",
"(",
"self",
",",
")",
"->",
"Callable",
"[",
"[",
"dashboards_service",
".",
"ListDashboardsRequest",
"]",
",",
"dashboards_service",
".",
"ListDashboardsResponse",
",",
"]",
":",
"# Generate a \"stub function\" on-the-fly which will actually make",
"# the request.",
"# gRPC handles serialization and deserialization, so we just need",
"# to pass in the functions for each.",
"if",
"\"list_dashboards\"",
"not",
"in",
"self",
".",
"_stubs",
":",
"self",
".",
"_stubs",
"[",
"\"list_dashboards\"",
"]",
"=",
"self",
".",
"grpc_channel",
".",
"unary_unary",
"(",
"\"/google.monitoring.dashboard.v1.DashboardsService/ListDashboards\"",
",",
"request_serializer",
"=",
"dashboards_service",
".",
"ListDashboardsRequest",
".",
"serialize",
",",
"response_deserializer",
"=",
"dashboards_service",
".",
"ListDashboardsResponse",
".",
"deserialize",
",",
")",
"return",
"self",
".",
"_stubs",
"[",
"\"list_dashboards\"",
"]"
] | [
239,
4
] | [
269,
45
] | python | en | ['en', 'en', 'en'] | True |
DashboardsServiceGrpcTransport.get_dashboard | (
self,
) | r"""Return a callable for the get dashboard method over gRPC.
Fetches a specific dashboard.
This method requires the ``monitoring.dashboards.get``
permission on the specified dashboard. For more information, see
`Google Cloud IAM <https://cloud.google.com/iam>`__.
Returns:
Callable[[~.GetDashboardRequest],
~.Dashboard]:
A function that, when called, will call the underlying RPC
on the server.
| r"""Return a callable for the get dashboard method over gRPC. | def get_dashboard(
self,
) -> Callable[[dashboards_service.GetDashboardRequest], dashboard.Dashboard]:
r"""Return a callable for the get dashboard method over gRPC.
Fetches a specific dashboard.
This method requires the ``monitoring.dashboards.get``
permission on the specified dashboard. For more information, see
`Google Cloud IAM <https://cloud.google.com/iam>`__.
Returns:
Callable[[~.GetDashboardRequest],
~.Dashboard]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_dashboard" not in self._stubs:
self._stubs["get_dashboard"] = self.grpc_channel.unary_unary(
"/google.monitoring.dashboard.v1.DashboardsService/GetDashboard",
request_serializer=dashboards_service.GetDashboardRequest.serialize,
response_deserializer=dashboard.Dashboard.deserialize,
)
return self._stubs["get_dashboard"] | [
"def",
"get_dashboard",
"(",
"self",
",",
")",
"->",
"Callable",
"[",
"[",
"dashboards_service",
".",
"GetDashboardRequest",
"]",
",",
"dashboard",
".",
"Dashboard",
"]",
":",
"# Generate a \"stub function\" on-the-fly which will actually make",
"# the request.",
"# gRPC handles serialization and deserialization, so we just need",
"# to pass in the functions for each.",
"if",
"\"get_dashboard\"",
"not",
"in",
"self",
".",
"_stubs",
":",
"self",
".",
"_stubs",
"[",
"\"get_dashboard\"",
"]",
"=",
"self",
".",
"grpc_channel",
".",
"unary_unary",
"(",
"\"/google.monitoring.dashboard.v1.DashboardsService/GetDashboard\"",
",",
"request_serializer",
"=",
"dashboards_service",
".",
"GetDashboardRequest",
".",
"serialize",
",",
"response_deserializer",
"=",
"dashboard",
".",
"Dashboard",
".",
"deserialize",
",",
")",
"return",
"self",
".",
"_stubs",
"[",
"\"get_dashboard\"",
"]"
] | [
272,
4
] | [
299,
43
] | python | en | ['en', 'en', 'en'] | True |
DashboardsServiceGrpcTransport.delete_dashboard | (
self,
) | r"""Return a callable for the delete dashboard method over gRPC.
Deletes an existing custom dashboard.
This method requires the ``monitoring.dashboards.delete``
permission on the specified dashboard. For more information, see
`Google Cloud IAM <https://cloud.google.com/iam>`__.
Returns:
Callable[[~.DeleteDashboardRequest],
~.Empty]:
A function that, when called, will call the underlying RPC
on the server.
| r"""Return a callable for the delete dashboard method over gRPC. | def delete_dashboard(
self,
) -> Callable[[dashboards_service.DeleteDashboardRequest], empty.Empty]:
r"""Return a callable for the delete dashboard method over gRPC.
Deletes an existing custom dashboard.
This method requires the ``monitoring.dashboards.delete``
permission on the specified dashboard. For more information, see
`Google Cloud IAM <https://cloud.google.com/iam>`__.
Returns:
Callable[[~.DeleteDashboardRequest],
~.Empty]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_dashboard" not in self._stubs:
self._stubs["delete_dashboard"] = self.grpc_channel.unary_unary(
"/google.monitoring.dashboard.v1.DashboardsService/DeleteDashboard",
request_serializer=dashboards_service.DeleteDashboardRequest.serialize,
response_deserializer=empty.Empty.FromString,
)
return self._stubs["delete_dashboard"] | [
"def",
"delete_dashboard",
"(",
"self",
",",
")",
"->",
"Callable",
"[",
"[",
"dashboards_service",
".",
"DeleteDashboardRequest",
"]",
",",
"empty",
".",
"Empty",
"]",
":",
"# Generate a \"stub function\" on-the-fly which will actually make",
"# the request.",
"# gRPC handles serialization and deserialization, so we just need",
"# to pass in the functions for each.",
"if",
"\"delete_dashboard\"",
"not",
"in",
"self",
".",
"_stubs",
":",
"self",
".",
"_stubs",
"[",
"\"delete_dashboard\"",
"]",
"=",
"self",
".",
"grpc_channel",
".",
"unary_unary",
"(",
"\"/google.monitoring.dashboard.v1.DashboardsService/DeleteDashboard\"",
",",
"request_serializer",
"=",
"dashboards_service",
".",
"DeleteDashboardRequest",
".",
"serialize",
",",
"response_deserializer",
"=",
"empty",
".",
"Empty",
".",
"FromString",
",",
")",
"return",
"self",
".",
"_stubs",
"[",
"\"delete_dashboard\"",
"]"
] | [
302,
4
] | [
329,
46
] | python | en | ['en', 'en', 'en'] | True |
DashboardsServiceGrpcTransport.update_dashboard | (
self,
) | r"""Return a callable for the update dashboard method over gRPC.
Replaces an existing custom dashboard with a new definition.
This method requires the ``monitoring.dashboards.update``
permission on the specified dashboard. For more information, see
`Google Cloud IAM <https://cloud.google.com/iam>`__.
Returns:
Callable[[~.UpdateDashboardRequest],
~.Dashboard]:
A function that, when called, will call the underlying RPC
on the server.
| r"""Return a callable for the update dashboard method over gRPC. | def update_dashboard(
self,
) -> Callable[[dashboards_service.UpdateDashboardRequest], dashboard.Dashboard]:
r"""Return a callable for the update dashboard method over gRPC.
Replaces an existing custom dashboard with a new definition.
This method requires the ``monitoring.dashboards.update``
permission on the specified dashboard. For more information, see
`Google Cloud IAM <https://cloud.google.com/iam>`__.
Returns:
Callable[[~.UpdateDashboardRequest],
~.Dashboard]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_dashboard" not in self._stubs:
self._stubs["update_dashboard"] = self.grpc_channel.unary_unary(
"/google.monitoring.dashboard.v1.DashboardsService/UpdateDashboard",
request_serializer=dashboards_service.UpdateDashboardRequest.serialize,
response_deserializer=dashboard.Dashboard.deserialize,
)
return self._stubs["update_dashboard"] | [
"def",
"update_dashboard",
"(",
"self",
",",
")",
"->",
"Callable",
"[",
"[",
"dashboards_service",
".",
"UpdateDashboardRequest",
"]",
",",
"dashboard",
".",
"Dashboard",
"]",
":",
"# Generate a \"stub function\" on-the-fly which will actually make",
"# the request.",
"# gRPC handles serialization and deserialization, so we just need",
"# to pass in the functions for each.",
"if",
"\"update_dashboard\"",
"not",
"in",
"self",
".",
"_stubs",
":",
"self",
".",
"_stubs",
"[",
"\"update_dashboard\"",
"]",
"=",
"self",
".",
"grpc_channel",
".",
"unary_unary",
"(",
"\"/google.monitoring.dashboard.v1.DashboardsService/UpdateDashboard\"",
",",
"request_serializer",
"=",
"dashboards_service",
".",
"UpdateDashboardRequest",
".",
"serialize",
",",
"response_deserializer",
"=",
"dashboard",
".",
"Dashboard",
".",
"deserialize",
",",
")",
"return",
"self",
".",
"_stubs",
"[",
"\"update_dashboard\"",
"]"
] | [
332,
4
] | [
359,
46
] | python | en | ['en', 'en', 'en'] | True |
PackageIndex.__init__ | (self, url=None) |
Initialise an instance.
:param url: The URL of the index. If not specified, the URL for PyPI is
used.
|
Initialise an instance. | def __init__(self, url=None):
"""
Initialise an instance.
:param url: The URL of the index. If not specified, the URL for PyPI is
used.
"""
self.url = url or DEFAULT_INDEX
self.read_configuration()
scheme, netloc, path, params, query, frag = urlparse(self.url)
if params or query or frag or scheme not in ('http', 'https'):
raise DistlibException('invalid repository: %s' % self.url)
self.password_handler = None
self.ssl_verifier = None
self.gpg = None
self.gpg_home = None
with open(os.devnull, 'w') as sink:
# Use gpg by default rather than gpg2, as gpg2 insists on
# prompting for passwords
for s in ('gpg', 'gpg2'):
try:
rc = subprocess.check_call([s, '--version'], stdout=sink,
stderr=sink)
if rc == 0:
self.gpg = s
break
except OSError:
pass | [
"def",
"__init__",
"(",
"self",
",",
"url",
"=",
"None",
")",
":",
"self",
".",
"url",
"=",
"url",
"or",
"DEFAULT_INDEX",
"self",
".",
"read_configuration",
"(",
")",
"scheme",
",",
"netloc",
",",
"path",
",",
"params",
",",
"query",
",",
"frag",
"=",
"urlparse",
"(",
"self",
".",
"url",
")",
"if",
"params",
"or",
"query",
"or",
"frag",
"or",
"scheme",
"not",
"in",
"(",
"'http'",
",",
"'https'",
")",
":",
"raise",
"DistlibException",
"(",
"'invalid repository: %s'",
"%",
"self",
".",
"url",
")",
"self",
".",
"password_handler",
"=",
"None",
"self",
".",
"ssl_verifier",
"=",
"None",
"self",
".",
"gpg",
"=",
"None",
"self",
".",
"gpg_home",
"=",
"None",
"with",
"open",
"(",
"os",
".",
"devnull",
",",
"'w'",
")",
"as",
"sink",
":",
"# Use gpg by default rather than gpg2, as gpg2 insists on",
"# prompting for passwords",
"for",
"s",
"in",
"(",
"'gpg'",
",",
"'gpg2'",
")",
":",
"try",
":",
"rc",
"=",
"subprocess",
".",
"check_call",
"(",
"[",
"s",
",",
"'--version'",
"]",
",",
"stdout",
"=",
"sink",
",",
"stderr",
"=",
"sink",
")",
"if",
"rc",
"==",
"0",
":",
"self",
".",
"gpg",
"=",
"s",
"break",
"except",
"OSError",
":",
"pass"
] | [
35,
4
] | [
62,
24
] | python | en | ['en', 'error', 'th'] | False |
PackageIndex._get_pypirc_command | (self) |
Get the distutils command for interacting with PyPI configurations.
:return: the command.
|
Get the distutils command for interacting with PyPI configurations.
:return: the command.
| def _get_pypirc_command(self):
"""
Get the distutils command for interacting with PyPI configurations.
:return: the command.
"""
from .util import _get_pypirc_command as cmd
return cmd() | [
"def",
"_get_pypirc_command",
"(",
"self",
")",
":",
"from",
".",
"util",
"import",
"_get_pypirc_command",
"as",
"cmd",
"return",
"cmd",
"(",
")"
] | [
64,
4
] | [
70,
20
] | python | en | ['en', 'error', 'th'] | False |
PackageIndex.read_configuration | (self) |
Read the PyPI access configuration as supported by distutils. This populates
``username``, ``password``, ``realm`` and ``url`` attributes from the
configuration.
|
Read the PyPI access configuration as supported by distutils. This populates
``username``, ``password``, ``realm`` and ``url`` attributes from the
configuration.
| def read_configuration(self):
"""
Read the PyPI access configuration as supported by distutils. This populates
``username``, ``password``, ``realm`` and ``url`` attributes from the
configuration.
"""
from .util import _load_pypirc
cfg = _load_pypirc(self)
self.username = cfg.get('username')
self.password = cfg.get('password')
self.realm = cfg.get('realm', 'pypi')
self.url = cfg.get('repository', self.url) | [
"def",
"read_configuration",
"(",
"self",
")",
":",
"from",
".",
"util",
"import",
"_load_pypirc",
"cfg",
"=",
"_load_pypirc",
"(",
"self",
")",
"self",
".",
"username",
"=",
"cfg",
".",
"get",
"(",
"'username'",
")",
"self",
".",
"password",
"=",
"cfg",
".",
"get",
"(",
"'password'",
")",
"self",
".",
"realm",
"=",
"cfg",
".",
"get",
"(",
"'realm'",
",",
"'pypi'",
")",
"self",
".",
"url",
"=",
"cfg",
".",
"get",
"(",
"'repository'",
",",
"self",
".",
"url",
")"
] | [
72,
4
] | [
83,
50
] | python | en | ['en', 'error', 'th'] | False |
PackageIndex.save_configuration | (self) |
Save the PyPI access configuration. You must have set ``username`` and
``password`` attributes before calling this method.
|
Save the PyPI access configuration. You must have set ``username`` and
``password`` attributes before calling this method.
| def save_configuration(self):
"""
Save the PyPI access configuration. You must have set ``username`` and
``password`` attributes before calling this method.
"""
self.check_credentials()
from .util import _store_pypirc
_store_pypirc(self) | [
"def",
"save_configuration",
"(",
"self",
")",
":",
"self",
".",
"check_credentials",
"(",
")",
"from",
".",
"util",
"import",
"_store_pypirc",
"_store_pypirc",
"(",
"self",
")"
] | [
85,
4
] | [
92,
27
] | python | en | ['en', 'error', 'th'] | False |
PackageIndex.check_credentials | (self) |
Check that ``username`` and ``password`` have been set, and raise an
exception if not.
|
Check that ``username`` and ``password`` have been set, and raise an
exception if not.
| def check_credentials(self):
"""
Check that ``username`` and ``password`` have been set, and raise an
exception if not.
"""
if self.username is None or self.password is None:
raise DistlibException('username and password must be set')
pm = HTTPPasswordMgr()
_, netloc, _, _, _, _ = urlparse(self.url)
pm.add_password(self.realm, netloc, self.username, self.password)
self.password_handler = HTTPBasicAuthHandler(pm) | [
"def",
"check_credentials",
"(",
"self",
")",
":",
"if",
"self",
".",
"username",
"is",
"None",
"or",
"self",
".",
"password",
"is",
"None",
":",
"raise",
"DistlibException",
"(",
"'username and password must be set'",
")",
"pm",
"=",
"HTTPPasswordMgr",
"(",
")",
"_",
",",
"netloc",
",",
"_",
",",
"_",
",",
"_",
",",
"_",
"=",
"urlparse",
"(",
"self",
".",
"url",
")",
"pm",
".",
"add_password",
"(",
"self",
".",
"realm",
",",
"netloc",
",",
"self",
".",
"username",
",",
"self",
".",
"password",
")",
"self",
".",
"password_handler",
"=",
"HTTPBasicAuthHandler",
"(",
"pm",
")"
] | [
94,
4
] | [
104,
56
] | python | en | ['en', 'error', 'th'] | False |
PackageIndex.register | (self, metadata) |
Register a distribution on PyPI, using the provided metadata.
:param metadata: A :class:`Metadata` instance defining at least a name
and version number for the distribution to be
registered.
:return: The HTTP response received from PyPI upon submission of the
request.
|
Register a distribution on PyPI, using the provided metadata. | def register(self, metadata):
"""
Register a distribution on PyPI, using the provided metadata.
:param metadata: A :class:`Metadata` instance defining at least a name
and version number for the distribution to be
registered.
:return: The HTTP response received from PyPI upon submission of the
request.
"""
self.check_credentials()
metadata.validate()
d = metadata.todict()
d[':action'] = 'verify'
request = self.encode_request(d.items(), [])
response = self.send_request(request)
d[':action'] = 'submit'
request = self.encode_request(d.items(), [])
return self.send_request(request) | [
"def",
"register",
"(",
"self",
",",
"metadata",
")",
":",
"self",
".",
"check_credentials",
"(",
")",
"metadata",
".",
"validate",
"(",
")",
"d",
"=",
"metadata",
".",
"todict",
"(",
")",
"d",
"[",
"':action'",
"]",
"=",
"'verify'",
"request",
"=",
"self",
".",
"encode_request",
"(",
"d",
".",
"items",
"(",
")",
",",
"[",
"]",
")",
"response",
"=",
"self",
".",
"send_request",
"(",
"request",
")",
"d",
"[",
"':action'",
"]",
"=",
"'submit'",
"request",
"=",
"self",
".",
"encode_request",
"(",
"d",
".",
"items",
"(",
")",
",",
"[",
"]",
")",
"return",
"self",
".",
"send_request",
"(",
"request",
")"
] | [
106,
4
] | [
124,
41
] | python | en | ['en', 'error', 'th'] | False |
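A registration sketch using the class above, with hypothetical credentials and distribution fields; note register() performs a live HTTP round-trip to the configured index.

from distlib.metadata import Metadata

index = PackageIndex()                             # defaults to PyPI
index.username, index.password = 'me', 's3cret'    # hypothetical credentials
md = Metadata()
md.name, md.version = 'mypkg', '0.1.0'             # minimal fields; validate() may want more
response = index.register(md)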
PackageIndex._reader | (self, name, stream, outbuf) |
Thread runner for reading lines from a subprocess into a buffer.
:param name: The logical name of the stream (used for logging only).
:param stream: The stream to read from. This will typically be a pipe
connected to the output stream of a subprocess.
:param outbuf: The list to append the read lines to.
|
Thread runner for reading lines from a subprocess. | def _reader(self, name, stream, outbuf):
"""
Thread runner for reading lines from a subprocess into a buffer.
:param name: The logical name of the stream (used for logging only).
:param stream: The stream to read from. This will typically be a pipe
connected to the output stream of a subprocess.
:param outbuf: The list to append the read lines to.
"""
while True:
s = stream.readline()
if not s:
break
s = s.decode('utf-8').rstrip()
outbuf.append(s)
logger.debug('%s: %s' % (name, s))
stream.close() | [
"def",
"_reader",
"(",
"self",
",",
"name",
",",
"stream",
",",
"outbuf",
")",
":",
"while",
"True",
":",
"s",
"=",
"stream",
".",
"readline",
"(",
")",
"if",
"not",
"s",
":",
"break",
"s",
"=",
"s",
".",
"decode",
"(",
"'utf-8'",
")",
".",
"rstrip",
"(",
")",
"outbuf",
".",
"append",
"(",
"s",
")",
"logger",
".",
"debug",
"(",
"'%s: %s'",
"%",
"(",
"name",
",",
"s",
")",
")",
"stream",
".",
"close",
"(",
")"
] | [
126,
4
] | [
142,
22
] | python | en | ['en', 'error', 'th'] | False |
PackageIndex.get_sign_command | (self, filename, signer, sign_password,
keystore=None) |
Return a suitable command for signing a file.
:param filename: The pathname to the file to be signed.
:param signer: The identifier of the signer of the file.
:param sign_password: The passphrase for the signer's
private key used for signing.
:param keystore: The path to a directory which contains the keys
used in signing. If not specified, the
instance's ``gpg_home`` attribute is used instead.
:return: The signing command as a list suitable to be
passed to :class:`subprocess.Popen`.
|
Return a suitable command for signing a file. | def get_sign_command(self, filename, signer, sign_password,
keystore=None):
"""
Return a suitable command for signing a file.
:param filename: The pathname to the file to be signed.
:param signer: The identifier of the signer of the file.
:param sign_password: The passphrase for the signer's
private key used for signing.
:param keystore: The path to a directory which contains the keys
used in signing. If not specified, the
instance's ``gpg_home`` attribute is used instead.
:return: The signing command as a list suitable to be
passed to :class:`subprocess.Popen`.
"""
cmd = [self.gpg, '--status-fd', '2', '--no-tty']
if keystore is None:
keystore = self.gpg_home
if keystore:
cmd.extend(['--homedir', keystore])
if sign_password is not None:
cmd.extend(['--batch', '--passphrase-fd', '0'])
td = tempfile.mkdtemp()
sf = os.path.join(td, os.path.basename(filename) + '.asc')
cmd.extend(['--detach-sign', '--armor', '--local-user',
signer, '--output', sf, filename])
logger.debug('invoking: %s', ' '.join(cmd))
return cmd, sf | [
"def",
"get_sign_command",
"(",
"self",
",",
"filename",
",",
"signer",
",",
"sign_password",
",",
"keystore",
"=",
"None",
")",
":",
"cmd",
"=",
"[",
"self",
".",
"gpg",
",",
"'--status-fd'",
",",
"'2'",
",",
"'--no-tty'",
"]",
"if",
"keystore",
"is",
"None",
":",
"keystore",
"=",
"self",
".",
"gpg_home",
"if",
"keystore",
":",
"cmd",
".",
"extend",
"(",
"[",
"'--homedir'",
",",
"keystore",
"]",
")",
"if",
"sign_password",
"is",
"not",
"None",
":",
"cmd",
".",
"extend",
"(",
"[",
"'--batch'",
",",
"'--passphrase-fd'",
",",
"'0'",
"]",
")",
"td",
"=",
"tempfile",
".",
"mkdtemp",
"(",
")",
"sf",
"=",
"os",
".",
"path",
".",
"join",
"(",
"td",
",",
"os",
".",
"path",
".",
"basename",
"(",
"filename",
")",
"+",
"'.asc'",
")",
"cmd",
".",
"extend",
"(",
"[",
"'--detach-sign'",
",",
"'--armor'",
",",
"'--local-user'",
",",
"signer",
",",
"'--output'",
",",
"sf",
",",
"filename",
"]",
")",
"logger",
".",
"debug",
"(",
"'invoking: %s'",
",",
"' '",
".",
"join",
"(",
"cmd",
")",
")",
"return",
"cmd",
",",
"sf"
] | [
144,
4
] | [
171,
22
] | python | en | ['en', 'error', 'th'] | False |
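To make the command construction above concrete, a sketch of a call and its approximate result; it assumes a gpg binary was found on PATH when the index was created (so self.gpg is set), and the filename, signer id and passphrase are placeholders.

# Hedged sketch; 'index' as constructed in the register sketch earlier.
cmd, sig_file = index.get_sign_command(
    'dist/demo-0.1.0.tar.gz', 'alice@example.com', 's3cret')
# cmd comes back roughly as:
# ['gpg', '--status-fd', '2', '--no-tty', '--batch', '--passphrase-fd', '0',
#  '--detach-sign', '--armor', '--local-user', 'alice@example.com',
#  '--output', '<tmpdir>/demo-0.1.0.tar.gz.asc', 'dist/demo-0.1.0.tar.gz']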
PackageIndex.run_command | (self, cmd, input_data=None) |
Run a command in a child process, passing it any input data specified.
:param cmd: The command to run.
:param input_data: If specified, this must be a byte string containing
data to be sent to the child process.
:return: A tuple consisting of the subprocess' exit code, a list of
lines read from the subprocess' ``stdout``, and a list of
lines read from the subprocess' ``stderr``.
|
Run a command in a child process, passing it any input data specified. | def run_command(self, cmd, input_data=None):
"""
Run a command in a child process, passing it any input data specified.
:param cmd: The command to run.
:param input_data: If specified, this must be a byte string containing
data to be sent to the child process.
:return: A tuple consisting of the subprocess' exit code, a list of
lines read from the subprocess' ``stdout``, and a list of
lines read from the subprocess' ``stderr``.
"""
kwargs = {
'stdout': subprocess.PIPE,
'stderr': subprocess.PIPE,
}
if input_data is not None:
kwargs['stdin'] = subprocess.PIPE
stdout = []
stderr = []
p = subprocess.Popen(cmd, **kwargs)
# We don't use communicate() here because we may need to
# get clever with interacting with the command
t1 = Thread(target=self._reader, args=('stdout', p.stdout, stdout))
t1.start()
t2 = Thread(target=self._reader, args=('stderr', p.stderr, stderr))
t2.start()
if input_data is not None:
p.stdin.write(input_data)
p.stdin.close()
p.wait()
t1.join()
t2.join()
return p.returncode, stdout, stderr | [
"def",
"run_command",
"(",
"self",
",",
"cmd",
",",
"input_data",
"=",
"None",
")",
":",
"kwargs",
"=",
"{",
"'stdout'",
":",
"subprocess",
".",
"PIPE",
",",
"'stderr'",
":",
"subprocess",
".",
"PIPE",
",",
"}",
"if",
"input_data",
"is",
"not",
"None",
":",
"kwargs",
"[",
"'stdin'",
"]",
"=",
"subprocess",
".",
"PIPE",
"stdout",
"=",
"[",
"]",
"stderr",
"=",
"[",
"]",
"p",
"=",
"subprocess",
".",
"Popen",
"(",
"cmd",
",",
"*",
"*",
"kwargs",
")",
"# We don't use communicate() here because we may need to",
"# get clever with interacting with the command",
"t1",
"=",
"Thread",
"(",
"target",
"=",
"self",
".",
"_reader",
",",
"args",
"=",
"(",
"'stdout'",
",",
"p",
".",
"stdout",
",",
"stdout",
")",
")",
"t1",
".",
"start",
"(",
")",
"t2",
"=",
"Thread",
"(",
"target",
"=",
"self",
".",
"_reader",
",",
"args",
"=",
"(",
"'stderr'",
",",
"p",
".",
"stderr",
",",
"stderr",
")",
")",
"t2",
".",
"start",
"(",
")",
"if",
"input_data",
"is",
"not",
"None",
":",
"p",
".",
"stdin",
".",
"write",
"(",
"input_data",
")",
"p",
".",
"stdin",
".",
"close",
"(",
")",
"p",
".",
"wait",
"(",
")",
"t1",
".",
"join",
"(",
")",
"t2",
".",
"join",
"(",
")",
"return",
"p",
".",
"returncode",
",",
"stdout",
",",
"stderr"
] | [
173,
4
] | [
206,
43
] | python | en | ['en', 'error', 'th'] | False |
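run_command is usable on its own with any argv; a sketch, assuming gpg is installed so the example command exists.

# Hedged sketch; 'index' as constructed in the register sketch earlier.
rc, out_lines, err_lines = index.run_command(['gpg', '--version'])
if rc == 0:
    print(out_lines[0])   # first captured stdout line, e.g. 'gpg (GnuPG) ...'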
PackageIndex.sign_file | (self, filename, signer, sign_password, keystore=None) |
Sign a file.
:param filename: The pathname to the file to be signed.
:param signer: The identifier of the signer of the file.
:param sign_password: The passphrase for the signer's
private key used for signing.
:param keystore: The path to a directory which contains the keys
used in signing. If not specified, the instance's
``gpg_home`` attribute is used instead.
:return: The absolute pathname of the file where the signature is
stored.
|
Sign a file. | def sign_file(self, filename, signer, sign_password, keystore=None):
"""
Sign a file.
:param filename: The pathname to the file to be signed.
:param signer: The identifier of the signer of the file.
:param sign_password: The passphrase for the signer's
private key used for signing.
:param keystore: The path to a directory which contains the keys
used in signing. If not specified, the instance's
``gpg_home`` attribute is used instead.
:return: The absolute pathname of the file where the signature is
stored.
"""
cmd, sig_file = self.get_sign_command(filename, signer, sign_password,
keystore)
rc, stdout, stderr = self.run_command(cmd,
sign_password.encode('utf-8'))
if rc != 0:
raise DistlibException('sign command failed with error '
'code %s' % rc)
return sig_file | [
"def",
"sign_file",
"(",
"self",
",",
"filename",
",",
"signer",
",",
"sign_password",
",",
"keystore",
"=",
"None",
")",
":",
"cmd",
",",
"sig_file",
"=",
"self",
".",
"get_sign_command",
"(",
"filename",
",",
"signer",
",",
"sign_password",
",",
"keystore",
")",
"rc",
",",
"stdout",
",",
"stderr",
"=",
"self",
".",
"run_command",
"(",
"cmd",
",",
"sign_password",
".",
"encode",
"(",
"'utf-8'",
")",
")",
"if",
"rc",
"!=",
"0",
":",
"raise",
"DistlibException",
"(",
"'sign command failed with error '",
"'code %s'",
"%",
"rc",
")",
"return",
"sig_file"
] | [
208,
4
] | [
229,
23
] | python | en | ['en', 'error', 'th'] | False |
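Continuing the same hypothetical setup, a sketch of sign_file; the passphrase is delivered to gpg's stdin through run_command's input_data path, and the paths and identities are placeholders.

# Hedged sketch; 'index' as constructed in the register sketch earlier.
sig_path = index.sign_file('dist/demo-0.1.0.tar.gz',
                           'alice@example.com', 's3cret')
# sig_path names a detached, ASCII-armoured '.asc' file in a temp directory.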
PackageIndex.upload_file | (self, metadata, filename, signer=None, sign_password=None,
filetype='sdist', pyversion='source', keystore=None) |
Upload a release file to the index.
:param metadata: A :class:`Metadata` instance defining at least a name
and version number for the file to be uploaded.
:param filename: The pathname of the file to be uploaded.
:param signer: The identifier of the signer of the file.
:param sign_password: The passphrase for the signer's
private key used for signing.
:param filetype: The type of the file being uploaded. This is the
distutils command which produced that file, e.g.
``sdist`` or ``bdist_wheel``.
:param pyversion: The version of Python which the release relates
to. For code compatible with any Python, this would
be ``source``, otherwise it would be e.g. ``3.2``.
:param keystore: The path to a directory which contains the keys
used in signing. If not specified, the instance's
``gpg_home`` attribute is used instead.
:return: The HTTP response received from PyPI upon submission of the
request.
|
Upload a release file to the index. | def upload_file(self, metadata, filename, signer=None, sign_password=None,
filetype='sdist', pyversion='source', keystore=None):
"""
Upload a release file to the index.
:param metadata: A :class:`Metadata` instance defining at least a name
and version number for the file to be uploaded.
:param filename: The pathname of the file to be uploaded.
:param signer: The identifier of the signer of the file.
:param sign_password: The passphrase for the signer's
private key used for signing.
:param filetype: The type of the file being uploaded. This is the
distutils command which produced that file, e.g.
``sdist`` or ``bdist_wheel``.
:param pyversion: The version of Python which the release relates
to. For code compatible with any Python, this would
be ``source``, otherwise it would be e.g. ``3.2``.
:param keystore: The path to a directory which contains the keys
used in signing. If not specified, the instance's
``gpg_home`` attribute is used instead.
:return: The HTTP response received from PyPI upon submission of the
request.
"""
self.check_credentials()
if not os.path.exists(filename):
raise DistlibException('not found: %s' % filename)
metadata.validate()
d = metadata.todict()
sig_file = None
if signer:
if not self.gpg:
logger.warning('no signing program available - not signed')
else:
sig_file = self.sign_file(filename, signer, sign_password,
keystore)
with open(filename, 'rb') as f:
file_data = f.read()
md5_digest = hashlib.md5(file_data).hexdigest()
sha256_digest = hashlib.sha256(file_data).hexdigest()
d.update({
':action': 'file_upload',
'protocol_version': '1',
'filetype': filetype,
'pyversion': pyversion,
'md5_digest': md5_digest,
'sha256_digest': sha256_digest,
})
files = [('content', os.path.basename(filename), file_data)]
if sig_file:
with open(sig_file, 'rb') as f:
sig_data = f.read()
files.append(('gpg_signature', os.path.basename(sig_file),
sig_data))
shutil.rmtree(os.path.dirname(sig_file))
request = self.encode_request(d.items(), files)
return self.send_request(request) | [
"def",
"upload_file",
"(",
"self",
",",
"metadata",
",",
"filename",
",",
"signer",
"=",
"None",
",",
"sign_password",
"=",
"None",
",",
"filetype",
"=",
"'sdist'",
",",
"pyversion",
"=",
"'source'",
",",
"keystore",
"=",
"None",
")",
":",
"self",
".",
"check_credentials",
"(",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"filename",
")",
":",
"raise",
"DistlibException",
"(",
"'not found: %s'",
"%",
"filename",
")",
"metadata",
".",
"validate",
"(",
")",
"d",
"=",
"metadata",
".",
"todict",
"(",
")",
"sig_file",
"=",
"None",
"if",
"signer",
":",
"if",
"not",
"self",
".",
"gpg",
":",
"logger",
".",
"warning",
"(",
"'no signing program available - not signed'",
")",
"else",
":",
"sig_file",
"=",
"self",
".",
"sign_file",
"(",
"filename",
",",
"signer",
",",
"sign_password",
",",
"keystore",
")",
"with",
"open",
"(",
"filename",
",",
"'rb'",
")",
"as",
"f",
":",
"file_data",
"=",
"f",
".",
"read",
"(",
")",
"md5_digest",
"=",
"hashlib",
".",
"md5",
"(",
"file_data",
")",
".",
"hexdigest",
"(",
")",
"sha256_digest",
"=",
"hashlib",
".",
"sha256",
"(",
"file_data",
")",
".",
"hexdigest",
"(",
")",
"d",
".",
"update",
"(",
"{",
"':action'",
":",
"'file_upload'",
",",
"'protocol_version'",
":",
"'1'",
",",
"'filetype'",
":",
"filetype",
",",
"'pyversion'",
":",
"pyversion",
",",
"'md5_digest'",
":",
"md5_digest",
",",
"'sha256_digest'",
":",
"sha256_digest",
",",
"}",
")",
"files",
"=",
"[",
"(",
"'content'",
",",
"os",
".",
"path",
".",
"basename",
"(",
"filename",
")",
",",
"file_data",
")",
"]",
"if",
"sig_file",
":",
"with",
"open",
"(",
"sig_file",
",",
"'rb'",
")",
"as",
"f",
":",
"sig_data",
"=",
"f",
".",
"read",
"(",
")",
"files",
".",
"append",
"(",
"(",
"'gpg_signature'",
",",
"os",
".",
"path",
".",
"basename",
"(",
"sig_file",
")",
",",
"sig_data",
")",
")",
"shutil",
".",
"rmtree",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"sig_file",
")",
")",
"request",
"=",
"self",
".",
"encode_request",
"(",
"d",
".",
"items",
"(",
")",
",",
"files",
")",
"return",
"self",
".",
"send_request",
"(",
"request",
")"
] | [
231,
4
] | [
286,
41
] | python | en | ['en', 'error', 'th'] | False |
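A hedged sketch of a full upload with upload_file; the metadata path, archive path and signer identity are all placeholders, and the PKG-INFO file is assumed to exist.

# Hedged sketch; 'index' with credentials as in the register sketch earlier.
md = Metadata(path='demo-0.1.0/PKG-INFO')    # assumed to exist
response = index.upload_file(md, 'dist/demo-0.1.0.tar.gz',
                             signer='alice@example.com',
                             sign_password='s3cret',
                             filetype='sdist', pyversion='source')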
PackageIndex.upload_documentation | (self, metadata, doc_dir) |
Upload documentation to the index.
:param metadata: A :class:`Metadata` instance defining at least a name
and version number for the documentation to be
uploaded.
:param doc_dir: The pathname of the directory which contains the
documentation. This should be the directory that
contains the ``index.html`` for the documentation.
:return: The HTTP response received from PyPI upon submission of the
request.
|
Upload documentation to the index. | def upload_documentation(self, metadata, doc_dir):
"""
Upload documentation to the index.
:param metadata: A :class:`Metadata` instance defining at least a name
and version number for the documentation to be
uploaded.
:param doc_dir: The pathname of the directory which contains the
documentation. This should be the directory that
contains the ``index.html`` for the documentation.
:return: The HTTP response received from PyPI upon submission of the
request.
"""
self.check_credentials()
if not os.path.isdir(doc_dir):
raise DistlibException('not a directory: %r' % doc_dir)
fn = os.path.join(doc_dir, 'index.html')
if not os.path.exists(fn):
raise DistlibException('not found: %r' % fn)
metadata.validate()
name, version = metadata.name, metadata.version
zip_data = zip_dir(doc_dir).getvalue()
fields = [(':action', 'doc_upload'),
('name', name), ('version', version)]
files = [('content', name, zip_data)]
request = self.encode_request(fields, files)
return self.send_request(request) | [
"def",
"upload_documentation",
"(",
"self",
",",
"metadata",
",",
"doc_dir",
")",
":",
"self",
".",
"check_credentials",
"(",
")",
"if",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"doc_dir",
")",
":",
"raise",
"DistlibException",
"(",
"'not a directory: %r'",
"%",
"doc_dir",
")",
"fn",
"=",
"os",
".",
"path",
".",
"join",
"(",
"doc_dir",
",",
"'index.html'",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"fn",
")",
":",
"raise",
"DistlibException",
"(",
"'not found: %r'",
"%",
"fn",
")",
"metadata",
".",
"validate",
"(",
")",
"name",
",",
"version",
"=",
"metadata",
".",
"name",
",",
"metadata",
".",
"version",
"zip_data",
"=",
"zip_dir",
"(",
"doc_dir",
")",
".",
"getvalue",
"(",
")",
"fields",
"=",
"[",
"(",
"':action'",
",",
"'doc_upload'",
")",
",",
"(",
"'name'",
",",
"name",
")",
",",
"(",
"'version'",
",",
"version",
")",
"]",
"files",
"=",
"[",
"(",
"'content'",
",",
"name",
",",
"zip_data",
")",
"]",
"request",
"=",
"self",
".",
"encode_request",
"(",
"fields",
",",
"files",
")",
"return",
"self",
".",
"send_request",
"(",
"request",
")"
] | [
288,
4
] | [
314,
41
] | python | en | ['en', 'error', 'th'] | False |
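upload_documentation follows the same pattern; a one-call sketch with a hypothetical build directory, which must contain an index.html at its top level.

# Hedged sketch; 'index' and 'md' as in the upload_file sketch above.
response = index.upload_documentation(md, 'build/html')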
PackageIndex.get_verify_command | (self, signature_filename, data_filename,
keystore=None) |
Return a suitable command for verifying a file.
:param signature_filename: The pathname to the file containing the
signature.
:param data_filename: The pathname to the file containing the
signed data.
:param keystore: The path to a directory which contains the keys
used in verification. If not specified, the
instance's ``gpg_home`` attribute is used instead.
:return: The verifying command as a list suitable to be
passed to :class:`subprocess.Popen`.
|
Return a suitable command for verifying a file. | def get_verify_command(self, signature_filename, data_filename,
keystore=None):
"""
Return a suitable command for verifying a file.
:param signature_filename: The pathname to the file containing the
signature.
:param data_filename: The pathname to the file containing the
signed data.
:param keystore: The path to a directory which contains the keys
used in verification. If not specified, the
instance's ``gpg_home`` attribute is used instead.
:return: The verifying command as a list suitable to be
passed to :class:`subprocess.Popen`.
"""
cmd = [self.gpg, '--status-fd', '2', '--no-tty']
if keystore is None:
keystore = self.gpg_home
if keystore:
cmd.extend(['--homedir', keystore])
cmd.extend(['--verify', signature_filename, data_filename])
logger.debug('invoking: %s', ' '.join(cmd))
return cmd | [
"def",
"get_verify_command",
"(",
"self",
",",
"signature_filename",
",",
"data_filename",
",",
"keystore",
"=",
"None",
")",
":",
"cmd",
"=",
"[",
"self",
".",
"gpg",
",",
"'--status-fd'",
",",
"'2'",
",",
"'--no-tty'",
"]",
"if",
"keystore",
"is",
"None",
":",
"keystore",
"=",
"self",
".",
"gpg_home",
"if",
"keystore",
":",
"cmd",
".",
"extend",
"(",
"[",
"'--homedir'",
",",
"keystore",
"]",
")",
"cmd",
".",
"extend",
"(",
"[",
"'--verify'",
",",
"signature_filename",
",",
"data_filename",
"]",
")",
"logger",
".",
"debug",
"(",
"'invoking: %s'",
",",
"' '",
".",
"join",
"(",
"cmd",
")",
")",
"return",
"cmd"
] | [
316,
4
] | [
338,
18
] | python | en | ['en', 'error', 'th'] | False |
PackageIndex.verify_signature | (self, signature_filename, data_filename,
keystore=None) |
Verify a signature for a file.
:param signature_filename: The pathname to the file containing the
signature.
:param data_filename: The pathname to the file containing the
signed data.
:param keystore: The path to a directory which contains the keys
used in verification. If not specified, the
instance's ``gpg_home`` attribute is used instead.
:return: True if the signature was verified, else False.
|
Verify a signature for a file. | def verify_signature(self, signature_filename, data_filename,
keystore=None):
"""
Verify a signature for a file.
:param signature_filename: The pathname to the file containing the
signature.
:param data_filename: The pathname to the file containing the
signed data.
:param keystore: The path to a directory which contains the keys
used in verification. If not specified, the
instance's ``gpg_home`` attribute is used instead.
:return: True if the signature was verified, else False.
"""
if not self.gpg:
raise DistlibException('verification unavailable because gpg '
'unavailable')
cmd = self.get_verify_command(signature_filename, data_filename,
keystore)
rc, stdout, stderr = self.run_command(cmd)
if rc not in (0, 1):
raise DistlibException('verify command failed with error '
'code %s' % rc)
return rc == 0 | [
"def",
"verify_signature",
"(",
"self",
",",
"signature_filename",
",",
"data_filename",
",",
"keystore",
"=",
"None",
")",
":",
"if",
"not",
"self",
".",
"gpg",
":",
"raise",
"DistlibException",
"(",
"'verification unavailable because gpg '",
"'unavailable'",
")",
"cmd",
"=",
"self",
".",
"get_verify_command",
"(",
"signature_filename",
",",
"data_filename",
",",
"keystore",
")",
"rc",
",",
"stdout",
",",
"stderr",
"=",
"self",
".",
"run_command",
"(",
"cmd",
")",
"if",
"rc",
"not",
"in",
"(",
"0",
",",
"1",
")",
":",
"raise",
"DistlibException",
"(",
"'verify command failed with error '",
"'code %s'",
"%",
"rc",
")",
"return",
"rc",
"==",
"0"
] | [
340,
4
] | [
363,
22
] | python | en | ['en', 'error', 'th'] | False |
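A sketch of verify_signature; per the exit-code handling above, gpg status 0 means verified, 1 means a bad signature, and anything else raises DistlibException. File names are placeholders.

# Hedged sketch; 'index' as constructed in the register sketch earlier.
ok = index.verify_signature('dist/demo-0.1.0.tar.gz.asc',
                            'dist/demo-0.1.0.tar.gz')
print('verified' if ok else 'signature did not verify')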
PackageIndex.download_file | (self, url, destfile, digest=None, reporthook=None) |
This is a convenience method for downloading a file from a URL.
Normally, this will be a file from the index, though currently
no check is made for this (i.e. a file can be downloaded from
anywhere).
The method is just like the :func:`urlretrieve` function in the
standard library, except that it allows digest computation to be
done during download and checking that the downloaded data
matches any expected value.
:param url: The URL of the file to be downloaded (assumed to be
available via an HTTP GET request).
:param destfile: The pathname where the downloaded file is to be
saved.
:param digest: If specified, this must be a (hasher, value)
tuple, where hasher is the algorithm used (e.g.
``'md5'``) and ``value`` is the expected value.
:param reporthook: The same as for :func:`urlretrieve` in the
standard library.
|
This is a convenience method for downloading a file from a URL.
Normally, this will be a file from the index, though currently
no check is made for this (i.e. a file can be downloaded from
anywhere). | def download_file(self, url, destfile, digest=None, reporthook=None):
"""
This is a convenience method for downloading a file from a URL.
Normally, this will be a file from the index, though currently
no check is made for this (i.e. a file can be downloaded from
anywhere).
The method is just like the :func:`urlretrieve` function in the
standard library, except that it allows digest computation to be
done during download and checking that the downloaded data
matches any expected value.
:param url: The URL of the file to be downloaded (assumed to be
available via an HTTP GET request).
:param destfile: The pathname where the downloaded file is to be
saved.
:param digest: If specified, this must be a (hasher, value)
tuple, where hasher is the algorithm used (e.g.
``'md5'``) and ``value`` is the expected value.
:param reporthook: The same as for :func:`urlretrieve` in the
standard library.
"""
if digest is None:
digester = None
logger.debug('No digest specified')
else:
if isinstance(digest, (list, tuple)):
hasher, digest = digest
else:
hasher = 'md5'
digester = getattr(hashlib, hasher)()
logger.debug('Digest specified: %s' % digest)
# The following code is equivalent to urlretrieve.
# We need to do it this way so that we can compute the
# digest of the file as we go.
with open(destfile, 'wb') as dfp:
# addinfourl is not a context manager on 2.x
# so we have to use try/finally
sfp = self.send_request(Request(url))
try:
headers = sfp.info()
blocksize = 8192
size = -1
read = 0
blocknum = 0
if "content-length" in headers:
size = int(headers["Content-Length"])
if reporthook:
reporthook(blocknum, blocksize, size)
while True:
block = sfp.read(blocksize)
if not block:
break
read += len(block)
dfp.write(block)
if digester:
digester.update(block)
blocknum += 1
if reporthook:
reporthook(blocknum, blocksize, size)
finally:
sfp.close()
# check that we got the whole file, if we can
if size >= 0 and read < size:
raise DistlibException(
'retrieval incomplete: got only %d out of %d bytes'
% (read, size))
# if we have a digest, it must match.
if digester:
actual = digester.hexdigest()
if digest != actual:
raise DistlibException('%s digest mismatch for %s: expected '
'%s, got %s' % (hasher, destfile,
digest, actual))
logger.debug('Digest verified: %s', digest) | [
"def",
"download_file",
"(",
"self",
",",
"url",
",",
"destfile",
",",
"digest",
"=",
"None",
",",
"reporthook",
"=",
"None",
")",
":",
"if",
"digest",
"is",
"None",
":",
"digester",
"=",
"None",
"logger",
".",
"debug",
"(",
"'No digest specified'",
")",
"else",
":",
"if",
"isinstance",
"(",
"digest",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"hasher",
",",
"digest",
"=",
"digest",
"else",
":",
"hasher",
"=",
"'md5'",
"digester",
"=",
"getattr",
"(",
"hashlib",
",",
"hasher",
")",
"(",
")",
"logger",
".",
"debug",
"(",
"'Digest specified: %s'",
"%",
"digest",
")",
"# The following code is equivalent to urlretrieve.",
"# We need to do it this way so that we can compute the",
"# digest of the file as we go.",
"with",
"open",
"(",
"destfile",
",",
"'wb'",
")",
"as",
"dfp",
":",
"# addinfourl is not a context manager on 2.x",
"# so we have to use try/finally",
"sfp",
"=",
"self",
".",
"send_request",
"(",
"Request",
"(",
"url",
")",
")",
"try",
":",
"headers",
"=",
"sfp",
".",
"info",
"(",
")",
"blocksize",
"=",
"8192",
"size",
"=",
"-",
"1",
"read",
"=",
"0",
"blocknum",
"=",
"0",
"if",
"\"content-length\"",
"in",
"headers",
":",
"size",
"=",
"int",
"(",
"headers",
"[",
"\"Content-Length\"",
"]",
")",
"if",
"reporthook",
":",
"reporthook",
"(",
"blocknum",
",",
"blocksize",
",",
"size",
")",
"while",
"True",
":",
"block",
"=",
"sfp",
".",
"read",
"(",
"blocksize",
")",
"if",
"not",
"block",
":",
"break",
"read",
"+=",
"len",
"(",
"block",
")",
"dfp",
".",
"write",
"(",
"block",
")",
"if",
"digester",
":",
"digester",
".",
"update",
"(",
"block",
")",
"blocknum",
"+=",
"1",
"if",
"reporthook",
":",
"reporthook",
"(",
"blocknum",
",",
"blocksize",
",",
"size",
")",
"finally",
":",
"sfp",
".",
"close",
"(",
")",
"# check that we got the whole file, if we can",
"if",
"size",
">=",
"0",
"and",
"read",
"<",
"size",
":",
"raise",
"DistlibException",
"(",
"'retrieval incomplete: got only %d out of %d bytes'",
"%",
"(",
"read",
",",
"size",
")",
")",
"# if we have a digest, it must match.",
"if",
"digester",
":",
"actual",
"=",
"digester",
".",
"hexdigest",
"(",
")",
"if",
"digest",
"!=",
"actual",
":",
"raise",
"DistlibException",
"(",
"'%s digest mismatch for %s: expected '",
"'%s, got %s'",
"%",
"(",
"hasher",
",",
"destfile",
",",
"digest",
",",
"actual",
")",
")",
"logger",
".",
"debug",
"(",
"'Digest verified: %s'",
",",
"digest",
")"
] | [
365,
4
] | [
440,
55
] | python | en | ['en', 'error', 'th'] | False |
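A sketch of download_file with both optional arguments; the URL and digest value are placeholders. Note that a bare string digest is treated as an md5 value, per the (list, tuple) check in the body.

# Hedged sketch; 'index' as constructed in the register sketch earlier.
def progress(blocknum, blocksize, total):   # same protocol as urlretrieve
    if total > 0:
        print('%d / %d bytes' % (min(blocknum * blocksize, total), total))

index.download_file(
    'https://pypi.example.org/packages/demo-0.1.0.tar.gz',
    '/tmp/demo-0.1.0.tar.gz',
    digest=('sha256', 'expected-hex-digest'),  # placeholder value
    reporthook=progress)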
PackageIndex.send_request | (self, req) |
Send a standard library :class:`Request` to PyPI and return its
response.
:param req: The request to send.
:return: The HTTP response from PyPI (a standard library HTTPResponse).
|
Send a standard library :class:`Request` to PyPI and return its
response. | def send_request(self, req):
"""
Send a standard library :class:`Request` to PyPI and return its
response.
:param req: The request to send.
:return: The HTTP response from PyPI (a standard library HTTPResponse).
"""
handlers = []
if self.password_handler:
handlers.append(self.password_handler)
if self.ssl_verifier:
handlers.append(self.ssl_verifier)
opener = build_opener(*handlers)
return opener.open(req) | [
"def",
"send_request",
"(",
"self",
",",
"req",
")",
":",
"handlers",
"=",
"[",
"]",
"if",
"self",
".",
"password_handler",
":",
"handlers",
".",
"append",
"(",
"self",
".",
"password_handler",
")",
"if",
"self",
".",
"ssl_verifier",
":",
"handlers",
".",
"append",
"(",
"self",
".",
"ssl_verifier",
")",
"opener",
"=",
"build_opener",
"(",
"*",
"handlers",
")",
"return",
"opener",
".",
"open",
"(",
"req",
")"
] | [
442,
4
] | [
456,
31
] | python | en | ['en', 'error', 'th'] | False |
PackageIndex.encode_request | (self, fields, files) |
Encode fields and files for posting to an HTTP server.
:param fields: The fields to send as a list of (fieldname, value)
tuples.
:param files: The files to send as a list of (fieldname, filename,
file_bytes) tuples.
|
Encode fields and files for posting to an HTTP server. | def encode_request(self, fields, files):
"""
Encode fields and files for posting to an HTTP server.
:param fields: The fields to send as a list of (fieldname, value)
tuples.
:param files: The files to send as a list of (fieldname, filename,
file_bytes) tuples.
"""
# Adapted from packaging, which in turn was adapted from
# http://code.activestate.com/recipes/146306
parts = []
boundary = self.boundary
for k, values in fields:
if not isinstance(values, (list, tuple)):
values = [values]
for v in values:
parts.extend((
b'--' + boundary,
('Content-Disposition: form-data; name="%s"' %
k).encode('utf-8'),
b'',
v.encode('utf-8')))
for key, filename, value in files:
parts.extend((
b'--' + boundary,
('Content-Disposition: form-data; name="%s"; filename="%s"' %
(key, filename)).encode('utf-8'),
b'',
value))
parts.extend((b'--' + boundary + b'--', b''))
body = b'\r\n'.join(parts)
ct = b'multipart/form-data; boundary=' + boundary
headers = {
'Content-type': ct,
'Content-length': str(len(body))
}
return Request(self.url, body, headers) | [
"def",
"encode_request",
"(",
"self",
",",
"fields",
",",
"files",
")",
":",
"# Adapted from packaging, which in turn was adapted from",
"# http://code.activestate.com/recipes/146306",
"parts",
"=",
"[",
"]",
"boundary",
"=",
"self",
".",
"boundary",
"for",
"k",
",",
"values",
"in",
"fields",
":",
"if",
"not",
"isinstance",
"(",
"values",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"values",
"=",
"[",
"values",
"]",
"for",
"v",
"in",
"values",
":",
"parts",
".",
"extend",
"(",
"(",
"b'--'",
"+",
"boundary",
",",
"(",
"'Content-Disposition: form-data; name=\"%s\"'",
"%",
"k",
")",
".",
"encode",
"(",
"'utf-8'",
")",
",",
"b''",
",",
"v",
".",
"encode",
"(",
"'utf-8'",
")",
")",
")",
"for",
"key",
",",
"filename",
",",
"value",
"in",
"files",
":",
"parts",
".",
"extend",
"(",
"(",
"b'--'",
"+",
"boundary",
",",
"(",
"'Content-Disposition: form-data; name=\"%s\"; filename=\"%s\"'",
"%",
"(",
"key",
",",
"filename",
")",
")",
".",
"encode",
"(",
"'utf-8'",
")",
",",
"b''",
",",
"value",
")",
")",
"parts",
".",
"extend",
"(",
"(",
"b'--'",
"+",
"boundary",
"+",
"b'--'",
",",
"b''",
")",
")",
"body",
"=",
"b'\\r\\n'",
".",
"join",
"(",
"parts",
")",
"ct",
"=",
"b'multipart/form-data; boundary='",
"+",
"boundary",
"headers",
"=",
"{",
"'Content-type'",
":",
"ct",
",",
"'Content-length'",
":",
"str",
"(",
"len",
"(",
"body",
")",
")",
"}",
"return",
"Request",
"(",
"self",
".",
"url",
",",
"body",
",",
"headers",
")"
] | [
458,
4
] | [
499,
47
] | python | en | ['en', 'error', 'th'] | False |
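A sketch of the multipart contract encode_request implements; the field and file tuples are fabricated, and the returned urllib Request is exactly what send_request expects.

# Hedged sketch; 'index' as constructed in the register sketch earlier.
fields = [(':action', 'file_upload'), ('name', 'demo-pkg')]
files = [('content', 'demo-0.1.0.tar.gz', b'...archive bytes...')]
req = index.encode_request(fields, files)
response = index.send_request(req)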
MavWebRTC.start_pipeline | (self) |
# Need to translate this to python
g_signal_emit_by_name (receiver_entry->webrtcbin, "get-transceivers", &transceivers);
g_assert (transceivers != NULL && transceivers->len > 0);
trans = g_array_index (transceivers, GstWebRTCRTPTransceiver *, 0);
trans->direction = GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDONLY;
|
# Need to translate this to python
g_signal_emit_by_name (receiver_entry->webrtcbin, "get-transceivers", &transceivers);
g_assert (transceivers != NULL && transceivers->len > 0);
trans = g_array_index (transceivers, GstWebRTCRTPTransceiver *, 0);
trans->direction = GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDONLY;
| def start_pipeline(self):
self.webrtc = self.pipeline.get_by_name('webrtc')
### Set transceiver to SENDONLY
# https://gstreamer.freedesktop.org/documentation/webrtc/index.html?gi-language=c#webrtcbin::get-transceivers
# https://gstreamer.freedesktop.org/documentation/webrtclib/webrtc_fwd.html?gi-language=c#GstWebRTCRTPTransceiverDirection
# https://gstreamer.freedesktop.org/documentation/webrtc/index.html?gi-language=c#webrtcbin::get-transceiver
# ^^ get_transceivers returns GLib.Array which is not useable in python introspection. get_transceiver added but only works > 1.16
# https://stackoverflow.com/a/57464086
"""
# Need to translate this to python
g_signal_emit_by_name (receiver_entry->webrtcbin, "get-transceivers", &transceivers);
g_assert (transceivers != NULL && transceivers->len > 0);
trans = g_array_index (transceivers, GstWebRTCRTPTransceiver *, 0);
trans->direction = GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDONLY;
"""
#pay = self.pipeline.get_by_name('pay0')
#self.logger.debug("pay: {}".format(pay.get_caps()))
#direction = GstWebRTC.WebRTCRTPTransceiverDirection.SENDONLY
#caps = Gst.caps_from_string("application/x-rtp,media=video,encoding-name=VP8/9000,payload=96")
#self.webrtc.emit('add-transceiver', direction, caps)
self.webrtc.connect('on-negotiation-needed', self.on_negotiation_needed)
self.webrtc.connect('on-ice-candidate', self.send_ice_candidate_message)
self.webrtc.connect('pad-added', self.on_incoming_stream)
self.logger.info("Setting WebRTC pipeline to active")
self.pipeline.set_state(Gst.State.PLAYING) | [
"def",
"start_pipeline",
"(",
"self",
")",
":",
"self",
".",
"webrtc",
"=",
"self",
".",
"pipeline",
".",
"get_by_name",
"(",
"'webrtc'",
")",
"### Set transceiver to SENDONLY",
"# https://gstreamer.freedesktop.org/documentation/webrtc/index.html?gi-language=c#webrtcbin::get-transceivers",
"# https://gstreamer.freedesktop.org/documentation/webrtclib/webrtc_fwd.html?gi-language=c#GstWebRTCRTPTransceiverDirection",
"# https://gstreamer.freedesktop.org/documentation/webrtc/index.html?gi-language=c#webrtcbin::get-transceiver",
"# ^^ get_transceivers returns GLib.Array which is not useable in python introspection. get_transceiver added but only works > 1.16",
"# https://stackoverflow.com/a/57464086",
"#pay = self.pipeline.get_by_name('pay0')",
"#self.logger.debug(\"pay: {}\".format(pay.get_caps()))",
"#direction = GstWebRTC.WebRTCRTPTransceiverDirection.SENDONLY",
"#caps = Gst.caps_from_string(\"application/x-rtp,media=video,encoding-name=VP8/9000,payload=96\")",
"#self.webrtc.emit('add-transceiver', direction, caps)",
"self",
".",
"webrtc",
".",
"connect",
"(",
"'on-negotiation-needed'",
",",
"self",
".",
"on_negotiation_needed",
")",
"self",
".",
"webrtc",
".",
"connect",
"(",
"'on-ice-candidate'",
",",
"self",
".",
"send_ice_candidate_message",
")",
"self",
".",
"webrtc",
".",
"connect",
"(",
"'pad-added'",
",",
"self",
".",
"on_incoming_stream",
")",
"self",
".",
"logger",
".",
"info",
"(",
"\"Setting WebRTC pipeline to active\"",
")",
"self",
".",
"pipeline",
".",
"set_state",
"(",
"Gst",
".",
"State",
".",
"PLAYING",
")"
] | [
149,
4
] | [
175,
50
] | python | en | ['en', 'error', 'th'] | False |
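The docstring above asks for a Python translation of the quoted C snippet. One possible, untested sketch: it relies on the singular 'get-transceiver' signal (GStreamer 1.16+, which the in-code comment notes works around the non-introspectable 'get-transceivers') and on 'direction' being a settable GObject property (GStreamer 1.18+).

# Hypothetical translation of the C snippet, not confirmed by the record.
import gi
gi.require_version('GstWebRTC', '1.0')
from gi.repository import GstWebRTC

# (inside start_pipeline, after self.webrtc has been fetched by name)
trans = self.webrtc.emit('get-transceiver', 0)   # first transceiver
trans.set_property('direction',
                   GstWebRTC.WebRTCRTPTransceiverDirection.SENDONLY)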
get_invoke | (command: click.Command) |
Get the Cmd main method from the click command
:param command: The click Command object
:return: the do_* method for Cmd
:rtype: function
|
Get the Cmd main method from the click command
:param command: The click Command object
:return: the do_* method for Cmd
:rtype: function
| def get_invoke(command: click.Command) -> Callable[[ClickCmd, str], bool]:
"""
Get the Cmd main method from the click command
:param command: The click Command object
:return: the do_* method for Cmd
:rtype: function
"""
assert isinstance(command, click.Command)
assert command.callback
def invoke_(self: ClickCmd, arg: str): # pylint: disable=unused-argument
try:
self.ctx.return_code = 0 # Set initial code
self.ctx.return_code = command.main(args=shlex.split(arg),
prog_name=command.name,
standalone_mode=False,
parent=self.ctx)
except click.ClickException as e:
# Show the error message
e.show()
except click.Abort:
# We got an EOF or Keyboard interrupt. Just silence it
pass
except SystemExit:
# Catch this and return the code instead. All of click's help commands do a sys.exit(),
# and that's not ideal when running in a shell.
pass
except Exception as e:
traceback.print_exception(type(e), e, None)
logger.warning(traceback.format_exc())
# Always return False so the shell doesn't exit
return False
invoke_ = update_wrapper(invoke_, command.callback)
invoke_.__name__ = 'do_%s' % command.name
return invoke_ | [
"def",
"get_invoke",
"(",
"command",
":",
"click",
".",
"Command",
")",
"->",
"Callable",
"[",
"[",
"ClickCmd",
",",
"str",
"]",
",",
"bool",
"]",
":",
"assert",
"isinstance",
"(",
"command",
",",
"click",
".",
"Command",
")",
"assert",
"command",
".",
"callback",
"def",
"invoke_",
"(",
"self",
":",
"ClickCmd",
",",
"arg",
":",
"str",
")",
":",
"# pylint: disable=unused-argument",
"try",
":",
"self",
".",
"ctx",
".",
"return_code",
"=",
"0",
"# Set initial code",
"self",
".",
"ctx",
".",
"return_code",
"=",
"command",
".",
"main",
"(",
"args",
"=",
"shlex",
".",
"split",
"(",
"arg",
")",
",",
"prog_name",
"=",
"command",
".",
"name",
",",
"standalone_mode",
"=",
"False",
",",
"parent",
"=",
"self",
".",
"ctx",
")",
"except",
"click",
".",
"ClickException",
"as",
"e",
":",
"# Show the error message",
"e",
".",
"show",
"(",
")",
"except",
"click",
".",
"Abort",
":",
"# We got an EOF or Keyboard interrupt. Just silence it",
"pass",
"except",
"SystemExit",
":",
"# Catch this an return the code instead. All of click's help commands do a sys.exit(),",
"# and that's not ideal when running in a shell.",
"pass",
"except",
"Exception",
"as",
"e",
":",
"traceback",
".",
"print_exception",
"(",
"type",
"(",
"e",
")",
",",
"e",
",",
"None",
")",
"logger",
".",
"warning",
"(",
"traceback",
".",
"format_exc",
"(",
")",
")",
"# Always return False so the shell doesn't exit",
"return",
"False",
"invoke_",
"=",
"update_wrapper",
"(",
"invoke_",
",",
"command",
".",
"callback",
")",
"invoke_",
".",
"__name__",
"=",
"'do_%s'",
"%",
"command",
".",
"name",
"return",
"invoke_"
] | [
23,
0
] | [
60,
18
] | python | en | ['en', 'error', 'th'] | False |
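A sketch of how the generated do_* method is meant to be attached; the greet command and MyShell class are hypothetical, and ClickCmd is the class referenced in the record (its constructor is not shown here).

# Hedged sketch: a hypothetical click command wired into a Cmd-style shell.
import click

@click.command()
@click.argument('name')
def greet(name):
    """Greet someone by name."""
    click.echo('Hello, %s!' % name)

class MyShell(ClickCmd):            # ClickCmd as referenced in the record
    do_greet = get_invoke(greet)    # invoked as 'greet <name>' in the loop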
get_help | (command: click.Command) |
Get the Cmd help function from the click command
:param command: The click Command object
:return: the help_* method for Cmd
:rtype: function
|
Get the Cmd help function from the click command
:param command: The click Command object
:return: the help_* method for Cmd
:rtype: function
| def get_help(command: click.Command) -> Callable[[ClickCmd], None]:
"""
Get the Cmd help function from the click command
:param command: The click Command object
:return: the help_* method for Cmd
:rtype: function
"""
assert isinstance(command, click.Command)
def help_(self: ClickCmd): # pylint: disable=unused-argument
extra = {}
for key, value in command.context_settings.items():
if key not in extra:
extra[key] = value
# Print click's help message
with click.Context(command, info_name=command.name, parent=self.ctx, **extra) as ctx:
click.echo(ctx.get_help(), color=ctx.color)
help_.__name__ = 'help_%s' % command.name
return help_ | [
"def",
"get_help",
"(",
"command",
":",
"click",
".",
"Command",
")",
"->",
"Callable",
"[",
"[",
"ClickCmd",
"]",
",",
"None",
"]",
":",
"assert",
"isinstance",
"(",
"command",
",",
"click",
".",
"Command",
")",
"def",
"help_",
"(",
"self",
":",
"ClickCmd",
")",
":",
"# pylint: disable=unused-argument",
"extra",
"=",
"{",
"}",
"for",
"key",
",",
"value",
"in",
"command",
".",
"context_settings",
".",
"items",
"(",
")",
":",
"if",
"key",
"not",
"in",
"extra",
":",
"extra",
"[",
"key",
"]",
"=",
"value",
"# Print click's help message",
"with",
"click",
".",
"Context",
"(",
"command",
",",
"info_name",
"=",
"command",
".",
"name",
",",
"parent",
"=",
"self",
".",
"ctx",
",",
"*",
"*",
"extra",
")",
"as",
"ctx",
":",
"click",
".",
"echo",
"(",
"ctx",
".",
"get_help",
"(",
")",
",",
"color",
"=",
"ctx",
".",
"color",
")",
"help_",
".",
"__name__",
"=",
"'help_%s'",
"%",
"command",
".",
"name",
"return",
"help_"
] | [
63,
0
] | [
83,
16
] | python | en | ['en', 'error', 'th'] | False |
get_complete | (command: click.Command) |
Get the Cmd complete function for the click command
:param command: The click Command object
:return: the complete_* method for Cmd
:rtype: function
|
Get the Cmd complete function for the click command
:param command: The click Command object
:return: the complete_* method for Cmd
:rtype: function
| def get_complete(command: click.Command) -> Callable[[ClickCmd, str, str, int, int], List[str]]:
"""
Get the Cmd complete function for the click command
:param command: The click Command object
:return: the complete_* method for Cmd
:rtype: function
"""
assert isinstance(command, click.Command)
# pylint: disable=unused-argument
def complete_(
self: ClickCmd,
text: str,
line: str,
begidx: int,
endidx: int,
):
# Parse the args
args = shlex.split(line[:begidx])
# Strip off the first item which is the name of the command
args = args[1:]
# Then pass them on to the get_choices method that click uses for completion
return [choice[0] if isinstance(choice, tuple) else choice
for choice in get_choices(command, command.name or "", args, text)]
complete_.__name__ = 'complete_%s' % command.name
return complete_ | [
"def",
"get_complete",
"(",
"command",
":",
"click",
".",
"Command",
")",
"->",
"Callable",
"[",
"[",
"ClickCmd",
",",
"str",
",",
"str",
",",
"int",
",",
"int",
"]",
",",
"List",
"[",
"str",
"]",
"]",
":",
"assert",
"isinstance",
"(",
"command",
",",
"click",
".",
"Command",
")",
"# pylint: disable=unused-argument",
"def",
"complete_",
"(",
"self",
":",
"ClickCmd",
",",
"text",
":",
"str",
",",
"line",
":",
"str",
",",
"begidx",
":",
"int",
",",
"endidx",
":",
"int",
",",
")",
":",
"# Parse the args",
"args",
"=",
"shlex",
".",
"split",
"(",
"line",
"[",
":",
"begidx",
"]",
")",
"# Strip of the first item which is the name of the command",
"args",
"=",
"args",
"[",
"1",
":",
"]",
"# Then pass them on to the get_choices method that click uses for completion",
"return",
"[",
"choice",
"[",
"0",
"]",
"if",
"isinstance",
"(",
"choice",
",",
"tuple",
")",
"else",
"choice",
"for",
"choice",
"in",
"get_choices",
"(",
"command",
",",
"command",
".",
"name",
"or",
"\"\"",
",",
"args",
",",
"text",
")",
"]",
"complete_",
".",
"__name__",
"=",
"'complete_%s'",
"%",
"command",
".",
"name",
"return",
"complete_"
] | [
86,
0
] | [
114,
20
] | python | en | ['en', 'error', 'th'] | False |
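Putting the three factories together, still with the hypothetical greet command from the get_invoke sketch: one click command supplies a shell command's handler, its 'help greet' text and its tab-completion.

# Hedged sketch; greet and ClickCmd as in the get_invoke sketch above.
class MyShell(ClickCmd):
    do_greet = get_invoke(greet)
    help_greet = get_help(greet)
    complete_greet = get_complete(greet)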
safe_join | (base, *paths) |
Join one or more path components to the base path component intelligently.
Return a normalized, absolute version of the final path.
Raise ValueError if the final path isn't located inside of the base path
component.
|
Join one or more path components to the base path component intelligently.
Return a normalized, absolute version of the final path. | def safe_join(base, *paths):
"""
Join one or more path components to the base path component intelligently.
Return a normalized, absolute version of the final path.
Raise ValueError if the final path isn't located inside of the base path
component.
"""
final_path = abspath(join(base, *paths))
base_path = abspath(base)
# Ensure final_path starts with base_path (using normcase to ensure we
# don't false-negative on case insensitive operating systems like Windows),
# further, one of the following conditions must be true:
# a) The next character is the path separator (to prevent conditions like
# safe_join("/dir", "/../d"))
# b) The final path must be the same as the base path.
# c) The base path must be the most root path (meaning either "/" or "C:\\")
if (not normcase(final_path).startswith(normcase(base_path + sep)) and
normcase(final_path) != normcase(base_path) and
dirname(normcase(base_path)) != normcase(base_path)):
raise SuspiciousFileOperation(
'The joined path ({}) is located outside of the base path '
'component ({})'.format(final_path, base_path))
return final_path | [
"def",
"safe_join",
"(",
"base",
",",
"*",
"paths",
")",
":",
"final_path",
"=",
"abspath",
"(",
"join",
"(",
"base",
",",
"*",
"paths",
")",
")",
"base_path",
"=",
"abspath",
"(",
"base",
")",
"# Ensure final_path starts with base_path (using normcase to ensure we",
"# don't false-negative on case insensitive operating systems like Windows),",
"# further, one of the following conditions must be true:",
"# a) The next character is the path separator (to prevent conditions like",
"# safe_join(\"/dir\", \"/../d\"))",
"# b) The final path must be the same as the base path.",
"# c) The base path must be the most root path (meaning either \"/\" or \"C:\\\\\")",
"if",
"(",
"not",
"normcase",
"(",
"final_path",
")",
".",
"startswith",
"(",
"normcase",
"(",
"base_path",
"+",
"sep",
")",
")",
"and",
"normcase",
"(",
"final_path",
")",
"!=",
"normcase",
"(",
"base_path",
")",
"and",
"dirname",
"(",
"normcase",
"(",
"base_path",
")",
")",
"!=",
"normcase",
"(",
"base_path",
")",
")",
":",
"raise",
"SuspiciousFileOperation",
"(",
"'The joined path ({}) is located outside of the base path '",
"'component ({})'",
".",
"format",
"(",
"final_path",
",",
"base_path",
")",
")",
"return",
"final_path"
] | [
8,
0
] | [
31,
21
] | python | en | ['en', 'error', 'th'] | False |
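A sketch of safe_join's contract, assuming the usual Django import location django.utils._os; the paths are placeholders.

# Hedged sketch: the second call illustrates the traversal guard.
from django.utils._os import safe_join

safe_join('/var/www', 'static', 'css/site.css')
# -> '/var/www/static/css/site.css'
safe_join('/var/www', '../etc/passwd')
# -> raises SuspiciousFileOperation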
symlinks_supported | () |
Return whether or not creating symlinks is supported on the host platform
and/or if they are allowed to be created (e.g. on Windows it requires admin
permissions).
|
Return whether or not creating symlinks is supported on the host platform
and/or if they are allowed to be created (e.g. on Windows it requires admin
permissions).
| def symlinks_supported():
"""
Return whether or not creating symlinks is supported on the host platform
and/or if they are allowed to be created (e.g. on Windows it requires admin
permissions).
"""
with tempfile.TemporaryDirectory() as temp_dir:
original_path = os.path.join(temp_dir, 'original')
symlink_path = os.path.join(temp_dir, 'symlink')
os.makedirs(original_path)
try:
os.symlink(original_path, symlink_path)
supported = True
except (OSError, NotImplementedError):
supported = False
return supported | [
"def",
"symlinks_supported",
"(",
")",
":",
"with",
"tempfile",
".",
"TemporaryDirectory",
"(",
")",
"as",
"temp_dir",
":",
"original_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"temp_dir",
",",
"'original'",
")",
"symlink_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"temp_dir",
",",
"'symlink'",
")",
"os",
".",
"makedirs",
"(",
"original_path",
")",
"try",
":",
"os",
".",
"symlink",
"(",
"original_path",
",",
"symlink_path",
")",
"supported",
"=",
"True",
"except",
"(",
"OSError",
",",
"NotImplementedError",
")",
":",
"supported",
"=",
"False",
"return",
"supported"
] | [
34,
0
] | [
49,
24
] | python | en | ['en', 'error', 'th'] | False |
to_path | (value) | Convert value to a pathlib.Path instance, if not already a Path. | Convert value to a pathlib.Path instance, if not already a Path. | def to_path(value):
"""Convert value to a pathlib.Path instance, if not already a Path."""
if isinstance(value, Path):
return value
elif not isinstance(value, str):
raise TypeError('Invalid path type: %s' % type(value).__name__)
return Path(value) | [
"def",
"to_path",
"(",
"value",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"Path",
")",
":",
"return",
"value",
"elif",
"not",
"isinstance",
"(",
"value",
",",
"str",
")",
":",
"raise",
"TypeError",
"(",
"'Invalid path type: %s'",
"%",
"type",
"(",
"value",
")",
".",
"__name__",
")",
"return",
"Path",
"(",
"value",
")"
] | [
52,
0
] | [
58,
22
] | python | en | ['en', 'en', 'en'] | True |
MiddlewareMixin._async_check | (self) |
If get_response is a coroutine function, turns us into async mode so
a thread is not consumed during a whole request.
|
If get_response is a coroutine function, turns us into async mode so
a thread is not consumed during a whole request.
| def _async_check(self):
"""
If get_response is a coroutine function, turns us into async mode so
a thread is not consumed during a whole request.
"""
if asyncio.iscoroutinefunction(self.get_response):
# Mark the class as async-capable, but do the actual switch
# inside __call__ to avoid swapping out dunder methods
self._is_coroutine = asyncio.coroutines._is_coroutine | [
"def",
"_async_check",
"(",
"self",
")",
":",
"if",
"asyncio",
".",
"iscoroutinefunction",
"(",
"self",
".",
"get_response",
")",
":",
"# Mark the class as async-capable, but do the actual switch",
"# inside __call__ to avoid swapping out dunder methods",
"self",
".",
"_is_coroutine",
"=",
"asyncio",
".",
"coroutines",
".",
"_is_coroutine"
] | [
99,
4
] | [
107,
65
] | python | en | ['en', 'error', 'th'] | False |
MiddlewareMixin.__acall__ | (self, request) |
Async version of __call__ that is swapped in when an async request
is running.
|
Async version of __call__ that is swapped in when an async request
is running.
| async def __acall__(self, request):
"""
Async version of __call__ that is swapped in when an async request
is running.
"""
response = None
if hasattr(self, 'process_request'):
response = await sync_to_async(
self.process_request,
thread_sensitive=True,
)(request)
response = response or await self.get_response(request)
if hasattr(self, 'process_response'):
response = await sync_to_async(
self.process_response,
thread_sensitive=True,
)(request, response)
return response | [
"async",
"def",
"__acall__",
"(",
"self",
",",
"request",
")",
":",
"response",
"=",
"None",
"if",
"hasattr",
"(",
"self",
",",
"'process_request'",
")",
":",
"response",
"=",
"await",
"sync_to_async",
"(",
"self",
".",
"process_request",
",",
"thread_sensitive",
"=",
"True",
",",
")",
"(",
"request",
")",
"response",
"=",
"response",
"or",
"await",
"self",
".",
"get_response",
"(",
"request",
")",
"if",
"hasattr",
"(",
"self",
",",
"'process_response'",
")",
":",
"response",
"=",
"await",
"sync_to_async",
"(",
"self",
".",
"process_response",
",",
"thread_sensitive",
"=",
"True",
",",
")",
"(",
"request",
",",
"response",
")",
"return",
"response"
] | [
121,
4
] | [
138,
23
] | python | en | ['en', 'error', 'th'] | False |
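A sketch of the kind of synchronous middleware this mixin adapts: both hooks below are plain functions, and __acall__ above wraps each in sync_to_async on async request paths. The middleware name and response header are illustrative.

# Hedged sketch: sync hooks adapted transparently for async requests.
import time
from django.utils.deprecation import MiddlewareMixin

class TimingMiddleware(MiddlewareMixin):
    def process_request(self, request):
        request._started = time.monotonic()

    def process_response(self, request, response):
        elapsed = time.monotonic() - request._started
        response['X-Elapsed'] = '%.3fs' % elapsed
        return response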