Columns (one sample per row below): `text` (string, lengths 89 to 104k), `code_tokens` (sequence of token strings), `avg_line_len` (float64, 7.91 to 980), `score` (float64, 0 to 630).
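The two numeric columns look like they are derived from `text`; the tokenizer and scoring function behind `code_tokens` and `score` are not shown here, so the helpers below (`rough_code_tokens`, `average_line_length`) are assumptions meant only to illustrate the schema, not the dataset's actual pipeline.

```python
import io
import tokenize


def rough_code_tokens(source: str) -> list[str]:
    """Approximate the code_tokens column with the standard-library tokenizer."""
    tokens = []
    for tok in tokenize.generate_tokens(io.StringIO(source).readline):
        # Skip purely structural tokens so only visible lexemes remain.
        if tok.type in (tokenize.NEWLINE, tokenize.NL, tokenize.INDENT,
                        tokenize.DEDENT, tokenize.ENDMARKER):
            continue
        tokens.append(tok.string)
    return tokens


def average_line_length(source: str) -> float:
    """Mean characters per line, the likely meaning of avg_line_len."""
    lines = source.splitlines()
    return sum(len(line) for line in lines) / len(lines) if lines else 0.0


sample = "def gelu(x):\n    return x\n"
print(rough_code_tokens(sample))    # ['def', 'gelu', '(', 'x', ')', ':', 'return', 'x']
print(average_line_length(sample))  # 12.0
```

Each sample below then reads as: the function source, its token sequence, and the two numbers, separated by ` | `.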
def draw_dithered_color(cb, x, y, palette, dither, n, n_max, crosshairs_coord=None):
"""
Draws a dithered color block on the terminal, given a palette.
:type cb: cursebox.CurseBox
"""
i = n * (len(palette) - 1) / n_max
c1 = palette[int(math.floor(i))]
c2 = palette[int(math.ceil(i))]
value = i - int(math.floor(i))
symbol = dither_symbol(value, dither)
if crosshairs_coord is not None:
old_symbol = symbol
symbol, crosshairs = get_crosshairs_symbol(x, y, old_symbol, crosshairs_coord)
if crosshairs:
sorted_palette = sort_palette(palette)
if old_symbol == DITHER_TYPES[dither][1][0]:
c2 = c1
sorted_index = sorted_palette.index(c2)
if sorted_index > len(sorted_palette) // 2:
c1 = sorted_palette[0]
else:
c1 = sorted_palette[-1]
cb.put(x, y, symbol, c1(), c2()) | [
"def",
"draw_dithered_color",
"(",
"cb",
",",
"x",
",",
"y",
",",
"palette",
",",
"dither",
",",
"n",
",",
"n_max",
",",
"crosshairs_coord",
"=",
"None",
")",
":",
"i",
"=",
"n",
"*",
"(",
"len",
"(",
"palette",
")",
"-",
"1",
")",
"/",
"n_max",
"c1",
"=",
"palette",
"[",
"int",
"(",
"math",
".",
"floor",
"(",
"i",
")",
")",
"]",
"c2",
"=",
"palette",
"[",
"int",
"(",
"math",
".",
"ceil",
"(",
"i",
")",
")",
"]",
"value",
"=",
"i",
"-",
"int",
"(",
"math",
".",
"floor",
"(",
"i",
")",
")",
"symbol",
"=",
"dither_symbol",
"(",
"value",
",",
"dither",
")",
"if",
"crosshairs_coord",
"is",
"not",
"None",
":",
"old_symbol",
"=",
"symbol",
"symbol",
",",
"crosshairs",
"=",
"get_crosshairs_symbol",
"(",
"x",
",",
"y",
",",
"old_symbol",
",",
"crosshairs_coord",
")",
"if",
"crosshairs",
":",
"sorted_palette",
"=",
"sort_palette",
"(",
"palette",
")",
"if",
"old_symbol",
"==",
"DITHER_TYPES",
"[",
"dither",
"]",
"[",
"1",
"]",
"[",
"0",
"]",
":",
"c2",
"=",
"c1",
"sorted_index",
"=",
"sorted_palette",
".",
"index",
"(",
"c2",
")",
"if",
"sorted_index",
">",
"len",
"(",
"sorted_palette",
")",
"//",
"2",
":",
"c1",
"=",
"sorted_palette",
"[",
"0",
"]",
"else",
":",
"c1",
"=",
"sorted_palette",
"[",
"-",
"1",
"]",
"cb",
".",
"put",
"(",
"x",
",",
"y",
",",
"symbol",
",",
"c1",
"(",
")",
",",
"c2",
"(",
")",
")"
] | 35.192308 | 15.269231 |
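In the `draw_dithered_color` sample above, the continuous index `i = n * (len(palette) - 1) / n_max` selects two neighbouring palette entries via `floor`/`ceil`, and the fractional remainder picks a dither glyph between them. A minimal self-contained sketch of that interpolation step (the character ramp and the `blend_symbol` name are illustrative, not part of the original code, which delegates to `dither_symbol` and a `cursebox` drawing call):

```python
import math

# Illustrative ramp from "all first color" to "all second color".
DITHER_RAMP = " ░▒▓█"


def blend_symbol(n, n_max, palette):
    """Return (glyph, c1, c2) approximating the value n/n_max over the palette."""
    i = n * (len(palette) - 1) / n_max
    c1, c2 = palette[math.floor(i)], palette[math.ceil(i)]
    fraction = i - math.floor(i)          # 0.0 means pure c1, near 1.0 means mostly c2
    symbol = DITHER_RAMP[int(fraction * (len(DITHER_RAMP) - 1))]
    return symbol, c1, c2


print(blend_symbol(5, 10, ["black", "red", "yellow", "white"]))  # ('▒', 'red', 'yellow')
```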
def gelu(x):
"""Gaussian Error Linear Unit.
This is a smoother version of the RELU.
Original paper: https://arxiv.org/abs/1606.08415
Args:
x: float Tensor to perform activation.
Returns:
x with the GELU activation applied.
"""
cdf = 0.5 * (1.0 + tf.tanh(
(np.sqrt(2 / np.pi) * (x + 0.044715 * tf.pow(x, 3)))))
return x * cdf | [
"def",
"gelu",
"(",
"x",
")",
":",
"cdf",
"=",
"0.5",
"*",
"(",
"1.0",
"+",
"tf",
".",
"tanh",
"(",
"(",
"np",
".",
"sqrt",
"(",
"2",
"/",
"np",
".",
"pi",
")",
"*",
"(",
"x",
"+",
"0.044715",
"*",
"tf",
".",
"pow",
"(",
"x",
",",
"3",
")",
")",
")",
")",
")",
"return",
"x",
"*",
"cdf"
] | 22.866667 | 18.666667 |
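The `gelu` sample implements the tanh approximation 0.5 * x * (1 + tanh(sqrt(2 / pi) * (x + 0.044715 * x**3))). A quick way to sanity-check that formula, assuming NumPy and SciPy are available, is to compare it against the exact erf-based definition, x times the standard normal CDF:

```python
import numpy as np
from scipy.special import erf


def gelu_tanh(x):
    """Tanh approximation from the sample above, written with NumPy instead of TF."""
    return 0.5 * x * (1.0 + np.tanh(np.sqrt(2 / np.pi) * (x + 0.044715 * np.power(x, 3))))


def gelu_exact(x):
    """Exact GELU: x times the standard normal CDF."""
    return 0.5 * x * (1.0 + erf(x / np.sqrt(2.0)))


x = np.linspace(-4.0, 4.0, 81)
print(np.max(np.abs(gelu_tanh(x) - gelu_exact(x))))  # a small gap, roughly 1e-3 or less
```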
def makeCustomSpecificationsMapping(func):
'''
Creates the "pickleable" dictionary that will be used with
:data:`~exhale.configs.customSpecificationsMapping` supplied to ``exhale_args`` in
your ``conf.py``.
**Parameters**
``func`` (types.FunctionType)
A callable function that takes as input a string from
:data:`~exhale.utils.AVAILABLE_KINDS` and returns a ``list`` of strings.
The empty list ``[]`` indicates to use the Breathe defaults.
**Return**
``dict``
A dictionary where the keys are every value in
:data:`~exhale.utils.AVAILABLE_KINDS`, and the values are the ``list``
returns of the input ``func``.
.. note::
To help ensure the dictionary has everything it needs for the rest of Exhale to
function, a "secret" key-value pair is inserted to the returned dictionary.
'''
# Make sure they gave us a function
if not isinstance(func, types.FunctionType):
raise ValueError(
"The input to exhale.util.makeCustomSpecificationsMapping was *NOT* a function: {0}".format(
type(func)
)
)
# Stamp the return to ensure exhale created this function.
ret = {configs._closure_map_sanity_check: configs._closure_map_sanity_check}
try:
# Because we cannot pickle a fully-fledged function object, we are going to go
# through every kind and store its return value.
for kind in AVAILABLE_KINDS:
specs = func(kind)
bad = type(specs) is not list
for s in specs:
if not isinstance(s, six.string_types):
bad = True
break
if bad:
raise RuntimeError(textwrap.dedent('''
The specifications function did not return a valid list for input
`{kind}`
1. Make sure that every entry in the returned list is a string.
2. If you want to use the breathe defaults, you must return the
empty list `[]`.
'''.format(kind=kind)))
ret[kind] = specs
except Exception as e:
raise RuntimeError("Unable to create custom specifications:\n{0}".format(e))
# Everything went according to plan, send it back to `conf.py` :)
return ret | [
"def",
"makeCustomSpecificationsMapping",
"(",
"func",
")",
":",
"# Make sure they gave us a function",
"if",
"not",
"isinstance",
"(",
"func",
",",
"types",
".",
"FunctionType",
")",
":",
"raise",
"ValueError",
"(",
"\"The input to exhale.util.makeCustomSpecificationsMapping was *NOT* a function: {0}\"",
".",
"format",
"(",
"type",
"(",
"func",
")",
")",
")",
"# Stamp the return to ensure exhale created this function.",
"ret",
"=",
"{",
"configs",
".",
"_closure_map_sanity_check",
":",
"configs",
".",
"_closure_map_sanity_check",
"}",
"try",
":",
"# Because we cannot pickle a fully-fledged function object, we are going to go",
"# through every kind and store its return value.",
"for",
"kind",
"in",
"AVAILABLE_KINDS",
":",
"specs",
"=",
"func",
"(",
"kind",
")",
"bad",
"=",
"type",
"(",
"specs",
")",
"is",
"not",
"list",
"for",
"s",
"in",
"specs",
":",
"if",
"not",
"isinstance",
"(",
"s",
",",
"six",
".",
"string_types",
")",
":",
"bad",
"=",
"True",
"break",
"if",
"bad",
":",
"raise",
"RuntimeError",
"(",
"textwrap",
".",
"dedent",
"(",
"'''\n The specifications function did not return a valid list for input\n\n `{kind}`\n\n 1. Make sure that every entry in the returned list is a string.\n 2. If you want to use the breathe defaults, you must return the\n empty list `[]`.\n '''",
".",
"format",
"(",
"kind",
"=",
"kind",
")",
")",
")",
"ret",
"[",
"kind",
"]",
"=",
"specs",
"except",
"Exception",
"as",
"e",
":",
"raise",
"RuntimeError",
"(",
"\"Unable to create custom specifications:\\n{0}\"",
".",
"format",
"(",
"e",
")",
")",
"# Everything went according to plan, send it back to `conf.py` :)",
"return",
"ret"
] | 39.2 | 25.9 |
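The docstring in the `makeCustomSpecificationsMapping` sample expects a plain function that maps each kind string to a list of Breathe specification strings, with `[]` meaning "use the Breathe defaults". A hypothetical example of such a function and how it might be wired into `conf.py` (the kind names and the `:members:` / `:undoc-members:` options are assumptions; consult the Exhale and Breathe documentation for the exact values your project needs):

```python
def custom_specs(kind):
    """Return extra Breathe specifications for a documented kind; [] keeps defaults."""
    if kind in ("class", "struct"):
        return [":members:", ":protected-members:", ":undoc-members:"]
    return []


# Hedged sketch of the usage the docstring describes, inside conf.py:
#
#   from exhale import utils
#   exhale_args = {
#       # ... other required arguments ...
#       "customSpecificationsMapping": utils.makeCustomSpecificationsMapping(custom_specs),
#   }
```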
def split_unbalanced(chunks):
"""Return (unbalanced_start, balanced, unbalanced_end), where each is
a list of text and tag chunks.
unbalanced_start is a list of all the tags that are opened, but
not closed in this span. Similarly, unbalanced_end is a list of
tags that are closed but were not opened. Extracting these might
mean some reordering of the chunks."""
start = []
end = []
tag_stack = []
balanced = []
for chunk in chunks:
if not chunk.startswith('<'):
balanced.append(chunk)
continue
endtag = chunk[1] == '/'
name = chunk.split()[0].strip('<>/')
if name in empty_tags:
balanced.append(chunk)
continue
if endtag:
if tag_stack and tag_stack[-1][0] == name:
balanced.append(chunk)
name, pos, tag = tag_stack.pop()
balanced[pos] = tag
elif tag_stack:
start.extend([tag for name, pos, tag in tag_stack])
tag_stack = []
end.append(chunk)
else:
end.append(chunk)
else:
tag_stack.append((name, len(balanced), chunk))
balanced.append(None)
start.extend(
[chunk for name, pos, chunk in tag_stack])
balanced = [chunk for chunk in balanced if chunk is not None]
return start, balanced, end | [
"def",
"split_unbalanced",
"(",
"chunks",
")",
":",
"start",
"=",
"[",
"]",
"end",
"=",
"[",
"]",
"tag_stack",
"=",
"[",
"]",
"balanced",
"=",
"[",
"]",
"for",
"chunk",
"in",
"chunks",
":",
"if",
"not",
"chunk",
".",
"startswith",
"(",
"'<'",
")",
":",
"balanced",
".",
"append",
"(",
"chunk",
")",
"continue",
"endtag",
"=",
"chunk",
"[",
"1",
"]",
"==",
"'/'",
"name",
"=",
"chunk",
".",
"split",
"(",
")",
"[",
"0",
"]",
".",
"strip",
"(",
"'<>/'",
")",
"if",
"name",
"in",
"empty_tags",
":",
"balanced",
".",
"append",
"(",
"chunk",
")",
"continue",
"if",
"endtag",
":",
"if",
"tag_stack",
"and",
"tag_stack",
"[",
"-",
"1",
"]",
"[",
"0",
"]",
"==",
"name",
":",
"balanced",
".",
"append",
"(",
"chunk",
")",
"name",
",",
"pos",
",",
"tag",
"=",
"tag_stack",
".",
"pop",
"(",
")",
"balanced",
"[",
"pos",
"]",
"=",
"tag",
"elif",
"tag_stack",
":",
"start",
".",
"extend",
"(",
"[",
"tag",
"for",
"name",
",",
"pos",
",",
"tag",
"in",
"tag_stack",
"]",
")",
"tag_stack",
"=",
"[",
"]",
"end",
".",
"append",
"(",
"chunk",
")",
"else",
":",
"end",
".",
"append",
"(",
"chunk",
")",
"else",
":",
"tag_stack",
".",
"append",
"(",
"(",
"name",
",",
"len",
"(",
"balanced",
")",
",",
"chunk",
")",
")",
"balanced",
".",
"append",
"(",
"None",
")",
"start",
".",
"extend",
"(",
"[",
"chunk",
"for",
"name",
",",
"pos",
",",
"chunk",
"in",
"tag_stack",
"]",
")",
"balanced",
"=",
"[",
"chunk",
"for",
"chunk",
"in",
"balanced",
"if",
"chunk",
"is",
"not",
"None",
"]",
"return",
"start",
",",
"balanced",
",",
"end"
] | 35.512821 | 15.128205 |
def watch(project_path, output_path, host='0.0.0.0', port=8000, min_reload_time=2.0,
open_browser=True, safe_mode=False, error_context=None):
"""Watches the given project path for filesystem changes, and automatically rebuilds the project when
changes are detected. Also serves an HTTP server on the given host/port.
Args:
project_path: The path to the Statik project to be watched.
output_path: The path into which to write the output files.
host: The host IP/hostname to which to bind when serving output files.
port: The port to which to bind when serving output files.
min_reload_time: The minimum time (in seconds) between reloads when files change.
open_browser: Whether or not to automatically open the web browser at the served URL.
safe_mode: Whether or not to run Statik in safe mode.
error_context: An optional StatikErrorContext instance for detailed error reporting.
"""
error_context = error_context or StatikErrorContext()
project = StatikProject(project_path, safe_mode=safe_mode, error_context=error_context)
project.generate(output_path=output_path, in_memory=False)
watch_folders = [
StatikProject.MODELS_DIR,
StatikProject.DATA_DIR,
StatikProject.VIEWS_DIR,
StatikProject.TEMPLATES_DIR,
project.config.assets_src_path
]
# let the template tags folder be optional
template_tags_folder = os.path.join(project.path, StatikProject.TEMPLATETAGS_DIR)
if os.path.exists(template_tags_folder) and os.path.isdir(template_tags_folder):
watch_folders.append(StatikProject.TEMPLATETAGS_DIR)
# if theming is enabled, watch the specific theme's folder for changes
if project.config.theme is not None:
watch_folders.append(os.path.join(StatikProject.THEMES_DIR, project.config.theme))
watch_folders = [f if os.path.isabs(f) else os.path.join(project.path, f) for f in watch_folders]
for folder in watch_folders:
if not os.path.exists(folder) or not os.path.isdir(folder):
raise MissingProjectFolderError(folder)
httpwatcher.watch(
output_path,
watch_paths=watch_folders,
on_reload=lambda: safe_wrap_project_generate(
project,
output_path
),
host=host,
port=port,
server_base_path=project.config.base_path,
watcher_interval=min_reload_time,
recursive=True,
open_browser=open_browser
) | [
"def",
"watch",
"(",
"project_path",
",",
"output_path",
",",
"host",
"=",
"'0.0.0.0'",
",",
"port",
"=",
"8000",
",",
"min_reload_time",
"=",
"2.0",
",",
"open_browser",
"=",
"True",
",",
"safe_mode",
"=",
"False",
",",
"error_context",
"=",
"None",
")",
":",
"error_context",
"=",
"error_context",
"or",
"StatikErrorContext",
"(",
")",
"project",
"=",
"StatikProject",
"(",
"project_path",
",",
"safe_mode",
"=",
"safe_mode",
",",
"error_context",
"=",
"error_context",
")",
"project",
".",
"generate",
"(",
"output_path",
"=",
"output_path",
",",
"in_memory",
"=",
"False",
")",
"watch_folders",
"=",
"[",
"StatikProject",
".",
"MODELS_DIR",
",",
"StatikProject",
".",
"DATA_DIR",
",",
"StatikProject",
".",
"VIEWS_DIR",
",",
"StatikProject",
".",
"TEMPLATES_DIR",
",",
"project",
".",
"config",
".",
"assets_src_path",
"]",
"# let the template tags folder be optional",
"template_tags_folder",
"=",
"os",
".",
"path",
".",
"join",
"(",
"project",
".",
"path",
",",
"StatikProject",
".",
"TEMPLATETAGS_DIR",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"template_tags_folder",
")",
"and",
"os",
".",
"path",
".",
"isdir",
"(",
"template_tags_folder",
")",
":",
"watch_folders",
".",
"append",
"(",
"StatikProject",
".",
"TEMPLATETAGS_DIR",
")",
"# if theming is enabled, watch the specific theme's folder for changes",
"if",
"project",
".",
"config",
".",
"theme",
"is",
"not",
"None",
":",
"watch_folders",
".",
"append",
"(",
"os",
".",
"path",
".",
"join",
"(",
"StatikProject",
".",
"THEMES_DIR",
",",
"project",
".",
"config",
".",
"theme",
")",
")",
"watch_folders",
"=",
"[",
"f",
"if",
"os",
".",
"path",
".",
"isabs",
"(",
"f",
")",
"else",
"os",
".",
"path",
".",
"join",
"(",
"project",
".",
"path",
",",
"f",
")",
"for",
"f",
"in",
"watch_folders",
"]",
"for",
"folder",
"in",
"watch_folders",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"folder",
")",
"or",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"folder",
")",
":",
"raise",
"MissingProjectFolderError",
"(",
"folder",
")",
"httpwatcher",
".",
"watch",
"(",
"output_path",
",",
"watch_paths",
"=",
"watch_folders",
",",
"on_reload",
"=",
"lambda",
":",
"safe_wrap_project_generate",
"(",
"project",
",",
"output_path",
")",
",",
"host",
"=",
"host",
",",
"port",
"=",
"port",
",",
"server_base_path",
"=",
"project",
".",
"config",
".",
"base_path",
",",
"watcher_interval",
"=",
"min_reload_time",
",",
"recursive",
"=",
"True",
",",
"open_browser",
"=",
"open_browser",
")"
] | 44.836364 | 25.236364 |
def check_status(content, response):
"""
Check the response that is returned for known exceptions and errors.
:param response: Response that is returned from the call.
:raise:
MalformedRequestException if `response.status` is 400
UnauthorisedException if `response.status` is 401
NotFoundException if `response.status` is 404
UnacceptableContentException if `response.status` is 406
InvalidRequestException if `response.status` is 422
RateLimitException if `response.status` is 429
ServerException if `response.status` > 500
"""
if response.status == 400:
raise MalformedRequestException(content, response)
if response.status == 401:
raise UnauthorisedException(content, response)
if response.status == 404:
raise NotFoundException(content, response)
if response.status == 406:
raise UnacceptableContentException(content, response)
if response.status == 422:
raise InvalidRequestException(content, response)
if response.status == 429:
raise RateLimitException(content, response)
if response.status >= 500:
raise ServerException(content, response) | [
"def",
"check_status",
"(",
"content",
",",
"response",
")",
":",
"if",
"response",
".",
"status",
"==",
"400",
":",
"raise",
"MalformedRequestException",
"(",
"content",
",",
"response",
")",
"if",
"response",
".",
"status",
"==",
"401",
":",
"raise",
"UnauthorisedException",
"(",
"content",
",",
"response",
")",
"if",
"response",
".",
"status",
"==",
"404",
":",
"raise",
"NotFoundException",
"(",
"content",
",",
"response",
")",
"if",
"response",
".",
"status",
"==",
"406",
":",
"raise",
"UnacceptableContentException",
"(",
"content",
",",
"response",
")",
"if",
"response",
".",
"status",
"==",
"422",
":",
"raise",
"InvalidRequestException",
"(",
"content",
",",
"response",
")",
"if",
"response",
".",
"status",
"==",
"429",
":",
"raise",
"RateLimitException",
"(",
"content",
",",
"response",
")",
"if",
"response",
".",
"status",
">=",
"500",
":",
"raise",
"ServerException",
"(",
"content",
",",
"response",
")"
] | 37.147059 | 19.5 |
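The `check_status` sample walks a chain of `if` statements over the documented status codes. The same contract can be phrased as a table-driven dispatch, which keeps the code-to-exception mapping in one place; the sketch below is an alternative phrasing for illustration, not the library's actual code, and the stub exception classes stand in for the real ones so the snippet runs on its own:

```python
class APIError(Exception):
    """Stand-in base class so the sketch is self-contained."""


class MalformedRequestException(APIError): pass     # 400
class UnauthorisedException(APIError): pass         # 401
class NotFoundException(APIError): pass             # 404
class UnacceptableContentException(APIError): pass  # 406
class InvalidRequestException(APIError): pass       # 422
class RateLimitException(APIError): pass            # 429
class ServerException(APIError): pass               # >= 500


STATUS_EXCEPTIONS = {
    400: MalformedRequestException,
    401: UnauthorisedException,
    404: NotFoundException,
    406: UnacceptableContentException,
    422: InvalidRequestException,
    429: RateLimitException,
}


def check_status(content, response):
    """Raise the mapped exception for known codes, ServerException for any 5xx."""
    exc = STATUS_EXCEPTIONS.get(response.status)
    if exc is not None:
        raise exc(content, response)
    if response.status >= 500:
        raise ServerException(content, response)


class FakeResponse:
    status = 404


try:
    check_status("not found", FakeResponse())
except NotFoundException as err:
    print(type(err).__name__)  # NotFoundException
```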
def sortByColumn(self, index, direction):
"""
Sorts the data for this widget based on the inputed index & direction.
:param index | <int>
direction | <Qt.SortOrder
"""
if self.isPaged() and not self.isFullyLoaded():
self.reorder(index, direction)
else:
super(XOrbTreeWidget, self).sortByColumn(index, direction) | [
"def",
"sortByColumn",
"(",
"self",
",",
"index",
",",
"direction",
")",
":",
"if",
"self",
".",
"isPaged",
"(",
")",
"and",
"not",
"self",
".",
"isFullyLoaded",
"(",
")",
":",
"self",
".",
"reorder",
"(",
"index",
",",
"direction",
")",
"else",
":",
"super",
"(",
"XOrbTreeWidget",
",",
"self",
")",
".",
"sortByColumn",
"(",
"index",
",",
"direction",
")"
] | 37.909091 | 14.454545 |
def _validate_bagittxt(self):
"""
Verify that bagit.txt conforms to specification
"""
bagit_file_path = os.path.join(self.path, "bagit.txt")
with open(bagit_file_path, 'r') as bagit_file:
first_line = bagit_file.readline()
if first_line.startswith(BOM):
raise BagValidationError("bagit.txt must not contain a byte-order mark") | [
"def",
"_validate_bagittxt",
"(",
"self",
")",
":",
"bagit_file_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"path",
",",
"\"bagit.txt\"",
")",
"with",
"open",
"(",
"bagit_file_path",
",",
"'r'",
")",
"as",
"bagit_file",
":",
"first_line",
"=",
"bagit_file",
".",
"readline",
"(",
")",
"if",
"first_line",
".",
"startswith",
"(",
"BOM",
")",
":",
"raise",
"BagValidationError",
"(",
"\"bagit.txt must not contain a byte-order mark\"",
")"
] | 44.222222 | 13.111111 |
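The `_validate_bagittxt` sample rejects a `bagit.txt` whose first line starts with a byte-order mark; the `BOM` constant it compares against is presumably the UTF-8 BOM decoded to text. A tiny illustration of what that check sees (the string values here are assumptions, not taken from the bagit sources):

```python
import codecs

BOM = codecs.BOM_UTF8.decode("utf-8")   # '\ufeff' once the bytes are decoded

clean_line = "BagIt-Version: 0.97\n"
bom_line = BOM + clean_line             # what a BOM-prefixed bagit.txt would yield

print(clean_line.startswith(BOM))       # False -> file is accepted
print(bom_line.startswith(BOM))         # True  -> BagValidationError in the sample
```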
def write(self, content=None):
"""
Write report to file.
Parameters
----------
content: str
'summary', 'extended', 'powerflow'
"""
if self.system.files.no_output is True:
return
t, _ = elapsed()
if not content:
logger.warning('report content not specified.')
return
self.update(content)
system = self.system
file = system.files.output
export = all_formats.get(system.config.export, 'txt')
module = importlib.import_module('andes.formats.' + export)
dump_data = getattr(module, 'dump_data')
text = list()
header = list()
rowname = list()
data = list()
text.append(self.info)
header.append(None)
rowname.append(None)
data.append(None)
if content == 'summary' or 'extended' or 'powerflow':
text.append(['SUMMARY:\n'])
header.append(None)
rowname.append(self._basic_name)
data.append([self.basic[item] for item in self._basic])
if content == 'extended' or 'powerflow':
text.append(['EXTENDED SUMMARY:\n'])
header.append(['P (pu)', 'Q (pu)'])
rowname.append(
['Generation', 'Load', 'Shunt Inj', 'Losses', 'Line Charging'])
Pcol = [
self.extended['Pg'],
self.extended['Pl'],
self.extended['Psh'],
self.extended['Ploss'],
self.extended['Pch'],
]
Qcol = [
self.extended['Qg'],
self.extended['Ql'],
self.extended['Qsh'],
self.extended['Qloss'],
self.extended['Qch'],
]
data.append([Pcol, Qcol])
if content == 'powerflow' and system.pflow.solved:
idx, name, Vm, Va, Pg, Qg, Pl, Ql = system.get_busdata()
Va_unit = 'deg' if system.pflow.config.usedegree else 'rad'
text.append(['BUS DATA:\n'])
# todo: consider system.pflow.config.units
header.append([
'Vm(pu)', 'Va({:s})'.format(Va_unit), 'Pg (pu)', 'Qg (pu)',
'Pl (pu)', 'Ql (pu)'
])
name = [str(i) + '-' + j[:8] for i, j in zip(idx, name)]
rowname.append(name)
data.append([Vm, Va, Pg, Qg, Pl, Ql])
# Node data
if hasattr(system, 'Node') and system.Node.n:
idx, name, V = system.get_nodedata()
text.append(['NODE DATA:\n'])
header.append(['V(pu)'])
rowname.append(name)
data.append([V])
# Line data
name, fr, to, Pfr, Qfr, Pto, Qto, Ploss, Qloss = \
system.get_linedata()
text.append(['LINE DATA:\n'])
header.append([
'From Bus', 'To Bus', 'P From (pu)', 'Q From (pu)',
'P To (pu)', 'Q To(pu)', 'P Loss(pu)', 'Q Loss(pu)'
])
rowname.append(name)
data.append([fr, to, Pfr, Qfr, Pto, Qto, Ploss, Qloss])
# Additional Algebraic data
text.append(['OTHER ALGEBRAIC VARIABLES:\n'])
header.append([''])
rowname.append(
system.varname.unamey[2 * system.Bus.n:system.dae.m])
data.append([round(i, 5) for i in system.dae.y[2 * system.Bus.n:]])
# Additional State variable data
if system.dae.n:
text.append(['OTHER STATE VARIABLES:\n'])
header.append([''])
rowname.append(system.varname.unamex[:])
data.append([round(i, 5) for i in system.dae.x[:]])
dump_data(text, header, rowname, data, file)
_, s = elapsed(t)
logger.info('report written to <{:s}> in {:s}.'.format(system.files.output, s)) | [
"def",
"write",
"(",
"self",
",",
"content",
"=",
"None",
")",
":",
"if",
"self",
".",
"system",
".",
"files",
".",
"no_output",
"is",
"True",
":",
"return",
"t",
",",
"_",
"=",
"elapsed",
"(",
")",
"if",
"not",
"content",
":",
"logger",
".",
"warning",
"(",
"'report content not specified.'",
")",
"return",
"self",
".",
"update",
"(",
"content",
")",
"system",
"=",
"self",
".",
"system",
"file",
"=",
"system",
".",
"files",
".",
"output",
"export",
"=",
"all_formats",
".",
"get",
"(",
"system",
".",
"config",
".",
"export",
",",
"'txt'",
")",
"module",
"=",
"importlib",
".",
"import_module",
"(",
"'andes.formats.'",
"+",
"export",
")",
"dump_data",
"=",
"getattr",
"(",
"module",
",",
"'dump_data'",
")",
"text",
"=",
"list",
"(",
")",
"header",
"=",
"list",
"(",
")",
"rowname",
"=",
"list",
"(",
")",
"data",
"=",
"list",
"(",
")",
"text",
".",
"append",
"(",
"self",
".",
"info",
")",
"header",
".",
"append",
"(",
"None",
")",
"rowname",
".",
"append",
"(",
"None",
")",
"data",
".",
"append",
"(",
"None",
")",
"if",
"content",
"==",
"'summary'",
"or",
"'extended'",
"or",
"'powerflow'",
":",
"text",
".",
"append",
"(",
"[",
"'SUMMARY:\\n'",
"]",
")",
"header",
".",
"append",
"(",
"None",
")",
"rowname",
".",
"append",
"(",
"self",
".",
"_basic_name",
")",
"data",
".",
"append",
"(",
"[",
"self",
".",
"basic",
"[",
"item",
"]",
"for",
"item",
"in",
"self",
".",
"_basic",
"]",
")",
"if",
"content",
"==",
"'extended'",
"or",
"'powerflow'",
":",
"text",
".",
"append",
"(",
"[",
"'EXTENDED SUMMARY:\\n'",
"]",
")",
"header",
".",
"append",
"(",
"[",
"'P (pu)'",
",",
"'Q (pu)'",
"]",
")",
"rowname",
".",
"append",
"(",
"[",
"'Generation'",
",",
"'Load'",
",",
"'Shunt Inj'",
",",
"'Losses'",
",",
"'Line Charging'",
"]",
")",
"Pcol",
"=",
"[",
"self",
".",
"extended",
"[",
"'Pg'",
"]",
",",
"self",
".",
"extended",
"[",
"'Pl'",
"]",
",",
"self",
".",
"extended",
"[",
"'Psh'",
"]",
",",
"self",
".",
"extended",
"[",
"'Ploss'",
"]",
",",
"self",
".",
"extended",
"[",
"'Pch'",
"]",
",",
"]",
"Qcol",
"=",
"[",
"self",
".",
"extended",
"[",
"'Qg'",
"]",
",",
"self",
".",
"extended",
"[",
"'Ql'",
"]",
",",
"self",
".",
"extended",
"[",
"'Qsh'",
"]",
",",
"self",
".",
"extended",
"[",
"'Qloss'",
"]",
",",
"self",
".",
"extended",
"[",
"'Qch'",
"]",
",",
"]",
"data",
".",
"append",
"(",
"[",
"Pcol",
",",
"Qcol",
"]",
")",
"if",
"content",
"==",
"'powerflow'",
"and",
"system",
".",
"pflow",
".",
"solved",
":",
"idx",
",",
"name",
",",
"Vm",
",",
"Va",
",",
"Pg",
",",
"Qg",
",",
"Pl",
",",
"Ql",
"=",
"system",
".",
"get_busdata",
"(",
")",
"Va_unit",
"=",
"'deg'",
"if",
"system",
".",
"pflow",
".",
"config",
".",
"usedegree",
"else",
"'rad'",
"text",
".",
"append",
"(",
"[",
"'BUS DATA:\\n'",
"]",
")",
"# todo: consider system.pflow.config.units",
"header",
".",
"append",
"(",
"[",
"'Vm(pu)'",
",",
"'Va({:s})'",
".",
"format",
"(",
"Va_unit",
")",
",",
"'Pg (pu)'",
",",
"'Qg (pu)'",
",",
"'Pl (pu)'",
",",
"'Ql (pu)'",
"]",
")",
"name",
"=",
"[",
"str",
"(",
"i",
")",
"+",
"'-'",
"+",
"j",
"[",
":",
"8",
"]",
"for",
"i",
",",
"j",
"in",
"zip",
"(",
"idx",
",",
"name",
")",
"]",
"rowname",
".",
"append",
"(",
"name",
")",
"data",
".",
"append",
"(",
"[",
"Vm",
",",
"Va",
",",
"Pg",
",",
"Qg",
",",
"Pl",
",",
"Ql",
"]",
")",
"# Node data",
"if",
"hasattr",
"(",
"system",
",",
"'Node'",
")",
"and",
"system",
".",
"Node",
".",
"n",
":",
"idx",
",",
"name",
",",
"V",
"=",
"system",
".",
"get_nodedata",
"(",
")",
"text",
".",
"append",
"(",
"[",
"'NODE DATA:\\n'",
"]",
")",
"header",
".",
"append",
"(",
"[",
"'V(pu)'",
"]",
")",
"rowname",
".",
"append",
"(",
"name",
")",
"data",
".",
"append",
"(",
"[",
"V",
"]",
")",
"# Line data",
"name",
",",
"fr",
",",
"to",
",",
"Pfr",
",",
"Qfr",
",",
"Pto",
",",
"Qto",
",",
"Ploss",
",",
"Qloss",
"=",
"system",
".",
"get_linedata",
"(",
")",
"text",
".",
"append",
"(",
"[",
"'LINE DATA:\\n'",
"]",
")",
"header",
".",
"append",
"(",
"[",
"'From Bus'",
",",
"'To Bus'",
",",
"'P From (pu)'",
",",
"'Q From (pu)'",
",",
"'P To (pu)'",
",",
"'Q To(pu)'",
",",
"'P Loss(pu)'",
",",
"'Q Loss(pu)'",
"]",
")",
"rowname",
".",
"append",
"(",
"name",
")",
"data",
".",
"append",
"(",
"[",
"fr",
",",
"to",
",",
"Pfr",
",",
"Qfr",
",",
"Pto",
",",
"Qto",
",",
"Ploss",
",",
"Qloss",
"]",
")",
"# Additional Algebraic data",
"text",
".",
"append",
"(",
"[",
"'OTHER ALGEBRAIC VARIABLES:\\n'",
"]",
")",
"header",
".",
"append",
"(",
"[",
"''",
"]",
")",
"rowname",
".",
"append",
"(",
"system",
".",
"varname",
".",
"unamey",
"[",
"2",
"*",
"system",
".",
"Bus",
".",
"n",
":",
"system",
".",
"dae",
".",
"m",
"]",
")",
"data",
".",
"append",
"(",
"[",
"round",
"(",
"i",
",",
"5",
")",
"for",
"i",
"in",
"system",
".",
"dae",
".",
"y",
"[",
"2",
"*",
"system",
".",
"Bus",
".",
"n",
":",
"]",
"]",
")",
"# Additional State variable data",
"if",
"system",
".",
"dae",
".",
"n",
":",
"text",
".",
"append",
"(",
"[",
"'OTHER STATE VARIABLES:\\n'",
"]",
")",
"header",
".",
"append",
"(",
"[",
"''",
"]",
")",
"rowname",
".",
"append",
"(",
"system",
".",
"varname",
".",
"unamex",
"[",
":",
"]",
")",
"data",
".",
"append",
"(",
"[",
"round",
"(",
"i",
",",
"5",
")",
"for",
"i",
"in",
"system",
".",
"dae",
".",
"x",
"[",
":",
"]",
"]",
")",
"dump_data",
"(",
"text",
",",
"header",
",",
"rowname",
",",
"data",
",",
"file",
")",
"_",
",",
"s",
"=",
"elapsed",
"(",
"t",
")",
"logger",
".",
"info",
"(",
"'report written to <{:s}> in {:s}.'",
".",
"format",
"(",
"system",
".",
"files",
".",
"output",
",",
"s",
")",
")"
] | 33.344828 | 18.413793 |
def start_server(self):
"""start the selenium Remote Server."""
self.__subp = subprocess.Popen(self.command)
#print("\tselenium jar pid[%s] is running." %self.__subp.pid)
time.sleep(2) | [
"def",
"start_server",
"(",
"self",
")",
":",
"self",
".",
"__subp",
"=",
"subprocess",
".",
"Popen",
"(",
"self",
".",
"command",
")",
"#print(\"\\tselenium jar pid[%s] is running.\" %self.__subp.pid) \r",
"time",
".",
"sleep",
"(",
"2",
")"
] | 48 | 18.8 |
def _handle_paired_with(self, paired_with):
'''Handle input option paired_with'''
if paired_with is None or not paired_with:
var_name = []
var_value = []
elif isinstance(paired_with, str):
var_name = ['_' + paired_with]
if paired_with not in env.sos_dict:
raise ValueError(f'Variable {paired_with} does not exist.')
var_value = [env.sos_dict[paired_with]]
elif isinstance(paired_with, dict):
var_name = []
var_value = []
for k, v in paired_with.items():
var_name.append(k)
var_value.append(v)
elif isinstance(paired_with, Iterable):
try:
var_name = ['_' + x for x in paired_with]
except Exception:
raise ValueError(
f'Invalud value for option paired_with {paired_with}')
var_value = []
for vn in var_name:
if vn[1:] not in env.sos_dict:
raise ValueError(f'Variable {vn[1:]} does not exist.')
var_value.append(env.sos_dict[vn[1:]])
else:
raise ValueError(
f'Unacceptable value for parameter paired_with: {paired_with}')
#
for vn, vv in zip(var_name, var_value):
# set paired with values to step_input
self.paired_with(vn, vv) | [
"def",
"_handle_paired_with",
"(",
"self",
",",
"paired_with",
")",
":",
"if",
"paired_with",
"is",
"None",
"or",
"not",
"paired_with",
":",
"var_name",
"=",
"[",
"]",
"var_value",
"=",
"[",
"]",
"elif",
"isinstance",
"(",
"paired_with",
",",
"str",
")",
":",
"var_name",
"=",
"[",
"'_'",
"+",
"paired_with",
"]",
"if",
"paired_with",
"not",
"in",
"env",
".",
"sos_dict",
":",
"raise",
"ValueError",
"(",
"f'Variable {paired_with} does not exist.'",
")",
"var_value",
"=",
"[",
"env",
".",
"sos_dict",
"[",
"paired_with",
"]",
"]",
"elif",
"isinstance",
"(",
"paired_with",
",",
"dict",
")",
":",
"var_name",
"=",
"[",
"]",
"var_value",
"=",
"[",
"]",
"for",
"k",
",",
"v",
"in",
"paired_with",
".",
"items",
"(",
")",
":",
"var_name",
".",
"append",
"(",
"k",
")",
"var_value",
".",
"append",
"(",
"v",
")",
"elif",
"isinstance",
"(",
"paired_with",
",",
"Iterable",
")",
":",
"try",
":",
"var_name",
"=",
"[",
"'_'",
"+",
"x",
"for",
"x",
"in",
"paired_with",
"]",
"except",
"Exception",
":",
"raise",
"ValueError",
"(",
"f'Invalud value for option paired_with {paired_with}'",
")",
"var_value",
"=",
"[",
"]",
"for",
"vn",
"in",
"var_name",
":",
"if",
"vn",
"[",
"1",
":",
"]",
"not",
"in",
"env",
".",
"sos_dict",
":",
"raise",
"ValueError",
"(",
"f'Variable {vn[1:]} does not exist.'",
")",
"var_value",
".",
"append",
"(",
"env",
".",
"sos_dict",
"[",
"vn",
"[",
"1",
":",
"]",
"]",
")",
"else",
":",
"raise",
"ValueError",
"(",
"f'Unacceptable value for parameter paired_with: {paired_with}'",
")",
"#",
"for",
"vn",
",",
"vv",
"in",
"zip",
"(",
"var_name",
",",
"var_value",
")",
":",
"# set paired with values to step_input",
"self",
".",
"paired_with",
"(",
"vn",
",",
"vv",
")"
] | 41.264706 | 13.441176 |
async def mget(self, keys, *args):
"""
Returns a list of values ordered identically to ``keys``
"""
args = list_or_args(keys, args)
return await self.execute_command('MGET', *args) | [
"async",
"def",
"mget",
"(",
"self",
",",
"keys",
",",
"*",
"args",
")",
":",
"args",
"=",
"list_or_args",
"(",
"keys",
",",
"args",
")",
"return",
"await",
"self",
".",
"execute_command",
"(",
"'MGET'",
",",
"*",
"args",
")"
] | 35.833333 | 7.833333 |
def impute_dataframe_zero(df_impute):
"""
Replaces all ``NaNs``, ``-infs`` and ``+infs`` from the DataFrame `df_impute` with 0s.
The `df_impute` will be modified in place. All its columns will be into converted into dtype ``np.float64``.
:param df_impute: DataFrame to impute
:type df_impute: pandas.DataFrame
:return df_impute: imputed DataFrame
:rtype df_impute: pandas.DataFrame
"""
df_impute.replace([np.PINF, np.NINF], 0, inplace=True)
df_impute.fillna(0, inplace=True)
# Ensure a type of "np.float64"
df_impute.astype(np.float64, copy=False)
return df_impute | [
"def",
"impute_dataframe_zero",
"(",
"df_impute",
")",
":",
"df_impute",
".",
"replace",
"(",
"[",
"np",
".",
"PINF",
",",
"np",
".",
"NINF",
"]",
",",
"0",
",",
"inplace",
"=",
"True",
")",
"df_impute",
".",
"fillna",
"(",
"0",
",",
"inplace",
"=",
"True",
")",
"# Ensure a type of \"np.float64\"",
"df_impute",
".",
"astype",
"(",
"np",
".",
"float64",
",",
"copy",
"=",
"False",
")",
"return",
"df_impute"
] | 33.5 | 18.944444 |
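The `impute_dataframe_zero` sample replaces +/-inf and then NaN with zeros; note that its trailing `astype(np.float64, copy=False)` call discards its return value, so the dtype conversion the docstring promises only takes effect if the result is assigned. A short usage sketch of the same two replacement steps on a toy DataFrame (written with `np.inf` rather than the deprecated `np.PINF`/`np.NINF` aliases, and without `inplace` purely as a style choice):

```python
import numpy as np
import pandas as pd

df = pd.DataFrame({"a": [1.0, np.nan, np.inf], "b": [-np.inf, 2.0, np.nan]})

# Same two steps as the sample above: map infinities to 0, then fill the NaNs.
imputed = df.replace([np.inf, -np.inf], 0).fillna(0).astype(np.float64)

print(imputed)
# Roughly:
#      a    b
# 0  1.0  0.0
# 1  0.0  2.0
# 2  0.0  0.0
```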
def new(self, bytes_to_skip):
# type: (int) -> None
'''
Create a new Rock Ridge Sharing Protocol record.
Parameters:
bytes_to_skip - The number of bytes to skip.
Returns:
Nothing.
'''
if self._initialized:
raise pycdlibexception.PyCdlibInternalError('SP record already initialized!')
self.bytes_to_skip = bytes_to_skip
self._initialized = True | [
"def",
"new",
"(",
"self",
",",
"bytes_to_skip",
")",
":",
"# type: (int) -> None",
"if",
"self",
".",
"_initialized",
":",
"raise",
"pycdlibexception",
".",
"PyCdlibInternalError",
"(",
"'SP record already initialized!'",
")",
"self",
".",
"bytes_to_skip",
"=",
"bytes_to_skip",
"self",
".",
"_initialized",
"=",
"True"
] | 28.8 | 21.733333 |