id: int32 (0-252k) | repo: string (7-55) | path: string (4-127) | func_name: string (1-88) | original_string: string (75-19.8k) | language: 1 class (python) | code: string (51-19.8k) | code_tokens: sequence | docstring: string (3-17.3k) | docstring_tokens: sequence | sha: string (40) | url: string (87-242)

Each record below is laid out as `id | repo | path | func_name | language | sha | url`, followed by `original_string` (the function source including its docstring). The `code`, `code_tokens`, `docstring`, and `docstring_tokens` columns repeat that content verbatim.
300 | gtaylor/python-colormath | colormath/color_conversions.py | RGB_to_HSL | python | 1d168613718d2d7d31ec4230524e987ef66823c7 | https://github.com/gtaylor/python-colormath/blob/1d168613718d2d7d31ec4230524e987ef66823c7/colormath/color_conversions.py#L644-L670

```python
def RGB_to_HSL(cobj, *args, **kwargs):
    """
    Converts from RGB to HSL.

    H values are in degrees and are 0 to 360.
    S values are a percentage, 0.0 to 1.0.
    L values are a percentage, 0.0 to 1.0.
    """
    var_R = cobj.rgb_r
    var_G = cobj.rgb_g
    var_B = cobj.rgb_b

    var_max = max(var_R, var_G, var_B)
    var_min = min(var_R, var_G, var_B)

    var_H = __RGB_to_Hue(var_R, var_G, var_B, var_min, var_max)
    var_L = 0.5 * (var_max + var_min)

    if var_max == var_min:
        var_S = 0
    elif var_L <= 0.5:
        var_S = (var_max - var_min) / (2.0 * var_L)
    else:
        var_S = (var_max - var_min) / (2.0 - (2.0 * var_L))

    return HSLColor(var_H, var_S, var_L)
```
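The record above returns an `HSLColor` directly; in normal use the conversion runs through colormath's public `convert_color` API. A minimal usage sketch, with the expected output worked out from the formulas above (pure red has max = 1.0 and min = 0.0, so L = 0.5 and S = 1.0):

```python
from colormath.color_objects import sRGBColor, HSLColor
from colormath.color_conversions import convert_color

# Pure sRGB red: var_max = 1.0, var_min = 0.0 -> L = 0.5, S = 1.0 / (2 * 0.5) = 1.0.
hsl = convert_color(sRGBColor(1.0, 0.0, 0.0), HSLColor)
print(hsl.hsl_h, hsl.hsl_s, hsl.hsl_l)  # expected: 0.0 1.0 0.5
```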
301 | gtaylor/python-colormath | colormath/color_conversions.py | __Calc_HSL_to_RGB_Components | python | 1d168613718d2d7d31ec4230524e987ef66823c7 | https://github.com/gtaylor/python-colormath/blob/1d168613718d2d7d31ec4230524e987ef66823c7/colormath/color_conversions.py#L674-L691

```python
def __Calc_HSL_to_RGB_Components(var_q, var_p, C):
    """
    This is used in HSL_to_RGB conversions on R, G, and B.
    """
    if C < 0:
        C += 1.0
    if C > 1:
        C -= 1.0

    # Computing C of vector (Color R, Color G, Color B)
    if C < (1.0 / 6.0):
        return var_p + ((var_q - var_p) * 6.0 * C)
    elif (1.0 / 6.0) <= C < 0.5:
        return var_q
    elif 0.5 <= C < (2.0 / 3.0):
        return var_p + ((var_q - var_p) * 6.0 * ((2.0 / 3.0) - C))
    else:
        return var_p
```
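The helper's regions (rising ramp, plateau at `var_q`, falling ramp, floor at `var_p`) are easier to see in a standalone sketch with hypothetical names, independent of colormath:

```python
def hsl_channel(q, p, t):
    # Wrap t into [0, 1), then pick the region of the piecewise hue curve.
    if t < 0:
        t += 1.0
    if t > 1:
        t -= 1.0
    if t < 1.0 / 6.0:
        return p + (q - p) * 6.0 * t                 # rising edge
    if t < 0.5:
        return q                                     # plateau at q
    if t < 2.0 / 3.0:
        return p + (q - p) * 6.0 * (2.0 / 3.0 - t)   # falling edge
    return p                                         # floor at p

# For an achromatic color (S == 0), q == p == L, so every region returns L.
print(hsl_channel(0.5, 0.5, 0.75))  # 0.5
```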
302 | gtaylor/python-colormath | colormath/color_conversions.py | HSV_to_RGB | python | 1d168613718d2d7d31ec4230524e987ef66823c7 | https://github.com/gtaylor/python-colormath/blob/1d168613718d2d7d31ec4230524e987ef66823c7/colormath/color_conversions.py#L696-L750

```python
def HSV_to_RGB(cobj, target_rgb, *args, **kwargs):
    """
    HSV to RGB conversion.

    H values are in degrees and are 0 to 360.
    S values are a percentage, 0.0 to 1.0.
    V values are a percentage, 0.0 to 1.0.
    """
    H = cobj.hsv_h
    S = cobj.hsv_s
    V = cobj.hsv_v

    h_floored = int(math.floor(H))
    h_sub_i = int(h_floored / 60) % 6
    var_f = (H / 60.0) - (h_floored // 60)
    var_p = V * (1.0 - S)
    var_q = V * (1.0 - var_f * S)
    var_t = V * (1.0 - (1.0 - var_f) * S)

    if h_sub_i == 0:
        rgb_r = V
        rgb_g = var_t
        rgb_b = var_p
    elif h_sub_i == 1:
        rgb_r = var_q
        rgb_g = V
        rgb_b = var_p
    elif h_sub_i == 2:
        rgb_r = var_p
        rgb_g = V
        rgb_b = var_t
    elif h_sub_i == 3:
        rgb_r = var_p
        rgb_g = var_q
        rgb_b = V
    elif h_sub_i == 4:
        rgb_r = var_t
        rgb_g = var_p
        rgb_b = V
    elif h_sub_i == 5:
        rgb_r = V
        rgb_g = var_p
        rgb_b = var_q
    else:
        raise ValueError("Unable to convert HSV->RGB due to value error.")

    # TODO: Investigate intent of following code block.
    # In the event that they define an HSV color and want to convert it to
    # a particular RGB space, let them override it here.
    # if target_rgb is not None:
    #     rgb_type = target_rgb
    # else:
    #     rgb_type = cobj.rgb_type

    return target_rgb(rgb_r, rgb_g, rgb_b)
```
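As with `RGB_to_HSL`, the `target_rgb` argument is supplied by `convert_color` in normal use. A usage sketch, with the expected output worked out from the sextant logic above (H = 120 degrees lands in sextant `h_sub_i == 2`, where green takes the full V value):

```python
from colormath.color_objects import HSVColor, sRGBColor
from colormath.color_conversions import convert_color

# H = 120: h_sub_i = 2, var_f = 0, var_p = 0, var_t = 0 -> (R, G, B) = (0, V, 0).
rgb = convert_color(HSVColor(120.0, 1.0, 1.0), sRGBColor)
print(rgb.rgb_r, rgb.rgb_g, rgb.rgb_b)  # expected: 0.0 1.0 0.0
```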
303 | gtaylor/python-colormath | colormath/color_conversions.py | HSL_to_RGB | python | 1d168613718d2d7d31ec4230524e987ef66823c7 | https://github.com/gtaylor/python-colormath/blob/1d168613718d2d7d31ec4230524e987ef66823c7/colormath/color_conversions.py#L755-L789

```python
def HSL_to_RGB(cobj, target_rgb, *args, **kwargs):
    """
    HSL to RGB conversion.
    """
    H = cobj.hsl_h
    S = cobj.hsl_s
    L = cobj.hsl_l

    if L < 0.5:
        var_q = L * (1.0 + S)
    else:
        var_q = L + S - (L * S)

    var_p = 2.0 * L - var_q

    # H normalized to range [0,1]
    h_sub_k = (H / 360.0)

    t_sub_R = h_sub_k + (1.0 / 3.0)
    t_sub_G = h_sub_k
    t_sub_B = h_sub_k - (1.0 / 3.0)

    rgb_r = __Calc_HSL_to_RGB_Components(var_q, var_p, t_sub_R)
    rgb_g = __Calc_HSL_to_RGB_Components(var_q, var_p, t_sub_G)
    rgb_b = __Calc_HSL_to_RGB_Components(var_q, var_p, t_sub_B)

    # TODO: Investigate intent of following code block.
    # In the event that they define an HSV color and want to convert it to
    # a particular RGB space, let them override it here.
    # if target_rgb is not None:
    #     rgb_type = target_rgb
    # else:
    #     rgb_type = cobj.rgb_type

    return target_rgb(rgb_r, rgb_g, rgb_b)
```
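A hedged round-trip check: since this function inverts `RGB_to_HSL` above, converting sRGB to HSL and back should reproduce the original channels up to floating-point error:

```python
from colormath.color_objects import sRGBColor, HSLColor
from colormath.color_conversions import convert_color

start = sRGBColor(0.25, 0.50, 0.75)
back = convert_color(convert_color(start, HSLColor), sRGBColor)
print(back.rgb_r, back.rgb_g, back.rgb_b)  # expected: ~0.25 ~0.50 ~0.75
```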
304 | gtaylor/python-colormath | colormath/color_conversions.py | RGB_to_CMY | python | 1d168613718d2d7d31ec4230524e987ef66823c7 | https://github.com/gtaylor/python-colormath/blob/1d168613718d2d7d31ec4230524e987ef66823c7/colormath/color_conversions.py#L794-L804

```python
def RGB_to_CMY(cobj, *args, **kwargs):
    """
    RGB to CMY conversion.

    NOTE: CMYK and CMY values range from 0.0 to 1.0
    """
    cmy_c = 1.0 - cobj.rgb_r
    cmy_m = 1.0 - cobj.rgb_g
    cmy_y = 1.0 - cobj.rgb_b

    return CMYColor(cmy_c, cmy_m, cmy_y)
```
305 | gtaylor/python-colormath | colormath/color_conversions.py | CMY_to_RGB | python | 1d168613718d2d7d31ec4230524e987ef66823c7 | https://github.com/gtaylor/python-colormath/blob/1d168613718d2d7d31ec4230524e987ef66823c7/colormath/color_conversions.py#L809-L819

```python
def CMY_to_RGB(cobj, target_rgb, *args, **kwargs):
    """
    Converts CMY to RGB via simple subtraction.

    NOTE: Returned values are in the range of 0.0-1.0.
    """
    rgb_r = 1.0 - cobj.cmy_c
    rgb_g = 1.0 - cobj.cmy_m
    rgb_b = 1.0 - cobj.cmy_y

    return target_rgb(rgb_r, rgb_g, rgb_b)
```
306 | gtaylor/python-colormath | colormath/color_conversions.py | CMY_to_CMYK | python | 1d168613718d2d7d31ec4230524e987ef66823c7 | https://github.com/gtaylor/python-colormath/blob/1d168613718d2d7d31ec4230524e987ef66823c7/colormath/color_conversions.py#L824-L848

```python
def CMY_to_CMYK(cobj, *args, **kwargs):
    """
    Converts from CMY to CMYK.

    NOTE: CMYK and CMY values range from 0.0 to 1.0
    """
    var_k = 1.0
    if cobj.cmy_c < var_k:
        var_k = cobj.cmy_c
    if cobj.cmy_m < var_k:
        var_k = cobj.cmy_m
    if cobj.cmy_y < var_k:
        var_k = cobj.cmy_y

    if var_k == 1:
        cmyk_c = 0.0
        cmyk_m = 0.0
        cmyk_y = 0.0
    else:
        cmyk_c = (cobj.cmy_c - var_k) / (1.0 - var_k)
        cmyk_m = (cobj.cmy_m - var_k) / (1.0 - var_k)
        cmyk_y = (cobj.cmy_y - var_k) / (1.0 - var_k)
    cmyk_k = var_k

    return CMYKColor(cmyk_c, cmyk_m, cmyk_y, cmyk_k)
```
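Tracing the arithmetic for one color: sRGB (0.2, 0.4, 0.6) gives CMY (0.8, 0.6, 0.4), so K = min = 0.4 and each chromatic channel is rescaled by 1 - K = 0.6. An end-to-end sketch through `convert_color`:

```python
from colormath.color_objects import sRGBColor, CMYKColor
from colormath.color_conversions import convert_color

# CMY = (0.8, 0.6, 0.4); K = 0.4; C = (0.8-0.4)/0.6, M = (0.6-0.4)/0.6, Y = 0.
cmyk = convert_color(sRGBColor(0.2, 0.4, 0.6), CMYKColor)
print(cmyk.cmyk_c, cmyk.cmyk_m, cmyk.cmyk_y, cmyk.cmyk_k)
# expected: ~0.6667 ~0.3333 0.0 0.4
```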
307 | gtaylor/python-colormath | colormath/color_conversions.py | CMYK_to_CMY | python | 1d168613718d2d7d31ec4230524e987ef66823c7 | https://github.com/gtaylor/python-colormath/blob/1d168613718d2d7d31ec4230524e987ef66823c7/colormath/color_conversions.py#L853-L863

```python
def CMYK_to_CMY(cobj, *args, **kwargs):
    """
    Converts CMYK to CMY.

    NOTE: CMYK and CMY values range from 0.0 to 1.0
    """
    cmy_c = cobj.cmyk_c * (1.0 - cobj.cmyk_k) + cobj.cmyk_k
    cmy_m = cobj.cmyk_m * (1.0 - cobj.cmyk_k) + cobj.cmyk_k
    cmy_y = cobj.cmyk_y * (1.0 - cobj.cmyk_k) + cobj.cmyk_k

    return CMYColor(cmy_c, cmy_m, cmy_y)
```
308 | gtaylor/python-colormath | colormath/color_conversions.py | XYZ_to_IPT | python | 1d168613718d2d7d31ec4230524e987ef66823c7 | https://github.com/gtaylor/python-colormath/blob/1d168613718d2d7d31ec4230524e987ef66823c7/colormath/color_conversions.py#L868-L889

```python
def XYZ_to_IPT(cobj, *args, **kwargs):
    """
    Converts XYZ to IPT.

    NOTE: XYZ values need to be adapted to 2 degree D65

    Reference:
    Fairchild, M. D. (2013). Color appearance models, 3rd Ed. (pp. 271-272).
    John Wiley & Sons.
    """
    if cobj.illuminant != 'd65' or cobj.observer != '2':
        raise ValueError('XYZColor for XYZ->IPT conversion needs to be D65 adapted.')
    xyz_values = numpy.array(cobj.get_value_tuple())
    lms_values = numpy.dot(
        IPTColor.conversion_matrices['xyz_to_lms'],
        xyz_values)

    lms_prime = numpy.sign(lms_values) * numpy.abs(lms_values) ** 0.43

    ipt_values = numpy.dot(
        IPTColor.conversion_matrices['lms_to_ipt'],
        lms_prime)
    return IPTColor(*ipt_values)
```
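The `numpy.sign(...) * numpy.abs(...) ** 0.43` idiom applies the exponent to the magnitude while preserving sign, which matters because LMS responses can be negative and numpy yields `nan` for a fractional power of a negative base. A standalone check:

```python
import numpy

lms = numpy.array([0.5, -0.5])
# Sign-preserving power: exponentiate |x|, then restore the sign.
print(numpy.sign(lms) * numpy.abs(lms) ** 0.43)
# -> [ 0.7423... -0.7423...]

# By contrast, a plain fractional power of a negative base is nan in numpy:
print(numpy.array([-0.5]) ** 0.43)  # [nan], with a RuntimeWarning
```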
309 | gtaylor/python-colormath | colormath/color_conversions.py | IPT_to_XYZ | python | 1d168613718d2d7d31ec4230524e987ef66823c7 | https://github.com/gtaylor/python-colormath/blob/1d168613718d2d7d31ec4230524e987ef66823c7/colormath/color_conversions.py#L894-L908

```python
def IPT_to_XYZ(cobj, *args, **kwargs):
    """
    Converts IPT to XYZ.
    """
    ipt_values = numpy.array(cobj.get_value_tuple())
    lms_values = numpy.dot(
        numpy.linalg.inv(IPTColor.conversion_matrices['lms_to_ipt']),
        ipt_values)

    lms_prime = numpy.sign(lms_values) * numpy.abs(lms_values) ** (1 / 0.43)

    xyz_values = numpy.dot(
        numpy.linalg.inv(IPTColor.conversion_matrices['xyz_to_lms']),
        lms_prime)
    return XYZColor(*xyz_values, observer='2', illuminant='d65')
```
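Because this function inverts both matrices and the exponent used in `XYZ_to_IPT`, a D65/2-degree round trip should return the starting values up to floating-point error. A sketch using colormath's public API:

```python
from colormath.color_objects import XYZColor, IPTColor
from colormath.color_conversions import convert_color

xyz = XYZColor(0.25, 0.40, 0.10, observer='2', illuminant='d65')
roundtrip = convert_color(convert_color(xyz, IPTColor), XYZColor)
print(roundtrip.get_value_tuple())  # expected: ~(0.25, 0.40, 0.10)
```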
310 | gtaylor/python-colormath | colormath/color_conversions.py | convert_color | python | 1d168613718d2d7d31ec4230524e987ef66823c7 | https://github.com/gtaylor/python-colormath/blob/1d168613718d2d7d31ec4230524e987ef66823c7/colormath/color_conversions.py#L929-L1015

```python
def convert_color(color, target_cs, through_rgb_type=sRGBColor,
                  target_illuminant=None, *args, **kwargs):
    """
    Converts the color to the designated color space.

    :param color: A Color instance to convert.
    :param target_cs: The Color class to convert to. Note that this is not
        an instance, but a class.
    :keyword BaseRGBColor through_rgb_type: If during your conversion between
        your original and target color spaces you have to pass through RGB,
        this determines which kind of RGB to use. For example, XYZ->HSL.
        You probably don't need to specify this unless you have a special
        usage case.
    :type target_illuminant: None or str
    :keyword target_illuminant: If during conversion from RGB to a reflective
        color space you want to explicitly end up with a certain illuminant,
        pass this here. Otherwise the RGB space's native illuminant
        will be used.
    :returns: An instance of the type passed in as ``target_cs``.
    :raises: :py:exc:`colormath.color_exceptions.UndefinedConversionError`
        if conversion between the two color spaces isn't possible.
    """
    if isinstance(target_cs, str):
        raise ValueError("target_cs parameter must be a Color object.")
    if not issubclass(target_cs, ColorBase):
        raise ValueError("target_cs parameter must be a Color object.")

    conversions = _conversion_manager.get_conversion_path(color.__class__, target_cs)

    logger.debug('Converting %s to %s', color, target_cs)
    logger.debug(' @ Conversion path: %s', conversions)

    # Start with original color in case we convert to the same color space.
    new_color = color

    if issubclass(target_cs, BaseRGBColor):
        # If the target_cs is an RGB color space of some sort, then we
        # have to set our through_rgb_type to make sure the conversion returns
        # the expected RGB colorspace (instead of defaulting to sRGBColor).
        through_rgb_type = target_cs

    # We have to be careful to use the same RGB color space that created
    # an object (if it was created by a conversion) in order to get correct
    # results. For example, XYZ->HSL via Adobe RGB should default to Adobe
    # RGB when taking that generated HSL object back to XYZ.
    # noinspection PyProtectedMember
    if through_rgb_type != sRGBColor:
        # User overrides take priority over everything.
        # noinspection PyProtectedMember
        target_rgb = through_rgb_type
    elif color._through_rgb_type:
        # Otherwise, a value on the color object is the next best thing,
        # when available.
        # noinspection PyProtectedMember
        target_rgb = color._through_rgb_type
    else:
        # We could collapse this into a single if statement above,
        # but I think this reads better.
        target_rgb = through_rgb_type

    # Iterate through the list of functions for the conversion path, storing
    # the results in a dictionary via update(). This way the user has access
    # to all of the variables involved in the conversion.
    for func in conversions:
        # Execute the function in this conversion step and store the resulting
        # Color object.
        logger.debug(' * Conversion: %s passed to %s()',
                     new_color.__class__.__name__, func)
        logger.debug(' |-> in %s', new_color)
        if func:
            # This can be None if you try to convert a color to the color
            # space it is already in, i.e. XYZ->XYZ.
            new_color = func(
                new_color,
                target_rgb=target_rgb,
                target_illuminant=target_illuminant,
                *args, **kwargs)
        logger.debug(' |-< out %s', new_color)

    # If this conversion had something other than the default sRGB color space
    # requested, remember it on the resulting color object.
    if through_rgb_type != sRGBColor:
        new_color._through_rgb_type = through_rgb_type

    return new_color
```
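A usage sketch of the docstring's `through_rgb_type` override (`AdobeRGBColor` is one of colormath's built-in RGB spaces). Lab has no direct HSL path, so the conversion passes through the chosen RGB space:

```python
from colormath.color_objects import LabColor, HSLColor, AdobeRGBColor
from colormath.color_conversions import convert_color

lab = LabColor(60.0, 20.0, -30.0)
# through_rgb_type selects Adobe RGB for the intermediate hop instead of
# the default sRGB; the choice is remembered on the resulting color object.
hsl = convert_color(lab, HSLColor, through_rgb_type=AdobeRGBColor)
print(hsl)
```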
311 | gtaylor/python-colormath | colormath/color_appearance_models.py | Hunt.adjust_white_for_scc | python | 1d168613718d2d7d31ec4230524e987ef66823c7 | https://github.com/gtaylor/python-colormath/blob/1d168613718d2d7d31ec4230524e987ef66823c7/colormath/color_appearance_models.py#L548-L560

```python
def adjust_white_for_scc(cls, rgb_p, rgb_b, rgb_w, p):
    """
    Adjust the white point for simultaneous chromatic contrast.

    :param rgb_p: Cone signals of proximal field.
    :param rgb_b: Cone signals of background.
    :param rgb_w: Cone signals of reference white.
    :param p: Simultaneous contrast/assimilation parameter.
    :return: Adjusted cone signals for reference white.
    """
    p_rgb = rgb_p / rgb_b
    rgb_w = rgb_w * (((1 - p) * p_rgb + (1 + p) / p_rgb) ** 0.5) / (((1 + p) * p_rgb + (1 - p) / p_rgb) ** 0.5)
    return rgb_w
```
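A quick numpy sanity check of the formula: when the proximal field equals the background, `p_rgb` is all ones, the two square-root factors are identical and cancel, and the white point comes back unchanged. The values here are arbitrary:

```python
import numpy

rgb_w = numpy.array([95.05, 100.0, 108.88])
rgb_pb = numpy.array([50.0, 50.0, 50.0])  # proximal field == background
p = 0.5

p_rgb = rgb_pb / rgb_pb  # -> [1.0, 1.0, 1.0]
factor = ((((1 - p) * p_rgb + (1 + p) / p_rgb) ** 0.5)
          / (((1 + p) * p_rgb + (1 - p) / p_rgb) ** 0.5))
print(rgb_w * factor)  # unchanged: [ 95.05 100.   108.88]
```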
312 | gtaylor/python-colormath | colormath/color_appearance_models.py | CIECAM02m1._compute_adaptation | python | 1d168613718d2d7d31ec4230524e987ef66823c7 | https://github.com/gtaylor/python-colormath/blob/1d168613718d2d7d31ec4230524e987ef66823c7/colormath/color_appearance_models.py#L1245-L1280

```python
def _compute_adaptation(self, xyz, xyz_w, f_l, d):
    """
    Modified adaptation procedure incorporating simultaneous chromatic
    contrast from the Hunt model.

    :param xyz: Stimulus XYZ.
    :param xyz_w: Reference white XYZ.
    :param f_l: Luminance adaptation factor.
    :param d: Degree of adaptation.
    :return: Tuple of adapted rgb and rgb_w arrays.
    """
    # Transform input colors to cone responses
    rgb = self._xyz_to_rgb(xyz)
    logger.debug("RGB: {}".format(rgb))
    rgb_b = self._xyz_to_rgb(self._xyz_b)
    rgb_w = self._xyz_to_rgb(xyz_w)
    rgb_w = Hunt.adjust_white_for_scc(rgb, rgb_b, rgb_w, self._p)
    logger.debug("RGB_W: {}".format(rgb_w))

    # Compute adapted tristimulus-responses
    rgb_c = self._white_adaption(rgb, rgb_w, d)
    logger.debug("RGB_C: {}".format(rgb_c))
    rgb_cw = self._white_adaption(rgb_w, rgb_w, d)
    logger.debug("RGB_CW: {}".format(rgb_cw))

    # Convert adapted tristimulus-responses to Hunt-Pointer-Estevez fundamentals
    rgb_p = self._compute_hunt_pointer_estevez_fundamentals(rgb_c)
    logger.debug("RGB': {}".format(rgb_p))
    rgb_wp = self._compute_hunt_pointer_estevez_fundamentals(rgb_cw)
    logger.debug("RGB'_W: {}".format(rgb_wp))

    # Compute post-adaptation non-linearities
    rgb_ap = self._compute_nonlinearities(f_l, rgb_p)
    rgb_awp = self._compute_nonlinearities(f_l, rgb_wp)

    return rgb_ap, rgb_awp
```
313 | twilio/authy-python | authy/api/resources.py | OneTouch.validate_one_touch_signature | python | 7a0073b39a56bac495b10e4b4fca3f09982de6ed | https://github.com/twilio/authy-python/blob/7a0073b39a56bac495b10e4b4fca3f09982de6ed/authy/api/resources.py#L500-L541

```python
def validate_one_touch_signature(self, signature, nonce, method, url, params):
    """
    Function to validate signature in X-Authy-Signature key of headers.

    :param string signature: X-Authy-Signature key of headers.
    :param string nonce: X-Authy-Signature-Nonce key of headers.
    :param string method: GET or POST - configured in app settings for OneTouch.
    :param string url: base callback url.
    :param dict params: params sent by Authy.
    :return bool: True if calculated signature and X-Authy-Signature are identical else False.
    """
    if not signature or not isinstance(signature, str):
        raise AuthyFormatException(
            "Invalid signature - should not be empty. It is required")

    if not nonce:
        raise AuthyFormatException(
            "Invalid nonce - should not be empty. It is required")

    if not method or not ('get' == method.lower() or 'post' == method.lower()):
        raise AuthyFormatException(
            "Invalid method - should not be empty. It is required")

    if not params or not isinstance(params, dict):
        raise AuthyFormatException(
            "Invalid params - should not be empty. It is required")

    query_params = self.__make_http_query(params)

    # Sort and replace encoded params in case-sensitive order
    sorted_params = '&'.join(sorted(query_params.replace(
        '/', '%2F').replace('%20', '+').split('&')))
    sorted_params = re.sub("\\%5B([0-9])*\\%5D", "%5B%5D", sorted_params)
    sorted_params = re.sub("\\=None", "=", sorted_params)

    data = nonce + "|" + method + "|" + url + "|" + sorted_params
    try:
        calculated_signature = base64.b64encode(
            hmac.new(self.api_key.encode(), data.encode(), hashlib.sha256).digest())
        return calculated_signature.decode() == signature
    except:
        # Fallback for environments where the key/data are already byte
        # strings (e.g. Python 2) and .encode()/.decode() fail.
        calculated_signature = base64.b64encode(
            hmac.new(self.api_key, data, hashlib.sha256).digest())
        return calculated_signature == signature
```
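A hedged sketch of wiring this validator into a callback endpoint. The Flask plumbing and the placeholder API key are illustrative assumptions, not part of the record; the `one_touch` attribute follows the library's documented client layout:

```python
from flask import Flask, request, abort
from authy.api import AuthyApiClient

app = Flask(__name__)
authy_api = AuthyApiClient('your-authy-api-key')  # placeholder key

@app.route('/authy/callback', methods=['POST'])
def authy_callback():
    # Headers and body as delivered by Authy's OneTouch callback.
    valid = authy_api.one_touch.validate_one_touch_signature(
        request.headers.get('X-Authy-Signature'),
        request.headers.get('X-Authy-Signature-Nonce'),
        request.method,
        request.url,
        request.json,
    )
    if not valid:
        abort(400)
    return 'OK'
```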
314 | doloopwhile/pyjq | pyjq.py | compile | python | 003144e636af20e20862d4a191f05ec9ed9017b7 | https://github.com/doloopwhile/pyjq/blob/003144e636af20e20862d4a191f05ec9ed9017b7/pyjq.py#L11-L19

```python
def compile(script, vars={}, library_paths=[]):
    """
    Compile a jq script, returning a script object.

    library_paths is a list of strings that defines the module search path.
    """
    return _pyjq.Script(script.encode('utf-8'), vars=vars,
                        library_paths=library_paths)
```
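Compiling once and reusing the script object amortizes jq parsing across many inputs; the `.one()` method used here is the same one the module-level `one()` below delegates to:

```python
import pyjq

script = pyjq.compile('.users[] | .name')
# Reuse the compiled program against multiple documents.
print(script.one({"users": [{"name": "alice"}]}))  # 'alice'
print(script.one({"users": [{"name": "bob"}]}))    # 'bob'
```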
315 | doloopwhile/pyjq | pyjq.py | apply | python | 003144e636af20e20862d4a191f05ec9ed9017b7 | https://github.com/doloopwhile/pyjq/blob/003144e636af20e20862d4a191f05ec9ed9017b7/pyjq.py#L52-L56

```python
def apply(script, value=None, vars={}, url=None, opener=default_opener, library_paths=[]):
    """
    Transform value by script, returning all results as list.
    """
    return all(script, value, vars, url, opener, library_paths)
```
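A usage sketch; `apply` delegates to `all`, so it returns every result the jq program emits, as a list:

```python
import pyjq

data = {"items": [{"name": "a", "n": 1}, {"name": "b", "n": 2}]}
print(pyjq.apply('.items[] | .name', data))  # ['a', 'b']
```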
316 | doloopwhile/pyjq | pyjq.py | first | python | 003144e636af20e20862d4a191f05ec9ed9017b7 | https://github.com/doloopwhile/pyjq/blob/003144e636af20e20862d4a191f05ec9ed9017b7/pyjq.py#L61-L66

```python
def first(script, value=None, default=None, vars={}, url=None, opener=default_opener, library_paths=[]):
    """
    Transform object by jq script, returning the first result.
    Return default if result is empty.
    """
    return compile(script, vars, library_paths).first(_get_value(value, url, opener), default)
```
317 | doloopwhile/pyjq | pyjq.py | one | python | 003144e636af20e20862d4a191f05ec9ed9017b7 | https://github.com/doloopwhile/pyjq/blob/003144e636af20e20862d4a191f05ec9ed9017b7/pyjq.py#L69-L74

```python
def one(script, value=None, vars={}, url=None, opener=default_opener, library_paths=[]):
    """
    Transform object by jq script, returning the first result.
    Raise ValueError unless the results include exactly one element.
    """
    return compile(script, vars, library_paths).one(_get_value(value, url, opener))
```
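The contrast between `first` and `one` in a short sketch: `one` insists on exactly one result and raises otherwise:

```python
import pyjq

print(pyjq.one('.[0]', [1, 2, 3]))  # 1 -- the program emits exactly one result
pyjq.one('.[]', [1, 2, 3])          # raises ValueError -- three results, not one
```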
318 | ambv/flake8-mypy | flake8_mypy.py | calculate_mypypath | python | 616eeb98092edfa0affc00c6cf4f7073f4de26a6 | https://github.com/ambv/flake8-mypy/blob/616eeb98092edfa0affc00c6cf4f7073f4de26a6/flake8_mypy.py#L52-L100

```python
def calculate_mypypath() -> List[str]:
    """Return MYPYPATH so that stubs have precedence over local sources."""
    typeshed_root = None
    count = 0
    started = time.time()
    for parent in itertools.chain(
        # Look in current script's parents, useful for zipapps.
        Path(__file__).parents,
        # Look around site-packages, useful for virtualenvs.
        Path(mypy.api.__file__).parents,
        # Look in global paths, useful for globally installed.
        Path(os.__file__).parents,
    ):
        count += 1
        candidate = parent / 'lib' / 'mypy' / 'typeshed'
        if candidate.is_dir():
            typeshed_root = candidate
            break

        # Also check the non-installed path, useful for `setup.py develop`.
        candidate = parent / 'typeshed'
        if candidate.is_dir():
            typeshed_root = candidate
            break

    LOG.debug(
        'Checked %d paths in %.2fs looking for typeshed. Found %s',
        count,
        time.time() - started,
        typeshed_root,
    )

    if not typeshed_root:
        return []

    stdlib_dirs = ('3.7', '3.6', '3.5', '3.4', '3.3', '3.2', '3', '2and3')
    stdlib_stubs = [
        typeshed_root / 'stdlib' / stdlib_dir
        for stdlib_dir in stdlib_dirs
    ]
    third_party_dirs = ('3.7', '3.6', '3', '2and3')
    third_party_stubs = [
        typeshed_root / 'third_party' / tp_dir
        for tp_dir in third_party_dirs
    ]
    return [
        str(p) for p in stdlib_stubs + third_party_stubs
    ]
```
319 | LPgenerator/django-db-mailer | dbmail/providers/sendinblue/mail.py | email_list_to_email_dict | python | 217a73c21ba5c6b68738f74b2c55a6dd2c1afe35 | https://github.com/LPgenerator/django-db-mailer/blob/217a73c21ba5c6b68738f74b2c55a6dd2c1afe35/dbmail/providers/sendinblue/mail.py#L15-L23

```python
def email_list_to_email_dict(email_list):
    """Convert a list of email to a dict of email."""
    if email_list is None:
        return {}
    result = {}
    for value in email_list:
        realname, address = email.utils.parseaddr(value)
        result[address] = realname if realname and address else address
    return result
```
"""Convert an email address to a list."""
realname, address = email.utils.parseaddr(email_address)
return (
[address, realname] if realname and address else
[email_address, email_address]
) | python | def email_address_to_list(email_address):
realname, address = email.utils.parseaddr(email_address)
return (
[address, realname] if realname and address else
[email_address, email_address]
) | [
"def",
"email_address_to_list",
"(",
"email_address",
")",
":",
"realname",
",",
"address",
"=",
"email",
".",
"utils",
".",
"parseaddr",
"(",
"email_address",
")",
"return",
"(",
"[",
"address",
",",
"realname",
"]",
"if",
"realname",
"and",
"address",
"else",
"[",
"email_address",
",",
"email_address",
"]",
")"
] | Convert an email address to a list. | [
"Convert",
"an",
"email",
"address",
"to",
"a",
"list",
"."
] | 217a73c21ba5c6b68738f74b2c55a6dd2c1afe35 | https://github.com/LPgenerator/django-db-mailer/blob/217a73c21ba5c6b68738f74b2c55a6dd2c1afe35/dbmail/providers/sendinblue/mail.py#L26-L32 |
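Companion sketch for email_address_to_list above, under the same import assumption; SendInBlue expects an [address, realname] pair, and a bare address is duplicated into both slots.

from dbmail.providers.sendinblue.mail import email_address_to_list

assert email_address_to_list('Alice <alice@example.com>') == ['alice@example.com', 'Alice']
assert email_address_to_list('bob@example.com') == ['bob@example.com', 'bob@example.com']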
321 | LPgenerator/django-db-mailer | dbmail/providers/sendinblue/mail.py | send | def send(sender_instance):
"""Send a transactional email using SendInBlue API.
Site: https://www.sendinblue.com
API: https://apidocs.sendinblue.com/
"""
m = Mailin(
"https://api.sendinblue.com/v2.0",
sender_instance._kwargs.get("api_key")
)
data = {
"to": email_list_to_email_dict(sender_instance._recipient_list),
"cc": email_list_to_email_dict(sender_instance._cc),
"bcc": email_list_to_email_dict(sender_instance._bcc),
"from": email_address_to_list(sender_instance._from_email),
"subject": sender_instance._subject,
}
if sender_instance._template.is_html:
data.update({
"html": sender_instance._message,
"headers": {"Content-Type": "text/html; charset=utf-8"}
})
else:
data.update({"text": sender_instance._message})
if "attachments" in sender_instance._kwargs:
data["attachment"] = {}
for attachment in sender_instance._kwargs["attachments"]:
data["attachment"][attachment[0]] = base64.b64encode(attachment[1])
result = m.send_email(data)
if result["code"] != "success":
raise SendInBlueError(result["message"]) | python | def send(sender_instance):
m = Mailin(
"https://api.sendinblue.com/v2.0",
sender_instance._kwargs.get("api_key")
)
data = {
"to": email_list_to_email_dict(sender_instance._recipient_list),
"cc": email_list_to_email_dict(sender_instance._cc),
"bcc": email_list_to_email_dict(sender_instance._bcc),
"from": email_address_to_list(sender_instance._from_email),
"subject": sender_instance._subject,
}
if sender_instance._template.is_html:
data.update({
"html": sender_instance._message,
"headers": {"Content-Type": "text/html; charset=utf-8"}
})
else:
data.update({"text": sender_instance._message})
if "attachments" in sender_instance._kwargs:
data["attachment"] = {}
for attachment in sender_instance._kwargs["attachments"]:
data["attachment"][attachment[0]] = base64.b64encode(attachment[1])
result = m.send_email(data)
if result["code"] != "success":
raise SendInBlueError(result["message"]) | [
"def",
"send",
"(",
"sender_instance",
")",
":",
"m",
"=",
"Mailin",
"(",
"\"https://api.sendinblue.com/v2.0\"",
",",
"sender_instance",
".",
"_kwargs",
".",
"get",
"(",
"\"api_key\"",
")",
")",
"data",
"=",
"{",
"\"to\"",
":",
"email_list_to_email_dict",
"(",
"sender_instance",
".",
"_recipient_list",
")",
",",
"\"cc\"",
":",
"email_list_to_email_dict",
"(",
"sender_instance",
".",
"_cc",
")",
",",
"\"bcc\"",
":",
"email_list_to_email_dict",
"(",
"sender_instance",
".",
"_bcc",
")",
",",
"\"from\"",
":",
"email_address_to_list",
"(",
"sender_instance",
".",
"_from_email",
")",
",",
"\"subject\"",
":",
"sender_instance",
".",
"_subject",
",",
"}",
"if",
"sender_instance",
".",
"_template",
".",
"is_html",
":",
"data",
".",
"update",
"(",
"{",
"\"html\"",
":",
"sender_instance",
".",
"_message",
",",
"\"headers\"",
":",
"{",
"\"Content-Type\"",
":",
"\"text/html; charset=utf-8\"",
"}",
"}",
")",
"else",
":",
"data",
".",
"update",
"(",
"{",
"\"text\"",
":",
"sender_instance",
".",
"_message",
"}",
")",
"if",
"\"attachments\"",
"in",
"sender_instance",
".",
"_kwargs",
":",
"data",
"[",
"\"attachment\"",
"]",
"=",
"{",
"}",
"for",
"attachment",
"in",
"sender_instance",
".",
"_kwargs",
"[",
"\"attachments\"",
"]",
":",
"data",
"[",
"\"attachment\"",
"]",
"[",
"attachment",
"[",
"0",
"]",
"]",
"=",
"base64",
".",
"b64encode",
"(",
"attachment",
"[",
"1",
"]",
")",
"result",
"=",
"m",
".",
"send_email",
"(",
"data",
")",
"if",
"result",
"[",
"\"code\"",
"]",
"!=",
"\"success\"",
":",
"raise",
"SendInBlueError",
"(",
"result",
"[",
"\"message\"",
"]",
")"
] | Send a transactional email using SendInBlue API.
Site: https://www.sendinblue.com
API: https://apidocs.sendinblue.com/ | [
"Send",
"a",
"transactional",
"email",
"using",
"SendInBlue",
"API",
"."
] | 217a73c21ba5c6b68738f74b2c55a6dd2c1afe35 | https://github.com/LPgenerator/django-db-mailer/blob/217a73c21ba5c6b68738f74b2c55a6dd2c1afe35/dbmail/providers/sendinblue/mail.py#L35-L65 |
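For reference, a hedged sketch of the payload shape that send() above hands to Mailin.send_email; every value here is a placeholder, and the real dict is built from the sender instance's fields.

data = {
    'to': {'alice@example.com': 'Alice'},
    'cc': {},
    'bcc': {},
    'from': ['noreply@example.com', 'noreply@example.com'],
    'subject': 'Hello',
    'text': 'Plain-text body',  # an HTML template would use 'html' plus a Content-Type header
}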
322 | jaseg/python-mpv | mpv.py | _mpv_coax_proptype | def _mpv_coax_proptype(value, proptype=str):
"""Intelligently coax the given python value into something that can be understood as a proptype property."""
if type(value) is bytes:
return value;
elif type(value) is bool:
return b'yes' if value else b'no'
elif proptype in (str, int, float):
return str(proptype(value)).encode('utf-8')
else:
raise TypeError('Cannot coax value of type {} into property type {}'.format(type(value), proptype)) | python | def _mpv_coax_proptype(value, proptype=str):
if type(value) is bytes:
return value;
elif type(value) is bool:
return b'yes' if value else b'no'
elif proptype in (str, int, float):
return str(proptype(value)).encode('utf-8')
else:
raise TypeError('Cannot coax value of type {} into property type {}'.format(type(value), proptype)) | [
"def",
"_mpv_coax_proptype",
"(",
"value",
",",
"proptype",
"=",
"str",
")",
":",
"if",
"type",
"(",
"value",
")",
"is",
"bytes",
":",
"return",
"value",
"elif",
"type",
"(",
"value",
")",
"is",
"bool",
":",
"return",
"b'yes'",
"if",
"value",
"else",
"b'no'",
"elif",
"proptype",
"in",
"(",
"str",
",",
"int",
",",
"float",
")",
":",
"return",
"str",
"(",
"proptype",
"(",
"value",
")",
")",
".",
"encode",
"(",
"'utf-8'",
")",
"else",
":",
"raise",
"TypeError",
"(",
"'Cannot coax value of type {} into property type {}'",
".",
"format",
"(",
"type",
"(",
"value",
")",
",",
"proptype",
")",
")"
] | Intelligently coax the given python value into something that can be understood as a proptype property. | [
"Intelligently",
"coax",
"the",
"given",
"python",
"value",
"into",
"something",
"that",
"can",
"be",
"understood",
"as",
"a",
"proptype",
"property",
"."
] | 7117de4005cc470a45efd9cf2e9657bdf63a9079 | https://github.com/jaseg/python-mpv/blob/7117de4005cc470a45efd9cf2e9657bdf63a9079/mpv.py#L400-L409 |
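A few behavior checks for _mpv_coax_proptype above; they assume the underscore-prefixed helper can be imported from the mpv module.

from mpv import _mpv_coax_proptype

assert _mpv_coax_proptype(b'raw') == b'raw'      # bytes pass through untouched
assert _mpv_coax_proptype(True) == b'yes'        # bools win regardless of proptype
assert _mpv_coax_proptype(3, int) == b'3'
assert _mpv_coax_proptype(1.5, float) == b'1.5'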
323 | jaseg/python-mpv | mpv.py | _make_node_str_list | def _make_node_str_list(l):
"""Take a list of python objects and make a MPV string node array from it.
As an example, the python list ``l = [ "foo", 23, false ]`` will result in the following MPV node object::
struct mpv_node {
.format = MPV_NODE_ARRAY,
.u.list = *(struct mpv_node_array){
.num = len(l),
.keys = NULL,
.values = struct mpv_node[len(l)] {
{ .format = MPV_NODE_STRING, .u.string = l[0] },
{ .format = MPV_NODE_STRING, .u.string = l[1] },
...
}
}
}
"""
char_ps = [ c_char_p(_mpv_coax_proptype(e, str)) for e in l ]
node_list = MpvNodeList(
num=len(l),
keys=None,
values=( MpvNode * len(l))( *[ MpvNode(
format=MpvFormat.STRING,
val=MpvNodeUnion(string=p))
for p in char_ps ]))
node = MpvNode(
format=MpvFormat.NODE_ARRAY,
val=MpvNodeUnion(list=pointer(node_list)))
return char_ps, node_list, node, cast(pointer(node), c_void_p) | python | def _make_node_str_list(l):
char_ps = [ c_char_p(_mpv_coax_proptype(e, str)) for e in l ]
node_list = MpvNodeList(
num=len(l),
keys=None,
values=( MpvNode * len(l))( *[ MpvNode(
format=MpvFormat.STRING,
val=MpvNodeUnion(string=p))
for p in char_ps ]))
node = MpvNode(
format=MpvFormat.NODE_ARRAY,
val=MpvNodeUnion(list=pointer(node_list)))
return char_ps, node_list, node, cast(pointer(node), c_void_p) | [
"def",
"_make_node_str_list",
"(",
"l",
")",
":",
"char_ps",
"=",
"[",
"c_char_p",
"(",
"_mpv_coax_proptype",
"(",
"e",
",",
"str",
")",
")",
"for",
"e",
"in",
"l",
"]",
"node_list",
"=",
"MpvNodeList",
"(",
"num",
"=",
"len",
"(",
"l",
")",
",",
"keys",
"=",
"None",
",",
"values",
"=",
"(",
"MpvNode",
"*",
"len",
"(",
"l",
")",
")",
"(",
"*",
"[",
"MpvNode",
"(",
"format",
"=",
"MpvFormat",
".",
"STRING",
",",
"val",
"=",
"MpvNodeUnion",
"(",
"string",
"=",
"p",
")",
")",
"for",
"p",
"in",
"char_ps",
"]",
")",
")",
"node",
"=",
"MpvNode",
"(",
"format",
"=",
"MpvFormat",
".",
"NODE_ARRAY",
",",
"val",
"=",
"MpvNodeUnion",
"(",
"list",
"=",
"pointer",
"(",
"node_list",
")",
")",
")",
"return",
"char_ps",
",",
"node_list",
",",
"node",
",",
"cast",
"(",
"pointer",
"(",
"node",
")",
",",
"c_void_p",
")"
] | Take a list of python objects and make an MPV string node array from it.
As an example, the python list ``l = [ "foo", 23, false ]`` will result in the following MPV node object::
struct mpv_node {
.format = MPV_NODE_ARRAY,
.u.list = *(struct mpv_node_array){
.num = len(l),
.keys = NULL,
.values = struct mpv_node[len(l)] {
{ .format = MPV_NODE_STRING, .u.string = l[0] },
{ .format = MPV_NODE_STRING, .u.string = l[1] },
...
}
}
} | [
"Take",
"a",
"list",
"of",
"python",
"objects",
"and",
"make",
"a",
"MPV",
"string",
"node",
"array",
"from",
"it",
"."
] | 7117de4005cc470a45efd9cf2e9657bdf63a9079 | https://github.com/jaseg/python-mpv/blob/7117de4005cc470a45efd9cf2e9657bdf63a9079/mpv.py#L411-L440 |
324 | jaseg/python-mpv | mpv.py | MPV.wait_for_property | def wait_for_property(self, name, cond=lambda val: val, level_sensitive=True):
"""Waits until ``cond`` evaluates to a truthy value on the named property. This can be used to wait for
properties such as ``idle_active`` indicating the player is done with regular playback and just idling around
"""
sema = threading.Semaphore(value=0)
def observer(name, val):
if cond(val):
sema.release()
self.observe_property(name, observer)
if not level_sensitive or not cond(getattr(self, name.replace('-', '_'))):
sema.acquire()
self.unobserve_property(name, observer) | python | def wait_for_property(self, name, cond=lambda val: val, level_sensitive=True):
sema = threading.Semaphore(value=0)
def observer(name, val):
if cond(val):
sema.release()
self.observe_property(name, observer)
if not level_sensitive or not cond(getattr(self, name.replace('-', '_'))):
sema.acquire()
self.unobserve_property(name, observer) | [
"def",
"wait_for_property",
"(",
"self",
",",
"name",
",",
"cond",
"=",
"lambda",
"val",
":",
"val",
",",
"level_sensitive",
"=",
"True",
")",
":",
"sema",
"=",
"threading",
".",
"Semaphore",
"(",
"value",
"=",
"0",
")",
"def",
"observer",
"(",
"name",
",",
"val",
")",
":",
"if",
"cond",
"(",
"val",
")",
":",
"sema",
".",
"release",
"(",
")",
"self",
".",
"observe_property",
"(",
"name",
",",
"observer",
")",
"if",
"not",
"level_sensitive",
"or",
"not",
"cond",
"(",
"getattr",
"(",
"self",
",",
"name",
".",
"replace",
"(",
"'-'",
",",
"'_'",
")",
")",
")",
":",
"sema",
".",
"acquire",
"(",
")",
"self",
".",
"unobserve_property",
"(",
"name",
",",
"observer",
")"
] | Waits until ``cond`` evaluates to a truthy value on the named property. This can be used to wait for
properties such as ``idle_active`` indicating the player is done with regular playback and just idling around | [
"Waits",
"until",
"cond",
"evaluates",
"to",
"a",
"truthy",
"value",
"on",
"the",
"named",
"property",
".",
"This",
"can",
"be",
"used",
"to",
"wait",
"for",
"properties",
"such",
"as",
"idle_active",
"indicating",
"the",
"player",
"is",
"done",
"with",
"regular",
"playback",
"and",
"just",
"idling",
"around"
] | 7117de4005cc470a45efd9cf2e9657bdf63a9079 | https://github.com/jaseg/python-mpv/blob/7117de4005cc470a45efd9cf2e9657bdf63a9079/mpv.py#L582-L593 |
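A minimal usage sketch for wait_for_property, assuming libmpv is installed; 'video.mkv' is a hypothetical local file.

import mpv

player = mpv.MPV()
player.play('video.mkv')  # hypothetical file
# Block until the duration property becomes known, then read it.
player.wait_for_property('duration', lambda val: val is not None)
print(player.duration)
player.terminate()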
325 | jaseg/python-mpv | mpv.py | MPV.terminate | def terminate(self):
"""Properly terminates this player instance. Preferably use this instead of relying on python's garbage
collector to cause this to be called from the object's destructor.
"""
self.handle, handle = None, self.handle
if threading.current_thread() is self._event_thread:
# Handle special case to allow event handle to be detached.
# This is necessary since otherwise the event thread would deadlock itself.
grim_reaper = threading.Thread(target=lambda: _mpv_terminate_destroy(handle))
grim_reaper.start()
else:
_mpv_terminate_destroy(handle)
if self._event_thread:
self._event_thread.join() | python | def terminate(self):
self.handle, handle = None, self.handle
if threading.current_thread() is self._event_thread:
# Handle special case to allow event handle to be detached.
# This is necessary since otherwise the event thread would deadlock itself.
grim_reaper = threading.Thread(target=lambda: _mpv_terminate_destroy(handle))
grim_reaper.start()
else:
_mpv_terminate_destroy(handle)
if self._event_thread:
self._event_thread.join() | [
"def",
"terminate",
"(",
"self",
")",
":",
"self",
".",
"handle",
",",
"handle",
"=",
"None",
",",
"self",
".",
"handle",
"if",
"threading",
".",
"current_thread",
"(",
")",
"is",
"self",
".",
"_event_thread",
":",
"# Handle special case to allow event handle to be detached.",
"# This is necessary since otherwise the event thread would deadlock itself.",
"grim_reaper",
"=",
"threading",
".",
"Thread",
"(",
"target",
"=",
"lambda",
":",
"_mpv_terminate_destroy",
"(",
"handle",
")",
")",
"grim_reaper",
".",
"start",
"(",
")",
"else",
":",
"_mpv_terminate_destroy",
"(",
"handle",
")",
"if",
"self",
".",
"_event_thread",
":",
"self",
".",
"_event_thread",
".",
"join",
"(",
")"
] | Properly terminates this player instance. Preferably use this instead of relying on python's garbage
collector to cause this to be called from the object's destructor. | [
"Properly",
"terminates",
"this",
"player",
"instance",
".",
"Preferably",
"use",
"this",
"instead",
"of",
"relying",
"on",
"python",
"s",
"garbage",
"collector",
"to",
"cause",
"this",
"to",
"be",
"called",
"from",
"the",
"object",
"s",
"destructor",
"."
] | 7117de4005cc470a45efd9cf2e9657bdf63a9079 | https://github.com/jaseg/python-mpv/blob/7117de4005cc470a45efd9cf2e9657bdf63a9079/mpv.py#L599-L612 |
326 | jaseg/python-mpv | mpv.py | MPV.command | def command(self, name, *args):
"""Execute a raw command."""
args = [name.encode('utf-8')] + [ (arg if type(arg) is bytes else str(arg).encode('utf-8'))
for arg in args if arg is not None ] + [None]
_mpv_command(self.handle, (c_char_p*len(args))(*args)) | python | def command(self, name, *args):
args = [name.encode('utf-8')] + [ (arg if type(arg) is bytes else str(arg).encode('utf-8'))
for arg in args if arg is not None ] + [None]
_mpv_command(self.handle, (c_char_p*len(args))(*args)) | [
"def",
"command",
"(",
"self",
",",
"name",
",",
"*",
"args",
")",
":",
"args",
"=",
"[",
"name",
".",
"encode",
"(",
"'utf-8'",
")",
"]",
"+",
"[",
"(",
"arg",
"if",
"type",
"(",
"arg",
")",
"is",
"bytes",
"else",
"str",
"(",
"arg",
")",
".",
"encode",
"(",
"'utf-8'",
")",
")",
"for",
"arg",
"in",
"args",
"if",
"arg",
"is",
"not",
"None",
"]",
"+",
"[",
"None",
"]",
"_mpv_command",
"(",
"self",
".",
"handle",
",",
"(",
"c_char_p",
"*",
"len",
"(",
"args",
")",
")",
"(",
"*",
"args",
")",
")"
] | Execute a raw command. | [
"Execute",
"a",
"raw",
"command",
"."
] | 7117de4005cc470a45efd9cf2e9657bdf63a9079 | https://github.com/jaseg/python-mpv/blob/7117de4005cc470a45efd9cf2e9657bdf63a9079/mpv.py#L624-L628 |
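Sketch of issuing raw commands through the method above; 'loadfile' and 'seek' are standard mpv input commands, and the media path is hypothetical.

import mpv

player = mpv.MPV()
player.command('loadfile', '/tmp/clip.mkv')
player.command('seek', 30, 'absolute')
player.terminate()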
327 | jaseg/python-mpv | mpv.py | MPV.property_observer | def property_observer(self, name):
"""Function decorator to register a property observer. See ``MPV.observe_property`` for details."""
def wrapper(fun):
self.observe_property(name, fun)
fun.unobserve_mpv_properties = lambda: self.unobserve_property(name, fun)
return fun
return wrapper | python | def property_observer(self, name):
def wrapper(fun):
self.observe_property(name, fun)
fun.unobserve_mpv_properties = lambda: self.unobserve_property(name, fun)
return fun
return wrapper | [
"def",
"property_observer",
"(",
"self",
",",
"name",
")",
":",
"def",
"wrapper",
"(",
"fun",
")",
":",
"self",
".",
"observe_property",
"(",
"name",
",",
"fun",
")",
"fun",
".",
"unobserve_mpv_properties",
"=",
"lambda",
":",
"self",
".",
"unobserve_property",
"(",
"name",
",",
"fun",
")",
"return",
"fun",
"return",
"wrapper"
] | Function decorator to register a property observer. See ``MPV.observe_property`` for details. | [
"Function",
"decorator",
"to",
"register",
"a",
"property",
"observer",
".",
"See",
"MPV",
".",
"observe_property",
"for",
"details",
"."
] | 7117de4005cc470a45efd9cf2e9657bdf63a9079 | https://github.com/jaseg/python-mpv/blob/7117de4005cc470a45efd9cf2e9657bdf63a9079/mpv.py#L808-L814 |
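Decorator usage for property_observer above; the observer signature (name, value) and the unobserve_mpv_properties attribute follow directly from the wrapper shown.

import mpv

player = mpv.MPV()

@player.property_observer('time-pos')
def time_observer(_name, value):
    print('playback position is now', value)

# ...later, detach the observer again:
time_observer.unobserve_mpv_properties()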
328 | jaseg/python-mpv | mpv.py | MPV.unregister_message_handler | def unregister_message_handler(self, target_or_handler):
"""Unregister a mpv script message handler for the given script message target name.
You can also call the ``unregister_mpv_messages`` function attribute set on the handler function when it is
registered.
"""
if isinstance(target_or_handler, str):
del self._message_handlers[target_or_handler]
else:
for key, val in self._message_handlers.items():
if val == target_or_handler:
del self._message_handlers[key] | python | def unregister_message_handler(self, target_or_handler):
if isinstance(target_or_handler, str):
del self._message_handlers[target_or_handler]
else:
for key, val in self._message_handlers.items():
if val == target_or_handler:
del self._message_handlers[key] | [
"def",
"unregister_message_handler",
"(",
"self",
",",
"target_or_handler",
")",
":",
"if",
"isinstance",
"(",
"target_or_handler",
",",
"str",
")",
":",
"del",
"self",
".",
"_message_handlers",
"[",
"target_or_handler",
"]",
"else",
":",
"for",
"key",
",",
"val",
"in",
"self",
".",
"_message_handlers",
".",
"items",
"(",
")",
":",
"if",
"val",
"==",
"target_or_handler",
":",
"del",
"self",
".",
"_message_handlers",
"[",
"key",
"]"
] | Unregister a mpv script message handler for the given script message target name.
You can also call the ``unregister_mpv_messages`` function attribute set on the handler function when it is
registered. | [
"Unregister",
"a",
"mpv",
"script",
"message",
"handler",
"for",
"the",
"given",
"script",
"message",
"target",
"name",
"."
] | 7117de4005cc470a45efd9cf2e9657bdf63a9079 | https://github.com/jaseg/python-mpv/blob/7117de4005cc470a45efd9cf2e9657bdf63a9079/mpv.py#L850-L861 |
329 | jaseg/python-mpv | mpv.py | MPV.message_handler | def message_handler(self, target):
"""Decorator to register a mpv script message handler.
WARNING: Only one handler can be registered at a time for any given target.
To unregister the message handler, call its ``unregister_mpv_messages`` function::
player = mpv.MPV()
@player.message_handler('foo')
def my_handler(some, args):
print(args)
my_handler.unregister_mpv_messages()
"""
def register(handler):
self._register_message_handler_internal(target, handler)
handler.unregister_mpv_messages = lambda: self.unregister_message_handler(handler)
return handler
return register | python | def message_handler(self, target):
def register(handler):
self._register_message_handler_internal(target, handler)
handler.unregister_mpv_messages = lambda: self.unregister_message_handler(handler)
return handler
return register | [
"def",
"message_handler",
"(",
"self",
",",
"target",
")",
":",
"def",
"register",
"(",
"handler",
")",
":",
"self",
".",
"_register_message_handler_internal",
"(",
"target",
",",
"handler",
")",
"handler",
".",
"unregister_mpv_messages",
"=",
"lambda",
":",
"self",
".",
"unregister_message_handler",
"(",
"handler",
")",
"return",
"handler",
"return",
"register"
] | Decorator to register a mpv script message handler.
WARNING: Only one handler can be registered at a time for any given target.
To unregister the message handler, call its ``unregister_mpv_messages`` function::
player = mpv.MPV()
@player.message_handler('foo')
def my_handler(some, args):
print(args)
my_handler.unregister_mpv_messages() | [
"Decorator",
"to",
"register",
"a",
"mpv",
"script",
"message",
"handler",
"."
] | 7117de4005cc470a45efd9cf2e9657bdf63a9079 | https://github.com/jaseg/python-mpv/blob/7117de4005cc470a45efd9cf2e9657bdf63a9079/mpv.py#L863-L881 |
330 | jaseg/python-mpv | mpv.py | MPV.key_binding | def key_binding(self, keydef, mode='force'):
"""Function decorator to register a low-level key binding.
The callback function signature is ``fun(key_state, key_name)`` where ``key_state`` is either ``'U'`` for "key
up" or ``'D'`` for "key down".
The keydef format is: ``[Shift+][Ctrl+][Alt+][Meta+]<key>`` where ``<key>`` is either the literal character the
key produces (ASCII or Unicode character), or a symbolic name (as printed by ``mpv --input-keylist``).
To unregister the callback function, you can call its ``unregister_mpv_key_bindings`` attribute::
player = mpv.MPV()
@player.key_binding('Q')
def binding(state, name):
print('blep')
binding.unregister_mpv_key_bindings()
WARNING: For a single keydef only a single callback/command can be registered at the same time. If you register
a binding multiple times, older bindings will be overwritten and there is a possibility of references leaking. So
don't do that.
BIG FAT WARNING: mpv's key binding mechanism is pretty powerful. This means you essentially get arbitrary code
execution through key bindings. This interface makes some limited effort to sanitize the keydef given in the
first parameter, but YOU SHOULD NOT RELY ON THIS FOR SECURITY. If your input comes from config files, this is
completely fine--but, if you are about to pass untrusted input into this parameter, better double-check whether
this is secure in your case.
"""
def register(fun):
fun.mpv_key_bindings = getattr(fun, 'mpv_key_bindings', []) + [keydef]
def unregister_all():
for keydef in fun.mpv_key_bindings:
self.unregister_key_binding(keydef)
fun.unregister_mpv_key_bindings = unregister_all
self.register_key_binding(keydef, fun, mode)
return fun
return register | python | def key_binding(self, keydef, mode='force'):
def register(fun):
fun.mpv_key_bindings = getattr(fun, 'mpv_key_bindings', []) + [keydef]
def unregister_all():
for keydef in fun.mpv_key_bindings:
self.unregister_key_binding(keydef)
fun.unregister_mpv_key_bindings = unregister_all
self.register_key_binding(keydef, fun, mode)
return fun
return register | [
"def",
"key_binding",
"(",
"self",
",",
"keydef",
",",
"mode",
"=",
"'force'",
")",
":",
"def",
"register",
"(",
"fun",
")",
":",
"fun",
".",
"mpv_key_bindings",
"=",
"getattr",
"(",
"fun",
",",
"'mpv_key_bindings'",
",",
"[",
"]",
")",
"+",
"[",
"keydef",
"]",
"def",
"unregister_all",
"(",
")",
":",
"for",
"keydef",
"in",
"fun",
".",
"mpv_key_bindings",
":",
"self",
".",
"unregister_key_binding",
"(",
"keydef",
")",
"fun",
".",
"unregister_mpv_key_bindings",
"=",
"unregister_all",
"self",
".",
"register_key_binding",
"(",
"keydef",
",",
"fun",
",",
"mode",
")",
"return",
"fun",
"return",
"register"
] | Function decorator to register a low-level key binding.
The callback function signature is ``fun(key_state, key_name)`` where ``key_state`` is either ``'U'`` for "key
up" or ``'D'`` for "key down".
The keydef format is: ``[Shift+][Ctrl+][Alt+][Meta+]<key>`` where ``<key>`` is either the literal character the
key produces (ASCII or Unicode character), or a symbolic name (as printed by ``mpv --input-keylist``).
To unregister the callback function, you can call its ``unregister_mpv_key_bindings`` attribute::
player = mpv.MPV()
@player.key_binding('Q')
def binding(state, name):
print('blep')
binding.unregister_mpv_key_bindings()
WARNING: For a single keydef only a single callback/command can be registered at the same time. If you register
a binding multiple times, older bindings will be overwritten and there is a possibility of references leaking. So
don't do that.
BIG FAT WARNING: mpv's key binding mechanism is pretty powerful. This means you essentially get arbitrary code
execution through key bindings. This interface makes some limited effort to sanitize the keydef given in the
first parameter, but YOU SHOULD NOT RELY ON THIS FOR SECURITY. If your input comes from config files, this is
completely fine--but, if you are about to pass untrusted input into this parameter, better double-check whether
this is secure in your case. | [
"Function",
"decorator",
"to",
"register",
"a",
"low",
"-",
"level",
"key",
"binding",
"."
] | 7117de4005cc470a45efd9cf2e9657bdf63a9079 | https://github.com/jaseg/python-mpv/blob/7117de4005cc470a45efd9cf2e9657bdf63a9079/mpv.py#L959-L996 |
331 | jaseg/python-mpv | mpv.py | MPV.register_key_binding | def register_key_binding(self, keydef, callback_or_cmd, mode='force'):
"""Register a key binding. This takes an mpv keydef and either a string containing a mpv command or a python
callback function. See ``MPV.key_binding`` for details.
"""
if not re.match(r'(Shift+)?(Ctrl+)?(Alt+)?(Meta+)?(.|\w+)', keydef):
raise ValueError('Invalid keydef. Expected format: [Shift+][Ctrl+][Alt+][Meta+]<key>\n'
'<key> is either the literal character the key produces (ASCII or Unicode character), or a '
'symbolic name (as printed by --input-keylist)')
binding_name = MPV._binding_name(keydef)
if callable(callback_or_cmd):
self._key_binding_handlers[binding_name] = callback_or_cmd
self.register_message_handler('key-binding', self._handle_key_binding_message)
self.command('define-section',
binding_name, '{} script-binding py_event_handler/{}'.format(keydef, binding_name), mode)
elif isinstance(callback_or_cmd, str):
self.command('define-section', binding_name, '{} {}'.format(keydef, callback_or_cmd), mode)
else:
raise TypeError('register_key_binding expects either an str with an mpv command or a python callable.')
self.command('enable-section', binding_name, 'allow-hide-cursor+allow-vo-dragging') | python | def register_key_binding(self, keydef, callback_or_cmd, mode='force'):
if not re.match(r'(Shift+)?(Ctrl+)?(Alt+)?(Meta+)?(.|\w+)', keydef):
raise ValueError('Invalid keydef. Expected format: [Shift+][Ctrl+][Alt+][Meta+]<key>\n'
'<key> is either the literal character the key produces (ASCII or Unicode character), or a '
'symbolic name (as printed by --input-keylist)')
binding_name = MPV._binding_name(keydef)
if callable(callback_or_cmd):
self._key_binding_handlers[binding_name] = callback_or_cmd
self.register_message_handler('key-binding', self._handle_key_binding_message)
self.command('define-section',
binding_name, '{} script-binding py_event_handler/{}'.format(keydef, binding_name), mode)
elif isinstance(callback_or_cmd, str):
self.command('define-section', binding_name, '{} {}'.format(keydef, callback_or_cmd), mode)
else:
raise TypeError('register_key_binding expects either an str with an mpv command or a python callable.')
self.command('enable-section', binding_name, 'allow-hide-cursor+allow-vo-dragging') | [
"def",
"register_key_binding",
"(",
"self",
",",
"keydef",
",",
"callback_or_cmd",
",",
"mode",
"=",
"'force'",
")",
":",
"if",
"not",
"re",
".",
"match",
"(",
"r'(Shift+)?(Ctrl+)?(Alt+)?(Meta+)?(.|\\w+)'",
",",
"keydef",
")",
":",
"raise",
"ValueError",
"(",
"'Invalid keydef. Expected format: [Shift+][Ctrl+][Alt+][Meta+]<key>\\n'",
"'<key> is either the literal character the key produces (ASCII or Unicode character), or a '",
"'symbolic name (as printed by --input-keylist'",
")",
"binding_name",
"=",
"MPV",
".",
"_binding_name",
"(",
"keydef",
")",
"if",
"callable",
"(",
"callback_or_cmd",
")",
":",
"self",
".",
"_key_binding_handlers",
"[",
"binding_name",
"]",
"=",
"callback_or_cmd",
"self",
".",
"register_message_handler",
"(",
"'key-binding'",
",",
"self",
".",
"_handle_key_binding_message",
")",
"self",
".",
"command",
"(",
"'define-section'",
",",
"binding_name",
",",
"'{} script-binding py_event_handler/{}'",
".",
"format",
"(",
"keydef",
",",
"binding_name",
")",
",",
"mode",
")",
"elif",
"isinstance",
"(",
"callback_or_cmd",
",",
"str",
")",
":",
"self",
".",
"command",
"(",
"'define-section'",
",",
"binding_name",
",",
"'{} {}'",
".",
"format",
"(",
"keydef",
",",
"callback_or_cmd",
")",
",",
"mode",
")",
"else",
":",
"raise",
"TypeError",
"(",
"'register_key_binding expects either an str with an mpv command or a python callable.'",
")",
"self",
".",
"command",
"(",
"'enable-section'",
",",
"binding_name",
",",
"'allow-hide-cursor+allow-vo-dragging'",
")"
] | Register a key binding. This takes an mpv keydef and either a string containing a mpv command or a python
callback function. See ``MPV.key_binding`` for details. | [
"Register",
"a",
"key",
"binding",
".",
"This",
"takes",
"an",
"mpv",
"keydef",
"and",
"either",
"a",
"string",
"containing",
"a",
"mpv",
"command",
"or",
"a",
"python",
"callback",
"function",
".",
"See",
"MPV",
".",
"key_binding",
"for",
"details",
"."
] | 7117de4005cc470a45efd9cf2e9657bdf63a9079 | https://github.com/jaseg/python-mpv/blob/7117de4005cc470a45efd9cf2e9657bdf63a9079/mpv.py#L998-L1016 |
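Sketch of both accepted callback_or_cmd forms: a raw mpv command string and a Python callable with the (key_state, key_name) signature documented on key_binding above.

import mpv

player = mpv.MPV()
player.register_key_binding('q', 'quit')  # dispatch straight to an mpv command

def handler(key_state, key_name):
    print(key_state, key_name)

player.register_key_binding('a', handler)  # dispatch to Python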
332 | jaseg/python-mpv | mpv.py | MPV.unregister_key_binding | def unregister_key_binding(self, keydef):
"""Unregister a key binding by keydef."""
binding_name = MPV._binding_name(keydef)
self.command('disable-section', binding_name)
self.command('define-section', binding_name, '')
if binding_name in self._key_binding_handlers:
del self._key_binding_handlers[binding_name]
if not self._key_binding_handlers:
self.unregister_message_handler('key-binding') | python | def unregister_key_binding(self, keydef):
binding_name = MPV._binding_name(keydef)
self.command('disable-section', binding_name)
self.command('define-section', binding_name, '')
if binding_name in self._key_binding_handlers:
del self._key_binding_handlers[binding_name]
if not self._key_binding_handlers:
self.unregister_message_handler('key-binding') | [
"def",
"unregister_key_binding",
"(",
"self",
",",
"keydef",
")",
":",
"binding_name",
"=",
"MPV",
".",
"_binding_name",
"(",
"keydef",
")",
"self",
".",
"command",
"(",
"'disable-section'",
",",
"binding_name",
")",
"self",
".",
"command",
"(",
"'define-section'",
",",
"binding_name",
",",
"''",
")",
"if",
"binding_name",
"in",
"self",
".",
"_key_binding_handlers",
":",
"del",
"self",
".",
"_key_binding_handlers",
"[",
"binding_name",
"]",
"if",
"not",
"self",
".",
"_key_binding_handlers",
":",
"self",
".",
"unregister_message_handler",
"(",
"'key-binding'",
")"
] | Unregister a key binding by keydef. | [
"Unregister",
"a",
"key",
"binding",
"by",
"keydef",
"."
] | 7117de4005cc470a45efd9cf2e9657bdf63a9079 | https://github.com/jaseg/python-mpv/blob/7117de4005cc470a45efd9cf2e9657bdf63a9079/mpv.py#L1021-L1029 |
333 | lwcolton/falcon-cors | src/falcon_cors/__init__.py | CORS._process_origin | def _process_origin(self, req, resp, origin):
"""Inspects the request and adds the Access-Control-Allow-Origin
header if the requested origin is allowed.
Returns:
``True`` if the header was added and the requested origin
is allowed, ``False`` if the origin is not allowed and the
header has not been added.
"""
if self._cors_config['allow_all_origins']:
if self.supports_credentials:
self._set_allow_origin(resp, origin)
else:
self._set_allow_origin(resp, '*')
return True
if origin in self._cors_config['allow_origins_list']:
self._set_allow_origin(resp, origin)
return True
regex = self._cors_config['allow_origins_regex']
if regex is not None:
if regex.match(origin):
self._set_allow_origin(resp, origin)
return True
return False | python | def _process_origin(self, req, resp, origin):
if self._cors_config['allow_all_origins']:
if self.supports_credentials:
self._set_allow_origin(resp, origin)
else:
self._set_allow_origin(resp, '*')
return True
if origin in self._cors_config['allow_origins_list']:
self._set_allow_origin(resp, origin)
return True
regex = self._cors_config['allow_origins_regex']
if regex is not None:
if regex.match(origin):
self._set_allow_origin(resp, origin)
return True
return False | [
"def",
"_process_origin",
"(",
"self",
",",
"req",
",",
"resp",
",",
"origin",
")",
":",
"if",
"self",
".",
"_cors_config",
"[",
"'allow_all_origins'",
"]",
":",
"if",
"self",
".",
"supports_credentials",
":",
"self",
".",
"_set_allow_origin",
"(",
"resp",
",",
"origin",
")",
"else",
":",
"self",
".",
"_set_allow_origin",
"(",
"resp",
",",
"'*'",
")",
"return",
"True",
"if",
"origin",
"in",
"self",
".",
"_cors_config",
"[",
"'allow_origins_list'",
"]",
":",
"self",
".",
"_set_allow_origin",
"(",
"resp",
",",
"origin",
")",
"return",
"True",
"regex",
"=",
"self",
".",
"_cors_config",
"[",
"'allow_origins_regex'",
"]",
"if",
"regex",
"is",
"not",
"None",
":",
"if",
"regex",
".",
"match",
"(",
"origin",
")",
":",
"self",
".",
"_set_allow_origin",
"(",
"resp",
",",
"origin",
")",
"return",
"True",
"return",
"False"
] | Inspects the request and adds the Access-Control-Allow-Origin
header if the requested origin is allowed.
Returns:
``True`` if the header was added and the requested origin
is allowed, ``False`` if the origin is not allowed and the
header has not been added. | [
"Inspects",
"the",
"request",
"and",
"adds",
"the",
"Access",
"-",
"Control",
"-",
"Allow",
"-",
"Origin",
"header",
"if",
"the",
"requested",
"origin",
"is",
"allowed",
"."
] | 9e1243829078e4c6f8fb8bb895b5cad62bce9d6b | https://github.com/lwcolton/falcon-cors/blob/9e1243829078e4c6f8fb8bb895b5cad62bce9d6b/src/falcon_cors/__init__.py#L303-L329 |
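A minimal sketch of the public middleware configuration that exercises _process_origin, assuming the CORS constructor takes keyword arguments mirroring the _cors_config keys used above.

import falcon
from falcon_cors import CORS

cors = CORS(allow_origins_list=['https://app.example.com'])
api = falcon.API(middleware=[cors.middleware])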
334 | lwcolton/falcon-cors | src/falcon_cors/__init__.py | CORS._process_allow_headers | def _process_allow_headers(self, req, resp, requested_headers):
"""Adds the Access-Control-Allow-Headers header to the response,
using the cors settings to determine which headers are allowed.
Returns:
True if all the headers the client requested are allowed.
False if some or none of the headers the client requested are allowed.
"""
if not requested_headers:
return True
elif self._cors_config['allow_all_headers']:
self._set_allowed_headers(resp, requested_headers)
return True
approved_headers = []
for header in requested_headers:
if header.lower() in self._cors_config['allow_headers_list']:
approved_headers.append(header)
elif self._cors_config.get('allow_headers_regex'):
if self._cors_config['allow_headers_regex'].match(header):
approved_headers.append(header)
if len(approved_headers) == len(requested_headers):
self._set_allowed_headers(resp, approved_headers)
return True
return False | python | def _process_allow_headers(self, req, resp, requested_headers):
if not requested_headers:
return True
elif self._cors_config['allow_all_headers']:
self._set_allowed_headers(resp, requested_headers)
return True
approved_headers = []
for header in requested_headers:
if header.lower() in self._cors_config['allow_headers_list']:
approved_headers.append(header)
elif self._cors_config.get('allow_headers_regex'):
if self._cors_config['allow_headers_regex'].match(header):
approved_headers.append(header)
if len(approved_headers) == len(requested_headers):
self._set_allowed_headers(resp, approved_headers)
return True
return False | [
"def",
"_process_allow_headers",
"(",
"self",
",",
"req",
",",
"resp",
",",
"requested_headers",
")",
":",
"if",
"not",
"requested_headers",
":",
"return",
"True",
"elif",
"self",
".",
"_cors_config",
"[",
"'allow_all_headers'",
"]",
":",
"self",
".",
"_set_allowed_headers",
"(",
"resp",
",",
"requested_headers",
")",
"return",
"True",
"approved_headers",
"=",
"[",
"]",
"for",
"header",
"in",
"requested_headers",
":",
"if",
"header",
".",
"lower",
"(",
")",
"in",
"self",
".",
"_cors_config",
"[",
"'allow_headers_list'",
"]",
":",
"approved_headers",
".",
"append",
"(",
"header",
")",
"elif",
"self",
".",
"_cors_config",
".",
"get",
"(",
"'allow_headers_regex'",
")",
":",
"if",
"self",
".",
"_cors_config",
"[",
"'allow_headers_regex'",
"]",
".",
"match",
"(",
"header",
")",
":",
"approved_headers",
".",
"append",
"(",
"header",
")",
"if",
"len",
"(",
"approved_headers",
")",
"==",
"len",
"(",
"requested_headers",
")",
":",
"self",
".",
"_set_allowed_headers",
"(",
"resp",
",",
"approved_headers",
")",
"return",
"True",
"return",
"False"
] | Adds the Access-Control-Allow-Headers header to the response,
using the cors settings to determine which headers are allowed.
Returns:
True if all the headers the client requested are allowed.
False if some or none of the headers the client requested are allowed. | [
"Adds",
"the",
"Access",
"-",
"Control",
"-",
"Allow",
"-",
"Headers",
"header",
"to",
"the",
"response",
"using",
"the",
"cors",
"settings",
"to",
"determine",
"which",
"headers",
"are",
"allowed",
"."
] | 9e1243829078e4c6f8fb8bb895b5cad62bce9d6b | https://github.com/lwcolton/falcon-cors/blob/9e1243829078e4c6f8fb8bb895b5cad62bce9d6b/src/falcon_cors/__init__.py#L331-L357 |
335 | lwcolton/falcon-cors | src/falcon_cors/__init__.py | CORS._process_methods | def _process_methods(self, req, resp, resource):
"""Adds the Access-Control-Allow-Methods header to the response,
using the cors settings to determine which methods are allowed.
"""
requested_method = self._get_requested_method(req)
if not requested_method:
return False
if self._cors_config['allow_all_methods']:
allowed_methods = self._get_resource_methods(resource)
self._set_allowed_methods(resp, allowed_methods)
if requested_method in allowed_methods:
return True
elif requested_method in self._cors_config['allow_methods_list']:
resource_methods = self._get_resource_methods(resource)
# Only list methods as allowed if they exist
# on the resource AND are in the allowed_methods_list
allowed_methods = [
method for method in resource_methods
if method in self._cors_config['allow_methods_list']
]
self._set_allowed_methods(resp, allowed_methods)
if requested_method in allowed_methods:
return True
return False | python | def _process_methods(self, req, resp, resource):
requested_method = self._get_requested_method(req)
if not requested_method:
return False
if self._cors_config['allow_all_methods']:
allowed_methods = self._get_resource_methods(resource)
self._set_allowed_methods(resp, allowed_methods)
if requested_method in allowed_methods:
return True
elif requested_method in self._cors_config['allow_methods_list']:
resource_methods = self._get_resource_methods(resource)
# Only list methods as allowed if they exist
# on the resource AND are in the allowed_methods_list
allowed_methods = [
method for method in resource_methods
if method in self._cors_config['allow_methods_list']
]
self._set_allowed_methods(resp, allowed_methods)
if requested_method in allowed_methods:
return True
return False | [
"def",
"_process_methods",
"(",
"self",
",",
"req",
",",
"resp",
",",
"resource",
")",
":",
"requested_method",
"=",
"self",
".",
"_get_requested_method",
"(",
"req",
")",
"if",
"not",
"requested_method",
":",
"return",
"False",
"if",
"self",
".",
"_cors_config",
"[",
"'allow_all_methods'",
"]",
":",
"allowed_methods",
"=",
"self",
".",
"_get_resource_methods",
"(",
"resource",
")",
"self",
".",
"_set_allowed_methods",
"(",
"resp",
",",
"allowed_methods",
")",
"if",
"requested_method",
"in",
"allowed_methods",
":",
"return",
"True",
"elif",
"requested_method",
"in",
"self",
".",
"_cors_config",
"[",
"'allow_methods_list'",
"]",
":",
"resource_methods",
"=",
"self",
".",
"_get_resource_methods",
"(",
"resource",
")",
"# Only list methods as allowed if they exist",
"# on the resource AND are in the allowed_methods_list",
"allowed_methods",
"=",
"[",
"method",
"for",
"method",
"in",
"resource_methods",
"if",
"method",
"in",
"self",
".",
"_cors_config",
"[",
"'allow_methods_list'",
"]",
"]",
"self",
".",
"_set_allowed_methods",
"(",
"resp",
",",
"allowed_methods",
")",
"if",
"requested_method",
"in",
"allowed_methods",
":",
"return",
"True",
"return",
"False"
] | Adds the Access-Control-Allow-Methods header to the response,
using the cors settings to determine which methods are allowed. | [
"Adds",
"the",
"Access",
"-",
"Control",
"-",
"Allow",
"-",
"Methods",
"header",
"to",
"the",
"response",
"using",
"the",
"cors",
"settings",
"to",
"determine",
"which",
"methods",
"are",
"allowed",
"."
] | 9e1243829078e4c6f8fb8bb895b5cad62bce9d6b | https://github.com/lwcolton/falcon-cors/blob/9e1243829078e4c6f8fb8bb895b5cad62bce9d6b/src/falcon_cors/__init__.py#L359-L384 |
336 | lwcolton/falcon-cors | src/falcon_cors/__init__.py | CORS._process_credentials | def _process_credentials(self, req, resp, origin):
"""Adds the Access-Control-Allow-Credentials to the response
if the cors settings indicates it should be set.
"""
if self._cors_config['allow_credentials_all_origins']:
self._set_allow_credentials(resp)
return True
if origin in self._cors_config['allow_credentials_origins_list']:
self._set_allow_credentials(resp)
return True
credentials_regex = self._cors_config['allow_credentials_origins_regex']
if credentials_regex:
if credentials_regex.match(origin):
self._set_allow_credentials(resp)
return True
return False | python | def _process_credentials(self, req, resp, origin):
if self._cors_config['allow_credentials_all_origins']:
self._set_allow_credentials(resp)
return True
if origin in self._cors_config['allow_credentials_origins_list']:
self._set_allow_credentials(resp)
return True
credentials_regex = self._cors_config['allow_credentials_origins_regex']
if credentials_regex:
if credentials_regex.match(origin):
self._set_allow_credentials(resp)
return True
return False | [
"def",
"_process_credentials",
"(",
"self",
",",
"req",
",",
"resp",
",",
"origin",
")",
":",
"if",
"self",
".",
"_cors_config",
"[",
"'allow_credentials_all_origins'",
"]",
":",
"self",
".",
"_set_allow_credentials",
"(",
"resp",
")",
"return",
"True",
"if",
"origin",
"in",
"self",
".",
"_cors_config",
"[",
"'allow_credentials_origins_list'",
"]",
":",
"self",
".",
"_set_allow_credentials",
"(",
"resp",
")",
"return",
"True",
"credentials_regex",
"=",
"self",
".",
"_cors_config",
"[",
"'allow_credentials_origins_regex'",
"]",
"if",
"credentials_regex",
":",
"if",
"credentials_regex",
".",
"match",
"(",
"origin",
")",
":",
"self",
".",
"_set_allow_credentials",
"(",
"resp",
")",
"return",
"True",
"return",
"False"
] | Adds the Access-Control-Allow-Credentials header to the response
if the cors settings indicate it should be set. | [
"Adds",
"the",
"Access",
"-",
"Control",
"-",
"Allow",
"-",
"Credentials",
"to",
"the",
"response",
"if",
"the",
"cors",
"settings",
"indicates",
"it",
"should",
"be",
"set",
"."
] | 9e1243829078e4c6f8fb8bb895b5cad62bce9d6b | https://github.com/lwcolton/falcon-cors/blob/9e1243829078e4c6f8fb8bb895b5cad62bce9d6b/src/falcon_cors/__init__.py#L396-L414 |
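Credentials are granted separately from the origin itself, so a sketch allowing both for one origin looks like this (same constructor-kwarg assumption as above):

from falcon_cors import CORS

cors = CORS(
    allow_origins_list=['https://app.example.com'],
    allow_credentials_origins_list=['https://app.example.com'],
)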
337 | azavea/django-amazon-ses | django_amazon_ses.py | EmailBackend._send | def _send(self, email_message):
"""Sends an individual message via the Amazon SES HTTP API.
Args:
email_message: A single Django EmailMessage object.
Returns:
True if the EmailMessage was sent successfully, otherwise False.
Raises:
ClientError: An interaction with the Amazon SES HTTP API
failed.
"""
pre_send.send(self.__class__, message=email_message)
if not email_message.recipients():
return False
from_email = sanitize_address(email_message.from_email,
email_message.encoding)
recipients = [sanitize_address(addr, email_message.encoding)
for addr in email_message.recipients()]
message = email_message.message().as_bytes(linesep='\r\n')
try:
result = self.conn.send_raw_email(
Source=from_email,
Destinations=recipients,
RawMessage={
'Data': message
}
)
message_id = result['MessageId']
post_send.send(
self.__class__,
message=email_message,
message_id=message_id
)
except ClientError:
if not self.fail_silently:
raise
return False
return True | python | def _send(self, email_message):
pre_send.send(self.__class__, message=email_message)
if not email_message.recipients():
return False
from_email = sanitize_address(email_message.from_email,
email_message.encoding)
recipients = [sanitize_address(addr, email_message.encoding)
for addr in email_message.recipients()]
message = email_message.message().as_bytes(linesep='\r\n')
try:
result = self.conn.send_raw_email(
Source=from_email,
Destinations=recipients,
RawMessage={
'Data': message
}
)
message_id = result['MessageId']
post_send.send(
self.__class__,
message=email_message,
message_id=message_id
)
except ClientError:
if not self.fail_silently:
raise
return False
return True | [
"def",
"_send",
"(",
"self",
",",
"email_message",
")",
":",
"pre_send",
".",
"send",
"(",
"self",
".",
"__class__",
",",
"message",
"=",
"email_message",
")",
"if",
"not",
"email_message",
".",
"recipients",
"(",
")",
":",
"return",
"False",
"from_email",
"=",
"sanitize_address",
"(",
"email_message",
".",
"from_email",
",",
"email_message",
".",
"encoding",
")",
"recipients",
"=",
"[",
"sanitize_address",
"(",
"addr",
",",
"email_message",
".",
"encoding",
")",
"for",
"addr",
"in",
"email_message",
".",
"recipients",
"(",
")",
"]",
"message",
"=",
"email_message",
".",
"message",
"(",
")",
".",
"as_bytes",
"(",
"linesep",
"=",
"'\\r\\n'",
")",
"try",
":",
"result",
"=",
"self",
".",
"conn",
".",
"send_raw_email",
"(",
"Source",
"=",
"from_email",
",",
"Destinations",
"=",
"recipients",
",",
"RawMessage",
"=",
"{",
"'Data'",
":",
"message",
"}",
")",
"message_id",
"=",
"result",
"[",
"'MessageId'",
"]",
"post_send",
".",
"send",
"(",
"self",
".",
"__class__",
",",
"message",
"=",
"email_message",
",",
"message_id",
"=",
"message_id",
")",
"except",
"ClientError",
":",
"if",
"not",
"self",
".",
"fail_silently",
":",
"raise",
"return",
"False",
"return",
"True"
] | Sends an individual message via the Amazon SES HTTP API.
Args:
email_message: A single Django EmailMessage object.
Returns:
True if the EmailMessage was sent successfully, otherwise False.
Raises:
ClientError: An interaction with the Amazon SES HTTP API
failed. | [
"Sends",
"an",
"individual",
"message",
"via",
"the",
"Amazon",
"SES",
"HTTP",
"API",
"."
] | 668c2e240ee643d02294d28966a9d44cf30dfc7f | https://github.com/azavea/django-amazon-ses/blob/668c2e240ee643d02294d28966a9d44cf30dfc7f/django_amazon_ses.py#L80-L120 |
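Hedged wiring sketch: with this module importable, pointing Django's EMAIL_BACKEND at the class above routes the standard mail API through Amazon SES; addresses are placeholders.

# settings.py
EMAIL_BACKEND = 'django_amazon_ses.EmailBackend'

# anywhere in the project
from django.core.mail import send_mail

send_mail('Subject', 'Body text', 'noreply@example.com', ['alice@example.com'])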
338 | datadesk/django-bakery | bakery/static_views.py | was_modified_since | def was_modified_since(header=None, mtime=0, size=0):
"""
Was something modified since the user last downloaded it?
header
This is the value of the If-Modified-Since header. If this is None,
I'll just return True.
mtime
This is the modification time of the item we're talking about.
size
This is the size of the item we're talking about.
"""
try:
if header is None:
raise ValueError
matches = re.match(r"^([^;]+)(; length=([0-9]+))?$", header,
re.IGNORECASE)
header_mtime = parse_http_date(matches.group(1))
header_len = matches.group(3)
if header_len and int(header_len) != size:
raise ValueError
if int(mtime) > header_mtime:
raise ValueError
except (AttributeError, ValueError, OverflowError):
return True
return False | python | def was_modified_since(header=None, mtime=0, size=0):
try:
if header is None:
raise ValueError
matches = re.match(r"^([^;]+)(; length=([0-9]+))?$", header,
re.IGNORECASE)
header_mtime = parse_http_date(matches.group(1))
header_len = matches.group(3)
if header_len and int(header_len) != size:
raise ValueError
if int(mtime) > header_mtime:
raise ValueError
except (AttributeError, ValueError, OverflowError):
return True
return False | [
"def",
"was_modified_since",
"(",
"header",
"=",
"None",
",",
"mtime",
"=",
"0",
",",
"size",
"=",
"0",
")",
":",
"try",
":",
"if",
"header",
"is",
"None",
":",
"raise",
"ValueError",
"matches",
"=",
"re",
".",
"match",
"(",
"r\"^([^;]+)(; length=([0-9]+))?$\"",
",",
"header",
",",
"re",
".",
"IGNORECASE",
")",
"header_mtime",
"=",
"parse_http_date",
"(",
"matches",
".",
"group",
"(",
"1",
")",
")",
"header_len",
"=",
"matches",
".",
"group",
"(",
"3",
")",
"if",
"header_len",
"and",
"int",
"(",
"header_len",
")",
"!=",
"size",
":",
"raise",
"ValueError",
"if",
"int",
"(",
"mtime",
")",
">",
"header_mtime",
":",
"raise",
"ValueError",
"except",
"(",
"AttributeError",
",",
"ValueError",
",",
"OverflowError",
")",
":",
"return",
"True",
"return",
"False"
] | Was something modified since the user last downloaded it?
header
This is the value of the If-Modified-Since header. If this is None,
I'll just return True.
mtime
This is the modification time of the item we're talking about.
size
This is the size of the item we're talking about. | [
"Was",
"something",
"modified",
"since",
"the",
"user",
"last",
"downloaded",
"it?",
"header",
"This",
"is",
"the",
"value",
"of",
"the",
"If",
"-",
"Modified",
"-",
"Since",
"header",
".",
"If",
"this",
"is",
"None",
"I",
"ll",
"just",
"return",
"True",
".",
"mtime",
"This",
"is",
"the",
"modification",
"time",
"of",
"the",
"item",
"we",
"re",
"talking",
"about",
".",
"size",
"This",
"is",
"the",
"size",
"of",
"the",
"item",
"we",
"re",
"talking",
"about",
"."
] | e2feb13a66552a388fbcfaaacdd504bba08d3c69 | https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/static_views.py#L135-L159 |
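Caller-side sketch: build the conditional check from a file's stat() data, the way django.views.static.serve pairs If-Modified-Since with mtime and size; this assumes the module-level helper above is importable from bakery.static_views, and the path and header value are placeholders.

import os
from bakery.static_views import was_modified_since

statobj = os.stat('/etc/hosts')
header = 'Sat, 01 Jan 2022 00:00:00 GMT; length=%d' % statobj.st_size
if not was_modified_since(header, statobj.st_mtime, statobj.st_size):
    print('respond 304 Not Modified')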
339 | datadesk/django-bakery | bakery/tasks.py | unpublish_object | def unpublish_object(content_type_pk, obj_pk):
"""
Unbuild all views related to an object and then sync to S3.
Accepts primary keys to retrieve a model object that
inherits bakery's BuildableModel class.
"""
ct = ContentType.objects.get_for_id(content_type_pk)
obj = ct.get_object_for_this_type(pk=obj_pk)
try:
# Unbuild the object
logger.info("unpublish_object task has received %s" % obj)
obj.unbuild()
# Run the `publish` management command unless the
# ALLOW_BAKERY_AUTO_PUBLISHING variable is explictly set to False.
if not getattr(settings, 'ALLOW_BAKERY_AUTO_PUBLISHING', True):
logger.info("Not running publish command because \
ALLOW_BAKERY_AUTO_PUBLISHING is False")
else:
management.call_command("publish")
except Exception:
# Log the error if this crashes
logger.error("Task Error: unpublish_object", exc_info=True) | python | def unpublish_object(content_type_pk, obj_pk):
ct = ContentType.objects.get_for_id(content_type_pk)
obj = ct.get_object_for_this_type(pk=obj_pk)
try:
# Unbuild the object
logger.info("unpublish_object task has received %s" % obj)
obj.unbuild()
# Run the `publish` management command unless the
# ALLOW_BAKERY_AUTO_PUBLISHING variable is explicitly set to False.
if not getattr(settings, 'ALLOW_BAKERY_AUTO_PUBLISHING', True):
logger.info("Not running publish command because \
ALLOW_BAKERY_AUTO_PUBLISHING is False")
else:
management.call_command("publish")
except Exception:
# Log the error if this crashes
logger.error("Task Error: unpublish_object", exc_info=True) | [
"def",
"unpublish_object",
"(",
"content_type_pk",
",",
"obj_pk",
")",
":",
"ct",
"=",
"ContentType",
".",
"objects",
".",
"get_for_id",
"(",
"content_type_pk",
")",
"obj",
"=",
"ct",
".",
"get_object_for_this_type",
"(",
"pk",
"=",
"obj_pk",
")",
"try",
":",
"# Unbuild the object",
"logger",
".",
"info",
"(",
"\"unpublish_object task has received %s\"",
"%",
"obj",
")",
"obj",
".",
"unbuild",
"(",
")",
"# Run the `publish` management command unless the",
"# ALLOW_BAKERY_AUTO_PUBLISHING variable is explictly set to False.",
"if",
"not",
"getattr",
"(",
"settings",
",",
"'ALLOW_BAKERY_AUTO_PUBLISHING'",
",",
"True",
")",
":",
"logger",
".",
"info",
"(",
"\"Not running publish command because \\\nALLOW_BAKERY_AUTO_PUBLISHING is False\"",
")",
"else",
":",
"management",
".",
"call_command",
"(",
"\"publish\"",
")",
"except",
"Exception",
":",
"# Log the error if this crashes",
"logger",
".",
"error",
"(",
"\"Task Error: unpublish_object\"",
",",
"exc_info",
"=",
"True",
")"
] | Unbuild all views related to an object and then sync to S3.
Accepts primary keys to retrieve a model object that
inherits bakery's BuildableModel class. | [
"Unbuild",
"all",
"views",
"related",
"to",
"a",
"object",
"and",
"then",
"sync",
"to",
"S3",
"."
] | e2feb13a66552a388fbcfaaacdd504bba08d3c69 | https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/tasks.py#L39-L61 |
340 | datadesk/django-bakery | bakery/views/base.py | BuildableMixin.prep_directory | def prep_directory(self, target_dir):
"""
Prepares a new directory to store the file at the provided path, if needed.
"""
dirname = path.dirname(target_dir)
if dirname:
dirname = path.join(settings.BUILD_DIR, dirname)
if not self.fs.exists(dirname):
logger.debug("Creating directory at {}{}".format(self.fs_name, dirname))
self.fs.makedirs(dirname) | python | def prep_directory(self, target_dir):
dirname = path.dirname(target_dir)
if dirname:
dirname = path.join(settings.BUILD_DIR, dirname)
if not self.fs.exists(dirname):
logger.debug("Creating directory at {}{}".format(self.fs_name, dirname))
self.fs.makedirs(dirname) | [
"def",
"prep_directory",
"(",
"self",
",",
"target_dir",
")",
":",
"dirname",
"=",
"path",
".",
"dirname",
"(",
"target_dir",
")",
"if",
"dirname",
":",
"dirname",
"=",
"path",
".",
"join",
"(",
"settings",
".",
"BUILD_DIR",
",",
"dirname",
")",
"if",
"not",
"self",
".",
"fs",
".",
"exists",
"(",
"dirname",
")",
":",
"logger",
".",
"debug",
"(",
"\"Creating directory at {}{}\"",
".",
"format",
"(",
"self",
".",
"fs_name",
",",
"dirname",
")",
")",
"self",
".",
"fs",
".",
"makedirs",
"(",
"dirname",
")"
] | Prepares a new directory to store the file at the provided path, if needed. | [
"Prepares",
"a",
"new",
"directory",
"to",
"store",
"the",
"file",
"at",
"the",
"provided",
"path",
"if",
"needed",
"."
] | e2feb13a66552a388fbcfaaacdd504bba08d3c69 | https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/views/base.py#L55-L64 |
341 | datadesk/django-bakery | bakery/views/base.py | BuildableMixin.write_file | def write_file(self, target_path, html):
"""
Writes out the provided HTML to the provided path.
"""
logger.debug("Building to {}{}".format(self.fs_name, target_path))
with self.fs.open(smart_text(target_path), 'wb') as outfile:
outfile.write(six.binary_type(html))
outfile.close() | python | def write_file(self, target_path, html):
logger.debug("Building to {}{}".format(self.fs_name, target_path))
with self.fs.open(smart_text(target_path), 'wb') as outfile:
outfile.write(six.binary_type(html))
outfile.close() | [
"def",
"write_file",
"(",
"self",
",",
"target_path",
",",
"html",
")",
":",
"logger",
".",
"debug",
"(",
"\"Building to {}{}\"",
".",
"format",
"(",
"self",
".",
"fs_name",
",",
"target_path",
")",
")",
"with",
"self",
".",
"fs",
".",
"open",
"(",
"smart_text",
"(",
"target_path",
")",
",",
"'wb'",
")",
"as",
"outfile",
":",
"outfile",
".",
"write",
"(",
"six",
".",
"binary_type",
"(",
"html",
")",
")",
"outfile",
".",
"close",
"(",
")"
] | Writes out the provided HTML to the provided path. | [
"Writes",
"out",
"the",
"provided",
"HTML",
"to",
"the",
"provided",
"path",
"."
] | e2feb13a66552a388fbcfaaacdd504bba08d3c69 | https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/views/base.py#L72-L79 |
342 | datadesk/django-bakery | bakery/views/base.py | BuildableMixin.is_gzippable | def is_gzippable(self, path):
"""
Returns a boolean indicating if the provided file path is a candidate
for gzipping.
"""
# First check if gzipping is allowed by the global setting
if not getattr(settings, 'BAKERY_GZIP', False):
return False
# Then check if the content type of this particular file is gzippable
whitelist = getattr(
settings,
'GZIP_CONTENT_TYPES',
DEFAULT_GZIP_CONTENT_TYPES
)
return mimetypes.guess_type(path)[0] in whitelist | python | def is_gzippable(self, path):
# First check if gzipping is allowed by the global setting
if not getattr(settings, 'BAKERY_GZIP', False):
return False
# Then check if the content type of this particular file is gzippable
whitelist = getattr(
settings,
'GZIP_CONTENT_TYPES',
DEFAULT_GZIP_CONTENT_TYPES
)
return mimetypes.guess_type(path)[0] in whitelist | [
"def",
"is_gzippable",
"(",
"self",
",",
"path",
")",
":",
"# First check if gzipping is allowed by the global setting",
"if",
"not",
"getattr",
"(",
"settings",
",",
"'BAKERY_GZIP'",
",",
"False",
")",
":",
"return",
"False",
"# Then check if the content type of this particular file is gzippable",
"whitelist",
"=",
"getattr",
"(",
"settings",
",",
"'GZIP_CONTENT_TYPES'",
",",
"DEFAULT_GZIP_CONTENT_TYPES",
")",
"return",
"mimetypes",
".",
"guess_type",
"(",
"path",
")",
"[",
"0",
"]",
"in",
"whitelist"
] | Returns a boolean indicating if the provided file path is a candidate
for gzipping. | [
"Returns",
"a",
"boolean",
"indicating",
"if",
"the",
"provided",
"file",
"path",
"is",
"a",
"candidate",
"for",
"gzipping",
"."
] | e2feb13a66552a388fbcfaaacdd504bba08d3c69 | https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/views/base.py#L81-L95 |
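Hedged settings sketch: gzipping stays off unless BAKERY_GZIP is True, and only whitelisted content types are compressed (the whitelist falls back to DEFAULT_GZIP_CONTENT_TYPES, as the code above shows).

# settings.py
BAKERY_GZIP = True
GZIP_CONTENT_TYPES = ('text/html', 'text/css', 'application/javascript')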
343 | datadesk/django-bakery | bakery/views/base.py | BuildableMixin.gzip_file | def gzip_file(self, target_path, html):
"""
Zips up the provided HTML as a companion for the provided path.
Intended to take advantage of the peculiarities of
Amazon S3's GZIP service.
mtime, an option that writes a timestamp to the output file,
is set to 0 to avoid having s3cmd do unnecessary uploads because
of differences in the timestamp.
"""
logger.debug("Gzipping to {}{}".format(self.fs_name, target_path))
# Write GZIP data to an in-memory buffer
data_buffer = six.BytesIO()
kwargs = dict(
filename=path.basename(target_path),
mode='wb',
fileobj=data_buffer
)
if float(sys.version[:3]) >= 2.7:
kwargs['mtime'] = 0
with gzip.GzipFile(**kwargs) as f:
f.write(six.binary_type(html))
# Write that buffer out to the filesystem
with self.fs.open(smart_text(target_path), 'wb') as outfile:
outfile.write(data_buffer.getvalue())
outfile.close() | python | def gzip_file(self, target_path, html):
logger.debug("Gzipping to {}{}".format(self.fs_name, target_path))
# Write GZIP data to an in-memory buffer
data_buffer = six.BytesIO()
kwargs = dict(
filename=path.basename(target_path),
mode='wb',
fileobj=data_buffer
)
if float(sys.version[:3]) >= 2.7:
kwargs['mtime'] = 0
with gzip.GzipFile(**kwargs) as f:
f.write(six.binary_type(html))
# Write that buffer out to the filesystem
with self.fs.open(smart_text(target_path), 'wb') as outfile:
outfile.write(data_buffer.getvalue())
outfile.close() | [
"def",
"gzip_file",
"(",
"self",
",",
"target_path",
",",
"html",
")",
":",
"logger",
".",
"debug",
"(",
"\"Gzipping to {}{}\"",
".",
"format",
"(",
"self",
".",
"fs_name",
",",
"target_path",
")",
")",
"# Write GZIP data to an in-memory buffer",
"data_buffer",
"=",
"six",
".",
"BytesIO",
"(",
")",
"kwargs",
"=",
"dict",
"(",
"filename",
"=",
"path",
".",
"basename",
"(",
"target_path",
")",
",",
"mode",
"=",
"'wb'",
",",
"fileobj",
"=",
"data_buffer",
")",
"if",
"float",
"(",
"sys",
".",
"version",
"[",
":",
"3",
"]",
")",
">=",
"2.7",
":",
"kwargs",
"[",
"'mtime'",
"]",
"=",
"0",
"with",
"gzip",
".",
"GzipFile",
"(",
"*",
"*",
"kwargs",
")",
"as",
"f",
":",
"f",
".",
"write",
"(",
"six",
".",
"binary_type",
"(",
"html",
")",
")",
"# Write that buffer out to the filesystem",
"with",
"self",
".",
"fs",
".",
"open",
"(",
"smart_text",
"(",
"target_path",
")",
",",
"'wb'",
")",
"as",
"outfile",
":",
"outfile",
".",
"write",
"(",
"data_buffer",
".",
"getvalue",
"(",
")",
")",
"outfile",
".",
"close",
"(",
")"
] | Zips up the provided HTML as a companion for the provided path.
Intended to take advantage of the peculiarities of
Amazon S3's GZIP service.
mtime, an option that writes a timestamp to the output file,
is set to 0 to avoid having s3cmd do unnecessary uploads because
of differences in the timestamp. | [
"Zips",
"up",
"the",
"provided",
"HTML",
"as",
"a",
"companion",
"for",
"the",
"provided",
"path",
"."
] | e2feb13a66552a388fbcfaaacdd504bba08d3c69 | https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/views/base.py#L97-L125 |
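The mtime=0 detail in gzip_file is easy to verify in isolation: zeroing the gzip header timestamp makes the output deterministic, so checksum-based sync tools (s3cmd and friends) see an unchanged file as unchanged. A minimal sketch:

import gzip
import io

def gzip_bytes(payload, filename="index.html"):
    # mtime=0 zeroes the timestamp field in the gzip header, making
    # repeated runs over the same payload byte-identical.
    buf = io.BytesIO()
    with gzip.GzipFile(filename=filename, mode="wb", fileobj=buf, mtime=0) as f:
        f.write(payload)
    return buf.getvalue()

html = b"<html><body>Hello</body></html>"
assert gzip_bytes(html) == gzip_bytes(html)  # identical bytes, run after run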
344 | datadesk/django-bakery | bakery/models.py | AutoPublishingBuildableModel.save | def save(self, *args, **kwargs):
"""
A custom save that publishes or unpublishes the object where
appropriate.
Save with keyword argument obj.save(publish=False) to skip the process.
"""
from bakery import tasks
from django.contrib.contenttypes.models import ContentType
# if obj.save(publish=False) has been passed, we skip everything.
if not kwargs.pop('publish', True):
super(AutoPublishingBuildableModel, self).save(*args, **kwargs)
# Otherwise, for the standard obj.save(), here we go...
else:
# First figure out if the record is an addition, or an edit of
# a preexisting record.
try:
preexisting = self.__class__.objects.get(pk=self.pk)
except self.__class__.DoesNotExist:
preexisting = None
# If this is an addition...
if not preexisting:
# We will publish if that's the boolean
if self.get_publication_status():
action = 'publish'
                # Otherwise we will do nothing
else:
action = None
# If this is an edit...
else:
# If it's being unpublished...
if not self.get_publication_status() and \
preexisting.get_publication_status():
action = 'unpublish'
# If it's being published...
elif self.get_publication_status():
action = 'publish'
# If it's remaining unpublished...
else:
action = None
# Now, no matter what, save it normally inside of a dedicated
# database transaction so that we are sure that the save will
# be complete before we trigger any task
with transaction.atomic():
super(AutoPublishingBuildableModel, self).save(*args, **kwargs)
# Finally, depending on the action, fire off a task
ct = ContentType.objects.get_for_model(self.__class__)
if action == 'publish':
tasks.publish_object.delay(ct.pk, self.pk)
elif action == 'unpublish':
tasks.unpublish_object.delay(ct.pk, self.pk) | python | def save(self, *args, **kwargs):
from bakery import tasks
from django.contrib.contenttypes.models import ContentType
# if obj.save(publish=False) has been passed, we skip everything.
if not kwargs.pop('publish', True):
super(AutoPublishingBuildableModel, self).save(*args, **kwargs)
# Otherwise, for the standard obj.save(), here we go...
else:
# First figure out if the record is an addition, or an edit of
# a preexisting record.
try:
preexisting = self.__class__.objects.get(pk=self.pk)
except self.__class__.DoesNotExist:
preexisting = None
# If this is an addition...
if not preexisting:
# We will publish if that's the boolean
if self.get_publication_status():
action = 'publish'
            # Otherwise we will do nothing
else:
action = None
# If this is an edit...
else:
# If it's being unpublished...
if not self.get_publication_status() and \
preexisting.get_publication_status():
action = 'unpublish'
# If it's being published...
elif self.get_publication_status():
action = 'publish'
# If it's remaining unpublished...
else:
action = None
# Now, no matter what, save it normally inside of a dedicated
# database transaction so that we are sure that the save will
# be complete before we trigger any task
with transaction.atomic():
super(AutoPublishingBuildableModel, self).save(*args, **kwargs)
# Finally, depending on the action, fire off a task
ct = ContentType.objects.get_for_model(self.__class__)
if action == 'publish':
tasks.publish_object.delay(ct.pk, self.pk)
elif action == 'unpublish':
tasks.unpublish_object.delay(ct.pk, self.pk) | [
"def",
"save",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"from",
"bakery",
"import",
"tasks",
"from",
"django",
".",
"contrib",
".",
"contenttypes",
".",
"models",
"import",
"ContentType",
"# if obj.save(publish=False) has been passed, we skip everything.",
"if",
"not",
"kwargs",
".",
"pop",
"(",
"'publish'",
",",
"True",
")",
":",
"super",
"(",
"AutoPublishingBuildableModel",
",",
"self",
")",
".",
"save",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"# Otherwise, for the standard obj.save(), here we go...",
"else",
":",
"# First figure out if the record is an addition, or an edit of",
"# a preexisting record.",
"try",
":",
"preexisting",
"=",
"self",
".",
"__class__",
".",
"objects",
".",
"get",
"(",
"pk",
"=",
"self",
".",
"pk",
")",
"except",
"self",
".",
"__class__",
".",
"DoesNotExist",
":",
"preexisting",
"=",
"None",
"# If this is an addition...",
"if",
"not",
"preexisting",
":",
"# We will publish if that's the boolean",
"if",
"self",
".",
"get_publication_status",
"(",
")",
":",
"action",
"=",
"'publish'",
"# Otherwise we will do nothing do nothing",
"else",
":",
"action",
"=",
"None",
"# If this is an edit...",
"else",
":",
"# If it's being unpublished...",
"if",
"not",
"self",
".",
"get_publication_status",
"(",
")",
"and",
"preexisting",
".",
"get_publication_status",
"(",
")",
":",
"action",
"=",
"'unpublish'",
"# If it's being published...",
"elif",
"self",
".",
"get_publication_status",
"(",
")",
":",
"action",
"=",
"'publish'",
"# If it's remaining unpublished...",
"else",
":",
"action",
"=",
"None",
"# Now, no matter what, save it normally inside of a dedicated",
"# database transaction so that we are sure that the save will",
"# be complete before we trigger any task",
"with",
"transaction",
".",
"atomic",
"(",
")",
":",
"super",
"(",
"AutoPublishingBuildableModel",
",",
"self",
")",
".",
"save",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"# Finally, depending on the action, fire off a task",
"ct",
"=",
"ContentType",
".",
"objects",
".",
"get_for_model",
"(",
"self",
".",
"__class__",
")",
"if",
"action",
"==",
"'publish'",
":",
"tasks",
".",
"publish_object",
".",
"delay",
"(",
"ct",
".",
"pk",
",",
"self",
".",
"pk",
")",
"elif",
"action",
"==",
"'unpublish'",
":",
"tasks",
".",
"unpublish_object",
".",
"delay",
"(",
"ct",
".",
"pk",
",",
"self",
".",
"pk",
")"
] | A custom save that publishes or unpublishes the object where
appropriate.
Save with keyword argument obj.save(publish=False) to skip the process. | [
"A",
"custom",
"save",
"that",
"publishes",
"or",
"unpublishes",
"the",
"object",
"where",
"appropriate",
"."
] | e2feb13a66552a388fbcfaaacdd504bba08d3c69 | https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/models.py#L117-L167 |
345 | datadesk/django-bakery | bakery/models.py | AutoPublishingBuildableModel.delete | def delete(self, *args, **kwargs):
"""
Triggers a task that will unpublish the object after it is deleted.
    Call with keyword argument obj.delete(unpublish=False) to skip it.
"""
from bakery import tasks
from django.contrib.contenttypes.models import ContentType
    # if obj.delete(unpublish=False) has been passed, we skip the task.
unpublish = kwargs.pop('unpublish', True)
# Delete it from the database
super(AutoPublishingBuildableModel, self).delete(*args, **kwargs)
if unpublish:
ct = ContentType.objects.get_for_model(self.__class__)
tasks.unpublish_object.delay(ct.pk, self.pk) | python | def delete(self, *args, **kwargs):
from bakery import tasks
from django.contrib.contenttypes.models import ContentType
    # if obj.delete(unpublish=False) has been passed, we skip the task.
unpublish = kwargs.pop('unpublish', True)
# Delete it from the database
super(AutoPublishingBuildableModel, self).delete(*args, **kwargs)
if unpublish:
ct = ContentType.objects.get_for_model(self.__class__)
tasks.unpublish_object.delay(ct.pk, self.pk) | [
"def",
"delete",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"from",
"bakery",
"import",
"tasks",
"from",
"django",
".",
"contrib",
".",
"contenttypes",
".",
"models",
"import",
"ContentType",
"# if obj.save(unpublish=False) has been passed, we skip the task.",
"unpublish",
"=",
"kwargs",
".",
"pop",
"(",
"'unpublish'",
",",
"True",
")",
"# Delete it from the database",
"super",
"(",
"AutoPublishingBuildableModel",
",",
"self",
")",
".",
"delete",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"if",
"unpublish",
":",
"ct",
"=",
"ContentType",
".",
"objects",
".",
"get_for_model",
"(",
"self",
".",
"__class__",
")",
"tasks",
".",
"unpublish_object",
".",
"delay",
"(",
"ct",
".",
"pk",
",",
"self",
".",
"pk",
")"
] | Triggers a task that will unpublish the object after it is deleted.
Call with keyword argument obj.delete(unpublish=False) to skip it. | [
"Triggers",
"a",
"task",
"that",
"will",
"unpublish",
"the",
"object",
"after",
"it",
"is",
"deleted",
"."
] | e2feb13a66552a388fbcfaaacdd504bba08d3c69 | https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/models.py#L169-L183 |
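Between save() and delete(), the publish/unpublish choice reduces to a small decision table. A pure-Python restatement (the function and argument names are mine, purely for illustration) that can be tested without Django:

def decide_action(is_published_now, was_published_before, preexisting):
    # Mirrors the branching in save(): additions either publish or do
    # nothing; edits can also unpublish when the flag flips off.
    if not preexisting:
        return "publish" if is_published_now else None
    if not is_published_now and was_published_before:
        return "unpublish"
    if is_published_now:
        return "publish"
    return None

assert decide_action(True, False, preexisting=False) == "publish"
assert decide_action(False, True, preexisting=True) == "unpublish"
assert decide_action(False, False, preexisting=True) is None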
346 | datadesk/django-bakery | bakery/management/commands/build.py | Command.handle | def handle(self, *args, **options):
"""
Making it happen.
"""
logger.info("Build started")
# Set options
self.set_options(*args, **options)
# Get the build directory ready
if not options.get("keep_build_dir"):
self.init_build_dir()
# Build up static files
if not options.get("skip_static"):
self.build_static()
# Build the media directory
if not options.get("skip_media"):
self.build_media()
# Build views
self.build_views()
# Close out
logger.info("Build finished") | python | def handle(self, *args, **options):
logger.info("Build started")
# Set options
self.set_options(*args, **options)
# Get the build directory ready
if not options.get("keep_build_dir"):
self.init_build_dir()
# Build up static files
if not options.get("skip_static"):
self.build_static()
# Build the media directory
if not options.get("skip_media"):
self.build_media()
# Build views
self.build_views()
# Close out
logger.info("Build finished") | [
"def",
"handle",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"options",
")",
":",
"logger",
".",
"info",
"(",
"\"Build started\"",
")",
"# Set options",
"self",
".",
"set_options",
"(",
"*",
"args",
",",
"*",
"*",
"options",
")",
"# Get the build directory ready",
"if",
"not",
"options",
".",
"get",
"(",
"\"keep_build_dir\"",
")",
":",
"self",
".",
"init_build_dir",
"(",
")",
"# Build up static files",
"if",
"not",
"options",
".",
"get",
"(",
"\"skip_static\"",
")",
":",
"self",
".",
"build_static",
"(",
")",
"# Build the media directory",
"if",
"not",
"options",
".",
"get",
"(",
"\"skip_media\"",
")",
":",
"self",
".",
"build_media",
"(",
")",
"# Build views",
"self",
".",
"build_views",
"(",
")",
"# Close out",
"logger",
".",
"info",
"(",
"\"Build finished\"",
")"
] | Making it happen. | [
"Making",
"it",
"happen",
"."
] | e2feb13a66552a388fbcfaaacdd504bba08d3c69 | https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/management/commands/build.py#L90-L115 |
347 | datadesk/django-bakery | bakery/management/commands/build.py | Command.set_options | def set_options(self, *args, **options):
"""
Configure a few global options before things get going.
"""
self.verbosity = int(options.get('verbosity', 1))
# Figure out what build directory to use
if options.get("build_dir"):
self.build_dir = options.get("build_dir")
settings.BUILD_DIR = self.build_dir
else:
if not hasattr(settings, 'BUILD_DIR'):
raise CommandError(self.build_unconfig_msg)
self.build_dir = settings.BUILD_DIR
# Get the datatypes right so fs will be happy
self.build_dir = smart_text(self.build_dir)
self.static_root = smart_text(settings.STATIC_ROOT)
self.media_root = smart_text(settings.MEDIA_ROOT)
# Connect the BUILD_DIR with our filesystem backend
self.app = apps.get_app_config("bakery")
self.fs = self.app.filesystem
self.fs_name = self.app.filesystem_name
# If the build dir doesn't exist make it
if not self.fs.exists(self.build_dir):
self.fs.makedirs(self.build_dir)
# Figure out what views we'll be using
if options.get('view_list'):
self.view_list = options['view_list']
else:
if not hasattr(settings, 'BAKERY_VIEWS'):
raise CommandError(self.views_unconfig_msg)
self.view_list = settings.BAKERY_VIEWS
# Are we pooling?
self.pooling = options.get('pooling') | python | def set_options(self, *args, **options):
self.verbosity = int(options.get('verbosity', 1))
# Figure out what build directory to use
if options.get("build_dir"):
self.build_dir = options.get("build_dir")
settings.BUILD_DIR = self.build_dir
else:
if not hasattr(settings, 'BUILD_DIR'):
raise CommandError(self.build_unconfig_msg)
self.build_dir = settings.BUILD_DIR
# Get the datatypes right so fs will be happy
self.build_dir = smart_text(self.build_dir)
self.static_root = smart_text(settings.STATIC_ROOT)
self.media_root = smart_text(settings.MEDIA_ROOT)
# Connect the BUILD_DIR with our filesystem backend
self.app = apps.get_app_config("bakery")
self.fs = self.app.filesystem
self.fs_name = self.app.filesystem_name
# If the build dir doesn't exist make it
if not self.fs.exists(self.build_dir):
self.fs.makedirs(self.build_dir)
# Figure out what views we'll be using
if options.get('view_list'):
self.view_list = options['view_list']
else:
if not hasattr(settings, 'BAKERY_VIEWS'):
raise CommandError(self.views_unconfig_msg)
self.view_list = settings.BAKERY_VIEWS
# Are we pooling?
self.pooling = options.get('pooling') | [
"def",
"set_options",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"options",
")",
":",
"self",
".",
"verbosity",
"=",
"int",
"(",
"options",
".",
"get",
"(",
"'verbosity'",
",",
"1",
")",
")",
"# Figure out what build directory to use",
"if",
"options",
".",
"get",
"(",
"\"build_dir\"",
")",
":",
"self",
".",
"build_dir",
"=",
"options",
".",
"get",
"(",
"\"build_dir\"",
")",
"settings",
".",
"BUILD_DIR",
"=",
"self",
".",
"build_dir",
"else",
":",
"if",
"not",
"hasattr",
"(",
"settings",
",",
"'BUILD_DIR'",
")",
":",
"raise",
"CommandError",
"(",
"self",
".",
"build_unconfig_msg",
")",
"self",
".",
"build_dir",
"=",
"settings",
".",
"BUILD_DIR",
"# Get the datatypes right so fs will be happy",
"self",
".",
"build_dir",
"=",
"smart_text",
"(",
"self",
".",
"build_dir",
")",
"self",
".",
"static_root",
"=",
"smart_text",
"(",
"settings",
".",
"STATIC_ROOT",
")",
"self",
".",
"media_root",
"=",
"smart_text",
"(",
"settings",
".",
"MEDIA_ROOT",
")",
"# Connect the BUILD_DIR with our filesystem backend",
"self",
".",
"app",
"=",
"apps",
".",
"get_app_config",
"(",
"\"bakery\"",
")",
"self",
".",
"fs",
"=",
"self",
".",
"app",
".",
"filesystem",
"self",
".",
"fs_name",
"=",
"self",
".",
"app",
".",
"filesystem_name",
"# If the build dir doesn't exist make it",
"if",
"not",
"self",
".",
"fs",
".",
"exists",
"(",
"self",
".",
"build_dir",
")",
":",
"self",
".",
"fs",
".",
"makedirs",
"(",
"self",
".",
"build_dir",
")",
"# Figure out what views we'll be using",
"if",
"options",
".",
"get",
"(",
"'view_list'",
")",
":",
"self",
".",
"view_list",
"=",
"options",
"[",
"'view_list'",
"]",
"else",
":",
"if",
"not",
"hasattr",
"(",
"settings",
",",
"'BAKERY_VIEWS'",
")",
":",
"raise",
"CommandError",
"(",
"self",
".",
"views_unconfig_msg",
")",
"self",
".",
"view_list",
"=",
"settings",
".",
"BAKERY_VIEWS",
"# Are we pooling?",
"self",
".",
"pooling",
"=",
"options",
".",
"get",
"(",
"'pooling'",
")"
] | Configure a few global options before things get going. | [
"Configure",
"a",
"few",
"global",
"options",
"before",
"things",
"get",
"going",
"."
] | e2feb13a66552a388fbcfaaacdd504bba08d3c69 | https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/management/commands/build.py#L117-L155 |
348 | datadesk/django-bakery | bakery/management/commands/build.py | Command.init_build_dir | def init_build_dir(self):
"""
Clear out the build directory and create a new one.
"""
# Destroy the build directory, if it exists
logger.debug("Initializing %s" % self.build_dir)
if self.verbosity > 1:
self.stdout.write("Initializing build directory")
if self.fs.exists(self.build_dir):
self.fs.removetree(self.build_dir)
# Then recreate it from scratch
self.fs.makedirs(self.build_dir) | python | def init_build_dir(self):
# Destroy the build directory, if it exists
logger.debug("Initializing %s" % self.build_dir)
if self.verbosity > 1:
self.stdout.write("Initializing build directory")
if self.fs.exists(self.build_dir):
self.fs.removetree(self.build_dir)
# Then recreate it from scratch
self.fs.makedirs(self.build_dir) | [
"def",
"init_build_dir",
"(",
"self",
")",
":",
"# Destroy the build directory, if it exists",
"logger",
".",
"debug",
"(",
"\"Initializing %s\"",
"%",
"self",
".",
"build_dir",
")",
"if",
"self",
".",
"verbosity",
">",
"1",
":",
"self",
".",
"stdout",
".",
"write",
"(",
"\"Initializing build directory\"",
")",
"if",
"self",
".",
"fs",
".",
"exists",
"(",
"self",
".",
"build_dir",
")",
":",
"self",
".",
"fs",
".",
"removetree",
"(",
"self",
".",
"build_dir",
")",
"# Then recreate it from scratch",
"self",
".",
"fs",
".",
"makedirs",
"(",
"self",
".",
"build_dir",
")"
] | Clear out the build directory and create a new one. | [
"Clear",
"out",
"the",
"build",
"directory",
"and",
"create",
"a",
"new",
"one",
"."
] | e2feb13a66552a388fbcfaaacdd504bba08d3c69 | https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/management/commands/build.py#L157-L168 |
349 | datadesk/django-bakery | bakery/management/commands/build.py | Command.build_static | def build_static(self, *args, **options):
"""
Builds the static files directory as well as robots.txt and favicon.ico
"""
logger.debug("Building static directory")
if self.verbosity > 1:
self.stdout.write("Building static directory")
management.call_command(
"collectstatic",
interactive=False,
verbosity=0
)
# Set the target directory inside the filesystem.
target_dir = path.join(
self.build_dir,
settings.STATIC_URL.lstrip('/')
)
target_dir = smart_text(target_dir)
if os.path.exists(self.static_root) and settings.STATIC_URL:
if getattr(settings, 'BAKERY_GZIP', False):
self.copytree_and_gzip(self.static_root, target_dir)
# if gzip isn't enabled, just copy the tree straight over
else:
logger.debug("Copying {}{} to {}{}".format("osfs://", self.static_root, self.fs_name, target_dir))
copy.copy_dir("osfs:///", self.static_root, self.fs, target_dir)
# If they exist in the static directory, copy the robots.txt
# and favicon.ico files down to the root so they will work
# on the live website.
robots_src = path.join(target_dir, 'robots.txt')
if self.fs.exists(robots_src):
robots_target = path.join(self.build_dir, 'robots.txt')
logger.debug("Copying {}{} to {}{}".format(self.fs_name, robots_src, self.fs_name, robots_target))
self.fs.copy(robots_src, robots_target)
favicon_src = path.join(target_dir, 'favicon.ico')
if self.fs.exists(favicon_src):
favicon_target = path.join(self.build_dir, 'favicon.ico')
logger.debug("Copying {}{} to {}{}".format(self.fs_name, favicon_src, self.fs_name, favicon_target))
self.fs.copy(favicon_src, favicon_target) | python | def build_static(self, *args, **options):
logger.debug("Building static directory")
if self.verbosity > 1:
self.stdout.write("Building static directory")
management.call_command(
"collectstatic",
interactive=False,
verbosity=0
)
# Set the target directory inside the filesystem.
target_dir = path.join(
self.build_dir,
settings.STATIC_URL.lstrip('/')
)
target_dir = smart_text(target_dir)
if os.path.exists(self.static_root) and settings.STATIC_URL:
if getattr(settings, 'BAKERY_GZIP', False):
self.copytree_and_gzip(self.static_root, target_dir)
# if gzip isn't enabled, just copy the tree straight over
else:
logger.debug("Copying {}{} to {}{}".format("osfs://", self.static_root, self.fs_name, target_dir))
copy.copy_dir("osfs:///", self.static_root, self.fs, target_dir)
# If they exist in the static directory, copy the robots.txt
# and favicon.ico files down to the root so they will work
# on the live website.
robots_src = path.join(target_dir, 'robots.txt')
if self.fs.exists(robots_src):
robots_target = path.join(self.build_dir, 'robots.txt')
logger.debug("Copying {}{} to {}{}".format(self.fs_name, robots_src, self.fs_name, robots_target))
self.fs.copy(robots_src, robots_target)
favicon_src = path.join(target_dir, 'favicon.ico')
if self.fs.exists(favicon_src):
favicon_target = path.join(self.build_dir, 'favicon.ico')
logger.debug("Copying {}{} to {}{}".format(self.fs_name, favicon_src, self.fs_name, favicon_target))
self.fs.copy(favicon_src, favicon_target) | [
"def",
"build_static",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"options",
")",
":",
"logger",
".",
"debug",
"(",
"\"Building static directory\"",
")",
"if",
"self",
".",
"verbosity",
">",
"1",
":",
"self",
".",
"stdout",
".",
"write",
"(",
"\"Building static directory\"",
")",
"management",
".",
"call_command",
"(",
"\"collectstatic\"",
",",
"interactive",
"=",
"False",
",",
"verbosity",
"=",
"0",
")",
"# Set the target directory inside the filesystem.",
"target_dir",
"=",
"path",
".",
"join",
"(",
"self",
".",
"build_dir",
",",
"settings",
".",
"STATIC_URL",
".",
"lstrip",
"(",
"'/'",
")",
")",
"target_dir",
"=",
"smart_text",
"(",
"target_dir",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"self",
".",
"static_root",
")",
"and",
"settings",
".",
"STATIC_URL",
":",
"if",
"getattr",
"(",
"settings",
",",
"'BAKERY_GZIP'",
",",
"False",
")",
":",
"self",
".",
"copytree_and_gzip",
"(",
"self",
".",
"static_root",
",",
"target_dir",
")",
"# if gzip isn't enabled, just copy the tree straight over",
"else",
":",
"logger",
".",
"debug",
"(",
"\"Copying {}{} to {}{}\"",
".",
"format",
"(",
"\"osfs://\"",
",",
"self",
".",
"static_root",
",",
"self",
".",
"fs_name",
",",
"target_dir",
")",
")",
"copy",
".",
"copy_dir",
"(",
"\"osfs:///\"",
",",
"self",
".",
"static_root",
",",
"self",
".",
"fs",
",",
"target_dir",
")",
"# If they exist in the static directory, copy the robots.txt",
"# and favicon.ico files down to the root so they will work",
"# on the live website.",
"robots_src",
"=",
"path",
".",
"join",
"(",
"target_dir",
",",
"'robots.txt'",
")",
"if",
"self",
".",
"fs",
".",
"exists",
"(",
"robots_src",
")",
":",
"robots_target",
"=",
"path",
".",
"join",
"(",
"self",
".",
"build_dir",
",",
"'robots.txt'",
")",
"logger",
".",
"debug",
"(",
"\"Copying {}{} to {}{}\"",
".",
"format",
"(",
"self",
".",
"fs_name",
",",
"robots_src",
",",
"self",
".",
"fs_name",
",",
"robots_target",
")",
")",
"self",
".",
"fs",
".",
"copy",
"(",
"robots_src",
",",
"robots_target",
")",
"favicon_src",
"=",
"path",
".",
"join",
"(",
"target_dir",
",",
"'favicon.ico'",
")",
"if",
"self",
".",
"fs",
".",
"exists",
"(",
"favicon_src",
")",
":",
"favicon_target",
"=",
"path",
".",
"join",
"(",
"self",
".",
"build_dir",
",",
"'favicon.ico'",
")",
"logger",
".",
"debug",
"(",
"\"Copying {}{} to {}{}\"",
".",
"format",
"(",
"self",
".",
"fs_name",
",",
"favicon_src",
",",
"self",
".",
"fs_name",
",",
"favicon_target",
")",
")",
"self",
".",
"fs",
".",
"copy",
"(",
"favicon_src",
",",
"favicon_target",
")"
] | Builds the static files directory as well as robots.txt and favicon.ico | [
"Builds",
"the",
"static",
"files",
"directory",
"as",
"well",
"as",
"robots",
".",
"txt",
"and",
"favicon",
".",
"ico"
] | e2feb13a66552a388fbcfaaacdd504bba08d3c69 | https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/management/commands/build.py#L170-L211 |
350 | datadesk/django-bakery | bakery/management/commands/build.py | Command.build_media | def build_media(self):
"""
Build the media files.
"""
logger.debug("Building media directory")
if self.verbosity > 1:
self.stdout.write("Building media directory")
if os.path.exists(self.media_root) and settings.MEDIA_URL:
target_dir = path.join(self.fs_name, self.build_dir, settings.MEDIA_URL.lstrip('/'))
logger.debug("Copying {}{} to {}{}".format("osfs://", self.media_root, self.fs_name, target_dir))
copy.copy_dir("osfs:///", smart_text(self.media_root), self.fs, smart_text(target_dir)) | python | def build_media(self):
logger.debug("Building media directory")
if self.verbosity > 1:
self.stdout.write("Building media directory")
if os.path.exists(self.media_root) and settings.MEDIA_URL:
target_dir = path.join(self.fs_name, self.build_dir, settings.MEDIA_URL.lstrip('/'))
logger.debug("Copying {}{} to {}{}".format("osfs://", self.media_root, self.fs_name, target_dir))
copy.copy_dir("osfs:///", smart_text(self.media_root), self.fs, smart_text(target_dir)) | [
"def",
"build_media",
"(",
"self",
")",
":",
"logger",
".",
"debug",
"(",
"\"Building media directory\"",
")",
"if",
"self",
".",
"verbosity",
">",
"1",
":",
"self",
".",
"stdout",
".",
"write",
"(",
"\"Building media directory\"",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"self",
".",
"media_root",
")",
"and",
"settings",
".",
"MEDIA_URL",
":",
"target_dir",
"=",
"path",
".",
"join",
"(",
"self",
".",
"fs_name",
",",
"self",
".",
"build_dir",
",",
"settings",
".",
"MEDIA_URL",
".",
"lstrip",
"(",
"'/'",
")",
")",
"logger",
".",
"debug",
"(",
"\"Copying {}{} to {}{}\"",
".",
"format",
"(",
"\"osfs://\"",
",",
"self",
".",
"media_root",
",",
"self",
".",
"fs_name",
",",
"target_dir",
")",
")",
"copy",
".",
"copy_dir",
"(",
"\"osfs:///\"",
",",
"smart_text",
"(",
"self",
".",
"media_root",
")",
",",
"self",
".",
"fs",
",",
"smart_text",
"(",
"target_dir",
")",
")"
] | Build the media files. | [
"Build",
"the",
"media",
"files",
"."
] | e2feb13a66552a388fbcfaaacdd504bba08d3c69 | https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/management/commands/build.py#L213-L223 |
351 | datadesk/django-bakery | bakery/management/commands/build.py | Command.build_views | def build_views(self):
"""
Bake out specified buildable views.
"""
# Then loop through and run them all
for view_str in self.view_list:
logger.debug("Building %s" % view_str)
if self.verbosity > 1:
self.stdout.write("Building %s" % view_str)
view = get_callable(view_str)
self.get_view_instance(view).build_method() | python | def build_views(self):
# Then loop through and run them all
for view_str in self.view_list:
logger.debug("Building %s" % view_str)
if self.verbosity > 1:
self.stdout.write("Building %s" % view_str)
view = get_callable(view_str)
self.get_view_instance(view).build_method() | [
"def",
"build_views",
"(",
"self",
")",
":",
"# Then loop through and run them all",
"for",
"view_str",
"in",
"self",
".",
"view_list",
":",
"logger",
".",
"debug",
"(",
"\"Building %s\"",
"%",
"view_str",
")",
"if",
"self",
".",
"verbosity",
">",
"1",
":",
"self",
".",
"stdout",
".",
"write",
"(",
"\"Building %s\"",
"%",
"view_str",
")",
"view",
"=",
"get_callable",
"(",
"view_str",
")",
"self",
".",
"get_view_instance",
"(",
"view",
")",
".",
"build_method",
"(",
")"
] | Bake out specified buildable views. | [
"Bake",
"out",
"specified",
"buildable",
"views",
"."
] | e2feb13a66552a388fbcfaaacdd504bba08d3c69 | https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/management/commands/build.py#L231-L241 |
352 | datadesk/django-bakery | bakery/management/commands/build.py | Command.copytree_and_gzip | def copytree_and_gzip(self, source_dir, target_dir):
"""
Copies the provided source directory to the provided target directory.
    Gzips JavaScript, CSS, HTML and other eligible files along the way.
"""
# Figure out what we're building...
build_list = []
# Walk through the source directory...
for (dirpath, dirnames, filenames) in os.walk(source_dir):
for f in filenames:
# Figure out what is going where
source_path = os.path.join(dirpath, f)
rel_path = os.path.relpath(dirpath, source_dir)
target_path = os.path.join(target_dir, rel_path, f)
# Add it to our list to build
build_list.append((source_path, target_path))
logger.debug("Gzipping {} files".format(len(build_list)))
# Build em all
if not getattr(self, 'pooling', False):
[self.copyfile_and_gzip(*u) for u in build_list]
else:
cpu_count = multiprocessing.cpu_count()
logger.debug("Pooling build on {} CPUs".format(cpu_count))
pool = ThreadPool(processes=cpu_count)
pool.map(self.pooled_copyfile_and_gzip, build_list) | python | def copytree_and_gzip(self, source_dir, target_dir):
# Figure out what we're building...
build_list = []
# Walk through the source directory...
for (dirpath, dirnames, filenames) in os.walk(source_dir):
for f in filenames:
# Figure out what is going where
source_path = os.path.join(dirpath, f)
rel_path = os.path.relpath(dirpath, source_dir)
target_path = os.path.join(target_dir, rel_path, f)
# Add it to our list to build
build_list.append((source_path, target_path))
logger.debug("Gzipping {} files".format(len(build_list)))
# Build em all
if not getattr(self, 'pooling', False):
[self.copyfile_and_gzip(*u) for u in build_list]
else:
cpu_count = multiprocessing.cpu_count()
logger.debug("Pooling build on {} CPUs".format(cpu_count))
pool = ThreadPool(processes=cpu_count)
pool.map(self.pooled_copyfile_and_gzip, build_list) | [
"def",
"copytree_and_gzip",
"(",
"self",
",",
"source_dir",
",",
"target_dir",
")",
":",
"# Figure out what we're building...",
"build_list",
"=",
"[",
"]",
"# Walk through the source directory...",
"for",
"(",
"dirpath",
",",
"dirnames",
",",
"filenames",
")",
"in",
"os",
".",
"walk",
"(",
"source_dir",
")",
":",
"for",
"f",
"in",
"filenames",
":",
"# Figure out what is going where",
"source_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"dirpath",
",",
"f",
")",
"rel_path",
"=",
"os",
".",
"path",
".",
"relpath",
"(",
"dirpath",
",",
"source_dir",
")",
"target_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"target_dir",
",",
"rel_path",
",",
"f",
")",
"# Add it to our list to build",
"build_list",
".",
"append",
"(",
"(",
"source_path",
",",
"target_path",
")",
")",
"logger",
".",
"debug",
"(",
"\"Gzipping {} files\"",
".",
"format",
"(",
"len",
"(",
"build_list",
")",
")",
")",
"# Build em all",
"if",
"not",
"getattr",
"(",
"self",
",",
"'pooling'",
",",
"False",
")",
":",
"[",
"self",
".",
"copyfile_and_gzip",
"(",
"*",
"u",
")",
"for",
"u",
"in",
"build_list",
"]",
"else",
":",
"cpu_count",
"=",
"multiprocessing",
".",
"cpu_count",
"(",
")",
"logger",
".",
"debug",
"(",
"\"Pooling build on {} CPUs\"",
".",
"format",
"(",
"cpu_count",
")",
")",
"pool",
"=",
"ThreadPool",
"(",
"processes",
"=",
"cpu_count",
")",
"pool",
".",
"map",
"(",
"self",
".",
"pooled_copyfile_and_gzip",
",",
"build_list",
")"
] | Copies the provided source directory to the provided target directory.
Gzips JavaScript, CSS, HTML and other eligible files along the way. | [
"Copies",
"the",
"provided",
"source",
"directory",
"to",
"the",
"provided",
"target",
"directory",
"."
] | e2feb13a66552a388fbcfaaacdd504bba08d3c69 | https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/management/commands/build.py#L243-L270 |
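The pooling branch is the usual build-a-task-list-then-map pattern over a thread pool. A self-contained sketch of the same shape, with a stand-in worker in place of copyfile_and_gzip:

import multiprocessing
from multiprocessing.pool import ThreadPool

def worker(pair):
    source_path, target_path = pair
    # Stand-in for copyfile_and_gzip; real work would copy or compress here.
    return "{} -> {}".format(source_path, target_path)

build_list = [("a.css", "out/a.css"), ("b.js", "out/b.js")]
pool = ThreadPool(processes=multiprocessing.cpu_count())
results = pool.map(worker, build_list)  # one tuple per call, as in pooled_copyfile_and_gzip
pool.close()
pool.join()
print(results)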
353 | datadesk/django-bakery | bakery/management/commands/build.py | Command.copyfile_and_gzip | def copyfile_and_gzip(self, source_path, target_path):
"""
Copies the provided file to the provided target directory.
    Gzips JavaScript, CSS, HTML and other eligible files along the way.
"""
# And then where we want to copy it to.
target_dir = path.dirname(target_path)
if not self.fs.exists(target_dir):
try:
self.fs.makedirs(target_dir)
except OSError:
pass
# determine the mimetype of the file
guess = mimetypes.guess_type(source_path)
content_type = guess[0]
encoding = guess[1]
    # If it isn't a file we want to gzip...
if content_type not in self.gzip_file_match:
# just copy it to the target.
logger.debug("Copying {}{} to {}{} because its filetype isn't on the whitelist".format(
"osfs://",
source_path,
self.fs_name,
target_path
))
copy.copy_file("osfs:///", smart_text(source_path), self.fs, smart_text(target_path))
    # If the file is already gzipped
elif encoding == 'gzip':
logger.debug("Copying {}{} to {}{} because it's already gzipped".format(
"osfs://",
source_path,
self.fs_name,
target_path
))
copy.copy_file("osfs:///", smart_text(source_path), self.fs, smart_text(target_path))
# If it is one we want to gzip...
else:
# ... let the world know ...
logger.debug("Gzipping {}{} to {}{}".format(
"osfs://",
source_path,
self.fs_name,
target_path
))
# Open up the source file from the OS
with open(source_path, 'rb') as source_file:
# Write GZIP data to an in-memory buffer
data_buffer = six.BytesIO()
kwargs = dict(
filename=path.basename(target_path),
mode='wb',
fileobj=data_buffer
)
if float(sys.version[:3]) >= 2.7:
kwargs['mtime'] = 0
with gzip.GzipFile(**kwargs) as f:
f.write(six.binary_type(source_file.read()))
# Write that buffer out to the filesystem
with self.fs.open(smart_text(target_path), 'wb') as outfile:
outfile.write(data_buffer.getvalue())
outfile.close() | python | def copyfile_and_gzip(self, source_path, target_path):
# And then where we want to copy it to.
target_dir = path.dirname(target_path)
if not self.fs.exists(target_dir):
try:
self.fs.makedirs(target_dir)
except OSError:
pass
# determine the mimetype of the file
guess = mimetypes.guess_type(source_path)
content_type = guess[0]
encoding = guess[1]
    # If it isn't a file we want to gzip...
if content_type not in self.gzip_file_match:
# just copy it to the target.
logger.debug("Copying {}{} to {}{} because its filetype isn't on the whitelist".format(
"osfs://",
source_path,
self.fs_name,
target_path
))
copy.copy_file("osfs:///", smart_text(source_path), self.fs, smart_text(target_path))
    # If the file is already gzipped
elif encoding == 'gzip':
logger.debug("Copying {}{} to {}{} because it's already gzipped".format(
"osfs://",
source_path,
self.fs_name,
target_path
))
copy.copy_file("osfs:///", smart_text(source_path), self.fs, smart_text(target_path))
# If it is one we want to gzip...
else:
# ... let the world know ...
logger.debug("Gzipping {}{} to {}{}".format(
"osfs://",
source_path,
self.fs_name,
target_path
))
# Open up the source file from the OS
with open(source_path, 'rb') as source_file:
# Write GZIP data to an in-memory buffer
data_buffer = six.BytesIO()
kwargs = dict(
filename=path.basename(target_path),
mode='wb',
fileobj=data_buffer
)
if float(sys.version[:3]) >= 2.7:
kwargs['mtime'] = 0
with gzip.GzipFile(**kwargs) as f:
f.write(six.binary_type(source_file.read()))
# Write that buffer out to the filesystem
with self.fs.open(smart_text(target_path), 'wb') as outfile:
outfile.write(data_buffer.getvalue())
outfile.close() | [
"def",
"copyfile_and_gzip",
"(",
"self",
",",
"source_path",
",",
"target_path",
")",
":",
"# And then where we want to copy it to.",
"target_dir",
"=",
"path",
".",
"dirname",
"(",
"target_path",
")",
"if",
"not",
"self",
".",
"fs",
".",
"exists",
"(",
"target_dir",
")",
":",
"try",
":",
"self",
".",
"fs",
".",
"makedirs",
"(",
"target_dir",
")",
"except",
"OSError",
":",
"pass",
"# determine the mimetype of the file",
"guess",
"=",
"mimetypes",
".",
"guess_type",
"(",
"source_path",
")",
"content_type",
"=",
"guess",
"[",
"0",
"]",
"encoding",
"=",
"guess",
"[",
"1",
"]",
"# If it isn't a file want to gzip...",
"if",
"content_type",
"not",
"in",
"self",
".",
"gzip_file_match",
":",
"# just copy it to the target.",
"logger",
".",
"debug",
"(",
"\"Copying {}{} to {}{} because its filetype isn't on the whitelist\"",
".",
"format",
"(",
"\"osfs://\"",
",",
"source_path",
",",
"self",
".",
"fs_name",
",",
"target_path",
")",
")",
"copy",
".",
"copy_file",
"(",
"\"osfs:///\"",
",",
"smart_text",
"(",
"source_path",
")",
",",
"self",
".",
"fs",
",",
"smart_text",
"(",
"target_path",
")",
")",
"# # if the file is already gzipped",
"elif",
"encoding",
"==",
"'gzip'",
":",
"logger",
".",
"debug",
"(",
"\"Copying {}{} to {}{} because it's already gzipped\"",
".",
"format",
"(",
"\"osfs://\"",
",",
"source_path",
",",
"self",
".",
"fs_name",
",",
"target_path",
")",
")",
"copy",
".",
"copy_file",
"(",
"\"osfs:///\"",
",",
"smart_text",
"(",
"source_path",
")",
",",
"self",
".",
"fs",
",",
"smart_text",
"(",
"target_path",
")",
")",
"# If it is one we want to gzip...",
"else",
":",
"# ... let the world know ...",
"logger",
".",
"debug",
"(",
"\"Gzipping {}{} to {}{}\"",
".",
"format",
"(",
"\"osfs://\"",
",",
"source_path",
",",
"self",
".",
"fs_name",
",",
"target_path",
")",
")",
"# Open up the source file from the OS",
"with",
"open",
"(",
"source_path",
",",
"'rb'",
")",
"as",
"source_file",
":",
"# Write GZIP data to an in-memory buffer",
"data_buffer",
"=",
"six",
".",
"BytesIO",
"(",
")",
"kwargs",
"=",
"dict",
"(",
"filename",
"=",
"path",
".",
"basename",
"(",
"target_path",
")",
",",
"mode",
"=",
"'wb'",
",",
"fileobj",
"=",
"data_buffer",
")",
"if",
"float",
"(",
"sys",
".",
"version",
"[",
":",
"3",
"]",
")",
">=",
"2.7",
":",
"kwargs",
"[",
"'mtime'",
"]",
"=",
"0",
"with",
"gzip",
".",
"GzipFile",
"(",
"*",
"*",
"kwargs",
")",
"as",
"f",
":",
"f",
".",
"write",
"(",
"six",
".",
"binary_type",
"(",
"source_file",
".",
"read",
"(",
")",
")",
")",
"# Write that buffer out to the filesystem",
"with",
"self",
".",
"fs",
".",
"open",
"(",
"smart_text",
"(",
"target_path",
")",
",",
"'wb'",
")",
"as",
"outfile",
":",
"outfile",
".",
"write",
"(",
"data_buffer",
".",
"getvalue",
"(",
")",
")",
"outfile",
".",
"close",
"(",
")"
] | Copies the provided file to the provided target directory.
Gzips JavaScript, CSS, HTML and other eligible files along the way. | [
"Copies",
"the",
"provided",
"file",
"to",
"the",
"provided",
"target",
"directory",
"."
] | e2feb13a66552a388fbcfaaacdd504bba08d3c69 | https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/management/commands/build.py#L280-L346 |
354 | datadesk/django-bakery | bakery/management/commands/publish.py | Command.set_options | def set_options(self, options):
"""
Configure all the many options we'll need to make this happen.
"""
self.verbosity = int(options.get('verbosity'))
# Will we be gzipping?
self.gzip = getattr(settings, 'BAKERY_GZIP', False)
# And if so what content types will we be gzipping?
self.gzip_content_types = getattr(
settings,
'GZIP_CONTENT_TYPES',
DEFAULT_GZIP_CONTENT_TYPES
)
    # What ACL (i.e. security permissions) will we be giving the files on S3?
self.acl = getattr(settings, 'DEFAULT_ACL', self.DEFAULT_ACL)
# Should we set cache-control headers?
self.cache_control = getattr(settings, 'BAKERY_CACHE_CONTROL', {})
# If the user specifies a build directory...
if options.get('build_dir'):
# ... validate that it is good.
if not os.path.exists(options.get('build_dir')):
raise CommandError(self.build_missing_msg)
# Go ahead and use it
self.build_dir = options.get("build_dir")
# If the user does not specify a build dir...
else:
# Check if it is set in settings.py
if not hasattr(settings, 'BUILD_DIR'):
raise CommandError(self.build_unconfig_msg)
# Then make sure it actually exists
if not os.path.exists(settings.BUILD_DIR):
raise CommandError(self.build_missing_msg)
# Go ahead and use it
self.build_dir = settings.BUILD_DIR
# If the user provides a bucket name, use that.
if options.get("aws_bucket_name"):
self.aws_bucket_name = options.get("aws_bucket_name")
else:
        # Otherwise try to find it in the settings
if not hasattr(settings, 'AWS_BUCKET_NAME'):
raise CommandError(self.bucket_unconfig_msg)
self.aws_bucket_name = settings.AWS_BUCKET_NAME
# The bucket prefix, if it exists
self.aws_bucket_prefix = options.get("aws_bucket_prefix")
# If the user sets the --force option
if options.get('force'):
self.force_publish = True
else:
self.force_publish = False
# set the --dry-run option
if options.get('dry_run'):
self.dry_run = True
if self.verbosity > 0:
logger.info("Executing with the --dry-run option set.")
else:
self.dry_run = False
self.no_delete = options.get('no_delete')
self.no_pooling = options.get('no_pooling') | python | def set_options(self, options):
self.verbosity = int(options.get('verbosity'))
# Will we be gzipping?
self.gzip = getattr(settings, 'BAKERY_GZIP', False)
# And if so what content types will we be gzipping?
self.gzip_content_types = getattr(
settings,
'GZIP_CONTENT_TYPES',
DEFAULT_GZIP_CONTENT_TYPES
)
    # What ACL (i.e. security permissions) will we be giving the files on S3?
self.acl = getattr(settings, 'DEFAULT_ACL', self.DEFAULT_ACL)
# Should we set cache-control headers?
self.cache_control = getattr(settings, 'BAKERY_CACHE_CONTROL', {})
# If the user specifies a build directory...
if options.get('build_dir'):
# ... validate that it is good.
if not os.path.exists(options.get('build_dir')):
raise CommandError(self.build_missing_msg)
# Go ahead and use it
self.build_dir = options.get("build_dir")
# If the user does not specify a build dir...
else:
# Check if it is set in settings.py
if not hasattr(settings, 'BUILD_DIR'):
raise CommandError(self.build_unconfig_msg)
# Then make sure it actually exists
if not os.path.exists(settings.BUILD_DIR):
raise CommandError(self.build_missing_msg)
# Go ahead and use it
self.build_dir = settings.BUILD_DIR
# If the user provides a bucket name, use that.
if options.get("aws_bucket_name"):
self.aws_bucket_name = options.get("aws_bucket_name")
else:
        # Otherwise try to find it in the settings
if not hasattr(settings, 'AWS_BUCKET_NAME'):
raise CommandError(self.bucket_unconfig_msg)
self.aws_bucket_name = settings.AWS_BUCKET_NAME
# The bucket prefix, if it exists
self.aws_bucket_prefix = options.get("aws_bucket_prefix")
# If the user sets the --force option
if options.get('force'):
self.force_publish = True
else:
self.force_publish = False
# set the --dry-run option
if options.get('dry_run'):
self.dry_run = True
if self.verbosity > 0:
logger.info("Executing with the --dry-run option set.")
else:
self.dry_run = False
self.no_delete = options.get('no_delete')
self.no_pooling = options.get('no_pooling') | [
"def",
"set_options",
"(",
"self",
",",
"options",
")",
":",
"self",
".",
"verbosity",
"=",
"int",
"(",
"options",
".",
"get",
"(",
"'verbosity'",
")",
")",
"# Will we be gzipping?",
"self",
".",
"gzip",
"=",
"getattr",
"(",
"settings",
",",
"'BAKERY_GZIP'",
",",
"False",
")",
"# And if so what content types will we be gzipping?",
"self",
".",
"gzip_content_types",
"=",
"getattr",
"(",
"settings",
",",
"'GZIP_CONTENT_TYPES'",
",",
"DEFAULT_GZIP_CONTENT_TYPES",
")",
"# What ACL (i.e. security permissions) will be giving the files on S3?",
"self",
".",
"acl",
"=",
"getattr",
"(",
"settings",
",",
"'DEFAULT_ACL'",
",",
"self",
".",
"DEFAULT_ACL",
")",
"# Should we set cache-control headers?",
"self",
".",
"cache_control",
"=",
"getattr",
"(",
"settings",
",",
"'BAKERY_CACHE_CONTROL'",
",",
"{",
"}",
")",
"# If the user specifies a build directory...",
"if",
"options",
".",
"get",
"(",
"'build_dir'",
")",
":",
"# ... validate that it is good.",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"options",
".",
"get",
"(",
"'build_dir'",
")",
")",
":",
"raise",
"CommandError",
"(",
"self",
".",
"build_missing_msg",
")",
"# Go ahead and use it",
"self",
".",
"build_dir",
"=",
"options",
".",
"get",
"(",
"\"build_dir\"",
")",
"# If the user does not specify a build dir...",
"else",
":",
"# Check if it is set in settings.py",
"if",
"not",
"hasattr",
"(",
"settings",
",",
"'BUILD_DIR'",
")",
":",
"raise",
"CommandError",
"(",
"self",
".",
"build_unconfig_msg",
")",
"# Then make sure it actually exists",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"settings",
".",
"BUILD_DIR",
")",
":",
"raise",
"CommandError",
"(",
"self",
".",
"build_missing_msg",
")",
"# Go ahead and use it",
"self",
".",
"build_dir",
"=",
"settings",
".",
"BUILD_DIR",
"# If the user provides a bucket name, use that.",
"if",
"options",
".",
"get",
"(",
"\"aws_bucket_name\"",
")",
":",
"self",
".",
"aws_bucket_name",
"=",
"options",
".",
"get",
"(",
"\"aws_bucket_name\"",
")",
"else",
":",
"# Otherwise try to find it the settings",
"if",
"not",
"hasattr",
"(",
"settings",
",",
"'AWS_BUCKET_NAME'",
")",
":",
"raise",
"CommandError",
"(",
"self",
".",
"bucket_unconfig_msg",
")",
"self",
".",
"aws_bucket_name",
"=",
"settings",
".",
"AWS_BUCKET_NAME",
"# The bucket prefix, if it exists",
"self",
".",
"aws_bucket_prefix",
"=",
"options",
".",
"get",
"(",
"\"aws_bucket_prefix\"",
")",
"# If the user sets the --force option",
"if",
"options",
".",
"get",
"(",
"'force'",
")",
":",
"self",
".",
"force_publish",
"=",
"True",
"else",
":",
"self",
".",
"force_publish",
"=",
"False",
"# set the --dry-run option",
"if",
"options",
".",
"get",
"(",
"'dry_run'",
")",
":",
"self",
".",
"dry_run",
"=",
"True",
"if",
"self",
".",
"verbosity",
">",
"0",
":",
"logger",
".",
"info",
"(",
"\"Executing with the --dry-run option set.\"",
")",
"else",
":",
"self",
".",
"dry_run",
"=",
"False",
"self",
".",
"no_delete",
"=",
"options",
".",
"get",
"(",
"'no_delete'",
")",
"self",
".",
"no_pooling",
"=",
"options",
".",
"get",
"(",
"'no_pooling'",
")"
] | Configure all the many options we'll need to make this happen. | [
"Configure",
"all",
"the",
"many",
"options",
"we",
"ll",
"need",
"to",
"make",
"this",
"happen",
"."
] | e2feb13a66552a388fbcfaaacdd504bba08d3c69 | https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/management/commands/publish.py#L175-L242 |
355 | datadesk/django-bakery | bakery/management/commands/publish.py | Command.get_local_file_list | def get_local_file_list(self):
"""
    Walk the local build directory and create a list of relative
    paths to files.
"""
file_list = []
for (dirpath, dirnames, filenames) in os.walk(self.build_dir):
for fname in filenames:
# relative path, to sync with the S3 key
local_key = os.path.join(
os.path.relpath(dirpath, self.build_dir),
fname
)
if local_key.startswith('./'):
local_key = local_key[2:]
file_list.append(local_key)
return file_list | python | def get_local_file_list(self):
file_list = []
for (dirpath, dirnames, filenames) in os.walk(self.build_dir):
for fname in filenames:
# relative path, to sync with the S3 key
local_key = os.path.join(
os.path.relpath(dirpath, self.build_dir),
fname
)
if local_key.startswith('./'):
local_key = local_key[2:]
file_list.append(local_key)
return file_list | [
"def",
"get_local_file_list",
"(",
"self",
")",
":",
"file_list",
"=",
"[",
"]",
"for",
"(",
"dirpath",
",",
"dirnames",
",",
"filenames",
")",
"in",
"os",
".",
"walk",
"(",
"self",
".",
"build_dir",
")",
":",
"for",
"fname",
"in",
"filenames",
":",
"# relative path, to sync with the S3 key",
"local_key",
"=",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"relpath",
"(",
"dirpath",
",",
"self",
".",
"build_dir",
")",
",",
"fname",
")",
"if",
"local_key",
".",
"startswith",
"(",
"'./'",
")",
":",
"local_key",
"=",
"local_key",
"[",
"2",
":",
"]",
"file_list",
".",
"append",
"(",
"local_key",
")",
"return",
"file_list"
Walk the local build directory and create a list of relative
paths to files. | [
"Walk",
"the",
"local",
"build",
"directory",
"and",
"create",
"a",
"list",
"of",
"relative",
"and",
"absolute",
"paths",
"to",
"files",
"."
] | e2feb13a66552a388fbcfaaacdd504bba08d3c69 | https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/management/commands/publish.py#L266-L282 |
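The './' strip in get_local_file_list matters because os.path.relpath returns '.' for the top level, which would otherwise prefix every root-level S3 key. A quick standalone check against a throwaway directory (POSIX path separators assumed):

import os
import tempfile

build_dir = tempfile.mkdtemp()
open(os.path.join(build_dir, "index.html"), "w").close()
os.makedirs(os.path.join(build_dir, "css"))
open(os.path.join(build_dir, "css", "site.css"), "w").close()

file_list = []
for dirpath, dirnames, filenames in os.walk(build_dir):
    for fname in filenames:
        local_key = os.path.join(os.path.relpath(dirpath, build_dir), fname)
        if local_key.startswith("./"):
            local_key = local_key[2:]  # "./index.html" -> "index.html"
        file_list.append(local_key)

print(sorted(file_list))  # ['css/site.css', 'index.html']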
356 | datadesk/django-bakery | bakery/management/commands/publish.py | Command.sync_with_s3 | def sync_with_s3(self):
"""
Walk through our self.local_files list, and match them with the list
of keys in the S3 bucket.
"""
# Create a list to put all the files we're going to update
self.update_list = []
# Figure out which files need to be updated and upload all these files
logger.debug("Comparing {} local files with {} bucket files".format(
len(self.local_file_list),
len(self.s3_obj_dict.keys())
))
if self.no_pooling:
[self.compare_local_file(f) for f in self.local_file_list]
else:
cpu_count = multiprocessing.cpu_count()
logger.debug("Pooling local file comparison on {} CPUs".format(cpu_count))
pool = ThreadPool(processes=cpu_count)
pool.map(self.compare_local_file, self.local_file_list)
logger.debug("Uploading {} new or updated files to bucket".format(len(self.update_list)))
if self.no_pooling:
[self.upload_to_s3(*u) for u in self.update_list]
else:
logger.debug("Pooling s3 uploads on {} CPUs".format(cpu_count))
pool = ThreadPool(processes=cpu_count)
pool.map(self.pooled_upload_to_s3, self.update_list) | python | def sync_with_s3(self):
# Create a list to put all the files we're going to update
self.update_list = []
# Figure out which files need to be updated and upload all these files
logger.debug("Comparing {} local files with {} bucket files".format(
len(self.local_file_list),
len(self.s3_obj_dict.keys())
))
if self.no_pooling:
[self.compare_local_file(f) for f in self.local_file_list]
else:
cpu_count = multiprocessing.cpu_count()
logger.debug("Pooling local file comparison on {} CPUs".format(cpu_count))
pool = ThreadPool(processes=cpu_count)
pool.map(self.compare_local_file, self.local_file_list)
logger.debug("Uploading {} new or updated files to bucket".format(len(self.update_list)))
if self.no_pooling:
[self.upload_to_s3(*u) for u in self.update_list]
else:
logger.debug("Pooling s3 uploads on {} CPUs".format(cpu_count))
pool = ThreadPool(processes=cpu_count)
pool.map(self.pooled_upload_to_s3, self.update_list) | [
"def",
"sync_with_s3",
"(",
"self",
")",
":",
"# Create a list to put all the files we're going to update",
"self",
".",
"update_list",
"=",
"[",
"]",
"# Figure out which files need to be updated and upload all these files",
"logger",
".",
"debug",
"(",
"\"Comparing {} local files with {} bucket files\"",
".",
"format",
"(",
"len",
"(",
"self",
".",
"local_file_list",
")",
",",
"len",
"(",
"self",
".",
"s3_obj_dict",
".",
"keys",
"(",
")",
")",
")",
")",
"if",
"self",
".",
"no_pooling",
":",
"[",
"self",
".",
"compare_local_file",
"(",
"f",
")",
"for",
"f",
"in",
"self",
".",
"local_file_list",
"]",
"else",
":",
"cpu_count",
"=",
"multiprocessing",
".",
"cpu_count",
"(",
")",
"logger",
".",
"debug",
"(",
"\"Pooling local file comparison on {} CPUs\"",
".",
"format",
"(",
"cpu_count",
")",
")",
"pool",
"=",
"ThreadPool",
"(",
"processes",
"=",
"cpu_count",
")",
"pool",
".",
"map",
"(",
"self",
".",
"compare_local_file",
",",
"self",
".",
"local_file_list",
")",
"logger",
".",
"debug",
"(",
"\"Uploading {} new or updated files to bucket\"",
".",
"format",
"(",
"len",
"(",
"self",
".",
"update_list",
")",
")",
")",
"if",
"self",
".",
"no_pooling",
":",
"[",
"self",
".",
"upload_to_s3",
"(",
"*",
"u",
")",
"for",
"u",
"in",
"self",
".",
"update_list",
"]",
"else",
":",
"logger",
".",
"debug",
"(",
"\"Pooling s3 uploads on {} CPUs\"",
".",
"format",
"(",
"cpu_count",
")",
")",
"pool",
"=",
"ThreadPool",
"(",
"processes",
"=",
"cpu_count",
")",
"pool",
".",
"map",
"(",
"self",
".",
"pooled_upload_to_s3",
",",
"self",
".",
"update_list",
")"
] | Walk through our self.local_files list, and match them with the list
of keys in the S3 bucket. | [
"Walk",
"through",
"our",
"self",
".",
"local_files",
"list",
"and",
"match",
"them",
"with",
"the",
"list",
"of",
"keys",
"in",
"the",
"S3",
"bucket",
"."
] | e2feb13a66552a388fbcfaaacdd504bba08d3c69 | https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/management/commands/publish.py#L284-L311 |
357 | datadesk/django-bakery | bakery/management/commands/publish.py | Command.get_md5 | def get_md5(self, filename):
"""
Returns the md5 checksum of the provided file name.
"""
with open(filename, 'rb') as f:
m = hashlib.md5(f.read())
return m.hexdigest() | python | def get_md5(self, filename):
with open(filename, 'rb') as f:
m = hashlib.md5(f.read())
return m.hexdigest() | [
"def",
"get_md5",
"(",
"self",
",",
"filename",
")",
":",
"with",
"open",
"(",
"filename",
",",
"'rb'",
")",
"as",
"f",
":",
"m",
"=",
"hashlib",
".",
"md5",
"(",
"f",
".",
"read",
"(",
")",
")",
"return",
"m",
".",
"hexdigest",
"(",
")"
] | Returns the md5 checksum of the provided file name. | [
"Returns",
"the",
"md5",
"checksum",
"of",
"the",
"provided",
"file",
"name",
"."
] | e2feb13a66552a388fbcfaaacdd504bba08d3c69 | https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/management/commands/publish.py#L313-L319 |
358 | datadesk/django-bakery | bakery/management/commands/publish.py | Command.get_multipart_md5 | def get_multipart_md5(self, filename, chunk_size=8 * 1024 * 1024):
"""
Returns the md5 checksum of the provided file name after breaking it into chunks.
This is done to mirror the method used by Amazon S3 after a multipart upload.
"""
# Loop through the file contents ...
md5s = []
with open(filename, 'rb') as fp:
while True:
# Break it into chunks
data = fp.read(chunk_size)
# Finish when there are no more
if not data:
break
            # Generate an md5 hash for each chunk
md5s.append(hashlib.md5(data))
# Combine the chunks
digests = b"".join(m.digest() for m in md5s)
# Generate a new hash using them
new_md5 = hashlib.md5(digests)
# Create the ETag as Amazon will
new_etag = '"%s-%s"' % (new_md5.hexdigest(), len(md5s))
# Trim it down and pass it back for comparison
return new_etag.strip('"').strip("'") | python | def get_multipart_md5(self, filename, chunk_size=8 * 1024 * 1024):
# Loop through the file contents ...
md5s = []
with open(filename, 'rb') as fp:
while True:
# Break it into chunks
data = fp.read(chunk_size)
# Finish when there are no more
if not data:
break
            # Generate an md5 hash for each chunk
md5s.append(hashlib.md5(data))
# Combine the chunks
digests = b"".join(m.digest() for m in md5s)
# Generate a new hash using them
new_md5 = hashlib.md5(digests)
# Create the ETag as Amazon will
new_etag = '"%s-%s"' % (new_md5.hexdigest(), len(md5s))
# Trim it down and pass it back for comparison
return new_etag.strip('"').strip("'") | [
"def",
"get_multipart_md5",
"(",
"self",
",",
"filename",
",",
"chunk_size",
"=",
"8",
"*",
"1024",
"*",
"1024",
")",
":",
"# Loop through the file contents ...",
"md5s",
"=",
"[",
"]",
"with",
"open",
"(",
"filename",
",",
"'rb'",
")",
"as",
"fp",
":",
"while",
"True",
":",
"# Break it into chunks",
"data",
"=",
"fp",
".",
"read",
"(",
"chunk_size",
")",
"# Finish when there are no more",
"if",
"not",
"data",
":",
"break",
"# Generate a md5 hash for each chunk",
"md5s",
".",
"append",
"(",
"hashlib",
".",
"md5",
"(",
"data",
")",
")",
"# Combine the chunks",
"digests",
"=",
"b\"\"",
".",
"join",
"(",
"m",
".",
"digest",
"(",
")",
"for",
"m",
"in",
"md5s",
")",
"# Generate a new hash using them",
"new_md5",
"=",
"hashlib",
".",
"md5",
"(",
"digests",
")",
"# Create the ETag as Amazon will",
"new_etag",
"=",
"'\"%s-%s\"'",
"%",
"(",
"new_md5",
".",
"hexdigest",
"(",
")",
",",
"len",
"(",
"md5s",
")",
")",
"# Trim it down and pass it back for comparison",
"return",
"new_etag",
".",
"strip",
"(",
"'\"'",
")",
".",
"strip",
"(",
"\"'\"",
")"
] | Returns the md5 checksum of the provided file name after breaking it into chunks.
This is done to mirror the method used by Amazon S3 after a multipart upload. | [
"Returns",
"the",
"md5",
"checksum",
"of",
"the",
"provided",
"file",
"name",
"after",
"breaking",
"it",
"into",
"chunks",
"."
] | e2feb13a66552a388fbcfaaacdd504bba08d3c69 | https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/management/commands/publish.py#L321-L349 |
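A hedged sketch of the multipart ETag scheme the method mirrors: S3 reports '<md5 of the concatenated part digests>-<part count>', so the comparison only works when the local chunk size matches the upload's part size (8 MB here, as in the default above; the trailing comment is indicative):

import hashlib

def multipart_etag(filename, chunk_size=8 * 1024 * 1024):
    md5s = []
    with open(filename, 'rb') as fp:
        while True:
            data = fp.read(chunk_size)
            if not data:
                break
            md5s.append(hashlib.md5(data))
    # Hash the concatenation of the per-part digests, then append the part count.
    combined = hashlib.md5(b"".join(m.digest() for m in md5s))
    return '%s-%d' % (combined.hexdigest(), len(md5s))

# A 20 MB file uploaded in 8 MB parts yields an ETag ending in '-3'.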
359 | datadesk/django-bakery | bakery/management/commands/publish.py | Command.compare_local_file | def compare_local_file(self, file_key):
"""
Compares a local version of a file with what's already published.
    If an update is needed, the file's key is added to self.update_list.
"""
# Where is the file?
file_path = os.path.join(self.build_dir, file_key)
# If we're in force_publish mode just add it
if self.force_publish:
self.update_list.append((file_key, file_path))
# And quit now
return
# Does it exist in our s3 object list?
if file_key in self.s3_obj_dict:
# Get the md5 stored in Amazon's header
s3_md5 = self.s3_obj_dict[file_key].get('ETag').strip('"').strip("'")
            # If there is a multipart ETag on S3, compare that to our local file after it's chunked up.
# We are presuming this file was uploaded in multiple parts.
if "-" in s3_md5:
local_md5 = self.get_multipart_md5(file_path)
            # Otherwise, do it straight for the whole file
else:
local_md5 = self.get_md5(file_path)
# If their md5 hexdigests match, do nothing
if s3_md5 == local_md5:
pass
# If they don't match, we want to add it
else:
logger.debug("{} has changed".format(file_key))
self.update_list.append((file_key, file_path))
# Remove the file from the s3 dict, we don't need it anymore
del self.s3_obj_dict[file_key]
# If the file doesn't exist, queue it for creation
else:
logger.debug("{} has been added".format(file_key))
self.update_list.append((file_key, file_path)) | python | def compare_local_file(self, file_key):
# Where is the file?
file_path = os.path.join(self.build_dir, file_key)
# If we're in force_publish mode just add it
if self.force_publish:
self.update_list.append((file_key, file_path))
# And quit now
return
# Does it exist in our s3 object list?
if file_key in self.s3_obj_dict:
# Get the md5 stored in Amazon's header
s3_md5 = self.s3_obj_dict[file_key].get('ETag').strip('"').strip("'")
            # If there is a multipart ETag on S3, compare that to our local file after it's chunked up.
# We are presuming this file was uploaded in multiple parts.
if "-" in s3_md5:
local_md5 = self.get_multipart_md5(file_path)
            # Otherwise, do it straight for the whole file
else:
local_md5 = self.get_md5(file_path)
# If their md5 hexdigests match, do nothing
if s3_md5 == local_md5:
pass
# If they don't match, we want to add it
else:
logger.debug("{} has changed".format(file_key))
self.update_list.append((file_key, file_path))
# Remove the file from the s3 dict, we don't need it anymore
del self.s3_obj_dict[file_key]
# If the file doesn't exist, queue it for creation
else:
logger.debug("{} has been added".format(file_key))
self.update_list.append((file_key, file_path)) | [
"def",
"compare_local_file",
"(",
"self",
",",
"file_key",
")",
":",
"# Where is the file?",
"file_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"build_dir",
",",
"file_key",
")",
"# If we're in force_publish mode just add it",
"if",
"self",
".",
"force_publish",
":",
"self",
".",
"update_list",
".",
"append",
"(",
"(",
"file_key",
",",
"file_path",
")",
")",
"# And quit now",
"return",
"# Does it exist in our s3 object list?",
"if",
"file_key",
"in",
"self",
".",
"s3_obj_dict",
":",
"# Get the md5 stored in Amazon's header",
"s3_md5",
"=",
"self",
".",
"s3_obj_dict",
"[",
"file_key",
"]",
".",
"get",
"(",
"'ETag'",
")",
".",
"strip",
"(",
"'\"'",
")",
".",
"strip",
"(",
"\"'\"",
")",
"# If there is a multipart ETag on S3, compare that to our local file after its chunked up.",
"# We are presuming this file was uploaded in multiple parts.",
"if",
"\"-\"",
"in",
"s3_md5",
":",
"local_md5",
"=",
"self",
".",
"get_multipart_md5",
"(",
"file_path",
")",
"# Other, do it straight for the whole file",
"else",
":",
"local_md5",
"=",
"self",
".",
"get_md5",
"(",
"file_path",
")",
"# If their md5 hexdigests match, do nothing",
"if",
"s3_md5",
"==",
"local_md5",
":",
"pass",
"# If they don't match, we want to add it",
"else",
":",
"logger",
".",
"debug",
"(",
"\"{} has changed\"",
".",
"format",
"(",
"file_key",
")",
")",
"self",
".",
"update_list",
".",
"append",
"(",
"(",
"file_key",
",",
"file_path",
")",
")",
"# Remove the file from the s3 dict, we don't need it anymore",
"del",
"self",
".",
"s3_obj_dict",
"[",
"file_key",
"]",
"# If the file doesn't exist, queue it for creation",
"else",
":",
"logger",
".",
"debug",
"(",
"\"{} has been added\"",
".",
"format",
"(",
"file_key",
")",
")",
"self",
".",
"update_list",
".",
"append",
"(",
"(",
"file_key",
",",
"file_path",
")",
")"
] | Compares a local version of a file with what's already published.
If an update is needed, the file's key is added to self.update_list. | [
"Compares",
"a",
"local",
"version",
"of",
"a",
"file",
"with",
"what",
"s",
"already",
"published",
"."
] | e2feb13a66552a388fbcfaaacdd504bba08d3c69 | https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/management/commands/publish.py#L351-L394 |
360 | datadesk/django-bakery | bakery/management/commands/publish.py | Command.upload_to_s3 | def upload_to_s3(self, key, filename):
"""
Set the content type and gzip headers if applicable
and upload the item to S3
"""
extra_args = {'ACL': self.acl}
# determine the mimetype of the file
guess = mimetypes.guess_type(filename)
content_type = guess[0]
encoding = guess[1]
if content_type:
extra_args['ContentType'] = content_type
# add the gzip headers, if necessary
if (self.gzip and content_type in self.gzip_content_types) or encoding == 'gzip':
extra_args['ContentEncoding'] = 'gzip'
# add the cache-control headers if necessary
if content_type in self.cache_control:
extra_args['CacheControl'] = ''.join((
'max-age=',
str(self.cache_control[content_type])
))
# access and write the contents from the file
if not self.dry_run:
logger.debug("Uploading %s" % filename)
if self.verbosity > 0:
self.stdout.write("Uploading %s" % filename)
s3_obj = self.s3_resource.Object(self.aws_bucket_name, key)
s3_obj.upload_file(filename, ExtraArgs=extra_args)
# Update counts
self.uploaded_files += 1
self.uploaded_file_list.append(filename) | python | def upload_to_s3(self, key, filename):
extra_args = {'ACL': self.acl}
# determine the mimetype of the file
guess = mimetypes.guess_type(filename)
content_type = guess[0]
encoding = guess[1]
if content_type:
extra_args['ContentType'] = content_type
# add the gzip headers, if necessary
if (self.gzip and content_type in self.gzip_content_types) or encoding == 'gzip':
extra_args['ContentEncoding'] = 'gzip'
# add the cache-control headers if necessary
if content_type in self.cache_control:
extra_args['CacheControl'] = ''.join((
'max-age=',
str(self.cache_control[content_type])
))
# access and write the contents from the file
if not self.dry_run:
logger.debug("Uploading %s" % filename)
if self.verbosity > 0:
self.stdout.write("Uploading %s" % filename)
s3_obj = self.s3_resource.Object(self.aws_bucket_name, key)
s3_obj.upload_file(filename, ExtraArgs=extra_args)
# Update counts
self.uploaded_files += 1
self.uploaded_file_list.append(filename) | [
"def",
"upload_to_s3",
"(",
"self",
",",
"key",
",",
"filename",
")",
":",
"extra_args",
"=",
"{",
"'ACL'",
":",
"self",
".",
"acl",
"}",
"# determine the mimetype of the file",
"guess",
"=",
"mimetypes",
".",
"guess_type",
"(",
"filename",
")",
"content_type",
"=",
"guess",
"[",
"0",
"]",
"encoding",
"=",
"guess",
"[",
"1",
"]",
"if",
"content_type",
":",
"extra_args",
"[",
"'ContentType'",
"]",
"=",
"content_type",
"# add the gzip headers, if necessary",
"if",
"(",
"self",
".",
"gzip",
"and",
"content_type",
"in",
"self",
".",
"gzip_content_types",
")",
"or",
"encoding",
"==",
"'gzip'",
":",
"extra_args",
"[",
"'ContentEncoding'",
"]",
"=",
"'gzip'",
"# add the cache-control headers if necessary",
"if",
"content_type",
"in",
"self",
".",
"cache_control",
":",
"extra_args",
"[",
"'CacheControl'",
"]",
"=",
"''",
".",
"join",
"(",
"(",
"'max-age='",
",",
"str",
"(",
"self",
".",
"cache_control",
"[",
"content_type",
"]",
")",
")",
")",
"# access and write the contents from the file",
"if",
"not",
"self",
".",
"dry_run",
":",
"logger",
".",
"debug",
"(",
"\"Uploading %s\"",
"%",
"filename",
")",
"if",
"self",
".",
"verbosity",
">",
"0",
":",
"self",
".",
"stdout",
".",
"write",
"(",
"\"Uploading %s\"",
"%",
"filename",
")",
"s3_obj",
"=",
"self",
".",
"s3_resource",
".",
"Object",
"(",
"self",
".",
"aws_bucket_name",
",",
"key",
")",
"s3_obj",
".",
"upload_file",
"(",
"filename",
",",
"ExtraArgs",
"=",
"extra_args",
")",
"# Update counts",
"self",
".",
"uploaded_files",
"+=",
"1",
"self",
".",
"uploaded_file_list",
".",
"append",
"(",
"filename",
")"
] | Set the content type and gzip headers if applicable
and upload the item to S3 | [
"Set",
"the",
"content",
"type",
"and",
"gzip",
"headers",
"if",
"applicable",
"and",
"upload",
"the",
"item",
"to",
"S3"
] | e2feb13a66552a388fbcfaaacdd504bba08d3c69 | https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/management/commands/publish.py#L405-L440 |
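A condensed sketch of the header-building logic above, pulled out of the command class (the bucket, file and gzip type list are placeholders; the boto3 call is the same Object.upload_file the method uses and assumes configured AWS credentials):

import mimetypes
import boto3

def build_extra_args(filename, acl='public-read', gzip_types=('text/html', 'text/css')):
    content_type, encoding = mimetypes.guess_type(filename)
    extra_args = {'ACL': acl}
    if content_type:
        extra_args['ContentType'] = content_type
    # Flag gzip either by configured content type or by the file's own encoding.
    if (content_type in gzip_types) or encoding == 'gzip':
        extra_args['ContentEncoding'] = 'gzip'
    return extra_args

s3 = boto3.resource('s3')
s3.Object('my-bucket', 'index.html').upload_file(
    'build/index.html', ExtraArgs=build_extra_args('build/index.html'))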
361 | datadesk/django-bakery | bakery/views/dates.py | BuildableYearArchiveView.build_year | def build_year(self, dt):
"""
Build the page for the provided year.
"""
self.year = str(dt.year)
logger.debug("Building %s" % self.year)
self.request = self.create_request(self.get_url())
target_path = self.get_build_path()
self.build_file(target_path, self.get_content()) | python | def build_year(self, dt):
self.year = str(dt.year)
logger.debug("Building %s" % self.year)
self.request = self.create_request(self.get_url())
target_path = self.get_build_path()
self.build_file(target_path, self.get_content()) | [
"def",
"build_year",
"(",
"self",
",",
"dt",
")",
":",
"self",
".",
"year",
"=",
"str",
"(",
"dt",
".",
"year",
")",
"logger",
".",
"debug",
"(",
"\"Building %s\"",
"%",
"self",
".",
"year",
")",
"self",
".",
"request",
"=",
"self",
".",
"create_request",
"(",
"self",
".",
"get_url",
"(",
")",
")",
"target_path",
"=",
"self",
".",
"get_build_path",
"(",
")",
"self",
".",
"build_file",
"(",
"target_path",
",",
"self",
".",
"get_content",
"(",
")",
")"
] | Build the page for the provided year. | [
"Build",
"the",
"page",
"for",
"the",
"provided",
"year",
"."
] | e2feb13a66552a388fbcfaaacdd504bba08d3c69 | https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/views/dates.py#L103-L111 |
362 | datadesk/django-bakery | bakery/views/dates.py | BuildableDayArchiveView.get_day | def get_day(self):
"""
Return the day from the database in the format expected by the URL.
"""
year = super(BuildableDayArchiveView, self).get_year()
month = super(BuildableDayArchiveView, self).get_month()
day = super(BuildableDayArchiveView, self).get_day()
fmt = self.get_day_format()
dt = date(int(year), int(month), int(day))
return dt.strftime(fmt) | python | def get_day(self):
year = super(BuildableDayArchiveView, self).get_year()
month = super(BuildableDayArchiveView, self).get_month()
day = super(BuildableDayArchiveView, self).get_day()
fmt = self.get_day_format()
dt = date(int(year), int(month), int(day))
return dt.strftime(fmt) | [
"def",
"get_day",
"(",
"self",
")",
":",
"year",
"=",
"super",
"(",
"BuildableDayArchiveView",
",",
"self",
")",
".",
"get_year",
"(",
")",
"month",
"=",
"super",
"(",
"BuildableDayArchiveView",
",",
"self",
")",
".",
"get_month",
"(",
")",
"day",
"=",
"super",
"(",
"BuildableDayArchiveView",
",",
"self",
")",
".",
"get_day",
"(",
")",
"fmt",
"=",
"self",
".",
"get_day_format",
"(",
")",
"dt",
"=",
"date",
"(",
"int",
"(",
"year",
")",
",",
"int",
"(",
"month",
")",
",",
"int",
"(",
"day",
")",
")",
"return",
"dt",
".",
"strftime",
"(",
"fmt",
")"
] | Return the day from the database in the format expected by the URL. | [
"Return",
"the",
"day",
"from",
"the",
"database",
"in",
"the",
"format",
"expected",
"by",
"the",
"URL",
"."
] | e2feb13a66552a388fbcfaaacdd504bba08d3c69 | https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/views/dates.py#L258-L267 |
363 | datadesk/django-bakery | bakery/views/dates.py | BuildableDayArchiveView.build_day | def build_day(self, dt):
"""
Build the page for the provided day.
"""
self.month = str(dt.month)
self.year = str(dt.year)
self.day = str(dt.day)
logger.debug("Building %s-%s-%s" % (self.year, self.month, self.day))
self.request = self.create_request(self.get_url())
path = self.get_build_path()
self.build_file(path, self.get_content()) | python | def build_day(self, dt):
self.month = str(dt.month)
self.year = str(dt.year)
self.day = str(dt.day)
logger.debug("Building %s-%s-%s" % (self.year, self.month, self.day))
self.request = self.create_request(self.get_url())
path = self.get_build_path()
self.build_file(path, self.get_content()) | [
"def",
"build_day",
"(",
"self",
",",
"dt",
")",
":",
"self",
".",
"month",
"=",
"str",
"(",
"dt",
".",
"month",
")",
"self",
".",
"year",
"=",
"str",
"(",
"dt",
".",
"year",
")",
"self",
".",
"day",
"=",
"str",
"(",
"dt",
".",
"day",
")",
"logger",
".",
"debug",
"(",
"\"Building %s-%s-%s\"",
"%",
"(",
"self",
".",
"year",
",",
"self",
".",
"month",
",",
"self",
".",
"day",
")",
")",
"self",
".",
"request",
"=",
"self",
".",
"create_request",
"(",
"self",
".",
"get_url",
"(",
")",
")",
"path",
"=",
"self",
".",
"get_build_path",
"(",
")",
"self",
".",
"build_file",
"(",
"path",
",",
"self",
".",
"get_content",
"(",
")",
")"
] | Build the page for the provided day. | [
"Build",
"the",
"page",
"for",
"the",
"provided",
"day",
"."
] | e2feb13a66552a388fbcfaaacdd504bba08d3c69 | https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/views/dates.py#L296-L306 |
364 | datadesk/django-bakery | bakery/management/commands/__init__.py | get_bucket_page | def get_bucket_page(page):
"""
    Returns all the keys in an S3 bucket paginator page.
"""
key_list = page.get('Contents', [])
logger.debug("Retrieving page with {} keys".format(
len(key_list),
))
return dict((k.get('Key'), k) for k in key_list) | python | def get_bucket_page(page):
key_list = page.get('Contents', [])
logger.debug("Retrieving page with {} keys".format(
len(key_list),
))
return dict((k.get('Key'), k) for k in key_list) | [
"def",
"get_bucket_page",
"(",
"page",
")",
":",
"key_list",
"=",
"page",
".",
"get",
"(",
"'Contents'",
",",
"[",
"]",
")",
"logger",
".",
"debug",
"(",
"\"Retrieving page with {} keys\"",
".",
"format",
"(",
"len",
"(",
"key_list",
")",
",",
")",
")",
"return",
"dict",
"(",
"(",
"k",
".",
"get",
"(",
"'Key'",
")",
",",
"k",
")",
"for",
"k",
"in",
"key_list",
")"
] | Returns all the keys in an S3 bucket paginator page. | [
"Returns",
"all",
"the",
"keys",
"in",
"a",
"s3",
"bucket",
"paginator",
"page",
"."
] | e2feb13a66552a388fbcfaaacdd504bba08d3c69 | https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/management/commands/__init__.py#L41-L49 |
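How get_bucket_page is typically driven, sketched with boto3's list_objects_v2 paginator; merging the per-page dicts yields the key-to-metadata mapping the publish command diffs against (the bucket name is a placeholder):

import boto3

s3_client = boto3.client('s3')
paginator = s3_client.get_paginator('list_objects_v2')

s3_obj_dict = {}
for page in paginator.paginate(Bucket='my-bucket'):
    # Each page lists up to 1,000 keys under 'Contents'.
    s3_obj_dict.update(get_bucket_page(page))

print(len(s3_obj_dict), 'objects in the bucket')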
365 | datadesk/django-bakery | bakery/management/commands/__init__.py | batch_delete_s3_objects | def batch_delete_s3_objects(
keys,
aws_bucket_name,
chunk_size=100,
s3_client=None
):
"""
Utility method that batch deletes objects in given bucket.
"""
if s3_client is None:
s3_client, s3_resource = get_s3_client()
key_chunks = []
for i in range(0, len(keys), chunk_size):
chunk = []
        for key in (list(keys)[i:i+chunk_size]):
chunk.append({'Key': key})
key_chunks.append(chunk)
for chunk in key_chunks:
s3_client.delete_objects(
Bucket=aws_bucket_name,
Delete={'Objects': chunk}
) | python | def batch_delete_s3_objects(
keys,
aws_bucket_name,
chunk_size=100,
s3_client=None
):
if s3_client is None:
s3_client, s3_resource = get_s3_client()
key_chunks = []
for i in range(0, len(keys), chunk_size):
chunk = []
        for key in (list(keys)[i:i+chunk_size]):
chunk.append({'Key': key})
key_chunks.append(chunk)
for chunk in key_chunks:
s3_client.delete_objects(
Bucket=aws_bucket_name,
Delete={'Objects': chunk}
) | [
"def",
"batch_delete_s3_objects",
"(",
"keys",
",",
"aws_bucket_name",
",",
"chunk_size",
"=",
"100",
",",
"s3_client",
"=",
"None",
")",
":",
"if",
"s3_client",
"is",
"None",
":",
"s3_client",
",",
"s3_resource",
"=",
"get_s3_client",
"(",
")",
"key_chunks",
"=",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"len",
"(",
"keys",
")",
",",
"chunk_size",
")",
":",
"chunk",
"=",
"[",
"]",
"for",
"key",
"in",
"(",
"list",
"(",
"keys",
")",
"[",
"i",
":",
"i",
"+",
"100",
"]",
")",
":",
"chunk",
".",
"append",
"(",
"{",
"'Key'",
":",
"key",
"}",
")",
"key_chunks",
".",
"append",
"(",
"chunk",
")",
"for",
"chunk",
"in",
"key_chunks",
":",
"s3_client",
".",
"delete_objects",
"(",
"Bucket",
"=",
"aws_bucket_name",
",",
"Delete",
"=",
"{",
"'Objects'",
":",
"chunk",
"}",
")"
] | Utility method that batch deletes objects in given bucket. | [
"Utility",
"method",
"that",
"batch",
"deletes",
"objects",
"in",
"given",
"bucket",
"."
] | e2feb13a66552a388fbcfaaacdd504bba08d3c69 | https://github.com/datadesk/django-bakery/blob/e2feb13a66552a388fbcfaaacdd504bba08d3c69/bakery/management/commands/__init__.py#L77-L99 |
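Usage is a single call on a list of keys; a sketch with placeholder names. S3's delete_objects accepts at most 1,000 keys per request, so keep chunk_size at or below that limit:

stale_keys = ['stale/a.html', 'stale/b.html', 'stale/c.html']
# Issues one delete_objects request per chunk of 100 keys.
batch_delete_s3_objects(stale_keys, 'my-bucket', chunk_size=100)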
366 | python-parsy/parsy | examples/proto3.py | is_present | def is_present(p):
"""
Given a parser or string, make a parser that returns
True if the parser matches, False otherwise
"""
return lexeme(p).optional().map(lambda v: False if v is None else True) | python | def is_present(p):
return lexeme(p).optional().map(lambda v: False if v is None else True) | [
"def",
"is_present",
"(",
"p",
")",
":",
"return",
"lexeme",
"(",
"p",
")",
".",
"optional",
"(",
")",
".",
"map",
"(",
"lambda",
"v",
":",
"False",
"if",
"v",
"is",
"None",
"else",
"True",
")"
] | Given a parser or string, make a parser that returns
True if the parser matches, False otherwise | [
"Given",
"a",
"parser",
"or",
"string",
"make",
"a",
"parser",
"that",
"returns",
"True",
"if",
"the",
"parser",
"matches",
"False",
"otherwise"
] | 04216ed25b6cdb389a36e16998592ba476237f86 | https://github.com/python-parsy/parsy/blob/04216ed25b6cdb389a36e16998592ba476237f86/examples/proto3.py#L49-L54 |
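A hedged sketch of is_present inside a tiny grammar, assuming a lexeme helper like the one the proto3 example defines (a parser followed by optional whitespace; the str handling here is an addition for self-containment):

from parsy import regex, string

whitespace = regex(r'\s*')

def lexeme(p):
    # Accept either a parser or a literal string, then eat trailing whitespace.
    if isinstance(p, str):
        p = string(p)
    return p << whitespace

def is_present(p):
    return lexeme(p).optional().map(lambda v: v is not None)

repeated = is_present('repeated')
print(repeated.parse_partial('repeated int32 pages = 4;')[0])  # True
print(repeated.parse_partial('int32 pages = 4;')[0])           # False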
367 | python-parsy/parsy | src/parsy/__init__.py | Parser.parse | def parse(self, stream):
"""Parse a string or list of tokens and return the result or raise a ParseError."""
(result, _) = (self << eof).parse_partial(stream)
return result | python | def parse(self, stream):
(result, _) = (self << eof).parse_partial(stream)
return result | [
"def",
"parse",
"(",
"self",
",",
"stream",
")",
":",
"(",
"result",
",",
"_",
")",
"=",
"(",
"self",
"<<",
"eof",
")",
".",
"parse_partial",
"(",
"stream",
")",
"return",
"result"
] | Parse a string or list of tokens and return the result or raise a ParseError. | [
"Parse",
"a",
"string",
"or",
"list",
"of",
"tokens",
"and",
"return",
"the",
"result",
"or",
"raise",
"a",
"ParseError",
"."
] | 04216ed25b6cdb389a36e16998592ba476237f86 | https://github.com/python-parsy/parsy/blob/04216ed25b6cdb389a36e16998592ba476237f86/src/parsy/__init__.py#L88-L91 |
368 | python-parsy/parsy | src/parsy/__init__.py | Parser.parse_partial | def parse_partial(self, stream):
"""
Parse the longest possible prefix of a given string.
Return a tuple of the result and the rest of the string,
or raise a ParseError.
"""
result = self(stream, 0)
if result.status:
return (result.value, stream[result.index:])
else:
raise ParseError(result.expected, stream, result.furthest) | python | def parse_partial(self, stream):
result = self(stream, 0)
if result.status:
return (result.value, stream[result.index:])
else:
raise ParseError(result.expected, stream, result.furthest) | [
"def",
"parse_partial",
"(",
"self",
",",
"stream",
")",
":",
"result",
"=",
"self",
"(",
"stream",
",",
"0",
")",
"if",
"result",
".",
"status",
":",
"return",
"(",
"result",
".",
"value",
",",
"stream",
"[",
"result",
".",
"index",
":",
"]",
")",
"else",
":",
"raise",
"ParseError",
"(",
"result",
".",
"expected",
",",
"stream",
",",
"result",
".",
"furthest",
")"
] | Parse the longest possible prefix of a given string.
Return a tuple of the result and the rest of the string,
or raise a ParseError. | [
"Parse",
"the",
"longest",
"possible",
"prefix",
"of",
"a",
"given",
"string",
".",
"Return",
"a",
"tuple",
"of",
"the",
"result",
"and",
"the",
"rest",
"of",
"the",
"string",
"or",
"raise",
"a",
"ParseError",
"."
] | 04216ed25b6cdb389a36e16998592ba476237f86 | https://github.com/python-parsy/parsy/blob/04216ed25b6cdb389a36e16998592ba476237f86/src/parsy/__init__.py#L93-L104 |
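The contract of the two entry points, illustrated: parse appends eof and demands full consumption, while parse_partial hands back the unconsumed remainder:

from parsy import regex

number = regex(r'[0-9]+').map(int)

print(number.parse('42'))             # 42
print(number.parse_partial('42abc'))  # (42, 'abc')
# number.parse('42abc') raises ParseError because 'abc' is left unconsumed.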
369 | scrapinghub/flatson | flatson/flatson.py | extract_key_values | def extract_key_values(array_value, separators=(';', ',', ':'), **kwargs):
"""Serialize array of objects with simple key-values
"""
items_sep, fields_sep, keys_sep = separators
return items_sep.join(fields_sep.join(keys_sep.join(x) for x in sorted(it.items()))
for it in array_value) | python | def extract_key_values(array_value, separators=(';', ',', ':'), **kwargs):
items_sep, fields_sep, keys_sep = separators
return items_sep.join(fields_sep.join(keys_sep.join(x) for x in sorted(it.items()))
for it in array_value) | [
"def",
"extract_key_values",
"(",
"array_value",
",",
"separators",
"=",
"(",
"';'",
",",
"','",
",",
"':'",
")",
",",
"*",
"*",
"kwargs",
")",
":",
"items_sep",
",",
"fields_sep",
",",
"keys_sep",
"=",
"separators",
"return",
"items_sep",
".",
"join",
"(",
"fields_sep",
".",
"join",
"(",
"keys_sep",
".",
"join",
"(",
"x",
")",
"for",
"x",
"in",
"sorted",
"(",
"it",
".",
"items",
"(",
")",
")",
")",
"for",
"it",
"in",
"array_value",
")"
] | Serialize array of objects with simple key-values | [
"Serialize",
"array",
"of",
"objects",
"with",
"simple",
"key",
"-",
"values"
] | dcbcea32ad6d4df1df85fff8366bce40438d469a | https://github.com/scrapinghub/flatson/blob/dcbcea32ad6d4df1df85fff8366bce40438d469a/flatson/flatson.py#L48-L53 |
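What the default separators produce for a two-item array; note that each item's fields are emitted in sorted key order:

value = [
    {'name': 'color', 'value': 'blue'},
    {'name': 'size', 'value': 'XL'},
]
print(extract_key_values(value))
# name:color,value:blue;name:size,value:XL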
370 | scrapinghub/flatson | flatson/flatson.py | Flatson.from_schemafile | def from_schemafile(cls, schemafile):
"""Create a Flatson instance from a schemafile
"""
with open(schemafile) as f:
return cls(json.load(f)) | python | def from_schemafile(cls, schemafile):
with open(schemafile) as f:
return cls(json.load(f)) | [
"def",
"from_schemafile",
"(",
"cls",
",",
"schemafile",
")",
":",
"with",
"open",
"(",
"schemafile",
")",
"as",
"f",
":",
"return",
"cls",
"(",
"json",
".",
"load",
"(",
"f",
")",
")"
] | Create a Flatson instance from a schemafile | [
"Create",
"a",
"Flatson",
"instance",
"from",
"a",
"schemafile"
] | dcbcea32ad6d4df1df85fff8366bce40438d469a | https://github.com/scrapinghub/flatson/blob/dcbcea32ad6d4df1df85fff8366bce40438d469a/flatson/flatson.py#L93-L97 |
371 | scrapinghub/flatson | flatson/flatson.py | Flatson.register_serialization_method | def register_serialization_method(self, name, serialize_func):
"""Register a custom serialization method that can be
used via schema configuration
"""
if name in self._default_serialization_methods:
            raise ValueError("Can't replace original %s serialization method" % name)
self._serialization_methods[name] = serialize_func | python | def register_serialization_method(self, name, serialize_func):
if name in self._default_serialization_methods:
            raise ValueError("Can't replace original %s serialization method" % name)
self._serialization_methods[name] = serialize_func | [
"def",
"register_serialization_method",
"(",
"self",
",",
"name",
",",
"serialize_func",
")",
":",
"if",
"name",
"in",
"self",
".",
"_default_serialization_methods",
":",
"raise",
"ValueError",
"(",
"\"Can't replace original %s serialization method\"",
")",
"self",
".",
"_serialization_methods",
"[",
"name",
"]",
"=",
"serialize_func"
] | Register a custom serialization method that can be
used via schema configuration | [
"Register",
"a",
"custom",
"serialization",
"method",
"that",
"can",
"be",
"used",
"via",
"schema",
"configuration"
] | dcbcea32ad6d4df1df85fff8366bce40438d469a | https://github.com/scrapinghub/flatson/blob/dcbcea32ad6d4df1df85fff8366bce40438d469a/flatson/flatson.py#L123-L129 |
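A sketch of registering and invoking a custom method; the flatson_serialize property and its method key follow the schema convention flatson's default serializers use, but the field name, method name and expected output here are illustrative:

schema = {
    'type': 'object',
    'properties': {
        'tags': {
            'type': 'array',
            'flatson_serialize': {'method': 'first_item'},
        },
    },
}

f = Flatson(schema)
# Custom serializers receive the raw value plus the config as keyword arguments.
f.register_serialization_method(
    'first_item', lambda value, **kwargs: value[0] if value else None)
print(f.flatten({'tags': ['a', 'b']}))  # ['a']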
372 | scrapinghub/flatson | flatson/flatson.py | Flatson.flatten | def flatten(self, obj):
"""Return a list with the field values
"""
return [self._serialize(f, obj) for f in self.fields] | python | def flatten(self, obj):
return [self._serialize(f, obj) for f in self.fields] | [
"def",
"flatten",
"(",
"self",
",",
"obj",
")",
":",
"return",
"[",
"self",
".",
"_serialize",
"(",
"f",
",",
"obj",
")",
"for",
"f",
"in",
"self",
".",
"fields",
"]"
] | Return a list with the field values | [
"Return",
"a",
"list",
"with",
"the",
"field",
"values"
] | dcbcea32ad6d4df1df85fff8366bce40438d469a | https://github.com/scrapinghub/flatson/blob/dcbcea32ad6d4df1df85fff8366bce40438d469a/flatson/flatson.py#L131-L134 |
373 | scrapinghub/flatson | flatson/flatson.py | Flatson.flatten_dict | def flatten_dict(self, obj):
"""Return an OrderedDict dict preserving order of keys in fieldnames
"""
return OrderedDict(zip(self.fieldnames, self.flatten(obj))) | python | def flatten_dict(self, obj):
return OrderedDict(zip(self.fieldnames, self.flatten(obj))) | [
"def",
"flatten_dict",
"(",
"self",
",",
"obj",
")",
":",
"return",
"OrderedDict",
"(",
"zip",
"(",
"self",
".",
"fieldnames",
",",
"self",
".",
"flatten",
"(",
"obj",
")",
")",
")"
] | Return an OrderedDict dict preserving order of keys in fieldnames | [
"Return",
"an",
"OrderedDict",
"dict",
"preserving",
"order",
"of",
"keys",
"in",
"fieldnames"
] | dcbcea32ad6d4df1df85fff8366bce40438d469a | https://github.com/scrapinghub/flatson/blob/dcbcea32ad6d4df1df85fff8366bce40438d469a/flatson/flatson.py#L136-L139 |
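End to end, flatten and flatten_dict differ only in shape; a sketch with a flat two-field schema (output order follows f.fieldnames, so the comments are indicative):

schema = {
    'type': 'object',
    'properties': {
        'name': {'type': 'string'},
        'age': {'type': 'number'},
    },
}

f = Flatson(schema)
record = {'name': 'Ada', 'age': 36}
print(f.flatten(record))       # e.g. [36, 'Ada'], one value per field
print(f.flatten_dict(record))  # OrderedDict pairing fieldnames with those values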
374 | gmr/queries | queries/pool.py | Connection.busy | def busy(self):
"""Return if the connection is currently executing a query or is locked
by a session that still exists.
:rtype: bool
"""
if self.handle.isexecuting():
return True
elif self.used_by is None:
return False
        return self.used_by() is not None | python | def busy(self):
if self.handle.isexecuting():
return True
elif self.used_by is None:
return False
        return self.used_by() is not None | [
"def",
"busy",
"(",
"self",
")",
":",
"if",
"self",
".",
"handle",
".",
"isexecuting",
"(",
")",
":",
"return",
"True",
"elif",
"self",
".",
"used_by",
"is",
"None",
":",
"return",
"False",
"return",
"not",
"self",
".",
"used_by",
"(",
")",
"is",
"None"
] | Return if the connection is currently executing a query or is locked
by a session that still exists.
:rtype: bool | [
"Return",
"if",
"the",
"connection",
"is",
"currently",
"executing",
"a",
"query",
"or",
"is",
"locked",
"by",
"a",
"session",
"that",
"still",
"exists",
"."
] | a68855013dc6aaf9ed7b6909a4701f8da8796a0a | https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L59-L70 |
375 | gmr/queries | queries/pool.py | Connection.free | def free(self):
"""Remove the lock on the connection if the connection is not active
:raises: ConnectionBusyError
"""
LOGGER.debug('Connection %s freeing', self.id)
if self.handle.isexecuting():
raise ConnectionBusyError(self)
with self._lock:
self.used_by = None
LOGGER.debug('Connection %s freed', self.id) | python | def free(self):
LOGGER.debug('Connection %s freeing', self.id)
if self.handle.isexecuting():
raise ConnectionBusyError(self)
with self._lock:
self.used_by = None
LOGGER.debug('Connection %s freed', self.id) | [
"def",
"free",
"(",
"self",
")",
":",
"LOGGER",
".",
"debug",
"(",
"'Connection %s freeing'",
",",
"self",
".",
"id",
")",
"if",
"self",
".",
"handle",
".",
"isexecuting",
"(",
")",
":",
"raise",
"ConnectionBusyError",
"(",
"self",
")",
"with",
"self",
".",
"_lock",
":",
"self",
".",
"used_by",
"=",
"None",
"LOGGER",
".",
"debug",
"(",
"'Connection %s freed'",
",",
"self",
".",
"id",
")"
] | Remove the lock on the connection if the connection is not active
:raises: ConnectionBusyError | [
"Remove",
"the",
"lock",
"on",
"the",
"connection",
"if",
"the",
"connection",
"is",
"not",
"active"
] | a68855013dc6aaf9ed7b6909a4701f8da8796a0a | https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L81-L92 |
376 | gmr/queries | queries/pool.py | Connection.lock | def lock(self, session):
"""Lock the connection, ensuring that it is not busy and storing
a weakref for the session.
:param queries.Session session: The session to lock the connection with
:raises: ConnectionBusyError
"""
if self.busy:
raise ConnectionBusyError(self)
with self._lock:
self.used_by = weakref.ref(session)
LOGGER.debug('Connection %s locked', self.id) | python | def lock(self, session):
if self.busy:
raise ConnectionBusyError(self)
with self._lock:
self.used_by = weakref.ref(session)
LOGGER.debug('Connection %s locked', self.id) | [
"def",
"lock",
"(",
"self",
",",
"session",
")",
":",
"if",
"self",
".",
"busy",
":",
"raise",
"ConnectionBusyError",
"(",
"self",
")",
"with",
"self",
".",
"_lock",
":",
"self",
".",
"used_by",
"=",
"weakref",
".",
"ref",
"(",
"session",
")",
"LOGGER",
".",
"debug",
"(",
"'Connection %s locked'",
",",
"self",
".",
"id",
")"
] | Lock the connection, ensuring that it is not busy and storing
a weakref for the session.
:param queries.Session session: The session to lock the connection with
:raises: ConnectionBusyError | [
"Lock",
"the",
"connection",
"ensuring",
"that",
"it",
"is",
"not",
"busy",
"and",
"storing",
"a",
"weakref",
"for",
"the",
"session",
"."
] | a68855013dc6aaf9ed7b6909a4701f8da8796a0a | https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L103-L115 |
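The weakref stored by lock() is what lets busy() report False once the owning session is garbage collected; a minimal standalone sketch of that pattern (the del relies on CPython's immediate refcount collection):

import weakref

class Session(object):  # stand-in for queries.Session
    pass

class Resource(object):
    def __init__(self):
        self.used_by = None

    def lock(self, session):
        # Hold only a weak reference so a dead session never pins the lock.
        self.used_by = weakref.ref(session)

    @property
    def busy(self):
        return self.used_by is not None and self.used_by() is not None

r = Resource()
s = Session()
r.lock(s)
print(r.busy)  # True
del s          # the weakref now dereferences to None
print(r.busy)  # False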
377 | gmr/queries | queries/pool.py | Pool.add | def add(self, connection):
"""Add a new connection to the pool
:param connection: The connection to add to the pool
:type connection: psycopg2.extensions.connection
:raises: PoolFullError
"""
if id(connection) in self.connections:
raise ValueError('Connection already exists in pool')
if len(self.connections) == self.max_size:
LOGGER.warning('Race condition found when adding new connection')
try:
connection.close()
except (psycopg2.Error, psycopg2.Warning) as error:
                LOGGER.error('Error closing the conn that cannot be used: %s',
error)
raise PoolFullError(self)
with self._lock:
self.connections[id(connection)] = Connection(connection)
LOGGER.debug('Pool %s added connection %s', self.id, id(connection)) | python | def add(self, connection):
if id(connection) in self.connections:
raise ValueError('Connection already exists in pool')
if len(self.connections) == self.max_size:
LOGGER.warning('Race condition found when adding new connection')
try:
connection.close()
except (psycopg2.Error, psycopg2.Warning) as error:
                LOGGER.error('Error closing the conn that cannot be used: %s',
error)
raise PoolFullError(self)
with self._lock:
self.connections[id(connection)] = Connection(connection)
LOGGER.debug('Pool %s added connection %s', self.id, id(connection)) | [
"def",
"add",
"(",
"self",
",",
"connection",
")",
":",
"if",
"id",
"(",
"connection",
")",
"in",
"self",
".",
"connections",
":",
"raise",
"ValueError",
"(",
"'Connection already exists in pool'",
")",
"if",
"len",
"(",
"self",
".",
"connections",
")",
"==",
"self",
".",
"max_size",
":",
"LOGGER",
".",
"warning",
"(",
"'Race condition found when adding new connection'",
")",
"try",
":",
"connection",
".",
"close",
"(",
")",
"except",
"(",
"psycopg2",
".",
"Error",
",",
"psycopg2",
".",
"Warning",
")",
"as",
"error",
":",
"LOGGER",
".",
"error",
"(",
"'Error closing the conn that cant be used: %s'",
",",
"error",
")",
"raise",
"PoolFullError",
"(",
"self",
")",
"with",
"self",
".",
"_lock",
":",
"self",
".",
"connections",
"[",
"id",
"(",
"connection",
")",
"]",
"=",
"Connection",
"(",
"connection",
")",
"LOGGER",
".",
"debug",
"(",
"'Pool %s added connection %s'",
",",
"self",
".",
"id",
",",
"id",
"(",
"connection",
")",
")"
] | Add a new connection to the pool
:param connection: The connection to add to the pool
:type connection: psycopg2.extensions.connection
:raises: PoolFullError | [
"Add",
"a",
"new",
"connection",
"to",
"the",
"pool"
] | a68855013dc6aaf9ed7b6909a4701f8da8796a0a | https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L154-L175 |
378 | gmr/queries | queries/pool.py | Pool.clean | def clean(self):
"""Clean the pool by removing any closed connections and if the pool's
    idle duration has exceeded its idle TTL, remove all connections.
"""
LOGGER.debug('Cleaning the pool')
for connection in [self.connections[k] for k in self.connections if
self.connections[k].closed]:
LOGGER.debug('Removing %s', connection.id)
self.remove(connection.handle)
if self.idle_duration > self.idle_ttl:
self.close()
LOGGER.debug('Pool %s cleaned', self.id) | python | def clean(self):
LOGGER.debug('Cleaning the pool')
for connection in [self.connections[k] for k in self.connections if
self.connections[k].closed]:
LOGGER.debug('Removing %s', connection.id)
self.remove(connection.handle)
if self.idle_duration > self.idle_ttl:
self.close()
LOGGER.debug('Pool %s cleaned', self.id) | [
"def",
"clean",
"(",
"self",
")",
":",
"LOGGER",
".",
"debug",
"(",
"'Cleaning the pool'",
")",
"for",
"connection",
"in",
"[",
"self",
".",
"connections",
"[",
"k",
"]",
"for",
"k",
"in",
"self",
".",
"connections",
"if",
"self",
".",
"connections",
"[",
"k",
"]",
".",
"closed",
"]",
":",
"LOGGER",
".",
"debug",
"(",
"'Removing %s'",
",",
"connection",
".",
"id",
")",
"self",
".",
"remove",
"(",
"connection",
".",
"handle",
")",
"if",
"self",
".",
"idle_duration",
">",
"self",
".",
"idle_ttl",
":",
"self",
".",
"close",
"(",
")",
"LOGGER",
".",
"debug",
"(",
"'Pool %s cleaned'",
",",
"self",
".",
"id",
")"
] | Clean the pool by removing any closed connections and if the pool's
idle duration has exceeded its idle TTL, remove all connections. | [
"Clean",
"the",
"pool",
"by",
"removing",
"any",
"closed",
"connections",
"and",
"if",
"the",
"pool",
"s",
"idle",
"has",
"exceeded",
"its",
"idle",
"TTL",
"remove",
"all",
"connections",
"."
] | a68855013dc6aaf9ed7b6909a4701f8da8796a0a | https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L187-L201 |
379 | gmr/queries | queries/pool.py | Pool.close | def close(self):
"""Close the pool by closing and removing all of the connections"""
for cid in list(self.connections.keys()):
self.remove(self.connections[cid].handle)
LOGGER.debug('Pool %s closed', self.id) | python | def close(self):
for cid in list(self.connections.keys()):
self.remove(self.connections[cid].handle)
LOGGER.debug('Pool %s closed', self.id) | [
"def",
"close",
"(",
"self",
")",
":",
"for",
"cid",
"in",
"list",
"(",
"self",
".",
"connections",
".",
"keys",
"(",
")",
")",
":",
"self",
".",
"remove",
"(",
"self",
".",
"connections",
"[",
"cid",
"]",
".",
"handle",
")",
"LOGGER",
".",
"debug",
"(",
"'Pool %s closed'",
",",
"self",
".",
"id",
")"
] | Close the pool by closing and removing all of the connections | [
"Close",
"the",
"pool",
"by",
"closing",
"and",
"removing",
"all",
"of",
"the",
"connections"
] | a68855013dc6aaf9ed7b6909a4701f8da8796a0a | https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L203-L207 |
380 | gmr/queries | queries/pool.py | Pool.free | def free(self, connection):
"""Free the connection from use by the session that was using it.
:param connection: The connection to free
:type connection: psycopg2.extensions.connection
:raises: ConnectionNotFoundError
"""
LOGGER.debug('Pool %s freeing connection %s', self.id, id(connection))
try:
self.connection_handle(connection).free()
except KeyError:
raise ConnectionNotFoundError(self.id, id(connection))
if self.idle_connections == list(self.connections.values()):
with self._lock:
self.idle_start = self.time_method()
LOGGER.debug('Pool %s freed connection %s', self.id, id(connection)) | python | def free(self, connection):
LOGGER.debug('Pool %s freeing connection %s', self.id, id(connection))
try:
self.connection_handle(connection).free()
except KeyError:
raise ConnectionNotFoundError(self.id, id(connection))
if self.idle_connections == list(self.connections.values()):
with self._lock:
self.idle_start = self.time_method()
LOGGER.debug('Pool %s freed connection %s', self.id, id(connection)) | [
"def",
"free",
"(",
"self",
",",
"connection",
")",
":",
"LOGGER",
".",
"debug",
"(",
"'Pool %s freeing connection %s'",
",",
"self",
".",
"id",
",",
"id",
"(",
"connection",
")",
")",
"try",
":",
"self",
".",
"connection_handle",
"(",
"connection",
")",
".",
"free",
"(",
")",
"except",
"KeyError",
":",
"raise",
"ConnectionNotFoundError",
"(",
"self",
".",
"id",
",",
"id",
"(",
"connection",
")",
")",
"if",
"self",
".",
"idle_connections",
"==",
"list",
"(",
"self",
".",
"connections",
".",
"values",
"(",
")",
")",
":",
"with",
"self",
".",
"_lock",
":",
"self",
".",
"idle_start",
"=",
"self",
".",
"time_method",
"(",
")",
"LOGGER",
".",
"debug",
"(",
"'Pool %s freed connection %s'",
",",
"self",
".",
"id",
",",
"id",
"(",
"connection",
")",
")"
] | Free the connection from use by the session that was using it.
:param connection: The connection to free
:type connection: psycopg2.extensions.connection
:raises: ConnectionNotFoundError | [
"Free",
"the",
"connection",
"from",
"use",
"by",
"the",
"session",
"that",
"was",
"using",
"it",
"."
] | a68855013dc6aaf9ed7b6909a4701f8da8796a0a | https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L237-L254 |
381 | gmr/queries | queries/pool.py | Pool.get | def get(self, session):
"""Return an idle connection and assign the session to the connection
:param queries.Session session: The session to assign
:rtype: psycopg2.extensions.connection
:raises: NoIdleConnectionsError
"""
idle = self.idle_connections
if idle:
connection = idle.pop(0)
connection.lock(session)
if self.idle_start:
with self._lock:
self.idle_start = None
return connection.handle
raise NoIdleConnectionsError(self.id) | python | def get(self, session):
idle = self.idle_connections
if idle:
connection = idle.pop(0)
connection.lock(session)
if self.idle_start:
with self._lock:
self.idle_start = None
return connection.handle
raise NoIdleConnectionsError(self.id) | [
"def",
"get",
"(",
"self",
",",
"session",
")",
":",
"idle",
"=",
"self",
".",
"idle_connections",
"if",
"idle",
":",
"connection",
"=",
"idle",
".",
"pop",
"(",
"0",
")",
"connection",
".",
"lock",
"(",
"session",
")",
"if",
"self",
".",
"idle_start",
":",
"with",
"self",
".",
"_lock",
":",
"self",
".",
"idle_start",
"=",
"None",
"return",
"connection",
".",
"handle",
"raise",
"NoIdleConnectionsError",
"(",
"self",
".",
"id",
")"
] | Return an idle connection and assign the session to the connection
:param queries.Session session: The session to assign
:rtype: psycopg2.extensions.connection
:raises: NoIdleConnectionsError | [
"Return",
"an",
"idle",
"connection",
"and",
"assign",
"the",
"session",
"to",
"the",
"connection"
] | a68855013dc6aaf9ed7b6909a4701f8da8796a0a | https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L256-L272 |
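The lock/free lifecycle Pool.get and Pool.free enforce, sketched directly (the DSN is a placeholder, the Pool constructor is assumed to accept the same defaults PoolManager.create passes, and the fake session stands in for queries.Session, which normally drives these calls):

import psycopg2

class FakeSession(object):
    pass

pool = Pool('example-pool')
conn = psycopg2.connect('dbname=test')  # placeholder DSN
pool.add(conn)

session = FakeSession()
handle = pool.get(session)   # locks the one idle connection to the session
# ... execute queries on handle ...
pool.free(handle)            # unlocks it; idle_start resets once all are idle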
382 | gmr/queries | queries/pool.py | Pool.idle_connections | def idle_connections(self):
"""Return a list of idle connections
:rtype: list
"""
return [c for c in self.connections.values()
if not c.busy and not c.closed] | python | def idle_connections(self):
return [c for c in self.connections.values()
if not c.busy and not c.closed] | [
"def",
"idle_connections",
"(",
"self",
")",
":",
"return",
"[",
"c",
"for",
"c",
"in",
"self",
".",
"connections",
".",
"values",
"(",
")",
"if",
"not",
"c",
".",
"busy",
"and",
"not",
"c",
".",
"closed",
"]"
] | Return a list of idle connections
:rtype: list | [
"Return",
"a",
"list",
"of",
"idle",
"connections"
] | a68855013dc6aaf9ed7b6909a4701f8da8796a0a | https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L284-L291 |
383 | gmr/queries | queries/pool.py | Pool.lock | def lock(self, connection, session):
"""Explicitly lock the specified connection
:type connection: psycopg2.extensions.connection
:param connection: The connection to lock
:param queries.Session session: The session to hold the lock
"""
cid = id(connection)
try:
self.connection_handle(connection).lock(session)
except KeyError:
raise ConnectionNotFoundError(self.id, cid)
else:
if self.idle_start:
with self._lock:
self.idle_start = None
LOGGER.debug('Pool %s locked connection %s', self.id, cid) | python | def lock(self, connection, session):
cid = id(connection)
try:
self.connection_handle(connection).lock(session)
except KeyError:
raise ConnectionNotFoundError(self.id, cid)
else:
if self.idle_start:
with self._lock:
self.idle_start = None
LOGGER.debug('Pool %s locked connection %s', self.id, cid) | [
"def",
"lock",
"(",
"self",
",",
"connection",
",",
"session",
")",
":",
"cid",
"=",
"id",
"(",
"connection",
")",
"try",
":",
"self",
".",
"connection_handle",
"(",
"connection",
")",
".",
"lock",
"(",
"session",
")",
"except",
"KeyError",
":",
"raise",
"ConnectionNotFoundError",
"(",
"self",
".",
"id",
",",
"cid",
")",
"else",
":",
"if",
"self",
".",
"idle_start",
":",
"with",
"self",
".",
"_lock",
":",
"self",
".",
"idle_start",
"=",
"None",
"LOGGER",
".",
"debug",
"(",
"'Pool %s locked connection %s'",
",",
"self",
".",
"id",
",",
"cid",
")"
] | Explicitly lock the specified connection
:type connection: psycopg2.extensions.connection
:param connection: The connection to lock
:param queries.Session session: The session to hold the lock | [
"Explicitly",
"lock",
"the",
"specified",
"connection"
] | a68855013dc6aaf9ed7b6909a4701f8da8796a0a | https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L314-L331 |
384 | gmr/queries | queries/pool.py | Pool.remove | def remove(self, connection):
"""Remove the connection from the pool
:param connection: The connection to remove
:type connection: psycopg2.extensions.connection
:raises: ConnectionNotFoundError
:raises: ConnectionBusyError
"""
cid = id(connection)
if cid not in self.connections:
raise ConnectionNotFoundError(self.id, cid)
self.connection_handle(connection).close()
with self._lock:
del self.connections[cid]
LOGGER.debug('Pool %s removed connection %s', self.id, cid) | python | def remove(self, connection):
cid = id(connection)
if cid not in self.connections:
raise ConnectionNotFoundError(self.id, cid)
self.connection_handle(connection).close()
with self._lock:
del self.connections[cid]
LOGGER.debug('Pool %s removed connection %s', self.id, cid) | [
"def",
"remove",
"(",
"self",
",",
"connection",
")",
":",
"cid",
"=",
"id",
"(",
"connection",
")",
"if",
"cid",
"not",
"in",
"self",
".",
"connections",
":",
"raise",
"ConnectionNotFoundError",
"(",
"self",
".",
"id",
",",
"cid",
")",
"self",
".",
"connection_handle",
"(",
"connection",
")",
".",
"close",
"(",
")",
"with",
"self",
".",
"_lock",
":",
"del",
"self",
".",
"connections",
"[",
"cid",
"]",
"LOGGER",
".",
"debug",
"(",
"'Pool %s removed connection %s'",
",",
"self",
".",
"id",
",",
"cid",
")"
] | Remove the connection from the pool
:param connection: The connection to remove
:type connection: psycopg2.extensions.connection
:raises: ConnectionNotFoundError
:raises: ConnectionBusyError | [
"Remove",
"the",
"connection",
"from",
"the",
"pool"
] | a68855013dc6aaf9ed7b6909a4701f8da8796a0a | https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L342-L357 |
385 | gmr/queries | queries/pool.py | Pool.report | def report(self):
"""Return a report about the pool state and configuration.
:rtype: dict
"""
return {
'connections': {
'busy': len(self.busy_connections),
'closed': len(self.closed_connections),
'executing': len(self.executing_connections),
'idle': len(self.idle_connections),
'locked': len(self.busy_connections)
},
'exceptions': sum([c.exceptions
for c in self.connections.values()]),
'executions': sum([c.executions
for c in self.connections.values()]),
'full': self.is_full,
'idle': {
'duration': self.idle_duration,
'ttl': self.idle_ttl
},
'max_size': self.max_size
} | python | def report(self):
return {
'connections': {
'busy': len(self.busy_connections),
'closed': len(self.closed_connections),
'executing': len(self.executing_connections),
'idle': len(self.idle_connections),
'locked': len(self.busy_connections)
},
'exceptions': sum([c.exceptions
for c in self.connections.values()]),
'executions': sum([c.executions
for c in self.connections.values()]),
'full': self.is_full,
'idle': {
'duration': self.idle_duration,
'ttl': self.idle_ttl
},
'max_size': self.max_size
} | [
"def",
"report",
"(",
"self",
")",
":",
"return",
"{",
"'connections'",
":",
"{",
"'busy'",
":",
"len",
"(",
"self",
".",
"busy_connections",
")",
",",
"'closed'",
":",
"len",
"(",
"self",
".",
"closed_connections",
")",
",",
"'executing'",
":",
"len",
"(",
"self",
".",
"executing_connections",
")",
",",
"'idle'",
":",
"len",
"(",
"self",
".",
"idle_connections",
")",
",",
"'locked'",
":",
"len",
"(",
"self",
".",
"busy_connections",
")",
"}",
",",
"'exceptions'",
":",
"sum",
"(",
"[",
"c",
".",
"exceptions",
"for",
"c",
"in",
"self",
".",
"connections",
".",
"values",
"(",
")",
"]",
")",
",",
"'executions'",
":",
"sum",
"(",
"[",
"c",
".",
"executions",
"for",
"c",
"in",
"self",
".",
"connections",
".",
"values",
"(",
")",
"]",
")",
",",
"'full'",
":",
"self",
".",
"is_full",
",",
"'idle'",
":",
"{",
"'duration'",
":",
"self",
".",
"idle_duration",
",",
"'ttl'",
":",
"self",
".",
"idle_ttl",
"}",
",",
"'max_size'",
":",
"self",
".",
"max_size",
"}"
] | Return a report about the pool state and configuration.
:rtype: dict | [
"Return",
"a",
"report",
"about",
"the",
"pool",
"state",
"and",
"configuration",
"."
] | a68855013dc6aaf9ed7b6909a4701f8da8796a0a | https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L359-L383 |
386 | gmr/queries | queries/pool.py | Pool.shutdown | def shutdown(self):
"""Forcefully shutdown the entire pool, closing all non-executing
connections.
:raises: ConnectionBusyError
"""
with self._lock:
for cid in list(self.connections.keys()):
if self.connections[cid].executing:
raise ConnectionBusyError(cid)
if self.connections[cid].locked:
self.connections[cid].free()
self.connections[cid].close()
del self.connections[cid] | python | def shutdown(self):
with self._lock:
for cid in list(self.connections.keys()):
if self.connections[cid].executing:
raise ConnectionBusyError(cid)
if self.connections[cid].locked:
self.connections[cid].free()
self.connections[cid].close()
del self.connections[cid] | [
"def",
"shutdown",
"(",
"self",
")",
":",
"with",
"self",
".",
"_lock",
":",
"for",
"cid",
"in",
"list",
"(",
"self",
".",
"connections",
".",
"keys",
"(",
")",
")",
":",
"if",
"self",
".",
"connections",
"[",
"cid",
"]",
".",
"executing",
":",
"raise",
"ConnectionBusyError",
"(",
"cid",
")",
"if",
"self",
".",
"connections",
"[",
"cid",
"]",
".",
"locked",
":",
"self",
".",
"connections",
"[",
"cid",
"]",
".",
"free",
"(",
")",
"self",
".",
"connections",
"[",
"cid",
"]",
".",
"close",
"(",
")",
"del",
"self",
".",
"connections",
"[",
"cid",
"]"
] | Forcefully shutdown the entire pool, closing all non-executing
connections.
:raises: ConnectionBusyError | [
"Forcefully",
"shutdown",
"the",
"entire",
"pool",
"closing",
"all",
"non",
"-",
"executing",
"connections",
"."
] | a68855013dc6aaf9ed7b6909a4701f8da8796a0a | https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L385-L399 |
387 | gmr/queries | queries/pool.py | PoolManager.add | def add(cls, pid, connection):
"""Add a new connection and session to a pool.
:param str pid: The pool id
:type connection: psycopg2.extensions.connection
:param connection: The connection to add to the pool
"""
with cls._lock:
cls._ensure_pool_exists(pid)
cls._pools[pid].add(connection) | python | def add(cls, pid, connection):
with cls._lock:
cls._ensure_pool_exists(pid)
cls._pools[pid].add(connection) | [
"def",
"add",
"(",
"cls",
",",
"pid",
",",
"connection",
")",
":",
"with",
"cls",
".",
"_lock",
":",
"cls",
".",
"_ensure_pool_exists",
"(",
"pid",
")",
"cls",
".",
"_pools",
"[",
"pid",
"]",
".",
"add",
"(",
"connection",
")"
] | Add a new connection and session to a pool.
:param str pid: The pool id
:type connection: psycopg2.extensions.connection
:param connection: The connection to add to the pool | [
"Add",
"a",
"new",
"connection",
"and",
"session",
"to",
"a",
"pool",
"."
] | a68855013dc6aaf9ed7b6909a4701f8da8796a0a | https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L454-L464 |
388 | gmr/queries | queries/pool.py | PoolManager.clean | def clean(cls, pid):
"""Clean the specified pool, removing any closed connections or
stale locks.
:param str pid: The pool id to clean
"""
with cls._lock:
try:
cls._ensure_pool_exists(pid)
except KeyError:
LOGGER.debug('Pool clean invoked against missing pool %s', pid)
return
cls._pools[pid].clean()
cls._maybe_remove_pool(pid) | python | def clean(cls, pid):
with cls._lock:
try:
cls._ensure_pool_exists(pid)
except KeyError:
LOGGER.debug('Pool clean invoked against missing pool %s', pid)
return
cls._pools[pid].clean()
cls._maybe_remove_pool(pid) | [
"def",
"clean",
"(",
"cls",
",",
"pid",
")",
":",
"with",
"cls",
".",
"_lock",
":",
"try",
":",
"cls",
".",
"_ensure_pool_exists",
"(",
"pid",
")",
"except",
"KeyError",
":",
"LOGGER",
".",
"debug",
"(",
"'Pool clean invoked against missing pool %s'",
",",
"pid",
")",
"return",
"cls",
".",
"_pools",
"[",
"pid",
"]",
".",
"clean",
"(",
")",
"cls",
".",
"_maybe_remove_pool",
"(",
"pid",
")"
] | Clean the specified pool, removing any closed connections or
stale locks.
:param str pid: The pool id to clean | [
"Clean",
"the",
"specified",
"pool",
"removing",
"any",
"closed",
"connections",
"or",
"stale",
"locks",
"."
] | a68855013dc6aaf9ed7b6909a4701f8da8796a0a | https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L467-L481 |
389 | gmr/queries | queries/pool.py | PoolManager.create | def create(cls, pid, idle_ttl=DEFAULT_IDLE_TTL, max_size=DEFAULT_MAX_SIZE,
time_method=None):
"""Create a new pool, with the ability to pass in values to override
the default idle TTL and the default maximum size.
A pool's idle TTL defines the amount of time that a pool can be open
without any sessions before it is removed.
A pool's max size defines the maximum number of connections that can
be added to the pool to prevent unbounded open connections.
:param str pid: The pool ID
:param int idle_ttl: Time in seconds for the idle TTL
:param int max_size: The maximum pool size
:param callable time_method: Override the use of :py:meth:`time.time`
method for time values.
:raises: KeyError
"""
if pid in cls._pools:
raise KeyError('Pool %s already exists' % pid)
with cls._lock:
LOGGER.debug("Creating Pool: %s (%i/%i)", pid, idle_ttl, max_size)
cls._pools[pid] = Pool(pid, idle_ttl, max_size, time_method) | python | def create(cls, pid, idle_ttl=DEFAULT_IDLE_TTL, max_size=DEFAULT_MAX_SIZE,
time_method=None):
if pid in cls._pools:
raise KeyError('Pool %s already exists' % pid)
with cls._lock:
LOGGER.debug("Creating Pool: %s (%i/%i)", pid, idle_ttl, max_size)
cls._pools[pid] = Pool(pid, idle_ttl, max_size, time_method) | [
"def",
"create",
"(",
"cls",
",",
"pid",
",",
"idle_ttl",
"=",
"DEFAULT_IDLE_TTL",
",",
"max_size",
"=",
"DEFAULT_MAX_SIZE",
",",
"time_method",
"=",
"None",
")",
":",
"if",
"pid",
"in",
"cls",
".",
"_pools",
":",
"raise",
"KeyError",
"(",
"'Pool %s already exists'",
"%",
"pid",
")",
"with",
"cls",
".",
"_lock",
":",
"LOGGER",
".",
"debug",
"(",
"\"Creating Pool: %s (%i/%i)\"",
",",
"pid",
",",
"idle_ttl",
",",
"max_size",
")",
"cls",
".",
"_pools",
"[",
"pid",
"]",
"=",
"Pool",
"(",
"pid",
",",
"idle_ttl",
",",
"max_size",
",",
"time_method",
")"
] | Create a new pool, with the ability to pass in values to override
the default idle TTL and the default maximum size.
A pool's idle TTL defines the amount of time that a pool can be open
without any sessions before it is removed.
A pool's max size defines the maximum number of connections that can
be added to the pool to prevent unbounded open connections.
:param str pid: The pool ID
:param int idle_ttl: Time in seconds for the idle TTL
:param int max_size: The maximum pool size
:param callable time_method: Override the use of :py:meth:`time.time`
method for time values.
:raises: KeyError | [
"Create",
"a",
"new",
"pool",
"with",
"the",
"ability",
"to",
"pass",
"in",
"values",
"to",
"override",
"the",
"default",
"idle",
"TTL",
"and",
"the",
"default",
"maximum",
"size",
"."
] | a68855013dc6aaf9ed7b6909a4701f8da8796a0a | https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L484-L507 |
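PoolManager keeps its registry at class level, so nothing is instantiated in this sketch; the pid can be any stable identifier such as the connection URI (all values are placeholders):

import psycopg2

class FakeSession(object):
    pass

pid = 'postgresql://localhost/test'
PoolManager.create(pid, idle_ttl=60, max_size=10)

conn = psycopg2.connect('dbname=test')
PoolManager.add(pid, conn)

session = FakeSession()
handle = PoolManager.get(pid, session)  # marked in-use until freed
PoolManager.free(pid, handle)
PoolManager.remove(pid)                 # closes all connections, drops the pool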
390 | gmr/queries | queries/pool.py | PoolManager.free | def free(cls, pid, connection):
"""Free a connection that was locked by a session
:param str pid: The pool ID
        :param connection: The connection to free
:type connection: psycopg2.extensions.connection
"""
with cls._lock:
LOGGER.debug('Freeing %s from pool %s', id(connection), pid)
cls._ensure_pool_exists(pid)
cls._pools[pid].free(connection) | python | def free(cls, pid, connection):
with cls._lock:
LOGGER.debug('Freeing %s from pool %s', id(connection), pid)
cls._ensure_pool_exists(pid)
cls._pools[pid].free(connection) | [
"def",
"free",
"(",
"cls",
",",
"pid",
",",
"connection",
")",
":",
"with",
"cls",
".",
"_lock",
":",
"LOGGER",
".",
"debug",
"(",
"'Freeing %s from pool %s'",
",",
"id",
"(",
"connection",
")",
",",
"pid",
")",
"cls",
".",
"_ensure_pool_exists",
"(",
"pid",
")",
"cls",
".",
"_pools",
"[",
"pid",
"]",
".",
"free",
"(",
"connection",
")"
] | Free a connection that was locked by a session
:param str pid: The pool ID
:param connection: The connection to free
:type connection: psycopg2.extensions.connection | [
"Free",
"a",
"connection",
"that",
"was",
"locked",
"by",
"a",
"session"
] | a68855013dc6aaf9ed7b6909a4701f8da8796a0a | https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L510-L521 |
391 | gmr/queries | queries/pool.py | PoolManager.get | def get(cls, pid, session):
"""Get an idle, unused connection from the pool. Once a connection has
been retrieved, it will be marked as in-use until it is freed.
:param str pid: The pool ID
:param queries.Session session: The session to assign to the connection
:rtype: psycopg2.extensions.connection
"""
with cls._lock:
cls._ensure_pool_exists(pid)
return cls._pools[pid].get(session) | python | def get(cls, pid, session):
with cls._lock:
cls._ensure_pool_exists(pid)
return cls._pools[pid].get(session) | [
"def",
"get",
"(",
"cls",
",",
"pid",
",",
"session",
")",
":",
"with",
"cls",
".",
"_lock",
":",
"cls",
".",
"_ensure_pool_exists",
"(",
"pid",
")",
"return",
"cls",
".",
"_pools",
"[",
"pid",
"]",
".",
"get",
"(",
"session",
")"
] | Get an idle, unused connection from the pool. Once a connection has
been retrieved, it will be marked as in-use until it is freed.
:param str pid: The pool ID
:param queries.Session session: The session to assign to the connection
:rtype: psycopg2.extensions.connection | [
"Get",
"an",
"idle",
"unused",
"connection",
"from",
"the",
"pool",
".",
"Once",
"a",
"connection",
"has",
"been",
"retrieved",
"it",
"will",
"be",
"marked",
"as",
"in",
"-",
"use",
"until",
"it",
"is",
"freed",
"."
] | a68855013dc6aaf9ed7b6909a4701f8da8796a0a | https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L524-L535 |
392 | gmr/queries | queries/pool.py | PoolManager.has_connection | def has_connection(cls, pid, connection):
"""Check to see if a pool has the specified connection
:param str pid: The pool ID
:param connection: The connection to check for
:type connection: psycopg2.extensions.connection
:rtype: bool
"""
with cls._lock:
cls._ensure_pool_exists(pid)
return connection in cls._pools[pid] | python | def has_connection(cls, pid, connection):
with cls._lock:
cls._ensure_pool_exists(pid)
return connection in cls._pools[pid] | [
"def",
"has_connection",
"(",
"cls",
",",
"pid",
",",
"connection",
")",
":",
"with",
"cls",
".",
"_lock",
":",
"cls",
".",
"_ensure_pool_exists",
"(",
"pid",
")",
"return",
"connection",
"in",
"cls",
".",
"_pools",
"[",
"pid",
"]"
] | Check to see if a pool has the specified connection
:param str pid: The pool ID
:param connection: The connection to check for
:type connection: psycopg2.extensions.connection
:rtype: bool | [
"Check",
"to",
"see",
"if",
"a",
"pool",
"has",
"the",
"specified",
"connection"
] | a68855013dc6aaf9ed7b6909a4701f8da8796a0a | https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L552-L563 |
393 | gmr/queries | queries/pool.py | PoolManager.has_idle_connection | def has_idle_connection(cls, pid):
"""Check to see if a pool has an idle connection
:param str pid: The pool ID
:rtype: bool
"""
with cls._lock:
cls._ensure_pool_exists(pid)
return bool(cls._pools[pid].idle_connections) | python | def has_idle_connection(cls, pid):
with cls._lock:
cls._ensure_pool_exists(pid)
return bool(cls._pools[pid].idle_connections) | [
"def",
"has_idle_connection",
"(",
"cls",
",",
"pid",
")",
":",
"with",
"cls",
".",
"_lock",
":",
"cls",
".",
"_ensure_pool_exists",
"(",
"pid",
")",
"return",
"bool",
"(",
"cls",
".",
"_pools",
"[",
"pid",
"]",
".",
"idle_connections",
")"
] | Check to see if a pool has an idle connection
:param str pid: The pool ID
:rtype: bool | [
"Check",
"to",
"see",
"if",
"a",
"pool",
"has",
"an",
"idle",
"connection"
] | a68855013dc6aaf9ed7b6909a4701f8da8796a0a | https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L566-L575 |
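A sketch around `has_idle_connection`; note the check is advisory, since another thread can claim the idle connection between the test and the `get` call (the helper name is hypothetical):

```python
from queries.pool import PoolManager

def get_if_idle(pid, session):
    """Return a pooled connection only when one is already idle."""
    if PoolManager.has_idle_connection(pid):
        return PoolManager.get(pid, session)
    return None
```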
394 | gmr/queries | queries/pool.py | PoolManager.is_full | def is_full(cls, pid):
"""Return a bool indicating if the specified pool is full
:param str pid: The pool id
:rtype: bool
"""
with cls._lock:
cls._ensure_pool_exists(pid)
return cls._pools[pid].is_full | python | def is_full(cls, pid):
with cls._lock:
cls._ensure_pool_exists(pid)
return cls._pools[pid].is_full | [
"def",
"is_full",
"(",
"cls",
",",
"pid",
")",
":",
"with",
"cls",
".",
"_lock",
":",
"cls",
".",
"_ensure_pool_exists",
"(",
"pid",
")",
"return",
"cls",
".",
"_pools",
"[",
"pid",
"]",
".",
"is_full"
] | Return a bool indicating if the specified pool is full
:param str pid: The pool id
:rtype: bool | [
"Return",
"a",
"bool",
"indicating",
"if",
"the",
"specified",
"pool",
"is",
"full"
] | a68855013dc6aaf9ed7b6909a4701f8da8796a0a | https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L578-L587 |
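A capacity-check sketch for `is_full`. It assumes a `PoolManager.add` classmethod (documented elsewhere in pool.py, not in these rows) registers a new connection, and uses a hypothetical DSN string:

```python
import psycopg2

from queries.pool import PoolManager

def grow_pool(pid, dsn):
    """Open and register one more connection unless the pool is at capacity."""
    if PoolManager.is_full(pid):
        return False
    PoolManager.add(pid, psycopg2.connect(dsn))  # add() assumed from pool.py
    return True
```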
395 | gmr/queries | queries/pool.py | PoolManager.lock | def lock(cls, pid, connection, session):
"""Explicitly lock the specified connection in the pool
:param str pid: The pool id
:type connection: psycopg2.extensions.connection
:param connection: The connection to add to the pool
:param queries.Session session: The session to hold the lock
"""
with cls._lock:
cls._ensure_pool_exists(pid)
cls._pools[pid].lock(connection, session) | python | def lock(cls, pid, connection, session):
with cls._lock:
cls._ensure_pool_exists(pid)
cls._pools[pid].lock(connection, session) | [
"def",
"lock",
"(",
"cls",
",",
"pid",
",",
"connection",
",",
"session",
")",
":",
"with",
"cls",
".",
"_lock",
":",
"cls",
".",
"_ensure_pool_exists",
"(",
"pid",
")",
"cls",
".",
"_pools",
"[",
"pid",
"]",
".",
"lock",
"(",
"connection",
",",
"session",
")"
] | Explicitly lock the specified connection in the pool
:param str pid: The pool id
:type connection: psycopg2.extensions.connection
:param connection: The connection to lock
:param queries.Session session: The session to hold the lock | [
"Explicitly",
"lock",
"the",
"specified",
"connection",
"in",
"the",
"pool"
] | a68855013dc6aaf9ed7b6909a4701f8da8796a0a | https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L590-L601 |
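A sketch for `lock`, which pins a connection to one session, e.g. around a transaction that must stay on a single backend connection; all three arguments are assumed from earlier setup and the wrapper name is hypothetical:

```python
from queries.pool import PoolManager

def pin_for_transaction(pid, connection, session):
    """Reserve `connection` for `session` so no other session is handed it."""
    PoolManager.lock(pid, connection, session)
```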
396 | gmr/queries | queries/pool.py | PoolManager.remove | def remove(cls, pid):
"""Remove a pool, closing all connections
:param str pid: The pool ID
"""
with cls._lock:
cls._ensure_pool_exists(pid)
cls._pools[pid].close()
del cls._pools[pid] | python | def remove(cls, pid):
with cls._lock:
cls._ensure_pool_exists(pid)
cls._pools[pid].close()
del cls._pools[pid] | [
"def",
"remove",
"(",
"cls",
",",
"pid",
")",
":",
"with",
"cls",
".",
"_lock",
":",
"cls",
".",
"_ensure_pool_exists",
"(",
"pid",
")",
"cls",
".",
"_pools",
"[",
"pid",
"]",
".",
"close",
"(",
")",
"del",
"cls",
".",
"_pools",
"[",
"pid",
"]"
] | Remove a pool, closing all connections
:param str pid: The pool ID | [
"Remove",
"a",
"pool",
"closing",
"all",
"connections"
] | a68855013dc6aaf9ed7b6909a4701f8da8796a0a | https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L604-L613 |
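A teardown sketch for `remove`, e.g. at application shutdown; `pid` is a hypothetical pool ID assumed to be registered:

```python
from queries.pool import PoolManager

def shutdown_pool(pid):
    """Close every connection in the pool and unregister the pool ID."""
    PoolManager.remove(pid)
```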
397 | gmr/queries | queries/pool.py | PoolManager.remove_connection | def remove_connection(cls, pid, connection):
"""Remove a connection from the pool, closing it if is open.
:param str pid: The pool ID
:param connection: The connection to remove
:type connection: psycopg2.extensions.connection
:raises: ConnectionNotFoundError
"""
cls._ensure_pool_exists(pid)
cls._pools[pid].remove(connection) | python | def remove_connection(cls, pid, connection):
cls._ensure_pool_exists(pid)
cls._pools[pid].remove(connection) | [
"def",
"remove_connection",
"(",
"cls",
",",
"pid",
",",
"connection",
")",
":",
"cls",
".",
"_ensure_pool_exists",
"(",
"pid",
")",
"cls",
".",
"_pools",
"[",
"pid",
"]",
".",
"remove",
"(",
"connection",
")"
] | Remove a connection from the pool, closing it if it is open.
:param str pid: The pool ID
:param connection: The connection to remove
:type connection: psycopg2.extensions.connection
:raises: ConnectionNotFoundError | [
"Remove",
"a",
"connection",
"from",
"the",
"pool",
"closing",
"it",
"if",
"is",
"open",
"."
] | a68855013dc6aaf9ed7b6909a4701f8da8796a0a | https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L616-L626 |
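A sketch for `remove_connection`, assuming `ConnectionNotFoundError` is importable from `queries.pool` as the `:raises:` line implies. Note that, unlike its siblings, the method body shown above does not take `cls._lock`:

```python
import logging

from queries.pool import ConnectionNotFoundError, PoolManager

LOGGER = logging.getLogger(__name__)

def evict(pid, connection):
    """Drop one (possibly broken) connection from pool `pid`."""
    try:
        PoolManager.remove_connection(pid, connection)
    except ConnectionNotFoundError:
        LOGGER.debug('Connection was not tracked by pool %s', pid)
```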
398 | gmr/queries | queries/pool.py | PoolManager.set_idle_ttl | def set_idle_ttl(cls, pid, ttl):
"""Set the idle TTL for a pool, after which it will be destroyed.
:param str pid: The pool id
:param int ttl: The TTL for an idle pool
"""
with cls._lock:
cls._ensure_pool_exists(pid)
cls._pools[pid].set_idle_ttl(ttl) | python | def set_idle_ttl(cls, pid, ttl):
with cls._lock:
cls._ensure_pool_exists(pid)
cls._pools[pid].set_idle_ttl(ttl) | [
"def",
"set_idle_ttl",
"(",
"cls",
",",
"pid",
",",
"ttl",
")",
":",
"with",
"cls",
".",
"_lock",
":",
"cls",
".",
"_ensure_pool_exists",
"(",
"pid",
")",
"cls",
".",
"_pools",
"[",
"pid",
"]",
".",
"set_idle_ttl",
"(",
"ttl",
")"
] | Set the idle TTL for a pool, after which it will be destroyed.
:param str pid: The pool id
:param int ttl: The TTL for an idle pool | [
"Set",
"the",
"idle",
"TTL",
"for",
"a",
"pool",
"after",
"which",
"it",
"will",
"be",
"destroyed",
"."
] | a68855013dc6aaf9ed7b6909a4701f8da8796a0a | https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L629-L638 |
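A one-liner sketch for `set_idle_ttl`; the five-minute default is an arbitrary example value and the wrapper is hypothetical:

```python
from queries.pool import PoolManager

def expire_when_idle(pid, seconds=300):
    """Let pool `pid` be destroyed after `seconds` of total disuse."""
    PoolManager.set_idle_ttl(pid, seconds)
```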
399 | gmr/queries | queries/pool.py | PoolManager.set_max_size | def set_max_size(cls, pid, size):
"""Set the maximum number of connections for the specified pool
:param str pid: The pool to set the size for
:param int size: The maximum number of connections
"""
with cls._lock:
cls._ensure_pool_exists(pid)
cls._pools[pid].set_max_size(size) | python | def set_max_size(cls, pid, size):
with cls._lock:
cls._ensure_pool_exists(pid)
cls._pools[pid].set_max_size(size) | [
"def",
"set_max_size",
"(",
"cls",
",",
"pid",
",",
"size",
")",
":",
"with",
"cls",
".",
"_lock",
":",
"cls",
".",
"_ensure_pool_exists",
"(",
"pid",
")",
"cls",
".",
"_pools",
"[",
"pid",
"]",
".",
"set_max_size",
"(",
"size",
")"
] | Set the maximum number of connections for the specified pool
:param str pid: The pool to set the size for
:param int size: The maximum number of connections | [
"Set",
"the",
"maximum",
"number",
"of",
"connections",
"for",
"the",
"specified",
"pool"
] | a68855013dc6aaf9ed7b6909a4701f8da8796a0a | https://github.com/gmr/queries/blob/a68855013dc6aaf9ed7b6909a4701f8da8796a0a/queries/pool.py#L641-L650 |
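And a matching sketch for `set_max_size`; the cap of 10 is an arbitrary example and the wrapper is hypothetical:

```python
from queries.pool import PoolManager

def cap_pool(pid, size=10):
    """Limit how many connections pool `pid` may hold at once."""
    PoolManager.set_max_size(pid, size)
```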