#!/usr/bin/env python
# coding:utf-8
import subprocess
import shutil
from xml.dom import minidom
import zipfile
import os
import re
import glob
import sys
import codecs
import random
import string
import time
# from elf_header import ELF
# from study.day14_jiagu.apk_auto_enforce.elf_header import ELF
# from study.day12.file_path_manager import FilePathManager
'''
Case 1: the target app does not define a custom Application
1. Decompile the target app:
   apktool.bat d Target.apk
2. Check whether the manifest declares a custom Application (assume here that it does not).
3. If there is no custom Application, copy the smali folder and merge it with the decompiled app's smali: cp -rf smali Target/
4. Modify the manifest so the Application is set to "org.hackcode.ProxyApplication".
5. Rebuild the target app.
6. Extract classes.dex from the rebuilt apk, compress it into TargetApk.zip, and rename the rebuilt app Target.modified.apk.
7. Merge the TuokeApk project's classes.dex with TargetApk.zip (the packing step) to produce a new classes.dex.
8. Replace classes.dex inside Target.modified.apk with the newly generated classes.dex.
9. Copy all files and folders under the TuokeApk project's lib directory into the target app.
10. Re-compress the modified app into a zip file.
11. Sign it.
'''
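# The numbered steps above are implemented in main() below; the external tools invoked
# are apktool.bat (decompile/rebuild), JiaguApk.jar (merging the shell dex with the
# packed TargetApk.zip) and sign/signapk.jar with the bundled test keys (signing).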
def un_zip(file_name):
"""解压二进制(zip,apk)文件"""
"""unzip zip file"""
zip_file = zipfile.ZipFile(file_name)
if os.path.isdir(file_name + "_files"):
pass
else:
os.mkdir(file_name + "_files")
for names in zip_file.namelist():
zip_file.extract(names, file_name + "_files/")
zip_file.close()
return file_name + "_files"
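# For example, un_zip('Target.modified.apk') extracts into 'Target.modified.apk_files'
# and returns that directory name, which is how main() below locates classes.dex.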
def zip_dir(dirname, zipfilename):
"""
压缩文件或文件夹到指定文件
"""
filelist = []
if os.path.isfile(dirname):
filelist.append(dirname)
else:
for root, dirs, files in os.walk(dirname):
for name in files:
filelist.append(os.path.join(root, name))
    zf = zipfile.ZipFile(zipfilename, "w", zipfile.ZIP_DEFLATED)
for tar in filelist:
arcname = tar[len(dirname):]
zf.write(tar, arcname)
zf.close()
def recompile_TuokeApk_Project(application_name):
    '''
    1. Rewrite: String appClassName = "com.targetapk.MyApplication";
    2. Rebuild the TuokeApk project
    '''
file_path = 'TuokeApk/app/src/main/java/org/hackcode/ProxyApplication.java'
new_file_path = 'TuokeApk/app/src/main/java/org/hackcode/ProxyApplication2.java'
file_in = open(file_path, "r", encoding="utf-8")
file_out = open(new_file_path, "w", encoding="utf-8")
while 1:
line = file_in.readline()
if not line:
            print('Finished reading TuokeApk/app/src/main/java/org/hackcode/ProxyApplication.java...')
break
pattern = re.compile(r'.*String.*appClassName.*=.*\".*\";.*')
if re.search(pattern, line):
print('[+] Find \"String appClassName = ...\", replace it with \"' + application_name + '\"')
file_out.write('\t\t\tString appClassName = \"' + application_name + '\";\n')
else:
file_out.write(line)
file_in.close()
file_out.close()
os.remove(file_path)
os.rename(new_file_path, file_path)
    # Rebuild the TuokeApk project
# os.chdir('TuokeApk/')
#
# out = subprocess.Popen(["gradle", "clean"], shell=True, stdout=subprocess.PIPE).stdout.read()
# out = subprocess.Popen(["gradle", "build"], shell=True, stdout=subprocess.PIPE).stdout.read()
# out = out.decode('utf-8') # bytes解码为str
# if out.find('BUILD SUCCESSFUL') < 0:
# print('Build error!')
# return False
# print('[+] Rebuild TuokeApk project successfully!')
# os.chdir('../')
return True
def remove_without_exception(item, type):
if type == 'd':
try:
shutil.rmtree(item)
except Exception as e:
pass
else:
try:
os.remove(item)
except Exception as e:
pass
def clean():
"""清除缓存"""
remove_without_exception('Target', 'd')
remove_without_exception('Target.modified.apk_files', 'd')
remove_without_exception('Target.apk', 'f')
remove_without_exception('Target.modified.apk', 'f')
remove_without_exception('Target.modified.2.apk', 'f')
remove_without_exception('classes.dex', 'f')
remove_without_exception('TargetApk.zip', 'f')
remove_without_exception('tuoke.dex', 'f')
os.chdir('TuokeApk/')
subprocess.Popen(["gradle", "clean"], shell=True, stdout=subprocess.PIPE).stdout.read()
os.chdir('../')
def genRandomStr(length):
chars = string.ascii_letters + string.digits
    return ''.join([random.choice(chars) for i in range(length)])  # characters may repeat in the result
def modify_ehdr_and_delete_shdr(apk_dir):
    '''
    Corrupt the ELF header (the e_shoff and e_shnum fields) and strip the section header table.
    TODO: allow specifying the target .so file
    '''
for root, dirs, files in os.walk(apk_dir):
for name in files:
filepath = root + os.path.sep + name
if filepath.endswith('libhackcodejiagu.so'):
print(' - Modifying {} ELF header...'.format(filepath))
dex = ELF(filepath)
file_size = os.path.getsize(filepath)
shdr_offset = dex.elf32_Ehdr.e_shoff
shdr_size = dex.elf32_Ehdr.e_shnum * dex.elf32_Ehdr.e_shentsize
                src_file = open(filepath, 'rb')
                dst_file = open(filepath + '2', 'wb')
                # 1. Corrupt the ELF header
                dst_file.write(src_file.read(32))  # keep everything before e_shoff
                src_file.read(4)
                dst_file.write(genRandomStr(4).encode('ascii'))  # overwrite e_shoff with random bytes
                dst_file.write(src_file.read(12))  # keep the bytes between e_shoff and e_shnum
                src_file.read(2)
                dst_file.write(genRandomStr(2).encode('ascii'))  # overwrite e_shnum with random bytes
                # 2. Strip the section header table
                # copy everything before the section header table
                dst_file.write(src_file.read(shdr_offset - 50))
                # skip over the section header table and copy everything after it
src_file.seek(shdr_offset + shdr_size, 0)
dst_file.write(src_file.read())
src_file.close()
dst_file.close()
shutil.move(filepath + '2', filepath)
def main(filepath=None):
clean()
if filepath:
input_filename = filepath
else:
        # command-line usage: python jiagu.py xxx.apk
input_filename = sys.argv[1]
# pass
# input_filename = "fragmentation.apk"
    # copy the apk that needs to be packed
shutil.copyfile(input_filename, 'Target.apk')
    # Step 1: decompile the target app
out = subprocess.Popen('apktool.bat d Target.apk', stdout=subprocess.PIPE).stdout.read()
print("type(out):{}".format(type(out)))
print(out)
    # decode the bytes to str (default encoding)
out = out.decode('utf-8')
# out = out.decode('gb2312')
print("type(out):{}".format(type(out)))
print(out)
    if out.find('error') != -1 or out.find('exception') != -1:
print('[Error] apktool decompiled error!')
return
print('[+] Apktool decompiled Target.apk successfully!')
    # Step 2: check whether the manifest declares a custom Application
doc = minidom.parse('Target/AndroidManifest.xml')
root = doc.documentElement
application_node = root.getElementsByTagName('application')[0]
applicationName = application_node.getAttribute('android:name')
packageName = root.getAttribute('package')
if applicationName:
if not applicationName.startswith(packageName) and applicationName.startswith('.'):
applicationName = packageName + applicationName
print('[+] Target app\'s Application: {}'.format(applicationName))
        # Step 3: point the appClassName variable in the TuokeApk project's ProxyApplication at the target app's Application class
recompile_TuokeApk_Project(applicationName)
else:
print('[+] Target.apk has no self-defined Application!')
applicationName = 'com.targetapk.MyApplication'
recompile_TuokeApk_Project(applicationName)
    # Step 3: copy the smali folder and merge it with the decompiled app's smali
print('[+] Copy smali folder into Target folder...')
out = subprocess.Popen('cp -rf smali Target/', stdout=subprocess.PIPE).stdout.read()
    # Step 4: modify the manifest so the custom Application is "org.hackcode.ProxyApplication"
print('[+] Modified AndroidManifest.xml...')
application_node.setAttribute('android:name', 'org.hackcode.ProxyApplication')
# file_handle = codecs.open('Target/AndroidManifest.xml', 'w', 'utf-8')
# root.writexml(file_handle)
# file_handle.close()
with open('Target/AndroidManifest.xml', "w", encoding='UTF-8') as f:
doc.writexml(f, encoding='UTF-8')
    # Step 5: rebuild the target app
out = subprocess.Popen('apktool.bat b Target', stdout=subprocess.PIPE).stdout.read()
out = out.decode('utf-8')
    if out.find('error') != -1 or out.find('exception') != -1:
print('[Error] apktool recompiled error!')
return
print('[+] Apktool recompiled Target successfully!')
    # Step 6: rename the rebuilt app Target.modified.apk, extract its classes.dex and compress it into TargetApk.zip
print('[+] Rename target app: \"Target.modified.apk\"')
shutil.copyfile('Target/dist/Target.apk', 'Target.modified.apk')
extracted_dir = un_zip('Target.modified.apk')
    print('[+] Extracted classes.dex from Target.modified.apk into TargetApk.zip')
shutil.copyfile(extracted_dir + '/classes.dex', 'classes.dex')
    # write classes.dex into TargetApk.zip
f = zipfile.ZipFile('TargetApk.zip', 'w', zipfile.ZIP_DEFLATED)
f.write('classes.dex')
f.close()
    os.remove('classes.dex')  # remove the temporary classes.dex
    # Step 7: merge the TuokeApk project's classes.dex with TargetApk.zip (the packing step) to produce a new classes.dex
shutil.copyfile('TuokeApk/app/build/intermediates/transforms/dex/release/folders/1000/1f/main/classes.dex',
'tuoke.dex')
subprocess.Popen('java -jar JiaguApk.jar tuoke.dex TargetApk.zip', stdout=subprocess.PIPE).stdout.read()
    # Step 8: replace classes.dex inside Target.modified.apk with the newly generated classes.dex
print('[+] Replace \"%s\" with \"classes.dex\"' % (extracted_dir + '/classes.dex',))
shutil.copyfile('classes.dex', extracted_dir + '/classes.dex')
    # Step 9: copy all files and folders from TuokeApk's native lib output into the target app
print('[+] Copying TuokeApk/app/build/intermediates/ndk/release/lib/...')
if not os.path.exists(extracted_dir + '/lib/'):
os.mkdir(extracted_dir + '/lib/')
for item in os.listdir('TuokeApk/app/build/intermediates/ndk/release/lib/'):
if not os.path.exists(extracted_dir + '/lib/' + item):
shutil.copytree('TuokeApk/app/build/intermediates/ndk/release/lib/' + item,
extracted_dir + '/lib/' + item)
else:
shutil.copyfile('TuokeApk/app/build/intermediates/ndk/release/lib/' + item + '/libhackcodejiagu.so',
extracted_dir + '/lib/' + item + '/libhackcodejiagu.so')
else:
for item in os.listdir(extracted_dir + '/lib/'):
shutil.copyfile('TuokeApk/app/build/intermediates/ndk/release/lib/' + item + '/libhackcodejiagu.so',
extracted_dir + '/lib/' + item + '/libhackcodejiagu.so')
    # Corrupt the .so file's ELF header and strip its section header table (disabled)
# modify_ehdr_and_delete_shdr(extracted_dir)
    # Step 10: re-compress the modified app into a zip file
print('[+] Compress %s folder into Target.modified.2.apk' % extracted_dir)
zip_dir(extracted_dir, 'Target.modified.2.apk')
    # Step 11: sign the apk
    print('[+] Signing...')
output_filename = input_filename[:input_filename.rfind('apk')] + 'signed.apk'
out = subprocess.Popen(
'java -jar sign/signapk.jar sign/testkey.x509.pem sign/testkey.pk8 Target.modified.2.apk ' + output_filename,
stdout=subprocess.PIPE).stdout.read()
clean()
if __name__ == '__main__':
"""
命令行参数: python jiagu.py xxx.apk
"""
start = time.time()
main()
end = time.time()
print("Total time running %s seconds" % (str(end - start)))
|
import { defineMessages } from 'react-intl'
export default defineMessages({
'schema.housing.title': {
id: 'schema.housing.title',
defaultMessage: 'Title'
},
'schema.housing.examples': {
id: 'schema.housing.examples',
defaultMessage: 'Examples'
},
'schema.housing.category': {
id: 'schema.housing.category',
defaultMessage: 'Category'
},
'schema.housing.description': {
id: 'schema.housing.description',
defaultMessage: 'Description'
},
'schema.housing.priceInETH': {
id: 'schema.housing.priceInETH',
defaultMessage: 'Price in ETH'
},
'schema.housing.selectPhotos': {
id: 'schema.housing.selectPhotos',
defaultMessage: 'Select photos'
},
'schema.housing.aptsHousingForRent': {
id: 'schema.housing.aptsHousingForRent',
defaultMessage: 'Apts/Housing for Rent'
},
'schema.housing.officeCommercial': {
id: 'schema.housing.officeCommercial',
defaultMessage: 'Office & Commercial'
},
'schema.housing.other': {
id: 'schema.housing.other',
defaultMessage: 'Other'
},
'schema.housing.parkingStorage': {
id: 'schema.housing.parkingStorage',
defaultMessage: 'Parking & Storage'
},
'schema.housing.realEstate': {
id: 'schema.housing.realEstate',
defaultMessage: 'Real Estate'
},
'schema.housing.roomShares': {
id: 'schema.housing.roomShares',
defaultMessage: 'Room Shares'
},
'schema.housing.subletsTemporary': {
id: 'schema.housing.subletsTemporary',
defaultMessage: 'Sublets & Temporary'
},
'schema.housing.vacationRentals': {
id: 'schema.housing.vacationRentals',
defaultMessage: 'Vacation Rentals'
},
'schema.housing.messageBuyer': {
id: 'schema.housing.messageBuyer',
defaultMessage: 'Message the buyer with any relevant details'
},
'schema.housing.prepareProperty': {
id: 'schema.housing.prepareProperty',
defaultMessage: 'Make sure the property is clean and ready'
},
'schema.housing.rateBuyer': {
id: 'schema.housing.rateBuyer',
defaultMessage: 'Leave a review of the buyer'
},
'schema.housing.sellerSteps': {
id: 'schema.housing.sellerSteps',
defaultMessage: 'Fulfillment Checklist'
}
})
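// A minimal consumption sketch (not part of this module). It assumes a react-intl
// version that provides the useIntl hook (with older versions, injectIntl and
// props.intl work the same way); the component name and import path are hypothetical.
//
// import { useIntl } from 'react-intl'
// import messages from './housing-messages'
//
// const HousingTitleLabel = () => {
//   const intl = useIntl()
//   return <label>{intl.formatMessage(messages['schema.housing.title'])}</label>
// }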
|
angular.module('ui.bootstrap.datetimepicker', ["ui.bootstrap.dateparser", "ui.bootstrap.datepicker", "ui.bootstrap.timepicker"])
.directive('datepickerPopup', function () {
return {
restrict: 'EAC',
require: 'ngModel',
link: function (scope, element, attr, controller) {
//remove the default formatter from the input directive to prevent conflict
controller.$formatters.shift();
}
}
})
.directive('datetimepicker', [
function () {
function versionCheck(){
return (angular.version.major === 1 && (angular.version.minor > 4 || (angular.version.minor === 4 && angular.version.dot >= 4)));
}
if (!versionCheck()) {
return {
restrict: 'EA',
template: "<div class=\"alert alert-danger\">Angular 1.4.4 or above is required for datetimepicker to work correctly</div>"
};
}
return {
restrict: 'EA',
require: 'ngModel',
scope: {
ngModel: '=',
ngChange: '&',
dayFormat: "=",
monthFormat: "=",
yearFormat: "=",
minTime: "=",
maxTime: "=",
dayHeaderFormat: "=",
dayTitleFormat: "=",
monthTitleFormat: "=",
yearRange: "=",
showButtonBar: "=",
dateOptions: "=?",
dateDisabled: "&",
dateNgClick: "&",
hourStep: "=",
dateOpened: "=",
minuteStep: "=",
showMeridian: "=",
meredians: "=",
mousewheel: "=",
readonlyTime: "=",
readonlyDate: "=",
disabledDate: "=",
hiddenTime: "=",
hiddenDate: "="
},
template: function (elem, attrs) {
function dashCase(name) {
return name.replace(/[A-Z]/g, function (letter, pos) {
return (pos ? '-' : '') + letter.toLowerCase();
});
}
function createAttr(innerAttr, dateTimeAttrOpt) {
var dateTimeAttr = angular.isDefined(dateTimeAttrOpt) ? dateTimeAttrOpt : innerAttr;
if (attrs[dateTimeAttr]) {
return dashCase(innerAttr) + "=\"" + dateTimeAttr + "\" ";
} else {
return '';
}
}
function createFuncAttr(innerAttr, funcArgs, dateTimeAttrOpt, defaultImpl) {
var dateTimeAttr = angular.isDefined(dateTimeAttrOpt) ? dateTimeAttrOpt : innerAttr;
if (attrs[dateTimeAttr]) {
return dashCase(innerAttr) + "=\"" + dateTimeAttr + "({" + funcArgs + "})\" ";
} else {
return angular.isDefined(defaultImpl) ? dashCase(innerAttr) + "=\"" + defaultImpl + "\"" : "";
}
}
function createEvalAttr(innerAttr, dateTimeAttrOpt) {
var dateTimeAttr = angular.isDefined(dateTimeAttrOpt) ? dateTimeAttrOpt : innerAttr;
if (attrs[dateTimeAttr]) {
return dashCase(innerAttr) + "=\"" + attrs[dateTimeAttr] + "\" ";
} else {
return dashCase(innerAttr) + " ";
}
}
function createAttrConcat(previousAttrs, attr) {
return previousAttrs + createAttr.apply(null, attr)
}
var dateTmpl = "<div class=\"datetimepicker-wrapper\">" +
"<input class=\"form-control\" type=\"text\" " +
"name=\"datepicker\"" +
"ng-change=\"date_change($event)\" " +
"is-open=\"innerDateOpened\" " +
"datepicker-options=\"dateOptions\" " +
"uib-datepicker-popup=\"{{dateFormat}}\"" +
"ng-model=\"ngModel\" " + [
["dayFormat"],
["monthFormat"],
["yearFormat"],
["dayHeaderFormat"],
["dayTitleFormat"],
["monthTitleFormat"],
["yearRange"],
["showButtonBar"],
["ngHide", "hiddenDate"],
["ngReadonly", "readonlyDate"],
["ngDisabled", "disabledDate"]
].reduce(createAttrConcat, '') +
createFuncAttr("ngClick",
"$event: $event, opened: opened",
"dateNgClick",
"open($event)") +
createEvalAttr("currentText", "currentText") +
createEvalAttr("clearText", "clearText") +
createEvalAttr("datepickerAppendToBody", "datepickerAppendToBody") +
createEvalAttr("closeText", "closeText") +
createEvalAttr("placeholder", "placeholder") +
"/>\n" +
"</div>\n";
var timeTmpl = "<div class=\"datetimepicker-wrapper\" name=\"timepicker\" ng-model=\"time\" ng-change=\"time_change()\" style=\"display:inline-block\">\n" +
"<div uib-timepicker min=\"minDate\" max=\"maxDate\" " + [
["hourStep"],
["minuteStep"],
["showMeridian"],
["meredians"],
["mousewheel"],
["ngHide", "hiddenTime"],
["ngDisabled", "readonlyTime"]
].reduce(createAttrConcat, '') +
createEvalAttr("showSpinners", "showSpinners") +
"></div>\n" +
"</div>";
// form is isolated so the directive is registered as one component in the parent form (not date and time)
var tmpl = "<ng-form name=\"datetimepickerForm\" isolate-form>" + dateTmpl + timeTmpl + "</ng-form>";
return tmpl;
},
controller: ['$scope', '$attrs',
function ($scope, $attrs) {
$scope.date_change = function () {
// If we changed the date only, set the time (h,m) on it.
// This is important in case the previous date was null.
// This solves the issue when the user set a date and time, cleared the date, and chose another date,
// and then, the time was cleared too - which is unexpected
var time = $scope.time;
if ($scope.ngModel && $scope.time) { // if ngModel is null, that's because the user cleared the date field
$scope.ngModel.setHours(time.getHours(), time.getMinutes(), 0, 0);
$scope.ngModel = new Date($scope.ngModel); // By default, ngModel watches the model by reference, not value. This is important to know when binding inputs to models that are objects (e.g. Date) (from: https://docs.angularjs.org/api/ng/directive/ngModel)
}
};
$scope.time_change = function () {
if ($scope.ngModel && $scope.time) {
$scope.ngModel.setHours($scope.time.getHours(), $scope.time.getMinutes(), 0, 0);
$scope.ngModel = new Date($scope.ngModel); // By default, ngModel watches the model by reference, not value. This is important to know when binding inputs to models that are objects (e.g. Date) (from: https://docs.angularjs.org/api/ng/directive/ngModel)
} // else the time is invalid, keep the current Date value in datepicker
};
$scope.open = function ($event) {
$event.preventDefault();
$event.stopPropagation();
$scope.innerDateOpened = true;
};
$attrs.$observe('dateFormat', function(newDateFormat, oldValue) {
$scope.dateFormat = newDateFormat;
});
$scope.dateOptions = angular.isDefined($scope.dateOptions) ? $scope.dateOptions : {};
$scope.dateOptions.dateDisabled = $scope.dateDisabled;
}
],
link: function (scope, element, attrs, ctrl) {
var updateMinTime = function() {
if (!scope.ngModel) {
return;
}
if (scope.minTime) {
scope.minDate = new Date(scope.ngModel.getFullYear(),
scope.ngModel.getMonth(),
scope.ngModel.getDate(),
scope.minTime.getHours(),
scope.minTime.getMinutes(),
0);
if (scope.dateOptions.minDate && scope.dateOptions.minDate > scope.minDate) {
scope.minDate = scope.dateOptions.minDate;
}
} else {
scope.minDate = scope.dateOptions.minDate;
}
};
var updateMaxTime = function() {
if (!scope.ngModel) {
return;
}
if (scope.maxTime) {
scope.maxDate = new Date(scope.ngModel.getFullYear(),
scope.ngModel.getMonth(),
scope.ngModel.getDate(),
scope.maxTime.getHours(),
scope.maxTime.getMinutes(),
0);
if (scope.dateOptions.maxDate && scope.dateOptions.maxDate < scope.maxDate) {
scope.maxDate = scope.dateOptions.maxDate;
}
} else {
scope.maxDate = scope.dateOptions.maxDate;
}
};
var firstTimeAssign = true;
scope.$watch(function () {
return scope.ngModel;
}, function (newTime) {
if (scope.ngModel && !(scope.ngModel instanceof Date)) {
// convert from ISO format to Date
scope.ngModel = new Date(scope.ngModel);
}
var timeElement = element[0].querySelector('[name=timepicker]');
// if a time element is focused, updating its model will cause hours/minutes to be formatted by padding with leading zeros
if (timeElement && !timeElement.contains(document.activeElement)) {
if (newTime === null || newTime === '') { // if the newTime is not defined
if (firstTimeAssign) { // if it's the first time we assign the time value
// create a new default time where the hours, minutes, seconds and milliseconds are set to 0.
newTime = new Date();
newTime.setHours(0, 0, 0, 0);
} else { // clear the time
scope.time = null;
if (scope.ngChange) scope.$eval(scope.ngChange);
return;
}
}
// Update timepicker (watch on ng-model in timepicker does not use object equality),
// also if the ngModel was not a Date, convert it to date
newTime = new Date(newTime);
if (isNaN(newTime.getTime()) === false) {
scope.time = newTime; // change the time in timepicker
if (firstTimeAssign) {
firstTimeAssign = false;
}
}
}
updateMinTime();
updateMaxTime();
if (scope.ngChange) {
scope.$eval(scope.ngChange);
}
}, true);
scope.$watch(function () {
return scope.datetimepickerForm && scope.datetimepickerForm.$error;
}, function (errors) {
if (angular.isUndefined(errors)) {
return;
}
Object.keys(ctrl.$error).forEach(function (error) {
ctrl.$setValidity(error, true);
});
Object.keys(errors).forEach(function (error) {
ctrl.$setValidity(error, false);
});
}, true);
scope.$watch(function () {
return scope.datetimepickerForm && (scope.datetimepickerForm.timepicker.$touched || scope.datetimepickerForm.datepicker.$touched);
}, function (touched) {
if (touched) {
ctrl.$setTouched();
}
});
scope.$watch(function () {
return scope.datetimepickerForm && scope.datetimepickerForm.$dirty;
}, function (dirty) {
if (dirty) {
ctrl.$setDirty();
}
});
scope.$watch('dateOpened', function (value) {
scope.innerDateOpened = value;
});
scope.$watch('innerDateOpened', function (value) {
if (angular.isDefined(scope.dateOpened)) {
scope.dateOpened = value;
}
});
scope.$watch('dateOptions.minDate', function (value) {
updateMinTime();
});
                scope.$watch('minTime', function (value) {
updateMinTime();
});
scope.$watch('dateOptions.maxDate', function (value) {
updateMaxTime();
});
                scope.$watch('maxTime', function (value) {
updateMaxTime();
});
}
}
}
]).directive('isolateForm', [function () {
return {
restrict: 'A',
require: '?form',
link: function (scope, element, attrs, formController) {
if (!formController) {
return;
}
            if (!formController.$$parentForm) {
                return;
            }
            // Remove this form from the parent controller so its errors are tracked separately
            formController.$$parentForm.$removeControl(formController);
var _handler = formController.$setValidity;
formController.$setValidity = function (validationErrorKey, isValid, cntrl) {
_handler.call(formController, validationErrorKey, isValid, cntrl);
formController.$$parentForm.$setValidity(validationErrorKey, true, this);
}
}
};
}]);
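// A minimal usage sketch (not part of this file). The module, controller and model
// names below are hypothetical; note that the camelCase scope bindings above map to
// dash-case attributes, and date-format is read through attrs.$observe('dateFormat').
//
// angular.module('demoApp', ['ui.bootstrap.datetimepicker'])
//   .controller('DemoCtrl', function ($scope) {
//     $scope.meeting = new Date();
//     $scope.opts = { minDate: new Date() };
//   });
//
// <div ng-controller="DemoCtrl">
//   <datetimepicker ng-model="meeting" date-format="dd/MM/yyyy" date-options="opts"
//                   hour-step="1" minute-step="15" show-meridian="false">
//   </datetimepicker>
// </div>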
|
var shareImageButton = document.querySelector('#share-image-button');
var createPostArea = document.querySelector('#create-post');
var closeCreatePostModalButton = document.querySelector('#close-create-post-modal-btn');
var sharedMomentsArea = document.querySelector('#shared-moments');
var form = document.querySelector('form');
var titleInput = document.querySelector('#title');
var locationInput = document.querySelector('#location');
function openCreatePostModal() {
createPostArea.style.display = 'block';
setTimeout(function () {
createPostArea.style.transform = 'translateY(0)';
}, 1);
if (deferredPrompt) {
deferredPrompt.prompt();
deferredPrompt.userChoice.then(function(choiceResult) {
console.log(choiceResult.outcome);
if (choiceResult.outcome === 'dismissed') {
console.log('User cancelled installation');
} else {
console.log('User added to home screen');
}
});
deferredPrompt = null;
}
}
function closeCreatePostModal() {
// createPostArea.style.display = 'none';
createPostArea.style.transform = 'translateY(100vh)';
}
shareImageButton.addEventListener('click', openCreatePostModal);
closeCreatePostModalButton.addEventListener('click', closeCreatePostModal);
// function onSaveButtonClicked(event) {
// console.log('clicked');
// if ('caches' in window) {
// caches.open('user-requested')
// .then(function (cache) {
// cache.add('https://httpbin.org/get');
// cache.add('/src/images/sf-boat.jpg');
// })
// }
// }
function clearCards() {
while (sharedMomentsArea.hasChildNodes()) {
sharedMomentsArea.removeChild(sharedMomentsArea.lastChild);
}
}
function createCard(data) {
var cardWrapper = document.createElement('div');
cardWrapper.className = 'shared-moment-card mdl-card mdl-shadow--2dp';
var cardTitle = document.createElement('div');
cardTitle.className = 'mdl-card__title';
cardTitle.style.backgroundImage = 'url('+data.image+')';
cardTitle.style.backgroundSize = 'cover';
cardTitle.style.height = '180px';
cardWrapper.appendChild(cardTitle);
var cardTitleTextElement = document.createElement('h2');
cardTitleTextElement.style.color = 'white';
cardTitleTextElement.className = 'mdl-card__title-text';
cardTitleTextElement.textContent = data.title;
cardTitle.appendChild(cardTitleTextElement);
var cardSupportingText = document.createElement('div');
cardSupportingText.className = 'mdl-card__supporting-text';
cardSupportingText.textContent = data.location;
cardSupportingText.style.textAlign = 'center';
// var cardSaveButton = document.createElement('button');
// cardSaveButton.textContent = 'Save';
// cardSaveButton.addEventListener('click', onSaveButtonClicked);
// cardSupportingText.appendChild(cardSaveButton);
cardWrapper.appendChild(cardSupportingText);
componentHandler.upgradeElement(cardWrapper);
sharedMomentsArea.appendChild(cardWrapper);
}
function updateUI(data) {
clearCards();
    for (var i = 0; data && i < data.length; i++) {
createCard(data[i]);
}
}
var url = 'https://pwagram-9f12c-default-rtdb.firebaseio.com/posts.json';
var networkDataReceived = false;
fetch(url)
.then(function(res) {
return res.json();
})
.then(function (data) {
networkDataReceived = true;
var dataArray = [];
    for (var key in data) {
dataArray.push(data[key]);
}
console.log('From Fetch', dataArray);
updateUI(dataArray);
});
if ('indexedDB' in window) {
readAllData('posts')
.then(function (data) {
if (!networkDataReceived) {
console.log('From cache', data);
updateUI(data);
}
});
}
function sendData() {
fetch('https://pwagram-9f12c-default-rtdb.firebaseio.com/posts.json', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Accept': 'application/json',
},
body: JSON.stringify({
id: new Date().toISOString(),
title: titleInput.value,
      location: locationInput.value,
image: "https://firebasestorage.googleapis.com/v0/b/pwagram-9f12c.appspot.com/o/sf-boat.jpg?alt=media&token=3eb1e8fe-5e83-4267-bca8-23cbbba7ffda"
})
})
.then(function (res) {
console.log('Sent data', res);
updateUI();
})
}
form.addEventListener('submit', function (event) {
event.preventDefault();
if (titleInput.value.trim() === '' || locationInput.value.trim() === '') {
alert('Please enter valid data!');
return;
}
closeCreatePostModal();
if ('serviceWorker' in navigator && 'SyncManager' in window) {
navigator.serviceWorker.ready
.then(function (sw) {
var post = {
id: new Date().toISOString(),
title: titleInput.value,
location: locationInput.value
};
writeData('sync-posts', post)
.then(function () {
sw.sync.register('sync-new-post');
})
.then(function () {
var snackbarContainer = document.querySelector('#confirmation_toast');
var data = { message: 'Your Post was saved for syncing!' };
          snackbarContainer.MaterialSnackbar.showSnackbar(data);
})
.catch(function (err) {
console.log(err);
})
})
} else {
sendData();
}
})
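// A minimal sketch (not part of this file) of the service-worker side that the
// 'sync-new-post' registration above is typically paired with. It assumes the same
// readAllData IndexedDB helper used above is also available in the service worker
// (e.g. via importScripts); the sync event, event.tag and event.waitUntil are standard
// Background Sync APIs.
//
// self.addEventListener('sync', function (event) {
//   if (event.tag === 'sync-new-post') {
//     event.waitUntil(
//       readAllData('sync-posts').then(function (posts) {
//         return Promise.all(posts.map(function (post) {
//           return fetch('https://pwagram-9f12c-default-rtdb.firebaseio.com/posts.json', {
//             method: 'POST',
//             headers: { 'Content-Type': 'application/json', 'Accept': 'application/json' },
//             body: JSON.stringify(post)
//           });
//         }));
//       })
//     );
//   }
// });
|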
salir="s"
while salir!="Salir":
try:
seguir_ordenando="s"
cantidad_total_producto=[0,0,0]
cantidad_producto=[0,0,0]
producto=["Papas","Hamburguesas","Tacos"]
total=0
print("Bienvenido al Restaurante Rapi-Comidas\nEl menú de hoy es: \n\t1. Papas \t\tValor: 600 colones\n\t2. Hamburguesas \tValor: 1200 colones\n\t3. Tacos \t\tValor: 800")
while seguir_ordenando=="s":
opcion=int(input("\nPor favor digite el número de platillo: "))
if opcion==1:
cantidad_producto[0]=int(input("Cuantas papas desea: "))
cantidad_total_producto[0]+=cantidad_producto[0]
total+=(cantidad_producto[0]*600)
elif opcion==2:
cantidad_producto[1]=int(input("Cuantas hamburguesas desea: "))
cantidad_total_producto[1]+=cantidad_producto[1]
total+=(cantidad_producto[1]*1200)
elif opcion==3:
cantidad_producto[2]=int(input("Cuantos tacos desea: "))
cantidad_total_producto[2]+=cantidad_producto[2]
total+=(cantidad_producto[2]*800)
elif opcion < 1 or opcion > 3:
print("Esa opcion no está dentro del menú")
break
seguir_ordenando=input("\nDesea ordenar otro platillo (s/n): ")
print("\n")
for i in range(0,3,1):
if cantidad_producto[i] != 0:
print("\tCantidad de ",producto[i],"es de: ",cantidad_total_producto[i])
print("\tTotal a pagar: ",total)
salir=input("\nDigite 'Salir' para finalizar o presione 'enter' para una nueva orden: ")
except ValueError:
print("\nSolo se aceptan valores enteros")
salir=input("\nDigite 'Salir' para finalizar o presione 'enter' para una nueva orden: ")
|
from typing import Tuple
from hypothesis import given
from ground.base import (Context,
Kind)
from tests.hints import (PointsPair,
PointsTriplet)
from . import strategies
@given(strategies.contexts_with_points_triplets)
def test_basic(context_with_points_triplet: Tuple[Context, PointsTriplet]
) -> None:
context, points_triplet = context_with_points_triplet
vertex, first_ray_point, second_ray_point = points_triplet
result = context.angle_kind(vertex, first_ray_point, second_ray_point)
assert isinstance(result, Kind)
@given(strategies.contexts_with_points_pairs)
def test_same_endpoints(context_with_points_pair: Tuple[Context, PointsPair]
) -> None:
context, points_pair = context_with_points_pair
start, end = points_pair
assert context.angle_kind(end, start, start) is (Kind.RIGHT
if start == end
else Kind.ACUTE)
assert context.angle_kind(start, end, start) is Kind.RIGHT
@given(strategies.contexts_with_points_triplets)
def test_endpoints_permutations(context_with_points_triplet
: Tuple[Context, PointsTriplet]) -> None:
context, points_triplet = context_with_points_triplet
vertex, first_ray_point, second_ray_point = points_triplet
result = context.angle_kind(vertex, first_ray_point, second_ray_point)
assert result is context.angle_kind(vertex, second_ray_point,
first_ray_point)
|
// @link https://schemas.extratv.com/json-schema/extra/notify/request/get-notification-history-request/1-0-0.json#
import Fb from '@gdbots/pbj/FieldBuilder.js';
import Format from '@gdbots/pbj/enums/Format.js';
import GdbotsPbjxRequestV1Mixin from '@gdbots/schemas/gdbots/pbjx/mixin/request/RequestV1Mixin.js';
import Message from '@gdbots/pbj/Message.js';
import Schema from '@gdbots/pbj/Schema.js';
import StreamId from '@gdbots/schemas/gdbots/pbjx/StreamId.js';
import T from '@gdbots/pbj/types/index.js';
export default class GetNotificationHistoryRequestV1 extends Message {
/**
* @private
*
* @returns {Schema}
*/
static defineSchema() {
return new Schema(this.SCHEMA_ID, this,
[
Fb.create('request_id', T.UuidType.create())
.required()
.build(),
Fb.create('occurred_at', T.MicrotimeType.create())
.build(),
/*
* Multi-tenant apps can use this field to track the tenant id.
*/
Fb.create('ctx_tenant_id', T.StringType.create())
.pattern('^[\\w\\/\\.:-]+$')
.build(),
/*
* The "ctx_retries" field is used to keep track of how many attempts were
* made to handle this request. In some cases, the service or transport
* that handles the request may be down or over capacity and is being retried.
*/
Fb.create('ctx_retries', T.TinyIntType.create())
.build(),
Fb.create('ctx_causator_ref', T.MessageRefType.create())
.build(),
Fb.create('ctx_correlator_ref', T.MessageRefType.create())
.build(),
Fb.create('ctx_user_ref', T.MessageRefType.create())
.build(),
/*
* The "ctx_app" refers to the application used to make the request. This is
* different from ctx_ua (user_agent) because the agent used (Safari, Firefox)
* is not necessarily the app used (cms, iOS app, website)
*/
Fb.create('ctx_app', T.MessageType.create())
.anyOfCuries([
'gdbots:contexts::app',
])
.build(),
/*
* The "ctx_cloud" is set by the server making the request and is generally
* only used internally for tracking and performance monitoring.
*/
Fb.create('ctx_cloud', T.MessageType.create())
.anyOfCuries([
'gdbots:contexts::cloud',
])
.build(),
Fb.create('ctx_ip', T.StringType.create())
.format(Format.IPV4)
.overridable(true)
.build(),
Fb.create('ctx_ipv6', T.StringType.create())
.format(Format.IPV6)
.overridable(true)
.build(),
Fb.create('ctx_ua', T.TextType.create())
.overridable(true)
.build(),
/*
* Field names to dereference, this serves as a hint to the server and is not
         * necessarily guaranteed since authorization, availability, etc. are determined
* by the server not the client.
*/
Fb.create('derefs', T.StringType.create())
.asASet()
.pattern('^[\\w\\.-]+$')
.build(),
Fb.create('stream_id', T.IdentifierType.create())
.required()
.classProto(StreamId)
.build(),
/*
* Return events since this time (exclusive greater than if forward=true, less than if forward=false)
*/
Fb.create('since', T.MicrotimeType.create())
.useTypeDefault(false)
.build(),
/*
* The number of events to return.
*/
Fb.create('count', T.TinyIntType.create())
.withDefault(25)
.build(),
/*
* When true, the events are read from oldest to newest, otherwise newest to oldest.
*/
Fb.create('forward', T.BooleanType.create())
.build(),
],
this.MIXINS,
);
}
}
const M = GetNotificationHistoryRequestV1;
M.prototype.SCHEMA_ID = M.SCHEMA_ID = 'pbj:extra:notify:request:get-notification-history-request:1-0-0';
M.prototype.SCHEMA_CURIE = M.SCHEMA_CURIE = 'extra:notify:request:get-notification-history-request';
M.prototype.SCHEMA_CURIE_MAJOR = M.SCHEMA_CURIE_MAJOR = 'extra:notify:request:get-notification-history-request:v1';
M.prototype.MIXINS = M.MIXINS = [
'gdbots:pbjx:mixin:request:v1',
'gdbots:pbjx:mixin:request',
'gdbots:pbjx:mixin:get-events-request:v1',
'gdbots:pbjx:mixin:get-events-request',
];
GdbotsPbjxRequestV1Mixin(M);
Object.freeze(M);
Object.freeze(M.prototype);
|
import { HarmonicDiffusionGrid } from "./HarmonicDiffusionGrid.js";
import { randomIntegerBetween, seededRandom } from "../../../core/math/MathUtils.js";
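// Note, inferred from the expectations in the tests below rather than from the
// implementation itself: step() appears to relax each unassigned cell toward the
// average of its neighbours while assign()-ed cells stay fixed, so repeated stepping
// converges to a harmonic (linearly interpolated) fill between the assigned cells.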
test("constructor doesn't throw", () => {
new HarmonicDiffusionGrid([], 0, 0);
});
test("step 1x1 unassigned", () => {
const grid = new HarmonicDiffusionGrid([7], 1, 1);
grid.step();
expect(grid.data.length).toEqual(1);
});
test("step 1x1 assigned", () => {
const grid = new HarmonicDiffusionGrid([7], 1, 1);
grid.assign(0, 0, 13);
grid.step();
expect(grid.data).toEqual([13]);
});
test("step 3x3 edges assigned", () => {
const grid = new HarmonicDiffusionGrid(new Array(9).fill(0), 3, 3);
grid.assign(0, 0, 1);
grid.assign(1, 0, 2);
grid.assign(2, 0, 3);
grid.assign(0, 1, 4);
grid.assign(2, 1, 6);
grid.assign(0, 2, 7);
grid.assign(1, 2, 8);
grid.assign(2, 2, 9);
grid.step();
expect(grid.data).toEqual([
1, 2, 3,
4, 5, 6,
7, 8, 9
]);
});
test("20 step 3x1, corner assigned", () => {
const grid = new HarmonicDiffusionGrid(new Array(3).fill(0), 3, 1);
grid.assign(0, 0, 5);
for (let i = 0; i < 20; i++) {
grid.step();
}
expect(grid.data[0]).toBe(5);
expect(grid.data[1]).toBeCloseTo(5);
expect(grid.data[2]).toBeCloseTo(5);
});
test("20 step 4x1, corners assigned", () => {
const grid = new HarmonicDiffusionGrid(new Array(4).fill(0), 4, 1);
grid.assign(0, 0, 5);
grid.assign(3, 0, -7);
for (let i = 0; i < 20; i++) {
grid.step();
}
expect(grid.data[0]).toBe(5);
expect(grid.data[1]).toBeCloseTo(1);
expect(grid.data[2]).toBeCloseTo(-3);
expect(grid.data[3]).toBe(-7);
});
test("performance 512x512", () => {
const w = 139;
const h = 139;
const grid = new HarmonicDiffusionGrid(new Float32Array(w * h).fill(0), w, h);
const rng = seededRandom(42);
let i;
for (i = 0; i < 100; i++) {
const x = randomIntegerBetween(rng, 0, w - 1);
const y = randomIntegerBetween(rng, 0, h - 1);
grid.assign(x, y, rng());
}
console.time('t');
for (i = 0; i < 500; i++) {
grid.step();
}
console.timeEnd('t');
}); |
/* Version: 16.0.9106.1000 */
Type.registerNamespace("Strings");Strings.OfficeOM=function(){};Strings.OfficeOM.registerClass("Strings.OfficeOM");Strings.OfficeOM.L_NonUniformPartialSetNotSupported="No se pueden utilizar parámetros de coordenadas con el tipo de coerción Tabla cuando la tabla contiene celdas combinadas.";Strings.OfficeOM.L_EventRegistrationError="Error de registro de eventos";Strings.OfficeOM.L_SettingsCannotSave="No se pudo guardar la configuración.";Strings.OfficeOM.L_NewWindowCrossZoneErrorString="No pudimos crear el cuadro de diálogo a causa de las restricciones del explorador. El dominio del cuadro de diálogo y el dominio del host del complemento no se encuentran en la misma zona de seguridad.";Strings.OfficeOM.L_PropertyDoesNotExist='Propiedad "{0}" no existe en el objeto.';Strings.OfficeOM.L_NewWindowCrossZoneConfigureBrowserLink="configure el explorador";Strings.OfficeOM.L_DataNotMatchCoercionType="El tipo de objeto de datos especificado no es compatible con la selección actual.";Strings.OfficeOM.L_NotSupportedEventType="No se admite el tipo de evento especificado {0}.";Strings.OfficeOM.L_RequestTokenUnavailable="Esta API se limitó para reducir la frecuencia de llamada.";Strings.OfficeOM.L_CellDataAmountBeyondLimits="Nota: Se recomienda que el número de celdas de una tabla sea inferior a 20.000.";Strings.OfficeOM.L_MemoryLimit="Límite de memoria superado";Strings.OfficeOM.L_ColIndexOutOfRange="El valor de índice de la columna está fuera del intervalo permitido. Utilice un valor (0 o superior) que sea menor que el número de columnas.";Strings.OfficeOM.L_SSOConnectionLostErrorMessage="Se perdió una conexión durante el proceso de inicio de sesión y es posible que el usuario no la pueda iniciar. Esto se debió probablemente a los valores de la configuración de explorador del usuario, como las zonas de seguridad.";Strings.OfficeOM.L_DataNotMatchBindingSize="El objeto de datos proporcionado no coincide con el tamaño de la selección actual.";Strings.OfficeOM.L_RowIndexOutOfRange="El valor de índice de la fila está fuera del intervalo permitido. Utilice un valor (0 o superior) que sea menor que el número de filas.";Strings.OfficeOM.L_UnsupportedUserIdentity="No se admite el tipo de identidad del usuario.";Strings.OfficeOM.L_GetDataParametersConflict="Hay un conflicto en los parámetros especificados.";Strings.OfficeOM.L_OperationNotSupportedOnMatrixData="El contenido seleccionado tiene que estar en formato de tabla. Dé formato de tabla a los datos y vuelva a intentarlo.";Strings.OfficeOM.L_InitializeNotReady="Office.js no se ha cargado todavía por completo. 
Inténtelo de nuevo más tarde o asegúrese de agregar el código de inicialización en la función Office.initialize.";Strings.OfficeOM.L_SettingNameNotExist="El nombre de configuración especificado no existe.";Strings.OfficeOM.L_APINotSupported="API no compatible";Strings.OfficeOM.L_InvalidApiCallInContext="La llamada a API no es válida en el contexto actual.";Strings.OfficeOM.L_NotSupported="No se admite la función {0}.";Strings.OfficeOM.L_NavOutOfBound="No se pudo realizar la operación porque el índice está fuera del intervalo.";Strings.OfficeOM.L_InvalidDataFormat="El formato del objeto de datos especificado no es válido.";Strings.OfficeOM.L_NotImplemented="La función {0} no está implementada.";Strings.OfficeOM.L_SSOServerError="Se produjo un error en el proveedor de autenticación.";Strings.OfficeOM.L_DocumentReadOnly="La operación solicitada no se permite en el modo de documento actual.";Strings.OfficeOM.L_BindingCreationError="No se pudo crear el enlace";Strings.OfficeOM.L_DialogInvalidScheme="No se admite el esquema de dirección URL. Use HTTPS en su lugar.";Strings.OfficeOM.L_APICallFailed="Error de llamada a la API";Strings.OfficeOM.L_InvalidGrant="Falta la autorización previa.";Strings.OfficeOM.L_InvalidArgument='El argumento "{0}" no funciona en esta situación, falta o no tiene el formato correcto.';Strings.OfficeOM.L_AppNameNotExist="El nombre del complemento para {0} no existe.";Strings.OfficeOM.L_BindingNotExist="El enlace especificado no existe.";Strings.OfficeOM.L_InvalidOperationInCellEditMode="Excel está en modo de edición de celdas. Salga del modo de edición pulsando Entrar o Tab o seleccionando otra celda y después vuelva a intentarlo.";Strings.OfficeOM.L_InvalidBinding="Enlace no válido";Strings.OfficeOM.L_DataWriteError="Error de escritura de datos";Strings.OfficeOM.L_CellFormatAmountBeyondLimits="Nota: Se recomienda que la serie de formatos establecida mediante una llamada API de formato sea inferior a 100.";Strings.OfficeOM.L_CannotApplyPropertyThroughSetMethod='Los cambios en la propiedad "{0}" no se puede aplicar a través de un método "object.set".';Strings.OfficeOM.L_EventHandlerAdditionFailed="No se pudo agregar el controlador de eventos.";Strings.OfficeOM.L_CustomFunctionDefinitionMissing="Debe existir una propiedad con este nombre que represente la definición de la función en Excel.CustomFunctions.";Strings.OfficeOM.L_DataNotMatchSelection="El objeto de datos proporcionado no es compatible con la forma o las dimensiones de la selección actual.";Strings.OfficeOM.L_DataReadError="Error de lectura de datos";Strings.OfficeOM.L_FunctionCallFailed="No se pudo llamar a la función {0}. Código de error: {1}.";Strings.OfficeOM.L_InvalidTableOptionValue="Uno o más de los parámetros de tableOptions tienen valores que no están permitidos. Compruebe los valores y vuelva a intentarlo.";Strings.OfficeOM.L_ConnectionFailureWithStatus="Error en la solicitud; código de estado: {0}.";Strings.OfficeOM.L_UserNotSignedIn="Ningún usuario ha iniciado sesión en Office.";Strings.OfficeOM.L_InvalidGrantMessage="Falta conceder permisos para este complemento.";Strings.OfficeOM.L_ApiNotFoundDetails="El método o la propiedad {0} forman parte del conjunto de requisitos {1}, que no está disponible en su versión de {2}.";Strings.OfficeOM.L_RunMustReturnPromise='La función por lotes que se pasa al método ".run" no ha devuelto una promesa. La función debe devolver una promesa para que puedan liberarse los objetos con seguimiento automático al completarse la operación por lotes. 
Normalmente, devuelve una promesa al devolver la respuesta de "context.sync()".';Strings.OfficeOM.L_CustomFunctionNameContainsBadChars="El nombre de función solo puede contener letras, números, guiones bajos y puntos.";Strings.OfficeOM.L_ShowWindowDialogNotificationIgnore="Ignorar";Strings.OfficeOM.L_ShuttingDown="Hubo un problema con la operación porque los datos no son actuales en el servidor.";Strings.OfficeOM.L_HostError="Error de host";Strings.OfficeOM.L_AddinIsAlreadyRequestingToken="El complemento ya está solicitando un token de acceso.";Strings.OfficeOM.L_FormattingReminder="Aviso de formato";Strings.OfficeOM.L_InvalidBindingError="Error de enlace no válido";Strings.OfficeOM.L_CoercionTypeNotMatchBinding="El tipo de conversión especificado no es compatible con este tipo de enlace.";Strings.OfficeOM.L_TooManyArguments="hay demasiados argumentos";Strings.OfficeOM.L_UnsupportedEnumeration="Enumeración no compatible";Strings.OfficeOM.L_UserAbortedMessage="El usuario no ha aceptado los permisos del complemento.";Strings.OfficeOM.L_OperationCancelledError="Operación cancelada";Strings.OfficeOM.L_SSOServerErrorMessage="Se produjo un error inesperado en el servidor.";Strings.OfficeOM.L_InvalidNode="Nodo no válido";Strings.OfficeOM.L_InvalidNamedItemForBindingType="El tipo de enlace especificado no es compatible con el elemento con nombre suministrado.";Strings.OfficeOM.L_InvalidFormatValue="Uno o más de los parámetros de formato tienen valores que no están permitidos. Compruebe los valores y vuelva a intentarlo.";Strings.OfficeOM.L_InvalidGetRows="Las filas especificadas no son válidas.";Strings.OfficeOM.L_EventHandlerNotExist="No se encontró el controlador de eventos especificado para este enlace.";Strings.OfficeOM.L_InValidOptionalArgument="argumento opcional no válido";Strings.OfficeOM.L_NewWindowCrossZone='No pudimos crear un cuadro de diálogo a causa de la configuración de seguridad de su explorador. Pruebe otro explorador o {0} para que "{1}" y el dominio que se muestra en su barra de direcciones se encuentren en la misma zona de seguridad.';Strings.OfficeOM.L_OperationNotSupported="No se admite esta operación.";Strings.OfficeOM.L_PropertyNotLoaded='La propiedad "{0}" no está disponible. Antes de leer el valor de la propiedad, llame al método de carga en el objeto contenedor y llame a "context.sync()" en el contexto de solicitud asociado.';Strings.OfficeOM.L_SetDataParametersConflict="Hay un conflicto en los parámetros especificados.";Strings.OfficeOM.L_IndexOutOfRange="Índice fuera del intervalo.";Strings.OfficeOM.L_OutOfRange="Fuera del intervalo";Strings.OfficeOM.L_OsfControlTypeNotSupported="No se admite el tipo OsfControl.";Strings.OfficeOM.L_AppNotExistInitializeNotCalled="La aplicación {0} no existe. 
No se llamó a Microsoft.Office.WebExtension.initialize(reason).";Strings.OfficeOM.L_GetDataIsTooLarge="El conjunto de datos solicitado es demasiado grande.";Strings.OfficeOM.L_SetDataIsTooLarge="El objeto de datos especificado es demasiado grande.";Strings.OfficeOM.L_SettingsAreStale="No se pudo guardar la configuración porque no está actualizada.";Strings.OfficeOM.L_SSOClientErrorMessage="Se produjo un error inesperado en el cliente.";Strings.OfficeOM.L_NetworkProblemRetrieveFile="Un problema de red ha impedido la recuperación del archivo.";Strings.OfficeOM.L_UserAborted="El usuario anuló la solicitud aceptada.";Strings.OfficeOM.L_DataNotMatchBindingType="El objeto de datos especificado no es compatible con el tipo de enlace.";Strings.OfficeOM.L_InvalidRequestContext="No puede usar el objeto en distintos contextos de solicitudes.";Strings.OfficeOM.L_ShowWindowDialogNotification="{0} desea mostrar una nueva ventana.";Strings.OfficeOM.L_MissingParameter="Parámetro ausente";Strings.OfficeOM.L_FormatValueOutOfRange="El valor está fuera del intervalo permitido.";Strings.OfficeOM.L_OperationNotSupportedOnThisBindingType="La operación no es compatible con este tipo de enlace.";Strings.OfficeOM.L_CustomXmlOutOfDateMessage="Los datos no están actualizados. Recupere el objeto de nuevo.";Strings.OfficeOM.L_CustomFunctionImplementationMissing='La propiedad con este nombre en Excel.CustomFunctions que representa la definición de la función debe contener una propiedad "llamar" que implemente la función.';Strings.OfficeOM.L_CoercionTypeNotSupported="No se admite el tipo de conversión especificado.";Strings.OfficeOM.L_CallbackNotAFunction="La devolución de llamada debe ser de tipo función. Era de tipo {0}.";Strings.OfficeOM.L_InternalError="Error interno";Strings.OfficeOM.L_BadSelectorString="El formato de la cadena pasado al selector es incorrecto o no se admite.";Strings.OfficeOM.L_InvalidOrTimedOutSession="Sesión caducada o no válida";Strings.OfficeOM.L_InvalidReadForBlankRow="La fila especificada está en blanco.";Strings.OfficeOM.L_TooManyOptionalObjects="existen varios objetos opcionales en la lista de parámetros";Strings.OfficeOM.L_UnsupportedDataObject="No se admite el tipo de objeto de datos proporcionado.";Strings.OfficeOM.L_InvalidGetRowColumnCounts="Los valores de rowCount o columnCount especificados no son válidos.";Strings.OfficeOM.L_InvalidValue="Valor no válido";Strings.OfficeOM.L_Timeout="Se ha superado el tiempo de espera de la operación.";Strings.OfficeOM.L_MultipleNamedItemFound="Se han encontrado varios objetos con el mismo nombre.";Strings.OfficeOM.L_TooManyIncompleteRequests="Espere a que finalice la llamada anterior.";Strings.OfficeOM.L_CannotRegisterEvent="No se puede registrar el controlador de eventos.";Strings.OfficeOM.L_UnsupportedEnumerationMessage="La enumeración no se admite la aplicación host actual.";Strings.OfficeOM.L_BrowserAPINotSupported="Este explorador no es compatible con la API solicitada.";Strings.OfficeOM.L_BindingToMultipleSelection="No se admiten las selecciones discontinuas.";Strings.OfficeOM.L_ValueNotLoaded='El valor del objeto de resultado aún no se ha cargado. 
Antes de leer la propiedad del valor, llame a "context.sync()" en el contexto de solicitud asociado.';Strings.OfficeOM.L_NamedItemNotFound="El elemento con nombre no existe.";Strings.OfficeOM.L_InvalidSelectionForBindingType="No se puede crear un enlace con la selección actual y el tipo de enlace especificado.";Strings.OfficeOM.L_SSOUserConsentNotSupportedByCurrentAddinCategory="Este complemento no admite el consentimiento del usuario.";Strings.OfficeOM.L_SettingsStaleError="Error de configuración obsoleto";Strings.OfficeOM.L_SpecifiedIdNotExist="El identificador especificado no existe.";Strings.OfficeOM.L_RequestTimeout="La llamada tardó demasiado tiempo en ejecutarse.";Strings.OfficeOM.L_OperationCancelledErrorMessage="El usuario canceló la operación.";Strings.OfficeOM.L_EventHandlerRemovalFailed="No se pudo quitar el controlador de eventos.";Strings.OfficeOM.L_ElementMissing="No pudimos dar formato a la celda de la tabla porque faltan algunos valores de parámetro. Compruebe los parámetros y vuelva a intentarlo.";Strings.OfficeOM.L_SSOConnectionLostError="Se perdió una conexión durante el proceso de inicio de sesión.";Strings.OfficeOM.L_DialogRequireHTTPS="No se admite el protocolo HTTP. Utilizar HTTPS en su lugar";Strings.OfficeOM.L_InvalidGetStartRowColumn="Los valores de startRow o startColumn especificados no son válidos.";Strings.OfficeOM.L_InvalidCellsValue="Uno o más de los parámetros de las celdas tienen valores que no están permitidos. Compruebe los valores y vuelva a intentarlo.";Strings.OfficeOM.L_DisplayDialogError="Error de presentación del diálogo";Strings.OfficeOM.L_InvalidSSOAddinMessage="No se admite la API de identidad para este complemento.";Strings.OfficeOM.L_FileTypeNotSupported="El tipo de archivo especificado no es compatible.";Strings.OfficeOM.L_DialogOK="Aceptar";Strings.OfficeOM.L_InvalidResourceUrl="La URL de recursos de aplicación que se ha proporcionado no es válida.";Strings.OfficeOM.L_UserClickIgnore="El usuario decidió ignorar el cuadro de diálogo.";Strings.OfficeOM.L_NoCapability="No dispone de permisos suficientes para esta acción.";Strings.OfficeOM.L_InvalidObjectPath='La ruta del objeto "{0}" no funciona para lo que está intentando hacer. 
Si está usando el objeto en varias llamadas de "context.sync" y fuera de la ejecución secuencial de un lote ".run", use los métodos "context.trackedObjects.add()" y "context.trackedObjects.remove()" para administrar la duración de objeto.';Strings.OfficeOM.L_InvalidFormat="Error de formato no válido";Strings.OfficeOM.L_ActivityLimitReached="Se ha alcanzado el límite de actividad.";Strings.OfficeOM.L_InvalidColumnsForBinding="Las columnas especificadas no son válidas.";Strings.OfficeOM.L_SliceSizeNotSupported="No se admite el tamaño de segmento especificado.";Strings.OfficeOM.L_CustomFunctionNameCannotSplit="El nombre de la función debe contener un espacio de nombres no vacío y un nombre corto no vacío.";Strings.OfficeOM.L_DialogNavigateError="Error de navegación por diálogo";Strings.OfficeOM.L_InvalidGetColumns="Las columnas especificadas no son válidas.";Strings.OfficeOM.L_InvalidCoercion="Tipo de conversión no válido";Strings.OfficeOM.L_InvalidBindingOperation="Operación de enlace no válido";Strings.OfficeOM.L_CustomXmlNodeNotFound="No se encontró el nodo especificado.";Strings.OfficeOM.L_AddinIsAlreadyRequestingTokenMessage="La operación falló porque este complemento ya está solicitando un token de acceso.";Strings.OfficeOM.L_InvalidParameters="La función {0} contiene parámetros no válidos.";Strings.OfficeOM.L_SelectionNotSupportCoercionType="La selección actual no es compatible con el tipo de conversión especificado.";Strings.OfficeOM.L_AddBindingFromPromptDefaultText="Haga una selección.";Strings.OfficeOM.L_InvalidDataObject="Objeto de datos no válido";Strings.OfficeOM.L_UnsupportedUserIdentityMessage="No se admite el tipo de identidad del usuario.";Strings.OfficeOM.L_DialogAlreadyOpened="No se pudo realizar la operación porque este complemento ya tiene un diálogo activo.";Strings.OfficeOM.L_DataStale="Datos obsoletos";Strings.OfficeOM.L_CustomXmlOutOfDateName="Los datos no son actuales";Strings.OfficeOM.L_CannotWriteToSelection="No se puede escribir en la selección actual.";Strings.OfficeOM.L_InvalidAPICall="Llamada de API no válida";Strings.OfficeOM.L_NonUniformPartialGetNotSupported="No se pueden utilizar parámetros de coordenadas con el tipo de coerción Tabla cuando la tabla contiene celdas combinadas.";Strings.OfficeOM.L_CloseFileBeforeRetrieve="Llamar a closeAsync en el archivo actual antes de recuperar de otro.";Strings.OfficeOM.L_InvalidResourceUrlMessage="La URL de recursos que se ha especificado en el manifiesto no es válida.";Strings.OfficeOM.L_InvalidSetRows="Las filas especificadas no son válidas.";Strings.OfficeOM.L_SaveSettingsError="Error de configuración de guardado";Strings.OfficeOM.L_CannotNavigateTo="El objeto se encuentra en una ubicación donde no se admite la navegación.";Strings.OfficeOM.L_PermissionDenied="Permiso denegado";Strings.OfficeOM.L_UnknownBindingType="El tipo de enlace no es compatible.";Strings.OfficeOM.L_SSOUserConsentNotSupportedByCurrentAddinCategoryMessage="Se ha producido un error en la operación porque el complemento no admite el consentimiento del usuario en esta categoría";Strings.OfficeOM.L_CustomXmlExceedQuotaName="Se alcanzó el límite de selección";Strings.OfficeOM.L_NotSupportedBindingType="No se admite el tipo de enlace especificado {0}.";Strings.OfficeOM.L_DialogAddressNotTrusted="El dominio de la URL no está incluido en el elemento AppDomains en el manifiesto.";Strings.OfficeOM.L_ReadSettingsError="Error de configuración de lectura";Strings.OfficeOM.L_InternalErrorDescription="Error 
interno.";Strings.OfficeOM.L_ShowWindowDialogNotificationAllow="Permitir";Strings.OfficeOM.L_CustomXmlError="Error de XML personalizado.";Strings.OfficeOM.L_SSOClientError="Se produjo un error en la solicitud de autenticación de Office.";Strings.OfficeOM.L_InvalidOrTimedOutSessionMessage="La sesión de Office Online ha caducado o no es válida. Para continuar, actualice la página.";Strings.OfficeOM.L_ConnectionFailureWithDetails="Error en la solicitud; código de estado {0}; código de error {1}; mensaje de error: {2}";Strings.OfficeOM.L_MissingRequiredArguments="Faltan algunos argumentos necesarios";Strings.OfficeOM.L_RedundantCallbackSpecification="La devolución de llamada no se puede especificar en la lista de argumentos y en el objeto opcional a la vez.";Strings.OfficeOM.L_CustomXmlExceedQuotaMessage="XPath limita la selección a 1024 elementos.";Strings.OfficeOM.L_TooManyOptionalFunction="existen varias funciones opcionales en la lista de parámetros";Strings.OfficeOM.L_SelectionCannotBound="No se puede enlazar a la selección actual.";Strings.OfficeOM.L_OverwriteWorksheetData="La operación establecida no se pudo realizar porque el objeto de datos proporcionado sobrescribirá o cambiará los datos.";Strings.OfficeOM.L_GetSelectionNotSupported="No se admite la selección actual.";Strings.OfficeOM.L_NetworkProblem="Problema en la red";Strings.OfficeOM.L_InvalidArgumentGeneric="Los argumentos pasados a la función no funcionan en esta situación, faltan o no están en el formato correcto.";Strings.OfficeOM.L_AttemptingToSetReadOnlyProperty='Intentar establecer la propiedad de sólo lectura "{0}".';Strings.OfficeOM.L_DataWriteReminder="Aviso de escritura de datos";Strings.OfficeOM.L_InvalidSetColumns="Las columnas especificadas no son válidas.";Strings.OfficeOM.L_InvalidSetStartRowColumn="Los valores de startRow o startColumn especificados no son válidos.";Strings.OfficeOM.L_InvalidApiArgumentsMessage="Los argumentos de entrada no son válidos." |
import unittest
import linked_list
class Test(unittest.TestCase):
def test_run_once(self):
self.assertEqual(1 + 1, 2)
if __name__ == "__main__" : unittest.main()
|
##########################################################################
#
# Copyright (c) 2013-2014, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import os
import errno
import signal
import shlex
import subprocess32 as subprocess
import threading
import time
import traceback
import IECore
import Gaffer
import GafferDispatch
class LocalDispatcher( GafferDispatch.Dispatcher ) :
def __init__( self, name = "LocalDispatcher", jobPool = None ) :
GafferDispatch.Dispatcher.__init__( self, name )
self["executeInBackground"] = Gaffer.BoolPlug( defaultValue = False )
self["ignoreScriptLoadErrors"] = Gaffer.BoolPlug( defaultValue = False )
self["environmentCommand"] = Gaffer.StringPlug()
self.__jobPool = jobPool if jobPool else LocalDispatcher.defaultJobPool()
class Job( object ) :
Status = IECore.Enum.create( "Waiting", "Running", "Complete", "Failed", "Killed" )
def __init__( self, batch, dispatcher, name, jobId, directory ) :
assert( isinstance( batch, GafferDispatch.Dispatcher._TaskBatch ) )
assert( isinstance( dispatcher, GafferDispatch.Dispatcher ) )
self.__batch = batch
## \todo Stop storing this. It's just a temptation to access potentially
# invalid data during background dispatches - all dispatcher settings _must_
# be copied to the job upon construction, because nothing stops a user changing
# the dispatcher settings during a background dispatch. Currently __dispatcher
# is used to access the JobPool in __reportCompleted etc - instead the job should
# use signals to report changes in status, and the JobPool should connect to those
# signals. Jobs should be blissfully ignorant of JobPools.
self.__dispatcher = dispatcher
script = batch.preTasks()[0].plug().ancestor( Gaffer.ScriptNode )
self.__context = Gaffer.Context( script.context() )
self.__name = name
self.__id = jobId
self.__directory = directory
self.__stats = {}
self.__ignoreScriptLoadErrors = dispatcher["ignoreScriptLoadErrors"].getValue()
## \todo Make `Dispatcher::dispatch()` use a Process, so we don't need to
# do substitutions manually like this.
self.__environmentCommand = Gaffer.Context.current().substitute(
dispatcher["environmentCommand"].getValue()
)
self.__messageHandler = IECore.CapturingMessageHandler()
self.__messageTitle = "%s : Job %s %s" % ( self.__dispatcher.getName(), self.__name, self.__id )
scriptFileName = script["fileName"].getValue()
self.__scriptFile = os.path.join( self.__directory, os.path.basename( scriptFileName ) if scriptFileName else "untitled.gfr" )
script.serialiseToFile( self.__scriptFile )
self.__initBatchWalk( batch )
def name( self ) :
return self.__name
def id( self ) :
return self.__id
def directory( self ) :
return self.__directory
def description( self ) :
batch = self.__currentBatch()
if batch is None or batch.plug() is None :
return "N/A"
frames = str( IECore.frameListFromList( [ int(x) for x in batch.frames() ] ) )
return "Executing " + batch.blindData()["nodeName"].value + " on frames " + frames
def statistics( self ) :
batch = self.__currentBatch()
if batch is None or "pid" not in batch.blindData().keys() :
return {}
rss = 0
pcpu = 0.0
pid = batch.blindData().get( "pid" )
try :
stats = subprocess.Popen( ( "ps -Ao pid,ppid,pgid,sess,pcpu,rss" ).split( " " ), stdout=subprocess.PIPE, stderr=subprocess.PIPE ).communicate()[0].split()
for i in range( 0, len(stats), 6 ) :
if str(pid) in stats[i:i+4] :
pcpu += float(stats[i+4])
rss += float(stats[i+5])
except :
return {}
return {
"pid" : pid,
"pcpu" : pcpu,
"rss" : rss,
}
def messageHandler( self ) :
return self.__messageHandler
def execute( self, background = False ) :
if background :
threading.Thread( target = self.__backgroundDispatch ).start()
else :
with self.__messageHandler :
self.__foregroundDispatch( self.__batch )
self.__reportCompleted( self.__batch )
def failed( self ) :
return self.__getStatus( self.__batch ) == LocalDispatcher.Job.Status.Failed
def kill( self ) :
if not self.failed() :
self.__killBatchWalk( self.__batch )
def killed( self ) :
return "killed" in self.__batch.blindData().keys()
def _fail( self ) :
self.__setStatus( self.__batch, LocalDispatcher.Job.Status.Failed )
def __killBatchWalk( self, batch ) :
if "killed" in batch.blindData() :
# Already visited via another path
return
# this doesn't set the status to Killed because that could
# run into a race condition with a background dispatch.
batch.blindData()["killed"] = IECore.BoolData( True )
for upstreamBatch in batch.preTasks() :
self.__killBatchWalk( upstreamBatch )
## \todo Having separate functions for foreground and background
# dispatch functions is error prone. Have only one.
def __foregroundDispatch( self, batch ) :
if self.__getStatus( batch ) == LocalDispatcher.Job.Status.Complete :
return True
for upstreamBatch in batch.preTasks() :
if not self.__foregroundDispatch( upstreamBatch ) :
return False
if batch.blindData().get( "killed" ) :
self.__reportKilled( batch )
return False
if not batch.plug() :
self.__setStatus( batch, LocalDispatcher.Job.Status.Complete )
return True
description = "executing %s on %s" % ( batch.blindData()["nodeName"].value, str(batch.frames()) )
IECore.msg( IECore.MessageHandler.Level.Info, self.__messageTitle, description )
try :
self.__setStatus( batch, LocalDispatcher.Job.Status.Running )
batch.execute()
except :
traceback.print_exc()
self.__reportFailed( batch )
return False
self.__setStatus( batch, LocalDispatcher.Job.Status.Complete )
return True
def __backgroundDispatch( self ) :
with self.__messageHandler :
self.__doBackgroundDispatch( self.__batch )
def __doBackgroundDispatch( self, batch ) :
if self.__getStatus( batch ) == LocalDispatcher.Job.Status.Complete :
return True
for upstreamBatch in batch.preTasks() :
if not self.__doBackgroundDispatch( upstreamBatch ) :
return False
if batch.blindData().get( "killed" ) :
self.__reportKilled( batch )
return False
if not batch.plug() :
self.__reportCompleted( batch )
return True
if len( batch.frames() ) == 0 :
# This case occurs for nodes like TaskList and TaskContextProcessors,
# because they don't do anything in execute (they have empty hashes).
# Their batches exist only to depend on upstream batches. We don't need
# to do any work here, but we still signal completion for the task to
# provide progress feedback to the user.
self.__setStatus( batch, LocalDispatcher.Job.Status.Complete )
IECore.msg( IECore.MessageHandler.Level.Info, self.__messageTitle, "Finished " + batch.blindData()["nodeName"].value )
return True
taskContext = batch.context()
frames = str( IECore.frameListFromList( [ int(x) for x in batch.frames() ] ) )
args = [
"gaffer", "execute",
"-script", self.__scriptFile,
"-nodes", batch.blindData()["nodeName"].value,
"-frames", frames,
]
args = shlex.split( self.__environmentCommand ) + args
if self.__ignoreScriptLoadErrors :
args.append( "-ignoreScriptLoadErrors" )
contextArgs = []
for entry in [ k for k in taskContext.keys() if k != "frame" and not k.startswith( "ui:" ) ] :
if entry not in self.__context.keys() or taskContext[entry] != self.__context[entry] :
contextArgs.extend( [ "-" + entry, repr(taskContext[entry]) ] )
if contextArgs :
args.extend( [ "-context" ] + contextArgs )
self.__setStatus( batch, LocalDispatcher.Job.Status.Running )
IECore.msg( IECore.MessageHandler.Level.Info, self.__messageTitle, " ".join( args ) )
process = subprocess.Popen( args, start_new_session=True )
batch.blindData()["pid"] = IECore.IntData( process.pid )
while process.poll() is None :
if batch.blindData().get( "killed" ) :
os.killpg( process.pid, signal.SIGTERM )
self.__reportKilled( batch )
return False
time.sleep( 0.01 )
if process.returncode :
self.__reportFailed( batch )
return False
self.__setStatus( batch, LocalDispatcher.Job.Status.Complete )
return True
def __getStatus( self, batch ) :
return LocalDispatcher.Job.Status( batch.blindData().get( "status", IECore.IntData( int(LocalDispatcher.Job.Status.Waiting) ) ).value )
def __setStatus( self, batch, status ) :
batch.blindData()["status"] = IECore.IntData( int(status) )
def __reportCompleted( self, batch ) :
self.__setStatus( batch, LocalDispatcher.Job.Status.Complete )
self.__dispatcher.jobPool()._remove( self )
IECore.msg( IECore.MessageHandler.Level.Info, self.__messageTitle, "Dispatched all tasks for " + self.name() )
def __reportFailed( self, batch ) :
self.__setStatus( batch, LocalDispatcher.Job.Status.Failed )
self.__dispatcher.jobPool()._fail( self )
frames = str( IECore.frameListFromList( [ int(x) for x in batch.frames() ] ) )
IECore.msg( IECore.MessageHandler.Level.Error, self.__messageTitle, "Failed to execute " + batch.blindData()["nodeName"].value + " on frames " + frames )
def __reportKilled( self, batch ) :
self.__setStatus( batch, LocalDispatcher.Job.Status.Killed )
self.__dispatcher.jobPool()._remove( self )
IECore.msg( IECore.MessageHandler.Level.Info, self.__messageTitle, "Killed " + self.name() )
def __currentBatch( self ) :
## \todo Consider just storing the current batch, rather
# than searching each time it is requested.
return self.__currentBatchWalk( self.__batch, set() )
def __currentBatchWalk( self, batch, visited ) :
if batch in visited :
return None
visited.add( batch )
if self.__getStatus( batch ) == LocalDispatcher.Job.Status.Running :
return batch
for upstreamBatch in batch.preTasks() :
currentBatch = self.__currentBatchWalk( upstreamBatch, visited )
if currentBatch is not None :
return currentBatch
return None
def __initBatchWalk( self, batch ) :
if "nodeName" in batch.blindData() :
# Already visited via another path
return
nodeName = ""
if batch.plug() is not None :
nodeName = batch.plug().node().relativeName( batch.plug().node().scriptNode() )
batch.blindData()["nodeName"] = nodeName
self.__setStatus( batch, LocalDispatcher.Job.Status.Waiting )
for upstreamBatch in batch.preTasks() :
self.__initBatchWalk( upstreamBatch )
class JobPool( IECore.RunTimeTyped ) :
def __init__( self ) :
self.__jobs = []
self.__failedJobs = []
self.__jobAddedSignal = Gaffer.Signal1()
self.__jobRemovedSignal = Gaffer.Signal1()
self.__jobFailedSignal = Gaffer.Signal1()
def jobs( self ) :
return list(self.__jobs)
def failedJobs( self ) :
return list(self.__failedJobs)
def waitForAll( self ) :
while len(self.__jobs) :
time.sleep( 0.2 )
def jobAddedSignal( self ) :
return self.__jobAddedSignal
def jobRemovedSignal( self ) :
return self.__jobRemovedSignal
def jobFailedSignal( self ) :
return self.__jobFailedSignal
def _append( self, job ) :
assert( isinstance( job, LocalDispatcher.Job ) )
self.__jobs.append( job )
self.jobAddedSignal()( job )
def _remove( self, job, force = False ) :
if job in self.__jobs :
self.__jobs.remove( job )
self.jobRemovedSignal()( job )
if force and job in self.__failedJobs :
self.__failedJobs.remove( job )
def _fail( self, job ) :
if job in self.__jobs and job not in self.__failedJobs :
job._fail()
self.__failedJobs.append( job )
self.jobFailedSignal()( job )
self._remove( job )
__jobPool = JobPool()
@staticmethod
def defaultJobPool() :
return LocalDispatcher.__jobPool
def jobPool( self ) :
return self.__jobPool
def _doDispatch( self, batch ) :
job = LocalDispatcher.Job(
batch = batch,
dispatcher = self,
name = Gaffer.Context.current().substitute( self["jobName"].getValue() ),
jobId = os.path.basename( self.jobDirectory() ),
directory = self.jobDirectory(),
)
self.__jobPool._append( job )
job.execute( background = self["executeInBackground"].getValue() )
IECore.registerRunTimeTyped( LocalDispatcher, typeName = "GafferDispatch::LocalDispatcher" )
IECore.registerRunTimeTyped( LocalDispatcher.JobPool, typeName = "GafferDispatch::LocalDispatcher::JobPool" )
GafferDispatch.Dispatcher.registerDispatcher( "Local", LocalDispatcher )
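# A minimal usage sketch (illustrative only; the node name "render" and the jobs
# directory value are assumptions - adapt them to your own graph and settings):
#
#   script = Gaffer.ScriptNode()
#   # ... build a graph containing a task node, e.g. script["render"] ...
#   dispatcher = LocalDispatcher()
#   dispatcher["jobsDirectory"].setValue( "/tmp/dispatcherTest" )  # assumed plug name from the base Dispatcher
#   dispatcher["executeInBackground"].setValue( True )
#   dispatcher.dispatch( [ script["render"] ] )
#   dispatcher.jobPool().waitForAll()  # only needed for background dispatches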
|
(function() {
"use strict";
var glob = require("glob");
var fs = require("fs");
var v8 = require("./v8Ext");
var basePath = "libs/v8/" + v8.version + "/include";
console.log("Copying V8 Includes for Flathead project...");
var copyFile = function(source, destination) {
console.log(source + " > " + destination);
try {
fs.lstatSync(source);
fs.createReadStream(source).pipe(fs.createWriteStream(destination));
} catch (e) {
  // Source file is missing or unreadable; skip the copy silently.
}
};
glob("deps/v8/include/**/*.h", function(err, files) {
files.forEach(function(file) {
copyFile(file, file.replace("deps/v8/include", basePath));
});
});
})(); |
/* eslint-disable quote-props,func-style,no-var,prefer-rest-params */
export const func1 = function (p1, ...params) {
return `${p1} ${p1?.length} ${params.length} ${Array.from(arguments).length}`
}
export var var1 = 'var1'
export default {
func1,
'var_1_1': var1,
var_1_2 : var1
}
|
module.exports = function (api, opts) {
  // `nodeEnv` is pulled out and removed so it is not merged into the preset options below;
  // it is not referenced elsewhere in this factory.
  const { nodeEnv } = opts;
  delete opts['nodeEnv'];
return {
presets: [
[
require('./lib').default,
require('@birman/utils').deepmerge(
{
env: {
useBuiltIns: 'entry',
corejs: 3,
modules: false
},
transformRuntime: {}
},
opts
)
]
]
};
};
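// A usage sketch (assumed; the published preset name is not shown in this file, so a
// relative require is used purely for illustration):
//
//   // babel.config.js
//   module.exports = {
//     presets: [
//       [require.resolve('./index'), { nodeEnv: 'development' }],
//     ],
//   };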
|
if (process.argv[2] === "seeder") {
// Seeder
const { importData, destroyData } = require('./api/dist/seeder')
if (process.argv[3] === "-d") {
destroyData()
} else {
importData()
}
} else {
    // Load API (requiring the module is assumed to be enough to start it; the `app` binding is otherwise unused)
const app = require('./api/dist/app')
} |
import _ from 'lodash';
import React, { Component } from 'react';
import { connect } from 'react-redux';
import { Link } from 'react-router-dom';
import { fetchPosts } from '../actions';
class PostsIndex extends Component {
componentDidMount(){
this.props.fetchPosts();
}
renderPosts() {
return _.map(this.props.posts, post => {
return (
<li className="list-group-item" key={post.id}>
<Link to={`/posts/${post.id}`}>
{post.title}
</Link>
</li>
)
});
}
render() {
return (
<div>
<div className="text-xs-right">
<Link className="btn btn-primary" to="/posts/new">
Add a Post
</Link>
</div>
<h3>Posts</h3>
<ul className="list-group">
{this.renderPosts()}
</ul>
</div>
);
}
}
function mapStateToProps(state) {
return { posts: state.posts };
}
export default connect(mapStateToProps, { fetchPosts })(PostsIndex);
// ES6 object shorthand: { fetchPosts } is equivalent to { fetchPosts: fetchPosts }; a long-form sketch follows.
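// A long-form equivalent (sketch only; `bindActionCreators` would need to be imported from 'redux'):
//
//   function mapDispatchToProps(dispatch) {
//     return bindActionCreators({ fetchPosts }, dispatch);
//   }
//   export default connect(mapStateToProps, mapDispatchToProps)(PostsIndex);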
|
#!/usr/bin/env python
# coding: utf-8
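# Usage (inferred from the code below): python <this script> <input image>
# The script finds the brightest pixel away from the image borders, draws star-like
# rays from it in that pixel's colour, and writes the result to out.bmp.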
# In[301]:
import numpy as np
import sys
import matplotlib.pyplot as plt
from matplotlib.colors import rgb_to_hsv, hsv_to_rgb
from skimage.draw import line_aa
from PIL import Image
# In[302]:
image = Image.open(sys.argv[1])
image = np.array(image, dtype=np.float32) / 255
# In[303]:
hsv = rgb_to_hsv(image)
value = hsv[:, :, 2]
# In[304]:
# Padding from the edges, so that the centre of the rays is not too close to the borders
padding_coef = 0.25
padded_value = value[int(value.shape[0] * padding_coef) : int(value.shape[0] * (1 - padding_coef)), int(value.shape[1] * padding_coef) : int(value.shape[1] * (1 - padding_coef))]
brightest_padded_pixel = np.unravel_index(np.argmax(padded_value, axis=None), padded_value.shape)
brightest_pixel = (
    brightest_padded_pixel[0] + int(value.shape[0] * padding_coef),
    brightest_padded_pixel[1] + int(value.shape[1] * padding_coef),
)
p = image[brightest_pixel]
# In[305]:
# Ray length, as a fraction of the image height
ray_length_coef = 0.2
# Number of rays to draw
nrays = 10
ray_length = int(value.shape[0] * ray_length_coef)
for phi in np.arange(0, 2 * np.pi, 2 * np.pi / nrays):
    end_rr = int(brightest_pixel[0] + ray_length * np.cos(phi))
    end_cc = int(brightest_pixel[1] + ray_length * np.sin(phi))
    # Clamp the ray end point to the image bounds (cos/sin may be negative, so clamp both ends).
    rr, cc, _ = line_aa(brightest_pixel[0], brightest_pixel[1], min(max(end_rr, 0), value.shape[0] - 1), min(max(end_cc, 0), value.shape[1] - 1))
image[rr, cc] = p
# In[307]:
plt.imshow(image)
# In[308]:
image = np.rint(image * 255).astype(np.uint8)
image = Image.fromarray(image)
image.save("out.bmp")
# In[ ]:
|
// NOTE: This file was generated by the ServiceGenerator.
// ----------------------------------------------------------------------------
// API:
// Cloud OS Config API (osconfig/v1)
// Description:
// OS management tools that can be used for patch management, patch
// compliance, and configuration management on VM instances.
// Documentation:
// https://cloud.google.com/
#if SWIFT_PACKAGE || GTLR_USE_MODULAR_IMPORT
@import GoogleAPIClientForRESTCore;
#elif GTLR_BUILT_AS_FRAMEWORK
#import "GTLR/GTLRObject.h"
#else
#import "GTLRObject.h"
#endif
#if GTLR_RUNTIME_VERSION != 3000
#error This file was generated by a different version of ServiceGenerator which is incompatible with this GTLR library source.
#endif
@class GTLRSystemsManagement_AptSettings;
@class GTLRSystemsManagement_ExecStep;
@class GTLRSystemsManagement_ExecStepConfig;
@class GTLRSystemsManagement_FixedOrPercent;
@class GTLRSystemsManagement_GcsObject;
@class GTLRSystemsManagement_GooSettings;
@class GTLRSystemsManagement_MonthlySchedule;
@class GTLRSystemsManagement_OneTimeSchedule;
@class GTLRSystemsManagement_PatchConfig;
@class GTLRSystemsManagement_PatchDeployment;
@class GTLRSystemsManagement_PatchInstanceFilter;
@class GTLRSystemsManagement_PatchInstanceFilterGroupLabel;
@class GTLRSystemsManagement_PatchInstanceFilterGroupLabel_Labels;
@class GTLRSystemsManagement_PatchJob;
@class GTLRSystemsManagement_PatchJobInstanceDetails;
@class GTLRSystemsManagement_PatchJobInstanceDetailsSummary;
@class GTLRSystemsManagement_PatchRollout;
@class GTLRSystemsManagement_RecurringSchedule;
@class GTLRSystemsManagement_TimeOfDay;
@class GTLRSystemsManagement_TimeZone;
@class GTLRSystemsManagement_WeekDayOfMonth;
@class GTLRSystemsManagement_WeeklySchedule;
@class GTLRSystemsManagement_WindowsUpdateSettings;
@class GTLRSystemsManagement_YumSettings;
@class GTLRSystemsManagement_ZypperSettings;
// Generated comments include content from the discovery document; avoid them
// causing warnings since clang's checks are somewhat arbitrary.
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdocumentation"
NS_ASSUME_NONNULL_BEGIN
// ----------------------------------------------------------------------------
// Constants - For some of the classes' properties below.
// ----------------------------------------------------------------------------
// GTLRSystemsManagement_AptSettings.type
/**
* Runs `apt-get dist-upgrade`.
*
* Value: "DIST"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_AptSettings_Type_Dist;
/**
* By default, upgrade will be performed.
*
* Value: "TYPE_UNSPECIFIED"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_AptSettings_Type_TypeUnspecified;
/**
* Runs `apt-get upgrade`.
*
* Value: "UPGRADE"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_AptSettings_Type_Upgrade;
// ----------------------------------------------------------------------------
// GTLRSystemsManagement_ExecStepConfig.interpreter
/**
* Invalid for a Windows ExecStepConfig. For a Linux ExecStepConfig, the
* interpreter will be parsed from the shebang line of the script if
* unspecified.
*
* Value: "INTERPRETER_UNSPECIFIED"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_ExecStepConfig_Interpreter_InterpreterUnspecified;
/**
* Indicates that the file is run with PowerShell flags `-NonInteractive`,
* `-NoProfile`, and `-ExecutionPolicy Bypass`.
*
* Value: "POWERSHELL"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_ExecStepConfig_Interpreter_Powershell;
/**
* Indicates that the script is run with `/bin/sh` on Linux and `cmd` on
* Windows.
*
* Value: "SHELL"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_ExecStepConfig_Interpreter_Shell;
// ----------------------------------------------------------------------------
// GTLRSystemsManagement_PatchConfig.rebootConfig
/**
* Always reboot the machine after the update completes.
*
* Value: "ALWAYS"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_PatchConfig_RebootConfig_Always;
/**
* The agent decides if a reboot is necessary by checking signals such as
* registry keys on Windows or `/var/run/reboot-required` on APT based systems.
* On RPM based systems, a set of core system package install times are
* compared with system boot time.
*
* Value: "DEFAULT"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_PatchConfig_RebootConfig_Default;
/**
* Never reboot the machine after the update completes.
*
* Value: "NEVER"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_PatchConfig_RebootConfig_Never;
/**
* The default behavior is DEFAULT.
*
* Value: "REBOOT_CONFIG_UNSPECIFIED"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_PatchConfig_RebootConfig_RebootConfigUnspecified;
// ----------------------------------------------------------------------------
// GTLRSystemsManagement_PatchJob.state
/**
* The patch job was canceled.
*
* Value: "CANCELED"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_PatchJob_State_Canceled;
/**
* Patch job completed but there were errors.
*
* Value: "COMPLETED_WITH_ERRORS"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_PatchJob_State_CompletedWithErrors;
/**
* The patch job is looking up instances to run the patch on.
*
* Value: "INSTANCE_LOOKUP"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_PatchJob_State_InstanceLookup;
/**
* Instances are being patched.
*
* Value: "PATCHING"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_PatchJob_State_Patching;
/**
* The patch job was successfully initiated.
*
* Value: "STARTED"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_PatchJob_State_Started;
/**
* State must be specified.
*
* Value: "STATE_UNSPECIFIED"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_PatchJob_State_StateUnspecified;
/**
* Patch job completed successfully.
*
* Value: "SUCCEEDED"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_PatchJob_State_Succeeded;
/**
* The patch job timed out.
*
* Value: "TIMED_OUT"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_PatchJob_State_TimedOut;
// ----------------------------------------------------------------------------
// GTLRSystemsManagement_PatchJobInstanceDetails.state
/**
* The instance acked the notification and will start shortly.
*
* Value: "ACKED"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_PatchJobInstanceDetails_State_Acked;
/**
* The instance is applying patches.
*
* Value: "APPLYING_PATCHES"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_PatchJobInstanceDetails_State_ApplyingPatches;
/**
* The instance is downloading patches.
*
* Value: "DOWNLOADING_PATCHES"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_PatchJobInstanceDetails_State_DownloadingPatches;
/**
* The instance has failed to apply the patch.
*
* Value: "FAILED"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_PatchJobInstanceDetails_State_Failed;
/**
* Instance is inactive and cannot be patched.
*
* Value: "INACTIVE"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_PatchJobInstanceDetails_State_Inactive;
/**
* The service could not detect the presence of the agent. Check to ensure that
* the agent is installed, running, and able to communicate with the service.
*
* Value: "NO_AGENT_DETECTED"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_PatchJobInstanceDetails_State_NoAgentDetected;
/**
* The instance is notified that it should be patched.
*
* Value: "NOTIFIED"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_PatchJobInstanceDetails_State_Notified;
/**
* Unspecified.
*
* Value: "PATCH_STATE_UNSPECIFIED"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_PatchJobInstanceDetails_State_PatchStateUnspecified;
/**
* The instance is not yet notified.
*
* Value: "PENDING"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_PatchJobInstanceDetails_State_Pending;
/**
* The instance is rebooting.
*
* Value: "REBOOTING"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_PatchJobInstanceDetails_State_Rebooting;
/**
* The instance is running the post-patch step.
*
* Value: "RUNNING_POST_PATCH_STEP"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_PatchJobInstanceDetails_State_RunningPostPatchStep;
/**
* The instance is running the pre-patch step.
*
* Value: "RUNNING_PRE_PATCH_STEP"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_PatchJobInstanceDetails_State_RunningPrePatchStep;
/**
* The instance has started the patching process.
*
* Value: "STARTED"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_PatchJobInstanceDetails_State_Started;
/**
* The instance has completed applying patches.
*
* Value: "SUCCEEDED"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_PatchJobInstanceDetails_State_Succeeded;
/**
* The instance has completed applying patches but a reboot is required.
*
* Value: "SUCCEEDED_REBOOT_REQUIRED"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_PatchJobInstanceDetails_State_SucceededRebootRequired;
/**
* The instance exceeded the time out while applying the patch.
*
* Value: "TIMED_OUT"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_PatchJobInstanceDetails_State_TimedOut;
// ----------------------------------------------------------------------------
// GTLRSystemsManagement_PatchRollout.mode
/**
* Patches are applied to VMs in all zones at the same time.
*
* Value: "CONCURRENT_ZONES"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_PatchRollout_Mode_ConcurrentZones;
/**
* Mode must be specified.
*
* Value: "MODE_UNSPECIFIED"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_PatchRollout_Mode_ModeUnspecified;
/**
* Patches are applied one zone at a time. The patch job begins in the region
* with the lowest number of targeted VMs. Within the region, patching begins
* in the zone with the lowest number of targeted VMs. If multiple regions (or
* zones within a region) have the same number of targeted VMs, a tie-breaker
* is achieved by sorting the regions or zones in alphabetical order.
*
* Value: "ZONE_BY_ZONE"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_PatchRollout_Mode_ZoneByZone;
// ----------------------------------------------------------------------------
// GTLRSystemsManagement_RecurringSchedule.frequency
/**
* Invalid. A frequency must be specified.
*
* Value: "FREQUENCY_UNSPECIFIED"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_RecurringSchedule_Frequency_FrequencyUnspecified;
/**
* Indicates that the frequency should be expressed in terms of months.
*
* Value: "MONTHLY"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_RecurringSchedule_Frequency_Monthly;
/**
* Indicates that the frequency should be expressed in terms of weeks.
*
* Value: "WEEKLY"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_RecurringSchedule_Frequency_Weekly;
// ----------------------------------------------------------------------------
// GTLRSystemsManagement_WeekDayOfMonth.dayOfWeek
/**
* The day of the week is unspecified.
*
* Value: "DAY_OF_WEEK_UNSPECIFIED"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_WeekDayOfMonth_DayOfWeek_DayOfWeekUnspecified;
/**
* Friday
*
* Value: "FRIDAY"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_WeekDayOfMonth_DayOfWeek_Friday;
/**
* Monday
*
* Value: "MONDAY"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_WeekDayOfMonth_DayOfWeek_Monday;
/**
* Saturday
*
* Value: "SATURDAY"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_WeekDayOfMonth_DayOfWeek_Saturday;
/**
* Sunday
*
* Value: "SUNDAY"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_WeekDayOfMonth_DayOfWeek_Sunday;
/**
* Thursday
*
* Value: "THURSDAY"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_WeekDayOfMonth_DayOfWeek_Thursday;
/**
* Tuesday
*
* Value: "TUESDAY"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_WeekDayOfMonth_DayOfWeek_Tuesday;
/**
* Wednesday
*
* Value: "WEDNESDAY"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_WeekDayOfMonth_DayOfWeek_Wednesday;
// ----------------------------------------------------------------------------
// GTLRSystemsManagement_WeeklySchedule.dayOfWeek
/**
* The day of the week is unspecified.
*
* Value: "DAY_OF_WEEK_UNSPECIFIED"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_WeeklySchedule_DayOfWeek_DayOfWeekUnspecified;
/**
* Friday
*
* Value: "FRIDAY"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_WeeklySchedule_DayOfWeek_Friday;
/**
* Monday
*
* Value: "MONDAY"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_WeeklySchedule_DayOfWeek_Monday;
/**
* Saturday
*
* Value: "SATURDAY"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_WeeklySchedule_DayOfWeek_Saturday;
/**
* Sunday
*
* Value: "SUNDAY"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_WeeklySchedule_DayOfWeek_Sunday;
/**
* Thursday
*
* Value: "THURSDAY"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_WeeklySchedule_DayOfWeek_Thursday;
/**
* Tuesday
*
* Value: "TUESDAY"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_WeeklySchedule_DayOfWeek_Tuesday;
/**
* Wednesday
*
* Value: "WEDNESDAY"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_WeeklySchedule_DayOfWeek_Wednesday;
// ----------------------------------------------------------------------------
// GTLRSystemsManagement_WindowsUpdateSettings.classifications
/**
* Invalid. If classifications are included, they must be specified.
*
* Value: "CLASSIFICATION_UNSPECIFIED"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_WindowsUpdateSettings_Classifications_ClassificationUnspecified;
/**
* "A widely released fix for a specific problem that addresses a critical,
* non-security-related bug." [1]
*
* Value: "CRITICAL"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_WindowsUpdateSettings_Classifications_Critical;
/**
* "A widely released and frequent software update that contains additions to a
* product's definition database. Definition databases are often used to detect
* objects that have specific attributes, such as malicious code, phishing
* websites, or junk mail." [1]
*
* Value: "DEFINITION"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_WindowsUpdateSettings_Classifications_Definition;
/**
* "Software that controls the input and output of a device." [1]
*
* Value: "DRIVER"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_WindowsUpdateSettings_Classifications_Driver;
/**
* "New product functionality that is first distributed outside the context of
* a product release and that is typically included in the next full product
* release." [1]
*
* Value: "FEATURE_PACK"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_WindowsUpdateSettings_Classifications_FeaturePack;
/**
* "A widely released fix for a product-specific, security-related
* vulnerability. Security vulnerabilities are rated by their severity. The
* severity rating is indicated in the Microsoft security bulletin as critical,
* important, moderate, or low." [1]
*
* Value: "SECURITY"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_WindowsUpdateSettings_Classifications_Security;
/**
* "A tested, cumulative set of all hotfixes, security updates, critical
* updates, and updates. Additionally, service packs may contain additional
* fixes for problems that are found internally since the release of the
* product. Service packs may also contain a limited number of
* customer-requested design changes or features." [1]
*
* Value: "SERVICE_PACK"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_WindowsUpdateSettings_Classifications_ServicePack;
/**
* "A utility or feature that helps complete a task or set of tasks." [1]
*
* Value: "TOOL"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_WindowsUpdateSettings_Classifications_Tool;
/**
* "A widely released fix for a specific problem. An update addresses a
* noncritical, non-security-related bug." [1]
*
* Value: "UPDATE"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_WindowsUpdateSettings_Classifications_Update;
/**
* "A tested, cumulative set of hotfixes, security updates, critical updates,
* and updates that are packaged together for easy deployment. A rollup
* generally targets a specific area, such as security, or a component of a
* product, such as Internet Information Services (IIS)." [1]
*
* Value: "UPDATE_ROLLUP"
*/
FOUNDATION_EXTERN NSString * const kGTLRSystemsManagement_WindowsUpdateSettings_Classifications_UpdateRollup;
/**
* Apt patching is completed by executing `apt-get update && apt-get upgrade`.
* Additional options can be set to control how this is executed.
*/
@interface GTLRSystemsManagement_AptSettings : GTLRObject
/**
* List of packages to exclude from update. These packages will be excluded
*/
@property(nonatomic, strong, nullable) NSArray<NSString *> *excludes;
/**
* An exclusive list of packages to be updated. These are the only packages
* that will be updated. If these packages are not installed, they will be
* ignored. This field cannot be specified with any other patch configuration
* fields.
*/
@property(nonatomic, strong, nullable) NSArray<NSString *> *exclusivePackages;
/**
* By changing the type to DIST, the patching is performed using `apt-get
* dist-upgrade` instead.
*
* Likely values:
* @arg @c kGTLRSystemsManagement_AptSettings_Type_Dist Runs `apt-get
* dist-upgrade`. (Value: "DIST")
* @arg @c kGTLRSystemsManagement_AptSettings_Type_TypeUnspecified By
* default, upgrade will be performed. (Value: "TYPE_UNSPECIFIED")
* @arg @c kGTLRSystemsManagement_AptSettings_Type_Upgrade Runs `apt-get
* upgrade`. (Value: "UPGRADE")
*/
@property(nonatomic, copy, nullable) NSString *type;
@end
/**
* Message for canceling a patch job.
*/
@interface GTLRSystemsManagement_CancelPatchJobRequest : GTLRObject
@end
/**
* A generic empty message that you can re-use to avoid defining duplicated
* empty messages in your APIs. A typical example is to use it as the request
* or the response type of an API method. For instance: service Foo { rpc
* Bar(google.protobuf.Empty) returns (google.protobuf.Empty); } The JSON
* representation for `Empty` is empty JSON object `{}`.
*/
@interface GTLRSystemsManagement_Empty : GTLRObject
@end
/**
* A step that runs an executable for a PatchJob.
*/
@interface GTLRSystemsManagement_ExecStep : GTLRObject
/** The ExecStepConfig for all Linux VMs targeted by the PatchJob. */
@property(nonatomic, strong, nullable) GTLRSystemsManagement_ExecStepConfig *linuxExecStepConfig;
/** The ExecStepConfig for all Windows VMs targeted by the PatchJob. */
@property(nonatomic, strong, nullable) GTLRSystemsManagement_ExecStepConfig *windowsExecStepConfig;
@end
/**
* Common configurations for an ExecStep.
*/
@interface GTLRSystemsManagement_ExecStepConfig : GTLRObject
/**
* Defaults to [0]. A list of possible return values that the execution can
* return to indicate a success.
*
* Uses NSNumber of intValue.
*/
@property(nonatomic, strong, nullable) NSArray<NSNumber *> *allowedSuccessCodes;
/** A Cloud Storage object containing the executable. */
@property(nonatomic, strong, nullable) GTLRSystemsManagement_GcsObject *gcsObject;
/**
* The script interpreter to use to run the script. If no interpreter is
* specified the script will be executed directly, which will likely only
* succeed for scripts with [shebang lines]
* (https://en.wikipedia.org/wiki/Shebang_\\(Unix\\)).
*
* Likely values:
* @arg @c kGTLRSystemsManagement_ExecStepConfig_Interpreter_InterpreterUnspecified
* Invalid for a Windows ExecStepConfig. For a Linux ExecStepConfig, the
* interpreter will be parsed from the shebang line of the script if
* unspecified. (Value: "INTERPRETER_UNSPECIFIED")
* @arg @c kGTLRSystemsManagement_ExecStepConfig_Interpreter_Powershell
* Indicates that the file is run with PowerShell flags
* `-NonInteractive`, `-NoProfile`, and `-ExecutionPolicy Bypass`.
* (Value: "POWERSHELL")
* @arg @c kGTLRSystemsManagement_ExecStepConfig_Interpreter_Shell Indicates
* that the script is run with `/bin/sh` on Linux and `cmd` on Windows.
* (Value: "SHELL")
*/
@property(nonatomic, copy, nullable) NSString *interpreter;
/** An absolute path to the executable on the VM. */
@property(nonatomic, copy, nullable) NSString *localPath;
@end
/**
* A request message to initiate patching across Compute Engine instances.
*/
@interface GTLRSystemsManagement_ExecutePatchJobRequest : GTLRObject
/**
* Description of the patch job. Length of the description is limited to 1024
* characters.
*
* Remapped to 'descriptionProperty' to avoid NSObject's 'description'.
*/
@property(nonatomic, copy, nullable) NSString *descriptionProperty;
/** Display name for this patch job. This does not have to be unique. */
@property(nonatomic, copy, nullable) NSString *displayName;
/**
* If this patch is a dry-run only, instances are contacted but will do
* nothing.
*
* Uses NSNumber of boolValue.
*/
@property(nonatomic, strong, nullable) NSNumber *dryRun;
/**
* Duration of the patch job. After the duration ends, the patch job times out.
*/
@property(nonatomic, strong, nullable) GTLRDuration *duration;
/**
* Required. Instances to patch, either explicitly or filtered by some criteria
* such as zone or labels.
*/
@property(nonatomic, strong, nullable) GTLRSystemsManagement_PatchInstanceFilter *instanceFilter;
/**
* Patch configuration being applied. If omitted, instances are patched using
* the default configurations.
*/
@property(nonatomic, strong, nullable) GTLRSystemsManagement_PatchConfig *patchConfig;
/** Rollout strategy of the patch job. */
@property(nonatomic, strong, nullable) GTLRSystemsManagement_PatchRollout *rollout;
@end
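// A minimal construction sketch for this request (illustrative only; the query and
// service classes that would actually send it are declared elsewhere in the
// generated client):
//
//   GTLRSystemsManagement_ExecutePatchJobRequest *request =
//       [GTLRSystemsManagement_ExecutePatchJobRequest object];
//   request.displayName = @"example-patch-job";   // hypothetical name
//   GTLRSystemsManagement_PatchInstanceFilter *filter =
//       [GTLRSystemsManagement_PatchInstanceFilter object];
//   filter.all = @YES;                            // target every VM in the project
//   request.instanceFilter = filter;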
/**
* Message encapsulating a value that can be either absolute ("fixed") or
* relative ("percent") to a value.
*/
@interface GTLRSystemsManagement_FixedOrPercent : GTLRObject
/**
* Specifies a fixed value.
*
* Uses NSNumber of intValue.
*/
@property(nonatomic, strong, nullable) NSNumber *fixed;
/**
* Specifies the relative value defined as a percentage, which will be
* multiplied by a reference value.
*
* Uses NSNumber of intValue.
*/
@property(nonatomic, strong, nullable) NSNumber *percent;
@end
/**
* Cloud Storage object representation.
*/
@interface GTLRSystemsManagement_GcsObject : GTLRObject
/** Required. Bucket of the Cloud Storage object. */
@property(nonatomic, copy, nullable) NSString *bucket;
/**
* Required. Generation number of the Cloud Storage object. This is used to
* ensure that the ExecStep specified by this PatchJob does not change.
*
* Uses NSNumber of longLongValue.
*/
@property(nonatomic, strong, nullable) NSNumber *generationNumber;
/** Required. Name of the Cloud Storage object. */
@property(nonatomic, copy, nullable) NSString *object;
@end
/**
* Googet patching is performed by running `googet update`.
*/
@interface GTLRSystemsManagement_GooSettings : GTLRObject
@end
/**
* A response message for listing patch deployments.
*
* @note This class supports NSFastEnumeration and indexed subscripting over
* its "patchDeployments" property. If returned as the result of a query,
* it should support automatic pagination (when @c shouldFetchNextPages
* is enabled).
*/
@interface GTLRSystemsManagement_ListPatchDeploymentsResponse : GTLRCollectionObject
/**
* A pagination token that can be used to get the next page of patch
* deployments.
*/
@property(nonatomic, copy, nullable) NSString *nextPageToken;
/**
* The list of patch deployments.
*
* @note This property is used to support NSFastEnumeration and indexed
* subscripting on this class.
*/
@property(nonatomic, strong, nullable) NSArray<GTLRSystemsManagement_PatchDeployment *> *patchDeployments;
@end
/**
* A response message for listing the instances details for a patch job.
*
* @note This class supports NSFastEnumeration and indexed subscripting over
* its "patchJobInstanceDetails" property. If returned as the result of a
* query, it should support automatic pagination (when @c
* shouldFetchNextPages is enabled).
*/
@interface GTLRSystemsManagement_ListPatchJobInstanceDetailsResponse : GTLRCollectionObject
/** A pagination token that can be used to get the next page of results. */
@property(nonatomic, copy, nullable) NSString *nextPageToken;
/**
* A list of instance status.
*
* @note This property is used to support NSFastEnumeration and indexed
* subscripting on this class.
*/
@property(nonatomic, strong, nullable) NSArray<GTLRSystemsManagement_PatchJobInstanceDetails *> *patchJobInstanceDetails;
@end
/**
* A response message for listing patch jobs.
*
* @note This class supports NSFastEnumeration and indexed subscripting over
* its "patchJobs" property. If returned as the result of a query, it
* should support automatic pagination (when @c shouldFetchNextPages is
* enabled).
*/
@interface GTLRSystemsManagement_ListPatchJobsResponse : GTLRCollectionObject
/** A pagination token that can be used to get the next page of results. */
@property(nonatomic, copy, nullable) NSString *nextPageToken;
/**
* The list of patch jobs.
*
* @note This property is used to support NSFastEnumeration and indexed
* subscripting on this class.
*/
@property(nonatomic, strong, nullable) NSArray<GTLRSystemsManagement_PatchJob *> *patchJobs;
@end
/**
* Represents a monthly schedule. An example of a valid monthly schedule is "on
* the third Tuesday of the month" or "on the 15th of the month".
*/
@interface GTLRSystemsManagement_MonthlySchedule : GTLRObject
/**
* Required. One day of the month. 1-31 indicates the 1st to the 31st day. -1
* indicates the last day of the month. Months without the target day will be
* skipped. For example, a schedule to run "every month on the 31st" will not
* run in February, April, June, etc.
*
* Uses NSNumber of intValue.
*/
@property(nonatomic, strong, nullable) NSNumber *monthDay;
/** Required. Week day in a month. */
@property(nonatomic, strong, nullable) GTLRSystemsManagement_WeekDayOfMonth *weekDayOfMonth;
@end
/**
* Sets the time for a one time patch deployment. Timestamp is in
* [RFC3339](https://www.ietf.org/rfc/rfc3339.txt) text format.
*/
@interface GTLRSystemsManagement_OneTimeSchedule : GTLRObject
/** Required. The desired patch job execution time. */
@property(nonatomic, strong, nullable) GTLRDateTime *executeTime;
@end
/**
* Patch configuration specifications. Contains details on how to apply the
* patch(es) to a VM instance.
*/
@interface GTLRSystemsManagement_PatchConfig : GTLRObject
/**
* Apt update settings. Use this setting to override the default `apt` patch
* rules.
*/
@property(nonatomic, strong, nullable) GTLRSystemsManagement_AptSettings *apt;
/**
* Goo update settings. Use this setting to override the default `goo` patch
* rules.
*/
@property(nonatomic, strong, nullable) GTLRSystemsManagement_GooSettings *goo;
/** The `ExecStep` to run after the patch update. */
@property(nonatomic, strong, nullable) GTLRSystemsManagement_ExecStep *postStep;
/** The `ExecStep` to run before the patch update. */
@property(nonatomic, strong, nullable) GTLRSystemsManagement_ExecStep *preStep;
/**
* Post-patch reboot settings.
*
* Likely values:
* @arg @c kGTLRSystemsManagement_PatchConfig_RebootConfig_Always Always
* reboot the machine after the update completes. (Value: "ALWAYS")
* @arg @c kGTLRSystemsManagement_PatchConfig_RebootConfig_Default The agent
* decides if a reboot is necessary by checking signals such as registry
* keys on Windows or `/var/run/reboot-required` on APT based systems. On
* RPM based systems, a set of core system package install times are
* compared with system boot time. (Value: "DEFAULT")
* @arg @c kGTLRSystemsManagement_PatchConfig_RebootConfig_Never Never reboot
* the machine after the update completes. (Value: "NEVER")
* @arg @c kGTLRSystemsManagement_PatchConfig_RebootConfig_RebootConfigUnspecified
* The default behavior is DEFAULT. (Value: "REBOOT_CONFIG_UNSPECIFIED")
*/
@property(nonatomic, copy, nullable) NSString *rebootConfig;
/**
* Windows update settings. Use this setting to override the default Windows patch rules.
*/
@property(nonatomic, strong, nullable) GTLRSystemsManagement_WindowsUpdateSettings *windowsUpdate;
/**
* Yum update settings. Use this setting to override the default `yum` patch
* rules.
*/
@property(nonatomic, strong, nullable) GTLRSystemsManagement_YumSettings *yum;
/**
* Zypper update settings. Use this setting to override the default `zypper`
* patch rules.
*/
@property(nonatomic, strong, nullable) GTLRSystemsManagement_ZypperSettings *zypper;
@end
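// A minimal configuration sketch (illustrative only):
//
//   GTLRSystemsManagement_PatchConfig *patchConfig = [GTLRSystemsManagement_PatchConfig object];
//   patchConfig.rebootConfig = kGTLRSystemsManagement_PatchConfig_RebootConfig_Default;
//   GTLRSystemsManagement_AptSettings *apt = [GTLRSystemsManagement_AptSettings object];
//   apt.type = kGTLRSystemsManagement_AptSettings_Type_Dist;   // patch with `apt-get dist-upgrade`
//   patchConfig.apt = apt;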
/**
* Patch deployments are configurations that individual patch jobs use to
* complete a patch. These configurations include instance filter, package
* repository settings, and a schedule. For more information about creating and
* managing patch deployments, see [Scheduling patch
* jobs](https://cloud.google.com/compute/docs/os-patch-management/schedule-patch-jobs).
*/
@interface GTLRSystemsManagement_PatchDeployment : GTLRObject
/**
* Output only. Time the patch deployment was created. Timestamp is in
* [RFC3339](https://www.ietf.org/rfc/rfc3339.txt) text format.
*/
@property(nonatomic, strong, nullable) GTLRDateTime *createTime;
/**
* Optional. Description of the patch deployment. Length of the description is
* limited to 1024 characters.
*
* Remapped to 'descriptionProperty' to avoid NSObject's 'description'.
*/
@property(nonatomic, copy, nullable) NSString *descriptionProperty;
/**
* Optional. Duration of the patch. After the duration ends, the patch times
* out.
*/
@property(nonatomic, strong, nullable) GTLRDuration *duration;
/** Required. VM instances to patch. */
@property(nonatomic, strong, nullable) GTLRSystemsManagement_PatchInstanceFilter *instanceFilter;
/**
* Output only. The last time a patch job was started by this deployment.
* Timestamp is in [RFC3339](https://www.ietf.org/rfc/rfc3339.txt) text format.
*/
@property(nonatomic, strong, nullable) GTLRDateTime *lastExecuteTime;
/**
* Unique name for the patch deployment resource in a project. The patch
* deployment name is in the form:
* `projects/{project_id}/patchDeployments/{patch_deployment_id}`. This field
* is ignored when you create a new patch deployment.
*/
@property(nonatomic, copy, nullable) NSString *name;
/** Required. Schedule a one-time execution. */
@property(nonatomic, strong, nullable) GTLRSystemsManagement_OneTimeSchedule *oneTimeSchedule;
/** Optional. Patch configuration that is applied. */
@property(nonatomic, strong, nullable) GTLRSystemsManagement_PatchConfig *patchConfig;
/** Required. Schedule recurring executions. */
@property(nonatomic, strong, nullable) GTLRSystemsManagement_RecurringSchedule *recurringSchedule;
/** Optional. Rollout strategy of the patch job. */
@property(nonatomic, strong, nullable) GTLRSystemsManagement_PatchRollout *rollout;
/**
* Output only. Time the patch deployment was last updated. Timestamp is in
* [RFC3339](https://www.ietf.org/rfc/rfc3339.txt) text format.
*/
@property(nonatomic, strong, nullable) GTLRDateTime *updateTime;
@end
/**
* A filter to target VM instances for patching. The targeted VMs must meet all
* criteria specified. So if both labels and zones are specified, the patch job
* targets only VMs with those labels and in those zones.
*/
@interface GTLRSystemsManagement_PatchInstanceFilter : GTLRObject
/**
* Target all VM instances in the project. If true, no other criteria is
* permitted.
*
* Uses NSNumber of boolValue.
*/
@property(nonatomic, strong, nullable) NSNumber *all;
/**
* Targets VM instances matching ANY of these GroupLabels. This allows
* targeting of disparate groups of VM instances.
*/
@property(nonatomic, strong, nullable) NSArray<GTLRSystemsManagement_PatchInstanceFilterGroupLabel *> *groupLabels;
/**
* Targets VMs whose name starts with one of these prefixes. Similar to labels,
* this is another way to group VMs when targeting configs, for example
* prefix="prod-".
*/
@property(nonatomic, strong, nullable) NSArray<NSString *> *instanceNamePrefixes;
/**
* Targets any of the VM instances specified. Instances are specified by their
* URI in the form `zones/[ZONE]/instances/[INSTANCE_NAME]`,
* `projects/[PROJECT_ID]/zones/[ZONE]/instances/[INSTANCE_NAME]`, or
* `https://www.googleapis.com/compute/v1/projects/[PROJECT_ID]/zones/[ZONE]/instances/[INSTANCE_NAME]`
*/
@property(nonatomic, strong, nullable) NSArray<NSString *> *instances;
/**
* Targets VM instances in ANY of these zones. Leave empty to target VM
* instances in any zone.
*/
@property(nonatomic, strong, nullable) NSArray<NSString *> *zones;
@end
/**
* Targets a group of VM instances by using their [assigned
* labels](https://cloud.google.com/compute/docs/labeling-resources). Labels
* are key-value pairs. A `GroupLabel` is a combination of labels that is used
* to target VMs for a patch job. For example, a patch job can target VMs that
* have the following `GroupLabel`: `{"env":"test", "app":"web"}`. This means
* that the patch job is applied to VMs that have both the labels `env=test`
* and `app=web`.
*/
@interface GTLRSystemsManagement_PatchInstanceFilterGroupLabel : GTLRObject
/**
* Compute Engine instance labels that must be present for a VM instance to be
* targeted by this filter.
*/
@property(nonatomic, strong, nullable) GTLRSystemsManagement_PatchInstanceFilterGroupLabel_Labels *labels;
@end
/**
* Compute Engine instance labels that must be present for a VM instance to be
* targeted by this filter.
*
* @note This class is documented as having more properties of NSString. Use @c
* -additionalJSONKeys and @c -additionalPropertyForName: to get the list
* of properties and then fetch them; or @c -additionalProperties to
* fetch them all at once.
*/
@interface GTLRSystemsManagement_PatchInstanceFilterGroupLabel_Labels : GTLRObject
@end
/**
* A high level representation of a patch job that is either in progress or has
* completed. Instance details are not included in the job. To paginate through
* instance details, use ListPatchJobInstanceDetails. For more information
* about patch jobs, see [Creating patch
* jobs](https://cloud.google.com/compute/docs/os-patch-management/create-patch-job).
*/
@interface GTLRSystemsManagement_PatchJob : GTLRObject
/** Time this patch job was created. */
@property(nonatomic, strong, nullable) GTLRDateTime *createTime;
/**
* Description of the patch job. Length of the description is limited to 1024
* characters.
*
* Remapped to 'descriptionProperty' to avoid NSObject's 'description'.
*/
@property(nonatomic, copy, nullable) NSString *descriptionProperty;
/** Display name for this patch job. This is not a unique identifier. */
@property(nonatomic, copy, nullable) NSString *displayName;
/**
* If this patch job is a dry run, the agent reports that it has finished
* without running any updates on the VM instance.
*
* Uses NSNumber of boolValue.
*/
@property(nonatomic, strong, nullable) NSNumber *dryRun;
/**
* Duration of the patch job. After the duration ends, the patch job times out.
*/
@property(nonatomic, strong, nullable) GTLRDuration *duration;
/**
* If this patch job failed, this message provides information about the
* failure.
*/
@property(nonatomic, copy, nullable) NSString *errorMessage;
/** Summary of instance details. */
@property(nonatomic, strong, nullable) GTLRSystemsManagement_PatchJobInstanceDetailsSummary *instanceDetailsSummary;
/** Instances to patch. */
@property(nonatomic, strong, nullable) GTLRSystemsManagement_PatchInstanceFilter *instanceFilter;
/**
* Unique identifier for this patch job in the form `projects/ * /patchJobs/ *`
*/
@property(nonatomic, copy, nullable) NSString *name;
/** Patch configuration being applied. */
@property(nonatomic, strong, nullable) GTLRSystemsManagement_PatchConfig *patchConfig;
/** Output only. Name of the patch deployment that created this patch job. */
@property(nonatomic, copy, nullable) NSString *patchDeployment;
/**
* Reflects the overall progress of the patch job in the range of 0.0 being no
* progress to 100.0 being complete.
*
* Uses NSNumber of doubleValue.
*/
@property(nonatomic, strong, nullable) NSNumber *percentComplete;
/** Rollout strategy being applied. */
@property(nonatomic, strong, nullable) GTLRSystemsManagement_PatchRollout *rollout;
/**
* The current state of the PatchJob.
*
* Likely values:
* @arg @c kGTLRSystemsManagement_PatchJob_State_Canceled The patch job was
* canceled. (Value: "CANCELED")
* @arg @c kGTLRSystemsManagement_PatchJob_State_CompletedWithErrors Patch
* job completed but there were errors. (Value: "COMPLETED_WITH_ERRORS")
* @arg @c kGTLRSystemsManagement_PatchJob_State_InstanceLookup The patch job
* is looking up instances to run the patch on. (Value:
* "INSTANCE_LOOKUP")
* @arg @c kGTLRSystemsManagement_PatchJob_State_Patching Instances are being
* patched. (Value: "PATCHING")
* @arg @c kGTLRSystemsManagement_PatchJob_State_Started The patch job was
* successfully initiated. (Value: "STARTED")
* @arg @c kGTLRSystemsManagement_PatchJob_State_StateUnspecified State must
* be specified. (Value: "STATE_UNSPECIFIED")
* @arg @c kGTLRSystemsManagement_PatchJob_State_Succeeded Patch job
* completed successfully. (Value: "SUCCEEDED")
* @arg @c kGTLRSystemsManagement_PatchJob_State_TimedOut The patch job timed
* out. (Value: "TIMED_OUT")
*/
@property(nonatomic, copy, nullable) NSString *state;
/** Last time this patch job was updated. */
@property(nonatomic, strong, nullable) GTLRDateTime *updateTime;
@end
/**
* Patch details for a VM instance. For more information about reviewing VM
* instance details, see [Listing all VM instance details for a specific patch
* job](https://cloud.google.com/compute/docs/os-patch-management/manage-patch-jobs#list-instance-details).
*/
@interface GTLRSystemsManagement_PatchJobInstanceDetails : GTLRObject
/**
* The number of times that the agent attempts to apply the patch.
*
* Uses NSNumber of longLongValue.
*/
@property(nonatomic, strong, nullable) NSNumber *attemptCount;
/** If the patch fails, this field provides the reason. */
@property(nonatomic, copy, nullable) NSString *failureReason;
/**
* The unique identifier for the instance. This identifier is defined by the
* server.
*/
@property(nonatomic, copy, nullable) NSString *instanceSystemId;
/** The instance name in the form `projects/ * /zones/ * /instances/ *` */
@property(nonatomic, copy, nullable) NSString *name;
/**
* Current state of instance patch.
*
* Likely values:
* @arg @c kGTLRSystemsManagement_PatchJobInstanceDetails_State_Acked The
* instance acked the notification and will start shortly. (Value:
* "ACKED")
* @arg @c kGTLRSystemsManagement_PatchJobInstanceDetails_State_ApplyingPatches
* The instance is applying patches. (Value: "APPLYING_PATCHES")
* @arg @c kGTLRSystemsManagement_PatchJobInstanceDetails_State_DownloadingPatches
* The instance is downloading patches. (Value: "DOWNLOADING_PATCHES")
* @arg @c kGTLRSystemsManagement_PatchJobInstanceDetails_State_Failed The
* instance has failed to apply the patch. (Value: "FAILED")
* @arg @c kGTLRSystemsManagement_PatchJobInstanceDetails_State_Inactive
* Instance is inactive and cannot be patched. (Value: "INACTIVE")
* @arg @c kGTLRSystemsManagement_PatchJobInstanceDetails_State_NoAgentDetected
* The service could not detect the presence of the agent. Check to
* ensure that the agent is installed, running, and able to communicate
* with the service. (Value: "NO_AGENT_DETECTED")
* @arg @c kGTLRSystemsManagement_PatchJobInstanceDetails_State_Notified The
* instance is notified that it should be patched. (Value: "NOTIFIED")
* @arg @c kGTLRSystemsManagement_PatchJobInstanceDetails_State_PatchStateUnspecified
* Unspecified. (Value: "PATCH_STATE_UNSPECIFIED")
* @arg @c kGTLRSystemsManagement_PatchJobInstanceDetails_State_Pending The
* instance is not yet notified. (Value: "PENDING")
* @arg @c kGTLRSystemsManagement_PatchJobInstanceDetails_State_Rebooting The
* instance is rebooting. (Value: "REBOOTING")
* @arg @c kGTLRSystemsManagement_PatchJobInstanceDetails_State_RunningPostPatchStep
* The instance is running the post-patch step. (Value:
* "RUNNING_POST_PATCH_STEP")
* @arg @c kGTLRSystemsManagement_PatchJobInstanceDetails_State_RunningPrePatchStep
* The instance is running the pre-patch step. (Value:
* "RUNNING_PRE_PATCH_STEP")
* @arg @c kGTLRSystemsManagement_PatchJobInstanceDetails_State_Started The
* instance has started the patching process. (Value: "STARTED")
* @arg @c kGTLRSystemsManagement_PatchJobInstanceDetails_State_Succeeded The
* instance has completed applying patches. (Value: "SUCCEEDED")
* @arg @c kGTLRSystemsManagement_PatchJobInstanceDetails_State_SucceededRebootRequired
* The instance has completed applying patches but a reboot is required.
* (Value: "SUCCEEDED_REBOOT_REQUIRED")
* @arg @c kGTLRSystemsManagement_PatchJobInstanceDetails_State_TimedOut The
* instance exceeded the time out while applying the patch. (Value:
* "TIMED_OUT")
*/
@property(nonatomic, copy, nullable) NSString *state;
@end
/**
* A summary of the current patch state across all instances that this patch
* job affects. Contains counts of instances in different states. These states
* map to `InstancePatchState`. List patch job instance details to see the
* specific states of each instance.
*/
@interface GTLRSystemsManagement_PatchJobInstanceDetailsSummary : GTLRObject
/**
* Number of instances that have acked and will start shortly.
*
* Uses NSNumber of longLongValue.
*/
@property(nonatomic, strong, nullable) NSNumber *ackedInstanceCount;
/**
* Number of instances that are applying patches.
*
* Uses NSNumber of longLongValue.
*/
@property(nonatomic, strong, nullable) NSNumber *applyingPatchesInstanceCount;
/**
* Number of instances that are downloading patches.
*
* Uses NSNumber of longLongValue.
*/
@property(nonatomic, strong, nullable) NSNumber *downloadingPatchesInstanceCount;
/**
* Number of instances that failed.
*
* Uses NSNumber of longLongValue.
*/
@property(nonatomic, strong, nullable) NSNumber *failedInstanceCount;
/**
* Number of instances that are inactive.
*
* Uses NSNumber of longLongValue.
*/
@property(nonatomic, strong, nullable) NSNumber *inactiveInstanceCount;
/**
* Number of instances that do not appear to be running the agent. Check to
* ensure that the agent is installed, running, and able to communicate with
* the service.
*
* Uses NSNumber of longLongValue.
*/
@property(nonatomic, strong, nullable) NSNumber *noAgentDetectedInstanceCount;
/**
* Number of instances notified about patch job.
*
* Uses NSNumber of longLongValue.
*/
@property(nonatomic, strong, nullable) NSNumber *notifiedInstanceCount;
/**
* Number of instances pending patch job.
*
* Uses NSNumber of longLongValue.
*/
@property(nonatomic, strong, nullable) NSNumber *pendingInstanceCount;
/**
* Number of instances that are running the post-patch step.
*
* Uses NSNumber of longLongValue.
*/
@property(nonatomic, strong, nullable) NSNumber *postPatchStepInstanceCount;
/**
* Number of instances that are running the pre-patch step.
*
* Uses NSNumber of longLongValue.
*/
@property(nonatomic, strong, nullable) NSNumber *prePatchStepInstanceCount;
/**
* Number of instances rebooting.
*
* Uses NSNumber of longLongValue.
*/
@property(nonatomic, strong, nullable) NSNumber *rebootingInstanceCount;
/**
* Number of instances that have started.
*
* Uses NSNumber of longLongValue.
*/
@property(nonatomic, strong, nullable) NSNumber *startedInstanceCount;
/**
* Number of instances that have completed successfully.
*
* Uses NSNumber of longLongValue.
*/
@property(nonatomic, strong, nullable) NSNumber *succeededInstanceCount;
/**
* Number of instances that require reboot.
*
* Uses NSNumber of longLongValue.
*/
@property(nonatomic, strong, nullable) NSNumber *succeededRebootRequiredInstanceCount;
/**
* Number of instances that exceeded the time out while applying the patch.
*
* Uses NSNumber of longLongValue.
*/
@property(nonatomic, strong, nullable) NSNumber *timedOutInstanceCount;
@end
/**
* Patch rollout configuration specifications. Contains details on the
* concurrency control when applying patch(es) to all targeted VMs.
*/
@interface GTLRSystemsManagement_PatchRollout : GTLRObject
/**
* The maximum number (or percentage) of VMs per zone to disrupt at any given
* moment. The number of VMs calculated from multiplying the percentage by the
* total number of VMs in a zone is rounded up. During patching, a VM is
* considered disrupted from the time the agent is notified to begin until
* patching has completed. This disruption time includes the time to complete
* reboot and any post-patch steps. A VM contributes to the disruption budget
* if its patching operation fails either when applying the patches, running
* pre or post patch steps, or if it fails to respond with a success
* notification before timing out. VMs that are not running or do not have an
* active agent do not count toward this disruption budget. For zone-by-zone
* rollouts, if the disruption budget in a zone is exceeded, the patch job
* stops, because continuing to the next zone requires completion of the patch
* process in the previous zone. For example, if the disruption budget has a
* fixed value of `10`, and 8 VMs fail to patch in the current zone, the patch
* job continues to patch 2 VMs at a time until the zone is completed. When
* that zone is completed successfully, patching begins with 10 VMs at a time
* in the next zone. If 10 VMs in the next zone fail to patch, the patch job
* stops.
*/
@property(nonatomic, strong, nullable) GTLRSystemsManagement_FixedOrPercent *disruptionBudget;
/**
* Mode of the patch rollout.
*
* Likely values:
* @arg @c kGTLRSystemsManagement_PatchRollout_Mode_ConcurrentZones Patches
* are applied to VMs in all zones at the same time. (Value:
* "CONCURRENT_ZONES")
* @arg @c kGTLRSystemsManagement_PatchRollout_Mode_ModeUnspecified Mode must
* be specified. (Value: "MODE_UNSPECIFIED")
* @arg @c kGTLRSystemsManagement_PatchRollout_Mode_ZoneByZone Patches are
* applied one zone at a time. The patch job begins in the region with
* the lowest number of targeted VMs. Within the region, patching begins
* in the zone with the lowest number of targeted VMs. If multiple
* regions (or zones within a region) have the same number of targeted
* VMs, a tie-breaker is achieved by sorting the regions or zones in
* alphabetical order. (Value: "ZONE_BY_ZONE")
*/
@property(nonatomic, copy, nullable) NSString *mode;
@end
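// Illustrative sketch (not part of the generated header): a zone-by-zone
// rollout that disrupts at most 10% of the VMs in a zone at a time. Assumes
// the standard GTLRObject +object constructor and that
// GTLRSystemsManagement_FixedOrPercent exposes a `percent` property, as in the
// underlying OS Config JSON schema.
//
//   GTLRSystemsManagement_PatchRollout *rollout =
//       [GTLRSystemsManagement_PatchRollout object];
//   rollout.mode = kGTLRSystemsManagement_PatchRollout_Mode_ZoneByZone;
//   GTLRSystemsManagement_FixedOrPercent *budget =
//       [GTLRSystemsManagement_FixedOrPercent object];
//   budget.percent = @10;
//   rollout.disruptionBudget = budget;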
/**
* Sets the time for recurring patch deployments.
*/
@interface GTLRSystemsManagement_RecurringSchedule : GTLRObject
/**
* Optional. The end time at which a recurring patch deployment schedule is no
* longer active.
*/
@property(nonatomic, strong, nullable) GTLRDateTime *endTime;
/**
* Required. The frequency unit of this recurring schedule.
*
* Likely values:
* @arg @c kGTLRSystemsManagement_RecurringSchedule_Frequency_FrequencyUnspecified
* Invalid. A frequency must be specified. (Value:
* "FREQUENCY_UNSPECIFIED")
* @arg @c kGTLRSystemsManagement_RecurringSchedule_Frequency_Monthly
* Indicates that the frequency should be expressed in terms of months.
* (Value: "MONTHLY")
* @arg @c kGTLRSystemsManagement_RecurringSchedule_Frequency_Weekly
* Indicates that the frequency should be expressed in terms of weeks.
* (Value: "WEEKLY")
*/
@property(nonatomic, copy, nullable) NSString *frequency;
/** Output only. The time the last patch job ran successfully. */
@property(nonatomic, strong, nullable) GTLRDateTime *lastExecuteTime;
/** Required. Schedule with monthly executions. */
@property(nonatomic, strong, nullable) GTLRSystemsManagement_MonthlySchedule *monthly;
/** Output only. The time the next patch job is scheduled to run. */
@property(nonatomic, strong, nullable) GTLRDateTime *nextExecuteTime;
/**
* Optional. The time that the recurring schedule becomes effective. Defaults
* to `create_time` of the patch deployment.
*/
@property(nonatomic, strong, nullable) GTLRDateTime *startTime;
/** Required. Time of the day to run a recurring deployment. */
@property(nonatomic, strong, nullable) GTLRSystemsManagement_TimeOfDay *timeOfDay;
/**
* Required. Defines the time zone that `time_of_day` is relative to. The rules
* for daylight saving time are determined by the chosen time zone.
*/
@property(nonatomic, strong, nullable) GTLRSystemsManagement_TimeZone *timeZone;
/** Required. Schedule with weekly executions. */
@property(nonatomic, strong, nullable) GTLRSystemsManagement_WeeklySchedule *weekly;
@end
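// Illustrative sketch (not part of the generated header): a recurring schedule
// that patches every Saturday at 03:00 America/New_York. Assumes the standard
// GTLRObject +object constructor; only the weekly branch of the schedule is
// populated because the frequency is WEEKLY.
//
//   GTLRSystemsManagement_TimeOfDay *startTime =
//       [GTLRSystemsManagement_TimeOfDay object];
//   startTime.hours = @3;
//   GTLRSystemsManagement_TimeZone *newYork =
//       [GTLRSystemsManagement_TimeZone object];
//   newYork.identifier = @"America/New_York";
//   GTLRSystemsManagement_WeeklySchedule *saturdays =
//       [GTLRSystemsManagement_WeeklySchedule object];
//   saturdays.dayOfWeek = kGTLRSystemsManagement_WeeklySchedule_DayOfWeek_Saturday;
//   GTLRSystemsManagement_RecurringSchedule *schedule =
//       [GTLRSystemsManagement_RecurringSchedule object];
//   schedule.frequency = kGTLRSystemsManagement_RecurringSchedule_Frequency_Weekly;
//   schedule.timeOfDay = startTime;
//   schedule.timeZone = newYork;
//   schedule.weekly = saturdays;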
/**
* Represents a time of day. The date and time zone are either not significant
* or are specified elsewhere. An API may choose to allow leap seconds. Related
* types are google.type.Date and `google.protobuf.Timestamp`.
*/
@interface GTLRSystemsManagement_TimeOfDay : GTLRObject
/**
* Hours of day in 24 hour format. Should be from 0 to 23. An API may choose to
* allow the value "24:00:00" for scenarios like business closing time.
*
* Uses NSNumber of intValue.
*/
@property(nonatomic, strong, nullable) NSNumber *hours;
/**
* Minutes of hour of day. Must be from 0 to 59.
*
* Uses NSNumber of intValue.
*/
@property(nonatomic, strong, nullable) NSNumber *minutes;
/**
* Fractions of seconds in nanoseconds. Must be from 0 to 999,999,999.
*
* Uses NSNumber of intValue.
*/
@property(nonatomic, strong, nullable) NSNumber *nanos;
/**
* Seconds of minutes of the time. Must normally be from 0 to 59. An API may
* allow the value 60 if it allows leap-seconds.
*
* Uses NSNumber of intValue.
*/
@property(nonatomic, strong, nullable) NSNumber *seconds;
@end
/**
* Represents a time zone from the [IANA Time Zone
* Database](https://www.iana.org/time-zones).
*/
@interface GTLRSystemsManagement_TimeZone : GTLRObject
/**
* IANA Time Zone Database time zone, e.g. "America/New_York".
*
* identifier property maps to 'id' in JSON (to avoid Objective C's 'id').
*/
@property(nonatomic, copy, nullable) NSString *identifier;
/** Optional. IANA Time Zone Database version number, e.g. "2019a". */
@property(nonatomic, copy, nullable) NSString *version;
@end
/**
* Represents one week day in a month. An example is "the 4th Sunday".
*/
@interface GTLRSystemsManagement_WeekDayOfMonth : GTLRObject
/**
* Required. A day of the week.
*
* Likely values:
* @arg @c kGTLRSystemsManagement_WeekDayOfMonth_DayOfWeek_DayOfWeekUnspecified
* The day of the week is unspecified. (Value: "DAY_OF_WEEK_UNSPECIFIED")
* @arg @c kGTLRSystemsManagement_WeekDayOfMonth_DayOfWeek_Friday Friday
* (Value: "FRIDAY")
* @arg @c kGTLRSystemsManagement_WeekDayOfMonth_DayOfWeek_Monday Monday
* (Value: "MONDAY")
* @arg @c kGTLRSystemsManagement_WeekDayOfMonth_DayOfWeek_Saturday Saturday
* (Value: "SATURDAY")
* @arg @c kGTLRSystemsManagement_WeekDayOfMonth_DayOfWeek_Sunday Sunday
* (Value: "SUNDAY")
* @arg @c kGTLRSystemsManagement_WeekDayOfMonth_DayOfWeek_Thursday Thursday
* (Value: "THURSDAY")
* @arg @c kGTLRSystemsManagement_WeekDayOfMonth_DayOfWeek_Tuesday Tuesday
* (Value: "TUESDAY")
* @arg @c kGTLRSystemsManagement_WeekDayOfMonth_DayOfWeek_Wednesday
* Wednesday (Value: "WEDNESDAY")
*/
@property(nonatomic, copy, nullable) NSString *dayOfWeek;
/**
* Required. Week number in a month. 1-4 indicates the 1st to 4th week of the
* month. -1 indicates the last week of the month.
*
* Uses NSNumber of intValue.
*/
@property(nonatomic, strong, nullable) NSNumber *weekOrdinal;
@end
/**
* Represents a weekly schedule.
*/
@interface GTLRSystemsManagement_WeeklySchedule : GTLRObject
/**
* Required. Day of the week.
*
* Likely values:
* @arg @c kGTLRSystemsManagement_WeeklySchedule_DayOfWeek_DayOfWeekUnspecified
* The day of the week is unspecified. (Value: "DAY_OF_WEEK_UNSPECIFIED")
* @arg @c kGTLRSystemsManagement_WeeklySchedule_DayOfWeek_Friday Friday
* (Value: "FRIDAY")
* @arg @c kGTLRSystemsManagement_WeeklySchedule_DayOfWeek_Monday Monday
* (Value: "MONDAY")
* @arg @c kGTLRSystemsManagement_WeeklySchedule_DayOfWeek_Saturday Saturday
* (Value: "SATURDAY")
* @arg @c kGTLRSystemsManagement_WeeklySchedule_DayOfWeek_Sunday Sunday
* (Value: "SUNDAY")
* @arg @c kGTLRSystemsManagement_WeeklySchedule_DayOfWeek_Thursday Thursday
* (Value: "THURSDAY")
* @arg @c kGTLRSystemsManagement_WeeklySchedule_DayOfWeek_Tuesday Tuesday
* (Value: "TUESDAY")
* @arg @c kGTLRSystemsManagement_WeeklySchedule_DayOfWeek_Wednesday
* Wednesday (Value: "WEDNESDAY")
*/
@property(nonatomic, copy, nullable) NSString *dayOfWeek;
@end
/**
* Windows patching is performed using the Windows Update Agent.
*/
@interface GTLRSystemsManagement_WindowsUpdateSettings : GTLRObject
/**
 * Only apply updates of these Windows Update classifications. If empty, all
 * updates are applied.
*/
@property(nonatomic, strong, nullable) NSArray<NSString *> *classifications;
/** List of KBs to exclude from update. */
@property(nonatomic, strong, nullable) NSArray<NSString *> *excludes;
/**
 * An exclusive list of KBs to be updated. These are the only patches that will
 * be updated. This field must not be used with other patch configurations.
*/
@property(nonatomic, strong, nullable) NSArray<NSString *> *exclusivePatches;
@end
/**
* Yum patching is performed by executing `yum update`. Additional options can
* be set to control how this is executed. Note that not all settings are
* supported on all platforms.
*/
@interface GTLRSystemsManagement_YumSettings : GTLRObject
/**
* List of packages to exclude from update. These packages are excluded by
* using the yum `--exclude` flag.
*/
@property(nonatomic, strong, nullable) NSArray<NSString *> *excludes;
/**
* An exclusive list of packages to be updated. These are the only packages
* that will be updated. If these packages are not installed, they will be
* ignored. This field must not be specified with any other patch configuration
* fields.
*/
@property(nonatomic, strong, nullable) NSArray<NSString *> *exclusivePackages;
/**
* Will cause patch to run `yum update-minimal` instead.
*
* Uses NSNumber of boolValue.
*/
@property(nonatomic, strong, nullable) NSNumber *minimal;
/**
* Adds the `--security` flag to `yum update`. Not supported on all platforms.
*
* Uses NSNumber of boolValue.
*/
@property(nonatomic, strong, nullable) NSNumber *security;
@end
/**
* Zypper patching is performed by running `zypper patch`. See also
* https://en.opensuse.org/SDB:Zypper_manual.
*/
@interface GTLRSystemsManagement_ZypperSettings : GTLRObject
/**
* Install only patches with these categories. Common categories include
* security, recommended, and feature.
*/
@property(nonatomic, strong, nullable) NSArray<NSString *> *categories;
/** List of patches to exclude from update. */
@property(nonatomic, strong, nullable) NSArray<NSString *> *excludes;
/**
 * An exclusive list of patches to be updated. These are the only patches that
 * will be installed, using the 'zypper patch patch:<patch_name>' command. This
 * field must not be used with any other patch configuration fields.
*/
@property(nonatomic, strong, nullable) NSArray<NSString *> *exclusivePatches;
/**
* Install only patches with these severities. Common severities include
* critical, important, moderate, and low.
*/
@property(nonatomic, strong, nullable) NSArray<NSString *> *severities;
/**
* Adds the `--with-optional` flag to `zypper patch`.
*
* Uses NSNumber of boolValue.
*/
@property(nonatomic, strong, nullable) NSNumber *withOptional;
/**
 * Adds the `--with-update` flag to `zypper patch`.
*
* Uses NSNumber of boolValue.
*/
@property(nonatomic, strong, nullable) NSNumber *withUpdate;
@end
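// Illustrative sketch (not part of the generated header): a Zypper
// configuration that installs only security patches of critical or important
// severity and also pulls in optional patches. Assumes the standard GTLRObject
// +object constructor.
//
//   GTLRSystemsManagement_ZypperSettings *zypper =
//       [GTLRSystemsManagement_ZypperSettings object];
//   zypper.categories = @[ @"security" ];
//   zypper.severities = @[ @"critical", @"important" ];
//   zypper.withOptional = @YES;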
NS_ASSUME_NONNULL_END
#pragma clang diagnostic pop
|
from conans import ConanFile, AutoToolsBuildEnvironment, tools
import os
class LibevdevConan(ConanFile):
name = "libevdev"
version = "1.9.0"
license = "X11"
description = "Wrapper library for Linux evdev devices."
homepage = "https://www.freedesktop.org/wiki/Software/libevdev"
url = "https://github.com/ecashptyltd/conan-libevdev.git"
topics = ("evdev")
settings = "os", "compiler", "build_type", "arch"
options = {"shared": [True, False], "fPIC": [True, False]}
default_options = {"shared": False, "fPIC": True}
_source_subfolder = "source_subfolder"
def configure(self):
del self.settings.compiler.libcxx
def source(self):
git = tools.Git(folder=self._source_subfolder)
git.clone("https://gitlab.freedesktop.org/libevdev/libevdev", "libevdev-" + self.version)
def build(self):
self.run("autoreconf -fvi", cwd=self._source_subfolder)
autotools = AutoToolsBuildEnvironment(self)
autotools.configure(configure_dir=self._source_subfolder)
autotools.make()
def package(self):
self.copy("*/libevdev.h", src=self._source_subfolder, dst="include")
self.copy("*/libevdev-uinput.h", src=self._source_subfolder, dst="include")
self.copy("*/libevdev.a", dst="lib", keep_path=False)
def package_info(self):
self.cpp_info.libs = ["evdev"]
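# Illustrative usage (not part of the recipe): with a Conan 1.x client
# installed, the package can be built and added to the local cache with a
# command along the lines of:
#
#   conan create . <user>/<channel>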
|
from django.apps import AppConfig
class ProfillConfig(AppConfig):
name = 'profill'
|
#coding: latin1
#< full
from algoritmia.problems.knapsack import branch_and_bound_knapsack0
v, w, W = [10,2,3,4,2], [12,5,6,2,6], 10
x, score = branch_and_bound_knapsack0(v, w, W)
print(x, score)
#> full |
'use strict';
const fs = require('fs');
const path = require('path');
const Sequelize = require('sequelize');
const basename = path.basename(__filename);
const env = process.env.NODE_ENV || 'development';
const config = require(__dirname + '/../config/config.json')[env];
const db = {};
let sequelize;
if (config.use_env_variable) {
sequelize = new Sequelize(process.env[config.use_env_variable], config);
} else {
sequelize = new Sequelize(config.database, config.username, config.password, config);
}
fs
.readdirSync(__dirname)
.filter(file => {
return (file.indexOf('.') !== 0) && (file !== basename) && (file.slice(-3) === '.js');
})
.forEach(file => {
const model = require(path.join(__dirname, file))(sequelize, Sequelize.DataTypes);
db[model.name] = model;
});
Object.keys(db).forEach(modelName => {
if (db[modelName].associate) {
db[modelName].associate(db);
}
});
db.sequelize = sequelize;
db.Sequelize = Sequelize;
// const User = require("./User");
// const Post = require("./Post");
// const Comment = require("./Comment");
// User.hasMany(Post);
// Post.belongsTo(User, , {
// foreignKey: {
// type: DataTypes.UUID,
// allowNull: false
// }
// });
// Post.hasMany(Comment);
// Comment.belongsTo(Post, , {
// foreignKey: {
// type: DataTypes.UUID,
// allowNull: false
// }
// });
module.exports = db; |
define([
'dojo/_base/declare',
'dojo/_base/lang',
'dojo/_base/array',
'dijit/MenuItem',
'dijit/registry',
'EbrcTracks/View/Dialog/SetTrackYMax',
'EbrcTracks/View/Dialog/SetTrackLogScale',
'EbrcTracks/View/Dialog/DownloadHelp',
'JBrowse/Plugin'
],
function(
declare,
lang,
array,
dijitMenuItem,
dijitRegistry,
SetTrackYMaxDialog,
SetTrackLogScaleDialog,
DownloadHelp,
JBrowsePlugin
) {
return declare( JBrowsePlugin,
{
constructor: function( args ) {
var browser = args.browser;
// do anything you need to initialize your plugin here
console.log( "EbrcTracks plugin starting" );
// hide smrna filter btn
browser.afterMilestone('completely initialized', function () {
var smrnabutton = dijitRegistry.byId('smrna-filter-btn');
var smrnabuttonNode = smrnabutton.domNode;
smrnabuttonNode.parentNode.removeChild(smrnabuttonNode);
});
browser.afterMilestone('initView', function() {
// Patch to disable L/R Two finger scroll;
browser.view.wheelScroll = function( event ) {
if ( !event )
event = window.event;
// if( window.WheelEvent )
// event = window.WheelEvent;
var delta = { x: 0, y: 0 };
if( 'wheelDeltaX' in event ) {
delta.x = event.wheelDeltaX/2;
delta.y = event.wheelDeltaY/2;
}
else if( 'deltaX' in event ) {
var multiplier = navigator.userAgent.indexOf("OS X 10.9")!==-1 ? -5 : -40;
delta.x = Math.abs(event.deltaY) > Math.abs(2*event.deltaX) ? 0 : event.deltaX*multiplier;
delta.y = event.deltaY*-10;
}
else if( event.wheelDelta ) {
delta.y = event.wheelDelta/2;
if( window.opera )
delta.y = -delta.y;
}
else if( event.detail ) {
delta.y = -event.detail*100;
}
delta.x = Math.round( delta.x * 2 );
delta.y = Math.round( delta.y );
var didScroll = false
// PATCH to disallow L/R trackpad scrolling
//if( delta.x ) {
//this.keySlideX( -delta.x );
//didScroll = true
//}
if( delta.y ) {
// 60 pixels per mouse wheel event
var prevY = this.getY()
var currY = this.setY( prevY - delta.y );
// check if clamping happened
if(currY !== prevY) {
didScroll = true
}
}
//the timeout is so that we don't have to run showVisibleBlocks
//for every scroll wheel click (we just wait until so many ms
//after the last one).
if ( this.wheelScrollTimeout )
window.clearTimeout( this.wheelScrollTimeout );
// 100 milliseconds since the last scroll event is an arbitrary
// cutoff for deciding when the user is done scrolling
// (set by a bit of experimentation)
this.wheelScrollTimeout = window.setTimeout( dojo.hitch( this, function() {
this.showVisibleBlocks(true);
this.wheelScrollTimeout = null;
}), 100);
// allow event to bubble out of iframe for example
if(didScroll || this.browser.config.alwaysStopScrollBubble) dojo.stopEvent(event);
};
browser.addGlobalMenuItem( 'help',
new dijitMenuItem(
{
id: 'menubar_downloadhelp',
label: 'Download',
iconClass: 'jbrowseIconHelp',
onClick: function() {
new DownloadHelp( lang.mixin(browser.config.quickHelp || {}, { browser: browser } )).show()
}
})
);
// add a global menu item for resizing all visible quantitative tracks
browser.addGlobalMenuItem( 'view', new dijitMenuItem({
label: 'Set Y-axis max (linear) for quant. tracks',
id: 'menubar_settrackymax',
title: 'Set all visible quantitative tracks to a new ymax',
iconClass: 'jbrowseIconVerticalResize',
onClick: function() {
new SetTrackYMaxDialog({
setCallback: function( maxScore ) {
var tracks = browser.view.visibleTracks();
array.forEach( tracks, function( track ) {
// operate only on XYPlot or Density tracks
if( ! /\b(XYPlot|Density)/.test( track.config.type ) )
return;
track.config.max_score = maxScore;
track.browser.publish('/jbrowse/v1/v/tracks/replace', [track.config]);
});
}
}).show();
}
}));
// add a global menu item for resizing all visible quantitative tracks
browser.addGlobalMenuItem( 'view', new dijitMenuItem({
label: 'Set Log Scale for quant. tracks',
id: 'menubar_settracklogscale',
title: 'Set log scale for all visible quantitative tracks',
iconClass: 'dijitIconConfigure',
onClick: function() {
new SetTrackLogScaleDialog({
setCallback: function( checked ) {
var tracks = browser.view.visibleTracks();
array.forEach( tracks, function( track ) {
// operate only on XYPlot or Density tracks
if( ! /\b(XYPlot|Density)/.test( track.config.type ) )
return;
if(track.config.logScaleOption) {
if(checked) {
track.config.scale = 'log';
}
else {
track.config.scale = 'linear';
}
}
track.browser.publish('/jbrowse/v1/v/tracks/replace', [track.config]);
});
}
}).show();
}
}));
});
}
});
});
|
/*
* TLS PRF P_SHA256
* Copyright (c) 2011, Jouni Malinen <[email protected]>
*
* This software may be distributed under the terms of the BSD license.
* See README for more details.
*/
#include "includes.h"
#include "common.h"
#include "sha256.h"
/**
* tls_prf_sha256 - Pseudo-Random Function for TLS v1.2 (P_SHA256, RFC 5246)
* @secret: Key for PRF
* @secret_len: Length of the key in bytes
* @label: A unique label for each purpose of the PRF
* @seed: Seed value to bind into the key
* @seed_len: Length of the seed
* @out: Buffer for the generated pseudo-random key
* @outlen: Number of bytes of key to generate
 *
 * This function is used to derive new, cryptographically separate keys from a
 * given key in TLS. This PRF is defined in RFC 5246, Chapter 5.
*/
void tls_prf_sha256(const u8 *secret, size_t secret_len, const char *label, const u8 *seed, size_t seed_len, u8 *out, size_t outlen)
{
size_t clen;
u8 A[SHA256_MAC_LEN];
u8 P[SHA256_MAC_LEN];
size_t pos;
const unsigned char *addr[3];
size_t len[3];
addr[0] = A;
len[0] = SHA256_MAC_LEN;
addr[1] = (unsigned char *)label;
len[1] = os_strlen(label);
addr[2] = seed;
len[2] = seed_len;
/*
* RFC 5246, Chapter 5
* A(0) = seed, A(i) = HMAC(secret, A(i-1))
* P_hash = HMAC(secret, A(1) + seed) + HMAC(secret, A(2) + seed) + ..
* PRF(secret, label, seed) = P_SHA256(secret, label + seed)
*/
hmac_sha256_vector(secret, secret_len, 2, &addr[1], &len[1], A);
pos = 0;
while (pos < outlen) {
hmac_sha256_vector(secret, secret_len, 3, addr, len, P);
hmac_sha256(secret, secret_len, A, SHA256_MAC_LEN, A);
clen = outlen - pos;
if (clen > SHA256_MAC_LEN) {
clen = SHA256_MAC_LEN;
}
os_memcpy(out + pos, P, clen);
pos += clen;
}
}
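/*
 * Illustrative usage (not from the original file): expanding a TLS 1.2 master
 * secret into a key block, as in RFC 5246 key expansion. The buffer sizes are
 * examples only.
 *
 *   u8 master_secret[48];
 *   u8 randoms[64];
 *   u8 key_block[104];
 *
 *   tls_prf_sha256(master_secret, sizeof(master_secret),
 *                  "key expansion", randoms, sizeof(randoms),
 *                  key_block, sizeof(key_block));
 */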
|
function foo(){
var a = 10;
var b = 20;
return a + b;
}
function bar(){
var x = 10;
var y = 20;
return x + y;
}
|
const assert = require('assert');
const makeConfig = require('../../../core/util/makeConfig');
describe('make config', function () {
it('should pass the filter arg correctly', function () {
const actualConfig = makeConfig('init', { filter: true });
assert.strictEqual(actualConfig.args.filter, true);
});
it('should work without an option param', function () {
const actualConfig = makeConfig('init');
assert.deepStrictEqual(actualConfig.args, {});
});
});
|
import { API } from '$lib/api/server-side';
/**
* @type {import('@sveltejs/kit').RequestHandler}
*/
export async function get()
{
try
{
const songs = await API.fetchSongs();
return { body: songs };
}
catch(error)
{
if(error.response)
return { status: error?.response?.statusCode, body: error?.response?.body };
return { status: 500, };
}
}
|
export default {
common: {
play: "播放",
songs: "首歌",
},
nav: {
home: "首页",
explore: "发现",
library: "音乐库",
search: "搜索",
github: "GitHub 仓库",
},
home: {
recommendPlaylist: "推荐歌单",
recommendArtist: "推荐艺人",
newAlbum: "新专速递",
seeMore: "查看全部",
charts: "排行榜",
},
library: {
sLibrary: "的音乐库",
likedSongs: "我喜欢的音乐",
sLikedSongs: "喜欢的音乐",
playlists: "歌单",
albums: "专辑",
artists: "艺人",
mvs: "MV",
newPlayList: "新建歌单",
userProfileMenu: {
settings: "设置",
logout: "登出",
},
},
explore: {
explore: "发现",
loadMore: "加载更多",
},
artist: {
latestRelease: "最新发布",
popularSongs: "热门歌曲",
showMore: "显示更多",
showLess: "收起",
EPsSingles: "EP和单曲",
albums: "专辑",
withAlbums: "张专辑",
artist: "艺人",
videos: "个MV",
following: "已关注",
follow: "关注",
},
album: {
released: "发行于",
},
playlist: {
playlist: "歌单",
updatedAt: "最后更新于",
search: "搜索歌单音乐",
},
login: {
accessToAll: "可访问全部数据",
loginText: "登录网易云账号",
search: "搜索网易云账号",
readonly: "只能读取账号公开数据",
usernameLogin: "用户名登录",
searchHolder: "请输入你的网易云用户名",
enterTip: "按 Enter 搜索",
choose: "在列表中选中你的账号",
confirm: "确认",
countryCode: "国际区号",
phone: "手机号",
email: "邮箱",
password: "密码",
login: "登录",
loginWithEmail: "使用邮箱登录",
loginWithPhone: "使用手机号登录",
notice: `YesPlayMusic 承诺不会保存你的任何账号信息到云端。<br />
你的密码会在本地进行 MD5 加密后再传输到网易云 API。<br />
YesPlayMusic 并非网易云官方网站,输入账号信息前请慎重考虑。 你也可以前往
<a href="https://github.com/qier222/YesPlayMusic"
>YesPlayMusic 的 GitHub 源代码仓库</a
>
自行构建并使用自托管的网易云 API。`,
noticeElectron: `你的密码会在本地进行 MD5 加密后再传输到网易云 API。<br />
YesPlayMusic 不会传输你的账号数据到任何非网易云音乐官方的服务器。<br />`,
},
mv: {
moreVideo: "更多视频",
},
next: {
nowPlaying: "正在播放",
nextUp: "即将播放",
},
player: {
like: "喜欢",
previous: "上一首",
next: "下一首",
repeat: "循环播放",
repeatTrack: "单曲循环",
shuffle: "随机播放",
play: "播放",
pause: "暂停",
mute: "静音",
nextUp: "播放列表",
},
modal: {
close: "关闭",
},
search: {
artist: "艺人",
album: "专辑",
song: "歌曲",
mv: "视频",
playlist: "歌单",
noResult: "暂无结果",
searchFor: "搜索",
},
settings: {
settings: "设置",
logout: "登出",
language: "语言",
musicQuality: {
text: "音质选择",
low: "普通",
medium: "较高",
high: "极高",
lossless: "无损",
},
lyricFontSize: {
text: "歌词字体大小",
small: "小",
medium: "中",
large: "大(默认)",
xlarge: "超大",
},
deviceSelector: "音频输出设备",
permissionRequired: "需要麦克风权限",
appearance: {
text: "外观",
auto: "自动",
light: "浅色",
dark: "深色",
},
automaticallyCacheSongs: "自动缓存歌曲",
clearSongsCache: "清除歌曲缓存",
cacheCount: "已缓存 {song} 首 ({size})",
showLyricsTranslation: "显示歌词翻译",
showLyricsDynamicBackground: "显示歌词动态背景",
minimizeToTray: "最小化到托盘",
showGitHubIcon: "显示 GitHub 图标",
showUnavailableSongInGreyStyle: "显示不可播放的歌曲为灰色",
showPlaylistsByAppleMusic: "首页显示来自 Apple Music 的歌单",
enableDiscordRichPresence: "启用 Discord Rich Presence",
},
contextMenu: {
play: "播放",
playNext: "下一首播放",
saveToMyLikedSongs: "添加到我喜欢的音乐",
removeFromMyLikedSongs: "从喜欢的音乐中删除",
},
toast: {
savedToMyLikedSongs: "已添加到我喜欢的音乐",
removedFromMyLikedSongs: "已从喜欢的音乐中删除",
},
};
|
$(document).ready(function () {
var doExist = $("ul.globalErrors").length;
if (doExist) {
setTimeout(function () {
$("ul.globalErrors").fadeOut(3000);
}, 3000);
}
var doEventExist = $("ul.globalSuccess").length;
if (doEventExist) {
setTimeout(function () {
$("ul.globalSuccess").fadeOut(3000);
}, 3000);
}
}); |
/* ------------------------------------------------------------------------------
*
 * # Handsontable - Excel-like tables with extensive functionality
*
* Specific JS code additions for handsontable_advanced.html page
*
* Version: 1.0
* Latest update: Nov 1, 2015
*
* ---------------------------------------------------------------------------- */
$(function() {
// Conditional formatting
// ------------------------------
// Add data
var hot_format_data = [
['', 'Kia', 'Nissan', 'Toyota', 'Honda', 'Mazda', 'Ford'],
['2003', -38293, '', 38849, 32353, -47758, 'Read only'],
['2004', 23433, 88569, 48892, 12322, '', 27840],
['2005', 64393, -89432, 'Read only', 89390, 42853, -12228],
['2006', 45382, 57729, -48823, -12774, '', -98493],
['2007', -86433, 48923, -33378, 'Read only', 90043, 34982],
['2008', 45833, -12293, 12894, 78859, '', 43054],
['2009', 'Read only', '', 49950, -58823, -57882, 89954],
['2010', -85943, 90449, -38882, 34928, '', 23487],
['2011', 44950, -90092, 'Read only', '', 89003, 'Read only'],
['2012', 23486, 'Read only', 47729, 23945, -99001, 48995],
['2013', 90392, '', 48852, 17789, 32984, ''],
['2014', -47382, 88457, '', 58875, -45398, '']
];
// Header row renderer
function firstRowRenderer(instance, td, row, col, prop, value, cellProperties) {
Handsontable.renderers.TextRenderer.apply(this, arguments);
// Add styles to the table cell
td.style.fontWeight = '500';
td.style.color = '#1B5E20';
td.style.background = '#E8F5E9';
}
// Negative values renderer
function negativeValueRenderer(instance, td, row, col, prop, value, cellProperties) {
Handsontable.renderers.TextRenderer.apply(this, arguments);
// If row contains negative number, add class "negative"
if (parseInt(value, 10) < 0) {
td.className = 'text-danger';
}
// If empty cell, add grey background
if (!value || value === '') {
td.style.background = '#f5f5f5';
}
}
// Maps function to lookup string
Handsontable.renderers.registerRenderer('negativeValueRenderer', negativeValueRenderer);
// Define element
var hot_format = document.getElementById('hot_format');
// Initialize with options
var hot_format_init = new Handsontable(hot_format, {
data: hot_format_data,
stretchH: 'all',
afterSelection: function (row, col, row2, col2) {
var meta = this.getCellMeta(row2, col2);
if (meta.readOnly) {
this.updateSettings({fillHandle: false});
}
else {
this.updateSettings({fillHandle: true});
}
},
cells: function (row, col, prop, td) {
var cellProperties = {};
if (row === 0 || this.instance.getData()[row][col] === 'Read only') {
cellProperties.readOnly = true; // make cell read-only if it is in the first row or the text reads 'Read only'
}
if (row === 0 || col === 0) {
cellProperties.renderer = firstRowRenderer; // uses function directly
}
else {
cellProperties.renderer = "negativeValueRenderer"; // uses lookup map
}
return cellProperties;
}
});
// Sorting data
// ------------------------------
// Add sample data for multiple examples
var hot_data = [
[1, "George Washington", "http://en.wikipedia.org/wiki/George_Washington", "30/04/1789", "4/03/1797", "Virginia"],
[2, "John Adams", "http://en.wikipedia.org/wiki/John_Adams", "4/03/1797", "4/03/1801", "Massachusetts"],
[3, "Thomas Jefferson", "http://en.wikipedia.org/wiki/Thomas_Jefferson", "4/03/1801", "4/03/1809", "Virginia"],
[4, "James Madison", "http://en.wikipedia.org/wiki/James_Madison", "4/03/1809", "4/03/1817", "Virginia"],
[5, "James Monroe", "http://en.wikipedia.org/wiki/James_Monroe", "4/03/1817", "4/03/1825", "Virginia"],
[6, "John Quincy Adams", "http://en.wikipedia.org/wiki/John_Quincy_Adams", "4/03/1825", "4/03/1829", "Massachusetts"],
[7, "Andrew Jackson", "http://en.wikipedia.org/wiki/Andrew_Jackson", "4/03/1829", "4/03/1837", "Tennessee"],
[8, "Martin Van Buren", "http://en.wikipedia.org/wiki/Martin_Van_Buren", "4/03/1837", "4/03/1841", "New York"],
[9, "William Henry Harrison", "http://en.wikipedia.org/wiki/William_Henry_Harrison", "4/03/1841", "4/04/1841", "Ohio"],
[10, "John Tyler", "http://en.wikipedia.org/wiki/John_Tyler", "4/04/1841", "4/03/1845", "Virginia"],
[11, "James K. Polk", "http://en.wikipedia.org/wiki/James_K._Polk", "4/03/1845", "4/03/1849", "Tennessee"],
[12, "Zachary Taylor", "http://en.wikipedia.org/wiki/Zachary_Taylor", "4/03/1849", "9/07/1850", "Louisiana"]
];
// Define element
var hot_sorting = document.getElementById('hot_sorting');
// Initialize with options
var hot_sorting_init = new Handsontable(hot_sorting, {
data: hot_data,
stretchH: 'all',
rowHeaders: true,
colHeaders: true,
columnSorting: true,
manualColumnResize: true,
sortIndicator: true
});
// Pagination
// ------------------------------
// Setup data set
var getData = (function () {
// Data
var hot_pagination_data = [
[1, "George Washington", "http://en.wikipedia.org/wiki/George_Washington", "30/04/1789", "4/03/1797", "Virginia"],
[2, "John Adams", "http://en.wikipedia.org/wiki/John_Adams", "4/03/1797", "4/03/1801", "Massachusetts"],
[3, "Thomas Jefferson", "http://en.wikipedia.org/wiki/Thomas_Jefferson", "4/03/1801", "4/03/1809", "Virginia"],
[4, "James Madison", "http://en.wikipedia.org/wiki/James_Madison", "4/03/1809", "4/03/1817", "Virginia"],
[5, "James Monroe", "http://en.wikipedia.org/wiki/James_Monroe", "4/03/1817", "4/03/1825", "Virginia"],
[6, "John Quincy Adams", "http://en.wikipedia.org/wiki/John_Quincy_Adams", "4/03/1825", "4/03/1829", "Massachusetts"],
[7, "Andrew Jackson", "http://en.wikipedia.org/wiki/Andrew_Jackson", "4/03/1829", "4/03/1837", "Tennessee"],
[8, "Martin Van Buren", "http://en.wikipedia.org/wiki/Martin_Van_Buren", "4/03/1837", "4/03/1841", "New York"],
[9, "William Henry Harrison", "http://en.wikipedia.org/wiki/William_Henry_Harrison", "4/03/1841", "4/04/1841", "Ohio"],
[10, "John Tyler", "http://en.wikipedia.org/wiki/John_Tyler", "4/04/1841", "4/03/1845", "Virginia"],
[11, "James K. Polk", "http://en.wikipedia.org/wiki/James_K._Polk", "4/03/1845", "4/03/1849", "Tennessee"],
[12, "Zachary Taylor", "http://en.wikipedia.org/wiki/Zachary_Taylor", "4/03/1849", "9/07/1850", "Louisiana"],
[13, "Millard Fillmore", "http://en.wikipedia.org/wiki/Millard_Fillmore", "9/07/1850", "4/03/1853", "New York"],
[14, "Franklin Pierce", "http://en.wikipedia.org/wiki/Franklin_Pierce", "4/03/1853", "4/03/1857", "New Hampshire"],
[15, "James Buchanan", "http://en.wikipedia.org/wiki/James_Buchanan", "4/03/1857", "4/03/1861", "Pennsylvania"],
[16, "Abraham Lincoln", "http://en.wikipedia.org/wiki/Abraham_Lincoln", "4/03/1861", "15/04/1865", "Illinois"],
[17, "Andrew Johnson", "http://en.wikipedia.org/wiki/Andrew_Johnson", "15/04/1865", "4/03/1869", "Tennessee"],
[18, "Ulysses S. Grant", "http://en.wikipedia.org/wiki/Ulysses_S._Grant", "4/03/1869", "4/03/1877", "Ohio"],
[19, "Rutherford B. Hayes", "http://en.wikipedia.org/wiki/Rutherford_B._Hayes", "4/03/1877", "4/03/1881", "Ohio"],
[20, "James A. Garfield", "http://en.wikipedia.org/wiki/James_A._Garfield", "4/03/1881", "19/09/1881", "Ohio"],
[21, "Chester A. Arthur", "http://en.wikipedia.org/wiki/Chester_A._Arthur", "19/09/1881", "4/03/1885", "New York"],
[22, "Grover Cleveland", "http://en.wikipedia.org/wiki/Grover_Cleveland", "4/03/1885", "4/03/1889", "New York"],
[23, "Benjamin Harrison", "http://en.wikipedia.org/wiki/Benjamin_Harrison", "4/03/1889", "4/03/1893", "Indiana"],
[24, "Grover Cleveland (2nd term)", "http://en.wikipedia.org/wiki/Grover_Cleveland", "4/03/1893", "4/03/1897", "New York"],
[25, "William McKinley", "http://en.wikipedia.org/wiki/William_McKinley", "4/03/1897", "14/9/1901", "Ohio"],
[26, "Theodore Roosevelt", "http://en.wikipedia.org/wiki/Theodore_Roosevelt", "14/9/1901", "04/03/09", "New York"],
[27, "William Howard Taft", "http://en.wikipedia.org/wiki/William_Howard_Taft", "04/03/09", "04/03/13", "Ohio"],
[28, "Woodrow Wilson", "http://en.wikipedia.org/wiki/Woodrow_Wilson", "04/03/13", "04/03/21", "New Jersey"],
[29, "Warren G. Harding", "http://en.wikipedia.org/wiki/Warren_G._Harding", "04/03/21", "02/08/23", "Ohio"],
[30, "Calvin Coolidge", "http://en.wikipedia.org/wiki/Calvin_Coolidge", "02/08/23", "04/03/29", "Massachusetts"],
[31, "Herbert Hoover", "http://en.wikipedia.org/wiki/Herbert_Hoover", "04/03/29", "04/03/33", "Iowa"],
[32, "Franklin D. Roosevelt", "http://en.wikipedia.org/wiki/Franklin_D._Roosevelt", "04/03/33", "12/04/45", "New York"],
[33, "Harry S. Truman", "http://en.wikipedia.org/wiki/Harry_S._Truman", "12/04/45", "20/01/53", "Missouri"],
[34, "Dwight D. Eisenhower", "http://en.wikipedia.org/wiki/Dwight_D._Eisenhower", "20/01/53", "20/01/61", "Texas"],
[35, "John F. Kennedy", "http://en.wikipedia.org/wiki/John_F._Kennedy", "20/01/61", "22/11/63", "Massachusetts"],
[36, "Lyndon B. Johnson", "http://en.wikipedia.org/wiki/Lyndon_B._Johnson", "22/11/63", "20/01/69", "Texas"],
[37, "Richard Nixon", "http://en.wikipedia.org/wiki/Richard_Nixon", "20/01/69", "09/08/74", "California"],
[38, "Gerald Ford", "http://en.wikipedia.org/wiki/Gerald_Ford", "09/08/74", "20/01/77", "Michigan"],
[39, "Jimmy Carter", "http://en.wikipedia.org/wiki/Jimmy_Carter", "20/01/77", "20/01/81", "Georgia"],
[40, "Ronald Reagan", "http://en.wikipedia.org/wiki/Ronald_Reagan", "20/01/81", "20/01/89", "California"],
[41, "George H. W. Bush", "http://en.wikipedia.org/wiki/George_H._W._Bush", "20/01/89", "20/01/93", "Texas"],
[42, "Bill Clinton", "http://en.wikipedia.org/wiki/Bill_Clinton", "20/01/93", "20/01/01", "Arkansas"],
[43, "George W. Bush", "http://en.wikipedia.org/wiki/George_W._Bush", "20/01/01", "20/01/09", "Texas"],
[44, "Barack Obama", "http://en.wikipedia.org/wiki/Barack_Obama", "20/01/09", "Incumbent", "Illinois"]
];
// Paging setup
return function () {
var page = parseInt(window.location.hash.replace('#', ''), 10) || 1,
limit = 10,
row = (page - 1) * limit,
count = page * limit,
part = [];
for (;row < count;row++) {
part.push(hot_pagination_data[row]);
}
// Toggling active class in pagination
if(location.hash != "") {
// Remove active class on load from the first item
$('#hot-pagination > li').removeClass('active');
// Remove active class from siblings on click
$('#hot-pagination > li').on('click', function() {
$(this).siblings('li').removeClass('active');
});
// Add active class
$('#hot-pagination > li').has('a[href="' + location.hash + '"]').addClass('active');
}
return part;
}
})();
// Load data on hash change
Handsontable.Dom.addEvent(window, 'hashchange', function (event) {
hot_pagination_init.loadData(getData());
});
// Define element
var hot_pagination = document.getElementById('hot_pagination');
// Initialize with options
var hot_pagination_init = new Handsontable(hot_pagination, {
data: getData(),
colHeaders: true,
stretchH: 'all'
});
// Pre-populating new rows
// ------------------------------
// Add data
var hot_populate_data = [
[1, "George Washington", "http://en.wikipedia.org/wiki/George_Washington", "30/04/1789", "4/03/1797", "Virginia"],
[2, "John Adams", "http://en.wikipedia.org/wiki/John_Adams", "4/03/1797", "4/03/1801", "Massachusetts"],
[3, "Thomas Jefferson", "http://en.wikipedia.org/wiki/Thomas_Jefferson", "4/03/1801", "4/03/1809", "Virginia"],
[4, "James Madison", "http://en.wikipedia.org/wiki/James_Madison", "4/03/1809", "4/03/1817", "Virginia"],
[5, "James Monroe", "http://en.wikipedia.org/wiki/James_Monroe", "4/03/1817", "4/03/1825", "Virginia"],
[6, "John Quincy Adams", "http://en.wikipedia.org/wiki/John_Quincy_Adams", "4/03/1825", "4/03/1829", "Massachusetts"],
[7, "Andrew Jackson", "http://en.wikipedia.org/wiki/Andrew_Jackson", "4/03/1829", "4/03/1837", "Tennessee"],
[8, "Martin Van Buren", "http://en.wikipedia.org/wiki/Martin_Van_Buren", "4/03/1837", "4/03/1841", "New York"],
[9, "William Henry Harrison", "http://en.wikipedia.org/wiki/William_Henry_Harrison", "4/03/1841", "4/04/1841", "Ohio"],
[10, "John Tyler", "http://en.wikipedia.org/wiki/John_Tyler", "4/04/1841", "4/03/1845", "Virginia"],
[11, "James K. Polk", "http://en.wikipedia.org/wiki/James_K._Polk", "4/03/1845", "4/03/1849", "Tennessee"],
[12, "Zachary Taylor", "http://en.wikipedia.org/wiki/Zachary_Taylor", "4/03/1849", "9/07/1850", "Louisiana"]
];
// Cells template
var tpl = ['one', 'two', 'three', 'four', 'five', 'six'];
// Render empty row
function isEmptyRow(instance, row) {
var rowData = instance.getData()[row];
for (var i = 0, ilen = rowData.length; i < ilen; i++) {
if (rowData[i] !== null) {
return false;
}
}
return true;
}
// Render default values
function defaultValueRenderer(instance, td, row, col, prop, value, cellProperties) {
var args = arguments;
if (args[5] === null && isEmptyRow(instance, row)) {
args[5] = tpl[col];
td.style.color = '#ccc';
}
else {
td.style.color = '';
}
Handsontable.renderers.TextRenderer.apply(this, args);
}
// Define element
var hot_populate = document.getElementById('hot_populate');
// Initialize with options
var hot_populate_init = new Handsontable(hot_populate, {
data: hot_populate_data,
startRows: 8,
startCols: 5,
minSpareRows: 1,
colHeaders: true,
stretchH: 'all',
contextMenu: true,
cells: function (row, col, prop) {
var cellProperties = {};
cellProperties.renderer = defaultValueRenderer;
return cellProperties;
},
beforeChange: function (changes) {
var instance = hot_populate_init,
ilen = changes.length,
clen = instance.colCount,
rowColumnSeen = {},
rowsToFill = {},
i,
c;
for (i = 0; i < ilen; i++) {
// If oldVal is empty
if (changes[i][2] === null && changes[i][3] !== null) {
if (isEmptyRow(instance, changes[i][0])) {
// Add this row/col combination to cache so it will not be overwritten by template
rowColumnSeen[changes[i][0] + '/' + changes[i][1]] = true;
rowsToFill[changes[i][0]] = true;
}
}
}
for (var r in rowsToFill) {
if (rowsToFill.hasOwnProperty(r)) {
for (c = 0; c < clen; c++) {
// If it is not provided by user in this change set, take value from template
if (!rowColumnSeen[r + '/' + c]) {
changes.push([r, c, null, tpl[c]]);
}
}
}
}
}
});
// Highlighting current
// ------------------------------
// Define element
var hot_highlight = document.getElementById('hot_highlight');
// Initialize with options
var hot_highlight_init = new Handsontable(hot_highlight, {
data: hot_data,
minRows: 5,
minCols: 6,
stretchH: 'all',
currentRowClassName: 'active',
currentColClassName: 'active',
rowHeaders: true,
colHeaders: true
});
// Select cell
hot_highlight_init.selectCell(2,2);
// Bootstrap integration
// ------------------------------
// Define element
var hot_bootstrap = document.getElementById('hot_bootstrap');
// Init with options
var hot_bootstrap_init = new Handsontable(hot_bootstrap, {
data: hot_data,
colHeaders: true,
stretchH: 'all',
fixedColumnsLeft: 2,
tableClassName: ['table-hover', 'table-striped']
});
});
|
/* Copyright (c) 2013-2016, David Hauweele <[email protected]>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include <stdio.h>
#include "version.h"
void version(const char *target)
{
printf("%s from " PACKAGE_VERSION "\n", target);
}
#ifdef COMMIT
void commit(void)
{
printf("Commit-Id SHA1 : " COMMIT "\n");
}
#endif /* COMMIT */
|
const db = {
names: [
{id: 0, text: 'James'},
{id: 1, text: 'William'}
]
};
class MainApi{
async getAll(){
return new Promise(y => setTimeout(() => {
y(db.names)
}, 200))
}
async getFirst(){
return new Promise(y => setTimeout(() => {
y(db.names[0])
}, 200))
}
async getLast(){
return new Promise(y => setTimeout(() => {
y(db.names[1])
}, 200))
}
async getPersonId(id){
return new Promise(y => setTimeout(() => {
y(db.names.find(v => v.id === parseInt(id, 10)))
}, 200))
}
async getPersonName(name){
return new Promise(y => setTimeout(() => {
y(db.names.find(v => v.text === name))
}, 200))
}
}
module.exports = {
MainApi
}; |
/*
* Copyright 2018 TWO SIGMA OPEN SOURCE, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var BeakerXPageObject = require('../beakerx.po.js');
var beakerxPO;
describe('Testing of table (python)', function () {
beforeAll(function () {
beakerxPO = new BeakerXPageObject();
beakerxPO.runNotebookByUrl('/test/ipynb/python/TableAPIPythonTest.ipynb');
});
afterAll(function () {
beakerxPO.closeAndHaltNotebook();
});
var cellIndex;
var imageDir = 'python/tableAPI';
describe("Data types for TableDisplay", function(){
it('Can use Array of Integers parameter', function () {
cellIndex = 0;
var width = 120, height = 90;
var canvas = beakerxPO.runCellToGetCanvas(cellIndex);
var imageData = beakerxPO.getCanvasImageData(canvas, width, height);
beakerxPO.checkImageData(imageData.value, imageDir, 'cell1_case1.png');
});
it('Table have index column menu. ', function () {
var tblDisplay = beakerxPO.getTableDisplayByIndex(cellIndex);
expect(beakerxPO.getTableIndexMenu(tblDisplay)).not.toBe(null);
});
it('Can use 2D Array of Integers parameter', function () {
cellIndex += 2;
var width = 120, height = 65;
var canvas = beakerxPO.runCellToGetCanvas(cellIndex);
var imageData = beakerxPO.getCanvasImageData(canvas, width, height);
beakerxPO.checkImageData(imageData.value, imageDir, 'cell2_case1.png');
});
it('Can use Array of Decimals parameter', function () {
cellIndex += 2;
var width = 120, height = 90;
var canvas = beakerxPO.runCellToGetCanvas(cellIndex);
var imageData = beakerxPO.getCanvasImageData(canvas, width, height);
beakerxPO.checkImageData(imageData.value, imageDir, 'cell3_case1.png');
});
it('Can use 2D Array of Decimals parameter', function () {
cellIndex += 2;
var width = 128, height = 65;
var canvas = beakerxPO.runCellToGetCanvas(cellIndex);
var imageData = beakerxPO.getCanvasImageData(canvas, width, height);
beakerxPO.checkImageData(imageData.value, imageDir, 'cell4_case1.png');
});
it('Can use Array of Strings parameter', function () {
cellIndex += 2;
var width = 130, height = 90;
var canvas = beakerxPO.runCellToGetCanvas(cellIndex);
var imageData = beakerxPO.getCanvasImageData(canvas, width, height);
beakerxPO.checkImageData(imageData.value, imageDir, 'cell5_case1.png');
});
it('Can use 2D Array of Strings parameter', function () {
cellIndex += 2;
var width = 110, height = 65;
var canvas = beakerxPO.runCellToGetCanvas(cellIndex);
var imageData = beakerxPO.getCanvasImageData(canvas, width, height);
beakerxPO.checkImageData(imageData.value, imageDir, 'cell6_case1.png');
});
it('Can use Array of Integer Arrays parameter', function () {
cellIndex += 2;
var width = 128, height = 90;
var canvas = beakerxPO.runCellToGetCanvas(cellIndex);
var imageData = beakerxPO.getCanvasImageData(canvas, width, height);
beakerxPO.checkImageData(imageData.value, imageDir, 'cell7_case1.png');
});
it('Can use 2D Array of Integer Arrays parameter', function () {
cellIndex += 2;
var width = 180, height = 65;
var canvas = beakerxPO.runCellToGetCanvas(cellIndex);
var imageData = beakerxPO.getCanvasImageData(canvas, width, height);
beakerxPO.checkImageData(imageData.value, imageDir, 'cell8_case1.png');
});
it('Can use 2D Array of Integer,Decimal,String,Array Arrays parameter', function () {
cellIndex += 2;
var width = 220, height = 115;
var canvas = beakerxPO.runCellToGetCanvas(cellIndex);
var imageData = beakerxPO.getCanvasImageData(canvas, width, height);
beakerxPO.checkImageData(imageData.value, imageDir, 'cell9_case1.png');
});
it('Can use [Integer,Decimal,String,Array] parameter', function () {
cellIndex += 2;
var width = 128, height = 115;
var canvas = beakerxPO.runCellToGetCanvas(cellIndex);
var imageData = beakerxPO.getCanvasImageData(canvas, width, height);
beakerxPO.checkImageData(imageData.value, imageDir, 'cell10_case1.png');
});
it('Can use 2D Arrays of [Integer,Decimal,String,Array] parameter', function () {
cellIndex += 2;
var width = 220, height = 65;
var canvas = beakerxPO.runCellToGetCanvas(cellIndex);
var imageData = beakerxPO.getCanvasImageData(canvas, width, height);
beakerxPO.checkImageData(imageData.value, imageDir, 'cell11_case1.png');
});
it('Can use numbers as name of Array keys (Array parameter)', function () {
cellIndex += 2;
var width = 125, height = 115;
var canvas = beakerxPO.runCellToGetCanvas(cellIndex);
var imageData = beakerxPO.getCanvasImageData(canvas, width, height);
beakerxPO.checkImageData(imageData.value, imageDir, 'cell12_case1.png');
});
it('Can use numbers as name of Array keys (2D Array parameter)', function () {
cellIndex += 2;
var width = 250, height = 65;
var canvas = beakerxPO.runCellToGetCanvas(cellIndex);
var imageData = beakerxPO.getCanvasImageData(canvas, width, height);
beakerxPO.checkImageData(imageData.value, imageDir, 'cell13_case1.png');
});
});
describe("TableDisplay(pandas DataFrame)", function() {
it('TableDisplay should display table from pandas dataFrame', function () {
cellIndex += 2;
browser.log('browser'); // reset log
var width = 100, height = 65;
var canvas = beakerxPO.runCellToGetCanvas(cellIndex);
var imageData = beakerxPO.getCanvasImageData(canvas, width, height);
beakerxPO.checkImageData(imageData.value, imageDir, 'cell14_case1.png');
});
it('Table have index column menu', function () {
var tblDisplay = beakerxPO.getTableDisplayByIndex(cellIndex);
expect(beakerxPO.getTableIndexMenu(tblDisplay)).not.toBe(null);
});
it("Log doesn't have 'SEVERE' level errors. ", function () {
beakerxPO.checkBrowserLogError('SEVERE');
});
});
describe("Use index in pandas DataFrame. ", function() {
it('Table have index column menu. ', function () {
cellIndex += 2;
beakerxPO.runCodeCellByIndex(cellIndex);
var tblDisplay = beakerxPO.getTableDisplayByIndex(cellIndex);
expect(beakerxPO.getTableIndexMenu(tblDisplay)).not.toBe(null);
});
});
describe("Pandas read csv with index_col parameter. ", function() {
it('Should display table. ', function () {
cellIndex += 1;
browser.log('browser'); // reset log
var width = 70, height = 42;
var canvas = beakerxPO.runCellToGetCanvas(cellIndex);
var imageData = beakerxPO.getCanvasImageData(canvas, width, height);
beakerxPO.checkImageData(imageData.value, imageDir, 'cell15_case1.png');
});
it("Log doesn't have 'SEVERE' level errors. ", function () {
beakerxPO.checkBrowserLogError('SEVERE');
});
});
describe('Pandas read csv ', function(){
it('Should display table ', function() {
cellIndex += 2;
var width = 650, height = 90;
var canvas = beakerxPO.runCellToGetCanvas(cellIndex);
var imageData = beakerxPO.getCanvasImageData(canvas, width, height);
beakerxPO.checkImageData(imageData.value, imageDir, 'cell16_case1.png');
});
});
describe('Set alignment provider for "m3" column ', function () {
it('Should display formatted table ', function() {
cellIndex += 2;
var width = 650, height = 90;
var canvas = beakerxPO.runCellToGetCanvas(cellIndex);
var imageData = beakerxPO.getCanvasImageData(canvas, width, height);
beakerxPO.checkImageData(imageData.value, imageDir, 'cell17_case1.png');
});
});
describe('Set string format for times, type and column ', function () {
it('Should display formatted table ', function() {
cellIndex += 2;
var width = 500, height = 90;
var canvas = beakerxPO.runCellToGetCanvas(cellIndex);
var imageData = beakerxPO.getCanvasImageData(canvas, width, height);
beakerxPO.checkImageData(imageData.value, imageDir, 'cell18_case1.png');
});
});
describe('Set column visible ', function () {
it('Should display formatted table ', function() {
cellIndex += 2;
var width = 550, height = 90;
var canvas = beakerxPO.runCellToGetCanvas(cellIndex);
var imageData = beakerxPO.getCanvasImageData(canvas, width, height);
beakerxPO.checkImageData(imageData.value, imageDir, 'cell19_case1.png');
});
});
describe('Set column order ', function () {
it('Should display formatted table ', function() {
cellIndex += 2;
var width = 410, height = 90;
var canvas = beakerxPO.runCellToGetCanvas(cellIndex);
var imageData = beakerxPO.getCanvasImageData(canvas, width, height);
beakerxPO.checkImageData(imageData.value, imageDir, 'cell20_case1.png');
});
});
describe('Add CellHighlighter ', function () {
it('Should display formatted table ', function() {
cellIndex += 2;
var width = 440, height = 90;
var canvas = beakerxPO.runCellToGetCanvas(cellIndex);
var imageData = beakerxPO.getCanvasImageData(canvas, width, height);
beakerxPO.checkImageData(imageData.value, imageDir, 'cell21_case1.png');
});
});
}); |
# -*- coding: utf-8 -*-
import os
import json
import pathlib
import traceback
import yaml
import logging
import logging.config
import time
import numpy
import aiohttp
from aiohttp import web
def _get_logger():
logger = logging.getLogger('entitymanager.server')
return logger
class JsonEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, numpy.integer):
return int(obj)
elif isinstance(obj, numpy.floating):
return float(obj)
elif isinstance(obj, numpy.ndarray):
return obj.tolist()
else:
return super(JsonEncoder, self).default(obj)
class EntityManagerServer:
def __init__(self):
self.logger = _get_logger()
async def handle_request_health(self, request):
return web.Response()
LOGGING_CONFIG_TEXT = """
version: 1
root:
  level: DEBUG
  handlers: ['console']
formatters:
  json:
    class: pythonjsonlogger.jsonlogger.JsonFormatter
    format: "(asctime) (levelname) (name) (message)"
filters:
  entitymanagerlogfilter:
    (): entitymanager.server.EntityManagerLogFilter
handlers:
  console:
    class: logging.StreamHandler
    level: INFO
    stream: ext://sys.stdout
    formatter: json
    filters: [entitymanagerlogfilter]
"""
@web.middleware
async def log_error_middleware(request, handler):
try:
response = await handler(request)
except aiohttp.web_exceptions.HTTPException:
# assume if we're throwing this that it's already logged
raise
except Exception:
_get_logger().exception("Unexpected exception in call")
error_string = "Internal Server Error\n" + traceback.format_exc()
raise aiohttp.web_exceptions.HTTPInternalServerError(text=error_string)
return response
def initialize_web_app(app, entity_manager_server):
app.middlewares.append(log_error_middleware)
app.router.add_get('/health', entity_manager_server.handle_request_health)
class EntityManagerLogFilter(logging.Filter):
def __init__(self):
self.language = os.environ.get("ENTITY_MANAGER_LANGUAGE", "en")
self.version = os.environ.get("ENTITY_MANAGER_VERSION", None)
def filter(self, record):
"""Add language, and if available, the version"""
record.entity_manager_language = self.language
if self.version:
record.entity_manager_version = self.version
return True
def main():
"""Main function"""
logging_config_file = os.environ.get("LOGGING_CONFIG_FILE", None)
if logging_config_file:
logging_config_path = pathlib.Path(logging_config_file)
with logging_config_path.open() as file_handle:
logging_config = yaml.safe_load(file_handle)
else:
logging_config = yaml.safe_load(LOGGING_CONFIG_TEXT)
print("*** LOGGING CONFIG ***")
print(logging_config)
print("*** LOGGING CONFIG ***")
logging.config.dictConfig(logging_config)
config = SvcConfig.get_instance()
server = EntityManagerServer()
server.load(config.vectors_file)
app = web.Application()
initialize_web_app(app, server)
web.run_app(app, port=config.server_port)
if __name__ == '__main__':
main()
|
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef PRINTING_BACKEND_PRINT_BACKEND_CUPS_H_
#define PRINTING_BACKEND_PRINT_BACKEND_CUPS_H_
#include <memory>
#include <string>
#include "base/files/file_util.h"
#include "printing/backend/cups_helper.h"
#include "printing/backend/print_backend.h"
#include "printing/printing_export.h"
#include "url/gurl.h"
namespace printing {
class PrintBackendCUPS : public PrintBackend {
public:
PrintBackendCUPS(const GURL& print_server_url,
http_encryption_t encryption,
bool blocking,
const std::string& locale);
// This static function is exposed here for use in the tests.
PRINTING_EXPORT static bool PrinterBasicInfoFromCUPS(
const cups_dest_t& printer,
PrinterBasicInfo* printer_info);
private:
struct DestinationDeleter {
void operator()(cups_dest_t* dest) const;
};
using ScopedDestination = std::unique_ptr<cups_dest_t, DestinationDeleter>;
~PrintBackendCUPS() override {}
// PrintBackend implementation.
bool EnumeratePrinters(PrinterList* printer_list) override;
std::string GetDefaultPrinterName() override;
bool GetPrinterBasicInfo(const std::string& printer_name,
PrinterBasicInfo* printer_info) override;
bool GetPrinterSemanticCapsAndDefaults(
const std::string& printer_name,
PrinterSemanticCapsAndDefaults* printer_info) override;
bool GetPrinterCapsAndDefaults(const std::string& printer_name,
PrinterCapsAndDefaults* printer_info) override;
std::string GetPrinterDriverInfo(const std::string& printer_name) override;
bool IsValidPrinter(const std::string& printer_name) override;
  // The following functions are wrappers around the corresponding CUPS
  // functions. The <function>2() variants are used when a print server is
  // specified; otherwise the plain versions are called. Note that specifying
  // CUPS_HTTP_DEFAULT in the <function>2() variants does not work in CUPS
  // versions prior to 1.4.
int GetDests(cups_dest_t** dests);
base::FilePath GetPPD(const char* name);
// Wrapper around cupsGetNamedDest().
ScopedDestination GetNamedDest(const std::string& printer_name);
GURL print_server_url_;
http_encryption_t cups_encryption_;
bool blocking_;
};
} // namespace printing
#endif // PRINTING_BACKEND_PRINT_BACKEND_CUPS_H_
|
from os.path import join, dirname
from dotenv import load_dotenv
import os
dotenv_path = join(dirname(__file__), '.env')
load_dotenv(dotenv_path)
'''
Configuration for Redis.
Although this package uses python-dotenv, it is recommended to configure
these values through environment variables.
'''
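# Hedged usage sketch (illustrative; assumes the `redis` package is installed):
#
#     import redis
#     client = redis.Redis(**redis_config)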
redis_config = {
"host": os.getenv("REDIS_HOST", "127.0.0.1"),
"port": int(os.getenv("REDIS_PORT", "6379")),
"db": int(os.getenv("REDIS_DB", "0")),
"password": os.getenv("REDIS_PASSWORD", None)
} |
'use strict';
describe('Controller: CarTypesListCtrl', function () {
// load the controller's module
beforeEach(module('carRentalAppApp'));
var CarTypesListCtrl,
scope;
// Initialize the controller and a mock scope
beforeEach(inject(function ($controller, $rootScope) {
scope = $rootScope.$new();
CarTypesListCtrl = $controller('CarTypesListCtrl', {
$scope: scope
// place here mocked dependencies
});
}));
it('should attach a list of awesomeThings to the scope', function () {
expect(CarTypesListCtrl.awesomeThings.length).toBe(3);
});
});
|
// The following code creates accessible and
// focusable markers from the `lok` location data.
var tourStopsa = [];
var datamap = [];
lok.forEach(item => {
var geo = {lat: parseFloat(item.lat), lng: parseFloat(item.lng)};
datamap.push(geo);
datamap.push(item.nama_lokasi);
tourStopsa.push(datamap);
datamap = [];
});
console.log(tourStopsa);
let map;
function initMap() {
map = new google.maps.Map(document.getElementById("map"), {
// mapId: "8e0a97af9386fef",
zoom: 13,
center: { lat: -7.5973464, lng: 111.9066452 },
mapTypeControl: false,
});
  // Set LatLng and title text for the markers from the data built above. The
  // first marker receives the initial focus when tab is pressed. Use arrow keys
  // to move between markers; press tab again to cycle through the map controls.
const tourStops = tourStopsa;
// Create an info window to share between markers.
const infoWindow = new google.maps.InfoWindow();
// Create the markers.
tourStops.forEach(([position, title], i) => {
const marker = new google.maps.Marker({
position,
map,
title: `${i + 1}. ${title}`,
label: `${i + 1}`,
optimized: false,
});
// Add a click listener for each marker, and set up the info window.
marker.addListener("click", () => {
infoWindow.close();
infoWindow.setContent(marker.getTitle());
infoWindow.open(marker.getMap(), marker);
});
});
// Add a style-selector control to the map.
const styleControl = document.getElementById("style-selector-control");
map.controls[google.maps.ControlPosition.TOP_LEFT].push(styleControl);
  // Set the map's initial style to "hide"; the selector value is applied on change below.
const styleSelector = document.getElementById("style-selector");
map.setOptions({ styles: styles["hide"] });
// map.setOptions({ styles: styles[styleSelector.value] });
// Apply new JSON when the user selects a different style.
styleSelector.addEventListener("change", () => {
map.setOptions({ styles: styles[styleSelector.value] });
});
}
const styles = {
default: [],
hide: [
{
featureType: "poi.business",
stylers: [{ visibility: "off" }],
},
{
featureType: "transit",
elementType: "labels.icon",
stylers: [{ visibility: "off" }],
},
],
};
// try
// elevation
// https://developers.google.com/maps/documentation/javascript/examples/elevation-simple
// center
// https://developers.google.com/maps/documentation/javascript/examples/control-custom |
import argparse
import random
import unittest
from multiprocessing import Manager
import torch
import torch.nn as nn
from fairseq import distributed_utils, optim
class Model(nn.Module):
def __init__(self, input_size, output_size):
super(Model, self).__init__()
self.fc = nn.Linear(input_size, output_size)
def forward(self, input):
output = self.fc(input)
return output
def setup_model_loss_criterion(args, rank, is_cuda):
"""
setup model, criterion and optimizer based on input args
"""
args.distributed_rank = rank
distributed_utils.distributed_init(args)
torch.manual_seed(1)
model = Model(args.input_size, args.nb_classes)
loss_fn = nn.CrossEntropyLoss()
if is_cuda:
model = model.cuda()
loss_fn = loss_fn.cuda()
optimizer = optim.sgd.SGD(args, model.parameters())
optimizer = optim.FairseqBMUF(args, optimizer)
return model, loss_fn, optimizer
def train_step(input, target, model, loss_fn, optimizer):
"""Do forward, backward and parameter update."""
model.train()
output = model(input)
loss = loss_fn(output, target)
optimizer.backward(loss)
optimizer.step()
def single_gpu_training(args, rank, iterations, shared_results):
is_cuda = torch.cuda.is_available()
if is_cuda:
torch.cuda.set_device(rank)
model, loss_fn, optimizer = setup_model_loss_criterion(args, rank, is_cuda)
for _ in range(iterations):
input = torch.randn(1, args.input_size)
target = torch.empty(args.batch_size, dtype=torch.long).random_(args.nb_classes)
if is_cuda:
input = input.cuda()
target = target.cuda()
train_step(input, target, model, loss_fn, optimizer)
results = []
for param in model.parameters():
if len(results) == 0:
results = param.flatten().cpu().data
else:
results = torch.cat((results, param.flatten().cpu().data), 0)
shared_results[rank] = results
def setup_args():
args = argparse.Namespace()
args.global_sync_iter = 20
args.block_momentum = 0.875
args.block_lr = 0.5
args.input_size = 5
args.nb_classes = 2
args.batch_size = 1
args.lr = [1e-3]
args.momentum = 0
args.weight_decay = 0
args.warmup_iterations = 0
args.use_nbm = True
args.average_sync = True
args.global_sync_iter = 1
args.distributed_backend = "gloo"
args.distributed_world_size = 2
port = random.randint(10000, 20000)
args.distributed_init_method = "tcp://localhost:{port}".format(port=port)
args.distributed_init_host = "localhost"
args.distributed_port = port + 1
args.local_world_size = args.distributed_world_size
return args
class TestBMUF(unittest.TestCase):
def bmuf_process(self, args, iterations):
processes = []
results = Manager().dict()
ctx = torch.multiprocessing.get_context("spawn")
for rank in range(args.distributed_world_size):
p = ctx.Process(
target=single_gpu_training, args=(args, rank, iterations, results)
)
p.start()
processes.append(p)
for p in processes:
p.join()
# Make sure params in both machines are same
assert len(results) == 2
self.assertAlmostEqual(results[0], results[1])
def test_bmuf_sync(self):
# Train model for 1 iteration and do bmuf sync without doing warmup
args = setup_args()
iterations = 1
self.bmuf_process(args, iterations)
def test_warmup_sync(self):
# Train model for 20 iteration and do warmup sync without doing bmuf sync
args = setup_args()
args.warmup_iterations = 20
iterations = 20
self.bmuf_process(args, iterations)
def test_warmup_sync_bmuf_sync(self):
# Train model for 25 iteration and do warmup sync after 20 iteration
# and bmuf sync after 25 iteration
args = setup_args()
args.warmup_iterations = 20
args.global_sync_iter = 5
iterations = 25
self.bmuf_process(args, iterations)
def assertAlmostEqual(self, t1, t2):
self.assertEqual(t1.size(), t2.size(), "size mismatch")
self.assertLess((t1 - t2).abs().max(), 1e-4)
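# Standard unittest entry point so the module can also be run directly
# (a convenience addition; the original file may rely on an external test runner).
if __name__ == "__main__":
    unittest.main()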
|
// SPDX-License-Identifier: GPL-2.0+
/*
* Pvpanic Device Support
*
* Copyright (C) 2013 Fujitsu.
* Copyright (C) 2018 ZTE.
* Copyright (C) 2021 Oracle.
*/
#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/kexec.h>
#include <linux/mod_devicetable.h>
#include <linux/module.h>
#include <linux/platform_device.h>
#include <linux/types.h>
#include <linux/cdev.h>
#include <linux/list.h>
#include <uapi/misc/pvpanic.h>
#include "pvpanic.h"
MODULE_AUTHOR("Mihai Carabas <[email protected]>");
MODULE_DESCRIPTION("pvpanic device driver ");
MODULE_LICENSE("GPL");
static struct list_head pvpanic_list;
static spinlock_t pvpanic_lock;
static void
pvpanic_send_event(unsigned int event)
{
struct pvpanic_instance *pi_cur;
spin_lock(&pvpanic_lock);
list_for_each_entry(pi_cur, &pvpanic_list, list) {
if (event & pi_cur->capability & pi_cur->events)
iowrite8(event, pi_cur->base);
}
spin_unlock(&pvpanic_lock);
}
static int
pvpanic_panic_notify(struct notifier_block *nb, unsigned long code,
void *unused)
{
unsigned int event = PVPANIC_PANICKED;
if (kexec_crash_loaded())
event = PVPANIC_CRASH_LOADED;
pvpanic_send_event(event);
return NOTIFY_DONE;
}
static struct notifier_block pvpanic_panic_nb = {
.notifier_call = pvpanic_panic_notify,
	.priority = 1, /* let this be called before the broken drm_fb_helper */
};
int pvpanic_probe(struct pvpanic_instance *pi)
{
if (!pi || !pi->base)
return -EINVAL;
spin_lock(&pvpanic_lock);
list_add(&pi->list, &pvpanic_list);
spin_unlock(&pvpanic_lock);
return 0;
}
EXPORT_SYMBOL_GPL(pvpanic_probe);
void pvpanic_remove(struct pvpanic_instance *pi)
{
struct pvpanic_instance *pi_cur, *pi_next;
if (!pi)
return;
spin_lock(&pvpanic_lock);
list_for_each_entry_safe(pi_cur, pi_next, &pvpanic_list, list) {
if (pi_cur == pi) {
list_del(&pi_cur->list);
break;
}
}
spin_unlock(&pvpanic_lock);
}
EXPORT_SYMBOL_GPL(pvpanic_remove);
static int pvpanic_init(void)
{
INIT_LIST_HEAD(&pvpanic_list);
spin_lock_init(&pvpanic_lock);
atomic_notifier_chain_register(&panic_notifier_list,
&pvpanic_panic_nb);
return 0;
}
static void pvpanic_exit(void)
{
atomic_notifier_chain_unregister(&panic_notifier_list,
&pvpanic_panic_nb);
}
module_init(pvpanic_init);
module_exit(pvpanic_exit);
|
// Copyright 2015-2018 Espressif Systems (Shanghai) PTE LTD
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <stdint.h>
#include <string.h>
#include <stdbool.h>
#include "esp_attr.h"
#include "esp_err.h"
#include "esp_log.h"
#include "esp_system.h"
#include "esp_efuse.h"
#include "cache_err_int.h"
#include "esp_clk_internal.h"
#include "esp_rom_efuse.h"
#include "esp_rom_uart.h"
#include "esp_rom_sys.h"
#include "sdkconfig.h"
#if CONFIG_IDF_TARGET_ESP32
#include "soc/dport_reg.h"
#include "esp32/rtc.h"
#include "esp32/rom/cache.h"
#include "esp32/rom/rtc.h"
#include "esp32/spiram.h"
#elif CONFIG_IDF_TARGET_ESP32S2
#include "esp32s2/rtc.h"
#include "esp32s2/rom/cache.h"
#include "esp32s2/rom/rtc.h"
#include "esp32s2/spiram.h"
#include "esp32s2/dport_access.h"
#include "esp32s2/memprot.h"
#elif CONFIG_IDF_TARGET_ESP32S3
#include "esp32s3/rtc.h"
#include "esp32s3/rom/cache.h"
#include "esp32s3/rom/rtc.h"
#include "esp32s3/spiram.h"
#include "esp32s3/dport_access.h"
#include "esp32s3/memprot.h"
#include "soc/assist_debug_reg.h"
#include "soc/cache_memory.h"
#include "soc/system_reg.h"
#elif CONFIG_IDF_TARGET_ESP32C3
#include "esp32c3/rtc.h"
#include "esp32s3/rom/cache.h"
#include "esp32c3/rom/rtc.h"
#include "soc/cache_memory.h"
#include "esp32c3/memprot.h"
#endif
#include "bootloader_flash_config.h"
#include "esp_private/crosscore_int.h"
#include "esp_flash_encrypt.h"
#include "hal/rtc_io_hal.h"
#include "hal/gpio_hal.h"
#include "hal/wdt_hal.h"
#include "soc/rtc.h"
#include "soc/efuse_reg.h"
#include "soc/periph_defs.h"
#include "soc/cpu.h"
#include "soc/rtc.h"
#include "soc/spinlock.h"
#if CONFIG_ESP32_TRAX || CONFIG_ESP32S2_TRAX
#include "trax.h"
#endif
#include "bootloader_mem.h"
#if CONFIG_APP_BUILD_TYPE_ELF_RAM
#if CONFIG_IDF_TARGET_ESP32
#include "esp32/rom/spi_flash.h"
#endif // CONFIG_IDF_TARGET_ESP32
#if CONFIG_IDF_TARGET_ESP32S2
#include "esp32s2/rom/spi_flash.h"
#endif // CONFIG_IDF_TARGET_ESP32S2
#if CONFIG_IDF_TARGET_ESP32S3
#include "esp32s3/rom/spi_flash.h"
#endif // CONFIG_IDF_TARGET_ESP32S3
#if CONFIG_IDF_TARGET_ESP32C3
#include "esp32c3/rom/spi_flash.h"
#endif // CONFIG_IDF_TARGET_ESP32C3
#endif // CONFIG_APP_BUILD_TYPE_ELF_RAM
// Set efuse ROM_LOG_MODE on first boot
//
// For CONFIG_BOOT_ROM_LOG_ALWAYS_ON (default) or undefined (ESP32), leave
// ROM_LOG_MODE undefined (no need to call this function during startup)
#if CONFIG_BOOT_ROM_LOG_ALWAYS_OFF
#define ROM_LOG_MODE ESP_EFUSE_ROM_LOG_ALWAYS_OFF
#elif CONFIG_BOOT_ROM_LOG_ON_GPIO_LOW
#define ROM_LOG_MODE ESP_EFUSE_ROM_LOG_ON_GPIO_LOW
#elif CONFIG_BOOT_ROM_LOG_ON_GPIO_HIGH
#define ROM_LOG_MODE ESP_EFUSE_ROM_LOG_ON_GPIO_HIGH
#endif
#include "esp_private/startup_internal.h"
#include "esp_private/system_internal.h"
extern int _bss_start;
extern int _bss_end;
extern int _rtc_bss_start;
extern int _rtc_bss_end;
extern int _vector_table;
static const char *TAG = "cpu_start";
#if CONFIG_IDF_TARGET_ESP32
#if CONFIG_SPIRAM_ALLOW_BSS_SEG_EXTERNAL_MEMORY
extern int _ext_ram_bss_start;
extern int _ext_ram_bss_end;
#endif
#ifdef CONFIG_ESP32_IRAM_AS_8BIT_ACCESSIBLE_MEMORY
extern int _iram_bss_start;
extern int _iram_bss_end;
#endif
#endif // CONFIG_IDF_TARGET_ESP32
#if !CONFIG_ESP_SYSTEM_SINGLE_CORE_MODE
static volatile bool s_cpu_up[SOC_CPU_CORES_NUM] = { false };
static volatile bool s_cpu_inited[SOC_CPU_CORES_NUM] = { false };
static volatile bool s_resume_cores;
#endif
// If CONFIG_SPIRAM_IGNORE_NOTFOUND is set and external RAM is not found or errors out on testing, this is set to false.
bool g_spiram_ok = true;
#if !CONFIG_ESP_SYSTEM_SINGLE_CORE_MODE
void startup_resume_other_cores(void)
{
s_resume_cores = true;
}
void IRAM_ATTR call_start_cpu1(void)
{
cpu_hal_set_vecbase(&_vector_table);
ets_set_appcpu_boot_addr(0);
bootloader_init_mem();
#if CONFIG_ESP_CONSOLE_UART_NONE
esp_rom_install_channel_putc(1, NULL);
esp_rom_install_channel_putc(2, NULL);
#else // CONFIG_ESP_CONSOLE_UART_NONE
esp_rom_install_uart_printf();
esp_rom_uart_set_as_console(CONFIG_ESP_CONSOLE_UART_NUM);
#endif
#if CONFIG_IDF_TARGET_ESP32
DPORT_REG_SET_BIT(DPORT_APP_CPU_RECORD_CTRL_REG, DPORT_APP_CPU_PDEBUG_ENABLE | DPORT_APP_CPU_RECORD_ENABLE);
DPORT_REG_CLR_BIT(DPORT_APP_CPU_RECORD_CTRL_REG, DPORT_APP_CPU_RECORD_ENABLE);
#else
REG_WRITE(ASSIST_DEBUG_CORE_1_RCD_PDEBUGENABLE_REG, 1);
REG_WRITE(ASSIST_DEBUG_CORE_1_RCD_RECORDING_REG, 1);
#endif
s_cpu_up[1] = true;
ESP_EARLY_LOGI(TAG, "App cpu up.");
//Take care putting stuff here: if asked, FreeRTOS will happily tell you the scheduler
//has started, but it isn't active *on this CPU* yet.
esp_cache_err_int_init();
#if CONFIG_IDF_TARGET_ESP32
#if CONFIG_ESP32_TRAX_TWOBANKS
trax_start_trace(TRAX_DOWNCOUNT_WORDS);
#endif
#endif
s_cpu_inited[1] = true;
while (!s_resume_cores) {
esp_rom_delay_us(100);
}
SYS_STARTUP_FN();
}
static void start_other_core(void)
{
esp_chip_info_t chip_info;
esp_chip_info(&chip_info);
// If not the single core variant of a target - check this since there is
// no separate soc_caps.h for the single core variant.
if (!(chip_info.cores > 1)) {
ESP_EARLY_LOGE(TAG, "Running on single core variant of a chip, but app is built with multi-core support.");
ESP_EARLY_LOGE(TAG, "Check that CONFIG_FREERTOS_UNICORE is enabled in menuconfig");
abort();
}
ESP_EARLY_LOGI(TAG, "Starting app cpu, entry point is %p", call_start_cpu1);
#if CONFIG_IDF_TARGET_ESP32
Cache_Flush(1);
Cache_Read_Enable(1);
#endif
esp_cpu_unstall(1);
// Enable clock and reset APP CPU. Note that OpenOCD may have already
// enabled clock and taken APP CPU out of reset. In this case don't reset
// APP CPU again, as that will clear the breakpoints which may have already
// been set.
#if CONFIG_IDF_TARGET_ESP32
if (!DPORT_GET_PERI_REG_MASK(DPORT_APPCPU_CTRL_B_REG, DPORT_APPCPU_CLKGATE_EN)) {
DPORT_SET_PERI_REG_MASK(DPORT_APPCPU_CTRL_B_REG, DPORT_APPCPU_CLKGATE_EN);
DPORT_CLEAR_PERI_REG_MASK(DPORT_APPCPU_CTRL_C_REG, DPORT_APPCPU_RUNSTALL);
DPORT_SET_PERI_REG_MASK(DPORT_APPCPU_CTRL_A_REG, DPORT_APPCPU_RESETTING);
DPORT_CLEAR_PERI_REG_MASK(DPORT_APPCPU_CTRL_A_REG, DPORT_APPCPU_RESETTING);
}
#elif CONFIG_IDF_TARGET_ESP32S3
if (!REG_GET_BIT(SYSTEM_CORE_1_CONTROL_0_REG, SYSTEM_CONTROL_CORE_1_CLKGATE_EN)) {
REG_SET_BIT(SYSTEM_CORE_1_CONTROL_0_REG, SYSTEM_CONTROL_CORE_1_CLKGATE_EN);
REG_CLR_BIT(SYSTEM_CORE_1_CONTROL_0_REG, SYSTEM_CONTROL_CORE_1_RUNSTALL);
REG_SET_BIT(SYSTEM_CORE_1_CONTROL_0_REG, SYSTEM_CONTROL_CORE_1_RESETING);
REG_CLR_BIT(SYSTEM_CORE_1_CONTROL_0_REG, SYSTEM_CONTROL_CORE_1_RESETING);
}
#endif
ets_set_appcpu_boot_addr((uint32_t)call_start_cpu1);
bool cpus_up = false;
while (!cpus_up) {
cpus_up = true;
for (int i = 0; i < SOC_CPU_CORES_NUM; i++) {
cpus_up &= s_cpu_up[i];
}
esp_rom_delay_us(100);
}
}
#endif // !CONFIG_ESP_SYSTEM_SINGLE_CORE_MODE
static void intr_matrix_clear(void)
{
for (int i = 0; i < ETS_MAX_INTR_SOURCE; i++) {
intr_matrix_set(0, i, ETS_INVALID_INUM);
#if !CONFIG_ESP_SYSTEM_SINGLE_CORE_MODE
intr_matrix_set(1, i, ETS_INVALID_INUM);
#endif
}
}
/*
* We arrive here after the bootloader finished loading the program from flash. The hardware is mostly uninitialized,
* and the app CPU is in reset. We do have a stack, so we can do the initialization in C.
*/
void IRAM_ATTR call_start_cpu0(void)
{
#if !CONFIG_ESP_SYSTEM_SINGLE_CORE_MODE
RESET_REASON rst_reas[SOC_CPU_CORES_NUM];
#else
RESET_REASON rst_reas[1];
#endif
#ifdef __riscv
// Configure the global pointer register
// (This should be the first thing IDF app does, as any other piece of code could be
// relaxed by the linker to access something relative to __global_pointer$)
__asm__ __volatile__ (
".option push\n"
".option norelax\n"
"la gp, __global_pointer$\n"
".option pop"
);
#endif
// Move exception vectors to IRAM
cpu_hal_set_vecbase(&_vector_table);
rst_reas[0] = rtc_get_reset_reason(0);
#if !CONFIG_ESP_SYSTEM_SINGLE_CORE_MODE
rst_reas[1] = rtc_get_reset_reason(1);
#endif
#ifndef CONFIG_BOOTLOADER_WDT_ENABLE
// from panic handler we can be reset by RWDT or TG0WDT
if (rst_reas[0] == RTCWDT_SYS_RESET || rst_reas[0] == TG0WDT_SYS_RESET
#if !CONFIG_ESP_SYSTEM_SINGLE_CORE_MODE
|| rst_reas[1] == RTCWDT_SYS_RESET || rst_reas[1] == TG0WDT_SYS_RESET
#endif
) {
wdt_hal_context_t rtc_wdt_ctx = {.inst = WDT_RWDT, .rwdt_dev = &RTCCNTL};
wdt_hal_write_protect_disable(&rtc_wdt_ctx);
wdt_hal_disable(&rtc_wdt_ctx);
wdt_hal_write_protect_enable(&rtc_wdt_ctx);
}
#endif
//Clear BSS. Please do not attempt to do any complex stuff (like early logging) before this.
memset(&_bss_start, 0, (&_bss_end - &_bss_start) * sizeof(_bss_start));
#if defined(CONFIG_IDF_TARGET_ESP32) && defined(CONFIG_ESP32_IRAM_AS_8BIT_ACCESSIBLE_MEMORY)
// Clear IRAM BSS
memset(&_iram_bss_start, 0, (&_iram_bss_end - &_iram_bss_start) * sizeof(_iram_bss_start));
#endif
/* Unless waking from deep sleep (implying RTC memory is intact), clear RTC bss */
if (rst_reas[0] != DEEPSLEEP_RESET) {
memset(&_rtc_bss_start, 0, (&_rtc_bss_end - &_rtc_bss_start) * sizeof(_rtc_bss_start));
}
#if CONFIG_IDF_TARGET_ESP32S2
/* Configure the mode of instruction cache : cache size, cache associated ways, cache line size. */
extern void esp_config_instruction_cache_mode(void);
esp_config_instruction_cache_mode();
/* If we need use SPIRAM, we should use data cache, or if we want to access rodata, we also should use data cache.
Configure the mode of data : cache size, cache associated ways, cache line size.
Enable data cache, so if we don't use SPIRAM, it just works. */
#if CONFIG_SPIRAM_BOOT_INIT
extern void esp_config_data_cache_mode(void);
esp_config_data_cache_mode();
Cache_Enable_DCache(0);
#endif
#endif
#if CONFIG_IDF_TARGET_ESP32S3
/* Configure the mode of instruction cache : cache size, cache line size. */
extern void rom_config_instruction_cache_mode(uint32_t cfg_cache_size, uint8_t cfg_cache_ways, uint8_t cfg_cache_line_size);
rom_config_instruction_cache_mode(CONFIG_ESP32S3_INSTRUCTION_CACHE_SIZE, CONFIG_ESP32S3_ICACHE_ASSOCIATED_WAYS, CONFIG_ESP32S3_INSTRUCTION_CACHE_LINE_SIZE);
/* If we need use SPIRAM, we should use data cache.
Configure the mode of data : cache size, cache line size.*/
Cache_Suspend_DCache();
extern void rom_config_data_cache_mode(uint32_t cfg_cache_size, uint8_t cfg_cache_ways, uint8_t cfg_cache_line_size);
rom_config_data_cache_mode(CONFIG_ESP32S3_DATA_CACHE_SIZE, CONFIG_ESP32S3_DCACHE_ASSOCIATED_WAYS, CONFIG_ESP32S3_DATA_CACHE_LINE_SIZE);
Cache_Resume_DCache(0);
#endif // CONFIG_IDF_TARGET_ESP32S3
#if CONFIG_IDF_TARGET_ESP32S3 || CONFIG_IDF_TARGET_ESP32C3
/* Configure the Cache MMU size for instruction and rodata in flash. */
extern uint32_t Cache_Set_IDROM_MMU_Size(uint32_t irom_size, uint32_t drom_size);
extern int _rodata_reserved_start;
uint32_t rodata_reserved_start_align = (uint32_t)&_rodata_reserved_start & ~(MMU_PAGE_SIZE - 1);
uint32_t cache_mmu_irom_size = ((rodata_reserved_start_align - SOC_DROM_LOW) / MMU_PAGE_SIZE) * sizeof(uint32_t);
Cache_Set_IDROM_MMU_Size(cache_mmu_irom_size, CACHE_DROM_MMU_MAX_END - cache_mmu_irom_size);
#endif // CONFIG_IDF_TARGET_ESP32S3 || CONFIG_IDF_TARGET_ESP32C3
bootloader_init_mem();
#if CONFIG_SPIRAM_BOOT_INIT
if (esp_spiram_init() != ESP_OK) {
#if CONFIG_IDF_TARGET_ESP32
#if CONFIG_SPIRAM_ALLOW_BSS_SEG_EXTERNAL_MEMORY
ESP_EARLY_LOGE(TAG, "Failed to init external RAM, needed for external .bss segment");
abort();
#endif
#endif
#if CONFIG_SPIRAM_IGNORE_NOTFOUND
ESP_EARLY_LOGI(TAG, "Failed to init external RAM; continuing without it.");
g_spiram_ok = false;
#else
ESP_EARLY_LOGE(TAG, "Failed to init external RAM!");
abort();
#endif
}
if (g_spiram_ok) {
esp_spiram_init_cache();
}
#endif
#if !CONFIG_ESP_SYSTEM_SINGLE_CORE_MODE
s_cpu_up[0] = true;
#endif
ESP_EARLY_LOGI(TAG, "Pro cpu up.");
#if SOC_CPU_CORES_NUM > 1 // there is no 'single-core mode' for natively single-core processors
#if !CONFIG_ESP_SYSTEM_SINGLE_CORE_MODE
start_other_core();
#else
ESP_EARLY_LOGI(TAG, "Single core mode");
#if CONFIG_IDF_TARGET_ESP32
DPORT_CLEAR_PERI_REG_MASK(DPORT_APPCPU_CTRL_B_REG, DPORT_APPCPU_CLKGATE_EN); // stop the other core
#elif CONFIG_IDF_TARGET_ESP32S3
REG_CLR_BIT(SYSTEM_CORE_1_CONTROL_0_REG, SYSTEM_CONTROL_CORE_1_CLKGATE_EN);
#endif
#endif // !CONFIG_ESP_SYSTEM_SINGLE_CORE_MODE
#endif // SOC_CPU_CORES_NUM > 1
#if CONFIG_SPIRAM_MEMTEST
if (g_spiram_ok) {
bool ext_ram_ok = esp_spiram_test();
if (!ext_ram_ok) {
ESP_EARLY_LOGE(TAG, "External RAM failed memory test!");
abort();
}
}
#endif
#if CONFIG_SPIRAM_FETCH_INSTRUCTIONS
extern void instruction_flash_page_info_init(void);
instruction_flash_page_info_init();
#endif
#if CONFIG_SPIRAM_RODATA
extern void rodata_flash_page_info_init(void);
rodata_flash_page_info_init();
#endif
#if CONFIG_SPIRAM_FETCH_INSTRUCTIONS
extern void esp_spiram_enable_instruction_access(void);
esp_spiram_enable_instruction_access();
#endif
#if CONFIG_SPIRAM_RODATA
extern void esp_spiram_enable_rodata_access(void);
esp_spiram_enable_rodata_access();
#endif
#if CONFIG_ESP32S2_INSTRUCTION_CACHE_WRAP || CONFIG_ESP32S2_DATA_CACHE_WRAP
uint32_t icache_wrap_enable = 0, dcache_wrap_enable = 0;
#if CONFIG_ESP32S2_INSTRUCTION_CACHE_WRAP
icache_wrap_enable = 1;
#endif
#if CONFIG_ESP32S2_DATA_CACHE_WRAP
dcache_wrap_enable = 1;
#endif
extern void esp_enable_cache_wrap(uint32_t icache_wrap_enable, uint32_t dcache_wrap_enable);
esp_enable_cache_wrap(icache_wrap_enable, dcache_wrap_enable);
#endif
#if CONFIG_SPIRAM_ALLOW_BSS_SEG_EXTERNAL_MEMORY
memset(&_ext_ram_bss_start, 0, (&_ext_ram_bss_end - &_ext_ram_bss_start) * sizeof(_ext_ram_bss_start));
#endif
//Enable trace memory and immediately start trace.
#if CONFIG_ESP32_TRAX || CONFIG_ESP32S2_TRAX
#if CONFIG_IDF_TARGET_ESP32
#if CONFIG_ESP32_TRAX_TWOBANKS
trax_enable(TRAX_ENA_PRO_APP);
#else
trax_enable(TRAX_ENA_PRO);
#endif
#elif CONFIG_IDF_TARGET_ESP32S2
trax_enable(TRAX_ENA_PRO);
#endif
trax_start_trace(TRAX_DOWNCOUNT_WORDS);
#endif // CONFIG_ESP32_TRAX || CONFIG_ESP32S2_TRAX
esp_clk_init();
esp_perip_clk_init();
// Now that the clocks have been set-up, set the startup time from RTC
// and default RTC-backed system time provider.
g_startup_time = esp_rtc_get_time_us();
intr_matrix_clear();
#ifdef CONFIG_ESP_CONSOLE_UART
uint32_t clock_hz = rtc_clk_apb_freq_get();
#if CONFIG_IDF_TARGET_ESP32S3 || CONFIG_IDF_TARGET_ESP32C3
clock_hz = UART_CLK_FREQ_ROM; // From esp32-s3 on, UART clock source is selected to XTAL in ROM
#endif
esp_rom_uart_tx_wait_idle(CONFIG_ESP_CONSOLE_UART_NUM);
esp_rom_uart_set_clock_baudrate(CONFIG_ESP_CONSOLE_UART_NUM, clock_hz, CONFIG_ESP_CONSOLE_UART_BAUDRATE);
#endif
#if SOC_RTCIO_HOLD_SUPPORTED
rtcio_hal_unhold_all();
#else
gpio_hal_force_unhold_all();
#endif
esp_cache_err_int_init();
#if CONFIG_ESP_SYSTEM_MEMPROT_FEATURE
#if CONFIG_ESP_SYSTEM_MEMPROT_FEATURE_LOCK
esp_memprot_set_prot(true, true, NULL);
#else
esp_memprot_set_prot(true, false, NULL);
#endif
#endif
bootloader_flash_update_id();
// Read the application binary image header. This will also decrypt the header if the image is encrypted.
__attribute__((unused)) esp_image_header_t fhdr = {0};
#ifdef CONFIG_APP_BUILD_TYPE_ELF_RAM
fhdr.spi_mode = ESP_IMAGE_SPI_MODE_DIO;
fhdr.spi_speed = ESP_IMAGE_SPI_SPEED_40M;
fhdr.spi_size = ESP_IMAGE_FLASH_SIZE_4MB;
extern void esp_rom_spiflash_attach(uint32_t, bool);
esp_rom_spiflash_attach(esp_rom_efuse_get_flash_gpio_info(), false);
esp_rom_spiflash_unlock();
#else
// This assumes that DROM is the first segment in the application binary, i.e. that we can read
// the binary header through cache by accessing SOC_DROM_LOW address.
memcpy(&fhdr, (void *) SOC_DROM_LOW, sizeof(fhdr));
#endif // CONFIG_APP_BUILD_TYPE_ELF_RAM
#if CONFIG_IDF_TARGET_ESP32
#if !CONFIG_SPIRAM_BOOT_INIT
// If psram is uninitialized, we need to improve some flash configuration.
bootloader_flash_clock_config(&fhdr);
bootloader_flash_gpio_config(&fhdr);
bootloader_flash_dummy_config(&fhdr);
bootloader_flash_cs_timing_config();
#endif //!CONFIG_SPIRAM_BOOT_INIT
#endif //CONFIG_IDF_TARGET_ESP32
#if CONFIG_SPI_FLASH_SIZE_OVERRIDE
int app_flash_size = esp_image_get_flash_size(fhdr.spi_size);
if (app_flash_size < 1 * 1024 * 1024) {
ESP_LOGE(TAG, "Invalid flash size in app image header.");
abort();
}
bootloader_flash_update_size(app_flash_size);
#endif //CONFIG_SPI_FLASH_SIZE_OVERRIDE
#if !CONFIG_ESP_SYSTEM_SINGLE_CORE_MODE
s_cpu_inited[0] = true;
volatile bool cpus_inited = false;
while (!cpus_inited) {
cpus_inited = true;
for (int i = 0; i < SOC_CPU_CORES_NUM; i++) {
cpus_inited &= s_cpu_inited[i];
}
esp_rom_delay_us(100);
}
#endif
#ifdef ROM_LOG_MODE
esp_efuse_set_rom_log_scheme(ROM_LOG_MODE);
#endif
SYS_STARTUP_FN();
}
|
(window.webpackJsonp=window.webpackJsonp||[]).push([[15],{365:function(t,e,s){"use strict";s.r(e);var a=s(1),i=Object(a.a)({},function(){var t=this.$createElement,e=this._self._c||t;return e("div",{staticClass:"content"},[this._m(0),this._v(" "),e("FirstName")],1)},[function(){var t=this.$createElement,e=this._self._c||t;return e("h4",{attrs:{id:"loading"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#loading","aria-hidden":"true"}},[this._v("#")]),this._v(" loading")])}],!1,null,null,null);e.default=i.exports}}]); |
import datetime
import logging
from typing import List, Tuple
import numpy as np
import geopandas as gpd
import pandas as pd
from geoalchemy2 import WKTElement
from shapely import wkt
from shapely.geometry import Polygon, Point
from sklearn.base import TransformerMixin
from sqlalchemy import VARCHAR
from tqdm import tqdm
import socket
from contextlib import closing
from coord2vec.common.db.connectors import get_connection
from coord2vec.common.db.sqlalchemy_utils import get_df, merge_to_table, add_sdo_geo_to_table, insert_into_table, \
get_temp_table_name
from coord2vec.common.geographic.visualization_utils import get_image_overlay
from coord2vec.config import STEP_SIZE, ors_server_ip, ors_server_port
from coord2vec.feature_extraction.feature_table import FEATURE_NAME, GEOM, GEOM_WKT, FEATURE_VALUE, \
MODIFICATION_DATE, DTYPES, GEOM_WKT_HASH
# TODO: re-order file, too long
def load_features_using_geoms(input_gs: gpd.GeoSeries, features_table: str,
feature_names: List[str] = None) -> gpd.GeoDataFrame:
"""
Args:
input_gs: A geo series with geometries to load features on
features_table: the cache table to load features from
feature_names: optional. load only a set of features. if None, will load all the features in the table
Returns:
A GeoDataFrame with feature names as columns, and input_gs as samples.
The geometry column in the gdf is GEOM_WKT
"""
features_table = features_table.lower()
# create temporary hash table
input_wkt = input_gs.apply(lambda geo: geo.wkt)
input_hash = [str(h) for h in pd.util.hash_pandas_object(input_wkt)]
input_hash_df = pd.DataFrame({GEOM_WKT_HASH: input_hash})
eng = get_connection(db_name='POSTGRES')
if not eng.has_table(features_table): # cache table does not exist
return gpd.GeoDataFrame() # just an empty gdf
tmp_tbl_name = get_temp_table_name()
insert_into_table(eng, input_hash_df, tmp_tbl_name, dtypes={GEOM_WKT_HASH: VARCHAR(300)})
add_q = lambda l: ["'" + s + "'" for s in l]
feature_filter_sql = f"WHERE {FEATURE_NAME} in ({', '.join(add_q(feature_names))})" if feature_names is not None else ""
# extract the features
query = f"""
select {FEATURE_NAME}, {FEATURE_VALUE}, {GEOM_WKT}, f.{GEOM_WKT_HASH}
from {features_table} f
join {tmp_tbl_name} t
on t.{GEOM_WKT_HASH} = f.{GEOM_WKT_HASH}
{feature_filter_sql}
"""
results_df = get_df(query, eng)
pivot_results_df = _pivot_table(results_df)
# create the full results df
full_df = pd.DataFrame(data={GEOM_WKT: input_gs.tolist()}, index=input_hash, columns=pivot_results_df.columns)
assert pivot_results_df.index.isin(full_df.index).all(), "all loaded features should be from the input (according to hash)"
if not pivot_results_df.empty:
full_df[full_df.index.isin(pivot_results_df.index)] = pivot_results_df[
pivot_results_df.index.isin(full_df.index)].values
full_gdf = gpd.GeoDataFrame(full_df, geometry=GEOM_WKT)
full_gdf = full_gdf.astype({c: float for c in pivot_results_df.columns if c != GEOM_WKT})
with eng.begin() as con:
con.execute(f"DROP TABLE {tmp_tbl_name}")
return full_gdf
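# Hedged usage sketch (table and feature names below are illustrative, not taken
# from this module):
#
#     gs = gpd.GeoSeries([Point(34.78, 32.07), Point(34.80, 32.09)])
#     cached = load_features_using_geoms(gs, "building_features",
#                                        feature_names=["area", "num_floors"])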
def load_features_in_polygon(polygon: Polygon, features_table: str,
feature_names: List[str] = None) -> gpd.GeoDataFrame:
"""
Extract all the features already calculated inside a polygon
Args:
:param polygon: a polygon to get features of all the geometries inside of it
:param features_table: the name of the features table in which the cache is saved
:param feature_names: The names of all the features you want to extract. if None, extract all features
Returns:
A GeoDataFrame with a features as columns
"""
features_table = features_table.lower()
eng = get_connection(db_name='POSTGRES')
if eng.has_table(features_table):
# extract data
add_q = lambda l: ["'" + s + "'" for s in l]
feature_filter_sql = f"and {FEATURE_NAME} in ({', '.join(add_q(feature_names))})" if feature_names is not None else ""
query = f"""
select {FEATURE_NAME}, {FEATURE_VALUE}, CAST(ST_AsText({GEOM}) as TEXT) as {GEOM_WKT}, {GEOM_WKT_HASH}
from {features_table}
where ST_Covers(ST_GeomFromText('{polygon.wkt}', 4326)::geography, {GEOM})
{feature_filter_sql}
"""
res_df = get_df(query, eng)
ret_df = _pivot_table(res_df) # rearrange the df
else:
ret_df = gpd.GeoDataFrame()
eng.dispose()
return ret_df
def load_all_features(features_table: str) -> gpd.GeoDataFrame:
"""
    Load all the features from the features table in the database
Returns:
A Geo Dataframe with all the features as columns
"""
features_table = features_table.lower()
eng = get_connection(db_name='POSTGRES')
query = f"""
select {FEATURE_NAME}, {FEATURE_VALUE}, CAST(ST_AsText({GEOM}) as TEXT) as {GEOM_WKT}
from {features_table}
"""
res_df = pd.read_sql(query, eng)
eng.dispose()
return _pivot_table(res_df)
def _pivot_table(df: pd.DataFrame) -> gpd.GeoDataFrame:
hash2wkt = df[[GEOM_WKT_HASH, GEOM_WKT]].set_index(GEOM_WKT_HASH).to_dict()[GEOM_WKT]
features_df = df.pivot(index=GEOM_WKT_HASH, columns=FEATURE_NAME,
values=FEATURE_VALUE)
features_df[GEOM_WKT] = [wkt.loads(hash2wkt[h]) for h in features_df.index]
features_gdf = gpd.GeoDataFrame(features_df, geometry=GEOM_WKT, index=features_df.index)
return features_gdf
def save_features_to_db(gs: gpd.GeoSeries, df: pd.DataFrame, table_name: str):
"""
    Insert features into the database
Args:
gs: The geometries of the features
df: the features, with columns as feature names
        table_name: the features table name in the database
Returns:
None
"""
if len(gs) == 0:
return
table_name = table_name.lower()
eng = get_connection(db_name='POSTGRES')
for column in tqdm(df.columns, desc=f'Inserting Features to {table_name}', unit='feature', leave=False):
insert_df = pd.DataFrame(data={MODIFICATION_DATE: datetime.datetime.now(),
GEOM: gs.values,
FEATURE_NAME: column,
FEATURE_VALUE: df[column]})
insert_df[GEOM_WKT] = insert_df[GEOM].apply(lambda g: g.wkt)
# add hash column for the GEOM_WKT
insert_df[GEOM_WKT_HASH] = [str(h) for h in pd.util.hash_pandas_object(insert_df[GEOM_WKT])]
insert_df[GEOM] = insert_df[GEOM].apply(lambda x: WKTElement(x.wkt, srid=4326))
merge_to_table(eng, insert_df, table_name, compare_columns=[GEOM_WKT_HASH, FEATURE_NAME],
update_columns=[MODIFICATION_DATE, FEATURE_VALUE, GEOM, GEOM_WKT], dtypes=DTYPES)
eng.dispose()
def extract_feature_image(polygon: Polygon, features_table: str, step=STEP_SIZE,
feature_names: List[str] = None) -> Tuple[np.ndarray, np.ndarray]:
features_table = features_table.lower()
features_df = load_features_in_polygon(polygon, features_table, feature_names)
image_mask_list = [get_image_overlay(features_df.iloc[:, -1], features_df[col], step=step, return_array=True) for
col in tqdm(features_df.columns[:-1], desc="Creating image from features", unit='feature')]
images_list = [feature[0] for feature in image_mask_list]
image = np.transpose(np.stack(images_list), (1, 2, 0))
# create mask
mask_index = image_mask_list[0][1]
mask = np.zeros((image.shape[0], image.shape[1]))
mask[mask_index] = 1
return image, mask
def ors_is_up(host=ors_server_ip, port=ors_server_port) -> bool:
with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as sock:
return sock.connect_ex((host, port)) == 0
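# Hedged example: callers would typically guard routing-dependent feature
# extraction on the openrouteservice server being reachable, e.g.:
#
#     if not ors_is_up():
#         logging.warning("openrouteservice unreachable; skipping routing features")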
class FeatureFilter(TransformerMixin):
# TODO: typing + add test
# TODO: this is no longer used in pipeline, old bug
    def __init__(self, bundle: list = None, importance=10):
        self.feat_names = None  # default so transform() works when no bundle is given
        if bundle is not None:
            from coord2vec.feature_extraction.features_builders import FeaturesBuilder
            from coord2vec.feature_extraction.feature_bundles import create_building_features
            all_feats = create_building_features(bundle, importance)
            builder = FeaturesBuilder(all_feats)
            self.feat_names = builder.all_feat_names
def fit(self, X: pd.DataFrame, y=None, **kwargs):
return self
def transform(self, X: pd.DataFrame):
if self.feat_names is not None:
assert isinstance(X, pd.DataFrame), f"X is not a DataFrame \n {X}"
feat_names_after_filt = [c for c in X.columns if c in self.feat_names]
if len(feat_names_after_filt) != len(self.feat_names):
pass
# logging.warning(f"""Some features in FeatureFilter do not appear in X
# X: {X.columns}
# filt_feat_names: {self.feat_names}
# """)
try:
# TODO: old bug was that applying this after feature selection resulted in feature not being here
X_filt = X[feat_names_after_filt]
            except Exception:
                print("Error in FeatureFilter: returning X")
X_filt = X
return X_filt
return X
def fit_transform(self, X: pd.DataFrame, y=None, **fit_params):
return self.transform(X)
|
/**
* Created by ari on 6/19/2015.
*/
// Client Script
if(typeof document === 'object')
(function() {
self.addEventListener('submit', onFormEvent, false);
//self.addEventListener('change', onFormEvent, false);
self.addEventListener('input', onFormEvent, false);
function onFormEvent(e, formElm) {
if(!formElm) formElm = e.target.form ? e.target.form : e.target;
if(formElm.nodeName.toLowerCase() !== 'form')
return false;
switch(formElm.getAttribute('name')) {
case 'pgp-import-form':
refreshPGPImportForm(e, formElm);
if(e.type === 'submit')
submitPGPImportForm(e, formElm);
return true;
default:
return false;
}
}
function refreshPGPImportForm(e, formElm) {
var submitElm = formElm.querySelector('input[type=submit]');
submitElm.setAttribute('disabled', 'disabled');
var pgpKeyBlock = formElm.querySelector('textarea[name=private_key]').value;
if(pgpKeyBlock.indexOf("-----BEGIN PGP PRIVATE KEY BLOCK-----") >= 0) {
submitElm.removeAttribute('disabled');
} else if(pgpKeyBlock.indexOf("-----BEGIN PGP PUBLIC KEY BLOCK-----") >= 0) {
submitElm.removeAttribute('disabled');
}
}
function submitPGPImportForm(e, formElm) {
e.preventDefault();
var pgpKeyBlock = formElm.querySelector('textarea[name=private_key]').value;
if(
pgpKeyBlock.indexOf("-----BEGIN PGP PRIVATE KEY BLOCK-----") === -1
&& pgpKeyBlock.indexOf("-----BEGIN PGP PUBLIC KEY BLOCK-----") === -1
)
throw new Error("PGP PUBLIC/PRIVATE KEY BLOCK not found");
var commandString = "PGP.IMPORT " + pgpKeyBlock;
var messageEvent = new CustomEvent('command', {
detail: commandString,
cancelable:true
});
document.dispatchEvent(messageEvent);
// TODO: Close Window
//var windowElm = document.getElementsByClassName('pgp-import:')[0];
//windowElm.classList.add('closed');
}
})();
// Worker Script
if(typeof module === 'object') (function() {
module.exports.renderPGPImportForm = function (private_key_block, status_box, callback) {
var TEMPLATE_URL = "pgp/import/render/pgp-import-form.html";
var EXAMPLE_PUBLIC_KEY =
"Example: \n\n"
+ "-----BEGIN PGP PUBLIC KEY BLOCK-----\n"
+ "Version: pgpwnt v3.0a\n"
+ "Comment: pgpwned by pgpwnt\n"
+ "\n"
+ "mQENBFWZ6r0BCACakVSmgG6NaFlTbJxxdJMQHIDC16e2ospVoVkFunTiD7uQ+da3\n"
+ "5Y5Ewjv5skMcVkmAilWxtDQWwdgb+mv9SqpT3FmDEp7pPtDl/1tMZQyTQfjQ3+YC\n"
+ "a/6tAGx7p3Abi/7UXkz/3Yh3x+Oin71EHlE0mhqIgbwh8UQOP+q6+CH0SYeCPPir\n"
+ "t5+gsSSoME4ZMMxLE9osTGpYwsOE6Y4iO9oeqjAuOglWqMeRWIaUH4Om8N1IVhJF\n"
+ "oSMzTby91x0OaEePDtTHW/h6rD4ZAZoj20dxutApYHo29lVUhEY2gLrdptgw8E5I\n"
+ "SSJj8fIhZiO6o2ZLkqcCmJqd6BwoaZW+FWKPABEBAAG0EWd1ZXN0QHJlbGF5LmNv\n"
+ "LmlsiQEcBBABAgAGBQJVmeq9AAoJEFeCpFUFcZCa7G8IAIsfFF4RwEah2JIM1+VB\n"
+ "GOBilAvTcEyOhOn93Rfih2I9UMYWhAflvwi0FtAkZ4ysY1j7F4frnQ4E/6f9sNjm\n"
+ "5wMPwiEPaoSTFcEKVDNHV3qcGjCcyXtpKYY0afm3GZK8Rcc5IouDC4tHMYbmVAav\n"
+ "7YsfSRMoCw1c+6FWoE2S3A0v6uKLiq9Yux+FC36X+eXlkzp+nqCSjZ3AOC/zDPHv\n"
+ "HtZIfS7yaKJeMKdA31q4c5h0Ts3t8ojW7K/Q/v5s1LlqxM3zDx/5KsO657AKcgmv\n"
+ "1EOWmy8OyRH7M7FXN3bcU34g0hHhNWdD+n0ew0COydgj5ZMzulY5Su1hrG0UNasX\n"
+ "/Bw=\n"
+ "=E+6i\n"
+ "-----END PGP PUBLIC KEY BLOCK-----";
var xhr = new XMLHttpRequest();
xhr.open("GET", TEMPLATE_URL, false);
xhr.send();
if(xhr.status !== 200)
throw new Error("Error: " + xhr.responseText);
callback(xhr.responseText
.replace(/{\$status_box}/gi, status_box || '')
.replace(/{\$private_key_block}/gi, private_key_block)
.replace(/{\$example_public_key}/gi, EXAMPLE_PUBLIC_KEY)
);
return true;
};
})(); |
# Generated by Django 3.2.1 on 2021-05-09 17:35
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('social', '0007_comment_parent'),
]
operations = [
migrations.CreateModel(
name='Notification',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('notification_type', models.IntegerField()),
('date', models.DateTimeField(default=django.utils.timezone.now)),
('user_has_seen', models.BooleanField(default=False)),
('comment', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='social.comment')),
('from_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='notification_from', to=settings.AUTH_USER_MODEL)),
('post', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='social.post')),
('to_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='notification_to', to=settings.AUTH_USER_MODEL)),
],
),
]
|
from requests.exceptions import RequestException
from contextlib import closing
import logging
import requests
def simple_get(url):
try:
with closing(requests.get(url, stream=True)) as response:
if is_html(response):
return response.content
else:
return None
except RequestException as e:
        logging.warning('GET request to {} failed: {}'.format(url, e))
return None
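# Hedged usage sketch (URL is illustrative):
#
#     html = simple_get('https://example.com')
#     if html is not None:
#         print(len(html))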
def is_html(response):
    """Return True if the response looks like HTML (status 200 and an HTML content type)."""
    content_type = response.headers.get('Content-Type', '').lower()
    return (response.status_code == 200
            and content_type.find('html') > -1) |
// Copyright IBM Corp. 2018. All Rights Reserved.
// Node module: @loopback/cli
// This file is licensed under the MIT License.
// License text available at https://opensource.org/licenses/MIT
'use strict';
const assert = require('yeoman-assert');
const fs = require('fs');
const expect = require('@loopback/testlab').expect;
const path = require('path');
const generator = path.join(__dirname, '../../../generators/example');
const baseTests = require('../lib/base-generator')(generator);
const testUtils = require('../../test-utils');
const ALL_EXAMPLES = require('../../../generators/example').getAllExamples();
const VALID_EXAMPLE = 'todo';
describe('lb4 example', function() {
this.timeout(10000);
describe('correctly extends BaseGenerator', baseTests);
describe('_setupGenerator', () => {
it('has name argument set up', () => {
const helpText = getHelpText();
expect(helpText)
.to.match(/\[<example-name>\]/)
.and.match(/# Name of the example/)
.and.match(/Type: String/)
.and.match(/Required: false/);
});
it('lists all example names in help', () => {
const helpText = getHelpText();
for (const key of Object.keys(ALL_EXAMPLES)) {
expect(helpText).to.match(new RegExp(`${key}: (.*?)`));
}
});
function getHelpText() {
return testUtils.testSetUpGen(generator).help();
}
});
it('accepts the example name via interactive prompt', () => {
return testUtils
.executeGenerator(generator)
.withPrompts({name: VALID_EXAMPLE})
.then(() => {
const targetPkgFile = `loopback4-example-${VALID_EXAMPLE}/package.json`;
const originalPkgMeta = require(`../../../../../examples/${VALID_EXAMPLE}/package.json`);
assert.file(targetPkgFile);
assert.jsonFileContent(targetPkgFile, {
name: originalPkgMeta.name,
version: originalPkgMeta.version,
});
});
});
it('accepts the example name as a CLI argument', () => {
return testUtils
.executeGenerator(generator)
.withArguments([VALID_EXAMPLE])
.then(() => {
const targetPkgFile = `loopback4-example-${VALID_EXAMPLE}/package.json`;
const originalPkgMeta = require(`../../../../../examples/${VALID_EXAMPLE}/package.json`);
assert.file(targetPkgFile);
assert.jsonFileContent(targetPkgFile, {
name: originalPkgMeta.name,
version: originalPkgMeta.version,
});
});
});
it('rejects invalid example names', () => {
return testUtils
.executeGenerator(generator)
.withArguments(['example-does-not-exist'])
.then(
() => {
throw new Error('Generator should have failed.');
},
err => {
expect(err).to.match(/Invalid example name/);
},
);
});
});
|
from tqdm import tqdm
import torch
import numpy as np
from torch import nn
# helpful function
def sum_params(model):
s = 0
for p in model.parameters():
n = p.cpu().data.numpy()
s += np.sum(n)
return s
class BertPuncCapV1(nn.Module):
"""
    This is the first version of BertPuncCap, a model that restores both
    punctuation and capitalization. It assumes these two tasks are dependent
    on each other, so each label of this model is a joint (punctuation,
    capitalization) combination. For example, restoring 3 punctuation classes
    and 2 capitalization classes gives this model 3 x 2 = 6 labels.
"""
def __init__(self, BERT_model, segment_size, punc_size, case_size, dropout):
"""
Initializes BertPuncCap, which is the same as BERT with one extra dense
layer for classification.
Parameters
----------
BERT_model: transformers.PreTrainedModel
The BERT pre-trained model at HuggingFace's `transformers` package.
segment_size: int
The size of the input.
punc_size: int
The number of punctuations you are considering.
case_size: int
The number of cases you are considering.
        dropout: float
The dropout rate for the linear layer.
"""
super(BertPuncCapV1, self).__init__()
self.segment_size = segment_size
self.bert = BERT_model
self.hidden_size = self.bert.config.hidden_size
output_size = punc_size * case_size if case_size else punc_size
self.bert.config.output_hidden_states=True
self.bn = nn.BatchNorm1d(segment_size*self.hidden_size)
self.fc = nn.Linear(segment_size*self.hidden_size, output_size)
self.dropout = nn.Dropout(dropout)
def forward(self, x):
x = self.bert(x).hidden_states[-1]
x = x.view(x.shape[0], -1)
logits = self.fc(self.dropout(self.bn(x)))
return logits
def compute_loss(self, model, criterion, inputs, labels):
outputs = model(inputs)
loss = criterion(outputs, labels)
return loss
def predict(self, data_loader, tokenizer):
"""
Predicts the labels for the given data.
Parameters
----------
data_loader: torch.utils.data.DataLoader
A data loader object for the test data (usually).
tokenizer: transformers.PreTrainedTokenizer
A tokenizer object from the HuggingFace's `transformers` package.
Returns
-------
out_tokens: list(str)
A list of tokens found in the dataloader.
out_preds: list(int)
A list of predicted labels.
out_labels: list(int)
A list of true labels.
"""
subwords, y_pred, y_true = [], [], []
for inputs, labels in tqdm(data_loader, total=len(data_loader)):
with torch.no_grad():
subword_ids = inputs[:, (self.segment_size-1)//2 - 1].flatten()
subwords += tokenizer.convert_ids_to_tokens(subword_ids)
inputs, labels = inputs.cuda(), labels.cuda()
outputs = self.forward(inputs)
y_pred += list(outputs.argmax(dim=1).cpu().data.numpy().flatten())
y_true += list(labels.cpu().data.numpy().flatten())
assert len(subwords) == len(y_pred) == len(y_true)
# now, we have predictions for sub-words. Let's get the token predictions
i = 0
curr_word = ""
out_tokens, out_preds, out_labels = [], [], []
while( i < len(y_true)):
curr_word += subwords[i]
while(i+1 < len(y_true) and subwords[i+1].startswith("##")):
i += 1
curr_word += subwords[i][2:]
out_tokens.append(curr_word)
out_preds.append(y_pred[i])
out_labels.append(y_true[i])
curr_word = ""
i += 1
return out_tokens, out_preds, out_labels
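# Hedged construction sketch (model name, sizes and the DataLoader below are
# illustrative assumptions, not taken from this file; predict() also assumes a
# CUDA device since it moves tensors with .cuda()):
#
#     from transformers import BertModel, BertTokenizer
#     bert = BertModel.from_pretrained("bert-base-multilingual-cased")
#     tokenizer = BertTokenizer.from_pretrained("bert-base-multilingual-cased")
#     model = BertPuncCapV1(bert, segment_size=32, punc_size=4, case_size=2,
#                           dropout=0.3).cuda()
#     tokens, preds, labels = model.predict(test_loader, tokenizer)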
class BertPuncCapV2(BertPuncCapV1):
def __init__(self, BERT_model, segment_size, punc_size, case_size, dropout, alpha=0.5):
super().__init__(BERT_model, segment_size, punc_size, case_size, dropout)
self.alpha = alpha
self.punc_bn = nn.BatchNorm1d(segment_size*self.hidden_size)
self.punc_fc = nn.Linear(segment_size*self.hidden_size, punc_size)
self.case_bn = nn.BatchNorm1d(segment_size*self.hidden_size)
self.case_fc = nn.Linear(segment_size*self.hidden_size, case_size)
def forward(self, x):
x = self.bert(x).hidden_states[-1]
x = x.view(x.shape[0], -1)
punc_logits = self.punc_fc(self.dropout(self.punc_bn(x)))
case_logits = self.case_fc(self.dropout(self.case_bn(x)))
return punc_logits, case_logits
def compute_loss(self, model, criterion, inputs, labels):
punc_outputs, case_outputs = model(inputs)
punc_loss = criterion(punc_outputs, labels[:, 0])
case_loss = criterion(case_outputs, labels[:, 1])
loss = (self.alpha * punc_loss) + (1-self.alpha) * case_loss
return loss
def predict(self, data_loader, tokenizer):
"""
Predicts the labels for the given data.
Parameters
----------
data_loader: torch.utils.data.DataLoader
A data loader object for the test data (usually).
tokenizer: transformers.PreTrainedTokenizer
A tokenizer object from the HuggingFace's `transformers` package.
Returns
-------
out_tokens: list(str)
A list of tokens found in the dataloader.
out_preds: list(list(int))
A list of two items; the first is the predicted labels for the
re-punctuation task while the other is for the re-capitalization
task.
out_labels: list(int)
A list of two items; the first is the true labels for the
re-punctuation task while the other is for the re-capitalization
task.
"""
subwords, punc_pred, case_pred, punc_true, case_true = [], [], [], [], []
for inputs, labels in tqdm(data_loader, total=len(data_loader)):
with torch.no_grad():
subword_ids = inputs[:, (self.segment_size-1)//2 - 1].flatten()
subwords += tokenizer.convert_ids_to_tokens(subword_ids)
inputs, labels = inputs.cuda(), labels.cuda()
punc_outputs, case_outputs = self.forward(inputs)
punc_pred += list(punc_outputs.argmax(dim=1).cpu().data.numpy().flatten())
case_pred += list(case_outputs.argmax(dim=1).cpu().data.numpy().flatten())
punc_true += list(labels[:,0].cpu().data.numpy().flatten())
case_true += list(labels[:,1].cpu().data.numpy().flatten())
assert len(subwords) == len(punc_pred) == len(punc_true) == len(case_pred) == len(case_true)
# now, we have predictions for sub-words. Let's get the token predictions
i = 0
curr_word = ""
out_tokens, punc_preds, punc_labels, case_preds, case_labels = [], [], [], [], []
while( i < len(subwords)):
curr_word += subwords[i]
while(i+1 < len(subwords) and subwords[i+1].startswith("##")):
i += 1
curr_word += subwords[i][2:]
out_tokens.append(curr_word)
punc_preds.append(punc_pred[i])
case_preds.append(case_pred[i])
punc_labels.append(punc_true[i])
case_labels.append(case_true[i])
curr_word = ""
i += 1
out_preds = [punc_preds, case_preds]
out_labels = [punc_labels, case_labels]
return out_tokens, out_preds, out_labels |
(function() {
'use strict';
angular
.module('jhbookz')
.config(stateConfig);
stateConfig.$inject = ['$stateProvider'];
function stateConfig($stateProvider) {
$stateProvider.state('settings', {
parent: 'account',
url: '/settings',
data: {
authorities: ['ROLE_USER'],
pageTitle: 'global.menu.account.settings'
},
views: {
'content@': {
templateUrl: 'app/account/settings/settings.html',
controller: 'SettingsController',
controllerAs: 'vm'
}
},
resolve: {
translatePartialLoader: ['$translate', '$translatePartialLoader', function ($translate, $translatePartialLoader) {
$translatePartialLoader.addPart('settings');
return $translate.refresh();
}]
}
});
}
})();
|
import unittest
import pytest
from sumeval.metrics.lang.lang_zh import LangZH
class TestLangZH(unittest.TestCase):
def test_tokenize(self):
lang = LangZH()
text = "我发现了一朵非常漂亮的花"
tokens = lang.tokenize(text)
self.assertEqual(len(tokens), 8)
    @pytest.mark.skip(reason="Downloading the parse model is terribly slow.")
def test_basic_element(self):
lang = LangZH()
text = "我发现了一朵非常漂亮的花"
bes = lang.parse_to_be(text)
for i, be in enumerate(bes):
if i == 0:
self.assertEqual(be.head, "花")
self.assertEqual(be.modifier, "漂亮")
else:
self.assertEqual(be.head, "花")
self.assertEqual(be.modifier, "发现")
|
/*!
* # Semantic UI undefined - Form Validation
* http://github.com/semantic-org/semantic-ui/
*
*
* Released under the MIT license
* http://opensource.org/licenses/MIT
*
*/
;(function ($, window, document, undefined) {
"use strict";
window = (typeof window != 'undefined' && window.Math == Math)
? window
: (typeof self != 'undefined' && self.Math == Math)
? self
: Function('return this')()
;
$.fn.form = function(parameters) {
var
$allModules = $(this),
moduleSelector = $allModules.selector || '',
time = new Date().getTime(),
performance = [],
query = arguments[0],
legacyParameters = arguments[1],
methodInvoked = (typeof query == 'string'),
queryArguments = [].slice.call(arguments, 1),
returnedValue
;
$allModules
.each(function() {
var
$module = $(this),
element = this,
formErrors = [],
keyHeldDown = false,
// set at run-time
$field,
$group,
$message,
$prompt,
$submit,
$clear,
$reset,
settings,
validation,
metadata,
selector,
className,
regExp,
error,
namespace,
moduleNamespace,
eventNamespace,
instance,
module
;
module = {
initialize: function() {
// settings grabbed at run time
module.get.settings();
if(methodInvoked) {
if(instance === undefined) {
module.instantiate();
}
module.invoke(query);
}
else {
if(instance !== undefined) {
instance.invoke('destroy');
}
module.verbose('Initializing form validation', $module, settings);
module.bindEvents();
module.set.defaults();
module.instantiate();
}
},
instantiate: function() {
module.verbose('Storing instance of module', module);
instance = module;
$module
.data(moduleNamespace, module)
;
},
destroy: function() {
module.verbose('Destroying previous module', instance);
module.removeEvents();
$module
.removeData(moduleNamespace)
;
},
refresh: function() {
module.verbose('Refreshing selector cache');
$field = $module.find(selector.field);
$group = $module.find(selector.group);
$message = $module.find(selector.message);
$prompt = $module.find(selector.prompt);
$submit = $module.find(selector.submit);
$clear = $module.find(selector.clear);
$reset = $module.find(selector.reset);
},
submit: function() {
module.verbose('Submitting form', $module);
$module
.submit()
;
},
attachEvents: function(selector, action) {
action = action || 'submit';
$(selector)
.on('click' + eventNamespace, function(event) {
module[action]();
event.preventDefault();
})
;
},
bindEvents: function() {
module.verbose('Attaching form events');
$module
.on('submit' + eventNamespace, module.validate.form)
.on('blur' + eventNamespace, selector.field, module.event.field.blur)
.on('click' + eventNamespace, selector.submit, module.submit)
.on('click' + eventNamespace, selector.reset, module.reset)
.on('click' + eventNamespace, selector.clear, module.clear)
;
if(settings.keyboardShortcuts) {
$module
.on('keydown' + eventNamespace, selector.field, module.event.field.keydown)
;
}
$field
.each(function() {
var
$input = $(this),
type = $input.prop('type'),
inputEvent = module.get.changeEvent(type, $input)
;
$(this)
.on(inputEvent + eventNamespace, module.event.field.change)
;
})
;
},
clear: function() {
$field
.each(function () {
var
$field = $(this),
$element = $field.parent(),
$fieldGroup = $field.closest($group),
$prompt = $fieldGroup.find(selector.prompt),
defaultValue = $field.data(metadata.defaultValue) || '',
isCheckbox = $element.is(selector.uiCheckbox),
isDropdown = $element.is(selector.uiDropdown),
isErrored = $fieldGroup.hasClass(className.error)
;
if(isErrored) {
module.verbose('Resetting error on field', $fieldGroup);
$fieldGroup.removeClass(className.error);
$prompt.remove();
}
if(isDropdown) {
module.verbose('Resetting dropdown value', $element, defaultValue);
$element.dropdown('clear');
}
else if(isCheckbox) {
$field.prop('checked', false);
}
else {
module.verbose('Resetting field value', $field, defaultValue);
$field.val('');
}
})
;
},
reset: function() {
$field
.each(function () {
var
$field = $(this),
$element = $field.parent(),
$fieldGroup = $field.closest($group),
$prompt = $fieldGroup.find(selector.prompt),
defaultValue = $field.data(metadata.defaultValue),
isCheckbox = $element.is(selector.uiCheckbox),
isDropdown = $element.is(selector.uiDropdown),
isErrored = $fieldGroup.hasClass(className.error)
;
if(defaultValue === undefined) {
return;
}
if(isErrored) {
module.verbose('Resetting error on field', $fieldGroup);
$fieldGroup.removeClass(className.error);
$prompt.remove();
}
if(isDropdown) {
module.verbose('Resetting dropdown value', $element, defaultValue);
$element.dropdown('restore defaults');
}
else if(isCheckbox) {
module.verbose('Resetting checkbox value', $element, defaultValue);
$field.prop('checked', defaultValue);
}
else {
module.verbose('Resetting field value', $field, defaultValue);
$field.val(defaultValue);
}
})
;
},
determine: {
isValid: function() {
var
allValid = true
;
$.each(validation, function(fieldName, field) {
if( !( module.validate.field(field, fieldName, true) ) ) {
allValid = false;
}
});
return allValid;
}
},
is: {
bracketedRule: function(rule) {
return (rule.type && rule.type.match(settings.regExp.bracket));
},
shorthandFields: function(fields) {
var
fieldKeys = Object.keys(fields),
firstRule = fields[fieldKeys[0]]
;
return module.is.shorthandRules(firstRule);
},
// duck type rule test
shorthandRules: function(rules) {
return (typeof rules == 'string' || $.isArray(rules));
},
empty: function($field) {
if(!$field || $field.length === 0) {
return true;
}
else if($field.is('input[type="checkbox"]')) {
return !$field.is(':checked');
}
else {
return module.is.blank($field);
}
},
blank: function($field) {
return $.trim($field.val()) === '';
},
valid: function(field) {
var
allValid = true
;
if(field) {
module.verbose('Checking if field is valid', field);
return module.validate.field(validation[field], field, false);
}
else {
module.verbose('Checking if form is valid');
$.each(validation, function(fieldName, field) {
if( !module.is.valid(fieldName) ) {
allValid = false;
}
});
return allValid;
}
}
},
removeEvents: function() {
$module
.off(eventNamespace)
;
$field
.off(eventNamespace)
;
$submit
.off(eventNamespace)
;
$field
.off(eventNamespace)
;
},
event: {
field: {
keydown: function(event) {
var
$field = $(this),
key = event.which,
isInput = $field.is(selector.input),
isCheckbox = $field.is(selector.checkbox),
isInDropdown = ($field.closest(selector.uiDropdown).length > 0),
keyCode = {
enter : 13,
escape : 27
}
;
if( key == keyCode.escape) {
module.verbose('Escape key pressed blurring field');
$field
.blur()
;
}
if(!event.ctrlKey && key == keyCode.enter && isInput && !isInDropdown && !isCheckbox) {
if(!keyHeldDown) {
$field
.one('keyup' + eventNamespace, module.event.field.keyup)
;
module.submit();
module.debug('Enter pressed on input submitting form');
}
keyHeldDown = true;
}
},
keyup: function() {
keyHeldDown = false;
},
blur: function(event) {
var
$field = $(this),
$fieldGroup = $field.closest($group),
validationRules = module.get.validation($field)
;
if( $fieldGroup.hasClass(className.error) ) {
module.debug('Revalidating field', $field, validationRules);
if(validationRules) {
module.validate.field( validationRules );
}
}
else if(settings.on == 'blur' || settings.on == 'change') {
if(validationRules) {
module.validate.field( validationRules );
}
}
},
change: function(event) {
var
$field = $(this),
$fieldGroup = $field.closest($group),
validationRules = module.get.validation($field)
;
if(validationRules && (settings.on == 'change' || ( $fieldGroup.hasClass(className.error) && settings.revalidate) )) {
clearTimeout(module.timer);
module.timer = setTimeout(function() {
module.debug('Revalidating field', $field, module.get.validation($field));
module.validate.field( validationRules );
}, settings.delay);
}
}
}
},
get: {
ancillaryValue: function(rule) {
if(!rule.type || (!rule.value && !module.is.bracketedRule(rule))) {
return false;
}
return (rule.value !== undefined)
? rule.value
: rule.type.match(settings.regExp.bracket)[1] + ''
;
},
ruleName: function(rule) {
if( module.is.bracketedRule(rule) ) {
return rule.type.replace(rule.type.match(settings.regExp.bracket)[0], '');
}
return rule.type;
},
changeEvent: function(type, $input) {
if(type == 'checkbox' || type == 'radio' || type == 'hidden' || $input.is('select')) {
return 'change';
}
else {
return module.get.inputEvent();
}
},
inputEvent: function() {
return (document.createElement('input').oninput !== undefined)
? 'input'
: (document.createElement('input').onpropertychange !== undefined)
? 'propertychange'
: 'keyup'
;
},
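        // expands shorthand field definitions into full rule objects, e.g. (illustrative):
        //   { email: ['empty', 'email'] }  becomes  { email: { rules: [ { type: 'empty' }, { type: 'email' } ] } }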
fieldsFromShorthand: function(fields) {
var
fullFields = {}
;
$.each(fields, function(name, rules) {
if(typeof rules == 'string') {
rules = [rules];
}
fullFields[name] = {
rules: []
};
$.each(rules, function(index, rule) {
fullFields[name].rules.push({ type: rule });
});
});
return fullFields;
},
prompt: function(rule, field) {
var
ruleName = module.get.ruleName(rule),
ancillary = module.get.ancillaryValue(rule),
prompt = rule.prompt || settings.prompt[ruleName] || settings.text.unspecifiedRule,
requiresValue = (prompt.search('{value}') !== -1),
requiresName = (prompt.search('{name}') !== -1),
$label,
$field,
name
;
if(requiresName || requiresValue) {
$field = module.get.field(field.identifier);
}
if(requiresValue) {
prompt = prompt.replace('{value}', $field.val());
}
if(requiresName) {
$label = $field.closest(selector.group).find('label').eq(0);
name = ($label.length == 1)
? $label.text()
: $field.prop('placeholder') || settings.text.unspecifiedField
;
prompt = prompt.replace('{name}', name);
}
prompt = prompt.replace('{identifier}', field.identifier);
prompt = prompt.replace('{ruleValue}', ancillary);
if(!rule.prompt) {
module.verbose('Using default validation prompt for type', prompt, ruleName);
}
return prompt;
},
settings: function() {
if($.isPlainObject(parameters)) {
var
keys = Object.keys(parameters),
isLegacySettings = (keys.length > 0)
? (parameters[keys[0]].identifier !== undefined && parameters[keys[0]].rules !== undefined)
: false,
ruleKeys
;
if(isLegacySettings) {
// 1.x (ducktyped)
settings = $.extend(true, {}, $.fn.form.settings, legacyParameters);
validation = $.extend({}, $.fn.form.settings.defaults, parameters);
module.error(settings.error.oldSyntax, element);
module.verbose('Extending settings from legacy parameters', validation, settings);
}
else {
// 2.x
if(parameters.fields && module.is.shorthandFields(parameters.fields)) {
parameters.fields = module.get.fieldsFromShorthand(parameters.fields);
}
settings = $.extend(true, {}, $.fn.form.settings, parameters);
validation = $.extend({}, $.fn.form.settings.defaults, settings.fields);
module.verbose('Extending settings', validation, settings);
}
}
else {
settings = $.fn.form.settings;
validation = $.fn.form.settings.defaults;
module.verbose('Using default form validation', validation, settings);
}
// shorthand
namespace = settings.namespace;
metadata = settings.metadata;
selector = settings.selector;
className = settings.className;
regExp = settings.regExp;
error = settings.error;
moduleNamespace = 'module-' + namespace;
eventNamespace = '.' + namespace;
// grab instance
instance = $module.data(moduleNamespace);
// refresh selector cache
module.refresh();
},
field: function(identifier) {
module.verbose('Finding field with identifier', identifier);
identifier = module.escape.string(identifier);
if($field.filter('#' + identifier).length > 0 ) {
return $field.filter('#' + identifier);
}
else if( $field.filter('[name="' + identifier +'"]').length > 0 ) {
return $field.filter('[name="' + identifier +'"]');
}
else if( $field.filter('[name="' + identifier +'[]"]').length > 0 ) {
return $field.filter('[name="' + identifier +'[]"]');
}
else if( $field.filter('[data-' + metadata.validate + '="'+ identifier +'"]').length > 0 ) {
return $field.filter('[data-' + metadata.validate + '="'+ identifier +'"]');
}
return $('<input/>');
},
fields: function(fields) {
var
$fields = $()
;
$.each(fields, function(index, name) {
$fields = $fields.add( module.get.field(name) );
});
return $fields;
},
validation: function($field) {
var
fieldValidation,
identifier
;
if(!validation) {
return false;
}
$.each(validation, function(fieldName, field) {
identifier = field.identifier || fieldName;
if( module.get.field(identifier)[0] == $field[0] ) {
field.identifier = identifier;
fieldValidation = field;
}
});
return fieldValidation || false;
},
value: function (field) {
var
fields = [],
results
;
fields.push(field);
results = module.get.values.call(element, fields);
return results[field];
},
values: function (fields) {
var
$fields = $.isArray(fields)
? module.get.fields(fields)
: $field,
values = {}
;
$fields.each(function(index, field) {
var
$field = $(field),
type = $field.prop('type'),
name = $field.prop('name'),
value = $field.val(),
isCheckbox = $field.is(selector.checkbox),
isRadio = $field.is(selector.radio),
isMultiple = (name.indexOf('[]') !== -1),
isChecked = (isCheckbox)
? $field.is(':checked')
: false
;
if(name) {
if(isMultiple) {
name = name.replace('[]', '');
if(!values[name]) {
values[name] = [];
}
if(isCheckbox) {
if(isChecked) {
values[name].push(value || true);
}
else {
values[name].push(false);
}
}
else {
values[name].push(value);
}
}
else {
if(isRadio) {
if(values[name] === undefined) {
values[name] = (isChecked)
? true
: false
;
}
}
else if(isCheckbox) {
if(isChecked) {
values[name] = value || true;
}
else {
values[name] = false;
}
}
else {
values[name] = value;
}
}
}
});
return values;
}
},
has: {
field: function(identifier) {
module.verbose('Checking for existence of a field with identifier', identifier);
          if(typeof identifier !== 'string') {
            module.error(error.identifier, identifier);
          }
          identifier = module.escape.string(identifier);
if($field.filter('#' + identifier).length > 0 ) {
return true;
}
else if( $field.filter('[name="' + identifier +'"]').length > 0 ) {
return true;
}
else if( $field.filter('[data-' + metadata.validate + '="'+ identifier +'"]').length > 0 ) {
return true;
}
return false;
}
},
escape: {
string: function(text) {
text = String(text);
return text.replace(regExp.escape, '\\$&');
}
},
add: {
// alias
rule: function(name, rules) {
module.add.field(name, rules);
},
field: function(name, rules) {
var
newValidation = {}
;
if(module.is.shorthandRules(rules)) {
rules = $.isArray(rules)
? rules
: [rules]
;
newValidation[name] = {
rules: []
};
$.each(rules, function(index, rule) {
newValidation[name].rules.push({ type: rule });
});
}
else {
newValidation[name] = rules;
}
validation = $.extend({}, validation, newValidation);
module.debug('Adding rules', newValidation, validation);
},
fields: function(fields) {
var
newValidation
;
if(fields && module.is.shorthandFields(fields)) {
newValidation = module.get.fieldsFromShorthand(fields);
}
else {
newValidation = fields;
}
validation = $.extend({}, validation, newValidation);
},
prompt: function(identifier, errors) {
var
$field = module.get.field(identifier),
$fieldGroup = $field.closest($group),
$prompt = $fieldGroup.children(selector.prompt),
promptExists = ($prompt.length !== 0)
;
errors = (typeof errors == 'string')
? [errors]
: errors
;
module.verbose('Adding field error state', identifier);
$fieldGroup
.addClass(className.error)
;
if(settings.inline) {
if(!promptExists) {
$prompt = settings.templates.prompt(errors);
$prompt
.appendTo($fieldGroup)
;
}
$prompt
.html(errors[0])
;
if(!promptExists) {
if(settings.transition && $.fn.transition !== undefined && $module.transition('is supported')) {
module.verbose('Displaying error with css transition', settings.transition);
$prompt.transition(settings.transition + ' in', settings.duration);
}
else {
module.verbose('Displaying error with fallback javascript animation');
$prompt
.fadeIn(settings.duration)
;
}
}
else {
module.verbose('Inline errors are disabled, no inline error added', identifier);
}
}
},
errors: function(errors) {
module.debug('Adding form error messages', errors);
module.set.error();
$message
.html( settings.templates.error(errors) )
;
}
},
remove: {
rule: function(field, rule) {
var
rules = $.isArray(rule)
? rule
: [rule]
;
if(rule == undefined) {
module.debug('Removed all rules');
validation[field].rules = [];
return;
}
if(validation[field] == undefined || !$.isArray(validation[field].rules)) {
return;
}
$.each(validation[field].rules, function(index, rule) {
if(rules.indexOf(rule.type) !== -1) {
module.debug('Removed rule', rule.type);
validation[field].rules.splice(index, 1);
}
});
},
field: function(field) {
var
fields = $.isArray(field)
? field
: [field]
;
$.each(fields, function(index, field) {
module.remove.rule(field);
});
},
// alias
rules: function(field, rules) {
if($.isArray(field)) {
            $.each(field, function(index, field) {
module.remove.rule(field, rules);
});
}
else {
module.remove.rule(field, rules);
}
},
fields: function(fields) {
module.remove.field(fields);
},
prompt: function(identifier) {
var
$field = module.get.field(identifier),
$fieldGroup = $field.closest($group),
$prompt = $fieldGroup.children(selector.prompt)
;
$fieldGroup
.removeClass(className.error)
;
if(settings.inline && $prompt.is(':visible')) {
module.verbose('Removing prompt for field', identifier);
if(settings.transition && $.fn.transition !== undefined && $module.transition('is supported')) {
$prompt.transition(settings.transition + ' out', settings.duration, function() {
$prompt.remove();
});
}
else {
$prompt
.fadeOut(settings.duration, function(){
$prompt.remove();
})
;
}
}
}
},
set: {
success: function() {
$module
.removeClass(className.error)
.addClass(className.success)
;
},
defaults: function () {
$field
.each(function () {
var
$field = $(this),
isCheckbox = ($field.filter(selector.checkbox).length > 0),
value = (isCheckbox)
? $field.is(':checked')
: $field.val()
;
$field.data(metadata.defaultValue, value);
})
;
},
error: function() {
$module
.removeClass(className.success)
.addClass(className.error)
;
},
value: function (field, value) {
var
fields = {}
;
fields[field] = value;
return module.set.values.call(element, fields);
},
values: function (fields) {
if($.isEmptyObject(fields)) {
return;
}
$.each(fields, function(key, value) {
var
$field = module.get.field(key),
$element = $field.parent(),
isMultiple = $.isArray(value),
isCheckbox = $element.is(selector.uiCheckbox),
isDropdown = $element.is(selector.uiDropdown),
isRadio = ($field.is(selector.radio) && isCheckbox),
fieldExists = ($field.length > 0),
$multipleField
;
if(fieldExists) {
if(isMultiple && isCheckbox) {
module.verbose('Selecting multiple', value, $field);
$element.checkbox('uncheck');
$.each(value, function(index, value) {
$multipleField = $field.filter('[value="' + value + '"]');
$element = $multipleField.parent();
if($multipleField.length > 0) {
$element.checkbox('check');
}
});
}
else if(isRadio) {
module.verbose('Selecting radio value', value, $field);
$field.filter('[value="' + value + '"]')
.parent(selector.uiCheckbox)
.checkbox('check')
;
}
else if(isCheckbox) {
module.verbose('Setting checkbox value', value, $element);
if(value === true) {
$element.checkbox('check');
}
else {
$element.checkbox('uncheck');
}
}
else if(isDropdown) {
module.verbose('Setting dropdown value', value, $element);
$element.dropdown('set selected', value);
}
else {
module.verbose('Setting field value', value, $field);
$field.val(value);
}
}
});
}
},
validate: {
form: function(event, ignoreCallbacks) {
var
values = module.get.values(),
apiRequest
;
// input keydown event will fire submit repeatedly by browser default
if(keyHeldDown) {
return false;
}
// reset errors
formErrors = [];
if( module.determine.isValid() ) {
module.debug('Form has no validation errors, submitting');
module.set.success();
if(ignoreCallbacks !== true) {
return settings.onSuccess.call(element, event, values);
}
}
else {
module.debug('Form has errors');
module.set.error();
if(!settings.inline) {
module.add.errors(formErrors);
}
// prevent ajax submit
if($module.data('moduleApi') !== undefined) {
event.stopImmediatePropagation();
}
if(ignoreCallbacks !== true) {
return settings.onFailure.call(element, formErrors, values);
}
}
},
// takes a validation object and returns whether field passes validation
field: function(field, fieldName, showErrors) {
showErrors = (showErrors !== undefined)
? showErrors
: true
;
if(typeof field == 'string') {
module.verbose('Validating field', field);
fieldName = field;
field = validation[field];
}
var
identifier = field.identifier || fieldName,
$field = module.get.field(identifier),
$dependsField = (field.depends)
? module.get.field(field.depends)
: false,
fieldValid = true,
fieldErrors = []
;
if(!field.identifier) {
module.debug('Using field name as identifier', identifier);
field.identifier = identifier;
}
if($field.prop('disabled')) {
module.debug('Field is disabled. Skipping', identifier);
fieldValid = true;
}
else if(field.optional && module.is.blank($field)){
module.debug('Field is optional and blank. Skipping', identifier);
fieldValid = true;
}
else if(field.depends && module.is.empty($dependsField)) {
module.debug('Field depends on another value that is not present or empty. Skipping', $dependsField);
fieldValid = true;
}
else if(field.rules !== undefined) {
$.each(field.rules, function(index, rule) {
if( module.has.field(identifier) && !( module.validate.rule(field, rule) ) ) {
module.debug('Field is invalid', identifier, rule.type);
fieldErrors.push(module.get.prompt(rule, field));
fieldValid = false;
}
});
}
if(fieldValid) {
if(showErrors) {
module.remove.prompt(identifier, fieldErrors);
settings.onValid.call($field);
}
}
else {
if(showErrors) {
formErrors = formErrors.concat(fieldErrors);
module.add.prompt(identifier, fieldErrors);
settings.onInvalid.call($field, fieldErrors);
}
return false;
}
return true;
},
// takes validation rule and returns whether field passes rule
rule: function(field, rule) {
var
$field = module.get.field(field.identifier),
type = rule.type,
value = $field.val(),
isValid = true,
ancillary = module.get.ancillaryValue(rule),
ruleName = module.get.ruleName(rule),
ruleFunction = settings.rules[ruleName]
;
if( !$.isFunction(ruleFunction) ) {
module.error(error.noRule, ruleName);
return;
}
// cast to string avoiding encoding special values
value = (value === undefined || value === '' || value === null)
? ''
: $.trim(value + '')
;
return ruleFunction.call($field, value, ancillary);
}
},
setting: function(name, value) {
if( $.isPlainObject(name) ) {
$.extend(true, settings, name);
}
else if(value !== undefined) {
settings[name] = value;
}
else {
return settings[name];
}
},
internal: function(name, value) {
if( $.isPlainObject(name) ) {
$.extend(true, module, name);
}
else if(value !== undefined) {
module[name] = value;
}
else {
return module[name];
}
},
debug: function() {
if(!settings.silent && settings.debug) {
if(settings.performance) {
module.performance.log(arguments);
}
else {
module.debug = Function.prototype.bind.call(console.info, console, settings.name + ':');
module.debug.apply(console, arguments);
}
}
},
verbose: function() {
if(!settings.silent && settings.verbose && settings.debug) {
if(settings.performance) {
module.performance.log(arguments);
}
else {
module.verbose = Function.prototype.bind.call(console.info, console, settings.name + ':');
module.verbose.apply(console, arguments);
}
}
},
error: function() {
if(!settings.silent) {
module.error = Function.prototype.bind.call(console.error, console, settings.name + ':');
module.error.apply(console, arguments);
}
},
performance: {
log: function(message) {
var
currentTime,
executionTime,
previousTime
;
if(settings.performance) {
currentTime = new Date().getTime();
previousTime = time || currentTime;
executionTime = currentTime - previousTime;
time = currentTime;
performance.push({
'Name' : message[0],
'Arguments' : [].slice.call(message, 1) || '',
'Element' : element,
'Execution Time' : executionTime
});
}
clearTimeout(module.performance.timer);
module.performance.timer = setTimeout(module.performance.display, 500);
},
display: function() {
var
title = settings.name + ':',
totalTime = 0
;
time = false;
clearTimeout(module.performance.timer);
$.each(performance, function(index, data) {
totalTime += data['Execution Time'];
});
title += ' ' + totalTime + 'ms';
if(moduleSelector) {
title += ' \'' + moduleSelector + '\'';
}
if($allModules.length > 1) {
title += ' ' + '(' + $allModules.length + ')';
}
if( (console.group !== undefined || console.table !== undefined) && performance.length > 0) {
console.groupCollapsed(title);
if(console.table) {
console.table(performance);
}
else {
$.each(performance, function(index, data) {
console.log(data['Name'] + ': ' + data['Execution Time']+'ms');
});
}
console.groupEnd();
}
performance = [];
}
},
invoke: function(query, passedArguments, context) {
var
object = instance,
maxDepth,
found,
response
;
passedArguments = passedArguments || queryArguments;
context = element || context;
if(typeof query == 'string' && object !== undefined) {
query = query.split(/[\. ]/);
maxDepth = query.length - 1;
$.each(query, function(depth, value) {
var camelCaseValue = (depth != maxDepth)
? value + query[depth + 1].charAt(0).toUpperCase() + query[depth + 1].slice(1)
: query
;
if( $.isPlainObject( object[camelCaseValue] ) && (depth != maxDepth) ) {
object = object[camelCaseValue];
}
else if( object[camelCaseValue] !== undefined ) {
found = object[camelCaseValue];
return false;
}
else if( $.isPlainObject( object[value] ) && (depth != maxDepth) ) {
object = object[value];
}
else if( object[value] !== undefined ) {
found = object[value];
return false;
}
else {
return false;
}
});
}
if( $.isFunction( found ) ) {
response = found.apply(context, passedArguments);
}
else if(found !== undefined) {
response = found;
}
if($.isArray(returnedValue)) {
returnedValue.push(response);
}
else if(returnedValue !== undefined) {
returnedValue = [returnedValue, response];
}
else if(response !== undefined) {
returnedValue = response;
}
return found;
}
};
module.initialize();
})
;
return (returnedValue !== undefined)
? returnedValue
: this
;
};
$.fn.form.settings = {
name : 'Form',
namespace : 'form',
debug : false,
verbose : false,
performance : true,
fields : false,
keyboardShortcuts : true,
on : 'submit',
inline : false,
delay : 200,
revalidate : true,
transition : 'scale',
duration : 200,
onValid : function() {},
onInvalid : function() {},
onSuccess : function() { return true; },
onFailure : function() { return false; },
metadata : {
defaultValue : 'default',
validate : 'validate'
},
regExp: {
htmlID : /^[a-zA-Z][\w:.-]*$/g,
bracket : /\[(.*)\]/i,
decimal : /^\d+\.?\d*$/,
email : /^[a-z0-9!#$%&'*+\/=?^_`{|}~.-]+@[a-z0-9]([a-z0-9-]*[a-z0-9])?(\.[a-z0-9]([a-z0-9-]*[a-z0-9])?)*$/i,
escape : /[\-\[\]\/\{\}\(\)\*\+\?\.\\\^\$\|]/g,
flags : /^\/(.*)\/(.*)?/,
integer : /^\-?\d+$/,
number : /^\-?\d*(\.\d+)?$/,
url : /(https?:\/\/(?:www\.|(?!www))[^\s\.]+\.[^\s]{2,}|www\.[^\s]+\.[^\s]{2,})/i
},
text: {
unspecifiedRule : 'Please enter a valid value',
unspecifiedField : 'This field'
},
prompt: {
empty : '{name} must have a value',
checked : '{name} must be checked',
email : '{name} must be a valid e-mail',
url : '{name} must be a valid url',
regExp : '{name} is not formatted correctly',
integer : '{name} must be an integer',
decimal : '{name} must be a decimal number',
number : '{name} must be set to a number',
is : '{name} must be "{ruleValue}"',
isExactly : '{name} must be exactly "{ruleValue}"',
not : '{name} cannot be set to "{ruleValue}"',
notExactly : '{name} cannot be set to exactly "{ruleValue}"',
contain : '{name} cannot contain "{ruleValue}"',
containExactly : '{name} cannot contain exactly "{ruleValue}"',
doesntContain : '{name} must contain "{ruleValue}"',
doesntContainExactly : '{name} must contain exactly "{ruleValue}"',
minLength : '{name} must be at least {ruleValue} characters',
length : '{name} must be at least {ruleValue} characters',
exactLength : '{name} must be exactly {ruleValue} characters',
maxLength : '{name} cannot be longer than {ruleValue} characters',
match : '{name} must match {ruleValue} field',
different : '{name} must have a different value than {ruleValue} field',
creditCard : '{name} must be a valid credit card number',
minCount : '{name} must have at least {ruleValue} choices',
exactCount : '{name} must have exactly {ruleValue} choices',
maxCount : '{name} must have {ruleValue} or less choices'
},
selector : {
checkbox : 'input[type="checkbox"], input[type="radio"]',
clear : '.clear',
field : 'input, textarea, select',
group : '.field',
input : 'input',
message : '.error.message',
prompt : '.prompt.label',
radio : 'input[type="radio"]',
reset : '.reset:not([type="reset"])',
submit : '.submit:not([type="submit"])',
uiCheckbox : '.ui.checkbox',
uiDropdown : '.ui.dropdown'
},
className : {
error : 'error',
label : 'ui prompt label',
pressed : 'down',
success : 'success'
},
error: {
identifier : 'You must specify a string identifier for each field',
method : 'The method you called is not defined.',
noRule : 'There is no rule matching the one you specified',
oldSyntax : 'Starting in 2.0 forms now only take a single settings object. Validation settings converted to new syntax automatically.'
},
templates: {
// template that produces error message
error: function(errors) {
var
html = '<ul class="list">'
;
$.each(errors, function(index, value) {
html += '<li>' + value + '</li>';
});
html += '</ul>';
return $(html);
},
// template that produces label
prompt: function(errors) {
return $('<div/>')
.addClass('ui basic red pointing prompt label')
.html(errors[0])
;
}
},
rules: {
// is not empty or blank string
empty: function(value) {
return !(value === undefined || '' === value || $.isArray(value) && value.length === 0);
},
// checkbox checked
checked: function() {
return ($(this).filter(':checked').length > 0);
},
// is most likely an email
email: function(value){
return $.fn.form.settings.regExp.email.test(value);
},
// value is most likely url
url: function(value) {
return $.fn.form.settings.regExp.url.test(value);
},
// matches specified regExp
regExp: function(value, regExp) {
if(regExp instanceof RegExp) {
return value.match(regExp);
}
var
regExpParts = regExp.match($.fn.form.settings.regExp.flags),
flags
;
// regular expression specified as /baz/gi (flags)
if(regExpParts) {
regExp = (regExpParts.length >= 2)
? regExpParts[1]
: regExp
;
flags = (regExpParts.length >= 3)
? regExpParts[2]
: ''
;
}
return value.match( new RegExp(regExp, flags) );
},
// is valid integer or matches range
integer: function(value, range) {
var
intRegExp = $.fn.form.settings.regExp.integer,
min,
max,
parts
;
if( !range || ['', '..'].indexOf(range) !== -1) {
// do nothing
}
else if(range.indexOf('..') == -1) {
if(intRegExp.test(range)) {
min = max = range - 0;
}
}
else {
parts = range.split('..', 2);
if(intRegExp.test(parts[0])) {
min = parts[0] - 0;
}
if(intRegExp.test(parts[1])) {
max = parts[1] - 0;
}
}
return (
intRegExp.test(value) &&
(min === undefined || value >= min) &&
(max === undefined || value <= max)
);
},
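      // illustrative note: bracketed arguments such as 'integer[18..65]' (the values here are examples,
      // not taken from this file) are extracted by get.ancillaryValue and passed to the rule above as `range`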
// is valid number (with decimal)
decimal: function(value) {
return $.fn.form.settings.regExp.decimal.test(value);
},
// is valid number
number: function(value) {
return $.fn.form.settings.regExp.number.test(value);
},
// is value (case insensitive)
is: function(value, text) {
text = (typeof text == 'string')
? text.toLowerCase()
: text
;
value = (typeof value == 'string')
? value.toLowerCase()
: value
;
return (value == text);
},
// is value
isExactly: function(value, text) {
return (value == text);
},
// value is not another value (case insensitive)
not: function(value, notValue) {
value = (typeof value == 'string')
? value.toLowerCase()
: value
;
notValue = (typeof notValue == 'string')
? notValue.toLowerCase()
: notValue
;
return (value != notValue);
},
// value is not another value (case sensitive)
notExactly: function(value, notValue) {
return (value != notValue);
},
// value contains text (insensitive)
contains: function(value, text) {
// escape regex characters
text = text.replace($.fn.form.settings.regExp.escape, "\\$&");
return (value.search( new RegExp(text, 'i') ) !== -1);
},
// value contains text (case sensitive)
containsExactly: function(value, text) {
// escape regex characters
text = text.replace($.fn.form.settings.regExp.escape, "\\$&");
return (value.search( new RegExp(text) ) !== -1);
},
// value contains text (insensitive)
doesntContain: function(value, text) {
// escape regex characters
text = text.replace($.fn.form.settings.regExp.escape, "\\$&");
return (value.search( new RegExp(text, 'i') ) === -1);
},
// value contains text (case sensitive)
doesntContainExactly: function(value, text) {
// escape regex characters
text = text.replace($.fn.form.settings.regExp.escape, "\\$&");
return (value.search( new RegExp(text) ) === -1);
},
// is at least string length
minLength: function(value, requiredLength) {
return (value !== undefined)
? (value.length >= requiredLength)
: false
;
},
      // see release notes for 2.0.6 (this is a duplicate of minLength)
length: function(value, requiredLength) {
return (value !== undefined)
? (value.length >= requiredLength)
: false
;
},
// is exactly length
exactLength: function(value, requiredLength) {
return (value !== undefined)
? (value.length == requiredLength)
: false
;
},
// is less than length
maxLength: function(value, maxLength) {
return (value !== undefined)
? (value.length <= maxLength)
: false
;
},
// matches another field
match: function(value, identifier) {
var
$form = $(this),
matchingValue
;
if( $('[data-validate="'+ identifier +'"]').length > 0 ) {
matchingValue = $('[data-validate="'+ identifier +'"]').val();
}
else if($('#' + identifier).length > 0) {
matchingValue = $('#' + identifier).val();
}
else if($('[name="' + identifier +'"]').length > 0) {
matchingValue = $('[name="' + identifier + '"]').val();
}
else if( $('[name="' + identifier +'[]"]').length > 0 ) {
            matchingValue = $('[name="' + identifier +'[]"]').val();
}
return (matchingValue !== undefined)
? ( value.toString() == matchingValue.toString() )
: false
;
},
// different than another field
different: function(value, identifier) {
// use either id or name of field
var
$form = $(this),
matchingValue
;
if( $('[data-validate="'+ identifier +'"]').length > 0 ) {
matchingValue = $('[data-validate="'+ identifier +'"]').val();
}
else if($('#' + identifier).length > 0) {
matchingValue = $('#' + identifier).val();
}
else if($('[name="' + identifier +'"]').length > 0) {
matchingValue = $('[name="' + identifier + '"]').val();
}
else if( $('[name="' + identifier +'[]"]').length > 0 ) {
            matchingValue = $('[name="' + identifier +'[]"]').val();
}
return (matchingValue !== undefined)
? ( value.toString() !== matchingValue.toString() )
: false
;
},
creditCard: function(cardNumber, cardTypes) {
var
cards = {
visa: {
pattern : /^4/,
length : [16]
},
amex: {
pattern : /^3[47]/,
length : [15]
},
mastercard: {
pattern : /^5[1-5]/,
length : [16]
},
discover: {
pattern : /^(6011|622(12[6-9]|1[3-9][0-9]|[2-8][0-9]{2}|9[0-1][0-9]|92[0-5]|64[4-9])|65)/,
length : [16]
},
unionPay: {
pattern : /^(62|88)/,
length : [16, 17, 18, 19]
},
jcb: {
pattern : /^35(2[89]|[3-8][0-9])/,
length : [16]
},
maestro: {
pattern : /^(5018|5020|5038|6304|6759|676[1-3])/,
length : [12, 13, 14, 15, 16, 17, 18, 19]
},
dinersClub: {
pattern : /^(30[0-5]|^36)/,
length : [14]
},
laser: {
pattern : /^(6304|670[69]|6771)/,
length : [16, 17, 18, 19]
},
visaElectron: {
pattern : /^(4026|417500|4508|4844|491(3|7))/,
length : [16]
}
},
valid = {},
validCard = false,
requiredTypes = (typeof cardTypes == 'string')
? cardTypes.split(',')
: false,
unionPay,
validation
;
if(typeof cardNumber !== 'string' || cardNumber.length === 0) {
return;
}
// allow dashes in card
cardNumber = cardNumber.replace(/[\-]/g, '');
// verify card types
if(requiredTypes) {
$.each(requiredTypes, function(index, type){
// verify each card type
validation = cards[type];
if(validation) {
valid = {
length : ($.inArray(cardNumber.length, validation.length) !== -1),
pattern : (cardNumber.search(validation.pattern) !== -1)
};
if(valid.length && valid.pattern) {
validCard = true;
}
}
});
if(!validCard) {
return false;
}
}
// skip luhn for UnionPay
unionPay = {
number : ($.inArray(cardNumber.length, cards.unionPay.length) !== -1),
pattern : (cardNumber.search(cards.unionPay.pattern) !== -1)
};
if(unionPay.number && unionPay.pattern) {
return true;
}
// verify luhn, adapted from <https://gist.github.com/2134376>
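            // the second row of producedValue below is the precomputed "double the digit, subtract 9 if over 9"
            // Luhn mapping, so the checksum loop can handle doubled digits without branching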
var
length = cardNumber.length,
multiple = 0,
producedValue = [
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
[0, 2, 4, 6, 8, 1, 3, 5, 7, 9]
],
sum = 0
;
while (length--) {
sum += producedValue[multiple][parseInt(cardNumber.charAt(length), 10)];
multiple ^= 1;
}
return (sum % 10 === 0 && sum > 0);
},
minCount: function(value, minCount) {
if(minCount == 0) {
return true;
}
if(minCount == 1) {
return (value !== '');
}
return (value.split(',').length >= minCount);
},
exactCount: function(value, exactCount) {
if(exactCount == 0) {
return (value === '');
}
if(exactCount == 1) {
return (value !== '' && value.search(',') === -1);
}
return (value.split(',').length == exactCount);
},
maxCount: function(value, maxCount) {
if(maxCount == 0) {
return false;
}
if(maxCount == 1) {
return (value.search(',') === -1);
}
return (value.split(',').length <= maxCount);
}
}
};
})( jQuery, window, document );
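
// Usage sketch (illustrative only; the selector, field names and rule values below are assumptions,
// not part of this plugin file). Shorthand rules are expanded by get.fieldsFromShorthand above.
$('.ui.form')
  .form({
    on     : 'blur',
    fields : {
      email    : ['empty', 'email'],
      password : ['empty', 'minLength[6]']
    },
    onSuccess: function(event, values) {
      console.log('form valid', values);
      return false; // keep the demo form from actually submitting
    }
  })
;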
|
(function(){tinymce.PluginManager.requireLangPack("example");tinymce.create("tinymce.plugins.ExamplePlugin",{init:function(a,b){a.addCommand("mceExample",function(){a.windowManager.open({file:b+"/dialog.htm",width:320+parseInt(a.getLang("example.delta_width",0)),height:120+parseInt(a.getLang("example.delta_height",0)),inline:1},{plugin_url:b,some_custom_arg:"custom arg"});});a.addButton("example",{title:"example.desc",cmd:"mceExample",image:b+"/img/example.gif"});a.onNodeChange.add(function(e,d,c){d.setActive("example",c.nodeName=="IMG");});},createControl:function(a,b){return null;},getInfo:function(){return{longname:"Example plugin",author:"Some author",authorurl:"http://tinymce.moxiecode.com",infourl:"http://wiki.moxiecode.com/index.php/TinyMCE:Plugins/example",version:"1.0"};}});tinymce.PluginManager.add("example",tinymce.plugins.ExamplePlugin);})(); |
/*
* This header is generated by classdump-dyld 1.5
* on Wednesday, April 28, 2021 at 9:05:43 PM Mountain Standard Time
* Operating System: Version 14.5 (Build 18L204)
* Image Source: /System/Library/Frameworks/MediaPlayer.framework/MediaPlayer
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by Elias Limneos. Updated by Kevin Bradley.
*/
#import <MediaPlayer/MPLibraryKeepLocalStatusObserverConfiguration.h>
@interface MPLibraryKeepLocalStatusObserverIndividualEntityConfiguration : MPLibraryKeepLocalStatusObserverConfiguration {
BOOL _collectionType;
BOOL _hasNonPurgeableAsset;
BOOL _storeRedownloadable;
long long _managedStatus;
}
@property (assign,getter=isCollectionType,nonatomic) BOOL collectionType; //@synthesize collectionType=_collectionType - In the implementation block
@property (assign,nonatomic) long long managedStatus; //@synthesize managedStatus=_managedStatus - In the implementation block
@property (assign,nonatomic) BOOL hasNonPurgeableAsset; //@synthesize hasNonPurgeableAsset=_hasNonPurgeableAsset - In the implementation block
@property (assign,getter=isStoreRedownloadable,nonatomic) BOOL storeRedownloadable; //@synthesize storeRedownloadable=_storeRedownloadable - In the implementation block
-(BOOL)isEqual:(id)arg1 ;
-(unsigned long long)hash;
-(void)setCollectionType:(BOOL)arg1 ;
-(BOOL)isStoreRedownloadable;
-(BOOL)isCollectionType;
-(long long)managedStatus;
-(void)setManagedStatus:(long long)arg1 ;
-(BOOL)hasNonPurgeableAsset;
-(void)setHasNonPurgeableAsset:(BOOL)arg1 ;
-(void)setStoreRedownloadable:(BOOL)arg1 ;
@end
|
import pandas as pd
import numpy as np
np.seterr(divide="raise")
rolling_zscore = (
lambda x: (x - x.rolling(window=200, min_periods=10).mean())
/ x.rolling(window=200, min_periods=10).std()
)
rolling_percentile = lambda x: x.rolling(200, min_periods=10).apply(
lambda x: pd.Series(x).rank(pct=True)[0]
)
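# rolling_zscore standardizes a series against its trailing 200-observation mean and std
# (at least 10 observations required); rolling_percentile ranks the first value of each
# 200-observation window as a percentile and is not used by the indicators below.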
def PRICE_zscore(data):
data["Adjusted_close_zscore"] = rolling_zscore(data["Adjusted_close"])
return data
def SMA_zscore(data, ndays):
data["SMA_zscore"] = rolling_zscore(
data["Adjusted_close"].rolling(ndays, min_periods=5).mean()
)
return data
def WMA_zscore(data, ndays):
weights = np.arange(1, ndays + 1)
data["WMA_zscore"] = rolling_zscore(
data["Adjusted_close"]
.rolling(ndays, min_periods=5)
.apply(
lambda prices: np.dot(prices, np.arange(1, len(prices) + 1))
/ np.arange(1, len(prices) + 1).sum(),
raw=True,
)
)
return data
def EMA_zscore(data, ndays):  # ewm() uses approximate formula
data["EMA_zscore"] = rolling_zscore(
data["Adjusted_close"].ewm(span=ndays, adjust=False, min_periods=5).mean()
)
return data
# Triple Exponential Moving Average
def TEMA_zscore(data, ndays):  # ewm() uses approximate formula
ema1 = data["Adjusted_close"].ewm(span=ndays, adjust=False, min_periods=5).mean()
ema2 = ema1.ewm(span=ndays, adjust=False).mean()
ema3 = ema2.ewm(span=ndays, adjust=False).mean()
data["TEMA_zscore"] = rolling_zscore(((3 * ema1) - (3 * ema2) + ema3))
return data
# Moving Average Convergence Divergence Oscillator normalized by Adjusted Close
def MACD(data, ndays, ndays2):
data["MACD"] = (
data["Adjusted_close"].ewm(span=ndays, adjust=False, min_periods=5).mean()
- data["Adjusted_close"].ewm(span=ndays2, adjust=False, min_periods=5).mean()
) / data["Adjusted_close"]
return data
# Commodity Channel Index / 100
def CCI(data, ndays):
TP = (data["High"] + data["Low"] + data["Close"]) / 3
data["CCI"] = (
0.01
* (TP - TP.rolling(ndays, min_periods=5).mean())
/ (0.015 * TP.rolling(ndays, min_periods=5).std())
).replace([np.inf, -np.inf], 0)
return data
# Mass Index / 100
def MSSINDX(data, ndays, ndays2):
ema1 = (
(data["High"] - data["Low"]).ewm(span=ndays, adjust=False, min_periods=5).mean()
)
emaratio = ema1 / ema1.ewm(span=ndays, adjust=False, min_periods=5).mean().fillna(0)
data["MSSINDX"] = emaratio.rolling(ndays2, min_periods=5).sum() / 100
return data
# Aroon indicators / 100
def AROON(data, ndays):
data["AROON_UP"] = (
data["High"]
.rolling(ndays + 1, min_periods=5)
.apply(lambda x: x.argmax(), raw=True)
/ ndays
)
data["AROON_DOWN"] = (
data["Low"]
.rolling(ndays + 1, min_periods=5)
.apply(lambda x: x.argmin(), raw=True)
/ ndays
)
return data
# Relative Strength Index / 100
def RSI(data, ndays):
delta = data["Adjusted_close"].diff(1)
up, down = delta.copy(), delta.copy()
up[up < 0] = 0
down[down > 0] = 0
roll_up = up.ewm(com=ndays - 1, adjust=False, min_periods=5).mean()
roll_down = down.ewm(com=ndays - 1, adjust=False, min_periods=5).mean().abs()
RS = roll_up / roll_down
data["RSI"] = (1 - 1 / (1 + RS)).fillna(1)
return data
# K (fast) stochastic oscillator / 100
def K(data, ndays):
low = data["Low"].rolling(window=ndays, min_periods=5).min()
high = data["High"].rolling(window=ndays, min_periods=5).max()
data["%K"] = ((data["Close"] - low) / (high - low)).replace([np.inf, -np.inf], 0.5)
return data
# D (slow) stochastic oscillator / 100
def D(data, ndays):
data["%D"] = data["%K"].rolling(window=ndays).mean()
return data
# Williams R / (-100)
def WILLSR(data, ndays):
low = data["Low"].rolling(window=ndays, min_periods=5).min()
high = data["High"].rolling(window=ndays, min_periods=5).max()
data["WILLSR"] = ((high - data["Close"]) / (high - low)).replace(
[np.inf, -np.inf], 0.5
)
return data
# Rate of change
def ROC(data, ndays):
data["ROC"] = (
data["Adjusted_close"] - data["Adjusted_close"].shift(ndays, fill_value=0)
) / data["Adjusted_close"].shift(
ndays, fill_value=1000000000000000
    ) # When there are not yet enough values to calculate the ROC, make it effectively 0 by dividing by a very large number
return data
# Ultimate Oscillator / 100
def ULTOSC(data, ndays, ndays2, ndays3):
trlow = np.where(
data["Low"] > data["Close"].shift(1), data["Close"].shift(1), data["Low"]
)
trhigh = np.where(
data["High"] < data["Close"].shift(1), data["Close"].shift(1), data["High"]
)
a = pd.DataFrame(
data=np.transpose(np.array([data["Close"] - trlow, trhigh - trlow])),
columns=["buypress", "trrange"],
index=data.index,
)
avg = a.buypress.rolling(ndays, min_periods=5).sum() / a.trrange.rolling(
ndays, min_periods=5
).sum().fillna(0.5)
avg2 = a.buypress.rolling(ndays2, min_periods=5).sum() / a.trrange.rolling(
ndays2, min_periods=5
).sum().fillna(0.5)
avg3 = a.buypress.rolling(ndays3, min_periods=5).sum() / a.trrange.rolling(
ndays3, min_periods=5
).sum().fillna(0.5)
data["ULTOSC"] = (4 * avg + 2 * avg2 + avg3) / 7
return data
# On Balance Volume rolling standardization
def OBV_zscore(data):
a = np.where(
data["Adjusted_close"] > data["Adjusted_close"].shift(1), data["Volume"], 0
)
b = np.where(
data["Adjusted_close"] < data["Adjusted_close"].shift(1), -data["Volume"], 0
)
data["OBV_zscore"] = rolling_zscore(
pd.DataFrame(data=np.transpose(np.array((a + b).cumsum())), index=data.index)
)
return data
# Volume-Price Trend but normalized by 'normdays' days volume mean
def VPT(data, normdays):
data["VPT"] = (
(data["Adjusted_close"] - data["Adjusted_close"].shift(1))
* data["Volume"]
/ (
data["Adjusted_close"].shift(1)
* data["Volume"].rolling(window=normdays, min_periods=5).mean()
)
).cumsum()
return data
# Normalized Ease of Movement exponential moving average
def EMV(
data, ndays, normdays
): # ( (High-Low)/2 - (High.shift(1)-Low.shift(1))/2 ) *(High+Low)*100000000/Volume
midpoint = (data["High"] - data["Low"]) / 2
prev_midpoint = (data["High"].shift(1) + data["Low"].shift(1)) / 2
midpointmove_percent = (midpoint - prev_midpoint) / midpoint
nrmlzd_volume = (
data["Volume"] / data["Volume"].rolling(window=normdays, min_periods=5).mean()
)
nrmlzd_range = (data["High"] - data["Low"]) / (data["High"] - data["Low"]).rolling(
window=normdays, min_periods=5
).mean()
data["EMV"] = (
(midpointmove_percent * nrmlzd_range / (nrmlzd_volume * 100))
.fillna(0)
.rolling(ndays, min_periods=5)
.mean()
)
return data
# Chaikin Oscillator but with moneyflow volume normalized by 'normdays' days volume mean
def CHKOSC(data, ndays, ndays2, normdays):
moneyflowvol = (
(2 * data["Close"] - data["High"] - data["Low"])
* data["Volume"]
/ (
(data["High"] - data["Low"])
* data["Volume"].rolling(window=normdays, min_periods=5).mean()
)
)
moneyflowvol.fillna(0, inplace=True)
adline = moneyflowvol + moneyflowvol.shift(1)
data["CHKOSC"] = (
adline.ewm(span=ndays, adjust=False).mean()
- adline.ewm(span=ndays2, adjust=False).mean()
)
return data
# Accumulation Distribution but with moneyflow volume normalized by 'normdays' days volume mean
def AD(data, normdays):  # TODO: standardize in some way
moneyflowvol = (
(2 * data["Close"] - data["High"] - data["Low"])
* data["Volume"]
/ (
(data["High"] - data["Low"])
* data["Volume"].rolling(window=normdays, min_periods=5).mean()
)
)
moneyflowvol.fillna(0, inplace=True)
data["AD"] = np.cumsum(moneyflowvol)
return data
# Force Index Normalized by (price*volume)
def FINDX_zscore(data, ndays):
data["FINDX_zscore"] = rolling_zscore(
(
((data["Close"] - data["Close"].shift(1)) * data["Volume"])
.fillna(0)
.ewm(span=ndays, adjust=False, min_periods=5)
.mean()
/ (data["Close"] * data["Volume"])
).replace([np.inf, -np.inf], 0)
)
return data
# Average True Range / Close
def ATR(data, ndays):
trlow = np.where(
data["Low"] > data["Close"].shift(1), data["Close"].shift(1), data["Low"]
)
trhigh = np.where(
data["High"] < data["Close"].shift(1), data["Close"].shift(1), data["High"]
)
    tr = pd.DataFrame(
        data=np.transpose(np.array([(trhigh - trlow) / data["Close"].to_numpy()])),
        columns=["range"],
        index=data.index,
    )
data["ATR"] = tr.range.rolling(ndays, min_periods=5).mean()
return data
# Chaikin volatility / 100
def CHKVLT_zscore(data, ndays):
x = (data["High"] - data["Low"]).ewm(span=ndays, adjust=False, min_periods=5).mean()
chkvlt = (x - x.shift(10)) / x.shift(10)
data["CHKVLT_zscore"] = rolling_zscore(chkvlt.fillna(0))
return data
def VOL(data, ndays):
ret = data["Adjusted_close"] / data["Adjusted_close"].shift(1) - 1
data["VOL"] = ret.ewm(span=ndays).std()
return data
def add_technical_indicators(df):
PRICE_zscore(df)
# Trend
SMA_zscore(df, 10)
WMA_zscore(df, 10)
EMA_zscore(df, 10)
TEMA_zscore(df, 15)
MACD(df, 12, 26)
CCI(df, 20)
MSSINDX(df, 9, 25)
AROON(df, 7)
# Momentum
RSI(df, 14)
K(df, 14)
D(df, 3)
WILLSR(df, 15)
ROC(df, 10)
ULTOSC(df, 7, 14, 28)
# Volume
OBV_zscore(df)
# VPT(df, 60)
EMV(df, 14, 60)
CHKOSC(df, 3, 10, 60)
# AD(df, 60)
FINDX_zscore(df, 15)
# Volatility
# ATR(df, 14)
CHKVLT_zscore(df, 10)
VOL(df, 10)
return df
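

# Usage sketch (illustrative; the file name and "Date" column below are assumptions, not part of this module).
# The functions above expect a DataFrame with High, Low, Close, Adjusted_close and Volume columns.
if __name__ == "__main__":
    prices = pd.read_csv("prices.csv", parse_dates=["Date"], index_col="Date")
    enriched = add_technical_indicators(prices)
    print(enriched[["Adjusted_close_zscore", "RSI", "MACD"]].tail())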
|
#!/usr/bin/env python3
import datetime
import importlib
import os
import sys
import fcntl
import errno
import signal
import shutil
import subprocess
import textwrap
import time
import traceback
from multiprocessing import Process
from typing import Dict
from common.basedir import BASEDIR
from common.spinner import Spinner
from common.text_window import TextWindow
import selfdrive.crash as crash
from selfdrive.hardware import HARDWARE, EON, PC, TICI
from selfdrive.hardware.eon.apk import update_apks, pm_apply_packages, start_offroad
from selfdrive.swaglog import cloudlog, add_logentries_handler
from selfdrive.version import version, dirty
import re
from common.dp_conf import init_params_vals
os.environ['BASEDIR'] = BASEDIR
sys.path.append(os.path.join(BASEDIR, "pyextra"))
TOTAL_SCONS_NODES = 1225
MAX_BUILD_PROGRESS = 70
WEBCAM = os.getenv("WEBCAM") is not None
PREBUILT = os.path.exists(os.path.join(BASEDIR, 'prebuilt'))
def unblock_stdout():
# get a non-blocking stdout
child_pid, child_pty = os.forkpty()
if child_pid != 0: # parent
# child is in its own process group, manually pass kill signals
signal.signal(signal.SIGINT, lambda signum, frame: os.kill(child_pid, signal.SIGINT))
signal.signal(signal.SIGTERM, lambda signum, frame: os.kill(child_pid, signal.SIGTERM))
fcntl.fcntl(sys.stdout, fcntl.F_SETFL, fcntl.fcntl(sys.stdout, fcntl.F_GETFL) | os.O_NONBLOCK)
while True:
try:
dat = os.read(child_pty, 4096)
except OSError as e:
if e.errno == errno.EIO:
break
continue
if not dat:
break
try:
sys.stdout.write(dat.decode('utf8'))
except (OSError, IOError, UnicodeDecodeError):
pass
# os.wait() returns a tuple with the pid and a 16 bit value
  # whose low byte is the signal number and whose high byte is the exit status
exit_status = os.wait()[1] >> 8
os._exit(exit_status)
if __name__ == "__main__":
unblock_stdout()
# Start spinner
spinner = Spinner()
spinner.update_progress(0, 100)
if __name__ != "__main__":
spinner.close()
def build():
env = os.environ.copy()
env['SCONS_PROGRESS'] = "1"
env['SCONS_CACHE'] = "1"
nproc = os.cpu_count()
j_flag = "" if nproc is None else f"-j{nproc - 1}"
for retry in [True, False]:
scons = subprocess.Popen(["scons", j_flag], cwd=BASEDIR, env=env, stderr=subprocess.PIPE)
compile_output = []
# Read progress from stderr and update spinner
while scons.poll() is None:
try:
line = scons.stderr.readline()
if line is None:
continue
line = line.rstrip()
prefix = b'progress: '
if line.startswith(prefix):
i = int(line[len(prefix):])
spinner.update_progress(MAX_BUILD_PROGRESS * min(1., i / TOTAL_SCONS_NODES), 100.)
elif len(line):
compile_output.append(line)
print(line.decode('utf8', 'replace'))
except Exception:
pass
if scons.returncode != 0:
# Read remaining output
r = scons.stderr.read().split(b'\n')
compile_output += r
if retry and (not dirty):
if not os.getenv("CI"):
print("scons build failed, cleaning in")
for i in range(3, -1, -1):
print("....%d" % i)
time.sleep(1)
subprocess.check_call(["scons", "-c"], cwd=BASEDIR, env=env)
shutil.rmtree("/tmp/scons_cache", ignore_errors=True)
shutil.rmtree("/data/scons_cache", ignore_errors=True)
else:
print("scons build failed after retry")
sys.exit(1)
else:
        # Build failed, log errors
errors = [line.decode('utf8', 'replace') for line in compile_output
if any([err in line for err in [b'error: ', b'not found, needed by target']])]
error_s = "\n".join(errors)
add_logentries_handler(cloudlog)
cloudlog.error("scons build failed\n" + error_s)
ip = 'N/A'
if EON:
try:
result = subprocess.check_output(["ifconfig", "wlan0"], encoding='utf8')
ip = re.findall(r"inet addr:((\d+\.){3}\d+)", result)[0][0]
except:
ip = 'N/A'
# Show TextWindow
spinner.close()
error_s = "\n \n".join(["\n".join(textwrap.wrap(e, 65)) for e in errors])
with TextWindow(("openpilot failed to build (IP: %s)\n \n" % ip) + error_s) as t:
t.wait_for_exit()
exit(1)
else:
break
if __name__ == "__main__" and not PREBUILT:
build()
import cereal.messaging as messaging
from cereal import log
from common.params import Params, put_nonblocking
from selfdrive.registration import register
from selfdrive.launcher import launcher
# comment out anything you don't want to run
managed_processes = {
"thermald": "selfdrive.thermald.thermald",
"uploader": "selfdrive.loggerd.uploader",
"deleter": "selfdrive.loggerd.deleter",
"controlsd": "selfdrive.controls.controlsd",
"plannerd": "selfdrive.controls.plannerd",
"radard": "selfdrive.controls.radard",
"dmonitoringd": "selfdrive.monitoring.dmonitoringd",
"ubloxd": ("selfdrive/locationd", ["./ubloxd"]),
"loggerd": ("selfdrive/loggerd", ["./loggerd"]),
"logmessaged": "selfdrive.logmessaged",
"locationd": "selfdrive.locationd.locationd",
"tombstoned": "selfdrive.tombstoned",
"logcatd": ("selfdrive/logcatd", ["./logcatd"]),
"proclogd": ("selfdrive/proclogd", ["./proclogd"]),
"pandad": "selfdrive.pandad",
"ui": ("selfdrive/ui", ["./ui"]),
"calibrationd": "selfdrive.locationd.calibrationd",
"paramsd": "selfdrive.locationd.paramsd",
"camerad": ("selfdrive/camerad", ["./camerad"]),
"sensord": ("selfdrive/sensord", ["./sensord"]),
"clocksd": ("selfdrive/clocksd", ["./clocksd"]),
"updated": "selfdrive.updated",
"dmonitoringmodeld": ("selfdrive/modeld", ["./dmonitoringmodeld"]),
"modeld": ("selfdrive/modeld", ["./modeld"]),
"rtshield": "selfdrive.rtshield",
"systemd": "selfdrive.dragonpilot.systemd",
"appd": "selfdrive.dragonpilot.appd",
"gpxd": "selfdrive.dragonpilot.gpxd",
}
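# each managed_processes entry is either a python module path (started via selfdrive.launcher)
# or a (relative working directory, argv) tuple for a native binary; see start_managed_process below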
daemon_processes = {
"manage_athenad": ("selfdrive.athena.manage_athenad", "AthenadPid"),
}
running: Dict[str, Process] = {}
def get_running():
return running
# due to qualcomm kernel bugs SIGKILLing camerad sometimes causes page table corruption
unkillable_processes = ['camerad']
# processes to end with SIGKILL instead of SIGTERM
kill_processes = []
if EON:
kill_processes += [
'sensord',
]
persistent_processes = [
'pandad',
'thermald',
'logmessaged',
'ui',
'uploader',
'deleter',
'systemd',
]
if not PC:
persistent_processes += [
'updated',
'tombstoned',
'appd',
]
if EON:
persistent_processes += [
'sensord',
]
if TICI:
managed_processes["timezoned"] = "selfdrive.timezoned"
persistent_processes += ['timezoned']
car_started_processes = [
'controlsd',
'plannerd',
'loggerd',
'radard',
'calibrationd',
'paramsd',
'camerad',
'modeld',
'proclogd',
'locationd',
'clocksd',
'gpxd',
]
driver_view_processes = [
'camerad',
'dmonitoringd',
'dmonitoringmodeld'
]
if not PC or WEBCAM:
car_started_processes += [
'ubloxd',
'dmonitoringd',
'dmonitoringmodeld',
]
if EON:
car_started_processes += [
'rtshield',
]
else:
car_started_processes += [
'sensord',
]
def register_managed_process(name, desc, car_started=False):
global managed_processes, car_started_processes, persistent_processes
managed_processes[name] = desc
if car_started:
car_started_processes.append(name)
else:
persistent_processes.append(name)
# ****************** process management functions ******************
def nativelauncher(pargs, cwd):
# exec the process
os.chdir(cwd)
os.execvp(pargs[0], pargs)
def start_managed_process(name):
if name in running or name not in managed_processes:
return
proc = managed_processes[name]
if isinstance(proc, str):
cloudlog.info("starting python %s" % proc)
running[name] = Process(name=name, target=launcher, args=(proc,))
else:
pdir, pargs = proc
cwd = os.path.join(BASEDIR, pdir)
cloudlog.info("starting process %s" % name)
running[name] = Process(name=name, target=nativelauncher, args=(pargs, cwd))
running[name].start()
def start_daemon_process(name):
params = Params()
proc, pid_param = daemon_processes[name]
pid = params.get(pid_param, encoding='utf-8')
if pid is not None:
try:
os.kill(int(pid), 0)
with open(f'/proc/{pid}/cmdline') as f:
if proc in f.read():
# daemon is running
return
except (OSError, FileNotFoundError):
# process is dead
pass
cloudlog.info("starting daemon %s" % name)
proc = subprocess.Popen(['python', '-m', proc], # pylint: disable=subprocess-popen-preexec-fn
stdin=open('/dev/null', 'r'),
stdout=open('/dev/null', 'w'),
stderr=open('/dev/null', 'w'),
preexec_fn=os.setpgrp)
params.put(pid_param, str(proc.pid))
def prepare_managed_process(p, build=False):
proc = managed_processes[p]
if isinstance(proc, str):
# import this python
cloudlog.info("preimporting %s" % proc)
importlib.import_module(proc)
elif os.path.isfile(os.path.join(BASEDIR, proc[0], "SConscript")) and build:
# build this process
cloudlog.info("building %s" % (proc,))
try:
subprocess.check_call(["scons", "u", "-j4", "."], cwd=os.path.join(BASEDIR, proc[0]))
except subprocess.CalledProcessError:
# clean and retry if the build failed
cloudlog.warning("building %s failed, cleaning and retrying" % (proc, ))
subprocess.check_call(["scons", "-u", "-c", "."], cwd=os.path.join(BASEDIR, proc[0]))
subprocess.check_call(["scons", "-u", "-j4", "."], cwd=os.path.join(BASEDIR, proc[0]))
def join_process(process, timeout):
# Process().join(timeout) will hang due to a python 3 bug: https://bugs.python.org/issue28382
# We have to poll the exitcode instead
t = time.time()
while time.time() - t < timeout and process.exitcode is None:
time.sleep(0.001)
def kill_managed_process(name, retry=True):
if name not in running or name not in managed_processes:
return
cloudlog.info(f"killing {name}")
if running[name].exitcode is None:
sig = signal.SIGKILL if name in kill_processes else signal.SIGINT
os.kill(running[name].pid, sig)
join_process(running[name], 5)
if running[name].exitcode is None:
if not retry:
raise Exception(f"{name} failed to die")
if name in unkillable_processes:
cloudlog.critical("unkillable process %s failed to exit! rebooting in 15 if it doesn't die" % name)
join_process(running[name], 15)
if running[name].exitcode is None:
cloudlog.critical("unkillable process %s failed to die!" % name)
os.system("date >> /data/unkillable_reboot")
os.sync()
HARDWARE.reboot()
raise RuntimeError
else:
cloudlog.info("killing %s with SIGKILL" % name)
os.kill(running[name].pid, signal.SIGKILL)
running[name].join()
ret = running[name].exitcode
cloudlog.info(f"{name} is dead with {ret}")
del running[name]
return ret
def cleanup_all_processes(signal, frame):
cloudlog.info("caught ctrl-c %s %s" % (signal, frame))
if EON:
pm_apply_packages('disable')
for name in list(running.keys()):
kill_managed_process(name)
cloudlog.info("everything is dead")
def send_managed_process_signal(name, sig):
if name not in running or name not in managed_processes or \
running[name].exitcode is not None:
return
cloudlog.info(f"sending signal {sig} to {name}")
os.kill(running[name].pid, sig)
# ****************** run loop ******************
def manager_init(should_register=True):
os.umask(0) # Make sure we can create files with 777 permissions
# Create folders needed for msgq
try:
os.mkdir("/dev/shm")
except FileExistsError:
pass
except PermissionError:
print("WARNING: failed to make /dev/shm")
# set dongle id
if should_register:
reg_res = register(spinner)
if reg_res:
dongle_id = reg_res
else:
dongle_id = "c"*16
else:
dongle_id = "c"*16
os.environ['DONGLE_ID'] = dongle_id
if not dirty:
os.environ['CLEAN'] = '1'
cloudlog.bind_global(dongle_id=dongle_id, version=version, dirty=dirty,
device=HARDWARE.get_device_type())
crash.bind_user(id=dongle_id)
crash.bind_extra(version=version, dirty=dirty, device=HARDWARE.get_device_type())
# ensure shared libraries are readable by apks
if EON:
os.chmod(BASEDIR, 0o755)
os.chmod("/dev/shm", 0o777)
os.chmod(os.path.join(BASEDIR, "cereal"), 0o755)
os.chmod(os.path.join(BASEDIR, "cereal", "libmessaging_shared.so"), 0o755)
def manager_thread():
cloudlog.info("manager start")
cloudlog.info({"environ": os.environ})
params = Params()
# save boot log
if params.get("dp_logger") == b'1':
subprocess.call("./bootlog", cwd=os.path.join(BASEDIR, "selfdrive/loggerd"))
if params.get("dp_athenad") == b'1':
# start daemon processes
for p in daemon_processes:
start_daemon_process(p)
# start persistent processes
for p in persistent_processes:
start_managed_process(p)
# start offroad
if EON:
pm_apply_packages('enable')
start_offroad()
if os.getenv("NOBOARD") is not None:
del managed_processes["pandad"]
if os.getenv("BLOCK") is not None:
for k in os.getenv("BLOCK").split(","):
del managed_processes[k]
started_prev = False
logger_dead = False
params = Params()
device_state_sock = messaging.sub_sock('deviceState')
pm = messaging.PubMaster(['managerState'])
while 1:
msg = messaging.recv_sock(device_state_sock, wait=True)
if msg.deviceState.freeSpacePercent < 5:
logger_dead = True
if msg.deviceState.started:
for p in car_started_processes:
if p == "loggerd" and logger_dead:
kill_managed_process(p)
else:
start_managed_process(p)
else:
logger_dead = False
driver_view = params.get("IsDriverViewEnabled") == b"1"
# TODO: refactor how manager manages processes
for p in reversed(car_started_processes):
if p not in driver_view_processes or not driver_view:
kill_managed_process(p)
for p in driver_view_processes:
if driver_view:
start_managed_process(p)
else:
kill_managed_process(p)
# trigger an update after going offroad
if started_prev:
os.sync()
send_managed_process_signal("updated", signal.SIGHUP)
started_prev = msg.deviceState.started
# check the status of all processes, did any of them die?
running_list = ["%s%s\u001b[0m" % ("\u001b[32m" if running[p].is_alive() else "\u001b[31m", p) for p in running]
cloudlog.debug(' '.join(running_list))
# send managerState
states = []
for p in managed_processes:
state = log.ManagerState.ProcessState.new_message()
state.name = p
if p in running:
state.running = running[p].is_alive()
state.pid = running[p].pid
state.exitCode = running[p].exitcode or 0
states.append(state)
msg = messaging.new_message('managerState')
msg.managerState.processes = states
pm.send('managerState', msg)
# Exit main loop when uninstall is needed
if params.get("DoUninstall", encoding='utf8') == "1":
break
def manager_prepare():
# build all processes
os.chdir(os.path.dirname(os.path.abspath(__file__)))
total = 100.0 - (0 if PREBUILT else MAX_BUILD_PROGRESS)
for i, p in enumerate(managed_processes):
perc = (100.0 - total) + total * (i + 1) / len(managed_processes)
spinner.update_progress(perc, 100.)
prepare_managed_process(p)
def main():
params = Params()
params.manager_start()
default_params = [
("CommunityFeaturesToggle", "0"),
("CompletedTrainingVersion", "0"),
("IsRHD", "0"),
("IsMetric", "0"),
("RecordFront", "0"),
("HasAcceptedTerms", "0"),
("HasCompletedSetup", "0"),
("IsUploadRawEnabled", "1"),
("IsLdwEnabled", "1"),
("LastUpdateTime", datetime.datetime.utcnow().isoformat().encode('utf8')),
("OpenpilotEnabledToggle", "1"),
("VisionRadarToggle", "0"),
("LaneChangeEnabled", "1"),
("IsDriverViewEnabled", "0"),
]
# set unset params
for k, v in default_params:
if params.get(k) is None:
params.put(k, v)
# is this dashcam?
if os.getenv("PASSIVE") is not None:
params.put("Passive", str(int(os.getenv("PASSIVE"))))
if params.get("Passive") is None:
raise Exception("Passive must be set to continue")
init_params_vals(params)
if EON:
update_apks()
manager_init(params.get('dp_reg') == b'1')
manager_prepare()
spinner.close()
if os.getenv("PREPAREONLY") is not None:
return
# dp
del managed_processes['tombstoned']
steering_monitor = params.get("dp_steering_monitor") == b'1'
if not steering_monitor and params.get("dp_driver_monitor") == b'0':
del managed_processes['loggerd']
del managed_processes['logmessaged']
del managed_processes['proclogd']
del managed_processes['logcatd']
del managed_processes['dmonitoringd']
del managed_processes['dmonitoringmodeld']
elif params.get("dp_logger") == b'0' or \
params.get("dp_atl") == b'1' or \
not steering_monitor:
del managed_processes['loggerd']
del managed_processes['logmessaged']
del managed_processes['proclogd']
del managed_processes['logcatd']
if params.get("dp_uploader") == b'0':
del managed_processes['uploader']
if params.get("dp_updated") == b'0':
del managed_processes['updated']
if params.get('dp_gpxd') == b'0':
del managed_processes['gpxd']
# SystemExit on sigterm
signal.signal(signal.SIGTERM, lambda signum, frame: sys.exit(1))
try:
manager_thread()
except Exception:
traceback.print_exc()
crash.capture_exception()
finally:
cleanup_all_processes(None, None)
if params.get("DoUninstall", encoding='utf8') == "1":
cloudlog.warning("uninstalling")
HARDWARE.uninstall()
if __name__ == "__main__":
try:
main()
except Exception:
add_logentries_handler(cloudlog)
cloudlog.exception("Manager failed to start")
ip = 'N/A'
if EON:
try:
result = subprocess.check_output(["ifconfig", "wlan0"], encoding='utf8')
ip = re.findall(r"inet addr:((\d+\.){3}\d+)", result)[0][0]
except:
ip = 'N/A'
# Show last 3 lines of traceback
error = traceback.format_exc(-3)
error = ("Manager failed to start (IP: %s)\n \n" % ip) + error
spinner.close()
with TextWindow(error) as t:
t.wait_for_exit()
raise
# manual exit because we are forked
sys.exit(0)
|
const {Collection} = require('./collection');
class HomeCollection extends Collection {
reload(_, cb) {
let pageUrl = new PageURL(this.url);
this.fetch(this.url).then((doc)=>{
let titles = doc.querySelectorAll('.latest-tab-nav > ul > li');
let cols = doc.querySelectorAll('.latest-tab-box .latest-item');
let len = titles.length;
let items = [];
for (let i = 0; i < len; ++i) {
let telem = titles[i];
let item = glib.DataItem.new();
item.type = glib.DataItem.Type.Header;
item.title = telem.text;
items.push(item);
let celem = cols[i];
let list = celem.querySelectorAll('.img-list > li');
for (let node of list) {
let link = node.querySelector('a.play-img');
let img = link.querySelector('img');
let item = glib.DataItem.new();
item.title = link.attr('title');
item.link = pageUrl.href(link.attr('href'));
item.picture = img.attr('src');
item.subtitle = node.querySelector('.time').text;
items.push(item);
}
}
this.setData(items);
cb.apply(null);
}).catch((err)=>{
if (err instanceof Error) {
console.log("Err " + err.message + " stack " + err.stack);
err = glib.Error.new(305, err.message);
}
cb.apply(err);
});
return true;
}
}
class CategoryCollection extends Collection {
constructor(data) {
super(data);
this.page = 0;
}
async fetch(url) {
let pageUrl = new PageURL(url);
let doc = await super.fetch(url);
let elems = doc.querySelectorAll('.img-list > li > a');
let results = [];
for (let i = 0, t = elems.length; i < t; ++i) {
let elem = elems[i];
let item = glib.DataItem.new();
item.title = elem.attr('title');
item.link = pageUrl.href(elem.attr('href'));
let img = elem.querySelector('img');
item.picture = pageUrl.href(img.attr('src'));
item.subtitle = elem.querySelector('p').text.trim();
results.push(item);
}
return results;
}
makeURL(page) {
return this.url.replace('{0}', page + 1);
}
reload(_, cb) {
let page = 0;
this.fetch(this.makeURL(page)).then((results)=>{
this.page = page;
this.setData(results);
cb.apply(null);
}).catch(function(err) {
if (err instanceof Error)
err = glib.Error.new(305, err.message);
cb.apply(err);
});
return true;
}
loadMore(cb) {
let page = this.page + 1;
this.fetch(this.makeURL(page)).then((results)=>{
this.page = page;
this.appendData(results);
cb.apply(null);
}).catch(function(err) {
if (err instanceof Error)
err = glib.Error.new(305, err.message);
cb.apply(err);
});
return true;
}
}
module.exports = function(info) {
let data = info.toObject();
if (data.id === 'home')
return HomeCollection.new(data);
else return CategoryCollection.new(data);
};
|
"""RyuApp base class for FAUCET/Gauge."""
# Copyright (C) 2013 Nippon Telegraph and Telephone Corporation.
# Copyright (C) 2015 Brad Cowie, Christopher Lorier and Joe Stringer.
# Copyright (C) 2015 Research and Education Advanced Network New Zealand Ltd.
# Copyright (C) 2015--2019 The Contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import random
import signal
import sys
from ryu.base import app_manager
from ryu.controller import dpset, event
from ryu.controller.handler import set_ev_cls
from ryu.lib import hub
from faucet import valve_of
from faucet.valve_util import dpid_log, get_logger, get_setting
class ValveDeadThreadException(Exception):
"""Exception raised when a dead thread is detected."""
class EventReconfigure(event.EventBase):
"""Event sent to controller to cause config reload."""
class RyuAppBase(app_manager.RyuApp):
"""RyuApp base class for FAUCET/Gauge."""
OFP_VERSIONS = valve_of.OFP_VERSIONS
_CONTEXTS = {
'dpset': dpset.DPSet,
}
logname = ''
exc_logname = ''
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.dpset = kwargs['dpset']
self._reg = kwargs.get('reg', None)
self.config_file = self.get_setting('CONFIG', True)
self.stat_reload = self.get_setting('CONFIG_STAT_RELOAD')
loglevel = self.get_setting('LOG_LEVEL')
logfile = self.get_setting('LOG')
exc_logfile = self.get_setting('EXCEPTION_LOG')
self.logger = get_logger(
self.logname, logfile, loglevel, 0)
self.exc_logger = get_logger(
self.exc_logname, exc_logfile, logging.DEBUG, 1)
self.threads = []
self.thread_managers = []
self.prom_client = None
def _get_threads(self):
"""Return started threads."""
threads = self.threads.copy()
threads.extend(
[thread_manager.thread for thread_manager in self.thread_managers
if thread_manager and thread_manager.thread is not None])
return threads
def _check_thread_exception(self):
"""Check for a dead thread and cause/log an exception."""
dead_threads = [thread for thread in self._get_threads() if thread.dead]
if dead_threads:
for thread in dead_threads:
thread_name = getattr(thread, 'name', 'unknown')
# Inconveniently, eventlet and friends helpfully put the last
# exception on stderr but not anywhere else where we can log it.
self.logger.error(
'unexpected %s thread termination - check Ryu/process stderr log', thread_name)
# Raise an exception so that a dead thread is never silently ignored.
raise ValveDeadThreadException
def _thread_jitter(self, period, jitter=2):
"""Reschedule another thread with a random jitter and check for dead threads."""
hub.sleep(period + (random.random() * jitter))
# At least one thread needs to run to be able to detect that any of the others has died.
self._check_thread_exception()
def _thread_reschedule(self, ryu_event, period, jitter=2):
"""Trigger Ryu events periodically with a jitter.
Args:
ryu_event (ryu.controller.event.EventReplyBase): event to trigger.
period (int): how often to trigger.
"""
while True:
self.send_event(self.__class__.__name__, ryu_event)
self._thread_jitter(period, jitter)
def get_setting(self, setting, path_eval=False):
"""Return config setting prefaced with logname."""
return get_setting('_'.join((self.logname.upper(), setting)), path_eval)
def signal_handler(self, sigid, _):
"""Handle signals.
Args:
sigid (int): signal received.
"""
if sigid == signal.SIGINT:
self.close()
sys.exit(0)
if sigid == signal.SIGHUP:
self.send_event(self.__class__.__name__, EventReconfigure())
@staticmethod
def _config_files_changed():
"""Return True if config files changed."""
raise NotImplementedError # pragma: no cover
def _config_file_stat(self):
"""Periodically stat config files for any changes."""
while True:
if self._config_files_changed():
if self.stat_reload:
self.send_event(self.__class__.__name__, EventReconfigure())
self._thread_jitter(3)
def start(self):
"""Start controller."""
super().start()
if self.prom_client:
self.logger.info('version %s', self.prom_client.version)
if self.stat_reload:
self.logger.info('will automatically reload new config on changes')
self.reload_config(None)
self.threads.extend([
hub.spawn(thread) for thread in (self._config_file_stat,)])
signal.signal(signal.SIGHUP, self.signal_handler)
signal.signal(signal.SIGINT, self.signal_handler)
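# SIGHUP requests a config reload (EventReconfigure); SIGINT closes the controller and exits (see signal_handler).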
def reload_config(self, _ryu_event):
"""Handle reloading configuration."""
self.logger.info('Reloading configuration')
def _get_datapath_obj(self, datapath_objs, ryu_event):
"""Get datapath object to response to an event.
Args:
datapath_objs (dict): datapath objects indexed by DP ID.
ryu_event (ryu.controller.event.Event): event.
Returns:
valve, ryu_dp, msg: Nones, or datapath object, Ryu datapath, and Ryu msg (if any).
"""
datapath_obj = None
msg = None
if hasattr(ryu_event, 'msg'):
msg = ryu_event.msg
ryu_dp = msg.datapath
else:
ryu_dp = ryu_event.dp
dp_id = ryu_dp.id
if dp_id in datapath_objs:
datapath_obj = datapath_objs[dp_id]
else:
ryu_dp.close()
self.logger.error('%s: unknown datapath %s', str(ryu_event), dpid_log(dp_id))
return (datapath_obj, ryu_dp, msg)
@staticmethod
def _datapath_connect(_ryu_event):
raise NotImplementedError # pragma: no cover
@staticmethod
def _datapath_disconnect(_ryu_event):
raise NotImplementedError # pragma: no cover
@set_ev_cls(dpset.EventDP, dpset.DPSET_EV_DISPATCHER)
def connect_or_disconnect_handler(self, ryu_event):
"""Handle connection or disconnection of a datapath.
Args:
ryu_event (ryu.controller.dpset.EventDP): trigger.
"""
if ryu_event.enter:
self._datapath_connect(ryu_event)
else:
self._datapath_disconnect(ryu_event)
@set_ev_cls(dpset.EventDPReconnected, dpset.DPSET_EV_DISPATCHER)
def reconnect_handler(self, ryu_event):
"""Handle reconnection of a datapath.
Args:
ryu_event (ryu.controller.dpset.EventDPReconnected): trigger.
"""
self._datapath_connect(ryu_event)
|
from django import forms
from offices.models import OfficeType, Office, StateOfficeBase
class OfficeTypeForm(forms.ModelForm):
class Meta:
model = OfficeType
fields = ['title']
class OfficeForm(forms.ModelForm):
class Meta:
model = Office
fields = ['type_ref', 'title', 'website']
class FederalOfficeForm(OfficeForm):
pass
class StateOfficeBaseForm(OfficeForm):
class Meta:
model = StateOfficeBase
fields = OfficeForm.Meta.fields + ['state_ref']
|
import React from 'react';
import Image from 'next/image';
import Head from 'next/head';
import { AnimatePresence, motion } from 'framer-motion';
import '@fontsource/plus-jakarta-sans';
export default function Home() {
const [idea, setIdea] = React.useState('');
const getIdea = async () => {
const body = await (await fetch(`https://${window.location.hostname}/api/idea`)).json();
setIdea(body.idea);
};
return (<>
<Head>
<title>Rosebot</title>
<meta name="viewport" content="initial-scale=1.0, width=device-width" />
<meta property="og:title" content="Rosebot"/>
</Head>
<div className='min-h-screen h-full bg-gradient-to-b from-blue-800 to-rose-600 flex flex-col text-center
items-center text-slate-200 space-y-4 sm:space-y-6 lg:space-y-8 pt-32 px-10 tracking-tight'>
<Image src='/icon.svg' width='150' height='150' className='scale-75 sm:scale-90 lg:scale-100' alt=''></Image>
<div className='font-extrabold text-3xl sm:text-5xl lg:text-7xl tracking-tight'>
Hackathon Idea Generator
</div>
<div className='text-xl sm:text-2xl lg:text-3xl'>
Want more hackathon ideas? Let Rosebot think of ideas for you.
</div>
<div>
<motion.button
className='text-xl sm:text-3xl lg:text-5xl bg-rosered rounded-lg p-3 lg:p-4'
onClick={getIdea}
initial={{ scale: .8 }}
whileHover={{ scale: .9 }}
>Generate</motion.button>
</div>
<AnimatePresence exitBeforeEnter={true} initial={false}>{
idea != '' && <motion.div
key={idea}
initial={{ y: -30, opacity: 0 }}
animate={{ y: 0, opacity: 1 }}
exit={{ y: 30, opacity: 0, transition: { duration: .5 } }}
transition={{ duration: .5 }}
className='text-xl sm:text-3xl lg:text-5xl lg:w-[80rem] max-w-fit'
>{idea}</motion.div>}
</AnimatePresence>
</div>
</>
);
} |
import navegador5 as nv
import navegador5.url_tool as nvurl
import navegador5.head as nvhead
import navegador5.body as nvbody
import navegador5.cookie
import navegador5.cookie.cookie as nvcookie
import navegador5.cookie.rfc6265 as nvrfc6265
import navegador5.jq as nvjq
import navegador5.js_random as nvjr
import navegador5.file_toolset as nvft
import navegador5.shell_cmd as nvsh
import navegador5.html_tool as nvhtml
import navegador5.solicitud as nvsoli
import navegador5.content_parser
import navegador5.content_parser.amf0_decode as nvamf0
import navegador5.content_parser.amf3_decode as nvamf3
from lxml import etree
import lxml.html
import collections
import copy
import re
import urllib
import os
import json
import sys
import time
from xdict.jprint import pdir
from xdict.jprint import pobj
from xdict.jprint import print_j_str
from xdict import cmdline
import hashlib
import xdict.utils
nudipix_base_url = 'http://www.nudipixel.net'
taxonomy_url = 'http://www.nudipixel.net/taxonomy/'
locs_url = 'http://www.nudipixel.net/locations/'
try:
lns_dir = sys.argv[2]
except:
#lns_dir = '/media/root/6d1de738-2a56-4564-ab92-0401c7fe0f68/NUDIPIXLOC/'
lns_dir = '../LNS/'
else:
pass
#nvft.mkdir(lns_dir+'Images')
try:
work_dir = sys.argv[4]
except:
work_dir = '/media/root/d4f174a2-4959-4719-9086-7e2af8e2c79c/NUDILOC/'
else:
pass
try:
images_dir = sys.argv[6]
except:
images_dir = '../Images/'
else:
pass
try:
infos_dir = sys.argv[8]
except:
infos_dir = '../Infos/'
else:
pass
try:
thumbs_dir = sys.argv[10]
except:
thumbs_dir = '../Thumbs/'
else:
pass
#taxonomy_init
def taxonomy_init(base_url='http://www.nudipixel.net/'):
info_container = nvsoli.new_info_container()
info_container['base_url'] = base_url
info_container['method'] = 'GET'
req_head_str = '''Accept: application/json\r\nUser-Agent: Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.94 Safari/537.36\r\nAccept-Encoding: gzip,deflate,sdch\r\nAccept-Language: en;q=1.0, zh-CN;q=0.8'''
info_container['req_head'] = nvhead.build_headers_dict_from_str(req_head_str,'\r\n')
info_container['req_head']['Connection'] = 'close'
#### init records_container
records_container = nvsoli.new_records_container()
return((info_container,records_container))
def get_etree_root(info_container,**kwargs):
if('coding' in kwargs):
coding = kwargs['coding']
else:
coding = 'utf-8'
html_text = info_container['resp_body_bytes'].decode(coding)
root = etree.HTML(html_text)
return(root)
#
def get_country_urls(locs_url,countries_xpath='//div[@id="content"]/div/div/a[@href]'):
info_container,records_container = taxonomy_init()
info_container['url'] = locs_url
#info_container = nvsoli.walkon(info_container,records_container=records_container)
#info_container = nvsoli.auto_redireced(info_container,records_container)
####
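# Retry loop: each failed request backs off 10s longer than the last attempt (capped at 300s)
# before retrying; the same pattern is repeated for every page fetch below.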
sleep_cnt = 0
while(1):
sleep_cnt = sleep_cnt + 1
if(sleep_cnt > 30):
sleep_cnt = 30
else:
pass
try:
info_container = nvsoli.walkon(info_container,records_container=records_container)
info_container = nvsoli.auto_redireced(info_container,records_container)
except:
time.sleep(10 * sleep_cnt)
else:
break
####
root = get_etree_root(info_container)
eles = root.xpath(countries_xpath)
country_urls =[]
country_names = []
for i in range(0,eles.__len__()):
url = nudipix_base_url + eles[i].attrib['href']
country_urls.append(url)
name = eles[i].text
country_names.append(name)
return((country_urls,country_names))
def get_location_urls(country_url,locations_xpath='//ul[@class="country_dive_site_list"]/li/a[@href]'):
info_container,records_container = taxonomy_init()
info_container['url'] = country_url
#info_container = nvsoli.walkon(info_container,records_container=records_container)
#info_container = nvsoli.auto_redireced(info_container,records_container)
####
sleep_cnt = 0
while(1):
sleep_cnt = sleep_cnt + 1
if(sleep_cnt > 30):
sleep_cnt = 30
else:
pass
try:
info_container = nvsoli.walkon(info_container,records_container=records_container)
info_container = nvsoli.auto_redireced(info_container,records_container)
except:
time.sleep(10 * sleep_cnt)
else:
break
####
root = get_etree_root(info_container)
eles = root.xpath(locations_xpath)
location_urls =[]
location_names = []
for i in range(0,eles.__len__()):
url = nudipix_base_url + eles[i].attrib['href']
if('location' in url):
location_urls.append(url)
name = eles[i].text
location_names.append(name)
return((location_urls,location_names))
def creat_country_md(curls,cnames):
country_md = {}
for i in range (0,curls.__len__()):
abbrev = os.path.basename(curls[i].rstrip('/'))
country_md[abbrev] = cnames[i]
country_md[cnames[i]] = abbrev
return(country_md)
def creat_location_md(lurls,lnames):
location_md = {}
for i in range (0,lurls.__len__()):
abbrev = os.path.basename(lurls[i].rstrip('/'))
location_md[abbrev] = lnames[i]
location_md[lnames[i]] = abbrev
return(location_md)
def get_nav_urls(loc_url,nav_xpath='//p[@class="nav"]/a[@href]'):
info_container,records_container = taxonomy_init()
info_container['url'] = loc_url
#info_container = nvsoli.walkon(info_container,records_container=records_container)
#info_container = nvsoli.auto_redireced(info_container,records_container)
####
sleep_cnt = 0
while(1):
#####
#print('--------get_nav_urls--------')
#print(sleep_cnt)
#print(loc_url)
#print('--------get_nav_urls--------')
#####
sleep_cnt = sleep_cnt + 1
if(sleep_cnt > 30):
sleep_cnt = 30
else:
pass
try:
info_container = nvsoli.walkon(info_container,records_container=records_container)
info_container = nvsoli.auto_redireced(info_container,records_container)
except:
time.sleep(10 * sleep_cnt)
else:
break
####
root = get_etree_root(info_container)
eles = root.xpath(nav_xpath)
if(eles.__len__() == 0):
nav_urls = []
else:
max_page = eles[-2].text
max_page = int(max_page)
nav_urls = [loc_url]
tem = os.path.dirname(eles[-2].attrib['href'].rstrip('/'))
for i in range(2,max_page + 1):
url = nudipix_base_url + tem + '/' + str(i)
nav_urls.append(url)
return(nav_urls)
def get_locsp_urls(nav_url,locsp_xpah='//div[@class="thumbnail"]/div/a[@href]'):
info_container,records_container = taxonomy_init()
info_container['url'] = nav_url
####
#info_container = nvsoli.walkon(info_container,records_container=records_container)
#info_container = nvsoli.auto_redireced(info_container,records_container)
####
sleep_cnt = 0
while(1):
#print('>>>>>>>>>>>>>>>>>>>>')
#print(info_container['url'])
#print('<<<<<<<<<<<<<<<<<<<<')
sleep_cnt = sleep_cnt + 1
if(sleep_cnt > 30):
sleep_cnt = 30
else:
pass
try:
info_container = nvsoli.walkon(info_container,records_container=records_container)
info_container = nvsoli.auto_redireced(info_container,records_container)
except:
time.sleep(10 * sleep_cnt)
else:
break
####
root = get_etree_root(info_container)
eles = root.xpath(locsp_xpah)
locsp_urls = []
for i in range(0,eles.__len__()):
url = nudipix_base_url + eles[i].attrib['href']
if('location' in url):
locsp_urls.append(url)
return(locsp_urls)
def get_img_urls(locsp_url,img_xpath='//div[@class="thumbnail"]/div/a[@href]'):
####
#sys.stdout.flush()
#print(locsp_url)
#sys.stdout.flush()
####
info_container,records_container = taxonomy_init()
info_container['url'] = locsp_url
####
#info_container = nvsoli.walkon(info_container,records_container=records_container)
#info_container = nvsoli.auto_redireced(info_container,records_container)
####
####
sleep_cnt = 0
while(1):
#print('-------------------')
#print(locsp_url)
#print('-------------------')
sleep_cnt = sleep_cnt + 1
if(sleep_cnt > 30):
sleep_cnt = 30
else:
pass
try:
info_container = nvsoli.walkon(info_container,records_container=records_container)
info_container = nvsoli.auto_redireced(info_container,records_container)
except:
time.sleep(10 * sleep_cnt)
else:
break
####
root = get_etree_root(info_container)
eles = root.xpath(img_xpath)
img_urls = []
thumbnail_urls = []
####
####
for i in range(0,eles.__len__()):
url = nudipix_base_url + eles[i].attrib['href']
if(('photo' in url) & ( not ('photographer' in url))):
img_urls.append(url)
ele = eles[i].xpath('img')[0]
thumbnail_urls.append(nudipix_base_url +ele.attrib['src'])
nav_xpath='//p[@class="nav"]/a[@href]'
eles = root.xpath(nav_xpath)
if(eles.__len__() == 0):
pass
else:
max_page = eles[-2].text
max_page = int(max_page)
tem = os.path.dirname(eles[-2].attrib['href'].rstrip('/'))
for i in range(2,max_page + 1):
nav_url = nudipix_base_url + tem + '/' + str(i)
info_container,records_container = taxonomy_init()
info_container['url'] = nav_url
####
sleep_cnt = 0
while(1):
sleep_cnt = sleep_cnt + 1
if(sleep_cnt > 30):
sleep_cnt = 30
else:
pass
try:
info_container = nvsoli.walkon(info_container,records_container=records_container)
info_container = nvsoli.auto_redireced(info_container,records_container)
except:
time.sleep(10 * sleep_cnt)
else:
break
####
root = get_etree_root(info_container)
eles = root.xpath(img_xpath)
for j in range(0,eles.__len__()):
url = nudipix_base_url + eles[j].attrib['href']
if(('photo' in url) & ( not ('photographer' in url))):
img_urls.append(url)
ele = eles[j].xpath('img')[0]
thumbnail_urls.append(nudipix_base_url +ele.attrib['src'])
return((img_urls,thumbnail_urls))
def get_EXIF(EXIF_url):
info_container,records_container = taxonomy_init()
info_container['url'] = EXIF_url
####
####info_container = nvsoli.walkon(info_container,records_container=records_container)
####info_container = nvsoli.auto_redireced(info_container,records_container)
####
####
sleep_cnt = 0
while(1):
sleep_cnt = sleep_cnt + 1
if(sleep_cnt > 30):
sleep_cnt = 30
else:
pass
try:
info_container = nvsoli.walkon(info_container,records_container=records_container)
info_container = nvsoli.auto_redireced(info_container,records_container)
except:
time.sleep(10 * sleep_cnt)
else:
break
####
root = get_etree_root(info_container)
eles = root.xpath('//table[@class="exif"]/tr')
EXIF = {}
for i in range(0,eles.__len__()):
key = eles[i].xpath('td')[0].text.rstrip(':')
EXIF[key] = eles[i].xpath('td')[1].text
return(EXIF)
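# KPCOFGS helpers: build the taxonomic classification (Kingdom, Phylum, Class, Subclass,
# Infraclass, Order, Superfamily, Family, Genus, Species) scraped from each photo page.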
def init_KPCOFGS(rsltin='path',**kwargs):
if('names' in kwargs):
kpcofgs_names = kwargs['names']
else:
kpcofgs_names = ['Kingdom','Phylum','Class','Subclass','Infraclass','Order','Superfamily','Family','Genus','Species']
pobj(kpcofgs_names)
if(rsltin == 'path'):
rslt = ''
for i in range(1,kpcofgs_names.__len__()):
rslt = rslt + '/'
return(rslt)
else:
rslt = {}
for each in kpcofgs_names:
rslt[each] = ''
return(rslt)
def get_KPCOFGS(tbodys,**kwargs):
if('names' in kwargs):
kpcofgs_names = kwargs['names']
else:
kpcofgs_names = ['Kingdom','Phylum','Class','Subclass','Infraclass','Order','Superfamily','Family','Genus','Species']
kpcofgs = tbodys[1].getchildren()
ks = init_KPCOFGS(rsltin='dict',names=kpcofgs_names)
for i in range(0,kpcofgs.__len__()):
ks[kpcofgs[i].xpath('td')[0].text.rstrip(':')] = kpcofgs[i].xpath('td/a')[0].text
if('rsltin' in kwargs):
rsltin = kwargs['rsltin']
else:
rsltin = 'path'
if(rsltin == 'path'):
path = ks[kpcofgs_names[0]]
for i in range(1,kpcofgs_names.__len__()):
path = path + '/' + ks[kpcofgs_names[i]]
return(path)
else:
return(ks)
def get_img_info(img_url,thumbnail_url,country_abbrev,location,base_url = nudipix_base_url):
info_container,records_container = taxonomy_init()
info_container['url'] = img_url
####
#sys.stdout.flush()
#print('---------------')
#print(img_url)
#sys.stdout.flush()
####
#info_container = nvsoli.walkon(info_container,records_container=records_container)
#info_container = nvsoli.auto_redireced(info_container,records_container)
####
sleep_cnt = 0
while(1):
sleep_cnt = sleep_cnt + 1
if(sleep_cnt > 30):
sleep_cnt = 30
else:
pass
try:
info_container = nvsoli.walkon(info_container,records_container=records_container)
info_container = nvsoli.auto_redireced(info_container,records_container)
except:
time.sleep(10 * sleep_cnt)
else:
break
####
img_root = get_etree_root(info_container)
tbodys = img_root.xpath('//table')
sp = img_root.xpath('//div/div/h2/a')[0].attrib['href'].rstrip('/')
sp_name = os.path.basename(sp)
info_raw = tbodys[0].getchildren()
info = {}
for i in range(0,info_raw.__len__()):
key = info_raw[i].xpath('td')[0].text.rstrip(':')
if(key == 'Camera'):
info[key] = info_raw[i].xpath('td')[1].text
EXIF_url = nudipix_base_url + info_raw[i].xpath('td/span/a')[0].attrib['href']
info['EXIF'] = get_EXIF(EXIF_url)
elif(key == 'Taken on'):
info[key] = info_raw[i].xpath('td')[1].text
elif(key == 'Viewed'):
info[key] = info_raw[i].xpath('td')[1].text
elif(key == 'Posted'):
info[key] = info_raw[i].xpath('td')[1].text
elif(key == 'Updated'):
info[key] = info_raw[i].xpath('td')[1].text
else:
info[key] = info_raw[i].xpath('td/a')[0].text
kpcofgs = get_KPCOFGS(tbodys,rsltin='dict')
info['kpcofgs'] = kpcofgs
img_real_url = nudipix_base_url + img_root.xpath('//div/img')[0].attrib['src']
try:
img_verifier = img_root.xpath('//div/img')[1].attrib['title']
except:
img_verifier = ''
else:
pass
sha1 = hashlib.sha1(img_real_url.encode('utf-8')).hexdigest()
img_suffix = os.path.basename(img_real_url).split('.')[-1]
img_name = sp_name + '_' + sha1 + '.' + img_suffix
thumbnail_suffix = os.path.basename(thumbnail_url).split('.')[-1]
thumbnail_name = sp_name + '_' + sha1 + '.thumbnail.' + thumbnail_suffix
info_name = sp_name + '_' + sha1 + '.dict'
info['img_url'] = img_real_url
info['verifier'] = img_verifier
info['img_name'] = images_dir + img_name
info['index'] = sha1
info['thumbnail_url'] = thumbnail_url
info['thumbnail_name'] = thumbs_dir + thumbnail_name
info['info_name'] = infos_dir + info_name
info['country'] = country_abbrev
info['location'] = location
####
#print(img_real_url)
try:
info['seq'] = int(os.path.basename(img_real_url).split('.')[0])
except:
info['seq'] = -1
else:
pass
#print('-------------')
return(info)
#####################
try:
content = nvft.read_file_content(fn = '../seq.record',op='r')
except:
istart = 0
jstart = 0
kstart = 0
xstart = 0
ystart = 0
else:
istart = json.loads(content)['istart']
jstart = json.loads(content)['jstart']
kstart = json.loads(content)['kstart']
xstart = json.loads(content)['xstart']
ystart = json.loads(content)['ystart']
try:
content_curls = nvft.read_file_content(fn = '../curls.dict',op='r')
content_cnames = nvft.read_file_content(fn = '../cnames.dict',op='r')
except:
curls,cnames = get_country_urls(locs_url)
nvft.write_to_file(fn='../curls.dict',content=json.dumps(curls),op='w+')
nvft.write_to_file(fn='../cnames.dict',content=json.dumps(cnames),op='w+')
else:
curls = json.loads(content_curls)
cnames = json.loads(content_cnames)
try:
content_country_md = nvft.read_file_content(fn = '../country.dict',op='r')
except:
country_md = creat_country_md(curls,cnames)
nvft.write_to_file(fn='../country.dict',content=json.dumps(country_md),op='w+')
else:
country_md = json.loads(content_country_md)
total = 0
for i in range (istart,curls.__len__()):
#
sys.stdout.flush()
print('curl i:')
print(i)
print(curls[i])
print('curl i:')
sys.stdout.flush()
#
country_dir = lns_dir + 'Images/' + cnames[i]
country_abbrev = os.path.basename(curls[i].rstrip('/'))
nvft.mkdir(country_dir)
####
lurls,lnames = get_location_urls(curls[i])
#
#sys.stdout.flush()
#print("all lurls")
#print(lurls)
#print("all lurls")
#sys.stdout.flush()
#
####
try:
content_location_md = nvft.read_file_content(fn='../'+country_abbrev+'.loc.dict',op='r')
except:
location_md = creat_location_md(lurls,lnames)
nvft.write_to_file(fn='../'+country_abbrev+'.loc.dict',content=json.dumps(location_md),op='w+')
else:
location_md = json.loads(content_location_md)
####
if(i == istart):
pass
else:
jstart = 0
for j in range(jstart,lurls.__len__()):
#
sys.stdout.flush()
print('lurl j:')
print(j)
print(lurls[j])
print('lurl j:')
sys.stdout.flush()
#
loc_dir = country_dir + '/' + lnames[j]
nav_urls = get_nav_urls(lurls[j])
nvft.mkdir(loc_dir)
if(j == jstart):
pass
else:
kstart = 0
for k in range(kstart,nav_urls.__len__()):
#
sys.stdout.flush()
print('nav_url k:')
print(k)
print(nav_urls[k])
print('nav_url k:')
sys.stdout.flush()
#
nav_url = nav_urls[k]
locsp_urls = get_locsp_urls(nav_url)
####
#sys.stdout.flush()
#print(nav_url)
#pobj(locsp_urls)
#sys.stdout.flush()
####
if(k == kstart):
pass
else:
xstart = 0
for x in range(xstart,locsp_urls.__len__()):
locsp_url = locsp_urls[x]
img_urls,thumbnail_urls = get_img_urls(locsp_url)
####
sys.stdout.flush()
print('locsp_url x:')
print(x)
print(locsp_url)
print('locsp_url x:')
#pobj(img_urls)
sys.stdout.flush()
####
if(x == xstart):
pass
else:
ystart = 0
for y in range(ystart,img_urls.__len__()):
#
sys.stdout.flush()
print('img_url y:')
print(y)
print(img_urls[y])
print('img_url y:')
sys.stdout.flush()
#
img_url = img_urls[y]
thumbnail_url = thumbnail_urls[y]
location = os.path.basename(locsp_url)
info = get_img_info(img_url,thumbnail_url,country_abbrev,location)
nvft.write_to_file(fn='../phnum.record',content=str(info['seq']),op='w+')
if(info['seq'] > total):
total = info['seq']
nvft.write_to_file(fn=info['info_name'],content=json.dumps(info),op='w+')
info_container,records_container = taxonomy_init()
info_container['url'] = info['img_url']
####
sleep_cnt = 0
while(1):
sleep_cnt = sleep_cnt + 1
if(sleep_cnt > 30):
sleep_cnt = 30
else:
pass
try:
info_container = nvsoli.walkon(info_container,records_container=records_container)
info_container = nvsoli.auto_redireced(info_container,records_container)
except:
time.sleep(10 * sleep_cnt)
else:
break
####
#sys.stdout.flush()
#print(info['img_name'])
#print(info['seq'])
#print(info['index'])
#print(info['img_url'])
#print(info_container['resp_body_bytes'][:50])
#sys.stdout.flush()
####
nvft.write_to_file(fn=info['img_name'],content=info_container['resp_body_bytes'],op='wb+')
info_container,records_container = taxonomy_init()
info_container['url'] = info['thumbnail_url']
####
#info_container = nvsoli.walkon(info_container,records_container=records_container)
#info_container = nvsoli.auto_redireced(info_container,records_container)
sleep_cnt = 0
while(1):
print("================")
print(info_container['url'])
print("================")
sleep_cnt = sleep_cnt + 1
if(sleep_cnt > 30):
sleep_cnt = 30
else:
pass
try:
info_container = nvsoli.walkon(info_container,records_container=records_container)
info_container = nvsoli.auto_redireced(info_container,records_container)
except:
time.sleep(10 * sleep_cnt)
else:
break
####
try:
nvft.write_to_file(fn=info['thumbnail_name'],content=info_container['resp_body_bytes'],op='wb+')
except:
print(info_container['resp_body_bytes'])
exit()
else:
pass
nvft.write_to_file(fn='../seq.record',content=json.dumps({'istart':i,'jstart':j,'kstart':k,'xstart':x,'ystart':y}),op='w+')
shell_CMDs = {}
shell_CMDs[1] = 'ln -s ' + info['img_name'].replace('../',work_dir) + ' ' + loc_dir
nvsh.pipe_shell_cmds(shell_CMDs)
nvft.write_to_file(fn='../total.record',content=str(total),op='w+')
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
wz_compat/migrate.py - last updated 2019-09-28
Use data from the database of a previous year to get a starting point
for a new year.
==============================
Copyright 2019 Michael Towers
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# Messages
_BADCLASSNAME = "Ungültiger Klassenname: {klass}"
_PUPIL_LEFT = "Abgemeldeter Schüler in Klasse {klass}: {name}"
_BAD_STREAM_MAX_YEAR = "Ungültiger Eintrag in STREAM_MAX_YEAR: {val}"
from wz_core.db import DB
from wz_core.pupils import Pupils, PupilData
## First (official) day of school year
# month1 = CONF.MISC.SCHOOLYEAR_MONTH_1.nat (1, 12)
# year1 = schoolyear if month1 == 1 else schoolyear - 1
# date0 = '{:04d}-{:02d}-01'.format (year1, month1)
def migratePupils (schoolyear):
"""Read the pupil data from the previous year and build a preliminary
database table for the current (new) year, migrating the class
names according to <CONF.MISC.MIGRATE_CLASS>
"""
# Get pupil data from previous year
pdb = Pupils (schoolyear-1)
# Maximum year number for various streams:
maxyear = {}
try:
for x in CONF.MISC.STREAM_MAX_YEAR:
k, v = x.split (':')
maxyear [k] = v
except:
REPORT.Fail (_BAD_STREAM_MAX_YEAR, val=x)
rows = []
for c_old in pdb.classes ():
# Increment the year part of the class name
try:
cnum = int (c_old [:2]) + 1
ctag = c_old [2:]
except:
REPORT.Fail (_BADCLASSNAME, klass=c_old)
c_new = '%02d%s' % (cnum, ctag)
for prow in pdb.classPupils (c_old):
left = False
if prow ['EXIT_D']:
# If there is an exit date, assume the pupil has left.
left = True
else:
try:
mxy = maxyear [prow ['STREAM']]
except:
mxy = maxyear ['']
if cnum > int (mxy):
left = True
if left:
REPORT.Info (_PUPIL_LEFT, klass=c_old, name=prow.name ())
continue
prow ['CLASS'] = c_new
rows.append (prow)
# Create the database table PUPILS from the loaded pupil data.
db = DB (schoolyear, flag='CANCREATE')
# Use (CLASS, PSORT) as primary key, with additional index on PID.
# This makes quite a small db (without rowid).
db.makeTable2 ('PUPILS', PupilData.fields (), data=rows,
force=True,
pk=('CLASS', 'PSORT'), index=('PID',))
def test_01 ():
schoolyear = 2017
REPORT.PRINT ("Pupil table created:", migratePupils (schoolyear))
|
const createImage = url =>
new Promise((resolve, reject) => {
const image = new Image()
image.addEventListener('load', () => resolve(image))
image.addEventListener('error', error => reject(error))
image.setAttribute('crossOrigin', 'anonymous') // needed to avoid cross-origin issues on CodeSandbox
image.src = url
// console.log(image.src)
})
function getRadianAngle(degreeValue) {
return (degreeValue * Math.PI) / 180
}
/**
* This function was adapted from the one in the ReadMe of https://github.com/DominicTobias/react-image-crop
* @param {string} imageSrc - Image source URL
* @param {Object} pixelCrop - pixelCrop object provided by react-easy-crop
* @param {number} rotation - optional rotation in degrees
*/
export default async function getCroppedImg(imageSrc, pixelCrop, rotation = 0) {
const image = await createImage(imageSrc)
const canvas = document.createElement('canvas')
const ctx = canvas.getContext('2d')
const maxSize = Math.max(image.width, image.height)
const safeArea = 2 * ((maxSize / 2) * Math.sqrt(2))
// set each dimension to double the largest dimension to allow a safe area for the
// image to rotate in without being clipped by canvas context
canvas.width = safeArea
canvas.height = safeArea
// translate canvas context to a central location on image to allow rotating around the center.
ctx.translate(safeArea / 2, safeArea / 2)
ctx.rotate(getRadianAngle(rotation))
ctx.translate(-safeArea / 2, -safeArea / 2)
// draw rotated image and store data.
ctx.drawImage(
image,
safeArea / 2 - image.width * 0.5,
safeArea / 2 - image.height * 0.5
)
const data = ctx.getImageData(0, 0, safeArea, safeArea)
// set canvas width to final desired crop size - this will clear existing context
canvas.width = pixelCrop.width
canvas.height = pixelCrop.height
// paste the generated rotated image with the correct offsets for the x,y crop values.
ctx.putImageData(
data,
Math.round(0 - safeArea / 2 + image.width * 0.5 - pixelCrop.x),
Math.round(0 - safeArea / 2 + image.height * 0.5 - pixelCrop.y)
)
// As Base64 string
// return canvas.toDataURL('image/jpeg');
// As a blob
return new Promise(resolve => {
canvas.toBlob(file => {
// console.log(blobo)
resolve(URL.createObjectURL(file))
}, 'image/jpeg')
})
}
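// Example usage (sketch): `croppedAreaPixels` is assumed to come from
// react-easy-crop's onCropComplete callback; the returned object URL can be
// used directly as an <img> src.
//   const objectUrl = await getCroppedImg(imageSrc, croppedAreaPixels, 90)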
|
import tkinter as tk
from tkinter import messagebox
from tkinter import ttk
from tkinter import font
from ttkthemes import ThemedTk
import tweepy
import src.GUI._stream_tab as _stream_tab
import src.GUI._search_tab as _search_tab
import src.GUI._query_tab as _query_tab
from src.collection.collect import get_replies
from src.url_to_id import url_to_id
from src.collection.twitter.connection import twitter_setup
from src.collection.twitter.credentials import CONSUMER_SECRET, ACCESS_TOKEN, ACCESS_SECRET
from src.strip_smileys import strip_smileys
from src.detect import detect_tweets
url0=""
class Interface(tk.Frame):
def __init__(self, parent = None, **kwargs):
super().__init__(parent)
self.parent = parent
self.create_widgets()
#self.configure(bg="blue")
self.pack(fill = 'both', expand = 1)
def create_widgets(self):
self.parent.title("Twinsult")
self.create_notebook()
def create_search_tab(self, nb):
_search_tab.create_search_tab(self, nb)
def create_stream_tab(self, nb):
_stream_tab.create_stream_tab(self, nb)
def create_query_tab(self,nb):
_query_tab.create_query_tab(self, nb)
def create_notebook(self):
# Create a notebook in the main window
nb = ttk.Notebook(self)
#print(nb.winfo_class() )
self.create_search_tab(nb)
self.create_stream_tab(nb)
self.create_query_tab(nb)
nb.pack(expand = 1, fill = 'both')
root = ThemedTk(theme="arc")
#Prints available fonts on machine
#print(font.families())
interface = Interface(parent = root)
w, h = root.winfo_screenwidth(), root.winfo_screenheight()
root.geometry("%dx%d+0+0" % (w, h-100))
#Add a title and a favicon to the root window.
icon = tk.PhotoImage(file=r".\src\GUI\twinsult.ico")
#The root window takes up all available space.
root.columnconfigure(0, weight=1)
root.rowconfigure(0, weight=1)
root.tk.call("wm", "iconphoto", root._w, icon)
interface.mainloop()
|
/*
* 2007-2014 PrestaShop
*
* NOTICE OF LICENSE
*
* This source file is subject to the Academic Free License (AFL 3.0)
* that is bundled with this package in the file LICENSE.txt.
* It is also available through the world-wide-web at this URL:
* http://opensource.org/licenses/afl-3.0.php
* If you did not receive a copy of the license and are unable to
* obtain it through the world-wide-web, please send an email
* to [email protected] so we can send you a copy immediately.
*
* DISCLAIMER
*
* Do not edit or add to this file if you wish to upgrade PrestaShop to newer
* versions in the future. If you wish to customize PrestaShop for your
* needs please refer to http://www.prestashop.com for more information.
*
* @author PrestaShop SA <[email protected]>
* @copyright 2007-2014 PrestaShop SA
* @license http://opensource.org/licenses/afl-3.0.php Academic Free License (AFL 3.0)
* International Registered Trademark & Property of PrestaShop SA
*/
//global variables
var responsiveflag = false;
$(document).ready(function(){
highdpiInit();
responsiveResize();
$(window).resize(responsiveResize);
if (navigator.userAgent.match(/Android/i))
{
var viewport = document.querySelector('meta[name="viewport"]');
viewport.setAttribute('content', 'initial-scale=1.0,maximum-scale=1.0,user-scalable=0,width=device-width,height=device-height');
window.scrollTo(0, 1);
}
blockHover();
if (typeof quickView !== 'undefined' && quickView)
quick_view();
dropDown();
if (typeof page_name != 'undefined' && !in_array(page_name, ['index', 'product']))
{
bindGrid();
$(document).on('change', '.selectProductSort', function(e){
if (typeof request != 'undefined' && request)
var requestSortProducts = request;
var splitData = $(this).val().split(':');
if (typeof requestSortProducts != 'undefined' && requestSortProducts)
document.location.href = requestSortProducts + ((requestSortProducts.indexOf('?') < 0) ? '?' : '&') + 'orderby=' + splitData[0] + '&orderway=' + splitData[1];
});
$(document).on('change', 'select[name="n"]', function(){
$(this.form).submit();
});
$(document).on('change', 'select[name="manufacturer_list"], select[name="supplier_list"]', function() {
if (this.value != '')
location.href = this.value;
});
$(document).on('change', 'select[name="currency_payement"]', function(){
setCurrency($(this).val());
});
}
$(document).on('click', '.back', function(e){
e.preventDefault();
history.back();
});
jQuery.curCSS = jQuery.css;
if (!!$.prototype.cluetip)
$('a.cluetip').cluetip({
local:true,
cursor: 'pointer',
dropShadow: false,
dropShadowSteps: 0,
showTitle: false,
tracking: true,
sticky: false,
mouseOutClose: true,
fx: {
open: 'fadeIn',
openSpeed: 'fast'
}
}).css('opacity', 0.8);
if (!!$.prototype.fancybox)
$.extend($.fancybox.defaults.tpl, {
closeBtn : '<a title="' + FancyboxI18nClose + '" class="fancybox-item fancybox-close" href="javascript:;"></a>',
next : '<a title="' + FancyboxI18nNext + '" class="fancybox-nav fancybox-next" href="javascript:;"><span></span></a>',
prev : '<a title="' + FancyboxI18nPrev + '" class="fancybox-nav fancybox-prev" href="javascript:;"><span></span></a>'
});
});
function highdpiInit()
{
if($('.replace-2x').css('font-size') == "1px")
{
var els = $("img.replace-2x").get();
for(var i = 0; i < els.length; i++)
{
src = els[i].src;
extension = src.substr( (src.lastIndexOf('.') +1) );
src = src.replace("." + extension, "2x." + extension);
var img = new Image();
img.src = src;
img.height != 0 ? els[i].src = src : els[i].src = els[i].src;
}
}
}
// Used to compensate for a Chrome/Safari bug (they do not account for the scroll bar in the width)
function scrollCompensate()
{
var inner = document.createElement('p');
inner.style.width = "100%";
inner.style.height = "200px";
var outer = document.createElement('div');
outer.style.position = "absolute";
outer.style.top = "0px";
outer.style.left = "0px";
outer.style.visibility = "hidden";
outer.style.width = "200px";
outer.style.height = "150px";
outer.style.overflow = "hidden";
outer.appendChild(inner);
document.body.appendChild(outer);
var w1 = inner.offsetWidth;
outer.style.overflow = 'scroll';
var w2 = inner.offsetWidth;
if (w1 == w2) w2 = outer.clientWidth;
document.body.removeChild(outer);
return (w1 - w2);
}
function responsiveResize()
{
compensante = scrollCompensate();
if (($(window).width()+scrollCompensate()) <= 767 && responsiveflag == false)
{
accordion('enable');
accordionFooter('enable');
responsiveflag = true;
}
else if (($(window).width()+scrollCompensate()) >= 768)
{
accordion('disable');
accordionFooter('disable');
responsiveflag = false;
}
if (typeof page_name != 'undefined' && in_array(page_name, ['category']))
resizeCatimg();
}
function blockHover(status)
{
$(document).off('mouseenter').on('mouseenter', '.product_list.grid li.ajax_block_product .product-container', function(e){
if ($('body').find('.container').width() == 1170)
{
var pcHeight = $(this).parent().outerHeight();
var pcPHeight = $(this).parent().find('.button-container').outerHeight() + $(this).parent().find('.comments_note').outerHeight() + $(this).parent().find('.functional-buttons').outerHeight();
$(this).parent().addClass('hovered').css({'height':pcHeight + pcPHeight, 'margin-bottom':pcPHeight * (-1)});
}
});
$(document).off('mouseleave').on('mouseleave', '.product_list.grid li.ajax_block_product .product-container', function(e){
if ($('body').find('.container').width() == 1170)
$(this).parent().removeClass('hovered').css({'height':'auto', 'margin-bottom':'0'});
});
}
function quick_view()
{
$(document).on('click', '.quick-view:visible, .quick-view-mobile:visible', function(e)
{
e.preventDefault();
var url = this.rel;
if (url.indexOf('?') != -1)
url += '&';
else
url += '?';
if (!!$.prototype.fancybox)
$.fancybox({
'padding': 0,
'width': 1087,
'height': 610,
'type': 'iframe',
'href': url + 'content_only=1'
});
});
}
function bindGrid()
{
var view = $.totalStorage('display');
if (!view && (typeof displayList != 'undefined') && displayList)
view = 'list';
if (view && view != 'grid')
display(view);
else
$('.display').find('li#grid').addClass('selected');
$(document).on('click', '#grid', function(e){
e.preventDefault();
display('grid');
});
$(document).on('click', '#list', function(e){
e.preventDefault();
display('list');
});
}
function display(view)
{
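	// Rebuild each product <li>'s inner markup for the requested view ('list' or 'grid')
	// and remember the choice via $.totalStorage('display', ...).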
if (view == 'list')
{
$('ul.product_list').removeClass('grid').addClass('list row');
$('.product_list > li').removeClass('col-xs-12 col-sm-6 col-md-4').addClass('col-xs-12');
$('.product_list > li').each(function(index, element) {
html = '';
html = '<div class="product-container"><div class="row">';
html += '<div class="left-block col-xs-4 col-xs-5 col-md-4">' + $(element).find('.left-block').html() + '</div>';
html += '<div class="center-block col-xs-4 col-xs-7 col-md-4">';
html += '<div class="product-flags">'+ $(element).find('.product-flags').html() + '</div>';
html += '<h5 itemprop="name">'+ $(element).find('h5').html() + '</h5>';
var rating = $(element).find('.comments_note').html(); // check : rating
if (rating != null) {
html += '<div itemprop="aggregateRating" itemscope itemtype="http://schema.org/AggregateRating" class="comments_note">'+ rating + '</div>';
}
html += '<p class="product-desc">'+ $(element).find('.product-desc').html() + '</p>';
var colorList = $(element).find('.color-list-container').html();
if (colorList != null) {
html += '<div class="color-list-container">'+ colorList +'</div>';
}
var availability = $(element).find('.availability').html(); // check : catalog mode is enabled
if (availability != null) {
html += '<span class="availability">'+ availability +'</span>';
}
html += '</div>';
html += '<div class="right-block col-xs-4 col-xs-12 col-md-4"><div class="right-block-content row">';
var price = $(element).find('.content_price').html(); // check : catalog mode is enabled
if (price != null) {
html += '<div class="content_price col-xs-5 col-md-12">'+ price + '</div>';
}
html += '<div class="button-container col-xs-7 col-md-12">'+ $(element).find('.button-container').html() +'</div>';
html += '<div class="functional-buttons clearfix col-sm-12">' + $(element).find('.functional-buttons').html() + '</div>';
html += '</div>';
html += '</div></div>';
$(element).html(html);
});
$('.display').find('li#list').addClass('selected');
$('.display').find('li#grid').removeAttr('class');
$.totalStorage('display', 'list');
}
else
{
$('ul.product_list').removeClass('list').addClass('grid row');
$('.product_list > li').removeClass('col-xs-12').addClass('col-xs-12 col-sm-6 col-md-4');
$('.product_list > li').each(function(index, element) {
html = '';
html += '<div class="product-container">';
html += '<div class="left-block">' + $(element).find('.left-block').html() + '</div>';
html += '<div class="right-block">';
html += '<div class="product-flags">'+ $(element).find('.product-flags').html() + '</div>';
html += '<h5 itemprop="name">'+ $(element).find('h5').html() + '</h5>';
var rating = $(element).find('.comments_note').html(); // check : rating
if (rating != null) {
html += '<div itemprop="aggregateRating" itemscope itemtype="http://schema.org/AggregateRating" class="comments_note">'+ rating + '</div>';
}
html += '<p itemprop="description" class="product-desc">'+ $(element).find('.product-desc').html() + '</p>';
var price = $(element).find('.content_price').html(); // check : catalog mode is enabled
if (price != null) {
html += '<div class="content_price">'+ price + '</div>';
}
html += '<div itemprop="offers" itemscope itemtype="http://schema.org/Offer" class="button-container">'+ $(element).find('.button-container').html() +'</div>';
var colorList = $(element).find('.color-list-container').html();
if (colorList != null) {
html += '<div class="color-list-container">'+ colorList +'</div>';
}
var availability = $(element).find('.availability').html(); // check : catalog mode is enabled
if (availability != null) {
html += '<span class="availability">'+ availability +'</span>';
}
html += '</div>';
html += '<div class="functional-buttons clearfix">' + $(element).find('.functional-buttons').html() + '</div>';
html += '</div>';
$(element).html(html);
});
$('.display').find('li#grid').addClass('selected');
$('.display').find('li#list').removeAttr('class');
$.totalStorage('display', 'grid');
}
}
function dropDown()
{
elementClick = '#header .current';
elementSlide = 'ul.toogle_content';
activeClass = 'active';
$(elementClick).on('click', function(e){
e.stopPropagation();
var subUl = $(this).next(elementSlide);
if(subUl.is(':hidden'))
{
subUl.slideDown();
$(this).addClass(activeClass);
}
else
{
subUl.slideUp();
$(this).removeClass(activeClass);
}
$(elementClick).not(this).next(elementSlide).slideUp();
$(elementClick).not(this).removeClass(activeClass);
e.preventDefault();
});
$(elementSlide).on('click', function(e){
e.stopPropagation();
});
$(document).on('click', function(e){
e.stopPropagation();
var elementHide = $(elementClick).next(elementSlide);
$(elementHide).slideUp();
$(elementClick).removeClass('active');
});
}
function accordionFooter(status)
{
if(status == 'enable')
{
$('#footer .footer-block h4').on('click', function(){
$(this).toggleClass('active').parent().find('.toggle-footer').stop().slideToggle('medium');
})
$('#footer').addClass('accordion').find('.toggle-footer').slideUp('fast');
}
else
{
$('.footer-block h4').removeClass('active').off().parent().find('.toggle-footer').removeAttr('style').slideDown('fast');
$('#footer').removeClass('accordion');
}
}
function accordion(status)
{
leftColumnBlocks = $('#left_column');
if(status == 'enable')
{
$('#right_column .block .title_block, #left_column .block .title_block, #left_column #newsletter_block_left h4').on('click', function(){
$(this).toggleClass('active').parent().find('.block_content').stop().slideToggle('medium');
})
$('#right_column, #left_column').addClass('accordion').find('.block .block_content').slideUp('fast');
}
else
{
$('#right_column .block .title_block, #left_column .block .title_block, #left_column #newsletter_block_left h4').removeClass('active').off().parent().find('.block_content').removeAttr('style').slideDown('fast');
$('#left_column, #right_column').removeClass('accordion');
}
}
function resizeCatimg()
{
var div = $('.cat_desc').parent('div');
var image = new Image;
$(image).load(function(){
var width = image.width;
var height = image.height;
var ratio = parseFloat(height / width);
var calc = Math.round(ratio * parseInt(div.outerWidth(false)));
div.css('min-height', calc);
});
if (div.length)
image.src = div.css('background-image').replace(/url\("?|"?\)$/ig, '');
} |
//
//
// MIT License
//
// Copyright (c) 2017 Stellacore Corporation.
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject
// to the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
// KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
// WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
// BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
// AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
// IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//
//
#ifndef prob_Frac9_INCL_
#define prob_Frac9_INCL_
/*! \file
\brief Declarations for prob::Frac9
*/
#include "libprob/CdfForward.h"
#include "libprob/CdfInverse.h"
#include <array>
#include <string>
namespace prob
{
/*! \brief Fractile values at 9 key points
\par Example
\dontinclude testprob/uFrac9.cpp
\skip ExampleStart
\until ExampleEnd
*/
class Frac9
{
public: // data
std::array<double, 9u> theValues;
public: // static methods
//! Fractile values associated with statistics
constexpr
static
std::array<double, 9u>
fractiles
()
{
return std::array<double, 9u>
{{ .00, .01, .10, .25, .50, .75, .90, .99, 1.0 }};
}
public: // methods
//! default null constructor
Frac9
();
//! Extract stats from forward cumulative distribution
explicit
Frac9
( CdfForward const & cdfFwd
);
//! Extract stats from inverse cumulative distribution
explicit
Frac9
( CdfInverse const & cdfInv
);
//! Value ctor
explicit
Frac9
( std::array<double, 9u> const & values
);
// copy constructor -- compiler provided
// assignment operator -- compiler provided
// destructor -- compiler provided
//! Check if instance is valid
inline
bool
isValid
() const;
//! Direct access to theValues
inline
double const &
operator[]
( size_t const & ndx
) const;
//! Convenience
inline
double
median
() const;
//! Descriptive information about this instance.
std::string
infoString
( std::string const & title = std::string()
, std::string const & fmt = std::string("%7.3f")
, std::string const & sep = std::string("\n")
) const;
//! Convenience call
std::string
infoStringOneLine
( std::string const & title = std::string()
, std::string const & fmt = std::string("%7.3f")
) const;
};
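// Example (sketch, assuming a populated CdfForward instance `cdfFwd`):
//   prob::Frac9 const frac(cdfFwd);
//   double const med(frac.median()); // same value as frac[4] (the .50 fractile)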
}
// Inline definitions
#include "libprob/Frac9.inl"
#endif // prob_Frac9_INCL_
|
import { Row, Col } from '../../model';
import {
closest as domClosest,
event as domEvent
} from 'min-dom';
const TARGET_SELECTOR =
`.dmn-decision-table-container td,
.dmn-decision-table-container th`;
export default class DragAndDrop {
constructor(eventBus, renderer, modeling, sheet) {
this._eventBus = eventBus;
this._renderer = renderer;
this._modeling = modeling;
this._sheet = sheet;
this._dragContext = null;
eventBus.on('table.destroy', () => {
this._unbindListeners();
});
}
_bindListeners() {
domEvent.bind(document, 'dragover', this.handleDragOver);
domEvent.bind(document, 'drop', this.handleDrop);
domEvent.bind(document, 'dragend', this.handleDragEnd);
}
_unbindListeners() {
domEvent.unbind(document, 'dragover', this.handleDragOver);
domEvent.unbind(document, 'drop', this.handleDrop);
domEvent.unbind(document, 'dragend', this.handleDragEnd);
}
_emit(eventName, originalEvent) {
return this._eventBus.fire(eventName, {
dragContext: this._dragContext,
originalEvent
});
}
startDrag(element, event) {
stopEvent(event, true);
event.dataTransfer.effectAllowed = 'move';
// QUIRK: Firefox won't fire events unless data was set
if (event.dataTransfer.setData) {
event.dataTransfer.setData('text', '__DUMMY');
}
this._dragContext = {
draggedElement: element
};
this._bindListeners();
this._emit('dragAndDrop.dragStart', event);
}
handleDragOver = (event) => {
// we're taking over (!)
stopEvent(event);
const targetEl = event.target;
const cellEl = domClosest(targetEl, TARGET_SELECTOR, true);
let allowed = !!cellEl;
const {
hoverEl
} = this._dragContext;
// drag leave
if (hoverEl && hoverEl !== cellEl) {
this._emit('dragAndDrop.dragLeave', event);
// unset target element
this._dragContext.targetEl = null;
// unset hover element
this._dragContext.hoverEl = null;
}
if (cellEl) {
// drag enter
if (cellEl !== hoverEl) {
// new hover element
this._dragContext.hoverEl = cellEl;
allowed = this._emit('dragAndDrop.dragEnter', event);
if (allowed !== false) {
// new targetEl
this._dragContext.targetEl = cellEl;
}
}
// drag over
allowed = this._emit('dragAndDrop.dragOver', event);
}
event.dataTransfer.dropEffect = allowed !== false ? 'move' : 'none';
}
handleDrop = (event) => {
// prevent default drop action
// QUIRK: Firefox will redirect if not prevented
stopEvent(event);
const target = this._emit('dragAndDrop.drop', event);
if (target) {
const {
draggedElement
} = this._dragContext;
if (draggedElement instanceof Row) {
const { rows } = this._sheet.getRoot();
let index = rows.indexOf(target);
this._modeling.moveRow(draggedElement, index);
} else if (draggedElement instanceof Col) {
const { cols } = this._sheet.getRoot();
let index = cols.indexOf(target);
this._modeling.moveCol(draggedElement, index);
}
}
// manually call to drag end needed, as we prevent the default
// browser behavior / drag end handling via
// event.preventDefault();
this.handleDragEnd(event);
}
handleDragEnd = (event) => {
// prevent default drop action
stopEvent(event);
this._unbindListeners();
this._emit('dragAndDrop.dragEnd', event);
this._dragContext = null;
}
}
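// Usage sketch (assumed wiring): a cell-rendering module calls
//   dragAndDrop.startDrag(row, event)
// from a native `dragstart` listener on a draggable element, and resolves the
// drop target via the 'dragAndDrop.drop' event this class fires.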
DragAndDrop.$inject = [
'eventBus',
'renderer',
'modeling',
'sheet'
];
// helpers /////////////////
function stopEvent(event, preventDefault) {
event.stopPropagation();
if (preventDefault !== true) {
event.preventDefault();
}
} |
# Copyright 2019 The Texar Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Preprocesses raw data and produces pickle files
"""
import argparse
import importlib
import texar.torch as tx
from utils import data_utils, processor
parser = argparse.ArgumentParser()
parser.add_argument(
'--data-dir', type=str, default='data/toy',
help="The directory of raw data, wherein data files must be named as "
"'train.txt', 'dev.txt', or 'test.txt'.")
parser.add_argument(
'--max-seq-length', type=int, default=128,
help="The maxium length of sequence, longer sequence will be trimmed.")
parser.add_argument(
'--output-dir', type=str, default=None,
help="The output directory where the pickle files will be generated. "
"By default it is set to be the same as `--data-dir`.")
parser.add_argument(
"--pretrained-model-name", type=str, default="117M",
choices=tx.modules.GPT2Decoder.available_checkpoints(),
help="Name of the pre-trained checkpoint to load.")
parser.add_argument(
'--config-train', type=str, default="configs.config_train",
help="Configurations of GPT-2 training, including data and "
"optimization hyperparameters.")
args = parser.parse_args()
def main():
"""Preprocess raw data and produces pickled files."""
data_dir = args.data_dir
if args.output_dir is None:
pickle_output_dir = data_dir
else:
pickle_output_dir = args.output_dir
tx.utils.maybe_create_dir(pickle_output_dir)
pretrained_model_dir = \
tx.modules.PretrainedGPT2Mixin.download_checkpoint(
pretrained_model_name=args.pretrained_model_name)
# Creates a data pre-processor for, e.g., BPE encoding
proc = processor.get_encoder(pretrained_model_dir)
config_train = importlib.import_module(args.config_train)
# Produces pickle files
data_utils.prepare_pickle_data(
data_dir=data_dir,
max_seq_length=args.max_seq_length,
encoder=proc,
output_dir=pickle_output_dir,
feature_original_types=config_train.feature_original_types)
if __name__ == "__main__":
main()
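# Example invocation (the script file name below is an assumption; substitute
# the actual file name used in the repository):
#   python prepare_data.py --data-dir data/toy --max-seq-length 128 \
#       --pretrained-model-name 117M --config-train configs.config_train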
|
#ifndef _SYS_LOG_H_
#define _SYS_LOG_H_
typedef void(*f_flint_log)(const char* format, ...);
enum{
LOG_TRACE = 0,
LOG_INFO = 1,
LOG_DEBUG = 2,
LOG_WARN = 3,
LOG_ERR = 4,
LOG_CUSTOM = 5
};
void flint_log_init(void);
void flint_log_deinit(void);
void flint_log_reg(int lvl, f_flint_log);
typedef struct{
f_flint_log trace;
f_flint_log info;
f_flint_log debug;
f_flint_log warn;
f_flint_log err;
}t_flint_hlog;
extern t_flint_hlog flint_hlog;
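/*
 * Minimal usage sketch (illustrative only; my_info_handler and n are
 * hypothetical and not part of this header):
 *
 *   flint_log_init();
 *   flint_log_reg(LOG_INFO, my_info_handler);    // register an f_flint_log callback
 *   flint_hlog.info("loaded %d modules\n", n);   // log through the global handle
 *   flint_log_deinit();
 */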
#endif
|
import os
from conans import ConanFile, tools
from conans.errors import ConanInvalidConfiguration
required_conan_version = ">=1.32.0"
class FrugallyDeepConan(ConanFile):
name = "frugally-deep"
description = "Use Keras models in C++ with ease."
license = "MIT"
topics = ("keras", "tensorflow")
homepage = "https://github.com/Dobiasd/frugally-deep"
url = "https://github.com/conan-io/conan-center-index"
settings = "os", "compiler", "build_type", "arch"
no_copy_source = True
@property
def _source_subfolder(self):
return "source_subfolder"
@property
def _compilers_minimum_version(self):
return {
"gcc": "4.9",
"Visual Studio": "14",
"clang": "3.7",
"apple-clang": "9",
}
def validate(self):
if self.settings.compiler.cppstd:
tools.check_min_cppstd(self, 14)
def lazy_lt_semver(v1, v2):
lv1 = [int(v) for v in v1.split(".")]
lv2 = [int(v) for v in v2.split(".")]
min_length = min(len(lv1), len(lv2))
return lv1[:min_length] < lv2[:min_length]
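# e.g. lazy_lt_semver("4.8", "4.9") is True while lazy_lt_semver("4", "4.9")
# is False: only the shared prefix is compared, so a bare major version is
# treated as new enough.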
minimum_version = self._compilers_minimum_version.get(str(self.settings.compiler), False)
if not minimum_version:
self.output.warn("frugally-deep requires C++14. Your compiler is unknown. Assuming it supports C++14.")
elif lazy_lt_semver(str(self.settings.compiler.version), minimum_version):
raise ConanInvalidConfiguration("frugally-deep requires C++14, which your compiler does not support.")
def requirements(self):
self.requires("eigen/3.3.9")
self.requires("functionalplus/0.2.13-p0")
self.requires("nlohmann_json/3.9.1")
def package_id(self):
self.info.header_only()
def source(self):
tools.get(**self.conan_data["sources"][self.version])
os.rename("frugally-deep-" + self.version, self._source_subfolder)
def package(self):
self.copy("LICENSE", dst="licenses", src=self._source_subfolder)
self.copy("*", dst="include", src=os.path.join(self._source_subfolder, "include"))
def package_info(self):
self.cpp_info.names["cmake_find_package"] = "frugally-deep"
self.cpp_info.names["cmake_find_package_multi"] = "frugally-deep"
self.cpp_info.components["fdeep"].names["cmake_find_package"] = "fdeep"
self.cpp_info.components["fdeep"].names["cmake_find_package_multi"] = "fdeep"
self.cpp_info.components["fdeep"].requires = ["eigen::eigen",
"functionalplus::functionalplus",
"nlohmann_json::nlohmann_json"]
if self.settings.os in ["Linux", "FreeBSD"]:
self.cpp_info.components["fdeep"].system_libs = ["pthread"]
|
#
# Copyright 2021 Logical Clocks AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Utility helper module for maggy experiments.
"""
import math
import os
import json
import numpy as np
from pyspark import TaskContext
from pyspark.sql import SparkSession
from maggy import constants, tensorboard
from maggy.core import exceptions
from maggy.core.environment.singleton import EnvSing
DEBUG = True
def log(msg):
"""
Generic log function (in case logging is changed from stdout later)
:param msg: The msg to log
:type msg: str
"""
if DEBUG:
print(msg)
def num_executors(sc):
"""
Get the number of executors configured for Jupyter
:param sc: The SparkContext to take the executors from.
:type sc: SparkContext
:return: Number of configured executors for Jupyter
:rtype: int
"""
return EnvSing.get_instance().get_executors(sc)
def get_partition_attempt_id():
"""Returns partitionId and attemptNumber of the task context, when invoked
on a spark executor.
PartitionId is ID of the RDD partition that is computed by this task.
The first task attempt will be assigned attemptNumber = 0, and subsequent
attempts will have increasing attempt numbers.
Returns:
partitionId, attemptNumber -- the RDD partition ID and the task attempt number.
"""
task_context = TaskContext.get()
return task_context.partitionId(), task_context.attemptNumber()
def progress_bar(done, total):
done_ratio = done / total
progress = math.floor(done_ratio * 30)
bar = "["
for i in range(30):
if i < progress:
bar += "="
elif i == progress:
bar += ">"
else:
bar += "."
bar += "]"
return bar
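# Example: progress_bar(15, 30) returns "[===============>..............]"
# (15 '=' characters, one '>', 14 '.' characters).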
def json_default_numpy(obj):
if isinstance(obj, np.integer):
return int(obj)
elif isinstance(obj, np.floating):
return float(obj)
elif isinstance(obj, np.ndarray):
return obj.tolist()
else:
raise TypeError(
"Object of type {0}: {1} is not JSON serializable".format(type(obj), obj)
)
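# Example: json.dumps({"loss": np.float32(0.25)}, default=json_default_numpy)
# returns '{"loss": 0.25}'; without the default hook, json.dumps raises a
# TypeError on the numpy scalar.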
def finalize_experiment(
experiment_json,
metric,
app_id,
run_id,
state,
duration,
logdir,
best_logdir,
optimization_key,
):
EnvSing.get_instance().finalize_experiment(
experiment_json,
metric,
app_id,
run_id,
state,
duration,
logdir,
best_logdir,
optimization_key,
)
def build_summary_json(logdir):
"""Builds the summary json to be read by the experiments service."""
combinations = []
env = EnvSing.get_instance()
for trial in env.ls(logdir):
if env.isdir(trial):
return_file = trial + "/.outputs.json"
hparams_file = trial + "/.hparams.json"
if env.exists(return_file) and env.exists(hparams_file):
metric_arr = env.convert_return_file_to_arr(return_file)
hparams_dict = _load_hparams(hparams_file)
combinations.append({"parameters": hparams_dict, "outputs": metric_arr})
return json.dumps({"combinations": combinations}, default=json_default_numpy)
def _load_hparams(hparams_file):
"""Loads the HParams configuration from a hparams file of a trial."""
hparams_file_contents = EnvSing.get_instance().load(hparams_file)
hparams = json.loads(hparams_file_contents)
return hparams
def handle_return_val(return_val, log_dir, optimization_key, log_file):
"""Handles the return value of the user defined training function."""
env = EnvSing.get_instance()
env.upload_file_output(return_val, log_dir)
# Return type validation
if not optimization_key:
raise ValueError("Optimization key cannot be None.")
if not return_val:
raise exceptions.ReturnTypeError(optimization_key, return_val)
if not isinstance(return_val, constants.USER_FCT.RETURN_TYPES):
raise exceptions.ReturnTypeError(optimization_key, return_val)
if isinstance(return_val, dict) and optimization_key not in return_val:
raise KeyError(
"Returned dictionary does not contain optimization key with the "
"provided name: {}".format(optimization_key)
)
# validate that optimization metric is numeric
if isinstance(return_val, dict):
opt_val = return_val[optimization_key]
else:
opt_val = return_val
return_val = {optimization_key: opt_val}
if not isinstance(opt_val, constants.USER_FCT.NUMERIC_TYPES):
raise exceptions.MetricTypeError(optimization_key, opt_val)
# for key, value in return_val.items():
# return_val[key] = value if isinstance(value, str) else str(value)
return_val["log"] = log_file.replace(env.project_path(), "")
return_file = log_dir + "/.outputs.json"
env.dump(json.dumps(return_val, default=json_default_numpy), return_file)
metric_file = log_dir + "/.metric"
env.dump(json.dumps(opt_val, default=json_default_numpy), metric_file)
return opt_val
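# Accepted return values (sketch, assuming optimization_key == "loss"): a plain
# numeric such as 0.3, which is wrapped into {"loss": 0.3}, or a dict that
# already contains the key, e.g. {"loss": 0.3, "acc": 0.9}; anything else
# raises a ReturnTypeError or MetricTypeError as implemented above.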
def clean_dir(clean_dir, keep=[]):
"""Deletes all files in a directory but keeps a few."""
env = EnvSing.get_instance()
if not env.isdir(clean_dir):
raise ValueError(
"{} is not a directory. Use `hops.hdfs.delete()` to delete single "
"files.".format(clean_dir)
)
for path in env.ls(clean_dir):
if path not in keep:
env.delete(path, recursive=True)
def validate_ml_id(app_id, run_id):
"""Validates if there was an experiment run previously from the same app id
but from a different experiment (e.g. hops-util-py vs. maggy) module.
"""
try:
prev_ml_id = os.environ["ML_ID"]
except KeyError:
return app_id, run_id
prev_app_id, _, prev_run_id = prev_ml_id.rpartition("_")
if prev_run_id == prev_ml_id:
# means there was no underscore found in string
raise ValueError(
"Found a previous ML_ID with wrong format: {}".format(prev_ml_id)
)
if prev_app_id == app_id and int(prev_run_id) >= run_id:
return app_id, (int(prev_run_id) + 1)
return app_id, run_id
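# Example: with os.environ["ML_ID"] == "application_1_3",
# validate_ml_id("application_1", 2) returns ("application_1", 4), bumping the
# run id past the previously recorded one.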
def set_ml_id(app_id, run_id):
"""Sets the environment variables 'HOME' and 'ML_ID' to register the experiment.
Args:
app_id (str): Maggy application ID.
run_id (int): Maggy experiment run ID.
"""
os.environ["HOME"] = os.getcwd()
os.environ["ML_ID"] = str(app_id) + "_" + str(run_id)
def find_spark():
"""
Returns: SparkSession
"""
return SparkSession.builder.getOrCreate()
def seconds_to_milliseconds(time):
"""
Returns: time converted from seconds to milliseconds
"""
return int(round(time * 1000))
def time_diff(t0, t1):
"""
Args:
:t0: start time in seconds
:t1: end time in seconds
Returns: string with time difference (i.e. t1-t0)
"""
minutes, seconds = divmod(t1 - t0, 60)
hours, minutes = divmod(minutes, 60)
return "%d hours, %d minutes, %d seconds" % (hours, minutes, seconds)
def register_environment(app_id, run_id):
"""Validates IDs and creates an experiment folder in the fs.
Args:
:app_id: Application ID
:run_id: Current experiment run ID
Returns: (app_id, run_id) with the updated IDs.
"""
app_id = str(find_spark().sparkContext.applicationId)
app_id, run_id = validate_ml_id(app_id, run_id)
set_ml_id(app_id, run_id)
# Create experiment directory.
EnvSing.get_instance().create_experiment_dir(app_id, run_id)
tensorboard._register(EnvSing.get_instance().get_logdir(app_id, run_id))
return app_id, run_id
def populate_experiment(config, app_id, run_id, exp_function):
"""Creates a dictionary with the experiment information.
Args:
:config: Experiment config object
:app_id: Application ID
:run_id: Current experiment run ID
:exp_function: Name of experiment driver.
Returns:
:experiment_json: Dictionary with config info on the experiment.
"""
try:
direction = config.direction
except AttributeError:
direction = "N/A"
try:
opt_key = config.optimization_key
except AttributeError:
opt_key = "N/A"
experiment_json = EnvSing.get_instance().populate_experiment(
config.name,
exp_function,
"MAGGY",
None,
config.description,
app_id,
direction,
opt_key,
)
exp_ml_id = app_id + "_" + str(run_id)
experiment_json = EnvSing.get_instance().attach_experiment_xattr(
exp_ml_id, experiment_json, "INIT"
)
return experiment_json
|
/* */
"format cjs";
var parse5 = require('parse5/index');
var parser = new parse5.Parser(parse5.TreeAdapters.htmlparser2);
var serializer = new parse5.Serializer(parse5.TreeAdapters.htmlparser2);
var treeAdapter = parser.treeAdapter;
import { ListWrapper, StringMapWrapper } from 'angular2/src/facade/collection';
import { DomAdapter, setRootDomAdapter } from 'angular2/platform/common_dom';
import { isPresent, isBlank, global, setValueOnPath, DateWrapper } from 'angular2/src/facade/lang';
import { BaseException } from 'angular2/src/facade/exceptions';
import { SelectorMatcher, CssSelector } from 'angular2/src/compiler/selector';
import { XHR } from 'angular2/src/compiler/xhr';
var _attrToPropMap = {
'class': 'className',
'innerHtml': 'innerHTML',
'readonly': 'readOnly',
'tabindex': 'tabIndex',
};
var defDoc = null;
var mapProps = ['attribs', 'x-attribsNamespace', 'x-attribsPrefix'];
function _notImplemented(methodName) {
return new BaseException('This method is not implemented in Parse5DomAdapter: ' + methodName);
}
/* tslint:disable:requireParameterType */
export class Parse5DomAdapter extends DomAdapter {
static makeCurrent() { setRootDomAdapter(new Parse5DomAdapter()); }
hasProperty(element, name) {
return _HTMLElementPropertyList.indexOf(name) > -1;
}
// TODO(tbosch): don't even call this method when we run the tests on server side
// by not using the DomRenderer in tests. Keeping this for now to make tests happy...
setProperty(el, name, value) {
if (name === 'innerHTML') {
this.setInnerHTML(el, value);
}
else if (name === 'className') {
el.attribs["class"] = el.className = value;
}
else {
el[name] = value;
}
}
// TODO(tbosch): don't even call this method when we run the tests on server side
// by not using the DomRenderer in tests. Keeping this for now to make tests happy...
getProperty(el, name) { return el[name]; }
logError(error) { console.error(error); }
log(error) { console.log(error); }
logGroup(error) { console.error(error); }
logGroupEnd() { }
getXHR() { return XHR; }
get attrToPropMap() { return _attrToPropMap; }
query(selector) { throw _notImplemented('query'); }
querySelector(el, selector) { return this.querySelectorAll(el, selector)[0]; }
querySelectorAll(el, selector) {
var res = [];
var _recursive = (result, node, selector, matcher) => {
var cNodes = node.childNodes;
if (cNodes && cNodes.length > 0) {
for (var i = 0; i < cNodes.length; i++) {
var childNode = cNodes[i];
if (this.elementMatches(childNode, selector, matcher)) {
result.push(childNode);
}
_recursive(result, childNode, selector, matcher);
}
}
};
var matcher = new SelectorMatcher();
matcher.addSelectables(CssSelector.parse(selector));
_recursive(res, el, selector, matcher);
return res;
}
elementMatches(node, selector, matcher = null) {
if (this.isElementNode(node) && selector === '*') {
return true;
}
var result = false;
if (selector && selector.charAt(0) == "#") {
result = this.getAttribute(node, 'id') == selector.substring(1);
}
else if (selector) {
var result = false;
if (matcher == null) {
matcher = new SelectorMatcher();
matcher.addSelectables(CssSelector.parse(selector));
}
var cssSelector = new CssSelector();
cssSelector.setElement(this.tagName(node));
if (node.attribs) {
for (var attrName in node.attribs) {
cssSelector.addAttribute(attrName, node.attribs[attrName]);
}
}
var classList = this.classList(node);
for (var i = 0; i < classList.length; i++) {
cssSelector.addClassName(classList[i]);
}
matcher.match(cssSelector, function (selector, cb) { result = true; });
}
return result;
}
on(el, evt, listener) {
var listenersMap = el._eventListenersMap;
if (isBlank(listenersMap)) {
var listenersMap = StringMapWrapper.create();
el._eventListenersMap = listenersMap;
}
var listeners = StringMapWrapper.get(listenersMap, evt);
if (isBlank(listeners)) {
listeners = [];
}
listeners.push(listener);
StringMapWrapper.set(listenersMap, evt, listeners);
}
onAndCancel(el, evt, listener) {
this.on(el, evt, listener);
return () => {
ListWrapper.remove(StringMapWrapper.get(el._eventListenersMap, evt), listener);
};
}
dispatchEvent(el, evt) {
if (isBlank(evt.target)) {
evt.target = el;
}
if (isPresent(el._eventListenersMap)) {
var listeners = StringMapWrapper.get(el._eventListenersMap, evt.type);
if (isPresent(listeners)) {
for (var i = 0; i < listeners.length; i++) {
listeners[i](evt);
}
}
}
if (isPresent(el.parent)) {
this.dispatchEvent(el.parent, evt);
}
if (isPresent(el._window)) {
this.dispatchEvent(el._window, evt);
}
}
createMouseEvent(eventType) { return this.createEvent(eventType); }
createEvent(eventType) {
var evt = {
type: eventType,
defaultPrevented: false,
preventDefault: () => { evt.defaultPrevented = true; }
};
return evt;
}
preventDefault(evt) { evt.returnValue = false; }
isPrevented(evt) { return isPresent(evt.returnValue) && !evt.returnValue; }
getInnerHTML(el) { return serializer.serialize(this.templateAwareRoot(el)); }
getOuterHTML(el) {
serializer.html = '';
serializer._serializeElement(el);
return serializer.html;
}
nodeName(node) { return node.tagName; }
nodeValue(node) { return node.nodeValue; }
type(node) { throw _notImplemented('type'); }
content(node) { return node.childNodes[0]; }
firstChild(el) { return el.firstChild; }
nextSibling(el) { return el.nextSibling; }
parentElement(el) { return el.parent; }
childNodes(el) { return el.childNodes; }
childNodesAsList(el) {
var childNodes = el.childNodes;
var res = ListWrapper.createFixedSize(childNodes.length);
for (var i = 0; i < childNodes.length; i++) {
res[i] = childNodes[i];
}
return res;
}
clearNodes(el) {
while (el.childNodes.length > 0) {
this.remove(el.childNodes[0]);
}
}
appendChild(el, node) {
this.remove(node);
treeAdapter.appendChild(this.templateAwareRoot(el), node);
}
removeChild(el, node) {
if (ListWrapper.contains(el.childNodes, node)) {
this.remove(node);
}
}
remove(el) {
var parent = el.parent;
if (parent) {
var index = parent.childNodes.indexOf(el);
parent.childNodes.splice(index, 1);
}
var prev = el.previousSibling;
var next = el.nextSibling;
if (prev) {
prev.next = next;
}
if (next) {
next.prev = prev;
}
el.prev = null;
el.next = null;
el.parent = null;
return el;
}
insertBefore(el, node) {
this.remove(node);
treeAdapter.insertBefore(el.parent, node, el);
}
insertAllBefore(el, nodes) { nodes.forEach(n => this.insertBefore(el, n)); }
insertAfter(el, node) {
if (el.nextSibling) {
this.insertBefore(el.nextSibling, node);
}
else {
this.appendChild(el.parent, node);
}
}
setInnerHTML(el, value) {
this.clearNodes(el);
var content = parser.parseFragment(value);
for (var i = 0; i < content.childNodes.length; i++) {
treeAdapter.appendChild(el, content.childNodes[i]);
}
}
getText(el, isRecursive) {
if (this.isTextNode(el)) {
return el.data;
}
else if (this.isCommentNode(el)) {
// In the DOM, comments within an element return an empty string for textContent
// However, comment node instances return the comment content for textContent getter
return isRecursive ? '' : el.data;
}
else if (isBlank(el.childNodes) || el.childNodes.length == 0) {
return "";
}
else {
var textContent = "";
for (var i = 0; i < el.childNodes.length; i++) {
textContent += this.getText(el.childNodes[i], true);
}
return textContent;
}
}
setText(el, value) {
if (this.isTextNode(el) || this.isCommentNode(el)) {
el.data = value;
}
else {
this.clearNodes(el);
if (value !== '')
treeAdapter.insertText(el, value);
}
}
getValue(el) { return el.value; }
setValue(el, value) { el.value = value; }
getChecked(el) { return el.checked; }
setChecked(el, value) { el.checked = value; }
createComment(text) { return treeAdapter.createCommentNode(text); }
createTemplate(html) {
var template = treeAdapter.createElement("template", 'http://www.w3.org/1999/xhtml', []);
var content = parser.parseFragment(html);
treeAdapter.appendChild(template, content);
return template;
}
createElement(tagName) {
return treeAdapter.createElement(tagName, 'http://www.w3.org/1999/xhtml', []);
}
createElementNS(ns, tagName) { return treeAdapter.createElement(tagName, ns, []); }
createTextNode(text) {
var t = this.createComment(text);
t.type = 'text';
return t;
}
createScriptTag(attrName, attrValue) {
return treeAdapter.createElement("script", 'http://www.w3.org/1999/xhtml', [{ name: attrName, value: attrValue }]);
}
createStyleElement(css) {
var style = this.createElement('style');
this.setText(style, css);
return style;
}
createShadowRoot(el) {
el.shadowRoot = treeAdapter.createDocumentFragment();
el.shadowRoot.parent = el;
return el.shadowRoot;
}
getShadowRoot(el) { return el.shadowRoot; }
getHost(el) { return el.host; }
getDistributedNodes(el) { throw _notImplemented('getDistributedNodes'); }
clone(node) {
var _recursive = (node) => {
var nodeClone = Object.create(Object.getPrototypeOf(node));
for (var prop in node) {
var desc = Object.getOwnPropertyDescriptor(node, prop);
if (desc && 'value' in desc && typeof desc.value !== 'object') {
nodeClone[prop] = node[prop];
}
}
nodeClone.parent = null;
nodeClone.prev = null;
nodeClone.next = null;
nodeClone.children = null;
mapProps.forEach(mapName => {
if (isPresent(node[mapName])) {
nodeClone[mapName] = {};
for (var prop in node[mapName]) {
nodeClone[mapName][prop] = node[mapName][prop];
}
}
});
var cNodes = node.children;
if (cNodes) {
var cNodesClone = new Array(cNodes.length);
for (var i = 0; i < cNodes.length; i++) {
var childNode = cNodes[i];
var childNodeClone = _recursive(childNode);
cNodesClone[i] = childNodeClone;
if (i > 0) {
childNodeClone.prev = cNodesClone[i - 1];
cNodesClone[i - 1].next = childNodeClone;
}
childNodeClone.parent = nodeClone;
}
nodeClone.children = cNodesClone;
}
return nodeClone;
};
return _recursive(node);
}
getElementsByClassName(element, name) {
return this.querySelectorAll(element, "." + name);
}
getElementsByTagName(element, name) {
throw _notImplemented('getElementsByTagName');
}
classList(element) {
var classAttrValue = null;
var attributes = element.attribs;
if (attributes && attributes.hasOwnProperty("class")) {
classAttrValue = attributes["class"];
}
return classAttrValue ? classAttrValue.trim().split(/\s+/g) : [];
}
addClass(element, className) {
var classList = this.classList(element);
var index = classList.indexOf(className);
if (index == -1) {
classList.push(className);
element.attribs["class"] = element.className = classList.join(" ");
}
}
removeClass(element, className) {
var classList = this.classList(element);
var index = classList.indexOf(className);
if (index > -1) {
classList.splice(index, 1);
element.attribs["class"] = element.className = classList.join(" ");
}
}
hasClass(element, className) {
return ListWrapper.contains(this.classList(element), className);
}
hasStyle(element, styleName, styleValue = null) {
var value = this.getStyle(element, styleName) || '';
return styleValue ? value == styleValue : value.length > 0;
}
/** @internal */
_readStyleAttribute(element) {
var styleMap = {};
var attributes = element.attribs;
if (attributes && attributes.hasOwnProperty("style")) {
var styleAttrValue = attributes["style"];
var styleList = styleAttrValue.split(/;+/g);
for (var i = 0; i < styleList.length; i++) {
if (styleList[i].length > 0) {
var elems = styleList[i].split(/:+/g);
styleMap[elems[0].trim()] = elems[1].trim();
}
}
}
return styleMap;
}
/** @internal */
_writeStyleAttribute(element, styleMap) {
var styleAttrValue = "";
for (var key in styleMap) {
var newValue = styleMap[key];
if (newValue && newValue.length > 0) {
styleAttrValue += key + ":" + styleMap[key] + ";";
}
}
element.attribs["style"] = styleAttrValue;
}
setStyle(element, styleName, styleValue) {
var styleMap = this._readStyleAttribute(element);
styleMap[styleName] = styleValue;
this._writeStyleAttribute(element, styleMap);
}
removeStyle(element, styleName) { this.setStyle(element, styleName, null); }
getStyle(element, styleName) {
var styleMap = this._readStyleAttribute(element);
return styleMap.hasOwnProperty(styleName) ? styleMap[styleName] : "";
}
tagName(element) { return element.tagName == "style" ? "STYLE" : element.tagName; }
attributeMap(element) {
var res = new Map();
var elAttrs = treeAdapter.getAttrList(element);
for (var i = 0; i < elAttrs.length; i++) {
var attrib = elAttrs[i];
res.set(attrib.name, attrib.value);
}
return res;
}
hasAttribute(element, attribute) {
return element.attribs && element.attribs.hasOwnProperty(attribute);
}
hasAttributeNS(element, ns, attribute) { throw 'not implemented'; }
getAttribute(element, attribute) {
return element.attribs && element.attribs.hasOwnProperty(attribute) ?
element.attribs[attribute] :
null;
}
getAttributeNS(element, ns, attribute) { throw 'not implemented'; }
setAttribute(element, attribute, value) {
if (attribute) {
element.attribs[attribute] = value;
if (attribute === 'class') {
element.className = value;
}
}
}
setAttributeNS(element, ns, attribute, value) { throw 'not implemented'; }
removeAttribute(element, attribute) {
if (attribute) {
StringMapWrapper.delete(element.attribs, attribute);
}
}
removeAttributeNS(element, ns, name) { throw 'not implemented'; }
templateAwareRoot(el) { return this.isTemplateElement(el) ? this.content(el) : el; }
createHtmlDocument() {
var newDoc = treeAdapter.createDocument();
newDoc.title = "fake title";
var head = treeAdapter.createElement("head", null, []);
var body = treeAdapter.createElement("body", 'http://www.w3.org/1999/xhtml', []);
this.appendChild(newDoc, head);
this.appendChild(newDoc, body);
StringMapWrapper.set(newDoc, "head", head);
StringMapWrapper.set(newDoc, "body", body);
StringMapWrapper.set(newDoc, "_window", StringMapWrapper.create());
return newDoc;
}
defaultDoc() {
if (defDoc === null) {
defDoc = this.createHtmlDocument();
}
return defDoc;
}
getBoundingClientRect(el) { return { left: 0, top: 0, width: 0, height: 0 }; }
getTitle() { return this.defaultDoc().title || ""; }
setTitle(newTitle) { this.defaultDoc().title = newTitle; }
isTemplateElement(el) {
return this.isElementNode(el) && this.tagName(el) === "template";
}
isTextNode(node) { return treeAdapter.isTextNode(node); }
isCommentNode(node) { return treeAdapter.isCommentNode(node); }
isElementNode(node) { return node ? treeAdapter.isElementNode(node) : false; }
hasShadowRoot(node) { return isPresent(node.shadowRoot); }
isShadowRoot(node) { return this.getShadowRoot(node) == node; }
importIntoDoc(node) { return this.clone(node); }
adoptNode(node) { return node; }
getHref(el) { return el.href; }
resolveAndSetHref(el, baseUrl, href) {
if (href == null) {
el.href = baseUrl;
}
else {
el.href = baseUrl + '/../' + href;
}
}
/** @internal */
_buildRules(parsedRules, css) {
var rules = [];
for (var i = 0; i < parsedRules.length; i++) {
var parsedRule = parsedRules[i];
var rule = StringMapWrapper.create();
StringMapWrapper.set(rule, "cssText", css);
StringMapWrapper.set(rule, "style", { content: "", cssText: "" });
if (parsedRule.type == "rule") {
StringMapWrapper.set(rule, "type", 1);
StringMapWrapper.set(rule, "selectorText", parsedRule.selectors.join(", ")
.replace(/\s{2,}/g, " ")
.replace(/\s*~\s*/g, " ~ ")
.replace(/\s*\+\s*/g, " + ")
.replace(/\s*>\s*/g, " > ")
.replace(/\[(\w+)=(\w+)\]/g, '[$1="$2"]'));
if (isBlank(parsedRule.declarations)) {
continue;
}
for (var j = 0; j < parsedRule.declarations.length; j++) {
var declaration = parsedRule.declarations[j];
StringMapWrapper.set(StringMapWrapper.get(rule, "style"), declaration.property, declaration.value);
StringMapWrapper.get(rule, "style").cssText +=
declaration.property + ": " + declaration.value + ";";
}
}
else if (parsedRule.type == "media") {
StringMapWrapper.set(rule, "type", 4);
StringMapWrapper.set(rule, "media", { mediaText: parsedRule.media });
if (parsedRule.rules) {
StringMapWrapper.set(rule, "cssRules", this._buildRules(parsedRule.rules));
}
}
rules.push(rule);
}
return rules;
}
supportsDOMEvents() { return false; }
supportsNativeShadowDOM() { return false; }
getGlobalEventTarget(target) {
if (target == "window") {
return this.defaultDoc()._window;
}
else if (target == "document") {
return this.defaultDoc();
}
else if (target == "body") {
return this.defaultDoc().body;
}
}
getBaseHref() { throw 'not implemented'; }
resetBaseElement() { throw 'not implemented'; }
getHistory() { throw 'not implemented'; }
getLocation() { throw 'not implemented'; }
getUserAgent() { return "Fake user agent"; }
getData(el, name) { return this.getAttribute(el, 'data-' + name); }
getComputedStyle(el) { throw 'not implemented'; }
setData(el, name, value) { this.setAttribute(el, 'data-' + name, value); }
// TODO(tbosch): move this into a separate environment class once we have it
setGlobalVar(path, value) { setValueOnPath(global, path, value); }
requestAnimationFrame(callback) { return setTimeout(callback, 0); }
cancelAnimationFrame(id) { clearTimeout(id); }
performanceNow() { return DateWrapper.toMillis(DateWrapper.now()); }
getAnimationPrefix() { return ''; }
getTransitionEnd() { return 'transitionend'; }
supportsAnimation() { return true; }
replaceChild(el, newNode, oldNode) { throw new Error('not implemented'); }
parse(templateHtml) { throw new Error('not implemented'); }
invoke(el, methodName, args) { throw new Error('not implemented'); }
getEventKey(event) { throw new Error('not implemented'); }
}
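// Minimal usage sketch (illustrative only, for server-side rendering where no
// real DOM exists; all calls below are methods defined on the class above):
//
//   Parse5DomAdapter.makeCurrent();           // install as the active DomAdapter
//   var adapter = new Parse5DomAdapter();
//   var el = adapter.createElement('div');
//   adapter.setAttribute(el, 'class', 'hello');
//   adapter.hasClass(el, 'hello');            // true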
// TODO: build a proper list, this one is all the keys of a HTMLInputElement
var _HTMLElementPropertyList = [
"webkitEntries",
"incremental",
"webkitdirectory",
"selectionDirection",
"selectionEnd",
"selectionStart",
"labels",
"validationMessage",
"validity",
"willValidate",
"width",
"valueAsNumber",
"valueAsDate",
"value",
"useMap",
"defaultValue",
"type",
"step",
"src",
"size",
"required",
"readOnly",
"placeholder",
"pattern",
"name",
"multiple",
"min",
"minLength",
"maxLength",
"max",
"list",
"indeterminate",
"height",
"formTarget",
"formNoValidate",
"formMethod",
"formEnctype",
"formAction",
"files",
"form",
"disabled",
"dirName",
"checked",
"defaultChecked",
"autofocus",
"autocomplete",
"alt",
"align",
"accept",
"onautocompleteerror",
"onautocomplete",
"onwaiting",
"onvolumechange",
"ontoggle",
"ontimeupdate",
"onsuspend",
"onsubmit",
"onstalled",
"onshow",
"onselect",
"onseeking",
"onseeked",
"onscroll",
"onresize",
"onreset",
"onratechange",
"onprogress",
"onplaying",
"onplay",
"onpause",
"onmousewheel",
"onmouseup",
"onmouseover",
"onmouseout",
"onmousemove",
"onmouseleave",
"onmouseenter",
"onmousedown",
"onloadstart",
"onloadedmetadata",
"onloadeddata",
"onload",
"onkeyup",
"onkeypress",
"onkeydown",
"oninvalid",
"oninput",
"onfocus",
"onerror",
"onended",
"onemptied",
"ondurationchange",
"ondrop",
"ondragstart",
"ondragover",
"ondragleave",
"ondragenter",
"ondragend",
"ondrag",
"ondblclick",
"oncuechange",
"oncontextmenu",
"onclose",
"onclick",
"onchange",
"oncanplaythrough",
"oncanplay",
"oncancel",
"onblur",
"onabort",
"spellcheck",
"isContentEditable",
"contentEditable",
"outerText",
"innerText",
"accessKey",
"hidden",
"webkitdropzone",
"draggable",
"tabIndex",
"dir",
"translate",
"lang",
"title",
"childElementCount",
"lastElementChild",
"firstElementChild",
"children",
"onwebkitfullscreenerror",
"onwebkitfullscreenchange",
"nextElementSibling",
"previousElementSibling",
"onwheel",
"onselectstart",
"onsearch",
"onpaste",
"oncut",
"oncopy",
"onbeforepaste",
"onbeforecut",
"onbeforecopy",
"shadowRoot",
"dataset",
"classList",
"className",
"outerHTML",
"innerHTML",
"scrollHeight",
"scrollWidth",
"scrollTop",
"scrollLeft",
"clientHeight",
"clientWidth",
"clientTop",
"clientLeft",
"offsetParent",
"offsetHeight",
"offsetWidth",
"offsetTop",
"offsetLeft",
"localName",
"prefix",
"namespaceURI",
"id",
"style",
"attributes",
"tagName",
"parentElement",
"textContent",
"baseURI",
"ownerDocument",
"nextSibling",
"previousSibling",
"lastChild",
"firstChild",
"childNodes",
"parentNode",
"nodeType",
"nodeValue",
"nodeName",
"closure_lm_714617",
"__jsaction"
];
|
// WARNING: Please don't edit this file. It was generated by C++/WinRT v2.0.200921.6
#ifndef WINRT_Windows_Media_Devices_Core_0_H
#define WINRT_Windows_Media_Devices_Core_0_H
WINRT_EXPORT namespace winrt::Windows::Foundation
{
template <typename T> struct __declspec(empty_bases) IReference;
struct Point;
}
WINRT_EXPORT namespace winrt::Windows::Foundation::Collections
{
template <typename T> struct __declspec(empty_bases) IVector;
}
WINRT_EXPORT namespace winrt::Windows::Foundation::Numerics
{
}
WINRT_EXPORT namespace winrt::Windows::Media::MediaProperties
{
struct IMediaEncodingProperties;
struct MediaRatio;
}
WINRT_EXPORT namespace winrt::Windows::Perception::Spatial
{
struct SpatialCoordinateSystem;
}
WINRT_EXPORT namespace winrt::Windows::Media::Devices::Core
{
enum class FrameFlashMode : int32_t
{
Disable = 0,
Enable = 1,
Global = 2,
};
struct ICameraIntrinsics;
struct ICameraIntrinsics2;
struct ICameraIntrinsicsFactory;
struct IDepthCorrelatedCoordinateMapper;
struct IFrameControlCapabilities;
struct IFrameControlCapabilities2;
struct IFrameController;
struct IFrameController2;
struct IFrameExposureCapabilities;
struct IFrameExposureCompensationCapabilities;
struct IFrameExposureCompensationControl;
struct IFrameExposureControl;
struct IFrameFlashCapabilities;
struct IFrameFlashControl;
struct IFrameFocusCapabilities;
struct IFrameFocusControl;
struct IFrameIsoSpeedCapabilities;
struct IFrameIsoSpeedControl;
struct IVariablePhotoSequenceController;
struct CameraIntrinsics;
struct DepthCorrelatedCoordinateMapper;
struct FrameControlCapabilities;
struct FrameController;
struct FrameExposureCapabilities;
struct FrameExposureCompensationCapabilities;
struct FrameExposureCompensationControl;
struct FrameExposureControl;
struct FrameFlashCapabilities;
struct FrameFlashControl;
struct FrameFocusCapabilities;
struct FrameFocusControl;
struct FrameIsoSpeedCapabilities;
struct FrameIsoSpeedControl;
struct VariablePhotoSequenceController;
}
namespace winrt::impl
{
template <> struct category<Windows::Media::Devices::Core::ICameraIntrinsics>{ using type = interface_category; };
template <> struct category<Windows::Media::Devices::Core::ICameraIntrinsics2>{ using type = interface_category; };
template <> struct category<Windows::Media::Devices::Core::ICameraIntrinsicsFactory>{ using type = interface_category; };
template <> struct category<Windows::Media::Devices::Core::IDepthCorrelatedCoordinateMapper>{ using type = interface_category; };
template <> struct category<Windows::Media::Devices::Core::IFrameControlCapabilities>{ using type = interface_category; };
template <> struct category<Windows::Media::Devices::Core::IFrameControlCapabilities2>{ using type = interface_category; };
template <> struct category<Windows::Media::Devices::Core::IFrameController>{ using type = interface_category; };
template <> struct category<Windows::Media::Devices::Core::IFrameController2>{ using type = interface_category; };
template <> struct category<Windows::Media::Devices::Core::IFrameExposureCapabilities>{ using type = interface_category; };
template <> struct category<Windows::Media::Devices::Core::IFrameExposureCompensationCapabilities>{ using type = interface_category; };
template <> struct category<Windows::Media::Devices::Core::IFrameExposureCompensationControl>{ using type = interface_category; };
template <> struct category<Windows::Media::Devices::Core::IFrameExposureControl>{ using type = interface_category; };
template <> struct category<Windows::Media::Devices::Core::IFrameFlashCapabilities>{ using type = interface_category; };
template <> struct category<Windows::Media::Devices::Core::IFrameFlashControl>{ using type = interface_category; };
template <> struct category<Windows::Media::Devices::Core::IFrameFocusCapabilities>{ using type = interface_category; };
template <> struct category<Windows::Media::Devices::Core::IFrameFocusControl>{ using type = interface_category; };
template <> struct category<Windows::Media::Devices::Core::IFrameIsoSpeedCapabilities>{ using type = interface_category; };
template <> struct category<Windows::Media::Devices::Core::IFrameIsoSpeedControl>{ using type = interface_category; };
template <> struct category<Windows::Media::Devices::Core::IVariablePhotoSequenceController>{ using type = interface_category; };
template <> struct category<Windows::Media::Devices::Core::CameraIntrinsics>{ using type = class_category; };
template <> struct category<Windows::Media::Devices::Core::DepthCorrelatedCoordinateMapper>{ using type = class_category; };
template <> struct category<Windows::Media::Devices::Core::FrameControlCapabilities>{ using type = class_category; };
template <> struct category<Windows::Media::Devices::Core::FrameController>{ using type = class_category; };
template <> struct category<Windows::Media::Devices::Core::FrameExposureCapabilities>{ using type = class_category; };
template <> struct category<Windows::Media::Devices::Core::FrameExposureCompensationCapabilities>{ using type = class_category; };
template <> struct category<Windows::Media::Devices::Core::FrameExposureCompensationControl>{ using type = class_category; };
template <> struct category<Windows::Media::Devices::Core::FrameExposureControl>{ using type = class_category; };
template <> struct category<Windows::Media::Devices::Core::FrameFlashCapabilities>{ using type = class_category; };
template <> struct category<Windows::Media::Devices::Core::FrameFlashControl>{ using type = class_category; };
template <> struct category<Windows::Media::Devices::Core::FrameFocusCapabilities>{ using type = class_category; };
template <> struct category<Windows::Media::Devices::Core::FrameFocusControl>{ using type = class_category; };
template <> struct category<Windows::Media::Devices::Core::FrameIsoSpeedCapabilities>{ using type = class_category; };
template <> struct category<Windows::Media::Devices::Core::FrameIsoSpeedControl>{ using type = class_category; };
template <> struct category<Windows::Media::Devices::Core::VariablePhotoSequenceController>{ using type = class_category; };
template <> struct category<Windows::Media::Devices::Core::FrameFlashMode>{ using type = enum_category; };
template <> inline constexpr auto& name_v<Windows::Media::Devices::Core::CameraIntrinsics> = L"Windows.Media.Devices.Core.CameraIntrinsics";
template <> inline constexpr auto& name_v<Windows::Media::Devices::Core::DepthCorrelatedCoordinateMapper> = L"Windows.Media.Devices.Core.DepthCorrelatedCoordinateMapper";
template <> inline constexpr auto& name_v<Windows::Media::Devices::Core::FrameControlCapabilities> = L"Windows.Media.Devices.Core.FrameControlCapabilities";
template <> inline constexpr auto& name_v<Windows::Media::Devices::Core::FrameController> = L"Windows.Media.Devices.Core.FrameController";
template <> inline constexpr auto& name_v<Windows::Media::Devices::Core::FrameExposureCapabilities> = L"Windows.Media.Devices.Core.FrameExposureCapabilities";
template <> inline constexpr auto& name_v<Windows::Media::Devices::Core::FrameExposureCompensationCapabilities> = L"Windows.Media.Devices.Core.FrameExposureCompensationCapabilities";
template <> inline constexpr auto& name_v<Windows::Media::Devices::Core::FrameExposureCompensationControl> = L"Windows.Media.Devices.Core.FrameExposureCompensationControl";
template <> inline constexpr auto& name_v<Windows::Media::Devices::Core::FrameExposureControl> = L"Windows.Media.Devices.Core.FrameExposureControl";
template <> inline constexpr auto& name_v<Windows::Media::Devices::Core::FrameFlashCapabilities> = L"Windows.Media.Devices.Core.FrameFlashCapabilities";
template <> inline constexpr auto& name_v<Windows::Media::Devices::Core::FrameFlashControl> = L"Windows.Media.Devices.Core.FrameFlashControl";
template <> inline constexpr auto& name_v<Windows::Media::Devices::Core::FrameFocusCapabilities> = L"Windows.Media.Devices.Core.FrameFocusCapabilities";
template <> inline constexpr auto& name_v<Windows::Media::Devices::Core::FrameFocusControl> = L"Windows.Media.Devices.Core.FrameFocusControl";
template <> inline constexpr auto& name_v<Windows::Media::Devices::Core::FrameIsoSpeedCapabilities> = L"Windows.Media.Devices.Core.FrameIsoSpeedCapabilities";
template <> inline constexpr auto& name_v<Windows::Media::Devices::Core::FrameIsoSpeedControl> = L"Windows.Media.Devices.Core.FrameIsoSpeedControl";
template <> inline constexpr auto& name_v<Windows::Media::Devices::Core::VariablePhotoSequenceController> = L"Windows.Media.Devices.Core.VariablePhotoSequenceController";
template <> inline constexpr auto& name_v<Windows::Media::Devices::Core::FrameFlashMode> = L"Windows.Media.Devices.Core.FrameFlashMode";
template <> inline constexpr auto& name_v<Windows::Media::Devices::Core::ICameraIntrinsics> = L"Windows.Media.Devices.Core.ICameraIntrinsics";
template <> inline constexpr auto& name_v<Windows::Media::Devices::Core::ICameraIntrinsics2> = L"Windows.Media.Devices.Core.ICameraIntrinsics2";
template <> inline constexpr auto& name_v<Windows::Media::Devices::Core::ICameraIntrinsicsFactory> = L"Windows.Media.Devices.Core.ICameraIntrinsicsFactory";
template <> inline constexpr auto& name_v<Windows::Media::Devices::Core::IDepthCorrelatedCoordinateMapper> = L"Windows.Media.Devices.Core.IDepthCorrelatedCoordinateMapper";
template <> inline constexpr auto& name_v<Windows::Media::Devices::Core::IFrameControlCapabilities> = L"Windows.Media.Devices.Core.IFrameControlCapabilities";
template <> inline constexpr auto& name_v<Windows::Media::Devices::Core::IFrameControlCapabilities2> = L"Windows.Media.Devices.Core.IFrameControlCapabilities2";
template <> inline constexpr auto& name_v<Windows::Media::Devices::Core::IFrameController> = L"Windows.Media.Devices.Core.IFrameController";
template <> inline constexpr auto& name_v<Windows::Media::Devices::Core::IFrameController2> = L"Windows.Media.Devices.Core.IFrameController2";
template <> inline constexpr auto& name_v<Windows::Media::Devices::Core::IFrameExposureCapabilities> = L"Windows.Media.Devices.Core.IFrameExposureCapabilities";
template <> inline constexpr auto& name_v<Windows::Media::Devices::Core::IFrameExposureCompensationCapabilities> = L"Windows.Media.Devices.Core.IFrameExposureCompensationCapabilities";
template <> inline constexpr auto& name_v<Windows::Media::Devices::Core::IFrameExposureCompensationControl> = L"Windows.Media.Devices.Core.IFrameExposureCompensationControl";
template <> inline constexpr auto& name_v<Windows::Media::Devices::Core::IFrameExposureControl> = L"Windows.Media.Devices.Core.IFrameExposureControl";
template <> inline constexpr auto& name_v<Windows::Media::Devices::Core::IFrameFlashCapabilities> = L"Windows.Media.Devices.Core.IFrameFlashCapabilities";
template <> inline constexpr auto& name_v<Windows::Media::Devices::Core::IFrameFlashControl> = L"Windows.Media.Devices.Core.IFrameFlashControl";
template <> inline constexpr auto& name_v<Windows::Media::Devices::Core::IFrameFocusCapabilities> = L"Windows.Media.Devices.Core.IFrameFocusCapabilities";
template <> inline constexpr auto& name_v<Windows::Media::Devices::Core::IFrameFocusControl> = L"Windows.Media.Devices.Core.IFrameFocusControl";
template <> inline constexpr auto& name_v<Windows::Media::Devices::Core::IFrameIsoSpeedCapabilities> = L"Windows.Media.Devices.Core.IFrameIsoSpeedCapabilities";
template <> inline constexpr auto& name_v<Windows::Media::Devices::Core::IFrameIsoSpeedControl> = L"Windows.Media.Devices.Core.IFrameIsoSpeedControl";
template <> inline constexpr auto& name_v<Windows::Media::Devices::Core::IVariablePhotoSequenceController> = L"Windows.Media.Devices.Core.IVariablePhotoSequenceController";
template <> inline constexpr guid guid_v<Windows::Media::Devices::Core::ICameraIntrinsics>{ 0x0AA6ED32,0x6589,0x49DA,{ 0xAF,0xDE,0x59,0x42,0x70,0xCA,0x0A,0xAC } }; // 0AA6ED32-6589-49DA-AFDE-594270CA0AAC
template <> inline constexpr guid guid_v<Windows::Media::Devices::Core::ICameraIntrinsics2>{ 0x0CDAA447,0x0798,0x4B4D,{ 0x83,0x9F,0xC5,0xEC,0x41,0x4D,0xB2,0x7A } }; // 0CDAA447-0798-4B4D-839F-C5EC414DB27A
template <> inline constexpr guid guid_v<Windows::Media::Devices::Core::ICameraIntrinsicsFactory>{ 0xC0DDC486,0x2132,0x4A34,{ 0xA6,0x59,0x9B,0xFE,0x2A,0x05,0x57,0x12 } }; // C0DDC486-2132-4A34-A659-9BFE2A055712
template <> inline constexpr guid guid_v<Windows::Media::Devices::Core::IDepthCorrelatedCoordinateMapper>{ 0xF95D89FB,0x8AF0,0x4CB0,{ 0x92,0x6D,0x69,0x68,0x66,0xE5,0x04,0x6A } }; // F95D89FB-8AF0-4CB0-926D-696866E5046A
template <> inline constexpr guid guid_v<Windows::Media::Devices::Core::IFrameControlCapabilities>{ 0xA8FFAE60,0x4E9E,0x4377,{ 0xA7,0x89,0xE2,0x4C,0x4A,0xE7,0xE5,0x44 } }; // A8FFAE60-4E9E-4377-A789-E24C4AE7E544
template <> inline constexpr guid guid_v<Windows::Media::Devices::Core::IFrameControlCapabilities2>{ 0xCE9B0464,0x4730,0x440F,{ 0xBD,0x3E,0xEF,0xE8,0xA8,0xF2,0x30,0xA8 } }; // CE9B0464-4730-440F-BD3E-EFE8A8F230A8
template <> inline constexpr guid guid_v<Windows::Media::Devices::Core::IFrameController>{ 0xC16459D9,0xBAEF,0x4052,{ 0x91,0x77,0x48,0xAF,0xF2,0xAF,0x75,0x22 } }; // C16459D9-BAEF-4052-9177-48AFF2AF7522
template <> inline constexpr guid guid_v<Windows::Media::Devices::Core::IFrameController2>{ 0x00D3BC75,0xD87C,0x485B,{ 0x8A,0x09,0x5C,0x35,0x85,0x68,0xB4,0x27 } }; // 00D3BC75-D87C-485B-8A09-5C358568B427
template <> inline constexpr guid guid_v<Windows::Media::Devices::Core::IFrameExposureCapabilities>{ 0xBDBE9CE3,0x3985,0x4E72,{ 0x97,0xC2,0x05,0x90,0xD6,0x13,0x07,0xA1 } }; // BDBE9CE3-3985-4E72-97C2-0590D61307A1
template <> inline constexpr guid guid_v<Windows::Media::Devices::Core::IFrameExposureCompensationCapabilities>{ 0xB988A823,0x8065,0x41EE,{ 0xB0,0x4F,0x72,0x22,0x65,0x95,0x45,0x00 } }; // B988A823-8065-41EE-B04F-722265954500
template <> inline constexpr guid guid_v<Windows::Media::Devices::Core::IFrameExposureCompensationControl>{ 0xE95896C9,0xF7F9,0x48CA,{ 0x85,0x91,0xA2,0x65,0x31,0xCB,0x15,0x78 } }; // E95896C9-F7F9-48CA-8591-A26531CB1578
template <> inline constexpr guid guid_v<Windows::Media::Devices::Core::IFrameExposureControl>{ 0xB1605A61,0xFFAF,0x4752,{ 0xB6,0x21,0xF5,0xB6,0xF1,0x17,0xF4,0x32 } }; // B1605A61-FFAF-4752-B621-F5B6F117F432
template <> inline constexpr guid guid_v<Windows::Media::Devices::Core::IFrameFlashCapabilities>{ 0xBB9341A2,0x5EBE,0x4F62,{ 0x82,0x23,0x0E,0x2B,0x05,0xBF,0xBB,0xD0 } }; // BB9341A2-5EBE-4F62-8223-0E2B05BFBBD0
template <> inline constexpr guid guid_v<Windows::Media::Devices::Core::IFrameFlashControl>{ 0x75D5F6C7,0xBD45,0x4FAB,{ 0x93,0x75,0x45,0xAC,0x04,0xB3,0x32,0xC2 } }; // 75D5F6C7-BD45-4FAB-9375-45AC04B332C2
template <> inline constexpr guid guid_v<Windows::Media::Devices::Core::IFrameFocusCapabilities>{ 0x7B25CD58,0x01C0,0x4065,{ 0x9C,0x40,0xC1,0xA7,0x21,0x42,0x5C,0x1A } }; // 7B25CD58-01C0-4065-9C40-C1A721425C1A
template <> inline constexpr guid guid_v<Windows::Media::Devices::Core::IFrameFocusControl>{ 0x272DF1D0,0xD912,0x4214,{ 0xA6,0x7B,0xE3,0x8A,0x8D,0x48,0xD8,0xC6 } }; // 272DF1D0-D912-4214-A67B-E38A8D48D8C6
template <> inline constexpr guid guid_v<Windows::Media::Devices::Core::IFrameIsoSpeedCapabilities>{ 0x16BDFF61,0x6DF6,0x4AC9,{ 0xB9,0x2A,0x9F,0x6E,0xCD,0x1A,0xD2,0xFA } }; // 16BDFF61-6DF6-4AC9-B92A-9F6ECD1AD2FA
template <> inline constexpr guid guid_v<Windows::Media::Devices::Core::IFrameIsoSpeedControl>{ 0x1A03EFED,0x786A,0x4C75,{ 0xA5,0x57,0x7A,0xB9,0xA8,0x5F,0x58,0x8C } }; // 1A03EFED-786A-4C75-A557-7AB9A85F588C
template <> inline constexpr guid guid_v<Windows::Media::Devices::Core::IVariablePhotoSequenceController>{ 0x7FBFF880,0xED8C,0x43FD,{ 0xA7,0xC3,0xB3,0x58,0x09,0xE4,0x22,0x9A } }; // 7FBFF880-ED8C-43FD-A7C3-B35809E4229A
template <> struct default_interface<Windows::Media::Devices::Core::CameraIntrinsics>{ using type = Windows::Media::Devices::Core::ICameraIntrinsics; };
template <> struct default_interface<Windows::Media::Devices::Core::DepthCorrelatedCoordinateMapper>{ using type = Windows::Media::Devices::Core::IDepthCorrelatedCoordinateMapper; };
template <> struct default_interface<Windows::Media::Devices::Core::FrameControlCapabilities>{ using type = Windows::Media::Devices::Core::IFrameControlCapabilities; };
template <> struct default_interface<Windows::Media::Devices::Core::FrameController>{ using type = Windows::Media::Devices::Core::IFrameController; };
template <> struct default_interface<Windows::Media::Devices::Core::FrameExposureCapabilities>{ using type = Windows::Media::Devices::Core::IFrameExposureCapabilities; };
template <> struct default_interface<Windows::Media::Devices::Core::FrameExposureCompensationCapabilities>{ using type = Windows::Media::Devices::Core::IFrameExposureCompensationCapabilities; };
template <> struct default_interface<Windows::Media::Devices::Core::FrameExposureCompensationControl>{ using type = Windows::Media::Devices::Core::IFrameExposureCompensationControl; };
template <> struct default_interface<Windows::Media::Devices::Core::FrameExposureControl>{ using type = Windows::Media::Devices::Core::IFrameExposureControl; };
template <> struct default_interface<Windows::Media::Devices::Core::FrameFlashCapabilities>{ using type = Windows::Media::Devices::Core::IFrameFlashCapabilities; };
template <> struct default_interface<Windows::Media::Devices::Core::FrameFlashControl>{ using type = Windows::Media::Devices::Core::IFrameFlashControl; };
template <> struct default_interface<Windows::Media::Devices::Core::FrameFocusCapabilities>{ using type = Windows::Media::Devices::Core::IFrameFocusCapabilities; };
template <> struct default_interface<Windows::Media::Devices::Core::FrameFocusControl>{ using type = Windows::Media::Devices::Core::IFrameFocusControl; };
template <> struct default_interface<Windows::Media::Devices::Core::FrameIsoSpeedCapabilities>{ using type = Windows::Media::Devices::Core::IFrameIsoSpeedCapabilities; };
template <> struct default_interface<Windows::Media::Devices::Core::FrameIsoSpeedControl>{ using type = Windows::Media::Devices::Core::IFrameIsoSpeedControl; };
template <> struct default_interface<Windows::Media::Devices::Core::VariablePhotoSequenceController>{ using type = Windows::Media::Devices::Core::IVariablePhotoSequenceController; };
template <> struct abi<Windows::Media::Devices::Core::ICameraIntrinsics>
{
struct __declspec(novtable) type : inspectable_abi
{
virtual int32_t __stdcall get_FocalLength(Windows::Foundation::Numerics::float2*) noexcept = 0;
virtual int32_t __stdcall get_PrincipalPoint(Windows::Foundation::Numerics::float2*) noexcept = 0;
virtual int32_t __stdcall get_RadialDistortion(Windows::Foundation::Numerics::float3*) noexcept = 0;
virtual int32_t __stdcall get_TangentialDistortion(Windows::Foundation::Numerics::float2*) noexcept = 0;
virtual int32_t __stdcall get_ImageWidth(uint32_t*) noexcept = 0;
virtual int32_t __stdcall get_ImageHeight(uint32_t*) noexcept = 0;
virtual int32_t __stdcall ProjectOntoFrame(Windows::Foundation::Numerics::float3, Windows::Foundation::Point*) noexcept = 0;
virtual int32_t __stdcall UnprojectAtUnitDepth(Windows::Foundation::Point, Windows::Foundation::Numerics::float2*) noexcept = 0;
virtual int32_t __stdcall ProjectManyOntoFrame(uint32_t, Windows::Foundation::Numerics::float3*, uint32_t, Windows::Foundation::Point*) noexcept = 0;
virtual int32_t __stdcall UnprojectPixelsAtUnitDepth(uint32_t, Windows::Foundation::Point*, uint32_t, Windows::Foundation::Numerics::float2*) noexcept = 0;
};
};
template <> struct abi<Windows::Media::Devices::Core::ICameraIntrinsics2>
{
struct __declspec(novtable) type : inspectable_abi
{
virtual int32_t __stdcall get_UndistortedProjectionTransform(Windows::Foundation::Numerics::float4x4*) noexcept = 0;
virtual int32_t __stdcall DistortPoint(Windows::Foundation::Point, Windows::Foundation::Point*) noexcept = 0;
virtual int32_t __stdcall DistortPoints(uint32_t, Windows::Foundation::Point*, uint32_t, Windows::Foundation::Point*) noexcept = 0;
virtual int32_t __stdcall UndistortPoint(Windows::Foundation::Point, Windows::Foundation::Point*) noexcept = 0;
virtual int32_t __stdcall UndistortPoints(uint32_t, Windows::Foundation::Point*, uint32_t, Windows::Foundation::Point*) noexcept = 0;
};
};
template <> struct abi<Windows::Media::Devices::Core::ICameraIntrinsicsFactory>
{
struct __declspec(novtable) type : inspectable_abi
{
virtual int32_t __stdcall Create(Windows::Foundation::Numerics::float2, Windows::Foundation::Numerics::float2, Windows::Foundation::Numerics::float3, Windows::Foundation::Numerics::float2, uint32_t, uint32_t, void**) noexcept = 0;
};
};
template <> struct abi<Windows::Media::Devices::Core::IDepthCorrelatedCoordinateMapper>
{
struct __declspec(novtable) type : inspectable_abi
{
virtual int32_t __stdcall UnprojectPoint(Windows::Foundation::Point, void*, Windows::Foundation::Numerics::float3*) noexcept = 0;
virtual int32_t __stdcall UnprojectPoints(uint32_t, Windows::Foundation::Point*, void*, uint32_t, Windows::Foundation::Numerics::float3*) noexcept = 0;
virtual int32_t __stdcall MapPoint(Windows::Foundation::Point, void*, void*, Windows::Foundation::Point*) noexcept = 0;
virtual int32_t __stdcall MapPoints(uint32_t, Windows::Foundation::Point*, void*, void*, uint32_t, Windows::Foundation::Point*) noexcept = 0;
};
};
template <> struct abi<Windows::Media::Devices::Core::IFrameControlCapabilities>
{
struct __declspec(novtable) type : inspectable_abi
{
virtual int32_t __stdcall get_Exposure(void**) noexcept = 0;
virtual int32_t __stdcall get_ExposureCompensation(void**) noexcept = 0;
virtual int32_t __stdcall get_IsoSpeed(void**) noexcept = 0;
virtual int32_t __stdcall get_Focus(void**) noexcept = 0;
virtual int32_t __stdcall get_PhotoConfirmationSupported(bool*) noexcept = 0;
};
};
template <> struct abi<Windows::Media::Devices::Core::IFrameControlCapabilities2>
{
struct __declspec(novtable) type : inspectable_abi
{
virtual int32_t __stdcall get_Flash(void**) noexcept = 0;
};
};
template <> struct abi<Windows::Media::Devices::Core::IFrameController>
{
struct __declspec(novtable) type : inspectable_abi
{
virtual int32_t __stdcall get_ExposureControl(void**) noexcept = 0;
virtual int32_t __stdcall get_ExposureCompensationControl(void**) noexcept = 0;
virtual int32_t __stdcall get_IsoSpeedControl(void**) noexcept = 0;
virtual int32_t __stdcall get_FocusControl(void**) noexcept = 0;
virtual int32_t __stdcall get_PhotoConfirmationEnabled(void**) noexcept = 0;
virtual int32_t __stdcall put_PhotoConfirmationEnabled(void*) noexcept = 0;
};
};
template <> struct abi<Windows::Media::Devices::Core::IFrameController2>
{
struct __declspec(novtable) type : inspectable_abi
{
virtual int32_t __stdcall get_FlashControl(void**) noexcept = 0;
};
};
template <> struct abi<Windows::Media::Devices::Core::IFrameExposureCapabilities>
{
struct __declspec(novtable) type : inspectable_abi
{
virtual int32_t __stdcall get_Supported(bool*) noexcept = 0;
virtual int32_t __stdcall get_Min(int64_t*) noexcept = 0;
virtual int32_t __stdcall get_Max(int64_t*) noexcept = 0;
virtual int32_t __stdcall get_Step(int64_t*) noexcept = 0;
};
};
template <> struct abi<Windows::Media::Devices::Core::IFrameExposureCompensationCapabilities>
{
struct __declspec(novtable) type : inspectable_abi
{
virtual int32_t __stdcall get_Supported(bool*) noexcept = 0;
virtual int32_t __stdcall get_Min(float*) noexcept = 0;
virtual int32_t __stdcall get_Max(float*) noexcept = 0;
virtual int32_t __stdcall get_Step(float*) noexcept = 0;
};
};
template <> struct abi<Windows::Media::Devices::Core::IFrameExposureCompensationControl>
{
struct __declspec(novtable) type : inspectable_abi
{
virtual int32_t __stdcall get_Value(void**) noexcept = 0;
virtual int32_t __stdcall put_Value(void*) noexcept = 0;
};
};
template <> struct abi<Windows::Media::Devices::Core::IFrameExposureControl>
{
struct __declspec(novtable) type : inspectable_abi
{
virtual int32_t __stdcall get_Auto(bool*) noexcept = 0;
virtual int32_t __stdcall put_Auto(bool) noexcept = 0;
virtual int32_t __stdcall get_Value(void**) noexcept = 0;
virtual int32_t __stdcall put_Value(void*) noexcept = 0;
};
};
template <> struct abi<Windows::Media::Devices::Core::IFrameFlashCapabilities>
{
struct __declspec(novtable) type : inspectable_abi
{
virtual int32_t __stdcall get_Supported(bool*) noexcept = 0;
virtual int32_t __stdcall get_RedEyeReductionSupported(bool*) noexcept = 0;
virtual int32_t __stdcall get_PowerSupported(bool*) noexcept = 0;
};
};
template <> struct abi<Windows::Media::Devices::Core::IFrameFlashControl>
{
struct __declspec(novtable) type : inspectable_abi
{
virtual int32_t __stdcall get_Mode(int32_t*) noexcept = 0;
virtual int32_t __stdcall put_Mode(int32_t) noexcept = 0;
virtual int32_t __stdcall get_Auto(bool*) noexcept = 0;
virtual int32_t __stdcall put_Auto(bool) noexcept = 0;
virtual int32_t __stdcall get_RedEyeReduction(bool*) noexcept = 0;
virtual int32_t __stdcall put_RedEyeReduction(bool) noexcept = 0;
virtual int32_t __stdcall get_PowerPercent(float*) noexcept = 0;
virtual int32_t __stdcall put_PowerPercent(float) noexcept = 0;
};
};
template <> struct abi<Windows::Media::Devices::Core::IFrameFocusCapabilities>
{
struct __declspec(novtable) type : inspectable_abi
{
virtual int32_t __stdcall get_Supported(bool*) noexcept = 0;
virtual int32_t __stdcall get_Min(uint32_t*) noexcept = 0;
virtual int32_t __stdcall get_Max(uint32_t*) noexcept = 0;
virtual int32_t __stdcall get_Step(uint32_t*) noexcept = 0;
};
};
template <> struct abi<Windows::Media::Devices::Core::IFrameFocusControl>
{
struct __declspec(novtable) type : inspectable_abi
{
virtual int32_t __stdcall get_Value(void**) noexcept = 0;
virtual int32_t __stdcall put_Value(void*) noexcept = 0;
};
};
template <> struct abi<Windows::Media::Devices::Core::IFrameIsoSpeedCapabilities>
{
struct __declspec(novtable) type : inspectable_abi
{
virtual int32_t __stdcall get_Supported(bool*) noexcept = 0;
virtual int32_t __stdcall get_Min(uint32_t*) noexcept = 0;
virtual int32_t __stdcall get_Max(uint32_t*) noexcept = 0;
virtual int32_t __stdcall get_Step(uint32_t*) noexcept = 0;
};
};
template <> struct abi<Windows::Media::Devices::Core::IFrameIsoSpeedControl>
{
struct __declspec(novtable) type : inspectable_abi
{
virtual int32_t __stdcall get_Auto(bool*) noexcept = 0;
virtual int32_t __stdcall put_Auto(bool) noexcept = 0;
virtual int32_t __stdcall get_Value(void**) noexcept = 0;
virtual int32_t __stdcall put_Value(void*) noexcept = 0;
};
};
template <> struct abi<Windows::Media::Devices::Core::IVariablePhotoSequenceController>
{
struct __declspec(novtable) type : inspectable_abi
{
virtual int32_t __stdcall get_Supported(bool*) noexcept = 0;
virtual int32_t __stdcall get_MaxPhotosPerSecond(float*) noexcept = 0;
virtual int32_t __stdcall get_PhotosPerSecondLimit(float*) noexcept = 0;
virtual int32_t __stdcall put_PhotosPerSecondLimit(float) noexcept = 0;
virtual int32_t __stdcall GetHighestConcurrentFrameRate(void*, void**) noexcept = 0;
virtual int32_t __stdcall GetCurrentFrameRate(void**) noexcept = 0;
virtual int32_t __stdcall get_FrameCapabilities(void**) noexcept = 0;
virtual int32_t __stdcall get_DesiredFrameControllers(void**) noexcept = 0;
};
};
template <typename D>
struct consume_Windows_Media_Devices_Core_ICameraIntrinsics
{
[[nodiscard]] WINRT_IMPL_AUTO(Windows::Foundation::Numerics::float2) FocalLength() const;
[[nodiscard]] WINRT_IMPL_AUTO(Windows::Foundation::Numerics::float2) PrincipalPoint() const;
[[nodiscard]] WINRT_IMPL_AUTO(Windows::Foundation::Numerics::float3) RadialDistortion() const;
[[nodiscard]] WINRT_IMPL_AUTO(Windows::Foundation::Numerics::float2) TangentialDistortion() const;
[[nodiscard]] WINRT_IMPL_AUTO(uint32_t) ImageWidth() const;
[[nodiscard]] WINRT_IMPL_AUTO(uint32_t) ImageHeight() const;
WINRT_IMPL_AUTO(Windows::Foundation::Point) ProjectOntoFrame(Windows::Foundation::Numerics::float3 const& coordinate) const;
WINRT_IMPL_AUTO(Windows::Foundation::Numerics::float2) UnprojectAtUnitDepth(Windows::Foundation::Point const& pixelCoordinate) const;
WINRT_IMPL_AUTO(void) ProjectManyOntoFrame(array_view<Windows::Foundation::Numerics::float3 const> coordinates, array_view<Windows::Foundation::Point> results) const;
WINRT_IMPL_AUTO(void) UnprojectPixelsAtUnitDepth(array_view<Windows::Foundation::Point const> pixelCoordinates, array_view<Windows::Foundation::Numerics::float2> results) const;
};
template <> struct consume<Windows::Media::Devices::Core::ICameraIntrinsics>
{
template <typename D> using type = consume_Windows_Media_Devices_Core_ICameraIntrinsics<D>;
};
template <typename D>
struct consume_Windows_Media_Devices_Core_ICameraIntrinsics2
{
[[nodiscard]] WINRT_IMPL_AUTO(Windows::Foundation::Numerics::float4x4) UndistortedProjectionTransform() const;
WINRT_IMPL_AUTO(Windows::Foundation::Point) DistortPoint(Windows::Foundation::Point const& input) const;
WINRT_IMPL_AUTO(void) DistortPoints(array_view<Windows::Foundation::Point const> inputs, array_view<Windows::Foundation::Point> results) const;
WINRT_IMPL_AUTO(Windows::Foundation::Point) UndistortPoint(Windows::Foundation::Point const& input) const;
WINRT_IMPL_AUTO(void) UndistortPoints(array_view<Windows::Foundation::Point const> inputs, array_view<Windows::Foundation::Point> results) const;
};
template <> struct consume<Windows::Media::Devices::Core::ICameraIntrinsics2>
{
template <typename D> using type = consume_Windows_Media_Devices_Core_ICameraIntrinsics2<D>;
};
template <typename D>
struct consume_Windows_Media_Devices_Core_ICameraIntrinsicsFactory
{
WINRT_IMPL_AUTO(Windows::Media::Devices::Core::CameraIntrinsics) Create(Windows::Foundation::Numerics::float2 const& focalLength, Windows::Foundation::Numerics::float2 const& principalPoint, Windows::Foundation::Numerics::float3 const& radialDistortion, Windows::Foundation::Numerics::float2 const& tangentialDistortion, uint32_t imageWidth, uint32_t imageHeight) const;
};
template <> struct consume<Windows::Media::Devices::Core::ICameraIntrinsicsFactory>
{
template <typename D> using type = consume_Windows_Media_Devices_Core_ICameraIntrinsicsFactory<D>;
};
template <typename D>
struct consume_Windows_Media_Devices_Core_IDepthCorrelatedCoordinateMapper
{
WINRT_IMPL_AUTO(Windows::Foundation::Numerics::float3) UnprojectPoint(Windows::Foundation::Point const& sourcePoint, Windows::Perception::Spatial::SpatialCoordinateSystem const& targetCoordinateSystem) const;
WINRT_IMPL_AUTO(void) UnprojectPoints(array_view<Windows::Foundation::Point const> sourcePoints, Windows::Perception::Spatial::SpatialCoordinateSystem const& targetCoordinateSystem, array_view<Windows::Foundation::Numerics::float3> results) const;
WINRT_IMPL_AUTO(Windows::Foundation::Point) MapPoint(Windows::Foundation::Point const& sourcePoint, Windows::Perception::Spatial::SpatialCoordinateSystem const& targetCoordinateSystem, Windows::Media::Devices::Core::CameraIntrinsics const& targetCameraIntrinsics) const;
WINRT_IMPL_AUTO(void) MapPoints(array_view<Windows::Foundation::Point const> sourcePoints, Windows::Perception::Spatial::SpatialCoordinateSystem const& targetCoordinateSystem, Windows::Media::Devices::Core::CameraIntrinsics const& targetCameraIntrinsics, array_view<Windows::Foundation::Point> results) const;
};
template <> struct consume<Windows::Media::Devices::Core::IDepthCorrelatedCoordinateMapper>
{
template <typename D> using type = consume_Windows_Media_Devices_Core_IDepthCorrelatedCoordinateMapper<D>;
};
template <typename D>
struct consume_Windows_Media_Devices_Core_IFrameControlCapabilities
{
[[nodiscard]] WINRT_IMPL_AUTO(Windows::Media::Devices::Core::FrameExposureCapabilities) Exposure() const;
[[nodiscard]] WINRT_IMPL_AUTO(Windows::Media::Devices::Core::FrameExposureCompensationCapabilities) ExposureCompensation() const;
[[nodiscard]] WINRT_IMPL_AUTO(Windows::Media::Devices::Core::FrameIsoSpeedCapabilities) IsoSpeed() const;
[[nodiscard]] WINRT_IMPL_AUTO(Windows::Media::Devices::Core::FrameFocusCapabilities) Focus() const;
[[nodiscard]] WINRT_IMPL_AUTO(bool) PhotoConfirmationSupported() const;
};
template <> struct consume<Windows::Media::Devices::Core::IFrameControlCapabilities>
{
template <typename D> using type = consume_Windows_Media_Devices_Core_IFrameControlCapabilities<D>;
};
template <typename D>
struct consume_Windows_Media_Devices_Core_IFrameControlCapabilities2
{
[[nodiscard]] WINRT_IMPL_AUTO(Windows::Media::Devices::Core::FrameFlashCapabilities) Flash() const;
};
template <> struct consume<Windows::Media::Devices::Core::IFrameControlCapabilities2>
{
template <typename D> using type = consume_Windows_Media_Devices_Core_IFrameControlCapabilities2<D>;
};
template <typename D>
struct consume_Windows_Media_Devices_Core_IFrameController
{
[[nodiscard]] WINRT_IMPL_AUTO(Windows::Media::Devices::Core::FrameExposureControl) ExposureControl() const;
[[nodiscard]] WINRT_IMPL_AUTO(Windows::Media::Devices::Core::FrameExposureCompensationControl) ExposureCompensationControl() const;
[[nodiscard]] WINRT_IMPL_AUTO(Windows::Media::Devices::Core::FrameIsoSpeedControl) IsoSpeedControl() const;
[[nodiscard]] WINRT_IMPL_AUTO(Windows::Media::Devices::Core::FrameFocusControl) FocusControl() const;
[[nodiscard]] WINRT_IMPL_AUTO(Windows::Foundation::IReference<bool>) PhotoConfirmationEnabled() const;
WINRT_IMPL_AUTO(void) PhotoConfirmationEnabled(Windows::Foundation::IReference<bool> const& value) const;
};
template <> struct consume<Windows::Media::Devices::Core::IFrameController>
{
template <typename D> using type = consume_Windows_Media_Devices_Core_IFrameController<D>;
};
template <typename D>
struct consume_Windows_Media_Devices_Core_IFrameController2
{
[[nodiscard]] WINRT_IMPL_AUTO(Windows::Media::Devices::Core::FrameFlashControl) FlashControl() const;
};
template <> struct consume<Windows::Media::Devices::Core::IFrameController2>
{
template <typename D> using type = consume_Windows_Media_Devices_Core_IFrameController2<D>;
};
template <typename D>
struct consume_Windows_Media_Devices_Core_IFrameExposureCapabilities
{
[[nodiscard]] WINRT_IMPL_AUTO(bool) Supported() const;
[[nodiscard]] WINRT_IMPL_AUTO(Windows::Foundation::TimeSpan) Min() const;
[[nodiscard]] WINRT_IMPL_AUTO(Windows::Foundation::TimeSpan) Max() const;
[[nodiscard]] WINRT_IMPL_AUTO(Windows::Foundation::TimeSpan) Step() const;
};
template <> struct consume<Windows::Media::Devices::Core::IFrameExposureCapabilities>
{
template <typename D> using type = consume_Windows_Media_Devices_Core_IFrameExposureCapabilities<D>;
};
template <typename D>
struct consume_Windows_Media_Devices_Core_IFrameExposureCompensationCapabilities
{
[[nodiscard]] WINRT_IMPL_AUTO(bool) Supported() const;
[[nodiscard]] WINRT_IMPL_AUTO(float) Min() const;
[[nodiscard]] WINRT_IMPL_AUTO(float) Max() const;
[[nodiscard]] WINRT_IMPL_AUTO(float) Step() const;
};
template <> struct consume<Windows::Media::Devices::Core::IFrameExposureCompensationCapabilities>
{
template <typename D> using type = consume_Windows_Media_Devices_Core_IFrameExposureCompensationCapabilities<D>;
};
template <typename D>
struct consume_Windows_Media_Devices_Core_IFrameExposureCompensationControl
{
[[nodiscard]] WINRT_IMPL_AUTO(Windows::Foundation::IReference<float>) Value() const;
WINRT_IMPL_AUTO(void) Value(Windows::Foundation::IReference<float> const& value) const;
};
template <> struct consume<Windows::Media::Devices::Core::IFrameExposureCompensationControl>
{
template <typename D> using type = consume_Windows_Media_Devices_Core_IFrameExposureCompensationControl<D>;
};
template <typename D>
struct consume_Windows_Media_Devices_Core_IFrameExposureControl
{
[[nodiscard]] WINRT_IMPL_AUTO(bool) Auto() const;
WINRT_IMPL_AUTO(void) Auto(bool value) const;
[[nodiscard]] WINRT_IMPL_AUTO(Windows::Foundation::IReference<Windows::Foundation::TimeSpan>) Value() const;
WINRT_IMPL_AUTO(void) Value(Windows::Foundation::IReference<Windows::Foundation::TimeSpan> const& value) const;
};
template <> struct consume<Windows::Media::Devices::Core::IFrameExposureControl>
{
template <typename D> using type = consume_Windows_Media_Devices_Core_IFrameExposureControl<D>;
};
template <typename D>
struct consume_Windows_Media_Devices_Core_IFrameFlashCapabilities
{
[[nodiscard]] WINRT_IMPL_AUTO(bool) Supported() const;
[[nodiscard]] WINRT_IMPL_AUTO(bool) RedEyeReductionSupported() const;
[[nodiscard]] WINRT_IMPL_AUTO(bool) PowerSupported() const;
};
template <> struct consume<Windows::Media::Devices::Core::IFrameFlashCapabilities>
{
template <typename D> using type = consume_Windows_Media_Devices_Core_IFrameFlashCapabilities<D>;
};
template <typename D>
struct consume_Windows_Media_Devices_Core_IFrameFlashControl
{
[[nodiscard]] WINRT_IMPL_AUTO(Windows::Media::Devices::Core::FrameFlashMode) Mode() const;
WINRT_IMPL_AUTO(void) Mode(Windows::Media::Devices::Core::FrameFlashMode const& value) const;
[[nodiscard]] WINRT_IMPL_AUTO(bool) Auto() const;
WINRT_IMPL_AUTO(void) Auto(bool value) const;
[[nodiscard]] WINRT_IMPL_AUTO(bool) RedEyeReduction() const;
WINRT_IMPL_AUTO(void) RedEyeReduction(bool value) const;
[[nodiscard]] WINRT_IMPL_AUTO(float) PowerPercent() const;
WINRT_IMPL_AUTO(void) PowerPercent(float value) const;
};
template <> struct consume<Windows::Media::Devices::Core::IFrameFlashControl>
{
template <typename D> using type = consume_Windows_Media_Devices_Core_IFrameFlashControl<D>;
};
template <typename D>
struct consume_Windows_Media_Devices_Core_IFrameFocusCapabilities
{
[[nodiscard]] WINRT_IMPL_AUTO(bool) Supported() const;
[[nodiscard]] WINRT_IMPL_AUTO(uint32_t) Min() const;
[[nodiscard]] WINRT_IMPL_AUTO(uint32_t) Max() const;
[[nodiscard]] WINRT_IMPL_AUTO(uint32_t) Step() const;
};
template <> struct consume<Windows::Media::Devices::Core::IFrameFocusCapabilities>
{
template <typename D> using type = consume_Windows_Media_Devices_Core_IFrameFocusCapabilities<D>;
};
template <typename D>
struct consume_Windows_Media_Devices_Core_IFrameFocusControl
{
[[nodiscard]] WINRT_IMPL_AUTO(Windows::Foundation::IReference<uint32_t>) Value() const;
WINRT_IMPL_AUTO(void) Value(Windows::Foundation::IReference<uint32_t> const& value) const;
};
template <> struct consume<Windows::Media::Devices::Core::IFrameFocusControl>
{
template <typename D> using type = consume_Windows_Media_Devices_Core_IFrameFocusControl<D>;
};
template <typename D>
struct consume_Windows_Media_Devices_Core_IFrameIsoSpeedCapabilities
{
[[nodiscard]] WINRT_IMPL_AUTO(bool) Supported() const;
[[nodiscard]] WINRT_IMPL_AUTO(uint32_t) Min() const;
[[nodiscard]] WINRT_IMPL_AUTO(uint32_t) Max() const;
[[nodiscard]] WINRT_IMPL_AUTO(uint32_t) Step() const;
};
template <> struct consume<Windows::Media::Devices::Core::IFrameIsoSpeedCapabilities>
{
template <typename D> using type = consume_Windows_Media_Devices_Core_IFrameIsoSpeedCapabilities<D>;
};
template <typename D>
struct consume_Windows_Media_Devices_Core_IFrameIsoSpeedControl
{
[[nodiscard]] WINRT_IMPL_AUTO(bool) Auto() const;
WINRT_IMPL_AUTO(void) Auto(bool value) const;
[[nodiscard]] WINRT_IMPL_AUTO(Windows::Foundation::IReference<uint32_t>) Value() const;
WINRT_IMPL_AUTO(void) Value(Windows::Foundation::IReference<uint32_t> const& value) const;
};
template <> struct consume<Windows::Media::Devices::Core::IFrameIsoSpeedControl>
{
template <typename D> using type = consume_Windows_Media_Devices_Core_IFrameIsoSpeedControl<D>;
};
template <typename D>
struct consume_Windows_Media_Devices_Core_IVariablePhotoSequenceController
{
[[nodiscard]] WINRT_IMPL_AUTO(bool) Supported() const;
[[nodiscard]] WINRT_IMPL_AUTO(float) MaxPhotosPerSecond() const;
[[nodiscard]] WINRT_IMPL_AUTO(float) PhotosPerSecondLimit() const;
WINRT_IMPL_AUTO(void) PhotosPerSecondLimit(float value) const;
WINRT_IMPL_AUTO(Windows::Media::MediaProperties::MediaRatio) GetHighestConcurrentFrameRate(Windows::Media::MediaProperties::IMediaEncodingProperties const& captureProperties) const;
WINRT_IMPL_AUTO(Windows::Media::MediaProperties::MediaRatio) GetCurrentFrameRate() const;
[[nodiscard]] WINRT_IMPL_AUTO(Windows::Media::Devices::Core::FrameControlCapabilities) FrameCapabilities() const;
[[nodiscard]] WINRT_IMPL_AUTO(Windows::Foundation::Collections::IVector<Windows::Media::Devices::Core::FrameController>) DesiredFrameControllers() const;
};
template <> struct consume<Windows::Media::Devices::Core::IVariablePhotoSequenceController>
{
template <typename D> using type = consume_Windows_Media_Devices_Core_IVariablePhotoSequenceController<D>;
};
}
#endif
|
"""
Prime Developer Trial
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: v1
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from fds.sdk.QuotesAPIforDigitalPortals.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
OpenApiModel
)
from fds.sdk.QuotesAPIforDigitalPortals.exceptions import ApiAttributeError
class InlineResponse20062DataDistributionChannel(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
('execution_only',): {
'RETAIL': "retail",
'PROFESSIONAL': "professional",
'BOTH': "both",
'NONE': "none",
},
('execution_appropriateness',): {
'RETAIL': "retail",
'PROFESSIONAL': "professional",
'BOTH': "both",
'NONE': "none",
},
('investment_advice',): {
'RETAIL': "retail",
'PROFESSIONAL': "professional",
'BOTH': "both",
'NONE': "none",
},
('portfolio_management',): {
'RETAIL': "retail",
'PROFESSIONAL': "professional",
'BOTH': "both",
'NONE': "none",
},
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
'execution_only': (str,), # noqa: E501
'execution_appropriateness': (str,), # noqa: E501
'investment_advice': (str,), # noqa: E501
'portfolio_management': (str,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'execution_only': 'executionOnly', # noqa: E501
'execution_appropriateness': 'executionAppropriateness', # noqa: E501
'investment_advice': 'investmentAdvice', # noqa: E501
'portfolio_management': 'portfolioManagement', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs): # noqa: E501
"""InlineResponse20062DataDistributionChannel - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
execution_only (str): Indicates the client type(s) for which the investment product is suitable through the distribution channel \"execution only / reception and transmission of orders (RTO) without appropriateness assessment\".. [optional] # noqa: E501
execution_appropriateness (str): Indicates the client type(s) for which the investment product is suitable through the distribution channel \"execution only / reception and transmission of orders (RTO) with appropriateness assessment\".. [optional] # noqa: E501
investment_advice (str): Indicates the client type(s) for which the investment product is suitable through the distribution channel \"investment advice with suitability assessment\".. [optional] # noqa: E501
portfolio_management (str): Indicates the client type for which the investment product is suitable through the distribution channel \"discretionary/portfolio management with suitability assessment\".. [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""InlineResponse20062DataDistributionChannel - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
execution_only (str): Indicates the client type(s) for which the investment product is suitable through the distribution channel \"execution only / reception and transmission of orders (RTO) without appropriateness assessment\".. [optional] # noqa: E501
execution_appropriateness (str): Indicates the client type(s) for which the investment product is suitable through the distribution channel \"execution only / reception and transmission of orders (RTO) with appropriateness assessment\".. [optional] # noqa: E501
investment_advice (str): Indicates the client type(s) for which the investment product is suitable through the distribution channel \"investment advice with suitability assessment\".. [optional] # noqa: E501
portfolio_management (str): Indicates the client type for which the investment product is suitable through the distribution channel \"discretionary/portfolio management with suitability assessment\".. [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.")
|
/*
* WARNING: do not edit!
* Generated by Makefile from ../../openssl-src/include/openssl/opensslconf.h.in
*
* Copyright 2016-2018 The OpenSSL Project Authors. All Rights Reserved.
*
* Licensed under the OpenSSL license (the "License"). You may not use
* this file except in compliance with the License. You can obtain a copy
* in the file LICENSE in the source distribution or at
* https://www.openssl.org/source/license.html
*/
#include <openssl/opensslv.h>
#ifdef __cplusplus
extern "C" {
#endif
#ifdef OPENSSL_ALGORITHM_DEFINES
# error OPENSSL_ALGORITHM_DEFINES no longer supported
#endif
/*
* OpenSSL was configured with the following options:
*/
#ifndef OPENSSL_SYS_iOS
# define OPENSSL_SYS_iOS 1
#endif
#ifndef OPENSSL_NO_MD2
# define OPENSSL_NO_MD2
#endif
#ifndef OPENSSL_NO_RC5
# define OPENSSL_NO_RC5
#endif
#ifndef OPENSSL_THREADS
# define OPENSSL_THREADS
#endif
#ifndef OPENSSL_RAND_SEED_OS
# define OPENSSL_RAND_SEED_OS
#endif
#ifndef OPENSSL_NO_AFALGENG
# define OPENSSL_NO_AFALGENG
#endif
#ifndef OPENSSL_NO_ASAN
# define OPENSSL_NO_ASAN
#endif
#ifndef OPENSSL_NO_ASM
# define OPENSSL_NO_ASM
#endif
#ifndef OPENSSL_NO_CRYPTO_MDEBUG
# define OPENSSL_NO_CRYPTO_MDEBUG
#endif
#ifndef OPENSSL_NO_CRYPTO_MDEBUG_BACKTRACE
# define OPENSSL_NO_CRYPTO_MDEBUG_BACKTRACE
#endif
#ifndef OPENSSL_NO_DEVCRYPTOENG
# define OPENSSL_NO_DEVCRYPTOENG
#endif
#ifndef OPENSSL_NO_EC_NISTP_64_GCC_128
# define OPENSSL_NO_EC_NISTP_64_GCC_128
#endif
#ifndef OPENSSL_NO_EGD
# define OPENSSL_NO_EGD
#endif
#ifndef OPENSSL_NO_ENGINE
# define OPENSSL_NO_ENGINE
#endif
#ifndef OPENSSL_NO_EXTERNAL_TESTS
# define OPENSSL_NO_EXTERNAL_TESTS
#endif
#ifndef OPENSSL_NO_FUZZ_AFL
# define OPENSSL_NO_FUZZ_AFL
#endif
#ifndef OPENSSL_NO_FUZZ_LIBFUZZER
# define OPENSSL_NO_FUZZ_LIBFUZZER
#endif
#ifndef OPENSSL_NO_HEARTBEATS
# define OPENSSL_NO_HEARTBEATS
#endif
#ifndef OPENSSL_NO_HW
# define OPENSSL_NO_HW
#endif
#ifndef OPENSSL_NO_MSAN
# define OPENSSL_NO_MSAN
#endif
#ifndef OPENSSL_NO_SCTP
# define OPENSSL_NO_SCTP
#endif
#ifndef OPENSSL_NO_SSL_TRACE
# define OPENSSL_NO_SSL_TRACE
#endif
#ifndef OPENSSL_NO_SSL3
# define OPENSSL_NO_SSL3
#endif
#ifndef OPENSSL_NO_SSL3_METHOD
# define OPENSSL_NO_SSL3_METHOD
#endif
#ifndef OPENSSL_NO_UBSAN
# define OPENSSL_NO_UBSAN
#endif
#ifndef OPENSSL_NO_UNIT_TEST
# define OPENSSL_NO_UNIT_TEST
#endif
#ifndef OPENSSL_NO_WEAK_SSL_CIPHERS
# define OPENSSL_NO_WEAK_SSL_CIPHERS
#endif
#ifndef OPENSSL_NO_DYNAMIC_ENGINE
# define OPENSSL_NO_DYNAMIC_ENGINE
#endif
#ifndef OPENSSL_NO_AFALGENG
# define OPENSSL_NO_AFALGENG
#endif
/*
 * Sometimes OPENSSL_NO_xxx ends up with an empty file and some compilers
* don't like that. This will hopefully silence them.
*/
#define NON_EMPTY_TRANSLATION_UNIT static void *dummy = &dummy;
/*
* Applications should use -DOPENSSL_API_COMPAT=<version> to suppress the
* declarations of functions deprecated in or before <version>. Otherwise, they
* still won't see them if the library has been built to disable deprecated
* functions.
*/
#ifndef DECLARE_DEPRECATED
# define DECLARE_DEPRECATED(f) f;
# ifdef __GNUC__
# if __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 0)
# undef DECLARE_DEPRECATED
# define DECLARE_DEPRECATED(f) f __attribute__ ((deprecated));
# endif
# endif
#endif
#ifndef OPENSSL_FILE
# ifdef OPENSSL_NO_FILENAMES
# define OPENSSL_FILE ""
# define OPENSSL_LINE 0
# else
# define OPENSSL_FILE __FILE__
# define OPENSSL_LINE __LINE__
# endif
#endif
#ifndef OPENSSL_MIN_API
# define OPENSSL_MIN_API 0
#endif
#if !defined(OPENSSL_API_COMPAT) || OPENSSL_API_COMPAT < OPENSSL_MIN_API
# undef OPENSSL_API_COMPAT
# define OPENSSL_API_COMPAT OPENSSL_MIN_API
#endif
/*
* Do not deprecate things to be deprecated in version 1.2.0 before the
* OpenSSL version number matches.
*/
#if OPENSSL_VERSION_NUMBER < 0x10200000L
# define DEPRECATEDIN_1_2_0(f) f;
#elif OPENSSL_API_COMPAT < 0x10200000L
# define DEPRECATEDIN_1_2_0(f) DECLARE_DEPRECATED(f)
#else
# define DEPRECATEDIN_1_2_0(f)
#endif
#if OPENSSL_API_COMPAT < 0x10100000L
# define DEPRECATEDIN_1_1_0(f) DECLARE_DEPRECATED(f)
#else
# define DEPRECATEDIN_1_1_0(f)
#endif
#if OPENSSL_API_COMPAT < 0x10000000L
# define DEPRECATEDIN_1_0_0(f) DECLARE_DEPRECATED(f)
#else
# define DEPRECATEDIN_1_0_0(f)
#endif
#if OPENSSL_API_COMPAT < 0x00908000L
# define DEPRECATEDIN_0_9_8(f) DECLARE_DEPRECATED(f)
#else
# define DEPRECATEDIN_0_9_8(f)
#endif
/* Generate 80386 code? */
#undef I386_ONLY
#undef OPENSSL_UNISTD
#define OPENSSL_UNISTD <unistd.h>
#undef OPENSSL_EXPORT_VAR_AS_FUNCTION
/*
* The following are cipher-specific, but are part of the public API.
*/
#if !defined(OPENSSL_SYS_UEFI)
# undef BN_LLONG
/* Only one for the following should be defined */
# define SIXTY_FOUR_BIT_LONG
# undef SIXTY_FOUR_BIT
# undef THIRTY_TWO_BIT
#endif
#define RC4_INT unsigned char
#ifdef __cplusplus
}
#endif
|
/* Generated by re2c */
#line 1 "encodings/default_full_8_encoding_policy_substitute.re"
// re2c $INPUT -o $OUTPUT -8 --encoding-policy substitute
#line 6 "encodings/default_full_8_encoding_policy_substitute.c"
{
YYCTYPE yych;
if ((YYLIMIT - YYCURSOR) < 4) YYFILL(4);
yych = *YYCURSOR;
switch (yych) {
case 0x00:
case 0x01:
case 0x02:
case 0x03:
case 0x04:
case 0x05:
case 0x06:
case 0x07:
case 0x08:
case '\t':
case '\n':
case '\v':
case '\f':
case '\r':
case 0x0E:
case 0x0F:
case 0x10:
case 0x11:
case 0x12:
case 0x13:
case 0x14:
case 0x15:
case 0x16:
case 0x17:
case 0x18:
case 0x19:
case 0x1A:
case 0x1B:
case 0x1C:
case 0x1D:
case 0x1E:
case 0x1F:
case ' ':
case '!':
case '"':
case '#':
case '$':
case '%':
case '&':
case '\'':
case '(':
case ')':
case '*':
case '+':
case ',':
case '-':
case '.':
case '/':
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
case ':':
case ';':
case '<':
case '=':
case '>':
case '?':
case '@':
case 'A':
case 'B':
case 'C':
case 'D':
case 'E':
case 'F':
case 'G':
case 'H':
case 'I':
case 'J':
case 'K':
case 'L':
case 'M':
case 'N':
case 'O':
case 'P':
case 'Q':
case 'R':
case 'S':
case 'T':
case 'U':
case 'V':
case 'W':
case 'X':
case 'Y':
case 'Z':
case '[':
case '\\':
case ']':
case '^':
case '_':
case '`':
case 'a':
case 'b':
case 'c':
case 'd':
case 'e':
case 'f':
case 'g':
case 'h':
case 'i':
case 'j':
case 'k':
case 'l':
case 'm':
case 'n':
case 'o':
case 'p':
case 'q':
case 'r':
case 's':
case 't':
case 'u':
case 'v':
case 'w':
case 'x':
case 'y':
case 'z':
case '{':
case '|':
case '}':
case '~':
case 0x7F: goto yy2;
case 0xC2:
case 0xC3:
case 0xC4:
case 0xC5:
case 0xC6:
case 0xC7:
case 0xC8:
case 0xC9:
case 0xCA:
case 0xCB:
case 0xCC:
case 0xCD:
case 0xCE:
case 0xCF:
case 0xD0:
case 0xD1:
case 0xD2:
case 0xD3:
case 0xD4:
case 0xD5:
case 0xD6:
case 0xD7:
case 0xD8:
case 0xD9:
case 0xDA:
case 0xDB:
case 0xDC:
case 0xDD:
case 0xDE:
case 0xDF: goto yy6;
case 0xE0: goto yy7;
case 0xE1:
case 0xE2:
case 0xE3:
case 0xE4:
case 0xE5:
case 0xE6:
case 0xE7:
case 0xE8:
case 0xE9:
case 0xEA:
case 0xEB:
case 0xEC:
case 0xEE:
case 0xEF: goto yy8;
case 0xED: goto yy9;
case 0xF0: goto yy10;
case 0xF1:
case 0xF2:
case 0xF3: goto yy11;
case 0xF4: goto yy12;
default: goto yy4;
}
yy2:
++YYCURSOR;
#line 4 "encodings/default_full_8_encoding_policy_substitute.re"
{ return FULL; }
#line 197 "encodings/default_full_8_encoding_policy_substitute.c"
yy4:
++YYCURSOR;
yy5:
#line 3 "encodings/default_full_8_encoding_policy_substitute.re"
{ return DEFAULT; }
#line 203 "encodings/default_full_8_encoding_policy_substitute.c"
yy6:
yych = *++YYCURSOR;
switch (yych) {
case 0x80:
case 0x81:
case 0x82:
case 0x83:
case 0x84:
case 0x85:
case 0x86:
case 0x87:
case 0x88:
case 0x89:
case 0x8A:
case 0x8B:
case 0x8C:
case 0x8D:
case 0x8E:
case 0x8F:
case 0x90:
case 0x91:
case 0x92:
case 0x93:
case 0x94:
case 0x95:
case 0x96:
case 0x97:
case 0x98:
case 0x99:
case 0x9A:
case 0x9B:
case 0x9C:
case 0x9D:
case 0x9E:
case 0x9F:
case 0xA0:
case 0xA1:
case 0xA2:
case 0xA3:
case 0xA4:
case 0xA5:
case 0xA6:
case 0xA7:
case 0xA8:
case 0xA9:
case 0xAA:
case 0xAB:
case 0xAC:
case 0xAD:
case 0xAE:
case 0xAF:
case 0xB0:
case 0xB1:
case 0xB2:
case 0xB3:
case 0xB4:
case 0xB5:
case 0xB6:
case 0xB7:
case 0xB8:
case 0xB9:
case 0xBA:
case 0xBB:
case 0xBC:
case 0xBD:
case 0xBE:
case 0xBF: goto yy2;
default: goto yy5;
}
yy7:
yych = *(YYMARKER = ++YYCURSOR);
switch (yych) {
case 0xA0:
case 0xA1:
case 0xA2:
case 0xA3:
case 0xA4:
case 0xA5:
case 0xA6:
case 0xA7:
case 0xA8:
case 0xA9:
case 0xAA:
case 0xAB:
case 0xAC:
case 0xAD:
case 0xAE:
case 0xAF:
case 0xB0:
case 0xB1:
case 0xB2:
case 0xB3:
case 0xB4:
case 0xB5:
case 0xB6:
case 0xB7:
case 0xB8:
case 0xB9:
case 0xBA:
case 0xBB:
case 0xBC:
case 0xBD:
case 0xBE:
case 0xBF: goto yy13;
default: goto yy5;
}
yy8:
yych = *(YYMARKER = ++YYCURSOR);
switch (yych) {
case 0x80:
case 0x81:
case 0x82:
case 0x83:
case 0x84:
case 0x85:
case 0x86:
case 0x87:
case 0x88:
case 0x89:
case 0x8A:
case 0x8B:
case 0x8C:
case 0x8D:
case 0x8E:
case 0x8F:
case 0x90:
case 0x91:
case 0x92:
case 0x93:
case 0x94:
case 0x95:
case 0x96:
case 0x97:
case 0x98:
case 0x99:
case 0x9A:
case 0x9B:
case 0x9C:
case 0x9D:
case 0x9E:
case 0x9F:
case 0xA0:
case 0xA1:
case 0xA2:
case 0xA3:
case 0xA4:
case 0xA5:
case 0xA6:
case 0xA7:
case 0xA8:
case 0xA9:
case 0xAA:
case 0xAB:
case 0xAC:
case 0xAD:
case 0xAE:
case 0xAF:
case 0xB0:
case 0xB1:
case 0xB2:
case 0xB3:
case 0xB4:
case 0xB5:
case 0xB6:
case 0xB7:
case 0xB8:
case 0xB9:
case 0xBA:
case 0xBB:
case 0xBC:
case 0xBD:
case 0xBE:
case 0xBF: goto yy13;
default: goto yy5;
}
yy9:
yych = *(YYMARKER = ++YYCURSOR);
switch (yych) {
case 0x80:
case 0x81:
case 0x82:
case 0x83:
case 0x84:
case 0x85:
case 0x86:
case 0x87:
case 0x88:
case 0x89:
case 0x8A:
case 0x8B:
case 0x8C:
case 0x8D:
case 0x8E:
case 0x8F:
case 0x90:
case 0x91:
case 0x92:
case 0x93:
case 0x94:
case 0x95:
case 0x96:
case 0x97:
case 0x98:
case 0x99:
case 0x9A:
case 0x9B:
case 0x9C:
case 0x9D:
case 0x9E:
case 0x9F: goto yy13;
default: goto yy5;
}
yy10:
yych = *(YYMARKER = ++YYCURSOR);
switch (yych) {
case 0x90:
case 0x91:
case 0x92:
case 0x93:
case 0x94:
case 0x95:
case 0x96:
case 0x97:
case 0x98:
case 0x99:
case 0x9A:
case 0x9B:
case 0x9C:
case 0x9D:
case 0x9E:
case 0x9F:
case 0xA0:
case 0xA1:
case 0xA2:
case 0xA3:
case 0xA4:
case 0xA5:
case 0xA6:
case 0xA7:
case 0xA8:
case 0xA9:
case 0xAA:
case 0xAB:
case 0xAC:
case 0xAD:
case 0xAE:
case 0xAF:
case 0xB0:
case 0xB1:
case 0xB2:
case 0xB3:
case 0xB4:
case 0xB5:
case 0xB6:
case 0xB7:
case 0xB8:
case 0xB9:
case 0xBA:
case 0xBB:
case 0xBC:
case 0xBD:
case 0xBE:
case 0xBF: goto yy15;
default: goto yy5;
}
yy11:
yych = *(YYMARKER = ++YYCURSOR);
switch (yych) {
case 0x80:
case 0x81:
case 0x82:
case 0x83:
case 0x84:
case 0x85:
case 0x86:
case 0x87:
case 0x88:
case 0x89:
case 0x8A:
case 0x8B:
case 0x8C:
case 0x8D:
case 0x8E:
case 0x8F:
case 0x90:
case 0x91:
case 0x92:
case 0x93:
case 0x94:
case 0x95:
case 0x96:
case 0x97:
case 0x98:
case 0x99:
case 0x9A:
case 0x9B:
case 0x9C:
case 0x9D:
case 0x9E:
case 0x9F:
case 0xA0:
case 0xA1:
case 0xA2:
case 0xA3:
case 0xA4:
case 0xA5:
case 0xA6:
case 0xA7:
case 0xA8:
case 0xA9:
case 0xAA:
case 0xAB:
case 0xAC:
case 0xAD:
case 0xAE:
case 0xAF:
case 0xB0:
case 0xB1:
case 0xB2:
case 0xB3:
case 0xB4:
case 0xB5:
case 0xB6:
case 0xB7:
case 0xB8:
case 0xB9:
case 0xBA:
case 0xBB:
case 0xBC:
case 0xBD:
case 0xBE:
case 0xBF: goto yy15;
default: goto yy5;
}
yy12:
yych = *(YYMARKER = ++YYCURSOR);
switch (yych) {
case 0x80:
case 0x81:
case 0x82:
case 0x83:
case 0x84:
case 0x85:
case 0x86:
case 0x87:
case 0x88:
case 0x89:
case 0x8A:
case 0x8B:
case 0x8C:
case 0x8D:
case 0x8E:
case 0x8F: goto yy15;
default: goto yy5;
}
yy13:
yych = *++YYCURSOR;
switch (yych) {
case 0x80:
case 0x81:
case 0x82:
case 0x83:
case 0x84:
case 0x85:
case 0x86:
case 0x87:
case 0x88:
case 0x89:
case 0x8A:
case 0x8B:
case 0x8C:
case 0x8D:
case 0x8E:
case 0x8F:
case 0x90:
case 0x91:
case 0x92:
case 0x93:
case 0x94:
case 0x95:
case 0x96:
case 0x97:
case 0x98:
case 0x99:
case 0x9A:
case 0x9B:
case 0x9C:
case 0x9D:
case 0x9E:
case 0x9F:
case 0xA0:
case 0xA1:
case 0xA2:
case 0xA3:
case 0xA4:
case 0xA5:
case 0xA6:
case 0xA7:
case 0xA8:
case 0xA9:
case 0xAA:
case 0xAB:
case 0xAC:
case 0xAD:
case 0xAE:
case 0xAF:
case 0xB0:
case 0xB1:
case 0xB2:
case 0xB3:
case 0xB4:
case 0xB5:
case 0xB6:
case 0xB7:
case 0xB8:
case 0xB9:
case 0xBA:
case 0xBB:
case 0xBC:
case 0xBD:
case 0xBE:
case 0xBF: goto yy2;
default: goto yy14;
}
yy14:
YYCURSOR = YYMARKER;
goto yy5;
yy15:
yych = *++YYCURSOR;
switch (yych) {
case 0x80:
case 0x81:
case 0x82:
case 0x83:
case 0x84:
case 0x85:
case 0x86:
case 0x87:
case 0x88:
case 0x89:
case 0x8A:
case 0x8B:
case 0x8C:
case 0x8D:
case 0x8E:
case 0x8F:
case 0x90:
case 0x91:
case 0x92:
case 0x93:
case 0x94:
case 0x95:
case 0x96:
case 0x97:
case 0x98:
case 0x99:
case 0x9A:
case 0x9B:
case 0x9C:
case 0x9D:
case 0x9E:
case 0x9F:
case 0xA0:
case 0xA1:
case 0xA2:
case 0xA3:
case 0xA4:
case 0xA5:
case 0xA6:
case 0xA7:
case 0xA8:
case 0xA9:
case 0xAA:
case 0xAB:
case 0xAC:
case 0xAD:
case 0xAE:
case 0xAF:
case 0xB0:
case 0xB1:
case 0xB2:
case 0xB3:
case 0xB4:
case 0xB5:
case 0xB6:
case 0xB7:
case 0xB8:
case 0xB9:
case 0xBA:
case 0xBB:
case 0xBC:
case 0xBD:
case 0xBE:
case 0xBF: goto yy13;
default: goto yy14;
}
}
#line 5 "encodings/default_full_8_encoding_policy_substitute.re"
|
export default {
componentContainer: {},
container: {
alignItems: 'center',
flexDirection: 'row',
flexWrap: 'nowrap'
},
input: {
borderRadius: 6,
borderStyle: 'solid',
borderWidth: 1.5,
fontSize: 14,
height: 40,
marginRight: 24,
padding: 10,
paddingRight: 34,
width: 90
},
toggle: {
borderRadius: 6,
flexDirection: 'row',
flexWrap: 'nowrap',
height: 40,
overflow: 'hidden',
position: 'relative',
width: 80
},
toggleButton: {
alignItems: 'center',
height: '100%',
justifyContent: 'center',
width: 40
},
toggleButtonActive: {
borderRadius: 6,
height: 42,
left: 0,
position: 'absolute',
top: -1
},
errorText: {
marginTop: 10
}
};
//# sourceMappingURL=style.js.map |
# STUDY DEFINITION FOR BASELINE CHARACTERISTICS
# Import necessary functions
from cohortextractor import (
StudyDefinition,
patients,
codelist_from_csv,
codelist,
filter_codes_by_category,
combine_codelists,
Measure
)
# Import codelists
from codelists import *
study = StudyDefinition(
default_expectations={
"date": {"earliest": "1900-01-01", "latest": "today"},
"rate": "uniform",
"incidence": 0.5,
},
# select the study population
index_date="2020-01-01",
population=patients.satisfying(
"""
(age >= 65 AND age < 120) AND
is_registered_with_tpp
""",
is_registered_with_tpp=patients.registered_as_of(
"index_date"
),
),
# DEMOGRAPHICS
## age
age=patients.age_as_of(
"index_date",
return_expectations={
"rate": "universal",
"int": {"distribution": "population_ages"},
},
),
## self-reported ethnicity
ethnicity=patients.with_these_clinical_events(
ethnicity_codes,
returning="category",
find_last_match_in_period=True,
return_expectations={
"category": {"ratios": {"1": 0.5, "2": 0.2, "3": 0.1, "4": 0.1, "5": 0.1}},
"incidence": 0.75,
},
),
)
|
def index_tracker(key, msg):
"""
This function is meant to track the start and end index of
a key in a message.
    Takes in: (key : string) (msg : string)
    Does: Calls helper_track when the first char of key matches a char in msg,
    to determine whether the entire key string matches.
    Returns a tuple (True, start, finish) if key is found in msg, else None.
"""
    key = ''.join(key.split())
    if key == '':
        return None
for each in range(0, len(msg)):
if msg[each] == key[0]:
if len(key) > 1 and len(msg) > each + 1:
ret = helper_track(key[1:], msg, each + 1)
if ret[0]:
return (True, each, ret[1])
else:
return (True, each, each)
else:
continue
return None
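# Illustrative usage of index_tracker (values are assumptions derived from the
# docstring above, not part of the original module; whitespace in key is ignored
# and spaces in msg are skipped while matching):
#   index_tracker("b c", "a bcd")   -> (True, 2, 3)
#   index_tracker("abc", "x a b c") -> (True, 2, 6)
#   index_tracker("zz", "abc")      -> None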
def helper_track(key, msg, msgindex):
"""
This function is meant to track if entire key exists in msg,
takes in: (key : string) (msg : string) (msgindex : int)
    does: checks whether the next char in key matches the next char in msg,
    skipping any spaces in msg
    returns (True, end index) if the whole key matches, else (False, index)
"""
    if key == '':
return (True, msgindex - 1)
    elif msg == '' or msgindex >= len(msg):
return (False, msgindex - 1)
elif msg[msgindex] == ' ' and len(msg) > msgindex + 1:
return helper_track(key, msg, msgindex + 1)
else:
if key[0] == msg[msgindex] and len(key) >= 1 and len(msg) > msgindex:
# print key, msg, msg[msgindex], msgindex, len(msg)
return helper_track(key[1:], msg, msgindex + 1)
else:
return (False, msgindex)
def split_str(text, separator):
words = []
word = ""
for each in range(0, len(text)):
if text[each] != separator:
if word == "" and text[each] == ' ':
continue
else:
word += text[each]
if each == (len(text) - 1):
words.append(remove_end_spaces(word))
else:
words.append(remove_end_spaces(word))
word = ""
continue
return words
def remove_end_spaces(text):
if text == '' or text[-1] != ' ':
return text
else:
return remove_end_spaces(text[:-1])
def join_str(text, separator):
    new_text = ""
    for each in text:
        if new_text == "":
            new_text += each
        else:
            new_text += separator + each
    return new_text
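# Minimal sketch of how these helpers fit together (hypothetical inputs, for
# illustration only; the expected outputs assume the fixed helpers above):
if __name__ == "__main__":
    msg = "hello there world"
    # Locate "there" inside msg: returns (found, start index, end index).
    print(index_tracker("there", msg))   # (True, 6, 10)
    # Split on spaces and re-join with a dash.
    words = split_str(msg, " ")
    print(words)                         # ['hello', 'there', 'world']
    print(join_str(words, "-"))          # 'hello-there-world'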
|
import eventbus from './eventbus';
const { ipcMain } = require('electron');
const fs = require('fs');
const storage = require('electron-json-storage');
import moment from 'moment';
let timers = [];
let timerIntervals = {};
storage.get('twitchtube_timers', function(error, data) {
if (error) throw error;
if (Array.isArray(data) && data.length > 0) timers = data;
startTimerCommands();
});
function startTimerCommands() {
generateIntervals();
// @TODO: Just set minute interval?
let lastMinuteChecked = moment().minute();
setInterval(() => {
let currentMinute = moment().minute();
if (currentMinute === lastMinuteChecked) return;
lastMinuteChecked = currentMinute;
if (!timerIntervals[currentMinute]) return;
timerIntervals[currentMinute].timers.forEach(timer => {
// @TODO: Check for platform?
eventbus.emit('outgoing-twitch-message', timer.message);
eventbus.emit('outgoing-youtube-message', timer.message);
});
}, 1000);
setListeners();
}
function generateIntervals() {
  // Rebuild the interval map from scratch so repeated calls don't duplicate timers.
  timerIntervals = {};
  timers.forEach(timer => {
    let interval = parseInt(timer.interval, 10);
    if (!interval || interval <= 0) return; // skip invalid or zero intervals
    let minuteIndex = interval;
    while (minuteIndex <= 60) {
      if (!timerIntervals[minuteIndex]) {
        timerIntervals[minuteIndex] = {
          timers: [],
        };
      }
      timerIntervals[minuteIndex].timers.push(timer);
      minuteIndex += interval;
    }
  });
  // moment().minute() is 0-59, so fold the minute-60 bucket into minute 0.
  if (timerIntervals[60]) {
    if (!timerIntervals[0]) timerIntervals[0] = { timers: [] };
    timerIntervals[0].timers = timerIntervals[0].timers.concat(timerIntervals[60].timers);
  }
}
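// Illustrative sketch (assumption, not taken from the original source): for a single
// timer such as { id: 1, interval: "20", message: "Hi!" }, generateIntervals() builds
//   timerIntervals = { 0: {...}, 20: {...}, 40: {...}, 60: {...} }
// where each bucket holds { timers: [timer] }; the interval loop in
// startTimerCommands() then emits the message at minutes 0, 20 and 40 of every hour.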
function setListeners () {
ipcMain.on('get-timers', (event, arg) => {
ipcMain.emit('timers-loaded', timers);
event.sender.send('timers-loaded', timers)
});
ipcMain.on('timer-created', (event, arg) => {
timers.push(arg);
generateIntervals(); // @TODO: Add function to check just this one
storage.set('twitchtube_timers', timers);
});
ipcMain.on('timer-removed', (event, arg) => {
let newTimers = timers.filter(timer => {
return timer.id !== arg;
});
timers = newTimers;
generateIntervals(); // @TODO: Add function to check just this one
storage.set('twitchtube_timers', timers);
});
ipcMain.on('timer-updated', (event, updatedTimer) => {
let index = timers.findIndex(timer => {
      return timer.id === updatedTimer.id;
});
timers[index].message = updatedTimer.message;
timers[index].interval = updatedTimer.interval;
generateIntervals(); // @TODO: Add function to check just this one
storage.set('twitchtube_timers', timers);
});
}
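// Illustrative renderer-side counterpart (assumption, not part of this file): a UI
// process would typically drive these handlers roughly like
//   const { ipcRenderer } = require('electron');
//   ipcRenderer.send('get-timers');
//   ipcRenderer.on('timers-loaded', (event, timers) => { /* render the list */ });
//   ipcRenderer.send('timer-created', { id: Date.now(), interval: '15', message: 'Hello!' });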
|
var structcc_p_v_r_texture_pixel_format_info =
[
[ "alpha", "structcc_p_v_r_texture_pixel_format_info.html#a9b64c2f4c30ada6f2f1d96da18ded1ee", null ],
[ "bpp", "structcc_p_v_r_texture_pixel_format_info.html#a17bdfe2798dacb937d7fd4b848192d98", null ],
[ "ccPixelFormat", "structcc_p_v_r_texture_pixel_format_info.html#a625f83e45f0c024dd68988e16c16893a", null ],
[ "compressed", "structcc_p_v_r_texture_pixel_format_info.html#af8cbceebc1ff7e73d5867a5f3901fbb8", null ],
[ "format", "structcc_p_v_r_texture_pixel_format_info.html#ab530610af9bad14cc8a09db8ea172673", null ],
[ "internalFormat", "structcc_p_v_r_texture_pixel_format_info.html#a6abc1f4176413c9176e8faf4f44a7acc", null ],
[ "type", "structcc_p_v_r_texture_pixel_format_info.html#a671ae8e73f4c89a4d8e1c0f0ba82be94", null ]
]; |
# -*- coding: utf-8 -*-
import unittest
from collections import namedtuple
from os import path
from random import shuffle
import numpy as np
from replicationbase import ReplicationBase, REPRO_QUALITY_LEVELS
from classic_heuristics.lr3opt import lr3opt_init, _check_lr3opt_move, _init_with_random
from cvrp_ops import calculate_objective
def _random_init_lr3opt(pts,D,d,C,L,st,times):
best_sol = None
best_f = float('inf')
for t in range(times):
sol = lr3opt_init(D, d, C, L, initialization_algorithm=_init_with_random)
sol_f = calculate_objective(sol, D)
        if sol_f < best_f:
            best_sol = sol
            best_f = sol_f
return best_sol
def _random_init_lr3opt_once(pts,D,d,C,L,st):
return lr3opt_init(D, d, C, L, initialization_algorithm=_init_with_random)
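# Illustrative usage of the two helpers above (assumption: D, d, C, L describe a CVRP
# instance loaded elsewhere; the pts and st arguments are unused by these helpers):
#   best = _random_init_lr3opt(None, D, d, C, L, None, times=10)  # best of 10 random restarts
#   one = _random_init_lr3opt_once(None, D, d, C, L, None)        # a single randomized run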
LiteratureResult = namedtuple('LiteratureResult', 'obj_f cpu_time')
class TestLR3OPTAlgorithm(unittest.TestCase):
def setUp(self):
pass
def test_penalty_calculation_fig1_example(self):
D = np.array([
[ 0, 19, 39, 51, 66, 59, 42, 22, 30, 40, 54, 68, 73, 62, 41],
[19, 0, 21, 37, 54, 52, 36, 21, 36, 45, 58, 69, 77, 74, 54],
[39, 21, 0, 21, 37, 40, 30, 27, 39, 46, 59, 65, 76, 81, 63],
[51, 37, 21, 0, 17, 20, 19, 31, 34, 37, 48, 49, 61, 75, 60],
[66, 54, 37, 17, 0, 16, 28, 45, 43, 43, 50, 45, 59, 79, 67],
[59, 52, 40, 20, 16, 0, 17, 37, 30, 28, 34, 30, 43, 63, 53],
[42, 36, 30, 19, 28, 17, 0, 19, 15, 18, 30, 34, 45, 55, 41],
[22, 21, 27, 31, 45, 37, 19, 0, 15, 24, 37, 48, 56, 55, 35],
[30, 36, 39, 34, 43, 30, 15, 15, 0, 10, 22, 34, 41, 42, 26],
[40, 45, 46, 37, 43, 28, 18, 24, 10, 0, 13, 25, 32, 37, 24],
[54, 58, 59, 48, 50, 34, 30, 37, 22, 13, 0, 16, 19, 28, 24],
[68, 69, 65, 49, 45, 30, 34, 48, 34, 25, 16, 0, 13, 40, 40],
[73, 77, 76, 61, 59, 43, 45, 56, 41, 32, 19, 13, 0, 32, 39],
[62, 74, 81, 75, 79, 63, 55, 55, 42, 37, 28, 40, 32, 0, 20],
[41, 54, 63, 60, 67, 53, 41, 35, 26, 24, 24, 40, 39, 20, 0]])
C = 140
#d = [30,15,15,15,15,15,15, #route 3
# 10,20,20,20,30,30,20] #route 1
#sol = [0,1,2,3,4,5,6,7,0,8,9,10,11,12,13,14,0]
self.assertAlmostEqual( -10, _check_lr3opt_move(D, C, None, 60, 0,
[[0,4],[3,1],[2,5]],
[6,7,8,9,9,10], #end_p
[6,7,0,8,8,9], #end_n
[105,15,0,10,10,140], #cum_d
None,
8, None, [2.0, None]))
class TestStewartGoldenReplications(ReplicationBase):
def setUp(self):
self.algorithms = [
("lr3opt_det", lambda pts,D,d,C,L,st:\
lr3opt_init(D, d, C, L)),
("lr3opt_ran", _random_init_lr3opt_once)]
self.problem_names = [
"00-CW64_n31_k8c.vrp",
"05-E051-k5.vrp",
"06-E076-k15s.vrp",
"07-E076-k10s.vrp",
"08-E076-k8s.vrp",
"09-E076-k7s.vrp",
"10-E101-k14s.vrp",
"11-E101-k8.vrp"]
self.targets = [(1212,521,1058,847,751,692,1117,829), #det
(1212,521,1058,847,751,692,1117,829)] #rnd
self.problem_path = path.join("Classic", "GilletMiller1974")
def test_deterministic_LR3OPT_with_GilletMiller1974_instances(self):
avgq, sdq, minq, maxq = self.solve_problems(
"lr3opt_det", require_K = False,
round_f_func = np.int,
cost_compare = False)
self.assertTrue( abs(avgq) < REPRO_QUALITY_LEVELS.D_AVG, "Average quality not replicated (%.2f)"%avgq)
self.assertTrue( abs(sdq) < REPRO_QUALITY_LEVELS.B_SD, "There is too much variation between instances")
#TestStewartGoldenReplications.test_stochastic_LR3OPT_with_GilletMiller1974_instances
def test_stochastic_LR3OPT_with_GilletMiller1974_instances(self):
repeats_per_problem = zip(list(range(8)), [10, 10, 7, 8*3, 10, 10*2, 3, 6*2])
bestqs = [float('inf')]*8
for i, repeats in repeats_per_problem:
for repeat in range(repeats):
problem_name = self.problem_names[i]
print "Repeat %d of %d for %s"%(repeat+1,repeats,problem_name)
avgq, sdq, minq, maxq = self.solve_problems(
"lr3opt_ran", instance_idx = i, require_K = False,
round_f_func = np.int,
#round_D_func = np.around,
cost_compare = False)
if avgq<bestqs[i]:
bestqs[i] = avgq
# check the average gap of [10, 10, 7, 8, 10, 10, 3, 6] repeats
avgq = np.average(bestqs)
sdq = np.std(bestqs)
# Usually this assertion succeeds, but because it is stochastic, it
# is possible that by chance some of the results are (much) worse.
    # Then, it is best to try again or bump the level up to B.
self.assertTrue( abs(avgq) < REPRO_QUALITY_LEVELS.A_AVG, "Average quality not replicated (%.2f)"%avgq)
self.assertTrue( abs(sdq) < REPRO_QUALITY_LEVELS.A_SD, "There is too much variation between instances")
if __name__ == '__main__':
unittest.main() |
import geopandas as gpd
import numpy as np
import pytest
from scipy.ndimage.morphology import binary_erosion
from shapely.geometry.linestring import LineString
from shapely.geometry.multilinestring import MultiLineString
from shapely.geometry.multipolygon import MultiPolygon
from shapely.geometry.polygon import Polygon
import geoutils as gu
GLACIER_OUTLINES_URL = "http://public.data.npolar.no/cryoclim/CryoClim_GAO_SJ_1990.zip"
class TestVector:
glacier_outlines = gu.Vector(GLACIER_OUTLINES_URL)
def test_init(self) -> None:
vector = gu.Vector(GLACIER_OUTLINES_URL)
assert isinstance(vector, gu.Vector)
def test_copy(self) -> None:
vector2 = self.glacier_outlines.copy()
assert vector2 is not self.glacier_outlines
vector2.ds = vector2.ds.query("NAME == 'Ayerbreen'")
assert vector2.ds.shape[0] < self.glacier_outlines.ds.shape[0]
def test_query(self) -> None:
vector2 = self.glacier_outlines.query("NAME == 'Ayerbreen'")
assert vector2 is not self.glacier_outlines
assert vector2.ds.shape[0] < self.glacier_outlines.ds.shape[0]
def test_bounds(self) -> None:
bounds = self.glacier_outlines.bounds
assert bounds.left < bounds.right
assert bounds.bottom < bounds.top
assert bounds.left == self.glacier_outlines.ds.total_bounds[0]
assert bounds.bottom == self.glacier_outlines.ds.total_bounds[1]
assert bounds.right == self.glacier_outlines.ds.total_bounds[2]
assert bounds.top == self.glacier_outlines.ds.total_bounds[3]
class TestSynthetic:
# Create a synthetic vector file with a square of size 1, started at position (10, 10)
poly1 = Polygon([(10, 10), (11, 10), (11, 11), (10, 11)])
gdf = gpd.GeoDataFrame({"geometry": [poly1]}, crs="EPSG:4326")
vector = gu.Vector(gdf)
# Same with a square started at position (5, 5)
poly2 = Polygon([(5, 5), (6, 5), (6, 6), (5, 6)])
gdf = gpd.GeoDataFrame({"geometry": [poly2]}, crs="EPSG:4326")
vector2 = gu.Vector(gdf)
# Create a multipolygon with both
multipoly = MultiPolygon([poly1, poly2])
gdf = gpd.GeoDataFrame({"geometry": [multipoly]}, crs="EPSG:4326")
vector_multipoly = gu.Vector(gdf)
# Create a synthetic vector file with a square of size 5, started at position (8, 8)
poly3 = Polygon([(8, 8), (13, 8), (13, 13), (8, 13)])
gdf = gpd.GeoDataFrame({"geometry": [poly3]}, crs="EPSG:4326")
vector_5 = gu.Vector(gdf)
# Create a synthetic LineString geometry
lines = LineString([(10, 10), (11, 10), (11, 11)])
gdf = gpd.GeoDataFrame({"geometry": [lines]}, crs="EPSG:4326")
vector_lines = gu.Vector(gdf)
# Create a synthetic MultiLineString geometry
multilines = MultiLineString([[(10, 10), (11, 10), (11, 11)], [(5, 5), (6, 5), (6, 6)]])
gdf = gpd.GeoDataFrame({"geometry": [multilines]}, crs="EPSG:4326")
vector_multilines = gu.Vector(gdf)
def test_create_mask(self) -> None:
"""
Test Vector.create_mask.
"""
# First with given res and bounds -> Should be a 21 x 21 array with 0 everywhere except center pixel
vector = self.vector.copy()
out_mask = vector.create_mask(xres=1, bounds=(0, 0, 21, 21))
ref_mask = np.zeros((21, 21), dtype="bool")
ref_mask[10, 10] = True
assert out_mask.shape == (21, 21)
assert np.all(ref_mask == out_mask)
# Check that vector has not been modified by accident
assert vector.bounds == self.vector.bounds
assert len(vector.ds) == len(self.vector.ds)
assert vector.crs == self.vector.crs
# Then with a gu.Raster as reference, single band
rst = gu.Raster.from_array(np.zeros((21, 21)), transform=(1.0, 0.0, 0.0, 0.0, -1.0, 21.0), crs="EPSG:4326")
out_mask = vector.create_mask(rst)
assert out_mask.shape == (1, 21, 21)
# With gu.Raster, 2 bands -> fails...
# rst = gu.Raster.from_array(np.zeros((2, 21, 21)), transform=(1., 0., 0., 0., -1., 21.), crs='EPSG:4326')
# out_mask = vector.create_mask(rst)
# Test that buffer = 0 works
out_mask_buff = vector.create_mask(rst, buffer=0)
assert np.all(ref_mask == out_mask_buff)
# Test that buffer > 0 works
rst = gu.Raster.from_array(np.zeros((21, 21)), transform=(1.0, 0.0, 0.0, 0.0, -1.0, 21.0), crs="EPSG:4326")
out_mask = vector.create_mask(rst)
for buffer in np.arange(1, 8):
out_mask_buff = vector.create_mask(rst, buffer=buffer)
diff = out_mask_buff & ~out_mask
assert np.count_nonzero(diff) > 0
# Difference between masks should always be thinner than buffer + 1
eroded_diff = binary_erosion(diff.squeeze(), np.ones((buffer + 1, buffer + 1)))
assert np.count_nonzero(eroded_diff) == 0
# Test that buffer < 0 works
vector_5 = self.vector_5
out_mask = vector_5.create_mask(rst)
for buffer in np.arange(-1, -3, -1):
out_mask_buff = vector_5.create_mask(rst, buffer=buffer)
diff = ~out_mask_buff & out_mask
assert np.count_nonzero(diff) > 0
# Difference between masks should always be thinner than buffer + 1
eroded_diff = binary_erosion(diff.squeeze(), np.ones((abs(buffer) + 1, abs(buffer) + 1)))
assert np.count_nonzero(eroded_diff) == 0
def test_extract_vertices(self) -> None:
"""
Test that extract_vertices works with simple geometries.
"""
# Polygons
vertices = gu.geovector.extract_vertices(self.vector.ds)
assert len(vertices) == 1
assert vertices == [[(10.0, 10.0), (11.0, 10.0), (11.0, 11.0), (10.0, 11.0), (10.0, 10.0)]]
# MultiPolygons
vertices = gu.geovector.extract_vertices(self.vector_multipoly.ds)
assert len(vertices) == 2
assert vertices[0] == [(10.0, 10.0), (11.0, 10.0), (11.0, 11.0), (10.0, 11.0), (10.0, 10.0)]
assert vertices[1] == [(5.0, 5.0), (6.0, 5.0), (6.0, 6.0), (5.0, 6.0), (5.0, 5.0)]
# LineString
vertices = gu.geovector.extract_vertices(self.vector_lines.ds)
assert len(vertices) == 1
assert vertices == [[(10.0, 10.0), (11.0, 10.0), (11.0, 11.0)]]
# MultiLineString
vertices = gu.geovector.extract_vertices(self.vector_multilines.ds)
assert len(vertices) == 2
assert vertices[0] == [(10.0, 10.0), (11.0, 10.0), (11.0, 11.0)]
assert vertices[1] == [(5.0, 5.0), (6.0, 5.0), (6.0, 6.0)]
def test_generate_voronoi(self) -> None:
"""
Check that geovector.generate_voronoi_polygons works on a simple Polygon.
Does not work with simple shapes such as squares or triangles, as the Voronoi diagram is infinite.
For now, test on a set of two squares.
"""
# Check with a multipolygon
voronoi = gu.geovector.generate_voronoi_polygons(self.vector_multipoly.ds)
assert len(voronoi) == 2
vertices = gu.geovector.extract_vertices(voronoi)
assert vertices == [
[(5.5, 10.5), (10.5, 10.5), (10.5, 5.5), (5.5, 10.5)],
[(5.5, 10.5), (10.5, 5.5), (5.5, 5.5), (5.5, 10.5)],
]
# Check that it fails with proper error for too simple geometries
expected_message = "Invalid geometry, cannot generate finite Voronoi polygons"
with pytest.raises(ValueError, match=expected_message):
voronoi = gu.geovector.generate_voronoi_polygons(self.vector.ds)
def test_buffer_without_overlap(self) -> None:
"""
Check that the non-overlapping buffer feature works. It does not work on simple geometries, so test on a MultiPolygon.
Yet, very simple geometries can yield unexpected results, as is the case for the second test case here.
"""
# Case 1, test with two squares, in separate Polygons
two_squares = gu.Vector(gpd.GeoDataFrame(geometry=[self.poly1, self.poly2], crs="EPSG:4326"))
# Check with buffers that should not overlap
# ------------------------------------------
buffer_size = 2
buffer = two_squares.buffer_without_overlap(buffer_size)
# Output should be of same size as input and same geometry type
assert len(buffer.ds) == len(two_squares.ds)
assert np.all(buffer.ds.geometry.geom_type == two_squares.ds.geometry.geom_type)
# Extract individual geometries
polys = []
for geom in buffer.ds.geometry:
if geom.geom_type in ["MultiPolygon"]:
polys.extend(list(geom.geoms))
else:
polys.append(geom)
# Check they do not overlap
for i in range(len(polys)):
for j in range(i + 1, len(polys)):
assert not polys[i].intersects(polys[j])
# buffer should yield the same result as create_mask with buffer, minus the original mask
mask_nonoverlap = buffer.create_mask(xres=0.1, bounds=(0, 0, 21, 21))
mask_buffer = two_squares.create_mask(xres=0.1, bounds=(0, 0, 21, 21), buffer=buffer_size)
mask_nobuffer = two_squares.create_mask(xres=0.1, bounds=(0, 0, 21, 21))
assert np.all(mask_nobuffer | mask_nonoverlap == mask_buffer)
# Case 2 - Check with buffers that overlap -> this case is actually not the expected result !
# -------------------------------
buffer_size = 5
buffer = two_squares.buffer_without_overlap(buffer_size)
# Output should be of same size as input and same geometry type
assert len(buffer.ds) == len(two_squares.ds)
assert np.all(buffer.ds.geometry.geom_type == two_squares.ds.geometry.geom_type)
# Extract individual geometries
polys = []
for geom in buffer.ds.geometry:
if geom.geom_type in ["MultiPolygon"]:
polys.extend(list(geom.geoms))
else:
polys.append(geom)
# Check they do not overlap
for i in range(len(polys)):
for j in range(i + 1, len(polys)):
assert polys[i].intersection(polys[j]).area == 0
# buffer should yield the same result as create_mask with buffer, minus the original mask
mask_nonoverlap = buffer.create_mask(xres=0.1, bounds=(0, 0, 21, 21))
mask_buffer = two_squares.create_mask(xres=0.1, bounds=(0, 0, 21, 21), buffer=buffer_size)
mask_nobuffer = two_squares.create_mask(xres=0.1, bounds=(0, 0, 21, 21))
assert np.all(mask_nobuffer | mask_nonoverlap == mask_buffer)
|
import sys
import os
import argparse
from pathlib import Path
import datetime
import shutil
import logging
import torch
import torch.nn.functional as F
from torch.utils.tensorboard import SummaryWriter
import numpy as np
from tqdm import tqdm
sys.path.append(os.path.join(sys.path[0], '../..'))
from dataloader.any_folder import DataLoaderAnyFolder
from utils.training_utils import set_randomness, mse2psnr, save_checkpoint
from utils.pos_enc import encode_position
from utils.volume_op import volume_sampling_ndc, volume_rendering
from utils.comp_ray_dir import comp_ray_dir_cam_fxfy
from models.nerf_models import OfficialNerf
from models.intrinsics import LearnFocal
from models.poses import LearnPose
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('--epoch', default=10000, type=int)
parser.add_argument('--eval_interval', default=100, type=int, help='run eval every this epoch number')
parser.add_argument('--gpu_id', default=0, type=int)
parser.add_argument('--multi_gpu', default=False, type=eval, choices=[True, False])
parser.add_argument('--base_dir', type=str, default='./data_dir/nerfmm_release_data')
parser.add_argument('--scene_name', type=str, default='any_folder_demo/desk')
parser.add_argument('--nerf_lr', default=0.001, type=float)
parser.add_argument('--nerf_milestones', default=list(range(0, 10000, 10)), type=int, nargs='+',
help='learning rate schedule milestones')
parser.add_argument('--nerf_lr_gamma', type=float, default=0.9954, help="learning rate milestones gamma")
parser.add_argument('--learn_focal', default=True, type=bool)
parser.add_argument('--focal_order', default=2, type=int)
parser.add_argument('--fx_only', default=False, type=eval, choices=[True, False])
parser.add_argument('--focal_lr', default=0.001, type=float)
parser.add_argument('--focal_milestones', default=list(range(0, 10000, 100)), type=int, nargs='+',
help='learning rate schedule milestones')
parser.add_argument('--focal_lr_gamma', type=float, default=0.9, help="learning rate milestones gamma")
parser.add_argument('--learn_R', default=True, type=eval, choices=[True, False])
parser.add_argument('--learn_t', default=True, type=eval, choices=[True, False])
parser.add_argument('--pose_lr', default=0.001, type=float)
parser.add_argument('--pose_milestones', default=list(range(0, 10000, 100)), type=int, nargs='+',
help='learning rate schedule milestones')
parser.add_argument('--pose_lr_gamma', type=float, default=0.9, help="learning rate milestones gamma")
parser.add_argument('--resize_ratio', type=int, default=4, help='lower the image resolution with this ratio')
parser.add_argument('--num_rows_eval_img', type=int, default=10, help='split a high res image to rows in eval')
parser.add_argument('--hidden_dims', type=int, default=128, help='network hidden unit dimensions')
parser.add_argument('--train_rand_rows', type=int, default=32, help='rand sample these rows to train')
parser.add_argument('--train_rand_cols', type=int, default=32, help='rand sample these cols to train')
parser.add_argument('--num_sample', type=int, default=128, help='number samples along a ray')
parser.add_argument('--pos_enc_levels', type=int, default=10, help='number of freqs for positional encoding')
parser.add_argument('--pos_enc_inc_in', type=bool, default=True, help='concat the input to the encoding')
parser.add_argument('--use_dir_enc', type=bool, default=True, help='use pos enc for view dir?')
parser.add_argument('--dir_enc_levels', type=int, default=4, help='number of freqs for positional encoding')
parser.add_argument('--dir_enc_inc_in', type=bool, default=True, help='concat the input to the encoding')
parser.add_argument('--train_img_num', type=int, default=-1, help='num of images to train, -1 for all')
parser.add_argument('--train_load_sorted', type=bool, default=True)
parser.add_argument('--train_start', type=int, default=0, help='inclusive')
parser.add_argument('--train_end', type=int, default=-1, help='exclusive, -1 for all')
parser.add_argument('--train_skip', type=int, default=1, help='skip every this number of imgs')
parser.add_argument('--rand_seed', type=int, default=17)
parser.add_argument('--true_rand', type=bool, default=False)
parser.add_argument('--optimizer', type=str, default='Adam', help='optimizer for training')
parser.add_argument('--alias', type=str, default='', help="experiments alias")
return parser.parse_args()
def gen_detail_name(args):
outstr = 'lr_' + str(args.nerf_lr) + \
'_gpu' + str(args.gpu_id) + \
'_seed_' + str(args.rand_seed) + \
'_resize_' + str(args.resize_ratio) + \
'_Nsam_' + str(args.num_sample) + \
'_Ntr_img_'+ str(args.train_img_num) + \
'_freq_' + str(args.pos_enc_levels) + \
'_' + str(args.alias) + \
'_' + str(datetime.datetime.now().strftime('%y%m%d_%H%M'))
return outstr
def model_render_image(c2w, rays_cam, t_vals, near, far, H, W, fxfy, model, perturb_t, sigma_noise_std,
args, rgb_act_fn):
"""Render an image or pixels.
:param c2w: (4, 4) pose to transform ray direction from cam to world.
:param rays_cam: (someH, someW, 3) ray directions in camera coordinates; can be randomly selected
rows and cols, some full rows, or an entire image.
:param t_vals: (N_samples) sample depth along a ray.
:param fxfy: a float or a (2, ) torch tensor for focal.
:param perturb_t: True/False whether add noise to t.
:param sigma_noise_std: a float std dev when adding noise to raw density (sigma).
:param rgb_act_fn: activation fn (e.g. sigmoid) applied to the raw rgb output to get actual rgb.
:return: (someH, someW, 3) volume rendered images for the input rays.
"""
# (H, W, N_sample, 3), (H, W, 3), (H, W, N_sam)
sample_pos, _, ray_dir_world, t_vals_noisy = volume_sampling_ndc(c2w, rays_cam, t_vals, near, far,
H, W, fxfy, perturb_t)
# encode position: (H, W, N_sample, (2L+1)*C = 63)
pos_enc = encode_position(sample_pos, levels=args.pos_enc_levels, inc_input=args.pos_enc_inc_in)
# encode direction: (H, W, N_sample, (2L+1)*C = 27)
if args.use_dir_enc:
ray_dir_world = F.normalize(ray_dir_world, p=2, dim=2) # (H, W, 3)
dir_enc = encode_position(ray_dir_world, levels=args.dir_enc_levels, inc_input=args.dir_enc_inc_in) # (H, W, 27)
dir_enc = dir_enc.unsqueeze(2).expand(-1, -1, args.num_sample, -1) # (H, W, N_sample, 27)
else:
dir_enc = None
# inference rgb and density using position and direction encoding.
rgb_density = model(pos_enc, dir_enc) # (H, W, N_sample, 4)
render_result = volume_rendering(rgb_density, t_vals_noisy, sigma_noise_std, rgb_act_fn)
rgb_rendered = render_result['rgb'] # (H, W, 3)
depth_map = render_result['depth_map'] # (H, W)
result = {
'rgb': rgb_rendered, # (H, W, 3)
'sample_pos': sample_pos, # (H, W, N_sample, 3)
'depth_map': depth_map, # (H, W)
'rgb_density': rgb_density, # (H, W, N_sample, 4)
}
return result
def eval_one_epoch(eval_c2ws, scene_train, model, focal_net, pose_param_net,
my_devices, args, epoch_i, writer, rgb_act_fn):
model.eval()
focal_net.eval()
pose_param_net.eval()
fxfy = focal_net(0)
ray_dir_cam = comp_ray_dir_cam_fxfy(scene_train.H, scene_train.W, fxfy[0], fxfy[1])
t_vals = torch.linspace(scene_train.near, scene_train.far, args.num_sample, device=my_devices) # (N_sample,) sample position
N_img, H, W = eval_c2ws.shape[0], scene_train.H, scene_train.W
rendered_img_list = []
rendered_depth_list = []
for i in range(N_img):
c2w = eval_c2ws[i].to(my_devices) # (4, 4)
# split an image to rows when the input image resolution is high
rays_dir_cam_split_rows = ray_dir_cam.split(args.num_rows_eval_img, dim=0)
rendered_img = []
rendered_depth = []
for rays_dir_rows in rays_dir_cam_split_rows:
render_result = model_render_image(c2w, rays_dir_rows, t_vals, scene_train.near, scene_train.far,
scene_train.H, scene_train.W, fxfy,
model, False, 0.0, args, rgb_act_fn)
rgb_rendered_rows = render_result['rgb'] # (num_rows_eval_img, W, 3)
depth_map = render_result['depth_map'] # (num_rows_eval_img, W)
rendered_img.append(rgb_rendered_rows)
rendered_depth.append(depth_map)
# combine rows to an image
rendered_img = torch.cat(rendered_img, dim=0)
rendered_depth = torch.cat(rendered_depth, dim=0).unsqueeze(0) # (1, H, W)
# for vis
rendered_img_list.append(rendered_img.cpu().numpy())
rendered_depth_list.append(rendered_depth.cpu().numpy())
# random display an eval image to tfboard
rand_num = np.random.randint(low=0, high=N_img)
disp_img = np.transpose(rendered_img_list[rand_num], (2, 0, 1)) # (3, H, W)
disp_depth = rendered_depth_list[rand_num] # (1, H, W)
writer.add_image('eval_img', disp_img, global_step=epoch_i)
writer.add_image('eval_depth', disp_depth, global_step=epoch_i)
return
def train_one_epoch(scene_train, optimizer_nerf, optimizer_focal, optimizer_pose, model, focal_net, pose_param_net,
my_devices, args, rgb_act_fn):
model.train()
focal_net.train()
pose_param_net.train()
t_vals = torch.linspace(scene_train.near, scene_train.far, args.num_sample, device=my_devices) # (N_sample,) sample position
N_img, H, W = scene_train.N_imgs, scene_train.H, scene_train.W
L2_loss_epoch = []
# shuffle the training imgs
ids = np.arange(N_img)
np.random.shuffle(ids)
for i in ids:
fxfy = focal_net(0)
ray_dir_cam = comp_ray_dir_cam_fxfy(H, W, fxfy[0], fxfy[1])
img = scene_train.imgs[i].to(my_devices) # (H, W, 3)
c2w = pose_param_net(i) # (4, 4)
# sample pixel on an image and their rays for training.
r_id = torch.randperm(H, device=my_devices)[:args.train_rand_rows] # (N_select_rows)
c_id = torch.randperm(W, device=my_devices)[:args.train_rand_cols] # (N_select_cols)
ray_selected_cam = ray_dir_cam[r_id][:, c_id] # (N_select_rows, N_select_cols, 3)
img_selected = img[r_id][:, c_id] # (N_select_rows, N_select_cols, 3)
# render an image using selected rays, pose, sample intervals, and the network
render_result = model_render_image(c2w, ray_selected_cam, t_vals, scene_train.near, scene_train.far,
scene_train.H, scene_train.W, fxfy,
model, True, 0.0, args, rgb_act_fn) # (N_select_rows, N_select_cols, 3)
rgb_rendered = render_result['rgb'] # (N_select_rows, N_select_cols, 3)
L2_loss = F.mse_loss(rgb_rendered, img_selected) # loss for one image
L2_loss.backward()
optimizer_nerf.step()
optimizer_focal.step()
optimizer_pose.step()
optimizer_nerf.zero_grad()
optimizer_focal.zero_grad()
optimizer_pose.zero_grad()
L2_loss_epoch.append(L2_loss.item())
L2_loss_epoch_mean = np.mean(L2_loss_epoch) # loss for all images.
mean_losses = {
'L2': L2_loss_epoch_mean,
}
return mean_losses
def main(args):
my_devices = torch.device('cuda:' + str(args.gpu_id))
'''Create Folders'''
exp_root_dir = Path(os.path.join('./logs/any_folder', args.scene_name))
exp_root_dir.mkdir(parents=True, exist_ok=True)
experiment_dir = Path(os.path.join(exp_root_dir, gen_detail_name(args)))
experiment_dir.mkdir(parents=True, exist_ok=True)
shutil.copy('./models/nerf_models.py', experiment_dir)
shutil.copy('./models/intrinsics.py', experiment_dir)
shutil.copy('./models/poses.py', experiment_dir)
shutil.copy('./tasks/any_folder/train.py', experiment_dir)
'''LOG'''
logger = logging.getLogger()
logger.setLevel(logging.INFO)
file_handler = logging.FileHandler(os.path.join(experiment_dir, 'log.txt'))
file_handler.setLevel(logging.INFO)
stream_handler = logging.StreamHandler()
stream_handler.setLevel(logging.WARNING)
logger.addHandler(file_handler)
logger.addHandler(stream_handler)
logger.info(args)
'''Summary Writer'''
writer = SummaryWriter(log_dir=str(experiment_dir))
'''Data Loading'''
scene_train = DataLoaderAnyFolder(base_dir=args.base_dir,
scene_name=args.scene_name,
res_ratio=args.resize_ratio,
num_img_to_load=args.train_img_num,
start=args.train_start,
end=args.train_end,
skip=args.train_skip,
load_sorted=args.train_load_sorted)
print('Train with {0:6d} images.'.format(scene_train.imgs.shape[0]))
# We have no eval pose in this any_folder task. Eval with a 4x4 identity pose.
eval_c2ws = torch.eye(4).unsqueeze(0).float() # (1, 4, 4)
'''Model Loading'''
pos_enc_in_dims = (2 * args.pos_enc_levels + int(args.pos_enc_inc_in)) * 3 # (2L + 0 or 1) * 3
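# e.g. with the default pos_enc_levels=10 and pos_enc_inc_in=True: (2*10 + 1) * 3 = 63 dims,
# matching the "(2L+1)*C = 63" shape noted in model_render_image.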
if args.use_dir_enc:
dir_enc_in_dims = (2 * args.dir_enc_levels + int(args.dir_enc_inc_in)) * 3 # (2L + 0 or 1) * 3
else:
dir_enc_in_dims = 0
model = OfficialNerf(pos_enc_in_dims, dir_enc_in_dims, args.hidden_dims)
if args.multi_gpu:
model = torch.nn.DataParallel(model).to(device=my_devices)
else:
model = model.to(device=my_devices)
# learn focal parameter
focal_net = LearnFocal(scene_train.H, scene_train.W, args.learn_focal, args.fx_only, order=args.focal_order)
if args.multi_gpu:
focal_net = torch.nn.DataParallel(focal_net).to(device=my_devices)
else:
focal_net = focal_net.to(device=my_devices)
# learn pose for each image
pose_param_net = LearnPose(scene_train.N_imgs, args.learn_R, args.learn_t, None)
if args.multi_gpu:
pose_param_net = torch.nn.DataParallel(pose_param_net).to(device=my_devices)
else:
pose_param_net = pose_param_net.to(device=my_devices)
'''Set Optimiser'''
optimizer_nerf = torch.optim.Adam(model.parameters(), lr=args.nerf_lr)
optimizer_focal = torch.optim.Adam(focal_net.parameters(), lr=args.focal_lr)
optimizer_pose = torch.optim.Adam(pose_param_net.parameters(), lr=args.pose_lr)
scheduler_nerf = torch.optim.lr_scheduler.MultiStepLR(optimizer_nerf, milestones=args.nerf_milestones,
gamma=args.nerf_lr_gamma)
scheduler_focal = torch.optim.lr_scheduler.MultiStepLR(optimizer_focal, milestones=args.focal_milestones,
gamma=args.focal_lr_gamma)
scheduler_pose = torch.optim.lr_scheduler.MultiStepLR(optimizer_pose, milestones=args.pose_milestones,
gamma=args.pose_lr_gamma)
'''Training'''
for epoch_i in tqdm(range(args.epoch), desc='epochs'):
rgb_act_fn = torch.sigmoid
train_epoch_losses = train_one_epoch(scene_train, optimizer_nerf, optimizer_focal, optimizer_pose,
model, focal_net, pose_param_net, my_devices, args, rgb_act_fn)
train_L2_loss = train_epoch_losses['L2']
scheduler_nerf.step()
scheduler_focal.step()
scheduler_pose.step()
train_psnr = mse2psnr(train_L2_loss)
writer.add_scalar('train/mse', train_L2_loss, epoch_i)
writer.add_scalar('train/psnr', train_psnr, epoch_i)
writer.add_scalar('train/lr', scheduler_nerf.get_lr()[0], epoch_i)
logger.info('{0:6d} ep: Train: L2 loss: {1:.4f}, PSNR: {2:.3f}'.format(epoch_i, train_L2_loss, train_psnr))
tqdm.write('{0:6d} ep: Train: L2 loss: {1:.4f}, PSNR: {2:.3f}'.format(epoch_i, train_L2_loss, train_psnr))
if epoch_i % args.eval_interval == 0 and epoch_i > 0:
with torch.no_grad():
eval_one_epoch(eval_c2ws, scene_train, model, focal_net, pose_param_net, my_devices, args, epoch_i, writer, rgb_act_fn)
fxfy = focal_net(0)
tqdm.write('Est fx: {0:.2f}, fy {1:.2f}'.format(fxfy[0].item(), fxfy[1].item()))
logger.info('Est fx: {0:.2f}, fy {1:.2f}'.format(fxfy[0].item(), fxfy[1].item()))
# save the latest model
save_checkpoint(epoch_i, model, optimizer_nerf, experiment_dir, ckpt_name='latest_nerf')
save_checkpoint(epoch_i, focal_net, optimizer_focal, experiment_dir, ckpt_name='latest_focal')
save_checkpoint(epoch_i, pose_param_net, optimizer_pose, experiment_dir, ckpt_name='latest_pose')
return
if __name__ == '__main__':
args = parse_args()
set_randomness(args)
main(args)
|
#!/usr/bin/env python
"""Basic histogram script"""
import argparse
from general_seq import conv
from general_seq import seq_IO
from plot import conv as pconv
from plot import hist
def main(data_file, title, output_prefix):
sequences = seq_IO.read_sequences(data_file, additional_params=True, header=True)
data = [ seq_dict["Degree"] for seq, seq_dict in sequences.items() ]
fig, axarr = pconv.create_ax(1, 1, shx=False, shy=False)
hist.draw_actual_plot(axarr[0,0], data, "", title.capitalize(), normed=True, nbins=30, edgecolor=None, log=False)
#axarr[0,0].ticklabel_format(axis='x', style='sci', scilimits=(-2,2))
pconv.save_fig(fig, output_prefix, title, 5, 5, tight=True, size=10)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument ('--data_file', '-d', help="text file which contains sequences and the label you want to use for the set")
parser.add_argument ('--title', help="title for the plot")
parser.add_argument ('--output_prefix', help='output file prefix')
args = parser.parse_args()
main(args.data_file, args.title, args.output_prefix)
|
import { Container } from 'react-bootstrap'
import { Route, Switch } from 'react-router-dom'
// Layout
import Layout from './layout/Layout'
// pages
import Home from './pages/Home'
import About from './pages/About'
import NotFound from './pages/NotFound'
const App = () => {
return (
<div>
<Layout>
<Switch>
<Route path='/' component={Home} exact />
<Route path='/about' component={About} />
<Route component={NotFound} />
</Switch>
</Layout>
</div>
)
}
export default App
|
import select, time, queue
from bitcoinnano import Connection, Interface, SimpleConfig
from bitcoinnano.network import parse_servers
from collections import defaultdict
# electrum.util.set_verbosity(1)
def get_interfaces(servers, timeout=10):
'''Returns a map of servers to connected interfaces. If any
connections fail or timeout, they will be missing from the map.
'''
socket_queue = queue.Queue()
config = SimpleConfig()
connecting = {}
for server in servers:
if server not in connecting:
connecting[server] = Connection(server, socket_queue, config.path)
interfaces = {}
timeout = time.time() + timeout
count = 0
while time.time() < timeout and count < len(servers):
try:
server, socket = socket_queue.get(True, 0.3)
except queue.Empty:
continue
if socket:
interfaces[server] = Interface(server, socket)
count += 1
return interfaces
def wait_on_interfaces(interfaces, timeout=10):
'''Return a map of servers to a list of (request, response) tuples.
Waits timeout seconds, or until each interface has a response'''
result = defaultdict(list)
timeout = time.time() + timeout
while len(result) < len(interfaces) and time.time() < timeout:
rin = [i for i in interfaces.values()]
win = [i for i in interfaces.values() if i.unsent_requests]
rout, wout, xout = select.select(rin, win, [], 1)
for interface in wout:
interface.send_requests()
for interface in rout:
responses = interface.get_responses()
if responses:
result[interface.server].extend(responses)
return result
def get_peers():
config = SimpleConfig()
peers = {}
# 1. get connected interfaces
server = config.get('server')
interfaces = get_interfaces([server])
if not interfaces:
print("No connection to", server)
return []
# 2. get list of peers
interface = interfaces[server]
interface.queue_request('server.peers.subscribe', [], 0)
responses = wait_on_interfaces(interfaces).get(server)
if responses:
response = responses[0][1] # One response, (req, response) tuple
peers = parse_servers(response.get('result'))
return peers
def send_request(peers, method, params):
print("Contacting %d servers"%len(peers))
interfaces = get_interfaces(peers)
print("%d servers could be reached" % len(interfaces))
for peer in peers:
if not peer in interfaces:
print("Connection failed:", peer)
for msg_id, i in enumerate(interfaces.values()):
i.queue_request(method, params, msg_id)
responses = wait_on_interfaces(interfaces)
for peer in interfaces:
if not peer in responses:
print(peer, "did not answer")
results = dict(zip(responses.keys(), [t[0][1].get('result') for t in responses.values()]))
print("%d answers"%len(results))
return results
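# Illustrative usage sketch -- the server string format and the RPC method below are
# placeholders, not verified against any particular server:
# peers = get_peers()
# results = send_request(['electrum.example.com:50002:s'], 'server.banner', [])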
|
/**
* Created by Rogier on 27/06/2017.
*/
import { assign, clone, cloneDeep, concat, each, find, isArray, map, merge, pull, startsWith } from 'lodash';
import * as React from 'react';
import is from 'bottom_line/lang/is';
import rightOf from 'bottom_line/sequences/rightOf';
// entity id
let eid = 0;
/**
* Scene loader that loads an application based on a React scene.
*/
export default class SceneLoader {
/**
* Constructs a new SceneLoader.
*
* @param cells - Instance of CellManager.
* @param components - Object containing all React available scene components
* @param events - Instance of the EventQueue.
*
* @returns new SceneLoader.
*/
constructor(cells, components, events) {
this.cells = cells;
this.components = components;
this.events = events;
}
/**
* Loads a react node and all its children. Typically used to load a scene.
*
* @param sceneNode - React node to load.
* @param parent - Render node to attach the loaded node to.
* @param loader - Loader object to retrieve asset information.
* @param store - Store the register data elements.
*
* @returns this for chaining.
*/
load(sceneNode, parent, loader, store) {
const reactScene = this.loadNode(sceneNode);
const renderScene = this.createSceneNode(reactScene, loader);
store.init(reactScene);
parent['addChild'](renderScene);
return this;
}
/**
* Loads a react node. Compiling the scene using predefined components.
*
* @param node - React node to load. Can be a string in case of a text node.
*
* @returns The top node of the loaded scene.
*/
loadNode(node) {
if (is.string(node)) {
return node;
}
const component = this.components[node.type];
const nodeClone = this.setId(node);
const mergedNode = component
? this.mergeNodes(React.cloneElement(component), nodeClone)
: nodeClone;
let children;
children = mergedNode.props.children || [];
children = isArray(children) ? Array.from(children) : [children];
children = map(children, (child) => this.loadNode(child));
return React.cloneElement(mergedNode, mergedNode.props, children);
}
/**
* Retrieves all cells from the React node properties object.
*
* @param properties - React node properties object.
*
* @returns Object containing all cells retrieved from the properties object.
*/
getCells(properties) {
const cells = {};
each(properties, (data, cell) => {
cell = startsWith(cell, 'c-') ? rightOf(cell, 'c-') : '';
if (!cell) {
return;
} // continue
cells[cell] = assign(clone(this.cells.get(cell)), data);
cells[cell].eid = properties.id;
});
return cells;
}
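// Illustrative (prop shape assumed): a node with props { id: 3, 'c-transform': { x: 10 } }
// would yield { transform: { ...this.cells.get('transform'), x: 10, eid: 3 } }.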
/**
* Merges 2 react nodes.
*
* @param node1 - React node 1.
* @param node2 - React node 2.
*
* @returns Merged node.
*/
mergeNodes(node1, node2) {
const mergedProps = merge(cloneDeep(node1.props), node2.props);
const mergedChildren = this.mergeChildren(node1.props.children, node2.props.children);
return this.setId(React.cloneElement(node1, mergedProps, mergedChildren));
}
/**
* Merges 2 nodes' children into 1 array.
*
* @param node1children - Children node 1.
* @param node2children - Children node 2.
*
* @returns Merged array of children.
*/
mergeChildren(node1children = [], node2children = []) {
const children1 = (isArray(node1children) ? Array.from(node1children) : [node1children]);
const children2 = (isArray(node2children) ? Array.from(node2children) : [node2children]);
const mergedChildren = [];
each(children1, (child) => {
const match = child.props.name && find(children2, (pChild) => pChild.props.name && pChild.props.name === child.props.name);
if (match) {
mergedChildren.push(this.mergeNodes(child, match));
pull(children2, match);
}
else {
mergedChildren.push(this.setId(cloneDeep(child)));
}
});
return concat(mergedChildren, children2);
}
/**
* Sets a proper id/key onto an element.
*
* @param element - Element to set an id for.
*
* @returns Element clone with proper id
*/
setId(element) {
if (element.key !== null) {
return element;
}
const props = cloneDeep(element.props);
props.id = props.key = props.hasOwnProperty('id') ? props.id : eid++;
return React.cloneElement(element, props);
}
/**
* Creates a view node from a React node.
*
* @param reactNode - The react node to create a view from.
* @param loader - Loader object to retrieve assets.
*
* @returns The constructed sceneNode.
*/
createSceneNode(reactNode, loader) {
if (is.string(reactNode)) {
return this.string2SceneNode(reactNode);
}
else {
return this.reactNode2SceneNode(reactNode, loader);
}
}
/**
* Creates a view node from a React node.
*
* @param reactNode - The react node to create a view from.
* @param loader - Loader object to retrieve assets.
*
* @returns The constructed sceneNode.
*/
reactNode2SceneNode(reactNode, loader) {
const properties = reactNode.props;
const cells = this.getCells(properties);
const on = cells.on;
const spriteData = cells.sprite;
const transformData = cells.transform;
const sceneNode = spriteData
? PIXI.Sprite.fromImage(loader.resources[spriteData.texture].url)
: new PIXI.Container();
if (spriteData && sceneNode instanceof PIXI.Sprite) {
sceneNode.anchor.x = spriteData.anchorX;
sceneNode.anchor.y = spriteData.anchorY;
}
if (transformData) {
sceneNode.x = transformData.x;
sceneNode.y = transformData.y;
sceneNode.rotation = transformData.rotation;
}
if (on) {
sceneNode.interactive = true;
sceneNode.buttonMode = true;
if (on.click) {
sceneNode.on('pointerdown', () => this.events.push({ event: on.click, sender: on.eid }));
}
}
let children;
children = properties.children || [];
children = isArray(children) ? Array.from(children) : [children];
Reflect.defineMetadata('link:structure-view', sceneNode, reactNode);
each(children, (child) => {
sceneNode.addChild(this.createSceneNode(child, loader));
});
return sceneNode;
}
/**
* Creates a text node from a string.
*
* @param text - Text string to convert to a node.
*
* @returns The constructed text node.
*/
string2SceneNode(text) {
const defaultStyle = new PIXI.TextStyle({ fontFamily: 'Arial', fontSize: 32 });
const sceneNode = new PIXI.Text(text, defaultStyle);
sceneNode.anchor.set(0.5);
return sceneNode;
}
}
//# sourceMappingURL=SceneLoader.js.map
|
describe('DAY 7: this keyword', () => {
it(`invoke a constructor function and assign the resulting object to "a"`, () => {
/**
* @returns {undefined|object}
*/
function A () {
this.b = function b () {
return this.c;
};
this.c = [1, 2, 3, 4];
}
// complete the code to pass the test
let a = new A();
expect(a.b()).toBe(a.c);
expect(a).toBeInstanceOf(A);
});
it(`create a bound function to make b return a.c value`, () => {
let a = {
c: [1, 2, 3]
};
/**
* @memberof a
* @returns {array}
*/
function b () {
return this.c;
}
// one possible completion: bind b to a, then invoke it
let w = b.bind(a)();
expect(w).toBe(a.c);
});
it(`call function b with a as the belonging object
and provide the required argument values to pass the test`, () => {
let a = {
c: [1, 2, 3]
};
/**
*
* @param {number} x
* @param {number} y
* @memberof a
* @returns {array}
*/
function b (x, y) {
this.x = x;
this.y = y;
return this.c;
}
// one possible completion: call b with a as `this` and numeric arguments
let w = b.call(a, 1, 2);
expect(w).toBe(a.c);
expect(typeof a.x).toBe('number');
expect(typeof a.y).toBe('number');
});
it(`apply a as this for b and pass the required arguments to pass the test`, () => {
let a = {
c: [1, 2, 3]
};
/**
*
* @param {number} x
* @param {number} y
* @memberof a
* @returns {array}
*/
function b (x, y) {
this.x = x;
this.y = y;
return this.c;
}
// one possible completion: apply b with a as `this` and numeric arguments
let w = b.apply(a, [1, 2]);
expect(w).toBe(a.c);
expect(typeof a.x).toBe('number');
expect(typeof a.y).toBe('number');
});
it(`function b should resolve this to object a`, () => {
/**
*
* @memberof a
* @returns {array}
*/
function b () {
return this.c;
}
let a = {
// completed: attach b so `this` inside b resolves to a
b: b,
c: [1, 2, 3]
};
expect(a.b).toBe(b);
expect(a.b()).toBe(a.c);
});
it(`lexical this
can you fix it?`, () => {
/**
* @returns {undefined|object}
*/
function A () {
this.b = function () {
// an arrow function keeps the lexical `this` of the enclosing call
return () => this.c;
};
this.c = 'hi';
}
let a = new A();
let d = {
b: a.b,
c: 'bye',
e: a.b()
};
let f = a.b();
expect(d.b()()).toBe(d.c);
expect(d.e()).toBe(a.c);
expect(f()).toBe(a.c);
});
});
|
# (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ansible import constants as C
from ansible import context
from ansible.executor.task_queue_manager import TaskQueueManager
from ansible.module_utils._text import to_text
from ansible.module_utils.parsing.convert_bool import boolean
from ansible.plugins.loader import become_loader, connection_loader, shell_loader
from ansible.playbook import Playbook
from ansible.template import Templar
from ansible.utils.helpers import pct_to_int
from ansible.utils.path import makedirs_safe
from ansible.utils.ssh_functions import check_for_controlpersist
from ansible.utils.display import Display
display = Display()
class PlaybookExecutor:
'''
This is the primary class for executing playbooks, and thus the
basis for bin/ansible-playbook operation.
'''
def __init__(self, playbooks, inventory, variable_manager, loader, passwords):
self._playbooks = playbooks
self._inventory = inventory
self._variable_manager = variable_manager
self._loader = loader
self.passwords = passwords
self._unreachable_hosts = dict()
if context.CLIARGS.get('listhosts') or context.CLIARGS.get('listtasks') or \
context.CLIARGS.get('listtags') or context.CLIARGS.get('syntax'):
self._tqm = None
else:
self._tqm = TaskQueueManager(
inventory=inventory,
variable_manager=variable_manager,
loader=loader,
passwords=self.passwords,
forks=context.CLIARGS.get('forks'),
)
# Note: We run this here to cache whether the default ansible ssh
# executable supports control persist. Sometime in the future we may
# need to enhance this to check that ansible_ssh_executable specified
# in inventory is also cached. We can't do this caching at the point
# where it is used (in task_executor) because that is post-fork and
# therefore would be discarded after every task.
check_for_controlpersist(C.ANSIBLE_SSH_EXECUTABLE)
def run(self):
'''
Run the given playbook, based on the settings in the play which
may limit the runs to serialized groups, etc.
'''
result = 0
entrylist = []
entry = {}
try:
# preload become/connection/shell to set config defs cached
list(connection_loader.all(class_only=True))
list(shell_loader.all(class_only=True))
list(become_loader.all(class_only=True))
for playbook_path in self._playbooks:
pb = Playbook.load(playbook_path, variable_manager=self._variable_manager, loader=self._loader)
# FIXME: move out of inventory self._inventory.set_playbook_basedir(os.path.realpath(os.path.dirname(playbook_path)))
if self._tqm is None: # we are doing a listing
entry = {'playbook': playbook_path}
entry['plays'] = []
else:
# make sure the tqm has callbacks loaded
self._tqm.load_callbacks()
self._tqm.send_callback('v2_playbook_on_start', pb)
i = 1
plays = pb.get_plays()
display.vv(u'%d plays in %s' % (len(plays), to_text(playbook_path)))
for play in plays:
if play._included_path is not None:
self._loader.set_basedir(play._included_path)
else:
self._loader.set_basedir(pb._basedir)
# clear any filters which may have been applied to the inventory
self._inventory.remove_restriction()
# Allow variables to be used in vars_prompt fields.
all_vars = self._variable_manager.get_vars(play=play)
templar = Templar(loader=self._loader, variables=all_vars)
setattr(play, 'vars_prompt', templar.template(play.vars_prompt))
# FIXME: this should be a play 'sub object' like loop_control
if play.vars_prompt:
for var in play.vars_prompt:
vname = var['name']
prompt = var.get("prompt", vname)
default = var.get("default", None)
private = boolean(var.get("private", True))
confirm = boolean(var.get("confirm", False))
encrypt = var.get("encrypt", None)
salt_size = var.get("salt_size", None)
salt = var.get("salt", None)
unsafe = var.get("unsafe", None)
if vname not in self._variable_manager.extra_vars:
if self._tqm:
self._tqm.send_callback('v2_playbook_on_vars_prompt', vname, private, prompt, encrypt, confirm, salt_size, salt,
default, unsafe)
play.vars[vname] = display.do_var_prompt(vname, private, prompt, encrypt, confirm, salt_size, salt, default, unsafe)
else: # we are either in --list-<option> or syntax check
play.vars[vname] = default
# Post validate so any play level variables are templated
all_vars = self._variable_manager.get_vars(play=play)
templar = Templar(loader=self._loader, variables=all_vars)
play.post_validate(templar)
if context.CLIARGS['syntax']:
continue
if self._tqm is None:
# we are just doing a listing
entry['plays'].append(play)
else:
self._tqm._unreachable_hosts.update(self._unreachable_hosts)
previously_failed = len(self._tqm._failed_hosts)
previously_unreachable = len(self._tqm._unreachable_hosts)
break_play = False
# we are actually running plays
batches = self._get_serialized_batches(play)
if len(batches) == 0:
self._tqm.send_callback('v2_playbook_on_play_start', play)
self._tqm.send_callback('v2_playbook_on_no_hosts_matched')
for batch in batches:
# restrict the inventory to the hosts in the serialized batch
self._inventory.restrict_to_hosts(batch)
# and run it...
result = self._tqm.run(play=play)
# break the play if the result equals the special return code
if result & self._tqm.RUN_FAILED_BREAK_PLAY != 0:
result = self._tqm.RUN_FAILED_HOSTS
break_play = True
# check the number of failures here, to see if they're above the maximum
# failure percentage allowed, or if any errors are fatal. If either of those
# conditions are met, we break out, otherwise we only break out if the entire
# batch failed
failed_hosts_count = len(self._tqm._failed_hosts) + len(self._tqm._unreachable_hosts) - \
(previously_failed + previously_unreachable)
if len(batch) == failed_hosts_count:
break_play = True
break
# update the previous counts so they don't accumulate incorrectly
# over multiple serial batches
previously_failed += len(self._tqm._failed_hosts) - previously_failed
previously_unreachable += len(self._tqm._unreachable_hosts) - previously_unreachable
# save the unreachable hosts from this batch
self._unreachable_hosts.update(self._tqm._unreachable_hosts)
if break_play:
break
i = i + 1 # per play
if entry:
entrylist.append(entry) # per playbook
# send the stats callback for this playbook
if self._tqm is not None:
if C.RETRY_FILES_ENABLED:
retries = set(self._tqm._failed_hosts.keys())
retries.update(self._tqm._unreachable_hosts.keys())
retries = sorted(retries)
if len(retries) > 0:
if C.RETRY_FILES_SAVE_PATH:
basedir = C.RETRY_FILES_SAVE_PATH
elif playbook_path:
basedir = os.path.dirname(os.path.abspath(playbook_path))
else:
basedir = '~/'
(retry_name, _) = os.path.splitext(os.path.basename(playbook_path))
filename = os.path.join(basedir, "%s.retry" % retry_name)
if self._generate_retry_inventory(filename, retries):
display.display("\tto retry, use: --limit @%s\n" % filename)
self._tqm.send_callback('v2_playbook_on_stats', self._tqm._stats)
# if the last result wasn't zero, break out of the playbook file name loop
if result != 0:
break
if entrylist:
return entrylist
finally:
if self._tqm is not None:
self._tqm.cleanup()
if self._loader:
self._loader.cleanup_all_tmp_files()
if context.CLIARGS['syntax']:
display.display("No issues encountered")
return result
if context.CLIARGS['start_at_task'] and not self._tqm._start_at_done:
display.error(
"No matching task \"%s\" found."
" Note: --start-at-task can only follow static includes."
% context.CLIARGS['start_at_task']
)
return result
def _get_serialized_batches(self, play):
'''
Returns a list of hosts, subdivided into batches based on
the serial size specified in the play.
'''
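# Illustrative example: with 'serial: [1, 3]' and 10 matching hosts, the batches are
# sized 1, 3, 3, 3 -- the last serial value is reused until every host is consumed.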
# make sure we have a unique list of hosts
all_hosts = self._inventory.get_hosts(play.hosts, order=play.order)
all_hosts_len = len(all_hosts)
# the serial value can be listed as a scalar or a list of
# scalars, so we make sure it's a list here
serial_batch_list = play.serial
if len(serial_batch_list) == 0:
serial_batch_list = [-1]
cur_item = 0
serialized_batches = []
while len(all_hosts) > 0:
# get the serial value from current item in the list
serial = pct_to_int(serial_batch_list[cur_item], all_hosts_len)
# if the serial count was not specified or is invalid, default to
# a list of all hosts, otherwise grab a chunk of the hosts equal
# to the current serial item size
if serial <= 0:
serialized_batches.append(all_hosts)
break
else:
play_hosts = []
for x in range(serial):
if len(all_hosts) > 0:
play_hosts.append(all_hosts.pop(0))
serialized_batches.append(play_hosts)
# increment the current batch list item number, and if we've hit
# the end keep using the last element until we've consumed all of
# the hosts in the inventory
cur_item += 1
if cur_item > len(serial_batch_list) - 1:
cur_item = len(serial_batch_list) - 1
return serialized_batches
def _generate_retry_inventory(self, retry_path, replay_hosts):
'''
Called when a playbook run fails. It generates an inventory which allows
re-running on ONLY the failed hosts. This may duplicate some variable
information in group_vars/host_vars but that is ok, and expected.
'''
try:
makedirs_safe(os.path.dirname(retry_path))
with open(retry_path, 'w') as fd:
for x in replay_hosts:
fd.write("%s\n" % x)
except Exception as e:
display.warning("Could not create retry file '%s'.\n\t%s" % (retry_path, to_text(e)))
return False
return True
|
import * as f from '@kuba/f'
import agent from './agent'
import bots from './bots'
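// Presumably: the exported value is truthy when the user agent is empty or matches the known-bot pattern.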
export default f.or(f.isEmpty(agent), f.test(bots, agent))
|
#
# Simple JWT documentation build configuration file, created by
# sphinx-quickstart on Thu Oct 16 20:43:24 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
from pkg_resources import get_distribution
# -- General configuration ------------------------------------------------
def django_configure():
from django.conf import settings
settings.configure(
SECRET_KEY="a random key to use",
INSTALLED_APPS=(
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.sites",
"django.contrib.staticfiles",
"rest_framework",
"rest_framework_simplejwt",
"rest_framework_simplejwt.token_blacklist",
),
)
try:
import django
django.setup()
except AttributeError:
pass
django_configure()
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.doctest",
"sphinx.ext.intersphinx",
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix of source filenames.
source_suffix = ".rst"
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = "index"
# General information about the project.
project = "Simple JWT"
copyright = "2020, David Sanders"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The full version, including alpha/beta/rc tags.
release = get_distribution("djangorestframework_simplejwt").version
# The short X.Y version.
version = ".".join(release.split(".")[:2])
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = [
"_build",
"modules.rst",
]
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "sphinx_rtd_theme"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = "rest_framework_simplejwtdoc"
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(
"index",
"rest_framework_simplejwt.tex",
"Simple JWT Documentation",
"David Sanders",
"manual",
),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(
"index",
"rest_framework_simplejwt",
"Simple JWT Documentation",
["David Sanders"],
1,
)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
"index",
"Simple JWT",
"Simple JWT",
"David Sanders",
"Simple JWT",
"A JSON Web Token authentication plugin for the Django REST Framework.",
"Miscellaneous",
),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False
# -- Intersphinx configuration ------------------------------------------------
intersphinx_mapping = {
"python": ("https://docs.python.org/3.8", None),
}
# -- Doctest configuration ----------------------------------------
import doctest
doctest_default_flags = (
0
| doctest.DONT_ACCEPT_TRUE_FOR_1
| doctest.ELLIPSIS
| doctest.IGNORE_EXCEPTION_DETAIL
| doctest.NORMALIZE_WHITESPACE
)
|
from tap_lever.streams.base import BaseStream
from tap_lever.streams import cache as stream_cache
import singer
LOGGER = singer.get_logger() # noqa
class CandidateReferralsStream(BaseStream):
API_METHOD = 'GET'
TABLE = 'candidate_referrals'
@property
def path(self):
return '/candidates/{candidate_id}/referrals'
def get_url(self, candidate):
_path = self.path.format(candidate_id=candidate)
return 'https://api.lever.co/v1{}'.format(_path)
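# e.g. get_url('abc123') -> 'https://api.lever.co/v1/candidates/abc123/referrals'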
def sync_data(self):
table = self.TABLE
candidates = stream_cache.get('candidates')
LOGGER.info("Found {} candidates in cache".format(len(candidates)))
params = self.get_params(_next=None)
for i, candidate in enumerate(candidates):
LOGGER.info("Fetching referrals for candidate {} of {}".format(i + 1, len(candidates)))
candidate_id = candidate['id']
url = self.get_url(candidate_id)
resources = self.sync_paginated(url, params)
|
##############################################################################
#
# Copyright (c) 2014, 2degrees Limited.
# All Rights Reserved.
#
# This file is part of hubspot-contacts
# <https://github.com/2degrees/hubspot-contacts>, which is subject to the
# provisions of the BSD at
# <http://dev.2degreesnetwork.com/p/2degrees-license.html>. A copy of the
# license should accompany this distribution. THIS SOFTWARE IS PROVIDED "AS IS"
# AND ANY AND ALL EXPRESS OR IMPLIED WARRANTIES ARE DISCLAIMED, INCLUDING, BUT
# NOT LIMITED TO, THE IMPLIED WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST
# INFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE.
#
##############################################################################
from nose.tools import assert_raises
from nose.tools import eq_
from voluptuous import Invalid
from voluptuous import Schema
from hubspot.contacts._schemas._validators import AnyListItemValidates
from hubspot.contacts._schemas._validators import Constant
from hubspot.contacts._schemas._validators import DynamicDictionary
from hubspot.contacts._schemas._validators import GetDictValue
class TestGettingDictValues(object):
def setup(self):
self.schema = Schema(GetDictValue('key'))
def test_key_in_dictionary(self):
eq_('abc', self.schema({'key': 'abc'}))
def test_key_not_in_dictionary(self):
with assert_raises(Invalid) as context_manager:
self.schema({})
exception = context_manager.exception
eq_('expected key \'key\' in dictionary', str(exception))
def test_not_a_dictionary(self):
with assert_raises(Invalid) as context_manager:
self.schema([1, 2])
exception = context_manager.exception
eq_('expected a dictionary', str(exception))
class TestDynamicDictionary(object):
def setup(self):
self.schema = Schema(DynamicDictionary(str, int))
def test_valid_dictionary(self):
dictionary = {'a': 1, 'b': 2}
eq_(dictionary, self.schema(dictionary))
def test_empty_dictionary(self):
eq_({}, self.schema({}))
def test_non_dictionary(self):
"""
An 'Invalid' exception is raised when the value is not a dictionary
"""
with assert_raises(Invalid):
self.schema(('value', 'whatever'))
def test_invalid_dictionary_key(self):
""" An 'Invalid' exception is raised when any key is invalid """
with assert_raises(Invalid):
self.schema({1: 2})
def test_invalid_dictionary_value(self):
""" An 'Invalid' exception is raised when any value is invalid """
with assert_raises(Invalid):
self.schema({'value': [1, 2, 3]})
class TestAnyListItemValidates(object):
def setup(self):
self.schema = Schema(AnyListItemValidates(int))
def test_contains(self):
input_tuple = [1, 'string', []]
eq_(input_tuple, self.schema(input_tuple))
def test_contains_multiple(self):
input_tuple = [1, 2, 3]
eq_(input_tuple, self.schema(input_tuple))
def test_doesnt_contain(self):
input_tuple = ['string', []]
with assert_raises(Invalid):
self.schema(input_tuple)
def test_is_not_iterable(self):
with assert_raises(Invalid):
self.schema(1)
class TestConstantValue(object):
def setup(self):
self.schema = Schema(Constant(1))
def test_matching_value(self):
eq_(1, self.schema(1))
def test_non_matching_value(self):
with assert_raises(Invalid):
self.schema(2)
|
from django.template.defaultfilters import truncatewords
from django.utils.html import strip_tags
from docutils import nodes
def extract_title(document):
"""Return the title of the document.
:param document:
:type document: :class:`docutils.nodes.document`
"""
for node in document.traverse(nodes.PreBibliographic):
if isinstance(node, nodes.title):
return node.astext()
def extract_metadata(document):
"""Return the dict containing document metadata.
:param document:
:type document: :class:`docutils.nodes.document`
:returns: docinfo data from document
:rtype: dict
From: https://github.com/adieu/mezzanine-cli @ mezzanine_cli/parser.py
License: BSD (https://github.com/adieu/mezzanine-cli/blob/master/setup.py)
"""
output = {}
for docinfo in document.traverse(nodes.docinfo):
for element in docinfo.children:
if element.tagname == 'field': # custom fields (e.g. summary)
name_elem, body_elem = element.children
name = name_elem.astext()
value = body_elem.astext()
else: # standard fields (e.g. address)
name = element.tagname
value = element.astext()
name = name.lower()
output[name] = value
return output
def extract_subtitle(document):
"""Return the subtitle of the document."""
for node in document.traverse(nodes.PreBibliographic):
if isinstance(node, nodes.subtitle):
return node.astext()
def extract_abstract(doctree, length=100):
"""Pull first n words from a docutils document.
We use this to create snippets for Twitter Cards, FB, etc.
:param doctree: docutils document to extract from
:type doctree: :class:`docutils.nodes.document`
:param length: word count to cut content off at
:type length: int
:rtype: string
:returns: truncated content, html tags removed
"""
paragraph_nodes = doctree.traverse(nodes.paragraph)
text = ''
for node in paragraph_nodes:
text += node.astext()
if len(text.split(' ')) > length:
break
return truncatewords(strip_tags(text), length)
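# Illustrative usage sketch (assumes docutils is installed):
# from docutils.core import publish_doctree
# doctree = publish_doctree("Title\n=====\n\nFirst paragraph of the post.")
# extract_title(doctree)                 # -> 'Title'
# extract_abstract(doctree, length=20)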
|
from __future__ import annotations
from dataclasses import dataclass
from dataclasses import field
from typing import List
from typing import Optional
from typing import TYPE_CHECKING
from sqlalchemy import Column
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy import select
from sqlalchemy import String
from sqlalchemy import Table
from sqlalchemy.orm import registry
from sqlalchemy.orm import relationship
mapper_registry: registry = registry()
@mapper_registry.mapped
@dataclass
class User:
__table__ = Table(
"user",
mapper_registry.metadata,
Column("id", Integer, primary_key=True),
Column("name", String(50)),
Column("fullname", String(50)),
Column("nickname", String(12)),
)
id: int = field(init=False)
name: Optional[str] = None
fullname: Optional[str] = None
nickname: Optional[str] = None
addresses: List[Address] = field(default_factory=list)
if TYPE_CHECKING:
_mypy_mapped_attrs = [id, name, fullname, nickname, addresses]
__mapper_args__ = { # type: ignore
"properties": {"addresses": relationship("Address")}
}
@mapper_registry.mapped
@dataclass
class Address:
__table__ = Table(
"address",
mapper_registry.metadata,
Column("id", Integer, primary_key=True),
Column("user_id", Integer, ForeignKey("user.id")),
Column("email_address", String(50)),
)
id: int = field(init=False)
user_id: int = field(init=False)
email_address: Optional[str] = None
if TYPE_CHECKING:
_mypy_mapped_attrs = [id, user_id, email_address]
stmt = select(User.name).where(User.id.in_([1, 2, 3]))
stmt = select(Address).where(Address.email_address.contains("foo"))
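# A minimal end-to-end sketch (illustrative, not part of the original example):
# map the classes to an in-memory SQLite database, persist a User with one
# Address, and run a select() like the ones built above. The sample data values
# are assumptions.
if __name__ == "__main__":
    from sqlalchemy import create_engine
    from sqlalchemy.orm import Session
    engine = create_engine("sqlite://")
    mapper_registry.metadata.create_all(engine)
    with Session(engine) as session:
        user = User(
            name="spongebob",
            fullname="Spongebob Squarepants",
            nickname="sb",
            addresses=[Address(email_address="[email protected]")],
        )
        session.add(user)
        session.commit()
        names = session.execute(
            select(User.name).where(User.id.in_([1, 2, 3]))
        ).scalars().all()
        print(names)  # e.g. ['spongebob']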
|
/**
* Pipedrive API v1
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* The version of the OpenAPI document: 1.0.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*
*/
import ApiClient from "../ApiClient";
import DeleteRoleAssignment from '../model/DeleteRoleAssignment';
import FailResponse from '../model/FailResponse';
import GetRoleAssignments from '../model/GetRoleAssignments';
import GetRoleSettings from '../model/GetRoleSettings';
import NumberBooleanDefault0 from '../model/NumberBooleanDefault0';
import PostRoleAssignment from '../model/PostRoleAssignment';
import Unauthorized from '../model/Unauthorized';
import User from '../model/User';
import UserIDs from '../model/UserIDs';
import UserMe from '../model/UserMe';
import UserPermissions from '../model/UserPermissions';
import Users from '../model/Users';
/**
* Users service.
* @module api/UsersApi
* @version 1.0.0
*/
export default class UsersApi {
/**
* Constructs a new UsersApi.
* @alias module:api/UsersApi
* @class
* @param {module:ApiClient} [apiClient] Optional API client implementation to use,
* default to {@link module:ApiClient#instance} if unspecified.
*/
constructor(apiClient) {
this.apiClient = apiClient || ApiClient.instance;
}
/**
* Add a new user
* Adds a new user to the company, returns the ID upon success.
* @param {String} name The name of the user
* @param {String} email The email of the user
* @param {Boolean} activeFlag Whether the user is active or not. `false` = Not activated, `true` = Activated
* @return {Promise} a {@link https://www.promisejs.org/|Promise}, with an object containing data of type {@link module:model/User} and HTTP response
*/
addUserWithHttpInfo(name, email, activeFlag) {
const opts = {}
let postBody = null;
// verify the required parameter 'name' is set
if (name === undefined || name === null) {
throw new Error("Missing the required parameter 'name' when calling addUser");
}
// verify the required parameter 'email' is set
if (email === undefined || email === null) {
throw new Error("Missing the required parameter 'email' when calling addUser");
}
// verify the required parameter 'activeFlag' is set
if (activeFlag === undefined || activeFlag === null) {
throw new Error("Missing the required parameter 'activeFlag' when calling addUser");
}
let pathParams = {
};
let queryParams = {
};
let headerParams = {
};
let formParams = {
'name': name,
'email': email,
'active_flag': activeFlag,
};
let formParamArray = [
'name',
'email',
'activeFlag',
];
let contentTypes = ['application/x-www-form-urlencoded', ];
const isURLEncoded = contentTypes.includes('application/x-www-form-urlencoded');
const isJSON = contentTypes.includes('application/json');
if (isJSON) {
postBody = { ...postBody, ...opts };
} else if (isURLEncoded) {
for (let key in opts) {
if (opts.hasOwnProperty(key) && !formParamArray.includes(key)) {
formParams[key] = opts[key];
}
}
}
let authNames = ['api_key', 'oauth2', ];
let accepts = ['application/json', ];
let returnType = User;
return this.apiClient.callApi(
'/users', 'POST',
pathParams, queryParams, headerParams, formParams, postBody,
authNames, contentTypes, accepts, returnType, null
);
}
/**
* Add a new user
* Adds a new user to the company, returns the ID upon success.
* @param {String} name The name of the user
* @param {String} email The email of the user
* @param {Boolean} activeFlag Whether the user is active or not. `false` = Not activated, `true` = Activated
* @return {Promise} a {@link https://www.promisejs.org/|Promise}, with data of type {@link module:model/User}
*/
addUser(name, email, activeFlag) {
return this.addUserWithHttpInfo(name, email, activeFlag)
.then(function(response_and_data) {
return response_and_data;
});
}
/**
* Add role assignment
* Adds a role assignment for a user.
* @param {Number} id The ID of the user
* @param {Number} roleId The ID of the role
* @return {Promise} a {@link https://www.promisejs.org/|Promise}, with an object containing data of type {@link module:model/PostRoleAssignment} and HTTP response
*/
addUserRoleAssignmentWithHttpInfo(id, roleId) {
const opts = {}
let postBody = null;
// verify the required parameter 'id' is set
if (id === undefined || id === null) {
throw new Error("Missing the required parameter 'id' when calling addUserRoleAssignment");
}
// verify the required parameter 'roleId' is set
if (roleId === undefined || roleId === null) {
throw new Error("Missing the required parameter 'roleId' when calling addUserRoleAssignment");
}
let pathParams = {
'id': id,
};
let queryParams = {
};
let headerParams = {
};
let formParams = {
'role_id': roleId,
};
let formParamArray = [
'roleId',
];
let contentTypes = ['application/x-www-form-urlencoded', ];
const isURLEncoded = contentTypes.includes('application/x-www-form-urlencoded');
const isJSON = contentTypes.includes('application/json');
if (isJSON) {
postBody = { ...postBody, ...opts };
} else if (isURLEncoded) {
for (let key in opts) {
if (opts.hasOwnProperty(key) && !formParamArray.includes(key)) {
formParams[key] = opts[key];
}
}
}
let authNames = ['api_key', ];
let accepts = ['application/json', ];
let returnType = PostRoleAssignment;
return this.apiClient.callApi(
'/users/{id}/roleAssignments', 'POST',
pathParams, queryParams, headerParams, formParams, postBody,
authNames, contentTypes, accepts, returnType, null
);
}
/**
* Add role assignment
* Adds a role assignment for a user.
* @param {Number} id The ID of the user
* @param {Number} roleId The ID of the role
* @return {Promise} a {@link https://www.promisejs.org/|Promise}, with data of type {@link module:model/PostRoleAssignment}
*/
addUserRoleAssignment(id, roleId) {
return this.addUserRoleAssignmentWithHttpInfo(id, roleId)
.then(function(response_and_data) {
return response_and_data;
});
}
/**
* Delete a role assignment
* Deletes a role assignment for a user.
* @param {Number} id The ID of the user
* @param {Number} roleId The ID of the role
* @return {Promise} a {@link https://www.promisejs.org/|Promise}, with an object containing data of type {@link module:model/DeleteRoleAssignment} and HTTP response
*/
deleteUserRoleAssignmentWithHttpInfo(id, roleId) {
const opts = {}
let postBody = null;
// verify the required parameter 'id' is set
if (id === undefined || id === null) {
throw new Error("Missing the required parameter 'id' when calling deleteUserRoleAssignment");
}
// verify the required parameter 'roleId' is set
if (roleId === undefined || roleId === null) {
throw new Error("Missing the required parameter 'roleId' when calling deleteUserRoleAssignment");
}
let pathParams = {
'id': id,
};
let queryParams = {
};
let headerParams = {
};
let formParams = {
'role_id': roleId,
};
let formParamArray = [
'roleId',
];
let contentTypes = ['application/x-www-form-urlencoded', ];
const isURLEncoded = contentTypes.includes('application/x-www-form-urlencoded');
const isJSON = contentTypes.includes('application/json');
if (isJSON) {
postBody = { ...postBody, ...opts };
} else if (isURLEncoded) {
for (let key in opts) {
if (opts.hasOwnProperty(key) && !formParamArray.includes(key)) {
formParams[key] = opts[key];
}
}
}
let authNames = ['api_key', ];
let accepts = ['application/json', ];
let returnType = DeleteRoleAssignment;
return this.apiClient.callApi(
'/users/{id}/roleAssignments', 'DELETE',
pathParams, queryParams, headerParams, formParams, postBody,
authNames, contentTypes, accepts, returnType, null
);
}
/**
* Delete a role assignment
* Deletes a role assignment for a user.
* @param {Number} id The ID of the user
* @param {Number} roleId The ID of the role
* @return {Promise} a {@link https://www.promisejs.org/|Promise}, with data of type {@link module:model/DeleteRoleAssignment}
*/
deleteUserRoleAssignment(id, roleId) {
return this.deleteUserRoleAssignmentWithHttpInfo(id, roleId)
.then(function(response_and_data) {
return response_and_data;
});
}
/**
* Find users by name
* Finds users by their name.
* @param {String} term The search term to look for
* @param {Object} opts Optional parameters
* @param {module:model/NumberBooleanDefault0} opts.searchByEmail When enabled, the term will only be matched against email addresses of users. Default: `false`
* @return {Promise} a {@link https://www.promisejs.org/|Promise}, with an object containing data of type {@link module:model/Users} and HTTP response
*/
findUsersByNameWithHttpInfo(term, opts) {
opts = opts || {};
let postBody = null;
// verify the required parameter 'term' is set
if (term === undefined || term === null) {
throw new Error("Missing the required parameter 'term' when calling findUsersByName");
}
let pathParams = {
};
let queryParams = {
'term': term,
'search_by_email': opts['searchByEmail'],
};
let headerParams = {
};
let formParams = {
};
let formParamArray = [
];
let contentTypes = [];
const isURLEncoded = contentTypes.includes('application/x-www-form-urlencoded');
const isJSON = contentTypes.includes('application/json');
if (isJSON) {
postBody = { ...postBody, ...opts };
} else if (isURLEncoded) {
for (let key in opts) {
if (opts.hasOwnProperty(key) && !formParamArray.includes(key)) {
formParams[key] = opts[key];
}
}
}
let authNames = ['api_key', 'oauth2', ];
let accepts = ['application/json', ];
let returnType = Users;
return this.apiClient.callApi(
'/users/find', 'GET',
pathParams, queryParams, headerParams, formParams, postBody,
authNames, contentTypes, accepts, returnType, null
);
}
/**
* Find users by name
* Finds users by their name.
* @param {String} term The search term to look for
* @param {Object} opts Optional parameters
* @param {module:model/NumberBooleanDefault0} opts.searchByEmail When enabled, the term will only be matched against email addresses of users. Default: `false`
* @return {Promise} a {@link https://www.promisejs.org/|Promise}, with data of type {@link module:model/Users}
*/
findUsersByName(term, opts) {
return this.findUsersByNameWithHttpInfo(term, opts)
.then(function(response_and_data) {
return response_and_data;
});
}
/**
* Get current user data
* Returns data about an authorized user within the company with bound company data: company ID, company name, and domain. Note that the `locale` property means 'Date/number format' in the Pipedrive account settings, not the chosen language.
* @return {Promise} a {@link https://www.promisejs.org/|Promise}, with an object containing data of type {@link module:model/UserMe} and HTTP response
*/
getCurrentUserWithHttpInfo() {
const opts = {}
let postBody = null;
let pathParams = {
};
let queryParams = {
};
let headerParams = {
};
let formParams = {
};
let formParamArray = [
];
let contentTypes = [];
const isURLEncoded = contentTypes.includes('application/x-www-form-urlencoded');
const isJSON = contentTypes.includes('application/json');
if (isJSON) {
postBody = { ...postBody, ...opts };
} else if (isURLEncoded) {
for (let key in opts) {
if (opts.hasOwnProperty(key) && !formParamArray.includes(key)) {
formParams[key] = opts[key];
}
}
}
let authNames = ['api_key', 'oauth2', ];
let accepts = ['application/json', ];
let returnType = UserMe;
return this.apiClient.callApi(
'/users/me', 'GET',
pathParams, queryParams, headerParams, formParams, postBody,
authNames, contentTypes, accepts, returnType, null
);
}
/**
* Get current user data
* Returns data about an authorized user within the company with bound company data: company ID, company name, and domain. Note that the `locale` property means 'Date/number format' in the Pipedrive account settings, not the chosen language.
* @return {Promise} a {@link https://www.promisejs.org/|Promise}, with data of type {@link module:model/UserMe}
*/
getCurrentUser() {
return this.getCurrentUserWithHttpInfo()
.then(function(response_and_data) {
return response_and_data;
});
}
/**
* Get one user
* Returns data about a specific user within the company.
* @param {Number} id The ID of the user
* @return {Promise} a {@link https://www.promisejs.org/|Promise}, with an object containing data of type {@link module:model/User} and HTTP response
*/
getUserWithHttpInfo(id) {
const opts = {}
let postBody = null;
// verify the required parameter 'id' is set
if (id === undefined || id === null) {
throw new Error("Missing the required parameter 'id' when calling getUser");
}
let pathParams = {
'id': id,
};
let queryParams = {
};
let headerParams = {
};
let formParams = {
};
let formParamArray = [
];
let contentTypes = [];
const isURLEncoded = contentTypes.includes('application/x-www-form-urlencoded');
const isJSON = contentTypes.includes('application/json');
if (isJSON) {
postBody = { ...postBody, ...opts };
} else if (isURLEncoded) {
for (let key in opts) {
if (opts.hasOwnProperty(key) && !formParamArray.includes(key)) {
formParams[key] = opts[key];
}
}
}
let authNames = ['api_key', 'oauth2', ];
let accepts = ['application/json', ];
let returnType = User;
return this.apiClient.callApi(
'/users/{id}', 'GET',
pathParams, queryParams, headerParams, formParams, postBody,
authNames, contentTypes, accepts, returnType, null
);
}
/**
* Get one user
* Returns data about a specific user within the company.
* @param {Number} id The ID of the user
* @return {Promise} a {@link https://www.promisejs.org/|Promise}, with data of type {@link module:model/User}
*/
getUser(id) {
return this.getUserWithHttpInfo(id)
.then(function(response_and_data) {
return response_and_data;
});
}
/**
* List followers of a user
* Lists the followers of a specific user.
* @param {Number} id The ID of the user
* @return {Promise} a {@link https://www.promisejs.org/|Promise}, with an object containing data of type {@link module:model/UserIDs} and HTTP response
*/
getUserFollowersWithHttpInfo(id) {
const opts = {}
let postBody = null;
// verify the required parameter 'id' is set
if (id === undefined || id === null) {
throw new Error("Missing the required parameter 'id' when calling getUserFollowers");
}
let pathParams = {
'id': id,
};
let queryParams = {
};
let headerParams = {
};
let formParams = {
};
let formParamArray = [
];
let contentTypes = [];
const isURLEncoded = contentTypes.includes('application/x-www-form-urlencoded');
const isJSON = contentTypes.includes('application/json');
if (isJSON) {
postBody = { ...postBody, ...opts };
} else if (isURLEncoded) {
for (let key in opts) {
if (opts.hasOwnProperty(key) && !formParamArray.includes(key)) {
formParams[key] = opts[key];
}
}
}
let authNames = ['api_key', 'oauth2', ];
let accepts = ['application/json', ];
let returnType = UserIDs;
return this.apiClient.callApi(
'/users/{id}/followers', 'GET',
pathParams, queryParams, headerParams, formParams, postBody,
authNames, contentTypes, accepts, returnType, null
);
}
/**
* List followers of a user
* Lists the followers of a specific user.
* @param {Number} id The ID of the user
* @return {Promise} a {@link https://www.promisejs.org/|Promise}, with data of type {@link module:model/UserIDs}
*/
getUserFollowers(id) {
return this.getUserFollowersWithHttpInfo(id)
.then(function(response_and_data) {
return response_and_data;
});
}
/**
* List user permissions
* Lists aggregated permissions over all assigned permission sets for a user.
* @param {Number} id The ID of the user
* @return {Promise} a {@link https://www.promisejs.org/|Promise}, with an object containing data of type {@link module:model/UserPermissions} and HTTP response
*/
getUserPermissionsWithHttpInfo(id) {
const opts = {}
let postBody = null;
// verify the required parameter 'id' is set
if (id === undefined || id === null) {
throw new Error("Missing the required parameter 'id' when calling getUserPermissions");
}
let pathParams = {
'id': id,
};
let queryParams = {
};
let headerParams = {
};
let formParams = {
};
let formParamArray = [
];
let contentTypes = [];
const isURLEncoded = contentTypes.includes('application/x-www-form-urlencoded');
const isJSON = contentTypes.includes('application/json');
if (isJSON) {
postBody = { ...postBody, ...opts };
} else if (isURLEncoded) {
for (let key in opts) {
if (opts.hasOwnProperty(key) && !formParamArray.includes(key)) {
formParams[key] = opts[key];
}
}
}
let authNames = ['api_key', 'oauth2', ];
let accepts = ['application/json', ];
let returnType = UserPermissions;
return this.apiClient.callApi(
'/users/{id}/permissions', 'GET',
pathParams, queryParams, headerParams, formParams, postBody,
authNames, contentTypes, accepts, returnType, null
);
}
/**
* List user permissions
* Lists aggregated permissions over all assigned permission sets for a user.
* @param {Number} id The ID of the user
* @return {Promise} a {@link https://www.promisejs.org/|Promise}, with data of type {@link module:model/UserPermissions}
*/
getUserPermissions(id) {
return this.getUserPermissionsWithHttpInfo(id)
.then(function(response_and_data) {
return response_and_data;
});
}
/**
* List role assignments
* Lists role assignments for a user.
* @param {Number} id The ID of the user
* @param {Object} opts Optional parameters
* @param {Number} opts.start Pagination start (default to 0)
* @param {Number} opts.limit Items shown per page
* @return {Promise} a {@link https://www.promisejs.org/|Promise}, with an object containing data of type {@link module:model/GetRoleAssignments} and HTTP response
*/
getUserRoleAssignmentsWithHttpInfo(id, opts) {
opts = opts || {};
let postBody = null;
// verify the required parameter 'id' is set
if (id === undefined || id === null) {
throw new Error("Missing the required parameter 'id' when calling getUserRoleAssignments");
}
let pathParams = {
'id': id,
};
let queryParams = {
'start': opts['start'],
'limit': opts['limit'],
};
let headerParams = {
};
let formParams = {
};
let formParamArray = [
];
let contentTypes = [];
const isURLEncoded = contentTypes.includes('application/x-www-form-urlencoded');
const isJSON = contentTypes.includes('application/json');
if (isJSON) {
postBody = { ...postBody, ...opts };
} else if (isURLEncoded) {
for (let key in opts) {
if (opts.hasOwnProperty(key) && !formParamArray.includes(key)) {
formParams[key] = opts[key];
}
}
}
let authNames = ['api_key', 'oauth2', ];
let accepts = ['application/json', ];
let returnType = GetRoleAssignments;
return this.apiClient.callApi(
'/users/{id}/roleAssignments', 'GET',
pathParams, queryParams, headerParams, formParams, postBody,
authNames, contentTypes, accepts, returnType, null
);
}
/**
* List role assignments
* Lists role assignments for a user.
* @param {Number} id The ID of the user
* @param {Object} opts Optional parameters
* @param {Number} opts.start Pagination start (default to 0)
* @param {Number} opts.limit Items shown per page
* @return {Promise} a {@link https://www.promisejs.org/|Promise}, with data of type {@link module:model/GetRoleAssignments}
*/
getUserRoleAssignments(id, opts) {
return this.getUserRoleAssignmentsWithHttpInfo(id, opts)
.then(function(response_and_data) {
return response_and_data;
});
}
/**
* List user role settings
     * Lists the settings of the user's assigned role.
* @param {Number} id The ID of the user
* @return {Promise} a {@link https://www.promisejs.org/|Promise}, with an object containing data of type {@link module:model/GetRoleSettings} and HTTP response
*/
getUserRoleSettingsWithHttpInfo(id) {
const opts = {}
let postBody = null;
// verify the required parameter 'id' is set
if (id === undefined || id === null) {
throw new Error("Missing the required parameter 'id' when calling getUserRoleSettings");
}
let pathParams = {
'id': id,
};
let queryParams = {
};
let headerParams = {
};
let formParams = {
};
let formParamArray = [
];
let contentTypes = [];
const isURLEncoded = contentTypes.includes('application/x-www-form-urlencoded');
const isJSON = contentTypes.includes('application/json');
if (isJSON) {
postBody = { ...postBody, ...opts };
} else if (isURLEncoded) {
for (let key in opts) {
if (opts.hasOwnProperty(key) && !formParamArray.includes(key)) {
formParams[key] = opts[key];
}
}
}
let authNames = ['api_key', 'oauth2', ];
let accepts = ['application/json', ];
let returnType = GetRoleSettings;
return this.apiClient.callApi(
'/users/{id}/roleSettings', 'GET',
pathParams, queryParams, headerParams, formParams, postBody,
authNames, contentTypes, accepts, returnType, null
);
}
/**
* List user role settings
     * Lists the settings of the user's assigned role.
* @param {Number} id The ID of the user
* @return {Promise} a {@link https://www.promisejs.org/|Promise}, with data of type {@link module:model/GetRoleSettings}
*/
getUserRoleSettings(id) {
return this.getUserRoleSettingsWithHttpInfo(id)
.then(function(response_and_data) {
return response_and_data;
});
}
/**
* Get all users
* Returns data about all users within the company.
* @return {Promise} a {@link https://www.promisejs.org/|Promise}, with an object containing data of type {@link module:model/Users} and HTTP response
*/
getUsersWithHttpInfo() {
const opts = {}
let postBody = null;
let pathParams = {
};
let queryParams = {
};
let headerParams = {
};
let formParams = {
};
let formParamArray = [
];
let contentTypes = [];
const isURLEncoded = contentTypes.includes('application/x-www-form-urlencoded');
const isJSON = contentTypes.includes('application/json');
if (isJSON) {
postBody = { ...postBody, ...opts };
} else if (isURLEncoded) {
for (let key in opts) {
if (opts.hasOwnProperty(key) && !formParamArray.includes(key)) {
formParams[key] = opts[key];
}
}
}
let authNames = ['api_key', 'oauth2', ];
let accepts = ['application/json', ];
let returnType = Users;
return this.apiClient.callApi(
'/users', 'GET',
pathParams, queryParams, headerParams, formParams, postBody,
authNames, contentTypes, accepts, returnType, null
);
}
/**
* Get all users
* Returns data about all users within the company.
* @return {Promise} a {@link https://www.promisejs.org/|Promise}, with data of type {@link module:model/Users}
*/
getUsers() {
return this.getUsersWithHttpInfo()
.then(function(response_and_data) {
return response_and_data;
});
}
/**
* Update user details
* Updates the properties of a user. Currently, only `active_flag` can be updated.
* @param {Number} id The ID of the user
* @param {Boolean} activeFlag Whether the user is active or not. `false` = Not activated, `true` = Activated
* @return {Promise} a {@link https://www.promisejs.org/|Promise}, with an object containing data of type {@link module:model/User} and HTTP response
*/
updateUserWithHttpInfo(id, activeFlag) {
const opts = {}
let postBody = null;
// verify the required parameter 'id' is set
if (id === undefined || id === null) {
throw new Error("Missing the required parameter 'id' when calling updateUser");
}
// verify the required parameter 'activeFlag' is set
if (activeFlag === undefined || activeFlag === null) {
throw new Error("Missing the required parameter 'activeFlag' when calling updateUser");
}
let pathParams = {
'id': id,
};
let queryParams = {
};
let headerParams = {
};
let formParams = {
'active_flag': activeFlag,
};
let formParamArray = [
'activeFlag',
];
let contentTypes = ['application/x-www-form-urlencoded', ];
const isURLEncoded = contentTypes.includes('application/x-www-form-urlencoded');
const isJSON = contentTypes.includes('application/json');
if (isJSON) {
postBody = { ...postBody, ...opts };
} else if (isURLEncoded) {
for (let key in opts) {
if (opts.hasOwnProperty(key) && !formParamArray.includes(key)) {
formParams[key] = opts[key];
}
}
}
let authNames = ['api_key', 'oauth2', ];
let accepts = ['application/json', ];
let returnType = User;
return this.apiClient.callApi(
'/users/{id}', 'PUT',
pathParams, queryParams, headerParams, formParams, postBody,
authNames, contentTypes, accepts, returnType, null
);
}
/**
* Update user details
* Updates the properties of a user. Currently, only `active_flag` can be updated.
* @param {Number} id The ID of the user
* @param {Boolean} activeFlag Whether the user is active or not. `false` = Not activated, `true` = Activated
* @return {Promise} a {@link https://www.promisejs.org/|Promise}, with data of type {@link module:model/User}
*/
updateUser(id, activeFlag) {
return this.updateUserWithHttpInfo(id, activeFlag)
.then(function(response_and_data) {
return response_and_data;
});
}
}
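// A minimal usage sketch (illustrative, not part of the generated client). It
// assumes the shared ApiClient.instance has already been configured with valid
// Pipedrive credentials for the 'api_key' or 'oauth2' scheme; the sample user
// data below is made up.
export function exampleUsersApiUsage() {
  const usersApi = new UsersApi();
  // Create a user, then fetch the company's full user list.
  return usersApi.addUser('Jane Doe', '[email protected]', true)
    .then(() => usersApi.getUsers());
}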
|
# -*- coding: utf-8 -*-
# This code is part of Ansible, but is an independent component
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
# Copyright: (c) 2017, Dag Wieers <[email protected]>
# Copyright: (c) 2017, Jacob McGill (@jmcgill298)
# Copyright: (c) 2017, Swetha Chunduri (@schunduri)
# All rights reserved.
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import base64
import json
import os
from copy import deepcopy
from ansible.module_utils.parsing.convert_bool import boolean
from ansible.module_utils.six.moves.urllib.parse import urlencode
from ansible.module_utils.urls import fetch_url
from ansible.module_utils._text import to_bytes, to_native
# Optional, only used for APIC signature-based authentication
try:
from OpenSSL.crypto import FILETYPE_PEM, load_privatekey, sign
HAS_OPENSSL = True
except ImportError:
HAS_OPENSSL = False
# Optional, only used for XML payload
try:
import lxml.etree
HAS_LXML_ETREE = True
except ImportError:
HAS_LXML_ETREE = False
# Optional, only used for XML payload
try:
from xmljson import cobra
HAS_XMLJSON_COBRA = True
except ImportError:
HAS_XMLJSON_COBRA = False
def aci_argument_spec():
return dict(
host=dict(type='str', required=True, aliases=['hostname']),
port=dict(type='int', required=False),
username=dict(type='str', default='admin', aliases=['user']),
password=dict(type='str', no_log=True),
private_key=dict(type='str', aliases=['cert_key'], no_log=True), # Beware, this is not the same as client_key !
certificate_name=dict(type='str', aliases=['cert_name']), # Beware, this is not the same as client_cert !
output_level=dict(type='str', default='normal', choices=['debug', 'info', 'normal']),
timeout=dict(type='int', default=30),
use_proxy=dict(type='bool', default=True),
use_ssl=dict(type='bool', default=True),
validate_certs=dict(type='bool', default=True),
)
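# A condensed sketch of how a concrete ACI module typically combines these
# helpers (illustrative only; the fvTenant class/RN values mirror the common
# aci_tenant pattern, and the extra tenant/description/state parameters are
# assumptions rather than part of this shared utility file).
def _example_tenant_module_flow():
    from ansible.module_utils.basic import AnsibleModule
    argument_spec = aci_argument_spec()
    argument_spec.update(
        tenant=dict(type='str', aliases=['name', 'tenant_name']),
        description=dict(type='str', aliases=['descr']),
        state=dict(type='str', default='present', choices=['absent', 'present', 'query']),
    )
    module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)
    tenant = module.params['tenant']
    aci = ACIModule(module)
    # Build the request URL and query string, read what exists, then diff and push.
    aci.construct_url(
        root_class=dict(
            aci_class='fvTenant',
            aci_rn='tn-{0}'.format(tenant),
            module_object=tenant,
            target_filter={'name': tenant},
        ),
    )
    aci.get_existing()
    if module.params['state'] == 'present':
        aci.payload(aci_class='fvTenant', class_config=dict(name=tenant, descr=module.params['description']))
        aci.get_diff(aci_class='fvTenant')
        aci.post_config()
    elif module.params['state'] == 'absent':
        aci.delete_config()
    aci.exit_json()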
class ACIModule(object):
def __init__(self, module):
self.module = module
self.params = module.params
self.result = dict(changed=False)
self.headers = dict()
self.child_classes = set()
# error output
self.error = dict(code=None, text=None)
# normal output
self.existing = None
# info output
self.config = dict()
self.original = None
self.proposed = dict()
# debug output
self.filter_string = ''
self.method = None
self.path = None
self.response = None
self.status = None
self.url = None
# aci_rest output
self.imdata = None
self.totalCount = None
# Ensure protocol is set
self.define_protocol()
if self.module._debug:
self.module.warn('Enable debug output because ANSIBLE_DEBUG was set.')
self.params['output_level'] = 'debug'
if self.params['private_key']:
# Perform signature-based authentication, no need to log on separately
if not HAS_OPENSSL:
self.module.fail_json(msg='Cannot use signature-based authentication because pyopenssl is not available')
elif self.params['password'] is not None:
                self.module.warn("When doing ACI signature-based authentication, providing parameter 'password' is not required")
elif self.params['password']:
# Perform password-based authentication, log on using password
self.login()
else:
self.module.fail_json(msg="Either parameter 'password' or 'private_key' is required for authentication")
def boolean(self, value, true='yes', false='no'):
''' Return an acceptable value back '''
# When we expect value is of type=bool
if value is None:
return None
elif value is True:
return true
elif value is False:
return false
# When we expect value is of type=raw, deprecate in Ansible v2.8 (and all modules use type=bool)
try:
# This supports all Ansible boolean types
bool_value = boolean(value)
if bool_value is True:
return true
elif bool_value is False:
return false
except Exception:
# This provides backward compatibility to Ansible v2.4, deprecate in Ansible v2.8
if value == true:
self.module.deprecate("Boolean value '%s' is no longer valid, please use 'yes' as a boolean value." % value, '2.9')
return true
elif value == false:
self.module.deprecate("Boolean value '%s' is no longer valid, please use 'no' as a boolean value." % value, '2.9')
return false
# If all else fails, escalate back to user
        self.module.fail_json(msg="Boolean value '%s' is an invalid ACI boolean value." % value)
def iso8601_format(self, dt):
''' Return an ACI-compatible ISO8601 formatted time: 2123-12-12T00:00:00.000+00:00 '''
try:
return dt.isoformat(timespec='milliseconds')
except Exception:
tz = dt.strftime('%z')
return '%s.%03d%s:%s' % (dt.strftime('%Y-%m-%dT%H:%M:%S'), dt.microsecond / 1000, tz[:3], tz[3:])
def define_protocol(self):
''' Set protocol based on use_ssl parameter '''
# Set protocol for further use
self.params['protocol'] = 'https' if self.params.get('use_ssl', True) else 'http'
def define_method(self):
''' Set method based on state parameter '''
# Set method for further use
state_map = dict(absent='delete', present='post', query='get')
self.params['method'] = state_map[self.params['state']]
def login(self):
''' Log in to APIC '''
# Perform login request
if 'port' in self.params and self.params['port'] is not None:
url = '%(protocol)s://%(host)s:%(port)s/api/aaaLogin.json' % self.params
else:
url = '%(protocol)s://%(host)s/api/aaaLogin.json' % self.params
payload = {'aaaUser': {'attributes': {'name': self.params['username'], 'pwd': self.params['password']}}}
resp, auth = fetch_url(self.module, url,
data=json.dumps(payload),
method='POST',
timeout=self.params['timeout'],
use_proxy=self.params['use_proxy'])
# Handle APIC response
if auth['status'] != 200:
self.response = auth['msg']
self.status = auth['status']
try:
# APIC error
self.response_json(auth['body'])
self.fail_json(msg='Authentication failed: %(code)s %(text)s' % self.error)
except KeyError:
# Connection error
self.fail_json(msg='Connection failed for %(url)s. %(msg)s' % auth)
# Retain cookie for later use
self.headers['Cookie'] = resp.headers['Set-Cookie']
def cert_auth(self, path=None, payload='', method=None):
''' Perform APIC signature-based authentication, not the expected SSL client certificate authentication. '''
if method is None:
method = self.params['method'].upper()
# NOTE: ACI documentation incorrectly uses complete URL
if path is None:
path = self.path
path = '/' + path.lstrip('/')
if payload is None:
payload = ''
# Check if we got a private key. This allows the use of vaulting the private key.
if self.params['private_key'].startswith('-----BEGIN PRIVATE KEY-----'):
try:
sig_key = load_privatekey(FILETYPE_PEM, self.params['private_key'])
except Exception:
self.module.fail_json(msg="Cannot load provided 'private_key' parameter.")
# Use the username as the certificate_name value
if self.params['certificate_name'] is None:
self.params['certificate_name'] = self.params['username']
elif self.params['private_key'].startswith('-----BEGIN CERTIFICATE-----'):
self.module.fail_json(msg="Provided 'private_key' parameter value appears to be a certificate. Please correct.")
else:
# If we got a private key file, read from this file.
# NOTE: Avoid exposing any other credential as a filename in output...
if not os.path.exists(self.params['private_key']):
self.module.fail_json(msg="The provided private key file does not appear to exist. Is it a filename?")
try:
with open(self.params['private_key'], 'r') as fh:
private_key_content = fh.read()
except Exception:
self.module.fail_json(msg="Cannot open private key file '%s'." % self.params['private_key'])
if private_key_content.startswith('-----BEGIN PRIVATE KEY-----'):
try:
sig_key = load_privatekey(FILETYPE_PEM, private_key_content)
except Exception:
self.module.fail_json(msg="Cannot load private key file '%s'." % self.params['private_key'])
# Use the private key basename (without extension) as certificate_name
if self.params['certificate_name'] is None:
self.params['certificate_name'] = os.path.basename(os.path.splitext(self.params['private_key'])[0])
elif private_key_content.startswith('-----BEGIN CERTIFICATE-----'):
self.module.fail_json(msg="Provided private key file %s appears to be a certificate. Please correct." % self.params['private_key'])
else:
self.module.fail_json(msg="Provided private key file '%s' does not appear to be a private key. Please correct." % self.params['private_key'])
# NOTE: ACI documentation incorrectly adds a space between method and path
sig_request = method + path + payload
sig_signature = base64.b64encode(sign(sig_key, sig_request, 'sha256'))
sig_dn = 'uni/userext/user-%s/usercert-%s' % (self.params['username'], self.params['certificate_name'])
self.headers['Cookie'] = 'APIC-Certificate-Algorithm=v1.0; ' +\
'APIC-Certificate-DN=%s; ' % sig_dn +\
'APIC-Certificate-Fingerprint=fingerprint; ' +\
'APIC-Request-Signature=%s' % to_native(sig_signature)
def response_json(self, rawoutput):
''' Handle APIC JSON response output '''
try:
jsondata = json.loads(rawoutput)
except Exception as e:
# Expose RAW output for troubleshooting
self.error = dict(code=-1, text="Unable to parse output as JSON, see 'raw' output. %s" % e)
self.result['raw'] = rawoutput
return
# Extract JSON API output
try:
self.imdata = jsondata['imdata']
except KeyError:
self.imdata = dict()
self.totalCount = int(jsondata['totalCount'])
# Handle possible APIC error information
self.response_error()
def response_xml(self, rawoutput):
''' Handle APIC XML response output '''
# NOTE: The XML-to-JSON conversion is using the "Cobra" convention
try:
xml = lxml.etree.fromstring(to_bytes(rawoutput))
xmldata = cobra.data(xml)
except Exception as e:
# Expose RAW output for troubleshooting
self.error = dict(code=-1, text="Unable to parse output as XML, see 'raw' output. %s" % e)
self.result['raw'] = rawoutput
return
# Reformat as ACI does for JSON API output
try:
self.imdata = xmldata['imdata']['children']
except KeyError:
self.imdata = dict()
self.totalCount = int(xmldata['imdata']['attributes']['totalCount'])
# Handle possible APIC error information
self.response_error()
def response_error(self):
''' Set error information when found '''
# Handle possible APIC error information
        if self.totalCount != 0:
try:
self.error = self.imdata[0]['error']['attributes']
except (KeyError, IndexError):
pass
def request(self, path, payload=None):
''' Perform a REST request '''
# Ensure method is set (only do this once)
self.define_method()
self.path = path
if 'port' in self.params and self.params['port'] is not None:
self.url = '%(protocol)s://%(host)s:%(port)s/' % self.params + path.lstrip('/')
else:
self.url = '%(protocol)s://%(host)s/' % self.params + path.lstrip('/')
# Sign and encode request as to APIC's wishes
        if self.params['private_key']:
self.cert_auth(path=path, payload=payload)
# Perform request
resp, info = fetch_url(self.module, self.url,
data=payload,
headers=self.headers,
method=self.params['method'].upper(),
timeout=self.params['timeout'],
use_proxy=self.params['use_proxy'])
self.response = info['msg']
self.status = info['status']
# Handle APIC response
if info['status'] != 200:
try:
# APIC error
self.response_json(info['body'])
self.fail_json(msg='APIC Error %(code)s: %(text)s' % self.error)
except KeyError:
# Connection error
self.fail_json(msg='Connection failed for %(url)s. %(msg)s' % info)
self.response_json(resp.read())
def query(self, path):
''' Perform a query with no payload '''
self.path = path
if 'port' in self.params and self.params['port'] is not None:
self.url = '%(protocol)s://%(host)s:%(port)s/' % self.params + path.lstrip('/')
else:
self.url = '%(protocol)s://%(host)s/' % self.params + path.lstrip('/')
# Sign and encode request as to APIC's wishes
        if self.params['private_key']:
self.cert_auth(path=path, method='GET')
# Perform request
resp, query = fetch_url(self.module, self.url,
data=None,
headers=self.headers,
method='GET',
timeout=self.params['timeout'],
use_proxy=self.params['use_proxy'])
# Handle APIC response
if query['status'] != 200:
self.response = query['msg']
self.status = query['status']
try:
# APIC error
self.response_json(query['body'])
self.fail_json(msg='APIC Error %(code)s: %(text)s' % self.error)
except KeyError:
# Connection error
self.fail_json(msg='Connection failed for %(url)s. %(msg)s' % query)
query = json.loads(resp.read())
return json.dumps(query['imdata'], sort_keys=True, indent=2) + '\n'
def request_diff(self, path, payload=None):
''' Perform a request, including a proper diff output '''
self.result['diff'] = dict()
self.result['diff']['before'] = self.query(path)
self.request(path, payload=payload)
# TODO: Check if we can use the request output for the 'after' diff
self.result['diff']['after'] = self.query(path)
if self.result['diff']['before'] != self.result['diff']['after']:
self.result['changed'] = True
# TODO: This could be designed to update existing keys
def update_qs(self, params):
''' Append key-value pairs to self.filter_string '''
accepted_params = dict((k, v) for (k, v) in params.items() if v is not None)
if accepted_params:
if self.filter_string:
self.filter_string += '&'
else:
self.filter_string = '?'
self.filter_string += urlencode(accepted_params)
# TODO: This could be designed to accept multiple obj_classes and keys
def build_filter(self, obj_class, params):
''' Build an APIC filter based on obj_class and key-value pairs '''
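        # For example (illustrative values): build_filter('fvTenant', {'name': 'prod', 'descr': None})
        # drops the None entry and returns 'eq(fvTenant.name, "prod")'; with two or more remaining
        # keys the individual eq() terms are wrapped in a single and(...) expression.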
accepted_params = dict((k, v) for (k, v) in params.items() if v is not None)
if len(accepted_params) == 1:
return ','.join('eq({0}.{1}, "{2}")'.format(obj_class, k, v) for (k, v) in accepted_params.items())
elif len(accepted_params) > 1:
return 'and(' + ','.join(['eq({0}.{1}, "{2}")'.format(obj_class, k, v) for (k, v) in accepted_params.items()]) + ')'
def construct_url(self, root_class, subclass_1=None, subclass_2=None, subclass_3=None, child_classes=None):
"""
This method is used to retrieve the appropriate URL path and filter_string to make the request to the APIC.
:param root_class: The top-level class dictionary containing aci_class, aci_rn, target_filter, and module_object keys.
        :param subclass_1: The second-level class dictionary containing aci_class, aci_rn, target_filter, and module_object keys.
        :param subclass_2: The third-level class dictionary containing aci_class, aci_rn, target_filter, and module_object keys.
        :param subclass_3: The fourth-level class dictionary containing aci_class, aci_rn, target_filter, and module_object keys.
:param child_classes: The list of child classes that the module supports along with the object.
:type root_class: dict
:type subclass_1: dict
:type subclass_2: dict
:type subclass_3: dict
:type child_classes: list
:return: The path and filter_string needed to build the full URL.
"""
self.filter_string = ''
if child_classes is None:
self.child_classes = set()
else:
self.child_classes = set(child_classes)
if subclass_3 is not None:
self._construct_url_4(root_class, subclass_1, subclass_2, subclass_3)
elif subclass_2 is not None:
self._construct_url_3(root_class, subclass_1, subclass_2)
elif subclass_1 is not None:
self._construct_url_2(root_class, subclass_1)
else:
self._construct_url_1(root_class)
if 'port' in self.params and self.params['port'] is not None:
self.url = '{protocol}://{host}:{port}/{path}'.format(path=self.path, **self.module.params)
else:
self.url = '{protocol}://{host}/{path}'.format(path=self.path, **self.module.params)
if self.child_classes:
            # Append the child class subtree selectors to the query string
self.update_qs({'rsp-subtree': 'full', 'rsp-subtree-class': ','.join(self.child_classes)})
def _construct_url_1(self, obj):
"""
This method is used by construct_url when the object is the top-level class.
"""
obj_class = obj['aci_class']
obj_rn = obj['aci_rn']
obj_filter = obj['target_filter']
mo = obj['module_object']
if self.module.params['state'] in ('absent', 'present'):
# State is absent or present
self.path = 'api/mo/uni/{0}.json'.format(obj_rn)
self.update_qs({'rsp-prop-include': 'config-only'})
elif mo is None:
# Query for all objects of the module's class (filter by properties)
self.path = 'api/class/{0}.json'.format(obj_class)
self.update_qs({'query-target-filter': self.build_filter(obj_class, obj_filter)})
else:
# Query for a specific object in the module's class
self.path = 'api/mo/uni/{0}.json'.format(obj_rn)
def _construct_url_2(self, parent, obj):
"""
This method is used by construct_url when the object is the second-level class.
"""
parent_class = parent['aci_class']
parent_rn = parent['aci_rn']
parent_filter = parent['target_filter']
parent_obj = parent['module_object']
obj_class = obj['aci_class']
obj_rn = obj['aci_rn']
obj_filter = obj['target_filter']
mo = obj['module_object']
if self.module.params['state'] in ('absent', 'present'):
# State is absent or present
self.path = 'api/mo/uni/{0}/{1}.json'.format(parent_rn, obj_rn)
self.update_qs({'rsp-prop-include': 'config-only'})
elif parent_obj is None and mo is None:
# Query for all objects of the module's class
self.path = 'api/class/{0}.json'.format(obj_class)
self.update_qs({'query-target-filter': self.build_filter(obj_class, obj_filter)})
elif parent_obj is None: # mo is known
# Query for all objects of the module's class that match the provided ID value
self.path = 'api/class/{0}.json'.format(obj_class)
self.update_qs({'query-target-filter': self.build_filter(obj_class, obj_filter)})
elif mo is None: # parent_obj is known
            # Query for all objects of the module's class that belong to a specific parent object
self.child_classes.add(obj_class)
self.path = 'api/mo/uni/{0}.json'.format(parent_rn)
else:
# Query for specific object in the module's class
self.path = 'api/mo/uni/{0}/{1}.json'.format(parent_rn, obj_rn)
def _construct_url_3(self, root, parent, obj):
"""
This method is used by construct_url when the object is the third-level class.
"""
root_class = root['aci_class']
root_rn = root['aci_rn']
root_filter = root['target_filter']
root_obj = root['module_object']
parent_class = parent['aci_class']
parent_rn = parent['aci_rn']
parent_filter = parent['target_filter']
parent_obj = parent['module_object']
obj_class = obj['aci_class']
obj_rn = obj['aci_rn']
obj_filter = obj['target_filter']
mo = obj['module_object']
if self.module.params['state'] in ('absent', 'present'):
# State is absent or present
self.path = 'api/mo/uni/{0}/{1}/{2}.json'.format(root_rn, parent_rn, obj_rn)
self.update_qs({'rsp-prop-include': 'config-only'})
elif root_obj is None and parent_obj is None and mo is None:
# Query for all objects of the module's class
self.path = 'api/class/{0}.json'.format(obj_class)
self.update_qs({'query-target-filter': self.build_filter(obj_class, obj_filter)})
elif root_obj is None and parent_obj is None: # mo is known
# Query for all objects of the module's class matching the provided ID value of the object
self.path = 'api/class/{0}.json'.format(obj_class)
self.update_qs({'query-target-filter': self.build_filter(obj_class, obj_filter)})
elif root_obj is None and mo is None: # parent_obj is known
# Query for all objects of the module's class that belong to any parent class
# matching the provided ID value for the parent object
self.child_classes.add(obj_class)
self.path = 'api/class/{0}.json'.format(parent_class)
self.update_qs({'query-target-filter': self.build_filter(parent_class, parent_filter)})
elif parent_obj is None and mo is None: # root_obj is known
# Query for all objects of the module's class that belong to a specific root object
self.child_classes.update([parent_class, obj_class])
self.path = 'api/mo/uni/{0}.json'.format(root_rn)
# NOTE: No need to select by root_filter
# self.update_qs({'query-target-filter': self.build_filter(root_class, root_filter)})
elif root_obj is None: # mo and parent_obj are known
# Query for all objects of the module's class that belong to any parent class
# matching the provided ID values for both object and parent object
self.child_classes.add(obj_class)
self.path = 'api/class/{0}.json'.format(parent_class)
self.update_qs({'query-target-filter': self.build_filter(parent_class, parent_filter)})
self.update_qs({'rsp-subtree-filter': self.build_filter(obj_class, obj_filter)})
elif parent_obj is None: # mo and root_obj are known
# Query for all objects of the module's class that match the provided ID value and belong to a specific root object
self.child_classes.add(obj_class)
self.path = 'api/mo/uni/{0}.json'.format(root_rn)
# NOTE: No need to select by root_filter
# self.update_qs({'query-target-filter': self.build_filter(root_class, root_filter)})
# TODO: Filter by parent_filter and obj_filter
self.update_qs({'rsp-subtree-filter': self.build_filter(obj_class, obj_filter)})
elif mo is None: # root_obj and parent_obj are known
# Query for all objects of the module's class that belong to a specific parent object
self.child_classes.add(obj_class)
self.path = 'api/mo/uni/{0}/{1}.json'.format(root_rn, parent_rn)
# NOTE: No need to select by parent_filter
# self.update_qs({'query-target-filter': self.build_filter(parent_class, parent_filter)})
else:
# Query for a specific object of the module's class
self.path = 'api/mo/uni/{0}/{1}/{2}.json'.format(root_rn, parent_rn, obj_rn)
def _construct_url_4(self, root, sec, parent, obj):
"""
This method is used by construct_url when the object is the fourth-level class.
"""
root_class = root['aci_class']
root_rn = root['aci_rn']
root_filter = root['target_filter']
root_obj = root['module_object']
sec_class = sec['aci_class']
sec_rn = sec['aci_rn']
sec_filter = sec['target_filter']
sec_obj = sec['module_object']
parent_class = parent['aci_class']
parent_rn = parent['aci_rn']
parent_filter = parent['target_filter']
parent_obj = parent['module_object']
obj_class = obj['aci_class']
obj_rn = obj['aci_rn']
obj_filter = obj['target_filter']
mo = obj['module_object']
if self.child_classes is None:
self.child_classes = [obj_class]
if self.module.params['state'] in ('absent', 'present'):
# State is absent or present
self.path = 'api/mo/uni/{0}/{1}/{2}/{3}.json'.format(root_rn, sec_rn, parent_rn, obj_rn)
self.update_qs({'rsp-prop-include': 'config-only'})
# TODO: Add all missing cases
elif root_obj is None:
self.child_classes.add(obj_class)
self.path = 'api/class/{0}.json'.format(obj_class)
self.update_qs({'query-target-filter': self.build_filter(obj_class, obj_filter)})
elif sec_obj is None:
self.child_classes.add(obj_class)
self.path = 'api/mo/uni/{0}.json'.format(root_rn)
# NOTE: No need to select by root_filter
# self.update_qs({'query-target-filter': self.build_filter(root_class, root_filter)})
# TODO: Filter by sec_filter, parent and obj_filter
self.update_qs({'rsp-subtree-filter': self.build_filter(obj_class, obj_filter)})
elif parent_obj is None:
self.child_classes.add(obj_class)
self.path = 'api/mo/uni/{0}/{1}.json'.format(root_rn, sec_rn)
# NOTE: No need to select by sec_filter
# self.update_qs({'query-target-filter': self.build_filter(sec_class, sec_filter)})
# TODO: Filter by parent_filter and obj_filter
self.update_qs({'rsp-subtree-filter': self.build_filter(obj_class, obj_filter)})
elif mo is None:
self.child_classes.add(obj_class)
self.path = 'api/mo/uni/{0}/{1}/{2}.json'.format(root_rn, sec_rn, parent_rn)
# NOTE: No need to select by parent_filter
# self.update_qs({'query-target-filter': self.build_filter(parent_class, parent_filter)})
else:
# Query for a specific object of the module's class
self.path = 'api/mo/uni/{0}/{1}/{2}/{3}.json'.format(root_rn, sec_rn, parent_rn, obj_rn)
def delete_config(self):
"""
This method is used to handle the logic when the modules state is equal to absent. The method only pushes a change if
the object exists, and if check_mode is False. A successful change will mark the module as changed.
"""
self.proposed = dict()
if not self.existing:
return
elif not self.module.check_mode:
# Sign and encode request as to APIC's wishes
            if self.params['private_key']:
self.cert_auth(method='DELETE')
resp, info = fetch_url(self.module, self.url,
headers=self.headers,
method='DELETE',
timeout=self.params['timeout'],
use_proxy=self.params['use_proxy'])
self.response = info['msg']
self.status = info['status']
self.method = 'DELETE'
# Handle APIC response
if info['status'] == 200:
self.result['changed'] = True
self.response_json(resp.read())
else:
try:
# APIC error
self.response_json(info['body'])
self.fail_json(msg='APIC Error %(code)s: %(text)s' % self.error)
except KeyError:
# Connection error
self.fail_json(msg='Connection failed for %(url)s. %(msg)s' % info)
else:
self.result['changed'] = True
self.method = 'DELETE'
def get_diff(self, aci_class):
"""
This method is used to get the difference between the proposed and existing configurations. Each module
should call the get_existing method before this method, and add the proposed config to the module results
        using the module's config parameters. The new config will be added to the self.result dictionary.
:param aci_class: Type str.
This is the root dictionary key for the MO's configuration body, or the ACI class of the MO.
"""
proposed_config = self.proposed[aci_class]['attributes']
if self.existing:
existing_config = self.existing[0][aci_class]['attributes']
config = {}
# values are strings, so any diff between proposed and existing can be a straight replace
for key, value in proposed_config.items():
existing_field = existing_config.get(key)
if value != existing_field:
config[key] = value
# add name back to config only if the configs do not match
if config:
# TODO: If URLs are built with the object's name, then we should be able to leave off adding the name back
# config["name"] = proposed_config["name"]
config = {aci_class: {'attributes': config}}
# check for updates to child configs and update new config dictionary
children = self.get_diff_children(aci_class)
if children and config:
config[aci_class].update({'children': children})
elif children:
config = {aci_class: {'attributes': {}, 'children': children}}
else:
config = self.proposed
self.config = config
@staticmethod
def get_diff_child(child_class, proposed_child, existing_child):
"""
        This method is used to get the difference between the proposed and existing child configs. The get_nested_config()
        method should be used to return the proposed and existing config portions of the child.
:param child_class: Type str.
The root class (dict key) for the child dictionary.
:param proposed_child: Type dict.
The config portion of the proposed child dictionary.
:param existing_child: Type dict.
The config portion of the existing child dictionary.
:return: The child config with only values that are updated. If the proposed dictionary has no updates to make
to what exists on the APIC, then None is returned.
"""
update_config = {child_class: {'attributes': {}}}
for key, value in proposed_child.items():
existing_field = existing_child.get(key)
if value != existing_field:
update_config[child_class]['attributes'][key] = value
if not update_config[child_class]['attributes']:
return None
return update_config
def get_diff_children(self, aci_class):
"""
This method is used to retrieve the updated child configs by comparing the proposed children configs
        against the object's existing children configs.
:param aci_class: Type str.
This is the root dictionary key for the MO's configuration body, or the ACI class of the MO.
:return: The list of updated child config dictionaries. None is returned if there are no changes to the child
configurations.
"""
proposed_children = self.proposed[aci_class].get('children')
if proposed_children:
child_updates = []
existing_children = self.existing[0][aci_class].get('children', [])
# Loop through proposed child configs and compare against existing child configuration
for child in proposed_children:
child_class, proposed_child, existing_child = self.get_nested_config(child, existing_children)
if existing_child is None:
child_update = child
else:
child_update = self.get_diff_child(child_class, proposed_child, existing_child)
# Update list of updated child configs only if the child config is different than what exists
if child_update:
child_updates.append(child_update)
else:
return None
return child_updates
def get_existing(self):
"""
This method is used to get the existing object(s) based on the path specified in the module. Each module should
build the URL so that if the object's name is supplied, then it will retrieve the configuration for that particular
object, but if no name is supplied, then it will retrieve all MOs for the class. Following this method will ensure
that this method can be used to supply the existing configuration when using the get_diff method. The response, status,
and existing configuration will be added to the self.result dictionary.
"""
uri = self.url + self.filter_string
# Sign and encode request as to APIC's wishes
        if self.params['private_key']:
self.cert_auth(path=self.path + self.filter_string, method='GET')
resp, info = fetch_url(self.module, uri,
headers=self.headers,
method='GET',
timeout=self.params['timeout'],
use_proxy=self.params['use_proxy'])
self.response = info['msg']
self.status = info['status']
self.method = 'GET'
# Handle APIC response
if info['status'] == 200:
self.existing = json.loads(resp.read())['imdata']
else:
try:
# APIC error
self.response_json(info['body'])
self.fail_json(msg='APIC Error %(code)s: %(text)s' % self.error)
except KeyError:
# Connection error
self.fail_json(msg='Connection failed for %(url)s. %(msg)s' % info)
@staticmethod
def get_nested_config(proposed_child, existing_children):
"""
        This method is used for stripping off the outer layers of the child dictionaries so only the configuration
key, value pairs are returned.
:param proposed_child: Type dict.
The dictionary that represents the child config.
:param existing_children: Type list.
The list of existing child config dictionaries.
:return: The child's class as str (root config dict key), the child's proposed config dict, and the child's
existing configuration dict.
"""
for key in proposed_child.keys():
child_class = key
proposed_config = proposed_child[key]['attributes']
existing_config = None
# FIXME: Design causes issues for repeated child_classes
# get existing dictionary from the list of existing to use for comparison
for child in existing_children:
if child.get(child_class):
existing_config = child[key]['attributes']
# NOTE: This is an ugly fix
# Return the one that is a subset match
if set(proposed_config.items()).issubset(set(existing_config.items())):
break
return child_class, proposed_config, existing_config
def payload(self, aci_class, class_config, child_configs=None):
"""
This method is used to dynamically build the proposed configuration dictionary from the config related parameters
passed into the module. All values that were not passed values from the playbook task will be removed so as to not
inadvertently change configurations.
:param aci_class: Type str
This is the root dictionary key for the MO's configuration body, or the ACI class of the MO.
:param class_config: Type dict
This is the configuration of the MO using the dictionary keys expected by the API
:param child_configs: Type list
This is a list of child dictionaries associated with the MOs config. The list should only
include child objects that are used to associate two MOs together. Children that represent
MOs should have their own module.
"""
proposed = dict((k, str(v)) for k, v in class_config.items() if v is not None)
self.proposed = {aci_class: {'attributes': proposed}}
# add child objects to proposed
if child_configs:
children = []
for child in child_configs:
child_copy = deepcopy(child)
has_value = False
for root_key in child_copy.keys():
for final_keys, values in child_copy[root_key]['attributes'].items():
if values is None:
child[root_key]['attributes'].pop(final_keys)
else:
child[root_key]['attributes'][final_keys] = str(values)
has_value = True
if has_value:
children.append(child)
if children:
self.proposed[aci_class].update(dict(children=children))
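# Illustrative call (hypothetical values):
#   self.payload('fvTenant', dict(name='prod', descr=None))
# drops the unset 'descr' key and leaves
#   self.proposed == {'fvTenant': {'attributes': {'name': 'prod'}}}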
def post_config(self):
"""
This method is used to handle the logic when the module's state is equal to present. The method only pushes a change if
the object differs from what exists on the APIC, and if check_mode is False. A successful change will mark the
module as changed.
"""
if not self.config:
return
elif not self.module.check_mode:
# Sign and encode the request when certificate-based (signature) authentication is configured
if self.params['private_key']:
self.cert_auth(method='POST', payload=json.dumps(self.config))
resp, info = fetch_url(self.module, self.url,
data=json.dumps(self.config),
headers=self.headers,
method='POST',
timeout=self.params['timeout'],
use_proxy=self.params['use_proxy'])
self.response = info['msg']
self.status = info['status']
self.method = 'POST'
# Handle APIC response
if info['status'] == 200:
self.result['changed'] = True
self.response_json(resp.read())
else:
try:
# APIC error
self.response_json(info['body'])
self.fail_json(msg='APIC Error %(code)s: %(text)s' % self.error)
except KeyError:
# Connection error
self.fail_json(msg='Connection failed for %(url)s. %(msg)s' % info)
else:
self.result['changed'] = True
self.method = 'POST'
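# Note: with check_mode enabled and a non-empty self.config, the POST is
# skipped but the result is still marked changed (else branch above), so a
# check run reports the change that would have been made.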
def exit_json(self, **kwargs):
if 'state' in self.params:
if self.params['state'] in ('absent', 'present'):
if self.params['output_level'] in ('debug', 'info'):
self.result['previous'] = self.existing
# Return the gory details when we need it
if self.params['output_level'] == 'debug':
if 'state' in self.params:
self.result['filter_string'] = self.filter_string
self.result['method'] = self.method
# self.result['path'] = self.path # Adding 'path' in result causes state: absent in output
self.result['response'] = self.response
self.result['status'] = self.status
self.result['url'] = self.url
if 'state' in self.params:
self.original = self.existing
if self.params['state'] in ('absent', 'present'):
self.get_existing()
# if self.module._diff and self.original != self.existing:
# self.result['diff'] = dict(
# before=json.dumps(self.original, sort_keys=True, indent=4),
# after=json.dumps(self.existing, sort_keys=True, indent=4),
# )
self.result['current'] = self.existing
if self.params['output_level'] in ('debug', 'info'):
self.result['sent'] = self.config
self.result['proposed'] = self.proposed
self.result.update(**kwargs)
self.module.exit_json(**self.result)
def fail_json(self, msg, **kwargs):
# Return error information, if we have it
if self.error['code'] is not None and self.error['text'] is not None:
self.result['error'] = self.error
if 'state' in self.params:
if self.params['state'] in ('absent', 'present'):
if self.params['output_level'] in ('debug', 'info'):
self.result['previous'] = self.existing
# Return the gory details when we need it
if self.params['output_level'] == 'debug':
if self.imdata is not None:
self.result['imdata'] = self.imdata
self.result['totalCount'] = self.totalCount
if self.params['output_level'] == 'debug':
if self.url is not None:
if 'state' in self.params:
self.result['filter_string'] = self.filter_string
self.result['method'] = self.method
# self.result['path'] = self.path # Adding 'path' in result causes state: absent in output
self.result['response'] = self.response
self.result['status'] = self.status
self.result['url'] = self.url
if 'state' in self.params:
if self.params['output_level'] in ('debug', 'info'):
self.result['sent'] = self.config
self.result['proposed'] = self.proposed
self.result.update(**kwargs)
self.module.fail_json(msg=msg, **self.result)
|
# Ex. 11: read a time as HH:MM and print how many minutes have passed since 00:00.
hora = input('Enter the time (HH:MM): ').split(':')
minutes = int(hora[0]) * 60 + int(hora[1])  # hours * 60 + minutes
print(f'{minutes} minutes have passed since 00:00.')
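# Example: the input '02:15' gives 2 * 60 + 15 = 135 minutes.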
|
#ifndef VOXBLOX_BLOCK_HASH_H_
#define VOXBLOX_BLOCK_HASH_H_
#include <functional>
#include <unordered_map>
#include <unordered_set>
#include <Eigen/Core>
#include "voxblox/core/common.h"
namespace voxblox {
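// Hash functor for 3-D block indices: each integer coordinate is multiplied by
// a large prime and the results are XOR-ed together (a common spatial hash).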
struct BlockIndexHash {
static constexpr size_t prime1 = 73856093;
static constexpr size_t prime2 = 19349663;
static constexpr size_t prime3 = 83492791;
std::size_t operator()(const BlockIndex& index) const {
return (static_cast<unsigned int>(index.x()) * prime1 ^ index.y() * prime2 ^
index.z() * prime3);
}
};
template <typename ValueType>
struct BlockHashMapType {
typedef std::unordered_map<
BlockIndex, ValueType, BlockIndexHash, std::equal_to<BlockIndex>,
Eigen::aligned_allocator<std::pair<const BlockIndex, ValueType> > >
type;
};
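// Illustrative usage (assumes BlockIndex is the integer 3-vector index type
// declared in voxblox/core/common.h):
//   BlockHashMapType<int>::type block_counts;
//   block_counts[BlockIndex(1, 2, 3)] = 42;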
typedef std::unordered_set<AnyIndex, BlockIndexHash, std::equal_to<AnyIndex>,
Eigen::aligned_allocator<AnyIndex> >
IndexSet;
typedef typename BlockHashMapType<IndexVector>::type HierarchicalIndexMap;
typedef typename HierarchicalIndexMap::value_type HierarchicalIndex;
} // namespace voxblox
#endif // VOXBLOX_BLOCK_HASH_H_
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Test alter op layout pass"""
import pytest
import tvm
from tvm import relay, topi
from tvm.relay import transform, analysis
from tvm.relay.testing.temp_op_attr import TempOpAttr
from tvm.relay.testing import run_infer_type
import numpy as np
import tvm.testing
from tvm.relay import testing
def run_opt_pass(expr, passes):
passes = passes if isinstance(passes, list) else [passes]
mod = tvm.IRModule.from_expr(expr)
seq = tvm.transform.Sequential(passes)
with tvm.transform.PassContext(opt_level=3):
mod = seq(mod)
entry = mod["main"]
return entry if isinstance(expr, relay.Function) else entry.body
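# run_opt_pass is the helper used by every test below, e.g.
#   run_opt_pass(before(), transform.AlterOpLayout())
# wraps the expression in an IRModule, applies the passes at opt_level=3 and
# returns the rewritten function (or its body for bare expressions).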
def test_alter_op():
"""Test directly replacing an operator with a new one"""
def before():
x = relay.var("x", shape=(1, 64, 56, 56))
weight = relay.var("weight", shape=(64, 64, 3, 3))
y = relay.nn.conv2d(x, weight, channels=64, kernel_size=(3, 3), padding=(1, 1))
y = relay.nn.relu(y)
y = relay.Function([x, weight], y)
return y
def alter_conv2d(attrs, inputs, tinfos, out_type):
data, weight = inputs
weight = relay.multiply(weight, relay.const(2.0, "float32"))
return relay.nn.conv2d(data, weight, **attrs)
def expected():
x = relay.var("x", shape=(1, 64, 56, 56))
weight = relay.var("weight", shape=(64, 64, 3, 3))
y = relay.nn.conv2d(
x,
relay.multiply(weight, relay.const(2.0, "float32")),
channels=64,
kernel_size=(3, 3),
padding=(1, 1),
)
y = relay.nn.relu(y)
y = relay.Function([x, weight], y)
return y
with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d):
a = before()
a = run_opt_pass(a, transform.AlterOpLayout())
b = run_opt_pass(expected(), transform.InferType())
assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)
def test_alter_return_none():
"""Test doing nothing by returning 'None'"""
def before():
x = relay.var("x", shape=(1, 64, 56, 56))
y = relay.nn.global_max_pool2d(x)
y = relay.Function([x], y)
return y
called = [False]
def alter_conv2d(attrs, inputs, tinfos, out_type):
called[0] = True
return None
with TempOpAttr("nn.global_max_pool2d", "FTVMAlterOpLayout", alter_conv2d):
a = before()
a = run_opt_pass(a, transform.AlterOpLayout())
b = run_opt_pass(before(), transform.InferType())
assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)
assert called[0]
def test_alter_layout():
"""Test alternating the layout of a conv2d.
The layout of broadcast operators and the weight should be changed accordingly.
"""
def before():
x = relay.var("x", shape=(1, 64, 56, 56))
bias = relay.var("bias")
weight = relay.var("weight")
y = relay.nn.conv2d(x, weight, channels=64, kernel_size=(3, 3), padding=(1, 1))
y = relay.nn.bias_add(y, bias)
# a useless tuple, which will be eliminated
y = relay.Tuple([y])[0]
y = relay.nn.relu(y)
y = relay.nn.max_pool2d(y, pool_size=(2, 2))
y = relay.cast(y, "int32")
y = relay.nn.batch_flatten(y)
y = relay.Function(analysis.free_vars(y), y)
return y
def alter_conv2d(attrs, inputs, tinfos, out_type):
data, weight = inputs
new_attrs = dict(attrs)
new_attrs["data_layout"] = "NCHW16c"
new_attrs["kernel_layout"] = "OIHW16i"
return relay.nn.conv2d(data, weight, **new_attrs)
def expected():
x = relay.var("x", shape=(1, 64, 56, 56))
bias = relay.var("bias", shape=(64,))
weight = relay.var("weight", shape=(64, 64, 3, 3))
y = relay.layout_transform(x, "NCHW", "NCHW16c")
w = relay.layout_transform(weight, "OIHW", "OIHW16i")
y = relay.nn.conv2d(
y,
w,
channels=64,
kernel_size=(3, 3),
padding=(1, 1),
kernel_layout="OIHW16i",
data_layout="NCHW16c",
)
b = relay.expand_dims(bias, axis=1, num_newaxis=2)
b = relay.expand_dims(b, axis=0, num_newaxis=1)
b = relay.layout_transform(b, "NCHW", "NCHW16c")
y = relay.add(y, b)
y = relay.nn.relu(y)
y = relay.nn.max_pool2d(y, pool_size=(2, 2), layout="NCHW16c")
y = relay.cast(y, "int32")
y = relay.layout_transform(y, "NCHW16c", "NCHW")
y = relay.nn.batch_flatten(y)
y = relay.Function(analysis.free_vars(y), y)
return y
with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d):
a = before()
a = run_opt_pass(a, [transform.CanonicalizeOps(), transform.AlterOpLayout()])
b = run_opt_pass(expected(), transform.InferType())
assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)
def test_alter_layout_lrn():
"""Test alternating the layout of a conv2d.
The layout of broadcast operators and the weight should be changed accordingly.
"""
def before():
x = relay.var("x", shape=(1, 64, 56, 56))
bias = relay.var("bias")
weight = relay.var("weight")
y = relay.nn.conv2d(x, weight, channels=64, kernel_size=(3, 3), padding=(1, 1))
y = relay.nn.max_pool2d(y, pool_size=(2, 2))
y = relay.nn.lrn(y)
y = relay.Function(analysis.free_vars(y), y)
return y
def alter_conv2d(attrs, inputs, tinfos, out_type):
data, weight = inputs
new_attrs = dict(attrs)
new_attrs["data_layout"] = "NCHW16c"
new_attrs["kernel_layout"] = "OIHW16i"
return relay.nn.conv2d(data, weight, **new_attrs)
def expected():
x = relay.var("x", shape=(1, 64, 56, 56))
bias = relay.var("bias", shape=(64,))
weight = relay.var("weight", shape=(64, 64, 3, 3))
y = relay.layout_transform(x, "NCHW", "NCHW16c")
w = relay.layout_transform(weight, "OIHW", "OIHW16i")
y = relay.nn.conv2d(
y,
w,
channels=64,
kernel_size=(3, 3),
padding=(1, 1),
kernel_layout="OIHW16i",
data_layout="NCHW16c",
)
y = relay.nn.max_pool2d(y, pool_size=(2, 2), layout="NCHW16c")
y = relay.layout_transform(y, "NCHW16c", "NCHW")
y = relay.nn.lrn(y)
y = relay.Function(analysis.free_vars(y), y)
return y
with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d):
a = before()
a = run_opt_pass(a, [transform.CanonicalizeOps(), transform.AlterOpLayout()])
b = run_opt_pass(expected(), transform.InferType())
assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)
def test_alter_layout_dual_path():
"""
Test altering the layout with two outputs.
One path continues to use the new layout while the other falls back to the old layout.
"""
def before():
x = relay.var("x", shape=(1, 64, 56, 56))
weight1 = relay.var("weight1")
weight2 = relay.var("weight2")
y = relay.nn.conv2d(x, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1))
y = relay.nn.relu(y)
y1 = relay.nn.conv2d(y, weight2, channels=32, kernel_size=(3, 3), padding=(1, 1))
y1 = relay.nn.relu(y1)
y2 = relay.nn.batch_flatten(y)
ret = relay.Tuple([y1, y2])
y = relay.Function(analysis.free_vars(ret), ret)
return y
def alter_conv2d(attrs, inputs, tinfos, out_type):
data, weight = inputs
new_attrs = dict(attrs)
new_attrs["data_layout"] = "NCHW16c"
return relay.nn.conv2d(data, weight, **new_attrs)
def expected():
x = relay.var("x", shape=(1, 64, 56, 56))
weight1 = relay.var("weight1")
weight2 = relay.var("weight2")
y = relay.layout_transform(x, "NCHW", "NCHW16c")
y = relay.nn.conv2d(
y, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW16c"
)
y = relay.nn.relu(y)
y1 = relay.nn.conv2d(
y, weight2, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW16c"
)
y1 = relay.nn.relu(y1)
y1 = relay.layout_transform(y1, "NCHW16c", "NCHW")
y2 = relay.layout_transform(y, "NCHW16c", "NCHW")
y2 = relay.nn.batch_flatten(y2)
ret = relay.Tuple([y1, y2])
y = relay.Function(analysis.free_vars(ret), ret)
return y
with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d):
a = before()
a = run_opt_pass(a, transform.AlterOpLayout())
b = run_opt_pass(expected(), transform.InferType())
assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)
def test_alter_layout_resnet():
"""Test alternating the layout of a residual block
This also tests the elimination of duplicated transformation.
If a same transformation applies to a same node twice, only one transformation will be created.
"""
def before():
x = relay.var("x", shape=(1, 64, 56, 56))
weight1 = relay.var("weight1")
weight2 = relay.var("weight2")
y = relay.nn.conv2d(x, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1))
y = relay.nn.relu(y)
y2 = relay.nn.conv2d(x, weight2, channels=32, kernel_size=(1, 1))
y2 = relay.nn.relu(y2)
y = y + y2
y = relay.nn.global_max_pool2d(y)
return relay.Function(analysis.free_vars(y), y)
def alter_conv2d(attrs, inputs, tinfos, out_type):
data, weight = inputs
new_attrs = dict(attrs)
new_attrs["data_layout"] = "NCHW16c"
return relay.nn.conv2d(data, weight, **new_attrs)
def expected():
x = relay.var("x", shape=(1, 64, 56, 56))
weight1 = relay.var("weight1")
weight2 = relay.var("weight2")
x = relay.layout_transform(x, "NCHW", "NCHW16c")
y = relay.nn.conv2d(
x, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW16c"
)
y = relay.nn.relu(y)
y2 = relay.nn.conv2d(x, weight2, channels=32, kernel_size=(1, 1), data_layout="NCHW16c")
y2 = relay.nn.relu(y2)
y = y + y2
y = relay.nn.global_max_pool2d(y, layout="NCHW16c")
y = relay.layout_transform(y, "NCHW16c", "NCHW")
return relay.Function(analysis.free_vars(y), y)
with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d):
a = before()
a = run_opt_pass(a, transform.AlterOpLayout())
b = run_opt_pass(expected(), transform.InferType())
assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)
def test_alter_layout_broadcast_op():
"""Test boradcast operators"""
def before():
x = relay.var("x", shape=(1, 64, 56, 56))
bias = relay.var("bias", shape=(64,))
scale = relay.var("scale", shape=(64, 1, 1))
weight = relay.var("weight")
y = relay.nn.conv2d(x, weight, channels=64, kernel_size=(3, 3), padding=(1, 1))
y = relay.nn.bias_add(y, bias) # test broadcasting to lhs
y = relay.multiply(scale, y) # test broadcasting to rhs
y = relay.Function(analysis.free_vars(y), y)
return y
def alter_conv2d(attrs, inputs, tinfos, out_type):
data, weight = inputs
new_attrs = dict(attrs)
new_attrs["data_layout"] = "NCHW16c"
return relay.nn.conv2d(data, weight, **new_attrs)
def expected():
x = relay.var("x", shape=(1, 64, 56, 56))
bias = relay.var("bias", shape=(64,))
scale = relay.var("scale", shape=(64, 1, 1))
weight = relay.var("weight")
x = relay.layout_transform(x, "NCHW", "NCHW16c")
bias = relay.expand_dims(bias, 1, 2)
bias = relay.expand_dims(bias, 0, 1)
bias = relay.layout_transform(bias, "NCHW", "NCHW16c")
scale = relay.expand_dims(scale, 0, 1)
scale = relay.layout_transform(scale, "NCHW", "NCHW16c")
y = relay.nn.conv2d(
x, weight, channels=64, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW16c"
)
y = relay.add(y, bias) # test broadcasting to lhs
y = relay.multiply(scale, y) # test broadcasting to rhs
y = relay.layout_transform(y, "NCHW16c", "NCHW")
y = relay.Function(analysis.free_vars(y), y)
return y
with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d):
a = before()
a = run_opt_pass(a, [transform.CanonicalizeOps(), transform.AlterOpLayout()])
b = run_opt_pass(expected(), transform.InferType())
assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)
def test_alter_layout_broadcast_scalar_op():
"""Test alternating the layout of a conv2d.
The layout of broadcast operators and the weight should be changed accordingly.
"""
def before():
x = relay.var("x", shape=(1, 500, 500, 64))
kernel = relay.var("kernel", shape=(3, 3, 64, 64), dtype="float32")
bias = relay.var("bias", shape=(64,))
multiplier1 = relay.var("multiplier1", shape=(1,), dtype="float32")
multiplier2 = relay.var("multiplier2", shape=(1, 1), dtype="float32")
y = relay.nn.conv2d(x, kernel, data_layout="NHWC", kernel_layout="HWIO", kernel_size=(3, 3))
y = relay.add(bias, y)
y = relay.nn.relu(y)
y = relay.multiply(multiplier1, y)
y = relay.multiply(y, multiplier2)
y = relay.Function(analysis.free_vars(y), y)
return y
def alter_conv2d(attrs, inputs, tinfos, out_type):
data, weight = inputs
new_attrs = dict(attrs)
new_attrs["data_layout"] = "NCHW16c"
return relay.nn.conv2d(data, weight, **new_attrs)
def expected():
x = relay.var("x", shape=(1, 500, 500, 64))
kernel = relay.var("kernel", shape=(3, 3, 64, 64), dtype="float32")
bias = relay.var("bias", shape=(64,))
multiplier1 = relay.var("multiplier1", shape=(1,), dtype="float32")
multiplier2 = relay.var("multiplier2", shape=(1, 1), dtype="float32")
b = relay.expand_dims(bias, axis=0, num_newaxis=3)
b = relay.layout_transform(b, "NHWC", "NCHW16c")
y = relay.layout_transform(x, "NHWC", "NCHW16c")
y = relay.nn.conv2d(
y, kernel, data_layout="NCHW16c", kernel_layout="HWIO", kernel_size=(3, 3)
)
y = relay.add(b, y)
y = relay.nn.relu(y)
y = relay.multiply(multiplier1, y)
y = relay.multiply(y, multiplier2)
y = relay.layout_transform(y, "NCHW16c", "NHWC")
y = relay.Function(analysis.free_vars(y), y)
return y
with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d):
a = before()
a = run_opt_pass(a, [transform.CanonicalizeOps(), transform.AlterOpLayout()])
b = run_opt_pass(expected(), transform.InferType())
assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)
def test_alter_layout_scalar():
"""Test alternating the layout of a conv2d.
The layout of broadcast operators and the weight should be changed accordingly.
"""
def before():
x = relay.var("x", shape=(1, 64, 56, 56))
weight = relay.var("weight")
y = relay.nn.conv2d(x, weight, channels=64, kernel_size=(3, 3), padding=(1, 1))
y = relay.add(y, relay.const(1, "float32"))
y = relay.Function(analysis.free_vars(y), y)
return y
def alter_conv2d(attrs, inputs, tinfos, out_type):
data, weight = inputs
new_attrs = dict(attrs)
new_attrs["data_layout"] = "NCHW16c"
return relay.nn.conv2d(data, weight, **new_attrs)
def expected():
x = relay.var("x", shape=(1, 64, 56, 56))
w = relay.var("weight")
y = relay.layout_transform(x, "NCHW", "NCHW16c")
y = relay.nn.conv2d(
y, w, channels=64, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW16c"
)
y = relay.add(y, relay.const(1.0, "float32"))
y = relay.layout_transform(y, "NCHW16c", "NCHW")
y = relay.Function(analysis.free_vars(y), y)
return y
with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d):
a = before()
a = run_opt_pass(a, [transform.CanonicalizeOps(), transform.AlterOpLayout()])
b = run_opt_pass(expected(), transform.InferType())
assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)
def test_alter_layout_scalar_regression():
"""regression test where scalar fails"""
def before():
x = relay.var("x", shape=(1, 56, 56, 64))
weight = relay.var("weight", shape=(3, 3, 64, 16))
bias = relay.var("bias", shape=(1, 1, 1, 16))
y = relay.nn.conv2d(
x,
weight,
channels=16,
kernel_size=(3, 3),
padding=(1, 1),
data_layout="NHWC",
kernel_layout="HWIO",
)
y = relay.add(y, bias)
mean = relay.mean(y, axis=3, exclude=True)
var = relay.variance(y, axis=3, exclude=True)
gamma = relay.var("gamma")
beta = relay.var("beta")
y = relay.nn.batch_norm(y, gamma, beta, mean, var, axis=3)
y = y[0]
return relay.Function(analysis.free_vars(y), y)
def alter_conv2d(attrs, inputs, tinfos, out_type):
data, weight = inputs
new_attrs = dict(attrs)
new_attrs["data_layout"] = "NCHW16c"
return relay.nn.conv2d(data, weight, **new_attrs)
def expected():
x = relay.var("x", shape=(1, 56, 56, 64))
weight = relay.var("weight", shape=(3, 3, 64, 16))
bias = relay.var("bias", shape=(1, 1, 1, 16))
x = relay.layout_transform(x, src_layout="NHWC", dst_layout="NCHW")
x = relay.layout_transform(x, src_layout="NCHW", dst_layout="NCHW16c")
weight = relay.layout_transform(weight, src_layout="HWIO", dst_layout="OIHW")
y = relay.nn.conv2d(
x, weight, channels=16, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW16c"
)
bias = relay.layout_transform(bias, src_layout="NHWC", dst_layout="NCHW")
bias = relay.layout_transform(bias, src_layout="NCHW", dst_layout="NCHW16c")
add = relay.add(y, bias)
mean = relay.mean(add, axis=[1, 4], exclude=True)
var = relay.variance(add, axis=[1, 4], exclude=True)
denom = relay.const(1.0) / relay.sqrt(var + relay.const(1e-05))
gamma = relay.var("gamma", shape=(16,))
denom_c16c = denom * relay.layout_transform(gamma, src_layout="C", dst_layout="C16c")
denom = relay.layout_transform(denom_c16c, src_layout="C16c", dst_layout="C")
denom_expand1 = relay.expand_dims(denom, axis=1, num_newaxis=2)
denom_expand2 = relay.expand_dims(denom_expand1, axis=0)
denom_nchwc16 = relay.layout_transform(
denom_expand2, src_layout="NCHW", dst_layout="NCHW16c"
)
out = add * denom_nchwc16
beta = relay.var("beta", shape=(16,))
numerator_c16c = (-mean) * denom_c16c + relay.layout_transform(
beta, src_layout="C", dst_layout="C16c"
)
numerator = relay.layout_transform(numerator_c16c, src_layout="C16c", dst_layout="C")
numerator_expand1 = relay.expand_dims(numerator, axis=1, num_newaxis=2)
numerator_expand2 = relay.expand_dims(numerator_expand1, axis=0)
numerator_nchwc16 = relay.layout_transform(
numerator_expand2, src_layout="NCHW", dst_layout="NCHW16c"
)
out = out + numerator_nchwc16
out = relay.layout_transform(out, src_layout="NCHW16c", dst_layout="NCHW")
y = relay.layout_transform(out, src_layout="NCHW", dst_layout="NHWC")
y = relay.Function(analysis.free_vars(y), y)
return y
with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d):
a = before()
desired_layouts = {"nn.conv2d": ["NCHW", "default"], "nn.batch_norm": ["NHWC", "default"]}
a = run_opt_pass(
a,
[
transform.InferType(),
relay.transform.ConvertLayout(desired_layouts),
transform.SimplifyInference(),
transform.CanonicalizeOps(),
transform.AlterOpLayout(),
],
)
b = run_opt_pass(expected(), transform.InferType())
assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)
def test_alter_layout_concatenate():
"""NCHW, NHWC and corner case concatenate layout transform."""
def alter_conv2d(attrs, inputs, tinfos, out_type):
data, weight = inputs
new_attrs = dict(attrs)
new_attrs["data_layout"] = "NCHW16c"
return relay.nn.conv2d(data, weight, **new_attrs)
# NCHW layout transformation.
def before_nchw():
x = relay.var("x", shape=(1, 64, 56, 56))
weight1 = relay.var("weight1")
weight2 = relay.var("weight2")
y = relay.nn.conv2d(x, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1))
y1 = relay.nn.conv2d(y, weight2, channels=32, kernel_size=(3, 3), padding=(1, 1))
ret = relay.concatenate([y, y1], axis=1)
y = relay.Function(analysis.free_vars(ret), ret)
return y
def expected_nchw():
x = relay.var("x", shape=(1, 64, 56, 56))
weight1 = relay.var("weight1")
weight2 = relay.var("weight2")
y = relay.layout_transform(x, "NCHW", "NCHW16c")
y = relay.nn.conv2d(
y, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW16c"
)
y1 = relay.nn.conv2d(
y, weight2, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW16c"
)
ret = relay.concatenate([y, y1], axis=1)
ret = relay.layout_transform(ret, "NCHW16c", "NCHW")
y = relay.Function(analysis.free_vars(ret), ret)
return y
with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d):
a = before_nchw()
a = run_opt_pass(a, transform.AlterOpLayout())
b = run_opt_pass(expected_nchw(), transform.InferType())
assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)
# NHWC layout transformation.
def before_nhwc():
x = relay.var("x", shape=(1, 56, 56, 64))
weight1 = relay.var("weight1")
weight2 = relay.var("weight2")
y = relay.nn.conv2d(
x, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NHWC"
)
y1 = relay.nn.conv2d(
y, weight2, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NHWC"
)
ret = relay.concatenate([y, y1], axis=3)
y = relay.Function(analysis.free_vars(ret), ret)
return y
def expected_nhwc():
x = relay.var("x", shape=(1, 56, 56, 64))
weight1 = relay.var("weight1")
weight2 = relay.var("weight2")
y = relay.layout_transform(x, "NHWC", "NCHW16c")
y = relay.nn.conv2d(
y, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW16c"
)
y1 = relay.nn.conv2d(
y, weight2, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW16c"
)
ret = relay.concatenate([y, y1], axis=1)
ret = relay.layout_transform(ret, "NCHW16c", "NHWC")
y = relay.Function(analysis.free_vars(ret), ret)
return y
with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d):
a = before_nhwc()
a = run_opt_pass(a, transform.AlterOpLayout())
b = run_opt_pass(expected_nhwc(), transform.InferType())
assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)
def test_alter_layout_nchw_upsamping_op():
"""Test upsamping operators"""
def before():
x = relay.var("x", shape=(1, 32, 28, 28))
weight = relay.var("weight", shape=(32, 32, 3, 3))
y = relay.nn.conv2d(x, weight, channels=32, kernel_size=(3, 3), padding=(1, 1))
y = relay.nn.upsampling(y, scale_h=2, scale_w=2)
y = relay.nn.avg_pool2d(y, pool_size=(2, 2), strides=(2, 2))
y = relay.Function(analysis.free_vars(y), y)
return y
def alter_conv2d(attrs, inputs, tinfos, out_type):
data, weight = inputs
new_attrs = dict(attrs)
new_attrs["data_layout"] = "NCHW16c"
return relay.nn.conv2d(data, weight, **new_attrs)
def expected():
x = relay.var("x", shape=(1, 32, 28, 28))
weight = relay.var("weight")
x = relay.layout_transform(x, "NCHW", "NCHW16c")
y = relay.nn.conv2d(
x, weight, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW16c"
)
y = relay.nn.upsampling(y, scale_h=2, scale_w=2, layout="NCHW16c")
y = relay.nn.avg_pool2d(y, pool_size=(2, 2), strides=(2, 2), layout="NCHW16c")
y = relay.layout_transform(y, "NCHW16c", "NCHW")
y = relay.Function(analysis.free_vars(y), y)
return y
with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d):
a = before()
a = run_opt_pass(a, transform.AlterOpLayout())
b = run_opt_pass(expected(), transform.InferType())
assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)
def test_alter_layout_nchw_dyn_upsamping_op():
"""Test upsamping operators"""
def before():
x = relay.var("x", shape=(1, 32, 28, 28))
weight = relay.var("weight", shape=(32, 32, 3, 3))
y = relay.nn.conv2d(x, weight, channels=32, kernel_size=(3, 3), padding=(1, 1))
y = relay.nn.upsampling(y, scale_h=relay.const(2), scale_w=relay.const(2))
y = relay.nn.avg_pool2d(y, pool_size=(2, 2), strides=(2, 2))
y = relay.Function(analysis.free_vars(y), y)
return y
def alter_conv2d(attrs, inputs, tinfos, out_type):
data, weight = inputs
new_attrs = dict(attrs)
new_attrs["data_layout"] = "NCHW16c"
return relay.nn.conv2d(data, weight, **new_attrs)
def expected():
x = relay.var("x", shape=(1, 32, 28, 28))
weight = relay.var("weight")
x = relay.layout_transform(x, "NCHW", "NCHW16c")
y = relay.nn.conv2d(
x, weight, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW16c"
)
y = relay.nn.upsampling(y, scale_h=relay.const(2), scale_w=relay.const(2), layout="NCHW16c")
y = relay.nn.avg_pool2d(y, pool_size=(2, 2), strides=(2, 2), layout="NCHW16c")
y = relay.layout_transform(y, "NCHW16c", "NCHW")
y = relay.Function(analysis.free_vars(y), y)
return y
with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d):
a = before()
a = run_opt_pass(a, transform.AlterOpLayout())
b = run_opt_pass(expected(), transform.InferType())
assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)
@tvm.testing.parametrize_targets("llvm")
def test_alter_layout_strided_slice(target, dev):
"""Test rewriting strided_slice during alter_iop_layout"""
def before():
x = relay.var("x", shape=(1, 32, 28, 28))
weight = relay.var("weight", shape=(32, 32, 3, 3))
y = relay.nn.conv2d(x, weight, channels=32, kernel_size=(3, 3), padding=(1, 1))
y = relay.strided_slice(y, begin=[0, 16], end=[1, 33], strides=[1, 1])
y = relay.Function(analysis.free_vars(y), y)
return y
def alter_conv2d(attrs, inputs, tinfos, out_type):
data, weight = inputs
new_attrs = dict(attrs)
new_attrs["data_layout"] = "NCHW4c"
return relay.nn.conv2d(data, weight, **new_attrs)
def expected():
x = relay.var("x", shape=(1, 32, 28, 28))
weight = relay.var("weight", shape=(32, 32, 3, 3))
weight = relay.layout_transform(weight, "OIHW", "OIHW4i4o")
x = relay.layout_transform(x, "NCHW", "NCHW4c")
y = relay.op.nn.contrib_conv2d_nchwc(
x, weight, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW4c"
)
y = relay.strided_slice(y, begin=[0, 4], end=[1, 21], strides=[1, 1])
y = relay.layout_transform(y, "NCHW4c", "NCHW")
y = relay.Function(analysis.free_vars(y), y)
return y
with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d):
a = before()
b = run_opt_pass(expected(), transform.InferType())
# Verify inference result
mod_before = tvm.IRModule()
mod_new = tvm.IRModule()
mod_before["main"] = a
mod_new["main"] = b
mod_before = transform.InferType()(mod_before)
mod_new = transform.InferType()(mod_new)
with relay.build_config(opt_level=3):
for kind in ["graph", "debug", "vm"]:
np_data = np.random.uniform(size=(1, 32, 28, 28)).astype("float32")
np_weight = np.random.uniform(size=(32, 32, 3, 3)).astype("float32")
f_before = relay.create_executor(
kind, mod=mod_before, device=dev, target=target
).evaluate()
result_before = f_before(np_data, np_weight)
f_new = relay.create_executor(kind, mod=mod_new, device=dev, target=target).evaluate()
result_new = f_new(np_data, np_weight)
tvm.testing.assert_allclose(
result_before.numpy(), result_new.numpy(), rtol=1e-5, atol=1e-5
)
def test_alter_layout_strided_slice_axes_nhwc():
"""Test rewriting strided_slice with axes during alter_iop_layout"""
def before():
x = relay.var("x", shape=(1, 28, 28, 32))
weight = relay.var("weight", shape=(3, 3, 32, 32))
y = relay.nn.conv2d(
x,
weight,
channels=32,
kernel_size=(3, 3),
padding=(1, 1),
data_layout="NHWC",
kernel_layout="HWIO",
)
y = relay.strided_slice(y, begin=[0, 16], end=[1, 32], strides=[1, 1], axes=[0, 3])
y = relay.Function(analysis.free_vars(y), y)
return y
def alter_conv2d(attrs, inputs, tinfos, out_type):
data, weight = inputs
new_attrs = dict(attrs)
new_attrs["data_layout"] = "NHWC4c"
return relay.nn.conv2d(data, weight, **new_attrs)
def expected():
x = relay.var("x", shape=(1, 28, 28, 32))
weight = relay.var("weight", shape=(3, 3, 32, 32))
x = relay.layout_transform(x, "NHWC", "NHWC4c")
y = relay.op.nn.conv2d(
x,
weight,
channels=32,
kernel_size=(3, 3),
padding=(1, 1),
data_layout="NHWC4c",
kernel_layout="HWIO",
)
y = relay.strided_slice(y, begin=[0, 4], end=[1, 8], strides=[1, 1], axes=[0, 3])
y = relay.layout_transform(y, "NHWC4c", "NHWC")
y = relay.Function(analysis.free_vars(y), y)
return y
with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d):
a = run_opt_pass(before(), transform.AlterOpLayout())
b = run_opt_pass(expected(), transform.InferType())
mod_before = tvm.IRModule()
mod_new = tvm.IRModule()
mod_before["main"] = a
mod_new["main"] = b
assert tvm.ir.structural_equal(mod_before, mod_new)
def test_alter_layout_depthwise_conv2d():
"""Test depthwise_conv2d operator"""
def before():
x = relay.var("x", shape=(1, 32, 56, 56))
w = relay.var("w", shape=(32, 1, 3, 3))
y = relay.nn.conv2d(x, w, padding=(1, 1), channels=32, kernel_size=(3, 3), groups=32)
y = relay.Function(analysis.free_vars(y), y)
return y
from tvm import topi
def alter_conv2d(attrs, inputs, tinfos, out_type):
with tvm.target.Target("llvm -mcpu=core-avx2"):
return topi.nn.conv2d_alter_layout(attrs, inputs, tinfos, out_type)
def expected():
x = relay.var("x", shape=(1, 32, 56, 56))
w = relay.var("w", shape=(32, 1, 3, 3))
x = relay.layout_transform(x, "NCHW", "NCHW8c")
w = relay.layout_transform(w, "OIHW", "OIHW1i8o")
y = relay.nn.contrib_depthwise_conv2d_nchwc(
x,
w,
padding=(1, 1, 1, 1),
channels=32,
kernel_size=(3, 3),
groups=32,
data_layout="NCHW8c",
kernel_layout="OIHW1i8o",
out_layout="NCHW8c",
)
y = relay.layout_transform(y, "NCHW8c", "NCHW")
y = relay.Function(analysis.free_vars(y), y)
return y
with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d):
a = before()
a = run_opt_pass(a, [transform.CanonicalizeOps(), transform.AlterOpLayout()])
b = run_opt_pass(expected(), transform.InferType())
assert tvm.ir.structural_equal(a, b)
def test_alter_layout_prelu():
"""Test PRelu operator"""
def before():
x = relay.var("x", shape=(1, 64, 56, 56))
weight = relay.var("weight")
alpha = relay.var("alpha", relay.IncompleteType())
y = relay.nn.conv2d(x, weight, channels=64, kernel_size=(3, 3), padding=(1, 1))
y = relay.nn.prelu(y, alpha)
y = relay.Function(analysis.free_vars(y), y)
return y
def alter_conv2d(attrs, inputs, tinfos, out_type):
data, weight = inputs
new_attrs = dict(attrs)
new_attrs["data_layout"] = "NCHW16c"
return relay.nn.conv2d(data, weight, **new_attrs)
def expected():
x = relay.var("x", shape=(1, 64, 56, 56))
w = relay.var("weight")
alpha = relay.var("alpha", relay.IncompleteType())
y = relay.layout_transform(x, "NCHW", "NCHW16c")
y = relay.nn.conv2d(
y, w, channels=64, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW16c"
)
y = relay.layout_transform(y, "NCHW16c", "NCHW")
y = relay.nn.prelu(y, alpha)
y = relay.Function(analysis.free_vars(y), y)
return y
with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d):
a = before()
a = run_opt_pass(a, [transform.CanonicalizeOps(), transform.AlterOpLayout()])
b = run_opt_pass(expected(), transform.InferType())
assert tvm.ir.structural_equal(a, b)
def test_alter_layout_pad():
"""Check NCHW, NHWC and corner case for pad layout conversion"""
def alter_conv2d(attrs, inputs, tinfos, out_type):
data, weight = inputs
new_attrs = dict(attrs)
new_attrs["data_layout"] = "NCHW16c"
return relay.nn.conv2d(data, weight, **new_attrs)
# Check NCHW conversion.
def before_nchw():
x = relay.var("x", shape=(1, 64, 56, 56))
weight1 = relay.var("weight1")
y = relay.nn.conv2d(x, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1))
ret = relay.nn.pad(y, pad_width=((0, 0), (0, 0), (1, 1), (1, 1)))
y = relay.Function(analysis.free_vars(ret), ret)
return y
def expected_nchw():
x = relay.var("x", shape=(1, 64, 56, 56))
weight1 = relay.var("weight1")
y = relay.layout_transform(x, "NCHW", "NCHW16c")
y = relay.nn.conv2d(
y, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW16c"
)
ret = relay.nn.pad(y, pad_width=((0, 0), (0, 0), (1, 1), (1, 1), (0, 0)))
ret = relay.layout_transform(ret, "NCHW16c", "NCHW")
y = relay.Function(analysis.free_vars(ret), ret)
return y
with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d):
a = before_nchw()
a = run_opt_pass(a, transform.AlterOpLayout())
b = run_opt_pass(expected_nchw(), transform.InferType())
assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)
# Check NHWC conversion.
def before_nhwc():
x = relay.var("x", shape=(1, 56, 56, 64))
weight1 = relay.var("weight1")
y = relay.nn.conv2d(
x, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NHWC"
)
ret = relay.nn.pad(y, pad_width=((0, 0), (1, 1), (1, 1), (0, 0)))
y = relay.Function(analysis.free_vars(ret), ret)
return y
def expected_nhwc():
x = relay.var("x", shape=(1, 56, 56, 64))
weight1 = relay.var("weight1")
y = relay.layout_transform(x, "NHWC", "NCHW16c")
y = relay.nn.conv2d(
y, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW16c"
)
ret = relay.nn.pad(y, pad_width=((0, 0), (0, 0), (1, 1), (1, 1), (0, 0)))
ret = relay.layout_transform(ret, "NCHW16c", "NHWC")
y = relay.Function(analysis.free_vars(ret), ret)
return y
with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d):
a = before_nhwc()
a = run_opt_pass(a, transform.AlterOpLayout())
b = run_opt_pass(expected_nhwc(), transform.InferType())
assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)
# Check that conversion does not happen when padding along the split axis.
def before():
x = relay.var("x", shape=(1, 64, 56, 56))
weight1 = relay.var("weight1")
y = relay.nn.conv2d(x, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1))
ret = relay.nn.pad(y, pad_width=((0, 0), (1, 1), (1, 1), (1, 1)))
y = relay.Function(analysis.free_vars(ret), ret)
return y
def expected():
x = relay.var("x", shape=(1, 64, 56, 56))
weight1 = relay.var("weight1")
y = relay.layout_transform(x, "NCHW", "NCHW16c")
y = relay.nn.conv2d(
y, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW16c"
)
ret = relay.layout_transform(y, "NCHW16c", "NCHW")
ret = relay.nn.pad(ret, pad_width=((0, 0), (1, 1), (1, 1), (1, 1)))
y = relay.Function(analysis.free_vars(ret), ret)
return y
with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d):
a = before()
a = run_opt_pass(a, transform.AlterOpLayout())
b = run_opt_pass(expected(), transform.InferType())
assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)
def test_alter_layout_pool():
"""Check NCHW, NHWC pool layout conversion"""
def alter_conv2d(attrs, inputs, tinfos, out_type):
data, weight = inputs
new_attrs = dict(attrs)
new_attrs["data_layout"] = "NCHW16c"
return relay.nn.conv2d(data, weight, **new_attrs)
# Check NCHW conversion.
def before_nchw():
x = relay.var("x", shape=(1, 64, 56, 56))
weight1 = relay.var("weight1")
y = relay.nn.conv2d(x, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1))
ret = relay.nn.avg_pool2d(y, pool_size=(1, 1))
y = relay.Function(analysis.free_vars(ret), ret)
return y
def expected_nchw():
x = relay.var("x", shape=(1, 64, 56, 56))
weight1 = relay.var("weight1")
y = relay.layout_transform(x, "NCHW", "NCHW16c")
y = relay.nn.conv2d(
y, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW16c"
)
ret = relay.nn.avg_pool2d(y, pool_size=(1, 1), layout="NCHW16c")
ret = relay.layout_transform(ret, "NCHW16c", "NCHW")
y = relay.Function(analysis.free_vars(ret), ret)
return y
with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d):
a = before_nchw()
a = run_opt_pass(a, transform.AlterOpLayout())
b = run_opt_pass(expected_nchw(), transform.InferType())
assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)
# Check NHWC conversion.
def before_nhwc():
x = relay.var("x", shape=(1, 56, 56, 64))
weight1 = relay.var("weight1")
y = relay.nn.conv2d(
x, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NHWC"
)
ret = relay.nn.avg_pool2d(y, pool_size=(1, 1), layout="NHWC")
y = relay.Function(analysis.free_vars(ret), ret)
return y
def expected_nhwc():
x = relay.var("x", shape=(1, 56, 56, 64))
weight1 = relay.var("weight1")
y = relay.layout_transform(x, "NHWC", "NCHW16c")
y = relay.nn.conv2d(
y, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW16c"
)
ret = relay.nn.avg_pool2d(y, pool_size=(1, 1), layout="NCHW16c")
ret = relay.layout_transform(ret, "NCHW16c", "NHWC")
y = relay.Function(analysis.free_vars(ret), ret)
return y
with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d):
a = before_nhwc()
a = run_opt_pass(a, transform.AlterOpLayout())
b = run_opt_pass(expected_nhwc(), transform.InferType())
assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)
def test_alter_layout_sum():
"""Check NCHW, NHWC sum layout conversion"""
def alter_conv2d(attrs, inputs, tinfos, out_type):
data, weight = inputs
new_attrs = dict(attrs)
new_attrs["data_layout"] = "NCHW16c"
return relay.nn.conv2d(data, weight, **new_attrs)
# Check NCHW conversion.
def before_nchw():
x = relay.var("x", shape=(1, 64, 56, 56))
weight1 = relay.var("weight1")
y = relay.nn.conv2d(x, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1))
ret = relay.sum(y, axis=1, keepdims=True)
y = relay.Function(analysis.free_vars(ret), ret)
return y
def expected_nchw():
x = relay.var("x", shape=(1, 64, 56, 56))
weight1 = relay.var("weight1")
y = relay.layout_transform(x, "NCHW", "NCHW16c")
y = relay.nn.conv2d(
y, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW16c"
)
ret = relay.sum(y, axis=[1, 4], keepdims=True)
ret = relay.layout_transform(ret, "NCHW1c", "NCHW")
y = relay.Function(analysis.free_vars(ret), ret)
return y
with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d):
a = before_nchw()
a = run_opt_pass(a, transform.AlterOpLayout())
b = run_opt_pass(expected_nchw(), transform.InferType())
assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)
# Check NHWC conversion.
def before_nhwc():
x = relay.var("x", shape=(1, 56, 56, 64))
weight1 = relay.var("weight1")
y = relay.nn.conv2d(
x, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NHWC"
)
ret = relay.sum(y, axis=3, keepdims=True)
y = relay.Function(analysis.free_vars(ret), ret)
return y
def expected_nhwc():
x = relay.var("x", shape=(1, 56, 56, 64))
weight1 = relay.var("weight1")
y = relay.layout_transform(x, "NHWC", "NCHW16c")
y = relay.nn.conv2d(
y, weight1, channels=32, kernel_size=(3, 3), padding=(1, 1), data_layout="NCHW16c"
)
ret = relay.sum(y, axis=[1, 4], keepdims=True)
ret = relay.layout_transform(ret, "NCHW1c", "NHWC")
y = relay.Function(analysis.free_vars(ret), ret)
return y
with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d):
a = before_nhwc()
a = run_opt_pass(a, transform.AlterOpLayout())
b = run_opt_pass(expected_nhwc(), transform.InferType())
assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)
def test_alter_layout_nhwc_arm():
"""Check that AlterOplayout does not alter NHWC data layout."""
def alter_conv2d(attrs, inputs, tinfos, out_type):
from tvm import topi
with tvm.target.Target("llvm -device=arm_cpu"):
return topi.nn.conv2d_alter_layout(attrs, inputs, tinfos, out_type)
# Check NHWC conversion.
def before_nhwc():
x = relay.var("x", shape=(1, 56, 56, 64))
weight1 = relay.var("weight1", shape=(3, 3, 64, 64))
weight2 = relay.var("weight2", shape=(3, 3, 64, 64))
y = relay.nn.conv2d(
x, weight1, channels=64, kernel_size=(3, 3), data_layout="NHWC", kernel_layout="HWIO"
)
y = relay.nn.relu(y)
y = relay.nn.avg_pool2d(y, pool_size=(1, 1), layout="NHWC")
y = relay.nn.conv2d(
y, weight2, channels=64, kernel_size=(3, 3), data_layout="NHWC", kernel_layout="HWIO"
)
y = relay.nn.relu(y)
y = relay.Function(analysis.free_vars(y), y)
return y
def expected_nhwc():
return before_nhwc()
with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d):
a = before_nhwc()
a = run_opt_pass(a, transform.AlterOpLayout())
b = run_opt_pass(expected_nhwc(), transform.InferType())
assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)
def test_alter_layout_nhwc_int8_aarch64():
"""Check that AlterOplayout does not alter NHWC data layout."""
from tvm import autotvm
expected_workload_shape = (20, 42, 4, 16)
# We use Int8Fallback to disable the fallback flag
# and to test the new workload produced during the pass
class Int8Fallback(autotvm.FallbackContext):
def _query_inside(self, target, workload):
key = (target, workload)
if key in self.memory:
return self.memory[key]
cfg = autotvm.task.space.FallbackConfigEntity()
cfg.is_fallback = False
cfg.cost = 0
self.memory[key] = cfg
return cfg
def update(self, target, workload, cfg):
key = (str(target), workload)
assert workload[2][1] == expected_workload_shape
assert workload[0] == "conv2d_NHWC_quantized_interleaved_without_transform.arm_cpu"
self.memory[key] = cfg
def alter_conv2d(attrs, inputs, tinfos, out_type):
from tvm import topi
with tvm.target.Target("llvm -device=arm_cpu -mtriple=aarch64-linux-gnu"):
with Int8Fallback():
tmp = topi.nn.conv2d_alter_layout(attrs, inputs, tinfos, out_type)
return tmp
# Check NHWC conversion.
def before_nhwc_int8():
x = relay.var("x", shape=(1, 56, 56, 73), dtype="int8")
weight = relay.var("weight1", shape=(3, 3, 73, 79), dtype="int8")
y = relay.nn.conv2d(
x,
weight,
channels=79,
kernel_size=(3, 3),
data_layout="NHWC",
kernel_layout="HWIO",
out_dtype="int32",
)
y = relay.Function(analysis.free_vars(y), y)
return y
def expected_nhwc_int8():
x = relay.var("x", shape=(1, 56, 56, 73), dtype="int8")
weight = relay.var("weight1", shape=(3, 3, 73, 79), dtype="int8")
tile_rows = 4
tile_cols = 16
weight_transformed = relay.nn.contrib_conv2d_gemm_weight_transform(
weight, tile_rows, tile_cols
)
y = relay.nn.contrib_conv2d_gemm_without_weight_transform(
x,
weight_transformed,
channels=79,
kernel_size=(3, 3),
data_layout="NHWC",
kernel_layout="HWIO",
out_dtype="int32",
)
y = relay.Function(analysis.free_vars(y), y)
return y
with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d):
a = before_nhwc_int8()
a = run_opt_pass(a, transform.AlterOpLayout())
b = run_opt_pass(expected_nhwc_int8(), transform.InferType())
assert tvm.ir.structural_equal(a, b), "Actual = \n" + str(a)
def test_alter_op_with_global_var():
"""Test directly replacing an operator with a new one"""
def before():
x = relay.var("x", shape=(1, 64, 56, 56))
weight = relay.var("weight", shape=(64, 64, 3, 3))
y = relay.nn.conv2d(x, weight, channels=64, kernel_size=(3, 3), padding=(1, 1))
y = relay.nn.relu(y)
mod = tvm.IRModule()
foo = relay.GlobalVar("foo")
mod[foo] = relay.Function([x, weight], y)
mod = transform.InferType()(mod)
mod["main"] = relay.Function([x, weight], foo(x, weight))
mod = transform.InferType()(mod)
return mod
def alter_conv2d(attrs, inputs, tinfos, out_type):
data, weight = inputs
weight = relay.multiply(weight, relay.const(2.0, "float32"))
return relay.nn.conv2d(data, weight, **attrs)
def expected():
x = relay.var("x", shape=(1, 64, 56, 56))
weight = relay.var("weight", shape=(64, 64, 3, 3))
y = relay.nn.conv2d(
x,
relay.multiply(weight, relay.const(2.0, "float32")),
channels=64,
kernel_size=(3, 3),
padding=(1, 1),
)
y = relay.nn.relu(y)
mod = tvm.IRModule()
foo = relay.GlobalVar("foo")
mod[foo] = relay.Function([x, weight], y)
mod = transform.InferType()(mod)
mod["main"] = relay.Function([x, weight], foo(x, weight))
return mod
with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d):
a = before()
a = transform.AlterOpLayout()(a)
b = transform.InferType()(expected())
assert tvm.ir.structural_equal(a, b, map_free_vars=True), "Actual = \n" + str(a)
def test_alter_op_dense():
def before():
x = relay.var("x", shape=(32, 1, 128))
weight = relay.var("weight", shape=(48, 64))
avg1d = relay.nn.adaptive_avg_pool1d(x, [64])
squeeze = relay.squeeze(avg1d, axis=[1])
y = relay.nn.dense(squeeze, weight)
y = relay.Function(analysis.free_vars(y), y)
return y
def expected():
x = relay.var("x", shape=(32, 1, 128))
weight = relay.var("weight", shape=(48, 64))
target_layout = "NC16n"
weight_transform = relay.layout_transform(weight, "NC", target_layout)
avg1d = relay.nn.adaptive_avg_pool1d(x, [64])
squeeze = relay.squeeze(avg1d, axis=[1])
y = relay.nn.contrib_dense_pack(
squeeze, weight_transform, target_layout, units=None, out_dtype="float32"
)
y = relay.Function(analysis.free_vars(y), y)
return y
target = "llvm -mcpu=core-avx2"
with tvm.target.Target(target):
with TempOpAttr(
"nn.dense", "FTVMAlterOpLayout", topi.x86.dense_alter_op._alter_dense_layout
):
a = before()
a = run_opt_pass(a, transform.AlterOpLayout())
b = run_opt_pass(expected(), transform.InferType())
assert tvm.ir.structural_equal(a, b)
def test_not_inplace_modify():
def func():
x = relay.var("x", shape=(1, 64, 56, 56))
weight = relay.var("weight", shape=(64, 64, 3, 3))
y = relay.nn.conv2d(x, weight, channels=64, kernel_size=(3, 3), padding=(1, 1))
y = relay.nn.relu(y)
y = relay.nn.max_pool2d(y, pool_size=[2, 2], strides=[2, 2], padding=[0, 0, 0, 0])
y = relay.Function([x, weight], y)
return y
def alter_conv2d(attrs, inputs, tinfos, out_type):
data, weight = inputs
new_attrs = dict(attrs)
new_attrs["data_layout"] = "NCHW16c"
new_attrs["kernel_layout"] = "OIHW16i"
return relay.nn.conv2d(data, weight, **new_attrs)
with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d):
before = func()
run_opt_pass(before, [transform.AlterOpLayout()])
assert before.body.attrs.layout == "NCHW"
def test_alter_op_dense_packed_data():
def before():
x = relay.var("x", shape=(1, 32, 8, 8))
weight = relay.var("conv2d_weight", shape=(32, 32, 3, 3))
conv = relay.nn.conv2d(x, weight, channels=32, kernel_size=(3, 3), padding=(1, 1))
pool = relay.nn.avg_pool2d(conv, pool_size=[8, 8], padding=[0, 0, 0, 0])
squeeze = relay.squeeze(pool, axis=[2, 3])
dense = relay.nn.dense(squeeze, relay.var("dense_weight", shape=(16, 32)))
return relay.Function(analysis.free_vars(dense), dense)
def expected():
x = relay.var("x", shape=(1, 32, 8, 8))
conv_weight = relay.var("conv2d_weight", shape=(32, 32, 3, 3))
dense_weight = relay.var("dense_weight", shape=(16, 32))
conv = relay.nn.contrib_conv2d_nchwc(
relay.layout_transform(x, "NCHW", "NCHW8c"),
relay.layout_transform(conv_weight, "OIHW", "OIHW8i8o"),
channels=32,
kernel_size=(3, 3),
padding=(1, 1),
data_layout="NCHW8c",
kernel_layout="OIHW8i8o",
out_layout="NCHW8c",
)
pool = relay.nn.avg_pool2d(conv, pool_size=[8, 8], padding=[0, 0, 0, 0], layout="NCHW8c")
squeeze = relay.squeeze(pool, axis=[2, 3])
dense = relay.nn.contrib_dense_pack(
relay.layout_transform(squeeze, "NC8c", "NC"),
relay.layout_transform(dense_weight, "NC", "NC16n"),
"NC16n",
out_dtype="float32",
)
return relay.Function(analysis.free_vars(dense), dense)
with tvm.target.Target("llvm -mcpu=core-avx2"):
with TempOpAttr(
"nn.dense", "FTVMAlterOpLayout", topi.x86.dense_alter_op._alter_dense_layout
):
a = run_opt_pass(before(), transform.AlterOpLayout())
b = run_opt_pass(expected(), transform.InferType())
assert tvm.ir.structural_equal(a, b)
def test_conv2d_strided_slice_packed_to_unpacked():
"""We do not support propagating through packed to unpacked layout"""
x_shape = (1, 1, 1, 1, 4)
w_shape = (9, 1, 3, 3, 4, 4)
def before():
x = relay.var("x", shape=x_shape)
weight = relay.var("weight", shape=w_shape)
y = relay.nn.conv2d(
x,
weight,
kernel_size=(3, 3),
padding=(1, 1),
data_layout="NCHW4c",
kernel_layout="OIHW4i4o",
)
y = relay.strided_slice(y, begin=[0, 0], end=[1, -1], strides=[1, 8])
return relay.Function([x, weight], y)
def expected():
x = relay.var("x", shape=x_shape)
weight = relay.var("weight", shape=w_shape)
x_nchw = relay.layout_transform(x, src_layout="NCHW4c", dst_layout="NCHW")
weight_oihw = relay.layout_transform(weight, src_layout="OIHW4i4o", dst_layout="OIHW")
y = relay.nn.conv2d(
x_nchw,
weight_oihw,
kernel_size=(3, 3),
padding=(1, 1),
data_layout="NCHW",
kernel_layout="OIHW",
)
y = relay.layout_transform(y, src_layout="NCHW", dst_layout="NCHW4c")
y = relay.strided_slice(y, begin=[0, 0], end=[1, -1], strides=[1, 8])
return relay.Function([x, weight], y)
def alter_conv2d(attrs, inputs, tinfos, out_type):
data, weight = inputs
new_attrs = dict(attrs)
new_attrs["data_layout"] = "NCHW"
new_attrs["kernel_layout"] = "OIHW"
return relay.nn.conv2d(data, weight, **new_attrs)
with TempOpAttr("nn.conv2d", "FTVMAlterOpLayout", alter_conv2d):
a = run_opt_pass(before(), transform.AlterOpLayout())
b = run_opt_pass(expected(), transform.InferType())
assert tvm.ir.structural_equal(a, b)
def test_conv2d_reduce_channels():
x = relay.var("data", shape=(1, 8, 48, 48))
y = relay.nn.conv2d(
data=x,
weight=relay.var("weight"),
kernel_size=(1, 1),
channels=8,
dilation=1,
strides=(47, 47),
)
z = relay.argmin(y, axis=1)
mod, params = testing.create_workload(z)
with tvm.transform.PassContext(opt_level=3):
relay.build(mod, params=params, target="llvm")
def test_axis_semantic_change():
x = relay.var("x", shape=(1, 1, 24, 48))
w1 = relay.const(np.random.uniform(size=(1, 1, 1, 1)))
w2 = relay.const(np.random.uniform(size=(1, 1, 1, 1)))
y = relay.nn.conv2d(x, w1, kernel_size=(1, 1), padding=(0, 0), channels=1)
y = relay.transpose(y, (0, 1, 3, 2))
z = relay.nn.conv2d(y, w2, kernel_size=(1, 1), padding=(0, 0), channels=1)
func = relay.Function([x], z)
mod = tvm.IRModule.from_expr(func)
with tvm.transform.PassContext(opt_level=3):
relay.build(mod, target="llvm")
if __name__ == "__main__":
pytest.main([__file__])
|