    return (str1[(l1-l2):] == str2)

##
class TestVTKFiles:
    def __init__(self):
        self.FileName = ""
        self.ErrorValue = 0; self.Errors = {}
        self.WarningValue = 0; self.Warnings = {}
        self.FileLines = []
        self.Export = ""
        self.UnnecessaryIncludes = [
            "stdio.h",
            "stdlib.h",
            "string.h",
            "iostream",
            "iostream.h",
            "strstream",
            "strstream.h",
            "fstream",
            "fstream.h",
            "windows.h"
            ]
        pass

    def SetExport(self, export):
        self.Export = export

    def Print(self, text=""):
        rtext = text
        if test_from_dart:
            rtext = string.replace(rtext, "<", "&lt;")
            rtext = string.replace(rtext, ">", "&gt;")
        print rtext

    def Error(self, error):
        self.ErrorValue = 1
        self.Errors[error] = 1
        pass

    def Warning(self, warning):
        self.WarningValue = 1
        self.Warnings[warning] = 1
        pass

    def PrintErrors(self):
        if self.ErrorValue:
            self.Print()
            self.Print("There were errors:")
            for a in self.Errors.keys():
                self.Print("* %s" % a)

    def PrintWarnings(self):
        if self.WarningValue:
            self.Print()
            self.Print("There were warnings:")
            for a in self.Warnings.keys():
                self.Print("* %s" % a)

    def TestFile(self, filename):
        self.FileName = filename
        self.FileLines = []
        self.ClassName = ""
        self.ParentName = ""
        try:
            file = open(filename)
            self.FileLines = file.readlines()
            file.close()
        except:
            self.Print("Problem reading file: %s" % filename)
            sys.exit(1)
        pass

    def CheckIncludes(self):
        count = 0
        lines = []
        nplines = []
        unlines = []
        includere = "^\s*#\s*include\s*[\"<]([^>\"]+)"
        ignincludere = ".*\/\/.*"
        regx = re.compile(includere)
        regx1 = re.compile(ignincludere)
        cc = 0
        includeparent = 0
        for a in self.FileLines:
            line = string.strip(a)
            rm = regx.match(line)
            if rm and not regx1.match(line):
                lines.append(" %4d: %s" % (cc, line))
                file = rm.group(1)
                if file == (self.ParentName + ".h"):
                    includeparent = 1
                if not StringEndsWith(file, ".h"):
                    nplines.append(" %4d: %s" % (cc, line))
                if file in self.UnnecessaryIncludes:
                    unlines.append(" %4d: %s" % (cc, line))
            cc = cc + 1
        if len(lines) > 1:
            self.Print()
            self.Print("File: %s has %d includes: " % (self.FileName, len(lines)))
            for a in lines:
                self.Print(a)
            self.Error("Multiple includes")
        if len(nplines) > 0:
            self.Print()
            self.Print("File: %s has non-portable include(s): " % self.FileName)
            for a in nplines:
                self.Print(a)
            self.Error("Non-portable includes")
        if len(unlines) > 0:
            self.Print()
            self.Print("File: %s has unnecessary include(s): " % self.FileName)
            for a in unlines:
                self.Print(a)
            self.Error("Unnecessary includes")
        if not includeparent and self.ParentName:
            self.Print()
            self.Print("File: %s does not include parent \"%s.h\"" % (self.FileName, self.ParentName))
            self.Error("Does not include parent")
        pass

    def CheckParent(self):
        classre = "^class\s*(.*_EXPORT|\s*) (vtk[A-Z0-9_][^ :\n]*)\s*:\s*public\s*(vtk[^ \n\{]*)"
        cname = ""
        pname = ""
        classlines = []
        regx = re.compile(classre)
        cc = 0
        lastline = ""
        for a in self.FileLines:
            line = string.strip(a)
            rm = regx.match(line)
            if not rm and not cname:
                rm = regx.match(lastline + line)
            if rm:
                export = rm.group(1)
                export = string.strip(export)
                cname = rm.group(2)
                pname = rm.group(3)
                classlines.append(" %4d: %s" % (cc, line))
                if not export:
                    self.Print("File: %s defines 1 class with no export macro:" % self.FileName)
                    self.Print(" %4d: %s" % (cc, line))
                    self.Error("No export macro")
                elif self.Export and self.Export != export:
                    self.Print("File: %s defines 1 class with wrong export macro:" % self.FileName)
                    self.Print(" %4d: %s" % (cc, line))
                    self.Print(" The export macro should be: %s" % (self.Export))
                    self.Error("Wrong export macro")
            cc = cc + 1
            lastline = a
        if len(classlines) > 1:
            self.Print()
            self.Print("File: %s defines %d classes: " % (self.FileName, len(classlines)))
            for a in classlines:
                self.Print(a)
            self.Error("Multiple classes defined")
        if len(classlines) < 1:
            self.Print()
            self.Print("File: %s does not define any classes" % self.FileName)
            self.Error("No class defined")
            return
        #self.Print( "Classname: %s ParentName: %s" % (cname, pname)
        self.ClassName = cname
        self.ParentName = pname
        pass

    def CheckTypeMacro(self):
        count = 0
        lines = []
        oldlines = []
        typere = "^\s*vtkType(Revision)*Macro\s*\(\s*(vtk[^ ,]+)\s*,\s*(vtk[^ \)]+)\s*\)\s*;"
        typesplitre = "^\s*vtkType(Revision)*Macro\s*\("
        regx = re.compile(typere)
        regxs = re.compile(typesplitre)
        cc = 0
        found = 0
        for a in range(len(self.FileLines)):
            line = string.strip(self.FileLines[a])
            rm = regx.match(line)
            if rm:
                found = 1
                if rm.group(1) == "Revision":
                    oldlines.append(" %4d: %s" % (cc, line))
                cname = rm.group(2)
                pname = rm.group(3)
                if cname != self.ClassName or pname != self.ParentName:
                    lines.append(" %4d: %s" % (cc, line))
            else:
                # Maybe it is in two lines
                rm = regxs.match(line)
                if rm:
                    nline = line + " " + string.strip(self.FileLines[a+1])
                    line = string.strip(nline)
                    rm = regx.match(line)
                    if rm:
                        found = 1
                        if rm.group(1) == "Revision":
                            oldlines.append(" %4d: %s" % (cc, line))
                        cname = rm.group(2)
                        pname = rm.group(3)
                        if cname != self.ClassName or pname != self.ParentName:
                            lines.append(" %4d: %s" % (cc, line))
            cc = cc + 1
        if len(lines) > 0:
            self.Print("File: %s has broken type macro(s):" % self.FileName)
            for a in lines:
                self.Print(a)
            self.Print("Should be:\n vtkTypeMacro(%s, %s)" % (self.ClassName, self.ParentName))
            self.Error("Broken type macro")
        if len(oldlines) > 0:
            self.Print("File: %s has legacy type-revision macro(s):" % self.FileName)
            for a in oldlines:
                self.Print(a)
            self.Print("Should be:\n vtkTypeMacro(%s, %s);" % (self.ClassName, self.ParentName))
            self.Error("Legacy style type-revision macro")
        if not found:
            self.Print("File: %s does not have type macro" % self.FileName)
            self.Print("Should be:\n vtkTypeMacro(%s, %s);" % (self.ClassName, self.ParentName))
            self.Error("No type macro")
        pass

    def CheckForCopyAndAssignment(self):
        if not self.ClassName:
            return
        count = 0
        lines = []
        oldlines = []
        copyoperator = "^\s*%s\s*\(\s*const\s*%s\s*&\s*\)\s*;\s*\/\/\s*Not\s*[iI]mplemented(\.)*" % (self.ClassName, self.ClassName)
        asgnoperator = "^\s*void\s*operator\s*=\s*\(\s*const\s*%s\s*&\s*\)\s*;\s*\/\/\s*Not\s*[iI]mplemented(\.)*" % self.ClassName
        #self.Print( copyoperator
        regx1 = re.compile(copyoperator)
        regx2 = re.compile(asgnoperator)
        foundcopy = 0
        foundasgn = 0
        for a in self.FileLines:
            line = string.strip(a)
            if regx1.match(line):
                foundcopy = foundcopy + 1
            if regx2.match(line):
                foundasgn = foundasgn + 1
        lastline = ""
        if foundcopy < 1:
            for a in self.FileLines:
                line = string.strip(a)
                if regx1.match(lastline + line):
                    foundcopy = foundcopy + 1
                lastline = a
        lastline = ""
        if foundasgn < 1:
            for a in self.FileLines:
                line = string.strip(a)
                if regx2.match(lastline + line):
                    foundasgn = foundasgn + 1
                lastline = a
        if foundcopy < 1:
            self.Print("File: %s does not define copy constructor" % self.FileName)
            self.Print("Should be:\n%s(const %s&); // Not implemented" % (self.ClassName, self.ClassName))
            self.Error("No private copy constructor")
        if foundcopy > 1:
            self.Print("File: %s defines multiple copy constructors" % self.FileName)
            self.Error("Multiple copy constructor")
        if foundasgn < 1:
            self.Print("File: %s does not define assignment operator" % self.FileName)
            self.Print("Should be:\nvoid operator=(const %s&); // Not implemented" % self.ClassName)
            self.Error("No private assignment operator")
        if foundasgn > 1:
            self.Print("File: %s defines multiple assignment operators" % self.FileName)
            self.Error("Multiple assignment operators")
        pass

    def CheckWeirdConstructors(self):
        count = 0
        lines = []
        oldlines = []
        constructor = "^\s*%s\s*\(([^ )]*)\)" % self.ClassName
        copyoperator = "^\s*%s\s*\(\s*const\s*%s\s*&\s*\)\s*;\s*\/\/\s*Not\s*implemented(\.)*" % (self.ClassName, self.ClassName)
        regx1 = re.compile(constructor)
        regx2 = re.compile(copyoperator)
        cc = 0
        for a in self.FileLines:
            line = string.strip(a)
            rm = regx1.match(line)
            if rm:
                arg = string.strip(rm.group(1))
                if arg and not regx2.match(line):
                    lines.append(" %4d: %s" % (cc, line))
            cc = cc + 1
        if len(lines) > 0:
            self.Print("File: %s has weird constructor(s):" % self.FileName)
            for a in lines:
                self.Print(a)
            self.Print("There should be only:\n %s();" % self.ClassName)
            self.Error("Weird constructor")
        pass

    def CheckPrintSelf(self):
        if not self.ClassName:
            return
        typere = "^\s*void\s*PrintSelf\s*\(\s*ostream\s*&\s*os*\s*,\s*vtkIndent\s*indent\s*\)"
        newtypere = "^\s*virtual\s*void\s*PrintSelf\s*\(\s*ostream\s*&\s*os*\s*,\s*vtkIndent\s*indent\s*\)"
        regx1 = re.compile(typere)
        regx2 = re.compile(newtypere)
        found = 0
        oldstyle = 0
        for a in self.FileLines:
            line = string.strip(a)
            rm1 = regx1.match(line)
            rm2 = regx2.match(line)
            if rm1 or rm2:
                found = 1
                if rm1:
                    oldstyle = 1
        if not found:
            self.Print("File: %s does not define PrintSelf method:" % self.FileName)
            self.Warning("No PrintSelf method")
        pass

    def CheckWindowsMangling(self):
        lines = []
        regx1 = WindowsMangleRegEx
        regx2 = re.compile("^.*VTK_LEGACY.*$")
        # This version will leave out comment lines but we probably do
        # not want to refer to mangled (hopefully deprecated) methods
        # in comments.
        # regx2 = re.compile("^(\s*//|\s*\*|.*VTK_LEGACY).*$")
        cc = 1
        for a in self.FileLines:
            line = string.strip(a)
            rm = regx1.match(line)
            if rm:
                arg = string.strip(rm.group(1))
                if arg and not regx2.match(line):
                    lines.append(" %4d: %s" % (cc, line))
            cc = cc + 1
        if len(lines) > 0:
            self.Print("File: %s has windows.h mangling violations:" % self.FileName)
            for a in lines:
                self.Print(a)
            self.Error("Windows Mangling Violation - choose another name that does not conflict.")
        pass

##
test = TestVTKFiles()

## Check command line arguments
if len(sys.argv) < 2:
    print "Testing directory not specified..."
    print "Usage: %s <directory> [ exception(s) ]" % sys.argv[0]
    sys.exit(1)
dirname = sys.argv[1]
exceptions = sys.argv[2:]

if len(sys.argv) > 2:
    export = sys.argv[2]
    if export[:3] == "VTK" and export[len(export)-len("EXPORT"):] == "EXPORT":
        print "Use export macro: %s" % export
        exceptions = sys.argv[3:]
        test.SetExport(export)

## Traverse through the list of files
for a in os.listdir(dirname):
    ## Skip non-header files
    if not StringEndsWith(a, ".h"):
        continue
    ## Skip exceptions
    if a in exceptions:
        continue
    pathname = '%s/%s' % (dirname, a)
    if pathname in exceptions:
        continue
    mode = os.stat(pathname)[stat.ST_MODE]
    ## Skip directories
    if stat.S_ISDIR(mode):
        continue
    elif stat.S_ISREG(mode):
        ## Do all the tests
        test.TestFile(pathname)
        test.CheckParent()
        test.CheckIncludes()
        test.CheckTypeMacro()
        test.CheckForCopyAndAssignment()
        test.CheckWeirdConstructors()
        test.CheckPrintSelf()
        test.CheckWindowsMangling()

## Summarize errors
test.PrintWarnings()
test.PrintErrors()
sys.exit(test.ErrorValue)

#!/usr/bin/env python
# encoding: utf-8

################################################################################
#
# RMG - Reaction Mechanism Generator
#
# Copyright (c) 2002-2017 Prof. William H. Green (whgreen@mit.edu),
# Prof. Richard H. West (r.west@neu.edu) and the RMG Team (rmg_dev@mit.edu)
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the 'Software'),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
################################################################################

"""
This script contains unit tests of the :mod:`rmgpy.thermo.thermodata` module.
"""

import unittest
import numpy

from rmgpy.thermo.thermodata import ThermoData
import rmgpy.constants as constants

################################################################################

class TestThermoData(unittest.TestCase):
    """
    Contains unit tests of the :class:`ThermoData` class.
    """

    def setUp(self):
        """
        A function run before each unit test in this class.
        """
        self.H298 = -32.9725
        self.S298 = 27.5727
        self.Tdata = numpy.array([300,400,500,600,800,1000,1500])
        self.Cpdata = numpy.array([6.3827,7.80327,9.22175,10.5528,12.8323,14.6013,17.4089])
        self.Cp0 = 4.0
        self.CpInf = 21.5
        self.Tmin = 100.
        self.Tmax = 3000.
        self.E0 = -782292.
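        # The conventions here match the ThermoData construction below: Cpdata, Cp0, CpInf and
        # S298 are expressed in units of R, H298 in units of R*298 K, temperatures in K and E0
        # in J/mol; the R-scaled quantities are converted to SI units via constants.R below.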
self.comment = 'C2H6' self.thermodata = ThermoData( Tdata = (self.Tdata,"K"), Cpdata = (self.Cpdata*constants.R,"J/(mol*K)"), H298 = (self.H298*0.001*constants.R*298.,"kJ/mol"), S298 = (self.S298*constants.R,"J/(mol*K)"), Cp0 = (self.Cp0*constants.R,"J/(mol*K)"), CpInf = (self.CpInf*constants.R,"J/(mol*K)"), Tmin = (self.Tmin,"K"), Tmax = (self.Tmax,"K"), E0 = (self.E0,'J/mol'), comment = self.comment, ) def test_Tdata(self): """ Test that the ThermoData Tdata property was properly set. """ self.assertEqual(self.thermodata.Tdata.value_si.shape, self.Tdata.shape) for T, T0 in zip(self.thermodata.Tdata.value_si, self.Tdata): self.assertAlmostEqual(T, T0, 4) def test_Cpdata(self): """ Test that the ThermoData Cpdata property was properly set. """ self.assertEqual(self.thermodata.Cpdata.value_si.shape, self.Cpdata.shape) for Cp, Cp0 in zip(self.thermodata.Cpdata.value_si / constants.R, self.Cpdata): self.assertAlmostEqual(Cp, Cp0, 4) def test_H298(self): """ Test that the ThermoData H298 property was properly set. """ self.assertAlmostEqual(self.thermodata.H298.value_si / constants.R / 298., self.H298, 4) def test_S298(self): """ Test that the ThermoData S298 property was properly set. """ self.assertAlmostEqual(self.thermodata.S298.value_si / constants.R, self.S298, 4) def test_Cp0(self): """ Test that the ThermoData Cp0 property was properly set. """ self.assertAlmostEqual(self.thermodata.Cp0.value_si / constants.R, self.Cp0, 4) def test_CpInf(self): """ Test that the ThermoData CpInf property was properly set. """ self.assertAlmostEqual(self.thermodata.CpInf.value_si / constants.R, self.CpInf, 4) def test_Tmin(self): """ Test that the ThermoData Tmin property was properly set. """ self.assertAlmostEqual(self.thermodata.Tmin.value_si, self.Tmin, 6) def test_Tmax(self): """ Test that the ThermoData Tmax property was properly set. """ self.assertAlmostEqual(self.thermodata.Tmax.value_si, self.Tmax, 6) def test_E0(self): """ Test that the ThermoData E0 property was properly set. """ self.assertAlmostEqual(self.thermodata.E0.value_si, self.E0, 6) def test_Comment(self): """ Test that the ThermoData comment property was properly set. """ self.assertEqual(self.thermodata.comment, self.comment) def test_isTemperatureValid(self): """ Test the ThermoData.isTemperatureValid() method. """ Tdata = [200,400,600,800,1000,1200,1400,1600,1800,2000] validdata = [True,True,True,True,True,True,True,True,True,True] for T, valid in zip(Tdata, validdata): valid0 = self.thermodata.isTemperatureValid(T) self.assertEqual(valid0, valid) def test_getHeatCapacity(self): """ Test the ThermoData.getHeatCapacity() method. """ Tlist = numpy.array([200,400,600,800,1000,1200,1400,1600,1800,2000]) Cpexplist = numpy.array([4.96208, 7.80327, 10.5528, 12.8323, 14.6013, 15.7243, 16.8473, 17.9704, 19.0934, 20.2165]) * constants.R for T, Cpexp in zip(Tlist, Cpexplist): Cpact = self.thermodata.getHeatCapacity(T) self.assertAlmostEqual(Cpexp, Cpact, 2) def test_getEnthalpy(self): """ Test the ThermoData.getEnthalpy() method. """ Tlist = numpy.array([200,400,600,800,1000,1200,1400,1600,1800,2000]) Hexplist = numpy.array([-51.9015, -22.7594, -12.1063, -6.15660, -2.18192, 0.708869, 2.93415, 4.74350, 6.27555, 7.61349]) * constants.R * Tlist for T, Hexp in zip(Tlist, Hexplist): Hact = self.thermodata.getEnthalpy(T) self.assertAlmostEqual(Hexp, Hact, delta=1e0) def test_getEntropy(self): """ Test the ThermoData.getEntropy() method. 
""" Tlist = numpy.array([200,400,600,800,1000,1200,1400,1600,1800,2000]) Sexplist = numpy.array([25.3347, 29.6460, 33.3386, 36.6867, 39.7402, 42.5016, 45.0098, 47.3328, 49.5142, 51.5841]) * constants.R for T, Sexp in zip(Tlist, Sexplist): Sact = self.thermodata.getEntropy(T) self.assertAlmostEqual(Sexp, Sact, 3) def test_getFreeEnergy(self): """ Test the ThermoData.getFreeEnergy() method. """ Tlist = numpy.array([200,400,600,800,1000,1200,1400,1600,1800,2000]) for T in Tlist: Gexp = self.thermodata.getEnthalpy(T) - T * self.thermodata.getEntropy(T) Gact = self.thermodata.getFreeEnergy(T) self.assertAlmostEqual(Gexp, Gact, 3) def test_pickle(self): """ Test that a ThermoData object can be successfully pickled and unpickled with no loss of information. """ import cPickle thermodata = cPickle.loads(cPickle.dumps(self.thermodata)) self.assertEqual(self.thermodata.Tdata.value.shape, thermodata.Tdata.value.shape) for T, T0 in zip(self.thermodata.Tdata.value, thermodata.Tdata.value): self.assertAlmostEqual(T, T0, 4) self.assertEqual(self.thermodata.Tdata.units, thermodata.Tdata.units) self.assertEqual(self.thermodata.Cpdata.value.shape, thermodata.Cpdata.value.shape) for Cp, Cp0 in zip(self.thermodata.Cpdata.value, thermodata.Cpdata.value): self.assertAlmostEqual(Cp, Cp0, 3) self.assertEqual(self.thermodata.Cpdata.units, thermodata.Cpdata.units) self.assertAlmostEqual(self.thermodata.H298.value, thermodata.H298.value, 4) self.assertEqual(self.thermodata.H298.units, thermodata.H298.units) self.assertAlmostEqual(self.thermodata.S298.value, thermodata.S298.value, 2) self.assertEqual(self.thermodata.S298.units, thermodata.S298.units) self.assertAlmostEqual(self.thermodata.Cp0.value, thermodata.Cp0.value, 4) self.assertEqual(self.thermodata.Cp0.units, thermodata.Cp0.units) self.assertAlmostEqual(self.thermodata.CpInf.value, thermodata.CpInf.value, 3) self.assertEqual(self.thermodata.CpInf.units, thermodata.CpInf.units) self.assertAlmostEqual(self.thermodata.Tmin.value, thermodata.Tmin.value, 4) self.assertEqual(self.thermodata.Tmin.units, thermodata.Tmin.units) self.assertAlmostEqual(self.thermodata.Tmax.value, thermodata.Tmax.value, 4) self.assertEqual(self.thermodata.Tmax.units, thermodata.Tmax.units) self.assertAlmostEqual(self.thermodata.E0.value, thermodata.E0.value, 4) self.assertEqual(self.thermodata.E0.units, thermodata.E0.units) self.assertEqual(self.thermodata.label,thermodata.label) self.assertEqual(self.thermodata.comment, thermodata.comment) def test_repr(self): """ Test that a ThermoData object can be successfully reconstructed from its repr() output with no loss of information. 
""" thermodata = None exec('thermodata = {0!r}'.format(self.thermodata)) self.assertEqual(self.thermodata.Tdata.value.shape, thermodata.Tdata.value.shape) for T, T0 in zip(self.thermodata.Tdata.value, thermodata.Tdata.value): self.assertAlmostEqual(T, T0, 4) self.assertEqual(self.thermodata.Tdata.units, thermodata.Tdata.units) self.assertEqual(self.thermodata.Cpdata.value.shape, thermodata.Cpdata.value.shape) for Cp, Cp0 in zip(self.thermodata.Cpdata.value, thermodata.Cpdata.value): self.assertAlmostEqual(Cp, Cp0, 3) self.assertEqual(self.thermodata.Cpdata.units, thermodata.Cpdata.units) self.assertAlmostEqual(self.thermodata.H298.value, thermodata.H298.value, 4) self.assertEqual(self.thermodata.H298.units, thermodata.H298.units) self.assertAlmostEqual(self.thermodata.S298.value, thermodata.S298.value, 2) self.assertEqual(self.thermodata.S298.units, thermodata.S298.units) self.assertAlmostEqual(self.thermodata.Cp0.value, thermodata.Cp0.value, 4) self.assertEqual(self.thermodata.Cp0.units, thermodata.Cp0.units) self.assertAlmostEqual(self.thermodata.CpInf.value, thermodata.CpInf.value, 3) self.assertEqual(self.thermodata.CpInf.units, thermodata.CpInf.units) self.assertAlmostEqual(self.thermodata.Tmin.value, thermodata.Tmin.value, 4) self.assertEqual(self.thermodata.Tmin.units, thermodata.Tmin.units) self.assertAlmostEqual(self.thermodata.Tmax.value, thermodata.Tmax.value, 4) self.assertEqual(self.thermodata.Tmax.units, thermodata.Tmax.units) self.assertAlmostEqual(self.thermodata.E0.value, thermodata.E0.value, 4) self.assertEqual(self.thermodata.E0.units, thermodata.E0.units) self.assertEqual(self.thermodata.label, thermodata.label) self.assertEqual(self.thermodata.comment, thermodata.comment) # (c) 2016 Red Hat Inc. # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see . 
# Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type import json from units.compat.mock import patch from ansible.modules.network.voss import voss_command from units.modules.utils import set_module_args from .voss_module import TestVossModule, load_fixture class TestVossCommandModule(TestVossModule): module = voss_command def setUp(self): super(TestVossCommandModule, self).setUp() self.mock_run_commands = patch('ansible.modules.network.voss.voss_command.run_commands') self.run_commands = self.mock_run_commands.start() def tearDown(self): super(TestVossCommandModule, self).tearDown() self.mock_run_commands.stop() def load_fixtures(self, commands=None): def load_from_file(*args, **kwargs): module, commands = args output = list() for item in commands: try: obj = json.loads(item['command']) command = obj['command'] except ValueError: command = item['command'] filename = str(command).replace(' ', '_') output.append(load_fixture(filename)) return output self.run_commands.side_effect = load_from_file def test_voss_command_simple(self): set_module_args(dict(commands=['show sys-info'])) result = self.execute_module() self.assertEqual(len(result['stdout']), 1) self.assertTrue(result['stdout'][0].startswith('General Info')) def test_voss_command_multiple(self): set_module_args(dict(commands=['show sys-info', 'show sys-info'])) result = self.execute_module() self.assertEqual(len(result['stdout']), 2) self.assertTrue(result['stdout'][0].startswith('General Info')) def test_voss_command_wait_for(self): wait_for = 'result[0] contains "General Info"' set_module_args(dict(commands=['show sys-info'], wait_for=wait_for)) self.execute_module() def test_voss_command_wait_for_fails(self): wait_for = 'result[0] contains "test string"' set_module_args(dict(commands=['show sys-info'], wait_for=wait_for)) self.execute_module(failed=True) self.assertEqual(self.run_commands.call_count, 10) def test_voss_command_retries(self): wait_for = 'result[0] contains "test string"' set_module_args(dict(commands=['show sys-info'], wait_for=wait_for, retries=2)) self.execute_module(failed=True) self.assertEqual(self.run_commands.call_count, 2) def test_voss_command_match_any(self): wait_for = ['result[0] contains "General Info"', 'result[0] contains "test string"'] set_module_args(dict(commands=['show sys-info'], wait_for=wait_for, match='any')) self.execute_module() def test_voss_command_match_all(self): wait_for = ['result[0] contains "General Info"', 'result[0] contains "Chassis Info"'] set_module_args(dict(commands=['show sys-info'], wait_for=wait_for, match='all')) self.execute_module() def test_voss_command_match_all_failure(self): wait_for = ['result[0] contains "General Info"', 'result[0] contains "test string"'] commands = ['show sys-info', 'show sys-info'] set_module_args(dict(commands=commands, wait_for=wait_for, match='all')) self.execute_module(failed=True) def test_voss_command_configure_error(self): commands = ['configure terminal'] set_module_args({ 'commands': commands, '_ansible_check_mode': True, }) result = self.execute_module(failed=True) self.assertEqual( result['msg'], 'voss_command does not support running config mode commands. Please use voss_config instead' ) """Check that instantiating a class with `abc.ABCMeta` as metaclass fails if it defines abstract methods. 
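At runtime, instantiating any of the Bad* classes below raises TypeError because they
still have unimplemented abstract methods; the `# [abstract-class-instantiated]` comments
in main() mark where pylint is expected to report this.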
""" # pylint: disable=too-few-public-methods, missing-docstring # pylint: disable=abstract-class-not-used, abstract-class-little-used # pylint: disable=abstract-method __revision__ = 0 import abc class GoodClass(object, metaclass=abc.ABCMeta): pass class SecondGoodClass(object, metaclass=abc.ABCMeta): def test(self): """ do nothing. """ class ThirdGoodClass(object, metaclass=abc.ABCMeta): """ This should not raise the warning. """ def test(self): raise NotImplementedError() class BadClass(object, metaclass=abc.ABCMeta): @abc.abstractmethod def test(self): """ do nothing. """ class SecondBadClass(object, metaclass=abc.ABCMeta): @property @abc.abstractmethod def test(self): """ do nothing. """ class ThirdBadClass(SecondBadClass): pass def main(): """ do nothing """ GoodClass() SecondGoodClass() ThirdGoodClass() BadClass() # [abstract-class-instantiated] SecondBadClass() # [abstract-class-instantiated] ThirdBadClass() # [abstract-class-instantiated] #!/usr/bin/python # Copyright: (c) Vincent Van de Kussen # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: rhn_channel short_description: Adds or removes Red Hat software channels description: - Adds or removes Red Hat software channels. version_added: "1.1" author: - Vincent Van der Kussen (@vincentvdk) notes: - This module fetches the system id from RHN. - This module doesn't support I(check_mode). options: name: description: - Name of the software channel. required: true sysname: description: - Name of the system as it is known in RHN/Satellite. required: true state: description: - Whether the channel should be present or not, taking action if the state is different from what is stated. default: present url: description: - The full URL to the RHN/Satellite API. required: true user: description: - RHN/Satellite login. required: true password: description: - RHN/Satellite password. 
required: true ''' EXAMPLES = ''' - rhn_channel: name: rhel-x86_64-server-v2vwin-6 sysname: server01 url: https://rhn.redhat.com/rpc/api user: rhnuser password: guessme delegate_to: localhost ''' from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.six.moves import xmlrpc_client def get_systemid(client, session, sysname): systems = client.system.listUserSystems(session) for system in systems: if system.get('name') == sysname: idres = system.get('id') idd = int(idres) return idd def subscribe_channels(channelname, client, session, sysname, sys_id): channels = base_channels(client, session, sys_id) channels.append(channelname) return client.system.setChildChannels(session, sys_id, channels) def unsubscribe_channels(channelname, client, session, sysname, sys_id): channels = base_channels(client, session, sys_id) channels.remove(channelname) return client.system.setChildChannels(session, sys_id, channels) def base_channels(client, session, sys_id): basechan = client.channel.software.listSystemChannels(session, sys_id) try: chans = [item['label'] for item in basechan] except KeyError: chans = [item['channel_label'] for item in basechan] return chans def main(): module = AnsibleModule( argument_spec=dict( state=dict(type='str', default='present', choices=['present', 'absent']), name=dict(type='str', required=True), sysname=dict(type='str', required=True), url=dict(type='str', required=True), user=dict(type='str', required=True), password=dict(type='str', required=True, aliases=['pwd'], no_log=True), ) ) state = module.params['state'] channelname = module.params['name'] systname = module.params['sysname'] saturl = module.params['url'] user = module.params['user'] password = module.params['password'] # initialize connection client = xmlrpc_client.Server(saturl) session = client.auth.login(user, password) # get systemid sys_id = get_systemid(client, session, systname) # get channels for system chans = base_channels(client, session, sys_id) try: if state == 'present': if channelname in chans: module.exit_json(changed=False, msg="Channel %s already exists" % channelname) else: subscribe_channels(channelname, client, session, systname, sys_id) module.exit_json(changed=True, msg="Channel %s added" % channelname) if state == 'absent': if channelname not in chans: module.exit_json(changed=False, msg="Not subscribed to channel %s." % channelname) else: unsubscribe_channels(channelname, client, session, systname, sys_id) module.exit_json(changed=True, msg="Channel %s removed" % channelname) finally: client.auth.logout(session) if __name__ == '__main__': main() """ ================= Drop Shadow Frame ================= A widget providing a drop shadow (gaussian blur effect) around another widget. 
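
For illustration, a minimal usage sketch (the target widget name is arbitrary)::

    frame = DropShadowFrame(radius=10)
    frame.setWidget(target_widget)  # the shadow now tracks target_widget's geometry and visibility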
""" from PyQt4.QtGui import ( QWidget, QPainter, QPixmap, QGraphicsScene, QGraphicsRectItem, QGraphicsDropShadowEffect, QColor, QPen, QPalette, QStyleOption, QAbstractScrollArea, QToolBar, QRegion ) from PyQt4.QtCore import ( Qt, QPoint, QPointF, QRect, QRectF, QSize, QSizeF, QEvent ) from PyQt4.QtCore import pyqtProperty as Property CACHED_SHADOW_RECT_SIZE = (50, 50) def render_drop_shadow_frame(pixmap, shadow_rect, shadow_color, offset, radius, rect_fill_color): pixmap.fill(QColor(0, 0, 0, 0)) scene = QGraphicsScene() rect = QGraphicsRectItem(shadow_rect) rect.setBrush(QColor(rect_fill_color)) rect.setPen(QPen(Qt.NoPen)) scene.addItem(rect) effect = QGraphicsDropShadowEffect(color=shadow_color, blurRadius=radius, offset=offset) rect.setGraphicsEffect(effect) scene.setSceneRect(QRectF(QPointF(0, 0), QSizeF(pixmap.size()))) painter = QPainter(pixmap) scene.render(painter) painter.end() scene.clear() scene.deleteLater() return pixmap class DropShadowFrame(QWidget): """ A widget drawing a drop shadow effect around the geometry of another widget (works similar to :class:`QFocusFrame`). Parameters ---------- parent : :class:`QObject` Parent object. color : :class:`QColor` The color of the drop shadow. radius : float Shadow radius. """ def __init__(self, parent=None, color=None, radius=5, **kwargs): QWidget.__init__(self, parent, **kwargs) self.setAttribute(Qt.WA_TransparentForMouseEvents, True) self.setAttribute(Qt.WA_NoChildEventsForParent, True) self.setFocusPolicy(Qt.NoFocus) if color is None: color = self.palette().color(QPalette.Dark) self.__color = color self.__radius = radius self.__widget = None self.__widgetParent = None self.__updatePixmap() def setColor(self, color): """ Set the color of the shadow. """ if not isinstance(color, QColor): color = QColor(color) if self.__color != color: self.__color = QColor(color) self.__updatePixmap() def color(self): """ Return the color of the drop shadow. """ return QColor(self.__color) color_ = Property(QColor, fget=color, fset=setColor, designable=True, doc="Drop shadow color") def setRadius(self, radius): """ Set the drop shadow's blur radius. """ if self.__radius != radius: self.__radius = radius self.__updateGeometry() self.__updatePixmap() def radius(self): """ Return the shadow blur radius. """ return self.__radius radius_ = Property(int, fget=radius, fset=setRadius, designable=True, doc="Drop shadow blur radius.") def setWidget(self, widget): """ Set the widget around which to show the shadow. """ if self.__widget: self.__widget.removeEventFilter(self) self.__widget = widget if self.__widget: self.__widget.installEventFilter(self) # Find the parent for the frame # This is the top level window a toolbar or a viewport # of a scroll area parent = widget.parentWidget() while not (isinstance(parent, (QAbstractScrollArea, QToolBar)) or \ parent.isWindow()): parent = parent.parentWidget() if isinstance(parent, QAbstractScrollArea): parent = parent.viewport() self.__widgetParent = parent self.setParent(parent) self.stackUnder(widget) self.__updateGeometry() self.setVisible(widget.isVisible()) def widget(self): """ Return the widget that was set by `setWidget`. 
""" return self.__widget def paintEvent(self, event): # TODO: Use QPainter.drawPixmapFragments on Qt 4.7 opt = QStyleOption() opt.initFrom(self) pixmap = self.__shadowPixmap shadow_rect = QRectF(opt.rect) widget_rect = QRectF(self.widget().geometry()) widget_rect.moveTo(self.radius_, self.radius_) left = top = right = bottom = self.radius_ pixmap_rect = QRectF(QPointF(0, 0), QSizeF(pixmap.size())) # Shadow casting rectangle in the source pixmap. pixmap_shadow_rect = pixmap_rect.adjusted(left, top, -right, -bottom) source_rects = self.__shadowPixmapFragments(pixmap_rect, pixmap_shadow_rect) target_rects = self.__shadowPixmapFragments(shadow_rect, widget_rect) painter = QPainter(self) for source, target in zip(source_rects, target_rects): painter.drawPixmap(target, pixmap, source) painter.end() def eventFilter(self, obj, event): etype = event.type() if etype == QEvent.Move or etype == QEvent.Resize: self.__updateGeometry() elif etype == QEvent.Show: self.__updateGeometry() self.show() elif etype == QEvent.Hide: self.hide() return QWidget.eventFilter(self, obj, event) def __updateGeometry(self): """ Update the shadow geometry to fit the widget's changed geometry. """ widget = self.__widget parent = self.__widgetParent radius = self.radius_ pos = widget.pos() if parent != widget.parentWidget(): pos = widget.parentWidget().mapTo(parent, pos) geom = QRect(pos, widget.size()) geom.adjust(-radius, -radius, radius, radius) if geom != self.geometry(): self.setGeometry(geom) # Set the widget mask (punch a hole through to the `widget` instance. rect = self.rect() mask = QRegion(rect) transparent = QRegion(rect.adjusted(radius, radius, -radius, -radius)) mask = mask.subtracted(transparent) self.setMask(mask) def __updatePixmap(self): """ Update the cached shadow pixmap. """ rect_size = QSize(50, 50) left = top = right = bottom = self.radius_ # Size of the pixmap. pixmap_size = QSize(rect_size.width() + left + right, rect_size.height() + top + bottom) shadow_rect = QRect(QPoint(left, top), rect_size) pixmap = QPixmap(pixmap_size) pixmap.fill(QColor(0, 0, 0, 0)) rect_fill_color = self.palette().color(QPalette.Window) pixmap = render_drop_shadow_frame( pixmap, QRectF(shadow_rect), shadow_color=self.color_, offset=QPointF(0, 0), radius=self.radius_, rect_fill_color=rect_fill_color ) self.__shadowPixmap = pixmap self.update() def __shadowPixmapFragments(self, pixmap_rect, shadow_rect): """ Return a list of 8 QRectF fragments for drawing a shadow. """ s_left, s_top, s_right, s_bottom = \ shadow_rect.left(), shadow_rect.top(), \ shadow_rect.right(), shadow_rect.bottom() s_width, s_height = shadow_rect.width(), shadow_rect.height() p_width, p_height = pixmap_rect.width(), pixmap_rect.height() top_left = QRectF(0.0, 0.0, s_left, s_top) top = QRectF(s_left, 0.0, s_width, s_top) top_right = QRectF(s_right, 0.0, p_width - s_width, s_top) right = QRectF(s_right, s_top, p_width - s_right, s_height) right_bottom = QRectF(shadow_rect.bottomRight(), pixmap_rect.bottomRight()) bottom = QRectF(shadow_rect.bottomLeft(), pixmap_rect.bottomRight() - \ QPointF(p_width - s_right, 0.0)) bottom_left = QRectF(shadow_rect.bottomLeft() - QPointF(s_left, 0.0), pixmap_rect.bottomLeft() + QPointF(s_left, 0.0)) left = QRectF(pixmap_rect.topLeft() + QPointF(0.0, s_top), shadow_rect.bottomLeft()) return [top_left, top, top_right, right, right_bottom, bottom, bottom_left, left] # A different obsolete implementation class _DropShadowWidget(QWidget): """A frame widget drawing a drop shadow effect around its contents. 
""" def __init__(self, parent=None, offset=None, radius=None, color=None, **kwargs): QWidget.__init__(self, parent, **kwargs) # Bypass the overloaded method to set the default margins. QWidget.setContentsMargins(self, 10, 10, 10, 10) if offset is None: offset = QPointF(0., 0.) if radius is None: radius = 20 if color is None: color = QColor(Qt.black) self.offset = offset self.radius = radius self.color = color self._shadowPixmap = None self._updateShadowPixmap() def setOffset(self, offset): """Set the drop shadow offset (`QPoint`) """ self.offset = offset self._updateShadowPixmap() self.update() def setRadius(self, radius): """Set the drop shadow blur radius (`float`). """ self.radius = radius self._updateShadowPixmap() self.update() def setColor(self, color): """Set the drop shadow color (`QColor`). """ self.color = color self._updateShadowPixmap() self.update() def setContentsMargins(self, *args, **kwargs): QWidget.setContentsMargins(self, *args, **kwargs) self._updateShadowPixmap() def _updateShadowPixmap(self): """Update the cached drop shadow pixmap. """ # Rectangle casting the shadow rect_size = QSize(*CACHED_SHADOW_RECT_SIZE) left, top, right, bottom = self.getContentsMargins() # Size of the pixmap. pixmap_size = QSize(rect_size.width() + left + right, rect_size.height() + top + bottom) shadow_rect = QRect(QPoint(left, top), rect_size) pixmap = QPixmap(pixmap_size) pixmap.fill(QColor(0, 0, 0, 0)) rect_fill_color = self.palette().color(QPalette.Window) pixmap = render_drop_shadow_frame(pixmap, QRectF(shadow_rect), shadow_color=self.color, offset=self.offset, radius=self.radius, rect_fill_color=rect_fill_color) self._shadowPixmap = pixmap def paintEvent(self, event): pixmap = self._shadowPixmap widget_rect = QRectF(QPointF(0.0, 0.0), QSizeF(self.size())) frame_rect = QRectF(self.contentsRect()) left, top, right, bottom = self.getContentsMargins() pixmap_rect = QRectF(QPointF(0, 0), QSizeF(pixmap.size())) # Shadow casting rectangle. pixmap_shadow_rect = pixmap_rect.adjusted(left, top, -right, -bottom) source_rects = self._shadowPixmapFragments(pixmap_rect, pixmap_shadow_rect) target_rects = self._shadowPixmapFragments(widget_rect, frame_rect) painter = QPainter(self) for source, target in zip(source_rects, target_rects): painter.drawPixmap(target, pixmap, source) painter.end() def _shadowPixmapFragments(self, pixmap_rect, shadow_rect): """Return a list of 8 QRectF fragments for drawing a shadow. """ s_left, s_top, s_right, s_bottom = \ shadow_rect.left(), shadow_rect.top(), \ shadow_rect.right(), shadow_rect.bottom() s_width, s_height = shadow_rect.width(), shadow_rect.height() p_width, p_height = pixmap_rect.width(), pixmap_rect.height() top_left = QRectF(0.0, 0.0, s_left, s_top) top = QRectF(s_left, 0.0, s_width, s_top) top_right = QRectF(s_right, 0.0, p_width - s_width, s_top) right = QRectF(s_right, s_top, p_width - s_right, s_height) right_bottom = QRectF(shadow_rect.bottomRight(), pixmap_rect.bottomRight()) bottom = QRectF(shadow_rect.bottomLeft(), pixmap_rect.bottomRight() - \ QPointF(p_width - s_right, 0.0)) bottom_left = QRectF(shadow_rect.bottomLeft() - QPointF(s_left, 0.0), pixmap_rect.bottomLeft() + QPointF(s_left, 0.0)) left = QRectF(pixmap_rect.topLeft() + QPointF(0.0, s_top), shadow_rect.bottomLeft()) return [top_left, top, top_right, right, right_bottom, bottom, bottom_left, left] """ Classes representing uploaded files. 
""" import errno import os from io import BytesIO from django.conf import settings from django.core.files.base import File from django.core.files import temp as tempfile from django.utils.encoding import force_str __all__ = ('UploadedFile', 'TemporaryUploadedFile', 'InMemoryUploadedFile', 'SimpleUploadedFile') class UploadedFile(File): """ A abstract uploaded file (``TemporaryUploadedFile`` and ``InMemoryUploadedFile`` are the built-in concrete subclasses). An ``UploadedFile`` object behaves somewhat like a file object and represents some file data that the user submitted with a form. """ DEFAULT_CHUNK_SIZE = 64 * 2 ** 10 def __init__(self, file=None, name=None, content_type=None, size=None, charset=None, content_type_extra=None): super(UploadedFile, self).__init__(file, name) self.size = size self.content_type = content_type self.charset = charset self.content_type_extra = content_type_extra def __repr__(self): return force_str("<%s: %s (%s)>" % ( self.__class__.__name__, self.name, self.content_type)) def _get_name(self): return self._name def _set_name(self, name): # Sanitize the file name so that it can't be dangerous. if name is not None: # Just use the basename of the file -- anything else is dangerous. name = os.path.basename(name) # File names longer than 255 characters can cause problems on older OSes. if len(name) > 255: name, ext = os.path.splitext(name) ext = ext[:255] name = name[:255 - len(ext)] + ext self._name = name name = property(_get_name, _set_name) class TemporaryUploadedFile(UploadedFile): """ A file uploaded to a temporary location (i.e. stream-to-disk). """ def __init__(self, name, content_type, size, charset, content_type_extra=None): if settings.FILE_UPLOAD_TEMP_DIR: file = tempfile.NamedTemporaryFile(suffix='.upload', dir=settings.FILE_UPLOAD_TEMP_DIR) else: file = tempfile.NamedTemporaryFile(suffix='.upload') super(TemporaryUploadedFile, self).__init__(file, name, content_type, size, charset, content_type_extra) def temporary_file_path(self): """ Returns the full path of this file. """ return self.file.name def close(self): try: return self.file.close() except OSError as e: if e.errno != errno.ENOENT: # Means the file was moved or deleted before the tempfile # could unlink it. Still sets self.file.close_called and # calls self.file.file.close() before the exception raise class InMemoryUploadedFile(UploadedFile): """ A file uploaded into memory (i.e. stream-to-memory). """ def __init__(self, file, field_name, name, content_type, size, charset, content_type_extra=None): super(InMemoryUploadedFile, self).__init__(file, name, content_type, size, charset, content_type_extra) self.field_name = field_name def open(self, mode=None): self.file.seek(0) def chunks(self, chunk_size=None): self.file.seek(0) yield self.read() def multiple_chunks(self, chunk_size=None): # Since it's in memory, we'll never have multiple chunks. return False class SimpleUploadedFile(InMemoryUploadedFile): """ A simple representation of a file, which just has content, size, and a name. 
""" def __init__(self, name, content, content_type='text/plain'): content = content or b'' super(SimpleUploadedFile, self).__init__(BytesIO(content), None, name, content_type, len(content), None, None) @classmethod def from_dict(cls, file_dict): """ Creates a SimpleUploadedFile object from a dictionary object with the following keys: - filename - content-type - content """ return cls(file_dict['filename'], file_dict['content'], file_dict.get('content-type', 'text/plain')) import sys from idl.ttypes import CommandDTO from thrift import Thrift from thrift.transport import TSocket from thrift.transport import TTransport from thrift.protocol import TBinaryProtocol from idl import CenterSynRPCService from Command import Command from prettytable import PrettyTable sys.path.append("./idl") docker_center_command = ['node-list', "create-job", "job-list", "start-job", "stop-job", 'help', 'version'] docker_center_param_name = ['--node-tag'] BILLION = 100000000 def execute_command(dc_command): try: # transport = TSocket.TSocket('localhost', 9047) transport = TSocket.TSocket('192.168.30.1', 9047) transport = TTransport.TBufferedTransport(transport) protocol = TBinaryProtocol.TBinaryProtocol(transport) client = CenterSynRPCService.Client(protocol) transport.open() dto = CommandDTO(command.command, dc_command.docker_params) dto.nodeTag = int(dc_command.center_params['--node-tag']) result = client.executeCommand(dto) print(result.returnMessage) except Thrift.TException as e: print(e) def parse_param(args): center_param_map = {} docker_param_list = [] dc_command = Command() if len(args) < 2: dc_command.command = 'help' return dc_command.command = args[1] skip_loop = False for x in args[2:]: if skip_loop: skip_loop = False continue if x in docker_center_param_name: center_param_map[x] = args[args.index(x) + 1] skip_loop = True else: docker_param_list.append(x) dc_command.center_params = center_param_map dc_command.docker_params = docker_param_list return dc_command # def get_docker_engine_version(version_str): # lines = version_str.split('\n') # for i, line in enumerate(lines): # if "Server:" not in line: # continue # if "Version:" in lines[i + 1]: # return lines[i + 1].replace("Version:", "").strip() # return "UNKNOWN" def get_node_info(): try: client, transport = get_thrift_client() transport.open() result = client.getNodeMap() transport.close() x = PrettyTable(["Tag", "Name", "Node Ip", "version", "status", "Architecture", "Free Disk", "Free Memory", "Response Time", "Container Running/Total"]) for node in result.values(): x.add_row([node.tag, node.name, node.ip, node.dockerVersion, node.dockerStatus, node.architecture, node.freeDiskSpace / BILLION, node.freeMemorySpace / BILLION, node.responseTime, str(node.RunningContainerCount) + '/' + str(node.containerCount)]) print(x) except Thrift.TException as e: print(e) def create_job(job_name): try: client, transport = get_thrift_client() transport.open() result = client.newJob(job_name) transport.close() print(result.message) except Thrift.TException as e: print(e) def start_job(job_name): try: client, transport = get_thrift_client() transport.open() result = client.startJob(job_name) transport.close() print(result.message) except Thrift.TException as e: print(e) def stop_job(job_name): try: print(job_name) client, transport = get_thrift_client() transport.open() result = client.stopJob(job_name) transport.close() print(result.message) except Thrift.TException as e: print(e) def get_thrift_client(): transport = TSocket.TSocket('localhost', 9047) transport = 
TTransport.TBufferedTransport(transport) protocol = TBinaryProtocol.TBinaryProtocol(transport) client = CenterSynRPCService.Client(protocol) return client, transport def get_job_list(): try: client, transport = get_thrift_client() transport.open() result = client.getJoblist() transport.close() print(result) x = PrettyTable(["ID", "Name", "Status", "Deploy Strategy", "SubName Strategy"]) for job in result: x.add_row([job.jobId, job.jobname, job.status, job.deployStrategy, job.subNameStrategy]) print(x) except Thrift.TException as e: print(e) if __name__ == '__main__': command = parse_param(sys.argv) if command.command not in docker_center_command: execute_command(command) else: if command.command == docker_center_command[0]: get_node_info() if command.command == docker_center_command[1]: if len(command.docker_params) != 1: print("missing job name, try user dockerc help to get function use.") else: job_name = command.docker_params[0] create_job(job_name) if command.command == docker_center_command[2]: get_job_list() if command.command == docker_center_command[3]: if len(command.docker_params) != 1: print("missing job name, try user dockerc help to get function use.") else: job_name = command.docker_params[0] start_job(job_name) if command.command == docker_center_command[4]: if len(command.docker_params) != 1: print("missing job name, try user dockerc help to get function use.") else: job_name = command.docker_params[0] stop_job(job_name) if command.command == docker_center_command[5]: print("node-tag ${node_tag}\t use this param to set the node to run command.") print("node-list\t show all nodes registered in Docker Center.") print("job-list\t show all jobs registered in Docker Center.") print("create-job ${job_name}\t create a new job in Docker Center.") print("start-job ${job_name}\t start a job in Docker Center.") print("stop-job ${job_name}\t stop a job in Docker Center.") print("log ${job_name}\t show all logs achieved of job ") print("version\t show Docker Center current version.") if command.command == docker_center_command[6]: print("Docker Center 1.0.0") #!/usr/bin/env python from __future__ import print_function """ Demonstrates the new Ruffus syntax in version 2.6 """ import os tempdir = os.path.relpath(os.path.abspath(os.path.splitext(__file__)[0])) import sys # add grandparent to search path for testing grandparent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..")) sys.path.insert(0, grandparent_dir) import ruffus from ruffus import add_inputs, suffix, mkdir, regex, Pipeline, output_from, touch_file #88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888 # imports #88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888 import shutil def touch (outfile): with open(outfile, "w"): pass #88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888 # Tasks #88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888 def task_originate(o): """ Makes new files """ touch(o) def task_m_to_1(i, o): """ Merges files together """ with open(o, "w") as o_file: for f in sorted(i): with open(f) as ii: o_file.write(f +"=" + ii.read() + "; ") def task_1_to_1(i, o): """ 1 to 1 for transform """ with open(o, "w") as o_file: with open(i) as ii: o_file.write(i +"+" + ii.read()) DEBUG_do_not_define_tail_task = False DEBUG_do_not_define_head_task = False import unittest # # Returns a fully formed sub pipeline useable as a building block # def 
make_pipeline1(pipeline_name, # Pipelines need to have a unique name starting_file_names): test_pipeline = Pipeline(pipeline_name) # We can change the starting files later using # set_input() for transform etc. # or set_output() for originate # But it can be more convenient to just pass this to the function making the pipeline # test_pipeline.originate(task_originate, starting_file_names)\ .follows(mkdir(tempdir), mkdir(tempdir + "/testdir", tempdir + "/testdir2"))\ .posttask(touch_file(tempdir + "/testdir/whatever.txt")) test_pipeline.transform(task_func = task_m_to_1, name = "add_input", # Lookup Task from function name task_originate() # So long as this is unique in the pipeline input = task_originate, filter = regex(r"(.*)"), add_inputs = add_inputs(tempdir + "/testdir/whatever.txt"), output = r"\1.22") test_pipeline.transform(task_func = task_1_to_1, name = "22_to_33", # Lookup Task from Task name # Function name is not unique in the pipeline input = output_from("add_input"), filter = suffix(".22"), output = ".33") tail_task = test_pipeline.transform(task_func = task_1_to_1, name = "33_to_44", # Ask Pipeline to lookup Task from Task name input = test_pipeline["22_to_33"], filter = suffix(".33"), output = ".44") # Set the tail task so that users of my sub pipeline can use it as a dependency # without knowing the details of task names # # Use Task() object directly without having to lookup test_pipeline.set_tail_tasks([tail_task]) # If we try to connect a Pipeline without tail tasks defined, we have to # specify the exact task within the Pipeline. # Otherwise Ruffus will not know which task we mean and throw an exception if DEBUG_do_not_define_tail_task: test_pipeline.set_tail_tasks([]) # Set the head task so that users of my sub pipeline send input into it # without knowing the details of task names test_pipeline.set_head_tasks([test_pipeline[task_originate]]) return test_pipeline # # Returns a fully formed sub pipeline useable as a building block # def make_pipeline2( pipeline_name = "pipeline2"): test_pipeline2 = Pipeline(pipeline_name) test_pipeline2.transform(task_func = task_1_to_1, # task name name = "44_to_55", # placeholder: will be replaced later with set_input() input = None, filter = suffix(".44"), output = ".55") test_pipeline2.merge( task_func = task_m_to_1, input = test_pipeline2["44_to_55"], output = tempdir + "/final.output",) # Set head and tail test_pipeline2.set_tail_tasks([test_pipeline2[task_m_to_1]]) if not DEBUG_do_not_define_head_task: test_pipeline2.set_head_tasks([test_pipeline2["44_to_55"]]) return test_pipeline2 # First two pipelines are created as separate instances by the make_pipeline1 function pipeline1a = make_pipeline1(pipeline_name = "pipeline1a", starting_file_names = [tempdir + "/" + ss for ss in ("a.1", "b.1")]) pipeline1b = make_pipeline1(pipeline_name = "pipeline1b", starting_file_names = [tempdir + "/" + ss for ss in ("c.1", "d.1")]) # The Third pipeline is a clone of pipeline1b pipeline1c = pipeline1b.clone(new_name = "pipeline1c") # Set the "originate" files for pipeline1c to ("e.1" and "f.1") # Otherwise they would use the original ("c.1", "d.1") pipeline1c.set_output(output = []) pipeline1c.set_output(output = [tempdir + "/" + ss for ss in ("e.1", "f.1")]) # Join all pipeline1a-c to pipeline2 pipeline2 = make_pipeline2() pipeline2.set_input(input = [pipeline1a, pipeline1b, pipeline1c]) import ruffus.cmdline as cmdline parser = cmdline.get_argparse(description='Demonstrates the new Ruffus syntax in version 2.6') 
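# Add a script-specific --cleanup flag on top of the standard Ruffus command line options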
parser.add_argument('--cleanup', "-C", action="store_true", help="Cleanup before and after.") options = parser.parse_args() # standard python logger which can be synchronised across concurrent Ruffus tasks logger, logger_mutex = cmdline.setup_logging (__file__, options.log_file, options.verbose) logger.debug("\tRuffus Version = " + ruffus.__version__) if options.cleanup: try: shutil.rmtree(tempdir) except: pass correct = False # if we are not printing but running if not options.just_print and \ not options.flowchart and \ not options.touch_files_only: # # Cleanup # if options.cleanup: try: shutil.rmtree(tempdir) except: pass # # Run # cmdline.run (options) # Check that the output reflecting the pipeline topology is correct. correct_output = '{tempdir}/a.1.55={tempdir}/a.1.44+{tempdir}/a.1.33+{tempdir}/a.1.22+{tempdir}/a.1=; {tempdir}/testdir/whatever.txt=; ; ' \ '{tempdir}/b.1.55={tempdir}/b.1.44+{tempdir}/b.1.33+{tempdir}/b.1.22+{tempdir}/b.1=; {tempdir}/testdir/whatever.txt=; ; ' \ '{tempdir}/c.1.55={tempdir}/c.1.44+{tempdir}/c.1.33+{tempdir}/c.1.22+{tempdir}/c.1=; {tempdir}/testdir/whatever.txt=; ; ' \ '{tempdir}/d.1.55={tempdir}/d.1.44+{tempdir}/d.1.33+{tempdir}/d.1.22+{tempdir}/d.1=; {tempdir}/testdir/whatever.txt=; ; ' \ '{tempdir}/e.1.55={tempdir}/e.1.44+{tempdir}/e.1.33+{tempdir}/e.1.22+{tempdir}/e.1=; {tempdir}/testdir/whatever.txt=; ; ' \ '{tempdir}/f.1.55={tempdir}/f.1.44+{tempdir}/f.1.33+{tempdir}/f.1.22+{tempdir}/f.1=; {tempdir}/testdir/whatever.txt=; ; '.format(tempdir = tempdir) try: with open(tempdir + "/final.output") as real_output: real_output_str = real_output.read() except Exception as e: real_output_str = str(e) + "\n" if (correct_output != real_output_str): print ("_" * 80 + "\n" + " " * 25 + "Warning\n" + "_" * 80 + "\n" + "If you had run the whole of pipeline 2, " "you should have obtained:<\n\t%s>\n\n Rather than:<\n\t%s>\n\n" % (correct_output.replace("; ", ";\n\t"), real_output_str.replace("; ", ";\n\t")) + "_" * 80, "\n",) else: logger.debug("\tAll Correct.\n") correct = True # # Cleanup # if options.cleanup: try: shutil.rmtree(tempdir) except: pass else: cmdline.run (options) correct = True sys.exit(0 if correct else 1) from __future__ import unicode_literals from datetime import datetime from django.test import TestCase from django.utils import six from .models import Reporter, Article, Writer class M2MIntermediaryTests(TestCase): def test_intermeiary(self): r1 = Reporter.objects.create(first_name="John", last_name="Smith") r2 = Reporter.objects.create(first_name="Jane", last_name="Doe") a = Article.objects.create( headline="This is a test", pub_date=datetime(2005, 7, 27) ) w1 = Writer.objects.create(reporter=r1, article=a, position="Main writer") w2 = Writer.objects.create(reporter=r2, article=a, position="Contributor") self.assertQuerysetEqual( a.writer_set.select_related().order_by("-position"), [ ("John Smith", "Main writer"), ("Jane Doe", "Contributor"), ], lambda w: (six.text_type(w.reporter), w.position) ) self.assertEqual(w1.reporter, r1) self.assertEqual(w2.reporter, r2) self.assertEqual(w1.article, a) self.assertEqual(w2.article, a) self.assertQuerysetEqual( r1.writer_set.all(), [ ("John Smith", "Main writer") ], lambda w: (six.text_type(w.reporter), w.position) ) # -*- coding: utf-8 -*- """ Unit tests for preference APIs. 
""" import datetime import ddt import unittest from mock import patch from pytz import UTC from django.conf import settings from django.contrib.auth.models import User from django.test import TestCase from django.test.utils import override_settings from dateutil.parser import parse as parse_datetime from student.tests.factories import UserFactory from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase from xmodule.modulestore.tests.factories import CourseFactory from ...accounts.api import create_account from ...errors import UserNotFound, UserNotAuthorized, PreferenceValidationError, PreferenceUpdateError from ...models import UserProfile, UserOrgTag from ...preferences.api import ( get_user_preference, get_user_preferences, set_user_preference, update_user_preferences, delete_user_preference, update_email_opt_in ) @unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Account APIs are only supported in LMS') class TestPreferenceAPI(TestCase): """ These tests specifically cover the parts of the API methods that are not covered by test_views.py. This includes the specific types of error raised, and default behavior when optional arguments are not specified. """ password = "test" def setUp(self): super(TestPreferenceAPI, self).setUp() self.user = UserFactory.create(password=self.password) self.different_user = UserFactory.create(password=self.password) self.staff_user = UserFactory(is_staff=True, password=self.password) self.no_such_user = UserFactory.create(password=self.password) self.no_such_user.username = "no_such_user" self.test_preference_key = "test_key" self.test_preference_value = "test_value" set_user_preference(self.user, self.test_preference_key, self.test_preference_value) def test_get_user_preference(self): """ Verifies the basic behavior of get_user_preference. """ self.assertEqual( get_user_preference(self.user, self.test_preference_key), self.test_preference_value ) self.assertEqual( get_user_preference(self.staff_user, self.test_preference_key, username=self.user.username), self.test_preference_value ) def test_get_user_preference_errors(self): """ Verifies that get_user_preference returns appropriate errors. """ with self.assertRaises(UserNotFound): get_user_preference(self.user, self.test_preference_key, username="no_such_user") with self.assertRaises(UserNotFound): get_user_preference(self.no_such_user, self.test_preference_key) with self.assertRaises(UserNotAuthorized): get_user_preference(self.different_user, self.test_preference_key, username=self.user.username) def test_get_user_preferences(self): """ Verifies the basic behavior of get_user_preferences. """ expected_user_preferences = { self.test_preference_key: self.test_preference_value, } self.assertEqual(get_user_preferences(self.user), expected_user_preferences) self.assertEqual(get_user_preferences(self.staff_user, username=self.user.username), expected_user_preferences) def test_get_user_preferences_errors(self): """ Verifies that get_user_preferences returns appropriate errors. """ with self.assertRaises(UserNotFound): get_user_preferences(self.user, username="no_such_user") with self.assertRaises(UserNotFound): get_user_preferences(self.no_such_user) with self.assertRaises(UserNotAuthorized): get_user_preferences(self.different_user, username=self.user.username) def test_set_user_preference(self): """ Verifies the basic behavior of set_user_preference. 
""" test_key = u'ⓟⓡⓔⓕⓔⓡⓔⓝⓒⓔ_ⓚⓔⓨ' test_value = u'ǝnןɐʌ_ǝɔuǝɹǝɟǝɹd' set_user_preference(self.user, test_key, test_value) self.assertEqual(get_user_preference(self.user, test_key), test_value) set_user_preference(self.user, test_key, "new_value", username=self.user.username) self.assertEqual(get_user_preference(self.user, test_key), "new_value") @patch('openedx.core.djangoapps.user_api.models.UserPreference.save') def test_set_user_preference_errors(self, user_preference_save): """ Verifies that set_user_preference returns appropriate errors. """ with self.assertRaises(UserNotFound): set_user_preference(self.user, self.test_preference_key, "new_value", username="no_such_user") with self.assertRaises(UserNotFound): set_user_preference(self.no_such_user, self.test_preference_key, "new_value") with self.assertRaises(UserNotAuthorized): set_user_preference(self.staff_user, self.test_preference_key, "new_value", username=self.user.username) with self.assertRaises(UserNotAuthorized): set_user_preference(self.different_user, self.test_preference_key, "new_value", username=self.user.username) too_long_key = "x" * 256 with self.assertRaises(PreferenceValidationError) as context_manager: set_user_preference(self.user, too_long_key, "new_value") errors = context_manager.exception.preference_errors self.assertEqual(len(errors.keys()), 1) self.assertEqual( errors[too_long_key], { "developer_message": get_expected_validation_developer_message(too_long_key, "new_value"), "user_message": get_expected_key_error_user_message(too_long_key, "new_value"), } ) for empty_value in (None, "", " "): with self.assertRaises(PreferenceValidationError) as context_manager: set_user_preference(self.user, self.test_preference_key, empty_value) errors = context_manager.exception.preference_errors self.assertEqual(len(errors.keys()), 1) self.assertEqual( errors[self.test_preference_key], { "developer_message": get_empty_preference_message(self.test_preference_key), "user_message": get_empty_preference_message(self.test_preference_key), } ) user_preference_save.side_effect = [Exception, None] with self.assertRaises(PreferenceUpdateError) as context_manager: set_user_preference(self.user, u"new_key_ȻħȺɍłɇs", u"new_value_ȻħȺɍłɇs") self.assertEqual( context_manager.exception.developer_message, u"Save failed for user preference 'new_key_ȻħȺɍłɇs' with value 'new_value_ȻħȺɍłɇs': " ) self.assertEqual( context_manager.exception.user_message, u"Save failed for user preference 'new_key_ȻħȺɍłɇs' with value 'new_value_ȻħȺɍłɇs'." ) def test_update_user_preferences(self): """ Verifies the basic behavior of update_user_preferences. """ expected_user_preferences = { self.test_preference_key: "new_value", } set_user_preference(self.user, self.test_preference_key, "new_value") self.assertEqual( get_user_preference(self.user, self.test_preference_key), "new_value" ) set_user_preference(self.user, self.test_preference_key, "new_value", username=self.user.username) self.assertEqual( get_user_preference(self.user, self.test_preference_key), "new_value" ) @patch('openedx.core.djangoapps.user_api.models.UserPreference.delete') @patch('openedx.core.djangoapps.user_api.models.UserPreference.save') def test_update_user_preferences_errors(self, user_preference_save, user_preference_delete): """ Verifies that set_user_preferences returns appropriate errors. 
""" update_data = { self.test_preference_key: "new_value" } with self.assertRaises(UserNotFound): update_user_preferences(self.user, update_data, username="no_such_user") with self.assertRaises(UserNotFound): update_user_preferences(self.no_such_user, update_data) with self.assertRaises(UserNotAuthorized): update_user_preferences(self.staff_user, update_data, username=self.user.username) with self.assertRaises(UserNotAuthorized): update_user_preferences(self.different_user, update_data, username=self.user.username) too_long_key = "x" * 256 with self.assertRaises(PreferenceValidationError) as context_manager: update_user_preferences(self.user, {too_long_key: "new_value"}) errors = context_manager.exception.preference_errors self.assertEqual(len(errors.keys()), 1) self.assertEqual( errors[too_long_key], { "developer_message": get_expected_validation_developer_message(too_long_key, "new_value"), "user_message": get_expected_key_error_user_message(too_long_key, "new_value"), } ) for empty_value in ("", " "): with self.assertRaises(PreferenceValidationError) as context_manager: update_user_preferences(self.user, {self.test_preference_key: empty_value}) errors = context_manager.exception.preference_errors self.assertEqual(len(errors.keys()), 1) self.assertEqual( errors[self.test_preference_key], { "developer_message": get_empty_preference_message(self.test_preference_key), "user_message": get_empty_preference_message(self.test_preference_key), } ) user_preference_save.side_effect = [Exception, None] with self.assertRaises(PreferenceUpdateError) as context_manager: update_user_preferences(self.user, {self.test_preference_key: "new_value"}) self.assertEqual( context_manager.exception.developer_message, u"Save failed for user preference 'test_key' with value 'new_value': " ) self.assertEqual( context_manager.exception.user_message, u"Save failed for user preference 'test_key' with value 'new_value'." ) user_preference_delete.side_effect = [Exception, None] with self.assertRaises(PreferenceUpdateError) as context_manager: update_user_preferences(self.user, {self.test_preference_key: None}) self.assertEqual( context_manager.exception.developer_message, u"Delete failed for user preference 'test_key': " ) self.assertEqual( context_manager.exception.user_message, u"Delete failed for user preference 'test_key'." ) def test_delete_user_preference(self): """ Verifies the basic behavior of delete_user_preference. """ self.assertTrue(delete_user_preference(self.user, self.test_preference_key)) set_user_preference(self.user, self.test_preference_key, self.test_preference_value) self.assertTrue(delete_user_preference(self.user, self.test_preference_key, username=self.user.username)) self.assertFalse(delete_user_preference(self.user, "no_such_key")) @patch('openedx.core.djangoapps.user_api.models.UserPreference.delete') def test_delete_user_preference_errors(self, user_preference_delete): """ Verifies that delete_user_preference returns appropriate errors. 
""" with self.assertRaises(UserNotFound): delete_user_preference(self.user, self.test_preference_key, username="no_such_user") with self.assertRaises(UserNotFound): delete_user_preference(self.no_such_user, self.test_preference_key) with self.assertRaises(UserNotAuthorized): delete_user_preference(self.staff_user, self.test_preference_key, username=self.user.username) with self.assertRaises(UserNotAuthorized): delete_user_preference(self.different_user, self.test_preference_key, username=self.user.username) user_preference_delete.side_effect = [Exception, None] with self.assertRaises(PreferenceUpdateError) as context_manager: delete_user_preference(self.user, self.test_preference_key) self.assertEqual( context_manager.exception.developer_message, u"Delete failed for user preference 'test_key': " ) self.assertEqual( context_manager.exception.user_message, u"Delete failed for user preference 'test_key'." ) @ddt.ddt class UpdateEmailOptInTests(ModuleStoreTestCase): USERNAME = u'frank-underwood' PASSWORD = u'ṕáśśẃőŕd' EMAIL = u'frank+underwood@example.com' @ddt.data( # Check that a 27 year old can opt-in (27, True, u"True"), # Check that a 32-year old can opt-out (32, False, u"False"), # Check that someone 14 years old can opt-in (14, True, u"True"), # Check that someone 13 years old cannot opt-in (must have turned 13 before this year) (13, True, u"False"), # Check that someone 12 years old cannot opt-in (12, True, u"False") ) @ddt.unpack @override_settings(EMAIL_OPTIN_MINIMUM_AGE=13) def test_update_email_optin(self, age, option, expected_result): # Create the course and account. course = CourseFactory.create() create_account(self.USERNAME, self.PASSWORD, self.EMAIL) # Set year of birth user = User.objects.get(username=self.USERNAME) profile = UserProfile.objects.get(user=user) year_of_birth = datetime.datetime.now().year - age # pylint: disable=maybe-no-member profile.year_of_birth = year_of_birth profile.save() update_email_opt_in(user, course.id.org, option) result_obj = UserOrgTag.objects.get(user=user, org=course.id.org, key='email-optin') self.assertEqual(result_obj.value, expected_result) def test_update_email_optin_no_age_set(self): # Test that the API still works if no age is specified. # Create the course and account. course = CourseFactory.create() create_account(self.USERNAME, self.PASSWORD, self.EMAIL) user = User.objects.get(username=self.USERNAME) update_email_opt_in(user, course.id.org, True) result_obj = UserOrgTag.objects.get(user=user, org=course.id.org, key='email-optin') self.assertEqual(result_obj.value, u"True") def test_update_email_optin_anonymous_user(self): """Verify that the API raises an exception for a user with no profile.""" course = CourseFactory.create() no_profile_user, __ = User.objects.get_or_create(username="no_profile_user", password=self.PASSWORD) with self.assertRaises(UserNotFound): update_email_opt_in(no_profile_user, course.id.org, True) @ddt.data( # Check that a 27 year old can opt-in, then out. (27, True, False, u"False"), # Check that a 32-year old can opt-out, then in. (32, False, True, u"True"), # Check that someone 13 years old can opt-in, then out. (13, True, False, u"False"), # Check that someone 12 years old cannot opt-in, then explicitly out. (12, True, False, u"False") ) @ddt.unpack @override_settings(EMAIL_OPTIN_MINIMUM_AGE=13) def test_change_email_optin(self, age, option, second_option, expected_result): # Create the course and account. 
course = CourseFactory.create() create_account(self.USERNAME, self.PASSWORD, self.EMAIL) # Set year of birth user = User.objects.get(username=self.USERNAME) profile = UserProfile.objects.get(user=user) year_of_birth = datetime.datetime.now(UTC).year - age # pylint: disable=maybe-no-member profile.year_of_birth = year_of_birth profile.save() update_email_opt_in(user, course.id.org, option) update_email_opt_in(user, course.id.org, second_option) result_obj = UserOrgTag.objects.get(user=user, org=course.id.org, key='email-optin') self.assertEqual(result_obj.value, expected_result) def _assert_is_datetime(self, timestamp): if not timestamp: return False try: parse_datetime(timestamp) except ValueError: return False else: return True def get_expected_validation_developer_message(preference_key, preference_value): """ Returns the expected dict of validation messages for the specified key. """ return u"Value '{preference_value}' not valid for preference '{preference_key}': {error}".format( preference_key=preference_key, preference_value=preference_value, error={ "key": [u"Ensure this value has at most 255 characters (it has 256)."] } ) def get_expected_key_error_user_message(preference_key, preference_value): """ Returns the expected user message for an invalid key. """ return u"Invalid user preference key '{preference_key}'.".format(preference_key=preference_key) def get_empty_preference_message(preference_key): """ Returns the validation message shown for an empty preference. """ return "Preference '{preference_key}' cannot be set to an empty value.".format(preference_key=preference_key) """LabeledDataset for labeled datasets""" import numpy as np import ankura.pipeline def get_labels(filename): """Since labels are assumed to be normalized from 0 to 1 in computing the augmented Q matrix, this function will scale all labels to fit this range """ smallest = float('inf') largest = float('-inf') labels = {} with open(filename) as ifh: for line in ifh: data = line.strip().split() val = float(data[1]) labels[data[0]] = val if val < smallest: smallest = val if val > largest: largest = val difference = largest - smallest if difference < 1e-50: # all of the label values were essentially the same, so just assign # everything to have the same label for label in labels: labels[label] = 0.5 elif abs(difference - 1) > 1e-50: for label in labels: labels[label] = (labels[label] - smallest) / difference # otherwise, the labels were already spanning the range 0 to 1, so no need # to change anything return labels class LabeledDataset(ankura.pipeline.Dataset): """Implementation of labeled dataset Attributes of the object with the same names as those in ankura.pipeline.Dataset have the same behaviors. The labels are stored in a dictionary mapping titles to floats. 
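# Illustrative sketch, not part of the original module: a tiny end-to-end check
# of the rescaling performed by get_labels() above. The file name and the label
# values are hypothetical; the point is that any spread of raw values is shifted
# by the minimum and divided by the range, so everything lands in [0, 1].
import os
import tempfile

with tempfile.NamedTemporaryFile('w', suffix='.labels', delete=False) as tmp:
    tmp.write("doc1 2.0\ndoc2 4.0\ndoc3 6.0\n")
    label_path = tmp.name

scaled = get_labels(label_path)
# smallest = 2.0, largest = 6.0, difference = 4.0, so:
#   doc1 -> (2.0 - 2.0) / 4.0 == 0.0
#   doc2 -> (4.0 - 2.0) / 4.0 == 0.5
#   doc3 -> (6.0 - 2.0) / 4.0 == 1.0
assert scaled == {'doc1': 0.0, 'doc2': 0.5, 'doc3': 1.0}
os.remove(label_path)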
""" def __init__(self, dataset, labels): ankura.pipeline.Dataset.__init__(self, dataset.docwords, dataset.vocab, dataset.titles) self.labels = labels # precompute vanilla Q beforehand (useful for semi-supervised) ankura.pipeline.Dataset.compute_cooccurrences(self) self._dataset_cooccurrences = self._cooccurrences # don't keep self._cooccurrences, since we want compute_cooccurrences to # compute the proper augmented Q later self._cooccurrences = None def compute_cooccurrences(self): orig_height, orig_width = self._dataset_cooccurrences.shape self._cooccurrences = np.zeros((orig_height, orig_width+2)) self._cooccurrences[:, :-2] = self._dataset_cooccurrences # multiply word counts per document with corresponding regressand regressands = [] labeled_docs = [] for i, title in enumerate(self.titles): if title in self.labels: regressands.append(self.labels[title]) labeled_docs.append(i) # TODO extract information directly (indexing into matrix is slow) labeled_docwords = self.docwords[:, np.array(labeled_docs)] # Make weighted sum for labels reg_sums = labeled_docwords.dot(np.array(regressands)) # summing rows of sparse matrix returns a row matrix; but we want a # numpy array vocab_counts = np.array(labeled_docwords.sum(axis=1).T)[0] #pylint:disable=consider-using-enumerate for i in range(len(vocab_counts)): if vocab_counts[i] > 0: # divide by vocabulary count self._cooccurrences[i, -2] = reg_sums[i] / vocab_counts[i] # if vocab_counts[i] == 0, reg_sums[i, :] == np.zeros # TODO was the above sufficient for making semi-supervised work? # fill in second augmented column with 1 - average self._cooccurrences[:, -1] = 1.0 - self._cooccurrences[:, -2] # -*- coding: utf-'8' "-*-" import base64 try: import simplejson as json except ImportError: import json from hashlib import sha1 import hmac import logging import urlparse from openerp.addons.payment.models.payment_acquirer import ValidationError from openerp.addons.payment_adyen.controllers.main import AdyenController from openerp.osv import osv, fields from openerp.tools import float_round _logger = logging.getLogger(__name__) class AcquirerAdyen(osv.Model): _inherit = 'payment.acquirer' def _get_adyen_urls(self, cr, uid, environment, context=None): """ Adyen URLs - yhpp: hosted payment page: pay.shtml for single, select.shtml for multiple """ return { 'adyen_form_url': 'https://%s.adyen.com/hpp/pay.shtml' % environment, } def _get_providers(self, cr, uid, context=None): providers = super(AcquirerAdyen, self)._get_providers(cr, uid, context=context) providers.append(['adyen', 'Adyen']) return providers _columns = { 'adyen_merchant_account': fields.char('Merchant Account', required_if_provider='adyen'), 'adyen_skin_code': fields.char('Skin Code', required_if_provider='adyen'), 'adyen_skin_hmac_key': fields.char('Skin HMAC Key', required_if_provider='adyen'), } def _adyen_generate_merchant_sig(self, acquirer, inout, values): """ Generate the shasign for incoming or outgoing communications. :param browse acquirer: the payment.acquirer browse record. It should have a shakey in shaky out :param string inout: 'in' (openerp contacting ogone) or 'out' (adyen contacting openerp). 
In this last case only some fields should be contained (see e-Commerce basic) :param dict values: transaction values :return string: shasign """ assert inout in ('in', 'out') assert acquirer.provider == 'adyen' if inout == 'in': keys = "paymentAmount currencyCode shipBeforeDate merchantReference skinCode merchantAccount sessionValidity shopperEmail shopperReference recurringContract allowedMethods blockedMethods shopperStatement merchantReturnData billingAddressType deliveryAddressType offset".split() else: keys = "authResult pspReference merchantReference skinCode paymentMethod shopperLocale merchantReturnData".split() def get_value(key): if values.get(key): return values[key] return '' sign = ''.join('%s' % get_value(k) for k in keys).encode('ascii') key = acquirer.adyen_skin_hmac_key.encode('ascii') return base64.b64encode(hmac.new(key, sign, sha1).digest()) def adyen_form_generate_values(self, cr, uid, id, partner_values, tx_values, context=None): base_url = self.pool['ir.config_parameter'].get_param(cr, uid, 'web.base.url') acquirer = self.browse(cr, uid, id, context=context) # tmp import datetime from dateutil import relativedelta tmp_date = datetime.date.today() + relativedelta.relativedelta(days=1) adyen_tx_values = dict(tx_values) adyen_tx_values.update({ 'merchantReference': tx_values['reference'], 'paymentAmount': '%d' % int(float_round(tx_values['amount'], 2) * 100), 'currencyCode': tx_values['currency'] and tx_values['currency'].name or '', 'shipBeforeDate': tmp_date, 'skinCode': acquirer.adyen_skin_code, 'merchantAccount': acquirer.adyen_merchant_account, 'shopperLocale': partner_values['lang'], 'sessionValidity': tmp_date, 'resURL': '%s' % urlparse.urljoin(base_url, AdyenController._return_url), }) if adyen_tx_values.get('return_url'): adyen_tx_values['merchantReturnData'] = json.dumps({'return_url': '%s' % adyen_tx_values.pop('return_url')}) adyen_tx_values['merchantSig'] = self._adyen_generate_merchant_sig(acquirer, 'in', adyen_tx_values) return partner_values, adyen_tx_values def adyen_get_form_action_url(self, cr, uid, id, context=None): acquirer = self.browse(cr, uid, id, context=context) return self._get_adyen_urls(cr, uid, acquirer.environment, context=context)['adyen_form_url'] class TxAdyen(osv.Model): _inherit = 'payment.transaction' _columns = { 'adyen_psp_reference': fields.char('Adyen PSP Reference'), } # -------------------------------------------------- # FORM RELATED METHODS # -------------------------------------------------- def _adyen_form_get_tx_from_data(self, cr, uid, data, context=None): reference, pspReference = data.get('merchantReference'), data.get('pspReference') if not reference or not pspReference: error_msg = 'Adyen: received data with missing reference (%s) or missing pspReference (%s)' % (reference, pspReference) _logger.error(error_msg) raise ValidationError(error_msg) # find tx -> @TDENOTE use pspReference ? 
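# Hedged, standalone sketch of the signing scheme implemented by
# _adyen_generate_merchant_sig() above (the skin key and field values below are
# made up): the whitelisted fields are concatenated in a fixed order, then the
# HMAC-SHA1 of that string, keyed with the skin HMAC key, is base64-encoded.
import base64
import hmac
from hashlib import sha1

def _example_merchant_sig(values, keys, skin_hmac_key):
    # Missing or falsy fields contribute an empty string, as in get_value() above.
    payload = ''.join('%s' % values.get(k, '') for k in keys).encode('ascii')
    return base64.b64encode(hmac.new(skin_hmac_key.encode('ascii'), payload, sha1).digest())

# Signing a hypothetical 'out' notification over a subset of the 'out' keys:
_example_merchant_sig(
    {'authResult': 'AUTHORISED', 'pspReference': '8513960595693549'},
    ['authResult', 'pspReference', 'merchantReference', 'skinCode'],
    'dummy_skin_hmac_key')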
tx_ids = self.pool['payment.transaction'].search(cr, uid, [('reference', '=', reference)], context=context) if not tx_ids or len(tx_ids) > 1: error_msg = 'Adyen: received data for reference %s' % (reference) if not tx_ids: error_msg += '; no order found' else: error_msg += '; multiple order found' _logger.error(error_msg) raise ValidationError(error_msg) tx = self.pool['payment.transaction'].browse(cr, uid, tx_ids[0], context=context) # verify shasign shasign_check = self.pool['payment.acquirer']._adyen_generate_merchant_sig(tx.acquirer_id, 'out', data) if shasign_check != data.get('merchantSig'): error_msg = 'Adyen: invalid merchantSig, received %s, computed %s' % (data.get('merchantSig'), shasign_check) _logger.warning(error_msg) # raise ValidationError(error_msg) return tx def _adyen_form_get_invalid_parameters(self, cr, uid, tx, data, context=None): invalid_parameters = [] # reference at acquirer: pspReference if tx.acquirer_reference and data.get('pspReference') != tx.acquirer_reference: invalid_parameters.append(('pspReference', data.get('pspReference'), tx.acquirer_reference)) # seller if data.get('skinCode') != tx.acquirer_id.adyen_skin_code: invalid_parameters.append(('skinCode', data.get('skinCode'), tx.acquirer_id.adyen_skin_code)) # result if not data.get('authResult'): invalid_parameters.append(('authResult', data.get('authResult'), 'something')) return invalid_parameters def _adyen_form_validate(self, cr, uid, tx, data, context=None): status = data.get('authResult', 'PENDING') if status == 'AUTHORISED': tx.write({ 'state': 'done', 'adyen_psp_reference': data.get('pspReference'), # 'date_validate': data.get('payment_date', fields.datetime.now()), # 'paypal_txn_type': data.get('express_checkout') }) return True elif status == 'PENDING': tx.write({ 'state': 'pending', 'adyen_psp_reference': data.get('pspReference'), }) return True else: error = 'Paypal: feedback error' _logger.info(error) tx.write({ 'state': 'error', 'state_message': error }) return False """ Django settings for {{ project_name }} project. Common settings for all environments. Don't directly use this settings file, use environments/development.py or environments/production.py and import this file from there. """ import sys from path import path from django.conf import global_settings PROJECT_NAME = "Boots Django" PROJECT_ROOT = path(__file__).abspath().dirname().dirname() sys.path.insert(0, PROJECT_ROOT / 'apps') DEBUG = True TEMPLATE_DEBUG = DEBUG ADMINS = ( # ('Your Name', 'your_email@example.com'), ) MANAGERS = ADMINS # Local time zone for this installation. Choices can be found here: # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name # although not all choices may be available on all operating systems. # In a Windows environment this must be set to your system time zone. TIME_ZONE = 'GMT' # Language code for this installation. All choices can be found here: # http://www.i18nguy.com/unicode/language-identifiers.html LANGUAGE_CODE = 'en-us' SITE_ID = 1 # If you set this to False, Django will make some optimizations so as not # to load the internationalization machinery. USE_I18N = False # If you set this to False, Django will not format dates, numbers and # calendars according to the current locale. USE_L10N = False # If you set this to False, Django will not use timezone-aware datetimes. USE_TZ = True # Absolute filesystem path to the directory that will hold user-uploaded files. 
# Example: "/home/media/media.lawrence.com/media/" MEDIA_ROOT = PROJECT_ROOT / 'public/media' # URL that handles the media served from MEDIA_ROOT. Make sure to use a # trailing slash. # Examples: "http://media.lawrence.com/media/", "http://example.com/media/" MEDIA_URL = '/media/' # Absolute path to the directory static files should be collected to. # Don't put anything in this directory yourself; store your static files # in apps' "static/" subdirectories and in STATICFILES_DIRS. # Example: "/home/media/media.lawrence.com/static/" STATIC_ROOT = PROJECT_ROOT / 'public/static' # URL prefix for static files. # Example: "http://media.lawrence.com/static/" STATIC_URL = '/static/' # Additional locations of static files STATICFILES_DIRS = ( # Put strings here, like "/home/html/static" or "C:/www/django/static". # Always use forward slashes, even on Windows. # Don't forget to use absolute paths, not relative paths. PROJECT_ROOT / 'static', ) # List of finder classes that know how to find static files in # various locations. STATICFILES_FINDERS = ( 'django.contrib.staticfiles.finders.FileSystemFinder', 'django.contrib.staticfiles.finders.AppDirectoriesFinder', # 'django.contrib.staticfiles.finders.DefaultStorageFinder', ) # Make this unique, and don't share it with anybody. SECRET_KEY = '{{ secret_key }}' # List of callables that know how to import templates from various sources. TEMPLATE_LOADERS = ( 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', # 'django.template.loaders.eggs.Loader', ) MIDDLEWARE_CLASSES = ( 'django.middleware.common.CommonMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', # Uncomment the next line for simple clickjacking protection: # 'django.middleware.clickjacking.XFrameOptionsMiddleware', ) ROOT_URLCONF = 'config.urls' # Python dotted path to the WSGI application used by Django's runserver. WSGI_APPLICATION = 'config.wsgi.application' TEMPLATE_DIRS = ( # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates". # Always use forward slashes, even on Windows. # Don't forget to use absolute paths, not relative paths. PROJECT_ROOT / 'templates/', ) INSTALLED_APPS = ( 'grappelli', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.sites', 'django.contrib.messages', 'django.contrib.staticfiles', 'django.contrib.admin', # Uncomment the next line to enable admin documentation: # 'django.contrib.admindocs', # 3rd party apps 'south', 'django_extensions', 'mainpage' # Project specific apps go here # 'my_app', ) # A sample logging configuration. The only tangible logging # performed by this configuration is to send an email to # the site admins on every HTTP 500 error when DEBUG=False. # See http://docs.djangoproject.com/en/dev/topics/logging for # more details on how to customize your logging configuration. 
LOGGING = { 'version': 1, 'disable_existing_loggers': False, 'filters': { 'require_debug_false': { '()': 'django.utils.log.RequireDebugFalse' } }, 'handlers': { 'mail_admins': { 'level': 'ERROR', 'filters': ['require_debug_false'], 'class': 'django.utils.log.AdminEmailHandler' } }, 'loggers': { 'django.request': { 'handlers': ['mail_admins'], 'level': 'ERROR', 'propagate': True, }, } } TEMPLATE_CONTEXT_PROCESSORS = global_settings.TEMPLATE_CONTEXT_PROCESSORS + () ############################################################################## # Third-party app settings ############################################################################## # django-grappelli GRAPPELLI_ADMIN_TITLE = PROJECT_NAME + " Admin" # encoding: utf-8 __docformat__ = "restructuredtext en" #------------------------------------------------------------------------------- # Copyright (C) 2008 The IPython Development Team # # Distributed under the terms of the BSD License. The full license is in # the file COPYING, distributed as part of this software. #------------------------------------------------------------------------------- #------------------------------------------------------------------------------- # Imports #------------------------------------------------------------------------------- import os import __builtin__ # Local imports. from util import Bunch # fixme: RTK thinks magics should be implemented as separate classes rather than # methods on a single class. This would give us the ability to plug new magics # in and configure them separately. class Magic(object): """ An object that maintains magic functions. """ def __init__(self, interpreter, config=None): # A reference to the interpreter. self.interpreter = interpreter # A reference to the configuration object. if config is None: # fixme: we need a better place to store this information. config = Bunch(ESC_MAGIC='%') self.config = config def has_magic(self, name): """ Return True if this object provides a given magic. Parameters ---------- name : str """ return hasattr(self, 'magic_' + name) def object_find(self, name): """ Find an object in the available namespaces. fixme: this should probably be moved elsewhere. The interpreter? """ name = name.strip() # Namespaces to search. # fixme: implement internal and alias namespaces. user_ns = self.interpreter.user_ns internal_ns = {} builtin_ns = __builtin__.__dict__ alias_ns = {} # Order the namespaces. namespaces = [ ('Interactive', user_ns), ('IPython internal', internal_ns), ('Python builtin', builtin_ns), ('Alias', alias_ns), ] # Initialize all results. found = False obj = None space = None ds = None ismagic = False isalias = False # Look for the given name by splitting it in parts. If the head is # found, then we look for all the remaining parts as members, and only # declare success if we can find them all. parts = name.split('.') head, rest = parts[0], parts[1:] for nsname, ns in namespaces: try: obj = ns[head] except KeyError: continue else: for part in rest: try: obj = getattr(obj, part) except: # Blanket except b/c some badly implemented objects # allow __getattr__ to raise exceptions other than # AttributeError, which then crashes us. break else: # If we finish the for loop (no break), we got all members found = True space = nsname isalias = (ns == alias_ns) break # namespace loop # Try to see if it is a magic. 
if not found: if name.startswith(self.config.ESC_MAGIC): name = name[1:] obj = getattr(self, 'magic_' + name, None) if obj is not None: found = True space = 'IPython internal' ismagic = True # Last try: special-case some literals like '', [], {}, etc: if not found and head in ["''", '""', '[]', '{}', '()']: obj = eval(head) found = True space = 'Interactive' return dict( found=found, obj=obj, namespace=space, ismagic=ismagic, isalias=isalias, ) def magic_pwd(self, parameter_s=''): """ Return the current working directory path. """ return os.getcwd() def magic_env(self, parameter_s=''): """ List environment variables. """ return os.environ.data #!/usr/bin/env python # Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import posixpath import unittest from extensions_paths import PUBLIC_TEMPLATES, SERVER2 from local_file_system import LocalFileSystem from test_file_system import TestFileSystem from object_store_creator import ObjectStoreCreator from path_canonicalizer import PathCanonicalizer from special_paths import SITE_VERIFICATION_FILE class PathCanonicalizerTest(unittest.TestCase): def setUp(self): self._path_canonicalizer = PathCanonicalizer( LocalFileSystem.Create(PUBLIC_TEMPLATES), ObjectStoreCreator.ForTest(), ('.html', '.md')) def testSpecifyCorrectly(self): self._AssertIdentity('extensions/browserAction') self._AssertIdentity('extensions/storage') self._AssertIdentity('extensions/blah') self._AssertIdentity('extensions/index') self._AssertIdentity('extensions/whats_new') self._AssertIdentity('apps/storage') self._AssertIdentity('apps/bluetooth') self._AssertIdentity('apps/blah') self._AssertIdentity('apps/tags/webview') def testSpecifyIncorrectly(self): self._AssertRedirectWithDefaultExtensions( 'extensions/browserAction', 'apps/browserAction') self._AssertRedirectWithDefaultExtensions( 'extensions/browserAction', 'apps/extensions/browserAction') self._AssertRedirectWithDefaultExtensions( 'apps/bluetooth', 'extensions/bluetooth') self._AssertRedirectWithDefaultExtensions( 'apps/bluetooth', 'extensions/apps/bluetooth') self._AssertRedirectWithDefaultExtensions( 'extensions/index', 'apps/index') self._AssertRedirectWithDefaultExtensions( 'extensions/browserAction', 'static/browserAction') self._AssertRedirectWithDefaultExtensions( 'apps/tags/webview', 'apps/webview') self._AssertRedirectWithDefaultExtensions( 'apps/tags/webview', 'extensions/webview') self._AssertRedirectWithDefaultExtensions( 'apps/tags/webview', 'extensions/tags/webview') # These are a little trickier because storage.html is in both directories. # They must canonicalize to the closest match. self._AssertRedirectWithDefaultExtensions( 'extensions/storage', 'extensions/apps/storage') self._AssertRedirectWithDefaultExtensions( 'apps/storage', 'apps/extensions/storage') def testUnspecified(self): self._AssertRedirectWithDefaultExtensions( 'extensions/browserAction', 'browserAction') self._AssertRedirectWithDefaultExtensions( 'apps/bluetooth', 'bluetooth') # Default happens to be apps because it's first alphabetically. self._AssertRedirectWithDefaultExtensions( 'apps/storage', 'storage') # Nonexistent APIs should be left alone. self._AssertIdentity('blah.html') def testDirectories(self): # Directories can be canonicalized too! 
self._AssertIdentity('apps/') self._AssertIdentity('apps/tags/') self._AssertIdentity('extensions/') # No trailing slash should be treated as files not directories, at least # at least according to PathCanonicalizer. self._AssertRedirect('extensions/apps', 'apps') self._AssertRedirect('extensions', 'extensions') # Just as tolerant of spelling mistakes. self._AssertRedirect('apps/', 'Apps/') self._AssertRedirect('apps/tags/', 'Apps/TAGS/') self._AssertRedirect('extensions/', 'Extensions/') # Find directories in the correct place. self._AssertRedirect('apps/tags/', 'tags/') self._AssertRedirect('apps/tags/', 'extensions/tags/') def testSpellingErrors(self): for spelme in ('browseraction', 'browseraction.htm', 'BrowserAction', 'BrowserAction.html', 'browseraction.html', 'Browseraction', 'browser-action', 'Browser.action.html', 'browser_action', 'browser-action.html', 'Browser_Action.html'): self._AssertRedirect('extensions/browserAction', spelme) self._AssertRedirect('extensions/browserAction', 'extensions/%s' % spelme) self._AssertRedirect('extensions/browserAction', 'apps/%s' % spelme) def testNonDefaultExtensions(self): # The only example currently of a file with a non-default extension is # the redirects.json file. That shouldn't have its extension stripped since # it's not in the default extensions. self._AssertIdentity('redirects.json') self._AssertRedirect('redirects.json', 'redirects') self._AssertRedirect('redirects.json', 'redirects.html') self._AssertRedirect('redirects.json', 'redirects.js') self._AssertRedirect('redirects.json', 'redirects.md') def testSiteVerificationFile(self): # The site verification file should not redirect. self._AssertIdentity(SITE_VERIFICATION_FILE) self._AssertRedirect(SITE_VERIFICATION_FILE, posixpath.splitext(SITE_VERIFICATION_FILE)[0]) def testDotSeparated(self): self._AssertIdentity('extensions/devtools_inspectedWindow') self._AssertRedirect('extensions/devtools_inspectedWindow', 'extensions/devtools.inspectedWindow') def testUnderscoreSeparated(self): file_system = TestFileSystem({ 'pepper_dev': { 'c': { 'index.html': '' } }, 'pepper_stable': { 'c': { 'index.html': '' } } }) self._path_canonicalizer = PathCanonicalizer( file_system, ObjectStoreCreator.ForTest(), ('.html', '.md')) self._AssertIdentity('pepper_stable/c/index') self._AssertRedirect('pepper_stable/c/index', 'pepper_stable/c/index.html') def _AssertIdentity(self, path): self._AssertRedirect(path, path) def _AssertRedirect(self, to, from_): self.assertEqual(to, self._path_canonicalizer.Canonicalize(from_)) def _AssertRedirectWithDefaultExtensions(self, to, from_): for ext in ('', '.html', '.md'): self._AssertRedirect( to, self._path_canonicalizer.Canonicalize(from_ + ext)) if __name__ == '__main__': unittest.main() from __future__ import unicode_literals from operator import attrgetter from django.db import connection from django.db.models import Value from django.db.models.functions import Lower from django.test import ( TestCase, override_settings, skipIfDBFeature, skipUnlessDBFeature, ) from .models import ( Country, NoFields, Pizzeria, ProxyCountry, ProxyMultiCountry, ProxyMultiProxyCountry, ProxyProxyCountry, Restaurant, State, TwoFields, ) class BulkCreateTests(TestCase): def setUp(self): self.data = [ Country(name="United States of America", iso_two_letter="US"), Country(name="The Netherlands", iso_two_letter="NL"), Country(name="Germany", iso_two_letter="DE"), Country(name="Czech Republic", iso_two_letter="CZ") ] def test_simple(self): created = 
Country.objects.bulk_create(self.data) self.assertEqual(len(created), 4) self.assertQuerysetEqual(Country.objects.order_by("-name"), [ "United States of America", "The Netherlands", "Germany", "Czech Republic" ], attrgetter("name")) created = Country.objects.bulk_create([]) self.assertEqual(created, []) self.assertEqual(Country.objects.count(), 4) @skipUnlessDBFeature('has_bulk_insert') def test_efficiency(self): with self.assertNumQueries(1): Country.objects.bulk_create(self.data) def test_multi_table_inheritance_unsupported(self): expected_message = "Can't bulk create a multi-table inherited model" with self.assertRaisesMessage(ValueError, expected_message): Pizzeria.objects.bulk_create([ Pizzeria(name="The Art of Pizza"), ]) with self.assertRaisesMessage(ValueError, expected_message): ProxyMultiCountry.objects.bulk_create([ ProxyMultiCountry(name="Fillory", iso_two_letter="FL"), ]) with self.assertRaisesMessage(ValueError, expected_message): ProxyMultiProxyCountry.objects.bulk_create([ ProxyMultiProxyCountry(name="Fillory", iso_two_letter="FL"), ]) def test_proxy_inheritance_supported(self): ProxyCountry.objects.bulk_create([ ProxyCountry(name="Qwghlm", iso_two_letter="QW"), Country(name="Tortall", iso_two_letter="TA"), ]) self.assertQuerysetEqual(ProxyCountry.objects.all(), { "Qwghlm", "Tortall" }, attrgetter("name"), ordered=False) ProxyProxyCountry.objects.bulk_create([ ProxyProxyCountry(name="Netherlands", iso_two_letter="NT"), ]) self.assertQuerysetEqual(ProxyProxyCountry.objects.all(), { "Qwghlm", "Tortall", "Netherlands", }, attrgetter("name"), ordered=False) def test_non_auto_increment_pk(self): State.objects.bulk_create([ State(two_letter_code=s) for s in ["IL", "NY", "CA", "ME"] ]) self.assertQuerysetEqual(State.objects.order_by("two_letter_code"), [ "CA", "IL", "ME", "NY", ], attrgetter("two_letter_code")) @skipUnlessDBFeature('has_bulk_insert') def test_non_auto_increment_pk_efficiency(self): with self.assertNumQueries(1): State.objects.bulk_create([ State(two_letter_code=s) for s in ["IL", "NY", "CA", "ME"] ]) self.assertQuerysetEqual(State.objects.order_by("two_letter_code"), [ "CA", "IL", "ME", "NY", ], attrgetter("two_letter_code")) @skipIfDBFeature('allows_auto_pk_0') def test_zero_as_autoval(self): """ Zero as id for AutoField should raise exception in MySQL, because MySQL does not allow zero for automatic primary key. """ valid_country = Country(name='Germany', iso_two_letter='DE') invalid_country = Country(id=0, name='Poland', iso_two_letter='PL') with self.assertRaises(ValueError): Country.objects.bulk_create([valid_country, invalid_country]) def test_batch_same_vals(self): # Sqlite had a problem where all the same-valued models were # collapsed to one insert. Restaurant.objects.bulk_create([ Restaurant(name='foo') for i in range(0, 2) ]) self.assertEqual(Restaurant.objects.count(), 2) def test_large_batch(self): with override_settings(DEBUG=True): connection.queries_log.clear() TwoFields.objects.bulk_create([ TwoFields(f1=i, f2=i + 1) for i in range(0, 1001) ]) self.assertEqual(TwoFields.objects.count(), 1001) self.assertEqual( TwoFields.objects.filter(f1__gte=450, f1__lte=550).count(), 101) self.assertEqual(TwoFields.objects.filter(f2__gte=901).count(), 101) @skipUnlessDBFeature('has_bulk_insert') def test_large_single_field_batch(self): # SQLite had a problem with more than 500 UNIONed selects in single # query. 
Restaurant.objects.bulk_create([ Restaurant() for i in range(0, 501) ]) @skipUnlessDBFeature('has_bulk_insert') def test_large_batch_efficiency(self): with override_settings(DEBUG=True): connection.queries_log.clear() TwoFields.objects.bulk_create([ TwoFields(f1=i, f2=i + 1) for i in range(0, 1001) ]) self.assertLess(len(connection.queries), 10) def test_large_batch_mixed(self): """ Test inserting a large batch with objects having primary key set mixed together with objects without PK set. """ with override_settings(DEBUG=True): connection.queries_log.clear() TwoFields.objects.bulk_create([ TwoFields(id=i if i % 2 == 0 else None, f1=i, f2=i + 1) for i in range(100000, 101000)]) self.assertEqual(TwoFields.objects.count(), 1000) # We can't assume much about the ID's created, except that the above # created IDs must exist. id_range = range(100000, 101000, 2) self.assertEqual(TwoFields.objects.filter(id__in=id_range).count(), 500) self.assertEqual(TwoFields.objects.exclude(id__in=id_range).count(), 500) @skipUnlessDBFeature('has_bulk_insert') def test_large_batch_mixed_efficiency(self): """ Test inserting a large batch with objects having primary key set mixed together with objects without PK set. """ with override_settings(DEBUG=True): connection.queries_log.clear() TwoFields.objects.bulk_create([ TwoFields(id=i if i % 2 == 0 else None, f1=i, f2=i + 1) for i in range(100000, 101000)]) self.assertLess(len(connection.queries), 10) def test_explicit_batch_size(self): objs = [TwoFields(f1=i, f2=i) for i in range(0, 4)] TwoFields.objects.bulk_create(objs, 2) self.assertEqual(TwoFields.objects.count(), len(objs)) TwoFields.objects.all().delete() TwoFields.objects.bulk_create(objs, len(objs)) self.assertEqual(TwoFields.objects.count(), len(objs)) def test_empty_model(self): NoFields.objects.bulk_create([NoFields() for i in range(2)]) self.assertEqual(NoFields.objects.count(), 2) @skipUnlessDBFeature('has_bulk_insert') def test_explicit_batch_size_efficiency(self): objs = [TwoFields(f1=i, f2=i) for i in range(0, 100)] with self.assertNumQueries(2): TwoFields.objects.bulk_create(objs, 50) TwoFields.objects.all().delete() with self.assertNumQueries(1): TwoFields.objects.bulk_create(objs, len(objs)) @skipUnlessDBFeature('has_bulk_insert') def test_bulk_insert_expressions(self): Restaurant.objects.bulk_create([ Restaurant(name="Sam's Shake Shack"), Restaurant(name=Lower(Value("Betty's Beetroot Bar"))) ]) bbb = Restaurant.objects.filter(name="betty's beetroot bar") self.assertEqual(bbb.count(), 1) """ The Spatial Reference class, represents OGR Spatial Reference objects. 
Example: >>> from django.contrib.gis.gdal import SpatialReference >>> srs = SpatialReference('WGS84') >>> print(srs) GEOGCS["WGS 84", DATUM["WGS_1984", SPHEROID["WGS 84",6378137,298.257223563, AUTHORITY["EPSG","7030"]], TOWGS84[0,0,0,0,0,0,0], AUTHORITY["EPSG","6326"]], PRIMEM["Greenwich",0, AUTHORITY["EPSG","8901"]], UNIT["degree",0.01745329251994328, AUTHORITY["EPSG","9122"]], AUTHORITY["EPSG","4326"]] >>> print(srs.proj) +proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs >>> print(srs.ellipsoid) (6378137.0, 6356752.3142451793, 298.25722356300003) >>> print(srs.projected, srs.geographic) False True >>> srs.import_epsg(32140) >>> print(srs.name) NAD83 / Texas South Central """ from ctypes import byref, c_char_p, c_int from enum import IntEnum from django.contrib.gis.gdal.base import GDALBase from django.contrib.gis.gdal.error import SRSException from django.contrib.gis.gdal.libgdal import GDAL_VERSION from django.contrib.gis.gdal.prototypes import srs as capi from django.utils.encoding import force_bytes, force_str class AxisOrder(IntEnum): TRADITIONAL = 0 AUTHORITY = 1 class SpatialReference(GDALBase): """ A wrapper for the OGRSpatialReference object. According to the GDAL Web site, the SpatialReference object "provide[s] services to represent coordinate systems (projections and datums) and to transform between them." """ destructor = capi.release_srs def __init__(self, srs_input='', srs_type='user', axis_order=None): """ Create a GDAL OSR Spatial Reference object from the given input. The input may be string of OGC Well Known Text (WKT), an integer EPSG code, a PROJ string, and/or a projection "well known" shorthand string (one of 'WGS84', 'WGS72', 'NAD27', 'NAD83'). """ if not isinstance(axis_order, (type(None), AxisOrder)): raise ValueError( 'SpatialReference.axis_order must be an AxisOrder instance.' ) self.axis_order = axis_order or AxisOrder.TRADITIONAL if srs_type == 'wkt': self.ptr = capi.new_srs(c_char_p(b'')) self.import_wkt(srs_input) if self.axis_order == AxisOrder.TRADITIONAL and GDAL_VERSION >= (3, 0): capi.set_axis_strategy(self.ptr, self.axis_order) elif self.axis_order != AxisOrder.TRADITIONAL and GDAL_VERSION < (3, 0): raise ValueError('%s is not supported in GDAL < 3.0.' % self.axis_order) return elif isinstance(srs_input, str): try: # If SRID is a string, e.g., '4326', then make acceptable # as user input. srid = int(srs_input) srs_input = 'EPSG:%d' % srid except ValueError: pass elif isinstance(srs_input, int): # EPSG integer code was input. srs_type = 'epsg' elif isinstance(srs_input, self.ptr_type): srs = srs_input srs_type = 'ogr' else: raise TypeError('Invalid SRS type "%s"' % srs_type) if srs_type == 'ogr': # Input is already an SRS pointer. srs = srs_input else: # Creating a new SRS pointer, using the string buffer. buf = c_char_p(b'') srs = capi.new_srs(buf) # If the pointer is NULL, throw an exception. if not srs: raise SRSException('Could not create spatial reference from: %s' % srs_input) else: self.ptr = srs if self.axis_order == AxisOrder.TRADITIONAL and GDAL_VERSION >= (3, 0): capi.set_axis_strategy(self.ptr, self.axis_order) elif self.axis_order != AxisOrder.TRADITIONAL and GDAL_VERSION < (3, 0): raise ValueError('%s is not supported in GDAL < 3.0.' % self.axis_order) # Importing from either the user input string or an integer SRID. 
if srs_type == 'user': self.import_user_input(srs_input) elif srs_type == 'epsg': self.import_epsg(srs_input) def __getitem__(self, target): """ Return the value of the given string attribute node, None if the node doesn't exist. Can also take a tuple as a parameter, (target, child), where child is the index of the attribute in the WKT. For example: >>> wkt = 'GEOGCS["WGS 84", DATUM["WGS_1984, ... AUTHORITY["EPSG","4326"]]' >>> srs = SpatialReference(wkt) # could also use 'WGS84', or 4326 >>> print(srs['GEOGCS']) WGS 84 >>> print(srs['DATUM']) WGS_1984 >>> print(srs['AUTHORITY']) EPSG >>> print(srs['AUTHORITY', 1]) # The authority value 4326 >>> print(srs['TOWGS84', 4]) # the fourth value in this wkt 0 >>> print(srs['UNIT|AUTHORITY']) # For the units authority, have to use the pipe symbole. EPSG >>> print(srs['UNIT|AUTHORITY', 1]) # The authority value for the units 9122 """ if isinstance(target, tuple): return self.attr_value(*target) else: return self.attr_value(target) def __str__(self): "Use 'pretty' WKT." return self.pretty_wkt # #### SpatialReference Methods #### def attr_value(self, target, index=0): """ The attribute value for the given target node (e.g. 'PROJCS'). The index keyword specifies an index of the child node to return. """ if not isinstance(target, str) or not isinstance(index, int): raise TypeError return capi.get_attr_value(self.ptr, force_bytes(target), index) def auth_name(self, target): "Return the authority name for the given string target node." return capi.get_auth_name(self.ptr, force_bytes(target)) def auth_code(self, target): "Return the authority code for the given string target node." return capi.get_auth_code(self.ptr, force_bytes(target)) def clone(self): "Return a clone of this SpatialReference object." return SpatialReference(capi.clone_srs(self.ptr), axis_order=self.axis_order) def from_esri(self): "Morph this SpatialReference from ESRI's format to EPSG." capi.morph_from_esri(self.ptr) def identify_epsg(self): """ This method inspects the WKT of this SpatialReference, and will add EPSG authority nodes where an EPSG identifier is applicable. """ capi.identify_epsg(self.ptr) def to_esri(self): "Morph this SpatialReference to ESRI's format." capi.morph_to_esri(self.ptr) def validate(self): "Check to see if the given spatial reference is valid." capi.srs_validate(self.ptr) # #### Name & SRID properties #### @property def name(self): "Return the name of this Spatial Reference." if self.projected: return self.attr_value('PROJCS') elif self.geographic: return self.attr_value('GEOGCS') elif self.local: return self.attr_value('LOCAL_CS') else: return None @property def srid(self): "Return the SRID of top-level authority, or None if undefined." try: return int(self.attr_value('AUTHORITY', 1)) except (TypeError, ValueError): return None # #### Unit Properties #### @property def linear_name(self): "Return the name of the linear units." units, name = capi.linear_units(self.ptr, byref(c_char_p())) return name @property def linear_units(self): "Return the value of the linear units." units, name = capi.linear_units(self.ptr, byref(c_char_p())) return units @property def angular_name(self): "Return the name of the angular units." units, name = capi.angular_units(self.ptr, byref(c_char_p())) return name @property def angular_units(self): "Return the value of the angular units." units, name = capi.angular_units(self.ptr, byref(c_char_p())) return units @property def units(self): """ Return a 2-tuple of the units value and the units name. 
Automatically determine whether to return the linear or angular units. """ units, name = None, None if self.projected or self.local: units, name = capi.linear_units(self.ptr, byref(c_char_p())) elif self.geographic: units, name = capi.angular_units(self.ptr, byref(c_char_p())) if name is not None: name = force_str(name) return (units, name) # #### Spheroid/Ellipsoid Properties #### @property def ellipsoid(self): """ Return a tuple of the ellipsoid parameters: (semimajor axis, semiminor axis, and inverse flattening) """ return (self.semi_major, self.semi_minor, self.inverse_flattening) @property def semi_major(self): "Return the Semi Major Axis for this Spatial Reference." return capi.semi_major(self.ptr, byref(c_int())) @property def semi_minor(self): "Return the Semi Minor Axis for this Spatial Reference." return capi.semi_minor(self.ptr, byref(c_int())) @property def inverse_flattening(self): "Return the Inverse Flattening for this Spatial Reference." return capi.invflattening(self.ptr, byref(c_int())) # #### Boolean Properties #### @property def geographic(self): """ Return True if this SpatialReference is geographic (root node is GEOGCS). """ return bool(capi.isgeographic(self.ptr)) @property def local(self): "Return True if this SpatialReference is local (root node is LOCAL_CS)." return bool(capi.islocal(self.ptr)) @property def projected(self): """ Return True if this SpatialReference is a projected coordinate system (root node is PROJCS). """ return bool(capi.isprojected(self.ptr)) # #### Import Routines ##### def import_epsg(self, epsg): "Import the Spatial Reference from the EPSG code (an integer)." capi.from_epsg(self.ptr, epsg) def import_proj(self, proj): """Import the Spatial Reference from a PROJ string.""" capi.from_proj(self.ptr, proj) def import_user_input(self, user_input): "Import the Spatial Reference from the given user input string." capi.from_user_input(self.ptr, force_bytes(user_input)) def import_wkt(self, wkt): "Import the Spatial Reference from OGC WKT (string)" capi.from_wkt(self.ptr, byref(c_char_p(force_bytes(wkt)))) def import_xml(self, xml): "Import the Spatial Reference from an XML string." capi.from_xml(self.ptr, xml) # #### Export Properties #### @property def wkt(self): "Return the WKT representation of this Spatial Reference." return capi.to_wkt(self.ptr, byref(c_char_p())) @property def pretty_wkt(self, simplify=0): "Return the 'pretty' representation of the WKT." return capi.to_pretty_wkt(self.ptr, byref(c_char_p()), simplify) @property def proj(self): """Return the PROJ representation for this Spatial Reference.""" return capi.to_proj(self.ptr, byref(c_char_p())) @property def proj4(self): "Alias for proj()." return self.proj @property def xml(self, dialect=''): "Return the XML representation of this Spatial Reference." return capi.to_xml(self.ptr, byref(c_char_p()), force_bytes(dialect)) class CoordTransform(GDALBase): "The coordinate system transformation object." destructor = capi.destroy_ct def __init__(self, source, target): "Initialize on a source and target SpatialReference objects." 
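# Hedged usage sketch, separate from the class defined here: a CoordTransform is
# built from two SpatialReference objects and is normally applied through
# OGRGeometry.transform() from the same GDAL package. The EPSG codes are only
# examples (4326 = WGS 84, 3857 = Web Mercator), and the point coordinates are
# arbitrary.
from django.contrib.gis.gdal import OGRGeometry

wgs84 = SpatialReference(4326)
web_mercator = SpatialReference(3857)
ct = CoordTransform(wgs84, web_mercator)

pnt = OGRGeometry('POINT(-104.609252 38.255001)', wgs84)
pnt.transform(ct)  # pnt coordinates are now expressed in metres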
if not isinstance(source, SpatialReference) or not isinstance(target, SpatialReference): raise TypeError('source and target must be of type SpatialReference') self.ptr = capi.new_ct(source._ptr, target._ptr) self._srs1_name = source.name self._srs2_name = target.name def __str__(self): return 'Transform from "%s" to "%s"' % (self._srs1_name, self._srs2_name) # -*- coding: utf-8 -*- # sced (SuperCollider mode for gedit) # Copyright 2009 Artem Popov and other contributors (see AUTHORS) # # sced is free software: # you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see . import gobject import pango import gtk class Logger: def __init__(self, pipe, log_view): self.__log_view = log_view tag_table = log_view.buffer.get_tag_table() self.__tag = gtk.TextTag() self.__good_tag = gobject.new(gtk.TextTag, weight = pango.WEIGHT_BOLD, foreground = "darkgreen", paragraph_background = "lightgreen") self.__bad_tag = gobject.new(gtk.TextTag, weight = pango.WEIGHT_BOLD, foreground = "darkred", paragraph_background = "pink") # for warnings, etc. self.__ugly_tag = gobject.new(gtk.TextTag, #weight = pango.WEIGHT_BOLD, foreground = "red") tag_table.add(self.__tag) tag_table.add(self.__good_tag) tag_table.add(self.__bad_tag) tag_table.add(self.__ugly_tag) self.__watch_id = gobject.io_add_watch(pipe, gobject.IO_IN | gobject.IO_PRI | gobject.IO_ERR | gobject.IO_HUP, self.__on_output) def __on_output(self, source, condition): s = source.readline() if s == '': self.__append_to_buffer("EOF") return False # FIXME: A workaround for a mac character self.__append_to_buffer(unicode(s, 'mac_latin2')) if condition & gobject.IO_ERR: s = source.read() # can safely read until EOF here self.__append_to_buffer(unicode(s, 'mac_latin2')) return False elif condition & gobject.IO_HUP: s = source.read() # can safely read until EOF here self.__append_to_buffer(unicode(s, 'mac_latin2')) return False elif condition != 1: return False return True def __append_to_buffer(self, text): buffer = self.__log_view.buffer if text.startswith("ERROR"): tags = self.__bad_tag elif text.startswith("WARNING") or text.startswith("FAILURE"): tags = self.__ugly_tag elif text.startswith("StartUp done."): tags = self.__good_tag else: tags = self.__tag buffer.insert_with_tags(buffer.get_end_iter(), text.rstrip(), tags) buffer.insert(buffer.get_end_iter(), "\n") buffer.place_cursor(buffer.get_end_iter()) self.__log_view.view.scroll_mark_onscreen(buffer.get_insert()) # only required for thread-based implementation # return False def stop(self): gobject.source_remove(self.__watch_id) from django.shortcuts import render from django.conf import settings from django.core.mail import send_mail from django.shortcuts import render from Internet_store.products.models import ProductFeatured, Product def home(request): title = 'Sign up now!' 
featured_image = ProductFeatured.objects.filter(active=True).first() products = Product.objects.all().order_by('?')[:6] featured_products = Product.objects.all().order_by('?')[:6] context = { 'title': title, 'featured_image': featured_image, 'products': products, 'featured_products': featured_products, } # if form.is_valid(): # # form.save() # # print request.POST['email'] #not recommended # instance = form.save(commit=False) # # full_name = form.cleaned_data.get("full_name") # if not full_name: # full_name = "New full name" # instance.full_name = full_name # # if not instance.full_name: # # instance.full_name = "Justin" # instance.save() # context = { # "title": "Thank you" # } return render(request, 'pages/home.html', context) # Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Class for parsing metadata about extension samples.""" import locale import os import os.path import re import hashlib import zipfile import sys try: import json except ImportError: import simplejson as json _script_path = os.path.realpath(__file__) sys.path.insert(0, os.path.normpath(_script_path + "/../../../../../../tools")) import json_comment_eater # Make sure we get consistent string sorting behavior by explicitly using the # default C locale. locale.setlocale(locale.LC_ALL, 'C') import sys _script_path = os.path.realpath(__file__) _build_dir = os.path.dirname(_script_path) _base_dir = os.path.normpath(_build_dir + "/..") sys.path.insert(0, os.path.normpath(_base_dir + "/../../../../tools/json_schema_compiler")) import idl_schema def sorted_walk(path): """ A version of os.walk that yields results in order sorted by name. This is to prevent spurious docs changes due to os.walk returning items in a filesystem dependent order (by inode creation time, etc). """ for base, dirs, files in os.walk(path): dirs.sort() files.sort() yield base, dirs, files def parse_json_file(path, encoding="utf-8"): """ Load the specified file and parse it as JSON. Args: path: Path to a file containing JSON-encoded data. encoding: Encoding used in the file. Defaults to utf-8. Returns: A Python object representing the data encoded in the file. Raises: Exception: If the file could not be read or its contents could not be parsed as JSON data. """ try: json_file = open(path, 'r') except IOError, msg: raise Exception("Failed to read the file at %s: %s" % (path, msg)) try: json_str = json_file.read() json_obj = json.loads(json_comment_eater.Nom(json_str), encoding) except ValueError, msg: raise Exception("Failed to parse JSON out of file %s: %s" % (path, msg)) finally: json_file.close() return json_obj def parse_idl_file(path): """ Load the specified file and parse it as IDL. Args: path: Path to a file containing JSON-encoded data. """ api_def = idl_schema.Load(path) return api_def def write_json_to_file(manifest, path): """ Writes the contents of this manifest file as a JSON-encoded text file. Args: manifest: The manifest structure to write. path: The path to write the manifest file to. Raises: Exception: If the file could not be written. """ manifest_text = json.dumps(manifest, indent=2, sort_keys=True, separators=(',', ': ')) output_path = os.path.realpath(path) try: output_file = open(output_path, 'w') except IOError, msg: raise Exception("Failed to write the samples manifest file." "The specific error was: %s." 
% msg) output_file.write(manifest_text) output_file.close() class ApiManifest(object): """ Represents the list of API methods contained in the extension API JSON """ def __init__(self, json_paths, idl_paths): """ Read the supplied json files and idl files and parse their contents. Args: json_paths: Array of paths to .json API schemas. idl_paths: Array of paths to .idl API schemas. """ self._manifest = [] self._temporary_json_files = [] for path in json_paths: self._manifest.extend(parse_json_file(path)) for path in idl_paths: module = parse_idl_file(path) json_path = os.path.realpath(path.replace('.idl', '.json')) self._temporary_json_files.append((module, json_path)) self._manifest.extend(module) def _parseModuleDocLinksByKeyTypes(self, module, key): """ Given a specific API module, returns a dict of methods mapped to documentation URLs. Args: module: The data in the extension API JSON for a single module. key: A key belonging to _MODULE_DOC_KEYS to determine which set of methods to parse, and what kind of documentation URL to generate. Returns: A dict of extension methods mapped to file and hash URL parts for the corresponding documentation links, like: { "chrome.types.clear": "types.html#method-ChromeSetting-clear", "chrome.types.get": "types.html#method-ChromeSetting-get" } If the API namespace is defined "nodoc" or "internal" then an empty dict is returned. """ api_dict = {} namespace = module['namespace'] if self._disableDocs(module): return api_dict if not module.has_key('types'): return api_dict module_types = module['types'] for module_type in module_types: if not module_type.has_key(key): continue for method in module_type[key]: if self._disableDocs(method): continue method_name = 'chrome.%s.%s.%s' %\ (namespace, module_type['id'], method['name']) hashprefix = 'method' if key == 'events': hashprefix = 'event' api_dict[method_name] = '%s.html#%s-%s-%s' %\ (namespace, hashprefix, module_type['id'], method['name']) return api_dict def _parseModuleDocLinksByKey(self, module, key): """ Given a specific API module, returns a dict of methods or events mapped to documentation URLs. Args: module: The data in the extension API JSON for a single module. key: A key belonging to _MODULE_DOC_KEYS to determine which set of methods to parse, and what kind of documentation URL to generate. Returns: A dict of extension methods mapped to file and hash URL parts for the corresponding documentation links, like: { "chrome.tabs.remove": "tabs.html#method-remove", "chrome.tabs.onDetached" : "tabs.html#event-onDetatched" } If the API namespace is defined "nodoc" or "internal" then an empty dict is returned. Raises: Exception: If the key supplied is not a member of _MODULE_DOC_KEYS. """ methods = [] api_dict = {} namespace = module['namespace'] if self._disableDocs(module): return api_dict if module.has_key(key): methods.extend(module[key]) for method in methods: if self._disableDocs(method): continue method_name = 'chrome.%s.%s' % (namespace, method['name']) hashprefix = 'method' if key == 'events': hashprefix = 'event' api_dict[method_name] = '%s.html#%s-%s' %\ (namespace, hashprefix, method['name']) return api_dict def getModuleNames(self): """ Returns the names of individual modules in the API. Returns: The namespace """ # Exclude modules with documentation disabled. 
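# Collect plain (byte) namespace strings into a set, skipping any module
# flagged 'nodoc' or 'internal' via _disableDocs() below.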
return set(module['namespace'].encode() for module in self._manifest if not self._disableDocs(module)) def _disableDocs(self, obj): for key in ['nodoc', 'internal']: if key in obj and obj[key]: return True return False def getDocumentationLinks(self): """ Parses the extension API JSON manifest and returns a dict of all events and methods for every module, mapped to relative documentation links. Returns: A dict of methods/events => partial doc links for every module. """ api_dict = {} for module in self._manifest: api_dict.update(self._parseModuleDocLinksByKey(module, 'functions')) api_dict.update(self._parseModuleDocLinksByKeyTypes(module, 'functions')) api_dict.update(self._parseModuleDocLinksByKey(module, 'events')) api_dict.update(self._parseModuleDocLinksByKeyTypes(module, 'events')) return api_dict def generateJSONFromIDL(self): """ Writes temporary .json files for every .idl file we have read, for use by the documentation generator. """ for (module, json_path) in self._temporary_json_files: if os.path.exists(json_path): print ("WARNING: Overwriting existing file '%s'" " with generated content." % (json_path)) write_json_to_file(module, json_path) def cleanupGeneratedFiles(self): """ Removes the temporary .json files we generated from .idl before. """ for (module, json_path) in self._temporary_json_files: os.remove(json_path) class SamplesManifest(object): """ Represents a manifest file containing information about the sample extensions available in the codebase. """ def __init__(self, base_sample_path, base_dir, api_manifest): """ Reads through the filesystem and obtains information about any Chrome extensions which exist underneath the specified folder. Args: base_sample_path: The directory under which to search for samples. base_dir: The base directory samples will be referenced from. api_manifest: An instance of the ApiManifest class, which will indicate which API methods are available. """ self._base_dir = base_dir manifest_paths = self._locateManifestsFromPath(base_sample_path) self._manifest_data = self._parseManifestData(manifest_paths, api_manifest) def _locateManifestsFromPath(self, path): """ Returns a list of paths to sample extension manifest.json files. Args: base_path: Base path in which to start the search. Returns: A list of paths below base_path pointing at manifest.json files. """ manifest_paths = [] for root, directories, files in sorted_walk(path): if 'manifest.json' in files: directories = [] # Don't go any further down this tree manifest_paths.append(os.path.join(root, 'manifest.json')) if '.svn' in directories: directories.remove('.svn') # Don't go into SVN metadata directories return manifest_paths def _parseManifestData(self, manifest_paths, api_manifest): """ Returns metadata about the sample extensions given their manifest paths. Args: manifest_paths: A list of paths to extension manifests api_manifest: An instance of the ApiManifest class, which will indicate which API methods are available. Returns: Manifest data containing a list of samples and available API methods. """ api_method_dict = api_manifest.getDocumentationLinks() api_methods = api_method_dict.keys() samples = [] for path in manifest_paths: sample = Sample(path, api_methods, self._base_dir) # Don't render hosted apps if sample.is_hosted_app() == False: samples.append(sample) def compareSamples(sample1, sample2): """ Compares two samples as a sort comparator, by name then path. 
""" value = cmp(sample1['name'].upper(), sample2['name'].upper()) if value == 0: value = cmp(sample1['path'], sample2['path']) return value samples.sort(compareSamples) manifest_data = {'samples': samples, 'api': api_method_dict} return manifest_data def writeToFile(self, path): """ Writes the contents of this manifest file as a JSON-encoded text file. Args: path: The path to write the samples manifest file to. """ write_json_to_file(self._manifest_data, path) def writeZippedSamples(self): """ For each sample in the current manifest, create a zip file with the sample contents in the sample's parent directory if not zip exists, or update the zip file if the sample has been updated. Returns: A set of paths representing zip files which have been modified. """ modified_paths = [] for sample in self._manifest_data['samples']: path = sample.write_zip() if path: modified_paths.append(path) return modified_paths class Sample(dict): """ Represents metadata about a Chrome extension sample. Extends dict so that it can be easily JSON serialized. """ def __init__(self, manifest_path, api_methods, base_dir): """ Initializes a Sample instance given a path to a manifest. Args: manifest_path: A filesystem path to a manifest file. api_methods: A list of strings containing all possible Chrome extension API calls. base_dir: The base directory where this sample will be referenced from - paths will be made relative to this directory. """ self._base_dir = base_dir self._manifest_path = manifest_path self._manifest = parse_json_file(self._manifest_path) self._locale_data = self._parse_locale_data() # The following calls set data which will be serialized when converting # this object to JSON. source_data = self._parse_source_data(api_methods) self['api_calls'] = source_data['api_calls'] self['source_files'] = source_data['source_files'] self['source_hash'] = source_data['source_hash'] self['name'] = self._parse_name() self['description'] = self._parse_description() self['icon'] = self._parse_icon() self['features'] = self._parse_features() self['protocols'] = self._parse_protocols() self['path'] = self._get_relative_path() self['search_string'] = self._get_search_string() self['id'] = hashlib.sha1(self['path']).hexdigest() self['zip_path'] = self._get_relative_zip_path() self['crx_path'] = self._get_relative_crx_path() self['packaged_app'] = self.is_packaged_app() _FEATURE_ATTRIBUTES = ( 'browser_action', 'page_action', 'background_page', 'options_page', 'plugins', 'theme', 'chrome_url_overrides', 'devtools_page' ) """ Attributes that will map to "features" if their corresponding key is present in the extension manifest. """ _SOURCE_FILE_EXTENSIONS = ('.html', '.json', '.js', '.css', '.htm') """ File extensions to files which may contain source code.""" _ENGLISH_LOCALES = ['en_US', 'en', 'en_GB'] """ Locales from which translations may be used in the sample gallery. """ def _get_localized_manifest_value(self, key): """ Returns a localized version of the requested manifest value. Args: key: The manifest key whose value the caller wants translated. Returns: If the supplied value exists and contains a ___MSG_token___ value, this method will resolve the appropriate translation and return the result. If no token exists, the manifest value will be returned. If the key does not exist, an empty string will be returned. Raises: Exception: If the localized value for the given token could not be found. 
""" if self._manifest.has_key(key): if self._manifest[key][:6] == '__MSG_': try: return self._get_localized_value(self._manifest[key]) except Exception, msg: raise Exception("Could not translate manifest value for key %s: %s" % (key, msg)) else: return self._manifest[key] else: return '' def _get_localized_value(self, message_token): """ Returns the localized version of the requested MSG bundle token. Args: message_token: A message bundle token like __MSG_extensionName__. Returns: The translated text corresponding to the token, with any placeholders automatically resolved and substituted in. Raises: Exception: If a message bundle token is not found in the translations. """ placeholder_pattern = re.compile('\$(\w*)\$') token = message_token[6:-2] if self._locale_data.has_key(token): message = self._locale_data[token]['message'] placeholder_match = placeholder_pattern.search(message) if placeholder_match: # There are placeholders in the translation - substitute them. placeholder_name = placeholder_match.group(1) placeholders = self._locale_data[token]['placeholders'] if placeholders.has_key(placeholder_name.lower()): placeholder_value = placeholders[placeholder_name.lower()]['content'] placeholder_token = '$%s$' % placeholder_name message = message.replace(placeholder_token, placeholder_value) return message else: raise Exception('Could not find localized string: %s' % message_token) def _get_relative_path(self): """ Returns a relative path from the supplied base dir to the manifest dir. This method is used because we may not be able to rely on os.path.relpath which was introduced in Python 2.6 and only works on Windows and Unix. Since the example extensions should always be subdirectories of the base sample manifest path, we can get a relative path through a simple string substitution. Returns: A relative directory path from the sample manifest's directory to the directory containing this sample's manifest.json. """ real_manifest_path = os.path.realpath(self._manifest_path) real_base_path = os.path.realpath(self._base_dir) return real_manifest_path.replace(real_base_path, '')\ .replace('manifest.json', '')[1:] def _get_relative_zip_path(self): """ Returns a relative path from the base dir to the sample's zip file. Intended for locating the zip file for the sample in the samples manifest. Returns: A relative directory path form the sample manifest's directory to this sample's zip file. """ zip_filename = self._get_zip_filename() zip_relpath = os.path.dirname(os.path.dirname(self._get_relative_path())) return os.path.join(zip_relpath, zip_filename) def _get_relative_crx_path(self): """ Returns a relative path from the base dir to the sample's crx file. Note: .crx files are provided manually and may or may not exist. Returns: If the .crx file exists, the relative directory path from the sample's manifest directory to this sample's .crx files. Otherwise, None. """ crx_filename = self._get_crx_filename() crx_relroot = os.path.dirname(os.path.dirname(self._get_relative_path())) crx_relpath = os.path.join(crx_relroot, crx_filename) crx_absroot = os.path.dirname(os.path.dirname(self._manifest_path)) crx_abspath = os.path.join(crx_absroot, crx_filename) return os.path.isfile(crx_abspath) and crx_relpath or None def _get_search_string(self): """ Constructs a string to be used when searching the samples list. 
To make the implementation of the JavaScript-based search very direct, a string is constructed containing the title, description, API calls, and features that this sample uses, and is converted to uppercase. This makes JavaScript sample searching very fast and easy to implement. Returns: An uppercase string containing information to match on for searching samples on the client. """ search_terms = [ self['name'], self['description'], ] search_terms.extend(self['features']) search_terms.extend(self['api_calls']) search_string = ' '.join(search_terms).replace('"', '')\ .replace('\'', '')\ .upper() return search_string def _get_zip_filename(self): """ Returns the filename to be used for a generated zip of the sample. Returns: A string in the form of ".zip" where is the name of the directory containing this sample's manifest.json. """ sample_path = os.path.realpath(os.path.dirname(self._manifest_path)) sample_dirname = os.path.basename(sample_path) return "%s.zip" % sample_dirname def _get_crx_filename(self): """ Returns the filename to be used for a generated zip of the sample. Returns: A string in the form of ".zip" where is the name of the directory containing this sample's manifest.json. """ sample_path = os.path.realpath(os.path.dirname(self._manifest_path)) sample_dirname = os.path.basename(sample_path) return "%s.crx" % sample_dirname def _parse_description(self): """ Returns a localized description of the extension. Returns: A localized version of the sample's description. """ return self._get_localized_manifest_value('description') def _parse_features(self): """ Returns a list of features the sample uses. Returns: A list of features the extension uses, as determined by self._FEATURE_ATTRIBUTES. """ features = set() for feature_attr in self._FEATURE_ATTRIBUTES: if self._manifest.has_key(feature_attr): features.add(feature_attr) if self._manifest.has_key('background'): features.add('background_page') if self._uses_popup(): features.add('popup') if self._manifest.has_key('permissions'): for permission in self._manifest['permissions']: split = permission.split('://') if (len(split) == 1): features.add(split[0]) return sorted(features) def _parse_icon(self): """ Returns the path to the 128px icon for this sample. Returns: The path to the 128px icon if defined in the manifest, None otherwise. """ if (self._manifest.has_key('icons') and self._manifest['icons'].has_key('128')): return self._manifest['icons']['128'] else: return None def _parse_locale_data(self): """ Parses this sample's locale data into a dict. Because the sample gallery is in English, this method only looks for translations as defined by self._ENGLISH_LOCALES. Returns: A dict containing the translation keys and corresponding English text for this extension. Raises: Exception: If the messages file cannot be read, or if it is improperly formatted JSON. 
""" en_messages = {} extension_dir_path = os.path.dirname(self._manifest_path) for locale in self._ENGLISH_LOCALES: en_messages_path = os.path.join(extension_dir_path, '_locales', locale, 'messages.json') if (os.path.isfile(en_messages_path)): break if (os.path.isfile(en_messages_path)): try: en_messages_file = open(en_messages_path, 'r') except IOError, msg: raise Exception("Failed to read %s: %s" % (en_messages_path, msg)) en_messages_contents = en_messages_file.read() en_messages_file.close() try: en_messages = json.loads(en_messages_contents) except ValueError, msg: raise Exception("File %s has a syntax error: %s" % (en_messages_path, msg)) return en_messages def _parse_name(self): """ Returns a localized name for the extension. Returns: A localized version of the sample's name. """ return self._get_localized_manifest_value('name') def _parse_protocols(self): """ Returns a list of protocols this extension requests permission for. Returns: A list of every unique protocol listed in the manifest's permssions. """ protocols = [] if self._manifest.has_key('permissions'): for permission in self._manifest['permissions']: split = permission.split('://') if (len(split) == 2) and (split[0] not in protocols): protocols.append(split[0] + "://") return protocols def _parse_source_data(self, api_methods): """ Iterates over the sample's source files and parses data from them. Parses any files in the sample directory with known source extensions (as defined in self._SOURCE_FILE_EXTENSIONS). For each file, this method: 1. Stores a relative path from the manifest.json directory to the file. 2. Searches through the contents of the file for chrome.* API calls. 3. Calculates a SHA1 digest for the contents of the file. Args: api_methods: A list of strings containing the potential API calls the and the extension sample could be making. Raises: Exception: If any of the source files cannot be read. Returns: A dictionary containing the keys/values: 'api_calls' A sorted list of API calls the sample makes. 'source_files' A sorted list of paths to files the extension uses. 'source_hash' A hash of the individual file hashes. """ data = {} source_paths = [] source_hashes = [] api_calls = set() base_path = os.path.realpath(os.path.dirname(self._manifest_path)) for root, directories, files in sorted_walk(base_path): if '.svn' in directories: directories.remove('.svn') # Don't go into SVN metadata directories for file_name in files: ext = os.path.splitext(file_name)[1] if ext in self._SOURCE_FILE_EXTENSIONS: # Add the file path to the list of source paths. fullpath = os.path.realpath(os.path.join(root, file_name)) path = fullpath.replace(base_path, '')[1:] source_paths.append(path) # Read the contents and parse out API calls. try: code_file = open(fullpath, "r") except IOError, msg: raise Exception("Failed to read %s: %s" % (fullpath, msg)) code_contents = unicode(code_file.read(), errors="replace") code_file.close() for method in api_methods: if (code_contents.find(method) > -1): api_calls.add(method) # Get a hash of the file contents for zip file generation. hash = hashlib.sha1(code_contents.encode("ascii", "replace")) source_hashes.append(hash.hexdigest()) data['api_calls'] = sorted(api_calls) data['source_files'] = sorted(source_paths) data['source_hash'] = hashlib.sha1(''.join(source_hashes)).hexdigest() return data def _uses_background(self): """ Returns true if the extension defines a background page. 
""" return self._manifest.has_key('background_page') def _uses_browser_action(self): """ Returns true if the extension defines a browser action. """ return self._manifest.has_key('browser_action') def _uses_content_scripts(self): """ Returns true if the extension uses content scripts. """ return self._manifest.has_key('content_scripts') def _uses_options(self): """ Returns true if the extension defines an options page. """ return self._manifest.has_key('options_page') def _uses_page_action(self): """ Returns true if the extension uses a page action. """ return self._manifest.has_key('page_action') def _uses_popup(self): """ Returns true if the extension defines a popup on a page or browser action. """ has_b_popup = (self._uses_browser_action() and self._manifest['browser_action'].has_key('popup')) has_p_popup = (self._uses_page_action() and self._manifest['page_action'].has_key('popup')) return has_b_popup or has_p_popup def is_hosted_app(self): """ Returns true if the manifest has an app but not a local_path.""" return (self._manifest.has_key('app') and (not self._manifest['app'].has_key('launch') or not self._manifest['app']['launch'].has_key('local_path'))) def is_packaged_app(self): """ Returns true if the manifest has an app/launch/local_path section.""" return (self._manifest.has_key('app') and self._manifest['app'].has_key('launch') and self._manifest['app']['launch'].has_key('local_path')) def write_zip(self): """ Writes a zip file containing all of the files in this Sample's dir.""" sample_path = os.path.realpath(os.path.dirname(self._manifest_path)) sample_dirname = os.path.basename(sample_path) sample_parentpath = os.path.dirname(sample_path) zip_filename = self._get_zip_filename() zip_path = os.path.join(sample_parentpath, zip_filename) # we pass zip_manifest_path to zipfile.getinfo(), which chokes on # backslashes, so don't rely on os.path.join, use forward slash on # all platforms. zip_manifest_path = sample_dirname + '/manifest.json' zipfile.ZipFile.debug = 3 if os.path.isfile(zip_path): try: old_zip_file = zipfile.ZipFile(zip_path, 'r') except IOError, msg: raise Exception("Could not read zip at %s: %s" % (zip_path, msg)) except zipfile.BadZipfile, msg: raise Exception("File at %s is not a zip file: %s" % (zip_path, msg)) try: info = old_zip_file.getinfo(zip_manifest_path) hash = info.comment if hash == self['source_hash']: return None # Hashes match - no need to generate file except KeyError, msg: pass # The old zip file doesn't contain a hash - overwrite finally: old_zip_file.close() zip_file = zipfile.ZipFile(zip_path, 'w') try: for root, dirs, files in sorted_walk(sample_path): if '.svn' in dirs: dirs.remove('.svn') for file in files: # Absolute path to the file to be added. abspath = os.path.realpath(os.path.join(root, file)) # Relative path to store the file in under the zip. relpath = sample_dirname + abspath.replace(sample_path, "") zip_file.write(abspath, relpath) if file == 'manifest.json': info = zip_file.getinfo(zip_manifest_path) info.comment = self['source_hash'] except RuntimeError, msg: raise Exception("Could not write zip at %s: %s" % (zip_path, msg)) finally: zip_file.close() return self._get_relative_zip_path() # Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Presubmit script for Chromium WebUI resources. 
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for more details about the presubmit API built into gcl/git cl, and see http://www.chromium.org/developers/web-development-style-guide for the rules we're checking against here. """ # TODO(dbeam): Real CSS parser? https://github.com/danbeam/css-py/tree/css3 class CSSChecker(object): def __init__(self, input_api, output_api, file_filter=None): self.input_api = input_api self.output_api = output_api self.file_filter = file_filter def RunChecks(self): # We use this a lot, so make a nick name variable. re = self.input_api.re def _collapseable_hex(s): return (len(s) == 6 and s[0] == s[1] and s[2] == s[3] and s[4] == s[5]) def _is_gray(s): return s[0] == s[1] == s[2] if len(s) == 3 else s[0:2] == s[2:4] == s[4:6] def _remove_all(s): return _remove_grit(_remove_ats(_remove_comments(s))) def _remove_ats(s): at_reg = re.compile(r""" @\w+.*?{ # @at-keyword selector junk { (.*{.*?})+ # inner { curly } blocks, rules, and selector junk .*?} # stuff up to the first end curly }""", re.DOTALL | re.VERBOSE) return at_reg.sub('\\1', s) def _remove_comments(s): return re.sub(re.compile(r'/\*.*?\*/', re.DOTALL), '', s) def _remove_grit(s): grit_reg = re.compile(r""" ]+>.*?<\s*/\s*if[^>]*>| # contents ]+> # """, re.DOTALL | re.VERBOSE) return re.sub(grit_reg, '', s) def _rgb_from_hex(s): if len(s) == 3: r, g, b = s[0] + s[0], s[1] + s[1], s[2] + s[2] else: r, g, b = s[0:2], s[2:4], s[4:6] return int(r, base=16), int(g, base=16), int(b, base=16) def _strip_prefix(s): return re.sub(r'^-(?:o|ms|moz|khtml|webkit)-', '', s) def alphabetize_props(contents): errors = [] for rule in re.finditer(r'{(.*?)}', contents, re.DOTALL): semis = map(lambda t: t.strip(), rule.group(1).split(';'))[:-1] rules = filter(lambda r: ': ' in r, semis) props = map(lambda r: r[0:r.find(':')], rules) if props != sorted(props): errors.append(' %s;\n' % (';\n '.join(rules))) return errors def braces_have_space_before_and_nothing_after(line): brace_space_reg = re.compile(r""" (?:^|\S){| # selector{ or selector\n{ or {\s*\S+\s* # selector { with stuff after it $ # must be at the end of a line""", re.VERBOSE) return brace_space_reg.search(line) def classes_use_dashes(line): # Intentionally dumbed down version of CSS 2.1 grammar for class without # non-ASCII, escape chars, or whitespace. class_reg = re.compile(r""" \.(-?[\w-]+).* # ., then maybe -, then alpha numeric and - [,{]\s*$ # selectors should end with a , or {""", re.VERBOSE) m = class_reg.search(line) if not m: return False class_name = m.group(1) return class_name.lower() != class_name or '_' in class_name def close_brace_on_new_line(line): # Ignore single frames in a @keyframe, i.e. 0% { margin: 50px; } frame_reg = re.compile(r""" \s*\d+%\s*{ # 50% { \s*[\w-]+: # rule: (\s*[\w-]+)+\s*; # value; \s*}\s* # }""", re.VERBOSE) return ('}' in line and re.search(r'[^ }]', line) and not frame_reg.match(line)) def colons_have_space_after(line): colon_space_reg = re.compile(r""" (? (?:[, ]|(?<=\()) # a comma or space not followed by a ( (?:0?\.?)?0% # some equivalent to 0%""", re.VERBOSE) zeros_reg = re.compile(r""" ^.*(?:^|\D) # start/non-number (?:\.0|0(?:\.0? 
# .0, 0, or 0.0 |px|em|%|in|cm|mm|pc|pt|ex|deg|g?rad|m?s|k?hz)) # a length unit (?:\D|$) # non-number/end (?=[^{}]+?}).*$ # only { rules }""", re.MULTILINE | re.VERBOSE) errors = [] for z in re.finditer(zeros_reg, contents): first_line = z.group(0).strip().splitlines()[0] if not hsl_reg.search(first_line): errors.append(' ' + first_line) return errors # NOTE: Currently multi-line checks don't support 'after'. Instead, add # suggestions while parsing the file so another pass isn't necessary. added_or_modified_files_checks = [ { 'desc': 'Alphabetize properties and list vendor specific (i.e. ' '-webkit) above standard.', 'test': alphabetize_props, 'multiline': True, }, { 'desc': 'Start braces ({) end a selector, have a space before them ' 'and no rules after.', 'test': braces_have_space_before_and_nothing_after, }, { 'desc': 'Classes use .dash-form.', 'test': classes_use_dashes, }, { 'desc': 'Always put a rule closing brace (}) on a new line.', 'test': close_brace_on_new_line, }, { 'desc': 'Colons (:) should have a space after them.', 'test': colons_have_space_after, }, { 'desc': 'Use single quotes (\') instead of double quotes (") in ' 'strings.', 'test': favor_single_quotes, }, { 'desc': 'Use abbreviated hex (#rgb) when in form #rrggbb.', 'test': hex_could_be_shorter, 'after': suggest_short_hex, }, { 'desc': 'Use milliseconds for time measurements under 1 second.', 'test': milliseconds_for_small_times, 'after': suggest_ms_from_s, }, { 'desc': "Don't use data URIs in source files. Use grit instead.", 'test': no_data_uris_in_source_files, }, { 'desc': 'One rule per line (what not to do: color: red; margin: 0;).', 'test': one_rule_per_line, }, { 'desc': 'One selector per line (what not to do: a, b {}).', 'test': one_selector_per_line, 'multiline': True, }, { 'desc': 'Pseudo-elements should use double colon (i.e. ::after).', 'test': pseudo_elements_double_colon, 'multiline': True, }, { 'desc': 'Use rgb() over #hex when not a shade of gray (like #333).', 'test': rgb_if_not_gray, 'after': suggest_rgb_from_hex, }, { 'desc': 'Make all zero length terms (i.e. 0px) 0 unless inside of ' 'hsl() or part of @keyframe.', 'test': zero_length_values, 'multiline': True, }, ] results = [] affected_files = self.input_api.AffectedFiles(include_deletes=False, file_filter=self.file_filter) files = [] for f in affected_files: # Remove all /*comments*/, @at-keywords, and grit tags; we're # not using a real parser. TODO(dbeam): Check alpha in blocks. file_contents = _remove_all('\n'.join(f.NewContents())) files.append((f.LocalPath(), file_contents)) # Only look at CSS files for now. for f in filter(lambda f: f[0].endswith('.css'), files): file_errors = [] for check in added_or_modified_files_checks: # If the check is multiline, it receieves the whole file and gives us # back a list of things wrong. If the check isn't multiline, we pass it # each line and the check returns something truthy if there's an issue. 
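# Illustrative summary of the dispatch below: a 'multiline' check (e.g.
# alphabetize_props) receives the whole stripped file and returns a list of
# offending snippets, while a per-line check (e.g. classes_use_dashes) is
# called once per line and returns something truthy on a violation; an
# optional 'after' hook appends a fix suggestion to the reported line.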
if ('multiline' in check and check['multiline']): assert not 'after' in check check_errors = check['test'](f[1]) if len(check_errors) > 0: file_errors.append('- %s\n%s' % (check['desc'], '\n'.join(check_errors).rstrip())) else: check_errors = [] lines = f[1].splitlines() for lnum, line in enumerate(lines): if check['test'](line): error = ' ' + line.strip() if 'after' in check: error += check['after'](line) check_errors.append(error) if len(check_errors) > 0: file_errors.append('- %s\n%s' % (check['desc'], '\n'.join(check_errors))) if file_errors: results.append(self.output_api.PresubmitPromptWarning( '%s:\n%s' % (f[0], '\n\n'.join(file_errors)))) if results: # Add your name if you're here often mucking around in the code. authors = ['dbeam@chromium.org'] results.append(self.output_api.PresubmitNotifyResult( 'Was the CSS checker useful? Send feedback or hate mail to %s.' % ', '.join(authors))) return results # orm/deprecated_interfaces.py # Copyright (C) 2005-2014 the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php from .. import event, util from .interfaces import EXT_CONTINUE @util.langhelpers.dependency_for("sqlalchemy.orm.interfaces") class MapperExtension(object): """Base implementation for :class:`.Mapper` event hooks. .. note:: :class:`.MapperExtension` is deprecated. Please refer to :func:`.event.listen` as well as :class:`.MapperEvents`. New extension classes subclass :class:`.MapperExtension` and are specified using the ``extension`` mapper() argument, which is a single :class:`.MapperExtension` or a list of such:: from sqlalchemy.orm.interfaces import MapperExtension class MyExtension(MapperExtension): def before_insert(self, mapper, connection, instance): print "instance %s before insert !" % instance m = mapper(User, users_table, extension=MyExtension()) A single mapper can maintain a chain of ``MapperExtension`` objects. When a particular mapping event occurs, the corresponding method on each ``MapperExtension`` is invoked serially, and each method has the ability to halt the chain from proceeding further:: m = mapper(User, users_table, extension=[ext1, ext2, ext3]) Each ``MapperExtension`` method returns the symbol EXT_CONTINUE by default. This symbol generally means "move to the next ``MapperExtension`` for processing". For methods that return objects like translated rows or new object instances, EXT_CONTINUE means the result of the method should be ignored. In some cases it's required for a default mapper activity to be performed, such as adding a new instance to a result list. The symbol EXT_STOP has significance within a chain of ``MapperExtension`` objects that the chain will be stopped when this symbol is returned. Like EXT_CONTINUE, it also has additional significance in some cases that a default mapper activity will not be performed. 
""" @classmethod def _adapt_instrument_class(cls, self, listener): cls._adapt_listener_methods(self, listener, ('instrument_class',)) @classmethod def _adapt_listener(cls, self, listener): cls._adapt_listener_methods( self, listener, ( 'init_instance', 'init_failed', 'translate_row', 'create_instance', 'append_result', 'populate_instance', 'reconstruct_instance', 'before_insert', 'after_insert', 'before_update', 'after_update', 'before_delete', 'after_delete' )) @classmethod def _adapt_listener_methods(cls, self, listener, methods): for meth in methods: me_meth = getattr(MapperExtension, meth) ls_meth = getattr(listener, meth) if not util.methods_equivalent(me_meth, ls_meth): if meth == 'reconstruct_instance': def go(ls_meth): def reconstruct(instance, ctx): ls_meth(self, instance) return reconstruct event.listen(self.class_manager, 'load', go(ls_meth), raw=False, propagate=True) elif meth == 'init_instance': def go(ls_meth): def init_instance(instance, args, kwargs): ls_meth(self, self.class_, self.class_manager.original_init, instance, args, kwargs) return init_instance event.listen(self.class_manager, 'init', go(ls_meth), raw=False, propagate=True) elif meth == 'init_failed': def go(ls_meth): def init_failed(instance, args, kwargs): util.warn_exception(ls_meth, self, self.class_, self.class_manager.original_init, instance, args, kwargs) return init_failed event.listen(self.class_manager, 'init_failure', go(ls_meth), raw=False, propagate=True) else: event.listen(self, "%s" % meth, ls_meth, raw=False, retval=True, propagate=True) def instrument_class(self, mapper, class_): """Receive a class when the mapper is first constructed, and has applied instrumentation to the mapped class. The return value is only significant within the ``MapperExtension`` chain; the parent mapper's behavior isn't modified by this method. """ return EXT_CONTINUE def init_instance(self, mapper, class_, oldinit, instance, args, kwargs): """Receive an instance when it's constructor is called. This method is only called during a userland construction of an object. It is not called when an object is loaded from the database. The return value is only significant within the ``MapperExtension`` chain; the parent mapper's behavior isn't modified by this method. """ return EXT_CONTINUE def init_failed(self, mapper, class_, oldinit, instance, args, kwargs): """Receive an instance when it's constructor has been called, and raised an exception. This method is only called during a userland construction of an object. It is not called when an object is loaded from the database. The return value is only significant within the ``MapperExtension`` chain; the parent mapper's behavior isn't modified by this method. """ return EXT_CONTINUE def translate_row(self, mapper, context, row): """Perform pre-processing on the given result row and return a new row instance. This is called when the mapper first receives a row, before the object identity or the instance itself has been derived from that row. The given row may or may not be a ``RowProxy`` object - it will always be a dictionary-like object which contains mapped columns as keys. The returned object should also be a dictionary-like object which recognizes mapped columns as keys. If the ultimate return value is EXT_CONTINUE, the row is not translated. """ return EXT_CONTINUE def create_instance(self, mapper, selectcontext, row, class_): """Receive a row when a new object instance is about to be created from that row. 
The method can choose to create the instance itself, or it can return EXT_CONTINUE to indicate normal object creation should take place. mapper The mapper doing the operation selectcontext The QueryContext generated from the Query. row The result row from the database class\_ The class we are mapping. return value A new object instance, or EXT_CONTINUE """ return EXT_CONTINUE def append_result(self, mapper, selectcontext, row, instance, result, **flags): """Receive an object instance before that instance is appended to a result list. If this method returns EXT_CONTINUE, result appending will proceed normally. if this method returns any other value or None, result appending will not proceed for this instance, giving this extension an opportunity to do the appending itself, if desired. mapper The mapper doing the operation. selectcontext The QueryContext generated from the Query. row The result row from the database. instance The object instance to be appended to the result. result List to which results are being appended. \**flags extra information about the row, same as criterion in ``create_row_processor()`` method of :class:`~sqlalchemy.orm.interfaces.MapperProperty` """ return EXT_CONTINUE def populate_instance(self, mapper, selectcontext, row, instance, **flags): """Receive an instance before that instance has its attributes populated. This usually corresponds to a newly loaded instance but may also correspond to an already-loaded instance which has unloaded attributes to be populated. The method may be called many times for a single instance, as multiple result rows are used to populate eagerly loaded collections. If this method returns EXT_CONTINUE, instance population will proceed normally. If any other value or None is returned, instance population will not proceed, giving this extension an opportunity to populate the instance itself, if desired. .. deprecated:: 0.5 Most usages of this hook are obsolete. For a generic "object has been newly created from a row" hook, use ``reconstruct_instance()``, or the ``@orm.reconstructor`` decorator. """ return EXT_CONTINUE def reconstruct_instance(self, mapper, instance): """Receive an object instance after it has been created via ``__new__``, and after initial attribute population has occurred. This typically occurs when the instance is created based on incoming result rows, and is only called once for that instance's lifetime. Note that during a result-row load, this method is called upon the first row received for this instance. Note that some attributes and collections may or may not be loaded or even initialized, depending on what's present in the result rows. The return value is only significant within the ``MapperExtension`` chain; the parent mapper's behavior isn't modified by this method. """ return EXT_CONTINUE def before_insert(self, mapper, connection, instance): """Receive an object instance before that instance is inserted into its table. This is a good place to set up primary key values and such that aren't handled otherwise. Column-based attributes can be modified within this method which will result in the new value being inserted. However *no* changes to the overall flush plan can be made, and manipulation of the ``Session`` will not have the desired effect. To manipulate the ``Session`` within an extension, use ``SessionExtension``. The return value is only significant within the ``MapperExtension`` chain; the parent mapper's behavior isn't modified by this method. 
""" return EXT_CONTINUE def after_insert(self, mapper, connection, instance): """Receive an object instance after that instance is inserted. The return value is only significant within the ``MapperExtension`` chain; the parent mapper's behavior isn't modified by this method. """ return EXT_CONTINUE def before_update(self, mapper, connection, instance): """Receive an object instance before that instance is updated. Note that this method is called for all instances that are marked as "dirty", even those which have no net changes to their column-based attributes. An object is marked as dirty when any of its column-based attributes have a "set attribute" operation called or when any of its collections are modified. If, at update time, no column-based attributes have any net changes, no UPDATE statement will be issued. This means that an instance being sent to before_update is *not* a guarantee that an UPDATE statement will be issued (although you can affect the outcome here). To detect if the column-based attributes on the object have net changes, and will therefore generate an UPDATE statement, use ``object_session(instance).is_modified(instance, include_collections=False)``. Column-based attributes can be modified within this method which will result in the new value being updated. However *no* changes to the overall flush plan can be made, and manipulation of the ``Session`` will not have the desired effect. To manipulate the ``Session`` within an extension, use ``SessionExtension``. The return value is only significant within the ``MapperExtension`` chain; the parent mapper's behavior isn't modified by this method. """ return EXT_CONTINUE def after_update(self, mapper, connection, instance): """Receive an object instance after that instance is updated. The return value is only significant within the ``MapperExtension`` chain; the parent mapper's behavior isn't modified by this method. """ return EXT_CONTINUE def before_delete(self, mapper, connection, instance): """Receive an object instance before that instance is deleted. Note that *no* changes to the overall flush plan can be made here; and manipulation of the ``Session`` will not have the desired effect. To manipulate the ``Session`` within an extension, use ``SessionExtension``. The return value is only significant within the ``MapperExtension`` chain; the parent mapper's behavior isn't modified by this method. """ return EXT_CONTINUE def after_delete(self, mapper, connection, instance): """Receive an object instance after that instance is deleted. The return value is only significant within the ``MapperExtension`` chain; the parent mapper's behavior isn't modified by this method. """ return EXT_CONTINUE @util.langhelpers.dependency_for("sqlalchemy.orm.interfaces") class SessionExtension(object): """Base implementation for :class:`.Session` event hooks. .. note:: :class:`.SessionExtension` is deprecated. Please refer to :func:`.event.listen` as well as :class:`.SessionEvents`. Subclasses may be installed into a :class:`.Session` (or :class:`.sessionmaker`) using the ``extension`` keyword argument:: from sqlalchemy.orm.interfaces import SessionExtension class MySessionExtension(SessionExtension): def before_commit(self, session): print "before commit!" Session = sessionmaker(extension=MySessionExtension()) The same :class:`.SessionExtension` instance can be used with any number of sessions. 
""" @classmethod def _adapt_listener(cls, self, listener): for meth in [ 'before_commit', 'after_commit', 'after_rollback', 'before_flush', 'after_flush', 'after_flush_postexec', 'after_begin', 'after_attach', 'after_bulk_update', 'after_bulk_delete', ]: me_meth = getattr(SessionExtension, meth) ls_meth = getattr(listener, meth) if not util.methods_equivalent(me_meth, ls_meth): event.listen(self, meth, getattr(listener, meth)) def before_commit(self, session): """Execute right before commit is called. Note that this may not be per-flush if a longer running transaction is ongoing.""" def after_commit(self, session): """Execute after a commit has occurred. Note that this may not be per-flush if a longer running transaction is ongoing.""" def after_rollback(self, session): """Execute after a rollback has occurred. Note that this may not be per-flush if a longer running transaction is ongoing.""" def before_flush(self, session, flush_context, instances): """Execute before flush process has started. `instances` is an optional list of objects which were passed to the ``flush()`` method. """ def after_flush(self, session, flush_context): """Execute after flush has completed, but before commit has been called. Note that the session's state is still in pre-flush, i.e. 'new', 'dirty', and 'deleted' lists still show pre-flush state as well as the history settings on instance attributes.""" def after_flush_postexec(self, session, flush_context): """Execute after flush has completed, and after the post-exec state occurs. This will be when the 'new', 'dirty', and 'deleted' lists are in their final state. An actual commit() may or may not have occurred, depending on whether or not the flush started its own transaction or participated in a larger transaction. """ def after_begin(self, session, transaction, connection): """Execute after a transaction is begun on a connection `transaction` is the SessionTransaction. This method is called after an engine level transaction is begun on a connection. """ def after_attach(self, session, instance): """Execute after an instance is attached to a session. This is called after an add, delete or merge. """ def after_bulk_update(self, session, query, query_context, result): """Execute after a bulk update operation to the session. This is called after a session.query(...).update() `query` is the query object that this update operation was called on. `query_context` was the query context object. `result` is the result object returned from the bulk operation. """ def after_bulk_delete(self, session, query, query_context, result): """Execute after a bulk delete operation to the session. This is called after a session.query(...).delete() `query` is the query object that this delete operation was called on. `query_context` was the query context object. `result` is the result object returned from the bulk operation. """ @util.langhelpers.dependency_for("sqlalchemy.orm.interfaces") class AttributeExtension(object): """Base implementation for :class:`.AttributeImpl` event hooks, events that fire upon attribute mutations in user code. .. note:: :class:`.AttributeExtension` is deprecated. Please refer to :func:`.event.listen` as well as :class:`.AttributeEvents`. :class:`.AttributeExtension` is used to listen for set, remove, and append events on individual mapped attributes. 
It is established on an individual mapped attribute using the `extension` argument, available on :func:`.column_property`, :func:`.relationship`, and others:: from sqlalchemy.orm.interfaces import AttributeExtension from sqlalchemy.orm import mapper, relationship, column_property class MyAttrExt(AttributeExtension): def append(self, state, value, initiator): print "append event !" return value def set(self, state, value, oldvalue, initiator): print "set event !" return value mapper(SomeClass, sometable, properties={ 'foo':column_property(sometable.c.foo, extension=MyAttrExt()), 'bar':relationship(Bar, extension=MyAttrExt()) }) Note that the :class:`.AttributeExtension` methods :meth:`~.AttributeExtension.append` and :meth:`~.AttributeExtension.set` need to return the ``value`` parameter. The returned value is used as the effective value, and allows the extension to change what is ultimately persisted. AttributeExtension is assembled within the descriptors associated with a mapped class. """ active_history = True """indicates that the set() method would like to receive the 'old' value, even if it means firing lazy callables. Note that ``active_history`` can also be set directly via :func:`.column_property` and :func:`.relationship`. """ @classmethod def _adapt_listener(cls, self, listener): event.listen(self, 'append', listener.append, active_history=listener.active_history, raw=True, retval=True) event.listen(self, 'remove', listener.remove, active_history=listener.active_history, raw=True, retval=True) event.listen(self, 'set', listener.set, active_history=listener.active_history, raw=True, retval=True) def append(self, state, value, initiator): """Receive a collection append event. The returned value will be used as the actual value to be appended. """ return value def remove(self, state, value, initiator): """Receive a remove event. No return value is defined. """ pass def set(self, state, value, oldvalue, initiator): """Receive a set event. The returned value will be used as the actual value to be set. """ return value # -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2011-2012 Domsense s.r.l. (). # Copyright (C) 2012 Agile Business Group sagl () # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . 
# ############################################################################## from osv import fields, osv from tools.translate import _ import decimal_precision as dp class account_voucher(osv.osv): _inherit = "account.voucher" _columns = { 'shadow_move_id': fields.many2one('account.move','Shadow Entry', readonly=True), } def is_vat_on_payment(self, voucher): vat_on_p =0 valid_lines =0 if voucher.type in ('payment', 'receipt'): for line in voucher.line_ids: if line.amount: valid_lines +=1 if line.move_line_id and line.move_line_id.invoice and line.move_line_id.invoice.vat_on_payment: vat_on_p +=1 if vat_on_p and vat_on_p != valid_lines: raise osv.except_osv(_('Error'), _("Can't handle VAT on payment if not every invoice is on a VAT on payment treatment")) return vat_on_p def action_move_line_create(self, cr, uid, ids, context=None): if context is None: context = {} inv_pool = self.pool.get('account.invoice') journal_pool = self.pool.get('account.journal') move_line_pool = self.pool.get('account.move.line') move_pool = self.pool.get('account.move') currency_obj = self.pool.get('res.currency') res=False for voucher in self.browse(cr, uid, ids, context): entry_posted = voucher.journal_id.entry_posted # disable the 'skip draft state' option because "mixed" entry (shadow + real) won't pass validation. Anyway every entry will be posted later (if 'entry_posted' is enabled) if entry_posted: journal_pool.write(cr, uid, voucher.journal_id.id, {'entry_posted': False}) res=super(account_voucher,self).action_move_line_create(cr, uid, [voucher.id], context) voucher.refresh() # because 'move_id' has been updated by 'action_move_line_create' if entry_posted: journal_pool.write(cr, uid, voucher.journal_id.id, {'entry_posted': True}) if self.is_vat_on_payment(voucher): if not voucher.journal_id.vat_on_payment_related_journal_id: raise osv.except_osv(_('Error'), _('We are on a VAT on payment treatment but journal %s does not have a related shadow journal') % voucher.journal_id.name) lines_to_create = [] amounts_by_invoice = super(account_voucher,self).allocated_amounts_grouped_by_invoice(cr, uid,voucher, context) for inv_id in amounts_by_invoice: invoice = inv_pool.browse(cr, uid, inv_id, context) for inv_move_line in invoice.move_id.line_id: if inv_move_line.account_id.type != 'receivable' and inv_move_line.account_id.type != 'payable': # compute the VAT or base line proportionally to the paid amount new_line_amount = currency_obj.round(cr, uid, voucher.company_id.currency_id, ((amounts_by_invoice[invoice.id]['allocated'] + amounts_by_invoice[invoice.id]['write-off']) / amounts_by_invoice[invoice.id]['total']) * (inv_move_line.credit or inv_move_line.debit)) if not inv_move_line.real_account_id: raise osv.except_osv(_('Error'), _('We are on a VAT on payment treatment but move line %s does not have a related real account') % inv_move_line.name) # prepare the real move line vals = { 'name': inv_move_line.name, 'account_id': inv_move_line.real_account_id.id, 'credit': inv_move_line.credit and new_line_amount or 0.0, 'debit': inv_move_line.debit and new_line_amount or 0.0, 'type': 'real', 'partner_id': inv_move_line.partner_id and inv_move_line.partner_id.id or False } if inv_move_line.tax_code_id: if not inv_move_line.real_tax_code_id: raise osv.except_osv(_('Error'), _('We are on a VAT on payment treatment but move line %s does not have a related real tax code') % inv_move_line.name) vals['tax_code_id'] = inv_move_line.real_tax_code_id.id if inv_move_line.tax_amount < 0: vals['tax_amount'] = 
-new_line_amount else: vals['tax_amount'] = new_line_amount lines_to_create.append(vals) # prepare the shadow move line vals={ 'name': inv_move_line.name, 'account_id': inv_move_line.account_id.id, 'credit': inv_move_line.debit and new_line_amount or 0.0, 'debit': inv_move_line.credit and new_line_amount or 0.0, 'type': 'shadow', 'partner_id': inv_move_line.partner_id and inv_move_line.partner_id.id or False } if inv_move_line.tax_code_id: vals['tax_code_id'] = inv_move_line.tax_code_id.id if inv_move_line.tax_amount < 0: vals['tax_amount'] = new_line_amount else: vals['tax_amount'] = -new_line_amount lines_to_create.append(vals) context['journal_id'] = voucher.journal_id.vat_on_payment_related_journal_id.id context['period_id'] = voucher.move_id.period_id.id shadow_move_id = move_pool.create(cr, uid, { 'journal_id': voucher.journal_id.vat_on_payment_related_journal_id.id, 'period_id': voucher.move_id.period_id.id, 'date': voucher.move_id.date, }, context) # move the payment move lines to shadow entry for line in voucher.move_ids: if line.account_id.type != 'liquidity': line.write({ 'move_id': shadow_move_id, }, update_check=False) # this will allow user to see the real entry from invoice payment tab if line.account_id.type == 'receivable' or line.account_id.type == 'payable': line.write({ 'real_payment_move_id': voucher.move_id.id, }) for line_to_create in lines_to_create: if line_to_create['type'] == 'real': line_to_create['move_id'] = voucher.move_id.id elif line_to_create['type'] == 'shadow': line_to_create['move_id'] = shadow_move_id del line_to_create['type'] move_line_pool.create(cr, uid, line_to_create, context) voucher.write({'shadow_move_id': shadow_move_id}) super(account_voucher,self).balance_move(cr, uid, shadow_move_id, context) super(account_voucher,self).balance_move(cr, uid, voucher.move_id.id, context) return res def cancel_voucher(self, cr, uid, ids, context=None): res = super(account_voucher,self).cancel_voucher(cr, uid, ids, context) reconcile_pool = self.pool.get('account.move.reconcile') move_pool = self.pool.get('account.move') for voucher in self.browse(cr, uid, ids, context=context): recs = [] if voucher.shadow_move_id: for line in voucher.shadow_move_id.line_id: if line.reconcile_id: recs += [line.reconcile_id.id] if line.reconcile_partial_id: recs += [line.reconcile_partial_id.id] reconcile_pool.unlink(cr, uid, recs) if voucher.shadow_move_id: move_pool.button_cancel(cr, uid, [voucher.shadow_move_id.id]) move_pool.unlink(cr, uid, [voucher.shadow_move_id.id]) return res class account_invoice(osv.osv): def _get_vat_on_payment(self, cr, uid, context=None): return self.pool.get('res.users').browse(cr, uid, uid, context).company_id.vat_on_payment def finalize_invoice_move_lines(self, cr, uid, invoice_browse, move_lines): """ Use shadow accounts for journal entry to be generated, according to account and tax code related records """ move_lines = super(account_invoice,self).finalize_invoice_move_lines(cr, uid, invoice_browse, move_lines) acc_pool = self.pool.get('account.account') tax_code_pool = self.pool.get('account.tax.code') new_move_lines = [] for line_tup in move_lines: if invoice_browse.vat_on_payment: if line_tup[2].get('account_id', False): account = acc_pool.browse(cr, uid, line_tup[2]['account_id']) if account.type != 'receivable' and account.type != 'payable': if not account.vat_on_payment_related_account_id: raise osv.except_osv(_('Error'), _('The invoice is \'VAT on payment\' but account %s does not have a related shadow account') % account.name) 
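# Keep a reference to the original ("real") account on the move line, then
# substitute the shadow account so the entry is posted on the VAT-on-payment
# accounts; the same real/shadow swap is applied to the tax code just below.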
line_tup[2]['real_account_id'] = line_tup[2]['account_id'] line_tup[2]['account_id'] = account.vat_on_payment_related_account_id.id if line_tup[2].get('tax_code_id', False): tax_code = tax_code_pool.browse(cr, uid, line_tup[2]['tax_code_id']) if not tax_code.vat_on_payment_related_tax_code_id: raise osv.except_osv(_('Error'), _('The invoice is \'VAT on payment\' but tax code %s does not have a related shadow tax code') % tax_code.name) line_tup[2]['real_tax_code_id'] = line_tup[2]['tax_code_id'] line_tup[2]['tax_code_id'] = tax_code.vat_on_payment_related_tax_code_id.id new_move_lines.append(line_tup) return new_move_lines _inherit = "account.invoice" _columns = { 'vat_on_payment': fields.boolean('Vat on payment'), } _defaults = { 'vat_on_payment': _get_vat_on_payment, } class account_move_line(osv.osv): _inherit = "account.move.line" _columns = { 'real_payment_move_id': fields.many2one('account.move', 'Real payment entry'), 'real_account_id': fields.many2one('account.account','Real account'), 'real_tax_code_id': fields.many2one('account.tax.code','Real tax code'), } class account_account(osv.osv): _inherit = "account.account" _columns = { 'vat_on_payment_related_account_id': fields.many2one('account.account', 'Shadow Account for VAT on payment', help='Related account used for real registrations on a VAT on payment basis. Set the shadow account here'), } class account_tax_code(osv.osv): _inherit = "account.tax.code" _columns = { 'vat_on_payment_related_tax_code_id': fields.many2one('account.tax.code', 'Shadow Tax code for VAT on payment', help='Related tax code used for real registrations on a VAT on payment basis. Set the shadow tax code here'), } class account_journal(osv.osv): _inherit = "account.journal" _columns = { 'vat_on_payment_related_journal_id': fields.many2one('account.journal', 'Shadow Journal for VAT on payment', help='Related journal used for shadow registrations on a VAT on payment basis. 
Set the shadow journal here'), } # This file was automatically created by FeynRules $Revision: 510 $ # Mathematica version: 7.0 for Linux x86 (64-bit) (February 18, 2009) # Date: Wed 2 Mar 2011 15:09:55 from __future__ import division from object_library import all_particles, Particle import parameters as Param a = Particle(pdg_code = 22, name = 'a', antiname = 'a', spin = 3, color = 1, mass = Param.ZERO, width = Param.ZERO, texname = 'a', antitexname = 'a', charge = 0, Y = 0, GhostNumber = 0) Z = Particle(pdg_code = 23, name = 'Z', antiname = 'Z', spin = 3, color = 1, mass = Param.MZ, width = Param.WZ, texname = 'Z', antitexname = 'Z', charge = 0, Y = 0, GhostNumber = 0) W__plus__ = Particle(pdg_code = 24, name = 'W+', antiname = 'W-', spin = 3, color = 1, mass = Param.MW, width = Param.WW, texname = 'W+', antitexname = 'W+', charge = 1, Y = 0, GhostNumber = 0) W__minus__ = W__plus__.anti() g = Particle(pdg_code = 21, name = 'g', antiname = 'g', spin = 3, color = 8, mass = Param.ZERO, width = Param.ZERO, texname = 'g', antitexname = 'g', charge = 0, Y = 0, GhostNumber = 0) n1 = Particle(pdg_code = 1000022, name = 'n1', antiname = 'n1', spin = 2, color = 1, mass = Param.Mneu1, width = Param.Wneu1, texname = 'n1', antitexname = 'n1', charge = 0, Y = 0, GhostNumber = 0) n2 = Particle(pdg_code = 1000023, name = 'n2', antiname = 'n2', spin = 2, color = 1, mass = Param.Mneu2, width = Param.Wneu2, texname = 'n2', antitexname = 'n2', charge = 0, Y = 0, GhostNumber = 0) n3 = Particle(pdg_code = 1000025, name = 'n3', antiname = 'n3', spin = 2, color = 1, mass = Param.Mneu3, width = Param.Wneu3, texname = 'n3', antitexname = 'n3', charge = 0, Y = 0, GhostNumber = 0) n4 = Particle(pdg_code = 1000035, name = 'n4', antiname = 'n4', spin = 2, color = 1, mass = Param.Mneu4, width = Param.Wneu4, texname = 'n4', antitexname = 'n4', charge = 0, Y = 0, GhostNumber = 0) x1__plus__ = Particle(pdg_code = 1000024, name = 'x1+', antiname = 'x1-', spin = 2, color = 1, mass = Param.Mch1, width = Param.Wch1, texname = 'x1+', antitexname = 'x1+', charge = 1, Y = 0, GhostNumber = 0) x1__minus__ = x1__plus__.anti() x2__plus__ = Particle(pdg_code = 1000037, name = 'x2+', antiname = 'x2-', spin = 2, color = 1, mass = Param.Mch2, width = Param.Wch2, texname = 'x2+', antitexname = 'x2+', charge = 1, Y = 0, GhostNumber = 0) x2__minus__ = x2__plus__.anti() go = Particle(pdg_code = 1000021, name = 'go', antiname = 'go', spin = 2, color = 8, mass = Param.Mgo, width = Param.Wgo, texname = 'go', antitexname = 'go', charge = 0, Y = 0, GhostNumber = 0) h01 = Particle(pdg_code = 25, name = 'h1', antiname = 'h1', spin = 1, color = 1, mass = Param.MH01, width = Param.WH01, texname = 'h1', antitexname = 'h1', charge = 0, Y = 0, GhostNumber = 0) h02 = Particle(pdg_code = 35, name = 'h02', antiname = 'h02', spin = 1, color = 1, mass = Param.MH02, width = Param.WH02, texname = 'h02', antitexname = 'h02', charge = 0, Y = 0, GhostNumber = 0) A0 = Particle(pdg_code = 36, name = 'A0', antiname = 'A0', spin = 1, color = 1, mass = Param.MA0, width = Param.WA0, texname = 'A0', antitexname = 'A0', charge = 0, Y = 0, GhostNumber = 0) H__plus__ = Particle(pdg_code = 37, name = 'H+', antiname = 'H-', spin = 1, color = 1, mass = Param.MH, width = Param.WH, texname = 'H+', antitexname = 'H+', charge = 1, Y = 0, GhostNumber = 0) H__minus__ = H__plus__.anti() G0 = Particle(pdg_code = 250, name = 'G0', antiname = 'G0', spin = 1, color = 1, mass = Param.MZ, width = Param.ZERO, texname = 'G0', antitexname = 'G0', GoldstoneBoson = True, charge = 0, Y = 0, 
GhostNumber = 0) G__plus__ = Particle(pdg_code = 251, name = 'G+', antiname = 'G-', spin = 1, color = 1, mass = Param.MW, width = Param.ZERO, texname = 'G+', antitexname = 'G+', GoldstoneBoson = True, charge = 1, Y = 0, GhostNumber = 0) G__minus__ = G__plus__.anti() ve = Particle(pdg_code = 12, name = 've', antiname = 've~', spin = 2, color = 1, mass = Param.Mve, width = Param.ZERO, texname = 've', antitexname = 've', charge = 0, Y = 0, GhostNumber = 0) ve__tilde__ = ve.anti() vm = Particle(pdg_code = 14, name = 'vm', antiname = 'vm~', spin = 2, color = 1, mass = Param.Mvm, width = Param.ZERO, texname = 'vm', antitexname = 'vm', charge = 0, Y = 0, GhostNumber = 0) vm__tilde__ = vm.anti() vt = Particle(pdg_code = 16, name = 'vt', antiname = 'vt~', spin = 2, color = 1, mass = Param.Mvt, width = Param.ZERO, texname = 'vt', antitexname = 'vt', charge = 0, Y = 0, GhostNumber = 0) vt__tilde__ = vt.anti() e__minus__ = Particle(pdg_code = 11, name = 'e-', antiname = 'e+', spin = 2, color = 1, mass = Param.Me, width = Param.ZERO, texname = 'e-', antitexname = 'e-', charge = -1, Y = 0, GhostNumber = 0) e__plus__ = e__minus__.anti() mu__minus__ = Particle(pdg_code = 13, name = 'mu-', antiname = 'mu+', spin = 2, color = 1, mass = Param.Mm, width = Param.ZERO, texname = 'mu-', antitexname = 'mu-', charge = -1, Y = 0, GhostNumber = 0) mu__plus__ = mu__minus__.anti() tau__minus__ = Particle(pdg_code = 15, name = 'tau-', antiname = 'tau+', spin = 2, color = 1, mass = Param.Mta, width = Param.ZERO, texname = 'tau-', antitexname = 'tau-', charge = -1, Y = 0, GhostNumber = 0) tau__plus__ = tau__minus__.anti() u = Particle(pdg_code = 2, name = 'u', antiname = 'u~', spin = 2, color = 3, mass = Param.MU, width = Param.ZERO, texname = 'u', antitexname = 'u', charge = 2/3, Y = 0, GhostNumber = 0) u__tilde__ = u.anti() c = Particle(pdg_code = 4, name = 'c', antiname = 'c~', spin = 2, color = 3, mass = Param.MC, width = Param.ZERO, texname = 'c', antitexname = 'c', charge = 2/3, Y = 0, GhostNumber = 0) c__tilde__ = c.anti() t = Particle(pdg_code = 6, name = 't', antiname = 't~', spin = 2, color = 3, mass = Param.MT, width = Param.WT, texname = 't', antitexname = 't', charge = 2/3, Y = 0, GhostNumber = 0) t__tilde__ = t.anti() d = Particle(pdg_code = 1, name = 'd', antiname = 'd~', spin = 2, color = 3, mass = Param.MD, width = Param.ZERO, texname = 'd', antitexname = 'd', charge = -1/3, Y = 0, GhostNumber = 0) d__tilde__ = d.anti() s = Particle(pdg_code = 3, name = 's', antiname = 's~', spin = 2, color = 3, mass = Param.MS, width = Param.ZERO, texname = 's', antitexname = 's', charge = -1/3, Y = 0, GhostNumber = 0) s__tilde__ = s.anti() b = Particle(pdg_code = 5, name = 'b', antiname = 'b~', spin = 2, color = 3, mass = Param.MB, width = Param.ZERO, texname = 'b', antitexname = 'b', charge = -1/3, Y = 0, GhostNumber = 0) b__tilde__ = b.anti() sv1 = Particle(pdg_code = 1000012, name = 'sv1', antiname = 'sv1~', spin = 1, color = 1, mass = Param.Msn1, width = Param.Wsn1, texname = 'sv1', antitexname = 'sv1', charge = 0, Y = 0, GhostNumber = 0) sv1__tilde__ = sv1.anti() sv2 = Particle(pdg_code = 1000014, name = 'sv2', antiname = 'sv2~', spin = 1, color = 1, mass = Param.Msn2, width = Param.Wsn2, texname = 'sv2', antitexname = 'sv2', charge = 0, Y = 0, GhostNumber = 0) sv2__tilde__ = sv2.anti() sv3 = Particle(pdg_code = 1000016, name = 'sv3', antiname = 'sv3~', spin = 1, color = 1, mass = Param.Msn3, width = Param.Wsn3, texname = 'sv3', antitexname = 'sv3', charge = 0, Y = 0, GhostNumber = 0) sv3__tilde__ = sv3.anti() 
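# Note (descriptive comment, not in the original source): the charged sleptons follow.
# As with the sneutrinos above, each conjugate object (e.g. sl1__plus__, su1__tilde__)
# is built from the corresponding particle via Particle.anti() from object_library
# rather than being declared separately.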
sl1__minus__ = Particle(pdg_code = 1000011, name = 'sl1-', antiname = 'sl1+', spin = 1, color = 1, mass = Param.Msl1, width = Param.Wsl1, texname = 'sl1-', antitexname = 'sl1-', charge = -1, Y = 0, GhostNumber = 0) sl1__plus__ = sl1__minus__.anti() sl2__minus__ = Particle(pdg_code = 1000013, name = 'sl2-', antiname = 'sl2+', spin = 1, color = 1, mass = Param.Msl2, width = Param.Wsl2, texname = 'sl2-', antitexname = 'sl2-', charge = -1, Y = 0, GhostNumber = 0) sl2__plus__ = sl2__minus__.anti() sl3__minus__ = Particle(pdg_code = 1000015, name = 'sl3-', antiname = 'sl3+', spin = 1, color = 1, mass = Param.Msl3, width = Param.Wsl3, texname = 'sl3-', antitexname = 'sl3-', charge = -1, Y = 0, GhostNumber = 0) sl3__plus__ = sl3__minus__.anti() sl4__minus__ = Particle(pdg_code = 2000011, name = 'sl4-', antiname = 'sl4+', spin = 1, color = 1, mass = Param.Msl4, width = Param.Wsl4, texname = 'sl4-', antitexname = 'sl4-', charge = -1, Y = 0, GhostNumber = 0) sl4__plus__ = sl4__minus__.anti() sl5__minus__ = Particle(pdg_code = 2000013, name = 'sl5-', antiname = 'sl5+', spin = 1, color = 1, mass = Param.Msl5, width = Param.Wsl5, texname = 'sl5-', antitexname = 'sl5-', charge = -1, Y = 0, GhostNumber = 0) sl5__plus__ = sl5__minus__.anti() sl6__minus__ = Particle(pdg_code = 2000015, name = 'sl6-', antiname = 'sl6+', spin = 1, color = 1, mass = Param.Msl6, width = Param.Wsl6, texname = 'sl6-', antitexname = 'sl6-', charge = -1, Y = 0, GhostNumber = 0) sl6__plus__ = sl6__minus__.anti() su1 = Particle(pdg_code = 1000002, name = 'su1', antiname = 'su1~', spin = 1, color = 3, mass = Param.Msu1, width = Param.Wsu1, texname = 'su1', antitexname = 'su1', charge = 2/3, Y = 0, GhostNumber = 0) su1__tilde__ = su1.anti() su2 = Particle(pdg_code = 1000004, name = 'su2', antiname = 'su2~', spin = 1, color = 3, mass = Param.Msu2, width = Param.Wsu2, texname = 'su2', antitexname = 'su2', charge = 2/3, Y = 0, GhostNumber = 0) su2__tilde__ = su2.anti() su3 = Particle(pdg_code = 1000006, name = 'su3', antiname = 'su3~', spin = 1, color = 3, mass = Param.Msu3, width = Param.Wsu3, texname = 'su3', antitexname = 'su3', charge = 2/3, Y = 0, GhostNumber = 0) su3__tilde__ = su3.anti() su4 = Particle(pdg_code = 2000002, name = 'su4', antiname = 'su4~', spin = 1, color = 3, mass = Param.Msu4, width = Param.Wsu4, texname = 'su4', antitexname = 'su4', charge = 2/3, Y = 0, GhostNumber = 0) su4__tilde__ = su4.anti() su5 = Particle(pdg_code = 2000004, name = 'su5', antiname = 'su5~', spin = 1, color = 3, mass = Param.Msu5, width = Param.Wsu5, texname = 'su5', antitexname = 'su5', charge = 2/3, Y = 0, GhostNumber = 0) su5__tilde__ = su5.anti() su6 = Particle(pdg_code = 2000006, name = 'su6', antiname = 'su6~', spin = 1, color = 3, mass = Param.Msu6, width = Param.Wsu6, texname = 'su6', antitexname = 'su6', charge = 2/3, Y = 0, GhostNumber = 0) su6__tilde__ = su6.anti() sd1 = Particle(pdg_code = 1000001, name = 'sd1', antiname = 'sd1~', spin = 1, color = 3, mass = Param.Msd1, width = Param.Wsd1, texname = 'sd1', antitexname = 'sd1', charge = -1/3, Y = 0, GhostNumber = 0) sd1__tilde__ = sd1.anti() sd2 = Particle(pdg_code = 1000003, name = 'sd2', antiname = 'sd2~', spin = 1, color = 3, mass = Param.Msd2, width = Param.Wsd2, texname = 'sd2', antitexname = 'sd2', charge = -1/3, Y = 0, GhostNumber = 0) sd2__tilde__ = sd2.anti() sd3 = Particle(pdg_code = 1000005, name = 'sd3', antiname = 'sd3~', spin = 1, color = 3, mass = Param.Msd3, width = Param.Wsd3, texname = 'sd3', antitexname = 'sd3', charge = -1/3, Y = 0, GhostNumber = 0) sd3__tilde__ 
= sd3.anti() sd4 = Particle(pdg_code = 2000001, name = 'sd4', antiname = 'sd4~', spin = 1, color = 3, mass = Param.Msd4, width = Param.Wsd4, texname = 'sd4', antitexname = 'sd4', charge = -1/3, Y = 0, GhostNumber = 0) sd4__tilde__ = sd4.anti() sd5 = Particle(pdg_code = 2000003, name = 'sd5', antiname = 'sd5~', spin = 1, color = 3, mass = Param.Msd5, width = Param.Wsd5, texname = 'sd5', antitexname = 'sd5', charge = -1/3, Y = 0, GhostNumber = 0) sd5__tilde__ = sd5.anti() sd6 = Particle(pdg_code = 2000005, name = 'sd6', antiname = 'sd6~', spin = 1, color = 3, mass = Param.Msd6, width = Param.Wsd6, texname = 'sd6', antitexname = 'sd6', charge = -1/3, Y = 0, GhostNumber = 0) sd6__tilde__ = sd6.anti() ghG = Particle(pdg_code = 9000001, name = 'ghG', antiname = 'ghG~', spin = -1, color = 8, mass = Param.ZERO, width = Param.ZERO, texname = 'ghG', antitexname = 'ghG', charge = 0, Y = 0, GhostNumber = 1) ghG__tilde__ = ghG.anti() ghA = Particle(pdg_code = 9000002, name = 'ghA', antiname = 'ghA~', spin = -1, color = 1, mass = Param.ZERO, width = Param.ZERO, texname = 'ghA', antitexname = 'ghA', charge = 0, Y = 0, GhostNumber = 1) ghA__tilde__ = ghA.anti() ghZ = Particle(pdg_code = 9000003, name = 'ghZ', antiname = 'ghZ~', spin = -1, color = 1, mass = Param.MZ, width = Param.WZ, texname = 'ghZ', antitexname = 'ghZ', charge = 0, Y = 0, GhostNumber = 1) ghZ__tilde__ = ghZ.anti() ghWp = Particle(pdg_code = 9000004, name = 'ghWp', antiname = 'ghWp~', spin = -1, color = 1, mass = Param.MW, width = Param.WW, texname = 'ghWp', antitexname = 'ghWp', charge = 1, Y = 0, GhostNumber = 1) ghWp__tilde__ = ghWp.anti() ghWm = Particle(pdg_code = 9000005, name = 'ghWm', antiname = 'ghWm~', spin = -1, color = 1, mass = Param.MW, width = Param.WW, texname = 'ghWm', antitexname = 'ghWm', charge = -1, Y = 0, GhostNumber = 1) ghWm__tilde__ = ghWm.anti() # # added for ADM # # by Deshpreet Singh Bedi and Ian-Woo Kim # date: Apr 6, 2012 # updated: May 16, 2012 # # fermion xx = Particle( pdg_code = 9000101, name = 'xx', antiname = 'xx~', spin = 2, color = 1, mass = Param.mxx, # we should change width = Param.ZERO, # Param.wxx, # we should change texname = 'xx', antitexname = 'xx', charge = 0, Y = 0, GhostNumber = 0) xx__tilde__ = xx.anti() # scalar sxxp = Particle(pdg_code = 9000201, name = 'sxxp', antiname = 'sxxp~', spin = 1, color = 1, mass = Param.msxxp, # we should change width = Param.ZERO, # we should change texname = 'sxp', antitexname = 's\\tilde{xp}', charge = 0, Y = 0, GhostNumber = 0) sxxp__tilde__ = sxxp.anti() sxxn = Particle(pdg_code = 9000202, name = 'sxxn', antiname = 'sxxn~', spin = 1, color = 1, mass = Param.msxxn, # we should change width = Param.ZERO, # we should change texname = 'sxn', antitexname = 's\\tilde{xn}', charge = 0, Y = 0, GhostNumber = 0) sxxn__tilde__ = sxxn.anti() # -*- coding: utf-8 -*- """ pygments.lexers.testing ~~~~~~~~~~~~~~~~~~~~~~~ Lexers for testing languages. :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ from pygments.lexer import RegexLexer, include, bygroups from pygments.token import Comment, Keyword, Name, String __all__ = ['GherkinLexer'] class GherkinLexer(RegexLexer): """ For `Gherkin ` syntax. .. 
versionadded:: 1.2 """ name = 'Gherkin' aliases = ['cucumber', 'gherkin'] filenames = ['*.feature'] mimetypes = ['text/x-gherkin'] feature_keywords = u'^(기능|機能|功能|フィーチャ|خاصية|תכונה|Функціонал|Функционалност|Функционал|Фича|Особина|Могућност|Özellik|Właściwość|Tính năng|Trajto|Savybė|Požiadavka|Požadavek|Osobina|Ominaisuus|Omadus|OH HAI|Mogućnost|Mogucnost|Jellemző|Fīča|Funzionalità|Funktionalität|Funkcionalnost|Funkcionalitāte|Funcționalitate|Functionaliteit|Functionalitate|Funcionalitat|Funcionalidade|Fonctionnalité|Fitur|Feature|Egenskap|Egenskab|Crikey|Característica|Arwedd)(:)(.*)$' feature_element_keywords = u'^(\\s*)(시나리오 개요|시나리오|배경|背景|場景大綱|場景|场景大纲|场景|劇本大綱|劇本|テンプレ|シナリオテンプレート|シナリオテンプレ|シナリオアウトライン|シナリオ|سيناريو مخطط|سيناريو|الخلفية|תרחיש|תבנית תרחיש|רקע|Тарих|Сценарій|Сценарио|Сценарий структураси|Сценарий|Структура сценарію|Структура сценарија|Структура сценария|Скица|Рамка на сценарий|Пример|Предыстория|Предистория|Позадина|Передумова|Основа|Концепт|Контекст|Założenia|Wharrimean is|Tình huống|The thing of it is|Tausta|Taust|Tapausaihio|Tapaus|Szenariogrundriss|Szenario|Szablon scenariusza|Stsenaarium|Struktura scenarija|Skica|Skenario konsep|Skenario|Situācija|Senaryo taslağı|Senaryo|Scénář|Scénario|Schema dello scenario|Scenārijs pēc parauga|Scenārijs|Scenár|Scenaro|Scenariusz|Scenariul de şablon|Scenariul de sablon|Scenariu|Scenario Outline|Scenario Amlinellol|Scenario|Scenarijus|Scenarijaus šablonas|Scenarij|Scenarie|Rerefons|Raamstsenaarium|Primer|Pozadí|Pozadina|Pozadie|Plan du scénario|Plan du Scénario|Osnova scénáře|Osnova|Náčrt Scénáře|Náčrt Scenáru|Mate|MISHUN SRSLY|MISHUN|Kịch bản|Konturo de la scenaro|Kontext|Konteksts|Kontekstas|Kontekst|Koncept|Khung tình huống|Khung kịch bản|Háttér|Grundlage|Geçmiş|Forgatókönyv vázlat|Forgatókönyv|Fono|Esquema do Cenário|Esquema do Cenario|Esquema del escenario|Esquema de l\'escenari|Escenario|Escenari|Dis is what went down|Dasar|Contexto|Contexte|Contesto|Condiţii|Conditii|Cenário|Cenario|Cefndir|Bối cảnh|Blokes|Bakgrunn|Bakgrund|Baggrund|Background|B4|Antecedents|Antecedentes|All y\'all|Achtergrond|Abstrakt Scenario|Abstract Scenario)(:)(.*)$' examples_keywords = u'^(\\s*)(예|例子|例|サンプル|امثلة|דוגמאות|Сценарији|Примери|Приклади|Мисоллар|Значения|Örnekler|Voorbeelden|Variantai|Tapaukset|Scenarios|Scenariji|Scenarijai|Příklady|Példák|Príklady|Przykłady|Primjeri|Primeri|Piemēri|Pavyzdžiai|Paraugs|Juhtumid|Exemplos|Exemples|Exemplele|Exempel|Examples|Esempi|Enghreifftiau|Ekzemploj|Eksempler|Ejemplos|EXAMPLZ|Dữ liệu|Contoh|Cobber|Beispiele)(:)(.*)$' step_keywords = u'^(\\s*)(하지만|조건|먼저|만일|만약|단|그리고|그러면|那麼|那么|而且|當|当|前提|假設|假如|但是|但し|並且|もし|ならば|ただし|しかし|かつ|و |متى |لكن |عندما |ثم |بفرض |اذاً |כאשר |וגם |בהינתן |אזי |אז |אבל |Якщо |Унда |То |Припустимо, що |Припустимо |Онда |Но |Нехай |Лекин |Когато |Када |Кад |К тому же |И |Задато |Задати |Задате |Если |Допустим |Дадено |Ва |Бирок |Аммо |Али |Але |Агар |А |І |Și |És |Zatati |Zakładając |Zadato |Zadate |Zadano |Zadani |Zadan |Youse know when youse got |Youse know like when |Yna |Ya know how |Ya gotta |Y |Wun |Wtedy |When y\'all |When |Wenn |WEN |Và |Ve |Und |Un |Thì |Then y\'all |Then |Tapi |Tak |Tada |Tad |Så |Stel |Soit |Siis |Si |Sed |Se |Quando |Quand |Quan |Pryd |Pokud |Pokiaľ |Però |Pero |Pak |Oraz |Onda |Ond |Oletetaan |Og |Och |O zaman |Når |När |Niin |Nhưng |N |Mutta |Men |Mas |Maka |Majd |Mais |Maar |Ma |Lorsque |Lorsqu\'|Kun |Kuid |Kui |Khi |Keď |Ketika |Když |Kaj |Kai |Kada |Kad |Jeżeli |Ja |Ir |I CAN HAZ |I |Ha |Givun |Givet |Given y\'all |Given |Gitt |Gegeven |Gegeben sei |Fakat |Eğer ki 
|Etant donné |Et |Então |Entonces |Entao |En |Eeldades |E |Duota |Dun |Donitaĵo |Donat |Donada |Do |Diyelim ki |Dengan |Den youse gotta |De |Dato |Dar |Dann |Dan |Dado |Dacă |Daca |DEN |Când |Cuando |Cho |Cept |Cand |Cal |But y\'all |But |Buh |Biết |Bet |BUT |Atès |Atunci |Atesa |Anrhegedig a |Angenommen |And y\'all |And |An |Ama |Als |Alors |Allora |Ali |Aleshores |Ale |Akkor |Aber |AN |A také |A |\* )' tokens = { 'comments': [ (r'^\s*#.*$', Comment), ], 'feature_elements': [ (step_keywords, Keyword, "step_content_stack"), include('comments'), (r"(\s|.)", Name.Function), ], 'feature_elements_on_stack': [ (step_keywords, Keyword, "#pop:2"), include('comments'), (r"(\s|.)", Name.Function), ], 'examples_table': [ (r"\s+\|", Keyword, 'examples_table_header'), include('comments'), (r"(\s|.)", Name.Function), ], 'examples_table_header': [ (r"\s+\|\s*$", Keyword, "#pop:2"), include('comments'), (r"\\\|", Name.Variable), (r"\s*\|", Keyword), (r"[^|]", Name.Variable), ], 'scenario_sections_on_stack': [ (feature_element_keywords, bygroups(Name.Function, Keyword, Keyword, Name.Function), "feature_elements_on_stack"), ], 'narrative': [ include('scenario_sections_on_stack'), include('comments'), (r"(\s|.)", Name.Function), ], 'table_vars': [ (r'(<[^>]+>)', Name.Variable), ], 'numbers': [ (r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', String), ], 'string': [ include('table_vars'), (r'(\s|.)', String), ], 'py_string': [ (r'"""', Keyword, "#pop"), include('string'), ], 'step_content_root': [ (r"$", Keyword, "#pop"), include('step_content'), ], 'step_content_stack': [ (r"$", Keyword, "#pop:2"), include('step_content'), ], 'step_content': [ (r'"', Name.Function, "double_string"), include('table_vars'), include('numbers'), include('comments'), (r'(\s|.)', Name.Function), ], 'table_content': [ (r"\s+\|\s*$", Keyword, "#pop"), include('comments'), (r"\\\|", String), (r"\s*\|", Keyword), include('string'), ], 'double_string': [ (r'"', Name.Function, "#pop"), include('string'), ], 'root': [ (r'\n', Name.Function), include('comments'), (r'"""', Keyword, "py_string"), (r'\s+\|', Keyword, 'table_content'), (r'"', Name.Function, "double_string"), include('table_vars'), include('numbers'), (r'(\s*)(@[^@\r\n\t ]+)', bygroups(Name.Function, Name.Tag)), (step_keywords, bygroups(Name.Function, Keyword), 'step_content_root'), (feature_keywords, bygroups(Keyword, Keyword, Name.Function), 'narrative'), (feature_element_keywords, bygroups(Name.Function, Keyword, Keyword, Name.Function), 'feature_elements'), (examples_keywords, bygroups(Name.Function, Keyword, Keyword, Name.Function), 'examples_table'), (r'(\s|.)', Name.Function), ] } # This program is free software; you can redistribute it and/or modify # it under the terms of the (LGPL) GNU Lesser General Public License as # published by the Free Software Foundation; either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Library Lesser General Public License for more details at # ( http://www.gnu.org/licenses/lgpl.html ). # # You should have received a copy of the GNU Lesser General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. # written by: Jeff Ortel ( jortel@redhat.com ) """ Provides modules containing classes to support marshalling (XML). 
""" from suds.sudsobject import Object class Content(Object): """ Marshaller Content. @ivar tag: The content tag. @type tag: str @ivar value: The content's value. @type value: I{any} """ extensions = [] def __init__(self, tag=None, value=None, **kwargs): """ @param tag: The content tag. @type tag: str @param value: The content's value. @type value: I{any} """ Object.__init__(self) self.tag = tag self.value = value for k,v in kwargs.items(): setattr(self, k, v) def __getattr__(self, name): if name not in self.__dict__: if name in self.extensions: v = None setattr(self, name, v) else: raise AttributeError, \ 'Content has no attribute %s' % name else: v = self.__dict__[name] return v # -*- coding: utf-8 -*- # import logging from django.core.cache import cache from celery import subtask from celery.signals import ( worker_ready, worker_shutdown, after_setup_logger ) from kombu.utils.encoding import safe_str from django_celery_beat.models import PeriodicTask from common.utils import get_logger from .decorator import get_after_app_ready_tasks, get_after_app_shutdown_clean_tasks from .logger import CeleryTaskFileHandler logger = get_logger(__file__) safe_str = lambda x: x @worker_ready.connect def on_app_ready(sender=None, headers=None, **kwargs): if cache.get("CELERY_APP_READY", 0) == 1: return cache.set("CELERY_APP_READY", 1, 10) tasks = get_after_app_ready_tasks() logger.debug("Work ready signal recv") logger.debug("Start need start task: [{}]".format(", ".join(tasks))) for task in tasks: subtask(task).delay() @worker_shutdown.connect def after_app_shutdown_periodic_tasks(sender=None, **kwargs): if cache.get("CELERY_APP_SHUTDOWN", 0) == 1: return cache.set("CELERY_APP_SHUTDOWN", 1, 10) tasks = get_after_app_shutdown_clean_tasks() logger.debug("Worker shutdown signal recv") logger.debug("Clean period tasks: [{}]".format(', '.join(tasks))) PeriodicTask.objects.filter(name__in=tasks).delete() @after_setup_logger.connect def add_celery_logger_handler(sender=None, logger=None, loglevel=None, format=None, **kwargs): if not logger: return task_handler = CeleryTaskFileHandler() task_handler.setLevel(loglevel) formatter = logging.Formatter(format) task_handler.setFormatter(formatter) logger.addHandler(task_handler) # # Secret Labs' Regular Expression Engine # # re-compatible interface for the sre matching engine # # Copyright (c) 1998-2001 by Secret Labs AB. All rights reserved. # # This version of the SRE library can be redistributed under CNRI's # Python 1.6 license. For any other use, please contact Secret Labs # AB (info@pythonware.com). # # Portions of this engine have been developed in cooperation with # CNRI. Hewlett-Packard provided funding for 1.6 integration and # other compatibility work. # r"""Support for regular expressions (RE). This module provides regular expression matching operations similar to those found in Perl. It supports both 8-bit and Unicode strings; both the pattern and the strings being processed can contain null bytes and characters outside the US ASCII range. Regular expressions can contain both special and ordinary characters. Most ordinary characters, like "A", "a", or "0", are the simplest regular expressions; they simply match themselves. You can concatenate ordinary characters, so last matches the string 'last'. The special characters are: "." Matches any character except a newline. "^" Matches the start of the string. "$" Matches the end of the string or just before the newline at the end of the string. "*" Matches 0 or more (greedy) repetitions of the preceding RE. 
Greedy means that it will match as many repetitions as possible. "+" Matches 1 or more (greedy) repetitions of the preceding RE. "?" Matches 0 or 1 (greedy) of the preceding RE. *?,+?,?? Non-greedy versions of the previous three special characters. {m,n} Matches from m to n repetitions of the preceding RE. {m,n}? Non-greedy version of the above. "\\" Either escapes special characters or signals a special sequence. [] Indicates a set of characters. A "^" as the first character indicates a complementing set. "|" A|B, creates an RE that will match either A or B. (...) Matches the RE inside the parentheses. The contents can be retrieved or matched later in the string. (?aiLmsux) Set the A, I, L, M, S, U, or X flag for the RE (see below). (?:...) Non-grouping version of regular parentheses. (?P...) The substring matched by the group is accessible by name. (?P=name) Matches the text matched earlier by the group named name. (?#...) A comment; ignored. (?=...) Matches if ... matches next, but doesn't consume the string. (?!...) Matches if ... doesn't match next. (?<=...) Matches if preceded by ... (must be fixed length). (?= 0x02020000: __all__.append("finditer") def finditer(pattern, string, flags=0): """Return an iterator over all non-overlapping matches in the string. For each match, the iterator returns a match object. Empty matches are included in the result.""" return _compile(pattern, flags).finditer(string) def compile(pattern, flags=0): "Compile a regular expression pattern, returning a pattern object." return _compile(pattern, flags) def purge(): "Clear the regular expression caches" _cache.clear() _cache_repl.clear() def template(pattern, flags=0): "Compile a template pattern, returning a pattern object" return _compile(pattern, flags|T) _alphanum_str = frozenset( "_abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234567890") _alphanum_bytes = frozenset( b"_abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234567890") def escape(pattern): """ Escape all the characters in pattern except ASCII letters, numbers and '_'. 
""" if isinstance(pattern, str): alphanum = _alphanum_str s = list(pattern) for i, c in enumerate(pattern): if c not in alphanum: if c == "\000": s[i] = "\\000" else: s[i] = "\\" + c return "".join(s) else: alphanum = _alphanum_bytes s = [] esc = ord(b"\\") for c in pattern: if c in alphanum: s.append(c) else: if c == 0: s.extend(b"\\000") else: s.append(esc) s.append(c) return bytes(s) # -------------------------------------------------------------------- # internals _cache = {} _cache_repl = {} _pattern_type = type(sre_compile.compile("", 0)) _MAXCACHE = 512 def _compile(pattern, flags): # internal: compile pattern bypass_cache = flags & DEBUG if not bypass_cache: try: return _cache[type(pattern), pattern, flags] except KeyError: pass if isinstance(pattern, _pattern_type): if flags: raise ValueError( "Cannot process flags argument with a compiled pattern") return pattern if not sre_compile.isstring(pattern): raise TypeError("first argument must be string or compiled pattern") p = sre_compile.compile(pattern, flags) if not bypass_cache: if len(_cache) >= _MAXCACHE: _cache.clear() _cache[type(pattern), pattern, flags] = p return p def _compile_repl(repl, pattern): # internal: compile replacement pattern try: return _cache_repl[repl, pattern] except KeyError: pass p = sre_parse.parse_template(repl, pattern) if len(_cache_repl) >= _MAXCACHE: _cache_repl.clear() _cache_repl[repl, pattern] = p return p def _expand(pattern, match, template): # internal: match.expand implementation hook template = sre_parse.parse_template(template, pattern) return sre_parse.expand_template(template, match) def _subx(pattern, template): # internal: pattern.sub/subn implementation helper template = _compile_repl(template, pattern) if not template[0] and len(template[1]) == 1: # literal replacement return template[1][0] def filter(match, template=template): return sre_parse.expand_template(template, match) return filter # register myself for pickling import copyreg def _pickle(p): return _compile, (p.pattern, p.flags) copyreg.pickle(_pattern_type, _pickle, _compile) # -------------------------------------------------------------------- # experimental stuff (see python-dev discussions for details) class Scanner: def __init__(self, lexicon, flags=0): from sre_constants import BRANCH, SUBPATTERN self.lexicon = lexicon # combine phrases into a compound pattern p = [] s = sre_parse.Pattern() s.flags = flags for phrase, action in lexicon: p.append(sre_parse.SubPattern(s, [ (SUBPATTERN, (len(p)+1, sre_parse.parse(phrase, flags))), ])) s.groups = len(p)+1 p = sre_parse.SubPattern(s, [(BRANCH, (None, p))]) self.scanner = sre_compile.compile(p) def scan(self, string): result = [] append = result.append match = self.scanner.scanner(string).match i = 0 while 1: m = match() if not m: break j = m.end() if i == j: break action = self.lexicon[m.lastindex-1][1] if callable(action): self.match = m action = action(self, m.group()) if action is not None: append(action) i = j return result, string[i:] #!/usr/bin/env python3 # Copyright (c) 2016-2020 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test various net timeouts. - Create three bitcoind nodes: no_verack_node - we never send a verack in response to their version no_version_node - we never send a version (only a ping) no_send_node - we never send any P2P message. 
- Start all three nodes - Wait 1 second - Assert that we're connected - Send a ping to no_verack_node and no_version_node - Wait 1 second - Assert that we're still connected - Send a ping to no_verack_node and no_version_node - Wait 2 seconds - Assert that we're no longer connected (timeout to receive version/verack is 3 seconds) """ from time import sleep from test_framework.messages import msg_ping from test_framework.p2p import P2PInterface from test_framework.test_framework import BitcoinTestFramework class TestP2PConn(P2PInterface): def on_version(self, message): # Don't send a verack in response pass class TimeoutsTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 1 # set timeout to receive version/verack to 3 seconds self.extra_args = [["-peertimeout=3"]] def run_test(self): # Setup the p2p connections no_verack_node = self.nodes[0].add_p2p_connection(TestP2PConn(), wait_for_verack=False) no_version_node = self.nodes[0].add_p2p_connection(TestP2PConn(), send_version=False, wait_for_verack=False) no_send_node = self.nodes[0].add_p2p_connection(TestP2PConn(), send_version=False, wait_for_verack=False) # Wait until we got the verack in response to the version. Though, don't wait for the other node to receive the # verack, since we never sent one no_verack_node.wait_for_verack() sleep(1) assert no_verack_node.is_connected assert no_version_node.is_connected assert no_send_node.is_connected with self.nodes[0].assert_debug_log(['Unsupported message "ping" prior to verack from peer=0']): no_verack_node.send_message(msg_ping()) with self.nodes[0].assert_debug_log(['non-version message before version handshake. Message "ping" from peer=1']): no_version_node.send_message(msg_ping()) sleep(1) assert "version" in no_verack_node.last_message assert no_verack_node.is_connected assert no_version_node.is_connected assert no_send_node.is_connected no_verack_node.send_message(msg_ping()) no_version_node.send_message(msg_ping()) expected_timeout_logs = [ "version handshake timeout from 0", "socket no message in first 3 seconds, 1 0 from 1", "socket no message in first 3 seconds, 0 0 from 2", ] with self.nodes[0].assert_debug_log(expected_msgs=expected_timeout_logs): sleep(3) # By now, we waited a total of 5 seconds. 
Off-by-two for two # reasons: # * The internal precision is one second # * Account for network delay assert not no_verack_node.is_connected assert not no_version_node.is_connected assert not no_send_node.is_connected if __name__ == '__main__': TimeoutsTest().main() #!/usr/bin/env python # encoding: utf-8 """ @version: @author: Will @license: @contact: @site: http://www.timesnotes.com @file: shopping.py @time: 15-11-3 上午8:48 """ from collections import OrderedDict ######################################## # 预算金额 while True: tmp_yusuan = raw_input("预算金额(最小单位:元):") if tmp_yusuan.strip().isdigit(): price_of_goods = int(tmp_yusuan.strip()) break else: print "输入错误,请重新输入预算金额" # print price_of_goods #商品总价 sum_goods_price = 0 # 从文件中读取信息 with open('goods.db') as goods_file: data_of_goods = goods_file.readlines() # print data_of_goods # 声明有序字典,并将读取的数据,格式化后写入字典 dict_of_goods = OrderedDict() dict_of_goods['id']={'name':'name','price':'price'} dict_of_goods_of_bought = OrderedDict() dict_of_goods_of_bought['id']={'name':'name','price':'price','count':'count','total':'total'} for goods_info_str in data_of_goods: goods_info = goods_info_str.strip().split(':') dict_of_goods[int(goods_info[0])]={'name':goods_info[1],'price':goods_info[2]} # print dict_of_goods # 输出商品列表 for ele in dict_of_goods: print ele,'\t',dict_of_goods[ele]['name'],'\t\t',dict_of_goods[ele]['price'] print '根据提示购买,输入help获取帮助信息' def help_info(): print '这里是帮助信息,输入‘help’获取帮助;输入‘cx’查询已购买商品啊,输入‘wc’完成购买;输入‘quit’退出购物车。' return '' login = True while login: buy_goods_id_count = raw_input("请输入商品id和数量,空格隔开。\n商品:") buy_goods_id_count_tmp_list = buy_goods_id_count.split() if len(buy_goods_id_count_tmp_list)>2: print "输入有误,请重新输入" elif len(buy_goods_id_count_tmp_list)<1: print "输入有误,请重新输入" elif len(buy_goods_id_count_tmp_list) ==1: ##打印帮助信息 if buy_goods_id_count_tmp_list[0].lower() == 'help': print help_info() # elif buy_goods_id_count_tmp_list[0] == 'wc': # login =False # pass #查询已经购买的商品 elif buy_goods_id_count_tmp_list[0].lower() == 'cx': # print dict_of_goods_of_bought for goods_info_of_bought in dict_of_goods_of_bought: print '{:^10}{:^10}{:^10}{:^10}'.format(goods_info_of_bought,\ dict_of_goods_of_bought[goods_info_of_bought]['name'],\ dict_of_goods_of_bought[goods_info_of_bought]['price'],\ dict_of_goods_of_bought[goods_info_of_bought]['count'],\ dict_of_goods_of_bought[goods_info_of_bought]['total']) # print ele,'\t',buy_goods_id_count_tmp_list[ele]['name'],'\t\t',buy_goods_id_count_tmp_list[ele]['price'] ##退出系统 elif buy_goods_id_count_tmp_list[0].lower() == 'quit': print '退出系统' break elif buy_goods_id_count_tmp_list[0].lower() == 'wc': print "商品选购结束" break else: print "输入有误,请重新输入" else: if not buy_goods_id_count_tmp_list[0].isdigit() or not buy_goods_id_count_tmp_list[1].isdigit(): print "输入有误,请重新输入" continue else: #这里id和count都输入正确 choose_goods_id = int(buy_goods_id_count_tmp_list[0]) choose_goods_count = int(buy_goods_id_count_tmp_list[1]) # print dict_of_goods[choose_goods_id]['price'] , choose_goods_count ,int(dict_of_goods[choose_goods_id]['price']) * choose_goods_count #已购商品字典,添加元素 if str(choose_goods_id) not in dict_of_goods_of_bought: dict_of_goods_of_bought[str(choose_goods_id)]={'name':dict_of_goods[choose_goods_id]['name'], 'price':dict_of_goods[choose_goods_id]['price'], 'count':choose_goods_count, 'total':int(dict_of_goods[choose_goods_id]['price']) * choose_goods_count} else: dict_of_goods_of_bought[str(choose_goods_id)]['count'] += choose_goods_count sum_goods_price += int(dict_of_goods[choose_goods_id]['price']) * 
choose_goods_count # print sum_goods_price print '预算为:',tmp_yusuan print '已购商商品总价为:',sum_goods_price chae = int(tmp_yusuan)-sum_goods_price if chae <0: print "您此次购买商品总价为:",sum_goods_price,'超支金额为:',sum_goods_price-int(tmp_yusuan) print '购买的物品为:\n' for goods_info_of_bought in dict_of_goods_of_bought: print '{:^10}{:^10}{:^10}{:^10}'.format(goods_info_of_bought,\ dict_of_goods_of_bought[goods_info_of_bought]['name'],\ dict_of_goods_of_bought[goods_info_of_bought]['price'],\ dict_of_goods_of_bought[goods_info_of_bought]['count'],\ dict_of_goods_of_bought[goods_info_of_bought]['total']) else: print "您此次购买商品总价为:",sum_goods_price,'剩余金额为:',int(tmp_yusuan)-sum_goods_price print '购买的物品为:' for goods_info_of_bought in dict_of_goods_of_bought: print '{:^10}{:^10}{:^10}{:^10}'.format(goods_info_of_bought,\ dict_of_goods_of_bought[goods_info_of_bought]['name'],\ dict_of_goods_of_bought[goods_info_of_bought]['price'],\ dict_of_goods_of_bought[goods_info_of_bought]['count'],\ dict_of_goods_of_bought[goods_info_of_bought]['total']) # 购买的商品写入文件 with open('shopping.db','w') as wf: for result_shopping_ele in dict_of_goods_of_bought: # print result_shopping_ele tmp_shopping_data = '' if result_shopping_ele != 'id': tmp_shopping_data='{}:{}:{}:{}:{}:{}:{}|'.format(result_shopping_ele,\ dict_of_goods_of_bought[result_shopping_ele]['name'],\ dict_of_goods_of_bought[result_shopping_ele]['price'],\ dict_of_goods_of_bought[result_shopping_ele]['count'],\ dict_of_goods_of_bought[result_shopping_ele]['total'],tmp_yusuan,chae) # print tmp_shopping_data wf.write(tmp_shopping_data) ######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Communicator client code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. 
# # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### # Big5 frequency table # by Taiwan's Mandarin Promotion Council # # # 128 --> 0.42261 # 256 --> 0.57851 # 512 --> 0.74851 # 1024 --> 0.89384 # 2048 --> 0.97583 # # Ideal Distribution Ratio = 0.74851/(1-0.74851) =2.98 # Random Distribution Ration = 512/(5401-512)=0.105 # # Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75 #Char to FreqOrder table BIG5_TABLE_SIZE = 5376 Big5CharToFreqOrder = ( 1,1801,1506, 255,1431, 198, 9, 82, 6,5008, 177, 202,3681,1256,2821, 110, # 16 3814, 33,3274, 261, 76, 44,2114, 16,2946,2187,1176, 659,3971, 26,3451,2653, # 32 1198,3972,3350,4202, 410,2215, 302, 590, 361,1964, 8, 204, 58,4510,5009,1932, # 48 63,5010,5011, 317,1614, 75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, # 64 3682, 3, 10,3973,1471, 29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, # 80 4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947, 34,3556,3204, 64, 604, # 96 5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337, 72, 406,5017, 80, # 112 630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449, 69,2987, 591, # 128 179,2096, 471, 115,2035,1844, 60, 50,2988, 134, 806,1869, 734,2036,3454, 180, # 144 995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, # 160 2502, 90,2716,1338, 663, 11, 906,1099,2553, 20,2441, 182, 532,1716,5019, 732, # 176 1376,4204,1311,1420,3206, 25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, # 192 3276, 475,1447,3683,5020, 117, 21, 656, 810,1297,2300,2334,3557,5021, 126,4205, # 208 706, 456, 150, 613,4513, 71,1118,2037,4206, 145,3092, 85, 835, 486,2115,1246, # 224 1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, # 240 3558,3135,5023,1956,1153,4207, 83, 296,1199,3093, 192, 624, 93,5024, 822,1898, # 256 2823,3136, 795,2065, 991,1554,1542,1592, 27, 43,2867, 859, 139,1456, 860,4514, # 272 437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, # 288 3976, 234, 811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, # 304 1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, # 320 5026,5027,2176,3207,3685,2682, 593, 845,1062,3277, 88,1723,2038,3978,1951, 212, # 336 266, 152, 149, 468,1899,4208,4516, 77, 187,5028,3038, 37, 5,2990,5029,3979, # 352 5030,5031, 39,2524,4517,2908,3208,2079, 55, 148, 74,4518, 545, 483,1474,1029, # 368 1665, 217,1870,1531,3138,1104,2655,4209, 24, 172,3562, 900,3980,3563,3564,4519, # 384 32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683, 4,3039,3351,1427,1789, # 400 188, 109, 499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, # 416 3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439, 38,5037,1063,5038, 794, # 432 3982,1435,2301, 46, 178,3278,2066,5039,2381,5040, 214,1709,4521, 804, 35, 707, # 448 324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, # 464 2129,1363,3689,1423, 697, 100,3094, 48, 70,1231, 495,3139,2196,5043,1294,5044, # 480 2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, # 496 314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, # 512 287,1577,2116, 768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, # 528 
3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, # 544 1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, # 560 1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, # 576 1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381, 7, # 592 2527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, # 608 265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, # 624 4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, # 640 1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, # 656 5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, # 672 2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, # 688 383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, # 704 98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, # 720 523,2789,2790,2658,5061, 141,2235,1333, 68, 176, 441, 876, 907,4220, 603,2602, # 736 710, 171,3464, 404, 549, 18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, # 752 5063,2991, 368,5064, 146, 366, 99, 871,3693,1543, 748, 807,1586,1185, 22,2263, # 768 379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, # 784 1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068, 59,5069, # 800 585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 749,1837, # 816 690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, # 832 5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, # 848 1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, # 864 544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, # 880 3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, # 896 4224, 57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, # 912 3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, # 928 279,3145, 51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, # 944 610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, # 960 1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, # 976 4227,2475,1436, 953,4228,2055,4545, 671,2400, 79,4229,2446,3285, 608, 567,2689, # 992 3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008 3827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024 2402,5097,5098,5099,4232,3045, 0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040 5101, 233,4233,3697,1819,4550,4551,5102, 96,1777,1315,2083,5103, 257,5104,1810, # 1056 3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072 5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088 1484,5110,1712, 127, 67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104 2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 1120 1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136 78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152 1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 882,4554,3995,2759,3470, # 1168 4555,2915,5114,4238,1726, 320,5115,3996,3046, 788,2996,5116,2831,1774,1327,2873, # 1184 
3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200 534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216 165, 243,4559,3703,2528, 123, 683,4239, 764,4560, 36,3998,1793, 589,2916, 816, # 1232 626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248 2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264 5122, 611,1156, 854,2386,1316,2875, 2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280 1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296 2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312 1444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328 1994,5135,4564,5136,5137,2198, 13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344 5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360 5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376 5149, 128,2133, 92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392 3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408 4567,2252, 94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424 4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440 2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456 5163,2337,2068, 23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472 3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488 598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504 5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863, 41, # 1520 5170,5171,4575,5172,1657,2338, 19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536 1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552 2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568 3474, 862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584 4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 1600 5182,2692, 733, 40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616 3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 1632 4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648 1902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664 1871,2762,3004,5187, 435,5188, 343,1108, 596, 17,1751,4579,2239,3477,3709,5189, # 1680 4580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696 1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712 240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728 1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744 1673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 1760 3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776 619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792 5203,5204,1958,1767,2956,3365,3712,1174, 452,1477,4594,3366,3155,5205,2838,1253, # 1808 2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824 1324,4595, 520,4596, 997, 847,1007, 
892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840 1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551, 30,2268,4266, # 1856 5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872 829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888 4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 652,4272,4273, # 1904 375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920 2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936 444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952 1041,3005, 293,1168, 87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968 1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984 730,1515, 184,2840, 66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000 4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016 4021,5231,5232,1186, 15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032 1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048 3596,1342,1681,1718, 766,3297, 286, 89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064 5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080 5240,3298, 310, 313,3482,2304, 770,4278, 54,3054, 189,4611,3105,3848,4025,5241, # 2096 1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112 2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128 1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144 3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160 2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176 3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192 2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208 4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224 4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240 3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256 97, 81,4290,5262,1648,1851,1558, 160, 848,5263, 866, 740,1694,5264,2204,2843, # 2272 3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288 424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304 3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320 4032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336 3722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352 1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368 5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384 199,2883,4038,5284,3606,2727,1753, 761,1754, 725,1661,1841,4628,3487,3724,5285, # 2400 5286, 587, 14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416 1702,1226, 102,1547, 62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432 391, 498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448 4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294, 86,1494,1730, # 2464 4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480 
397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496 2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512 2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885, 28,2695, # 2528 3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544 1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560 4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576 2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592 1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608 1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624 2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640 3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656 1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672 5313,3493,5314,5315,5316,3310,2698,1433,3311, 131, 95,1504,4049, 723,4303,3166, # 2688 1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704 4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654, 53,5320,3014,5321, # 2720 1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736 135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 2752 1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768 4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784 4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800 2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816 1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832 4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848 660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864 5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880 2322,3316,5346,5347,4308,5348,4309, 84,4310, 112, 989,5349, 547,1059,4064, 701, # 2896 3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912 4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928 790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944 5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960 5359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976 1238,2586,3109,1259,5361, 700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992 4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008 4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024 2699,1516,3614,1121,1082,1329,3317,4073,1449,3873, 65,1128,2848,2927,2769,1590, # 3040 3874,5370,5371, 12,2668, 45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056 3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072 2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088 1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104 4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120 3736,1859, 
91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136 3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152 2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168 4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771, 61,4079,3738,1823,4080, # 3184 5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200 3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216 2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232 3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248 1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264 2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280 3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296 4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063, 56,1396,3113, # 3312 2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328 2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344 5418,1076, 49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360 1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376 2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392 1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 3408 3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424 4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629, 31,2851, # 3440 2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456 3245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472 3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488 2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504 4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520 2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536 3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552 4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568 5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584 3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600 194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616 1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412, 42,3119, 464,5455,2642, # 3632 4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648 1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664 4701,5462,3020, 962, 588,3629, 289,3250,2644,1116, 52,5463,3067,1797,5464,5465, # 3680 5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696 510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712 5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728 5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744 2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760 3252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 
865,1948,4707,5485,2014,4098, # 3776 2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792 2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808 681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824 1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840 4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856 3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872 3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888 838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904 2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920 625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936 2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952 4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968 1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984 4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000 1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016 3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032 574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048 3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 388,4363,5526,2491, 802, # 4064 5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 4080 5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096 3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112 3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 4128 1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144 2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160 5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176 1561,2674,1452,4113,1375,5549,5550, 47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192 1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 4208 3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224 919,2352,2975,2353,1270,4727,4115, 73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240 1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 4256 4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272 5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288 2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 4304 3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320 516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336 1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 4352 2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368 2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384 5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400 5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416 
5594,2814,2032,1014,4121, 213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432 2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448 2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464 1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480 4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496 3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512 3415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528 4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544 4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560 2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576 2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592 5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608 4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624 5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640 4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656 502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672 121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688 1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704 3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 4720 4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736 1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752 5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768 2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784 2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800 3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816 5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 4832 1605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 4848 3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864 5673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 4880 1243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896 5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912 2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928 3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 4944 2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960 3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976 3932,1988, 618, 427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992 3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008 4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024 803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 5040 2416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 5056 
4784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 5072 3937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 5088 5722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 5104 1173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 5120 5728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136 425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 636,4791,1856,3940, 760, # 5152 1800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168 479,5740,5741, 832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 5184 4433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 5200 1018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 5216 4155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 5232 1843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 5248 433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 5264 3197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 5280 4805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 5296 5776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312 938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 5328 3668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344 890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 5360 2299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376 #last 512 #Everything below is of no interest for detection purpose 2522,1613,4812,5799,3345,3945,2523,5800,4162,5801,1637,4163,2471,4813,3946,5802, # 5392 2500,3034,3800,5803,5804,2195,4814,5805,2163,5806,5807,5808,5809,5810,5811,5812, # 5408 5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824,5825,5826,5827,5828, # 5424 5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840,5841,5842,5843,5844, # 5440 5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856,5857,5858,5859,5860, # 5456 5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872,5873,5874,5875,5876, # 5472 5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888,5889,5890,5891,5892, # 5488 5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904,5905,5906,5907,5908, # 5504 5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920,5921,5922,5923,5924, # 5520 5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936,5937,5938,5939,5940, # 5536 5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952,5953,5954,5955,5956, # 5552 5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968,5969,5970,5971,5972, # 5568 5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984,5985,5986,5987,5988, # 5584 5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000,6001,6002,6003,6004, # 5600 6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016,6017,6018,6019,6020, # 5616 6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032,6033,6034,6035,6036, # 5632 6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048,6049,6050,6051,6052, # 5648 6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064,6065,6066,6067,6068, # 5664 6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080,6081,6082,6083,6084, # 5680 
6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096,6097,6098,6099,6100, # 5696 6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112,6113,6114,6115,6116, # 5712 6117,6118,6119,6120,6121,6122,6123,6124,6125,6126,6127,6128,6129,6130,6131,6132, # 5728 6133,6134,6135,6136,6137,6138,6139,6140,6141,6142,6143,6144,6145,6146,6147,6148, # 5744 6149,6150,6151,6152,6153,6154,6155,6156,6157,6158,6159,6160,6161,6162,6163,6164, # 5760 6165,6166,6167,6168,6169,6170,6171,6172,6173,6174,6175,6176,6177,6178,6179,6180, # 5776 6181,6182,6183,6184,6185,6186,6187,6188,6189,6190,6191,6192,6193,6194,6195,6196, # 5792 6197,6198,6199,6200,6201,6202,6203,6204,6205,6206,6207,6208,6209,6210,6211,6212, # 5808 6213,6214,6215,6216,6217,6218,6219,6220,6221,6222,6223,3670,6224,6225,6226,6227, # 5824 6228,6229,6230,6231,6232,6233,6234,6235,6236,6237,6238,6239,6240,6241,6242,6243, # 5840 6244,6245,6246,6247,6248,6249,6250,6251,6252,6253,6254,6255,6256,6257,6258,6259, # 5856 6260,6261,6262,6263,6264,6265,6266,6267,6268,6269,6270,6271,6272,6273,6274,6275, # 5872 6276,6277,6278,6279,6280,6281,6282,6283,6284,6285,4815,6286,6287,6288,6289,6290, # 5888 6291,6292,4816,6293,6294,6295,6296,6297,6298,6299,6300,6301,6302,6303,6304,6305, # 5904 6306,6307,6308,6309,6310,6311,4817,4818,6312,6313,6314,6315,6316,6317,6318,4819, # 5920 6319,6320,6321,6322,6323,6324,6325,6326,6327,6328,6329,6330,6331,6332,6333,6334, # 5936 6335,6336,6337,4820,6338,6339,6340,6341,6342,6343,6344,6345,6346,6347,6348,6349, # 5952 6350,6351,6352,6353,6354,6355,6356,6357,6358,6359,6360,6361,6362,6363,6364,6365, # 5968 6366,6367,6368,6369,6370,6371,6372,6373,6374,6375,6376,6377,6378,6379,6380,6381, # 5984 6382,6383,6384,6385,6386,6387,6388,6389,6390,6391,6392,6393,6394,6395,6396,6397, # 6000 6398,6399,6400,6401,6402,6403,6404,6405,6406,6407,6408,6409,6410,3441,6411,6412, # 6016 6413,6414,6415,6416,6417,6418,6419,6420,6421,6422,6423,6424,6425,4440,6426,6427, # 6032 6428,6429,6430,6431,6432,6433,6434,6435,6436,6437,6438,6439,6440,6441,6442,6443, # 6048 6444,6445,6446,6447,6448,6449,6450,6451,6452,6453,6454,4821,6455,6456,6457,6458, # 6064 6459,6460,6461,6462,6463,6464,6465,6466,6467,6468,6469,6470,6471,6472,6473,6474, # 6080 6475,6476,6477,3947,3948,6478,6479,6480,6481,3272,4441,6482,6483,6484,6485,4442, # 6096 6486,6487,6488,6489,6490,6491,6492,6493,6494,6495,6496,4822,6497,6498,6499,6500, # 6112 6501,6502,6503,6504,6505,6506,6507,6508,6509,6510,6511,6512,6513,6514,6515,6516, # 6128 6517,6518,6519,6520,6521,6522,6523,6524,6525,6526,6527,6528,6529,6530,6531,6532, # 6144 6533,6534,6535,6536,6537,6538,6539,6540,6541,6542,6543,6544,6545,6546,6547,6548, # 6160 6549,6550,6551,6552,6553,6554,6555,6556,2784,6557,4823,6558,6559,6560,6561,6562, # 6176 6563,6564,6565,6566,6567,6568,6569,3949,6570,6571,6572,4824,6573,6574,6575,6576, # 6192 6577,6578,6579,6580,6581,6582,6583,4825,6584,6585,6586,3950,2785,6587,6588,6589, # 6208 6590,6591,6592,6593,6594,6595,6596,6597,6598,6599,6600,6601,6602,6603,6604,6605, # 6224 6606,6607,6608,6609,6610,6611,6612,4826,6613,6614,6615,4827,6616,6617,6618,6619, # 6240 6620,6621,6622,6623,6624,6625,4164,6626,6627,6628,6629,6630,6631,6632,6633,6634, # 6256 3547,6635,4828,6636,6637,6638,6639,6640,6641,6642,3951,2984,6643,6644,6645,6646, # 6272 6647,6648,6649,4165,6650,4829,6651,6652,4830,6653,6654,6655,6656,6657,6658,6659, # 6288 6660,6661,6662,4831,6663,6664,6665,6666,6667,6668,6669,6670,6671,4166,6672,4832, # 6304 3952,6673,6674,6675,6676,4833,6677,6678,6679,4167,6680,6681,6682,3198,6683,6684, # 6320 
6685,6686,6687,6688,6689,6690,6691,6692,6693,6694,6695,6696,6697,4834,6698,6699, # 6336 6700,6701,6702,6703,6704,6705,6706,6707,6708,6709,6710,6711,6712,6713,6714,6715, # 6352 6716,6717,6718,6719,6720,6721,6722,6723,6724,6725,6726,6727,6728,6729,6730,6731, # 6368 6732,6733,6734,4443,6735,6736,6737,6738,6739,6740,6741,6742,6743,6744,6745,4444, # 6384 6746,6747,6748,6749,6750,6751,6752,6753,6754,6755,6756,6757,6758,6759,6760,6761, # 6400 6762,6763,6764,6765,6766,6767,6768,6769,6770,6771,6772,6773,6774,6775,6776,6777, # 6416 6778,6779,6780,6781,4168,6782,6783,3442,6784,6785,6786,6787,6788,6789,6790,6791, # 6432 4169,6792,6793,6794,6795,6796,6797,6798,6799,6800,6801,6802,6803,6804,6805,6806, # 6448 6807,6808,6809,6810,6811,4835,6812,6813,6814,4445,6815,6816,4446,6817,6818,6819, # 6464 6820,6821,6822,6823,6824,6825,6826,6827,6828,6829,6830,6831,6832,6833,6834,6835, # 6480 3548,6836,6837,6838,6839,6840,6841,6842,6843,6844,6845,6846,4836,6847,6848,6849, # 6496 6850,6851,6852,6853,6854,3953,6855,6856,6857,6858,6859,6860,6861,6862,6863,6864, # 6512 6865,6866,6867,6868,6869,6870,6871,6872,6873,6874,6875,6876,6877,3199,6878,6879, # 6528 6880,6881,6882,4447,6883,6884,6885,6886,6887,6888,6889,6890,6891,6892,6893,6894, # 6544 6895,6896,6897,6898,6899,6900,6901,6902,6903,6904,4170,6905,6906,6907,6908,6909, # 6560 6910,6911,6912,6913,6914,6915,6916,6917,6918,6919,6920,6921,6922,6923,6924,6925, # 6576 6926,6927,4837,6928,6929,6930,6931,6932,6933,6934,6935,6936,3346,6937,6938,4838, # 6592 6939,6940,6941,4448,6942,6943,6944,6945,6946,4449,6947,6948,6949,6950,6951,6952, # 6608 6953,6954,6955,6956,6957,6958,6959,6960,6961,6962,6963,6964,6965,6966,6967,6968, # 6624 6969,6970,6971,6972,6973,6974,6975,6976,6977,6978,6979,6980,6981,6982,6983,6984, # 6640 6985,6986,6987,6988,6989,6990,6991,6992,6993,6994,3671,6995,6996,6997,6998,4839, # 6656 6999,7000,7001,7002,3549,7003,7004,7005,7006,7007,7008,7009,7010,7011,7012,7013, # 6672 7014,7015,7016,7017,7018,7019,7020,7021,7022,7023,7024,7025,7026,7027,7028,7029, # 6688 7030,4840,7031,7032,7033,7034,7035,7036,7037,7038,4841,7039,7040,7041,7042,7043, # 6704 7044,7045,7046,7047,7048,7049,7050,7051,7052,7053,7054,7055,7056,7057,7058,7059, # 6720 7060,7061,7062,7063,7064,7065,7066,7067,7068,7069,7070,2985,7071,7072,7073,7074, # 6736 7075,7076,7077,7078,7079,7080,4842,7081,7082,7083,7084,7085,7086,7087,7088,7089, # 6752 7090,7091,7092,7093,7094,7095,7096,7097,7098,7099,7100,7101,7102,7103,7104,7105, # 6768 7106,7107,7108,7109,7110,7111,7112,7113,7114,7115,7116,7117,7118,4450,7119,7120, # 6784 7121,7122,7123,7124,7125,7126,7127,7128,7129,7130,7131,7132,7133,7134,7135,7136, # 6800 7137,7138,7139,7140,7141,7142,7143,4843,7144,7145,7146,7147,7148,7149,7150,7151, # 6816 7152,7153,7154,7155,7156,7157,7158,7159,7160,7161,7162,7163,7164,7165,7166,7167, # 6832 7168,7169,7170,7171,7172,7173,7174,7175,7176,7177,7178,7179,7180,7181,7182,7183, # 6848 7184,7185,7186,7187,7188,4171,4172,7189,7190,7191,7192,7193,7194,7195,7196,7197, # 6864 7198,7199,7200,7201,7202,7203,7204,7205,7206,7207,7208,7209,7210,7211,7212,7213, # 6880 7214,7215,7216,7217,7218,7219,7220,7221,7222,7223,7224,7225,7226,7227,7228,7229, # 6896 7230,7231,7232,7233,7234,7235,7236,7237,7238,7239,7240,7241,7242,7243,7244,7245, # 6912 7246,7247,7248,7249,7250,7251,7252,7253,7254,7255,7256,7257,7258,7259,7260,7261, # 6928 7262,7263,7264,7265,7266,7267,7268,7269,7270,7271,7272,7273,7274,7275,7276,7277, # 6944 7278,7279,7280,7281,7282,7283,7284,7285,7286,7287,7288,7289,7290,7291,7292,7293, # 6960 
7294,7295,7296,4844,7297,7298,7299,7300,7301,7302,7303,7304,7305,7306,7307,7308, # 6976 7309,7310,7311,7312,7313,7314,7315,7316,4451,7317,7318,7319,7320,7321,7322,7323, # 6992 7324,7325,7326,7327,7328,7329,7330,7331,7332,7333,7334,7335,7336,7337,7338,7339, # 7008 7340,7341,7342,7343,7344,7345,7346,7347,7348,7349,7350,7351,7352,7353,4173,7354, # 7024 7355,4845,7356,7357,7358,7359,7360,7361,7362,7363,7364,7365,7366,7367,7368,7369, # 7040 7370,7371,7372,7373,7374,7375,7376,7377,7378,7379,7380,7381,7382,7383,7384,7385, # 7056 7386,7387,7388,4846,7389,7390,7391,7392,7393,7394,7395,7396,7397,7398,7399,7400, # 7072 7401,7402,7403,7404,7405,3672,7406,7407,7408,7409,7410,7411,7412,7413,7414,7415, # 7088 7416,7417,7418,7419,7420,7421,7422,7423,7424,7425,7426,7427,7428,7429,7430,7431, # 7104 7432,7433,7434,7435,7436,7437,7438,7439,7440,7441,7442,7443,7444,7445,7446,7447, # 7120 7448,7449,7450,7451,7452,7453,4452,7454,3200,7455,7456,7457,7458,7459,7460,7461, # 7136 7462,7463,7464,7465,7466,7467,7468,7469,7470,7471,7472,7473,7474,4847,7475,7476, # 7152 7477,3133,7478,7479,7480,7481,7482,7483,7484,7485,7486,7487,7488,7489,7490,7491, # 7168 7492,7493,7494,7495,7496,7497,7498,7499,7500,7501,7502,3347,7503,7504,7505,7506, # 7184 7507,7508,7509,7510,7511,7512,7513,7514,7515,7516,7517,7518,7519,7520,7521,4848, # 7200 7522,7523,7524,7525,7526,7527,7528,7529,7530,7531,7532,7533,7534,7535,7536,7537, # 7216 7538,7539,7540,7541,7542,7543,7544,7545,7546,7547,7548,7549,3801,4849,7550,7551, # 7232 7552,7553,7554,7555,7556,7557,7558,7559,7560,7561,7562,7563,7564,7565,7566,7567, # 7248 7568,7569,3035,7570,7571,7572,7573,7574,7575,7576,7577,7578,7579,7580,7581,7582, # 7264 7583,7584,7585,7586,7587,7588,7589,7590,7591,7592,7593,7594,7595,7596,7597,7598, # 7280 7599,7600,7601,7602,7603,7604,7605,7606,7607,7608,7609,7610,7611,7612,7613,7614, # 7296 7615,7616,4850,7617,7618,3802,7619,7620,7621,7622,7623,7624,7625,7626,7627,7628, # 7312 7629,7630,7631,7632,4851,7633,7634,7635,7636,7637,7638,7639,7640,7641,7642,7643, # 7328 7644,7645,7646,7647,7648,7649,7650,7651,7652,7653,7654,7655,7656,7657,7658,7659, # 7344 7660,7661,7662,7663,7664,7665,7666,7667,7668,7669,7670,4453,7671,7672,7673,7674, # 7360 7675,7676,7677,7678,7679,7680,7681,7682,7683,7684,7685,7686,7687,7688,7689,7690, # 7376 7691,7692,7693,7694,7695,7696,7697,3443,7698,7699,7700,7701,7702,4454,7703,7704, # 7392 7705,7706,7707,7708,7709,7710,7711,7712,7713,2472,7714,7715,7716,7717,7718,7719, # 7408 7720,7721,7722,7723,7724,7725,7726,7727,7728,7729,7730,7731,3954,7732,7733,7734, # 7424 7735,7736,7737,7738,7739,7740,7741,7742,7743,7744,7745,7746,7747,7748,7749,7750, # 7440 3134,7751,7752,4852,7753,7754,7755,4853,7756,7757,7758,7759,7760,4174,7761,7762, # 7456 7763,7764,7765,7766,7767,7768,7769,7770,7771,7772,7773,7774,7775,7776,7777,7778, # 7472 7779,7780,7781,7782,7783,7784,7785,7786,7787,7788,7789,7790,7791,7792,7793,7794, # 7488 7795,7796,7797,7798,7799,7800,7801,7802,7803,7804,7805,4854,7806,7807,7808,7809, # 7504 7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823,7824,7825, # 7520 4855,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839,7840, # 7536 7841,7842,7843,7844,7845,7846,7847,3955,7848,7849,7850,7851,7852,7853,7854,7855, # 7552 7856,7857,7858,7859,7860,3444,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870, # 7568 7871,7872,7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886, # 7584 7887,7888,7889,7890,7891,4175,7892,7893,7894,7895,7896,4856,4857,7897,7898,7899, # 7600 
7900,2598,7901,7902,7903,7904,7905,7906,7907,7908,4455,7909,7910,7911,7912,7913, # 7616 7914,3201,7915,7916,7917,7918,7919,7920,7921,4858,7922,7923,7924,7925,7926,7927, # 7632 7928,7929,7930,7931,7932,7933,7934,7935,7936,7937,7938,7939,7940,7941,7942,7943, # 7648 7944,7945,7946,7947,7948,7949,7950,7951,7952,7953,7954,7955,7956,7957,7958,7959, # 7664 7960,7961,7962,7963,7964,7965,7966,7967,7968,7969,7970,7971,7972,7973,7974,7975, # 7680 7976,7977,7978,7979,7980,7981,4859,7982,7983,7984,7985,7986,7987,7988,7989,7990, # 7696 7991,7992,7993,7994,7995,7996,4860,7997,7998,7999,8000,8001,8002,8003,8004,8005, # 7712 8006,8007,8008,8009,8010,8011,8012,8013,8014,8015,8016,4176,8017,8018,8019,8020, # 7728 8021,8022,8023,4861,8024,8025,8026,8027,8028,8029,8030,8031,8032,8033,8034,8035, # 7744 8036,4862,4456,8037,8038,8039,8040,4863,8041,8042,8043,8044,8045,8046,8047,8048, # 7760 8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063,8064, # 7776 8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079,8080, # 7792 8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095,8096, # 7808 8097,8098,8099,4864,4177,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110, # 7824 8111,8112,8113,8114,8115,8116,8117,8118,8119,8120,4178,8121,8122,8123,8124,8125, # 7840 8126,8127,8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141, # 7856 8142,8143,8144,8145,4865,4866,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155, # 7872 8156,8157,8158,8159,8160,8161,8162,8163,8164,8165,4179,8166,8167,8168,8169,8170, # 7888 8171,8172,8173,8174,8175,8176,8177,8178,8179,8180,8181,4457,8182,8183,8184,8185, # 7904 8186,8187,8188,8189,8190,8191,8192,8193,8194,8195,8196,8197,8198,8199,8200,8201, # 7920 8202,8203,8204,8205,8206,8207,8208,8209,8210,8211,8212,8213,8214,8215,8216,8217, # 7936 8218,8219,8220,8221,8222,8223,8224,8225,8226,8227,8228,8229,8230,8231,8232,8233, # 7952 8234,8235,8236,8237,8238,8239,8240,8241,8242,8243,8244,8245,8246,8247,8248,8249, # 7968 8250,8251,8252,8253,8254,8255,8256,3445,8257,8258,8259,8260,8261,8262,4458,8263, # 7984 8264,8265,8266,8267,8268,8269,8270,8271,8272,4459,8273,8274,8275,8276,3550,8277, # 8000 8278,8279,8280,8281,8282,8283,8284,8285,8286,8287,8288,8289,4460,8290,8291,8292, # 8016 8293,8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,8304,8305,8306,8307,4867, # 8032 8308,8309,8310,8311,8312,3551,8313,8314,8315,8316,8317,8318,8319,8320,8321,8322, # 8048 8323,8324,8325,8326,4868,8327,8328,8329,8330,8331,8332,8333,8334,8335,8336,8337, # 8064 8338,8339,8340,8341,8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,8352,8353, # 8080 8354,8355,8356,8357,8358,8359,8360,8361,8362,8363,4869,4461,8364,8365,8366,8367, # 8096 8368,8369,8370,4870,8371,8372,8373,8374,8375,8376,8377,8378,8379,8380,8381,8382, # 8112 8383,8384,8385,8386,8387,8388,8389,8390,8391,8392,8393,8394,8395,8396,8397,8398, # 8128 8399,8400,8401,8402,8403,8404,8405,8406,8407,8408,8409,8410,4871,8411,8412,8413, # 8144 8414,8415,8416,8417,8418,8419,8420,8421,8422,4462,8423,8424,8425,8426,8427,8428, # 8160 8429,8430,8431,8432,8433,2986,8434,8435,8436,8437,8438,8439,8440,8441,8442,8443, # 8176 8444,8445,8446,8447,8448,8449,8450,8451,8452,8453,8454,8455,8456,8457,8458,8459, # 8192 8460,8461,8462,8463,8464,8465,8466,8467,8468,8469,8470,8471,8472,8473,8474,8475, # 8208 8476,8477,8478,4180,8479,8480,8481,8482,8483,8484,8485,8486,8487,8488,8489,8490, # 8224 8491,8492,8493,8494,8495,8496,8497,8498,8499,8500,8501,8502,8503,8504,8505,8506, # 8240 
8507,8508,8509,8510,8511,8512,8513,8514,8515,8516,8517,8518,8519,8520,8521,8522, # 8256 8523,8524,8525,8526,8527,8528,8529,8530,8531,8532,8533,8534,8535,8536,8537,8538, # 8272 8539,8540,8541,8542,8543,8544,8545,8546,8547,8548,8549,8550,8551,8552,8553,8554, # 8288 8555,8556,8557,8558,8559,8560,8561,8562,8563,8564,4872,8565,8566,8567,8568,8569, # 8304 8570,8571,8572,8573,4873,8574,8575,8576,8577,8578,8579,8580,8581,8582,8583,8584, # 8320 8585,8586,8587,8588,8589,8590,8591,8592,8593,8594,8595,8596,8597,8598,8599,8600, # 8336 8601,8602,8603,8604,8605,3803,8606,8607,8608,8609,8610,8611,8612,8613,4874,3804, # 8352 8614,8615,8616,8617,8618,8619,8620,8621,3956,8622,8623,8624,8625,8626,8627,8628, # 8368 8629,8630,8631,8632,8633,8634,8635,8636,8637,8638,2865,8639,8640,8641,8642,8643, # 8384 8644,8645,8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,8656,4463,8657,8658, # 8400 8659,4875,4876,8660,8661,8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,8672, # 8416 8673,8674,8675,8676,8677,8678,8679,8680,8681,4464,8682,8683,8684,8685,8686,8687, # 8432 8688,8689,8690,8691,8692,8693,8694,8695,8696,8697,8698,8699,8700,8701,8702,8703, # 8448 8704,8705,8706,8707,8708,8709,2261,8710,8711,8712,8713,8714,8715,8716,8717,8718, # 8464 8719,8720,8721,8722,8723,8724,8725,8726,8727,8728,8729,8730,8731,8732,8733,4181, # 8480 8734,8735,8736,8737,8738,8739,8740,8741,8742,8743,8744,8745,8746,8747,8748,8749, # 8496 8750,8751,8752,8753,8754,8755,8756,8757,8758,8759,8760,8761,8762,8763,4877,8764, # 8512 8765,8766,8767,8768,8769,8770,8771,8772,8773,8774,8775,8776,8777,8778,8779,8780, # 8528 8781,8782,8783,8784,8785,8786,8787,8788,4878,8789,4879,8790,8791,8792,4880,8793, # 8544 8794,8795,8796,8797,8798,8799,8800,8801,4881,8802,8803,8804,8805,8806,8807,8808, # 8560 8809,8810,8811,8812,8813,8814,8815,3957,8816,8817,8818,8819,8820,8821,8822,8823, # 8576 8824,8825,8826,8827,8828,8829,8830,8831,8832,8833,8834,8835,8836,8837,8838,8839, # 8592 8840,8841,8842,8843,8844,8845,8846,8847,4882,8848,8849,8850,8851,8852,8853,8854, # 8608 8855,8856,8857,8858,8859,8860,8861,8862,8863,8864,8865,8866,8867,8868,8869,8870, # 8624 8871,8872,8873,8874,8875,8876,8877,8878,8879,8880,8881,8882,8883,8884,3202,8885, # 8640 8886,8887,8888,8889,8890,8891,8892,8893,8894,8895,8896,8897,8898,8899,8900,8901, # 8656 8902,8903,8904,8905,8906,8907,8908,8909,8910,8911,8912,8913,8914,8915,8916,8917, # 8672 8918,8919,8920,8921,8922,8923,8924,4465,8925,8926,8927,8928,8929,8930,8931,8932, # 8688 4883,8933,8934,8935,8936,8937,8938,8939,8940,8941,8942,8943,2214,8944,8945,8946, # 8704 8947,8948,8949,8950,8951,8952,8953,8954,8955,8956,8957,8958,8959,8960,8961,8962, # 8720 8963,8964,8965,4884,8966,8967,8968,8969,8970,8971,8972,8973,8974,8975,8976,8977, # 8736 8978,8979,8980,8981,8982,8983,8984,8985,8986,8987,8988,8989,8990,8991,8992,4885, # 8752 8993,8994,8995,8996,8997,8998,8999,9000,9001,9002,9003,9004,9005,9006,9007,9008, # 8768 9009,9010,9011,9012,9013,9014,9015,9016,9017,9018,9019,9020,9021,4182,9022,9023, # 8784 9024,9025,9026,9027,9028,9029,9030,9031,9032,9033,9034,9035,9036,9037,9038,9039, # 8800 9040,9041,9042,9043,9044,9045,9046,9047,9048,9049,9050,9051,9052,9053,9054,9055, # 8816 9056,9057,9058,9059,9060,9061,9062,9063,4886,9064,9065,9066,9067,9068,9069,4887, # 8832 9070,9071,9072,9073,9074,9075,9076,9077,9078,9079,9080,9081,9082,9083,9084,9085, # 8848 9086,9087,9088,9089,9090,9091,9092,9093,9094,9095,9096,9097,9098,9099,9100,9101, # 8864 9102,9103,9104,9105,9106,9107,9108,9109,9110,9111,9112,9113,9114,9115,9116,9117, # 8880 
9118,9119,9120,9121,9122,9123,9124,9125,9126,9127,9128,9129,9130,9131,9132,9133, # 8896 9134,9135,9136,9137,9138,9139,9140,9141,3958,9142,9143,9144,9145,9146,9147,9148, # 8912 9149,9150,9151,4888,9152,9153,9154,9155,9156,9157,9158,9159,9160,9161,9162,9163, # 8928 9164,9165,9166,9167,9168,9169,9170,9171,9172,9173,9174,9175,4889,9176,9177,9178, # 8944 9179,9180,9181,9182,9183,9184,9185,9186,9187,9188,9189,9190,9191,9192,9193,9194, # 8960 9195,9196,9197,9198,9199,9200,9201,9202,9203,4890,9204,9205,9206,9207,9208,9209, # 8976 9210,9211,9212,9213,9214,9215,9216,9217,9218,9219,9220,9221,9222,4466,9223,9224, # 8992 9225,9226,9227,9228,9229,9230,9231,9232,9233,9234,9235,9236,9237,9238,9239,9240, # 9008 9241,9242,9243,9244,9245,4891,9246,9247,9248,9249,9250,9251,9252,9253,9254,9255, # 9024 9256,9257,4892,9258,9259,9260,9261,4893,4894,9262,9263,9264,9265,9266,9267,9268, # 9040 9269,9270,9271,9272,9273,4467,9274,9275,9276,9277,9278,9279,9280,9281,9282,9283, # 9056 9284,9285,3673,9286,9287,9288,9289,9290,9291,9292,9293,9294,9295,9296,9297,9298, # 9072 9299,9300,9301,9302,9303,9304,9305,9306,9307,9308,9309,9310,9311,9312,9313,9314, # 9088 9315,9316,9317,9318,9319,9320,9321,9322,4895,9323,9324,9325,9326,9327,9328,9329, # 9104 9330,9331,9332,9333,9334,9335,9336,9337,9338,9339,9340,9341,9342,9343,9344,9345, # 9120 9346,9347,4468,9348,9349,9350,9351,9352,9353,9354,9355,9356,9357,9358,9359,9360, # 9136 9361,9362,9363,9364,9365,9366,9367,9368,9369,9370,9371,9372,9373,4896,9374,4469, # 9152 9375,9376,9377,9378,9379,4897,9380,9381,9382,9383,9384,9385,9386,9387,9388,9389, # 9168 9390,9391,9392,9393,9394,9395,9396,9397,9398,9399,9400,9401,9402,9403,9404,9405, # 9184 9406,4470,9407,2751,9408,9409,3674,3552,9410,9411,9412,9413,9414,9415,9416,9417, # 9200 9418,9419,9420,9421,4898,9422,9423,9424,9425,9426,9427,9428,9429,3959,9430,9431, # 9216 9432,9433,9434,9435,9436,4471,9437,9438,9439,9440,9441,9442,9443,9444,9445,9446, # 9232 9447,9448,9449,9450,3348,9451,9452,9453,9454,9455,9456,9457,9458,9459,9460,9461, # 9248 9462,9463,9464,9465,9466,9467,9468,9469,9470,9471,9472,4899,9473,9474,9475,9476, # 9264 9477,4900,9478,9479,9480,9481,9482,9483,9484,9485,9486,9487,9488,3349,9489,9490, # 9280 9491,9492,9493,9494,9495,9496,9497,9498,9499,9500,9501,9502,9503,9504,9505,9506, # 9296 9507,9508,9509,9510,9511,9512,9513,9514,9515,9516,9517,9518,9519,9520,4901,9521, # 9312 9522,9523,9524,9525,9526,4902,9527,9528,9529,9530,9531,9532,9533,9534,9535,9536, # 9328 9537,9538,9539,9540,9541,9542,9543,9544,9545,9546,9547,9548,9549,9550,9551,9552, # 9344 9553,9554,9555,9556,9557,9558,9559,9560,9561,9562,9563,9564,9565,9566,9567,9568, # 9360 9569,9570,9571,9572,9573,9574,9575,9576,9577,9578,9579,9580,9581,9582,9583,9584, # 9376 3805,9585,9586,9587,9588,9589,9590,9591,9592,9593,9594,9595,9596,9597,9598,9599, # 9392 9600,9601,9602,4903,9603,9604,9605,9606,9607,4904,9608,9609,9610,9611,9612,9613, # 9408 9614,4905,9615,9616,9617,9618,9619,9620,9621,9622,9623,9624,9625,9626,9627,9628, # 9424 9629,9630,9631,9632,4906,9633,9634,9635,9636,9637,9638,9639,9640,9641,9642,9643, # 9440 4907,9644,9645,9646,9647,9648,9649,9650,9651,9652,9653,9654,9655,9656,9657,9658, # 9456 9659,9660,9661,9662,9663,9664,9665,9666,9667,9668,9669,9670,9671,9672,4183,9673, # 9472 9674,9675,9676,9677,4908,9678,9679,9680,9681,4909,9682,9683,9684,9685,9686,9687, # 9488 9688,9689,9690,4910,9691,9692,9693,3675,9694,9695,9696,2945,9697,9698,9699,9700, # 9504 9701,9702,9703,9704,9705,4911,9706,9707,9708,9709,9710,9711,9712,9713,9714,9715, # 9520 
9716,9717,9718,9719,9720,9721,9722,9723,9724,9725,9726,9727,9728,9729,9730,9731, # 9536 9732,9733,9734,9735,4912,9736,9737,9738,9739,9740,4913,9741,9742,9743,9744,9745, # 9552 9746,9747,9748,9749,9750,9751,9752,9753,9754,9755,9756,9757,9758,4914,9759,9760, # 9568 9761,9762,9763,9764,9765,9766,9767,9768,9769,9770,9771,9772,9773,9774,9775,9776, # 9584 9777,9778,9779,9780,9781,9782,4915,9783,9784,9785,9786,9787,9788,9789,9790,9791, # 9600 9792,9793,4916,9794,9795,9796,9797,9798,9799,9800,9801,9802,9803,9804,9805,9806, # 9616 9807,9808,9809,9810,9811,9812,9813,9814,9815,9816,9817,9818,9819,9820,9821,9822, # 9632 9823,9824,9825,9826,9827,9828,9829,9830,9831,9832,9833,9834,9835,9836,9837,9838, # 9648 9839,9840,9841,9842,9843,9844,9845,9846,9847,9848,9849,9850,9851,9852,9853,9854, # 9664 9855,9856,9857,9858,9859,9860,9861,9862,9863,9864,9865,9866,9867,9868,4917,9869, # 9680 9870,9871,9872,9873,9874,9875,9876,9877,9878,9879,9880,9881,9882,9883,9884,9885, # 9696 9886,9887,9888,9889,9890,9891,9892,4472,9893,9894,9895,9896,9897,3806,9898,9899, # 9712 9900,9901,9902,9903,9904,9905,9906,9907,9908,9909,9910,9911,9912,9913,9914,4918, # 9728 9915,9916,9917,4919,9918,9919,9920,9921,4184,9922,9923,9924,9925,9926,9927,9928, # 9744 9929,9930,9931,9932,9933,9934,9935,9936,9937,9938,9939,9940,9941,9942,9943,9944, # 9760 9945,9946,4920,9947,9948,9949,9950,9951,9952,9953,9954,9955,4185,9956,9957,9958, # 9776 9959,9960,9961,9962,9963,9964,9965,4921,9966,9967,9968,4473,9969,9970,9971,9972, # 9792 9973,9974,9975,9976,9977,4474,9978,9979,9980,9981,9982,9983,9984,9985,9986,9987, # 9808 9988,9989,9990,9991,9992,9993,9994,9995,9996,9997,9998,9999,10000,10001,10002,10003, # 9824 10004,10005,10006,10007,10008,10009,10010,10011,10012,10013,10014,10015,10016,10017,10018,10019, # 9840 10020,10021,4922,10022,4923,10023,10024,10025,10026,10027,10028,10029,10030,10031,10032,10033, # 9856 10034,10035,10036,10037,10038,10039,10040,10041,10042,10043,10044,10045,10046,10047,10048,4924, # 9872 10049,10050,10051,10052,10053,10054,10055,10056,10057,10058,10059,10060,10061,10062,10063,10064, # 9888 10065,10066,10067,10068,10069,10070,10071,10072,10073,10074,10075,10076,10077,10078,10079,10080, # 9904 10081,10082,10083,10084,10085,10086,10087,4475,10088,10089,10090,10091,10092,10093,10094,10095, # 9920 10096,10097,4476,10098,10099,10100,10101,10102,10103,10104,10105,10106,10107,10108,10109,10110, # 9936 10111,2174,10112,10113,10114,10115,10116,10117,10118,10119,10120,10121,10122,10123,10124,10125, # 9952 10126,10127,10128,10129,10130,10131,10132,10133,10134,10135,10136,10137,10138,10139,10140,3807, # 9968 4186,4925,10141,10142,10143,10144,10145,10146,10147,4477,4187,10148,10149,10150,10151,10152, # 9984 10153,4188,10154,10155,10156,10157,10158,10159,10160,10161,4926,10162,10163,10164,10165,10166, #10000 10167,10168,10169,10170,10171,10172,10173,10174,10175,10176,10177,10178,10179,10180,10181,10182, #10016 10183,10184,10185,10186,10187,10188,10189,10190,10191,10192,3203,10193,10194,10195,10196,10197, #10032 10198,10199,10200,4478,10201,10202,10203,10204,4479,10205,10206,10207,10208,10209,10210,10211, #10048 10212,10213,10214,10215,10216,10217,10218,10219,10220,10221,10222,10223,10224,10225,10226,10227, #10064 10228,10229,10230,10231,10232,10233,10234,4927,10235,10236,10237,10238,10239,10240,10241,10242, #10080 10243,10244,10245,10246,10247,10248,10249,10250,10251,10252,10253,10254,10255,10256,10257,10258, #10096 10259,10260,10261,10262,10263,10264,10265,10266,10267,10268,10269,10270,10271,10272,10273,4480, #10112 
4928,4929,10274,10275,10276,10277,10278,10279,10280,10281,10282,10283,10284,10285,10286,10287, #10128 10288,10289,10290,10291,10292,10293,10294,10295,10296,10297,10298,10299,10300,10301,10302,10303, #10144 10304,10305,10306,10307,10308,10309,10310,10311,10312,10313,10314,10315,10316,10317,10318,10319, #10160 10320,10321,10322,10323,10324,10325,10326,10327,10328,10329,10330,10331,10332,10333,10334,4930, #10176 10335,10336,10337,10338,10339,10340,10341,10342,4931,10343,10344,10345,10346,10347,10348,10349, #10192 10350,10351,10352,10353,10354,10355,3088,10356,2786,10357,10358,10359,10360,4189,10361,10362, #10208 10363,10364,10365,10366,10367,10368,10369,10370,10371,10372,10373,10374,10375,4932,10376,10377, #10224 10378,10379,10380,10381,10382,10383,10384,10385,10386,10387,10388,10389,10390,10391,10392,4933, #10240 10393,10394,10395,4934,10396,10397,10398,10399,10400,10401,10402,10403,10404,10405,10406,10407, #10256 10408,10409,10410,10411,10412,3446,10413,10414,10415,10416,10417,10418,10419,10420,10421,10422, #10272 10423,4935,10424,10425,10426,10427,10428,10429,10430,4936,10431,10432,10433,10434,10435,10436, #10288 10437,10438,10439,10440,10441,10442,10443,4937,10444,10445,10446,10447,4481,10448,10449,10450, #10304 10451,10452,10453,10454,10455,10456,10457,10458,10459,10460,10461,10462,10463,10464,10465,10466, #10320 10467,10468,10469,10470,10471,10472,10473,10474,10475,10476,10477,10478,10479,10480,10481,10482, #10336 10483,10484,10485,10486,10487,10488,10489,10490,10491,10492,10493,10494,10495,10496,10497,10498, #10352 10499,10500,10501,10502,10503,10504,10505,4938,10506,10507,10508,10509,10510,2552,10511,10512, #10368 10513,10514,10515,10516,3447,10517,10518,10519,10520,10521,10522,10523,10524,10525,10526,10527, #10384 10528,10529,10530,10531,10532,10533,10534,10535,10536,10537,10538,10539,10540,10541,10542,10543, #10400 4482,10544,4939,10545,10546,10547,10548,10549,10550,10551,10552,10553,10554,10555,10556,10557, #10416 10558,10559,10560,10561,10562,10563,10564,10565,10566,10567,3676,4483,10568,10569,10570,10571, #10432 10572,3448,10573,10574,10575,10576,10577,10578,10579,10580,10581,10582,10583,10584,10585,10586, #10448 10587,10588,10589,10590,10591,10592,10593,10594,10595,10596,10597,10598,10599,10600,10601,10602, #10464 10603,10604,10605,10606,10607,10608,10609,10610,10611,10612,10613,10614,10615,10616,10617,10618, #10480 10619,10620,10621,10622,10623,10624,10625,10626,10627,4484,10628,10629,10630,10631,10632,4940, #10496 10633,10634,10635,10636,10637,10638,10639,10640,10641,10642,10643,10644,10645,10646,10647,10648, #10512 10649,10650,10651,10652,10653,10654,10655,10656,4941,10657,10658,10659,2599,10660,10661,10662, #10528 10663,10664,10665,10666,3089,10667,10668,10669,10670,10671,10672,10673,10674,10675,10676,10677, #10544 10678,10679,10680,4942,10681,10682,10683,10684,10685,10686,10687,10688,10689,10690,10691,10692, #10560 10693,10694,10695,10696,10697,4485,10698,10699,10700,10701,10702,10703,10704,4943,10705,3677, #10576 10706,10707,10708,10709,10710,10711,10712,4944,10713,10714,10715,10716,10717,10718,10719,10720, #10592 10721,10722,10723,10724,10725,10726,10727,10728,4945,10729,10730,10731,10732,10733,10734,10735, #10608 10736,10737,10738,10739,10740,10741,10742,10743,10744,10745,10746,10747,10748,10749,10750,10751, #10624 10752,10753,10754,10755,10756,10757,10758,10759,10760,10761,4946,10762,10763,10764,10765,10766, #10640 10767,4947,4948,10768,10769,10770,10771,10772,10773,10774,10775,10776,10777,10778,10779,10780, #10656 
10781,10782,10783,10784,10785,10786,10787,10788,10789,10790,10791,10792,10793,10794,10795,10796, #10672 10797,10798,10799,10800,10801,10802,10803,10804,10805,10806,10807,10808,10809,10810,10811,10812, #10688 10813,10814,10815,10816,10817,10818,10819,10820,10821,10822,10823,10824,10825,10826,10827,10828, #10704 10829,10830,10831,10832,10833,10834,10835,10836,10837,10838,10839,10840,10841,10842,10843,10844, #10720 10845,10846,10847,10848,10849,10850,10851,10852,10853,10854,10855,10856,10857,10858,10859,10860, #10736 10861,10862,10863,10864,10865,10866,10867,10868,10869,10870,10871,10872,10873,10874,10875,10876, #10752 10877,10878,4486,10879,10880,10881,10882,10883,10884,10885,4949,10886,10887,10888,10889,10890, #10768 10891,10892,10893,10894,10895,10896,10897,10898,10899,10900,10901,10902,10903,10904,10905,10906, #10784 10907,10908,10909,10910,10911,10912,10913,10914,10915,10916,10917,10918,10919,4487,10920,10921, #10800 10922,10923,10924,10925,10926,10927,10928,10929,10930,10931,10932,4950,10933,10934,10935,10936, #10816 10937,10938,10939,10940,10941,10942,10943,10944,10945,10946,10947,10948,10949,4488,10950,10951, #10832 10952,10953,10954,10955,10956,10957,10958,10959,4190,10960,10961,10962,10963,10964,10965,10966, #10848 10967,10968,10969,10970,10971,10972,10973,10974,10975,10976,10977,10978,10979,10980,10981,10982, #10864 10983,10984,10985,10986,10987,10988,10989,10990,10991,10992,10993,10994,10995,10996,10997,10998, #10880 10999,11000,11001,11002,11003,11004,11005,11006,3960,11007,11008,11009,11010,11011,11012,11013, #10896 11014,11015,11016,11017,11018,11019,11020,11021,11022,11023,11024,11025,11026,11027,11028,11029, #10912 11030,11031,11032,4951,11033,11034,11035,11036,11037,11038,11039,11040,11041,11042,11043,11044, #10928 11045,11046,11047,4489,11048,11049,11050,11051,4952,11052,11053,11054,11055,11056,11057,11058, #10944 4953,11059,11060,11061,11062,11063,11064,11065,11066,11067,11068,11069,11070,11071,4954,11072, #10960 11073,11074,11075,11076,11077,11078,11079,11080,11081,11082,11083,11084,11085,11086,11087,11088, #10976 11089,11090,11091,11092,11093,11094,11095,11096,11097,11098,11099,11100,11101,11102,11103,11104, #10992 11105,11106,11107,11108,11109,11110,11111,11112,11113,11114,11115,3808,11116,11117,11118,11119, #11008 11120,11121,11122,11123,11124,11125,11126,11127,11128,11129,11130,11131,11132,11133,11134,4955, #11024 11135,11136,11137,11138,11139,11140,11141,11142,11143,11144,11145,11146,11147,11148,11149,11150, #11040 11151,11152,11153,11154,11155,11156,11157,11158,11159,11160,11161,4956,11162,11163,11164,11165, #11056 11166,11167,11168,11169,11170,11171,11172,11173,11174,11175,11176,11177,11178,11179,11180,4957, #11072 11181,11182,11183,11184,11185,11186,4958,11187,11188,11189,11190,11191,11192,11193,11194,11195, #11088 11196,11197,11198,11199,11200,3678,11201,11202,11203,11204,11205,11206,4191,11207,11208,11209, #11104 11210,11211,11212,11213,11214,11215,11216,11217,11218,11219,11220,11221,11222,11223,11224,11225, #11120 11226,11227,11228,11229,11230,11231,11232,11233,11234,11235,11236,11237,11238,11239,11240,11241, #11136 11242,11243,11244,11245,11246,11247,11248,11249,11250,11251,4959,11252,11253,11254,11255,11256, #11152 11257,11258,11259,11260,11261,11262,11263,11264,11265,11266,11267,11268,11269,11270,11271,11272, #11168 11273,11274,11275,11276,11277,11278,11279,11280,11281,11282,11283,11284,11285,11286,11287,11288, #11184 11289,11290,11291,11292,11293,11294,11295,11296,11297,11298,11299,11300,11301,11302,11303,11304, #11200 
11305,11306,11307,11308,11309,11310,11311,11312,11313,11314,3679,11315,11316,11317,11318,4490, #11216 11319,11320,11321,11322,11323,11324,11325,11326,11327,11328,11329,11330,11331,11332,11333,11334, #11232 11335,11336,11337,11338,11339,11340,11341,11342,11343,11344,11345,11346,11347,4960,11348,11349, #11248 11350,11351,11352,11353,11354,11355,11356,11357,11358,11359,11360,11361,11362,11363,11364,11365, #11264 11366,11367,11368,11369,11370,11371,11372,11373,11374,11375,11376,11377,3961,4961,11378,11379, #11280 11380,11381,11382,11383,11384,11385,11386,11387,11388,11389,11390,11391,11392,11393,11394,11395, #11296 11396,11397,4192,11398,11399,11400,11401,11402,11403,11404,11405,11406,11407,11408,11409,11410, #11312 11411,4962,11412,11413,11414,11415,11416,11417,11418,11419,11420,11421,11422,11423,11424,11425, #11328 11426,11427,11428,11429,11430,11431,11432,11433,11434,11435,11436,11437,11438,11439,11440,11441, #11344 11442,11443,11444,11445,11446,11447,11448,11449,11450,11451,11452,11453,11454,11455,11456,11457, #11360 11458,11459,11460,11461,11462,11463,11464,11465,11466,11467,11468,11469,4963,11470,11471,4491, #11376 11472,11473,11474,11475,4964,11476,11477,11478,11479,11480,11481,11482,11483,11484,11485,11486, #11392 11487,11488,11489,11490,11491,11492,4965,11493,11494,11495,11496,11497,11498,11499,11500,11501, #11408 11502,11503,11504,11505,11506,11507,11508,11509,11510,11511,11512,11513,11514,11515,11516,11517, #11424 11518,11519,11520,11521,11522,11523,11524,11525,11526,11527,11528,11529,3962,11530,11531,11532, #11440 11533,11534,11535,11536,11537,11538,11539,11540,11541,11542,11543,11544,11545,11546,11547,11548, #11456 11549,11550,11551,11552,11553,11554,11555,11556,11557,11558,11559,11560,11561,11562,11563,11564, #11472 4193,4194,11565,11566,11567,11568,11569,11570,11571,11572,11573,11574,11575,11576,11577,11578, #11488 11579,11580,11581,11582,11583,11584,11585,11586,11587,11588,11589,11590,11591,4966,4195,11592, #11504 11593,11594,11595,11596,11597,11598,11599,11600,11601,11602,11603,11604,3090,11605,11606,11607, #11520 11608,11609,11610,4967,11611,11612,11613,11614,11615,11616,11617,11618,11619,11620,11621,11622, #11536 11623,11624,11625,11626,11627,11628,11629,11630,11631,11632,11633,11634,11635,11636,11637,11638, #11552 11639,11640,11641,11642,11643,11644,11645,11646,11647,11648,11649,11650,11651,11652,11653,11654, #11568 11655,11656,11657,11658,11659,11660,11661,11662,11663,11664,11665,11666,11667,11668,11669,11670, #11584 11671,11672,11673,11674,4968,11675,11676,11677,11678,11679,11680,11681,11682,11683,11684,11685, #11600 11686,11687,11688,11689,11690,11691,11692,11693,3809,11694,11695,11696,11697,11698,11699,11700, #11616 11701,11702,11703,11704,11705,11706,11707,11708,11709,11710,11711,11712,11713,11714,11715,11716, #11632 11717,11718,3553,11719,11720,11721,11722,11723,11724,11725,11726,11727,11728,11729,11730,4969, #11648 11731,11732,11733,11734,11735,11736,11737,11738,11739,11740,4492,11741,11742,11743,11744,11745, #11664 11746,11747,11748,11749,11750,11751,11752,4970,11753,11754,11755,11756,11757,11758,11759,11760, #11680 11761,11762,11763,11764,11765,11766,11767,11768,11769,11770,11771,11772,11773,11774,11775,11776, #11696 11777,11778,11779,11780,11781,11782,11783,11784,11785,11786,11787,11788,11789,11790,4971,11791, #11712 11792,11793,11794,11795,11796,11797,4972,11798,11799,11800,11801,11802,11803,11804,11805,11806, #11728 11807,11808,11809,11810,4973,11811,11812,11813,11814,11815,11816,11817,11818,11819,11820,11821, #11744 
11822,11823,11824,11825,11826,11827,11828,11829,11830,11831,11832,11833,11834,3680,3810,11835, #11760 11836,4974,11837,11838,11839,11840,11841,11842,11843,11844,11845,11846,11847,11848,11849,11850, #11776 11851,11852,11853,11854,11855,11856,11857,11858,11859,11860,11861,11862,11863,11864,11865,11866, #11792 11867,11868,11869,11870,11871,11872,11873,11874,11875,11876,11877,11878,11879,11880,11881,11882, #11808 11883,11884,4493,11885,11886,11887,11888,11889,11890,11891,11892,11893,11894,11895,11896,11897, #11824 11898,11899,11900,11901,11902,11903,11904,11905,11906,11907,11908,11909,11910,11911,11912,11913, #11840 11914,11915,4975,11916,11917,11918,11919,11920,11921,11922,11923,11924,11925,11926,11927,11928, #11856 11929,11930,11931,11932,11933,11934,11935,11936,11937,11938,11939,11940,11941,11942,11943,11944, #11872 11945,11946,11947,11948,11949,4976,11950,11951,11952,11953,11954,11955,11956,11957,11958,11959, #11888 11960,11961,11962,11963,11964,11965,11966,11967,11968,11969,11970,11971,11972,11973,11974,11975, #11904 11976,11977,11978,11979,11980,11981,11982,11983,11984,11985,11986,11987,4196,11988,11989,11990, #11920 11991,11992,4977,11993,11994,11995,11996,11997,11998,11999,12000,12001,12002,12003,12004,12005, #11936 12006,12007,12008,12009,12010,12011,12012,12013,12014,12015,12016,12017,12018,12019,12020,12021, #11952 12022,12023,12024,12025,12026,12027,12028,12029,12030,12031,12032,12033,12034,12035,12036,12037, #11968 12038,12039,12040,12041,12042,12043,12044,12045,12046,12047,12048,12049,12050,12051,12052,12053, #11984 12054,12055,12056,12057,12058,12059,12060,12061,4978,12062,12063,12064,12065,12066,12067,12068, #12000 12069,12070,12071,12072,12073,12074,12075,12076,12077,12078,12079,12080,12081,12082,12083,12084, #12016 12085,12086,12087,12088,12089,12090,12091,12092,12093,12094,12095,12096,12097,12098,12099,12100, #12032 12101,12102,12103,12104,12105,12106,12107,12108,12109,12110,12111,12112,12113,12114,12115,12116, #12048 12117,12118,12119,12120,12121,12122,12123,4979,12124,12125,12126,12127,12128,4197,12129,12130, #12064 12131,12132,12133,12134,12135,12136,12137,12138,12139,12140,12141,12142,12143,12144,12145,12146, #12080 12147,12148,12149,12150,12151,12152,12153,12154,4980,12155,12156,12157,12158,12159,12160,4494, #12096 12161,12162,12163,12164,3811,12165,12166,12167,12168,12169,4495,12170,12171,4496,12172,12173, #12112 12174,12175,12176,3812,12177,12178,12179,12180,12181,12182,12183,12184,12185,12186,12187,12188, #12128 12189,12190,12191,12192,12193,12194,12195,12196,12197,12198,12199,12200,12201,12202,12203,12204, #12144 12205,12206,12207,12208,12209,12210,12211,12212,12213,12214,12215,12216,12217,12218,12219,12220, #12160 12221,4981,12222,12223,12224,12225,12226,12227,12228,12229,12230,12231,12232,12233,12234,12235, #12176 4982,12236,12237,12238,12239,12240,12241,12242,12243,12244,12245,4983,12246,12247,12248,12249, #12192 4984,12250,12251,12252,12253,12254,12255,12256,12257,12258,12259,12260,12261,12262,12263,12264, #12208 4985,12265,4497,12266,12267,12268,12269,12270,12271,12272,12273,12274,12275,12276,12277,12278, #12224 12279,12280,12281,12282,12283,12284,12285,12286,12287,4986,12288,12289,12290,12291,12292,12293, #12240 12294,12295,12296,2473,12297,12298,12299,12300,12301,12302,12303,12304,12305,12306,12307,12308, #12256 12309,12310,12311,12312,12313,12314,12315,12316,12317,12318,12319,3963,12320,12321,12322,12323, #12272 12324,12325,12326,12327,12328,12329,12330,12331,12332,4987,12333,12334,12335,12336,12337,12338, #12288 
12339,12340,12341,12342,12343,12344,12345,12346,12347,12348,12349,12350,12351,12352,12353,12354, #12304 12355,12356,12357,12358,12359,3964,12360,12361,12362,12363,12364,12365,12366,12367,12368,12369, #12320 12370,3965,12371,12372,12373,12374,12375,12376,12377,12378,12379,12380,12381,12382,12383,12384, #12336 12385,12386,12387,12388,12389,12390,12391,12392,12393,12394,12395,12396,12397,12398,12399,12400, #12352 12401,12402,12403,12404,12405,12406,12407,12408,4988,12409,12410,12411,12412,12413,12414,12415, #12368 12416,12417,12418,12419,12420,12421,12422,12423,12424,12425,12426,12427,12428,12429,12430,12431, #12384 12432,12433,12434,12435,12436,12437,12438,3554,12439,12440,12441,12442,12443,12444,12445,12446, #12400 12447,12448,12449,12450,12451,12452,12453,12454,12455,12456,12457,12458,12459,12460,12461,12462, #12416 12463,12464,4989,12465,12466,12467,12468,12469,12470,12471,12472,12473,12474,12475,12476,12477, #12432 12478,12479,12480,4990,12481,12482,12483,12484,12485,12486,12487,12488,12489,4498,12490,12491, #12448 12492,12493,12494,12495,12496,12497,12498,12499,12500,12501,12502,12503,12504,12505,12506,12507, #12464 12508,12509,12510,12511,12512,12513,12514,12515,12516,12517,12518,12519,12520,12521,12522,12523, #12480 12524,12525,12526,12527,12528,12529,12530,12531,12532,12533,12534,12535,12536,12537,12538,12539, #12496 12540,12541,12542,12543,12544,12545,12546,12547,12548,12549,12550,12551,4991,12552,12553,12554, #12512 12555,12556,12557,12558,12559,12560,12561,12562,12563,12564,12565,12566,12567,12568,12569,12570, #12528 12571,12572,12573,12574,12575,12576,12577,12578,3036,12579,12580,12581,12582,12583,3966,12584, #12544 12585,12586,12587,12588,12589,12590,12591,12592,12593,12594,12595,12596,12597,12598,12599,12600, #12560 12601,12602,12603,12604,12605,12606,12607,12608,12609,12610,12611,12612,12613,12614,12615,12616, #12576 12617,12618,12619,12620,12621,12622,12623,12624,12625,12626,12627,12628,12629,12630,12631,12632, #12592 12633,12634,12635,12636,12637,12638,12639,12640,12641,12642,12643,12644,12645,12646,4499,12647, #12608 12648,12649,12650,12651,12652,12653,12654,12655,12656,12657,12658,12659,12660,12661,12662,12663, #12624 12664,12665,12666,12667,12668,12669,12670,12671,12672,12673,12674,12675,12676,12677,12678,12679, #12640 12680,12681,12682,12683,12684,12685,12686,12687,12688,12689,12690,12691,12692,12693,12694,12695, #12656 12696,12697,12698,4992,12699,12700,12701,12702,12703,12704,12705,12706,12707,12708,12709,12710, #12672 12711,12712,12713,12714,12715,12716,12717,12718,12719,12720,12721,12722,12723,12724,12725,12726, #12688 12727,12728,12729,12730,12731,12732,12733,12734,12735,12736,12737,12738,12739,12740,12741,12742, #12704 12743,12744,12745,12746,12747,12748,12749,12750,12751,12752,12753,12754,12755,12756,12757,12758, #12720 12759,12760,12761,12762,12763,12764,12765,12766,12767,12768,12769,12770,12771,12772,12773,12774, #12736 12775,12776,12777,12778,4993,2175,12779,12780,12781,12782,12783,12784,12785,12786,4500,12787, #12752 12788,12789,12790,12791,12792,12793,12794,12795,12796,12797,12798,12799,12800,12801,12802,12803, #12768 12804,12805,12806,12807,12808,12809,12810,12811,12812,12813,12814,12815,12816,12817,12818,12819, #12784 12820,12821,12822,12823,12824,12825,12826,4198,3967,12827,12828,12829,12830,12831,12832,12833, #12800 12834,12835,12836,12837,12838,12839,12840,12841,12842,12843,12844,12845,12846,12847,12848,12849, #12816 12850,12851,12852,12853,12854,12855,12856,12857,12858,12859,12860,12861,4199,12862,12863,12864, #12832 
12865,12866,12867,12868,12869,12870,12871,12872,12873,12874,12875,12876,12877,12878,12879,12880, #12848 12881,12882,12883,12884,12885,12886,12887,4501,12888,12889,12890,12891,12892,12893,12894,12895, #12864 12896,12897,12898,12899,12900,12901,12902,12903,12904,12905,12906,12907,12908,12909,12910,12911, #12880 12912,4994,12913,12914,12915,12916,12917,12918,12919,12920,12921,12922,12923,12924,12925,12926, #12896 12927,12928,12929,12930,12931,12932,12933,12934,12935,12936,12937,12938,12939,12940,12941,12942, #12912 12943,12944,12945,12946,12947,12948,12949,12950,12951,12952,12953,12954,12955,12956,1772,12957, #12928 12958,12959,12960,12961,12962,12963,12964,12965,12966,12967,12968,12969,12970,12971,12972,12973, #12944 12974,12975,12976,12977,12978,12979,12980,12981,12982,12983,12984,12985,12986,12987,12988,12989, #12960 12990,12991,12992,12993,12994,12995,12996,12997,4502,12998,4503,12999,13000,13001,13002,13003, #12976 4504,13004,13005,13006,13007,13008,13009,13010,13011,13012,13013,13014,13015,13016,13017,13018, #12992 13019,13020,13021,13022,13023,13024,13025,13026,13027,13028,13029,3449,13030,13031,13032,13033, #13008 13034,13035,13036,13037,13038,13039,13040,13041,13042,13043,13044,13045,13046,13047,13048,13049, #13024 13050,13051,13052,13053,13054,13055,13056,13057,13058,13059,13060,13061,13062,13063,13064,13065, #13040 13066,13067,13068,13069,13070,13071,13072,13073,13074,13075,13076,13077,13078,13079,13080,13081, #13056 13082,13083,13084,13085,13086,13087,13088,13089,13090,13091,13092,13093,13094,13095,13096,13097, #13072 13098,13099,13100,13101,13102,13103,13104,13105,13106,13107,13108,13109,13110,13111,13112,13113, #13088 13114,13115,13116,13117,13118,3968,13119,4995,13120,13121,13122,13123,13124,13125,13126,13127, #13104 4505,13128,13129,13130,13131,13132,13133,13134,4996,4506,13135,13136,13137,13138,13139,4997, #13120 13140,13141,13142,13143,13144,13145,13146,13147,13148,13149,13150,13151,13152,13153,13154,13155, #13136 13156,13157,13158,13159,4998,13160,13161,13162,13163,13164,13165,13166,13167,13168,13169,13170, #13152 13171,13172,13173,13174,13175,13176,4999,13177,13178,13179,13180,13181,13182,13183,13184,13185, #13168 13186,13187,13188,13189,13190,13191,13192,13193,13194,13195,13196,13197,13198,13199,13200,13201, #13184 13202,13203,13204,13205,13206,5000,13207,13208,13209,13210,13211,13212,13213,13214,13215,13216, #13200 13217,13218,13219,13220,13221,13222,13223,13224,13225,13226,13227,4200,5001,13228,13229,13230, #13216 13231,13232,13233,13234,13235,13236,13237,13238,13239,13240,3969,13241,13242,13243,13244,3970, #13232 13245,13246,13247,13248,13249,13250,13251,13252,13253,13254,13255,13256,13257,13258,13259,13260, #13248 13261,13262,13263,13264,13265,13266,13267,13268,3450,13269,13270,13271,13272,13273,13274,13275, #13264 13276,5002,13277,13278,13279,13280,13281,13282,13283,13284,13285,13286,13287,13288,13289,13290, #13280 13291,13292,13293,13294,13295,13296,13297,13298,13299,13300,13301,13302,3813,13303,13304,13305, #13296 13306,13307,13308,13309,13310,13311,13312,13313,13314,13315,13316,13317,13318,13319,13320,13321, #13312 13322,13323,13324,13325,13326,13327,13328,4507,13329,13330,13331,13332,13333,13334,13335,13336, #13328 13337,13338,13339,13340,13341,5003,13342,13343,13344,13345,13346,13347,13348,13349,13350,13351, #13344 13352,13353,13354,13355,13356,13357,13358,13359,13360,13361,13362,13363,13364,13365,13366,13367, #13360 5004,13368,13369,13370,13371,13372,13373,13374,13375,13376,13377,13378,13379,13380,13381,13382, #13376 
13383,13384,13385,13386,13387,13388,13389,13390,13391,13392,13393,13394,13395,13396,13397,13398, #13392 13399,13400,13401,13402,13403,13404,13405,13406,13407,13408,13409,13410,13411,13412,13413,13414, #13408 13415,13416,13417,13418,13419,13420,13421,13422,13423,13424,13425,13426,13427,13428,13429,13430, #13424 13431,13432,4508,13433,13434,13435,4201,13436,13437,13438,13439,13440,13441,13442,13443,13444, #13440 13445,13446,13447,13448,13449,13450,13451,13452,13453,13454,13455,13456,13457,5005,13458,13459, #13456 13460,13461,13462,13463,13464,13465,13466,13467,13468,13469,13470,4509,13471,13472,13473,13474, #13472 13475,13476,13477,13478,13479,13480,13481,13482,13483,13484,13485,13486,13487,13488,13489,13490, #13488 13491,13492,13493,13494,13495,13496,13497,13498,13499,13500,13501,13502,13503,13504,13505,13506, #13504 13507,13508,13509,13510,13511,13512,13513,13514,13515,13516,13517,13518,13519,13520,13521,13522, #13520 13523,13524,13525,13526,13527,13528,13529,13530,13531,13532,13533,13534,13535,13536,13537,13538, #13536 13539,13540,13541,13542,13543,13544,13545,13546,13547,13548,13549,13550,13551,13552,13553,13554, #13552 13555,13556,13557,13558,13559,13560,13561,13562,13563,13564,13565,13566,13567,13568,13569,13570, #13568 13571,13572,13573,13574,13575,13576,13577,13578,13579,13580,13581,13582,13583,13584,13585,13586, #13584 13587,13588,13589,13590,13591,13592,13593,13594,13595,13596,13597,13598,13599,13600,13601,13602, #13600 13603,13604,13605,13606,13607,13608,13609,13610,13611,13612,13613,13614,13615,13616,13617,13618, #13616 13619,13620,13621,13622,13623,13624,13625,13626,13627,13628,13629,13630,13631,13632,13633,13634, #13632 13635,13636,13637,13638,13639,13640,13641,13642,5006,13643,13644,13645,13646,13647,13648,13649, #13648 13650,13651,5007,13652,13653,13654,13655,13656,13657,13658,13659,13660,13661,13662,13663,13664, #13664 13665,13666,13667,13668,13669,13670,13671,13672,13673,13674,13675,13676,13677,13678,13679,13680, #13680 13681,13682,13683,13684,13685,13686,13687,13688,13689,13690,13691,13692,13693,13694,13695,13696, #13696 13697,13698,13699,13700,13701,13702,13703,13704,13705,13706,13707,13708,13709,13710,13711,13712, #13712 13713,13714,13715,13716,13717,13718,13719,13720,13721,13722,13723,13724,13725,13726,13727,13728, #13728 13729,13730,13731,13732,13733,13734,13735,13736,13737,13738,13739,13740,13741,13742,13743,13744, #13744 13745,13746,13747,13748,13749,13750,13751,13752,13753,13754,13755,13756,13757,13758,13759,13760, #13760 13761,13762,13763,13764,13765,13766,13767,13768,13769,13770,13771,13772,13773,13774,3273,13775, #13776 13776,13777,13778,13779,13780,13781,13782,13783,13784,13785,13786,13787,13788,13789,13790,13791, #13792 13792,13793,13794,13795,13796,13797,13798,13799,13800,13801,13802,13803,13804,13805,13806,13807, #13808 13808,13809,13810,13811,13812,13813,13814,13815,13816,13817,13818,13819,13820,13821,13822,13823, #13824 13824,13825,13826,13827,13828,13829,13830,13831,13832,13833,13834,13835,13836,13837,13838,13839, #13840 13840,13841,13842,13843,13844,13845,13846,13847,13848,13849,13850,13851,13852,13853,13854,13855, #13856 13856,13857,13858,13859,13860,13861,13862,13863,13864,13865,13866,13867,13868,13869,13870,13871, #13872 13872,13873,13874,13875,13876,13877,13878,13879,13880,13881,13882,13883,13884,13885,13886,13887, #13888 13888,13889,13890,13891,13892,13893,13894,13895,13896,13897,13898,13899,13900,13901,13902,13903, #13904 13904,13905,13906,13907,13908,13909,13910,13911,13912,13913,13914,13915,13916,13917,13918,13919, #13920 
13920,13921,13922,13923,13924,13925,13926,13927,13928,13929,13930,13931,13932,13933,13934,13935, #13936 13936,13937,13938,13939,13940,13941,13942,13943,13944,13945,13946,13947,13948,13949,13950,13951, #13952 13952,13953,13954,13955,13956,13957,13958,13959,13960,13961,13962,13963,13964,13965,13966,13967, #13968 13968,13969,13970,13971,13972) #13973 # flake8: noqa # Originally contributed by Sjoerd Mullender. # Significantly modified by Jeffrey Yasskin . """Fraction, infinite-precision, real numbers.""" from decimal import Decimal import math import numbers import operator import re import sys __all__ = ['Fraction', 'gcd'] def gcd(a, b): """Calculate the Greatest Common Divisor of a and b. Unless b==0, the result will have the same sign as b (so that when b is divided by it, the result comes out positive). """ while b: a, b = b, a%b return a # Constants related to the hash implementation; hash(x) is based # on the reduction of x modulo the prime _PyHASH_MODULUS. _PyHASH_MODULUS = sys.hash_info.modulus # Value to be used for rationals that reduce to infinity modulo # _PyHASH_MODULUS. _PyHASH_INF = sys.hash_info.inf _RATIONAL_FORMAT = re.compile(r""" \A\s* # optional whitespace at the start, then (?P<sign>[-+]?) # an optional sign, then (?=\d|\.\d) # lookahead for digit or .digit (?P<num>\d*) # numerator (possibly empty) (?: # followed by (?:/(?P<denom>\d+))? # an optional denominator | # or (?:\.(?P<decimal>\d*))? # an optional fractional part (?:E(?P<exp>[-+]?\d+))? # and optional exponent ) \s*\Z # and optional whitespace to finish """, re.VERBOSE | re.IGNORECASE) class Fraction(numbers.Rational): """This class implements rational numbers. In the two-argument form of the constructor, Fraction(8, 6) will produce a rational number equivalent to 4/3. Both arguments must be Rational. The numerator defaults to 0 and the denominator defaults to 1 so that Fraction(3) == 3 and Fraction() == 0. Fractions can also be constructed from: - numeric strings similar to those accepted by the float constructor (for example, '-2.3' or '1e10') - strings of the form '123/456' - float and Decimal instances - other Rational instances (including integers) """ __slots__ = ('_numerator', '_denominator') # We're immutable, so use __new__ not __init__ def __new__(cls, numerator=0, denominator=None): """Constructs a Rational. Takes a string like '3/2' or '1.5', another Rational instance, a numerator/denominator pair, or a float.
Examples -------- >>> Fraction(10, -8) Fraction(-5, 4) >>> Fraction(Fraction(1, 7), 5) Fraction(1, 35) >>> Fraction(Fraction(1, 7), Fraction(2, 3)) Fraction(3, 14) >>> Fraction('314') Fraction(314, 1) >>> Fraction('-35/4') Fraction(-35, 4) >>> Fraction('3.1415') # conversion from numeric string Fraction(6283, 2000) >>> Fraction('-47e-2') # string may include a decimal exponent Fraction(-47, 100) >>> Fraction(1.47) # direct construction from float (exact conversion) Fraction(6620291452234629, 4503599627370496) >>> Fraction(2.25) Fraction(9, 4) >>> Fraction(Decimal('1.47')) Fraction(147, 100) """ self = super(Fraction, cls).__new__(cls) if denominator is None: if isinstance(numerator, numbers.Rational): self._numerator = numerator.numerator self._denominator = numerator.denominator return self elif isinstance(numerator, float): # Exact conversion from float value = Fraction.from_float(numerator) self._numerator = value._numerator self._denominator = value._denominator return self elif isinstance(numerator, Decimal): value = Fraction.from_decimal(numerator) self._numerator = value._numerator self._denominator = value._denominator return self elif isinstance(numerator, str): # Handle construction from strings. m = _RATIONAL_FORMAT.match(numerator) if m is None: raise ValueError('Invalid literal for Fraction: %r' % numerator) numerator = int(m.group('num') or '0') denom = m.group('denom') if denom: denominator = int(denom) else: denominator = 1 decimal = m.group('decimal') if decimal: scale = 10**len(decimal) numerator = numerator * scale + int(decimal) denominator *= scale exp = m.group('exp') if exp: exp = int(exp) if exp >= 0: numerator *= 10**exp else: denominator *= 10**-exp if m.group('sign') == '-': numerator = -numerator else: raise TypeError("argument should be a string " "or a Rational instance") elif (isinstance(numerator, numbers.Rational) and isinstance(denominator, numbers.Rational)): numerator, denominator = ( numerator.numerator * denominator.denominator, denominator.numerator * numerator.denominator ) else: raise TypeError("both arguments should be " "Rational instances") if denominator == 0: raise ZeroDivisionError('Fraction(%s, 0)' % numerator) g = gcd(numerator, denominator) self._numerator = numerator // g self._denominator = denominator // g return self @classmethod def from_float(cls, f): """Converts a finite float to a rational number, exactly. Beware that Fraction.from_float(0.3) != Fraction(3, 10). """ if isinstance(f, numbers.Integral): return cls(f) elif not isinstance(f, float): raise TypeError("%s.from_float() only takes floats, not %r (%s)" % (cls.__name__, f, type(f).__name__)) if math.isnan(f): raise ValueError("Cannot convert %r to %s." % (f, cls.__name__)) if math.isinf(f): raise OverflowError("Cannot convert %r to %s." % (f, cls.__name__)) return cls(*f.as_integer_ratio()) @classmethod def from_decimal(cls, dec): """Converts a finite Decimal instance to a rational number, exactly.""" from decimal import Decimal if isinstance(dec, numbers.Integral): dec = Decimal(int(dec)) elif not isinstance(dec, Decimal): raise TypeError( "%s.from_decimal() only takes Decimals, not %r (%s)" % (cls.__name__, dec, type(dec).__name__)) if dec.is_infinite(): raise OverflowError( "Cannot convert %s to %s." % (dec, cls.__name__)) if dec.is_nan(): raise ValueError("Cannot convert %s to %s." 
% (dec, cls.__name__)) sign, digits, exp = dec.as_tuple() digits = int(''.join(map(str, digits))) if sign: digits = -digits if exp >= 0: return cls(digits * 10 ** exp) else: return cls(digits, 10 ** -exp) def limit_denominator(self, max_denominator=1000000): """Closest Fraction to self with denominator at most max_denominator. >>> Fraction('3.141592653589793').limit_denominator(10) Fraction(22, 7) >>> Fraction('3.141592653589793').limit_denominator(100) Fraction(311, 99) >>> Fraction(4321, 8765).limit_denominator(10000) Fraction(4321, 8765) """ # Algorithm notes: For any real number x, define a *best upper # approximation* to x to be a rational number p/q such that: # # (1) p/q >= x, and # (2) if p/q > r/s >= x then s > q, for any rational r/s. # # Define *best lower approximation* similarly. Then it can be # proved that a rational number is a best upper or lower # approximation to x if, and only if, it is a convergent or # semiconvergent of the (unique shortest) continued fraction # associated to x. # # To find a best rational approximation with denominator <= M, # we find the best upper and lower approximations with # denominator <= M and take whichever of these is closer to x. # In the event of a tie, the bound with smaller denominator is # chosen. If both denominators are equal (which can happen # only when max_denominator == 1 and self is midway between # two integers) the lower bound---i.e., the floor of self, is # taken. if max_denominator < 1: raise ValueError("max_denominator should be at least 1") if self._denominator <= max_denominator: return Fraction(self) p0, q0, p1, q1 = 0, 1, 1, 0 n, d = self._numerator, self._denominator while True: a = n//d q2 = q0+a*q1 if q2 > max_denominator: break p0, q0, p1, q1 = p1, q1, p0+a*p1, q2 n, d = d, n-a*d k = (max_denominator-q0)//q1 bound1 = Fraction(p0+k*p1, q0+k*q1) bound2 = Fraction(p1, q1) if abs(bound2 - self) <= abs(bound1-self): return bound2 else: return bound1 @property def numerator(a): return a._numerator @property def denominator(a): return a._denominator def __repr__(self): """repr(self)""" return ('Fraction(%s, %s)' % (self._numerator, self._denominator)) def __str__(self): """str(self)""" if self._denominator == 1: return str(self._numerator) else: return '%s/%s' % (self._numerator, self._denominator) def _operator_fallbacks(monomorphic_operator, fallback_operator): """Generates forward and reverse operators given a purely-rational operator and a function from the operator module. Use this like: __op__, __rop__ = _operator_fallbacks(just_rational_op, operator.op) In general, we want to implement the arithmetic operations so that mixed-mode operations either call an implementation whose author knew about the types of both arguments, or convert both to the nearest built in type and do the operation there. In Fraction, that means that we define __add__ and __radd__ as: def __add__(self, other): # Both types have numerators/denominator attributes, # so do the operation directly if isinstance(other, (int, Fraction)): return Fraction(self.numerator * other.denominator + other.numerator * self.denominator, self.denominator * other.denominator) # float and complex don't have those operations, but we # know about those types, so special case them. elif isinstance(other, float): return float(self) + other elif isinstance(other, complex): return complex(self) + other # Let the other type take over. return NotImplemented def __radd__(self, other): # radd handles more types than add because there's # nothing left to fall back to. 
if isinstance(other, numbers.Rational): return Fraction(self.numerator * other.denominator + other.numerator * self.denominator, self.denominator * other.denominator) elif isinstance(other, Real): return float(other) + float(self) elif isinstance(other, Complex): return complex(other) + complex(self) return NotImplemented There are 5 different cases for a mixed-type addition on Fraction. I'll refer to all of the above code that doesn't refer to Fraction, float, or complex as "boilerplate". 'r' will be an instance of Fraction, which is a subtype of Rational (r : Fraction <: Rational), and b : B <: Complex. The first three involve 'r + b': 1. If B <: Fraction, int, float, or complex, we handle that specially, and all is well. 2. If Fraction falls back to the boilerplate code, and it were to return a value from __add__, we'd miss the possibility that B defines a more intelligent __radd__, so the boilerplate should return NotImplemented from __add__. In particular, we don't handle Rational here, even though we could get an exact answer, in case the other type wants to do something special. 3. If B <: Fraction, Python tries B.__radd__ before Fraction.__add__. This is ok, because it was implemented with knowledge of Fraction, so it can handle those instances before delegating to Real or Complex. The next two situations describe 'b + r'. We assume that b didn't know about Fraction in its implementation, and that it uses similar boilerplate code: 4. If B <: Rational, then __radd_ converts both to the builtin rational type (hey look, that's us) and proceeds. 5. Otherwise, __radd__ tries to find the nearest common base ABC, and fall back to its builtin type. Since this class doesn't subclass a concrete type, there's no implementation to fall back to, so we need to try as hard as possible to return an actual value, or the user will get a TypeError. """ def forward(a, b): if isinstance(b, (int, Fraction)): return monomorphic_operator(a, b) elif isinstance(b, float): return fallback_operator(float(a), b) elif isinstance(b, complex): return fallback_operator(complex(a), b) else: return NotImplemented forward.__name__ = '__' + fallback_operator.__name__ + '__' forward.__doc__ = monomorphic_operator.__doc__ def reverse(b, a): if isinstance(a, numbers.Rational): # Includes ints. 
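# Illustrative note (added, not part of the original source): ints are
# registered as numbers.Rational, so `1 + Fraction(1, 3)` reaches this exact
# branch and stays exact, returning Fraction(4, 3); a float left operand such
# as `1.0 + Fraction(1, 3)` is not Rational, falls through to the
# numbers.Real branch below, and comes back as a plain float instead.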
return monomorphic_operator(a, b) elif isinstance(a, numbers.Real): return fallback_operator(float(a), float(b)) elif isinstance(a, numbers.Complex): return fallback_operator(complex(a), complex(b)) else: return NotImplemented reverse.__name__ = '__r' + fallback_operator.__name__ + '__' reverse.__doc__ = monomorphic_operator.__doc__ return forward, reverse def _add(a, b): """a + b""" return Fraction(a.numerator * b.denominator + b.numerator * a.denominator, a.denominator * b.denominator) __add__, __radd__ = _operator_fallbacks(_add, operator.add) def _sub(a, b): """a - b""" return Fraction(a.numerator * b.denominator - b.numerator * a.denominator, a.denominator * b.denominator) __sub__, __rsub__ = _operator_fallbacks(_sub, operator.sub) def _mul(a, b): """a * b""" return Fraction(a.numerator * b.numerator, a.denominator * b.denominator) __mul__, __rmul__ = _operator_fallbacks(_mul, operator.mul) def _div(a, b): """a / b""" return Fraction(a.numerator * b.denominator, a.denominator * b.numerator) __truediv__, __rtruediv__ = _operator_fallbacks(_div, operator.truediv) def __floordiv__(a, b): """a // b""" return math.floor(a / b) def __rfloordiv__(b, a): """a // b""" return math.floor(a / b) def __mod__(a, b): """a % b""" div = a // b return a - b * div def __rmod__(b, a): """a % b""" div = a // b return a - b * div def __pow__(a, b): """a ** b If b is not an integer, the result will be a float or complex since roots are generally irrational. If b is an integer, the result will be rational. """ if isinstance(b, numbers.Rational): if b.denominator == 1: power = b.numerator if power >= 0: return Fraction(a._numerator ** power, a._denominator ** power) else: return Fraction(a._denominator ** -power, a._numerator ** -power) else: # A fractional power will generally produce an # irrational number. return float(a) ** float(b) else: return float(a) ** b def __rpow__(b, a): """a ** b""" if b._denominator == 1 and b._numerator >= 0: # If a is an int, keep it that way if possible. return a ** b._numerator if isinstance(a, numbers.Rational): return Fraction(a.numerator, a.denominator) ** b if b._denominator == 1: return a ** b._numerator return a ** float(b) def __pos__(a): """+a: Coerces a subclass instance to Fraction""" return Fraction(a._numerator, a._denominator) def __neg__(a): """-a""" return Fraction(-a._numerator, a._denominator) def __abs__(a): """abs(a)""" return Fraction(abs(a._numerator), a._denominator) def __trunc__(a): """trunc(a)""" if a._numerator < 0: return -(-a._numerator // a._denominator) else: return a._numerator // a._denominator def __floor__(a): """Will be math.floor(a) in 3.0.""" return a.numerator // a.denominator def __ceil__(a): """Will be math.ceil(a) in 3.0.""" # The negations cleverly convince floordiv to return the ceiling. return -(-a.numerator // a.denominator) def __round__(self, ndigits=None): """Will be round(self, ndigits) in 3.0. Rounds half toward even. """ if ndigits is None: floor, remainder = divmod(self.numerator, self.denominator) if remainder * 2 < self.denominator: return floor elif remainder * 2 > self.denominator: return floor + 1 # Deal with the half case: elif floor % 2 == 0: return floor else: return floor + 1 shift = 10**abs(ndigits) # See _operator_fallbacks.forward to check that the results of # these operations will always be Fraction and therefore have # round(). 
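# Worked example (added for clarity, not in the original source): with
# ndigits=2 the shift is 100; Fraction('2.675') is 107/40, so
# round(self * shift) rounds 535/2 half-to-even up to 268 and the result is
# Fraction(268, 100) == Fraction(67, 25), i.e. exactly 2.68, whereas
# round(2.675, 2) on the binary float gives 2.67.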
if ndigits > 0: return Fraction(round(self * shift), shift) else: return Fraction(round(self / shift) * shift) def __hash__(self): """hash(self)""" # XXX since this method is expensive, consider caching the result # In order to make sure that the hash of a Fraction agrees # with the hash of a numerically equal integer, float or # Decimal instance, we follow the rules for numeric hashes # outlined in the documentation. (See library docs, 'Built-in # Types'). # dinv is the inverse of self._denominator modulo the prime # _PyHASH_MODULUS, or 0 if self._denominator is divisible by # _PyHASH_MODULUS. dinv = pow(self._denominator, _PyHASH_MODULUS - 2, _PyHASH_MODULUS) if not dinv: hash_ = _PyHASH_INF else: hash_ = abs(self._numerator) * dinv % _PyHASH_MODULUS result = hash_ if self >= 0 else -hash_ return -2 if result == -1 else result def __eq__(a, b): """a == b""" if isinstance(b, numbers.Rational): return (a._numerator == b.numerator and a._denominator == b.denominator) if isinstance(b, numbers.Complex) and b.imag == 0: b = b.real if isinstance(b, float): if math.isnan(b) or math.isinf(b): # comparisons with an infinity or nan should behave in # the same way for any finite a, so treat a as zero. return 0.0 == b else: return a == a.from_float(b) else: # Since a doesn't know how to compare with b, let's give b # a chance to compare itself with a. return NotImplemented def _richcmp(self, other, op): """Helper for comparison operators, for internal use only. Implement comparison between a Rational instance `self`, and either another Rational instance or a float `other`. If `other` is not a Rational instance or a float, return NotImplemented. `op` should be one of the six standard comparison operators. """ # convert other to a Rational instance where reasonable. if isinstance(other, numbers.Rational): return op(self._numerator * other.denominator, self._denominator * other.numerator) if isinstance(other, float): if math.isnan(other) or math.isinf(other): return op(0.0, other) else: return op(self, self.from_float(other)) else: return NotImplemented def __lt__(a, b): """a < b""" return a._richcmp(b, operator.lt) def __gt__(a, b): """a > b""" return a._richcmp(b, operator.gt) def __le__(a, b): """a <= b""" return a._richcmp(b, operator.le) def __ge__(a, b): """a >= b""" return a._richcmp(b, operator.ge) def __bool__(a): """a != 0""" return a._numerator != 0 # support for pickling, copy, and deepcopy def __reduce__(self): return (self.__class__, (str(self),)) def __copy__(self): if type(self) == Fraction: return self # I'm immutable; therefore I am my own clone return self.__class__(self._numerator, self._denominator) def __deepcopy__(self, memo): if type(self) == Fraction: return self # My components are also immutable return self.__class__(self._numerator, self._denominator) """ InaSAFE Disaster risk assessment tool by AusAid - **Standard signal defs.** Contact : ole.moller.nielsen@gmail.com .. note:: This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. These signals are defined for global use throughout the safe and InaSAFE application. They provide context for when parts of the application want to send messages to each other. See: https://github.com/AIFDR/inasafe/issues/577 for more detailed explanation. 
""" __author__ = 'tim@kartoza.com' __revision__ = '$Format:%H$' __date__ = '27/05/2013' __copyright__ = ('Copyright 2012, Australia Indonesia Facility for ' 'Disaster Reduction') DYNAMIC_MESSAGE_SIGNAL = 'DynamicMessage' STATIC_MESSAGE_SIGNAL = 'StaticMessage' ERROR_MESSAGE_SIGNAL = 'ErrorMessage' BUSY_SIGNAL = 'BusySignal' NOT_BUSY_SIGNAL = 'NotBusySignal' ANALYSIS_DONE_SIGNAL = 'AnalysisDone' # Core.py - Python extension for perf script, core functions # # Copyright (C) 2010 by Tom Zanussi # # This software may be distributed under the terms of the GNU General # Public License ("GPL") version 2 as published by the Free Software # Foundation. from collections import defaultdict def autodict(): return defaultdict(autodict) flag_fields = autodict() symbolic_fields = autodict() def define_flag_field(event_name, field_name, delim): flag_fields[event_name][field_name]['delim'] = delim def define_flag_value(event_name, field_name, value, field_str): flag_fields[event_name][field_name]['values'][value] = field_str def define_symbolic_field(event_name, field_name): # nothing to do, really pass def define_symbolic_value(event_name, field_name, value, field_str): symbolic_fields[event_name][field_name]['values'][value] = field_str def flag_str(event_name, field_name, value): string = "" if flag_fields[event_name][field_name]: print_delim = 0 keys = flag_fields[event_name][field_name]['values'].keys() keys.sort() for idx in keys: if not value and not idx: string += flag_fields[event_name][field_name]['values'][idx] break if idx and (value & idx) == idx: if print_delim and flag_fields[event_name][field_name]['delim']: string += " " + flag_fields[event_name][field_name]['delim'] + " " string += flag_fields[event_name][field_name]['values'][idx] print_delim = 1 value &= ~idx return string def symbol_str(event_name, field_name, value): string = "" if symbolic_fields[event_name][field_name]: keys = symbolic_fields[event_name][field_name]['values'].keys() keys.sort() for idx in keys: if not value and not idx: string = symbolic_fields[event_name][field_name]['values'][idx] break if (value == idx): string = symbolic_fields[event_name][field_name]['values'][idx] break return string trace_flags = { 0x00: "NONE", \ 0x01: "IRQS_OFF", \ 0x02: "IRQS_NOSUPPORT", \ 0x04: "NEED_RESCHED", \ 0x08: "HARDIRQ", \ 0x10: "SOFTIRQ" } def trace_flag_str(value): string = "" print_delim = 0 keys = trace_flags.keys() for idx in keys: if not value and not idx: string += "NONE" break if idx and (value & idx) == idx: if print_delim: string += " | "; string += trace_flags[idx] print_delim = 1 value &= ~idx return string def taskState(state): states = { 0 : "R", 1 : "S", 2 : "D", 64: "DEAD" } if state not in states: return "Unknown" return states[state] class EventHeaders: def __init__(self, common_cpu, common_secs, common_nsecs, common_pid, common_comm): self.cpu = common_cpu self.secs = common_secs self.nsecs = common_nsecs self.pid = common_pid self.comm = common_comm def ts(self): return (self.secs * (10 ** 9)) + self.nsecs def ts_format(self): return "%d.%d" % (self.secs, int(self.nsecs / 1000)) from django.core import checks from django.db.backends.base.validation import BaseDatabaseValidation class DatabaseValidation(BaseDatabaseValidation): def check_field(self, field, **kwargs): """ MySQL has the following field length restriction: No character (varchar) fields can have a length exceeding 255 characters if they have a unique index on them. 
""" from django.db import connection errors = super(DatabaseValidation, self).check_field(field, **kwargs) # Ignore any related fields. if getattr(field, 'rel', None) is None: field_type = field.db_type(connection) # Ignore any non-concrete fields if field_type is None: return errors if (field_type.startswith('varchar') # Look for CharFields... and field.unique # ... that are unique and (field.max_length is None or int(field.max_length) > 255)): errors.append( checks.Error( ('MySQL does not allow unique CharFields to have a max_length > 255.'), hint=None, obj=field, id='mysql.E001', ) ) return errors # Test driver for bsddb package. """ Run all test cases. """ import os import sys import tempfile import time import unittest from test.test_support import requires, verbose, run_unittest, unlink, rmtree # When running as a script instead of within the regrtest framework, skip the # requires test, since it's obvious we want to run them. if __name__ != '__main__': requires('bsddb') verbose = False if 'verbose' in sys.argv: verbose = True sys.argv.remove('verbose') if 'silent' in sys.argv: # take care of old flag, just in case verbose = False sys.argv.remove('silent') class TimingCheck(unittest.TestCase): """This class is not a real test. Its purpose is to print a message periodically when the test runs slowly. This will prevent the buildbots from timing out on slow machines.""" # How much time in seconds before printing a 'Still working' message. # Since this is run at most once between each test module, use a smaller # interval than other tests. _PRINT_WORKING_MSG_INTERVAL = 4 * 60 # next_time is used as a global variable that survives each instance. # This is necessary since a new instance will be created for each test. next_time = time.time() + _PRINT_WORKING_MSG_INTERVAL def testCheckElapsedTime(self): # Print still working message since these tests can be really slow. now = time.time() if self.next_time <= now: TimingCheck.next_time = now + self._PRINT_WORKING_MSG_INTERVAL sys.__stdout__.write(' test_bsddb3 still working, be patient...\n') sys.__stdout__.flush() # For invocation through regrtest def test_main(): from bsddb import db from bsddb.test import test_all test_all.set_test_path_prefix(os.path.join(tempfile.gettempdir(), 'z-test_bsddb3-%s' % os.getpid())) # Please leave this print in, having this show up in the buildbots # makes diagnosing problems a lot easier. print >>sys.stderr, db.DB_VERSION_STRING print >>sys.stderr, 'Test path prefix: ', test_all.get_test_path_prefix() try: run_unittest(test_all.suite(module_prefix='bsddb.test.', timing_check=TimingCheck)) finally: # The only reason to remove db_home is in case if there is an old # one lying around. This might be by a different user, so just # ignore errors. We should always make a unique name now. try: test_all.remove_test_path_directory() except: pass if __name__ == '__main__': test_main() # # Copyright 2009 Free Software Foundation, Inc. # # This file is part of GNU Radio # # GNU Radio is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 3, or (at your option) # any later version. # # GNU Radio is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. 
# # You should have received a copy of the GNU General Public License # along with GNU Radio; see the file COPYING. If not, write to # the Free Software Foundation, Inc., 51 Franklin Street, # Boston, MA 02110-1301, USA. # """ Convenience class for dequeuing messages from a gr.msg_queue and invoking a callback. Creates a Python thread that does a blocking read on the supplied gr.msg_queue, then invokes callback each time a msg is received. If the msg type is not 0, then it is treated as a signal to exit its loop. If the callback raises an exception, and the runner was created with 'exit_on_error' equal to True, then the runner will store the exception and exit its loop, otherwise the exception is ignored. To get the exception that the callback raised, if any, call exit_error() on the object. To manually stop the runner, call stop() on the object. To determine if the runner has exited, call exited() on the object. """ from gnuradio import gr import gnuradio.gr.gr_threading as _threading class msgq_runner(_threading.Thread): def __init__(self, msgq, callback, exit_on_error=False): _threading.Thread.__init__(self) self._msgq = msgq self._callback = callback self._exit_on_error = exit_on_error self._done = False self._exited = False self._exit_error = None self.setDaemon(1) self.start() def run(self): while not self._done: msg = self._msgq.delete_head() if msg.type() != 0: self.stop() else: try: self._callback(msg) except Exception, e: if self._exit_on_error: self._exit_error = e self.stop() self._exited = True def stop(self): self._done = True def exited(self): return self._exited def exit_error(self): return self._exit_error from unittest import mock, TestCase from django.test.utils import override_settings from tethys_cli import settings_commands as cmds MOCK_SETTINGS = {'settings': {'TEST': 'test'}} class TestSettingsCommands(TestCase): def set_up(self): pass def tear_down(self): pass @mock.patch('tethys_cli.settings_commands.Path.open') @mock.patch('tethys_cli.settings_commands.yaml.safe_load', return_value=MOCK_SETTINGS) def test_read_settings(self, _, __): settings = cmds.read_settings() self.assertDictEqual(settings, MOCK_SETTINGS['settings']) @mock.patch('tethys_cli.settings_commands.Path.open') @mock.patch('tethys_cli.settings_commands.yaml.safe_load', return_value={}) @mock.patch('tethys_cli.settings_commands.yaml.safe_dump') def test_write_settings(self, mock_dump, _, mock_open): mock_open().__enter__.return_value = 'mock_file' cmds.write_settings(MOCK_SETTINGS['settings']) mock_dump.assert_called_with(MOCK_SETTINGS, 'mock_file') @mock.patch('tethys_cli.settings_commands.generate_portal_config_file') @mock.patch('tethys_cli.settings_commands.Path.exists', return_value=False) @mock.patch('tethys_cli.settings_commands.Path.open') @mock.patch('tethys_cli.settings_commands.yaml.safe_load', return_value={}) @mock.patch('tethys_cli.settings_commands.yaml.safe_dump') def test_write_settings_no_portal_config(self, mock_dump, _, mock_open, __, mock_gen_config): mock_open().__enter__.return_value = 'mock_file' cmds.write_settings(MOCK_SETTINGS['settings']) mock_gen_config.assert_called_once() mock_dump.assert_called_with(MOCK_SETTINGS, 'mock_file') @mock.patch('tethys_cli.settings_commands.generate_command') @mock.patch('tethys_cli.settings_commands.write_warning') def test_generate_portal_config_file(self, mock_write_warning, mock_gen): cmds.generate_portal_config_file() mock_write_warning.assert_called_once() mock_gen.assert_called_once() 
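# The msgq_runner docstring above describes a common pattern: a daemon thread
# blocks on a message queue and hands each message to a callback until a
# sentinel tells it to stop. The following is a minimal standard-library
# analogue of that pattern -- a sketch for illustration only, using
# queue/threading and a None sentinel instead of gr.msg_queue and msg.type();
# the name _QueueRunner is purely illustrative and not GNU Radio API.
import queue
import threading

class _QueueRunner(threading.Thread):
    """Blocking consumer thread: calls callback(msg) until it sees None."""
    def __init__(self, msg_queue, callback, exit_on_error=False):
        super().__init__(daemon=True)
        self._queue = msg_queue
        self._callback = callback
        self._exit_on_error = exit_on_error
        self._done = False
        self.exit_error = None
        self.start()

    def run(self):
        while not self._done:
            msg = self._queue.get()   # blocking read, like delete_head()
            if msg is None:           # sentinel plays the role of msg.type() != 0
                self.stop()
            else:
                try:
                    self._callback(msg)
                except Exception as err:
                    if self._exit_on_error:
                        self.exit_error = err
                        self.stop()

    def stop(self):
        self._done = True

# Usage sketch: q = queue.Queue(); _QueueRunner(q, print); q.put("hello"); q.put(None)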
@mock.patch('tethys_cli.settings_commands._get_dict_key_handle') @mock.patch('tethys_cli.settings_commands.write_settings') def test_set_settings(self, mock_write_settings, mock_get_key): test_settings = {} mock_get_key.return_value = (test_settings, 'test') cmds.set_settings(test_settings, [('test', 'test')]) self.assertDictEqual(test_settings, {'test': 'test'}) mock_write_settings.assert_called_with(test_settings) @mock.patch('tethys_cli.settings_commands.write_info') @mock.patch('tethys_cli.settings_commands._get_dict_key_handle') def test_get_setting(self, mock_get_key, mock_write_info): test_settings = {'test_key': 'test_value'} mock_get_key.return_value = (test_settings, 'test_key') cmds.get_setting(test_settings, 'test_key') mock_write_info.assert_called_with("test_key: 'test_value'") @override_settings(test_key='test_value') @mock.patch('tethys_cli.settings_commands.write_info') def test_get_setting_from_django_settings(self, mock_write_info): test_settings = {'test_key': 'test_value'} cmds.get_setting(test_settings, 'test_key') mock_write_info.assert_called_with("test_key: 'test_value'") @mock.patch('tethys_cli.settings_commands.dir', return_value=['one', 'two']) @mock.patch('tethys_cli.settings_commands.settings', new=mock.MagicMock(one=1, two=2)) @mock.patch('tethys_cli.settings_commands.write_info') def test_get_all_settings_from_django_settings(self, mock_write_info, _): test_settings = {} cmds.get_setting(test_settings, 'all') mock_write_info.assert_called_with("{'one': 1, 'two': 2}") @mock.patch('tethys_cli.settings_commands._get_dict_key_handle') @mock.patch('tethys_cli.settings_commands.write_settings') def test_remove_setting(self, mock_write_settings, mock_get_key): test_settings = {'test_key': 'test_value'} mock_get_key.return_value = (test_settings, 'test_key') cmds.remove_setting(test_settings, 'test_key') self.assertDictEqual(test_settings, {}) mock_write_settings.assert_called_with(test_settings) def test__get_dict_key_handle(self): d = {'test': {'test1': 'test'}} result = cmds._get_dict_key_handle(d, 'test.test1') self.assertEqual(result, (d['test'], 'test1')) def test__get_dict_key_handle_key_not_exists(self): d = {'test': 'test'} result = cmds._get_dict_key_handle(d, 'test1', not_exists_okay=True) self.assertEqual(result, (d, 'test1')) result = cmds._get_dict_key_handle(d, 'test1.test2', not_exists_okay=True) self.assertEqual(result, ({}, 'test2')) @mock.patch('tethys_cli.settings_commands.write_error') def test__get_dict_key_handle_error(self, mock_write_error): d = {'test': 'test'} cmds._get_dict_key_handle(d, 'test1') mock_write_error.assert_called() @mock.patch('tethys_cli.settings_commands.set_settings') @mock.patch('tethys_cli.settings_commands.read_settings', return_value={}) def test_settings_command_set(self, _, mock_set_settings): kwargs = [('key', 'value')] mock_args = mock.MagicMock(set_kwargs=kwargs) cmds.settings_command(mock_args) mock_set_settings.assert_called_with({}, kwargs) @mock.patch('tethys_cli.settings_commands.get_setting') @mock.patch('tethys_cli.settings_commands.read_settings', return_value={}) def test_settings_command_get(self, _, mock_get_setting): get_key = 'key' mock_args = mock.MagicMock(set_kwargs=None, get_key=get_key) cmds.settings_command(mock_args) mock_get_setting.assert_called_with({}, get_key) @mock.patch('tethys_cli.settings_commands.remove_setting') @mock.patch('tethys_cli.settings_commands.read_settings', return_value={}) def test_settings_command_rm(self, _, mock_remove_setting): rm_key = ['key'] mock_args = 
mock.MagicMock(set_kwargs=None, get_key=None, rm_key=rm_key) cmds.settings_command(mock_args) mock_remove_setting.assert_called_with({}, rm_key[0]) """Drop-in replacement for collections.OrderedDict by Raymond Hettinger http://code.activestate.com/recipes/576693/ """ from UserDict import DictMixin # Modified from original to support Python 2.4, see # http://code.google.com/p/simplejson/issues/detail?id=53 try: all except NameError: def all(seq): for elem in seq: if not elem: return False return True class OrderedDict(dict, DictMixin): def __init__(self, *args, **kwds): if len(args) > 1: raise TypeError('expected at most 1 arguments, got %d' % len(args)) try: self.__end except AttributeError: self.clear() self.update(*args, **kwds) def clear(self): self.__end = end = [] end += [None, end, end] # sentinel node for doubly linked list self.__map = {} # key --> [key, prev, next] dict.clear(self) def __setitem__(self, key, value): if key not in self: end = self.__end curr = end[1] curr[2] = end[1] = self.__map[key] = [key, curr, end] dict.__setitem__(self, key, value) def __delitem__(self, key): dict.__delitem__(self, key) key, prev, next = self.__map.pop(key) prev[2] = next next[1] = prev def __iter__(self): end = self.__end curr = end[2] while curr is not end: yield curr[0] curr = curr[2] def __reversed__(self): end = self.__end curr = end[1] while curr is not end: yield curr[0] curr = curr[1] def popitem(self, last=True): if not self: raise KeyError('dictionary is empty') # Modified from original to support Python 2.4, see # http://code.google.com/p/simplejson/issues/detail?id=53 if last: key = reversed(self).next() else: key = iter(self).next() value = self.pop(key) return key, value def __reduce__(self): items = [[k, self[k]] for k in self] tmp = self.__map, self.__end del self.__map, self.__end inst_dict = vars(self).copy() self.__map, self.__end = tmp if inst_dict: return (self.__class__, (items,), inst_dict) return self.__class__, (items,) def keys(self): return list(self) setdefault = DictMixin.setdefault update = DictMixin.update pop = DictMixin.pop values = DictMixin.values items = DictMixin.items iterkeys = DictMixin.iterkeys itervalues = DictMixin.itervalues iteritems = DictMixin.iteritems def __repr__(self): if not self: return '%s()' % (self.__class__.__name__,) return '%s(%r)' % (self.__class__.__name__, self.items()) def copy(self): return self.__class__(self) @classmethod def fromkeys(cls, iterable, value=None): d = cls() for key in iterable: d[key] = value return d def __eq__(self, other): if isinstance(other, OrderedDict): return len(self)==len(other) and \ all(p==q for p, q in zip(self.items(), other.items())) return dict.__eq__(self, other) def __ne__(self, other): return not self == other import unittest import os from robot.errors import DataError from robot.tidy import TidyCommandLine from robot.utils.asserts import assert_raises_with_msg, assert_equals, assert_true class TestArgumentValidation(unittest.TestCase): def test_valid_explicit_format(self): opts, _ = self._validate(format='txt') assert_equals(opts['format'], 'TXT') def test_valid_implicit_format(self): opts, _ = self._validate(args=[__file__, 'out.robot']) assert_equals(opts['format'], 'ROBOT') def test_no_format(self): opts, _ = self._validate() assert_equals(opts['format'], None) def test_invalid_explicit_format(self): self._validate(format='invalid', error="Invalid format 'INVALID'.") def test_invalid_implicit_format(self): self._validate(args=[__file__, 'y.inv'], error="Invalid format 'INV'.") 
self._validate(args=[__file__, 'inv'], error="Invalid format ''.") def test_no_space_count(self): opts, _ = self._validate() assert_true('spacecount' not in opts) def test_valid_space_count(self): opts, _ = self._validate(spacecount='42') assert_equals(opts['spacecount'], 42) def test_invalid_space_count(self): error = '--spacecount must be an integer greater than 1.' self._validate(spacecount='not a number', error=error) self._validate(spacecount='1', error=error) def test_inplace_and_recursive_cannot_be_used_together(self): self._validate(inplace=True, recursive=True, error='--recursive and --inplace can not be used together.') def test_zero_argument_is_never_accepted(self): class Stubbed(TidyCommandLine): def _report_error(self, message, **args): raise DataError(message) for args in [], ['--inplace'], ['--recursive']: assert_raises_with_msg(DataError, 'Expected at least 1 argument, got 0.', Stubbed().execute_cli, args) def test_default_mode_accepts_one_or_two_arguments(self): self._validate(args=[__file__]) self._validate(args=[__file__, '2.txt']) self._validate(args=[__file__, '2', '3'], error='Default mode requires 1 or 2 arguments.') def test_recursive_accepts_only_one_argument(self): self._validate(recursive=True, args=['.', '..'], error='--recursive requires exactly one argument.') def test_inplace_accepts_one_or_more_arguments(self): for count in range(1, 10): self._validate(inplace=True, args=[__file__]*count) def test_default_mode_requires_input_to_be_file(self): error = 'Default mode requires input to be a file.' self._validate(args=['.'], error=error) self._validate(args=['non_existing.txt'], error=error) def test_inplace_requires_inputs_to_be_files(self): error = '--inplace requires inputs to be files.' self._validate(inplace=True, args=[__file__, '.'], error=error) self._validate(inplace=True, args=[__file__, 'nonex.txt'], error=error) def test_recursive_requires_input_to_be_directory(self): self._validate(recursive=True, error='--recursive requires input to be a directory.') def test_line_separator(self): for input, expected in [(None, os.linesep), ('Native', os.linesep), ('windows', '\r\n'), ('UNIX', '\n')]: opts, _ = self._validate(lineseparator=input) assert_equals(opts['lineseparator'], expected) def test_invalid_line_separator(self): self._validate(lineseparator='invalid', error="Invalid line separator 'invalid'.") def _validate(self, inplace=False, recursive=False, format=None, spacecount=None, lineseparator=None, args=[__file__], error=None): opts = {'inplace': inplace, 'recursive': recursive, 'format': format, 'spacecount': spacecount, 'lineseparator': lineseparator} validate = lambda: TidyCommandLine().validate(opts, args) if error: assert_raises_with_msg(DataError, error, validate) else: return validate() if __name__ == '__main__': unittest.main() # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see . 
from __future__ import (absolute_import, division, print_function) __metaclass__ = type import re import socket import struct from ansible.module_utils.facts.network.base import Network class GenericBsdIfconfigNetwork(Network): """ This is a generic BSD subclass of Network using the ifconfig command. It defines - interfaces (a list of interface names) - interface_ dictionary of ipv4, ipv6, and mac address information. - all_ipv4_addresses and all_ipv6_addresses: lists of all configured addresses. """ platform = 'Generic_BSD_Ifconfig' def populate(self, collected_facts=None): network_facts = {} ifconfig_path = self.module.get_bin_path('ifconfig') if ifconfig_path is None: return network_facts route_path = self.module.get_bin_path('route') if route_path is None: return network_facts default_ipv4, default_ipv6 = self.get_default_interfaces(route_path) interfaces, ips = self.get_interfaces_info(ifconfig_path) interfaces = self.detect_type_media(interfaces) self.merge_default_interface(default_ipv4, interfaces, 'ipv4') self.merge_default_interface(default_ipv6, interfaces, 'ipv6') network_facts['interfaces'] = sorted(list(interfaces.keys())) for iface in interfaces: network_facts[iface] = interfaces[iface] network_facts['default_ipv4'] = default_ipv4 network_facts['default_ipv6'] = default_ipv6 network_facts['all_ipv4_addresses'] = ips['all_ipv4_addresses'] network_facts['all_ipv6_addresses'] = ips['all_ipv6_addresses'] return network_facts def detect_type_media(self, interfaces): for iface in interfaces: if 'media' in interfaces[iface]: if 'ether' in interfaces[iface]['media'].lower(): interfaces[iface]['type'] = 'ether' return interfaces def get_default_interfaces(self, route_path): # Use the commands: # route -n get 8.8.8.8 -> Google public DNS # route -n get -inet6 2404:6800:400a:800::1012 -> ipv6.google.com # to find out the default outgoing interface, address, and gateway command = dict(v4=[route_path, '-n', 'get', '8.8.8.8'], v6=[route_path, '-n', 'get', '-inet6', '2404:6800:400a:800::1012']) interface = dict(v4={}, v6={}) for v in 'v4', 'v6': if v == 'v6' and not socket.has_ipv6: continue rc, out, err = self.module.run_command(command[v]) if not out: # v6 routing may result in # RTNETLINK answers: Invalid argument continue for line in out.splitlines(): words = line.split() # Collect output from route command if len(words) > 1: if words[0] == 'interface:': interface[v]['interface'] = words[1] if words[0] == 'gateway:': interface[v]['gateway'] = words[1] return interface['v4'], interface['v6'] def get_interfaces_info(self, ifconfig_path, ifconfig_options='-a'): interfaces = {} current_if = {} ips = dict( all_ipv4_addresses=[], all_ipv6_addresses=[], ) # FreeBSD, DragonflyBSD, NetBSD, OpenBSD and OS X all implicitly add '-a' # when running the command 'ifconfig'. # Solaris must explicitly run the command 'ifconfig -a'. 
rc, out, err = self.module.run_command([ifconfig_path, ifconfig_options]) for line in out.splitlines(): if line: words = line.split() if words[0] == 'pass': continue elif re.match(r'^\S', line) and len(words) > 3: current_if = self.parse_interface_line(words) interfaces[current_if['device']] = current_if elif words[0].startswith('options='): self.parse_options_line(words, current_if, ips) elif words[0] == 'nd6': self.parse_nd6_line(words, current_if, ips) elif words[0] == 'ether': self.parse_ether_line(words, current_if, ips) elif words[0] == 'media:': self.parse_media_line(words, current_if, ips) elif words[0] == 'status:': self.parse_status_line(words, current_if, ips) elif words[0] == 'lladdr': self.parse_lladdr_line(words, current_if, ips) elif words[0] == 'inet':