class ArtifactDetectInputSpec(BaseInterfaceInputSpec):
    realigned_files = InputMultiPath(File(exists=True),
                                     desc="Names of realigned functional data files",
                                     mandatory=True)
    realignment_parameters = InputMultiPath(File(exists=True), mandatory=True,
                                            desc=("Names of realignment parameters "
                                                  "corresponding to the functional data files"))
    parameter_source = traits.Enum("SPM", "FSL", "AFNI", "NiPy", "FSFAST",
                                   desc="Source of movement parameters",
                                   mandatory=True)
    use_differences = traits.ListBool([True, False], minlen=2, maxlen=2,
                                      usedefault=True,
                                      desc=("Use differences between successive motion (first element) "
                                            "and intensity parameter (second element) estimates in order "
                                            "to determine outliers. (default is [True, False])"))
    use_norm = traits.Bool(True, requires=['norm_threshold'],
                           desc=("Uses a composite of the motion parameters in "
                                 "order to determine outliers."),
                           usedefault=True)
    norm_threshold = traits.Float(desc=("Threshold to use to detect motion-related "
                                        "outliers when composite motion is "
                                        "being used"),
                                  mandatory=True,
                                  xor=['rotation_threshold',
                                       'translation_threshold'])
    rotation_threshold = traits.Float(mandatory=True, xor=['norm_threshold'],
                                      desc=("Threshold (in radians) to use to detect rotation-related "
                                            "outliers"))
    translation_threshold = traits.Float(mandatory=True, xor=['norm_threshold'],
                                         desc=("Threshold (in mm) to use to detect translation-related "
                                               "outliers"))
    zintensity_threshold = traits.Float(mandatory=True,
                                        desc=("Intensity Z-threshold used to detect images that deviate "
                                              "from the mean"))
    mask_type = traits.Enum('spm_global', 'file', 'thresh',
                            desc=("Type of mask that should be used to mask the functional "
                                  "data. *spm_global* uses an spm_global like calculation to "
                                  "determine the brain mask. *file* specifies a brain mask "
                                  "file (should be an image file consisting of 0s and 1s). "
                                  "*thresh* specifies a threshold to use. "
                                  "By default all voxels are used, unless one of these "
                                  "mask types is defined."),
                            mandatory=True)
    mask_file = File(exists=True,
                     desc="Mask file to be used if mask_type is 'file'.")
    mask_threshold = traits.Float(desc=("Mask threshold to be used if mask_type "
                                        "is 'thresh'."))
    intersect_mask = traits.Bool(True,
                                 desc=("Intersect the masks when computed from "
                                       "spm_global."))
    save_plot = traits.Bool(True, desc="save plots containing outliers",
                            usedefault=True)
    plot_type = traits.Enum('png', 'svg', 'eps', 'pdf',
                            desc="file type of the outlier plot",
                            usedefault=True)
    bound_by_brainmask = traits.Bool(False,
                                     desc=("use the brain mask to "
                                           "determine bounding box "
                                           "for composite norm (works "
                                           "for SPM and Nipy - currently "
                                           "inaccurate for FSL, AFNI)"),
                                     usedefault=True)
    global_threshold = traits.Float(8.0,
                                    desc=("use this threshold when mask "
                                          "type equals 'spm_global'"),
                                    usedefault=True)


class ArtifactDetectOutputSpec(TraitedSpec):
    outlier_files = OutputMultiPath(File(exists=True),
                                    desc=("One file for each functional run containing a list of "
                                          "0-based indices corresponding to outlier volumes"))
    intensity_files = OutputMultiPath(File(exists=True),
                                      desc=("One file for each functional run containing the global "
                                            "intensity values determined from the brainmask"))
    norm_files = OutputMultiPath(File,
                                 desc=("One file for each functional run containing the composite "
                                       "norm"))
    statistic_files = OutputMultiPath(File(exists=True),
                                      desc=("One file for each functional run containing information "
                                            "about the different types of artifacts and if design info is "
                                            "provided then details of stimulus correlated motion and a "
                                            "listing of artifacts by event type."))
    plot_files = OutputMultiPath(File,
                                 desc=("One image file for each functional run containing the "
                                       "detected outliers"))
    mask_files = OutputMultiPath(File,
                                 desc=("One image file for each functional run containing the mask "
                                       "used for global signal calculation"))
    displacement_files = OutputMultiPath(File,
                                         desc=("One image file for each functional run containing the voxel "
                                               "displacement timeseries"))


class ArtifactDetect(BaseInterface):
    """Detects outliers in a functional imaging series

    Uses intensity and motion parameters to infer outliers. If `use_norm` is
    True, it computes the movement of the center of each face of a cuboid
    centered around the head and returns the maximal movement across the
    centers.

    Examples
    --------
    >>> ad = ArtifactDetect()
    >>> ad.inputs.realigned_files = 'functional.nii'
    >>> ad.inputs.realignment_parameters = 'functional.par'
    >>> ad.inputs.parameter_source = 'FSL'
    >>> ad.inputs.norm_threshold = 1
    >>> ad.inputs.use_differences = [True, False]
    >>> ad.inputs.zintensity_threshold = 3
    >>> ad.run()  # doctest: +SKIP
    """
    input_spec = ArtifactDetectInputSpec
    output_spec = ArtifactDetectOutputSpec

    def __init__(self, **inputs):
        super(ArtifactDetect, self).__init__(**inputs)

    def _get_output_filenames(self, motionfile, output_dir):
        """Generate output files based on motion filenames

        Parameters
        ----------
        motionfile: file/string
            Filename for motion parameter file
        output_dir: string
            output directory in which the files will be generated
        """
        if isinstance(motionfile, six.string_types):
            infile = motionfile
        elif isinstance(motionfile, list):
            infile = motionfile[0]
        else:
            raise Exception("Unknown type of file")
        _, filename, ext = split_filename(infile)
        artifactfile = os.path.join(output_dir, ''.join(('art.', filename,
                                                         '_outliers.txt')))
        intensityfile = os.path.join(output_dir, ''.join(('global_intensity.',
                                                          filename, '.txt')))
        statsfile = os.path.join(output_dir, ''.join(('stats.', filename,
                                                      '.txt')))
        normfile = os.path.join(output_dir, ''.join(('norm.', filename,
                                                     '.txt')))
        plotfile = os.path.join(output_dir, ''.join(('plot.', filename, '.',
                                                     self.inputs.plot_type)))
        displacementfile = os.path.join(output_dir, ''.join(('disp.',
                                                             filename, ext)))
        maskfile = os.path.join(output_dir, ''.join(('mask.', filename, ext)))
        return (artifactfile, intensityfile, statsfile, normfile, plotfile,
                displacementfile, maskfile)

    def _list_outputs(self):
        outputs = self._outputs().get()
        outputs['outlier_files'] = []
        outputs['intensity_files'] = []
        outputs['statistic_files'] = []
        outputs['mask_files'] = []
        if isdefined(self.inputs.use_norm) and self.inputs.use_norm:
            outputs['norm_files'] = []
            if self.inputs.bound_by_brainmask:
                outputs['displacement_files'] = []
        if isdefined(self.inputs.save_plot) and self.inputs.save_plot:
            outputs['plot_files'] = []
        for i, f in enumerate(filename_to_list(self.inputs.realigned_files)):
            (outlierfile, intensityfile, statsfile, normfile, plotfile,
             displacementfile, maskfile) = \
                self._get_output_filenames(f, os.getcwd())
            outputs['outlier_files'].insert(i, outlierfile)
            outputs['intensity_files'].insert(i, intensityfile)
            outputs['statistic_files'].insert(i, statsfile)
            outputs['mask_files'].insert(i, maskfile)
            if isdefined(self.inputs.use_norm) and self.inputs.use_norm:
                outputs['norm_files'].insert(i, normfile)
                if self.inputs.bound_by_brainmask:
                    outputs['displacement_files'].insert(i, displacementfile)
            if isdefined(self.inputs.save_plot) and self.inputs.save_plot:
                outputs['plot_files'].insert(i, plotfile)
        return outputs

    def _plot_outliers_with_wave(self, wave, outliers, name):
        import matplotlib.pyplot as plt
        plt.plot(wave)
        plt.ylim([wave.min(), wave.max()])
        plt.xlim([0, len(wave) - 1])
        if len(outliers):
            plt.plot(np.tile(outliers[:, None], (1, 2)).T,
                     np.tile([wave.min(), wave.max()], (len(outliers), 1)).T,
                     'r')
        plt.xlabel('Scans - 0-based')
        plt.ylabel(name)

    def _detect_outliers_core(self, imgfile, motionfile, runidx, cwd=None):
        """
        Core routine for detecting outliers
        """
        if not cwd:
            cwd = os.getcwd()

        # read in functional image
        if isinstance(imgfile, six.string_types):
            nim = load(imgfile)
        elif isinstance(imgfile, list):
            if len(imgfile) == 1:
                nim = load(imgfile[0])
            else:
                images = [load(f) for f in imgfile]
                nim = funcs.concat_images(images)

        # compute global intensity signal
        (x, y, z, timepoints) = nim.get_shape()

        data = nim.get_data()
        affine = nim.get_affine()
        g = np.zeros((timepoints, 1))
        masktype = self.inputs.mask_type
        if masktype == 'spm_global':  # spm_global like calculation
            iflogger.debug('art: using spm global')
            intersect_mask = self.inputs.intersect_mask
            if intersect_mask:
                mask = np.ones((x, y, z), dtype=bool)
                for t0 in range(timepoints):
                    vol = data[:, :, :, t0]
                    # Use an SPM like approach
                    mask_tmp = vol > \
                        (_nanmean(vol) / self.inputs.global_threshold)
                    mask = mask * mask_tmp
                for t0 in range(timepoints):
                    vol = data[:, :, :, t0]
                    g[t0] = _nanmean(vol[mask])
                if len(find_indices(mask)) < (np.prod((x, y, z)) / 10):
                    intersect_mask = False
                    g = np.zeros((timepoints, 1))
            if not intersect_mask:
                iflogger.info('not intersect_mask is True')
                mask = np.zeros((x, y, z, timepoints))
                for t0 in range(timepoints):
                    vol = data[:, :, :, t0]
                    mask_tmp = vol > \
                        (_nanmean(vol) / self.inputs.global_threshold)
                    mask[:, :, :, t0] = mask_tmp
                    g[t0] = np.nansum(vol * mask_tmp) / np.nansum(mask_tmp)
        elif masktype == 'file':  # uses a mask image to determine intensity
            maskimg = load(self.inputs.mask_file)
            mask = maskimg.get_data()
            affine = maskimg.get_affine()
            mask = mask > 0.5
            for t0 in range(timepoints):
                vol = data[:, :, :, t0]
                g[t0] = _nanmean(vol[mask])
        elif masktype == 'thresh':  # uses a fixed signal threshold
            for t0 in range(timepoints):
                vol = data[:, :, :, t0]
                mask = vol > self.inputs.mask_threshold
                g[t0] = _nanmean(vol[mask])
        else:
            mask = np.ones((x, y, z))
            g = _nanmean(data[mask > 0, :], 1)

        # compute normalized intensity values
        gz = signal.detrend(g, axis=0)  # detrend the signal
        if self.inputs.use_differences[1]:
            gz = np.concatenate((np.zeros((1, 1)),
                                 np.diff(gz, n=1, axis=0)), axis=0)
        gz = (gz - np.mean(gz)) / np.std(gz)  # normalize the detrended signal
        iidx = find_indices(abs(gz) > self.inputs.zintensity_threshold)

        # read in motion parameters
        mc_in = np.loadtxt(motionfile)
        mc = deepcopy(mc_in)

        (artifactfile, intensityfile, statsfile, normfile, plotfile,
         displacementfile, maskfile) = self._get_output_filenames(imgfile, cwd)
        mask_img = Nifti1Image(mask.astype(np.uint8), affine)
        mask_img.to_filename(maskfile)

        if self.inputs.use_norm:
            brain_pts = None
            if self.inputs.bound_by_brainmask:
                voxel_coords = np.nonzero(mask)
                coords = np.vstack((voxel_coords[0],
                                    np.vstack((voxel_coords[1],
                                               voxel_coords[2])))).T
                brain_pts = np.dot(affine,
                                   np.hstack((coords,
                                              np.ones((coords.shape[0],
                                                       1)))).T)
            # calculate the norm of the motion parameters
            normval, displacement = _calc_norm(mc,
                                               self.inputs.use_differences[0],
                                               self.inputs.parameter_source,
                                               brain_pts=brain_pts)
            tidx = find_indices(normval > self.inputs.norm_threshold)
            ridx = find_indices(normval < 0)
            if displacement is not None:
                dmap = np.zeros((x, y, z, timepoints), dtype=np.float)
                for i in range(timepoints):
                    dmap[voxel_coords[0],
                         voxel_coords[1],
                         voxel_coords[2], i] = displacement[i, :]
                dimg = Nifti1Image(dmap, affine)
                dimg.to_filename(displacementfile)
        else:
            if self.inputs.use_differences[0]:
                mc = np.concatenate((np.zeros((1, 6)),
                                     np.diff(mc_in, n=1, axis=0)), axis=0)
            traval = mc[:, 0:3]  # translation parameters (mm)
            rotval = mc[:, 3:6]  # rotation parameters (rad)
            tidx = find_indices(np.sum(abs(traval) >
                                       self.inputs.translation_threshold, 1) > 0)
            ridx = find_indices(np.sum(abs(rotval) >
                                       self.inputs.rotation_threshold, 1) > 0)

        outliers = np.unique(np.union1d(iidx, np.union1d(tidx, ridx)))

        # write output to outputfile
        np.savetxt(artifactfile, outliers, fmt='%d', delimiter=' ')
        np.savetxt(intensityfile, g, fmt='%.2f', delimiter=' ')
        if self.inputs.use_norm:
            np.savetxt(normfile, normval, fmt='%.4f', delimiter=' ')

        if isdefined(self.inputs.save_plot) and self.inputs.save_plot:
            import matplotlib
            matplotlib.use(config.get("execution", "matplotlib_backend"))
            import matplotlib.pyplot as plt
            fig = plt.figure()
            if isdefined(self.inputs.use_norm) and self.inputs.use_norm:
                plt.subplot(211)
            else:
                plt.subplot(311)
            self._plot_outliers_with_wave(gz, iidx, 'Intensity')
            if isdefined(self.inputs.use_norm) and self.inputs.use_norm:
                plt.subplot(212)
                self._plot_outliers_with_wave(normval,
                                              np.union1d(tidx, ridx),
                                              'Norm (mm)')
            else:
                diff = ''
                if self.inputs.use_differences[0]:
                    diff = 'diff'
                plt.subplot(312)
                self._plot_outliers_with_wave(traval, tidx,
                                              'Translation (mm)' + diff)
                plt.subplot(313)
                self._plot_outliers_with_wave(rotval, ridx,
                                              'Rotation (rad)' + diff)
            plt.savefig(plotfile)
            plt.close(fig)

        motion_outliers = np.union1d(tidx, ridx)
        stats = [{'motion_file': motionfile,
                  'functional_file': imgfile},
                 {'common_outliers': len(np.intersect1d(iidx, motion_outliers)),
                  'intensity_outliers': len(np.setdiff1d(iidx,
                                                         motion_outliers)),
                  'motion_outliers': len(np.setdiff1d(motion_outliers, iidx)),
                  },
                 {'motion': [{'using differences': self.inputs.use_differences[0]},
                             {'mean': np.mean(mc_in, axis=0).tolist(),
                              'min': np.min(mc_in, axis=0).tolist(),
                              'max': np.max(mc_in, axis=0).tolist(),
                              'std': np.std(mc_in, axis=0).tolist()},
                             ]},
                 {'intensity': [{'using differences': self.inputs.use_differences[1]},
                                {'mean': np.mean(gz, axis=0).tolist(),
                                 'min': np.min(gz, axis=0).tolist(),
                                 'max': np.max(gz, axis=0).tolist(),
                                 'std': np.std(gz, axis=0).tolist()},
                                ]},
                 ]
        if self.inputs.use_norm:
            stats.insert(3, {'motion_norm':
                             {'mean': np.mean(normval, axis=0).tolist(),
                              'min': np.min(normval, axis=0).tolist(),
                              'max': np.max(normval, axis=0).tolist(),
                              'std': np.std(normval, axis=0).tolist(),
                              }})
        save_json(statsfile, stats)

    def _run_interface(self, runtime):
        """Execute this module.
        """
        funcfilelist = filename_to_list(self.inputs.realigned_files)
        motparamlist = filename_to_list(self.inputs.realignment_parameters)
        for i, imgf in enumerate(funcfilelist):
            self._detect_outliers_core(imgf, motparamlist[i], i,
                                       cwd=os.getcwd())
        return runtime


class StimCorrInputSpec(BaseInterfaceInputSpec):
    realignment_parameters = InputMultiPath(File(exists=True), mandatory=True,
                                            desc=('Names of realignment parameters corresponding to the functional '
                                                  'data files'))
    intensity_values = InputMultiPath(File(exists=True), mandatory=True,
                                      desc='Name of file containing intensity values')
    spm_mat_file = File(exists=True, mandatory=True,
                        desc='SPM mat file (use pre-estimate SPM.mat file)')
    concatenated_design = traits.Bool(mandatory=True,
                                      desc='state if the design matrix contains concatenated sessions')


class StimCorrOutputSpec(TraitedSpec):
    stimcorr_files = OutputMultiPath(File(exists=True),
                                     desc='List of files containing correlation values')


class StimulusCorrelation(BaseInterface):
    """Determines if stimuli are correlated with motion or intensity
    parameters.

    Currently this class supports an SPM generated design matrix and requires
    intensity parameters. This implies that one must run
    :ref:`ArtifactDetect` and :ref:`Level1Design` prior to running this or
    provide an SPM.mat file and intensity parameters through some other
    means.

    Examples
    --------
    >>> sc = StimulusCorrelation()
    >>> sc.inputs.realignment_parameters = 'functional.par'
    >>> sc.inputs.intensity_values = 'functional.rms'
    >>> sc.inputs.spm_mat_file = 'SPM.mat'
    >>> sc.inputs.concatenated_design = False
    >>> sc.run()  # doctest: +SKIP
    """
    input_spec = StimCorrInputSpec
    output_spec = StimCorrOutputSpec

    def _get_output_filenames(self, motionfile, output_dir):
        """Generate output files based on motion filenames

        Parameters
        ----------
        motionfile: file/string
            Filename for motion parameter file
        output_dir: string
            output directory in which the files will be generated
        """
        (_, filename) = os.path.split(motionfile)
        (filename, _) = os.path.splitext(filename)
        corrfile = os.path.join(output_dir, ''.join(('qa.', filename,
                                                     '_stimcorr.txt')))
        return corrfile

    def _stimcorr_core(self, motionfile, intensityfile, designmatrix, cwd=None):
        """
        Core routine for determining stimulus correlation
        """
        if not cwd:
            cwd = os.getcwd()
        # read in motion parameters
        mc_in = np.loadtxt(motionfile)
        g_in = np.loadtxt(intensityfile)
        g_in.shape = g_in.shape[0], 1
        dcol = designmatrix.shape[1]
        mccol = mc_in.shape[1]
        concat_matrix = np.hstack((np.hstack((designmatrix, mc_in)), g_in))
        cm = np.corrcoef(concat_matrix, rowvar=0)
        corrfile = self._get_output_filenames(motionfile, cwd)
        # write output to outputfile
        file = open(corrfile, 'w')
        file.write("Stats for:\n")
        file.write("Stimulus correlated motion:\n%s\n" % motionfile)
        for i in range(dcol):
            file.write("SCM.%d:" % i)
            for v in cm[i, dcol + np.arange(mccol)]:
                file.write(" %.2f" % v)
            file.write('\n')
        file.write("Stimulus correlated intensity:\n%s\n" % intensityfile)
        for i in range(dcol):
            file.write("SCI.%d: %.2f\n" % (i, cm[i, -1]))
        file.close()

    def _get_spm_submatrix(self, spmmat, sessidx, rows=None):
        """
        Parameters
        ----------
        spmmat: scipy matlab object
            full SPM.mat file loaded into a scipy object
        sessidx: int
            index to session that needs to be extracted.
        """
        designmatrix = spmmat['SPM'][0][0].xX[0][0].X
        U = spmmat['SPM'][0][0].Sess[0][sessidx].U[0]
        if rows is None:
            rows = spmmat['SPM'][0][0].Sess[0][sessidx].row[0] - 1
        cols = spmmat['SPM'][0][0].Sess[0][sessidx].col[0][range(len(U))] - 1
        outmatrix = designmatrix.take(rows.tolist(),
                                      axis=0).take(cols.tolist(), axis=1)
        return outmatrix

    def _run_interface(self, runtime):
        """Execute this module.
        """
        motparamlist = self.inputs.realignment_parameters
        intensityfiles = self.inputs.intensity_values
        spmmat = sio.loadmat(self.inputs.spm_mat_file, struct_as_record=False)
        nrows = []
        for i in range(len(motparamlist)):
            sessidx = i
            rows = None
            if self.inputs.concatenated_design:
                sessidx = 0
                mc_in = np.loadtxt(motparamlist[i])
                rows = np.sum(nrows) + np.arange(mc_in.shape[0])
                nrows.append(mc_in.shape[0])
            matrix = self._get_spm_submatrix(spmmat, sessidx, rows)
            self._stimcorr_core(motparamlist[i], intensityfiles[i],
                                matrix, os.getcwd())
        return runtime

    def _list_outputs(self):
        outputs = self._outputs().get()
        files = []
        for i, f in enumerate(self.inputs.realignment_parameters):
            files.insert(i, self._get_output_filenames(f, os.getcwd()))
        if files:
            outputs['stimcorr_files'] = files
        return outputs


######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
# # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### # Sampling from about 20M text materials include literature and computer technology # 128 --> 0.79 # 256 --> 0.92 # 512 --> 0.986 # 1024 --> 0.99944 # 2048 --> 0.99999 # # Idea Distribution Ratio = 0.98653 / (1-0.98653) = 73.24 # Random Distribution Ration = 512 / (2350-512) = 0.279. # # Typical Distribution Ratio EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0 EUCKR_TABLE_SIZE = 2352 # Char to FreqOrder table , EUCKRCharToFreqOrder = ( \ 13, 130, 120,1396, 481,1719,1720, 328, 609, 212,1721, 707, 400, 299,1722, 87, 1397,1723, 104, 536,1117,1203,1724,1267, 685,1268, 508,1725,1726,1727,1728,1398, 1399,1729,1730,1731, 141, 621, 326,1057, 368,1732, 267, 488, 20,1733,1269,1734, 945,1400,1735, 47, 904,1270,1736,1737, 773, 248,1738, 409, 313, 786, 429,1739, 116, 987, 813,1401, 683, 75,1204, 145,1740,1741,1742,1743, 16, 847, 667, 622, 708,1744,1745,1746, 966, 787, 304, 129,1747, 60, 820, 123, 676,1748,1749,1750, 1751, 617,1752, 626,1753,1754,1755,1756, 653,1757,1758,1759,1760,1761,1762, 856, 344,1763,1764,1765,1766, 89, 401, 418, 806, 905, 848,1767,1768,1769, 946,1205, 709,1770,1118,1771, 241,1772,1773,1774,1271,1775, 569,1776, 999,1777,1778,1779, 1780, 337, 751,1058, 28, 628, 254,1781, 177, 906, 270, 349, 891,1079,1782, 19, 1783, 379,1784, 315,1785, 629, 754,1402, 559,1786, 636, 203,1206,1787, 710, 567, 1788, 935, 814,1789,1790,1207, 766, 528,1791,1792,1208,1793,1794,1795,1796,1797, 1403,1798,1799, 533,1059,1404,1405,1156,1406, 936, 884,1080,1800, 351,1801,1802, 1803,1804,1805, 801,1806,1807,1808,1119,1809,1157, 714, 474,1407,1810, 298, 899, 885,1811,1120, 802,1158,1812, 892,1813,1814,1408, 659,1815,1816,1121,1817,1818, 1819,1820,1821,1822, 319,1823, 594, 545,1824, 815, 937,1209,1825,1826, 573,1409, 1022,1827,1210,1828,1829,1830,1831,1832,1833, 556, 722, 807,1122,1060,1834, 697, 1835, 900, 557, 715,1836,1410, 540,1411, 752,1159, 294, 597,1211, 976, 803, 770, 1412,1837,1838, 39, 794,1413, 358,1839, 371, 925,1840, 453, 661, 788, 531, 723, 544,1023,1081, 869, 91,1841, 392, 430, 790, 602,1414, 677,1082, 457,1415,1416, 1842,1843, 475, 327,1024,1417, 795, 121,1844, 733, 403,1418,1845,1846,1847, 300, 119, 711,1212, 627,1848,1272, 207,1849,1850, 796,1213, 382,1851, 519,1852,1083, 893,1853,1854,1855, 367, 809, 487, 671,1856, 663,1857,1858, 956, 471, 306, 857, 1859,1860,1160,1084,1861,1862,1863,1864,1865,1061,1866,1867,1868,1869,1870,1871, 282, 96, 574,1872, 502,1085,1873,1214,1874, 907,1875,1876, 827, 977,1419,1420, 1421, 268,1877,1422,1878,1879,1880, 308,1881, 2, 537,1882,1883,1215,1884,1885, 127, 791,1886,1273,1423,1887, 34, 336, 404, 643,1888, 571, 654, 894, 840,1889, 0, 886,1274, 122, 575, 260, 908, 938,1890,1275, 410, 316,1891,1892, 100,1893, 1894,1123, 48,1161,1124,1025,1895, 
633, 901,1276,1896,1897, 115, 816,1898, 317, 1899, 694,1900, 909, 734,1424, 572, 866,1425, 691, 85, 524,1010, 543, 394, 841, 1901,1902,1903,1026,1904,1905,1906,1907,1908,1909, 30, 451, 651, 988, 310,1910, 1911,1426, 810,1216, 93,1912,1913,1277,1217,1914, 858, 759, 45, 58, 181, 610, 269,1915,1916, 131,1062, 551, 443,1000, 821,1427, 957, 895,1086,1917,1918, 375, 1919, 359,1920, 687,1921, 822,1922, 293,1923,1924, 40, 662, 118, 692, 29, 939, 887, 640, 482, 174,1925, 69,1162, 728,1428, 910,1926,1278,1218,1279, 386, 870, 217, 854,1163, 823,1927,1928,1929,1930, 834,1931, 78,1932, 859,1933,1063,1934, 1935,1936,1937, 438,1164, 208, 595,1938,1939,1940,1941,1219,1125,1942, 280, 888, 1429,1430,1220,1431,1943,1944,1945,1946,1947,1280, 150, 510,1432,1948,1949,1950, 1951,1952,1953,1954,1011,1087,1955,1433,1043,1956, 881,1957, 614, 958,1064,1065, 1221,1958, 638,1001, 860, 967, 896,1434, 989, 492, 553,1281,1165,1959,1282,1002, 1283,1222,1960,1961,1962,1963, 36, 383, 228, 753, 247, 454,1964, 876, 678,1965, 1966,1284, 126, 464, 490, 835, 136, 672, 529, 940,1088,1435, 473,1967,1968, 467, 50, 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, 849, 882,1126,1285, 639,1044, 133, 140, 288, 360, 811, 563,1027, 561, 142, 523,1969,1970,1971, 7, 103, 296, 439, 407, 506, 634, 990,1972,1973,1974,1975, 645,1976,1977,1978,1979, 1980,1981, 236,1982,1436,1983,1984,1089, 192, 828, 618, 518,1166, 333,1127,1985, 818,1223,1986,1987,1988,1989,1990,1991,1992,1993, 342,1128,1286, 746, 842,1994, 1995, 560, 223,1287, 98, 8, 189, 650, 978,1288,1996,1437,1997, 17, 345, 250, 423, 277, 234, 512, 226, 97, 289, 42, 167,1998, 201,1999,2000, 843, 836, 824, 532, 338, 783,1090, 182, 576, 436,1438,1439, 527, 500,2001, 947, 889,2002,2003, 2004,2005, 262, 600, 314, 447,2006, 547,2007, 693, 738,1129,2008, 71,1440, 745, 619, 688,2009, 829,2010,2011, 147,2012, 33, 948,2013,2014, 74, 224,2015, 61, 191, 918, 399, 637,2016,1028,1130, 257, 902,2017,2018,2019,2020,2021,2022,2023, 2024,2025,2026, 837,2027,2028,2029,2030, 179, 874, 591, 52, 724, 246,2031,2032, 2033,2034,1167, 969,2035,1289, 630, 605, 911,1091,1168,2036,2037,2038,1441, 912, 2039, 623,2040,2041, 253,1169,1290,2042,1442, 146, 620, 611, 577, 433,2043,1224, 719,1170, 959, 440, 437, 534, 84, 388, 480,1131, 159, 220, 198, 679,2044,1012, 819,1066,1443, 113,1225, 194, 318,1003,1029,2045,2046,2047,2048,1067,2049,2050, 2051,2052,2053, 59, 913, 112,2054, 632,2055, 455, 144, 739,1291,2056, 273, 681, 499,2057, 448,2058,2059, 760,2060,2061, 970, 384, 169, 245,1132,2062,2063, 414, 1444,2064,2065, 41, 235,2066, 157, 252, 877, 568, 919, 789, 580,2067, 725,2068, 2069,1292,2070,2071,1445,2072,1446,2073,2074, 55, 588, 66,1447, 271,1092,2075, 1226,2076, 960,1013, 372,2077,2078,2079,2080,2081,1293,2082,2083,2084,2085, 850, 2086,2087,2088,2089,2090, 186,2091,1068, 180,2092,2093,2094, 109,1227, 522, 606, 2095, 867,1448,1093, 991,1171, 926, 353,1133,2096, 581,2097,2098,2099,1294,1449, 1450,2100, 596,1172,1014,1228,2101,1451,1295,1173,1229,2102,2103,1296,1134,1452, 949,1135,2104,2105,1094,1453,1454,1455,2106,1095,2107,2108,2109,2110,2111,2112, 2113,2114,2115,2116,2117, 804,2118,2119,1230,1231, 805,1456, 405,1136,2120,2121, 2122,2123,2124, 720, 701,1297, 992,1457, 927,1004,2125,2126,2127,2128,2129,2130, 22, 417,2131, 303,2132, 385,2133, 971, 520, 513,2134,1174, 73,1096, 231, 274, 962,1458, 673,2135,1459,2136, 152,1137,2137,2138,2139,2140,1005,1138,1460,1139, 2141,2142,2143,2144, 11, 374, 844,2145, 154,1232, 46,1461,2146, 838, 830, 721, 1233, 106,2147, 90, 428, 462, 578, 566,1175, 352,2148,2149, 538,1234, 
124,1298, 2150,1462, 761, 565,2151, 686,2152, 649,2153, 72, 173,2154, 460, 415,2155,1463, 2156,1235, 305,2157,2158,2159,2160,2161,2162, 579,2163,2164,2165,2166,2167, 747, 2168,2169,2170,2171,1464, 669,2172,2173,2174,2175,2176,1465,2177, 23, 530, 285, 2178, 335, 729,2179, 397,2180,2181,2182,1030,2183,2184, 698,2185,2186, 325,2187, 2188, 369,2189, 799,1097,1015, 348,2190,1069, 680,2191, 851,1466,2192,2193, 10, 2194, 613, 424,2195, 979, 108, 449, 589, 27, 172, 81,1031, 80, 774, 281, 350, 1032, 525, 301, 582,1176,2196, 674,1045,2197,2198,1467, 730, 762,2199,2200,2201, 2202,1468,2203, 993,2204,2205, 266,1070, 963,1140,2206,2207,2208, 664,1098, 972, 2209,2210,2211,1177,1469,1470, 871,2212,2213,2214,2215,2216,1471,2217,2218,2219, 2220,2221,2222,2223,2224,2225,2226,2227,1472,1236,2228,2229,2230,2231,2232,2233, 2234,2235,1299,2236,2237, 200,2238, 477, 373,2239,2240, 731, 825, 777,2241,2242, 2243, 521, 486, 548,2244,2245,2246,1473,1300, 53, 549, 137, 875, 76, 158,2247, 1301,1474, 469, 396,1016, 278, 712,2248, 321, 442, 503, 767, 744, 941,1237,1178, 1475,2249, 82, 178,1141,1179, 973,2250,1302,2251, 297,2252,2253, 570,2254,2255, 2256, 18, 450, 206,2257, 290, 292,1142,2258, 511, 162, 99, 346, 164, 735,2259, 1476,1477, 4, 554, 343, 798,1099,2260,1100,2261, 43, 171,1303, 139, 215,2262, 2263, 717, 775,2264,1033, 322, 216,2265, 831,2266, 149,2267,1304,2268,2269, 702, 1238, 135, 845, 347, 309,2270, 484,2271, 878, 655, 238,1006,1478,2272, 67,2273, 295,2274,2275, 461,2276, 478, 942, 412,2277,1034,2278,2279,2280, 265,2281, 541, 2282,2283,2284,2285,2286, 70, 852,1071,2287,2288,2289,2290, 21, 56, 509, 117, 432,2291,2292, 331, 980, 552,1101, 148, 284, 105, 393,1180,1239, 755,2293, 187, 2294,1046,1479,2295, 340,2296, 63,1047, 230,2297,2298,1305, 763,1306, 101, 800, 808, 494,2299,2300,2301, 903,2302, 37,1072, 14, 5,2303, 79, 675,2304, 312, 2305,2306,2307,2308,2309,1480, 6,1307,2310,2311,2312, 1, 470, 35, 24, 229, 2313, 695, 210, 86, 778, 15, 784, 592, 779, 32, 77, 855, 964,2314, 259,2315, 501, 380,2316,2317, 83, 981, 153, 689,1308,1481,1482,1483,2318,2319, 716,1484, 2320,2321,2322,2323,2324,2325,1485,2326,2327, 128, 57, 68, 261,1048, 211, 170, 1240, 31,2328, 51, 435, 742,2329,2330,2331, 635,2332, 264, 456,2333,2334,2335, 425,2336,1486, 143, 507, 263, 943,2337, 363, 920,1487, 256,1488,1102, 243, 601, 1489,2338,2339,2340,2341,2342,2343,2344, 861,2345,2346,2347,2348,2349,2350, 395, 2351,1490,1491, 62, 535, 166, 225,2352,2353, 668, 419,1241, 138, 604, 928,2354, 1181,2355,1492,1493,2356,2357,2358,1143,2359, 696,2360, 387, 307,1309, 682, 476, 2361,2362, 332, 12, 222, 156,2363, 232,2364, 641, 276, 656, 517,1494,1495,1035, 416, 736,1496,2365,1017, 586,2366,2367,2368,1497,2369, 242,2370,2371,2372,1498, 2373, 965, 713,2374,2375,2376,2377, 740, 982,1499, 944,1500,1007,2378,2379,1310, 1501,2380,2381,2382, 785, 329,2383,2384,1502,2385,2386,2387, 932,2388,1503,2389, 2390,2391,2392,1242,2393,2394,2395,2396,2397, 994, 950,2398,2399,2400,2401,1504, 1311,2402,2403,2404,2405,1049, 749,2406,2407, 853, 718,1144,1312,2408,1182,1505, 2409,2410, 255, 516, 479, 564, 550, 214,1506,1507,1313, 413, 239, 444, 339,1145, 1036,1508,1509,1314,1037,1510,1315,2411,1511,2412,2413,2414, 176, 703, 497, 624, 593, 921, 302,2415, 341, 165,1103,1512,2416,1513,2417,2418,2419, 376,2420, 700, 2421,2422,2423, 258, 768,1316,2424,1183,2425, 995, 608,2426,2427,2428,2429, 221, 2430,2431,2432,2433,2434,2435,2436,2437, 195, 323, 726, 188, 897, 983,1317, 377, 644,1050, 879,2438, 452,2439,2440,2441,2442,2443,2444, 914,2445,2446,2447,2448, 915, 
489,2449,1514,1184,2450,2451, 515, 64, 427, 495,2452, 583,2453, 483, 485, 1038, 562, 213,1515, 748, 666,2454,2455,2456,2457, 334,2458, 780, 996,1008, 705, 1243,2459,2460,2461,2462,2463, 114,2464, 493,1146, 366, 163,1516, 961,1104,2465, 291,2466,1318,1105,2467,1517, 365,2468, 355, 951,1244,2469,1319,2470, 631,2471, 2472, 218,1320, 364, 320, 756,1518,1519,1321,1520,1322,2473,2474,2475,2476, 997, 2477,2478,2479,2480, 665,1185,2481, 916,1521,2482,2483,2484, 584, 684,2485,2486, 797,2487,1051,1186,2488,2489,2490,1522,2491,2492, 370,2493,1039,1187, 65,2494, 434, 205, 463,1188,2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, 771, 585,2496, 590, 505,1073,2497, 599, 244, 219, 917,1018, 952, 646,1523,2498,1323, 2499,2500, 49, 984, 354, 741,2501, 625,2502,1324,2503,1019, 190, 357, 757, 491, 95, 782, 868,2504,2505,2506,2507,2508,2509, 134,1524,1074, 422,1525, 898,2510, 161,2511,2512,2513,2514, 769,2515,1526,2516,2517, 411,1325,2518, 472,1527,2519, 2520,2521,2522,2523,2524, 985,2525,2526,2527,2528,2529,2530, 764,2531,1245,2532, 2533, 25, 204, 311,2534, 496,2535,1052,2536,2537,2538,2539,2540,2541,2542, 199, 704, 504, 468, 758, 657,1528, 196, 44, 839,1246, 272, 750,2543, 765, 862,2544, 2545,1326,2546, 132, 615, 933,2547, 732,2548,2549,2550,1189,1529,2551, 283,1247, 1053, 607, 929,2552,2553,2554, 930, 183, 872, 616,1040,1147,2555,1148,1020, 441, 249,1075,2556,2557,2558, 466, 743,2559,2560,2561, 92, 514, 426, 420, 526,2562, 2563,2564,2565,2566,2567,2568, 185,2569,2570,2571,2572, 776,1530, 658,2573, 362, 2574, 361, 922,1076, 793,2575,2576,2577,2578,2579,2580,1531, 251,2581,2582,2583, 2584,1532, 54, 612, 237,1327,2585,2586, 275, 408, 647, 111,2587,1533,1106, 465, 3, 458, 9, 38,2588, 107, 110, 890, 209, 26, 737, 498,2589,1534,2590, 431, 202, 88,1535, 356, 287,1107, 660,1149,2591, 381,1536, 986,1150, 445,1248,1151, 974,2592,2593, 846,2594, 446, 953, 184,1249,1250, 727,2595, 923, 193, 883,2596, 2597,2598, 102, 324, 539, 817,2599, 421,1041,2600, 832,2601, 94, 175, 197, 406, 2602, 459,2603,2604,2605,2606,2607, 330, 555,2608,2609,2610, 706,1108, 389,2611, 2612,2613,2614, 233,2615, 833, 558, 931, 954,1251,2616,2617,1537, 546,2618,2619, 1009,2620,2621,2622,1538, 690,1328,2623, 955,2624,1539,2625,2626, 772,2627,2628, 2629,2630,2631, 924, 648, 863, 603,2632,2633, 934,1540, 864, 865,2634, 642,1042, 670,1190,2635,2636,2637,2638, 168,2639, 652, 873, 542,1054,1541,2640,2641,2642, # 512, 256 #Everything below is of no interest for detection purpose 2643,2644,2645,2646,2647,2648,2649,2650,2651,2652,2653,2654,2655,2656,2657,2658, 2659,2660,2661,2662,2663,2664,2665,2666,2667,2668,2669,2670,2671,2672,2673,2674, 2675,2676,2677,2678,2679,2680,2681,2682,2683,2684,2685,2686,2687,2688,2689,2690, 2691,2692,2693,2694,2695,2696,2697,2698,2699,1542, 880,2700,2701,2702,2703,2704, 2705,2706,2707,2708,2709,2710,2711,2712,2713,2714,2715,2716,2717,2718,2719,2720, 2721,2722,2723,2724,2725,1543,2726,2727,2728,2729,2730,2731,2732,1544,2733,2734, 2735,2736,2737,2738,2739,2740,2741,2742,2743,2744,2745,2746,2747,2748,2749,2750, 2751,2752,2753,2754,1545,2755,2756,2757,2758,2759,2760,2761,2762,2763,2764,2765, 2766,1546,2767,1547,2768,2769,2770,2771,2772,2773,2774,2775,2776,2777,2778,2779, 2780,2781,2782,2783,2784,2785,2786,1548,2787,2788,2789,1109,2790,2791,2792,2793, 2794,2795,2796,2797,2798,2799,2800,2801,2802,2803,2804,2805,2806,2807,2808,2809, 2810,2811,2812,1329,2813,2814,2815,2816,2817,2818,2819,2820,2821,2822,2823,2824, 2825,2826,2827,2828,2829,2830,2831,2832,2833,2834,2835,2836,2837,2838,2839,2840, 
2841,2842,2843,2844,2845,2846,2847,2848,2849,2850,2851,2852,2853,2854,2855,2856, 1549,2857,2858,2859,2860,1550,2861,2862,1551,2863,2864,2865,2866,2867,2868,2869, 2870,2871,2872,2873,2874,1110,1330,2875,2876,2877,2878,2879,2880,2881,2882,2883, 2884,2885,2886,2887,2888,2889,2890,2891,2892,2893,2894,2895,2896,2897,2898,2899, 2900,2901,2902,2903,2904,2905,2906,2907,2908,2909,2910,2911,2912,2913,2914,2915, 2916,2917,2918,2919,2920,2921,2922,2923,2924,2925,2926,2927,2928,2929,2930,1331, 2931,2932,2933,2934,2935,2936,2937,2938,2939,2940,2941,2942,2943,1552,2944,2945, 2946,2947,2948,2949,2950,2951,2952,2953,2954,2955,2956,2957,2958,2959,2960,2961, 2962,2963,2964,1252,2965,2966,2967,2968,2969,2970,2971,2972,2973,2974,2975,2976, 2977,2978,2979,2980,2981,2982,2983,2984,2985,2986,2987,2988,2989,2990,2991,2992, 2993,2994,2995,2996,2997,2998,2999,3000,3001,3002,3003,3004,3005,3006,3007,3008, 3009,3010,3011,3012,1553,3013,3014,3015,3016,3017,1554,3018,1332,3019,3020,3021, 3022,3023,3024,3025,3026,3027,3028,3029,3030,3031,3032,3033,3034,3035,3036,3037, 3038,3039,3040,3041,3042,3043,3044,3045,3046,3047,3048,3049,3050,1555,3051,3052, 3053,1556,1557,3054,3055,3056,3057,3058,3059,3060,3061,3062,3063,3064,3065,3066, 3067,1558,3068,3069,3070,3071,3072,3073,3074,3075,3076,1559,3077,3078,3079,3080, 3081,3082,3083,1253,3084,3085,3086,3087,3088,3089,3090,3091,3092,3093,3094,3095, 3096,3097,3098,3099,3100,3101,3102,3103,3104,3105,3106,3107,3108,1152,3109,3110, 3111,3112,3113,1560,3114,3115,3116,3117,1111,3118,3119,3120,3121,3122,3123,3124, 3125,3126,3127,3128,3129,3130,3131,3132,3133,3134,3135,3136,3137,3138,3139,3140, 3141,3142,3143,3144,3145,3146,3147,3148,3149,3150,3151,3152,3153,3154,3155,3156, 3157,3158,3159,3160,3161,3162,3163,3164,3165,3166,3167,3168,3169,3170,3171,3172, 3173,3174,3175,3176,1333,3177,3178,3179,3180,3181,3182,3183,3184,3185,3186,3187, 3188,3189,1561,3190,3191,1334,3192,3193,3194,3195,3196,3197,3198,3199,3200,3201, 3202,3203,3204,3205,3206,3207,3208,3209,3210,3211,3212,3213,3214,3215,3216,3217, 3218,3219,3220,3221,3222,3223,3224,3225,3226,3227,3228,3229,3230,3231,3232,3233, 3234,1562,3235,3236,3237,3238,3239,3240,3241,3242,3243,3244,3245,3246,3247,3248, 3249,3250,3251,3252,3253,3254,3255,3256,3257,3258,3259,3260,3261,3262,3263,3264, 3265,3266,3267,3268,3269,3270,3271,3272,3273,3274,3275,3276,3277,1563,3278,3279, 3280,3281,3282,3283,3284,3285,3286,3287,3288,3289,3290,3291,3292,3293,3294,3295, 3296,3297,3298,3299,3300,3301,3302,3303,3304,3305,3306,3307,3308,3309,3310,3311, 3312,3313,3314,3315,3316,3317,3318,3319,3320,3321,3322,3323,3324,3325,3326,3327, 3328,3329,3330,3331,3332,3333,3334,3335,3336,3337,3338,3339,3340,3341,3342,3343, 3344,3345,3346,3347,3348,3349,3350,3351,3352,3353,3354,3355,3356,3357,3358,3359, 3360,3361,3362,3363,3364,1335,3365,3366,3367,3368,3369,3370,3371,3372,3373,3374, 3375,3376,3377,3378,3379,3380,3381,3382,3383,3384,3385,3386,3387,1336,3388,3389, 3390,3391,3392,3393,3394,3395,3396,3397,3398,3399,3400,3401,3402,3403,3404,3405, 3406,3407,3408,3409,3410,3411,3412,3413,3414,1337,3415,3416,3417,3418,3419,1338, 3420,3421,3422,1564,1565,3423,3424,3425,3426,3427,3428,3429,3430,3431,1254,3432, 3433,3434,1339,3435,3436,3437,3438,3439,1566,3440,3441,3442,3443,3444,3445,3446, 3447,3448,3449,3450,3451,3452,3453,3454,1255,3455,3456,3457,3458,3459,1567,1191, 3460,1568,1569,3461,3462,3463,1570,3464,3465,3466,3467,3468,1571,3469,3470,3471, 3472,3473,1572,3474,3475,3476,3477,3478,3479,3480,3481,3482,3483,3484,3485,3486, 
1340,3487,3488,3489,3490,3491,3492,1021,3493,3494,3495,3496,3497,3498,1573,3499, 1341,3500,3501,3502,3503,3504,3505,3506,3507,3508,3509,3510,3511,1342,3512,3513, 3514,3515,3516,1574,1343,3517,3518,3519,1575,3520,1576,3521,3522,3523,3524,3525, 3526,3527,3528,3529,3530,3531,3532,3533,3534,3535,3536,3537,3538,3539,3540,3541, 3542,3543,3544,3545,3546,3547,3548,3549,3550,3551,3552,3553,3554,3555,3556,3557, 3558,3559,3560,3561,3562,3563,3564,3565,3566,3567,3568,3569,3570,3571,3572,3573, 3574,3575,3576,3577,3578,3579,3580,1577,3581,3582,1578,3583,3584,3585,3586,3587, 3588,3589,3590,3591,3592,3593,3594,3595,3596,3597,3598,3599,3600,3601,3602,3603, 3604,1579,3605,3606,3607,3608,3609,3610,3611,3612,3613,3614,3615,3616,3617,3618, 3619,3620,3621,3622,3623,3624,3625,3626,3627,3628,3629,1580,3630,3631,1581,3632, 3633,3634,3635,3636,3637,3638,3639,3640,3641,3642,3643,3644,3645,3646,3647,3648, 3649,3650,3651,3652,3653,3654,3655,3656,1582,3657,3658,3659,3660,3661,3662,3663, 3664,3665,3666,3667,3668,3669,3670,3671,3672,3673,3674,3675,3676,3677,3678,3679, 3680,3681,3682,3683,3684,3685,3686,3687,3688,3689,3690,3691,3692,3693,3694,3695, 3696,3697,3698,3699,3700,1192,3701,3702,3703,3704,1256,3705,3706,3707,3708,1583, 1257,3709,3710,3711,3712,3713,3714,3715,3716,1584,3717,3718,3719,3720,3721,3722, 3723,3724,3725,3726,3727,3728,3729,3730,3731,3732,3733,3734,3735,3736,3737,3738, 3739,3740,3741,3742,3743,3744,3745,1344,3746,3747,3748,3749,3750,3751,3752,3753, 3754,3755,3756,1585,3757,3758,3759,3760,3761,3762,3763,3764,3765,3766,1586,3767, 3768,3769,3770,3771,3772,3773,3774,3775,3776,3777,3778,1345,3779,3780,3781,3782, 3783,3784,3785,3786,3787,3788,3789,3790,3791,3792,3793,3794,3795,1346,1587,3796, 3797,1588,3798,3799,3800,3801,3802,3803,3804,3805,3806,1347,3807,3808,3809,3810, 3811,1589,3812,3813,3814,3815,3816,3817,3818,3819,3820,3821,1590,3822,3823,1591, 1348,3824,3825,3826,3827,3828,3829,3830,1592,3831,3832,1593,3833,3834,3835,3836, 3837,3838,3839,3840,3841,3842,3843,3844,1349,3845,3846,3847,3848,3849,3850,3851, 3852,3853,3854,3855,3856,3857,3858,1594,3859,3860,3861,3862,3863,3864,3865,3866, 3867,3868,3869,1595,3870,3871,3872,3873,1596,3874,3875,3876,3877,3878,3879,3880, 3881,3882,3883,3884,3885,3886,1597,3887,3888,3889,3890,3891,3892,3893,3894,3895, 1598,3896,3897,3898,1599,1600,3899,1350,3900,1351,3901,3902,1352,3903,3904,3905, 3906,3907,3908,3909,3910,3911,3912,3913,3914,3915,3916,3917,3918,3919,3920,3921, 3922,3923,3924,1258,3925,3926,3927,3928,3929,3930,3931,1193,3932,1601,3933,3934, 3935,3936,3937,3938,3939,3940,3941,3942,3943,1602,3944,3945,3946,3947,3948,1603, 3949,3950,3951,3952,3953,3954,3955,3956,3957,3958,3959,3960,3961,3962,3963,3964, 3965,1604,3966,3967,3968,3969,3970,3971,3972,3973,3974,3975,3976,3977,1353,3978, 3979,3980,3981,3982,3983,3984,3985,3986,3987,3988,3989,3990,3991,1354,3992,3993, 3994,3995,3996,3997,3998,3999,4000,4001,4002,4003,4004,4005,4006,4007,4008,4009, 4010,4011,4012,4013,4014,4015,4016,4017,4018,4019,4020,4021,4022,4023,1355,4024, 4025,4026,4027,4028,4029,4030,4031,4032,4033,4034,4035,4036,4037,4038,4039,4040, 1605,4041,4042,4043,4044,4045,4046,4047,4048,4049,4050,4051,4052,4053,4054,4055, 4056,4057,4058,4059,4060,1606,4061,4062,4063,4064,1607,4065,4066,4067,4068,4069, 4070,4071,4072,4073,4074,4075,4076,1194,4077,4078,1608,4079,4080,4081,4082,4083, 4084,4085,4086,4087,1609,4088,4089,4090,4091,4092,4093,4094,4095,4096,4097,4098, 4099,4100,4101,4102,4103,4104,4105,4106,4107,4108,1259,4109,4110,4111,4112,4113, 
4114,4115,4116,4117,4118,4119,4120,4121,4122,4123,4124,1195,4125,4126,4127,1610, 4128,4129,4130,4131,4132,4133,4134,4135,4136,4137,1356,4138,4139,4140,4141,4142, 4143,4144,1611,4145,4146,4147,4148,4149,4150,4151,4152,4153,4154,4155,4156,4157, 4158,4159,4160,4161,4162,4163,4164,4165,4166,4167,4168,4169,4170,4171,4172,4173, 4174,4175,4176,4177,4178,4179,4180,4181,4182,4183,4184,4185,4186,4187,4188,4189, 4190,4191,4192,4193,4194,4195,4196,4197,4198,4199,4200,4201,4202,4203,4204,4205, 4206,4207,4208,4209,4210,4211,4212,4213,4214,4215,4216,4217,4218,4219,1612,4220, 4221,4222,4223,4224,4225,4226,4227,1357,4228,1613,4229,4230,4231,4232,4233,4234, 4235,4236,4237,4238,4239,4240,4241,4242,4243,1614,4244,4245,4246,4247,4248,4249, 4250,4251,4252,4253,4254,4255,4256,4257,4258,4259,4260,4261,4262,4263,4264,4265, 4266,4267,4268,4269,4270,1196,1358,4271,4272,4273,4274,4275,4276,4277,4278,4279, 4280,4281,4282,4283,4284,4285,4286,4287,1615,4288,4289,4290,4291,4292,4293,4294, 4295,4296,4297,4298,4299,4300,4301,4302,4303,4304,4305,4306,4307,4308,4309,4310, 4311,4312,4313,4314,4315,4316,4317,4318,4319,4320,4321,4322,4323,4324,4325,4326, 4327,4328,4329,4330,4331,4332,4333,4334,1616,4335,4336,4337,4338,4339,4340,4341, 4342,4343,4344,4345,4346,4347,4348,4349,4350,4351,4352,4353,4354,4355,4356,4357, 4358,4359,4360,1617,4361,4362,4363,4364,4365,1618,4366,4367,4368,4369,4370,4371, 4372,4373,4374,4375,4376,4377,4378,4379,4380,4381,4382,4383,4384,4385,4386,4387, 4388,4389,4390,4391,4392,4393,4394,4395,4396,4397,4398,4399,4400,4401,4402,4403, 4404,4405,4406,4407,4408,4409,4410,4411,4412,4413,4414,4415,4416,1619,4417,4418, 4419,4420,4421,4422,4423,4424,4425,1112,4426,4427,4428,4429,4430,1620,4431,4432, 4433,4434,4435,4436,4437,4438,4439,4440,4441,4442,1260,1261,4443,4444,4445,4446, 4447,4448,4449,4450,4451,4452,4453,4454,4455,1359,4456,4457,4458,4459,4460,4461, 4462,4463,4464,4465,1621,4466,4467,4468,4469,4470,4471,4472,4473,4474,4475,4476, 4477,4478,4479,4480,4481,4482,4483,4484,4485,4486,4487,4488,4489,1055,4490,4491, 4492,4493,4494,4495,4496,4497,4498,4499,4500,4501,4502,4503,4504,4505,4506,4507, 4508,4509,4510,4511,4512,4513,4514,4515,4516,4517,4518,1622,4519,4520,4521,1623, 4522,4523,4524,4525,4526,4527,4528,4529,4530,4531,4532,4533,4534,4535,1360,4536, 4537,4538,4539,4540,4541,4542,4543, 975,4544,4545,4546,4547,4548,4549,4550,4551, 4552,4553,4554,4555,4556,4557,4558,4559,4560,4561,4562,4563,4564,4565,4566,4567, 4568,4569,4570,4571,1624,4572,4573,4574,4575,4576,1625,4577,4578,4579,4580,4581, 4582,4583,4584,1626,4585,4586,4587,4588,4589,4590,4591,4592,4593,4594,4595,1627, 4596,4597,4598,4599,4600,4601,4602,4603,4604,4605,4606,4607,4608,4609,4610,4611, 4612,4613,4614,4615,1628,4616,4617,4618,4619,4620,4621,4622,4623,4624,4625,4626, 4627,4628,4629,4630,4631,4632,4633,4634,4635,4636,4637,4638,4639,4640,4641,4642, 4643,4644,4645,4646,4647,4648,4649,1361,4650,4651,4652,4653,4654,4655,4656,4657, 4658,4659,4660,4661,1362,4662,4663,4664,4665,4666,4667,4668,4669,4670,4671,4672, 4673,4674,4675,4676,4677,4678,4679,4680,4681,4682,1629,4683,4684,4685,4686,4687, 1630,4688,4689,4690,4691,1153,4692,4693,4694,1113,4695,4696,4697,4698,4699,4700, 4701,4702,4703,4704,4705,4706,4707,4708,4709,4710,4711,1197,4712,4713,4714,4715, 4716,4717,4718,4719,4720,4721,4722,4723,4724,4725,4726,4727,4728,4729,4730,4731, 4732,4733,4734,4735,1631,4736,1632,4737,4738,4739,4740,4741,4742,4743,4744,1633, 4745,4746,4747,4748,4749,1262,4750,4751,4752,4753,4754,1363,4755,4756,4757,4758, 
4759,4760,4761,4762,4763,4764,4765,4766,4767,4768,1634,4769,4770,4771,4772,4773, 4774,4775,4776,4777,4778,1635,4779,4780,4781,4782,4783,4784,4785,4786,4787,4788, 4789,1636,4790,4791,4792,4793,4794,4795,4796,4797,4798,4799,4800,4801,4802,4803, 4804,4805,4806,1637,4807,4808,4809,1638,4810,4811,4812,4813,4814,4815,4816,4817, 4818,1639,4819,4820,4821,4822,4823,4824,4825,4826,4827,4828,4829,4830,4831,4832, 4833,1077,4834,4835,4836,4837,4838,4839,4840,4841,4842,4843,4844,4845,4846,4847, 4848,4849,4850,4851,4852,4853,4854,4855,4856,4857,4858,4859,4860,4861,4862,4863, 4864,4865,4866,4867,4868,4869,4870,4871,4872,4873,4874,4875,4876,4877,4878,4879, 4880,4881,4882,4883,1640,4884,4885,1641,4886,4887,4888,4889,4890,4891,4892,4893, 4894,4895,4896,4897,4898,4899,4900,4901,4902,4903,4904,4905,4906,4907,4908,4909, 4910,4911,1642,4912,4913,4914,1364,4915,4916,4917,4918,4919,4920,4921,4922,4923, 4924,4925,4926,4927,4928,4929,4930,4931,1643,4932,4933,4934,4935,4936,4937,4938, 4939,4940,4941,4942,4943,4944,4945,4946,4947,4948,4949,4950,4951,4952,4953,4954, 4955,4956,4957,4958,4959,4960,4961,4962,4963,4964,4965,4966,4967,4968,4969,4970, 4971,4972,4973,4974,4975,4976,4977,4978,4979,4980,1644,4981,4982,4983,4984,1645, 4985,4986,1646,4987,4988,4989,4990,4991,4992,4993,4994,4995,4996,4997,4998,4999, 5000,5001,5002,5003,5004,5005,1647,5006,1648,5007,5008,5009,5010,5011,5012,1078, 5013,5014,5015,5016,5017,5018,5019,5020,5021,5022,5023,5024,5025,5026,5027,5028, 1365,5029,5030,5031,5032,5033,5034,5035,5036,5037,5038,5039,1649,5040,5041,5042, 5043,5044,5045,1366,5046,5047,5048,5049,5050,5051,5052,5053,5054,5055,1650,5056, 5057,5058,5059,5060,5061,5062,5063,5064,5065,5066,5067,5068,5069,5070,5071,5072, 5073,5074,5075,5076,5077,1651,5078,5079,5080,5081,5082,5083,5084,5085,5086,5087, 5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102,5103, 5104,5105,5106,5107,5108,5109,5110,1652,5111,5112,5113,5114,5115,5116,5117,5118, 1367,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,1653,5130,5131,5132, 5133,5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148, 5149,1368,5150,1654,5151,1369,5152,5153,5154,5155,5156,5157,5158,5159,5160,5161, 5162,5163,5164,5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,5176,5177, 5178,1370,5179,5180,5181,5182,5183,5184,5185,5186,5187,5188,5189,5190,5191,5192, 5193,5194,5195,5196,5197,5198,1655,5199,5200,5201,5202,1656,5203,5204,5205,5206, 1371,5207,1372,5208,5209,5210,5211,1373,5212,5213,1374,5214,5215,5216,5217,5218, 5219,5220,5221,5222,5223,5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234, 5235,5236,5237,5238,5239,5240,5241,5242,5243,5244,5245,5246,5247,1657,5248,5249, 5250,5251,1658,1263,5252,5253,5254,5255,5256,1375,5257,5258,5259,5260,5261,5262, 5263,5264,5265,5266,5267,5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278, 5279,5280,5281,5282,5283,1659,5284,5285,5286,5287,5288,5289,5290,5291,5292,5293, 5294,5295,5296,5297,5298,5299,5300,1660,5301,5302,5303,5304,5305,5306,5307,5308, 5309,5310,5311,5312,5313,5314,5315,5316,5317,5318,5319,5320,5321,1376,5322,5323, 5324,5325,5326,5327,5328,5329,5330,5331,5332,5333,1198,5334,5335,5336,5337,5338, 5339,5340,5341,5342,5343,1661,5344,5345,5346,5347,5348,5349,5350,5351,5352,5353, 5354,5355,5356,5357,5358,5359,5360,5361,5362,5363,5364,5365,5366,5367,5368,5369, 5370,5371,5372,5373,5374,5375,5376,5377,5378,5379,5380,5381,5382,5383,5384,5385, 5386,5387,5388,5389,5390,5391,5392,5393,5394,5395,5396,5397,5398,1264,5399,5400, 
5401,5402,5403,5404,5405,5406,5407,5408,5409,5410,5411,5412,1662,5413,5414,5415, 5416,1663,5417,5418,5419,5420,5421,5422,5423,5424,5425,5426,5427,5428,5429,5430, 5431,5432,5433,5434,5435,5436,5437,5438,1664,5439,5440,5441,5442,5443,5444,5445, 5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456,5457,5458,5459,5460,5461, 5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472,5473,5474,5475,5476,5477, 5478,1154,5479,5480,5481,5482,5483,5484,5485,1665,5486,5487,5488,5489,5490,5491, 5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504,5505,5506,5507, 5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520,5521,5522,5523, 5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536,5537,5538,5539, 5540,5541,5542,5543,5544,5545,5546,5547,5548,1377,5549,5550,5551,5552,5553,5554, 5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568,5569,5570, 1114,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584,5585, 5586,5587,5588,5589,5590,5591,5592,1378,5593,5594,5595,5596,5597,5598,5599,5600, 5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,1379,5615, 5616,5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631, 5632,5633,5634,1380,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646, 5647,5648,5649,1381,1056,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660, 1666,5661,5662,5663,5664,5665,5666,5667,5668,1667,5669,1668,5670,5671,5672,5673, 5674,5675,5676,5677,5678,1155,5679,5680,5681,5682,5683,5684,5685,5686,5687,5688, 5689,5690,5691,5692,5693,5694,5695,5696,5697,5698,1669,5699,5700,5701,5702,5703, 5704,5705,1670,5706,5707,5708,5709,5710,1671,5711,5712,5713,5714,1382,5715,5716, 5717,5718,5719,5720,5721,5722,5723,5724,5725,1672,5726,5727,1673,1674,5728,5729, 5730,5731,5732,5733,5734,5735,5736,1675,5737,5738,5739,5740,5741,5742,5743,5744, 1676,5745,5746,5747,5748,5749,5750,5751,1383,5752,5753,5754,5755,5756,5757,5758, 5759,5760,5761,5762,5763,5764,5765,5766,5767,5768,1677,5769,5770,5771,5772,5773, 1678,5774,5775,5776, 998,5777,5778,5779,5780,5781,5782,5783,5784,5785,1384,5786, 5787,5788,5789,5790,5791,5792,5793,5794,5795,5796,5797,5798,5799,5800,1679,5801, 5802,5803,1115,1116,5804,5805,5806,5807,5808,5809,5810,5811,5812,5813,5814,5815, 5816,5817,5818,5819,5820,5821,5822,5823,5824,5825,5826,5827,5828,5829,5830,5831, 5832,5833,5834,5835,5836,5837,5838,5839,5840,5841,5842,5843,5844,5845,5846,5847, 5848,5849,5850,5851,5852,5853,5854,5855,1680,5856,5857,5858,5859,5860,5861,5862, 5863,5864,1681,5865,5866,5867,1682,5868,5869,5870,5871,5872,5873,5874,5875,5876, 5877,5878,5879,1683,5880,1684,5881,5882,5883,5884,1685,5885,5886,5887,5888,5889, 5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904,5905, 5906,5907,1686,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920, 5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,1687, 5936,5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951, 5952,1688,1689,5953,1199,5954,5955,5956,5957,5958,5959,5960,5961,1690,5962,5963, 5964,5965,5966,5967,5968,5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979, 5980,5981,1385,5982,1386,5983,5984,5985,5986,5987,5988,5989,5990,5991,5992,5993, 5994,5995,5996,5997,5998,5999,6000,6001,6002,6003,6004,6005,6006,6007,6008,6009, 6010,6011,6012,6013,6014,6015,6016,6017,6018,6019,6020,6021,6022,6023,6024,6025, 6026,6027,1265,6028,6029,1691,6030,6031,6032,6033,6034,6035,6036,6037,6038,6039, 
6040,6041,6042,6043,6044,6045,6046,6047,6048,6049,6050,6051,6052,6053,6054,6055, 6056,6057,6058,6059,6060,6061,6062,6063,6064,6065,6066,6067,6068,6069,6070,6071, 6072,6073,6074,6075,6076,6077,6078,6079,6080,6081,6082,6083,6084,1692,6085,6086, 6087,6088,6089,6090,6091,6092,6093,6094,6095,6096,6097,6098,6099,6100,6101,6102, 6103,6104,6105,6106,6107,6108,6109,6110,6111,6112,6113,6114,6115,6116,6117,6118, 6119,6120,6121,6122,6123,6124,6125,6126,6127,6128,6129,6130,6131,1693,6132,6133, 6134,6135,6136,1694,6137,6138,6139,6140,6141,1695,6142,6143,6144,6145,6146,6147, 6148,6149,6150,6151,6152,6153,6154,6155,6156,6157,6158,6159,6160,6161,6162,6163, 6164,6165,6166,6167,6168,6169,6170,6171,6172,6173,6174,6175,6176,6177,6178,6179, 6180,6181,6182,6183,6184,6185,1696,6186,6187,6188,6189,6190,6191,6192,6193,6194, 6195,6196,6197,6198,6199,6200,6201,6202,6203,6204,6205,6206,6207,6208,6209,6210, 6211,6212,6213,6214,6215,6216,6217,6218,6219,1697,6220,6221,6222,6223,6224,6225, 6226,6227,6228,6229,6230,6231,6232,6233,6234,6235,6236,6237,6238,6239,6240,6241, 6242,6243,6244,6245,6246,6247,6248,6249,6250,6251,6252,6253,1698,6254,6255,6256, 6257,6258,6259,6260,6261,6262,6263,1200,6264,6265,6266,6267,6268,6269,6270,6271, #1024 6272,6273,6274,6275,6276,6277,6278,6279,6280,6281,6282,6283,6284,6285,6286,6287, 6288,6289,6290,6291,6292,6293,6294,6295,6296,6297,6298,6299,6300,6301,6302,1699, 6303,6304,1700,6305,6306,6307,6308,6309,6310,6311,6312,6313,6314,6315,6316,6317, 6318,6319,6320,6321,6322,6323,6324,6325,6326,6327,6328,6329,6330,6331,6332,6333, 6334,6335,6336,6337,6338,6339,1701,6340,6341,6342,6343,6344,1387,6345,6346,6347, 6348,6349,6350,6351,6352,6353,6354,6355,6356,6357,6358,6359,6360,6361,6362,6363, 6364,6365,6366,6367,6368,6369,6370,6371,6372,6373,6374,6375,6376,6377,6378,6379, 6380,6381,6382,6383,6384,6385,6386,6387,6388,6389,6390,6391,6392,6393,6394,6395, 6396,6397,6398,6399,6400,6401,6402,6403,6404,6405,6406,6407,6408,6409,6410,6411, 6412,6413,1702,6414,6415,6416,6417,6418,6419,6420,6421,6422,1703,6423,6424,6425, 6426,6427,6428,6429,6430,6431,6432,6433,6434,6435,6436,6437,6438,1704,6439,6440, 6441,6442,6443,6444,6445,6446,6447,6448,6449,6450,6451,6452,6453,6454,6455,6456, 6457,6458,6459,6460,6461,6462,6463,6464,6465,6466,6467,6468,6469,6470,6471,6472, 6473,6474,6475,6476,6477,6478,6479,6480,6481,6482,6483,6484,6485,6486,6487,6488, 6489,6490,6491,6492,6493,6494,6495,6496,6497,6498,6499,6500,6501,6502,6503,1266, 6504,6505,6506,6507,6508,6509,6510,6511,6512,6513,6514,6515,6516,6517,6518,6519, 6520,6521,6522,6523,6524,6525,6526,6527,6528,6529,6530,6531,6532,6533,6534,6535, 6536,6537,6538,6539,6540,6541,6542,6543,6544,6545,6546,6547,6548,6549,6550,6551, 1705,1706,6552,6553,6554,6555,6556,6557,6558,6559,6560,6561,6562,6563,6564,6565, 6566,6567,6568,6569,6570,6571,6572,6573,6574,6575,6576,6577,6578,6579,6580,6581, 6582,6583,6584,6585,6586,6587,6588,6589,6590,6591,6592,6593,6594,6595,6596,6597, 6598,6599,6600,6601,6602,6603,6604,6605,6606,6607,6608,6609,6610,6611,6612,6613, 6614,6615,6616,6617,6618,6619,6620,6621,6622,6623,6624,6625,6626,6627,6628,6629, 6630,6631,6632,6633,6634,6635,6636,6637,1388,6638,6639,6640,6641,6642,6643,6644, 1707,6645,6646,6647,6648,6649,6650,6651,6652,6653,6654,6655,6656,6657,6658,6659, 6660,6661,6662,6663,1708,6664,6665,6666,6667,6668,6669,6670,6671,6672,6673,6674, 1201,6675,6676,6677,6678,6679,6680,6681,6682,6683,6684,6685,6686,6687,6688,6689, 6690,6691,6692,6693,6694,6695,6696,6697,6698,6699,6700,6701,6702,6703,6704,6705, 
6706,6707,6708,6709,6710,6711,6712,6713,6714,6715,6716,6717,6718,6719,6720,6721, 6722,6723,6724,6725,1389,6726,6727,6728,6729,6730,6731,6732,6733,6734,6735,6736, 1390,1709,6737,6738,6739,6740,6741,6742,1710,6743,6744,6745,6746,1391,6747,6748, 6749,6750,6751,6752,6753,6754,6755,6756,6757,1392,6758,6759,6760,6761,6762,6763, 6764,6765,6766,6767,6768,6769,6770,6771,6772,6773,6774,6775,6776,6777,6778,6779, 6780,1202,6781,6782,6783,6784,6785,6786,6787,6788,6789,6790,6791,6792,6793,6794, 6795,6796,6797,6798,6799,6800,6801,6802,6803,6804,6805,6806,6807,6808,6809,1711, 6810,6811,6812,6813,6814,6815,6816,6817,6818,6819,6820,6821,6822,6823,6824,6825, 6826,6827,6828,6829,6830,6831,6832,6833,6834,6835,6836,1393,6837,6838,6839,6840, 6841,6842,6843,6844,6845,6846,6847,6848,6849,6850,6851,6852,6853,6854,6855,6856, 6857,6858,6859,6860,6861,6862,6863,6864,6865,6866,6867,6868,6869,6870,6871,6872, 6873,6874,6875,6876,6877,6878,6879,6880,6881,6882,6883,6884,6885,6886,6887,6888, 6889,6890,6891,6892,6893,6894,6895,6896,6897,6898,6899,6900,6901,6902,1712,6903, 6904,6905,6906,6907,6908,6909,6910,1713,6911,6912,6913,6914,6915,6916,6917,6918, 6919,6920,6921,6922,6923,6924,6925,6926,6927,6928,6929,6930,6931,6932,6933,6934, 6935,6936,6937,6938,6939,6940,6941,6942,6943,6944,6945,6946,6947,6948,6949,6950, 6951,6952,6953,6954,6955,6956,6957,6958,6959,6960,6961,6962,6963,6964,6965,6966, 6967,6968,6969,6970,6971,6972,6973,6974,1714,6975,6976,6977,6978,6979,6980,6981, 6982,6983,6984,6985,6986,6987,6988,1394,6989,6990,6991,6992,6993,6994,6995,6996, 6997,6998,6999,7000,1715,7001,7002,7003,7004,7005,7006,7007,7008,7009,7010,7011, 7012,7013,7014,7015,7016,7017,7018,7019,7020,7021,7022,7023,7024,7025,7026,7027, 7028,1716,7029,7030,7031,7032,7033,7034,7035,7036,7037,7038,7039,7040,7041,7042, 7043,7044,7045,7046,7047,7048,7049,7050,7051,7052,7053,7054,7055,7056,7057,7058, 7059,7060,7061,7062,7063,7064,7065,7066,7067,7068,7069,7070,7071,7072,7073,7074, 7075,7076,7077,7078,7079,7080,7081,7082,7083,7084,7085,7086,7087,7088,7089,7090, 7091,7092,7093,7094,7095,7096,7097,7098,7099,7100,7101,7102,7103,7104,7105,7106, 7107,7108,7109,7110,7111,7112,7113,7114,7115,7116,7117,7118,7119,7120,7121,7122, 7123,7124,7125,7126,7127,7128,7129,7130,7131,7132,7133,7134,7135,7136,7137,7138, 7139,7140,7141,7142,7143,7144,7145,7146,7147,7148,7149,7150,7151,7152,7153,7154, 7155,7156,7157,7158,7159,7160,7161,7162,7163,7164,7165,7166,7167,7168,7169,7170, 7171,7172,7173,7174,7175,7176,7177,7178,7179,7180,7181,7182,7183,7184,7185,7186, 7187,7188,7189,7190,7191,7192,7193,7194,7195,7196,7197,7198,7199,7200,7201,7202, 7203,7204,7205,7206,7207,1395,7208,7209,7210,7211,7212,7213,1717,7214,7215,7216, 7217,7218,7219,7220,7221,7222,7223,7224,7225,7226,7227,7228,7229,7230,7231,7232, 7233,7234,7235,7236,7237,7238,7239,7240,7241,7242,7243,7244,7245,7246,7247,7248, 7249,7250,7251,7252,7253,7254,7255,7256,7257,7258,7259,7260,7261,7262,7263,7264, 7265,7266,7267,7268,7269,7270,7271,7272,7273,7274,7275,7276,7277,7278,7279,7280, 7281,7282,7283,7284,7285,7286,7287,7288,7289,7290,7291,7292,7293,7294,7295,7296, 7297,7298,7299,7300,7301,7302,7303,7304,7305,7306,7307,7308,7309,7310,7311,7312, 7313,1718,7314,7315,7316,7317,7318,7319,7320,7321,7322,7323,7324,7325,7326,7327, 7328,7329,7330,7331,7332,7333,7334,7335,7336,7337,7338,7339,7340,7341,7342,7343, 7344,7345,7346,7347,7348,7349,7350,7351,7352,7353,7354,7355,7356,7357,7358,7359, 7360,7361,7362,7363,7364,7365,7366,7367,7368,7369,7370,7371,7372,7373,7374,7375, 
7376,7377,7378,7379,7380,7381,7382,7383,7384,7385,7386,7387,7388,7389,7390,7391, 7392,7393,7394,7395,7396,7397,7398,7399,7400,7401,7402,7403,7404,7405,7406,7407, 7408,7409,7410,7411,7412,7413,7414,7415,7416,7417,7418,7419,7420,7421,7422,7423, 7424,7425,7426,7427,7428,7429,7430,7431,7432,7433,7434,7435,7436,7437,7438,7439, 7440,7441,7442,7443,7444,7445,7446,7447,7448,7449,7450,7451,7452,7453,7454,7455, 7456,7457,7458,7459,7460,7461,7462,7463,7464,7465,7466,7467,7468,7469,7470,7471, 7472,7473,7474,7475,7476,7477,7478,7479,7480,7481,7482,7483,7484,7485,7486,7487, 7488,7489,7490,7491,7492,7493,7494,7495,7496,7497,7498,7499,7500,7501,7502,7503, 7504,7505,7506,7507,7508,7509,7510,7511,7512,7513,7514,7515,7516,7517,7518,7519, 7520,7521,7522,7523,7524,7525,7526,7527,7528,7529,7530,7531,7532,7533,7534,7535, 7536,7537,7538,7539,7540,7541,7542,7543,7544,7545,7546,7547,7548,7549,7550,7551, 7552,7553,7554,7555,7556,7557,7558,7559,7560,7561,7562,7563,7564,7565,7566,7567, 7568,7569,7570,7571,7572,7573,7574,7575,7576,7577,7578,7579,7580,7581,7582,7583, 7584,7585,7586,7587,7588,7589,7590,7591,7592,7593,7594,7595,7596,7597,7598,7599, 7600,7601,7602,7603,7604,7605,7606,7607,7608,7609,7610,7611,7612,7613,7614,7615, 7616,7617,7618,7619,7620,7621,7622,7623,7624,7625,7626,7627,7628,7629,7630,7631, 7632,7633,7634,7635,7636,7637,7638,7639,7640,7641,7642,7643,7644,7645,7646,7647, 7648,7649,7650,7651,7652,7653,7654,7655,7656,7657,7658,7659,7660,7661,7662,7663, 7664,7665,7666,7667,7668,7669,7670,7671,7672,7673,7674,7675,7676,7677,7678,7679, 7680,7681,7682,7683,7684,7685,7686,7687,7688,7689,7690,7691,7692,7693,7694,7695, 7696,7697,7698,7699,7700,7701,7702,7703,7704,7705,7706,7707,7708,7709,7710,7711, 7712,7713,7714,7715,7716,7717,7718,7719,7720,7721,7722,7723,7724,7725,7726,7727, 7728,7729,7730,7731,7732,7733,7734,7735,7736,7737,7738,7739,7740,7741,7742,7743, 7744,7745,7746,7747,7748,7749,7750,7751,7752,7753,7754,7755,7756,7757,7758,7759, 7760,7761,7762,7763,7764,7765,7766,7767,7768,7769,7770,7771,7772,7773,7774,7775, 7776,7777,7778,7779,7780,7781,7782,7783,7784,7785,7786,7787,7788,7789,7790,7791, 7792,7793,7794,7795,7796,7797,7798,7799,7800,7801,7802,7803,7804,7805,7806,7807, 7808,7809,7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823, 7824,7825,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839, 7840,7841,7842,7843,7844,7845,7846,7847,7848,7849,7850,7851,7852,7853,7854,7855, 7856,7857,7858,7859,7860,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870,7871, 7872,7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886,7887, 7888,7889,7890,7891,7892,7893,7894,7895,7896,7897,7898,7899,7900,7901,7902,7903, 7904,7905,7906,7907,7908,7909,7910,7911,7912,7913,7914,7915,7916,7917,7918,7919, 7920,7921,7922,7923,7924,7925,7926,7927,7928,7929,7930,7931,7932,7933,7934,7935, 7936,7937,7938,7939,7940,7941,7942,7943,7944,7945,7946,7947,7948,7949,7950,7951, 7952,7953,7954,7955,7956,7957,7958,7959,7960,7961,7962,7963,7964,7965,7966,7967, 7968,7969,7970,7971,7972,7973,7974,7975,7976,7977,7978,7979,7980,7981,7982,7983, 7984,7985,7986,7987,7988,7989,7990,7991,7992,7993,7994,7995,7996,7997,7998,7999, 8000,8001,8002,8003,8004,8005,8006,8007,8008,8009,8010,8011,8012,8013,8014,8015, 8016,8017,8018,8019,8020,8021,8022,8023,8024,8025,8026,8027,8028,8029,8030,8031, 8032,8033,8034,8035,8036,8037,8038,8039,8040,8041,8042,8043,8044,8045,8046,8047, 8048,8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063, 
8064,8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079, 8080,8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095, 8096,8097,8098,8099,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110,8111, 8112,8113,8114,8115,8116,8117,8118,8119,8120,8121,8122,8123,8124,8125,8126,8127, 8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141,8142,8143, 8144,8145,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155,8156,8157,8158,8159, 8160,8161,8162,8163,8164,8165,8166,8167,8168,8169,8170,8171,8172,8173,8174,8175, 8176,8177,8178,8179,8180,8181,8182,8183,8184,8185,8186,8187,8188,8189,8190,8191, 8192,8193,8194,8195,8196,8197,8198,8199,8200,8201,8202,8203,8204,8205,8206,8207, 8208,8209,8210,8211,8212,8213,8214,8215,8216,8217,8218,8219,8220,8221,8222,8223, 8224,8225,8226,8227,8228,8229,8230,8231,8232,8233,8234,8235,8236,8237,8238,8239, 8240,8241,8242,8243,8244,8245,8246,8247,8248,8249,8250,8251,8252,8253,8254,8255, 8256,8257,8258,8259,8260,8261,8262,8263,8264,8265,8266,8267,8268,8269,8270,8271, 8272,8273,8274,8275,8276,8277,8278,8279,8280,8281,8282,8283,8284,8285,8286,8287, 8288,8289,8290,8291,8292,8293,8294,8295,8296,8297,8298,8299,8300,8301,8302,8303, 8304,8305,8306,8307,8308,8309,8310,8311,8312,8313,8314,8315,8316,8317,8318,8319, 8320,8321,8322,8323,8324,8325,8326,8327,8328,8329,8330,8331,8332,8333,8334,8335, 8336,8337,8338,8339,8340,8341,8342,8343,8344,8345,8346,8347,8348,8349,8350,8351, 8352,8353,8354,8355,8356,8357,8358,8359,8360,8361,8362,8363,8364,8365,8366,8367, 8368,8369,8370,8371,8372,8373,8374,8375,8376,8377,8378,8379,8380,8381,8382,8383, 8384,8385,8386,8387,8388,8389,8390,8391,8392,8393,8394,8395,8396,8397,8398,8399, 8400,8401,8402,8403,8404,8405,8406,8407,8408,8409,8410,8411,8412,8413,8414,8415, 8416,8417,8418,8419,8420,8421,8422,8423,8424,8425,8426,8427,8428,8429,8430,8431, 8432,8433,8434,8435,8436,8437,8438,8439,8440,8441,8442,8443,8444,8445,8446,8447, 8448,8449,8450,8451,8452,8453,8454,8455,8456,8457,8458,8459,8460,8461,8462,8463, 8464,8465,8466,8467,8468,8469,8470,8471,8472,8473,8474,8475,8476,8477,8478,8479, 8480,8481,8482,8483,8484,8485,8486,8487,8488,8489,8490,8491,8492,8493,8494,8495, 8496,8497,8498,8499,8500,8501,8502,8503,8504,8505,8506,8507,8508,8509,8510,8511, 8512,8513,8514,8515,8516,8517,8518,8519,8520,8521,8522,8523,8524,8525,8526,8527, 8528,8529,8530,8531,8532,8533,8534,8535,8536,8537,8538,8539,8540,8541,8542,8543, 8544,8545,8546,8547,8548,8549,8550,8551,8552,8553,8554,8555,8556,8557,8558,8559, 8560,8561,8562,8563,8564,8565,8566,8567,8568,8569,8570,8571,8572,8573,8574,8575, 8576,8577,8578,8579,8580,8581,8582,8583,8584,8585,8586,8587,8588,8589,8590,8591, 8592,8593,8594,8595,8596,8597,8598,8599,8600,8601,8602,8603,8604,8605,8606,8607, 8608,8609,8610,8611,8612,8613,8614,8615,8616,8617,8618,8619,8620,8621,8622,8623, 8624,8625,8626,8627,8628,8629,8630,8631,8632,8633,8634,8635,8636,8637,8638,8639, 8640,8641,8642,8643,8644,8645,8646,8647,8648,8649,8650,8651,8652,8653,8654,8655, 8656,8657,8658,8659,8660,8661,8662,8663,8664,8665,8666,8667,8668,8669,8670,8671, 8672,8673,8674,8675,8676,8677,8678,8679,8680,8681,8682,8683,8684,8685,8686,8687, 8688,8689,8690,8691,8692,8693,8694,8695,8696,8697,8698,8699,8700,8701,8702,8703, 8704,8705,8706,8707,8708,8709,8710,8711,8712,8713,8714,8715,8716,8717,8718,8719, 8720,8721,8722,8723,8724,8725,8726,8727,8728,8729,8730,8731,8732,8733,8734,8735, 8736,8737,8738,8739,8740,8741) # flake8: noqa import random buildings = [] current_turn = 0 queued_shipments = [] MAX_SHIPMENT = 10 from fractions 
import Fraction messages = [] def queue_shipment(source, amount, target, turns): messages.append("Shipping {0} from {1} to {2}".format(amount, source.name, target.name)) queued_shipments.append((amount, target, current_turn + turns)) source.level -= amount target.inflight += amount class ScreenClearer: def __repr__(self): import os os.system('cls' if os.name == 'nt' else 'clear') return '' cls = ScreenClearer() class Building: def __init__(self, name): self.name = name self.level = 0 self.usage = 0 self.position = 0 self.inflight = 0 self.warehouse = False self.generation = None self._period = 0 self.operating = False self._demand_bias = 0 self.capacity = 500 @property def demand(self): if self.warehouse: return 25 source = None for building in buildings: if building.warehouse: source = building break else: # Guess! return 3*self.usage + self._demand_bias return int(self.usage * (3 + abs(self.position - source.position)//3) * 1.6) + self._demand_bias def tick(self, n): self.operating = True if self.generation is not None: if self.level >= self.usage: self.level -= self.usage self._period += 1 (production, period) = self.generation if self._period > period: self.level += production self._period = 0 messages.append("Produced {0} at {1}".format(production, self.name)) else: self.operating = False else: if self.warehouse and self.level < self.usage: print("Out of food.") exit(0) elif self.level >= self.usage: self.level -= self.usage else: self.operating = False if not self.operating and random.random() < 0.35: self._demand_bias += 1 if self.operating and self._demand_bias > 0 and random.random() < 0.002: self._demand_bias -= 1 if self.level > self.capacity: messages.append("{0} dumping {1} units due to overcapacity".format(self.name, self.level - self.capacity)) self.level = self.capacity if self.level <= self.demand: return possible_targets = [] for bld in buildings: if bld is self: continue if random.random() < 0.65: possible_targets.append(bld) targets = list(sorted(possible_targets, key = lambda x: abs(self.position - x.position))) for potential in targets: if potential.level + potential.inflight < potential.demand: # ship to them amount = min(self.level - self.demand, int((potential.demand - potential.level) * 1.5), MAX_SHIPMENT) queue_shipment(self, amount, potential, abs(potential.position - self.position) // 3) break else: if random.random() < 0.3: # ship to a warehouse for potential in targets: if potential.warehouse: amount = min(self.level - self.demand, MAX_SHIPMENT) queue_shipment(self, amount, potential, abs(potential.position - self.position) // 3) break hq = Building('HQ') hq.level = 30 hq.usage = 1 hq.warehouse = True hq.position = 0 farm1 = Building('Farm') farm1.generation = (10, 7) farm1.position = 6 farm2 = Building('Farm') farm2.level = 300 farm2.position = -10 farm2.generation = (10, 7) farm3 = Building('Farm') farm3.position = -22 farm3.generation = (10, 7) farm4 = Building('Pig Farm') farm4.position = -44 farm4.generation = (3, 1) passive = Building('Forager') passive.position = -70 passive.generation = (1, 5) workhouse = Building('Workhouse') workhouse.position = 40 workhouse.usage = 2 forester = Building('Forester') forester.position = 4 forester.usage = 1 woodcutter = Building('Woodcutter') woodcutter.position = 6 woodcutter.usage = 1 buildings.extend([hq, farm1, farm2, farm3, farm4, passive, workhouse, forester, woodcutter]) import sys import time while True: print(cls) # Calculate totals total_demand = 0 total_supply = 0 for bld in buildings: total_demand += 
bld.usage if bld.generation is not None: production, period = bld.generation total_supply += Fraction(production, period) if total_supply == total_demand: print("INFO: Supply matches demand.") else: if total_supply > total_demand: print("WARNING: supply exceeds demand, will stockpile until eternity") elif total_supply < total_demand: print("WARNING: demand exceeds supply, will starve") print("Supply: {0}".format(float(total_supply))) print("Demand: {0}".format(float(total_demand))) # process deliveries new_deliveries = [] for (amount, target, due) in queued_shipments: if due <= current_turn: target.level += amount target.inflight -= amount else: new_deliveries.append((amount, target, due)) queued_shipments = new_deliveries # tick buildings for building in buildings: building.tick(current_turn) # display for building in buildings: print("{0}{2}\t\t{1}\t[demand = {3}]".format(building.name, building.level, '' if building.operating else '[x]', building.demand)) for message in messages: print(message) messages.clear() # increment turn counter current_turn += 1 # Sleep sys.stdout.flush() time.sleep(0.05) #! /usr/bin/env python """Keywords (from "graminit.c") This file is automatically generated; please don't muck it up! To update the symbols in this file, 'cd' to the top directory of the python source tree after building the interpreter and run: python Lib/keyword.py """ __all__ = ["iskeyword", "kwlist"] kwlist = [ #--start keywords-- 'and', 'as', 'assert', 'break', 'class', 'continue', 'def', 'del', 'elif', 'else', 'except', 'exec', 'finally', 'for', 'from', 'global', 'if', 'import', 'in', 'is', 'lambda', 'not', 'or', 'pass', 'print', 'raise', 'return', 'try', 'while', 'with', 'yield', #--end keywords-- ] iskeyword = frozenset(kwlist).__contains__ def main(): import sys, re args = sys.argv[1:] iptfile = args and args[0] or "Python/graminit.c" if len(args) > 1: optfile = args[1] else: optfile = "Lib/keyword.py" # scan the source file for keywords fp = open(iptfile) strprog = re.compile('"([^"]+)"') lines = [] while 1: line = fp.readline() if not line: break if '{1, "' in line: match = strprog.search(line) if match: lines.append(" '" + match.group(1) + "',\n") fp.close() lines.sort() # load the output skeleton from the target fp = open(optfile) format = fp.readlines() fp.close() # insert the lines of keywords try: start = format.index("#--start keywords--\n") + 1 end = format.index("#--end keywords--\n") format[start:end] = lines except ValueError: sys.stderr.write("target does not contain format markers\n") sys.exit(1) # write the output file fp = open(optfile, 'w') fp.write(''.join(format)) fp.close() if __name__ == "__main__": main() #!/usr/bin/env python # # Copyright 2008, Google Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. 
# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """A script to prepare version informtion for use the gtest Info.plist file. This script extracts the version information from the configure.ac file and uses it to generate a header file containing the same information. The #defines in this header file will be included in during the generation of the Info.plist of the framework, giving the correct value to the version shown in the Finder. This script makes the following assumptions (these are faults of the script, not problems with the Autoconf): 1. The AC_INIT macro will be contained within the first 1024 characters of configure.ac 2. The version string will be 3 integers separated by periods and will be surrounded by squre brackets, "[" and "]" (e.g. [1.0.1]). The first segment represents the major version, the second represents the minor version and the third represents the fix version. 3. No ")" character exists between the opening "(" and closing ")" of AC_INIT, including in comments and character strings. """ import sys import re # Read the command line argument (the output directory for Version.h) if (len(sys.argv) < 3): print "Usage: versiongenerate.py input_dir output_dir" sys.exit(1) else: input_dir = sys.argv[1] output_dir = sys.argv[2] # Read the first 1024 characters of the configure.ac file config_file = open("%s/configure.ac" % input_dir, 'r') buffer_size = 1024 opening_string = config_file.read(buffer_size) config_file.close() # Extract the version string from the AC_INIT macro # The following init_expression means: # Extract three integers separated by periods and surrounded by squre # brackets(e.g. "[1.0.1]") between "AC_INIT(" and ")". Do not be greedy # (*? is the non-greedy flag) since that would pull in everything between # the first "(" and the last ")" in the file. version_expression = re.compile(r"AC_INIT\(.*?\[(\d+)\.(\d+)\.(\d+)\].*?\)", re.DOTALL) version_values = version_expression.search(opening_string) major_version = version_values.group(1) minor_version = version_values.group(2) fix_version = version_values.group(3) # Write the version information to a header file to be included in the # Info.plist file. file_data = """// // DO NOT MODIFY THIS FILE (but you can delete it) // // This file is autogenerated by the versiongenerate.py script. This script // is executed in a "Run Script" build phase when creating gtest.framework. This // header file is not used during compilation of C-source. Rather, it simply // defines some version strings for substitution in the Info.plist. Because of // this, we are not not restricted to C-syntax nor are we using include guards. 
// #define GTEST_VERSIONINFO_SHORT %s.%s #define GTEST_VERSIONINFO_LONG %s.%s.%s """ % (major_version, minor_version, major_version, minor_version, fix_version) version_file = open("%s/Version.h" % output_dir, 'w') version_file.write(file_data) version_file.close() from __future__ import absolute_import, unicode_literals from django.template.loader import render_to_string from wagtail.wagtailcore import hooks from wagtail.wagtailcore.models import Page, Site class SummaryItem(object): order = 100 def __init__(self, request): self.request = request def get_context(self): return {} def render(self): return render_to_string(self.template, self.get_context(), request=self.request) class PagesSummaryItem(SummaryItem): order = 100 template = 'wagtailadmin/home/site_summary_pages.html' def get_context(self): # If there is a single site, link to the homepage of that site # Otherwise, if there are multiple sites, link to the root page try: site = Site.objects.get() root = site.root_page single_site = True except (Site.DoesNotExist, Site.MultipleObjectsReturned): root = None single_site = False return { 'single_site': single_site, 'root_page': root, 'total_pages': Page.objects.count() - 1, # subtract 1 because the root node is not a real page } @hooks.register('construct_homepage_summary_items') def add_pages_summary_item(request, items): items.append(PagesSummaryItem(request)) class SiteSummaryPanel(object): name = 'site_summary' order = 100 def __init__(self, request): self.request = request self.summary_items = [] for fn in hooks.get_hooks('construct_homepage_summary_items'): fn(request, self.summary_items) def render(self): return render_to_string('wagtailadmin/home/site_summary.html', { 'summary_items': sorted(self.summary_items, key=lambda p: p.order), }, request=self.request) from __future__ import print_function import unittest from test import test_support as support import os import sys # Setup bsddb warnings try: bsddb = support.import_module('bsddb', deprecated=True) except unittest.SkipTest: pass class NoAll(RuntimeError): pass class FailedImport(RuntimeError): pass class AllTest(unittest.TestCase): def check_all(self, modname): names = {} with support.check_warnings((".* (module|package)", DeprecationWarning), quiet=True): try: exec "import %s" % modname in names except: # Silent fail here seems the best route since some modules # may not be available or not initialize properly in all # environments. 
raise FailedImport(modname) if not hasattr(sys.modules[modname], "__all__"): raise NoAll(modname) names = {} try: exec "from %s import *" % modname in names except Exception as e: # Include the module name in the exception string self.fail("__all__ failure in {}: {}: {}".format( modname, e.__class__.__name__, e)) if "__builtins__" in names: del names["__builtins__"] keys = set(names) all = set(sys.modules[modname].__all__) self.assertEqual(keys, all) def walk_modules(self, basedir, modpath): for fn in sorted(os.listdir(basedir)): path = os.path.join(basedir, fn) if os.path.isdir(path): pkg_init = os.path.join(path, '__init__.py') if os.path.exists(pkg_init): yield pkg_init, modpath + fn for p, m in self.walk_modules(path, modpath + fn + "."): yield p, m continue if not fn.endswith('.py') or fn == '__init__.py': continue yield path, modpath + fn[:-3] def test_all(self): # Blacklisted modules and packages blacklist = set([ # Will raise a SyntaxError when compiling the exec statement '__future__', ]) if not sys.platform.startswith('java'): # In case _socket fails to build, make this test fail more gracefully # than an AttributeError somewhere deep in CGIHTTPServer. import _socket # rlcompleter needs special consideration; it import readline which # initializes GNU readline which calls setlocale(LC_CTYPE, "")... :-( try: import rlcompleter import locale except ImportError: pass else: locale.setlocale(locale.LC_CTYPE, 'C') ignored = [] failed_imports = [] lib_dir = os.path.dirname(os.path.dirname(__file__)) for path, modname in self.walk_modules(lib_dir, ""): m = modname blacklisted = False while m: if m in blacklist: blacklisted = True break m = m.rpartition('.')[0] if blacklisted: continue if support.verbose: print(modname) try: # This heuristic speeds up the process by removing, de facto, # most test modules (and avoiding the auto-executing ones). with open(path, "rb") as f: if "__all__" not in f.read(): raise NoAll(modname) self.check_all(modname) except NoAll: ignored.append(modname) except FailedImport: failed_imports.append(modname) if support.verbose: print('Following modules have no __all__ and have been ignored:', ignored) print('Following modules failed to be imported:', failed_imports) def test_main(): support.run_unittest(AllTest) if __name__ == "__main__": test_main() """Parser for command line options. This module helps scripts to parse the command line arguments in sys.argv. It supports the same conventions as the Unix getopt() function (including the special meanings of arguments of the form `-' and `--'). Long options similar to those supported by GNU software may be used as well via an optional third argument. This module provides two functions and an exception: getopt() -- Parse command line options gnu_getopt() -- Like getopt(), but allow option and non-option arguments to be intermixed. GetoptError -- exception (class) raised with 'opt' attribute, which is the option involved with the exception. """ # Long option support added by Lars Wirzenius . # # Gerrit Holl moved the string-based exceptions # to class-based exceptions. # # Peter Åstrand added gnu_getopt(). 
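#
# For example, a call like the following would be expected to return a list of
# (option, value) pairs plus the remaining, unparsed arguments (values here are
# illustrative only):
#
#     >>> getopt(['-a', '1', '--beta', 'rest'], 'a:b', ['alpha=', 'beta'])
#     ([('-a', '1'), ('--beta', '')], ['rest'])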
# # TODO for gnu_getopt(): # # - GNU getopt_long_only mechanism # - allow the caller to specify ordering # - RETURN_IN_ORDER option # - GNU extension with '-' as first character of option string # - optional arguments, specified by double colons # - a option string with a W followed by semicolon should # treat "-W foo" as "--foo" __all__ = ["GetoptError","error","getopt","gnu_getopt"] import os try: from gettext import gettext as _ except ImportError: # Bootstrapping Python: gettext's dependencies not built yet def _(s): return s class GetoptError(Exception): opt = '' msg = '' def __init__(self, msg, opt=''): self.msg = msg self.opt = opt Exception.__init__(self, msg, opt) def __str__(self): return self.msg error = GetoptError # backward compatibility def getopt(args, shortopts, longopts = []): """getopt(args, options[, long_options]) -> opts, args Parses command line options and parameter list. args is the argument list to be parsed, without the leading reference to the running program. Typically, this means "sys.argv[1:]". shortopts is the string of option letters that the script wants to recognize, with options that require an argument followed by a colon (i.e., the same format that Unix getopt() uses). If specified, longopts is a list of strings with the names of the long options which should be supported. The leading '--' characters should not be included in the option name. Options which require an argument should be followed by an equal sign ('='). The return value consists of two elements: the first is a list of (option, value) pairs; the second is the list of program arguments left after the option list was stripped (this is a trailing slice of the first argument). Each option-and-value pair returned has the option as its first element, prefixed with a hyphen (e.g., '-x'), and the option argument as its second element, or an empty string if the option has no argument. The options occur in the list in the same order in which they were found, thus allowing multiple occurrences. Long and short options may be mixed. """ opts = [] if type(longopts) == type(""): longopts = [longopts] else: longopts = list(longopts) while args and args[0].startswith('-') and args[0] != '-': if args[0] == '--': args = args[1:] break if args[0].startswith('--'): opts, args = do_longs(opts, args[0][2:], longopts, args[1:]) else: opts, args = do_shorts(opts, args[0][1:], shortopts, args[1:]) return opts, args def gnu_getopt(args, shortopts, longopts = []): """getopt(args, options[, long_options]) -> opts, args This function works like getopt(), except that GNU style scanning mode is used by default. This means that option and non-option arguments may be intermixed. The getopt() function stops processing options as soon as a non-option argument is encountered. If the first character of the option string is `+', or if the environment variable POSIXLY_CORRECT is set, then option processing stops as soon as a non-option argument is encountered. """ opts = [] prog_args = [] if isinstance(longopts, str): longopts = [longopts] else: longopts = list(longopts) # Allow options after non-option arguments? 
    if shortopts.startswith('+'):
        shortopts = shortopts[1:]
        all_options_first = True
    elif os.environ.get("POSIXLY_CORRECT"):
        all_options_first = True
    else:
        all_options_first = False

    while args:
        if args[0] == '--':
            prog_args += args[1:]
            break

        if args[0][:2] == '--':
            opts, args = do_longs(opts, args[0][2:], longopts, args[1:])
        elif args[0][:1] == '-' and args[0] != '-':
            opts, args = do_shorts(opts, args[0][1:], shortopts, args[1:])
        else:
            if all_options_first:
                prog_args += args
                break
            else:
                prog_args.append(args[0])
                args = args[1:]

    return opts, prog_args

def do_longs(opts, opt, longopts, args):
    try:
        i = opt.index('=')
    except ValueError:
        optarg = None
    else:
        opt, optarg = opt[:i], opt[i+1:]

    has_arg, opt = long_has_args(opt, longopts)
    if has_arg:
        if optarg is None:
            if not args:
                raise GetoptError(_('option --%s requires argument') % opt, opt)
            optarg, args = args[0], args[1:]
    elif optarg is not None:
        raise GetoptError(_('option --%s must not have an argument') % opt, opt)
    opts.append(('--' + opt, optarg or ''))
    return opts, args

# Return:
#   has_arg?
#   full option name
def long_has_args(opt, longopts):
    possibilities = [o for o in longopts if o.startswith(opt)]
    if not possibilities:
        raise GetoptError(_('option --%s not recognized') % opt, opt)
    # Is there an exact match?
    if opt in possibilities:
        return False, opt
    elif opt + '=' in possibilities:
        return True, opt
    # No exact match, so better be unique.
    if len(possibilities) > 1:
        # XXX since possibilities contains all valid continuations, might be
        # nice to work them into the error msg
        raise GetoptError(_('option --%s not a unique prefix') % opt, opt)
    assert len(possibilities) == 1
    unique_match = possibilities[0]
    has_arg = unique_match.endswith('=')
    if has_arg:
        unique_match = unique_match[:-1]
    return has_arg, unique_match

def do_shorts(opts, optstring, shortopts, args):
    while optstring != '':
        opt, optstring = optstring[0], optstring[1:]
        if short_has_arg(opt, shortopts):
            if optstring == '':
                if not args:
                    raise GetoptError(_('option -%s requires argument') % opt, opt)
                optstring, args = args[0], args[1:]
            optarg, optstring = optstring, ''
        else:
            optarg = ''
        opts.append(('-' + opt, optarg))
    return opts, args

def short_has_arg(opt, shortopts):
    for i in range(len(shortopts)):
        if opt == shortopts[i] != ':':
            return shortopts.startswith(':', i+1)
    raise GetoptError(_('option -%s not recognized') % opt, opt)

if __name__ == '__main__':
    import sys
    print(getopt(sys.argv[1:], "a:b", ["alpha=", "beta"]))

# Assignment:
#########
#
# Implement the following tasks:
#
# - swap(a,b), which exchanges the two elements
# - Write a function that prints 1,-1,1,-1 …
# - Write a function that prints the result of the operation (−1)^k for k=0,…,100.
#   Think about the speed of the computation and propose alternative approaches
# - Write a function min(a,b) that returns the minimum of the two elements
# - Write a function max(a,b)
# - Write a function area(radius) that computes the area of a circle with the given radius
# - Write a function d2r(angle) that converts degrees to radians
# - Write a function r2d(angle) that converts radians to degrees
# - Write a function normalize(angle) that maps the given angle (in radians)
#   into the interval <0,2π)
# - Write a function that prints an array:
#   - using a for loop
#   - using a while loop
###############################################################################

import math

def swap( a, b ):
    """ Swaps the elements a and b. """
    # Note: this only rebinds the local names; the caller's variables are
    # unchanged.  In Python the usual idiom is ``a, b = b, a`` at the call site.
    a,b = b,a

def plusminus():
    """ To infinity and beyond: prints 1, -1, 1, ...
""" i = 1 while True: print(i) i = -i def weirdo( iterations = 100 ): """ Vypisuje výsledek (-1)^k. Teoreticky pomalejší verze. """ for i in range( iterations+1 ): print( (-1)^i ) def weirdo2( iterations = 100): """ Vypisuje výsledek (-1)^k. Teoreticky rychlejší verze. """ for i in range( iterations+1 ): if i%2 == 0: print( 1 ) else: print( -1 ) def min( a, b ): """ Vrátí menší ze dvou hodnot. """ return a if a < b else b def max( a, b ): """ Vrátí větší ze dvou hodnot """ return a if a > b else b def area( radius ): """ Vypočítá obsah kruhu. """ return math.pi * radius ** 2 def d2r( angle ): """ Převede stupně na radiány """ return angle * ( math.pi/180 ) def r2d( angle ): """ Převede radiány na stupně """ return angle * (180/math.pi) def normalize( angle ): """ Převede zadané radiany na interval <0, 2pi) """ return angle%(2**math.pi) def printArray( array ): """ Vypíše prvky pole pomocí for """ for element in array: print( element ) def printArray2( array ): """ Vypíše prvky pole pomocí while """ i = 0 while i < len(array): print( i ) i += 1 #!/usr/bin/env python # Author: Zion Orent # Copyright (c) 2015 Intel Corporation. # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
from __future__ import print_function import time, sys, signal, atexit from upm import pyupm_rpr220 as upmRpr220 def main(): # This example uses a simple method to determine current status # Instantiate an RPR220 digital pin D2 # This was tested on the Grove IR Reflective Sensor myReflectiveSensor = upmRpr220.RPR220(2) ## Exit handlers ## # This stops python from printing a stacktrace when you hit control-C def SIGINTHandler(signum, frame): raise SystemExit # This lets you run code on exit, # including functions from myReflectiveSensor def exitHandler(): print("Exiting") sys.exit(0) # Register exit handlers atexit.register(exitHandler) signal.signal(signal.SIGINT, SIGINTHandler) while(1): if (myReflectiveSensor.blackDetected()): print("Black detected") else: print("Black NOT detected") time.sleep(.1) if __name__ == '__main__': main() """ Sparse matrix functions """ # # Authors: Travis Oliphant, March 2002 # Anthony Scopatz, August 2012 (Sparse Updates) # Jake Vanderplas, August 2012 (Sparse Updates) # from __future__ import division, print_function, absolute_import __all__ = ['expm', 'inv'] import math import numpy as np import scipy.misc from scipy.linalg.basic import solve, solve_triangular from scipy.sparse.base import isspmatrix from scipy.sparse.construct import eye as speye from scipy.sparse.linalg import spsolve import scipy.sparse import scipy.sparse.linalg from scipy.sparse.linalg.interface import LinearOperator UPPER_TRIANGULAR = 'upper_triangular' def inv(A): """ Compute the inverse of a sparse matrix Parameters ---------- A : (M,M) ndarray or sparse matrix square matrix to be inverted Returns ------- Ainv : (M,M) ndarray or sparse matrix inverse of `A` Notes ----- This computes the sparse inverse of `A`. If the inverse of `A` is expected to be non-sparse, it will likely be faster to convert `A` to dense and use scipy.linalg.inv. .. versionadded:: 0.12.0 """ I = speye(A.shape[0], A.shape[1], dtype=A.dtype, format=A.format) Ainv = spsolve(A, I) return Ainv def _onenorm_matrix_power_nnm(A, p): """ Compute the 1-norm of a non-negative integer power of a non-negative matrix. Parameters ---------- A : a square ndarray or matrix or sparse matrix Input matrix with non-negative entries. p : non-negative integer The power to which the matrix is to be raised. Returns ------- out : float The 1-norm of the matrix power p of A. """ # check input if int(p) != p or p < 0: raise ValueError('expected non-negative integer p') p = int(p) if len(A.shape) != 2 or A.shape[0] != A.shape[1]: raise ValueError('expected A to be like a square matrix') # Explicitly make a column vector so that this works when A is a # numpy matrix (in addition to ndarray and sparse matrix). v = np.ones((A.shape[0], 1), dtype=float) M = A.T for i in range(p): v = M.dot(v) return max(v) def _onenorm(A): # A compatibility function which should eventually disappear. # This is copypasted from expm_action. if scipy.sparse.isspmatrix(A): return max(abs(A).sum(axis=0).flat) else: return np.linalg.norm(A, 1) def _ident_like(A): # A compatibility function which should eventually disappear. # This is copypasted from expm_action. if scipy.sparse.isspmatrix(A): return scipy.sparse.construct.eye(A.shape[0], A.shape[1], dtype=A.dtype, format=A.format) else: return np.eye(A.shape[0], A.shape[1], dtype=A.dtype) def _count_nonzero(A): # A compatibility function which should eventually disappear. #XXX There should be a better way to do this when A is sparse # in the traditional sense. 
if isspmatrix(A): return np.sum(A.toarray() != 0) else: return np.count_nonzero(A) def _is_upper_triangular(A): # This function could possibly be of wider interest. if isspmatrix(A): lower_part = scipy.sparse.tril(A, -1) if lower_part.nnz == 0: # structural upper triangularity return True else: # coincidental upper triangularity return _count_nonzero(lower_part) == 0 else: return _count_nonzero(np.tril(A, -1)) == 0 def _smart_matrix_product(A, B, alpha=None, structure=None): """ A matrix product that knows about sparse and structured matrices. Parameters ---------- A : 2d ndarray First matrix. B : 2d ndarray Second matrix. alpha : float The matrix product will be scaled by this constant. structure : str, optional A string describing the structure of both matrices `A` and `B`. Only `upper_triangular` is currently supported. Returns ------- M : 2d ndarray Matrix product of A and B. """ if len(A.shape) != 2: raise ValueError('expected A to be a rectangular matrix') if len(B.shape) != 2: raise ValueError('expected B to be a rectangular matrix') f = None if structure == UPPER_TRIANGULAR: if not isspmatrix(A) and not isspmatrix(B): f, = scipy.linalg.get_blas_funcs(('trmm',), (A, B)) if f is not None: if alpha is None: alpha = 1. out = f(alpha, A, B) else: if alpha is None: out = A.dot(B) else: out = alpha * A.dot(B) return out class MatrixPowerOperator(LinearOperator): def __init__(self, A, p, structure=None): if A.ndim != 2 or A.shape[0] != A.shape[1]: raise ValueError('expected A to be like a square matrix') if p < 0: raise ValueError('expected p to be a non-negative integer') self._A = A self._p = p self._structure = structure self.dtype = A.dtype self.ndim = A.ndim self.shape = A.shape def _matvec(self, x): for i in range(self._p): x = self._A.dot(x) return x def _rmatvec(self, x): A_T = self._A.T x = x.ravel() for i in range(self._p): x = A_T.dot(x) return x def _matmat(self, X): for i in range(self._p): X = _smart_matrix_product(self._A, X, structure=self._structure) return X @property def T(self): return MatrixPowerOperator(self._A.T, self._p) class ProductOperator(LinearOperator): """ For now, this is limited to products of multiple square matrices. """ def __init__(self, *args, **kwargs): self._structure = kwargs.get('structure', None) for A in args: if len(A.shape) != 2 or A.shape[0] != A.shape[1]: raise ValueError( 'For now, the ProductOperator implementation is ' 'limited to the product of multiple square matrices.') if args: n = args[0].shape[0] for A in args: for d in A.shape: if d != n: raise ValueError( 'The square matrices of the ProductOperator ' 'must all have the same shape.') self.shape = (n, n) self.ndim = len(self.shape) self.dtype = np.find_common_type([x.dtype for x in args], []) self._operator_sequence = args def _matvec(self, x): for A in reversed(self._operator_sequence): x = A.dot(x) return x def _rmatvec(self, x): x = x.ravel() for A in self._operator_sequence: x = A.T.dot(x) return x def _matmat(self, X): for A in reversed(self._operator_sequence): X = _smart_matrix_product(A, X, structure=self._structure) return X @property def T(self): T_args = [A.T for A in reversed(self._operator_sequence)] return ProductOperator(*T_args) def _onenormest_matrix_power(A, p, t=2, itmax=5, compute_v=False, compute_w=False, structure=None): """ Efficiently estimate the 1-norm of A^p. Parameters ---------- A : ndarray Matrix whose 1-norm of a power is to be computed. p : int Non-negative integer power. 
t : int, optional A positive parameter controlling the tradeoff between accuracy versus time and memory usage. Larger values take longer and use more memory but give more accurate output. itmax : int, optional Use at most this many iterations. compute_v : bool, optional Request a norm-maximizing linear operator input vector if True. compute_w : bool, optional Request a norm-maximizing linear operator output vector if True. Returns ------- est : float An underestimate of the 1-norm of the sparse matrix. v : ndarray, optional The vector such that ||Av||_1 == est*||v||_1. It can be thought of as an input to the linear operator that gives an output with particularly large norm. w : ndarray, optional The vector Av which has relatively large 1-norm. It can be thought of as an output of the linear operator that is relatively large in norm compared to the input. """ return scipy.sparse.linalg.onenormest( MatrixPowerOperator(A, p, structure=structure)) def _onenormest_product(operator_seq, t=2, itmax=5, compute_v=False, compute_w=False, structure=None): """ Efficiently estimate the 1-norm of the matrix product of the args. Parameters ---------- operator_seq : linear operator sequence Matrices whose 1-norm of product is to be computed. t : int, optional A positive parameter controlling the tradeoff between accuracy versus time and memory usage. Larger values take longer and use more memory but give more accurate output. itmax : int, optional Use at most this many iterations. compute_v : bool, optional Request a norm-maximizing linear operator input vector if True. compute_w : bool, optional Request a norm-maximizing linear operator output vector if True. structure : str, optional A string describing the structure of all operators. Only `upper_triangular` is currently supported. Returns ------- est : float An underestimate of the 1-norm of the sparse matrix. v : ndarray, optional The vector such that ||Av||_1 == est*||v||_1. It can be thought of as an input to the linear operator that gives an output with particularly large norm. w : ndarray, optional The vector Av which has relatively large 1-norm. It can be thought of as an output of the linear operator that is relatively large in norm compared to the input. """ return scipy.sparse.linalg.onenormest( ProductOperator(*operator_seq, structure=structure)) class _ExpmPadeHelper(object): """ Help lazily evaluate a matrix exponential. The idea is to not do more work than we need for high expm precision, so we lazily compute matrix powers and store or precompute other properties of the matrix. """ def __init__(self, A, structure=None, use_exact_onenorm=False): """ Initialize the object. Parameters ---------- A : a dense or sparse square numpy matrix or ndarray The matrix to be exponentiated. structure : str, optional A string describing the structure of matrix `A`. Only `upper_triangular` is currently supported. use_exact_onenorm : bool, optional If True then only the exact one-norm of matrix powers and products will be used. Otherwise, the one-norm of powers and products may initially be estimated. 
""" self.A = A self._A2 = None self._A4 = None self._A6 = None self._A8 = None self._A10 = None self._d4_exact = None self._d6_exact = None self._d8_exact = None self._d10_exact = None self._d4_approx = None self._d6_approx = None self._d8_approx = None self._d10_approx = None self.ident = _ident_like(A) self.structure = structure self.use_exact_onenorm = use_exact_onenorm @property def A2(self): if self._A2 is None: self._A2 = _smart_matrix_product( self.A, self.A, structure=self.structure) return self._A2 @property def A4(self): if self._A4 is None: self._A4 = _smart_matrix_product( self.A2, self.A2, structure=self.structure) return self._A4 @property def A6(self): if self._A6 is None: self._A6 = _smart_matrix_product( self.A4, self.A2, structure=self.structure) return self._A6 @property def A8(self): if self._A8 is None: self._A8 = _smart_matrix_product( self.A6, self.A2, structure=self.structure) return self._A8 @property def A10(self): if self._A10 is None: self._A10 = _smart_matrix_product( self.A4, self.A6, structure=self.structure) return self._A10 @property def d4_tight(self): if self._d4_exact is None: self._d4_exact = _onenorm(self.A4)**(1/4.) return self._d4_exact @property def d6_tight(self): if self._d6_exact is None: self._d6_exact = _onenorm(self.A6)**(1/6.) return self._d6_exact @property def d8_tight(self): if self._d8_exact is None: self._d8_exact = _onenorm(self.A8)**(1/8.) return self._d8_exact @property def d10_tight(self): if self._d10_exact is None: self._d10_exact = _onenorm(self.A10)**(1/10.) return self._d10_exact @property def d4_loose(self): if self.use_exact_onenorm: return self.d4_tight if self._d4_exact is not None: return self._d4_exact else: if self._d4_approx is None: self._d4_approx = _onenormest_matrix_power(self.A2, 2, structure=self.structure)**(1/4.) return self._d4_approx @property def d6_loose(self): if self.use_exact_onenorm: return self.d6_tight if self._d6_exact is not None: return self._d6_exact else: if self._d6_approx is None: self._d6_approx = _onenormest_matrix_power(self.A2, 3, structure=self.structure)**(1/6.) return self._d6_approx @property def d8_loose(self): if self.use_exact_onenorm: return self.d8_tight if self._d8_exact is not None: return self._d8_exact else: if self._d8_approx is None: self._d8_approx = _onenormest_matrix_power(self.A4, 2, structure=self.structure)**(1/8.) return self._d8_approx @property def d10_loose(self): if self.use_exact_onenorm: return self.d10_tight if self._d10_exact is not None: return self._d10_exact else: if self._d10_approx is None: self._d10_approx = _onenormest_product((self.A4, self.A6), structure=self.structure)**(1/10.) return self._d10_approx def pade3(self): b = (120., 60., 12., 1.) U = _smart_matrix_product(self.A, b[3]*self.A2 + b[1]*self.ident, structure=self.structure) V = b[2]*self.A2 + b[0]*self.ident return U, V def pade5(self): b = (30240., 15120., 3360., 420., 30., 1.) U = _smart_matrix_product(self.A, b[5]*self.A4 + b[3]*self.A2 + b[1]*self.ident, structure=self.structure) V = b[4]*self.A4 + b[2]*self.A2 + b[0]*self.ident return U, V def pade7(self): b = (17297280., 8648640., 1995840., 277200., 25200., 1512., 56., 1.) U = _smart_matrix_product(self.A, b[7]*self.A6 + b[5]*self.A4 + b[3]*self.A2 + b[1]*self.ident, structure=self.structure) V = b[6]*self.A6 + b[4]*self.A4 + b[2]*self.A2 + b[0]*self.ident return U, V def pade9(self): b = (17643225600., 8821612800., 2075673600., 302702400., 30270240., 2162160., 110880., 3960., 90., 1.) 
U = _smart_matrix_product(self.A, (b[9]*self.A8 + b[7]*self.A6 + b[5]*self.A4 + b[3]*self.A2 + b[1]*self.ident), structure=self.structure) V = (b[8]*self.A8 + b[6]*self.A6 + b[4]*self.A4 + b[2]*self.A2 + b[0]*self.ident) return U, V def pade13_scaled(self, s): b = (64764752532480000., 32382376266240000., 7771770303897600., 1187353796428800., 129060195264000., 10559470521600., 670442572800., 33522128640., 1323241920., 40840800., 960960., 16380., 182., 1.) B = self.A * 2**-s B2 = self.A2 * 2**(-2*s) B4 = self.A4 * 2**(-4*s) B6 = self.A6 * 2**(-6*s) U2 = _smart_matrix_product(B6, b[13]*B6 + b[11]*B4 + b[9]*B2, structure=self.structure) U = _smart_matrix_product(B, (U2 + b[7]*B6 + b[5]*B4 + b[3]*B2 + b[1]*self.ident), structure=self.structure) V2 = _smart_matrix_product(B6, b[12]*B6 + b[10]*B4 + b[8]*B2, structure=self.structure) V = V2 + b[6]*B6 + b[4]*B4 + b[2]*B2 + b[0]*self.ident return U, V def expm(A): """ Compute the matrix exponential using Pade approximation. Parameters ---------- A : (M,M) array_like or sparse matrix 2D Array or Matrix (sparse or dense) to be exponentiated Returns ------- expA : (M,M) ndarray Matrix exponential of `A` Notes ----- This is algorithm (6.1) which is a simplification of algorithm (5.1). .. versionadded:: 0.12.0 References ---------- .. [1] Awad H. Al-Mohy and Nicholas J. Higham (2009) "A New Scaling and Squaring Algorithm for the Matrix Exponential." SIAM Journal on Matrix Analysis and Applications. 31 (3). pp. 970-989. ISSN 1095-7162 """ return _expm(A, use_exact_onenorm='auto') def _expm(A, use_exact_onenorm): # Core of expm, separated to allow testing exact and approximate # algorithms. # Avoid indiscriminate asarray() to allow sparse or other strange arrays. if isinstance(A, (list, tuple)): A = np.asarray(A) if len(A.shape) != 2 or A.shape[0] != A.shape[1]: raise ValueError('expected a square matrix') # Detect upper triangularity. structure = UPPER_TRIANGULAR if _is_upper_triangular(A) else None if use_exact_onenorm == "auto": # Hardcode a matrix order threshold for exact vs. estimated one-norms. use_exact_onenorm = A.shape[0] < 200 # Track functions of A to help compute the matrix exponential. h = _ExpmPadeHelper( A, structure=structure, use_exact_onenorm=use_exact_onenorm) # Try Pade order 3. eta_1 = max(h.d4_loose, h.d6_loose) if eta_1 < 1.495585217958292e-002 and _ell(h.A, 3) == 0: U, V = h.pade3() return _solve_P_Q(U, V, structure=structure) # Try Pade order 5. eta_2 = max(h.d4_tight, h.d6_loose) if eta_2 < 2.539398330063230e-001 and _ell(h.A, 5) == 0: U, V = h.pade5() return _solve_P_Q(U, V, structure=structure) # Try Pade orders 7 and 9. eta_3 = max(h.d6_tight, h.d8_loose) if eta_3 < 9.504178996162932e-001 and _ell(h.A, 7) == 0: U, V = h.pade7() return _solve_P_Q(U, V, structure=structure) if eta_3 < 2.097847961257068e+000 and _ell(h.A, 9) == 0: U, V = h.pade9() return _solve_P_Q(U, V, structure=structure) # Use Pade order 13. eta_4 = max(h.d8_loose, h.d10_loose) eta_5 = min(eta_3, eta_4) theta_13 = 4.25 s = max(int(np.ceil(np.log2(eta_5 / theta_13))), 0) s = s + _ell(2**-s * h.A, 13) U, V = h.pade13_scaled(s) X = _solve_P_Q(U, V, structure=structure) if structure == UPPER_TRIANGULAR: # Invoke Code Fragment 2.1. X = _fragment_2_1(X, h.A, s) else: # X = r_13(A)^(2^s) by repeated squaring. for i in range(s): X = X.dot(X) return X def _solve_P_Q(U, V, structure=None): """ A helper function for expm_2009. Parameters ---------- U : ndarray Pade numerator. V : ndarray Pade denominator. 
structure : str, optional A string describing the structure of both matrices `U` and `V`. Only `upper_triangular` is currently supported. Notes ----- The `structure` argument is inspired by similar args for theano and cvxopt functions. """ P = U + V Q = -U + V if isspmatrix(U): return spsolve(Q, P) elif structure is None: return solve(Q, P) elif structure == UPPER_TRIANGULAR: return solve_triangular(Q, P) else: raise ValueError('unsupported matrix structure: ' + str(structure)) def _sinch(x): """ Stably evaluate sinch. Notes ----- The strategy of falling back to a sixth order Taylor expansion was suggested by the Spallation Neutron Source docs which was found on the internet by google search. http://www.ornl.gov/~t6p/resources/xal/javadoc/gov/sns/tools/math/ElementaryFunction.html The details of the cutoff point and the Horner-like evaluation was picked without reference to anything in particular. Note that sinch is not currently implemented in scipy.special, whereas the "engineer's" definition of sinc is implemented. The implementation of sinc involves a scaling factor of pi that distinguishes it from the "mathematician's" version of sinc. """ # If x is small then use sixth order Taylor expansion. # How small is small? I am using the point where the relative error # of the approximation is less than 1e-14. # If x is large then directly evaluate sinh(x) / x. x2 = x*x if abs(x) < 0.0135: return 1 + (x2/6.)*(1 + (x2/20.)*(1 + (x2/42.))) else: return np.sinh(x) / x def _eq_10_42(lam_1, lam_2, t_12): """ Equation (10.42) of Functions of Matrices: Theory and Computation. Notes ----- This is a helper function for _fragment_2_1 of expm_2009. Equation (10.42) is on page 251 in the section on Schur algorithms. In particular, section 10.4.3 explains the Schur-Parlett algorithm. expm([[lam_1, t_12], [0, lam_1]) = [[exp(lam_1), t_12*exp((lam_1 + lam_2)/2)*sinch((lam_1 - lam_2)/2)], [0, exp(lam_2)] """ # The plain formula t_12 * (exp(lam_2) - exp(lam_2)) / (lam_2 - lam_1) # apparently suffers from cancellation, according to Higham's textbook. # A nice implementation of sinch, defined as sinh(x)/x, # will apparently work around the cancellation. a = 0.5 * (lam_1 + lam_2) b = 0.5 * (lam_1 - lam_2) return t_12 * np.exp(a) * _sinch(b) def _fragment_2_1(X, T, s): """ A helper function for expm_2009. Notes ----- The argument X is modified in-place, but this modification is not the same as the returned value of the function. This function also takes pains to do things in ways that are compatible with sparse matrices, for example by avoiding fancy indexing and by using methods of the matrices whenever possible instead of using functions of the numpy or scipy libraries themselves. """ # Form X = r_m(2^-s T) # Replace diag(X) by exp(2^-s diag(T)). n = X.shape[0] diag_T = T.diagonal().copy() # Replace diag(X) by exp(2^-s diag(T)). scale = 2 ** -s exp_diag = np.exp(scale * diag_T) for k in range(n): X[k, k] = exp_diag[k] for i in range(s-1, -1, -1): X = X.dot(X) # Replace diag(X) by exp(2^-i diag(T)). scale = 2 ** -i exp_diag = np.exp(scale * diag_T) for k in range(n): X[k, k] = exp_diag[k] # Replace (first) superdiagonal of X by explicit formula # for superdiagonal of exp(2^-i T) from Eq (10.42) of # the author's 2008 textbook # Functions of Matrices: Theory and Computation. for k in range(n-1): lam_1 = scale * diag_T[k] lam_2 = scale * diag_T[k+1] t_12 = scale * T[k, k+1] value = _eq_10_42(lam_1, lam_2, t_12) X[k, k+1] = value # Return the updated X matrix. 
return X def _ell(A, m): """ A helper function for expm_2009. Parameters ---------- A : linear operator A linear operator whose norm of power we care about. m : int The power of the linear operator Returns ------- value : int A value related to a bound. """ if len(A.shape) != 2 or A.shape[0] != A.shape[1]: raise ValueError('expected A to be like a square matrix') p = 2*m + 1 # The c_i are explained in (2.2) and (2.6) of the 2005 expm paper. # They are coefficients of terms of a generating function series expansion. choose_2p_p = scipy.misc.comb(2*p, p, exact=True) abs_c_recip = float(choose_2p_p * math.factorial(2*p + 1)) # This is explained after Eq. (1.2) of the 2009 expm paper. # It is the "unit roundoff" of IEEE double precision arithmetic. u = 2**-53 # Compute the one-norm of matrix power p of abs(A). A_abs_onenorm = _onenorm_matrix_power_nnm(abs(A), p) # Treat zero norm as a special case. if not A_abs_onenorm: return 0 alpha = A_abs_onenorm / (_onenorm(A) * abs_c_recip) log2_alpha_div_u = np.log2(alpha/u) value = int(np.ceil(log2_alpha_div_u / (2 * m))) return max(value, 0) from ctypes import * import sys if sys.version_info >= (2, 6): def binary(obj): return bytes(obj) else: def binary(obj): return buffer(obj) BYTE = c_byte WORD = c_ushort DWORD = c_ulong _ole32 = oledll.ole32 _StringFromCLSID = _ole32.StringFromCLSID _CoTaskMemFree = windll.ole32.CoTaskMemFree _ProgIDFromCLSID = _ole32.ProgIDFromCLSID _CLSIDFromString = _ole32.CLSIDFromString _CLSIDFromProgID = _ole32.CLSIDFromProgID _CoCreateGuid = _ole32.CoCreateGuid # Note: Comparing GUID instances by comparing their buffers # is slightly faster than using ole32.IsEqualGUID. class GUID(Structure): _fields_ = [("Data1", DWORD), ("Data2", WORD), ("Data3", WORD), ("Data4", BYTE * 8)] def __init__(self, name=None): if name is not None: _CLSIDFromString(unicode(name), byref(self)) def __repr__(self): return u'GUID("%s")' % unicode(self) def __unicode__(self): p = c_wchar_p() _StringFromCLSID(byref(self), byref(p)) result = p.value _CoTaskMemFree(p) return result __str__ = __unicode__ def __cmp__(self, other): if isinstance(other, GUID): return cmp(binary(self), binary(other)) return -1 def __nonzero__(self): return self != GUID_null def __eq__(self, other): return isinstance(other, GUID) and \ binary(self) == binary(other) def __hash__(self): # We make GUID instances hashable, although they are mutable. return hash(binary(self)) def copy(self): return GUID(unicode(self)) def from_progid(cls, progid): """Get guid from progid, ... """ if hasattr(progid, "_reg_clsid_"): progid = progid._reg_clsid_ if isinstance(progid, cls): return progid elif isinstance(progid, basestring): if progid.startswith("{"): return cls(progid) inst = cls() _CLSIDFromProgID(unicode(progid), byref(inst)) return inst else: raise TypeError("Cannot construct guid from %r" % progid) from_progid = classmethod(from_progid) def as_progid(self): "Convert a GUID into a progid" progid = c_wchar_p() _ProgIDFromCLSID(byref(self), byref(progid)) result = progid.value _CoTaskMemFree(progid) return result def create_new(cls): "Create a brand new guid" guid = cls() _CoCreateGuid(byref(guid)) return guid create_new = classmethod(create_new) GUID_null = GUID() __all__ = ["GUID"] # Copyright 2015, Google Inc. # All rights reserved. 
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""The Python implementation of the gRPC route guide server."""

import time
import math

import route_guide_pb2
import route_guide_resources

_ONE_DAY_IN_SECONDS = 60 * 60 * 24


def get_feature(feature_db, point):
    """Returns Feature at given location or None."""
    for feature in feature_db:
        if feature.location == point:
            return feature
    return None


def get_distance(start, end):
    """Distance between two points, via the haversine great-circle formula."""
    coord_factor = 10000000.0
    lat_1 = start.latitude / coord_factor
    lat_2 = end.latitude / coord_factor
    lon_1 = start.longitude / coord_factor
    lon_2 = end.longitude / coord_factor
    lat_rad_1 = math.radians(lat_1)
    lat_rad_2 = math.radians(lat_2)
    delta_lat_rad = math.radians(lat_2 - lat_1)
    delta_lon_rad = math.radians(lon_2 - lon_1)

    a = (pow(math.sin(delta_lat_rad / 2), 2) +
         (math.cos(lat_rad_1) * math.cos(lat_rad_2) *
          pow(math.sin(delta_lon_rad / 2), 2)))
    c = 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
    R = 6371000  # metres
    return R * c


class RouteGuideServicer(route_guide_pb2.BetaRouteGuideServicer):
    """Provides methods that implement functionality of route guide server."""

    def __init__(self):
        self.db = route_guide_resources.read_route_guide_database()

    def GetFeature(self, request, context):
        feature = get_feature(self.db, request)
        if feature is None:
            return route_guide_pb2.Feature(name="", location=request)
        else:
            return feature

    def ListFeatures(self, request, context):
        left = min(request.lo.longitude, request.hi.longitude)
        right = max(request.lo.longitude, request.hi.longitude)
        top = max(request.lo.latitude, request.hi.latitude)
        bottom = min(request.lo.latitude, request.hi.latitude)
        for feature in self.db:
            if (feature.location.longitude >= left and
                    feature.location.longitude <= right and
                    feature.location.latitude >= bottom and
                    feature.location.latitude <= top):
                yield feature

    def RecordRoute(self, request_iterator, context):
        point_count = 0
        feature_count = 0
        distance = 0.0
        prev_point = None

        start_time = time.time()
        for point in request_iterator:
            point_count += 1
            if get_feature(self.db, point):
                feature_count += 1
            if prev_point:
                distance += 
get_distance(prev_point, point) prev_point = point elapsed_time = time.time() - start_time return route_guide_pb2.RouteSummary(point_count=point_count, feature_count=feature_count, distance=int(distance), elapsed_time=int(elapsed_time)) def RouteChat(self, request_iterator, context): prev_notes = [] for new_note in request_iterator: for prev_note in prev_notes: if prev_note.location == new_note.location: yield prev_note prev_notes.append(new_note) def serve(): server = route_guide_pb2.beta_create_RouteGuide_server(RouteGuideServicer()) server.add_insecure_port('[::]:50051') server.start() try: while True: time.sleep(_ONE_DAY_IN_SECONDS) except KeyboardInterrupt: server.stop() if __name__ == '__main__': serve() # -*- coding: utf-8 -*- # # This file is part of Invenio. # Copyright (C) 2013, 2015 CERN. # # Invenio is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License as # published by the Free Software Foundation; either version 2 of the # License, or (at your option) any later version. # # Invenio is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Invenio; if not, write to the Free Software Foundation, Inc., # 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA. """Record editor interface.""" import json from flask import Blueprint, jsonify, render_template, request from flask_breadcrumbs import register_breadcrumb from flask_login import login_required from invenio.base.decorators import wash_arguments from invenio.base.globals import cfg from invenio.base.i18n import _ from invenio.config import (CFG_BIBCATALOG_SYSTEM_RT_URL, CFG_BIBEDIT_AUTOCOMPLETE, CFG_BIBEDIT_INTERNAL_DOI_PROTECTION_LEVEL, CFG_BIBEDIT_SHOW_HOLDING_PEN_REMOVED_FIELDS, CFG_CERN_SITE, CFG_INSPIRE_SITE, CFG_SITE_RECORD, CFG_SITE_URL) from invenio.ext.principal import permission_required from invenio.legacy.bibcatalog.api import BIBCATALOG_SYSTEM from invenio.legacy.bibedit.db_layer import (get_info_of_record_revision, get_name_tags_all) blueprint = Blueprint('editor', __name__, url_prefix='/record/edit', template_folder='templates', static_folder='static') @blueprint.route('/', methods=["GET", "POST"]) @register_breadcrumb(blueprint, '.', _('Editor')) @login_required @permission_required('runbibedit') def index(): """Editor index page.""" from invenio.legacy.bibedit.utils import get_record_templates from invenio.legacy.bibedit.engine import (get_available_kbs, get_available_fields_templates) # Add script data. 
record_templates = get_record_templates() record_templates.sort() tag_names = get_name_tags_all() protected_fields = ['001'] protected_fields.extend(cfg['CFG_BIBEDIT_PROTECTED_FIELDS'].split(',')) cern_site = 'false' if CFG_CERN_SITE: cern_site = 'true' data = { 'gRECORD_TEMPLATES': record_templates, 'gTAG_NAMES': tag_names, 'gPROTECTED_FIELDS': protected_fields, 'gINTERNAL_DOI_PROTECTION_LEVEL': CFG_BIBEDIT_INTERNAL_DOI_PROTECTION_LEVEL, 'gSITE_URL': CFG_SITE_URL, 'gSITE_RECORD': CFG_SITE_RECORD, 'gCERN_SITE': cern_site, 'gINSPIRE_SITE': CFG_INSPIRE_SITE, 'gHASH_CHECK_INTERVAL': cfg['CFG_BIBEDIT_JS_HASH_CHECK_INTERVAL'], 'gCHECK_SCROLL_INTERVAL': cfg['CFG_BIBEDIT_JS_CHECK_SCROLL_INTERVAL'], 'gSTATUS_ERROR_TIME': cfg['CFG_BIBEDIT_JS_STATUS_ERROR_TIME'], 'gSTATUS_INFO_TIME': cfg['CFG_BIBEDIT_JS_STATUS_INFO_TIME'], 'gCLONED_RECORD_COLOR': '"' + cfg['CFG_BIBEDIT_JS_CLONED_RECORD_COLOR'] + '"', 'gCLONED_RECORD_COLOR_FADE_DURATION': cfg['CFG_BIBEDIT_JS_CLONED_RECORD_COLOR_FADE_DURATION'], 'gNEW_ADD_FIELD_FORM_COLOR': '"' + cfg['CFG_BIBEDIT_JS_NEW_ADD_FIELD_FORM_COLOR'] + '"', 'gNEW_ADD_FIELD_FORM_COLOR_FADE_DURATION': cfg['CFG_BIBEDIT_JS_NEW_ADD_FIELD_FORM_COLOR_FADE_DURATION'], 'gNEW_CONTENT_COLOR': '"' + cfg['CFG_BIBEDIT_JS_NEW_CONTENT_COLOR'] + '"', 'gNEW_CONTENT_COLOR_FADE_DURATION': cfg['CFG_BIBEDIT_JS_NEW_CONTENT_COLOR_FADE_DURATION'], 'gNEW_CONTENT_HIGHLIGHT_DELAY': cfg['CFG_BIBEDIT_JS_NEW_CONTENT_HIGHLIGHT_DELAY'], 'gTICKET_REFRESH_DELAY': cfg['CFG_BIBEDIT_JS_TICKET_REFRESH_DELAY'], 'gRESULT_CODES': cfg['CFG_BIBEDIT_AJAX_RESULT_CODES'], 'gAUTOSUGGEST_TAGS': cfg['CFG_BIBEDIT_AUTOSUGGEST_TAGS'], 'gAUTOCOMPLETE_TAGS': cfg['CFG_BIBEDIT_AUTOCOMPLETE_TAGS_KBS'].keys(), 'gKEYWORD_TAG': '"' + cfg['CFG_BIBEDIT_KEYWORD_TAG'] + '"', 'gREQUESTS_UNTIL_SAVE': cfg['CFG_BIBEDIT_REQUESTS_UNTIL_SAVE'], 'gAVAILABLE_KBS': get_available_kbs(), 'gDOILookupField': '"' + cfg['CFG_BIBEDIT_DOI_LOOKUP_FIELD'] + '"', 'gDisplayReferenceTags': cfg['CFG_BIBEDIT_DISPLAY_REFERENCE_TAGS'], 'gDisplayAuthorTags': cfg['CFG_BIBEDIT_DISPLAY_AUTHOR_TAGS'], 'gExcludeCuratorTags': cfg['CFG_BIBEDIT_EXCLUDE_CURATOR_TAGS'], 'gSHOW_HP_REMOVED_FIELDS': CFG_BIBEDIT_SHOW_HOLDING_PEN_REMOVED_FIELDS, 'gBIBCATALOG_SYSTEM_RT_URL': repr(CFG_BIBCATALOG_SYSTEM_RT_URL), 'gAutoComplete': json.dumps(CFG_BIBEDIT_AUTOCOMPLETE) } fieldTemplates = get_available_fields_templates() def convert(data): """Return JS friendly strings. 
""" if isinstance(data, unicode): return str(data) else: return json.dumps(data) for key in data: data[key] = convert(data[key]) try: BIBCATALOG_SYSTEM.ticket_search(0) can_search_for_ticket = True except NotImplementedError: can_search_for_ticket = False ctx = { "data": data, "fieldTemplates": json.dumps(fieldTemplates), "can_search_for_ticket": can_search_for_ticket } return render_template('editor/index.html', **ctx) @blueprint.route('/api', methods=["POST"]) @login_required @permission_required('runbibedit') def api(): """Handle AJAX requests.""" from invenio.ext.login import current_user from invenio.utils.json import json_unicode_to_utf8 from invenio.legacy.bibedit.utils import user_can_edit_record_collection from invenio.legacy.bibedit.engine import perform_request_ajax uid = current_user.get_id() json_data = json.loads(request.form['jsondata'].encode("utf-8")) json_data = json_unicode_to_utf8(json_data) json_response = {'resultCode': 0, 'ID': json_data['ID']} recid = None if 'recID' in json_data: recid = int(json_data['recID']) json_response.update({'recID': recid}) if json_data['requestType'] == "getRecord": # Authorize access to record. if not user_can_edit_record_collection(request, recid): json_response.update({'resultCode': 101}) return json.dumps(json_response) # Handle AJAX request. json_response.update(perform_request_ajax(request, recid, uid, json_data)) return jsonify(json_response) @blueprint.route('/compare_revisions') @register_breadcrumb(blueprint, '.compare_revisions', _('Compare revisions')) @login_required @wash_arguments({"rev1": (unicode, ''), "rev2": (unicode, ''), "recid": (int, 0)}) @permission_required('runbibedit') def compare_revisions(rev1, rev2, recid): """Compare two revisions of a record.""" from invenio.legacy.bibedit.engine import (get_marcxml_of_revision_id, re_revdate_split) from invenio.legacy.bibrecord.xmlmarc2textmarc import create_marc_record from invenio.legacy.bibrecord import create_record from invenio.legacy.bibedit.utils import record_revision_exists from invenio.utils.text import show_diff person1 = "" person2 = "" if (not record_revision_exists(recid, rev1)) or \ (not record_revision_exists(recid, rev2)): return render_template("editor/revision_comparison_error.html") else: xml1 = get_marcxml_of_revision_id(recid, rev1) xml2 = get_marcxml_of_revision_id(recid, rev2) # Create MARC representations of the records marc1 = create_marc_record( create_record(xml1)[0], '', {"text-marc": 1, "aleph-marc": 0}) marc2 = create_marc_record( create_record(xml2)[0], '', {"text-marc": 1, "aleph-marc": 0}) comparison = show_diff(marc1, marc2, prefix="
", suffix="
", prefix_removed='', suffix_removed='', prefix_added='', suffix_added='') job_date1 = "%s-%s-%s %s:%s:%s" % re_revdate_split.search(rev1 ).groups() job_date2 = "%s-%s-%s %s:%s:%s" % re_revdate_split.search(rev2 ).groups() # Getting the author of each revision info1 = get_info_of_record_revision(recid, job_date1) info2 = get_info_of_record_revision(recid, job_date2) if info1: person1 = info1[0][1] if info2: person2 = info2[0][1] ctx = { "job_date1": job_date1, "job_date2": job_date2, "person1": person1, "person2": person2, "comparison": comparison } return render_template("editor/revision_comparison.html", **ctx) """ Improved support for Microsoft Visual C++ compilers. Known supported compilers: -------------------------- Microsoft Visual C++ 9.0: Microsoft Visual C++ Compiler for Python 2.7 (x86, amd64) Microsoft Windows SDK 6.1 (x86, x64, ia64) Microsoft Windows SDK 7.0 (x86, x64, ia64) Microsoft Visual C++ 10.0: Microsoft Windows SDK 7.1 (x86, x64, ia64) Microsoft Visual C++ 14.0: Microsoft Visual C++ Build Tools 2015 (x86, x64, arm) Microsoft Visual Studio 2017 (x86, x64, arm, arm64) Microsoft Visual Studio Build Tools 2017 (x86, x64, arm, arm64) """ import os import sys import platform import itertools import distutils.errors from pkg_resources.extern.packaging.version import LegacyVersion from setuptools.extern.six.moves import filterfalse from .monkey import get_unpatched if platform.system() == 'Windows': from setuptools.extern.six.moves import winreg safe_env = os.environ else: """ Mock winreg and environ so the module can be imported on this platform. """ class winreg: HKEY_USERS = None HKEY_CURRENT_USER = None HKEY_LOCAL_MACHINE = None HKEY_CLASSES_ROOT = None safe_env = dict() try: from distutils.msvc9compiler import Reg except ImportError: pass def msvc9_find_vcvarsall(version): """ Patched "distutils.msvc9compiler.find_vcvarsall" to use the standalone compiler build for Python (VCForPython). Fall back to original behavior when the standalone compiler is not available. Redirect the path of "vcvarsall.bat". Known supported compilers ------------------------- Microsoft Visual C++ 9.0: Microsoft Visual C++ Compiler for Python 2.7 (x86, amd64) Parameters ---------- version: float Required Microsoft Visual C++ version. Return ------ vcvarsall.bat path: str """ VC_BASE = r'Software\%sMicrosoft\DevDiv\VCForPython\%0.1f' key = VC_BASE % ('', version) try: # Per-user installs register the compiler path here productdir = Reg.get_value(key, "installdir") except KeyError: try: # All-user installs on a 64-bit system register here key = VC_BASE % ('Wow6432Node\\', version) productdir = Reg.get_value(key, "installdir") except KeyError: productdir = None if productdir: vcvarsall = os.path.os.path.join(productdir, "vcvarsall.bat") if os.path.isfile(vcvarsall): return vcvarsall return get_unpatched(msvc9_find_vcvarsall)(version) def msvc9_query_vcvarsall(ver, arch='x86', *args, **kwargs): """ Patched "distutils.msvc9compiler.query_vcvarsall" for support extra compilers. Set environment without use of "vcvarsall.bat". Known supported compilers ------------------------- Microsoft Visual C++ 9.0: Microsoft Visual C++ Compiler for Python 2.7 (x86, amd64) Microsoft Windows SDK 6.1 (x86, x64, ia64) Microsoft Windows SDK 7.0 (x86, x64, ia64) Microsoft Visual C++ 10.0: Microsoft Windows SDK 7.1 (x86, x64, ia64) Parameters ---------- ver: float Required Microsoft Visual C++ version. arch: str Target architecture. 
Return ------ environment: dict """ # Try to get environement from vcvarsall.bat (Classical way) try: orig = get_unpatched(msvc9_query_vcvarsall) return orig(ver, arch, *args, **kwargs) except distutils.errors.DistutilsPlatformError: # Pass error if Vcvarsall.bat is missing pass except ValueError: # Pass error if environment not set after executing vcvarsall.bat pass # If error, try to set environment directly try: return EnvironmentInfo(arch, ver).return_env() except distutils.errors.DistutilsPlatformError as exc: _augment_exception(exc, ver, arch) raise def msvc14_get_vc_env(plat_spec): """ Patched "distutils._msvccompiler._get_vc_env" for support extra compilers. Set environment without use of "vcvarsall.bat". Known supported compilers ------------------------- Microsoft Visual C++ 14.0: Microsoft Visual C++ Build Tools 2015 (x86, x64, arm) Microsoft Visual Studio 2017 (x86, x64, arm, arm64) Microsoft Visual Studio Build Tools 2017 (x86, x64, arm, arm64) Parameters ---------- plat_spec: str Target architecture. Return ------ environment: dict """ # Try to get environment from vcvarsall.bat (Classical way) try: return get_unpatched(msvc14_get_vc_env)(plat_spec) except distutils.errors.DistutilsPlatformError: # Pass error Vcvarsall.bat is missing pass # If error, try to set environment directly try: return EnvironmentInfo(plat_spec, vc_min_ver=14.0).return_env() except distutils.errors.DistutilsPlatformError as exc: _augment_exception(exc, 14.0) raise def msvc14_gen_lib_options(*args, **kwargs): """ Patched "distutils._msvccompiler.gen_lib_options" for fix compatibility between "numpy.distutils" and "distutils._msvccompiler" (for Numpy < 1.11.2) """ if "numpy.distutils" in sys.modules: import numpy as np if LegacyVersion(np.__version__) < LegacyVersion('1.11.2'): return np.distutils.ccompiler.gen_lib_options(*args, **kwargs) return get_unpatched(msvc14_gen_lib_options)(*args, **kwargs) def _augment_exception(exc, version, arch=''): """ Add details to the exception message to help guide the user as to what action will resolve it. """ # Error if MSVC++ directory not found or environment not set message = exc.args[0] if "vcvarsall" in message.lower() or "visual c" in message.lower(): # Special error message if MSVC++ not installed tmpl = 'Microsoft Visual C++ {version:0.1f} is required.' message = tmpl.format(**locals()) msdownload = 'www.microsoft.com/download/details.aspx?id=%d' if version == 9.0: if arch.lower().find('ia64') > -1: # For VC++ 9.0, if IA64 support is needed, redirect user # to Windows SDK 7.0 message += ' Get it with "Microsoft Windows SDK 7.0": ' message += msdownload % 3138 else: # For VC++ 9.0 redirect user to Vc++ for Python 2.7 : # This redirection link is maintained by Microsoft. # Contact vspython@microsoft.com if it needs updating. message += ' Get it from http://aka.ms/vcpython27' elif version == 10.0: # For VC++ 10.0 Redirect user to Windows SDK 7.1 message += ' Get it with "Microsoft Windows SDK 7.1": ' message += msdownload % 8279 elif version >= 14.0: # For VC++ 14.0 Redirect user to Visual C++ Build Tools message += (' Get it with "Microsoft Visual C++ Build Tools": ' r'http://landinghub.visualstudio.com/' 'visual-cpp-build-tools') exc.args = (message, ) class PlatformInfo: """ Current and Target Architectures informations. Parameters ---------- arch: str Target architecture. 
""" current_cpu = safe_env.get('processor_architecture', '').lower() def __init__(self, arch): self.arch = arch.lower().replace('x64', 'amd64') @property def target_cpu(self): return self.arch[self.arch.find('_') + 1:] def target_is_x86(self): return self.target_cpu == 'x86' def current_is_x86(self): return self.current_cpu == 'x86' def current_dir(self, hidex86=False, x64=False): """ Current platform specific subfolder. Parameters ---------- hidex86: bool return '' and not '\x86' if architecture is x86. x64: bool return '\x64' and not '\amd64' if architecture is amd64. Return ------ subfolder: str '\target', or '' (see hidex86 parameter) """ return ( '' if (self.current_cpu == 'x86' and hidex86) else r'\x64' if (self.current_cpu == 'amd64' and x64) else r'\%s' % self.current_cpu ) def target_dir(self, hidex86=False, x64=False): r""" Target platform specific subfolder. Parameters ---------- hidex86: bool return '' and not '\x86' if architecture is x86. x64: bool return '\x64' and not '\amd64' if architecture is amd64. Return ------ subfolder: str '\current', or '' (see hidex86 parameter) """ return ( '' if (self.target_cpu == 'x86' and hidex86) else r'\x64' if (self.target_cpu == 'amd64' and x64) else r'\%s' % self.target_cpu ) def cross_dir(self, forcex86=False): r""" Cross platform specific subfolder. Parameters ---------- forcex86: bool Use 'x86' as current architecture even if current acritecture is not x86. Return ------ subfolder: str '' if target architecture is current architecture, '\current_target' if not. """ current = 'x86' if forcex86 else self.current_cpu return ( '' if self.target_cpu == current else self.target_dir().replace('\\', '\\%s_' % current) ) class RegistryInfo: """ Microsoft Visual Studio related registry informations. Parameters ---------- platform_info: PlatformInfo "PlatformInfo" instance. """ HKEYS = (winreg.HKEY_USERS, winreg.HKEY_CURRENT_USER, winreg.HKEY_LOCAL_MACHINE, winreg.HKEY_CLASSES_ROOT) def __init__(self, platform_info): self.pi = platform_info @property def visualstudio(self): """ Microsoft Visual Studio root registry key. """ return 'VisualStudio' @property def sxs(self): """ Microsoft Visual Studio SxS registry key. """ return os.path.join(self.visualstudio, 'SxS') @property def vc(self): """ Microsoft Visual C++ VC7 registry key. """ return os.path.join(self.sxs, 'VC7') @property def vs(self): """ Microsoft Visual Studio VS7 registry key. """ return os.path.join(self.sxs, 'VS7') @property def vc_for_python(self): """ Microsoft Visual C++ for Python registry key. """ return r'DevDiv\VCForPython' @property def microsoft_sdk(self): """ Microsoft SDK registry key. """ return 'Microsoft SDKs' @property def windows_sdk(self): """ Microsoft Windows/Platform SDK registry key. """ return os.path.join(self.microsoft_sdk, 'Windows') @property def netfx_sdk(self): """ Microsoft .NET Framework SDK registry key. """ return os.path.join(self.microsoft_sdk, 'NETFXSDK') @property def windows_kits_roots(self): """ Microsoft Windows Kits Roots registry key. """ return r'Windows Kits\Installed Roots' def microsoft(self, key, x86=False): """ Return key in Microsoft software registry. Parameters ---------- key: str Registry key path where look. x86: str Force x86 software registry. Return ------ str: value """ node64 = '' if self.pi.current_is_x86() or x86 else 'Wow6432Node' return os.path.join('Software', node64, 'Microsoft', key) def lookup(self, key, name): """ Look for values in registry in Microsoft software registry. 
Parameters ---------- key: str Registry key path where look. name: str Value name to find. Return ------ str: value """ KEY_READ = winreg.KEY_READ openkey = winreg.OpenKey ms = self.microsoft for hkey in self.HKEYS: try: bkey = openkey(hkey, ms(key), 0, KEY_READ) except (OSError, IOError): if not self.pi.current_is_x86(): try: bkey = openkey(hkey, ms(key, True), 0, KEY_READ) except (OSError, IOError): continue else: continue try: return winreg.QueryValueEx(bkey, name)[0] except (OSError, IOError): pass class SystemInfo: """ Microsoft Windows and Visual Studio related system inormations. Parameters ---------- registry_info: RegistryInfo "RegistryInfo" instance. vc_ver: float Required Microsoft Visual C++ version. """ # Variables and properties in this class use originals CamelCase variables # names from Microsoft source files for more easy comparaison. WinDir = safe_env.get('WinDir', '') ProgramFiles = safe_env.get('ProgramFiles', '') ProgramFilesx86 = safe_env.get('ProgramFiles(x86)', ProgramFiles) def __init__(self, registry_info, vc_ver=None): self.ri = registry_info self.pi = self.ri.pi self.vc_ver = vc_ver or self._find_latest_available_vc_ver() def _find_latest_available_vc_ver(self): try: return self.find_available_vc_vers()[-1] except IndexError: err = 'No Microsoft Visual C++ version found' raise distutils.errors.DistutilsPlatformError(err) def find_available_vc_vers(self): """ Find all available Microsoft Visual C++ versions. """ ms = self.ri.microsoft vckeys = (self.ri.vc, self.ri.vc_for_python, self.ri.vs) vc_vers = [] for hkey in self.ri.HKEYS: for key in vckeys: try: bkey = winreg.OpenKey(hkey, ms(key), 0, winreg.KEY_READ) except (OSError, IOError): continue subkeys, values, _ = winreg.QueryInfoKey(bkey) for i in range(values): try: ver = float(winreg.EnumValue(bkey, i)[0]) if ver not in vc_vers: vc_vers.append(ver) except ValueError: pass for i in range(subkeys): try: ver = float(winreg.EnumKey(bkey, i)) if ver not in vc_vers: vc_vers.append(ver) except ValueError: pass return sorted(vc_vers) @property def VSInstallDir(self): """ Microsoft Visual Studio directory. """ # Default path name = 'Microsoft Visual Studio %0.1f' % self.vc_ver default = os.path.join(self.ProgramFilesx86, name) # Try to get path from registry, if fail use default path return self.ri.lookup(self.ri.vs, '%0.1f' % self.vc_ver) or default @property def VCInstallDir(self): """ Microsoft Visual C++ directory. 
""" self.VSInstallDir guess_vc = self._guess_vc() or self._guess_vc_legacy() # Try to get "VC++ for Python" path from registry as default path reg_path = os.path.join(self.ri.vc_for_python, '%0.1f' % self.vc_ver) python_vc = self.ri.lookup(reg_path, 'installdir') default_vc = os.path.join(python_vc, 'VC') if python_vc else guess_vc # Try to get path from registry, if fail use default path path = self.ri.lookup(self.ri.vc, '%0.1f' % self.vc_ver) or default_vc if not os.path.isdir(path): msg = 'Microsoft Visual C++ directory not found' raise distutils.errors.DistutilsPlatformError(msg) return path def _guess_vc(self): """ Locate Visual C for 2017 """ if self.vc_ver <= 14.0: return default = r'VC\Tools\MSVC' guess_vc = os.path.join(self.VSInstallDir, default) # Subdir with VC exact version as name try: vc_exact_ver = os.listdir(guess_vc)[-1] return os.path.join(guess_vc, vc_exact_ver) except (OSError, IOError, IndexError): pass def _guess_vc_legacy(self): """ Locate Visual C for versions prior to 2017 """ default = r'Microsoft Visual Studio %0.1f\VC' % self.vc_ver return os.path.join(self.ProgramFilesx86, default) @property def WindowsSdkVersion(self): """ Microsoft Windows SDK versions for specified MSVC++ version. """ if self.vc_ver <= 9.0: return ('7.0', '6.1', '6.0a') elif self.vc_ver == 10.0: return ('7.1', '7.0a') elif self.vc_ver == 11.0: return ('8.0', '8.0a') elif self.vc_ver == 12.0: return ('8.1', '8.1a') elif self.vc_ver >= 14.0: return ('10.0', '8.1') @property def WindowsSdkLastVersion(self): """ Microsoft Windows SDK last version """ return self._use_last_dir_name(os.path.join( self.WindowsSdkDir, 'lib')) @property def WindowsSdkDir(self): """ Microsoft Windows SDK directory. """ sdkdir = '' for ver in self.WindowsSdkVersion: # Try to get it from registry loc = os.path.join(self.ri.windows_sdk, 'v%s' % ver) sdkdir = self.ri.lookup(loc, 'installationfolder') if sdkdir: break if not sdkdir or not os.path.isdir(sdkdir): # Try to get "VC++ for Python" version from registry path = os.path.join(self.ri.vc_for_python, '%0.1f' % self.vc_ver) install_base = self.ri.lookup(path, 'installdir') if install_base: sdkdir = os.path.join(install_base, 'WinSDK') if not sdkdir or not os.path.isdir(sdkdir): # If fail, use default new path for ver in self.WindowsSdkVersion: intver = ver[:ver.rfind('.')] path = r'Microsoft SDKs\Windows Kits\%s' % (intver) d = os.path.join(self.ProgramFiles, path) if os.path.isdir(d): sdkdir = d if not sdkdir or not os.path.isdir(sdkdir): # If fail, use default old path for ver in self.WindowsSdkVersion: path = r'Microsoft SDKs\Windows\v%s' % ver d = os.path.join(self.ProgramFiles, path) if os.path.isdir(d): sdkdir = d if not sdkdir: # If fail, use Platform SDK sdkdir = os.path.join(self.VCInstallDir, 'PlatformSDK') return sdkdir @property def WindowsSDKExecutablePath(self): """ Microsoft Windows SDK executable directory. 
""" # Find WinSDK NetFx Tools registry dir name if self.vc_ver <= 11.0: netfxver = 35 arch = '' else: netfxver = 40 hidex86 = True if self.vc_ver <= 12.0 else False arch = self.pi.current_dir(x64=True, hidex86=hidex86) fx = 'WinSDK-NetFx%dTools%s' % (netfxver, arch.replace('\\', '-')) # liste all possibles registry paths regpaths = [] if self.vc_ver >= 14.0: for ver in self.NetFxSdkVersion: regpaths += [os.path.join(self.ri.netfx_sdk, ver, fx)] for ver in self.WindowsSdkVersion: regpaths += [os.path.join(self.ri.windows_sdk, 'v%sA' % ver, fx)] # Return installation folder from the more recent path for path in regpaths: execpath = self.ri.lookup(path, 'installationfolder') if execpath: break return execpath @property def FSharpInstallDir(self): """ Microsoft Visual F# directory. """ path = r'%0.1f\Setup\F#' % self.vc_ver path = os.path.join(self.ri.visualstudio, path) return self.ri.lookup(path, 'productdir') or '' @property def UniversalCRTSdkDir(self): """ Microsoft Universal CRT SDK directory. """ # Set Kit Roots versions for specified MSVC++ version if self.vc_ver >= 14.0: vers = ('10', '81') else: vers = () # Find path of the more recent Kit for ver in vers: sdkdir = self.ri.lookup(self.ri.windows_kits_roots, 'kitsroot%s' % ver) if sdkdir: break return sdkdir or '' @property def UniversalCRTSdkLastVersion(self): """ Microsoft Universal C Runtime SDK last version """ return self._use_last_dir_name(os.path.join( self.UniversalCRTSdkDir, 'lib')) @property def NetFxSdkVersion(self): """ Microsoft .NET Framework SDK versions. """ # Set FxSdk versions for specified MSVC++ version if self.vc_ver >= 14.0: return ('4.6.1', '4.6') else: return () @property def NetFxSdkDir(self): """ Microsoft .NET Framework SDK directory. """ for ver in self.NetFxSdkVersion: loc = os.path.join(self.ri.netfx_sdk, ver) sdkdir = self.ri.lookup(loc, 'kitsinstallationfolder') if sdkdir: break return sdkdir or '' @property def FrameworkDir32(self): """ Microsoft .NET Framework 32bit directory. """ # Default path guess_fw = os.path.join(self.WinDir, r'Microsoft.NET\Framework') # Try to get path from registry, if fail use default path return self.ri.lookup(self.ri.vc, 'frameworkdir32') or guess_fw @property def FrameworkDir64(self): """ Microsoft .NET Framework 64bit directory. """ # Default path guess_fw = os.path.join(self.WinDir, r'Microsoft.NET\Framework64') # Try to get path from registry, if fail use default path return self.ri.lookup(self.ri.vc, 'frameworkdir64') or guess_fw @property def FrameworkVersion32(self): """ Microsoft .NET Framework 32bit versions. """ return self._find_dot_net_versions(32) @property def FrameworkVersion64(self): """ Microsoft .NET Framework 64bit versions. """ return self._find_dot_net_versions(64) def _find_dot_net_versions(self, bits): """ Find Microsoft .NET Framework versions. Parameters ---------- bits: int Platform number of bits: 32 or 64. 
""" # Find actual .NET version in registry reg_ver = self.ri.lookup(self.ri.vc, 'frameworkver%d' % bits) dot_net_dir = getattr(self, 'FrameworkDir%d' % bits) ver = reg_ver or self._use_last_dir_name(dot_net_dir, 'v') or '' # Set .NET versions for specified MSVC++ version if self.vc_ver >= 12.0: frameworkver = (ver, 'v4.0') elif self.vc_ver >= 10.0: frameworkver = ('v4.0.30319' if ver.lower()[:2] != 'v4' else ver, 'v3.5') elif self.vc_ver == 9.0: frameworkver = ('v3.5', 'v2.0.50727') if self.vc_ver == 8.0: frameworkver = ('v3.0', 'v2.0.50727') return frameworkver def _use_last_dir_name(self, path, prefix=''): """ Return name of the last dir in path or '' if no dir found. Parameters ---------- path: str Use dirs in this path prefix: str Use only dirs startings by this prefix """ matching_dirs = ( dir_name for dir_name in reversed(os.listdir(path)) if os.path.isdir(os.path.join(path, dir_name)) and dir_name.startswith(prefix) ) return next(matching_dirs, None) or '' class EnvironmentInfo: """ Return environment variables for specified Microsoft Visual C++ version and platform : Lib, Include, Path and libpath. This function is compatible with Microsoft Visual C++ 9.0 to 14.0. Script created by analysing Microsoft environment configuration files like "vcvars[...].bat", "SetEnv.Cmd", "vcbuildtools.bat", ... Parameters ---------- arch: str Target architecture. vc_ver: float Required Microsoft Visual C++ version. If not set, autodetect the last version. vc_min_ver: float Minimum Microsoft Visual C++ version. """ # Variables and properties in this class use originals CamelCase variables # names from Microsoft source files for more easy comparaison. def __init__(self, arch, vc_ver=None, vc_min_ver=0): self.pi = PlatformInfo(arch) self.ri = RegistryInfo(self.pi) self.si = SystemInfo(self.ri, vc_ver) if self.vc_ver < vc_min_ver: err = 'No suitable Microsoft Visual C++ version found' raise distutils.errors.DistutilsPlatformError(err) @property def vc_ver(self): """ Microsoft Visual C++ version. 
""" return self.si.vc_ver @property def VSTools(self): """ Microsoft Visual Studio Tools """ paths = [r'Common7\IDE', r'Common7\Tools'] if self.vc_ver >= 14.0: arch_subdir = self.pi.current_dir(hidex86=True, x64=True) paths += [r'Common7\IDE\CommonExtensions\Microsoft\TestWindow'] paths += [r'Team Tools\Performance Tools'] paths += [r'Team Tools\Performance Tools%s' % arch_subdir] return [os.path.join(self.si.VSInstallDir, path) for path in paths] @property def VCIncludes(self): """ Microsoft Visual C++ & Microsoft Foundation Class Includes """ return [os.path.join(self.si.VCInstallDir, 'Include'), os.path.join(self.si.VCInstallDir, r'ATLMFC\Include')] @property def VCLibraries(self): """ Microsoft Visual C++ & Microsoft Foundation Class Libraries """ if self.vc_ver >= 15.0: arch_subdir = self.pi.target_dir(x64=True) else: arch_subdir = self.pi.target_dir(hidex86=True) paths = ['Lib%s' % arch_subdir, r'ATLMFC\Lib%s' % arch_subdir] if self.vc_ver >= 14.0: paths += [r'Lib\store%s' % arch_subdir] return [os.path.join(self.si.VCInstallDir, path) for path in paths] @property def VCStoreRefs(self): """ Microsoft Visual C++ store references Libraries """ if self.vc_ver < 14.0: return [] return [os.path.join(self.si.VCInstallDir, r'Lib\store\references')] @property def VCTools(self): """ Microsoft Visual C++ Tools """ si = self.si tools = [os.path.join(si.VCInstallDir, 'VCPackages')] forcex86 = True if self.vc_ver <= 10.0 else False arch_subdir = self.pi.cross_dir(forcex86) if arch_subdir: tools += [os.path.join(si.VCInstallDir, 'Bin%s' % arch_subdir)] if self.vc_ver == 14.0: path = 'Bin%s' % self.pi.current_dir(hidex86=True) tools += [os.path.join(si.VCInstallDir, path)] elif self.vc_ver >= 15.0: host_dir = (r'bin\HostX86%s' if self.pi.current_is_x86() else r'bin\HostX64%s') tools += [os.path.join( si.VCInstallDir, host_dir % self.pi.target_dir(x64=True))] if self.pi.current_cpu != self.pi.target_cpu: tools += [os.path.join( si.VCInstallDir, host_dir % self.pi.current_dir(x64=True))] else: tools += [os.path.join(si.VCInstallDir, 'Bin')] return tools @property def OSLibraries(self): """ Microsoft Windows SDK Libraries """ if self.vc_ver <= 10.0: arch_subdir = self.pi.target_dir(hidex86=True, x64=True) return [os.path.join(self.si.WindowsSdkDir, 'Lib%s' % arch_subdir)] else: arch_subdir = self.pi.target_dir(x64=True) lib = os.path.join(self.si.WindowsSdkDir, 'lib') libver = self._sdk_subdir return [os.path.join(lib, '%sum%s' % (libver , arch_subdir))] @property def OSIncludes(self): """ Microsoft Windows SDK Include """ include = os.path.join(self.si.WindowsSdkDir, 'include') if self.vc_ver <= 10.0: return [include, os.path.join(include, 'gl')] else: if self.vc_ver >= 14.0: sdkver = self._sdk_subdir else: sdkver = '' return [os.path.join(include, '%sshared' % sdkver), os.path.join(include, '%sum' % sdkver), os.path.join(include, '%swinrt' % sdkver)] @property def OSLibpath(self): """ Microsoft Windows SDK Libraries Paths """ ref = os.path.join(self.si.WindowsSdkDir, 'References') libpath = [] if self.vc_ver <= 9.0: libpath += self.OSLibraries if self.vc_ver >= 11.0: libpath += [os.path.join(ref, r'CommonConfiguration\Neutral')] if self.vc_ver >= 14.0: libpath += [ ref, os.path.join(self.si.WindowsSdkDir, 'UnionMetadata'), os.path.join( ref, 'Windows.Foundation.UniversalApiContract', '1.0.0.0', ), os.path.join( ref, 'Windows.Foundation.FoundationContract', '1.0.0.0', ), os.path.join( ref, 'Windows.Networking.Connectivity.WwanContract', '1.0.0.0', ), os.path.join( self.si.WindowsSdkDir, 
'ExtensionSDKs', 'Microsoft.VCLibs', '%0.1f' % self.vc_ver, 'References', 'CommonConfiguration', 'neutral', ), ] return libpath @property def SdkTools(self): """ Microsoft Windows SDK Tools """ return list(self._sdk_tools()) def _sdk_tools(self): """ Microsoft Windows SDK Tools paths generator """ if self.vc_ver < 15.0: bin_dir = 'Bin' if self.vc_ver <= 11.0 else r'Bin\x86' yield os.path.join(self.si.WindowsSdkDir, bin_dir) if not self.pi.current_is_x86(): arch_subdir = self.pi.current_dir(x64=True) path = 'Bin%s' % arch_subdir yield os.path.join(self.si.WindowsSdkDir, path) if self.vc_ver == 10.0 or self.vc_ver == 11.0: if self.pi.target_is_x86(): arch_subdir = '' else: arch_subdir = self.pi.current_dir(hidex86=True, x64=True) path = r'Bin\NETFX 4.0 Tools%s' % arch_subdir yield os.path.join(self.si.WindowsSdkDir, path) elif self.vc_ver >= 15.0: path = os.path.join(self.si.WindowsSdkDir, 'Bin') arch_subdir = self.pi.current_dir(x64=True) sdkver = self.si.WindowsSdkLastVersion yield os.path.join(path, '%s%s' % (sdkver, arch_subdir)) if self.si.WindowsSDKExecutablePath: yield self.si.WindowsSDKExecutablePath @property def _sdk_subdir(self): """ Microsoft Windows SDK version subdir """ ucrtver = self.si.WindowsSdkLastVersion return ('%s\\' % ucrtver) if ucrtver else '' @property def SdkSetup(self): """ Microsoft Windows SDK Setup """ if self.vc_ver > 9.0: return [] return [os.path.join(self.si.WindowsSdkDir, 'Setup')] @property def FxTools(self): """ Microsoft .NET Framework Tools """ pi = self.pi si = self.si if self.vc_ver <= 10.0: include32 = True include64 = not pi.target_is_x86() and not pi.current_is_x86() else: include32 = pi.target_is_x86() or pi.current_is_x86() include64 = pi.current_cpu == 'amd64' or pi.target_cpu == 'amd64' tools = [] if include32: tools += [os.path.join(si.FrameworkDir32, ver) for ver in si.FrameworkVersion32] if include64: tools += [os.path.join(si.FrameworkDir64, ver) for ver in si.FrameworkVersion64] return tools @property def NetFxSDKLibraries(self): """ Microsoft .Net Framework SDK Libraries """ if self.vc_ver < 14.0 or not self.si.NetFxSdkDir: return [] arch_subdir = self.pi.target_dir(x64=True) return [os.path.join(self.si.NetFxSdkDir, r'lib\um%s' % arch_subdir)] @property def NetFxSDKIncludes(self): """ Microsoft .Net Framework SDK Includes """ if self.vc_ver < 14.0 or not self.si.NetFxSdkDir: return [] return [os.path.join(self.si.NetFxSdkDir, r'include\um')] @property def VsTDb(self): """ Microsoft Visual Studio Team System Database """ return [os.path.join(self.si.VSInstallDir, r'VSTSDB\Deploy')] @property def MSBuild(self): """ Microsoft Build Engine """ if self.vc_ver < 12.0: return [] elif self.vc_ver < 15.0: base_path = self.si.ProgramFilesx86 arch_subdir = self.pi.current_dir(hidex86=True) else: base_path = self.si.VSInstallDir arch_subdir = '' path = r'MSBuild\%0.1f\bin%s' % (self.vc_ver, arch_subdir) build = [os.path.join(base_path, path)] if self.vc_ver >= 15.0: # Add Roslyn C# & Visual Basic Compiler build += [os.path.join(base_path, path, 'Roslyn')] return build @property def HTMLHelpWorkshop(self): """ Microsoft HTML Help Workshop """ if self.vc_ver < 11.0: return [] return [os.path.join(self.si.ProgramFilesx86, 'HTML Help Workshop')] @property def UCRTLibraries(self): """ Microsoft Universal C Runtime SDK Libraries """ if self.vc_ver < 14.0: return [] arch_subdir = self.pi.target_dir(x64=True) lib = os.path.join(self.si.UniversalCRTSdkDir, 'lib') ucrtver = self._ucrt_subdir return [os.path.join(lib, '%sucrt%s' % (ucrtver, arch_subdir))] 
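# Usage sketch (illustrative only, not a documented public API contract):
# this mirrors how msvc9_query_vcvarsall() and msvc14_get_vc_env() above fall
# back to EnvironmentInfo when vcvarsall.bat cannot be used, e.g.
#
#     env = EnvironmentInfo('x86_amd64', vc_min_ver=14.0).return_env()
#     # env maps 'include', 'lib', 'libpath' and 'path' to pathsep-joined
#     # strings of existing directories (see return_env() below).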
@property def UCRTIncludes(self): """ Microsoft Universal C Runtime SDK Include """ if self.vc_ver < 14.0: return [] include = os.path.join(self.si.UniversalCRTSdkDir, 'include') return [os.path.join(include, '%sucrt' % self._ucrt_subdir)] @property def _ucrt_subdir(self): """ Microsoft Universal C Runtime SDK version subdir """ ucrtver = self.si.UniversalCRTSdkLastVersion return ('%s\\' % ucrtver) if ucrtver else '' @property def FSharp(self): """ Microsoft Visual F# """ if self.vc_ver < 11.0 and self.vc_ver > 12.0: return [] return self.si.FSharpInstallDir @property def VCRuntimeRedist(self): """ Microsoft Visual C++ runtime redistribuable dll """ arch_subdir = self.pi.target_dir(x64=True) if self.vc_ver < 15: redist_path = self.si.VCInstallDir vcruntime = 'redist%s\\Microsoft.VC%d0.CRT\\vcruntime%d0.dll' else: redist_path = self.si.VCInstallDir.replace('\\Tools', '\\Redist') vcruntime = 'onecore%s\\Microsoft.VC%d0.CRT\\vcruntime%d0.dll' # Visual Studio 2017 is still Visual C++ 14.0 dll_ver = 14.0 if self.vc_ver == 15 else self.vc_ver vcruntime = vcruntime % (arch_subdir, self.vc_ver, dll_ver) return os.path.join(redist_path, vcruntime) def return_env(self, exists=True): """ Return environment dict. Parameters ---------- exists: bool It True, only return existing paths. """ env = dict( include=self._build_paths('include', [self.VCIncludes, self.OSIncludes, self.UCRTIncludes, self.NetFxSDKIncludes], exists), lib=self._build_paths('lib', [self.VCLibraries, self.OSLibraries, self.FxTools, self.UCRTLibraries, self.NetFxSDKLibraries], exists), libpath=self._build_paths('libpath', [self.VCLibraries, self.FxTools, self.VCStoreRefs, self.OSLibpath], exists), path=self._build_paths('path', [self.VCTools, self.VSTools, self.VsTDb, self.SdkTools, self.SdkSetup, self.FxTools, self.MSBuild, self.HTMLHelpWorkshop, self.FSharp], exists), ) if self.vc_ver >= 14 and os.path.isfile(self.VCRuntimeRedist): env['py_vcruntime_redist'] = self.VCRuntimeRedist return env def _build_paths(self, name, spec_path_lists, exists): """ Given an environment variable name and specified paths, return a pathsep-separated string of paths containing unique, extant, directories from those paths and from the environment variable. Raise an error if no paths are resolved. """ # flatten spec_path_lists spec_paths = itertools.chain.from_iterable(spec_path_lists) env_paths = safe_env.get(name, '').split(os.pathsep) paths = itertools.chain(spec_paths, env_paths) extant_paths = list(filter(os.path.isdir, paths)) if exists else paths if not extant_paths: msg = "%s environment variable is empty" % name.upper() raise distutils.errors.DistutilsPlatformError(msg) unique_paths = self._unique_everseen(extant_paths) return os.pathsep.join(unique_paths) # from Python docs def _unique_everseen(self, iterable, key=None): """ List unique elements, preserving order. Remember all elements ever seen. _unique_everseen('AAAABBBCCDAABBB') --> A B C D _unique_everseen('ABBCcAD', str.lower) --> A B C D """ seen = set() seen_add = seen.add if key is None: for element in filterfalse(seen.__contains__, iterable): seen_add(element) yield element else: for element in iterable: k = key(element) if k not in seen: seen_add(k) yield element # # Copyright 2015 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Test for gen_client module.""" import os import unittest from apitools.gen import gen_client from apitools.gen import test_utils def GetTestDataPath(*path): return os.path.join(os.path.dirname(__file__), 'testdata', *path) def _GetContent(file_path): with open(file_path) as f: return f.read() class ClientGenCliTest(unittest.TestCase): def testHelp_NotEnoughArguments(self): with self.assertRaisesRegexp(SystemExit, '0'): with test_utils.CaptureOutput() as (_, err): gen_client.main([gen_client.__file__, '-h']) err_output = err.getvalue() self.assertIn('usage:', err_output) self.assertIn('error: too few arguments', err_output) def testGenClient_SimpleDocNoInit(self): with test_utils.TempDir() as tmp_dir_path: gen_client.main([ gen_client.__file__, '--init-file', 'none', '--infile', GetTestDataPath('dns', 'dns_v1.json'), '--outdir', tmp_dir_path, '--overwrite', '--root_package', 'google.apis', 'client' ]) expected_files = ( set(['dns_v1_client.py', 'dns_v1_messages.py'])) self.assertEquals(expected_files, set(os.listdir(tmp_dir_path))) def testGenClient_SimpleDocEmptyInit(self): with test_utils.TempDir() as tmp_dir_path: gen_client.main([ gen_client.__file__, '--init-file', 'empty', '--infile', GetTestDataPath('dns', 'dns_v1.json'), '--outdir', tmp_dir_path, '--overwrite', '--root_package', 'google.apis', 'client' ]) expected_files = ( set(['dns_v1_client.py', 'dns_v1_messages.py', '__init__.py'])) self.assertEquals(expected_files, set(os.listdir(tmp_dir_path))) init_file = _GetContent(os.path.join(tmp_dir_path, '__init__.py')) self.assertEqual("""\"""Package marker file.\""" from __future__ import absolute_import import pkgutil __path__ = pkgutil.extend_path(__path__, __name__) """, init_file) def testGenClient_SimpleDocWithV4(self): with test_utils.TempDir() as tmp_dir_path: gen_client.main([ gen_client.__file__, '--infile', GetTestDataPath('dns', 'dns_v1.json'), '--outdir', tmp_dir_path, '--overwrite', '--apitools_version', '0.4.12', '--root_package', 'google.apis', 'client' ]) self.assertEquals( set(['dns_v1_client.py', 'dns_v1_messages.py', '__init__.py']), set(os.listdir(tmp_dir_path))) def testGenClient_SimpleDocWithV5(self): with test_utils.TempDir() as tmp_dir_path: gen_client.main([ gen_client.__file__, '--infile', GetTestDataPath('dns', 'dns_v1.json'), '--outdir', tmp_dir_path, '--overwrite', '--apitools_version', '0.5.0', '--root_package', 'google.apis', 'client' ]) self.assertEquals( set(['dns_v1_client.py', 'dns_v1_messages.py', '__init__.py']), set(os.listdir(tmp_dir_path))) def testGenPipPackage_SimpleDoc(self): with test_utils.TempDir() as tmp_dir_path: gen_client.main([ gen_client.__file__, '--infile', GetTestDataPath('dns', 'dns_v1.json'), '--outdir', tmp_dir_path, '--overwrite', '--root_package', 'google.apis', 'pip_package' ]) self.assertEquals( set(['apitools', 'setup.py']), set(os.listdir(tmp_dir_path))) def testGenProto_SimpleDoc(self): with test_utils.TempDir() as tmp_dir_path: gen_client.main([ gen_client.__file__, '--infile', GetTestDataPath('dns', 'dns_v1.json'), '--outdir', tmp_dir_path, '--overwrite', '--root_package', 'google.apis', 'proto' ]) self.assertEquals( 
set(['dns_v1_messages.proto', 'dns_v1_services.proto']), set(os.listdir(tmp_dir_path))) """Suite Metrowerks Shell Suite: Events supported by the Metrowerks Project Shell Level 1, version 1 Generated from /Volumes/Sap/Applications (Mac OS 9)/Metrowerks CodeWarrior 7.0/Metrowerks CodeWarrior/CodeWarrior IDE 4.2.5 AETE/AEUT resource version 1/0, language 0, script 0 """ import aetools import MacOS _code = 'MMPR' class Metrowerks_Shell_Suite_Events: _argmap_Add_Files = { 'To_Segment' : 'Segm', } def Add_Files(self, _object, _attributes={}, **_arguments): """Add Files: Add the specified file(s) to the current project Required argument: List of files to add Keyword argument To_Segment: Segment number into which to add the file(s) Keyword argument _attributes: AppleEvent attribute dictionary Returns: Error code for each file added """ _code = 'MMPR' _subcode = 'AddF' aetools.keysubst(_arguments, self._argmap_Add_Files) _arguments['----'] = _object _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] _argmap_Check_Syntax = { 'ExternalEditor' : 'Errs', } def Check_Syntax(self, _object, _attributes={}, **_arguments): """Check Syntax: Check the syntax of the specified file(s) Required argument: List of files to check the syntax of Keyword argument ExternalEditor: Should the contents of the message window be returned to the caller? Keyword argument _attributes: AppleEvent attribute dictionary Returns: Errors for each file whose syntax was checked """ _code = 'MMPR' _subcode = 'Chek' aetools.keysubst(_arguments, self._argmap_Check_Syntax) _arguments['----'] = _object _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] def Close_Project(self, _no_object=None, _attributes={}, **_arguments): """Close Project: Close the current project Keyword argument _attributes: AppleEvent attribute dictionary """ _code = 'MMPR' _subcode = 'ClsP' if _arguments: raise TypeError, 'No optional args expected' if _no_object is not None: raise TypeError, 'No direct arg expected' _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] _argmap_Close_Window = { 'Saving' : 'savo', } def Close_Window(self, _object, _attributes={}, **_arguments): """Close Window: Close the windows showing the specified files Required argument: The files to close Keyword argument Saving: Whether to save changes to each file before closing its window Keyword argument _attributes: AppleEvent attribute dictionary """ _code = 'MMPR' _subcode = 'ClsW' aetools.keysubst(_arguments, self._argmap_Close_Window) _arguments['----'] = _object aetools.enumsubst(_arguments, 'savo', _Enum_savo) _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] _argmap_Compile = { 'ExternalEditor' : 'Errs', } def Compile(self, _object, _attributes={}, **_arguments): """Compile: 
Compile the specified file(s) Required argument: List of files to compile Keyword argument ExternalEditor: Should the contents of the message window be returned to the caller? Keyword argument _attributes: AppleEvent attribute dictionary Returns: Errors for each file compiled """ _code = 'MMPR' _subcode = 'Comp' aetools.keysubst(_arguments, self._argmap_Compile) _arguments['----'] = _object _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] _argmap_Create_Project = { 'from_stationery' : 'Tmpl', } def Create_Project(self, _object, _attributes={}, **_arguments): """Create Project: Create a new project file Required argument: New project file specifier Keyword argument from_stationery: undocumented, typecode 'alis' Keyword argument _attributes: AppleEvent attribute dictionary """ _code = 'MMPR' _subcode = 'NewP' aetools.keysubst(_arguments, self._argmap_Create_Project) _arguments['----'] = _object _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] def Get_Definition(self, _object, _attributes={}, **_arguments): """Get Definition: Returns the location(s) of a globally scoped function or data object. Required argument: undocumented, typecode 'TEXT' Keyword argument _attributes: AppleEvent attribute dictionary Returns: undocumented, typecode 'FDef' """ _code = 'MMPR' _subcode = 'GDef' if _arguments: raise TypeError, 'No optional args expected' _arguments['----'] = _object _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] def Get_Open_Documents(self, _no_object=None, _attributes={}, **_arguments): """Get Open Documents: Returns the list of open documents Keyword argument _attributes: AppleEvent attribute dictionary Returns: The list of documents """ _code = 'MMPR' _subcode = 'GDoc' if _arguments: raise TypeError, 'No optional args expected' if _no_object is not None: raise TypeError, 'No direct arg expected' _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] _argmap_Get_Preferences = { 'of' : 'PRec', 'from_panel' : 'PNam', } def Get_Preferences(self, _no_object=None, _attributes={}, **_arguments): """Get Preferences: Get the preferences for the current project Keyword argument of: Names of requested preferences Keyword argument from_panel: Name of the preference panel Keyword argument _attributes: AppleEvent attribute dictionary Returns: The requested preferences """ _code = 'MMPR' _subcode = 'Gref' aetools.keysubst(_arguments, self._argmap_Get_Preferences) if _no_object is not None: raise TypeError, 'No direct arg expected' _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] 
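# Note: every event wrapper in this suite follows the same generated pattern:
# aetools.keysubst() maps the Python keyword names to their four-character
# AppleEvent codes, the direct object (if any) is stored under '----',
# self.send() dispatches the event, a non-zero 'errn' in the reply raises
# aetools.Error, and the reply's '----' value (when present) is returned.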
_argmap_Get_Project_File = { 'Segment' : 'Segm', } def Get_Project_File(self, _object, _attributes={}, **_arguments): """Get Project File: Returns a description of a file in the project window. Required argument: The index of the file within its segment. Keyword argument Segment: The segment containing the file. Keyword argument _attributes: AppleEvent attribute dictionary Returns: undocumented, typecode 'SrcF' """ _code = 'MMPR' _subcode = 'GFil' aetools.keysubst(_arguments, self._argmap_Get_Project_File) _arguments['----'] = _object _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] def Get_Project_Specifier(self, _no_object=None, _attributes={}, **_arguments): """Get Project Specifier: Return the File Specifier for the current project Keyword argument _attributes: AppleEvent attribute dictionary Returns: File Specifier for the current project """ _code = 'MMPR' _subcode = 'GetP' if _arguments: raise TypeError, 'No optional args expected' if _no_object is not None: raise TypeError, 'No direct arg expected' _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] def Get_Segments(self, _no_object=None, _attributes={}, **_arguments): """Get Segments: Returns a description of each segment in the project. Keyword argument _attributes: AppleEvent attribute dictionary Returns: undocumented, typecode 'Seg ' """ _code = 'MMPR' _subcode = 'GSeg' if _arguments: raise TypeError, 'No optional args expected' if _no_object is not None: raise TypeError, 'No direct arg expected' _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] def Get_member_function_names(self, _object, _attributes={}, **_arguments): """Get member function names: Returns a list containing the names of all the member functions of a class object Required argument: must be a class object Keyword argument _attributes: AppleEvent attribute dictionary Returns: undocumented, typecode 'list' """ _code = 'MMPR' _subcode = 'MbFN' if _arguments: raise TypeError, 'No optional args expected' _arguments['----'] = _object _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] def Get_nonsimple_classes(self, _no_object=None, _attributes={}, **_arguments): """Get nonsimple classes: Returns an alphabetical list of classes with member functions, bases classes, or subclasses Keyword argument _attributes: AppleEvent attribute dictionary Returns: undocumented, typecode 'list' """ _code = 'MMPR' _subcode = 'NsCl' if _arguments: raise TypeError, 'No optional args expected' if _no_object is not None: raise TypeError, 'No direct arg expected' _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if 
_arguments.has_key('----'): return _arguments['----'] def Goto_Function(self, _object, _attributes={}, **_arguments): """Goto Function: Goto Specified Function Name Required argument: undocumented, typecode 'TEXT' Keyword argument _attributes: AppleEvent attribute dictionary """ _code = 'MMPR' _subcode = 'GoFn' if _arguments: raise TypeError, 'No optional args expected' _arguments['----'] = _object _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] def Goto_Line(self, _object, _attributes={}, **_arguments): """Goto Line: Goto Specified Line Number Required argument: The requested source file line number Keyword argument _attributes: AppleEvent attribute dictionary """ _code = 'MMPR' _subcode = 'GoLn' if _arguments: raise TypeError, 'No optional args expected' _arguments['----'] = _object _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] def Is_In_Project(self, _object, _attributes={}, **_arguments): """Is In Project: Whether or not the specified file(s) is in the current project Required argument: List of files to check for project membership Keyword argument _attributes: AppleEvent attribute dictionary Returns: Result code for each file """ _code = 'MMPR' _subcode = 'FInP' if _arguments: raise TypeError, 'No optional args expected' _arguments['----'] = _object _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] _argmap_Make_Project = { 'ExternalEditor' : 'Errs', } def Make_Project(self, _no_object=None, _attributes={}, **_arguments): """Make Project: Make the current project Keyword argument ExternalEditor: Should the contents of the message window be returned to the caller? 
Keyword argument _attributes: AppleEvent attribute dictionary Returns: Errors that occurred while making the project """ _code = 'MMPR' _subcode = 'Make' aetools.keysubst(_arguments, self._argmap_Make_Project) if _no_object is not None: raise TypeError, 'No direct arg expected' _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] def Open_browser(self, _object, _attributes={}, **_arguments): """Open browser: Display a class, member function, or data member object in a single class browser window Required argument: an AE object reference Keyword argument _attributes: AppleEvent attribute dictionary """ _code = 'MMPR' _subcode = 'Brow' if _arguments: raise TypeError, 'No optional args expected' _arguments['----'] = _object _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] _argmap_Precompile = { 'Saving_As' : 'Targ', 'ExternalEditor' : 'Errs', } def Precompile(self, _object, _attributes={}, **_arguments): """Precompile: Precompile the specified file to the specified destination file Required argument: File to precompile Keyword argument Saving_As: Destination file for precompiled header Keyword argument ExternalEditor: Should the contents of the message window be returned to the caller? Keyword argument _attributes: AppleEvent attribute dictionary Returns: Errors for the precompiled file """ _code = 'MMPR' _subcode = 'PreC' aetools.keysubst(_arguments, self._argmap_Precompile) _arguments['----'] = _object _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] _argmap_Preprocess = { 'ExternalEditor' : 'Errs', } def Preprocess(self, _object, _attributes={}, **_arguments): """Preprocess: Preprocesses the specified file(s) Required argument: undocumented, typecode 'alis' Keyword argument ExternalEditor: undocumented, typecode 'bool' Keyword argument _attributes: AppleEvent attribute dictionary Returns: Errors for each preprocessed file """ _code = 'MMPR' _subcode = 'PreP' aetools.keysubst(_arguments, self._argmap_Preprocess) _arguments['----'] = _object _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] def Remove_Binaries(self, _no_object=None, _attributes={}, **_arguments): """Remove Binaries: Remove the binary object code from the current project Keyword argument _attributes: AppleEvent attribute dictionary """ _code = 'MMPR' _subcode = 'RemB' if _arguments: raise TypeError, 'No optional args expected' if _no_object is not None: raise TypeError, 'No direct arg expected' _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] def Remove_Files(self, _object, _attributes={}, **_arguments): 
"""Remove Files: Remove the specified file(s) from the current project Required argument: List of files to remove Keyword argument _attributes: AppleEvent attribute dictionary Returns: Error code for each file removed """ _code = 'MMPR' _subcode = 'RemF' if _arguments: raise TypeError, 'No optional args expected' _arguments['----'] = _object _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] def Reset_File_Paths(self, _no_object=None, _attributes={}, **_arguments): """Reset File Paths: Resets access paths for all files belonging to open project. Keyword argument _attributes: AppleEvent attribute dictionary """ _code = 'MMPR' _subcode = 'ReFP' if _arguments: raise TypeError, 'No optional args expected' if _no_object is not None: raise TypeError, 'No direct arg expected' _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] _argmap_Run_Project = { 'ExternalEditor' : 'Errs', 'SourceDebugger' : 'DeBg', } def Run_Project(self, _no_object=None, _attributes={}, **_arguments): """Run Project: Run the current project Keyword argument ExternalEditor: Should the contents of the message window be returned to the caller? Keyword argument SourceDebugger: Run the application under the control of the source-level debugger Keyword argument _attributes: AppleEvent attribute dictionary Returns: Errors that occurred when running the project """ _code = 'MMPR' _subcode = 'RunP' aetools.keysubst(_arguments, self._argmap_Run_Project) if _no_object is not None: raise TypeError, 'No direct arg expected' _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] def Save_Error_Window_As(self, _object, _attributes={}, **_arguments): """Save Error Window As: Saves the Errors & Warnings window as a text file Required argument: Destination file for Save Message Window As Keyword argument _attributes: AppleEvent attribute dictionary """ _code = 'MMPR' _subcode = 'SvMs' if _arguments: raise TypeError, 'No optional args expected' _arguments['----'] = _object _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] def Set_Current_Target(self, _object=None, _attributes={}, **_arguments): """Set Current Target: Set the current target of a project Required argument: Name of target Keyword argument _attributes: AppleEvent attribute dictionary """ _code = 'MMPR' _subcode = 'STrg' if _arguments: raise TypeError, 'No optional args expected' _arguments['----'] = _object _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] def Set_Default_Project(self, _object, _attributes={}, **_arguments): """Set Default Project: Set the default 
project Required argument: Name of project Keyword argument _attributes: AppleEvent attribute dictionary """ _code = 'MMPR' _subcode = 'SDfP' if _arguments: raise TypeError, 'No optional args expected' _arguments['----'] = _object _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] _argmap_Set_Modification_Date = { 'to' : 'MDat', } def Set_Modification_Date(self, _object, _attributes={}, **_arguments): """Set Modification Date: Changes the internal modification date of the specified file(s) Required argument: List of files Keyword argument to: undocumented, typecode 'ldt ' Keyword argument _attributes: AppleEvent attribute dictionary Returns: Error code for each modified file """ _code = 'MMPR' _subcode = 'SMod' aetools.keysubst(_arguments, self._argmap_Set_Modification_Date) _arguments['----'] = _object _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] _argmap_Set_Preferences = { 'of_panel' : 'PNam', 'to' : 'PRec', } def Set_Preferences(self, _no_object=None, _attributes={}, **_arguments): """Set Preferences: Set the preferences for the current project Keyword argument of_panel: Name of the preference panel Keyword argument to: Preferences settings Keyword argument _attributes: AppleEvent attribute dictionary """ _code = 'MMPR' _subcode = 'Pref' aetools.keysubst(_arguments, self._argmap_Set_Preferences) if _no_object is not None: raise TypeError, 'No direct arg expected' _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] _argmap_Set_Project_File = { 'to' : 'SrcS', } def Set_Project_File(self, _object, _attributes={}, **_arguments): """Set Project File: Changes the settings for a given file in the project. Required argument: The name of the file Keyword argument to: The new settings for the file Keyword argument _attributes: AppleEvent attribute dictionary """ _code = 'MMPR' _subcode = 'SFil' aetools.keysubst(_arguments, self._argmap_Set_Project_File) _arguments['----'] = _object _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] _argmap_Set_Segment = { 'to' : 'Segm', } def Set_Segment(self, _object, _attributes={}, **_arguments): """Set Segment: Changes the name and attributes of a segment. Required argument: The segment to change Keyword argument to: The new name and attributes for the segment. 
Keyword argument _attributes: AppleEvent attribute dictionary """ _code = 'MMPR' _subcode = 'SSeg' aetools.keysubst(_arguments, self._argmap_Set_Segment) _arguments['----'] = _object _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] def Touch(self, _object, _attributes={}, **_arguments): """Touch: Force recompilation of the specified file(s) Required argument: List of files to compile Keyword argument _attributes: AppleEvent attribute dictionary Returns: Error code for each file touched """ _code = 'MMPR' _subcode = 'Toch' if _arguments: raise TypeError, 'No optional args expected' _arguments['----'] = _object _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] _argmap_Update_Project = { 'ExternalEditor' : 'Errs', } def Update_Project(self, _no_object=None, _attributes={}, **_arguments): """Update Project: Update the current project Keyword argument ExternalEditor: Should the contents of the message window be returned to the caller? Keyword argument _attributes: AppleEvent attribute dictionary Returns: Errors that occurred while updating the project """ _code = 'MMPR' _subcode = 'UpdP' aetools.keysubst(_arguments, self._argmap_Update_Project) if _no_object is not None: raise TypeError, 'No direct arg expected' _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] class Browser_Coloring(aetools.ComponentItem): """Browser Coloring - Colors for Browser symbols. """ want = 'BRKW' class _Prop_Browser_Keywords(aetools.NProperty): """Browser Keywords - Mark Browser symbols with color. """ which = 'BW00' want = 'bool' class _Prop_Classes_Color(aetools.NProperty): """Classes Color - The color for classes. """ which = 'BW01' want = 'cRGB' class _Prop_Constants_Color(aetools.NProperty): """Constants Color - The color for constants. """ which = 'BW02' want = 'cRGB' class _Prop_Enums_Color(aetools.NProperty): """Enums Color - The color for enums. """ which = 'BW03' want = 'cRGB' class _Prop_Functions_Color(aetools.NProperty): """Functions Color - Set color for functions. """ which = 'BW04' want = 'cRGB' class _Prop_Globals_Color(aetools.NProperty): """Globals Color - The color for globals """ which = 'BW05' want = 'cRGB' class _Prop_Macros_Color(aetools.NProperty): """Macros Color - The color for macros. """ which = 'BW06' want = 'cRGB' class _Prop_Template_Commands_in_Menu(aetools.NProperty): """Template Commands in Menu - Include template commands in context menus """ which = 'BW10' want = 'bool' class _Prop_Templates_Color(aetools.NProperty): """Templates Color - Set color for templates. """ which = 'BW07' want = 'cRGB' class _Prop_Typedefs_Color(aetools.NProperty): """Typedefs Color - The color for typedefs. """ which = 'BW08' want = 'cRGB' class Build_Settings(aetools.ComponentItem): """Build Settings - Build Settings preferences. """ want = 'BSTG' class _Prop_Build_Before_Running(aetools.NProperty): """Build Before Running - Build the target before running. 
""" which = 'BX04' want = 'BXbr' class _Prop_Compiler_Thread_Stack_Size(aetools.NProperty): """Compiler Thread Stack Size - Compiler Thread Stack Size """ which = 'BX06' want = 'long' class _Prop_Completion_Sound(aetools.NProperty): """Completion Sound - Play a sound when finished a Bring Up To Date or Make command. """ which = 'BX01' want = 'bool' class _Prop_Failure_Sound(aetools.NProperty): """Failure Sound - The sound CodeWarrior plays when it cannot finish a Bring Up To Date or Make command. """ which = 'BX03' want = 'TEXT' class _Prop_Include_Cache_Size(aetools.NProperty): """Include Cache Size - Include file cache size. """ which = 'BX05' want = 'long' class _Prop_Save_Before_Building(aetools.NProperty): """Save Before Building - Save open editor files before build operations """ which = 'BX07' want = 'bool' class _Prop_Success_Sound(aetools.NProperty): """Success Sound - The sound CodeWarrior plays when it successfully finishes a Bring Up To Date or Make command. """ which = 'BX02' want = 'TEXT' class base_class(aetools.ComponentItem): """base class - A base class or super class of a class """ want = 'BsCl' class _Prop_access(aetools.NProperty): """access - """ which = 'Acce' want = 'Acce' class _Prop_class_(aetools.NProperty): """class - The class object corresponding to this base class """ which = 'Clas' want = 'obj ' class _Prop_virtual(aetools.NProperty): """virtual - """ which = 'Virt' want = 'bool' base_classes = base_class class Custom_Keywords(aetools.ComponentItem): """Custom Keywords - """ want = 'CUKW' class _Prop_Custom_Color_1(aetools.NProperty): """Custom Color 1 - The color for the first set of custom keywords. """ which = 'GH05' want = 'cRGB' class _Prop_Custom_Color_2(aetools.NProperty): """Custom Color 2 - The color for the second set custom keywords. """ which = 'GH06' want = 'cRGB' class _Prop_Custom_Color_3(aetools.NProperty): """Custom Color 3 - The color for the third set of custom keywords. """ which = 'GH07' want = 'cRGB' class _Prop_Custom_Color_4(aetools.NProperty): """Custom Color 4 - The color for the fourth set of custom keywords. """ which = 'GH08' want = 'cRGB' class browser_catalog(aetools.ComponentItem): """browser catalog - The browser symbol catalog for the current project """ want = 'Cata' # element 'Clas' as ['indx', 'name'] class class_(aetools.ComponentItem): """class - A class, struct, or record type in the current project. 
""" want = 'Clas' class _Prop_all_subclasses(aetools.NProperty): """all subclasses - the classes directly or indirectly derived from this class """ which = 'SubA' want = 'Clas' class _Prop_declaration_end_offset(aetools.NProperty): """declaration end offset - End of class declaration """ which = 'DcEn' want = 'long' class _Prop_declaration_file(aetools.NProperty): """declaration file - Source file containing the class declaration """ which = 'DcFl' want = 'fss ' class _Prop_declaration_start_offset(aetools.NProperty): """declaration start offset - Start of class declaration source code """ which = 'DcSt' want = 'long' class _Prop_language(aetools.NProperty): """language - Implementation language of this class """ which = 'Lang' want = 'Lang' class _Prop_name(aetools.NProperty): """name - """ which = 'pnam' want = 'TEXT' class _Prop_subclasses(aetools.NProperty): """subclasses - the immediate subclasses of this class """ which = 'SubC' want = 'Clas' # element 'BsCl' as ['indx'] # element 'DtMb' as ['indx', 'name'] # element 'MbFn' as ['indx', 'name'] classes = class_ class Debugger_Display(aetools.ComponentItem): """Debugger Display - Debugger Display preferences """ want = 'DbDS' class _Prop_Default_Array_Size(aetools.NProperty): """Default Array Size - Controls whether CodeWarrior uses its own integrated editor or an external application for editing text files. """ which = 'Db08' want = 'shor' class _Prop_Show_As_Decimal(aetools.NProperty): """Show As Decimal - Show variable values as decimal by default """ which = 'Db10' want = 'bool' class _Prop_Show_Locals(aetools.NProperty): """Show Locals - Show locals by default """ which = 'Db09' want = 'bool' class _Prop_Show_Variable_Types(aetools.NProperty): """Show Variable Types - Show variable types by default. """ which = 'Db01' want = 'bool' class _Prop_Sort_By_Method(aetools.NProperty): """Sort By Method - Sort functions by method. """ which = 'Db02' want = 'bool' class _Prop_Threads_in_Window(aetools.NProperty): """Threads in Window - Show threads in separate windows. """ which = 'Db04' want = 'bool' class _Prop_Use_RTTI(aetools.NProperty): """Use RTTI - Enable RunTime Type Information. """ which = 'Db03' want = 'bool' class _Prop_Variable_Changed_Hilite(aetools.NProperty): """Variable Changed Hilite - Variable changed hilite color. """ which = 'Db07' want = 'cRGB' class _Prop_Variable_Hints(aetools.NProperty): """Variable Hints - Show variable hints. """ which = 'Db05' want = 'bool' class _Prop_Watchpoint_Hilite(aetools.NProperty): """Watchpoint Hilite - Watchpoint hilite color. """ which = 'Db06' want = 'cRGB' class Debugger_Global(aetools.ComponentItem): """Debugger Global - Debugger Global preferences """ want = 'DbGL' class _Prop_Auto_Target_Libraries(aetools.NProperty): """Auto Target Libraries - Automatically target libraries when debugging """ which = 'Dg11' want = 'bool' class _Prop_Cache_Edited_Files(aetools.NProperty): """Cache Edited Files - Cache edit files between debug sessions """ which = 'Dg12' want = 'bool' class _Prop_Confirm_Kill(aetools.NProperty): """Confirm Kill - Confirm the \xd4killing\xd5 of the process. """ which = 'Dg04' want = 'bool' class _Prop_Dont_Step_in_Runtime(aetools.NProperty): """Dont Step in Runtime - Don\xd5t step into runtime code when debugging. 
""" which = 'Dg07' want = 'bool' class _Prop_File_Cache_Duration(aetools.NProperty): """File Cache Duration - Duration to keep files in cache (in days) """ which = 'Dg13' want = 'shor' class _Prop_Ignore_Mod_Dates(aetools.NProperty): """Ignore Mod Dates - Ignore modification dates of files. """ which = 'Dg01' want = 'bool' class _Prop_Launch_Apps_on_Open(aetools.NProperty): """Launch Apps on Open - Launch applications on the opening of sym files. """ which = 'Dg03' want = 'bool' class _Prop_Open_All_Classes(aetools.NProperty): """Open All Classes - Open all Java class files. """ which = 'Dg02' want = 'bool' class _Prop_Select_Stack_Crawl(aetools.NProperty): """Select Stack Crawl - Select the stack crawl. """ which = 'Dg06' want = 'bool' class _Prop_Stop_at_Main(aetools.NProperty): """Stop at Main - Stop to debug on the main() function. """ which = 'Dg05' want = 'bool' class Debugger_Target(aetools.ComponentItem): """Debugger Target - Debugger Target preferences """ want = 'DbTG' class _Prop_Cache_symbolics(aetools.NProperty): """Cache symbolics - Cache symbolics between runs when executable doesn\xd5t change, else release symbolics files after killing process. """ which = 'Dt15' want = 'bool' class _Prop_Data_Update_Interval(aetools.NProperty): """Data Update Interval - How often to update the data while running (in seconds) """ which = 'Dt09' want = 'long' class _Prop_Log_System_Messages(aetools.NProperty): """Log System Messages - Log all system messages while debugging. """ which = 'Dt02' want = 'bool' class _Prop_Relocated_Executable_Path(aetools.NProperty): """Relocated Executable Path - Path to location of relocated libraries, code resources or remote debugging folder """ which = 'Dt10' want = 'RlPt' class _Prop_Stop_at_temp_breakpoint(aetools.NProperty): """Stop at temp breakpoint - Stop at a temp breakpoint on program launch. Set breakpoint type in Temp Breakpoint Type AppleEvent. """ which = 'Dt13' want = 'bool' class _Prop_Temp_Breakpoint_Type(aetools.NProperty): """Temp Breakpoint Type - Type of temp breakpoint to set on program launch. """ which = 'Dt16' want = 'TmpB' class _Prop_Temp_breakpoint_names(aetools.NProperty): """Temp breakpoint names - Comma separated list of names to attempt to stop at on program launch. First symbol to resolve in list is the temp BP that will be set. """ which = 'Dt14' want = 'ctxt' class _Prop_Update_Data_While_Running(aetools.NProperty): """Update Data While Running - Should pause to update data while running """ which = 'Dt08' want = 'bool' class Debugger_Windowing(aetools.ComponentItem): """Debugger Windowing - """ want = 'DbWN' class _Prop_Debugging_Start_Action(aetools.NProperty): """Debugging Start Action - What action to take when debug session starts """ which = 'Dw01' want = 'DbSA' class _Prop_Do_Nothing_To_Projects(aetools.NProperty): """Do Nothing To Projects - Suppress debugging start action for project windows """ which = 'Dw02' want = 'bool' class data_member(aetools.ComponentItem): """data member - A class data member or field """ want = 'DtMb' class _Prop_static(aetools.NProperty): """static - """ which = 'Stat' want = 'bool' data_members = data_member class Editor(aetools.ComponentItem): """Editor - """ want = 'EDTR' class _Prop_Background_Color(aetools.NProperty): """Background Color - Color of the background of editor windows. """ which = 'ED13' want = 'cRGB' class _Prop_Balance(aetools.NProperty): """Balance - Flash the matching opening bracket when you type a closing bracket. 
""" which = 'ED03' want = 'bool' class _Prop_Context_Popup_Delay(aetools.NProperty): """Context Popup Delay - The amount of time, in sixtieths of a second, before the context popup is displayed if you click and hold on a browser symbol. """ which = 'ED14' want = 'long' class _Prop_Default_Text_File_Format(aetools.NProperty): """Default Text File Format - Default text file format (i.e. which type of line endings to use) """ which = 'ED17' want = 'TxtF' class _Prop_Dynamic_Scroll(aetools.NProperty): """Dynamic Scroll - Display a window\xd5s contents as you move the scroll box. """ which = 'ED02' want = 'bool' class _Prop_Flash_Delay(aetools.NProperty): """Flash Delay - The amount of time, in sixtieths of a second, the editor highlights a matching bracket. """ which = 'ED01' want = 'long' class _Prop_Left_Margin_Line_Select(aetools.NProperty): """Left Margin Line Select - Clicking in the left margin selects lines """ which = 'ED16' want = 'bool' class _Prop_Main_Text_Color(aetools.NProperty): """Main Text Color - Main, default, color for text. """ which = 'ED12' want = 'cRGB' class _Prop_Relaxed_C_Popup_Parsing(aetools.NProperty): """Relaxed C Popup Parsing - Relax the function parser for C source files """ which = 'ED15' want = 'bool' class _Prop_Remember_Font(aetools.NProperty): """Remember Font - Display a source file with its own font settings. """ which = 'ED08' want = 'bool' class _Prop_Remember_Selection(aetools.NProperty): """Remember Selection - Restore the previous selection in a file when you open it. """ which = 'ED09' want = 'bool' class _Prop_Remember_Window(aetools.NProperty): """Remember Window - Restore the last size and position for a source file window when you open it. """ which = 'ED10' want = 'bool' class _Prop_Sort_Function_Popup(aetools.NProperty): """Sort Function Popup - """ which = 'ED06' want = 'bool' class _Prop_Use_Drag__26__Drop_Editing(aetools.NProperty): """Use Drag & Drop Editing - Use Drag & Drop text editing. """ which = 'ED04' want = 'bool' class _Prop_Use_Multiple_Undo(aetools.NProperty): """Use Multiple Undo - """ which = 'ED07' want = 'bool' class Environment_Variable(aetools.ComponentItem): """Environment Variable - Environment variable for host OS """ want = 'EnvV' class _Prop_value(aetools.NProperty): """value - Value of the environment variable """ which = 'Valu' want = 'TEXT' class Error_Information(aetools.ComponentItem): """Error Information - Describes a single error or warning from the compiler or the linker. """ want = 'ErrM' class _Prop_disk_file(aetools.NProperty): """disk file - The file where the error occurred. May not be returned for certain kinds of errors (eg, link errors). """ which = 'file' want = 'fss ' class _Prop_lineNumber(aetools.NProperty): """lineNumber - The line in the file where the error occurred. May not be returned for certain kinds of errors (eg, link errors). """ which = 'ErrL' want = 'long' class _Prop_message(aetools.NProperty): """message - The error or warning message. """ which = 'ErrS' want = 'TEXT' class _Prop_messageKind(aetools.NProperty): """messageKind - The type of error or warning. """ which = 'ErrT' want = 'ErrT' class Function_Information(aetools.ComponentItem): """Function Information - Describes the location of any function or global data definition within the current project. 
""" want = 'FDef' class File_Mappings(aetools.ComponentItem): """File Mappings - Mappings of extensions & file types to compilers """ want = 'FLMP' class _Prop_Mappings(aetools.NProperty): """Mappings - """ which = 'FMps' want = 'FMap' class File_Mapping(aetools.ComponentItem): """File Mapping - """ want = 'FMap' class _Prop_Compiler(aetools.NProperty): """Compiler - """ which = 'TA07' want = 'TEXT' class _Prop_Extension(aetools.NProperty): """Extension - """ which = 'TA02' want = 'TEXT' class _Prop_File_Type(aetools.NProperty): """File Type - """ which = 'PR04' want = 'TEXT' class _Prop_Ignored_by_Make(aetools.NProperty): """Ignored by Make - """ which = 'TA06' want = 'bool' class _Prop_Launchable(aetools.NProperty): """Launchable - """ which = 'TA05' want = 'bool' class _Prop_Precompiled(aetools.NProperty): """Precompiled - """ which = 'TA03' want = 'bool' class _Prop_Resource_File(aetools.NProperty): """Resource File - """ which = 'TA04' want = 'bool' class Global_Source_Trees(aetools.ComponentItem): """Global Source Trees - Globally-defined source tree roots """ want = 'GSTs' class _Prop_Source_Trees(aetools.NProperty): """Source Trees - List of source tree roots """ which = 'ST01' want = 'SrcT' class Extras(aetools.ComponentItem): """Extras - """ want = 'GXTR' class _Prop_Automatic_Toolbar_Help(aetools.NProperty): """Automatic Toolbar Help - Automatically show balloon help in toolbar after delay """ which = 'EX19' want = 'bool' class _Prop_External_Reference(aetools.NProperty): """External Reference - Which on-line function reference to use. """ which = 'EX08' want = 'RefP' class _Prop_Full_Screen_Zoom(aetools.NProperty): """Full Screen Zoom - Zoom windows to the full screen width. """ which = 'EX07' want = 'bool' class _Prop_Recent_Editor_Count(aetools.NProperty): """Recent Editor Count - Maximum number of editor documents to show in the \xd2Open Recent\xd3 menu """ which = 'EX16' want = 'shor' class _Prop_Recent_Project_Count(aetools.NProperty): """Recent Project Count - Maximum number of project documents to show in the \xd2Open Recent\xd3 menu """ which = 'EX17' want = 'shor' class _Prop_Use_Editor_Extensions(aetools.NProperty): """Use Editor Extensions - Controls the use of the Editor Extensions menu """ which = 'EX10' want = 'bool' class _Prop_Use_External_Editor(aetools.NProperty): """Use External Editor - Controls whether CodeWarrior uses its own integrated editor or an external application for editing text files. """ which = 'EX11' want = 'bool' class _Prop_Use_Script_Menu(aetools.NProperty): """Use Script Menu - Controls the use of the AppleScript menu """ which = 'EX12' want = 'bool' class _Prop_Use_ToolServer_Menu(aetools.NProperty): """Use ToolServer Menu - Controls the use of the ToolServer menu """ which = 'EX18' want = 'bool' class Build_Extras(aetools.ComponentItem): """Build Extras - """ want = 'LXTR' class _Prop_Browser_Active(aetools.NProperty): """Browser Active - Allow the collection of browser information. """ which = 'EX09' want = 'bool' class _Prop_Cache_Subproject_Data(aetools.NProperty): """Cache Subproject Data - """ which = 'EX31' want = 'bool' class _Prop_Dump_Browser_Info(aetools.NProperty): """Dump Browser Info - """ which = 'EX30' want = 'bool' class _Prop_Modification_Date_Caching(aetools.NProperty): """Modification Date Caching - """ which = 'EX04' want = 'bool' class member_function(aetools.ComponentItem): """member function - A class member function or method. 
""" want = 'MbFn' class _Prop_implementation_end_offset(aetools.NProperty): """implementation end offset - end of member function definition """ which = 'DfEn' want = 'long' class _Prop_implementation_file(aetools.NProperty): """implementation file - Source file containing the member function definition """ which = 'DfFl' want = 'fss ' class _Prop_implementation_start_offset(aetools.NProperty): """implementation start offset - start of member function definition source code """ which = 'DfSt' want = 'long' member_functions = member_function class Access_Paths(aetools.ComponentItem): """Access Paths - Contains the definitions of a project\xd5s access (search) paths. """ want = 'PATH' class _Prop_Always_Full_Search(aetools.NProperty): """Always Full Search - To force the compiler to search for system includes like it searches for user includes. """ which = 'PA02' want = 'bool' class _Prop_Convert_Paths(aetools.NProperty): """Convert Paths - Enables conversion of DOS & Unix-style relative paths when searching for files. """ which = 'PA04' want = 'bool' class _Prop_Require_Framework_Includes(aetools.NProperty): """Require Framework Includes - Causes the IDE to only look in the framework access paths if a Mac OS X framework style include (i.e. ) is used. """ which = 'PA05' want = 'bool' class _Prop_System_Paths(aetools.NProperty): """System Paths - To add an access path for the include files. (Not supported in Pascal) """ which = 'PA03' want = 'PInf' class _Prop_User_Paths(aetools.NProperty): """User Paths - To add an access path for the source files. """ which = 'PA01' want = 'PInf' class Path_Information(aetools.ComponentItem): """Path Information - Contains all of the parameters that describe an access path. """ want = 'PInf' class _Prop_format(aetools.NProperty): """format - Format of the a """ which = 'Frmt' want = 'PthF' class _Prop_framework(aetools.NProperty): """framework - Is the path a Mac OS X framework style path? (This flag is readable but not writeable from AppleScript.) """ which = 'Frmw' want = 'bool' class _Prop_host_flags(aetools.NProperty): """host flags - Bit fields enabling the access path for each host OS (1 = Mac OS, 2 = Windows) """ which = 'HstF' want = 'long' class _Prop_origin(aetools.NProperty): """origin - """ which = 'Orig' want = 'PPrm' class _Prop_recursive(aetools.NProperty): """recursive - Will the path be searched recursively? (Default is true) """ which = 'Recu' want = 'bool' class _Prop_root(aetools.NProperty): """root - Name of the root of the relative path. Pre-defined values are \xd2Absolute\xd3, \xd2Project\xd3, \xd2CodeWarrior\xd3, and \xd2System\xd3. Anything else is a user-defined root. """ which = 'Root' want = 'TEXT' class Plugin_Settings(aetools.ComponentItem): """Plugin Settings - Settings for plugin tools """ want = 'PSTG' class _Prop_Disable_Third_Party_COM_Plugins(aetools.NProperty): """Disable Third Party COM Plugins - Disable COM plugins from third parties """ which = 'PX02' want = 'bool' class _Prop_Plugin_Diagnostics_Level(aetools.NProperty): """Plugin Diagnostics Level - Plugin Diagnostics Level is for those who are developing plugins for the IDE and need to debug them. 
""" which = 'PX01' want = 'PXdg' class Runtime_Settings(aetools.ComponentItem): """Runtime Settings - Runtime settings """ want = 'RSTG' class _Prop_Command_Line_Arguments(aetools.NProperty): """Command Line Arguments - Extra command line args to pass to executable """ which = 'RS02' want = 'TEXT' class _Prop_Environment_Variables(aetools.NProperty): """Environment Variables - Environment variables to use when running the executable """ which = 'RS04' want = 'EnvV' class _Prop_Host_Application(aetools.NProperty): """Host Application - Host application for running/debugging libraries and code resources """ which = 'RS01' want = 'RlPt' class _Prop_Working_Directory(aetools.NProperty): """Working Directory - Working directory to use when running the executable """ which = 'RS03' want = 'TEXT' class Relative_Path(aetools.ComponentItem): """Relative Path - Relative path from some root """ want = 'RlPt' class Shielded_Folder(aetools.ComponentItem): """Shielded Folder - """ want = 'SFit' class _Prop_Expression_To_Match(aetools.NProperty): """Expression To Match - Regular expression which describes folders to skip """ which = 'SF01' want = 'TEXT' class _Prop_Skip_Find_And_Compare_Operations(aetools.NProperty): """Skip Find And Compare Operations - Matching folders will be skipped during find and compare operations """ which = 'SF03' want = 'bool' class _Prop_Skip_Project_Operations(aetools.NProperty): """Skip Project Operations - Matching folders will be skipped during project operations """ which = 'SF02' want = 'bool' class Shielded_Folders(aetools.ComponentItem): """Shielded Folders - Folders skipped when performing project and find-and-compare operations """ want = 'SHFL' class _Prop_Shielded_Items(aetools.NProperty): """Shielded Items - """ which = 'SFis' want = 'SFit' class Syntax_Coloring(aetools.ComponentItem): """Syntax Coloring - """ want = 'SNTX' class _Prop_Comment_Color(aetools.NProperty): """Comment Color - The color for comments. """ which = 'GH02' want = 'cRGB' class _Prop_Keyword_Color(aetools.NProperty): """Keyword Color - The color for language keywords. """ which = 'GH03' want = 'cRGB' class _Prop_String_Color(aetools.NProperty): """String Color - The color for strings. """ which = 'GH04' want = 'cRGB' class _Prop_Syntax_Coloring(aetools.NProperty): """Syntax Coloring - Mark keywords and comments with color. """ which = 'GH01' want = 'bool' class Segment(aetools.ComponentItem): """Segment - A segment or group in the project """ want = 'Seg ' class _Prop_filecount(aetools.NProperty): """filecount - """ which = 'NumF' want = 'shor' class _Prop_seg_2d_locked(aetools.NProperty): """seg-locked - Is the segment locked ? [68K only] """ which = 'PLck' want = 'bool' class _Prop_seg_2d_preloaded(aetools.NProperty): """seg-preloaded - Is the segment preloaded ? [68K only] """ which = 'Prel' want = 'bool' class _Prop_seg_2d_protected(aetools.NProperty): """seg-protected - Is the segment protected ? [68K only] """ which = 'Prot' want = 'bool' class _Prop_seg_2d_purgeable(aetools.NProperty): """seg-purgeable - Is the segment purgeable ? [68K only] """ which = 'Purg' want = 'bool' class _Prop_seg_2d_system_heap(aetools.NProperty): """seg-system heap - Is the segment loaded into the system heap ? [68K only] """ which = 'SysH' want = 'bool' class ProjectFile(aetools.ComponentItem): """ProjectFile - A file contained in a project """ want = 'SrcF' class _Prop_codesize(aetools.NProperty): """codesize - The size of this file\xd5s code. 
""" which = 'CSiz' want = 'long' class _Prop_datasize(aetools.NProperty): """datasize - The size of this file\xd5s data. """ which = 'DSiz' want = 'long' class _Prop_filetype(aetools.NProperty): """filetype - What kind of file is this ? """ which = 'SrcT' want = 'SrcT' class _Prop_includes(aetools.NProperty): """includes - """ which = 'IncF' want = 'fss ' class _Prop_initialize_before(aetools.NProperty): """initialize before - Initialize the shared library before the main application. """ which = 'Bfor' want = 'bool' class _Prop_symbols(aetools.NProperty): """symbols - Are debugging symbols generated for this file ? """ which = 'SymG' want = 'bool' class _Prop_up_to_date(aetools.NProperty): """up to date - Has the file been compiled since its last modification ? """ which = 'UpTD' want = 'bool' class _Prop_weak_link(aetools.NProperty): """weak link - Is this file imported weakly into the project ? [PowerPC only] """ which = 'Weak' want = 'bool' class Source_Tree(aetools.ComponentItem): """Source Tree - User-defined source tree root """ want = 'SrcT' class _Prop_path(aetools.NProperty): """path - path for the user-defined source tree root """ which = 'Path' want = 'TEXT' class _Prop_path_kind(aetools.NProperty): """path kind - kind of path """ which = 'Kind' want = 'STKd' class Target_Settings(aetools.ComponentItem): """Target Settings - Contains the definitions of a project\xd5s target. """ want = 'TARG' class _Prop_Linker(aetools.NProperty): """Linker - The name of the current linker. """ which = 'TA01' want = 'TEXT' class _Prop_Output_Directory_Location(aetools.NProperty): """Output Directory Location - Location of output directory """ which = 'TA16' want = 'RlPt' class _Prop_Output_Directory_Origin(aetools.NProperty): """Output Directory Origin - Origin of path to output directory. Usage of this property is deprecated. Use the \xd2Output Directory Location\xd3 property instead. """ which = 'TA12' want = 'PPrm' class _Prop_Output_Directory_Path(aetools.NProperty): """Output Directory Path - Path to output directory. Usage of this property is deprecated. Use the \xd2Output Directory Location\xd3 property instead. """ which = 'TA11' want = 'TEXT' class _Prop_Post_Linker(aetools.NProperty): """Post Linker - """ which = 'TA09' want = 'TEXT' class _Prop_Pre_Linker(aetools.NProperty): """Pre Linker - """ which = 'TA13' want = 'TEXT' class _Prop_Target_Name(aetools.NProperty): """Target Name - """ which = 'TA10' want = 'TEXT' class _Prop_Use_Relative_Paths(aetools.NProperty): """Use Relative Paths - Save project entries using relative paths """ which = 'TA15' want = 'bool' class Target_Source_Trees(aetools.ComponentItem): """Target Source Trees - Target-specific user-defined source tree roots """ want = 'TSTs' class VCS_Setup(aetools.ComponentItem): """VCS Setup - The version control system preferences. """ want = 'VCSs' class _Prop_Always_Prompt(aetools.NProperty): """Always Prompt - Always show login dialog """ which = 'VC07' want = 'bool' class _Prop_Auto_Connect(aetools.NProperty): """Auto Connect - Automatically connect to database when starting. """ which = 'VC05' want = 'bool' class _Prop_Connection_Method(aetools.NProperty): """Connection Method - Name of Version Control System to use. """ which = 'VC02' want = 'TEXT' class _Prop_Database_Path(aetools.NProperty): """Database Path - Path to the VCS database. 
""" which = 'VC09' want = 'RlPt' class _Prop_Local_Path(aetools.NProperty): """Local Path - Path to the local root """ which = 'VC10' want = 'RlPt' class _Prop_Mount_Volume(aetools.NProperty): """Mount Volume - Attempt to mount the database volume if it isn't available. """ which = 'VC08' want = 'bool' class _Prop_Password(aetools.NProperty): """Password - The password for the VCS. """ which = 'VC04' want = 'TEXT' class _Prop_Store_Password(aetools.NProperty): """Store Password - Store the password. """ which = 'VC06' want = 'bool' class _Prop_Use_Global_Settings(aetools.NProperty): """Use Global Settings - Use the global VCS settings by default """ which = 'VC11' want = 'bool' class _Prop_Username(aetools.NProperty): """Username - The user name for the VCS. """ which = 'VC03' want = 'TEXT' class _Prop_VCS_Active(aetools.NProperty): """VCS Active - Use Version Control """ which = 'VC01' want = 'bool' class Font(aetools.ComponentItem): """Font - """ want = 'mFNT' class _Prop_Auto_Indent(aetools.NProperty): """Auto Indent - Indent new lines automatically. """ which = 'FN01' want = 'bool' class _Prop_Tab_Indents_Selection(aetools.NProperty): """Tab Indents Selection - Tab indents selection when multiple lines are selected """ which = 'FN03' want = 'bool' class _Prop_Tab_Inserts_Spaces(aetools.NProperty): """Tab Inserts Spaces - Insert spaces instead of tab character """ which = 'FN04' want = 'bool' class _Prop_Tab_Size(aetools.NProperty): """Tab Size - """ which = 'FN02' want = 'shor' class _Prop_Text_Font(aetools.NProperty): """Text Font - The font used in editing windows. """ which = 'ptxf' want = 'TEXT' class _Prop_Text_Size(aetools.NProperty): """Text Size - The size of the text in an editing window. """ which = 'ptps' want = 'shor' Browser_Coloring._superclassnames = [] Browser_Coloring._privpropdict = { 'Browser_Keywords' : _Prop_Browser_Keywords, 'Classes_Color' : _Prop_Classes_Color, 'Constants_Color' : _Prop_Constants_Color, 'Enums_Color' : _Prop_Enums_Color, 'Functions_Color' : _Prop_Functions_Color, 'Globals_Color' : _Prop_Globals_Color, 'Macros_Color' : _Prop_Macros_Color, 'Template_Commands_in_Menu' : _Prop_Template_Commands_in_Menu, 'Templates_Color' : _Prop_Templates_Color, 'Typedefs_Color' : _Prop_Typedefs_Color, } Browser_Coloring._privelemdict = { } Build_Settings._superclassnames = [] Build_Settings._privpropdict = { 'Build_Before_Running' : _Prop_Build_Before_Running, 'Compiler_Thread_Stack_Size' : _Prop_Compiler_Thread_Stack_Size, 'Completion_Sound' : _Prop_Completion_Sound, 'Failure_Sound' : _Prop_Failure_Sound, 'Include_Cache_Size' : _Prop_Include_Cache_Size, 'Save_Before_Building' : _Prop_Save_Before_Building, 'Success_Sound' : _Prop_Success_Sound, } Build_Settings._privelemdict = { } base_class._superclassnames = [] base_class._privpropdict = { 'access' : _Prop_access, 'class_' : _Prop_class_, 'virtual' : _Prop_virtual, } base_class._privelemdict = { } Custom_Keywords._superclassnames = [] Custom_Keywords._privpropdict = { 'Custom_Color_1' : _Prop_Custom_Color_1, 'Custom_Color_2' : _Prop_Custom_Color_2, 'Custom_Color_3' : _Prop_Custom_Color_3, 'Custom_Color_4' : _Prop_Custom_Color_4, } Custom_Keywords._privelemdict = { } browser_catalog._superclassnames = [] browser_catalog._privpropdict = { } browser_catalog._privelemdict = { 'class_' : class_, } class_._superclassnames = [] class_._privpropdict = { 'all_subclasses' : _Prop_all_subclasses, 'declaration_end_offset' : _Prop_declaration_end_offset, 'declaration_file' : _Prop_declaration_file, 'declaration_start_offset' 
: _Prop_declaration_start_offset, 'language' : _Prop_language, 'name' : _Prop_name, 'subclasses' : _Prop_subclasses, } class_._privelemdict = { 'base_class' : base_class, 'data_member' : data_member, 'member_function' : member_function, } Debugger_Display._superclassnames = [] Debugger_Display._privpropdict = { 'Default_Array_Size' : _Prop_Default_Array_Size, 'Show_As_Decimal' : _Prop_Show_As_Decimal, 'Show_Locals' : _Prop_Show_Locals, 'Show_Variable_Types' : _Prop_Show_Variable_Types, 'Sort_By_Method' : _Prop_Sort_By_Method, 'Threads_in_Window' : _Prop_Threads_in_Window, 'Use_RTTI' : _Prop_Use_RTTI, 'Variable_Changed_Hilite' : _Prop_Variable_Changed_Hilite, 'Variable_Hints' : _Prop_Variable_Hints, 'Watchpoint_Hilite' : _Prop_Watchpoint_Hilite, } Debugger_Display._privelemdict = { } Debugger_Global._superclassnames = [] Debugger_Global._privpropdict = { 'Auto_Target_Libraries' : _Prop_Auto_Target_Libraries, 'Cache_Edited_Files' : _Prop_Cache_Edited_Files, 'Confirm_Kill' : _Prop_Confirm_Kill, 'Dont_Step_in_Runtime' : _Prop_Dont_Step_in_Runtime, 'File_Cache_Duration' : _Prop_File_Cache_Duration, 'Ignore_Mod_Dates' : _Prop_Ignore_Mod_Dates, 'Launch_Apps_on_Open' : _Prop_Launch_Apps_on_Open, 'Open_All_Classes' : _Prop_Open_All_Classes, 'Select_Stack_Crawl' : _Prop_Select_Stack_Crawl, 'Stop_at_Main' : _Prop_Stop_at_Main, } Debugger_Global._privelemdict = { } Debugger_Target._superclassnames = [] Debugger_Target._privpropdict = { 'Auto_Target_Libraries' : _Prop_Auto_Target_Libraries, 'Cache_symbolics' : _Prop_Cache_symbolics, 'Data_Update_Interval' : _Prop_Data_Update_Interval, 'Log_System_Messages' : _Prop_Log_System_Messages, 'Relocated_Executable_Path' : _Prop_Relocated_Executable_Path, 'Stop_at_temp_breakpoint' : _Prop_Stop_at_temp_breakpoint, 'Temp_Breakpoint_Type' : _Prop_Temp_Breakpoint_Type, 'Temp_breakpoint_names' : _Prop_Temp_breakpoint_names, 'Update_Data_While_Running' : _Prop_Update_Data_While_Running, } Debugger_Target._privelemdict = { } Debugger_Windowing._superclassnames = [] Debugger_Windowing._privpropdict = { 'Debugging_Start_Action' : _Prop_Debugging_Start_Action, 'Do_Nothing_To_Projects' : _Prop_Do_Nothing_To_Projects, } Debugger_Windowing._privelemdict = { } data_member._superclassnames = [] data_member._privpropdict = { 'access' : _Prop_access, 'declaration_end_offset' : _Prop_declaration_end_offset, 'declaration_start_offset' : _Prop_declaration_start_offset, 'name' : _Prop_name, 'static' : _Prop_static, } data_member._privelemdict = { } Editor._superclassnames = [] Editor._privpropdict = { 'Background_Color' : _Prop_Background_Color, 'Balance' : _Prop_Balance, 'Context_Popup_Delay' : _Prop_Context_Popup_Delay, 'Default_Text_File_Format' : _Prop_Default_Text_File_Format, 'Dynamic_Scroll' : _Prop_Dynamic_Scroll, 'Flash_Delay' : _Prop_Flash_Delay, 'Left_Margin_Line_Select' : _Prop_Left_Margin_Line_Select, 'Main_Text_Color' : _Prop_Main_Text_Color, 'Relaxed_C_Popup_Parsing' : _Prop_Relaxed_C_Popup_Parsing, 'Remember_Font' : _Prop_Remember_Font, 'Remember_Selection' : _Prop_Remember_Selection, 'Remember_Window' : _Prop_Remember_Window, 'Sort_Function_Popup' : _Prop_Sort_Function_Popup, 'Use_Drag__26__Drop_Editing' : _Prop_Use_Drag__26__Drop_Editing, 'Use_Multiple_Undo' : _Prop_Use_Multiple_Undo, } Editor._privelemdict = { } Environment_Variable._superclassnames = [] Environment_Variable._privpropdict = { 'name' : _Prop_name, 'value' : _Prop_value, } Environment_Variable._privelemdict = { } Error_Information._superclassnames = [] Error_Information._privpropdict = { 
'disk_file' : _Prop_disk_file, 'lineNumber' : _Prop_lineNumber, 'message' : _Prop_message, 'messageKind' : _Prop_messageKind, } Error_Information._privelemdict = { } Function_Information._superclassnames = [] Function_Information._privpropdict = { 'disk_file' : _Prop_disk_file, 'lineNumber' : _Prop_lineNumber, } Function_Information._privelemdict = { } File_Mappings._superclassnames = [] File_Mappings._privpropdict = { 'Mappings' : _Prop_Mappings, } File_Mappings._privelemdict = { } File_Mapping._superclassnames = [] File_Mapping._privpropdict = { 'Compiler' : _Prop_Compiler, 'Extension' : _Prop_Extension, 'File_Type' : _Prop_File_Type, 'Ignored_by_Make' : _Prop_Ignored_by_Make, 'Launchable' : _Prop_Launchable, 'Precompiled' : _Prop_Precompiled, 'Resource_File' : _Prop_Resource_File, } File_Mapping._privelemdict = { } Global_Source_Trees._superclassnames = [] Global_Source_Trees._privpropdict = { 'Source_Trees' : _Prop_Source_Trees, } Global_Source_Trees._privelemdict = { } Extras._superclassnames = [] Extras._privpropdict = { 'Automatic_Toolbar_Help' : _Prop_Automatic_Toolbar_Help, 'External_Reference' : _Prop_External_Reference, 'Full_Screen_Zoom' : _Prop_Full_Screen_Zoom, 'Recent_Editor_Count' : _Prop_Recent_Editor_Count, 'Recent_Project_Count' : _Prop_Recent_Project_Count, 'Use_Editor_Extensions' : _Prop_Use_Editor_Extensions, 'Use_External_Editor' : _Prop_Use_External_Editor, 'Use_Script_Menu' : _Prop_Use_Script_Menu, 'Use_ToolServer_Menu' : _Prop_Use_ToolServer_Menu, } Extras._privelemdict = { } Build_Extras._superclassnames = [] Build_Extras._privpropdict = { 'Browser_Active' : _Prop_Browser_Active, 'Cache_Subproject_Data' : _Prop_Cache_Subproject_Data, 'Dump_Browser_Info' : _Prop_Dump_Browser_Info, 'Modification_Date_Caching' : _Prop_Modification_Date_Caching, } Build_Extras._privelemdict = { } member_function._superclassnames = [] member_function._privpropdict = { 'access' : _Prop_access, 'declaration_end_offset' : _Prop_declaration_end_offset, 'declaration_file' : _Prop_declaration_file, 'declaration_start_offset' : _Prop_declaration_start_offset, 'implementation_end_offset' : _Prop_implementation_end_offset, 'implementation_file' : _Prop_implementation_file, 'implementation_start_offset' : _Prop_implementation_start_offset, 'name' : _Prop_name, 'static' : _Prop_static, 'virtual' : _Prop_virtual, } member_function._privelemdict = { } Access_Paths._superclassnames = [] Access_Paths._privpropdict = { 'Always_Full_Search' : _Prop_Always_Full_Search, 'Convert_Paths' : _Prop_Convert_Paths, 'Require_Framework_Includes' : _Prop_Require_Framework_Includes, 'System_Paths' : _Prop_System_Paths, 'User_Paths' : _Prop_User_Paths, } Access_Paths._privelemdict = { } Path_Information._superclassnames = [] Path_Information._privpropdict = { 'format' : _Prop_format, 'framework' : _Prop_framework, 'host_flags' : _Prop_host_flags, 'name' : _Prop_name, 'origin' : _Prop_origin, 'recursive' : _Prop_recursive, 'root' : _Prop_root, } Path_Information._privelemdict = { } Plugin_Settings._superclassnames = [] Plugin_Settings._privpropdict = { 'Disable_Third_Party_COM_Plugins' : _Prop_Disable_Third_Party_COM_Plugins, 'Plugin_Diagnostics_Level' : _Prop_Plugin_Diagnostics_Level, } Plugin_Settings._privelemdict = { } Runtime_Settings._superclassnames = [] Runtime_Settings._privpropdict = { 'Command_Line_Arguments' : _Prop_Command_Line_Arguments, 'Environment_Variables' : _Prop_Environment_Variables, 'Host_Application' : _Prop_Host_Application, 'Working_Directory' : _Prop_Working_Directory, } 
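# --- Illustrative usage sketch (editorial addition, not part of the generated suite) ---
# The ComponentItem/NProperty classes above only record a four-character
# AppleEvent code ('which') and an expected value type ('want'); the
# _privpropdict/_privelemdict tables built in this block are what let aetools
# translate Python attribute names into those codes at run time.  Assuming the
# package exposes the usual gensuitemodule-style application class (the driver
# names below are hypothetical; only the event methods such as Make_Project,
# Set_Current_Target and Run_Project are actually defined in this file), a
# session could look roughly like:
#
#   import CodeWarrior                             # generated scripting package (assumed name)
#   ide = CodeWarrior.CodeWarrior(start=1)         # aetools.TalkTo subclass; start=1 launches the app
#   ide.Set_Current_Target('MyApp PPC')            # sends the 'MMPR'/'STrg' event defined above
#   ide.Make_Project()                             # 'MMPR'/'Make'
#   errs = ide.Run_Project(ExternalEditor=1)       # 'MMPR'/'RunP'; returns the errors that occurred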
Runtime_Settings._privelemdict = { } Relative_Path._superclassnames = [] Relative_Path._privpropdict = { 'format' : _Prop_format, 'name' : _Prop_name, 'origin' : _Prop_origin, 'root' : _Prop_root, } Relative_Path._privelemdict = { } Shielded_Folder._superclassnames = [] Shielded_Folder._privpropdict = { 'Expression_To_Match' : _Prop_Expression_To_Match, 'Skip_Find_And_Compare_Operations' : _Prop_Skip_Find_And_Compare_Operations, 'Skip_Project_Operations' : _Prop_Skip_Project_Operations, } Shielded_Folder._privelemdict = { } Shielded_Folders._superclassnames = [] Shielded_Folders._privpropdict = { 'Shielded_Items' : _Prop_Shielded_Items, } Shielded_Folders._privelemdict = { } Syntax_Coloring._superclassnames = [] Syntax_Coloring._privpropdict = { 'Comment_Color' : _Prop_Comment_Color, 'Custom_Color_1' : _Prop_Custom_Color_1, 'Custom_Color_2' : _Prop_Custom_Color_2, 'Custom_Color_3' : _Prop_Custom_Color_3, 'Custom_Color_4' : _Prop_Custom_Color_4, 'Keyword_Color' : _Prop_Keyword_Color, 'String_Color' : _Prop_String_Color, 'Syntax_Coloring' : _Prop_Syntax_Coloring, } Syntax_Coloring._privelemdict = { } Segment._superclassnames = [] Segment._privpropdict = { 'filecount' : _Prop_filecount, 'name' : _Prop_name, 'seg_2d_locked' : _Prop_seg_2d_locked, 'seg_2d_preloaded' : _Prop_seg_2d_preloaded, 'seg_2d_protected' : _Prop_seg_2d_protected, 'seg_2d_purgeable' : _Prop_seg_2d_purgeable, 'seg_2d_system_heap' : _Prop_seg_2d_system_heap, } Segment._privelemdict = { } ProjectFile._superclassnames = [] ProjectFile._privpropdict = { 'codesize' : _Prop_codesize, 'datasize' : _Prop_datasize, 'disk_file' : _Prop_disk_file, 'filetype' : _Prop_filetype, 'includes' : _Prop_includes, 'initialize_before' : _Prop_initialize_before, 'name' : _Prop_name, 'symbols' : _Prop_symbols, 'up_to_date' : _Prop_up_to_date, 'weak_link' : _Prop_weak_link, } ProjectFile._privelemdict = { } Source_Tree._superclassnames = [] Source_Tree._privpropdict = { 'format' : _Prop_format, 'name' : _Prop_name, 'path' : _Prop_path, 'path_kind' : _Prop_path_kind, } Source_Tree._privelemdict = { } Target_Settings._superclassnames = [] Target_Settings._privpropdict = { 'Linker' : _Prop_Linker, 'Output_Directory_Location' : _Prop_Output_Directory_Location, 'Output_Directory_Origin' : _Prop_Output_Directory_Origin, 'Output_Directory_Path' : _Prop_Output_Directory_Path, 'Post_Linker' : _Prop_Post_Linker, 'Pre_Linker' : _Prop_Pre_Linker, 'Target_Name' : _Prop_Target_Name, 'Use_Relative_Paths' : _Prop_Use_Relative_Paths, } Target_Settings._privelemdict = { } Target_Source_Trees._superclassnames = [] Target_Source_Trees._privpropdict = { 'Source_Trees' : _Prop_Source_Trees, } Target_Source_Trees._privelemdict = { } VCS_Setup._superclassnames = [] VCS_Setup._privpropdict = { 'Always_Prompt' : _Prop_Always_Prompt, 'Auto_Connect' : _Prop_Auto_Connect, 'Connection_Method' : _Prop_Connection_Method, 'Database_Path' : _Prop_Database_Path, 'Local_Path' : _Prop_Local_Path, 'Mount_Volume' : _Prop_Mount_Volume, 'Password' : _Prop_Password, 'Store_Password' : _Prop_Store_Password, 'Use_Global_Settings' : _Prop_Use_Global_Settings, 'Username' : _Prop_Username, 'VCS_Active' : _Prop_VCS_Active, } VCS_Setup._privelemdict = { } Font._superclassnames = [] Font._privpropdict = { 'Auto_Indent' : _Prop_Auto_Indent, 'Tab_Indents_Selection' : _Prop_Tab_Indents_Selection, 'Tab_Inserts_Spaces' : _Prop_Tab_Inserts_Spaces, 'Tab_Size' : _Prop_Tab_Size, 'Text_Font' : _Prop_Text_Font, 'Text_Size' : _Prop_Text_Size, } Font._privelemdict = { } _Enum_Acce = { 'public' : 'Publ', # 
    'protected' : 'Prot',  #
    'private' : 'Priv',  #
}

_Enum_BXbr = {
    'Always_Build' : 'BXb1',  # Always build the target before running.
    'Ask_Build' : 'BXb2',  # Ask before building the target when running.
    'Never_Build' : 'BXb3',  # Never build the target before running.
}

_Enum_DbSA = {
    'No_Action' : 'DSA1',  # Don\xd5t do anything to non-debug windows
    'Hide_Windows' : 'DSA2',  # Hide non-debugging windows
    'Collapse_Windows' : 'DSA3',  # Collapse non-debugging windows
    'Close_Windows' : 'DSA4',  # Close non-debugging windows
}

_Enum_DgBL = {
    'Always' : 'DgB0',  # Always build before debugging.
    'Never' : 'DgB1',  # Never build before debugging.
    'Ask' : 'DgB2',  # Ask about building before debugging.
}

_Enum_ErrT = {
    'information' : 'ErIn',  #
    'compiler_warning' : 'ErCW',  #
    'compiler_error' : 'ErCE',  #
    'definition' : 'ErDf',  #
    'linker_warning' : 'ErLW',  #
    'linker_error' : 'ErLE',  #
    'find_result' : 'ErFn',  #
    'generic_error' : 'ErGn',  #
}

_Enum_Inte = {
    'never_interact' : 'eNvr',  # Never allow user interactions
    'interact_with_self' : 'eInS',  # Allow user interaction only when an AppleEvent is sent from within CodeWarrior
    'interact_with_local' : 'eInL',  # Allow user interaction when AppleEvents are sent from applications on the same machine (default)
    'interact_with_all' : 'eInA',  # Allow user interaction from both local and remote AppleEvents
}

_Enum_Lang = {
    'C' : 'LC ',  #
    'C_2b__2b_' : 'LC++',  #
    'Pascal' : 'LP ',  #
    'Object_Pascal' : 'LP++',  #
    'Java' : 'LJav',  #
    'Assembler' : 'LAsm',  #
    'Unknown' : 'L? ',  #
}

_Enum_PPrm = {
    'absolute' : 'Abso',  # An absolute path name, including volume name.
    'project_relative' : 'PRel',  # A path relative to the current project\xd5s folder.
    'shell_relative' : 'SRel',  # A path relative to the CodeWarrior\xaa folder.
    'system_relative' : 'YRel',  # A path relative to the system folder
    'root_relative' : 'RRel',  #
}

_Enum_PXdg = {
    'Diagnose_None' : 'PXd1',  # No Plugin Diagnostics.
    'Diagnose_Errors' : 'PXd2',  # Plugin Diagnostics for errors only.
    'Diagnose_All' : 'PXd3',  # Plugin Diagnostics for everything.
}

_Enum_PthF = {
    'Generic_Path' : 'PFGn',  #
    'MacOS_Path' : 'PFMc',  # MacOS path using colon as separator
    'Windows_Path' : 'PFWn',  # Windows path using backslash as separator
    'Unix_Path' : 'PFUx',  # Unix path using slash as separator
}

_Enum_RefP = {
    'Think_Reference' : 'DanR',  #
    'QuickView' : 'ALTV',  #
}

_Enum_STKd = {
    'Absolute_Path' : 'STK0',  # The \xd2path\xd3 property is an absolute path to the location of the source tree.
    'Registry_Key' : 'STK1',  # The \xd2path\xd3 property is the name of a registry key that contains the path to the root.
    'Environment_Variable' : 'STK2',  # The \xd2path\xd3 property is the name of an environment variable that contains the path to the root.
}

_Enum_SrcT = {
    'source' : 'FTxt',  # A source file (.c, .cp, .p, etc).
    'unknown' : 'FUnk',  # An unknown file type.
}

_Enum_TmpB = {
    'User_Specified' : 'Usrs',  # Use user specified symbols when setting temporary breakpoints on program launch.
    'Default' : 'Dflt',  # Use system default symbols when setting temporary breakpoints on program launch.
} _Enum_TxtF = { 'MacOS' : 'TxF0', # MacOS text format 'DOS' : 'TxF1', # DOS text format 'Unix' : 'TxF2', # Unix text format } _Enum_savo = { 'yes' : 'yes ', # Save changes 'no' : 'no ', # Do not save changes 'ask' : 'ask ', # Ask the user whether to save } # # Indices of types declared in this module # _classdeclarations = { 'BRKW' : Browser_Coloring, 'BSTG' : Build_Settings, 'BsCl' : base_class, 'CUKW' : Custom_Keywords, 'Cata' : browser_catalog, 'Clas' : class_, 'DbDS' : Debugger_Display, 'DbGL' : Debugger_Global, 'DbTG' : Debugger_Target, 'DbWN' : Debugger_Windowing, 'DtMb' : data_member, 'EDTR' : Editor, 'EnvV' : Environment_Variable, 'ErrM' : Error_Information, 'FDef' : Function_Information, 'FLMP' : File_Mappings, 'FMap' : File_Mapping, 'GSTs' : Global_Source_Trees, 'GXTR' : Extras, 'LXTR' : Build_Extras, 'MbFn' : member_function, 'PATH' : Access_Paths, 'PInf' : Path_Information, 'PSTG' : Plugin_Settings, 'RSTG' : Runtime_Settings, 'RlPt' : Relative_Path, 'SFit' : Shielded_Folder, 'SHFL' : Shielded_Folders, 'SNTX' : Syntax_Coloring, 'Seg ' : Segment, 'SrcF' : ProjectFile, 'SrcT' : Source_Tree, 'TARG' : Target_Settings, 'TSTs' : Target_Source_Trees, 'VCSs' : VCS_Setup, 'mFNT' : Font, } _propdeclarations = { 'Acce' : _Prop_access, 'BW00' : _Prop_Browser_Keywords, 'BW01' : _Prop_Classes_Color, 'BW02' : _Prop_Constants_Color, 'BW03' : _Prop_Enums_Color, 'BW04' : _Prop_Functions_Color, 'BW05' : _Prop_Globals_Color, 'BW06' : _Prop_Macros_Color, 'BW07' : _Prop_Templates_Color, 'BW08' : _Prop_Typedefs_Color, 'BW10' : _Prop_Template_Commands_in_Menu, 'BX01' : _Prop_Completion_Sound, 'BX02' : _Prop_Success_Sound, 'BX03' : _Prop_Failure_Sound, 'BX04' : _Prop_Build_Before_Running, 'BX05' : _Prop_Include_Cache_Size, 'BX06' : _Prop_Compiler_Thread_Stack_Size, 'BX07' : _Prop_Save_Before_Building, 'Bfor' : _Prop_initialize_before, 'CSiz' : _Prop_codesize, 'Clas' : _Prop_class_, 'DSiz' : _Prop_datasize, 'Db01' : _Prop_Show_Variable_Types, 'Db02' : _Prop_Sort_By_Method, 'Db03' : _Prop_Use_RTTI, 'Db04' : _Prop_Threads_in_Window, 'Db05' : _Prop_Variable_Hints, 'Db06' : _Prop_Watchpoint_Hilite, 'Db07' : _Prop_Variable_Changed_Hilite, 'Db08' : _Prop_Default_Array_Size, 'Db09' : _Prop_Show_Locals, 'Db10' : _Prop_Show_As_Decimal, 'DcEn' : _Prop_declaration_end_offset, 'DcFl' : _Prop_declaration_file, 'DcSt' : _Prop_declaration_start_offset, 'DfEn' : _Prop_implementation_end_offset, 'DfFl' : _Prop_implementation_file, 'DfSt' : _Prop_implementation_start_offset, 'Dg01' : _Prop_Ignore_Mod_Dates, 'Dg02' : _Prop_Open_All_Classes, 'Dg03' : _Prop_Launch_Apps_on_Open, 'Dg04' : _Prop_Confirm_Kill, 'Dg05' : _Prop_Stop_at_Main, 'Dg06' : _Prop_Select_Stack_Crawl, 'Dg07' : _Prop_Dont_Step_in_Runtime, 'Dg11' : _Prop_Auto_Target_Libraries, 'Dg12' : _Prop_Cache_Edited_Files, 'Dg13' : _Prop_File_Cache_Duration, 'Dt02' : _Prop_Log_System_Messages, 'Dt08' : _Prop_Update_Data_While_Running, 'Dt09' : _Prop_Data_Update_Interval, 'Dt10' : _Prop_Relocated_Executable_Path, 'Dt13' : _Prop_Stop_at_temp_breakpoint, 'Dt14' : _Prop_Temp_breakpoint_names, 'Dt15' : _Prop_Cache_symbolics, 'Dt16' : _Prop_Temp_Breakpoint_Type, 'Dw01' : _Prop_Debugging_Start_Action, 'Dw02' : _Prop_Do_Nothing_To_Projects, 'ED01' : _Prop_Flash_Delay, 'ED02' : _Prop_Dynamic_Scroll, 'ED03' : _Prop_Balance, 'ED04' : _Prop_Use_Drag__26__Drop_Editing, 'ED06' : _Prop_Sort_Function_Popup, 'ED07' : _Prop_Use_Multiple_Undo, 'ED08' : _Prop_Remember_Font, 'ED09' : _Prop_Remember_Selection, 'ED10' : _Prop_Remember_Window, 'ED12' : _Prop_Main_Text_Color, 'ED13' : 
_Prop_Background_Color, 'ED14' : _Prop_Context_Popup_Delay, 'ED15' : _Prop_Relaxed_C_Popup_Parsing, 'ED16' : _Prop_Left_Margin_Line_Select, 'ED17' : _Prop_Default_Text_File_Format, 'EX04' : _Prop_Modification_Date_Caching, 'EX07' : _Prop_Full_Screen_Zoom, 'EX08' : _Prop_External_Reference, 'EX09' : _Prop_Browser_Active, 'EX10' : _Prop_Use_Editor_Extensions, 'EX11' : _Prop_Use_External_Editor, 'EX12' : _Prop_Use_Script_Menu, 'EX16' : _Prop_Recent_Editor_Count, 'EX17' : _Prop_Recent_Project_Count, 'EX18' : _Prop_Use_ToolServer_Menu, 'EX19' : _Prop_Automatic_Toolbar_Help, 'EX30' : _Prop_Dump_Browser_Info, 'EX31' : _Prop_Cache_Subproject_Data, 'ErrL' : _Prop_lineNumber, 'ErrS' : _Prop_message, 'ErrT' : _Prop_messageKind, 'FMps' : _Prop_Mappings, 'FN01' : _Prop_Auto_Indent, 'FN02' : _Prop_Tab_Size, 'FN03' : _Prop_Tab_Indents_Selection, 'FN04' : _Prop_Tab_Inserts_Spaces, 'Frmt' : _Prop_format, 'Frmw' : _Prop_framework, 'GH01' : _Prop_Syntax_Coloring, 'GH02' : _Prop_Comment_Color, 'GH03' : _Prop_Keyword_Color, 'GH04' : _Prop_String_Color, 'GH05' : _Prop_Custom_Color_1, 'GH06' : _Prop_Custom_Color_2, 'GH07' : _Prop_Custom_Color_3, 'GH08' : _Prop_Custom_Color_4, 'HstF' : _Prop_host_flags, 'IncF' : _Prop_includes, 'Kind' : _Prop_path_kind, 'Lang' : _Prop_language, 'NumF' : _Prop_filecount, 'Orig' : _Prop_origin, 'PA01' : _Prop_User_Paths, 'PA02' : _Prop_Always_Full_Search, 'PA03' : _Prop_System_Paths, 'PA04' : _Prop_Convert_Paths, 'PA05' : _Prop_Require_Framework_Includes, 'PLck' : _Prop_seg_2d_locked, 'PR04' : _Prop_File_Type, 'PX01' : _Prop_Plugin_Diagnostics_Level, 'PX02' : _Prop_Disable_Third_Party_COM_Plugins, 'Path' : _Prop_path, 'Prel' : _Prop_seg_2d_preloaded, 'Prot' : _Prop_seg_2d_protected, 'Purg' : _Prop_seg_2d_purgeable, 'RS01' : _Prop_Host_Application, 'RS02' : _Prop_Command_Line_Arguments, 'RS03' : _Prop_Working_Directory, 'RS04' : _Prop_Environment_Variables, 'Recu' : _Prop_recursive, 'Root' : _Prop_root, 'SF01' : _Prop_Expression_To_Match, 'SF02' : _Prop_Skip_Project_Operations, 'SF03' : _Prop_Skip_Find_And_Compare_Operations, 'SFis' : _Prop_Shielded_Items, 'ST01' : _Prop_Source_Trees, 'SrcT' : _Prop_filetype, 'Stat' : _Prop_static, 'SubA' : _Prop_all_subclasses, 'SubC' : _Prop_subclasses, 'SymG' : _Prop_symbols, 'SysH' : _Prop_seg_2d_system_heap, 'TA01' : _Prop_Linker, 'TA02' : _Prop_Extension, 'TA03' : _Prop_Precompiled, 'TA04' : _Prop_Resource_File, 'TA05' : _Prop_Launchable, 'TA06' : _Prop_Ignored_by_Make, 'TA07' : _Prop_Compiler, 'TA09' : _Prop_Post_Linker, 'TA10' : _Prop_Target_Name, 'TA11' : _Prop_Output_Directory_Path, 'TA12' : _Prop_Output_Directory_Origin, 'TA13' : _Prop_Pre_Linker, 'TA15' : _Prop_Use_Relative_Paths, 'TA16' : _Prop_Output_Directory_Location, 'UpTD' : _Prop_up_to_date, 'VC01' : _Prop_VCS_Active, 'VC02' : _Prop_Connection_Method, 'VC03' : _Prop_Username, 'VC04' : _Prop_Password, 'VC05' : _Prop_Auto_Connect, 'VC06' : _Prop_Store_Password, 'VC07' : _Prop_Always_Prompt, 'VC08' : _Prop_Mount_Volume, 'VC09' : _Prop_Database_Path, 'VC10' : _Prop_Local_Path, 'VC11' : _Prop_Use_Global_Settings, 'Valu' : _Prop_value, 'Virt' : _Prop_virtual, 'Weak' : _Prop_weak_link, 'file' : _Prop_disk_file, 'pnam' : _Prop_name, 'ptps' : _Prop_Text_Size, 'ptxf' : _Prop_Text_Font, } _compdeclarations = { } _enumdeclarations = { 'Acce' : _Enum_Acce, 'BXbr' : _Enum_BXbr, 'DbSA' : _Enum_DbSA, 'DgBL' : _Enum_DgBL, 'ErrT' : _Enum_ErrT, 'Inte' : _Enum_Inte, 'Lang' : _Enum_Lang, 'PPrm' : _Enum_PPrm, 'PXdg' : _Enum_PXdg, 'PthF' : _Enum_PthF, 'RefP' : _Enum_RefP, 'STKd' : _Enum_STKd, 'SrcT' : 
_Enum_SrcT, 'TmpB' : _Enum_TmpB, 'TxtF' : _Enum_TxtF, 'savo' : _Enum_savo, } data = ( ' ', # 0x00 'a', # 0x01 '1', # 0x02 'b', # 0x03 '\'', # 0x04 'k', # 0x05 '2', # 0x06 'l', # 0x07 '@', # 0x08 'c', # 0x09 'i', # 0x0a 'f', # 0x0b '/', # 0x0c 'm', # 0x0d 's', # 0x0e 'p', # 0x0f '"', # 0x10 'e', # 0x11 '3', # 0x12 'h', # 0x13 '9', # 0x14 'o', # 0x15 '6', # 0x16 'r', # 0x17 '^', # 0x18 'd', # 0x19 'j', # 0x1a 'g', # 0x1b '>', # 0x1c 'n', # 0x1d 't', # 0x1e 'q', # 0x1f ',', # 0x20 '*', # 0x21 '5', # 0x22 '<', # 0x23 '-', # 0x24 'u', # 0x25 '8', # 0x26 'v', # 0x27 '.', # 0x28 '%', # 0x29 '[', # 0x2a '$', # 0x2b '+', # 0x2c 'x', # 0x2d '!', # 0x2e '&', # 0x2f ';', # 0x30 ':', # 0x31 '4', # 0x32 '\\', # 0x33 '0', # 0x34 'z', # 0x35 '7', # 0x36 '(', # 0x37 '_', # 0x38 '?', # 0x39 'w', # 0x3a ']', # 0x3b '#', # 0x3c 'y', # 0x3d ')', # 0x3e '=', # 0x3f '[d7]', # 0x40 '[d17]', # 0x41 '[d27]', # 0x42 '[d127]', # 0x43 '[d37]', # 0x44 '[d137]', # 0x45 '[d237]', # 0x46 '[d1237]', # 0x47 '[d47]', # 0x48 '[d147]', # 0x49 '[d247]', # 0x4a '[d1247]', # 0x4b '[d347]', # 0x4c '[d1347]', # 0x4d '[d2347]', # 0x4e '[d12347]', # 0x4f '[d57]', # 0x50 '[d157]', # 0x51 '[d257]', # 0x52 '[d1257]', # 0x53 '[d357]', # 0x54 '[d1357]', # 0x55 '[d2357]', # 0x56 '[d12357]', # 0x57 '[d457]', # 0x58 '[d1457]', # 0x59 '[d2457]', # 0x5a '[d12457]', # 0x5b '[d3457]', # 0x5c '[d13457]', # 0x5d '[d23457]', # 0x5e '[d123457]', # 0x5f '[d67]', # 0x60 '[d167]', # 0x61 '[d267]', # 0x62 '[d1267]', # 0x63 '[d367]', # 0x64 '[d1367]', # 0x65 '[d2367]', # 0x66 '[d12367]', # 0x67 '[d467]', # 0x68 '[d1467]', # 0x69 '[d2467]', # 0x6a '[d12467]', # 0x6b '[d3467]', # 0x6c '[d13467]', # 0x6d '[d23467]', # 0x6e '[d123467]', # 0x6f '[d567]', # 0x70 '[d1567]', # 0x71 '[d2567]', # 0x72 '[d12567]', # 0x73 '[d3567]', # 0x74 '[d13567]', # 0x75 '[d23567]', # 0x76 '[d123567]', # 0x77 '[d4567]', # 0x78 '[d14567]', # 0x79 '[d24567]', # 0x7a '[d124567]', # 0x7b '[d34567]', # 0x7c '[d134567]', # 0x7d '[d234567]', # 0x7e '[d1234567]', # 0x7f '[d8]', # 0x80 '[d18]', # 0x81 '[d28]', # 0x82 '[d128]', # 0x83 '[d38]', # 0x84 '[d138]', # 0x85 '[d238]', # 0x86 '[d1238]', # 0x87 '[d48]', # 0x88 '[d148]', # 0x89 '[d248]', # 0x8a '[d1248]', # 0x8b '[d348]', # 0x8c '[d1348]', # 0x8d '[d2348]', # 0x8e '[d12348]', # 0x8f '[d58]', # 0x90 '[d158]', # 0x91 '[d258]', # 0x92 '[d1258]', # 0x93 '[d358]', # 0x94 '[d1358]', # 0x95 '[d2358]', # 0x96 '[d12358]', # 0x97 '[d458]', # 0x98 '[d1458]', # 0x99 '[d2458]', # 0x9a '[d12458]', # 0x9b '[d3458]', # 0x9c '[d13458]', # 0x9d '[d23458]', # 0x9e '[d123458]', # 0x9f '[d68]', # 0xa0 '[d168]', # 0xa1 '[d268]', # 0xa2 '[d1268]', # 0xa3 '[d368]', # 0xa4 '[d1368]', # 0xa5 '[d2368]', # 0xa6 '[d12368]', # 0xa7 '[d468]', # 0xa8 '[d1468]', # 0xa9 '[d2468]', # 0xaa '[d12468]', # 0xab '[d3468]', # 0xac '[d13468]', # 0xad '[d23468]', # 0xae '[d123468]', # 0xaf '[d568]', # 0xb0 '[d1568]', # 0xb1 '[d2568]', # 0xb2 '[d12568]', # 0xb3 '[d3568]', # 0xb4 '[d13568]', # 0xb5 '[d23568]', # 0xb6 '[d123568]', # 0xb7 '[d4568]', # 0xb8 '[d14568]', # 0xb9 '[d24568]', # 0xba '[d124568]', # 0xbb '[d34568]', # 0xbc '[d134568]', # 0xbd '[d234568]', # 0xbe '[d1234568]', # 0xbf '[d78]', # 0xc0 '[d178]', # 0xc1 '[d278]', # 0xc2 '[d1278]', # 0xc3 '[d378]', # 0xc4 '[d1378]', # 0xc5 '[d2378]', # 0xc6 '[d12378]', # 0xc7 '[d478]', # 0xc8 '[d1478]', # 0xc9 '[d2478]', # 0xca '[d12478]', # 0xcb '[d3478]', # 0xcc '[d13478]', # 0xcd '[d23478]', # 0xce '[d123478]', # 0xcf '[d578]', # 0xd0 '[d1578]', # 0xd1 '[d2578]', # 0xd2 '[d12578]', # 0xd3 '[d3578]', # 0xd4 '[d13578]', # 
0xd5 '[d23578]', # 0xd6 '[d123578]', # 0xd7 '[d4578]', # 0xd8 '[d14578]', # 0xd9 '[d24578]', # 0xda '[d124578]', # 0xdb '[d34578]', # 0xdc '[d134578]', # 0xdd '[d234578]', # 0xde '[d1234578]', # 0xdf '[d678]', # 0xe0 '[d1678]', # 0xe1 '[d2678]', # 0xe2 '[d12678]', # 0xe3 '[d3678]', # 0xe4 '[d13678]', # 0xe5 '[d23678]', # 0xe6 '[d123678]', # 0xe7 '[d4678]', # 0xe8 '[d14678]', # 0xe9 '[d24678]', # 0xea '[d124678]', # 0xeb '[d34678]', # 0xec '[d134678]', # 0xed '[d234678]', # 0xee '[d1234678]', # 0xef '[d5678]', # 0xf0 '[d15678]', # 0xf1 '[d25678]', # 0xf2 '[d125678]', # 0xf3 '[d35678]', # 0xf4 '[d135678]', # 0xf5 '[d235678]', # 0xf6 '[d1235678]', # 0xf7 '[d45678]', # 0xf8 '[d145678]', # 0xf9 '[d245678]', # 0xfa '[d1245678]', # 0xfb '[d345678]', # 0xfc '[d1345678]', # 0xfd '[d2345678]', # 0xfe '[d12345678]', # 0xff ) # -*- coding: utf-8 -*- ############################################################################## # # Copyright (c) 2008 JAILLET Simon - CrysaLEAD - www.crysalead.fr # # WARNING: This program as such is intended to be used by professional # programmers who take the whole responsability of assessing all potential # consequences resulting from its eventual inadequacies and bugs # End users who are looking for a ready-to-use solution with commercial # garantees and support are strongly adviced to contract a Free Software # Service Company # # This program is Free Software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. # ############################################################################## { "name" : "France - Accounting", "version" : "1.0", "author" : "OpenERP SA", "website": "http://www.openerp.com", "category" : "Localization/Account Charts", "description": """ This is the module to manage the accounting chart for France in OpenERP. 
======================================================================== Credits: Sistheo Zeekom CrysaLEAD """, "depends" : ['base_iban', 'account', 'account_chart', 'base_vat', 'l10n_fr_rib'], "init_xml" : [], "update_xml" : [ "fr_report_demo.xml", "plan_comptable_general_demo.xml", "l10n_fr_wizard.xml", "fr_pcg_taxes_demo.xml", "fr_tax_demo.xml", "fr_fiscal_templates_demo.xml", "security/ir.model.access.csv", "wizard/fr_report_bilan_view.xml", "wizard/fr_report_compute_resultant_view.xml", ], "test": ['test/l10n_fr_report.yml'], "demo_xml" : [], "certificate" : "00435321693876313629", "auto_install": False, "installable": True, 'images': ['images/config_chart_l10n_fr.jpeg','images/l10n_fr_chart.jpeg'], } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: import os import sys def is_active(): return True def get_name(): return "NaCl" def can_build(): import os if not os.environ.has_key("NACLPATH"): return False return True def get_opts(): return [ ('NACLPATH', 'the path to nacl', os.environ.get("NACLPATH", 0)), ('nacl_arch', 'The architecture for Nacl build (can be i686 or x86_64', 'i686'), ] def get_flags(): return [ ('nedmalloc', 'no'), ('tools', 'no'), ] def configure(env): env.Append(CPPPATH=['#platform/nacl']) env['OBJSUFFIX'] = ".nacl.${nacl_arch}.o" env['LIBSUFFIX'] = ".nacl.${nacl_arch}.a" env['PROGSUFFIX'] = ".${nacl_arch}.nexe" env['ENV']['PATH'] = env['ENV']['PATH']+":"+env['NACLPATH']+"/toolchain/linux_x86_newlib/bin" env['CC'] = '${nacl_arch}-nacl-gcc' env['CXX'] = '${nacl_arch}-nacl-g++' env['AR'] = '${nacl_arch}-nacl-ar' env.Append(CCFLAGS=['-fexceptions', '-Wno-long-long', '-pthread', '-DXP_UNIX']) env.Append(CPPPATH=env['NACLPATH']) if (env["target"]=="release"): env.Append(CCFLAGS=['-O2','-ffast-math','-fomit-frame-pointer', '-ffunction-sections', '-fdata-sections', '-fno-default-inline']) elif (env["target"]=="debug"): env.Append(CCFLAGS=['-g', '-O0', '-Wall','-DDEBUG_ENABLED']) elif (env["target"]=="profile"): env.Append(CCFLAGS=['-g','-pg']) env.Append(LINKFLAGS=['-pg']) env.Append(CCFLAGS=['-DNACL_ENABLED', '-DGLES2_ENABLED']) env.Append(LIBFLAGS=['m32']) env.Append(LIBS=env.Split('ppapi ppapi_cpp pthread srpc ppapi_gles22')) import methods env.Append( BUILDERS = { 'GLSL120GLES' : env.Builder(action = methods.build_gles2_headers, suffix = 'glsl.h',src_suffix = '.glsl') } ) #!/usr/bin/env python # # test_codecencodings_kr.py # Codec encoding tests for ROK encodings. 
# from test import test_support from test import test_multibytecodec_support import unittest class Test_CP949(test_multibytecodec_support.TestBase, unittest.TestCase): encoding = 'cp949' tstring = test_multibytecodec_support.load_teststring('cp949') codectests = ( # invalid bytes ("abc\x80\x80\xc1\xc4", "strict", None), ("abc\xc8", "strict", None), ("abc\x80\x80\xc1\xc4", "replace", u"abc\ufffd\uc894"), ("abc\x80\x80\xc1\xc4\xc8", "replace", u"abc\ufffd\uc894\ufffd"), ("abc\x80\x80\xc1\xc4", "ignore", u"abc\uc894"), ) class Test_EUCKR(test_multibytecodec_support.TestBase, unittest.TestCase): encoding = 'euc_kr' tstring = test_multibytecodec_support.load_teststring('euc_kr') codectests = ( # invalid bytes ("abc\x80\x80\xc1\xc4", "strict", None), ("abc\xc8", "strict", None), ("abc\x80\x80\xc1\xc4", "replace", u"abc\ufffd\uc894"), ("abc\x80\x80\xc1\xc4\xc8", "replace", u"abc\ufffd\uc894\ufffd"), ("abc\x80\x80\xc1\xc4", "ignore", u"abc\uc894"), # composed make-up sequence errors ("\xa4\xd4", "strict", None), ("\xa4\xd4\xa4", "strict", None), ("\xa4\xd4\xa4\xb6", "strict", None), ("\xa4\xd4\xa4\xb6\xa4", "strict", None), ("\xa4\xd4\xa4\xb6\xa4\xd0", "strict", None), ("\xa4\xd4\xa4\xb6\xa4\xd0\xa4", "strict", None), ("\xa4\xd4\xa4\xb6\xa4\xd0\xa4\xd4", "strict", u"\uc4d4"), ("\xa4\xd4\xa4\xb6\xa4\xd0\xa4\xd4x", "strict", u"\uc4d4x"), ("a\xa4\xd4\xa4\xb6\xa4", "replace", u"a\ufffd"), ("\xa4\xd4\xa3\xb6\xa4\xd0\xa4\xd4", "strict", None), ("\xa4\xd4\xa4\xb6\xa3\xd0\xa4\xd4", "strict", None), ("\xa4\xd4\xa4\xb6\xa4\xd0\xa3\xd4", "strict", None), ("\xa4\xd4\xa4\xff\xa4\xd0\xa4\xd4", "replace", u"\ufffd"), ("\xa4\xd4\xa4\xb6\xa4\xff\xa4\xd4", "replace", u"\ufffd"), ("\xa4\xd4\xa4\xb6\xa4\xd0\xa4\xff", "replace", u"\ufffd"), ("\xc1\xc4", "strict", u"\uc894"), ) class Test_JOHAB(test_multibytecodec_support.TestBase, unittest.TestCase): encoding = 'johab' tstring = test_multibytecodec_support.load_teststring('johab') codectests = ( # invalid bytes ("abc\x80\x80\xc1\xc4", "strict", None), ("abc\xc8", "strict", None), ("abc\x80\x80\xc1\xc4", "replace", u"abc\ufffd\ucd27"), ("abc\x80\x80\xc1\xc4\xc8", "replace", u"abc\ufffd\ucd27\ufffd"), ("abc\x80\x80\xc1\xc4", "ignore", u"abc\ucd27"), ) def test_main(): test_support.run_unittest(__name__) if __name__ == "__main__": test_main() # Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
# ============================================================================== """Library for getting system information during TensorFlow tests.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import ctypes as ct import platform from tensorflow.core.util import test_log_pb2 from tensorflow.python.framework import errors from tensorflow.python.platform import gfile def _gather_gpu_devices_proc(): """Try to gather NVidia GPU device information via /proc/driver.""" dev_info = [] for f in gfile.Glob("/proc/driver/nvidia/gpus/*/information"): bus_id = f.split("/")[5] key_values = dict(line.rstrip().replace("\t", "").split(":", 1) for line in gfile.GFile(f, "r")) key_values = dict((k.lower(), v.strip(" ").rstrip(" ")) for (k, v) in key_values.items()) info = test_log_pb2.GPUInfo() info.model = key_values.get("model", "Unknown") info.uuid = key_values.get("gpu uuid", "Unknown") info.bus_id = bus_id dev_info.append(info) return dev_info class CUDADeviceProperties(ct.Structure): # See $CUDA_HOME/include/cuda_runtime_api.h for the definition of # the cudaDeviceProp struct. _fields_ = [ ("name", ct.c_char * 256), ("totalGlobalMem", ct.c_size_t), ("sharedMemPerBlock", ct.c_size_t), ("regsPerBlock", ct.c_int), ("warpSize", ct.c_int), ("memPitch", ct.c_size_t), ("maxThreadsPerBlock", ct.c_int), ("maxThreadsDim", ct.c_int * 3), ("maxGridSize", ct.c_int * 3), ("clockRate", ct.c_int), ("totalConstMem", ct.c_size_t), ("major", ct.c_int), ("minor", ct.c_int), ("textureAlignment", ct.c_size_t), ("texturePitchAlignment", ct.c_size_t), ("deviceOverlap", ct.c_int), ("multiProcessorCount", ct.c_int), ("kernelExecTimeoutEnabled", ct.c_int), ("integrated", ct.c_int), ("canMapHostMemory", ct.c_int), ("computeMode", ct.c_int), ("maxTexture1D", ct.c_int), ("maxTexture1DMipmap", ct.c_int), ("maxTexture1DLinear", ct.c_int), ("maxTexture2D", ct.c_int * 2), ("maxTexture2DMipmap", ct.c_int * 2), ("maxTexture2DLinear", ct.c_int * 3), ("maxTexture2DGather", ct.c_int * 2), ("maxTexture3D", ct.c_int * 3), ("maxTexture3DAlt", ct.c_int * 3), ("maxTextureCubemap", ct.c_int), ("maxTexture1DLayered", ct.c_int * 2), ("maxTexture2DLayered", ct.c_int * 3), ("maxTextureCubemapLayered", ct.c_int * 2), ("maxSurface1D", ct.c_int), ("maxSurface2D", ct.c_int * 2), ("maxSurface3D", ct.c_int * 3), ("maxSurface1DLayered", ct.c_int * 2), ("maxSurface2DLayered", ct.c_int * 3), ("maxSurfaceCubemap", ct.c_int), ("maxSurfaceCubemapLayered", ct.c_int * 2), ("surfaceAlignment", ct.c_size_t), ("concurrentKernels", ct.c_int), ("ECCEnabled", ct.c_int), ("pciBusID", ct.c_int), ("pciDeviceID", ct.c_int), ("pciDomainID", ct.c_int), ("tccDriver", ct.c_int), ("asyncEngineCount", ct.c_int), ("unifiedAddressing", ct.c_int), ("memoryClockRate", ct.c_int), ("memoryBusWidth", ct.c_int), ("l2CacheSize", ct.c_int), ("maxThreadsPerMultiProcessor", ct.c_int), ("streamPrioritiesSupported", ct.c_int), ("globalL1CacheSupported", ct.c_int), ("localL1CacheSupported", ct.c_int), ("sharedMemPerMultiprocessor", ct.c_size_t), ("regsPerMultiprocessor", ct.c_int), ("managedMemSupported", ct.c_int), ("isMultiGpuBoard", ct.c_int), ("multiGpuBoardGroupID", ct.c_int), # Pad with extra space to avoid dereference crashes if future # versions of CUDA extend the size of this struct. 
("__future_buffer", ct.c_char * 4096) ] def _gather_gpu_devices_cudart(): """Try to gather NVidia GPU device information via libcudart.""" dev_info = [] system = platform.system() if system == "Linux": libcudart = ct.cdll.LoadLibrary("libcudart.so") elif system == "Darwin": libcudart = ct.cdll.LoadLibrary("libcudart.dylib") elif system == "Windows": libcudart = ct.windll.LoadLibrary("libcudart.dll") else: raise NotImplementedError("Cannot identify system.") version = ct.c_int() rc = libcudart.cudaRuntimeGetVersion(ct.byref(version)) if rc != 0: raise ValueError("Could not get version") if version.value < 6050: raise NotImplementedError("CUDA version must be between >= 6.5") device_count = ct.c_int() libcudart.cudaGetDeviceCount(ct.byref(device_count)) for i in range(device_count.value): properties = CUDADeviceProperties() rc = libcudart.cudaGetDeviceProperties(ct.byref(properties), i) if rc != 0: raise ValueError("Could not get device properties") pci_bus_id = " " * 13 rc = libcudart.cudaDeviceGetPCIBusId(ct.c_char_p(pci_bus_id), 13, i) if rc != 0: raise ValueError("Could not get device PCI bus id") info = test_log_pb2.GPUInfo() # No UUID available info.model = properties.name info.bus_id = pci_bus_id dev_info.append(info) del properties return dev_info def gather_gpu_devices(): """Gather gpu device info. Returns: A list of test_log_pb2.GPUInfo messages. """ try: # Prefer using /proc if possible, it provides the UUID. dev_info = _gather_gpu_devices_proc() if not dev_info: raise ValueError("No devices found") return dev_info except (IOError, ValueError, errors.OpError): pass try: # Fall back on using libcudart return _gather_gpu_devices_cudart() except (OSError, ValueError, NotImplementedError, errors.OpError): return [] #!/usr/bin/python # Patrick Luo # innovation week of Mar 25, 2013 # objective: high performance, easy to use and maintain, flexibility to run anywhere on any DB (HIVE and MySQL), etc. 
import os, sys, subprocess, inspect import MySQLdb # MySQL DB module import ConfigParser # parse mysql ini file import csv # csv module for csv parsing import logging import datetime from optparse import OptionParser from time import sleep from pprint import pprint # pretty print for dictionary etc import operator # logging config logging.basicConfig( filename = os.path.dirname(os.path.realpath(__file__)) + '/legoo.log', # set log file to legoo directory format = "%(levelname)-10s:[%(module)s][%(funcName)s][%(asctime)s]:%(message)s", level = logging.INFO ) format = logging.Formatter("%(levelname)-10s:[%(module)s][%(funcName)s][%(asctime)s]:%(message)s") # create a handler for stdout info_hand = logging.StreamHandler(sys.stdout) info_hand.setLevel(logging.INFO) info_hand.setFormatter(format) # top-level logger print to file legoo = logging.getLogger("legoo") legoo.addHandler(info_hand) # add hive path hive_path='/usr/lib/hive/lib/py/' if hive_path not in sys.path: sys.path.insert(0, hive_path) trulia_mysql_host = ['bidbs', 'bidbm', 'bedb1', 'maildb-slave', 'db30', 'rodb-dash', 'db9', 'crad103'] def count_lines(**kwargs): """return line count for input file -------------------------------------------------------------------- count_lines(file='/tmp/msa.csv', skip_header='Y') -------------------------------------------------------------------- """ debug = kwargs.pop("debug", "N") if (debug.strip().lower() == 'y'): pprint(kwargs) # pretty print kwargs # dictionary initialized with the name=value pairs in the keyword argument list file = kwargs.pop("file", None) skip_header = kwargs.pop("skip_header", 'N' ) # flag to skip header quiet = kwargs.pop("quiet", "N") if (quiet.strip().lower() == 'y'): legoo.removeHandler(info_hand) # suppress logging if variable quiet set to Y if kwargs: legoo.error("Unsupported configuration options %s" % list(kwargs)) # log error raise TypeError("[ERROR] Unsupported configuration options %s" % list(kwargs)) p = subprocess.Popen(['wc', '-l', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE) result, err = p.communicate() if p.returncode != 0: raise IOError(err) num_lines = int(result.strip().split()[0]) # decrement num_lines if need to skip header if (skip_header.strip().lower() == 'y'): num_lines = num_lines - 1 legoo.info("[%s] line count ==>> [%s] lines" % (file, num_lines)) return num_lines def remove_file(**kwargs): """remove file --------------------------------- remove_file(file='/tmp/msa.csv') --------------------------------- | file = None | debug = N """ debug = kwargs.pop("debug", "N") if (debug.strip().lower() == 'y'): pprint(kwargs) # dictionary initialized with the name=value pairs in the keyword argument list file = kwargs.pop("file", None) quiet = kwargs.pop("quiet", "N") if (quiet.strip().lower() == 'y'): legoo.removeHandler(info_hand) # suppress logging if variable quiet set to Y if kwargs: legoo.error("Unsupported configuration options %s" % list(kwargs)) # log error raise TypeError("[ERROR] Unsupported configuration options %s" % list(kwargs)) p = subprocess.Popen(['rm', '-f', file], stdout=subprocess.PIPE, stderr=subprocess.PIPE) result, err = p.communicate() if p.returncode != 0: raise IOError(err) legoo.info('File [%s] removed' % (file)) def count_hive_table_rows (**kwargs): """return hive table row count ----------------------------------------------------------------------------------- count_hive_table_rows(hive_node='namenode2s', hive_db='bi', hive_table='dual') 
----------------------------------------------------------------------------------- """ # dictionary initialized with the name=value pairs in the keyword argument list debug = kwargs.pop("debug", "N") if (debug.strip().lower() == 'y'): pprint(kwargs) # pretty print kwargs hive_node = kwargs.pop("hive_node", "namenode2s") hive_port = kwargs.pop("hive_port", 10000) hive_db = kwargs.pop("hive_db", "staging") hive_table = kwargs.pop("hive_table", None) mapred_job_priority = kwargs.pop("mapred_job_priority", "NORMAL") quiet = kwargs.pop("quiet", "N") if (quiet.strip().lower() == 'y'): legoo.removeHandler(info_hand) # suppress logging if variable quiet set to Y if kwargs: legoo.error("Unsupported configuration options %s" % list(kwargs)) # log error raise TypeError("[ERROR] Unsupported configuration options %s" % list(kwargs)) rs = execute_remote_hive_query( hive_node = hive_node, hive_port = hive_port, \ hive_db = hive_db, mapred_job_priority = mapred_job_priority, \ hive_query = "SELECT count(*) from %s" % (hive_table)) table_rows = rs[0] legoo.info('[%s] row count ==>> [%s] rows' % (hive_table, table_rows)) return table_rows def mysql_to_hive(**kwargs): """dump [mysql table | mysql query results] to hive ------------------------------------------------------------------------------ mysql_to_hive(mysql_host='bidbs', mysql_table='dim_time', hive_create_table='Y') ------------------------------------------------------------------------------ """ debug = kwargs.pop("debug", "N") if (debug.strip().lower() == 'y'): pprint(kwargs) # pretty print kwargs # args for mysql_to_csv mysql_ini = kwargs.pop("mysql_ini", "mysql.ini") mysql_host = kwargs.pop("mysql_host", "bidbs") mysql_db = kwargs.pop("mysql_db", "bi") mysql_user = kwargs.pop("mysql_user", "root") mysql_quick = kwargs.pop("mysql_quick", "N") mysql_table = kwargs.pop("mysql_table", None) mysql_query = kwargs.pop("mysql_query", None) mysql_password = kwargs.pop("mysql_password", None) # args for csv_to_mysql hive_node = kwargs.pop("hive_node", "namenode2s") hive_port = kwargs.pop("hive_port", 10000) hive_db = kwargs.pop("hive_db", "staging") hive_table = kwargs.pop("hive_table", None) hive_partition = kwargs.pop("hive_partition", None) hive_ddl = kwargs.pop("hive_ddl", None) hive_overwrite = kwargs.pop("hive_overwrite", "Y") hive_create_table = kwargs.pop("hive_create_table", "N") mapred_job_priority = kwargs.pop("mapred_job_priority", "NORMAL") csv_dir = kwargs.pop("csv_dir", "/data/tmp/") csv_file = kwargs.pop("csv_file", None) csv_delimiter = kwargs.pop("csv_delimiter", 'tab') # default to tab csv_delimiter remove_carriage_return = kwargs.pop("remove_carriage_return", 'N') quiet = kwargs.pop("quiet", "N") if (quiet.strip().lower() == 'y'): legoo.removeHandler(info_hand) # suppress logging if variable quiet set to Y if kwargs: legoo.error("Unsupported configuration options %s" % list(kwargs)) # log error raise TypeError("[ERROR] Unsupported configuration options %s" % list(kwargs)) # export mysql to csv csv_file = mysql_to_csv( mysql_ini = mysql_ini, \ mysql_host = mysql_host, \ mysql_db = mysql_db, \ mysql_user = mysql_user, \ mysql_password = mysql_password, \ mysql_quick = mysql_quick, \ mysql_table = mysql_table, \ mysql_query = mysql_query, \ csv_dir = csv_dir, \ csv_file = csv_file, \ quiet = quiet, \ debug = debug) # load csv to hive csv_to_hive(hive_node = hive_node, \ hive_port = hive_port, \ hive_db = hive_db, \ hive_create_table = hive_create_table, \ hive_table = hive_table, \ hive_overwrite = hive_overwrite, \ hive_partition = 
hive_partition, \ hive_ddl = hive_ddl, \ mapred_job_priority = mapred_job_priority, \ csv_file = csv_file, \ csv_delimiter = csv_delimiter, \ remove_carriage_return = remove_carriage_return, \ quiet = quiet, \ debug = debug) # remove temp files remove_file(file=csv_file) # temp files if remove_carriage_return is on remove_file(file="%s%s" % (csv_file, '2')) def mysql_to_csv(**kwargs): """export [mysql table | query results] to tab delmited csv and return the tsv ------------------------------------------------------------------------------- mysql_to_csv(mysql_host='bidbs', mysql_table='dim_time') mysql_to_csv(mysql_host='bidbs', mysql_query='select * from dim_time limit 10') ------------------------------------------------------------------------------- """ debug = kwargs.pop("debug", "N") if (debug.strip().lower() == 'y'): pprint(kwargs) # pretty print kwargs # dictionary initialized with the name=value pairs in the keyword argument list mysql_ini = kwargs.pop("mysql_ini", "mysql.ini") mysql_host = kwargs.pop("mysql_host", "bidbs") mysql_db = kwargs.pop("mysql_db", "bi") mysql_user = kwargs.pop("mysql_user", "root") mysql_password = kwargs.pop("mysql_password", None) mysql_quick = kwargs.pop("mysql_quick", "N") mysql_table = kwargs.pop("mysql_table", None) mysql_query = kwargs.pop("mysql_query", None) csv_dir = kwargs.pop("csv_dir", "/data/tmp/") csv_file = kwargs.pop("csv_file", None) csv_delimiter = kwargs.pop("csv_delimiter", 'tab') # default to tab csv_delimiter quiet = kwargs.pop("quiet", "N") if (quiet.strip().lower() == 'y'): legoo.removeHandler(info_hand) # suppress logging if variable quiet set to Y if kwargs: legoo.error("Unsupported configuration options %s" % list(kwargs)) # log error raise TypeError("[ERROR] Unsupported configuration options %s" % list(kwargs)) # parse the ini file to pull db variables config = ConfigParser.ConfigParser() config.read(os.path.join(os.path.dirname(__file__), mysql_ini)) # extend e mysql.ini if necessary # set default mysql_user from mysql_ini if (not mysql_user): if (mysql_host not in trulia_mysql_host): # adhocdb use a non-standard password and db. mysql_user = config.get(mysql_host, "user") else: mysql_user = config.get('default', "user") if (not mysql_db): if (mysql_host not in trulia_mysql_host): # adhocdb use a non-standard password and db. mysql_db = config.get(mysql_host, "db") else: mysql_db = config.get('default', "db") # set default mysql_password from mysql_ini if (not mysql_password): if (mysql_host not in trulia_mysql_host): # adhocdb use a non-standard password and db. 
mysql_password = config.get(mysql_host, "password") else: mysql_password = config.get('default', "password") # set default csv if (not csv_file and not mysql_table ): csv_file = "%s/tmp_%s.csv" % (csv_dir, os.getpid()) # set a temporary csv file name in elif (not csv_file and mysql_table ): csv_file = "%s/%s.csv" % (csv_dir, mysql_table) if (not mysql_table and not mysql_query): raise TypeError("[ERROR] Must specify either mysql_table or mysql_query" ) elif (mysql_table and not mysql_query): mysql_query = "SELECT * FROM %s;" % (mysql_table) if (mysql_quick and mysql_quick.strip().lower() == 'y'): mysql_quick = "--quick" else: mysql_quick = "" # mysql -hbidbs bi -e'select * from dim_time limit 10' > /tmp/test.csv mysql_cmd = "mysql -h%s -u%s -p%s %s %s -e \"%s\" > %s" % \ (mysql_host, mysql_user, mysql_password, mysql_db, mysql_quick, mysql_query, csv_file) mysql_cmd_without_password = "mysql -h%s -u%s %s %s -e \"%s\" > %s" % \ (mysql_host, mysql_user, mysql_db, mysql_quick, mysql_query, csv_file) legoo.info("Running mysql export to csv ==>> [%s]" % ( mysql_cmd_without_password)) os.system( mysql_cmd ) if (debug.strip().lower() == 'y'): # dump sample csv_dump(csv_file=csv_file, csv_delimiter=csv_delimiter, lines=2) return csv_file def csv_to_hive(**kwargs): """import csv to hive table. 1. create hive ddl based on csv header. use octal code for csv_delimiter 2. create hive table 3. upload csv without header to hdfs 4. load csv in hdfs to hive table note: sqoop is buggy, has many mandatory parameters, only runs on the hive node, and has other restrictions. ----------------------------------------------------------------------------------- csv_to_hive(csv_file='/tmp/fact_imp_pdp.csv', csv_delimiter='tab', hive_create_table='Y') ----------------------------------------------------------------------------------- """ debug = kwargs.pop("debug", "N") if (debug.strip().lower() == 'y'): pprint(kwargs) # pretty print kwargs # dictionary initialized with the name=value pairs in the keyword argument list hive_node = kwargs.pop("hive_node", "namenode2s") hive_port = kwargs.pop("hive_port", 10000) hive_db = kwargs.pop("hive_db", "staging") hive_table = kwargs.pop("hive_table", None) hive_ddl = kwargs.pop("hive_ddl", None) hive_overwrite = kwargs.pop("hive_overwrite", "Y") hive_create_table = kwargs.pop("hive_create_table", "N") hive_partition = kwargs.pop("hive_partition", None) mapred_job_priority = kwargs.pop("mapred_job_priority", "NORMAL") csv_file = kwargs.pop("csv_file", None) csv_header = kwargs.pop("csv_header", "Y") remove_carriage_return = kwargs.pop("remove_carriage_return", "N") csv_delimiter = kwargs.pop("csv_delimiter", 'tab') # default to tab csv_delimiter quiet = kwargs.pop("quiet", "N") if (quiet.strip().lower() == 'y'): legoo.removeHandler(info_hand) # suppress logging if variable quiet set to Y if kwargs: legoo.error("Unsupported configuration options %s" % list(kwargs)) # log error raise TypeError("[ERROR] Unsupported configuration options %s" % list(kwargs)) if (not hive_table): legoo.error("hive_table variable needs to be specified") # log error raise TypeError("[ERROR] hive_table variable needs to be specified") # if not hive_overwrite, set to append mode if (hive_overwrite.strip().lower() == 'y'): hive_overwrite = 'OVERWRITE' else: hive_overwrite = 'INTO' # When input file has no header, create temp file with dummy header if (csv_header.strip().lower() != 'y'): legoo.info("auto-generating csv header ...") temp_file = csv_file + '2' # read first line of file with open(csv_file, 'r') as f: first_line 
= f.readline() # autogen column name list if (csv_delimiter.strip().lower() == 'tab'): raw_delimiter = '\t' else: raw_delimiter = csv_delimiter header_list = [ 'col_' + str(i) for i in range(len(first_line.split(raw_delimiter)))] # write header to temp file with open(temp_file,'w') as f: wr = csv.writer(f, delimiter=raw_delimiter, quoting=csv.QUOTE_ALL, lineterminator='\n') wr.writerow(header_list) cmd_append = 'cat %s >> %s' % (csv_file, temp_file) # cmd to append file os.system( cmd_append ) # os.system call is easier than subprocess csv_file = temp_file csv_header = 'Y' # check if table exists on hive if (hive_create_table.strip().lower() == 'n'): execute_remote_hive_query( hive_node = hive_node, hive_port = hive_port, \ hive_db = hive_db, mapred_job_priority = mapred_job_priority, \ hive_query = "desc %s" % (hive_table)) if (hive_partition and hive_create_table.strip().lower() == 'y'): hive_create_table ='N' legoo.warning("hive_create_table cannot be set together with hive_partition. resetting hive_create_table = N") # create hive staging table ddl based on csv header, then create hive staging table (filename, extension) = os.path.splitext(os.path.basename(csv_file)) hive_staging_table = "tmp_legoo_%s" % (os.getpid()) # replace . with _ in table name # hive_staging_table = hive_staging_table.replace('.', '_') # create staging table ddl (hive_staging_table, hive_ddl) = create_hive_ddl_from_csv(csv_file = csv_file, \ csv_delimiter = csv_delimiter, \ table_name = hive_staging_table, \ quiet = quiet, \ debug = debug) # drop staging table if exists execute_remote_hive_query(hive_node = hive_node, hive_port = hive_port, \ hive_db = hive_db, mapred_job_priority = mapred_job_priority, \ quiet = quiet, debug = debug, \ hive_query = "DROP TABLE IF EXISTS %s" % (hive_staging_table)) # create empty table execute_remote_hive_query( hive_node = hive_node, hive_port = hive_port, \ hive_db = hive_db, mapred_job_priority = mapred_job_priority, \ quiet = quiet, debug = debug, \ hive_query = hive_ddl) # load csv to hive staging table csv_to_hive_table(hive_node = hive_node, \ hive_port = hive_port, \ hive_db = hive_db, \ hive_table = hive_staging_table, \ hive_overwrite = hive_overwrite, \ mapred_job_priority = mapred_job_priority, \ csv_file = csv_file, \ csv_delimiter = csv_delimiter, \ csv_header = csv_header, \ remove_carriage_return = remove_carriage_return, \ quiet = quiet, \ debug = debug) # example: hive_partition="date_int = 20130428" if (hive_partition): if (not hive_table): legoo.error("hive_table needs to be specified") raise TypeError("[ERROR] hive_table needs to be specified") hive_query = "ALTER TABLE %s DROP IF EXISTS PARTITION (%s)" % (hive_table, hive_partition) execute_remote_hive_query( hive_node = hive_node, hive_port = hive_port, \ hive_db = hive_db, mapred_job_priority = mapred_job_priority, \ quiet = quiet, debug = debug, \ hive_query = hive_query) hive_query = "ALTER TABLE %s ADD PARTITION (%s)" % (hive_table, hive_partition) execute_remote_hive_query( hive_node = hive_node, hive_port = hive_port, \ hive_db = hive_db, mapred_job_priority = mapred_job_priority, \ quiet = quiet, debug = debug, \ hive_query = hive_query) # load staging table to target table hive_query = "INSERT OVERWRITE TABLE %s partition (%s) select * from %s" % (hive_table, hive_partition, hive_staging_table) elif (hive_create_table.strip().lower() == 'y'): hive_query = "ALTER TABLE %s RENAME TO %s" % (hive_staging_table, hive_table) elif (hive_create_table.strip().lower() == 'n'): hive_query = "INSERT %s TABLE %s select * 
from %s" % (hive_overwrite, hive_table, hive_staging_table) execute_remote_hive_query( hive_node = hive_node, hive_port = hive_port, \ hive_db = hive_db, mapred_job_priority = mapred_job_priority, \ quiet = quiet, debug = debug, \ hive_query = hive_query) # drop staging table hive_query = "DROP TABLE IF EXISTS %s" % (hive_staging_table) execute_remote_hive_query( hive_node = hive_node, hive_port = hive_port, \ hive_db = hive_db, mapred_job_priority = mapred_job_priority, \ quiet = quiet, debug = debug, \ hive_query = hive_query) if (hive_partition): partition_str = "PARTITION (%s)" % hive_partition else: partition_str = "" legoo.info("hive table [%s]:[%s].[%s] %s successfully built" % (hive_node, hive_db, hive_table, partition_str)) # check if temp file exists and remove try: temp_file except NameError: pass else: remove_file(file=temp_file) # remove temp file def csv_to_hive_table(**kwargs): """import csv to existing hive table. 1. upload csv without header to hdfs 2. load csv from hdfs to target hive table note: 1. sqoop is slow, buggy, can't handle hive keywords, special characters in input, etc. 2. Two approaches to load into a partitioned table: the first loads from a staging table; the other uses load data directly, which is more efficient but the input file must have the same format i.e. file type, csv_delimiter etc. as the target table definition. To make the tool more elastic and more fault tolerant, the first approach is chosen. ----------------------------------------------------------------------------------- csv_to_hive_table(csv_file='/tmp/fact_imp_pdp.csv', csv_delimiter='tab', hive_table='fact_imp_pdp') ----------------------------------------------------------------------------------- """ debug = kwargs.pop("debug", "N") if (debug.strip().lower() == 'y'): pprint(kwargs) # pretty print kwargs # dictionary initialized with the name=value pairs in the keyword argument list hive_node = kwargs.pop("hive_node", "namenode2s") hive_port = kwargs.pop("hive_port", 10000) hive_db = kwargs.pop("hive_db", "staging") hive_table = kwargs.pop("hive_table", None) hive_overwrite = kwargs.pop("hive_overwrite", "Y") mapred_job_priority = kwargs.pop("mapred_job_priority", "NORMAL") csv_file = kwargs.pop("csv_file", None) csv_header = kwargs.pop("csv_header", "Y") csv_delimiter = kwargs.pop("csv_delimiter", 'tab') # default to tab csv_delimiter remove_carriage_return = kwargs.pop("remove_carriage_return", "N") quiet = kwargs.pop("quiet", "N") if (quiet.strip().lower() == 'y'): legoo.removeHandler(info_hand) # suppress logging if variable quiet set to Y if kwargs: legoo.error("Unsupported configuration options %s" % list(kwargs)) # log error raise TypeError("[ERROR] Unsupported configuration options %s" % list(kwargs)) # error out if table name not specified if (not hive_table): legoo.error("Table [%s] must be specified!" 
% (hive_table)) # trulia specific: ssh login to cdh4 cluster namenode2s as user dwr if (hive_node.strip().lower().split('.')[0] == 'namenode2s'): ssh_hive_node = 'dwr@' + hive_node else: ssh_hive_node = hive_node # remove the carriage return from input csv file if (remove_carriage_return.strip().lower() == 'y'): temp_file = csv_file + '2' cmd_remove_carriage_return = 'tr -d \'\\r\' < ' + csv_file + ' > ' + temp_file # replace carriage return with # legoo.info("remove special chracter \\ with # ==>> [%s]" % (cmd_remove_carriage_return)) os.system( cmd_remove_carriage_return ) # os.system call is easier than subprocess csv_file = temp_file if (debug.strip().lower() == 'y'): # dump the first 2 lines to verify csv_dump(csv_file=csv_file, csv_delimiter=csv_delimiter, lines=2) hdfs_inpath = "/tmp/" + hive_table # set hdfs_inpath # hadoop will not overwrite a file - so we'll nuke it ourselves hdfs_cmd = "ssh %s \'. .bash_profile; hadoop fs -rm %s 2>/dev/null\'" % (ssh_hive_node, hdfs_inpath) legoo.info("running hdfs clean up ==>> [%s]" % ( hdfs_cmd)) os.system( hdfs_cmd ) # os.system call is easier than subprocess for | # upload csv to hdfs. - for stdin, skip header if (csv_header.strip().lower() == 'y'): skip_header = 2 else: skip_header = 1 hdfs_cmd = "tail -n +%d %s | ssh %s \'hadoop fs -put - %s\'" % (skip_header, csv_file, ssh_hive_node, hdfs_inpath) legoo.info("running csv upload to hdfs ==>> [%s]" % ( hdfs_cmd)) os.system( hdfs_cmd ) # os.system call is easier than subprocess for | # load data inpath '/tmp/fact_imp_pdp.csv' overwrite into table tmp_fact_imp_pdp; # if not hive_overwrite, set to append mode # Note that if the target table (or partition) already has a file whose name collides with any of the filenames contained in filepath, then the existing file will be replaced with the new file. if (hive_overwrite.strip().lower() == 'y'): hive_overwrite = ' OVERWRITE ' else: hive_overwrite = ' ' if (csv_delimiter.strip().lower() == 'tab'): csv_delimiter = "\'\\t\'" hive_load_query = "load data inpath \'%s\' %s into table %s" % (hdfs_inpath, hive_overwrite, hive_table) execute_remote_hive_query( hive_node = hive_node, hive_port = hive_port, \ hive_db = hive_db, mapred_job_priority = mapred_job_priority, \ quiet = quiet, debug = debug, \ hive_query = hive_load_query) # verify if table count match csv count number_rows = count_hive_table_rows(hive_node = hive_node, hive_port = hive_port, \ hive_db = hive_db, mapred_job_priority = mapred_job_priority, \ quiet = quiet, debug = debug, \ hive_table = hive_table) num_lines = count_lines(file=csv_file, skip_header=csv_header) if ( int(num_lines) == int(number_rows) ): legoo.info("file [%s] successfully loaded to hive table [%s]:[%s].[%s]. \n" % \ (csv_file, hive_node, hive_db, hive_table)) else: legoo.error("file [%s] count not match hive table [%s]:[%s].[%s] count. \n" % \ (csv_file, hive_node, hive_db, hive_table)) raise Exception("[ERROR] file [%s] count not match hive table [%s]:[%s].[%s] count. 
\n" % \ (csv_file, hive_node, hive_db, hive_table)) def hive_to_mysql( **kwargs ): """export [hive table | user defined query ] to csv_file, create mysql table based on csv_file header, then load csv_file to mysql table -------------------------------------------------------------------------------------------------- hive_to_mysql(hive_table='fact_imp_pdp', hive_query='select * from bi.fact_imp_pdp limit 1100000') -------------------------------------------------------------------------------------------------- """ debug = kwargs.pop("debug", "N") if (debug.strip().lower() == 'y'): pprint(kwargs) # pretty print kwargs # dictionary initialized with the name=value pairs in the keyword argument list hive_node = kwargs.pop("hive_node", "namenode2s") hive_db = kwargs.pop("hive_db", "bi") hive_table = kwargs.pop("hive_table", None) csv_file = kwargs.pop("csv_file", None) hive_query = kwargs.pop("hive_query", None) mapred_job_priority = kwargs.pop("mapred_job_priority", "NORMAL") mysql_ini = kwargs.pop("mysql_ini", "mysql.ini") mysql_host = kwargs.pop("mysql_host", "bidbs") mysql_db = kwargs.pop("mysql_db", "bi_staging") mysql_user = kwargs.pop("mysql_user", "root") mysql_password = kwargs.pop("mysql_password", None) mysql_table = kwargs.pop("mysql_table", None) mysql_truncate_table = kwargs.pop("mysql_truncate_table", "Y") csv_delimiter = kwargs.pop("csv_delimiter", 'tab') # default to tab csv_delimiter csv_optionally_enclosed_by = kwargs.pop("csv_optionally_enclosed_by", None) max_rows = kwargs.pop("max_rows", None) mysql_create_table = kwargs.pop("mysql_create_table", "N") quiet = kwargs.pop("quiet", "N") if (quiet.strip().lower() == 'y'): legoo.removeHandler(info_hand) # suppress logging if variable quiet set to Y if kwargs: legoo.error("Unsupported configuration options %s" % list(kwargs)) # log error raise TypeError("[ERROR] Unsupported configuration options %s" % list(kwargs)) # export hive table to csv_file csv_file = hive_to_csv(hive_node = hive_node, \ hive_db = hive_db, \ hive_table = hive_table, \ mapred_job_priority = mapred_job_priority, \ csv_file = csv_file, \ hive_query = hive_query, \ quiet = quiet, \ debug = debug) # raw_input('press any key to continue ...') # dump the first 10 lines to verify if (debug.strip().lower() == 'y'): csv_dump(csv_file=csv_file, csv_delimiter='tab', lines=10) # raw_input('press any key to continue ...') # import csv to mysql table csv_to_mysql(mysql_host = mysql_host, \ mysql_db = mysql_db, \ mysql_user = mysql_user, \ mysql_password = mysql_password, \ mysql_table = mysql_table, \ mysql_truncate_table = mysql_truncate_table, \ csv_delimiter = csv_delimiter, \ csv_optionally_enclosed_by = csv_optionally_enclosed_by, \ csv_file = csv_file, \ max_rows = max_rows, \ mysql_create_table = mysql_create_table, \ quiet = quiet, \ debug = debug) remove_file(file=csv_file) # remove temp file def hive_to_csv( **kwargs ): """export [hive table | user defined hive_query ] to csv. 
--------------------------------------------------------------------------------------------------------------- hive_to_csv(hive_table='fact_imp_pdp') hive_to_csv(csv_file='/tmp/dim_listing.csv',hive_query='select * from bi.fact_imp_pdp limit 1100000',debug='Y') --------------------------------------------------------------------------------------------------------------- """ debug = kwargs.pop("debug", "N") if (debug.strip().lower() == 'y'): pprint(kwargs) # pretty print kwargs hive_node = kwargs.pop("hive_node", "namenode2s") hive_db = kwargs.pop("hive_db", "bi") hive_table = kwargs.pop("hive_table", None) hive_query = kwargs.pop("hive_query", None) mapred_job_priority = kwargs.pop("mapred_job_priority", "NORMAL") csv_dir = kwargs.pop("csv_dir", "/data/tmp/") csv_file = kwargs.pop("csv_file", None) quiet = kwargs.pop("quiet", "N") if (quiet.strip().lower() == 'y'): legoo.removeHandler(info_hand) # suppress logging if variable quiet set to Y if kwargs: legoo.error("Unsupported configuration options %s" % list(kwargs)) # log error raise TypeError("[ERROR] Unsupported configuration options %s" % list(kwargs)) # trulia specific: login to cdh4 cluster namenode2s as dwr if (hive_node.strip().lower().split('.')[0] == 'namenode2s'): hive_node = 'dwr@' + hive_node if (not csv_file): if (not hive_table): csv_file = csv_dir.strip() + str(os.getpid()).strip() + ".csv" # set default csv else: csv_file = csv_dir.strip() + hive_table + ".csv" # set default csv else: csv_file = csv_dir.strip() + csv_file.strip() # set default csv if (not hive_query): hive_query = "select * from %s.%s" % (hive_db, hive_table) # set default hive_query # check and set default value for mapred_job_priority if (mapred_job_priority.strip().upper() in ["VERY_HIGH", "HIGH", "NORMAL", "LOW", "VERY_LOW"]): mapred_job_priority = mapred_job_priority.strip().upper() else: legoo.warning("option mapred_job_priority [%s] must in list [VERY_HIGH, HIGH, NORMAL, LOW, VERY_LOW]. default to NORMAL." % (mapred_job_priority)) mapred_job_priority = "NORMAL" # hive_query must enclose with quote hive_query = '\"use %s; set hive.cli.print.header=true; set mapred.job.priority=%s; ' % (hive_db, mapred_job_priority) + hive_query + ';\"' hive_cmd = 'ssh %s hive -e ' % (hive_node) + hive_query + ' > ' + csv_file legoo.info("running hive export ...\n[%s]\n" % (hive_cmd)) with open(csv_file, "w") as outfile: rc = subprocess.call(['ssh', hive_node, 'hive', '-e', hive_query], stdout=outfile) legoo.info("hive table %s:(%s) exported to %s ..." % (hive_node, hive_query, csv_file)) return csv_file def csv_to_mysql(**kwargs): """create mysql table in target db (bidbs:bi_staging by default) based on csv header then import csv to mysql table. The other four mysql_host, mysql_db, mysql_truncate_table and debug are optional. 
------------------------------------------------------------------------------------------------ csv_to_mysql(csv_file='/tmp/fact_imp_pdp.csv', csv_delimiter='tab', mysql_create_table = 'Y') ------------------------------------------------------------------------------------------------ """ debug = kwargs.pop("debug", "N") if (debug.strip().lower() == 'y'): pprint(kwargs) # pretty print kwargs # dictionary initialized with the (name, value) pairs in the keyword argument list mysql_host = kwargs.pop("mysql_host", "bidbs") mysql_db = kwargs.pop("mysql_db", "bi_staging") mysql_user = kwargs.pop("mysql_user", None) mysql_password = kwargs.pop("mysql_password", None) mysql_create_table = kwargs.pop("mysql_create_table", "N") mysql_table = kwargs.pop("mysql_table", None) mysql_truncate_table = kwargs.pop("mysql_truncate_table", "N") csv_file = kwargs.pop("csv_file", None) csv_header = kwargs.pop("csv_header", "Y") csv_delimiter = kwargs.pop("csv_delimiter", 'tab') # default to tab csv_delimiter csv_optionally_enclosed_by = kwargs.pop("csv_optionally_enclosed_by", None) max_rows = kwargs.pop("max_rows", None) quiet = kwargs.pop("quiet", "N") if (quiet.strip().lower() == 'y'): legoo.removeHandler(info_hand) # suppress logging if variable quiet set to Y if kwargs: legoo.error("Unsupported configuration options %s" % list(kwargs)) # log error raise TypeError("[ERROR] Unsupported configuration options %s" % list(kwargs)) # check number of lines in csv file num_lines = count_lines(file=csv_file, skip_header=csv_header) if (int(num_lines) == 0): legoo.error("%s is empty!" % (csv_file)) raise TypeError("[ERROR] %s is empty!" % (csv_file)) # create table if mysql_create_table set to Y if (mysql_create_table.strip().lower() == 'y'): # create ddl (mysql_table_name, ddl) = create_mysql_ddl_from_csv(csv_file = csv_file, \ csv_delimiter = csv_delimiter, \ table_name = mysql_table, \ max_rows = max_rows, \ mysql_create_table = mysql_create_table, \ quiet = quiet, \ debug = debug) # create table execute_mysql_query(mysql_host = mysql_host, \ mysql_db = mysql_db, \ mysql_user = mysql_user, \ mysql_password = mysql_password, \ mysql_query = ddl, \ quiet = quiet, \ debug = debug) # set mysql_table to mysql_table_name if not specified if (not mysql_table): mysql_table = mysql_table_name if (mysql_truncate_table.strip().lower() == 'y'): execute_mysql_query(mysql_host=mysql_host, mysql_db=mysql_db, \ mysql_user=mysql_user, mysql_password=mysql_password, \ mysql_query="TRUNCATE TABLE %s.%s" % (mysql_db, mysql_table), \ quiet = quiet, debug=debug) # check table row count mysql_query = "select count(*) from %s.%s;" % (mysql_db, mysql_table) (affected_rows, number_rows) = execute_mysql_query(mysql_host=mysql_host, mysql_db=mysql_db, \ mysql_user=mysql_user, mysql_password=mysql_password, \ mysql_query=mysql_query, row_count='Y', \ quiet = quiet, debug=debug) table_count_before_load = number_rows # load csv into mysql table csv_to_mysql_table(mysql_host=mysql_host, mysql_db=mysql_db, mysql_user=mysql_user, \ mysql_password=mysql_password, mysql_table=mysql_table, \ csv_file=csv_file, csv_header=csv_header, csv_delimiter=csv_delimiter, \ csv_optionally_enclosed_by=csv_optionally_enclosed_by, \ quiet = quiet, debug=debug) (affected_rows, number_rows) = execute_mysql_query(mysql_host=mysql_host, mysql_db=mysql_db, \ mysql_user=mysql_user, mysql_password=mysql_password, \ mysql_query=mysql_query, row_count='Y', \ quiet = quiet, debug=debug) table_count_after_load = number_rows # delta: diff between table count before load and 
after load number_rows = int(table_count_after_load) - int(table_count_before_load) legoo.info("MySQL table [%s]:[%s].[%s] load count ==>> [%s]" % (mysql_host, mysql_db, mysql_table, number_rows)) # verify the csv line count and table count if ( int(num_lines) == int(number_rows) ): legoo.info("file [%s] successfully loaded to mysql table [%s]:[%s].[%s]" % (csv_file, mysql_host, mysql_db, mysql_table)) else: legoo.error("file [%s] count does not match mysql table [%s]:[%s].[%s] load count" % (csv_file, mysql_host, mysql_db, mysql_table)) raise Exception("[ERROR] file [%s] count does not match mysql table [%s]:[%s].[%s] load count" % (csv_file, mysql_host, mysql_db, mysql_table)) def csv_to_mysql_table(**kwargs): """import csv to existing mysql table in target db (bidbs:bi_staging by default) with 5 parameters. mysql_table is required. The other four mysql_host, mysql_db, mysql_truncate_table and debug are optional. ----------------------------------------------------------------------------------------------------- csv_to_mysql_table(mysql_table='tmp_table', csv_file='/tmp/hive_bi_dim_listing.csv', csv_delimiter='tab') ----------------------------------------------------------------------------------------------------- """ debug = kwargs.pop("debug", "N") if (debug.strip().lower() == 'y'): pprint(kwargs) # pretty print kwargs # dictionary initialized with the name=value pairs in the keyword argument list mysql_host = kwargs.pop("mysql_host", "bidbs") mysql_db = kwargs.pop("mysql_db", "bi_staging") mysql_user = kwargs.pop("mysql_user", None) mysql_password = kwargs.pop("mysql_password", None) mysql_table = kwargs.pop("mysql_table", None) csv_file = kwargs.pop("csv_file", None) csv_delimiter = kwargs.pop("csv_delimiter", 'tab') # default to tab csv_delimiter csv_header = kwargs.pop("csv_header", "Y") csv_optionally_enclosed_by = kwargs.pop("csv_optionally_enclosed_by", None) quiet = kwargs.pop("quiet", "N") if (quiet.strip().lower() == 'y'): legoo.removeHandler(info_hand) # suppress logging if variable quiet set to Y if kwargs: legoo.error("Unsupported configuration options %s" % list(kwargs)) # log error raise TypeError("[ERROR] Unsupported configuration options %s" % list(kwargs)) # raise error and exit # run mysql dml if (not mysql_table): legoo.error("need to specify mysql_table") raise TypeError("need to specify mysql_table") # add quote to csv_delimiter if (csv_delimiter.strip().lower() == 'tab'): csv_delimiter = '\\t' # print '%s:csv_delimiter =>>>> [%s]' %(sys._getframe().f_code.co_name, csv_delimiter) if (csv_optionally_enclosed_by): enclosed_by = "OPTIONALLY ENCLOSED BY '%s'" % (csv_optionally_enclosed_by) else: enclosed_by = '' if (csv_header.strip().lower() == 'n'): ignore_line = '' else: ignore_line = 'IGNORE 1 LINES' # if (csv_optionally_enclosed_by = '\"') mysql_dml = """LOAD DATA LOCAL INFILE '%s' INTO TABLE %s FIELDS TERMINATED BY '%s' %s %s""" % (csv_file, mysql_table, csv_delimiter, enclosed_by, ignore_line) # adhocdb/adhocmaildb cant LOAD DATA using MySQLDB client. 
it is possible due to older version of MySQL Server # fall back to less preferred system command if (mysql_host not in trulia_mysql_host): # adhocdb use a non-standard password and db mysql_cmd = 'mysql -h%s -u%s -p%s %s -e "%s"' % \ ( mysql_host, mysql_user, mysql_password, mysql_db, mysql_dml) legoo.info("running MySQL command: [%s]" % (mysql_cmd)) os.system( mysql_cmd ) else: execute_mysql_query(mysql_host=mysql_host, mysql_db=mysql_db, \ mysql_user=mysql_user, mysql_password=mysql_password, \ mysql_query=mysql_dml, \ quiet = quiet, debug=debug) def execute_remote_hive_query(**kwargs): """execute hive query on remote hive node ------------------------------------------------------- execute_remote_hive_query(hive_query='desc top50_ip;') ------------------------------------------------------- """ debug = kwargs.pop("debug", "N") if (debug.strip().lower() == 'y'): pprint(kwargs) # pretty print kwargs # dictionary initialized with the name=value pairs in the keyword argument list hive_node = kwargs.pop("hive_node", "namenode2s") hive_port = kwargs.pop("hive_port", 10000) hive_db = kwargs.pop("hive_db", "staging") hive_query = kwargs.pop("hive_query", None) mapred_job_priority = kwargs.pop("mapred_job_priority", "NORMAL") quiet = kwargs.pop("quiet", "N") if (quiet.strip().lower() == 'y'): legoo.removeHandler(info_hand) # suppress logging if variable quiet set to Y if kwargs: legoo.error("Unsupported configuration options %s" % list(kwargs)) # log error raise TypeError("[ERROR] Unsupported configuration options %s" % list(kwargs)) # raise error and exit # check and set default value for mapred_job_priority if (mapred_job_priority.strip().upper() in ["VERY_HIGH", "HIGH", "NORMAL", "LOW", "VERY_LOW"]): mapred_job_priority = mapred_job_priority.strip().upper() else: legoo.warning("option mapred_job_priority [%s] must in list [VERY_HIGH, HIGH, NORMAL, LOW, VERY_LOW]. default to NORMAL." % (mapred_job_priority)) mapred_job_priority = "NORMAL" from hive_service import ThriftHive from hive_service.ttypes import HiveServerException from thrift import Thrift from thrift.transport import TSocket from thrift.transport import TTransport from thrift.protocol import TBinaryProtocol # hive_query must enclose with quote hive_query_with_quote = '\"use %s; %s\"' % (hive_db, hive_query) # print hive_query_with_quote hive_cmd = 'ssh %s hive -e %s' % (hive_node, hive_query_with_quote) legoo.info("running hive query on [%s]:[%s] ==>> [%s]" % (hive_node, hive_db, hive_query)) # rc = subprocess.call(['ssh', hive_node, 'hive', '-e', hive_query_with_quote]) result_set = [0] try: transport = TSocket.TSocket(hive_node, hive_port) transport = TTransport.TBufferedTransport(transport) protocol = TBinaryProtocol.TBinaryProtocol(transport) client = ThriftHive.Client(protocol) transport.open() client.execute("use %s" % (hive_db)) client.execute("set mapred.job.priority=%s" % (mapred_job_priority)) client.execute(hive_query) # client.execute("desc dim_listing") # client.execute("select * from dim_listing limit 10") result_set = client.fetchAll() transport.close() return result_set except Thrift.TException, tx: raise Exception('[ERROR] %s' % (tx.message)) def execute_mysql_query(**kwargs): """return tuple (rows_affected, number_of_rows) after execute mysql query on target db (bidbs:bi_staging by default). 
------------------------------------------------------------------------------------------------- execute_mysql_query(mysql_host='bidbs', mysql_db='bi_staging', mysql_query='select current_date') ------------------------------------------------------------------------------------------------- """ debug = kwargs.pop("debug", "N") if (debug.strip().lower() == 'y'): pprint(kwargs) # pretty print kwargs # dictionary initialized with the name=value pairs in the keyword argument list mysql_ini = kwargs.pop("mysql_ini", "mysql.ini") mysql_host = kwargs.pop("mysql_host", "bidbs") mysql_db = kwargs.pop("mysql_db", "bi_staging") mysql_user = kwargs.pop("mysql_user", None) mysql_password = kwargs.pop("mysql_password", None) mysql_query = kwargs.pop("mysql_query", None) row_count = kwargs.pop("row_count", "N") quiet = kwargs.pop("quiet", "N") if (quiet.strip().lower() == 'y'): legoo.removeHandler(info_hand) # suppress logging if variable quiet set to Y if kwargs: legoo.error("Unsupported configuration options %s" % list(kwargs)) # log error raise TypeError("[ERROR] Unsupported configuration options %s" % list(kwargs)) # raise error and exit number_of_rows = rows_affected = 0 # set default value legoo.info("running mysql query on [%s]:[%s] ==>> [%s]" % (mysql_host, mysql_db, mysql_query)) try: mysql_conn = create_mysql_connection(mysql_ini = mysql_ini, \ mysql_host = mysql_host, \ mysql_db = mysql_db, \ mysql_user = mysql_user, \ mysql_password = mysql_password, \ quiet = quiet, \ debug = debug \ ) cursor = mysql_conn.cursor() rows_affected = cursor.execute(mysql_query) if (row_count.strip().lower() == 'y'): (number_of_rows,)=cursor.fetchone() # used for counts else: rs = cursor.fetchall() if (len(rs) > 0): pprint(rs) if (debug.strip().lower() == 'y'): legoo.info('[%s] rows affected by query [%s].' % (rows_affected, mysql_query)) legoo.info('[INFO] [%s] number of rows returned by query [%s].' 
% (number_of_rows, mysql_query)) return (rows_affected, number_of_rows) except MySQLdb.Error as e: legoo.info('[ERROR] [%s] failed on [%s].[%s]' % ( mysql_query, mysql_host, mysql_db)) legoo.info("[ERROR] %d: %s" % (e.args[0], e.args[1])) raise finally: cursor.close() mysql_conn.close() def qa_mysql_table(**kwargs): debug = kwargs.pop("debug", "N") if (debug.strip().lower() == 'y'): pprint(kwargs) # pretty print kwargs # dictionary initialized with the name=value pairs in the keyword argument list mysql_ini = kwargs.pop("mysql_ini", "mysql.ini") mysql_host = kwargs.pop("mysql_host", "bidbs") mysql_db = kwargs.pop("mysql_db", "bi_staging") mysql_user = kwargs.pop("mysql_user", None) mysql_password = kwargs.pop("mysql_password", None) mysql_query = kwargs.pop("mysql_query", None) comparison_operator = kwargs.pop("comparison_operator", None) threshhold_value = kwargs.pop("threshhold_value", None) quiet = kwargs.pop("quiet", "N") if (quiet.strip().lower() == 'y'): legoo.removeHandler(info_hand) # suppress logging if variable quiet set to Y if kwargs: legoo.error("Unsupported configuration options %s" % list(kwargs)) # log error raise TypeError("[ERROR] Unsupported configuration options %s" % list(kwargs)) # raise error and exit (affected_rows, number_rows) = execute_mysql_query(mysql_host = mysql_host, \ mysql_db = mysql_db, \ mysql_user = mysql_user, \ mysql_password = mysql_password, \ mysql_query = mysql_query, \ row_count = 'Y', \ quiet = quiet, \ debug = debug) # print "number_rows => [%s]; options.comparison_operator => [%s]; options.threshhold => [%s]" % \ (number_rows, options.comparison_operator, options.threshhold) if ( (not mysql_query) or (not comparison_operator) or (not threshhold_value)): legoo.error("option mysql_query, comparison_operator, threshhold_value not all set. must specify all three ... ") # log error raise TypeError("option mysql_query, comparison_operator, threshhold_value not all set. must specify all three...") # raise error and exit # build operator dictionary to python built in operator ops = {"=": operator.eq, "==": operator.eq, "!=": operator.ne, "<>": operator.ne, "<": operator.lt, "<=": operator.le, ">": operator.gt, ">=": operator.ge } # may the key to the build-in operator op_func = ops[comparison_operator] if op_func(int(number_rows), int(threshhold_value)): legoo.info('[INFO] [%s] passed test: {[%s] [%s] [%s]}' % (mysql_query, number_rows, comparison_operator, threshhold_value)) else: legoo.error('[ERROR] [%s] failed test: {[%s] [%s] [%s]}' % (mysql_query, number_rows, comparison_operator, threshhold_value)) raise TypeError('[ERROR] [%s] failed test: {[%s] [%s] [%s]}' % (mysql_query, number_rows, comparison_operator, threshhold_value)) def create_mysql_connection(**kwargs): """return myql connection object based on configurations in mysql_ini. For security reason, user/password pulled from mysql.ini. extend ini file if necessary. 
       -------------------------------------------------------------------------------------------
       create_mysql_connection(mysql_host='bidbs', mysql_db='bi_staging', debug='N')
       -------------------------------------------------------------------------------------------
    """
    debug = kwargs.pop("debug", "N")
    if (debug.strip().lower() == 'y'):
        pprint(kwargs)                       # pretty print kwargs
    # dictionary initialized with the name=value pairs in the keyword argument list
    mysql_ini      = kwargs.pop("mysql_ini", "mysql.ini")
    mysql_host     = kwargs.pop("mysql_host", "bidbs")
    mysql_db       = kwargs.pop("mysql_db", "bi_staging")
    mysql_user     = kwargs.pop("mysql_user", None)
    mysql_password = kwargs.pop("mysql_password", None)
    quiet          = kwargs.pop("quiet", "N")
    if (quiet.strip().lower() == 'y'):
        legoo.removeHandler(info_hand)       # suppress logging if variable quiet set to Y
    if kwargs:
        legoo.error("Unsupported configuration options %s" % list(kwargs))                 # log error
        raise TypeError("[ERROR] Unsupported configuration options %s" % list(kwargs))     # raise error and exit
    # parse the ini file to pull db variables
    config = ConfigParser.ConfigParser()
    # find mysql_ini in the same directory as the script; extend mysql.ini if necessary
    config.read(os.path.join(os.path.dirname(__file__), mysql_ini))
    # set default mysql_user from mysql_ini
    if (not mysql_user):
        if (mysql_host not in trulia_mysql_host):   # adhocdb uses a non-standard password and db.
            mysql_user = config.get(mysql_host, "user")
        else:
            mysql_user = config.get('default', "user")
    # set default mysql_password from mysql_ini
    if (not mysql_password):
        if (mysql_host not in trulia_mysql_host):   # adhocdb uses a non-standard password and db.
            mysql_password = config.get(mysql_host, "password")
        else:
            mysql_password = config.get('default', "password")
    mysql_conn = MySQLdb.connect(host   = mysql_host,      # the host
                                 user   = mysql_user,      # user name
                                 passwd = mysql_password,  # password
                                 db     = mysql_db)        # default database/schema
    # test connection and print out debug info
    if (debug.strip().lower() == 'y'):
        cursor = mysql_conn.cursor()
        cursor.execute("SELECT CURRENT_DATE()")
        data = cursor.fetchone()             # fetch a single row using fetchone() method.
        legoo.info("FUNCTION STARTS: [ %s ] >>>>>>" % (sys._getframe().f_code.co_name))
        legoo.info("script    => %s" % (os.path.abspath(__file__)))
        legoo.info("mysql_ini => %s/%s" % (os.getcwd(), mysql_ini))
        legoo.info("host:db   => %s:%s" % (mysql_host, mysql_db))
        legoo.info("SELECT CURRENT_DATE() => %s" % (data))
        legoo.info("FUNCTION ENDS <<<<<<\n")
        cursor.close()
    return mysql_conn

def create_mysql_ddl_from_csv(**kwargs):
    """return table name, mysql table ddl based on csv header.
       by default, scan the whole file to detect column length.
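       example call (illustrative; mirrors the commented usage in main() below):
       ------------------------------------------------------------------------------------------------------------------
       (table_name, mysql_ddl) = create_mysql_ddl_from_csv(csv_file='/tmp/fact_imp_pdp.csv', csv_delimiter='tab',
                                                           mysql_create_table='Y', max_rows=60000)
       ------------------------------------------------------------------------------------------------------------------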
""" debug = kwargs.pop("debug", "N") if (debug.strip().lower() == 'y'): pprint(kwargs) # pretty print kwargs # dictionary initialized with the name=value pairs in the keyword argument list csv_file = kwargs.pop("csv_file", None) csv_delimiter = kwargs.pop("csv_delimiter", 'tab') # default to tab table_name = kwargs.pop("table_name", None) max_rows = kwargs.pop("max_rows", None) mysql_create_table = kwargs.pop("mysql_create_table", "N") quiet = kwargs.pop("quiet", "N") if (quiet.strip().lower() == 'y'): legoo.removeHandler(info_hand) # suppress logging if variable quiet set to Y if kwargs: legoo.error("Unsupported configuration options %s" % list(kwargs)) # log error raise TypeError("[ERROR] Unsupported configuration options %s" % list(kwargs)) # raise error and exit # initilize variables csv.field_size_limit(sys.maxsize) # override default size 131072 column_len = {} # dictionary for column and length i = 0 # row counter ddl = '' # initilize ddl to empty string if (csv_delimiter.strip().lower() == 'tab'): csv_delimiter = '\t' # set default table name from file name if not table_name: # get the base file name then split to filename and extension (filename, extension) = os.path.splitext(os.path.basename(csv_file)) table_name = filename # build table table with csv prefix table_name = table_name.replace('.', '_') # replace . with _ in table name # scan csv file up to max_rows to find the max column length with open(csv_file, "rb") as csv_file_new: # create reader object reader = csv.DictReader(csv_file_new, delimiter=csv_delimiter) # initialize column length dictionary for ddl for fn in reader.fieldnames: column_len[fn] = 0 for row in reader: i += 1 # for large csv files, print out progress indicator for every 100K rows if (i % 100000) == 0: print '[INFO] [%s] scanning %d rows to calculate the max column length ...' % (csv_file, i) # find the max field length for fn in reader.fieldnames: # swap with current column_len if greater if len(row[fn]) > column_len[fn]: column_len[fn] = len(row[fn]) # for large file, stop after reaching max_rows if (max_rows and int(i) == int(max_rows)): # print "here $$$", i, '==>>', max_rows for fn in column_len: column_len[fn] *= 2 # estimate by doubling the current max column length break # set max_column_name_len for ddl formating, add 8 to seperate column name and data type max_column_name_len = len(max(column_len.keys(), key=len)) + 8 # ddl with table name and primary key id ddl = '' ddl += "CREATE TABLE %s (\n" % (table_name) # create ddl from reader.fieldnames which preserve the original csv order for index, fn in enumerate(reader.fieldnames): if (column_len[fn] > 2000): data_type = 'TEXT' else: data_type = 'VARCHAR' if (index + 1) < len(reader.fieldnames): ddl += fn.ljust(max_column_name_len) + data_type + '(' + str(column_len[fn]) + '), \n' else: # last column ddl += fn.ljust(max_column_name_len) + data_type + '(' + str(column_len[fn]) + ') \n);' if (debug.strip().upper() == 'Y'): legoo.info('[INFO] [%s] => scanned %d rows to calculate the max column length. 
\n' % (csv_file, i)) legoo.info(''.ljust(50, '='), '\n', ddl, '\n', ''.ljust(50, '=')) return (table_name, ddl) def create_hive_ddl_from_csv(**kwargs): """return table name, table ddl based on csv header ------------------------------------------------------------------------------------------------------------------ (table_name, hive_ddl)=create_hive_ddl_from_csv(csv_file='/tmp/fact_imp_pdp.csv', csv_delimiter='tab') ------------------------------------------------------------------------------------------------------------------ """ debug = kwargs.pop("debug", "N") if (debug.strip().lower() == 'y'): pprint(kwargs) # pretty print kwargs # dictionary initialized with the name=value pairs in the keyword argument list csv_file = kwargs.pop("csv_file", None) csv_delimiter = kwargs.pop("csv_delimiter", 'tab') # default to tab table_name = kwargs.pop("table_name", None) quiet = kwargs.pop("quiet", "N") if (quiet.strip().lower() == 'y'): legoo.removeHandler(info_hand) # suppress logging if variable quiet set to Y if kwargs: legoo.error("Unsupported configuration options %s" % list(kwargs)) # log error raise TypeError("[ERROR] Unsupported configuration options %s" % list(kwargs)) # raise error and exit # set default arguments if (not table_name): # set default table name from file name # get the base file name then split to filename and extension (filename, extension) = os.path.splitext(os.path.basename(csv_file)) table_name = filename # build table table with csv prefix # convert csv_delimiter to octal code for hive if (csv_delimiter.strip().lower() == 'tab'): csv_delimiter = '\t' hive_csv_delimiter = '\\011' elif (csv_delimiter.strip() == ','): hive_csv_delimiter = '\\054' # initialize variables hive_keywords = ['location', 'date', 'format'] # scan csv file up to max_rows to find the max column length with open(csv_file, "rb") as csv_file_new: # create reader object reader = csv.DictReader(csv_file_new, delimiter=csv_delimiter) # find the max length of column names for nice formatting max_fn_len = len(max(list(reader.fieldnames), key=len)) + 4 hive_ddl = "CREATE TABLE %s (\n" % (table_name) # create hive_ddl from reader.fieldnames which preserve the original csv order for index, fn in enumerate(reader.fieldnames): data_type = 'string' # set default data type # if column name from csv header is hive keywords, postfix with _new if (fn.lower().strip() in hive_keywords): fn = fn.lower().strip() + '_new' if (index + 1) < len(reader.fieldnames): hive_ddl += fn.ljust(max_fn_len) + data_type + ', \n' else: # last column hive_ddl += fn.ljust(max_fn_len) + data_type + '\n)\n' # add csv_delimiter specfication hive_ddl += "ROW FORMAT DELIMITED FIELDS TERMINATED BY \'%s\'\nSTORED AS TEXTFILE" % (hive_csv_delimiter) csv_file_new.close() return (table_name, hive_ddl) def csv_dump(**kwargs): """dump first n rows from csv -------------------------------------------------------------------- csv_dump(csv_file='/tmp/fact_imp_pdp.csv', csv_delimiter='tab', lines=5) -------------------------------------------------------------------- """ # print kwargs # dictionary initialized with the name=value pairs in the keyword argument list csv_file = kwargs.pop("csv_file", None) csv_delimiter = kwargs.pop("csv_delimiter", 'tab') # default to tab lines = kwargs.pop("lines", 10) # default to tab line_number = kwargs.pop("line_number", 2) # default to line number 2, first line is header debug = kwargs.pop("debug", "N") if kwargs: raise TypeError("[ERROR] Unsupported configuration options %s" % list(kwargs)) # initilize 
variables csv.field_size_limit(sys.maxsize) # override default size 131072 column_len = {} # dictionary for column and length i = 0 # row counter if (csv_delimiter.strip().lower() == 'tab'): csv_delimiter = '\t' # if need to get lines from the middle of file # create a new temp file start with line number if (line_number > 2): tmp_file = '/tmp/temp_csv.csv' tmp_csv_cmd = "head -1 %s > %s; " % (csv_file, tmp_file) tmp_csv_cmd = tmp_csv_cmd + "tail -n +%s %s | head -%s >> %s;" % (line_number, csv_file, lines, tmp_file) print tmp_csv_cmd os.system( tmp_csv_cmd ) csv_file = tmp_file # scan csv file up to max_rows to find the max column length with open(csv_file, "rb") as csv_file_new: # create reader object reader = csv.DictReader(csv_file_new, delimiter=csv_delimiter) # find the max field name lenght for formating max_fn_len = len(max(list(reader.fieldnames), key=len)) for row in reader: # print "\nRECORD NUMBER %d: " % (i) print '='.ljust(max_fn_len + 29, '=') print "Line number".ljust( max_fn_len + 9), "\t<<<<\t[%s]" % (int(line_number) + i) column_index = 1 # track column index for fn in reader.fieldnames: print "[c%s] " % (str(column_index).rjust(3, '0')), fn.ljust( max_fn_len), "\t==>>\t[%s]" % (row[fn]) column_index += 1 # print '='.ljust(max_fn_len + 20, '=') i += 1 if (int(i) == min(int(lines), 100)): break def wait_for_table(**kwargs): """check if table exists, and updated after [mysql_table_update_after] if not, retry based on [sleep_interval], [num_retry] and/or [stop_at] NOTE: 1. need access to INFORMATION_SCHEMA.TABLES to retrieve update_time 2. option [STOP_AT] i.e. [2013-10-08 15:30], together with [mysql_table_update_after] i.e. [2013-10-09 14:25], define the table wait window 3. option [ETL_TABLE] and [ETL_JOB] are trulia specific which retrive table last update from proprietary [AUDIT_JOB] database """ TABLE_FOUND = False # init variable to False i = 0 # init counter debug = kwargs.pop("debug", "N") if (debug.strip().lower() == 'y'): pprint(kwargs) # print kwargs # dictionary initialized with the name=value pairs in the keyword argument list mysql_ini = kwargs.pop("mysql_ini", "mysql.ini") mysql_host = kwargs.pop("mysql_host", None) mysql_db = kwargs.pop("mysql_db", None) mysql_user = kwargs.pop("mysql_user", None) mysql_password = kwargs.pop("mysql_password", None) mysql_table = kwargs.pop("mysql_table", None) etl_table = kwargs.pop("etl_table", None) etl_job = kwargs.pop("etl_job", None) mysql_table_update_after = kwargs.pop("mysql_table_update_after", None) sleep_interval = kwargs.pop("sleep_interval", 60) num_retry = kwargs.pop("num_retry", None) stop_at = kwargs.pop("stop_at", None) # [hh:mm] i.e. "14:30" quiet = kwargs.pop("quiet", "N") if (quiet.strip().lower() == 'y'): legoo.removeHandler(info_hand) # suppress logging if variable quiet set to Y if kwargs: legoo.error("Unsupported configuration options %s" % list(kwargs)) # log error raise TypeError("[ERROR] Unsupported configuration options %s" % list(kwargs)) # raise error and exit if ( not mysql_table and not etl_table and not etl_job ): legoo.error("Must specify one of options of [MYSQL_TABLE], [ETL_TABLE], and [ETL_JOB] ... ") # log error raise TypeError("Must specify one of options of [MYSQL_TABLE], [ETL_TABLE], and [ETL_JOB] ... ") # raise error and exit if ( num_retry ): num_retry = int(str(num_retry).strip().lower()) # format and convert to int if ( num_retry < 1) : legoo.error("invalid value [%s] for [num_retry]. try again with positive integer ... 
" % (num_retry)) # log error raise ValueError("invalid value [%s] for [num_retry]. try again with positive integer ... " \ % (num_retry)) # raise error and exit else: num_retry = 0 # default to 0 if not specified sleep_interval = int(str(sleep_interval).strip().lower()) # format and convert to int if ( sleep_interval < 1) : legoo.error("invalid value [%s] for [sleep_interval]. try again with positive integer ... " \ % (sleep_interval)) # log error raise ValueError("invalid value [%s] for [sleep_interval]. try again with positive integer ... " \ % (sleep_interval)) # raise error and exit if ( (not stop_at) and (not num_retry)): legoo.error("option stop_at and num_retry not set. must specify one ... ") # log error raise TypeError("option stop_at and num_retry not set. must specify one ...") # raise error and exit # create datetime object stop_at i.e. "14:30" if specified if ( stop_at ): stop_at_dt = datetime.datetime.strptime(stop_at, '%Y-%m-%d %H:%M') else: stop_at_dt = datetime.datetime.now() if ( mysql_table_update_after ): mysql_table_update_after_dt = datetime.datetime.strptime(mysql_table_update_after, '%Y-%m-%d %H:%M') else: mysql_table_update_after_dt = datetime.datetime.fromtimestamp(0).strftime("%Y-%m-%d %H:%M") # set default if ( num_retry ): num_retry = int(str(num_retry).strip().lower()) # format and convert to int if ( num_retry < 1) : legoo.error("invalid value [%s] for [num_retry]. try again with positive integer ... " % (num_retry)) # log error raise ValueError("invalid value [%s] for [num_retry]. try again with positive integer ... " \ % (num_retry)) # raise error and exit else: num_retry = 0 # default to 0 if not specified # build query depending options of [MYSQL_TABLE], [ETL_TABLE], and [ETL_JOB] if (mysql_table): mysql_query = """SELECT COUNT(*) FROM INFORMATION_SCHEMA.TABLES WHERE table_name = '%s' AND update_time >= '%s' """ % ( mysql_table, mysql_table_update_after_dt) elif (etl_table): mysql_query = """SELECT COUNT(*) FROM audit.audit_job_detail ajd, audit.audit_job aj WHERE ajd.job_id = aj.job_id AND LOWER(Task_Target_Table) = '%s' AND Task_End_Time > '%s' AND LOWER(job_success_flag) = 'y' AND LOWER(job_qa_success_flag) = 'y' """ % ( etl_table, mysql_table_update_after_dt) elif (etl_job): mysql_query = """SELECT COUNT(*) FROM audit.audit_job WHERE LOWER(job_name) LIKE '%s' AND LOWER(job_success_flag) = 'y' AND job_end_time >= '%s' """ % ( etl_job, mysql_table_update_after_dt) # variable for logging target = ''.join(filter(None, (mysql_table, etl_table, etl_job))) # greedy algorithm: when stop_at and num_retry both set, progam continues either stop_at or num_retry satisfies while ( (datetime.datetime.now() < stop_at_dt) or (i < num_retry) ): (affected_rows, number_rows) = execute_mysql_query(mysql_ini=mysql_ini, mysql_host=mysql_host, mysql_db=mysql_db, \ mysql_user=mysql_user, mysql_password=mysql_password, \ mysql_query=mysql_query, row_count='Y', debug=debug) if (int(number_rows) > 0): TABLE_FOUND = True # set flag to true if table exists legoo.info("table [%s] is ready" % (target)) break else: legoo.info("check #[%d] if table [%s] is ready" % (i, target)) sleep(sleep_interval) i += 1 if (debug.strip().lower() == 'y'): print i, "=>>", num_retry, ' ', datetime.datetime.now(), "=>>", stop_at_dt if (not TABLE_FOUND): legoo.error("table [%s] is NOT ready" % (target)) # log error raise TypeError("table [%s] is NOT ready" % (target)) # raise error and exit def wait_for_file(**kwargs): """check if file exists or newer, otherwise, wait and retry 
--------------------------------------------------------- wait_for_file(num_retry = 10, sleep_interval=60, file = 'test') --------------------------------------------------------- """ debug = kwargs.pop("debug", "N") # print kwargs if (debug.strip().lower() == 'y'): pprint(kwargs) stop_at = kwargs.pop("stop_at", None) # [yyyy-mm-dd mmhh:mm] i.e. "2013-10-08 14:30" mtime_after = kwargs.pop("mtime_after", None) # [yyyy-mm-dd mmhh:mm] i.e. "2013-10-08 14:30" # dictionary initialized with the name=value pairs in the keyword argument list num_retry = kwargs.pop("num_retry", None) sleep_interval = kwargs.pop("sleep_interval", 60) sleep_interval = int(str(sleep_interval).strip().lower()) # format and convert to int if ( sleep_interval < 1) : legoo.error("invalid value [%s] for [sleep_interval]. try again with positive integer ... " \ % (sleep_interval)) # log error raise ValueError("invalid value [%s] for [sleep_interval]. try again with positive integer ... " \ % (sleep_interval)) # raise error and exit file = kwargs.pop("file", None) if ( not file ): legoo.error("option file not set. must specify file ... ") # log error raise TypeError("option file not set. must specify file ... ") # raise error and exit quiet = kwargs.pop("quiet", "N") if (quiet.strip().lower() == 'y'): legoo.removeHandler(info_hand) # suppress logging if variable quiet set to Y if kwargs: legoo.error("Unsupported configuration options %s" % list(kwargs)) # log error raise TypeError("[ERROR] Unsupported configuration options %s" % list(kwargs)) # raise error and exit FILE_FOUND = False # init variable to False i = 0 # init counter if ( (not stop_at) and (not num_retry)): legoo.error("option stop_at and num_retry not set. must specify one ... ") # log error raise TypeError("option stop_at and num_retry not set. must specify one ...") # raise error and exit if ( stop_at ): stop_at_dt = datetime.datetime.strptime(stop_at, '%Y-%m-%d %H:%M') else: stop_at_dt = datetime.datetime.now() if ( mtime_after ): mtime_after_dt = datetime.datetime.strptime(mtime_after, '%Y-%m-%d %H:%M') else: mtime_after_dt = datetime.datetime.fromtimestamp(0) # set default if ( num_retry ): num_retry = int(str(num_retry).strip().lower()) # format and convert to int if ( num_retry < 1) : legoo.error("invalid value [%s] for [num_retry]. try again with positive integer ... " % (num_retry)) # log error raise ValueError("invalid value [%s] for [num_retry]. try again with positive integer ... " \ % (num_retry)) # raise error and exit else: num_retry = 0 # default to 0 if not specified # greedy algorithm: when stop_at and num_retry both set, progam continues either stop_at or num_retry satisfies while ( (datetime.datetime.now() < stop_at_dt) or (i < num_retry) ): if (os.path.isfile( file )): # check if file stopped being written prv_size = os.path.getsize( file ) sleep(10) # wait 30 seconds then check the file size again post_size = os.path.getsize( file ) # get file ctime modify_dt = datetime.datetime.fromtimestamp(os.path.getmtime(file)) # check file 1) stop grower 2) modified after mtime_after if ( int(prv_size) == int(post_size) and (modify_dt >= mtime_after_dt)): FILE_FOUND = True # set flag to true if file exists legoo.info("file [%s] found" % (file)) break elif (int(prv_size) <> int(post_size)): legoo.info("check #[%d] file [%s] exits but still being written. try again in [%s] seconds..." \ % (i, file, sleep_interval)) elif (modify_dt < mtime_after_dt): legoo.info("check #[%d] file [%s] exits but modify_time [%s] before mtime_after [%s]. 
try again in [%s] seconds..." \
                           % (i, file, modify_dt, mtime_after_dt, sleep_interval))
        else:
            legoo.info("check #[%d] if file [%s] exists" % (i, file))
        sleep(sleep_interval)
        i += 1
    if (not FILE_FOUND):
        legoo.error("file [%s] does not exist" % (file))           # log error
        raise TypeError("file [%s] does not exist" % (file))       # raise error and exit

def dos_to_unix(orig_file, new_file=None):
    """call shell utility dos_to_unix to convert file format from dos to unix
       -------------------------------------------------------------------------
       dos_to_unix('/tmp/msa.csv')
       dos_to_unix(orig_file='/tmp/msa.csv', new_file='/tmp/msa2.csv')
       -------------------------------------------------------------------------
    """
    if not new_file:
        new_file = orig_file + "_new"
    rc = subprocess.call(["dos_to_unix", "-n", orig_file, new_file])
    return new_file

def send_mail(**kwargs):
    """send email
       ---------------------------------------------------------
       ---------------------------------------------------------
    """
    debug = kwargs.pop("debug", "N")
    # print kwargs
    if (debug.strip().lower() == 'y'):
        pprint(kwargs)
    # dictionary initialized with the name=value pairs in the keyword argument list
    sender    = kwargs.pop("sender", None)
    receivers = kwargs.pop("receivers", [])          # ['pluo@trulia.com']
    if not receivers:
        legoo.error("option receivers is missing")
        raise TypeError("option receivers is missing")
    elif type(receivers) <> list:                    # convert comma separated string to list.
        receivers = [r.strip() for r in receivers.split(',')]
    subject = kwargs.pop("subject", None)
    if not subject:
        legoo.warning("subject is NULL")
    smtp_server      = kwargs.pop("smtp_server", "mx1.sv2.trulia.com")
    smtp_port        = kwargs.pop("smtp_port", 25)
    body_text        = kwargs.pop("body_text", None)
    body_text_file   = kwargs.pop("body_text_file", None)     # ['foo.py', '../show_env.py']
    body_html        = kwargs.pop("body_html", None)
    body_html_file   = kwargs.pop("body_html_file", None)
    attachment_files = kwargs.pop("attachment_files", None)
    attachment_dir   = kwargs.pop("attachment_dir", None)
    quiet            = kwargs.pop("quiet", "N")
    if (quiet.strip().lower() == 'y'):
        legoo.removeHandler(info_hand)               # suppress logging if variable quiet set to Y
    if kwargs:
        legoo.error("Unsupported configuration options %s" % list(kwargs))                 # log error
        raise TypeError("[ERROR] Unsupported configuration options %s" % list(kwargs))     # raise error and exit
    # import smtp and related modules
    import smtplib, os
    from email.MIMEMultipart import MIMEMultipart
    from email.MIMEBase import MIMEBase
    from email.MIMEText import MIMEText
    from email.Utils import COMMASPACE, formatdate
    from email import Encoders
    from os import listdir
    from os.path import isfile, join
    # build message
    msg = MIMEMultipart()
    msg['From']    = sender
    msg['To']      = COMMASPACE.join(receivers)
    msg['Date']    = formatdate(localtime=True)
    msg['Subject'] = subject
    # build the plain text body
    if body_text:
        msg.attach(MIMEText(body_text, 'plain'))
    if body_text_file:
        msg.attach(MIMEText(open(body_text_file, "rb").read(), 'plain'))
    # build html body
    if body_html:
        msg.attach(MIMEText(body_html, 'html'))
    if body_html_file:
        msg.attach(MIMEText(open(body_html_file, "rb").read(), 'html'))
    # warning: if body is EMPTY
    if not (body_html or body_html_file or body_text or body_text_file):
        legoo.warning("message body is empty, specify one of body options [body_html, body_html_file, body_text, body_text_file]")
    # convert comma separated string to list
    if attachment_files and type(attachment_files) <> list:
        attachment_files = attachment_files.split(',')
    # append list of files from attachment_dir to attachment_files:
    if
attachment_dir: attachment_files += [ f for f in listdir(attachment_dir) if isfile(join(attachment_dir,f)) ] # attach files if attachment_files: for f in attachment_files: part = MIMEBase('application', "octet-stream") part.set_payload( open(f.strip(),"rb").read() ) Encoders.encode_base64(part) part.add_header('Content-Disposition', 'attachment; filename="%s"' % os.path.basename(f)) msg.attach(part) # send mail smtp = smtplib.SMTP(smtp_server, smtp_port) smtp.sendmail(sender, receivers, msg.as_string()) smtp.close() def main(): # create_mysql_ddl_from_csv(dos_to_unix('/tmp/msa.csv'), ',') # DEBUG='N' # (table_name, mysql_ddl) = create_mysql_ddl_from_csv(csv_file="/tmp/fact_imp_pdp.csv", csv_delimiter = 'tab', mysql_create_table = 'Y', debug='Y') # (table_name, mysql_ddl) = create_mysql_ddl_from_csv(csv_file="/tmp/dim_listing_delta.csv", csv_delimiter = 'tab', mysql_create_table = 'Y', max_rows=60000) # csv_to_mysql(csv_file="/tmp/dim_listing_delta.csv", csv_delimiter='tab', mysql_create_table='Y') # execute_mysql_query(mysql_host='bidbs', mysql_db='bi', mysql_query='select count(*) from dim_property2', row_count='Y') # hive_to_csv(csv_file='dim_listing.csv',hive_node='namenode2s', hive_query='select * from bi.dim_listing limit 1000000', mapred_job_priority = 'HIGH', debug='Y') # hive_to_mysql(hive_table='fact_imp_pdp', hive_query='select * from bi.fact_imp_pdp limit 2000000', create_table = 'Y') # create_mysql_connection(mysql_host='bidbs', mysql_db='bi_staging', debug='Y') # (affected_rows, number_rows) = execute_mysql_query(mysql_host='bidbs', mysql_db='bi', mysql_query='select count(*) from dim_property', row_count='Y') # print affected_rows, number_rows # (table_name, hive_ddl)=create_hive_ddl_from_csv(csv_file='test/opportunity_no_header.csv2', table_name='tmp2', csv_delimiter='tab') # print hive_ddl # csv_dump(csv_file='/tmp/opportunity.csv', csv_delimiter='tab', lines=10) # (table_name, hive_ddl)=create_hive_ddl_from_csv(csv_file='/tmp/fact_imp_pdp.csv', csv_delimiter='tab', create_table='Y') # execute_remote_hive_query(hive_query='desc top50_ip;') # csv_to_hive(csv_file='/tmp/fact_imp_pdp.csv', csv_delimiter='tab', hive_create_table='Y') # mysql_to_csv(mysql_host='bidbs', mysql_table='dim_time', mysql_quick='Y') # mysql_to_csv(mysql_host='bidbs', mysql_db='bi_staging', mysql_table='userstatsreport', mysql_query='select * from userstatsreport limit 1000;') # mysql_to_hive(mysql_host='bidbs', mysql_db='bi_staging', mysql_table='userstatsreport', mysql_query='select * from userstatsreport;', hive_create_table='Y') # hive_to_mysql(hive_table='userstatsreport', hive_db='staging', mysql_db='bi_staging', mysql_table='userstatsreport_hive', mysql_create_table='Y', max_rows=1000000) # count_lines(file='test/census_population.csv', skip_header='N', quiet='N', debug='N') #rs = execute_remote_hive_query(hive_db='bi', hive_query="desc dim_listing") # print rs # rows = count_hive_table_rows(hive_node='namenode2s', hive_db='staging', hive_table='top50_ip', quiet='Y') # print rows # csv_to_hive_table(csv_file='/tmp/fact_property_view.csv', hive_db='staging', hive_table='fact_property_view_partition', hive_partition="date_int = 20130428") # csv_to_hive(csv_file='/tmp/fact_property_view.csv', hive_db='staging', hive_create_table='Y') # csv_to_hive(csv_file='/data/tmp/dim_listing.csv', hive_db='staging', hive_table='Y', hive_node='namenode2s', mapred_job_priority = 'HIGH', hive_create_table='Y') # csv_to_hive(csv_file='/data/tmp/dim_listing.csv', hive_db='staging', hive_table='Y', 
hive_node='namenode2s',) # csv_to_hive(csv_file='/data/tmp/dim_listing.csv', hive_db='staging', hive_table='fact_property_view_partition', hive_partition="date_int=20130428", hive_create_table='Y') # remove_file(file='/tmp/dim_user_tier_2.csv2', debug='Y', quiet='Y') # rs = execute_remote_hive_query(hive_node='namenode2s', hive_db='bi', mapred_job_priority="very_high_invalid", hive_query="select count(*) from dim_listing" # wait_for_table(mysql_host='bidbs', mysql_db='bi', mysql_table='dim_property', \ # mysql_table_update_after = '2013-10-09 03:00', \ # sleep_interval = 30, num_retry = 6, stop_at = '12:46', debug='Y', quiet='Y') # send_mail( sender = 'pluo@trulia.com', receivers = 'pluo@trulia.com, luo@trulia.com', subject = 'legoo', body_html_file = 'bar.html', attachment_dir = '../legoo', attachment_files = ['../show_env.py'], body_text_file = 'csv_dump') pass if __name__ == '__main__': main() """SCons.Tool.rpm Tool-specific initialization for rpm. There normally shouldn't be any need to import this module directly. It will usually be imported through the generic SCons.Tool.Tool() selection method. The rpm tool calls the rpmbuild command. The first and only argument should a tar.gz consisting of the source file and a specfile. """ # # Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # __revision__ = "src/engine/SCons/Tool/rpm.py 5023 2010/06/14 22:05:46 scons" import os import re import shutil import subprocess import SCons.Builder import SCons.Node.FS import SCons.Util import SCons.Action import SCons.Defaults def get_cmd(source, env): tar_file_with_included_specfile = source if SCons.Util.is_List(source): tar_file_with_included_specfile = source[0] return "%s %s %s"%(env['RPM'], env['RPMFLAGS'], tar_file_with_included_specfile.abspath ) def build_rpm(target, source, env): # create a temporary rpm build root. tmpdir = os.path.join( os.path.dirname( target[0].abspath ), 'rpmtemp' ) if os.path.exists(tmpdir): shutil.rmtree(tmpdir) # now create the mandatory rpm directory structure. for d in ['RPMS', 'SRPMS', 'SPECS', 'BUILD']: os.makedirs( os.path.join( tmpdir, d ) ) # set the topdir as an rpmflag. env.Prepend( RPMFLAGS = '--define \'_topdir %s\'' % tmpdir ) # now call rpmbuild to create the rpm package. 
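    # The command string comes from get_cmd(): "<RPM> <RPMFLAGS> <tarball>", which with the
    # defaults set in generate() below expands to something like
    #   LC_ALL=c rpmbuild --define '_topdir <tmpdir>' -ta <source>.tar.gz
    # stdout and stderr are captured together so the "Wrote: ..." lines can be parsed afterwards.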
handle = subprocess.Popen(get_cmd(source, env), stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True) output = handle.stdout.read() status = handle.wait() if status: raise SCons.Errors.BuildError( node=target[0], errstr=output, filename=str(target[0]) ) else: # XXX: assume that LC_ALL=c is set while running rpmbuild output_files = re.compile( 'Wrote: (.*)' ).findall( output ) for output, input in zip( output_files, target ): rpm_output = os.path.basename(output) expected = os.path.basename(input.get_path()) assert expected == rpm_output, "got %s but expected %s" % (rpm_output, expected) shutil.copy( output, input.abspath ) # cleanup before leaving. shutil.rmtree(tmpdir) return status def string_rpm(target, source, env): try: return env['RPMCOMSTR'] except KeyError: return get_cmd(source, env) rpmAction = SCons.Action.Action(build_rpm, string_rpm) RpmBuilder = SCons.Builder.Builder(action = SCons.Action.Action('$RPMCOM', '$RPMCOMSTR'), source_scanner = SCons.Defaults.DirScanner, suffix = '$RPMSUFFIX') def generate(env): """Add Builders and construction variables for rpm to an Environment.""" try: bld = env['BUILDERS']['Rpm'] except KeyError: bld = RpmBuilder env['BUILDERS']['Rpm'] = bld env.SetDefault(RPM = 'LC_ALL=c rpmbuild') env.SetDefault(RPMFLAGS = SCons.Util.CLVar('-ta')) env.SetDefault(RPMCOM = rpmAction) env.SetDefault(RPMSUFFIX = '.rpm') def exists(env): return env.Detect('rpmbuild') # Local Variables: # tab-width:4 # indent-tabs-mode:nil # End: # vim: set expandtab tabstop=4 shiftwidth=4: # # Copyright 2013 Free Software Foundation, Inc. # # This file is part of GNU Radio # # GNU Radio is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 3, or (at your option) # any later version. # # GNU Radio is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with GNU Radio; see the file COPYING. If not, write to # the Free Software Foundation, Inc., 51 Franklin Street, # Boston, MA 02110-1301, USA. # """ Create a whole new out-of-tree module """ import shutil import os import re from optparse import OptionGroup from gnuradio import gr from modtool_base import ModTool, ModToolException from scm import SCMRepoFactory class ModToolNewModule(ModTool): """ Create a new out-of-tree module """ name = 'newmod' aliases = ('nm', 'create') def __init__(self): ModTool.__init__(self) def setup_parser(self): " Initialise the option parser for 'gr_modtool newmod' " parser = ModTool.setup_parser(self) parser.usage = '%prog nm [options]. \n Call %prog without any options to run it interactively.' ogroup = OptionGroup(parser, "New out-of-tree module options") ogroup.add_option("--srcdir", type="string", help="Source directory for the module template.") parser.add_option_group(ogroup) return parser def setup(self, options, args): # Don't call ModTool.setup(), that assumes an existing module. 
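        # Module name resolution: prefer options.module_name (defined by the base ModTool
        # parser), then the first positional argument after the command name, then an
        # interactive prompt; the name must match [a-zA-Z0-9_]+.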
self._info['modname'] = options.module_name if self._info['modname'] is None: if len(args) >= 2: self._info['modname'] = args[1] else: self._info['modname'] = raw_input('Name of the new module: ') if not re.match('[a-zA-Z0-9_]+$', self._info['modname']): raise ModToolException('Invalid module name.') self._dir = options.directory if self._dir == '.': self._dir = './gr-%s' % self._info['modname'] try: os.stat(self._dir) except OSError: pass # This is what should happen else: raise ModToolException('The given directory exists.') if options.srcdir is None: options.srcdir = '/usr/local/share/gnuradio/modtool/gr-newmod' self._srcdir = gr.prefs().get_string('modtool', 'newmod_path', options.srcdir) if not os.path.isdir(self._srcdir): raise ModToolException('Could not find gr-newmod source dir.') self.options = options self._setup_scm(mode='new') def run(self): """ * Copy the example dir recursively * Open all files, rename howto and HOWTO to the module name * Rename files and directories that contain the word howto """ print "Creating out-of-tree module in %s..." % self._dir, try: shutil.copytree(self._srcdir, self._dir) os.chdir(self._dir) except OSError: raise ModToolException('Could not create directory %s.' % self._dir) for root, dirs, files in os.walk('.'): for filename in files: f = os.path.join(root, filename) s = open(f, 'r').read() s = s.replace('howto', self._info['modname']) s = s.replace('HOWTO', self._info['modname'].upper()) open(f, 'w').write(s) if filename.find('howto') != -1: os.rename(f, os.path.join(root, filename.replace('howto', self._info['modname']))) if os.path.basename(root) == 'howto': os.rename(root, os.path.join(os.path.dirname(root), self._info['modname'])) print "Done." if self.scm.init_repo(path_to_repo="."): print "Created repository... you might want to commit before continuing." print "Use 'gr_modtool add' to add a new block to this currently empty module." # Copyright (C) 2013-2017 Roland Lutz # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software Foundation, # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
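# Test fixture for xorn.storage: setup() builds a chain of four revisions -- an
# empty one, one that adds a line, one that adds a box and a circle, and one that
# replaces the line's data with a net and deletes the box -- and returns the
# revisions together with the object handles ob0, ob1a and ob1b.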
import xorn.storage def setup(): global line_data, box_data, circle_data, net_data rev0 = xorn.storage.Revision(None) assert rev0 is not None rev0.finalize() # first change rev1 = xorn.storage.Revision(rev0) assert rev1 is not None line_data = xorn.storage.Line() line_data.x = 0 line_data.y = 1 line_data.width = 3 line_data.height = 2 line_data.color = 3 line_data.line.width = 1 ob0 = rev1.add_object(line_data) assert ob0 is not None rev1.finalize() # second change rev2 = xorn.storage.Revision(rev1) assert rev2 is not None box_data = xorn.storage.Box() box_data.x = 1 box_data.y = 1 box_data.width = 2 box_data.height = 2 box_data.color = 3 box_data.line.width = 1 ob1a = rev2.add_object(box_data) assert ob1a is not None circle_data = xorn.storage.Circle() circle_data.x = -1 circle_data.y = -1 circle_data.radius = 2 circle_data.color = 3 circle_data.line.width = 1 circle_data.fill.type = 1 ob1b = rev2.add_object(circle_data) assert ob1b is not None rev2.finalize() # third change rev3 = xorn.storage.Revision(rev2) assert rev3 is not None net_data = xorn.storage.Net() net_data.x = 0 net_data.y = 1 net_data.width = 3 net_data.height = 2 net_data.color = 4 rev3.set_object_data(ob0, net_data) rev3.delete_object(ob1a) rev3.finalize() return rev0, rev1, rev2, rev3, ob0, ob1a, ob1b """ *** FROM https://github.com/davidadamojr/TextRank/blob/master/textrank.py *** From this paper: http://acl.ldc.upenn.edu/acl2004/emnlp/pdf/Mihalcea.pdf External dependencies: nltk, numpy, networkx Based on https://gist.github.com/voidfiles/1646117 """ import nltk import itertools from operator import itemgetter import networkx as nx import os # apply syntactic filters based on POS tags def filter_for_tags(tagged, tags=['NN', 'JJ', 'NNP']): return [item for item in tagged if item[1] in tags] def normalize(tagged): return [(item[0].replace('.', ''), item[1]) for item in tagged] def unique_everseen(iterable, key=None): "List unique elements, preserving order. Remember all elements ever seen." 
# unique_everseen('AAAABBBCCDAABBB') --> A B C D # unique_everseen('ABBCcAD', str.lower) --> A B C D seen = set() seen_add = seen.add if key is None: for element in itertools.ifilterfalse(seen.__contains__, iterable): seen_add(element) yield element else: for element in iterable: k = key(element) if k not in seen: seen_add(k) yield element def lDistance(firstString, secondString): "Function to find the Levenshtein distance between two words/sentences - gotten from http://rosettacode.org/wiki/Levenshtein_distance#Python" if len(firstString) > len(secondString): firstString, secondString = secondString, firstString distances = range(len(firstString) + 1) for index2, char2 in enumerate(secondString): newDistances = [index2 + 1] for index1, char1 in enumerate(firstString): if char1 == char2: newDistances.append(distances[index1]) else: newDistances.append(1 + min((distances[index1], distances[index1 + 1], newDistances[-1]))) distances = newDistances return distances[-1] def buildGraph(nodes): "nodes - list of hashables that represents the nodes of the graph" gr = nx.Graph() #initialize an undirected graph gr.add_nodes_from(nodes) nodePairs = list(itertools.combinations(nodes, 2)) #add edges to the graph (weighted by Levenshtein distance) for pair in nodePairs: firstString = pair[0] secondString = pair[1] levDistance = lDistance(firstString, secondString) gr.add_edge(firstString, secondString, weight=levDistance) return gr def extractKeyphrases(text): #tokenize the text using nltk wordTokens = nltk.word_tokenize(text) #assign POS tags to the words in the text tagged = nltk.pos_tag(wordTokens) textlist = [x[0] for x in tagged] tagged = filter_for_tags(tagged) tagged = normalize(tagged) unique_word_set = unique_everseen([x[0] for x in tagged]) word_set_list = list(unique_word_set) #this will be used to determine adjacent words in order to construct keyphrases with two words graph = buildGraph(word_set_list) #pageRank - initial value of 1.0, error tolerance of 0,0001, calculated_page_rank = nx.pagerank(graph, weight='weight') #most important words in ascending order of importance keyphrases = sorted(calculated_page_rank, key=calculated_page_rank.get, reverse=True) #the number of keyphrases returned will be relative to the size of the text (a third of the number of vertices) aThird = len(word_set_list) / 3 keyphrases = keyphrases[0:aThird + 1] #take keyphrases with multiple words into consideration as done in the paper - if two words are adjacent in the text and are selected as keywords, join them #together modifiedKeyphrases = set([]) dealtWith = set([]) #keeps track of individual keywords that have been joined to form a keyphrase i = 0 j = 1 while j < len(textlist): firstWord = textlist[i] secondWord = textlist[j] if firstWord in keyphrases and secondWord in keyphrases: keyphrase = firstWord + ' ' + secondWord modifiedKeyphrases.add(keyphrase) dealtWith.add(firstWord) dealtWith.add(secondWord) else: if firstWord in keyphrases and firstWord not in dealtWith: modifiedKeyphrases.add(firstWord) #if this is the last word in the text, and it is a keyword, #it definitely has no chance of being a keyphrase at this point if j == len(textlist) - 1 and secondWord in keyphrases and secondWord not in dealtWith: modifiedKeyphrases.add(secondWord) i = i + 1 j = j + 1 return modifiedKeyphrases def extractSentences(text): sent_detector = nltk.data.load('tokenizers/punkt/english.pickle') sentenceTokens = sent_detector.tokenize(text.strip()) graph = buildGraph(sentenceTokens) calculated_page_rank = 
nx.pagerank(graph, weight='weight') #most important sentences in ascending order of importance sentences = sorted(calculated_page_rank, key=calculated_page_rank.get, reverse=True) #return a 100 word summary summary = ' '.join(sentences) summaryWords = summary.split() summaryWords = summaryWords[0:101] summary = ' '.join(summaryWords) return summary def writeFiles(summary, keyphrases, fileName): "outputs the keyphrases and summaries to appropriate files" print "Generating output to " + 'keywords/' + fileName keyphraseFile = open('keywords/' + fileName, 'w') for keyphrase in keyphrases: keyphraseFile.write(keyphrase + '\n') keyphraseFile.close() print "Generating output to " + 'summaries/' + fileName summaryFile = open('summaries/' + fileName, 'w') summaryFile.write(summary) summaryFile.close() print "-" # #retrieve each of the articles # articles = os.listdir("articles") # for article in articles: # print 'Reading articles/' + article # articleFile = open('articles/' + article, 'r') # text = articleFile.read() # keyphrases = extractKeyphrases(text) # summary = extractSentences(text) # writeFiles(summary, keyphrases, article) """ Utilities that manipulate strides to achieve desirable effects. An explanation of strides can be found in the "ndarray.rst" file in the NumPy reference guide. """ from __future__ import division, absolute_import, print_function import numpy as np __all__ = ['broadcast_to', 'broadcast_arrays'] class DummyArray(object): """Dummy object that just exists to hang __array_interface__ dictionaries and possibly keep alive a reference to a base array. """ def __init__(self, interface, base=None): self.__array_interface__ = interface self.base = base def _maybe_view_as_subclass(original_array, new_array): if type(original_array) is not type(new_array): # if input was an ndarray subclass and subclasses were OK, # then view the result as that subclass. new_array = new_array.view(type=type(original_array)) # Since we have done something akin to a view from original_array, we # should let the subclass finalize (if it has it implemented, i.e., is # not None). if new_array.__array_finalize__: new_array.__array_finalize__(original_array) return new_array def as_strided(x, shape=None, strides=None, subok=False): """ Make an ndarray from the given array with the given shape and strides. """ # first convert input to array, possibly keeping subclass x = np.array(x, copy=False, subok=subok) interface = dict(x.__array_interface__) if shape is not None: interface['shape'] = tuple(shape) if strides is not None: interface['strides'] = tuple(strides) array = np.asarray(DummyArray(interface, base=x)) if array.dtype.fields is None and x.dtype.fields is not None: # This should only happen if x.dtype is [('', 'Vx')] array.dtype = x.dtype