diff --git "a/codeparrot-valid_1003.txt" "b/codeparrot-valid_1003.txt" new file mode 100644--- /dev/null +++ "b/codeparrot-valid_1003.txt" @@ -0,0 +1,10000 @@ + 'recall_score', + 'roc_auc_score', + 'roc_curve', + 'SCORERS', + 'silhouette_samples', + 'silhouette_score', + 'v_measure_score', + 'zero_one_loss', + 'brier_score_loss', + 'dcg_score', + 'ndcg_score' +] + +# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors +# License: GNU General Public License v3. See license.txt + +from __future__ import unicode_literals +import frappe +from frappe import _ +from frappe.utils import (cstr, validate_email_add, cint, comma_and, has_gravatar, now, getdate, nowdate) +from frappe.model.mapper import get_mapped_doc + +from erpnext.controllers.selling_controller import SellingController +from frappe.contacts.address_and_contact import load_address_and_contact +from erpnext.accounts.party import set_taxes + +sender_field = "email_id" + +class Lead(SellingController): + def get_feed(self): + return '{0}: {1}'.format(_(self.status), self.lead_name) + + def onload(self): + customer = frappe.db.get_value("Customer", {"lead_name": self.name}) + self.get("__onload").is_customer = customer + load_address_and_contact(self) + + def validate(self): + self._prev = frappe._dict({ + "contact_date": frappe.db.get_value("Lead", self.name, "contact_date") if \ + (not cint(self.get("__islocal"))) else None, + "contact_by": frappe.db.get_value("Lead", self.name, "contact_by") if \ + (not cint(self.get("__islocal"))) else None, + }) + + self.set_status() + self.check_email_id_is_unique() + + if self.email_id: + if not self.flags.ignore_email_validation: + validate_email_add(self.email_id, True) + + if self.email_id == self.lead_owner: + frappe.throw(_("Lead Owner cannot be same as the Lead")) + + if self.email_id == self.contact_by: + frappe.throw(_("Next Contact By cannot be same as the Lead Email Address")) + + if self.is_new() or not self.image: + self.image = has_gravatar(self.email_id) + + if self.contact_date and getdate(self.contact_date) < getdate(nowdate()): + frappe.throw(_("Next Contact Date cannot be in the past")) + + def on_update(self): + self.add_calendar_event() + + def add_calendar_event(self, opts=None, force=False): + super(Lead, self).add_calendar_event({ + "owner": self.lead_owner, + "starts_on": self.contact_date, + "subject": ('Contact ' + cstr(self.lead_name)), + "description": ('Contact ' + cstr(self.lead_name)) + \ + (self.contact_by and ('. 
By : ' + cstr(self.contact_by)) or '') + }, force) + + def check_email_id_is_unique(self): + if self.email_id: + # validate email is unique + duplicate_leads = frappe.db.sql_list("""select name from tabLead + where email_id=%s and name!=%s""", (self.email_id, self.name)) + + if duplicate_leads: + frappe.throw(_("Email Address must be unique, already exists for {0}") + .format(comma_and(duplicate_leads)), frappe.DuplicateEntryError) + + def on_trash(self): + frappe.db.sql("""update `tabIssue` set lead='' where lead=%s""", + self.name) + + self.delete_events() + + def has_customer(self): + return frappe.db.get_value("Customer", {"lead_name": self.name}) + + def has_opportunity(self): + return frappe.db.get_value("Opportunity", {"lead": self.name, "status": ["!=", "Lost"]}) + + def has_quotation(self): + return frappe.db.get_value("Quotation", { + "lead": self.name, + "docstatus": 1, + "status": ["!=", "Lost"] + + }) + + def has_lost_quotation(self): + return frappe.db.get_value("Quotation", { + "lead": self.name, + "docstatus": 1, + "status": "Lost" + }) + +@frappe.whitelist() +def make_customer(source_name, target_doc=None): + return _make_customer(source_name, target_doc) + +def _make_customer(source_name, target_doc=None, ignore_permissions=False): + def set_missing_values(source, target): + if source.company_name: + target.customer_type = "Company" + target.customer_name = source.company_name + else: + target.customer_type = "Individual" + target.customer_name = source.lead_name + + target.customer_group = frappe.db.get_default("Customer Group") + + doclist = get_mapped_doc("Lead", source_name, + {"Lead": { + "doctype": "Customer", + "field_map": { + "name": "lead_name", + "company_name": "customer_name", + "contact_no": "phone_1", + "fax": "fax_1" + } + }}, target_doc, set_missing_values, ignore_permissions=ignore_permissions) + + return doclist + +@frappe.whitelist() +def make_opportunity(source_name, target_doc=None): + target_doc = get_mapped_doc("Lead", source_name, + {"Lead": { + "doctype": "Opportunity", + "field_map": { + "campaign_name": "campaign", + "doctype": "enquiry_from", + "name": "lead", + "lead_name": "contact_display", + "company_name": "customer_name", + "email_id": "contact_email", + "mobile_no": "contact_mobile" + } + }}, target_doc) + + return target_doc + +@frappe.whitelist() +def make_quotation(source_name, target_doc=None): + target_doc = get_mapped_doc("Lead", source_name, + {"Lead": { + "doctype": "Quotation", + "field_map": { + "name": "lead" + } + }}, target_doc) + target_doc.quotation_to = "Lead" + target_doc.run_method("set_missing_values") + target_doc.run_method("set_other_charges") + target_doc.run_method("calculate_taxes_and_totals") + + return target_doc + +@frappe.whitelist() +def get_lead_details(lead, posting_date=None, company=None): + if not lead: return {} + + from erpnext.accounts.party import set_address_details + out = frappe._dict() + + lead_doc = frappe.get_doc("Lead", lead) + lead = lead_doc + + out.update({ + "territory": lead.territory, + "customer_name": lead.company_name or lead.lead_name, + "contact_display": lead.lead_name, + "contact_email": lead.email_id, + "contact_mobile": lead.mobile_no, + "contact_phone": lead.phone, + }) + + set_address_details(out, lead, "Lead") + + taxes_and_charges = set_taxes(None, 'Lead', posting_date, company, + billing_address=out.get('customer_address'), shipping_address=out.get('shipping_address_name')) + if taxes_and_charges: + out['taxes_and_charges'] = taxes_and_charges + + return out + +# -*- 
coding: utf-8 -*-
+##############################################################################
+#
+# OpenERP, Open Source Management Solution
+# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+##############################################################################
+
+{
+    'name' : 'Double Validation on Purchases',
+    'version' : '1.1',
+    'category': 'Purchase Management',
+    'depends' : ['base','purchase'],
+    'author' : 'OpenERP SA',
+    'description': """
+Double-validation for purchases exceeding minimum amount.
+=========================================================
+
+This module modifies the purchase workflow in order to validate purchases that
+exceed the minimum amount set by the configuration wizard.
+    """,
+    'website': 'https://www.odoo.com/page/purchase',
+    'data': [
+        'purchase_double_validation_workflow.xml',
+        'purchase_double_validation_installer.xml',
+        'purchase_double_validation_view.xml',
+    ],
+    'test': [
+        'test/purchase_double_validation_demo.yml',
+        'test/purchase_double_validation_test.yml'
+    ],
+    'demo': [],
+    'installable': True,
+    'auto_install': False
+}
+
+# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
+
+# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Constants for SavedModel save and restore operations."""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+from tensorflow.python.util.all_util import remove_undocumented
+from tensorflow.python.util.tf_export import tf_export
+
+# Subdirectory name containing the asset files.
+ASSETS_DIRECTORY = "assets"
+tf_export("saved_model.constants.ASSETS_DIRECTORY").export_constant(
+    __name__, "ASSETS_DIRECTORY")
+
+# CollectionDef key containing SavedModel assets.
+ASSETS_KEY = "saved_model_assets"
+tf_export("saved_model.constants.ASSETS_KEY").export_constant(
+    __name__, "ASSETS_KEY")
+
+# CollectionDef key for the legacy init op.
+LEGACY_INIT_OP_KEY = "legacy_init_op"
+tf_export("saved_model.constants.LEGACY_INIT_OP_KEY").export_constant(
+    __name__, "LEGACY_INIT_OP_KEY")
+
+# CollectionDef key for the SavedModel main op.
+MAIN_OP_KEY = "saved_model_main_op" +tf_export("saved_model.constants.MAIN_OP_KEY").export_constant( + __name__, "MAIN_OP_KEY") + +# Schema version for SavedModel. +SAVED_MODEL_SCHEMA_VERSION = 1 +tf_export("saved_model.constants.SAVED_MODEL_SCHEMA_VERSION").export_constant( + __name__, "SAVED_MODEL_SCHEMA_VERSION") + +# File name for SavedModel protocol buffer. +SAVED_MODEL_FILENAME_PB = "saved_model.pb" +tf_export("saved_model.constants.SAVED_MODEL_FILENAME_PB").export_constant( + __name__, "SAVED_MODEL_FILENAME_PB") + +# File name for text version of SavedModel protocol buffer. +SAVED_MODEL_FILENAME_PBTXT = "saved_model.pbtxt" +tf_export("saved_model.constants.SAVED_MODEL_FILENAME_PBTXT").export_constant( + __name__, "SAVED_MODEL_FILENAME_PBTXT") + +# Subdirectory name containing the variables/checkpoint files. +VARIABLES_DIRECTORY = "variables" +tf_export("saved_model.constants.VARIABLES_DIRECTORY").export_constant( + __name__, "VARIABLES_DIRECTORY") + +# File name used for variables. +VARIABLES_FILENAME = "variables" +tf_export("saved_model.constants.VARIABLES_FILENAME").export_constant( + __name__, "VARIABLES_FILENAME") + + +_allowed_symbols = [ + "ASSETS_DIRECTORY", + "ASSETS_KEY", + "LEGACY_INIT_OP_KEY", + "MAIN_OP_KEY", + "SAVED_MODEL_SCHEMA_VERSION", + "SAVED_MODEL_FILENAME_PB", + "SAVED_MODEL_FILENAME_PBTXT", + "VARIABLES_DIRECTORY", + "VARIABLES_FILENAME", +] +remove_undocumented(__name__, _allowed_symbols) + +# -*- coding: utf-8 -*- +############################################################################## +# +# Author: Nicolas Bessi +# Copyright 2011-2012 Camptocamp SA +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +# +############################################################################## +# TODO create a base Geocoder module +from urllib import urlencode +from urllib2 import urlopen +import xml.dom.minidom +import logging + +from openerp import api +from openerp.tools.translate import _ +from openerp.exceptions import ValidationError +from openerp.exceptions import except_orm + +from openerp.addons.base_geoengine import geo_model, fields + +logger = logging.getLogger('GeoNames address encoding') + + +class ResPartner(geo_model.GeoModel): + """Auto geo coding of addresses""" + _inherit = "res.partner" + + geo_point = fields.GeoPoint( + 'Addresses coordinate', readonly=True) + + def _can_geocode(self): + usr = self.env['res.users'] + return usr.browse(self.env.uid).company_id.enable_geocoding + + def _get_point_from_reply(self, answer): + """Parse geoname answer code inspired by geopy library""" + + def get_first_text(node, tag_names, strip=None): + """Get the text value of the first child of ``node`` with tag + ``tag_name``. 
The text is stripped using the value of ``strip``."""
+            if isinstance(tag_names, basestring):
+                tag_names = [tag_names]
+            if node:
+                while tag_names:
+                    nodes = node.getElementsByTagName(tag_names.pop(0))
+                    if nodes:
+                        child = nodes[0].firstChild
+                        return child and child.nodeValue.strip(strip)
+
+        def parse_code(code):
+            latitude = get_first_text(code, 'lat') or None
+            longitude = get_first_text(code, 'lng') or None
+            latitude = latitude and float(latitude)
+            longitude = longitude and float(longitude)
+            return latitude, longitude
+
+        res = answer.read()
+        if not isinstance(res, basestring):
+            return False
+        doc = xml.dom.minidom.parseString(res)
+        codes = doc.getElementsByTagName('code')
+        if len(codes) < 1:
+            return False
+        latitude, longitude = parse_code(codes[0])
+        return fields.GeoPoint.from_latlon(self.env.cr, latitude, longitude)
+
+    @api.multi
+    def geocode_from_geonames(self, srid='900913',
+                              strict=True, context=None):
+        context = context or {}
+        base_url = u'http://ws.geonames.org/postalCodeSearch?'
+        config_parameter_obj = self.env['ir.config_parameter']
+        username = config_parameter_obj.get_param(
+            'geoengine_geonames_username')
+        if not username:
+            raise ValidationError(
+                _('A username is required to access '
+                  'http://ws.geonames.org/ \n'
+                  'Please provide a valid one by setting a '
+                  'value in System Parameter for the key '
+                  '"geoengine_geonames_username"'))
+        filters = {}
+        for add in self:
+            logger.info('geolocalize %s', add.name)
+            if add.country_id.code and (add.city or add.zip):
+                filters[u'country'] = add.country_id.code.encode('utf-8')
+                filters[u'username'] = username.encode('utf-8')
+                if add.city:
+                    filters[u'placename'] = add.city.encode('utf-8')
+                if add.zip:
+                    filters[u'postalcode'] = add.zip.encode('utf-8')
+                filters[u'maxRows'] = u'1'
+                try:
+                    url = base_url + urlencode(filters)
+                    answer = urlopen(url)
+                    add.geo_point = self._get_point_from_reply(answer)
+                except Exception, exc:
+                    logger.exception('error while updating geocodes')
+                    if strict:
+                        raise except_orm(_('Geoencoding fails'), str(exc))
+
+    @api.multi
+    def write(self, vals):
+        res = super(ResPartner, self).write(vals)
+        do_geocode = self._can_geocode()
+        if do_geocode and \
+                set(('country_id', 'city', 'zip')).intersection(vals):
+            self.geocode_from_geonames()
+        return res
+
+    @api.model
+    @api.returns('self', lambda value: value.id)
+    def create(self, vals):
+        res = super(ResPartner, self).create(vals)
+        do_geocode = self._can_geocode()
+        if do_geocode:
+            res.geocode_from_geonames()
+        return res
+
+from __future__ import unicode_literals
+
+import os
+
+from clint.textui import colored, indent, puts
+
+from glob import glob
+from wand.image import Image
+
+sizes = {
+    'A0': (841.0, 1189.0),
+    'A1': (594.0, 841.0),
+    'A2': (420.0, 594.0),
+    'A3': (297.0, 420.0),
+    'A4': (210.0, 297.0),
+    'A5': (148.0, 210.0),
+    'A6': (105.0, 148.0),
+    'A7': (74.0, 105.0),
+    'A8': (52.0, 74.0),
+    'A9': (37.0, 52.0),
+    'A10': (26.0, 37.0),
+    'Letter': (215.9, 279.4),
+    'Legal': (215.9, 355.6),
+    'Ledger': (432.0, 279.0),
+    'Tabloid': (279.0, 432.0),
+}
+
+orientations = {
+    'portrait': lambda (w, h): h / w,
+    'landscape': lambda (w, h): w / h,
+}
+
+# TODO make image extensions more dynamic, versatile or configurable
+extensions = ['bmp', 'gif', 'jpeg', 'jpg', 'png', 'tiff']
+
+
+def exists(target):
+    return os.path.exists(target) or glob('%s-*%s' % os.path.splitext(target))
+
+
+def run(args):
+    size = sizes[args.size]
+    ratio = orientations[args.orientation](size)
+
+    for root, _, _ in os.walk(args.input):
+        puts(root)
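+        # Loop sketch: for each supported extension, crop every matching
+        # image in this directory to the requested aspect ratio and mirror
+        # it into args.output; existing targets are skipped unless
+        # args.overwrite is set (see below).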
+ with indent(4): + for extension in extensions: + files = glob(os.path.join(root, '*.%s' % extension)) + for source in files: + with Image(filename=source) as original: + with original.clone() as img: + width, height = img.size + + if width < height: + height = int(width * ratio) + else: + width = int(height / ratio) + + dimension = '%sx%s' % (width, height) + + relative = os.path.relpath(source, args.input) + target = os.path.join(args.output, relative) + directory = os.path.dirname(target) + + if not args.dryrun: + if not os.path.exists(directory): + os.makedirs(directory) + + if not args.overwrite and exists(target): + puts('[%s] %s' % (colored.yellow('exists'), relative)) + else: + img.transform(crop=dimension) + img.save(filename=target) + puts('[%s] %s' % (colored.green('done'), relative)) +from __future__ import print_function +from numba import jit, int_ +import numba.unittest_support as unittest +import numpy as np + +try: + xrange +except NameError: + xrange = range + + +@jit +def obj_loop1(A, i): + r = 0 + for x in xrange(10): + for y in xrange(10): + items = A[x, y] + r += 1 + if items == None: + continue + for item in items: + print(item) + return r + + +@jit +def obj_loop2(x): + i = 0 + for elem in x: + i += 1 + if elem > 9: + break + return i + + +@jit +def fill(a): + for i in range(len(a)): + a[i] += 1 + return a + + +@jit(int_(int_[:])) +def call_loop(a): + s = 0 + for x in fill(a): + s += x + return s + + +class TestLoops(unittest.TestCase): + + def test_obj_loop1(self): + self.assertTrue(obj_loop1(np.array([[None]*10]*10), 1) == 100) + + def test_obj_loop2(self): + self.assertTrue(obj_loop2([1, 2, 3, 10]) == 4) + self.assertTrue(obj_loop2(range(100)) == 11) + + def test_call_loop(self): + self.assertTrue(call_loop(np.zeros(10, dtype=np.int)) == 10) + + +if __name__ == '__main__': + unittest.main() + +""" +originally from http://www.djangosnippets.org/snippets/828/ by dnordberg +""" +import logging +from optparse import make_option + +from django.conf import settings +from django.core.management.base import CommandError, BaseCommand +from six.moves import input, configparser + + +class Command(BaseCommand): + option_list = BaseCommand.option_list + ( + make_option('--noinput', action='store_false', + dest='interactive', default=True, + help='Tells Django to NOT prompt the user for input of any kind.'), + make_option('--no-utf8', action='store_true', + dest='no_utf8_support', default=False, + help='Tells Django to not create a UTF-8 charset database'), + make_option('-U', '--user', action='store', + dest='user', default=None, + help='Use another user for the database then defined in settings.py'), + make_option('-O', '--owner', action='store', + dest='owner', default=None, + help='Use another owner for creating the database then the user defined in settings or via --user'), + make_option('-P', '--password', action='store', + dest='password', default=None, + help='Use another password for the database then defined in settings.py'), + make_option('-D', '--dbname', action='store', + dest='dbname', default=None, + help='Use another database name then defined in settings.py'), + make_option('-R', '--router', action='store', + dest='router', default='default', + help='Use this router-database other then defined in settings.py'), + ) + help = "Resets the database for this project." + + def handle(self, *args, **options): + """ + Resets the database for this project. 
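+
+        Example invocation (a sketch; flags as defined in option_list above):
+
+            ./manage.py reset_db --router=default --noinput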
+ + Note: Transaction wrappers are in reverse as a work around for + autocommit, anybody know how to do this the right way? + """ + + if args: + raise CommandError("reset_db takes no arguments") + + router = options.get('router') + dbinfo = settings.DATABASES.get(router) + if dbinfo is None: + raise CommandError("Unknown database router %s" % router) + + engine = dbinfo.get('ENGINE').split('.')[-1] + + user = password = database_name = '' + if engine == 'mysql': + read_default_file = dbinfo.get('OPTIONS', {}).get('read_default_file') + if read_default_file: + config = configparser.ConfigParser() + config.read(read_default_file) + user = config.get('client', 'user') + password = config.get('client', 'password') + database_name = config.get('client', 'database') + + user = options.get('user') or dbinfo.get('USER') or user + password = options.get('password') or dbinfo.get('PASSWORD') or password + owner = options.get('owner') or user + + database_name = options.get('dbname') or dbinfo.get('NAME') or database_name + if database_name == '': + raise CommandError("You need to specify DATABASE_NAME in your Django settings file.") + + database_host = dbinfo.get('HOST') + database_port = dbinfo.get('PORT') + + verbosity = int(options.get('verbosity', 1)) + if options.get('interactive'): + confirm = input(""" +You have requested a database reset. +This will IRREVERSIBLY DESTROY +ALL data in the database "%s". +Are you sure you want to do this? + +Type 'yes' to continue, or 'no' to cancel: """ % (database_name,)) + else: + confirm = 'yes' + + if confirm != 'yes': + print("Reset cancelled.") + return + + if engine in ('sqlite3', 'spatialite'): + import os + try: + logging.info("Unlinking %s database" % engine) + os.unlink(database_name) + except OSError: + pass + + elif engine in ('mysql',): + import MySQLdb as Database + kwargs = { + 'user': user, + 'passwd': password, + } + if database_host.startswith('/'): + kwargs['unix_socket'] = database_host + else: + kwargs['host'] = database_host + + if database_port: + kwargs['port'] = int(database_port) + + connection = Database.connect(**kwargs) + drop_query = 'DROP DATABASE IF EXISTS `%s`' % database_name + utf8_support = options.get('no_utf8_support', False) and '' or 'CHARACTER SET utf8' + create_query = 'CREATE DATABASE `%s` %s' % (database_name, utf8_support) + logging.info('Executing... "' + drop_query + '"') + connection.query(drop_query) + logging.info('Executing... "' + create_query + '"') + connection.query(create_query) + + elif engine in ('postgresql', 'postgresql_psycopg2', 'postgis'): + if engine == 'postgresql': + import psycopg as Database # NOQA + elif engine in ('postgresql_psycopg2', 'postgis'): + import psycopg2 as Database # NOQA + + conn_string = "dbname=template1" + if user: + conn_string += " user=%s" % user + if password: + conn_string += " password='%s'" % password + if database_host: + conn_string += " host=%s" % database_host + if database_port: + conn_string += " port=%s" % database_port + + connection = Database.connect(conn_string) + connection.set_isolation_level(0) # autocommit false + cursor = connection.cursor() + drop_query = "DROP DATABASE \"%s\";" % database_name + logging.info('Executing... 
"' + drop_query + '"') + + try: + cursor.execute(drop_query) + except Database.ProgrammingError as e: + logging.info("Error: %s" % str(e)) + + create_query = "CREATE DATABASE \"%s\"" % database_name + if owner: + create_query += " WITH OWNER = \"%s\" " % owner + create_query += " ENCODING = 'UTF8'" + + if engine == 'postgis': + # fetch postgis template name if it exists + from django.contrib.gis.db.backends.postgis.creation import PostGISCreation + postgis_template = PostGISCreation(connection).template_postgis + if postgis_template is not None: + create_query += ' TEMPLATE = %s' % postgis_template + + if settings.DEFAULT_TABLESPACE: + create_query += ' TABLESPACE = %s;' % settings.DEFAULT_TABLESPACE + else: + create_query += ';' + + logging.info('Executing... "' + create_query + '"') + cursor.execute(create_query) + + else: + raise CommandError("Unknown database engine %s" % engine) + + if verbosity >= 2 or options.get('interactive'): + print("Reset successful.") + +"""Implements (a subset of) Sun XDR -- eXternal Data Representation. + +See: RFC 1014 + +""" + +import struct +from io import BytesIO + +__all__ = ["Error", "Packer", "Unpacker", "ConversionError"] + +# exceptions +class Error(Exception): + """Exception class for this module. Use: + + except xdrlib.Error, var: + # var has the Error instance for the exception + + Public ivars: + msg -- contains the message + + """ + def __init__(self, msg): + self.msg = msg + def __repr__(self): + return repr(self.msg) + def __str__(self): + return str(self.msg) + + +class ConversionError(Error): + pass + + + +class Packer: + """Pack various data representations into a buffer.""" + + def __init__(self): + self.reset() + + def reset(self): + self.__buf = BytesIO() + + def get_buffer(self): + return self.__buf.getvalue() + # backwards compatibility + get_buf = get_buffer + + def pack_uint(self, x): + self.__buf.write(struct.pack('>L', x)) + + def pack_int(self, x): + self.__buf.write(struct.pack('>l', x)) + + pack_enum = pack_int + + def pack_bool(self, x): + if x: self.__buf.write(b'\0\0\0\1') + else: self.__buf.write(b'\0\0\0\0') + + def pack_uhyper(self, x): + self.pack_uint(x>>32 & 0xffffffff) + self.pack_uint(x & 0xffffffff) + + pack_hyper = pack_uhyper + + def pack_float(self, x): + try: self.__buf.write(struct.pack('>f', x)) + except struct.error as msg: + raise ConversionError(msg) + + def pack_double(self, x): + try: self.__buf.write(struct.pack('>d', x)) + except struct.error as msg: + raise ConversionError(msg) + + def pack_fstring(self, n, s): + if n < 0: + raise ValueError('fstring size must be nonnegative') + data = s[:n] + n = ((n+3)//4)*4 + data = data + (n - len(data)) * b'\0' + self.__buf.write(data) + + pack_fopaque = pack_fstring + + def pack_string(self, s): + n = len(s) + self.pack_uint(n) + self.pack_fstring(n, s) + + pack_opaque = pack_string + pack_bytes = pack_string + + def pack_list(self, list, pack_item): + for item in list: + self.pack_uint(1) + pack_item(item) + self.pack_uint(0) + + def pack_farray(self, n, list, pack_item): + if len(list) != n: + raise ValueError('wrong array size') + for item in list: + pack_item(item) + + def pack_array(self, list, pack_item): + n = len(list) + self.pack_uint(n) + self.pack_farray(n, list, pack_item) + + + +class Unpacker: + """Unpacks various data representations from the given buffer.""" + + def __init__(self, data): + self.reset(data) + + def reset(self, data): + self.__buf = data + self.__pos = 0 + + def get_position(self): + return self.__pos + + def set_position(self, 
position): + self.__pos = position + + def get_buffer(self): + return self.__buf + + def done(self): + if self.__pos < len(self.__buf): + raise Error('unextracted data remains') + + def unpack_uint(self): + i = self.__pos + self.__pos = j = i+4 + data = self.__buf[i:j] + if len(data) < 4: + raise EOFError + x = struct.unpack('>L', data)[0] + try: + return int(x) + except OverflowError: + return x + + def unpack_int(self): + i = self.__pos + self.__pos = j = i+4 + data = self.__buf[i:j] + if len(data) < 4: + raise EOFError + return struct.unpack('>l', data)[0] + + unpack_enum = unpack_int + + def unpack_bool(self): + return bool(self.unpack_int()) + + def unpack_uhyper(self): + hi = self.unpack_uint() + lo = self.unpack_uint() + return int(hi)<<32 | lo + + def unpack_hyper(self): + x = self.unpack_uhyper() + if x >= 0x8000000000000000: + x = x - 0x10000000000000000 + return x + + def unpack_float(self): + i = self.__pos + self.__pos = j = i+4 + data = self.__buf[i:j] + if len(data) < 4: + raise EOFError + return struct.unpack('>f', data)[0] + + def unpack_double(self): + i = self.__pos + self.__pos = j = i+8 + data = self.__buf[i:j] + if len(data) < 8: + raise EOFError + return struct.unpack('>d', data)[0] + + def unpack_fstring(self, n): + if n < 0: + raise ValueError('fstring size must be nonnegative') + i = self.__pos + j = i + (n+3)//4*4 + if j > len(self.__buf): + raise EOFError + self.__pos = j + return self.__buf[i:i+n] + + unpack_fopaque = unpack_fstring + + def unpack_string(self): + n = self.unpack_uint() + return self.unpack_fstring(n) + + unpack_opaque = unpack_string + unpack_bytes = unpack_string + + def unpack_list(self, unpack_item): + list = [] + while 1: + x = self.unpack_uint() + if x == 0: break + if x != 1: + raise ConversionError('0 or 1 expected, got %r' % (x,)) + item = unpack_item() + list.append(item) + return list + + def unpack_farray(self, n, unpack_item): + list = [] + for i in range(n): + list.append(unpack_item()) + return list + + def unpack_array(self, unpack_item): + n = self.unpack_uint() + return self.unpack_farray(n, unpack_item) + +# Copyright (c) 2012 Paul Osborne +# +# Permission is hereby granted, free of charge, to any person obtaining a +# copy of this software and associated documentation files (the "Software"), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. 
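+
+# A quick round-trip check for the XDR Packer/Unpacker classes defined
+# above (an illustrative sketch, not part of the original xdrlib module;
+# it assumes Packer, Unpacker and Error are still in scope):
+_p = Packer()
+_p.pack_uint(42)
+_p.pack_string(b'hello')          # packs the length, then the padded bytes
+_u = Unpacker(_p.get_buffer())
+assert _u.unpack_uint() == 42
+assert _u.unpack_string() == b'hello'
+_u.done()  # raises Error('unextracted data remains') if bytes were left over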
+
+from anvil import Anvil
+from anvil.examples.helpers import find_changesets_for_authors
+import datetime
+
+AUTHORS = ["Paul Osborne", "Tom Manley"]
+START_DT = datetime.datetime(2011, 11, 1)
+
+
+def main():
+    anvil = Anvil("spectrum")
+    anvil.create_session_by_prompting()
+
+    print "collecting related changesets"
+    changesets_by_author = find_changesets_for_authors(anvil, AUTHORS, START_DT)
+    for author, changesets in changesets_by_author.items():
+        changesets = [c for c in changesets
+                      if c.date_time > datetime.datetime(2011, 11, 1, 1, 1, 1)]
+        eligible = [c for c in changesets
+                    if (not c.is_merge() and not c.is_tag_changeset())]
+        linked = [c for c in eligible if c.is_linked()]
+        print "=== %s ===" % author
+        print " Total Changesets: %s" % len(changesets)
+        print " Total Eligible: %s" % len(eligible)
+        print " Total Linked: %s" % len(linked)
+        percentage_linked = (float(len(linked)) / len(eligible)) * 100
+        print " Percentage Linked: %0.2f" % percentage_linked
+        print "\n"
+
+if __name__ == '__main__':
+    main()
+
+"""Code unit (module) handling for Coverage."""
+
+import glob, os
+
+from coverage.backward import open_source, string_class, StringIO
+from coverage.misc import CoverageException
+
+
+def code_unit_factory(morfs, file_locator):
+    """Construct a list of CodeUnits from polymorphic inputs.
+
+    `morfs` is a module or a filename, or a list of same.
+
+    `file_locator` is a FileLocator that can help resolve filenames.
+
+    Returns a list of CodeUnit objects.
+
+    """
+    # Be sure we have a list.
+    if not isinstance(morfs, (list, tuple)):
+        morfs = [morfs]
+
+    # On Windows, the shell doesn't expand wildcards. Do it here.
+    globbed = []
+    for morf in morfs:
+        if isinstance(morf, string_class) and ('?' in morf or '*' in morf):
+            globbed.extend(glob.glob(morf))
+        else:
+            globbed.append(morf)
+    morfs = globbed
+
+    code_units = [CodeUnit(morf, file_locator) for morf in morfs]
+
+    return code_units
+
+
+class CodeUnit(object):
+    """Code unit: a filename or module.
+
+    Instance attributes:
+
+    `name` is a human-readable name for this code unit.
+    `filename` is the os path from which we can read the source.
+    `relative` is a boolean.
+
+    """
+    def __init__(self, morf, file_locator):
+        self.file_locator = file_locator
+
+        if hasattr(morf, '__file__'):
+            f = morf.__file__
+        else:
+            f = morf
+        # .pyc files should always refer to a .py instead.
+        if f.endswith('.pyc') or f.endswith('.pyo'):
+            f = f[:-1]
+        elif f.endswith('$py.class'): # Jython
+            f = f[:-9] + ".py"
+        self.filename = self.file_locator.canonical_filename(f)
+
+        if hasattr(morf, '__name__'):
+            n = modname = morf.__name__
+            self.relative = True
+        else:
+            n = os.path.splitext(morf)[0]
+            rel = self.file_locator.relative_filename(n)
+            if os.path.isabs(n):
+                self.relative = (rel != n)
+            else:
+                self.relative = True
+                n = rel
+            modname = None
+        self.name = n
+        self.modname = modname
+
+    def __repr__(self):
+        return "<CodeUnit name=%r filename=%r>" % (self.name, self.filename)
+
+    # Annoying comparison operators. Py3k wants __lt__ etc, and Py2k needs all
+    # of them defined.
+
+    def __lt__(self, other):
+        return self.name < other.name
+    def __le__(self, other):
+        return self.name <= other.name
+    def __eq__(self, other):
+        return self.name == other.name
+    def __ne__(self, other):
+        return self.name != other.name
+    def __gt__(self, other):
+        return self.name > other.name
+    def __ge__(self, other):
+        return self.name >= other.name
+
+    def flat_rootname(self):
+        """A base for a flat filename to correspond to this code unit.
+ + Useful for writing files about the code where you want all the files in + the same directory, but need to differentiate same-named files from + different directories. + + For example, the file a/b/c.py might return 'a_b_c' + + """ + if self.modname: + return self.modname.replace('.', '_') + else: + root = os.path.splitdrive(self.name)[1] + return root.replace('\\', '_').replace('/', '_').replace('.', '_') + + def source_file(self): + """Return an open file for reading the source of the code unit.""" + if os.path.exists(self.filename): + # A regular text file: open it. + return open_source(self.filename) + + # Maybe it's in a zip file? + source = self.file_locator.get_zip_data(self.filename) + if source is not None: + return StringIO(source) + + # Couldn't find source. + raise CoverageException( + "No source for code '%s'." % self.filename + ) + + def should_be_python(self): + """Does it seem like this file should contain Python? + + This is used to decide if a file reported as part of the exection of + a program was really likely to have contained Python in the first + place. + + """ + # Get the file extension. + _, ext = os.path.splitext(self.filename) + + # Anything named *.py* should be Python. + if ext.startswith('.py'): + return True + # A file with no extension should be Python. + if not ext: + return True + # Everything else is probably not Python. + return False + +#!/usr/bin/env python + +# generate Python Manifest for the OpenEmbedded build system +# (C) 2002-2008 Michael 'Mickey' Lauer +# (C) 2007 Jeremy Laine +# licensed under MIT, see COPYING.MIT + +import os +import sys +import time + +VERSION = "2.5.2" +BASEREV = 0 + +__author__ = "Michael 'Mickey' Lauer " +__version__ = "20081209" + +class MakefileMaker: + + def __init__( self, outfile ): + """initialize""" + self.packages = {} + self.targetPrefix = "${libdir}/python%s/" % VERSION[:3] + self.output = outfile + self.out( """ +# WARNING: This file is AUTO GENERATED: Manual edits will be lost next time I regenerate the file. 
+# Generator: '%s' Version %s (C) 2002-2008 Michael 'Mickey' Lauer +# Visit the Python for Embedded Systems Site => http://www.Vanille.de/projects/python.spy +""" % ( sys.argv[0], __version__ ) ) + + # + # helper functions + # + + def out( self, data ): + """print a line to the output file""" + self.output.write( "%s\n" % data ) + + def setPrefix( self, targetPrefix ): + """set a file prefix for addPackage files""" + self.targetPrefix = targetPrefix + + def doProlog( self ): + self.out( """ """ ) + self.out( "" ) + + def addPackage( self, revision, name, description, dependencies, filenames ): + """add a package to the Makefile""" + if type( filenames ) == type( "" ): + filenames = filenames.split() + fullFilenames = [] + for filename in filenames: + if filename[0] != "$": + fullFilenames.append( "%s%s" % ( self.targetPrefix, filename ) ) + else: + fullFilenames.append( filename ) + self.packages[name] = revision, description, dependencies, fullFilenames + + def doBody( self ): + """generate body of Makefile""" + + global VERSION + + # + # generate provides line + # + + provideLine = 'PROVIDES+="' + for name in self.packages: + provideLine += "%s " % name + provideLine += '"' + + self.out( provideLine ) + self.out( "" ) + + # + # generate package line + # + + packageLine = 'PACKAGES="' + for name in self.packages: + packageLine += "%s " % name + packageLine += ' python-modules"' + + self.out( packageLine ) + self.out( "" ) + + # + # generate package variables + # + + for name, data in self.packages.iteritems(): + rev, desc, deps, files = data + + # + # write out the description, revision and dependencies + # + self.out( 'DESCRIPTION_%s="%s"' % ( name, desc ) ) + self.out( 'PR_%s="ml%d"' % ( name, rev + BASEREV ) ) + self.out( 'RDEPENDS_%s="%s"' % ( name, deps ) ) + + line = 'FILES_%s="' % name + + # + # check which directories to make in the temporary directory + # + + dirset = {} # if python had a set-datatype this would be sufficient. for now, we're using a dict instead. + for target in files: + dirset[os.path.dirname( target )] = True + + # + # generate which files to copy for the target (-dfR because whole directories are also allowed) + # + + for target in files: + line += "%s " % target + + line += '"' + self.out( line ) + self.out( "" ) + + self.out( 'DESCRIPTION_python-modules="All Python modules"' ) + line = 'RDEPENDS_python-modules="' + + for name, data in self.packages.iteritems(): + if name != 'python-core-dbg': + line += "%s " % name + + self.out( "%s \"" % line ) + self.out( 'ALLOW_EMPTY_python-modules = "1"' ) + + def doEpilog( self ): + self.out( """""" ) + self.out( "" ) + + def make( self ): + self.doProlog() + self.doBody() + self.doEpilog() + +if __name__ == "__main__": + + if len( sys.argv ) > 1: + os.popen( "rm -f ./%s" % sys.argv[1] ) + outfile = file( sys.argv[1], "w" ) + else: + outfile = sys.stdout + + m = MakefileMaker( outfile ) + + # Add packages here. Only specify dlopen-style library dependencies here, no ldd-style dependencies! 
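+    # (Filenames below that do not start with "$" are prefixed with
+    # targetPrefix, i.e. ${libdir}/python2.5/, by addPackage; literal
+    # ${...} paths are passed through unchanged -- see addPackage above.)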
+ # Parameters: revision, name, description, dependencies, filenames + # + + m.addPackage( 0, "python-core", "Python Interpreter and core modules (needed!)", "", + "__future__.* copy.* copy_reg.* ConfigParser.* " + + "getopt.* linecache.* new.* " + + "os.* posixpath.* struct.* " + + "warnings.* site.* stat.* " + + "UserDict.* UserList.* UserString.* " + + "lib-dynload/binascii.so lib-dynload/_struct.so lib-dynload/time.so " + + "lib-dynload/xreadlines.so types.* ${bindir}/python*" ) + + m.addPackage( 0, "python-core-dbg", "Python core module debug information", "python-core", + "lib-dynload/.debug ${bindir}/.debug ${libdir}/.debug" ) + + m.addPackage( 0, "python-devel", "Python Development Package", "python-core", + "${includedir} config" ) # package + + m.addPackage( 0, "python-idle", "Python Integrated Development Environment", "python-core python-tkinter", + "${bindir}/idle idlelib" ) # package + + m.addPackage( 0, "python-pydoc", "Python Interactive Help Support", "python-core python-lang python-stringold python-re", + "${bindir}/pydoc pydoc.*" ) + + m.addPackage( 0, "python-smtpd", "Python Simple Mail Transport Daemon", "python-core python-netserver python-email python-mime", + "${bindir}/smtpd.*" ) + + m.addPackage( 0, "python-audio", "Python Audio Handling", "python-core", + "wave.* chunk.* sndhdr.* lib-dynload/ossaudiodev.so lib-dynload/audioop.so" ) + + m.addPackage( 0, "python-bsddb", "Python Berkeley Database Bindings", "python-core", + "bsddb lib-dynload/_bsddb.so" ) # package + + m.addPackage( 0, "python-codecs", "Python Codecs, Encodings & i18n Support", "python-core python-lang", + "codecs.* encodings gettext.* locale.* lib-dynload/_locale.so lib-dynload/unicodedata.so stringprep.* xdrlib.*" ) + + m.addPackage( 0, "python-compile", "Python Bytecode Compilation Support", "python-core", + "py_compile.* compileall.*" ) + + m.addPackage( 0, "python-compiler", "Python Compiler Support", "python-core", + "compiler" ) # package + + m.addPackage( 0, "python-compression", "Python High Level Compression Support", "python-core python-zlib", + "gzip.* zipfile.* tarfile.*" ) + + m.addPackage( 0, "python-crypt", "Python Basic Cryptographic and Hashing Support", "python-core", + "hashlib.* md5.* sha.* lib-dynload/crypt.so lib-dynload/_hashlib.so lib-dynload/_sha256.so lib-dynload/_sha512.so" ) + + m.addPackage( 0, "python-textutils", "Python Option Parsing, Text Wrapping and Comma-Separated-Value Support", "python-core python-io python-re python-stringold", + "lib-dynload/_csv.so csv.* optparse.* textwrap.*" ) + + m.addPackage( 0, "python-curses", "Python Curses Support", "python-core", + "curses lib-dynload/_curses.so lib-dynload/_curses_panel.so" ) # directory + low level module + + m.addPackage( 0, "python-ctypes", "Python C Types Support", "python-core", + "ctypes lib-dynload/_ctypes.so" ) # directory + low level module + + m.addPackage( 0, "python-datetime", "Python Calendar and Time support", "python-core python-codecs", + "_strptime.* calendar.* lib-dynload/datetime.so" ) + + m.addPackage( 0, "python-db", "Python File-Based Database Support", "python-core", + "anydbm.* dumbdbm.* whichdb.* " ) + + m.addPackage( 0, "python-debugger", "Python Debugger", "python-core python-io python-lang python-re python-stringold python-shell python-pprint", + "bdb.* pdb.*" ) + + m.addPackage( 0, "python-difflib", "Python helpers for computing deltas between objects.", "python-lang python-re", + "difflib.*" ) + + m.addPackage( 0, "python-distutils", "Python Distribution Utilities", "python-core", + 
"config distutils" ) # package + + m.addPackage( 0, "python-doctest", "Python framework for running examples in docstrings.", "python-core python-lang python-io python-re python-unittest python-debugger python-difflib", + "doctest.*" ) + + m.addPackage( 0, "python-email", "Python Email Support", "python-core python-io python-re python-mime python-audio python-image", + "email" ) # package + + m.addPackage( 0, "python-fcntl", "Python's fcntl Interface", "python-core", + "lib-dynload/fcntl.so" ) + + m.addPackage( 0, "python-hotshot", "Python Hotshot Profiler", "python-core", + "hotshot lib-dynload/_hotshot.so" ) + + m.addPackage( 0, "python-html", "Python HTML Processing", "python-core", + "formatter.* htmlentitydefs.* htmllib.* markupbase.* sgmllib.* " ) + + m.addPackage( 0, "python-gdbm", "Python GNU Database Support", "python-core", + "lib-dynload/gdbm.so" ) + + m.addPackage( 0, "python-image", "Python Graphical Image Handling", "python-core", + "colorsys.* imghdr.* lib-dynload/imageop.so lib-dynload/rgbimg.so" ) + + m.addPackage( 0, "python-io", "Python Low-Level I/O", "python-core python-math", + "lib-dynload/_socket.so lib-dynload/_ssl.so lib-dynload/select.so lib-dynload/termios.so lib-dynload/cStringIO.so " + "pipes.* socket.* tempfile.* StringIO.* " ) + + m.addPackage( 0, "python-lang", "Python Low-Level Language Support", "python-core", + "lib-dynload/array.so lib-dynload/parser.so lib-dynload/operator.so lib-dynload/_weakref.so " + + "lib-dynload/itertools.so lib-dynload/collections.so lib-dynload/_bisect.so lib-dynload/_heapq.so " + + "atexit.* bisect.* code.* codeop.* dis.* heapq.* inspect.* keyword.* opcode.* symbol.* repr.* token.* " + + " tokenize.* traceback.* linecache.* weakref.*" ) + + m.addPackage( 0, "python-logging", "Python Logging Support", "python-core python-io python-lang python-pickle python-stringold", + "logging" ) # package + + m.addPackage( 0, "python-tkinter", "Python Tcl/Tk Bindings", "python-core", + "lib-dynload/_tkinter.so lib-tk" ) # package + + m.addPackage( 0, "python-math", "Python Math Support", "python-core", + "lib-dynload/cmath.so lib-dynload/math.so lib-dynload/_random.so random.* sets.*" ) + + m.addPackage( 0, "python-mime", "Python MIME Handling APIs", "python-core python-io", + "mimetools.* uu.* quopri.* rfc822.*" ) + + m.addPackage( 0, "python-mmap", "Python Memory-Mapped-File Support", "python-core python-io", + "lib-dynload/mmap.so " ) + + m.addPackage( 0, "python-unixadmin", "Python Unix Administration Support", "python-core", + "lib-dynload/nis.so lib-dynload/grp.so lib-dynload/pwd.so getpass.*" ) + + m.addPackage( 0, "python-netclient", "Python Internet Protocol Clients", "python-core python-crypt python-datetime python-io python-lang python-logging python-mime", + "*Cookie*.* " + + "base64.* cookielib.* ftplib.* gopherlib.* hmac.* httplib.* mimetypes.* nntplib.* poplib.* smtplib.* telnetlib.* urllib.* urllib2.* urlparse.* uuid.*" ) + + m.addPackage( 0, "python-netserver", "Python Internet Protocol Servers", "python-core python-netclient", + "cgi.* BaseHTTPServer.* SimpleHTTPServer.* SocketServer.*" ) + + m.addPackage( 0, "python-pickle", "Python Persistence Support", "python-core python-codecs python-io python-re", + "pickle.* shelve.* lib-dynload/cPickle.so" ) + + m.addPackage( 0, "python-pkgutil", "Python Package Extension Utility Support", "python-core", + "pkgutil.*") + + m.addPackage( 0, "python-pprint", "Python Pretty-Print Support", "python-core", + "pprint.*" ) + + m.addPackage( 0, "python-profile", "Python Basic Profiling 
Support", "python-core python-textutils", + "profile.* pstats.* cProfile.* lib-dynload/_lsprof.so" ) + + m.addPackage( 0, "python-re", "Python Regular Expression APIs", "python-core", + "re.* sre.* sre_compile.* sre_constants* sre_parse.*" ) # _sre is builtin + + m.addPackage( 0, "python-readline", "Python Readline Support", "python-core", + "lib-dynload/readline.so rlcompleter.*" ) + + m.addPackage( 0, "python-resource", "Python Resource Control Interface", "python-core", + "lib-dynload/resource.so" ) + + m.addPackage( 0, "python-shell", "Python Shell-Like Functionality", "python-core python-re", + "cmd.* commands.* dircache.* fnmatch.* glob.* popen2.* shlex.* shutil.*" ) + + m.addPackage( 0, "python-robotparser", "Python robots.txt parser", "python-core python-netclient", + "robotparser.*") + + m.addPackage( 0, "python-subprocess", "Python Subprocess Support", "python-core python-io python-re python-fcntl python-pickle", + "subprocess.*" ) + + m.addPackage( 0, "python-sqlite3", "Python Sqlite3 Database Support", "python-core python-datetime python-lang python-crypt python-io python-threading python-zlib", + "lib-dynload/_sqlite3.so sqlite3/dbapi2.* sqlite3/__init__.*" ) + + m.addPackage( 0, "python-sqlite3-tests", "Python Sqlite3 Database Support Tests", "python-core python-sqlite3", + "sqlite3/test" ) + + m.addPackage( 0, "python-stringold", "Python String APIs [deprecated]", "python-core python-re", + "lib-dynload/strop.so string.*" ) + + m.addPackage( 0, "python-syslog", "Python's Syslog Interface", "python-core", + "lib-dynload/syslog.so" ) + + m.addPackage( 0, "python-terminal", "Python Terminal Controlling Support", "python-core python-io", + "pty.* tty.*" ) + + m.addPackage( 0, "python-tests", "Python Tests", "python-core", + "test" ) # package + + m.addPackage( 0, "python-threading", "Python Threading & Synchronization Support", "python-core python-lang", + "_threading_local.* dummy_thread.* dummy_threading.* mutex.* threading.* Queue.*" ) + + m.addPackage( 0, "python-unittest", "Python Unit Testing Framework", "python-core python-stringold python-lang", + "unittest.*" ) + + m.addPackage( 0, "python-xml", "Python basic XML support.", "python-core python-re", + "lib-dynload/pyexpat.so xml xmllib.*" ) # package + + m.addPackage( 0, "python-xmlrpc", "Python XMLRPC Support", "python-core python-xml python-netserver python-lang", + "xmlrpclib.* SimpleXMLRPCServer.*" ) + + m.addPackage( 0, "python-zlib", "Python zlib Support.", "python-core", + "lib-dynload/zlib.so" ) + + m.addPackage( 0, "python-mailbox", "Python Mailbox Format Support", "python-core python-mime", + "mailbox.*" ) + + # FIXME consider adding to python-compression + m.addPackage( 0, "python-bzip2", "Python bzip2 support", "python-core", + "lib-dynload/bz2.so" ) + + # FIXME consider adding to some higher level package + m.addPackage( 0, "python-elementtree", "Python elementree", "python-core", + "lib-dynload/_elementtree.so" ) + + m.make() + +# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Local CPU benchmarks for collective ops.""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import time +import numpy as np + +from tensorflow.core.protobuf import config_pb2 +from tensorflow.python.client import session +from tensorflow.python.framework import constant_op +from tensorflow.python.framework import ops +from tensorflow.python.ops import collective_ops +from tensorflow.python.platform import test + + +class CollectiveOpBenchmark(test.Benchmark): + """Benchmarks for local CPU collective op execution.""" + + def benchmark_collective(self): + """Measures the performance of local CPU collective execution.""" + shapes = [(10,), (1000,), (1000000,)] + devices = [2, 4, 8] + collective_key_counter = 0 + + for group_size in devices: + group_key = collective_key_counter + instance_key = collective_key_counter + collective_key_counter += 1 + + for shape in shapes: + config = config_pb2.ConfigProto(device_count={"CPU": group_size}) + with session.Session(config=config) as sess: + # Use a C++ callable to minimize the Python overhead in the benchmark. + callable_opts = config_pb2.CallableOptions() + reduce_ops = [] + for device in range(group_size): + with ops.device("CPU:{}".format(device)): + t = constant_op.constant(np.multiply(range(shape[0]), 1.0)) + r = collective_ops.all_reduce(t, group_size, group_key, + instance_key, "Add", "Div") + reduce_ops.append(r) + callable_opts.target.append(r.name) + op_callable = sess._make_callable_from_options(callable_opts) # pylint: disable=protected-access + + # Run five steps to warm up the session caches and do collective param + # resolution before taking the first measurement. + for _ in range(5): + op_callable() + deltas = [] + overall_start = time.time() + # Run at least five repetitions and for at least five seconds. + while len(deltas) < 5 or time.time() - overall_start < 5.0: + start = time.time() + for _ in range(100): + op_callable() + end = time.time() + deltas.append(end - start) + del op_callable + + median_wall_time = np.median(deltas) / 100.0 + iters = len(deltas) * 100 + + self.report_benchmark( + iters=iters, wall_time=median_wall_time, + name="num_elements_{}_num_devices_{}".format(np.prod(shape), + group_size)) + + +if __name__ == "__main__": + test.main() + +import errno +import os +import time +import mock +import pytest + +boto3 = pytest.importorskip("boto3") +botocore = pytest.importorskip("botocore") +placebo = pytest.importorskip("placebo") + +""" +Using Placebo to test modules using boto3: + +This is an example test, using the placeboify fixture to test that a module +will fail if resources it depends on don't exist. + +> from placebo_fixtures import placeboify, scratch_vpc +> +> def test_create_with_nonexistent_launch_config(placeboify): +> connection = placeboify.client('autoscaling') +> module = FakeModule('test-asg-created', None, min_size=0, max_size=0, desired_capacity=0) +> with pytest.raises(FailJSON) as excinfo: +> asg_module.create_autoscaling_group(connection, module) +> .... asserts based on module state/exceptions .... 
+ +In more advanced cases, use unrecorded resource fixtures to fill in ARNs/IDs of +things modules depend on, such as: + +> def test_create_in_vpc(placeboify, scratch_vpc): +> connection = placeboify.client('autoscaling') +> module = FakeModule(name='test-asg-created', +> min_size=0, max_size=0, desired_capacity=0, +> availability_zones=[s['az'] for s in scratch_vpc['subnets']], +> vpc_zone_identifier=[s['id'] for s in scratch_vpc['subnets']], +> ) +> ..... so on and so forth .... +""" + + +@pytest.fixture +def placeboify(request, monkeypatch): + """This fixture puts a recording/replaying harness around `boto3_conn` + + Placeboify patches the `boto3_conn` function in ec2 module_utils to return + a boto3 session that in recording or replaying mode, depending on the + PLACEBO_RECORD environment variable. Unset PLACEBO_RECORD (the common case + for just running tests) will put placebo in replay mode, set PLACEBO_RECORD + to any value to turn off replay & operate on real AWS resources. + + The recorded sessions are stored in the test file's directory, under the + namespace `placebo_recordings/{testfile name}/{test function name}` to + distinguish them. + """ + session = boto3.Session(region_name='us-west-2') + + recordings_path = os.path.join( + request.fspath.dirname, + 'placebo_recordings', + request.fspath.basename.replace('.py', ''), + request.function.__name__ + # remove the test_ prefix from the function & file name + ).replace('test_', '') + + try: + # make sure the directory for placebo test recordings is available + os.makedirs(recordings_path) + except OSError as e: + if e.errno != errno.EEXIST: + raise + + pill = placebo.attach(session, data_path=recordings_path) + if os.getenv('PLACEBO_RECORD'): + pill.record() + else: + pill.playback() + + def boto3_middleman_connection(module, conn_type, resource, region='us-west-2', **kwargs): + if conn_type != 'client': + # TODO support resource-based connections + raise ValueError('Mocker only supports client, not %s' % conn_type) + return session.client(resource, region_name=region) + + import ansible.module_utils.ec2 + monkeypatch.setattr( + ansible.module_utils.ec2, + 'boto3_conn', + boto3_middleman_connection, + ) + yield session + + # tear down + pill.stop() + + +@pytest.fixture(scope='module') +def basic_launch_config(): + """Create an EC2 launch config whose creation *is not* recorded and return its name + + This fixture is module-scoped, since launch configs are immutable and this + can be reused for many tests. + """ + if not os.getenv('PLACEBO_RECORD'): + yield 'pytest_basic_lc' + return + + # use a *non recording* session to make the launch config + # since that's a prereq of the ec2_asg module, and isn't what + # we're testing. 
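+    # (This client is built outside the placebo-wrapped session, so it picks
+    # up the default credentials/region from the environment and nothing
+    # below ends up in the recordings.)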
+ asg = boto3.client('autoscaling') + asg.create_launch_configuration( + LaunchConfigurationName='pytest_basic_lc', + ImageId='ami-9be6f38c', # Amazon Linux 2016.09 us-east-1 AMI, can be any valid AMI + SecurityGroups=[], + UserData='#!/bin/bash\necho hello world', + InstanceType='t2.micro', + InstanceMonitoring={'Enabled': False}, + AssociatePublicIpAddress=True + ) + + yield 'pytest_basic_lc' + + try: + asg.delete_launch_configuration(LaunchConfigurationName='pytest_basic_lc') + except botocore.exceptions.ClientError as e: + if 'not found' in e.message: + return + raise + + +@pytest.fixture(scope='module') +def scratch_vpc(): + if not os.getenv('PLACEBO_RECORD'): + yield { + 'vpc_id': 'vpc-123456', + 'cidr_range': '10.0.0.0/16', + 'subnets': [ + { + 'id': 'subnet-123456', + 'az': 'us-east-1d', + }, + { + 'id': 'subnet-654321', + 'az': 'us-east-1e', + }, + ] + } + return + + # use a *non recording* session to make the base VPC and subnets + ec2 = boto3.client('ec2') + vpc_resp = ec2.create_vpc( + CidrBlock='10.0.0.0/16', + AmazonProvidedIpv6CidrBlock=False, + ) + subnets = ( + ec2.create_subnet( + VpcId=vpc_resp['Vpc']['VpcId'], + CidrBlock='10.0.0.0/24', + ), + ec2.create_subnet( + VpcId=vpc_resp['Vpc']['VpcId'], + CidrBlock='10.0.1.0/24', + ) + ) + time.sleep(3) + + yield { + 'vpc_id': vpc_resp['Vpc']['VpcId'], + 'cidr_range': '10.0.0.0/16', + 'subnets': [ + { + 'id': s['Subnet']['SubnetId'], + 'az': s['Subnet']['AvailabilityZone'], + } for s in subnets + ] + } + + try: + for s in subnets: + try: + ec2.delete_subnet(SubnetId=s['Subnet']['SubnetId']) + except botocore.exceptions.ClientError as e: + if 'not found' in e.message: + continue + raise + ec2.delete_vpc(VpcId=vpc_resp['Vpc']['VpcId']) + except botocore.exceptions.ClientError as e: + if 'not found' in e.message: + return + raise + + +@pytest.fixture(scope='module') +def maybe_sleep(): + """If placebo is reading saved sessions, make sleep always take 0 seconds. + + AWS modules often perform polling or retries, but when using recorded + sessions there's no reason to wait. We can still exercise retry and other + code paths without waiting for wall-clock time to pass.""" + if not os.getenv('PLACEBO_RECORD'): + p = mock.patch('time.sleep', return_value=None) + p.start() + yield + p.stop() + else: + yield + +#! 
+
+import numpy as np
+import psi4
+import json
+
+# Generate JSON data
+json_data = {
+    "schema_name": "qc_schema_input",
+    "schema_version": 1,
+    "molecule": {
+        "geometry": [
+            0.0,
+            0.0,
+            -5.0,
+            0.0,
+            0.0,
+            5.0,
+        ],
+        "symbols": ["He", "He"],
+        "real": [True, False]
+    },
+    "driver": "energy",
+    "model": {
+        "method": "SCF",
+        "basis": "cc-pVDZ"
+    },
+    "keywords": {
+        "scf_type": "df"
+    },
+    "memory": 1024 * 1024 * 1024,
+    "nthreads": 1,
+}
+
+# Write expected output
+expected_return_result = -2.85518836280515
+expected_properties = {
+    'calcinfo_nbasis': 10,
+    'calcinfo_nmo': 10,
+    'calcinfo_nalpha': 1,
+    'calcinfo_nbeta': 1,
+    'calcinfo_natom': 2,
+    'scf_one_electron_energy': -3.8820496359492576,
+    'scf_two_electron_energy': 1.0268612731441076,
+    'nuclear_repulsion_energy': 0.0,
+    'scf_total_energy': -2.85518836280515,
+    'return_energy': -2.85518836280515
+}
+
+json_ret = psi4.json_wrapper.run_json(json_data)
+
+# Compare the run against the reference values above (field names assumed to
+# follow the QCSchema v1 output layout; the tolerance here is a loose choice)
+assert abs(json_ret["return_result"] - expected_return_result) < 1e-6
+for k in expected_properties:
+    assert abs(json_ret["properties"][k] - expected_properties[k]) < 1e-6, k
+
+
+import zmq
+import time
+from mongrel2.request import Request
+try:
+    import json
+except ImportError:
+    import simplejson as json
+
+CTX = zmq.Context()
+
+HTTP_FORMAT = "HTTP/1.1 %(code)s %(status)s\r\n%(headers)s\r\n\r\n%(body)s"
+MAX_IDENTS = 100
+
+def http_response(body, code, status, headers):
+    payload = {'code': code, 'status': status, 'body': body}
+    headers['Content-Length'] = len(body)
+    payload['headers'] = "\r\n".join('%s: %s' % (k, v) for k, v in
+                                     headers.items())
+
+    return HTTP_FORMAT % payload
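+
+# For example (illustrative only; header order depends on dict iteration):
+#
+#   http_response("hi", 200, "OK", {})
+#   => 'HTTP/1.1 200 OK\r\nContent-Length: 2\r\n\r\nhi'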
+
+
+class Connection(object):
+    """
+    A Connection object manages the connection between your handler
+    and a Mongrel2 server (or servers). It can receive raw requests
+    or JSON encoded requests, whether they arrive as HTTP or MSG
+    request types, and it can send individual responses or batch
+    responses either raw or as JSON. It also has a way to encode HTTP
+    responses for simplicity since that'll be fairly common.
+    """
+
+    def __init__(self, sender_id, sub_addr, pub_addr):
+        """
+        Your addresses should be the same as what you configured
+        in the config.sqlite for Mongrel2 and are usually like
+        tcp://127.0.0.1:9998
+        """
+        self.sender_id = sender_id
+
+        reqs = CTX.socket(zmq.PULL)
+        reqs.connect(sub_addr)
+
+        resp = CTX.socket(zmq.PUB)
+
+        if sender_id:
+            resp.setsockopt(zmq.IDENTITY, sender_id)
+
+        resp.connect(pub_addr)
+
+        self.sub_addr = sub_addr
+        self.pub_addr = pub_addr
+        self.reqs = reqs
+        self.resp = resp
+
+
+    def recv(self):
+        """
+        Receives a raw mongrel2.handler.Request object that you
+        can then work with.
+        """
+        return Request.parse(self.reqs.recv())
+
+    def recv_json(self):
+        """
+        Same as regular recv, but assumes the body is JSON and
+        creates a new attribute named req.data with the decoded
+        payload. This will throw an error if it is not JSON.
+
+        Normally Request just does this if the METHOD is 'JSON'
+        but you can use this to force it for, say, HTTP requests.
+        """
+        req = self.recv()
+
+        if not req.data:
+            req.data = json.loads(req.body)
+
+        return req
+
+    def send(self, uuid, conn_id, msg):
+        """
+        Raw send to the given connection ID at the given uuid, mostly used
+        internally.
+        """
+        header = "%s %d:%s," % (uuid, len(str(conn_id)), str(conn_id))
+        self.resp.send(header + ' ' + msg)
+
+
+    def reply(self, req, msg):
+        """
+        Does a reply based on the given Request object and message.
+        This is easier since the req object contains all the info
+        needed to do the proper reply addressing.
+        """
+        self.send(req.sender, req.conn_id, msg)
+
+
+    def reply_json(self, req, data):
+        """
+        Same as reply, but tries to convert data to JSON first.
+        """
+        self.send(req.sender, req.conn_id, json.dumps(data))
+
+
+    def reply_http(self, req, body, code=200, status="OK", headers=None):
+        """
+        Basic HTTP response mechanism which will take your body,
+        any headers you've made, and encode them so that the
+        browser gets them.
+        """
+        self.reply(req, http_response(body, code, status, headers or {}))
+
+
+    def deliver(self, uuid, idents, data):
+        """
+        This lets you send a single message to many currently
+        connected clients. There's a MAX_IDENTS that you should
+        not exceed, so chunk your targets as needed. Mongrel2
+        delivers the message to each target exactly once, and you
+        don't have to loop, which cuts down on reply volume.
+        """
+        self.send(uuid, ' '.join(idents), data)
+
+
+    def deliver_json(self, uuid, idents, data):
+        """
+        Same as deliver, but converts to JSON first.
+        """
+        self.deliver(uuid, idents, json.dumps(data))
+
+
+    def deliver_http(self, uuid, idents, body, code=200, status="OK", headers=None):
+        """
+        Same as deliver, but builds an HTTP response, which means, yes,
+        you can reply to multiple connected clients waiting for an HTTP
+        response from one handler. Kinda cool.
+        """
+        self.deliver(uuid, idents, http_response(body, code, status, headers or {}))
+
+
+    def close(self, req):
+        """
+        Tells mongrel2 to explicitly close the HTTP connection.
+        """
+        self.reply(req, "")
+
+
+    def deliver_close(self, uuid, idents):
+        """
+        Same as close but does it to a whole bunch of idents at a time.
+        """
+        self.deliver(uuid, idents, "")
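+
+# A minimal handler loop built on Connection might look like this (the
+# sender_id and socket addresses are illustrative; they must match the
+# handler entry in Mongrel2's config.sqlite):
+#
+#   conn = Connection('82209006-86FF-4982-B5EA-D1E29E55D481',
+#                     'tcp://127.0.0.1:9997', 'tcp://127.0.0.1:9996')
+#   while True:
+#       req = conn.recv()
+#       conn.reply_http(req, 'hello from a handler')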
+
+
+#! /usr/bin/env python
+"""Unit tests for germinate.archive."""
+
+# Copyright (C) 2012 Canonical Ltd.
+#
+# Germinate is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the
+# Free Software Foundation; either version 2, or (at your option) any
+# later version.
+#
+# Germinate is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Germinate; see the file COPYING.  If not, write to the Free
+# Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301, USA.
+
+import gzip
+import os
+import textwrap
+
+from germinate.archive import IndexType, TagFile
+from germinate.tests.helpers import TestCase
+
+
+class TestTagFile(TestCase):
+    def test_init_lists(self):
+        """TagFile may be constructed with list parameters."""
+        tagfile = TagFile(
+            ["dist"], ["component"], "arch", ["mirror"],
+            source_mirrors=["source_mirror"])
+        self.assertEqual(["dist"], tagfile._dists)
+        self.assertEqual(["component"], tagfile._components)
+        self.assertEqual(["mirror"], tagfile._mirrors)
+        self.assertEqual(["source_mirror"], tagfile._source_mirrors)
+
+    def test_init_strings(self):
+        """TagFile may be constructed with string parameters."""
+        tagfile = TagFile(
+            "dist", "component", "arch", "mirror",
+            source_mirrors="source_mirror")
+        self.assertEqual(["dist"], tagfile._dists)
+        self.assertEqual(["component"], tagfile._components)
+        self.assertEqual(["mirror"], tagfile._mirrors)
+        self.assertEqual(["source_mirror"], tagfile._source_mirrors)
+
+    def test_sections(self):
+        """Test fetching sections from a basic TagFile archive."""
+        self.useTempDir()
+        main_dir = os.path.join("mirror", "dists", "unstable", "main")
+        binary_dir = os.path.join(main_dir, "binary-i386")
+        source_dir = os.path.join(main_dir, "source")
+        os.makedirs(binary_dir)
+        os.makedirs(source_dir)
+        packages = gzip.GzipFile(os.path.join(binary_dir, "Packages.gz"), "w")
+        try:
+            packages.write(textwrap.dedent("""\
+                Package: test
+                Version: 1.0
+                Architecture: i386
+
+                """).encode("UTF-8"))
+        finally:
+            packages.close()
+        sources = gzip.GzipFile(os.path.join(source_dir, "Sources.gz"), "w")
+        try:
+            sources.write(textwrap.dedent("""\
+                Source: test
+                Version: 1.0
+
+                """).encode("UTF-8"))
+        finally:
+            sources.close()
+
+        tagfile = TagFile(
+            "unstable", "main", "i386", "file://%s/mirror" % self.temp_dir)
+        sections = list(tagfile.sections())
+        self.assertEqual(IndexType.PACKAGES, sections[0][0])
+        self.assertEqual("test", sections[0][1]["Package"])
+        self.assertEqual("1.0", sections[0][1]["Version"])
+        self.assertEqual("i386", sections[0][1]["Architecture"])
+        self.assertEqual(IndexType.SOURCES, sections[1][0])
+        self.assertEqual("test", sections[1][1]["Source"])
+        self.assertEqual("1.0", sections[1][1]["Version"])
+
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Omok (five in a row)
+# https://www.acmicpc.net/problem/2615
+
+import sys
+
+if __name__ == '__main__':
+    # read the 19x19 board; each cell is 0 (empty), 1 (black) or 2 (white)
+    matrix = []
+    for cnt in range(19):
+        row_data = [int(x) for x in sys.stdin.readline().strip().replace(' ', '')]
+        matrix.append(row_data)
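+    # Strategy: give every stone of a player a unique positive id, then treat
+    # a stone as the head of a run when the neighbouring cell *before* it in a
+    # given direction is empty (or off the board).  From each head, count up
+    # to five successors; finding exactly four of them means exactly five in
+    # a row (a fifth successor would mean six or more, which does not win).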
+    # check whether player 1 wins
+    num_matrix = [[0] * 19 for x in range(19)]
+    num = 1
+    for i in range(19):
+        for j in range(19):
+            if matrix[i][j] == 1:
+                num_matrix[i][j] = num
+                num += 1
+    graph_first = {}
+    for i in range(19):
+        for j in range(19):
+            if num_matrix[i][j] > 0:
+                # vertical
+                result = []
+                if i == 0 or num_matrix[i - 1][j] == 0:
+                    for cnt in range(1, 6):
+                        if i + cnt < 19 and num_matrix[i + cnt][j] > 0:
+                            result.append(str(num_matrix[i + cnt][j]))
+                        else:
+                            break
+                    if len(result) == 4:
+                        graph_first[str(num_matrix[i][j])] = result
+                        break
+
+                # horizontal
+                result = []
+                if j == 0 or num_matrix[i][j - 1] == 0:
+                    for cnt in range(1, 6):
+                        if j + cnt < 19 and num_matrix[i][j + cnt] > 0:
+                            result.append(str(num_matrix[i][j + cnt]))
+                        else:
+                            break
+                    if len(result) == 4:
+                        graph_first[str(num_matrix[i][j])] = result
+                        break
+
+                # down-right diagonal: the head's up-left neighbour must be
+                # empty, or the head must touch the top or left edge
+                result = []
+                if i == 0 or j == 0 or num_matrix[i - 1][j - 1] == 0:
+                    for cnt in range(1, 6):
+                        if j + cnt < 19 and i + cnt < 19 and num_matrix[i + cnt][j + cnt] > 0:
+                            result.append(str(num_matrix[i + cnt][j + cnt]))
+                        else:
+                            break
+                    if len(result) == 4:
+                        graph_first[str(num_matrix[i][j])] = result
+                        break
+
+                # up-right diagonal: heads start from the bottom-left stone
+                result = []
+                if i == 18 or j == 0 or num_matrix[i + 1][j - 1] == 0:
+                    for cnt in range(1, 6):
+                        if j + cnt < 19 and i - cnt >= 0 and num_matrix[i - cnt][j + cnt] > 0:
+                            result.append(str(num_matrix[i - cnt][j + cnt]))
+                        else:
+                            break
+                    if len(result) == 4:
+                        graph_first[str(num_matrix[i][j])] = result
+                        break
+
+    # check whether player 2 wins
+    num_matrix_2 = [[0] * 19 for x in range(19)]
+    num = 1
+    for i in range(19):
+        for j in range(19):
+            if matrix[i][j] == 2:
+                num_matrix_2[i][j] = num
+                num += 1
+    graph_second = {}
+    for i in range(19):
+        for j in range(19):
+            if num_matrix_2[i][j] > 0:
+                # vertical
+                result = []
+                if i == 0 or num_matrix_2[i - 1][j] == 0:
+                    for cnt in range(1, 6):
+                        if i + cnt < 19 and num_matrix_2[i + cnt][j] > 0:
+                            result.append(str(num_matrix_2[i + cnt][j]))
+                        else:
+                            break
+                    if len(result) == 4:
+                        graph_second[str(num_matrix_2[i][j])] = result
+                        break
+
+                # horizontal
+                result = []
+                if j == 0 or num_matrix_2[i][j - 1] == 0:
+                    for cnt in range(1, 6):
+                        if j + cnt < 19 and num_matrix_2[i][j + cnt] > 0:
+                            result.append(str(num_matrix_2[i][j + cnt]))
+                        else:
+                            break
+                    if len(result) == 4:
+                        graph_second[str(num_matrix_2[i][j])] = result
+                        break
+
+                # down-right diagonal
+                result = []
+                if i == 0 or j == 0 or num_matrix_2[i - 1][j - 1] == 0:
+                    for cnt in range(1, 6):
+                        if j + cnt < 19 and i + cnt < 19 and num_matrix_2[i + cnt][j + cnt] > 0:
+                            result.append(str(num_matrix_2[i + cnt][j + cnt]))
+                        else:
+                            break
+                    if len(result) == 4:
+                        graph_second[str(num_matrix_2[i][j])] = result
+                        break
+
+                # up-right diagonal
+                result = []
+                if i == 18 or j == 0 or num_matrix_2[i + 1][j - 1] == 0:
+                    for cnt in range(1, 6):
+                        if j + cnt < 19 and i - cnt >= 0 and num_matrix_2[i - cnt][j + cnt] > 0:
+                            result.append(str(num_matrix_2[i - cnt][j + cnt]))
+                        else:
+                            break
+                    if len(result) == 4:
+                        graph_second[str(num_matrix_2[i][j])] = result
+                        break
+
+    check_one = True
+    if len(graph_first) > 0:
+        for d in graph_first.items():
+            if len(d[1]) == 4:
+                print(1)
+                i = 0
+                for mat in num_matrix:
+                    if mat.count(int(d[0])) > 0:
+                        print(str(i + 1) + ' ' + str(mat.index(int(d[0])) + 1))
+                        check_one = False
+                    i += 1
+
+    check_sec = True
+    if len(graph_second) > 0:
+        for d in graph_second.items():
+            if len(d[1]) == 4:
+                print(2)
+                i = 0
+                for mat in num_matrix_2:
+                    if mat.count(int(d[0])) > 0:
+                        print(str(i + 1) + ' ' + str(mat.index(int(d[0])) + 1))
+                        check_sec = False
+                    i += 1
+
+    if check_one and check_sec:
+        print(0)
+
+
+
+# -*- coding: utf-8 -*-
+#--------------------------------------------------------
+# Movie Ultra 7K
+# (http://forum.rojadirecta.es/
+# (http://xbmcspain.com/foro/
+# Version 0.0.5 (04.11.2014)
+#--------------------------------------------------------
+# License: GPL (http://www.gnu.org/licenses/gpl-3.0.html)
+#--------------------------------------------------------
+
+import os,sys,urlparse,urllib,urllib2,re,shutil,zipfile,inspect,types
+
+import xbmc,xbmcgui,xbmcaddon,xbmcplugin,plugintools
+'''
+#from inspect import getmembers, isfunction
+print "WISE\n"
+functions_list = [o for o in getmembers(unwise) if isfunction(o[1])]
+print str(functions_list)
+print getmembers(unwise)
+for i in dir(unwise): print i
+for i in getmembers(unwise): print i
+print [key for key in locals().keys()
+       if isinstance(locals()[key], type(sys)) and not key.startswith('__')]
+'''
+#print unwise
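+
+# ShowSport scraper: shsp builds the top-level menu (schedule, channel list
+# and embed pages); shsp3 and shsp5 parse those listing pages, while shsp6
+# and shsp7 drill down to the actual embedded streams.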
+def shsp(params):
+    url = params.get("url")
+    thumb = params.get("thumbnail")
+    plugintools.add_item(action="shsp3", title="[COLOR=orange]Shedule[/COLOR]", url=url, thumbnail=thumb, fanart=thumb, isPlayable=False, folder=True)
+    plugintools.add_item(action="shsp5", title="[COLOR=orange]List[/COLOR]", url="http://showsport-tv.com/", thumbnail=thumb, fanart=thumb, isPlayable=False, folder=True)
+    plugintools.add_item(action="shsp4", title="[COLOR=orange]Embed[/COLOR]", url="http://showsport-tv.com/update/embed.html", thumbnail=thumb, fanart=thumb, isPlayable=False, folder=True)
+
+def shsp3(params):
+    url = params.get("url")
+    thumb = params.get("thumbnail")
+    request_headers = []
+    request_headers.append(["User-Agent", "Mozilla/4.0 (compatible; MSIE 5.01; Windows NT 5.0)"])
+    body, response_headers = plugintools.read_body_and_headers(url, headers=request_headers)
+    #os.environ["HTTP_PROXY"]=Proxy
+    data = body
+    #print "START="+params.get("url")
+    p = re.compile(ur'()', re.DOTALL)
+    matches = re.findall(p, data)
+    #del matches[0]
+    for match in matches:
+        #print match
+        p = re.compile(ur'([^<]+).*?\s()', re.DOTALL)
+        links = re.findall(p, match)
+        for imgs, titles, divs in links:
+            title = titles.replace("  ", "")
+            title = title.replace(" ", "|")
+            #print divs
+            plugintools.add_item(action="shsp2", title=title, url=divs, thumbnail=thumb, fanart=thumb, isPlayable=False, folder=True)
+
+def shsp5(params):
+    url = params.get("url")
+    thumb = params.get("thumbnail")
+    request_headers = []
+    request_headers.append(["User-Agent", "Mozilla/4.0 (compatible; MSIE 5.01; Windows NT 5.0)"])
+    body, response_headers = plugintools.read_body_and_headers(url, headers=request_headers)
+    #os.environ["HTTP_PROXY"]=Proxy
+    data = body
+    #print "START="+params.get("url")
+    p = re.compile(ur'([^<]+)(.*?)<\/div>', re.DOTALL)
+    matches = re.findall(p, data)
+    #del matches[0]
+    for match, links in matches:
+        url = "http://showsport-tv.com/" + links
+        plugintools.add_item(action="shsp6", title=match, url=url, thumbnail=thumb, fanart=thumb, isPlayable=False, folder=True)
+
+def shsp6(params):
+    url = params.get("url")
+    thumb = params.get("thumbnail")
+    p = re.compile(ur'href="([^"]+).*?>([^<]+)', re.DOTALL)
+    a = re.findall(p, url)
+    for links, channels in a:
+        url = "http://showsport-tv.com/" + links
+        plugintools.add_item(action="shsp7", title=channels, url=url, thumbnail=thumb, fanart=thumb, isPlayable=True, folder=False)
+
+
+def shsp7(params):
+    url = params.get("url")
+    url = url.replace("/ch/", "/update/").replace("php", "html")
+    ref = "http://showsport-tv.com/"
+    thumb = params.get("thumbnail")
+    title = params.get("title")
+    request_headers = []
+    request_headers.append(["User-Agent", "Mozilla/4.0 (compatible; MSIE 5.01; Windows NT 5.0)"])
+    request_headers.append(["Referer", ref])
+    bodyy, response_headers = plugintools.read_body_and_headers(url, headers=request_headers)
+    # str.find() returns -1 (which is truthy) on a miss, so compare explicitly
+    if bodyy.find("googlecode") != -1:
+        print "GOING XUSCACAMUSCA"
+        p = re.compile(ur'id="([^"]+).*?src="([^"]+)', re.DOTALL)
+    elif bodyy.find("iguide") != -1:
+        p = re.compile(ur'var\s?id\s?=\s?([^;]+).*?src="?\'?([^\'"]+)', re.DOTALL)
+        print "GOING IGUIDE"
+    else:
+        print "UNKNOWN"
+        # no known player pattern was found, so there is nothing to scrape
+        return
+    pars = re.findall(p, bodyy)
+    ref = url
+    res = ''
+    for id, script in pars:
+        if script.find("xuscacamusca") != -1:
+            ref = url
+            url = 'http://xuscacamusca.se/gen_h.php?id=' + id + '&width=100%&height=100%'
+            peak2(params)
+        elif script.find("iguide") != -1:
+            url = script + "1009&height=460&channel=" + id + "&autoplay=true"
+            from nstream import iguide2
+            iguide2(url, ref, res)
+        else:
+            print "NO SCRIPT"
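+
+# shsp4 fetches the embed overview page and lists its categories; each entry
+# hands the matched block on to shsp1, which extracts the per-channel players.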
+def shsp4(params):
+    url = params.get("url")
+    thumb = params.get("thumbnail")
+    request_headers = []
+    request_headers.append(["User-Agent", "Mozilla/4.0 (compatible; MSIE 5.01; Windows NT 5.0)"])
+    body, response_headers = plugintools.read_body_and_headers(url, headers=request_headers)
+    #print body
+    p = re.compile(ur'([^<]+)', re.DOTALL)
+    foldr = re.findall(p, match)
+    for img, catg in foldr:
+        #print "\n"+img;print "\n"+catg;
+        thumb = "http://showsport-tv.com/" + img
+        title = catg
+        plugintools.add_item(action="shsp1", title=title, url=match, thumbnail=thumb, fanart=thumb, isPlayable=False, folder=True)
+        #plugintools.add_item( action="" , title=title , url=str(match) ,thumbnail=thumb ,fanart=thumb , isPlayable=False, folder=True )
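+
+# shsp1 pulls the channel name, flash id and player geometry out of the block
+# passed in params["url"] and builds the /update/<fid>.html playable entries.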
+def shsp1(params):
+    url = params.get("url")
+    thumb = params.get("thumbnail")
+    p = re.compile(ur'([^<]+).*?fid=\"([^\&]+).*?v_width=([^;]+).*?v_height=([^;]+).*?src=\"([^\&]+)', re.DOTALL)
+    foldr = re.findall(p, url)
+    for name, fid, w, h, jsrc in foldr:
+        title = name
+        url = 'http://showsport-tv.com/update/' + fid + ".html"
+        plugintools.add_item(action="peaktv2", title=title, url=url, thumbnail=thumb, fanart=thumb, isPlayable=True, folder=False)
+
+def shsp2(params):
+    divs = params.get("url")
+    thumb = params.get("thumbnail")
+    p = re.compile(ur'href=\'?"?([^\'"]+).*?>([^<]+)')
+    link = re.findall(p, divs)
+    #print link
+    for lin in link:
+        url = "http://showsport-tv.com" + lin[0].replace("/ch/", "/update/").replace("php", "html")
+        title = lin[1]
+        print url + "\n" + title
+        plugintools.add_item(action="peaktv2", title=title, url=url, isPlayable=True, folder=False)
+
+def peaktv(params):
+    #plugintools.get_localized_string(21)
+    url = params.get("url")
+    request_headers = []
+    request_headers.append(["User-Agent", "Mozilla/4.0 (compatible; MSIE 5.01; Windows NT 5.0)"])
+    body, response_headers = plugintools.read_body_and_headers(url, headers=request_headers)
+    #os.environ["HTTP_PROXY"]=Proxy
+    data = body
+    #print "START="+params.get("url")
+    p = 'href="([^<]*)'
+    matches = plugintools.find_multiple_matches_multi(data, p)
+    del matches[0]
+    for scrapedurl in matches:
+        url = scrapedurl.strip()
+        #print url
+        title = plugintools.find_single_match(url, '>(.*?:[^:]+)')
+        # strip mis-decoded accented characters from the channel names
+        #title = title.replace("\xe2","a".encode('iso8859-16'));
+        title = title.replace("\xe2", "a")
+        title = title.replace("\xc3", "t")
+        title = title.replace("\xe0", "f")
+        title = title.replace("\xfc", "u")
+        title = title.replace("\xdb", "s")
+        title = title.replace("\x15f", "s")
+        '''
+        #print title.decode("utf-8")
+        print unicode(title,"iso8859-16")
+        '''
+        canal = plugintools.find_single_match(url, 'php\?([^"]+)')
+        url = 'http://peaktv.me/Live.php/?' + canal.strip()
+        if 'DigiSport1' in str(url):
+            thumb = 'http://www.digisport.ro/img/sigla_digisport1.png'
+        elif 'DigiSport2' in str(url):
+            thumb = 'http://itutorial.ro/wp-content/uploads/digi_sport2.png'
+        elif 'DigiSport3' in str(url):
+            thumb = 'http://www.sport4u.tv/web/logo/sport/digi_sport3_ro.png'
+        elif 'DolceSportHD' in str(url):
+            thumb = 'http://static.dolcetv.ro/img/tv_sigle/sigle_black/116.png'
+        elif 'DolceSport1' in str(url):
+            thumb = 'http://static.dolcetv.ro/img/tv_sigle/sigle_black/101.png'
+        elif 'DolceSport2' in str(url):
+            thumb = 'http://static.dolcetv.ro/img/tv_sigle/sigle_black/107.png'
+        elif 'DolceSport3' in str(url):
+            thumb = 'http://static.dolcetv.ro/img/tv_sigle/sigle_black/134.png'
+        elif 'DolceSport4' in str(url):
+            thumb = 'http://static.dolcetv.ro/img/tv_sigle/sigle_black/247.png'
+        elif 'EuroSport2HD' in str(url):
+            thumb = 'http://www.sport4u.tv/web/logo/sport/eurosport-2.png'
+        elif 'EuroSport1HD' in str(url):
+            thumb = 'http://4.bp.blogspot.com/-k50Qb45ZHGY/UrMCA2zRoGI/AAAAAAAAStA/Dj6sF0dHcs8/s1600/790px-Eurosport_logo.svg.png'
+        elif 'LookPlusHD' in str(url):
+            thumb = 'http://upload.wikimedia.org/wikipedia/commons/thumb/a/ac/Look_Plus_HD.png/100px-Look_Plus_HD.png'
+        elif 'LookTVHD' in str(url):
+            thumb = 'http://upload.wikimedia.org/wikipedia/commons/thumb/8/89/Look_TV_HD_logo.png/100px-Look_TV_HD_logo.png'
+        else:
+            thumb = 'http://frocus.net/images/logotv/Sport-ro_HD.jpg'
+        print thumb
+        fanart = thumb
+        plugintools.add_item(action="peaktv2", title=title, url=url, thumbnail=thumb, fanart=fanart, isPlayable=True, folder=False)
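+
+# peaktv2 fetches the selected channel page with a spoofed user agent and
+# digs the final stream parameters out of the embedded player markup.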
+def peaktv2(params):
+    msg = "Searching for the link\nplease wait... "
+    #plugintools.message("CipQ-TV",msg)
+    url = params.get("url")
+    print "START=" + url
+    title = params.get("title")
+    thumb = params.get("thumbnail")
+    ref = url
+    request_headers = []
+    request_headers.append(["User-Agent", "Mozilla/4.0 (compatible; MSIE 5.01; Windows NT 5.0)"])
+    body, response_headers = plugintools.read_body_and_headers(url, headers=request_headers, timeout=30)
+    #os.environ["HTTP_PROXY"]=Proxy
+    data = body
+    #print "START="+data
+    p = '