diff --git "a/codeparrot-valid_1008.txt" "b/codeparrot-valid_1008.txt" new file mode 100644--- /dev/null +++ "b/codeparrot-valid_1008.txt" @@ -0,0 +1,10000 @@ + '/System/Library/Frameworks/IOKit.framework/Versions/A/IOKit') + +if __name__ == "__main__": + unittest.main() + +# Copyright 2013 Red Hat, Inc +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from keystone.tests import unit as tests +from keystone.tests.unit import test_sql_migrate_extensions +from keystone.tests.unit import test_sql_upgrade + + +class PostgresqlMigrateTests(test_sql_upgrade.SqlUpgradeTests): + def setUp(self): + self.skip_if_env_not_set('ENABLE_LIVE_POSTGRES_TEST') + super(PostgresqlMigrateTests, self).setUp() + + def config_files(self): + files = super(PostgresqlMigrateTests, self).config_files() + files.append(tests.dirs.tests_conf("backend_postgresql.conf")) + return files + + +class MysqlMigrateTests(test_sql_upgrade.SqlUpgradeTests): + def setUp(self): + self.skip_if_env_not_set('ENABLE_LIVE_MYSQL_TEST') + super(MysqlMigrateTests, self).setUp() + + def config_files(self): + files = super(MysqlMigrateTests, self).config_files() + files.append(tests.dirs.tests_conf("backend_mysql.conf")) + return files + + +class PostgresqlRevokeExtensionsTests( + test_sql_migrate_extensions.RevokeExtension): + def setUp(self): + self.skip_if_env_not_set('ENABLE_LIVE_POSTGRES_TEST') + super(PostgresqlRevokeExtensionsTests, self).setUp() + + def config_files(self): + files = super(PostgresqlRevokeExtensionsTests, self).config_files() + files.append(tests.dirs.tests_conf("backend_postgresql.conf")) + return files + + +class MysqlRevokeExtensionsTests(test_sql_migrate_extensions.RevokeExtension): + def setUp(self): + self.skip_if_env_not_set('ENABLE_LIVE_MYSQL_TEST') + super(MysqlRevokeExtensionsTests, self).setUp() + + def config_files(self): + files = super(MysqlRevokeExtensionsTests, self).config_files() + files.append(tests.dirs.tests_conf("backend_mysql.conf")) + return files + + +class Db2MigrateTests(test_sql_upgrade.SqlUpgradeTests): + def setUp(self): + self.skip_if_env_not_set('ENABLE_LIVE_DB2_TEST') + super(Db2MigrateTests, self).setUp() + + def config_files(self): + files = super(Db2MigrateTests, self).config_files() + files.append(tests.dirs.tests_conf("backend_db2.conf")) + return files + +import unittest + +from django.test import ignore_warnings +from django.utils.deprecation import RemovedInDjango20Warning + + +class TestUtilsChecksums(unittest.TestCase): + + def check_output(self, function, value, output=None): + """ + Check that function(value) equals output. If output is None, + check that function(value) equals value. 
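+        For example (illustrative): check_output(abs, -2, 2) asserts
+        abs(-2) == 2, and check_output(str.strip, 'a') asserts
+        str.strip('a') == 'a'.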
+ """ + if output is None: + output = value + self.assertEqual(function(value), output) + + @ignore_warnings(category=RemovedInDjango20Warning) + def test_luhn(self): + from django.utils import checksums + f = checksums.luhn + items = ( + (4111111111111111, True), ('4111111111111111', True), + (4222222222222, True), (378734493671000, True), + (5424000000000015, True), (5555555555554444, True), + (1008, True), ('0000001008', True), ('000000001008', True), + (4012888888881881, True), (1234567890123456789012345678909, True), + (4111111111211111, False), (42222222222224, False), + (100, False), ('100', False), ('0000100', False), + ('abc', False), (None, False), (object(), False), + ) + for value, output in items: + self.check_output(f, value, output) + +# This program is free software; you can redistribute it and/or modify +# it under the terms of the (LGPL) GNU Lesser General Public License as +# published by the Free Software Foundation; either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Library Lesser General Public License for more details at +# ( http://www.gnu.org/licenses/lgpl.html ). +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +# written by: Jeff Ortel ( jortel@redhat.com ) + +""" +Provides XML I{attribute} classes. +""" + +import suds.sax +from logging import getLogger +from suds import * +from suds.sax import * +from suds.sax.text import Text + +log = getLogger(__name__) + +class Attribute: + """ + An XML attribute object. + @ivar parent: The node containing this attribute + @type parent: L{element.Element} + @ivar prefix: The I{optional} namespace prefix. + @type prefix: basestring + @ivar name: The I{unqualified} name of the attribute + @type name: basestring + @ivar value: The attribute's value + @type value: basestring + """ + def __init__(self, name, value=None): + """ + @param name: The attribute's name with I{optional} namespace prefix. + @type name: basestring + @param value: The attribute's value + @type value: basestring + """ + self.parent = None + self.prefix, self.name = splitPrefix(name) + self.setValue(value) + + def clone(self, parent=None): + """ + Clone this object. + @param parent: The parent for the clone. + @type parent: L{element.Element} + @return: A copy of this object assigned to the new parent. + @rtype: L{Attribute} + """ + a = Attribute(self.qname(), self.value) + a.parent = parent + return a + + def qname(self): + """ + Get the B{fully} qualified name of this attribute + @return: The fully qualified name. + @rtype: basestring + """ + if self.prefix is None: + return self.name + else: + return ':'.join((self.prefix, self.name)) + + def setValue(self, value): + """ + Set the attributes value + @param value: The new value (may be None) + @type value: basestring + @return: self + @rtype: L{Attribute} + """ + if isinstance(value, Text): + self.value = value + else: + self.value = Text(value) + return self + + def getValue(self, default=Text('')): + """ + Get the attributes value with optional default. + @param default: An optional value to be return when the + attribute's has not been set. 
+        @type default: basestring
+        @return: The attribute's value, or I{default}
+        @rtype: L{Text}
+        """
+        if self.hasText():
+            return self.value
+        else:
+            return default
+
+    def hasText(self):
+        """
+        Get whether the attribute has I{text}, i.e. a value that is not an
+        empty (zero length) string.
+        @return: True when the attribute has I{text}.
+        @rtype: boolean
+        """
+        return ( self.value is not None and len(self.value) )
+
+    def namespace(self):
+        """
+        Get the attribute's namespace. This may either be the namespace
+        defined by an optional prefix, or its parent's namespace.
+        @return: The attribute's namespace
+        @rtype: (I{prefix}, I{name})
+        """
+        if self.prefix is None:
+            return Namespace.default
+        else:
+            return self.resolvePrefix(self.prefix)
+
+    def resolvePrefix(self, prefix):
+        """
+        Resolve the specified prefix to a known namespace.
+        @param prefix: A declared prefix
+        @type prefix: basestring
+        @return: The namespace that has been mapped to I{prefix}
+        @rtype: (I{prefix}, I{name})
+        """
+        ns = Namespace.default
+        if self.parent is not None:
+            ns = self.parent.resolvePrefix(prefix)
+        return ns
+
+    def match(self, name=None, ns=None):
+        """
+        Match by (optional) name and/or (optional) namespace.
+        @param name: The optional attribute tag name.
+        @type name: str
+        @param ns: An optional namespace.
+        @type ns: (I{prefix}, I{name})
+        @return: True if matched.
+        @rtype: boolean
+        """
+        if name is None:
+            byname = True
+        else:
+            byname = ( self.name == name )
+        if ns is None:
+            byns = True
+        else:
+            byns = ( self.namespace()[1] == ns[1] )
+        return ( byname and byns )
+
+    def __eq__(self, rhs):
+        """ equals operator """
+        return rhs is not None and \
+            isinstance(rhs, Attribute) and \
+            self.prefix == rhs.prefix and \
+            self.name == rhs.name
+
+    def __repr__(self):
+        """ get a string representation """
+        return \
+            'attr (prefix=%s, name=%s, value=(%s))' %\
+            (self.prefix, self.name, self.value)
+
+    def __str__(self):
+        """ get an xml string representation """
+        return unicode(self).encode('utf-8')
+
+    def __unicode__(self):
+        """ get an xml string representation """
+        n = self.qname()
+        if self.hasText():
+            v = self.value.escape()
+        else:
+            v = self.value
+        return u'%s="%s"' % (n, v)
+
+##############################################################################
+#
+# OpenERP, Open Source Management Solution
+# Copyright (C) 2010
+# OpenERP Italian Community ()
+# Servabit srl
+# Agile Business Group sagl
+# Domsense srl
+# Albatos srl
+#
+# Copyright (C) 2011
+# Associazione OpenERP Italia ()
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see .
+#
+##############################################################################
+
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2017 Lenovo, Inc.
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see .
+#
+# Module to send conditional template to Lenovo Switches
+# Lenovo Networking
+#
+
+ANSIBLE_METADATA = {'metadata_version': '1.1',
+                    'status': ['preview'],
+                    'supported_by': 'community'}
+
+
+DOCUMENTATION = '''
+---
+module: cnos_conditional_template
+author: "Dave Kasberg (@dkasberg)"
+short_description: Manage switch configuration using templates based on condition on devices running Lenovo CNOS
+description:
+    - This module allows you to work with the running configuration of a switch. It provides a way to
+      execute a set of CNOS commands on a switch by evaluating the current running configuration and
+      executing the commands only if the specific settings have not already been configured.
+      The configuration source can be a set of commands or a template written in the Jinja2 templating language.
+      This module functions the same as the cnos_template module. The only exception is that the
+      following inventory variable can be specified
+      ["condition = "]
+      When this inventory variable is specified as the variable of a task, the template is executed for
+      the network element that matches the flag string. Usually, templates are used when commands are the
+      same across a group of network devices. When there is a requirement to skip the execution of the
+      template on one or more devices, it is recommended to use this module.
+      This module uses SSH to manage network device configuration.
+      For more information about this module from Lenovo and customizing its usage for your
+      use cases, please visit U(http://systemx.lenovofiles.com/help/index.jsp?topic=%2Fcom.lenovo.switchmgt.ansible.doc%2Fcnos_conditional_template.html)
+version_added: "2.3"
+extends_documentation_fragment: cnos
+options:
+    commandfile:
+        description:
+            - This specifies the path to the CNOS command file which needs to be applied. This usually
+              comes from the commands folder. Generally this file is the output of the variables applied
+              on a template file. So this command is preceded by a template module.
+              The command file must contain the Ansible keyword {{ inventory_hostname }} and the condition
+              flag in its filename to ensure that the command file is unique for each switch and condition.
+              If this is omitted, the command file will be overwritten during iteration. For example,
+              commandfile=./commands/clos_leaf_bgp_{{ inventory_hostname }}_LP21_commands.txt
+        required: true
+        default: Null
+    condition:
+        description:
+            - If you specify condition= in the inventory file against any device, the template
+              execution is done for that device in case it matches the flag setting for that task.
+        required: true
+        default: Null
+    flag:
+        description:
+            - If a task needs to be executed, you have to set the flag the same as it is specified in
+              the inventory for that device.
+        required: true
+        default: Null
+'''
+EXAMPLES = '''
+Tasks: The following are examples of using the module cnos_conditional_template. These are written in the main.yml file of the tasks directory.
+---
+- name: Applying CLI template on VLAG Tier1 Leaf Switch1
+  cnos_conditional_template:
+      host: "{{ inventory_hostname }}"
+      username: "{{ hostvars[inventory_hostname]['username'] }}"
+      password: "{{ hostvars[inventory_hostname]['password'] }}"
+      deviceType: "{{ hostvars[inventory_hostname]['deviceType'] }}"
+      outputfile: "./results/vlag_1tier_leaf_switch1_{{ inventory_hostname }}_output.txt"
+      condition: "{{ hostvars[inventory_hostname]['condition']}}"
+      flag: "leaf_switch1"
+      commandfile: "./commands/vlag_1tier_leaf_switch1_{{ inventory_hostname }}_commands.txt"
+      enablePassword: "anil"
+      stp_mode1: "disable"
+      port_range1: "17,18,29,30"
+      portchannel_interface_number1: 1001
+      portchannel_mode1: active
+      slot_chassis_number1: 1/48
+      switchport_mode1: trunk
+'''
+RETURN = '''
+msg:
+  description: Success or failure message
+  returned: always
+  type: string
+  sample: "Template Applied."
+'''
+
+import sys
+import paramiko
+import time
+import argparse
+import socket
+import array
+import json
+import re
+try:
+    from ansible.module_utils import cnos
+    HAS_LIB = True
+except ImportError:
+    HAS_LIB = False
+from ansible.module_utils.basic import AnsibleModule
+from collections import defaultdict
+
+
+def main():
+    module = AnsibleModule(
+        argument_spec=dict(
+            commandfile=dict(required=True),
+            outputfile=dict(required=True),
+            condition=dict(required=True),
+            flag=dict(required=True),
+            host=dict(required=True),
+            deviceType=dict(required=True),
+            username=dict(required=True),
+            password=dict(required=True, no_log=True),
+            enablePassword=dict(required=False, no_log=True),),
+        supports_check_mode=False)
+
+    username = module.params['username']
+    password = module.params['password']
+    enablePassword = module.params['enablePassword']
+    condition = module.params['condition']
+    flag = module.params['flag']
+    commandfile = module.params['commandfile']
+    deviceType = module.params['deviceType']
+    outputfile = module.params['outputfile']
+    hostIP = module.params['host']
+
+    output = ""
+
+    # Here comes the logic against which a template is
+    # conditionally executed for the right network element.
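+    # Illustrative (hypothetical) values: if the inventory host vars set
+    #   condition = "leaf_switch1"
+    # and this task sets
+    #   flag = "leaf_switch1"
+    # then condition == flag and the template below is applied; any other
+    # condition value makes the module skip this device.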
+    if (condition != flag):
+        # Nothing is sent to the device when the condition does not match,
+        # so report changed=False for the skip.
+        module.exit_json(changed=False, msg="Template Skipped for this value")
+
+    # Create instance of SSHClient object
+    remote_conn_pre = paramiko.SSHClient()
+
+    # Automatically add untrusted hosts (make sure okay for security policy in your environment)
+    remote_conn_pre.set_missing_host_key_policy(paramiko.AutoAddPolicy())
+
+    # initiate SSH connection with the switch
+    remote_conn_pre.connect(hostIP, username=username, password=password)
+    time.sleep(2)
+
+    # Use invoke_shell to establish an 'interactive session'
+    remote_conn = remote_conn_pre.invoke_shell()
+    time.sleep(2)
+
+    # Enable and enter configure terminal then send command
+    output = output + cnos.waitForDeviceResponse("\n", ">", 2, remote_conn)
+
+    output = output + cnos.enterEnableModeForDevice(enablePassword, 3, remote_conn)
+
+    # Make terminal length = 0
+    output = output + cnos.waitForDeviceResponse("terminal length 0\n", "#", 2, remote_conn)
+
+    # Go to config mode
+    output = output + cnos.waitForDeviceResponse("configure d\n", "(config)#", 2, remote_conn)
+
+    # Send commands one by one
+    with open(commandfile, "r") as f:
+        for line in f:
+            # Omit the comment lines in template file
+            if not line.startswith("#"):
+                # cnos.debugOutput(line)
+                command = line
+                if not line.endswith("\n"):
+                    command = command + "\n"
+                response = cnos.waitForDeviceResponse(command, "#", 2, remote_conn)
+                errorMsg = cnos.checkOutputForError(response)
+                output = output + response
+                if(errorMsg is not None):
+                    break
+
+    # To cater to the Multi case
+    # Write to memory
+    output = output + cnos.waitForDeviceResponse("save\n", "#", 3, remote_conn)
+
+    # Write output to file
+    file = open(outputfile, "a")
+    file.write(output)
+    file.close()
+
+    # Logic to check when changes occur or not
+    errorMsg = cnos.checkOutputForError(output)
+    if(errorMsg is None):
+        module.exit_json(changed=True, msg="Template Applied")
+    else:
+        module.fail_json(msg=errorMsg)
+
+if __name__ == '__main__':
+    main()
+
+"""
+BYOND Base Packet Class
+
+Created with help from tobba.
+"""
+import logging, struct
+
+class NetTypes:
+    BYTE = 0
+    SHORT = 1
+    LONG = 2
+    STRING = 3
+
+    # Minimum wire size in bytes per type; STRING has no fixed minimum.
+    min_lens = [1, 2, 4, None]
+
+    @staticmethod
+    def GetMinLength(t):
+        return NetTypes.min_lens[t]
+
+PacketTypes = {}
+
+class Packet:
+    ID = 0
+    Name = ''
+    def __init__(self):
+        self.__field_data = {}
+        self.header = {}
+        self.min_length = 0
+
+        self.length = 0
+        self.sequence = 0
+
+    def LinkField(self, datatype, propname, **kwargs):
+        '''
+        Associate a part of a packet to a field in this class
+        '''
+        kwargs['type'] = datatype
+        kwargs['name'] = propname
+        self.__field_data[len(self.__field_data)] = kwargs
+        # GetMinLength returns None for STRING; only fixed-size types
+        # contribute to the minimum packet length.
+        min_len = NetTypes.GetMinLength(datatype)
+        if min_len is not None:
+            self.min_length += min_len
+
+    def Deserialize(self, msg):
+        if len(msg) < self.min_length:
+            logging.error('Received truncated packet {0}: min_length={1}, msg.len={2}'.format(self.Name, self.min_length, len(msg)))
+
+        # TIME FOR ASSUMPTIONS!
+        pos = 0
+        for idx, fieldinfo in self.__field_data.items():
+            dtype = fieldinfo['type']
+            propname = fieldinfo['name']
+            unpacked = None
+            if dtype == NetTypes.BYTE:
+                dat = msg[pos:pos + 1]
+                unpacked = struct.unpack('B', dat)[0]  # Unsigned char
+                pos += 1
+            elif dtype == NetTypes.SHORT:
+                dat = msg[pos:pos + 2]
+                unpacked = struct.unpack('<h', dat)[0]  # short (maybe H?)
+                pos += 2
+            elif dtype == NetTypes.LONG:
+                dat = msg[pos:pos + 4]
+                unpacked = struct.unpack('<l', dat)[0]  # long (maybe L?)
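+                # Note: '<h'/'<l' force little-endian standard sizes (2/4
+                # bytes), matching the fixed-width slices above; native 'l'
+                # can be 8 bytes on 64-bit platforms. Little-endian wire
+                # order is an assumption (x86 hosts). unpack() returns a
+                # 1-tuple, hence the [0].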
+                pos += 4
+            elif dtype == NetTypes.STRING:
+                dat = msg[pos:]
+                # Keep everything up to (but not including) the first NUL.
+                unpacked = dat.split('\x00', 1)[0]
+                pos += len(unpacked) + 1  # NUL byte stripped
+            else:
+                logging.error('Unable to unpack {0} packet at field {1}: Unknown datatype {2}'.format(self.Name, idx, dtype))
+                logging.error('Packet __field_data: %r' % (self.__field_data,))
+                raise SystemError()
+            setattr(self, propname, unpacked)
+
+    def Serialize(self):
+        msg = b''
+        for idx, fieldinfo in self.__field_data.items():
+            dtype = fieldinfo['type']
+            dat = getattr(self, fieldinfo['name'])
+            if dtype == NetTypes.BYTE:
+                msg += struct.pack('B', dat)  # Unsigned char
+            elif dtype == NetTypes.SHORT:
+                msg += struct.pack('<h', dat)  # short (maybe H?)
+            elif dtype == NetTypes.LONG:
+                msg += struct.pack('<l', dat)  # long (maybe L?)
+            elif dtype == NetTypes.STRING:
+                msg += dat + b'\x00'
+            else:
+                logging.error('Unable to pack {0} packet at field {1}: Unknown datatype {2}'.format(self.Name, idx, dtype))
+                logging.error('Packet __field_data: %r' % (self.__field_data,))
+                raise SystemError()
+        return msg
+
+##########################################################################
+#
+# Copyright (c) 2008, Image Engine Design Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# * Neither the name of Image Engine Design nor the names of any
+# other contributors to this software may be used to endorse or
+# promote products derived from this software without specific prior
+# written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
+# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+# +########################################################################## + +import os +import unittest +import IECore + +class MathTest( unittest.TestCase ) : + + def testSign( self ) : + + self.assertEqual( IECore.sign( 0 ), 0 ) + self.assertEqual( IECore.sign( 1 ), 1 ) + self.assertEqual( IECore.sign( 1000 ), 1 ) + self.assertEqual( IECore.sign( -1 ), -1 ) + self.assertEqual( IECore.sign( -1000 ), -1 ) + + self.assertEqual( IECore.sign( 0.0 ), 0 ) + self.assertEqual( IECore.sign( 0.1 ), 1 ) + self.assertEqual( IECore.sign( -0.1 ), -1 ) + +if __name__ == "__main__": + unittest.main() + +# -*- coding: utf-8 -*- +import datetime +from south.db import db +from south.v2 import SchemaMigration +from django.db import models + + +class Migration(SchemaMigration): + + def forwards(self, orm): + # Adding field 'EconomicResourceType.price_per_unit' + db.add_column('valueaccounting_economicresourcetype', 'price_per_unit', + self.gf('django.db.models.fields.DecimalField')(default='0.00', max_digits=8, decimal_places=2), + keep_default=False) + + # Adding field 'EconomicResourceType.unit_of_price' + db.add_column('valueaccounting_economicresourcetype', 'unit_of_price', + self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='resource_type_price_units', null=True, to=orm['valueaccounting.Unit']), + keep_default=False) + + + def backwards(self, orm): + # Deleting field 'EconomicResourceType.price_per_unit' + db.delete_column('valueaccounting_economicresourcetype', 'price_per_unit') + + # Deleting field 'EconomicResourceType.unit_of_price' + db.delete_column('valueaccounting_economicresourcetype', 'unit_of_price_id') + + + models = { + 'auth.group': { + 'Meta': {'object_name': 'Group'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), + 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) + }, + 'auth.permission': { + 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, + 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) + }, + 'auth.user': { + 'Meta': {'object_name': 'User'}, + 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), + 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'last_name': 
('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), + 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), + 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) + }, + 'contenttypes.contenttype': { + 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, + 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) + }, + 'valueaccounting.accountingreference': { + 'Meta': {'object_name': 'AccountingReference'}, + 'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}) + }, + 'valueaccounting.agentassociation': { + 'Meta': {'ordering': "('is_associate',)", 'object_name': 'AgentAssociation'}, + 'association_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'associations'", 'to': "orm['valueaccounting.AgentAssociationType']"}), + 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'has_associate': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'has_associates'", 'to': "orm['valueaccounting.EconomicAgent']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_associate': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'is_associate_of'", 'to': "orm['valueaccounting.EconomicAgent']"}), + 'state': ('django.db.models.fields.CharField', [], {'default': "'active'", 'max_length': '12'}) + }, + 'valueaccounting.agentassociationtype': { + 'Meta': {'object_name': 'AgentAssociationType'}, + 'association_behavior': ('django.db.models.fields.CharField', [], {'max_length': '12', 'null': 'True', 'blank': 'True'}), + 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'identifier': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '12'}), + 'inverse_label': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True'}), + 'label': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}), + 'plural_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '128'}) + }, + 'valueaccounting.agentresourcerole': { + 'Meta': {'object_name': 'AgentResourceRole'}, + 'agent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'agent_resource_roles'", 'to': "orm['valueaccounting.EconomicAgent']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_account': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'is_contact': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'owner_percentage': ('django.db.models.fields.IntegerField', [], {'null': 
'True'}), + 'resource': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'agent_resource_roles'", 'to': "orm['valueaccounting.EconomicResource']"}), + 'role': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'agent_resource_roles'", 'to': "orm['valueaccounting.AgentResourceRoleType']"}) + }, + 'valueaccounting.agentresourceroletype': { + 'Meta': {'object_name': 'AgentResourceRoleType'}, + 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_owner': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}) + }, + 'valueaccounting.agentresourcetype': { + 'Meta': {'object_name': 'AgentResourceType'}, + 'agent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'resource_types'", 'to': "orm['valueaccounting.EconomicAgent']"}), + 'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'arts_changed'", 'null': 'True', 'to': "orm['auth.User']"}), + 'changed_date': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}), + 'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'arts_created'", 'null': 'True', 'to': "orm['auth.User']"}), + 'created_date': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}), + 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'event_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'agent_resource_types'", 'to': "orm['valueaccounting.EventType']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'lead_time': ('django.db.models.fields.IntegerField', [], {'default': '0'}), + 'resource_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'agents'", 'to': "orm['valueaccounting.EconomicResourceType']"}), + 'score': ('django.db.models.fields.DecimalField', [], {'default': "'0.0'", 'max_digits': '8', 'decimal_places': '2'}), + 'unit_of_value': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'agent_resource_value_units'", 'null': 'True', 'to': "orm['valueaccounting.Unit']"}), + 'value': ('django.db.models.fields.DecimalField', [], {'default': "'0.0'", 'max_digits': '8', 'decimal_places': '2'}), + 'value_per_unit': ('django.db.models.fields.DecimalField', [], {'default': "'0.0'", 'max_digits': '8', 'decimal_places': '2'}) + }, + 'valueaccounting.agenttype': { + 'Meta': {'ordering': "('name',)", 'object_name': 'AgentType'}, + 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_context': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}), + 'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'sub-agents'", 'null': 'True', 'to': "orm['valueaccounting.AgentType']"}), + 'party_type': ('django.db.models.fields.CharField', [], {'default': "'individual'", 'max_length': '12'}) + }, + 'valueaccounting.agentuser': { + 'Meta': {'object_name': 'AgentUser'}, + 'agent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': 
"'users'", 'to': "orm['valueaccounting.EconomicAgent']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'agent'", 'unique': 'True', 'to': "orm['auth.User']"}) + }, + 'valueaccounting.cachedeventsummary': { + 'Meta': {'ordering': "('agent', 'context_agent', 'resource_type')", 'object_name': 'CachedEventSummary'}, + 'agent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'cached_events'", 'null': 'True', 'to': "orm['valueaccounting.EconomicAgent']"}), + 'context_agent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'context_cached_events'", 'null': 'True', 'to': "orm['valueaccounting.EconomicAgent']"}), + 'event_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'cached_events'", 'to': "orm['valueaccounting.EventType']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'importance': ('django.db.models.fields.DecimalField', [], {'default': "'1'", 'max_digits': '3', 'decimal_places': '0'}), + 'quantity': ('django.db.models.fields.DecimalField', [], {'default': "'0.0'", 'max_digits': '8', 'decimal_places': '2'}), + 'reputation': ('django.db.models.fields.DecimalField', [], {'default': "'1.00'", 'max_digits': '8', 'decimal_places': '2'}), + 'resource_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'cached_events'", 'null': 'True', 'to': "orm['valueaccounting.EconomicResourceType']"}), + 'resource_type_rate': ('django.db.models.fields.DecimalField', [], {'default': "'1.0'", 'max_digits': '8', 'decimal_places': '2'}), + 'value': ('django.db.models.fields.DecimalField', [], {'default': "'0.0'", 'max_digits': '8', 'decimal_places': '2'}) + }, + 'valueaccounting.claim': { + 'Meta': {'ordering': "('claim_date',)", 'object_name': 'Claim'}, + 'against_agent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'claims against'", 'null': 'True', 'to': "orm['valueaccounting.EconomicAgent']"}), + 'claim_creation_equation': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'claim_date': ('django.db.models.fields.DateField', [], {}), + 'context_agent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'claims'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['valueaccounting.EconomicAgent']"}), + 'has_agent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'has_claims'", 'null': 'True', 'to': "orm['valueaccounting.EconomicAgent']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}), + 'unit_of_value': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'claim_value_units'", 'null': 'True', 'to': "orm['valueaccounting.Unit']"}), + 'value': ('django.db.models.fields.DecimalField', [], {'default': "'0.0'", 'max_digits': '8', 'decimal_places': '2'}), + 'value_equation_bucket_rule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'claims'", 'null': 'True', 'to': "orm['valueaccounting.ValueEquationBucketRule']"}) + }, + 'valueaccounting.claimevent': { + 'Meta': {'ordering': "('claim_event_date',)", 'object_name': 'ClaimEvent'}, + 'claim': ('django.db.models.fields.related.ForeignKey', [], {'related_name': 
"'claim_events'", 'to': "orm['valueaccounting.Claim']"}), + 'claim_event_date': ('django.db.models.fields.DateField', [], {'default': 'datetime.date.today'}), + 'event': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'claim_events'", 'null': 'True', 'to': "orm['valueaccounting.EconomicEvent']"}), + 'event_effect': ('django.db.models.fields.CharField', [], {'max_length': '12'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'unit_of_value': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'claim_event_value_units'", 'null': 'True', 'to': "orm['valueaccounting.Unit']"}), + 'value': ('django.db.models.fields.DecimalField', [], {'max_digits': '8', 'decimal_places': '2'}) + }, + 'valueaccounting.commitment': { + 'Meta': {'ordering': "('due_date',)", 'object_name': 'Commitment'}, + 'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'commitments_changed'", 'null': 'True', 'to': "orm['auth.User']"}), + 'changed_date': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}), + 'commitment_date': ('django.db.models.fields.DateField', [], {'default': 'datetime.date.today'}), + 'context_agent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'commitments'", 'null': 'True', 'to': "orm['valueaccounting.EconomicAgent']"}), + 'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'commitments_created'", 'null': 'True', 'to': "orm['auth.User']"}), + 'created_date': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}), + 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'due_date': ('django.db.models.fields.DateField', [], {}), + 'event_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'commitments'", 'to': "orm['valueaccounting.EventType']"}), + 'exchange': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'commitments'", 'null': 'True', 'to': "orm['valueaccounting.Exchange']"}), + 'finished': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'from_agent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'given_commitments'", 'null': 'True', 'to': "orm['valueaccounting.EconomicAgent']"}), + 'from_agent_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'given_commitments'", 'null': 'True', 'to': "orm['valueaccounting.AgentType']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'independent_demand': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'dependent_commitments'", 'null': 'True', 'to': "orm['valueaccounting.Order']"}), + 'order': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'commitments'", 'null': 'True', 'to': "orm['valueaccounting.Order']"}), + 'order_item': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'stream_commitments'", 'null': 'True', 'to': "orm['valueaccounting.Commitment']"}), + 'process': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'commitments'", 'null': 'True', 'to': "orm['valueaccounting.Process']"}), + 'quality': ('django.db.models.fields.DecimalField', [], {'default': 
"'0'", 'max_digits': '3', 'decimal_places': '0'}), + 'quantity': ('django.db.models.fields.DecimalField', [], {'max_digits': '8', 'decimal_places': '2'}), + 'resource': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'commitments'", 'null': 'True', 'to': "orm['valueaccounting.EconomicResource']"}), + 'resource_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'commitments'", 'null': 'True', 'to': "orm['valueaccounting.EconomicResourceType']"}), + 'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}), + 'stage': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'commitments_at_stage'", 'null': 'True', 'to': "orm['valueaccounting.ProcessType']"}), + 'start_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}), + 'state': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'commitments_at_state'", 'null': 'True', 'to': "orm['valueaccounting.ResourceState']"}), + 'to_agent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'taken_commitments'", 'null': 'True', 'to': "orm['valueaccounting.EconomicAgent']"}), + 'unit_of_quantity': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'commitment_qty_units'", 'null': 'True', 'to': "orm['valueaccounting.Unit']"}), + 'unit_of_value': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'commitment_value_units'", 'null': 'True', 'to': "orm['valueaccounting.Unit']"}), + 'url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), + 'value': ('django.db.models.fields.DecimalField', [], {'default': "'0.0'", 'max_digits': '8', 'decimal_places': '2'}) + }, + 'valueaccounting.compensation': { + 'Meta': {'ordering': "('compensation_date',)", 'object_name': 'Compensation'}, + 'compensating_event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'compensations'", 'to': "orm['valueaccounting.EconomicEvent']"}), + 'compensating_value': ('django.db.models.fields.DecimalField', [], {'max_digits': '8', 'decimal_places': '2'}), + 'compensation_date': ('django.db.models.fields.DateField', [], {'default': 'datetime.date.today'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'initiating_event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'initiated_compensations'", 'to': "orm['valueaccounting.EconomicEvent']"}) + }, + 'valueaccounting.distributionvalueequation': { + 'Meta': {'object_name': 'DistributionValueEquation'}, + 'distribution_date': ('django.db.models.fields.DateField', [], {}), + 'exchange': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'value equation'", 'null': 'True', 'to': "orm['valueaccounting.Exchange']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'value_equation_content': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'value_equation_link': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'distributions'", 'null': 'True', 'to': "orm['valueaccounting.ValueEquation']"}) + }, + 'valueaccounting.economicagent': { + 'Meta': {'ordering': "('nick',)", 'object_name': 'EconomicAgent'}, + 'address': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), + 'agent_type': 
('django.db.models.fields.related.ForeignKey', [], {'related_name': "'agents'", 'to': "orm['valueaccounting.AgentType']"}), + 'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'agents_changed'", 'null': 'True', 'to': "orm['auth.User']"}), + 'changed_date': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}), + 'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'agents_created'", 'null': 'True', 'to': "orm['auth.User']"}), + 'created_date': ('django.db.models.fields.DateField', [], {'default': 'datetime.date.today'}), + 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'email': ('django.db.models.fields.EmailField', [], {'max_length': '96', 'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'latitude': ('django.db.models.fields.FloatField', [], {'default': '0.0', 'null': 'True', 'blank': 'True'}), + 'longitude': ('django.db.models.fields.FloatField', [], {'default': '0.0', 'null': 'True', 'blank': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}), + 'nick': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}), + 'phone_primary': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}), + 'phone_secondary': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}), + 'photo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}), + 'photo_url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), + 'reputation': ('django.db.models.fields.DecimalField', [], {'default': "'0.00'", 'max_digits': '8', 'decimal_places': '2'}), + 'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}), + 'unit_of_claim_value': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'agents'", 'null': 'True', 'to': "orm['valueaccounting.Unit']"}), + 'url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}) + }, + 'valueaccounting.economicevent': { + 'Meta': {'ordering': "('-event_date',)", 'object_name': 'EconomicEvent'}, + 'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'events_changed'", 'null': 'True', 'to': "orm['auth.User']"}), + 'commitment': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'fulfillment_events'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['valueaccounting.Commitment']"}), + 'context_agent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'events'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['valueaccounting.EconomicAgent']"}), + 'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'events_created'", 'null': 'True', 'to': "orm['auth.User']"}), + 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'event_date': ('django.db.models.fields.DateField', [], {}), + 'event_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'events'", 'to': "orm['valueaccounting.EventType']"}), + 'exchange': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'events'", 
'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['valueaccounting.Exchange']"}), + 'from_agent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'given_events'", 'null': 'True', 'to': "orm['valueaccounting.EconomicAgent']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_contribution': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'process': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'events'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['valueaccounting.Process']"}), + 'quality': ('django.db.models.fields.DecimalField', [], {'default': "'0'", 'max_digits': '3', 'decimal_places': '0'}), + 'quantity': ('django.db.models.fields.DecimalField', [], {'max_digits': '8', 'decimal_places': '2'}), + 'resource': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'events'", 'null': 'True', 'to': "orm['valueaccounting.EconomicResource']"}), + 'resource_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'events'", 'to': "orm['valueaccounting.EconomicResourceType']"}), + 'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}), + 'to_agent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'taken_events'", 'null': 'True', 'to': "orm['valueaccounting.EconomicAgent']"}), + 'unit_of_quantity': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'event_qty_units'", 'null': 'True', 'to': "orm['valueaccounting.Unit']"}), + 'unit_of_value': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'event_value_units'", 'null': 'True', 'to': "orm['valueaccounting.Unit']"}), + 'url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), + 'value': ('django.db.models.fields.DecimalField', [], {'default': "'0.0'", 'max_digits': '8', 'decimal_places': '2'}) + }, + 'valueaccounting.economicresource': { + 'Meta': {'ordering': "('resource_type', 'identifier')", 'object_name': 'EconomicResource'}, + 'access_rules': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'author': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'authored_resources'", 'null': 'True', 'to': "orm['valueaccounting.EconomicAgent']"}), + 'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'resources_changed'", 'null': 'True', 'to': "orm['auth.User']"}), + 'changed_date': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}), + 'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'resources_created'", 'null': 'True', 'to': "orm['auth.User']"}), + 'created_date': ('django.db.models.fields.DateField', [], {'default': 'datetime.date.today'}), + 'current_location': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'resources_at_location'", 'null': 'True', 'to': "orm['valueaccounting.Location']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'identifier': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}), + 'independent_demand': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'dependent_resources'", 'null': 'True', 'to': 
"orm['valueaccounting.Order']"}), + 'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'order_item': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'stream_resources'", 'null': 'True', 'to': "orm['valueaccounting.Commitment']"}), + 'photo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}), + 'photo_url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), + 'quality': ('django.db.models.fields.DecimalField', [], {'default': "'0'", 'null': 'True', 'max_digits': '3', 'decimal_places': '0', 'blank': 'True'}), + 'quantity': ('django.db.models.fields.DecimalField', [], {'default': "'1.00'", 'max_digits': '8', 'decimal_places': '2'}), + 'resource_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'resources'", 'to': "orm['valueaccounting.EconomicResourceType']"}), + 'stage': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'resources_at_stage'", 'null': 'True', 'to': "orm['valueaccounting.ProcessType']"}), + 'state': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'resources_at_state'", 'null': 'True', 'to': "orm['valueaccounting.ResourceState']"}), + 'url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), + 'value_per_unit': ('django.db.models.fields.DecimalField', [], {'default': "'0.00'", 'max_digits': '8', 'decimal_places': '2'}), + 'value_per_unit_of_use': ('django.db.models.fields.DecimalField', [], {'default': "'0.00'", 'max_digits': '8', 'decimal_places': '2'}) + }, + 'valueaccounting.economicresourcetype': { + 'Meta': {'ordering': "('name',)", 'object_name': 'EconomicResourceType'}, + 'accounting_reference': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'resource_types'", 'null': 'True', 'to': "orm['valueaccounting.AccountingReference']"}), + 'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'resource_types_changed'", 'null': 'True', 'to': "orm['auth.User']"}), + 'changed_date': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}), + 'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'resource_types_created'", 'null': 'True', 'to': "orm['auth.User']"}), + 'created_date': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}), + 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'inventory_rule': ('django.db.models.fields.CharField', [], {'default': "'yes'", 'max_length': '5'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}), + 'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['valueaccounting.EconomicResourceType']"}), + 'photo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}), + 'photo_url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), + 'price_per_unit': ('django.db.models.fields.DecimalField', [], {'default': "'0.00'", 'max_digits': '8', 'decimal_places': '2'}), + 'resource_class': 
('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'resource_types'", 'null': 'True', 'to': "orm['valueaccounting.ResourceClass']"}), + 'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}), + 'substitutable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'unit': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'resource_units'", 'null': 'True', 'to': "orm['valueaccounting.Unit']"}), + 'unit_of_price': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'resource_type_price_units'", 'null': 'True', 'to': "orm['valueaccounting.Unit']"}), + 'unit_of_use': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'units_of_use'", 'null': 'True', 'to': "orm['valueaccounting.Unit']"}), + 'unit_of_value': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'resource_type_value_units'", 'null': 'True', 'to': "orm['valueaccounting.Unit']"}), + 'url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), + 'value_per_unit': ('django.db.models.fields.DecimalField', [], {'default': "'0.00'", 'max_digits': '8', 'decimal_places': '2'}), + 'value_per_unit_of_use': ('django.db.models.fields.DecimalField', [], {'default': "'0.00'", 'max_digits': '8', 'decimal_places': '2'}) + }, + 'valueaccounting.eventtype': { + 'Meta': {'ordering': "('label',)", 'object_name': 'EventType'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'inverse_label': ('django.db.models.fields.CharField', [], {'max_length': '40', 'blank': 'True'}), + 'label': ('django.db.models.fields.CharField', [], {'max_length': '32'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}), + 'related_to': ('django.db.models.fields.CharField', [], {'default': "'process'", 'max_length': '12'}), + 'relationship': ('django.db.models.fields.CharField', [], {'default': "'in'", 'max_length': '12'}), + 'resource_effect': ('django.db.models.fields.CharField', [], {'max_length': '12'}), + 'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}), + 'unit_type': ('django.db.models.fields.CharField', [], {'max_length': '12', 'blank': 'True'}) + }, + 'valueaccounting.exchange': { + 'Meta': {'ordering': "('-start_date',)", 'object_name': 'Exchange'}, + 'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'exchanges_changed'", 'null': 'True', 'to': "orm['auth.User']"}), + 'changed_date': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}), + 'context_agent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'exchanges'", 'null': 'True', 'to': "orm['valueaccounting.EconomicAgent']"}), + 'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'exchanges_created'", 'null': 'True', 'to': "orm['auth.User']"}), + 'created_date': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}), + 'customer': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'exchanges_as_customer'", 'null': 'True', 'to': "orm['valueaccounting.EconomicAgent']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}), + 'notes': 
('django.db.models.fields.TextField', [], {'blank': 'True'}), + 'order': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'exchanges'", 'null': 'True', 'to': "orm['valueaccounting.Order']"}), + 'process_pattern': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'exchanges'", 'null': 'True', 'to': "orm['valueaccounting.ProcessPattern']"}), + 'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}), + 'start_date': ('django.db.models.fields.DateField', [], {}), + 'supplier': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'exchanges_as_supplier'", 'null': 'True', 'to': "orm['valueaccounting.EconomicAgent']"}), + 'url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), + 'use_case': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'exchanges'", 'null': 'True', 'to': "orm['valueaccounting.UseCase']"}) + }, + 'valueaccounting.facet': { + 'Meta': {'ordering': "('name',)", 'object_name': 'Facet'}, + 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}) + }, + 'valueaccounting.facetvalue': { + 'Meta': {'ordering': "('facet', 'value')", 'unique_together': "(('facet', 'value'),)", 'object_name': 'FacetValue'}, + 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'facet': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'values'", 'to': "orm['valueaccounting.Facet']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'value': ('django.db.models.fields.CharField', [], {'max_length': '32'}) + }, + 'valueaccounting.feature': { + 'Meta': {'ordering': "('name',)", 'object_name': 'Feature'}, + 'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'features_changed'", 'null': 'True', 'to': "orm['auth.User']"}), + 'changed_date': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}), + 'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'features_created'", 'null': 'True', 'to': "orm['auth.User']"}), + 'created_date': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}), + 'event_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'features'", 'to': "orm['valueaccounting.EventType']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}), + 'process_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'features'", 'null': 'True', 'to': "orm['valueaccounting.ProcessType']"}), + 'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'features'", 'to': "orm['valueaccounting.EconomicResourceType']"}), + 'quantity': ('django.db.models.fields.DecimalField', [], {'default': "'0.00'", 'max_digits': '8', 'decimal_places': '2'}), + 'unit_of_quantity': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'feature_units'", 'null': 'True', 'to': "orm['valueaccounting.Unit']"}) + }, + 'valueaccounting.help': { + 'Meta': 
{'ordering': "('page',)", 'object_name': 'Help'}, + 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'page': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '16'}) + }, + 'valueaccounting.homepagelayout': { + 'Meta': {'object_name': 'HomePageLayout'}, + 'banner': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'creations_panel_headline': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'footer': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'needs_panel_headline': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'panel_1': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'panel_2': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'panel_3': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'use_creations_panel': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'use_needs_panel': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'use_work_panel': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'work_panel_headline': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}) + }, + 'valueaccounting.location': { + 'Meta': {'object_name': 'Location'}, + 'address': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), + 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'latitude': ('django.db.models.fields.FloatField', [], {'default': '0.0', 'null': 'True', 'blank': 'True'}), + 'longitude': ('django.db.models.fields.FloatField', [], {'default': '0.0', 'null': 'True', 'blank': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}) + }, + 'valueaccounting.option': { + 'Meta': {'ordering': "('component',)", 'object_name': 'Option'}, + 'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'options_changed'", 'null': 'True', 'to': "orm['auth.User']"}), + 'changed_date': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}), + 'component': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'options'", 'to': "orm['valueaccounting.EconomicResourceType']"}), + 'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'options_created'", 'null': 'True', 'to': "orm['auth.User']"}), + 'created_date': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}), + 'feature': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'options'", 'to': "orm['valueaccounting.Feature']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}) + }, + 'valueaccounting.order': { + 'Meta': {'ordering': "('due_date',)", 'object_name': 'Order'}, + 'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'orders_changed'", 'null': 'True', 'to': "orm['auth.User']"}), + 'changed_date': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 
'null': 'True', 'blank': 'True'}), + 'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'orders_created'", 'null': 'True', 'to': "orm['auth.User']"}), + 'created_date': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}), + 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'due_date': ('django.db.models.fields.DateField', [], {}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), + 'order_date': ('django.db.models.fields.DateField', [], {'default': 'datetime.date.today'}), + 'order_type': ('django.db.models.fields.CharField', [], {'default': "'customer'", 'max_length': '12'}), + 'provider': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'sales_orders'", 'null': 'True', 'to': "orm['valueaccounting.EconomicAgent']"}), + 'receiver': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'purchase_orders'", 'null': 'True', 'to': "orm['valueaccounting.EconomicAgent']"}) + }, + 'valueaccounting.patternfacetvalue': { + 'Meta': {'ordering': "('pattern', 'event_type', 'facet_value')", 'unique_together': "(('pattern', 'facet_value', 'event_type'),)", 'object_name': 'PatternFacetValue'}, + 'event_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'patterns'", 'to': "orm['valueaccounting.EventType']"}), + 'facet_value': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'patterns'", 'to': "orm['valueaccounting.FacetValue']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'pattern': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'facets'", 'to': "orm['valueaccounting.ProcessPattern']"}) + }, + 'valueaccounting.patternusecase': { + 'Meta': {'object_name': 'PatternUseCase'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'pattern': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'use_cases'", 'to': "orm['valueaccounting.ProcessPattern']"}), + 'use_case': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'patterns'", 'null': 'True', 'to': "orm['valueaccounting.UseCase']"}) + }, + 'valueaccounting.process': { + 'Meta': {'ordering': "('-end_date',)", 'object_name': 'Process'}, + 'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'processes_changed'", 'null': 'True', 'to': "orm['auth.User']"}), + 'changed_date': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}), + 'context_agent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'processes'", 'null': 'True', 'to': "orm['valueaccounting.EconomicAgent']"}), + 'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'processes_created'", 'null': 'True', 'to': "orm['auth.User']"}), + 'created_date': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}), + 'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}), + 'finished': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': 
('django.db.models.fields.CharField', [], {'max_length': '128'}), + 'notes': ('django.db.models.fields.TextField', [], {'blank': 'True'}), + 'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'sub_processes'", 'null': 'True', 'to': "orm['valueaccounting.Process']"}), + 'process_pattern': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'processes'", 'null': 'True', 'to': "orm['valueaccounting.ProcessPattern']"}), + 'process_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'processes'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['valueaccounting.ProcessType']"}), + 'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}), + 'start_date': ('django.db.models.fields.DateField', [], {}), + 'started': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}), + 'url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}) + }, + 'valueaccounting.processpattern': { + 'Meta': {'ordering': "('name',)", 'object_name': 'ProcessPattern'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '32'}) + }, + 'valueaccounting.processtype': { + 'Meta': {'ordering': "('name',)", 'object_name': 'ProcessType'}, + 'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'process_types_changed'", 'null': 'True', 'to': "orm['auth.User']"}), + 'changed_date': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}), + 'context_agent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'process_types'", 'null': 'True', 'to': "orm['valueaccounting.EconomicAgent']"}), + 'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'process_types_created'", 'null': 'True', 'to': "orm['auth.User']"}), + 'created_date': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}), + 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'estimated_duration': ('django.db.models.fields.IntegerField', [], {'default': '0'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}), + 'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'sub_process_types'", 'null': 'True', 'to': "orm['valueaccounting.ProcessType']"}), + 'process_pattern': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'process_types'", 'null': 'True', 'to': "orm['valueaccounting.ProcessPattern']"}), + 'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}), + 'url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}) + }, + 'valueaccounting.processtyperesourcetype': { + 'Meta': {'ordering': "('resource_type',)", 'object_name': 'ProcessTypeResourceType'}, + 'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'ptrts_changed'", 'null': 'True', 'to': "orm['auth.User']"}), + 'changed_date': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}), + 'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 
'related_name': "'ptrts_created'", 'null': 'True', 'to': "orm['auth.User']"}), + 'created_date': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}), + 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'event_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'process_resource_types'", 'to': "orm['valueaccounting.EventType']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'process_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'resource_types'", 'to': "orm['valueaccounting.ProcessType']"}), + 'quantity': ('django.db.models.fields.DecimalField', [], {'default': "'0.00'", 'max_digits': '8', 'decimal_places': '2'}), + 'resource_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'process_types'", 'to': "orm['valueaccounting.EconomicResourceType']"}), + 'stage': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'commitmenttypes_at_stage'", 'null': 'True', 'to': "orm['valueaccounting.ProcessType']"}), + 'state': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'commitmenttypes_at_state'", 'null': 'True', 'to': "orm['valueaccounting.ResourceState']"}), + 'unit_of_quantity': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'process_resource_qty_units'", 'null': 'True', 'to': "orm['valueaccounting.Unit']"}) + }, + 'valueaccounting.reciprocity': { + 'Meta': {'ordering': "('reciprocity_date',)", 'object_name': 'Reciprocity'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'initiating_commitment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'initiated_commitments'", 'to': "orm['valueaccounting.Commitment']"}), + 'reciprocal_commitment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reciprocal_commitments'", 'to': "orm['valueaccounting.Commitment']"}), + 'reciprocity_date': ('django.db.models.fields.DateField', [], {'default': 'datetime.date.today'}) + }, + 'valueaccounting.resourceclass': { + 'Meta': {'object_name': 'ResourceClass'}, + 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}) + }, + 'valueaccounting.resourcestate': { + 'Meta': {'object_name': 'ResourceState'}, + 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}) + }, + 'valueaccounting.resourcetypefacetvalue': { + 'Meta': {'ordering': "('resource_type', 'facet_value')", 'unique_together': "(('resource_type', 'facet_value'),)", 'object_name': 'ResourceTypeFacetValue'}, + 'facet_value': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'resource_types'", 'to': "orm['valueaccounting.FacetValue']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'resource_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'facets'", 'to': "orm['valueaccounting.EconomicResourceType']"}) + }, + 'valueaccounting.resourcetypelist': { + 'Meta': {'ordering': "('name',)", 'object_name': 
'ResourceTypeList'}, + 'context_agent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'lists'", 'null': 'True', 'to': "orm['valueaccounting.EconomicAgent']"}), + 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}) + }, + 'valueaccounting.resourcetypelistelement': { + 'Meta': {'ordering': "('resource_type_list', 'resource_type')", 'unique_together': "(('resource_type_list', 'resource_type'),)", 'object_name': 'ResourceTypeListElement'}, + 'default_quantity': ('django.db.models.fields.DecimalField', [], {'default': "'1.0'", 'max_digits': '8', 'decimal_places': '2'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'resource_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'lists'", 'to': "orm['valueaccounting.EconomicResourceType']"}), + 'resource_type_list': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'list_elements'", 'to': "orm['valueaccounting.ResourceTypeList']"}) + }, + 'valueaccounting.selectedoption': { + 'Meta': {'ordering': "('commitment', 'option')", 'object_name': 'SelectedOption'}, + 'commitment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'options'", 'to': "orm['valueaccounting.Commitment']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'option': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'commitments'", 'to': "orm['valueaccounting.Option']"}) + }, + 'valueaccounting.unit': { + 'Meta': {'ordering': "('name',)", 'object_name': 'Unit'}, + 'abbrev': ('django.db.models.fields.CharField', [], {'max_length': '8'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}), + 'symbol': ('django.db.models.fields.CharField', [], {'max_length': '1', 'blank': 'True'}), + 'unit_type': ('django.db.models.fields.CharField', [], {'max_length': '12'}) + }, + 'valueaccounting.usecase': { + 'Meta': {'object_name': 'UseCase'}, + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'identifier': ('django.db.models.fields.CharField', [], {'max_length': '12'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}), + 'restrict_to_one_pattern': ('django.db.models.fields.BooleanField', [], {'default': 'False'}) + }, + 'valueaccounting.usecaseeventtype': { + 'Meta': {'object_name': 'UseCaseEventType'}, + 'event_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'use_cases'", 'to': "orm['valueaccounting.EventType']"}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'use_case': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'event_types'", 'to': "orm['valueaccounting.UseCase']"}) + }, + 'valueaccounting.valueequation': { + 'Meta': {'object_name': 'ValueEquation'}, + 'context_agent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'value_equations'", 'to': "orm['valueaccounting.EconomicAgent']"}), + 'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'value_equations_created'", 'null': 'True', 'to': "orm['auth.User']"}), + 'created_date': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 
'True'}), + 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'live': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), + 'percentage_behavior': ('django.db.models.fields.CharField', [], {'default': "'straight'", 'max_length': '12'}) + }, + 'valueaccounting.valueequationbucket': { + 'Meta': {'ordering': "('sequence',)", 'object_name': 'ValueEquationBucket'}, + 'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'buckets_changed'", 'null': 'True', 'to': "orm['auth.User']"}), + 'changed_date': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}), + 'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'buckets_created'", 'null': 'True', 'to': "orm['auth.User']"}), + 'created_date': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}), + 'distribution_agent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'value_equation_buckets'", 'null': 'True', 'to': "orm['valueaccounting.EconomicAgent']"}), + 'filter_method': ('django.db.models.fields.CharField', [], {'default': "'dates'", 'max_length': '12', 'null': 'True', 'blank': 'True'}), + 'filter_rule': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '32'}), + 'percentage': ('django.db.models.fields.IntegerField', [], {'null': 'True'}), + 'sequence': ('django.db.models.fields.IntegerField', [], {'default': '0'}), + 'value_equation': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'buckets'", 'to': "orm['valueaccounting.ValueEquation']"}) + }, + 'valueaccounting.valueequationbucketrule': { + 'Meta': {'object_name': 'ValueEquationBucketRule'}, + 'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'rules_changed'", 'null': 'True', 'to': "orm['auth.User']"}), + 'changed_date': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}), + 'claim_creation_equation': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'claim_rule_type': ('django.db.models.fields.CharField', [], {'max_length': '12'}), + 'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'rules_created'", 'null': 'True', 'to': "orm['auth.User']"}), + 'created_date': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}), + 'division_rule': ('django.db.models.fields.CharField', [], {'max_length': '12'}), + 'event_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'bucket_rules'", 'to': "orm['valueaccounting.EventType']"}), + 'filter_rule': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), + 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'value_equation_bucket': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'bucket_rules'", 'to': "orm['valueaccounting.ValueEquationBucket']"}) + } + } + + complete_apps = ['valueaccounting'] +# pylint: skip-file +# 
flake8: noqa +# noqa: E302,E301 + + +# pylint: disable=too-many-instance-attributes +class RouteConfig(object): + ''' Handle route options ''' + # pylint: disable=too-many-arguments + def __init__(self, + sname, + namespace, + kubeconfig, + destcacert=None, + cacert=None, + cert=None, + key=None, + host=None, + tls_termination=None, + service_name=None, + wildcard_policy=None, + weight=None, + port=None): + ''' constructor for handling route options ''' + self.kubeconfig = kubeconfig + self.name = sname + self.namespace = namespace + self.host = host + self.tls_termination = tls_termination + self.destcacert = destcacert + self.cacert = cacert + self.cert = cert + self.key = key + self.service_name = service_name + self.port = port + self.data = {} + self.wildcard_policy = wildcard_policy + if wildcard_policy is None: + self.wildcard_policy = 'None' + self.weight = weight + if weight is None: + self.weight = 100 + + self.create_dict() + + def create_dict(self): + ''' assemble the route resource as a dict in self.data ''' + self.data['apiVersion'] = 'v1' + self.data['kind'] = 'Route' + self.data['metadata'] = {} + self.data['metadata']['name'] = self.name + self.data['metadata']['namespace'] = self.namespace + self.data['spec'] = {} + + self.data['spec']['host'] = self.host + + if self.tls_termination: + self.data['spec']['tls'] = {} + + self.data['spec']['tls']['termination'] = self.tls_termination + + if self.tls_termination != 'passthrough': + self.data['spec']['tls']['key'] = self.key + self.data['spec']['tls']['caCertificate'] = self.cacert + self.data['spec']['tls']['certificate'] = self.cert + + if self.tls_termination == 'reencrypt': + self.data['spec']['tls']['destinationCACertificate'] = self.destcacert + + self.data['spec']['to'] = {'kind': 'Service', + 'name': self.service_name, + 'weight': self.weight} + + self.data['spec']['wildcardPolicy'] = self.wildcard_policy + + if self.port: + self.data['spec']['port'] = {} + self.data['spec']['port']['targetPort'] = self.port + +# pylint: disable=too-many-instance-attributes,too-many-public-methods +class Route(Yedit): + ''' Class to model an OpenShift route object ''' + wildcard_policy = "spec.wildcardPolicy" + host_path = "spec.host" + port_path = "spec.port.targetPort" + service_path = "spec.to.name" + weight_path = "spec.to.weight" + cert_path = "spec.tls.certificate" + cacert_path = "spec.tls.caCertificate" + destcacert_path = "spec.tls.destinationCACertificate" + termination_path = "spec.tls.termination" + key_path = "spec.tls.key" + kind = 'route' + + def __init__(self, content): + '''Route constructor''' + super(Route, self).__init__(content=content) + + def get_destcacert(self): + ''' return destination CA certificate ''' + return self.get(Route.destcacert_path) + + def get_cert(self): + ''' return cert ''' + return self.get(Route.cert_path) + + def get_key(self): + ''' return key ''' + return self.get(Route.key_path) + + def get_cacert(self): + ''' return cacert ''' + return self.get(Route.cacert_path) + + def get_service(self): + ''' return service name ''' + return self.get(Route.service_path) + + def get_weight(self): + ''' return service weight ''' + return self.get(Route.weight_path) + + def get_termination(self): + ''' return tls termination ''' + return self.get(Route.termination_path) + + def get_host(self): + ''' return host ''' + return self.get(Route.host_path) + + def get_port(self): + ''' return port ''' + return self.get(Route.port_path) + + def get_wildcard_policy(self): + ''' return wildcardPolicy ''' + return self.get(Route.wildcard_policy) + 
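+# --- Editor's sketch: a minimal, illustrative use of RouteConfig as defined +# --- above. Not part of the original module; the namespace, host, service +# --- name, kubeconfig path, and PEM placeholders are all hypothetical. +if __name__ == '__main__': + # Build an edge-terminated route manifest (create_dict() runs in __init__). + example = RouteConfig(sname='frontend', + namespace='default', + kubeconfig='/etc/origin/master/admin.kubeconfig', + host='frontend.apps.example.com', + tls_termination='edge', + cert='-----BEGIN CERTIFICATE----- ...', + key='-----BEGIN PRIVATE KEY----- ...', + cacert='-----BEGIN CERTIFICATE----- ...', + service_name='frontend-svc') + # weight defaults to 100 and wildcardPolicy to 'None' when not supplied. + print(example.data['spec']['to']) + # -> {'kind': 'Service', 'name': 'frontend-svc', 'weight': 100} + 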
+#!/usr/bin/env python +# -*- coding: utf-8 -*- +import argparse +import glob +import os +import sys + +from . import Command +from .server import main +from openerp.modules.module import get_module_root, MANIFEST +from openerp.service.db import _create_empty_database, DatabaseExists + + +class Start(Command): + """Quick start the Odoo server for your project""" + + def get_module_list(self, path): + mods = glob.glob(os.path.join(path, '*/%s' % MANIFEST)) + return [mod.split(os.path.sep)[-2] for mod in mods] + + def run(self, cmdargs): + parser = argparse.ArgumentParser( + prog="%s start" % sys.argv[0].split(os.path.sep)[-1], + description=self.__doc__ + ) + parser.add_argument('--path', default=".", + help="Directory where your project's modules are stored (will autodetect from current dir)") + parser.add_argument("-d", "--database", dest="db_name", default=None, + help="Specify the database name (defaults to the project's directory name)") + + args, unknown = parser.parse_known_args(args=cmdargs) + + project_path = os.path.abspath(os.path.expanduser(os.path.expandvars(args.path))) + module_root = get_module_root(project_path) + db_name = None + if module_root: + # started in a module so we choose this module name for database + db_name = project_path.split(os.path.sep)[-1] + # go to the parent's directory of the module root + project_path = os.path.abspath(os.path.join(project_path, os.pardir)) + + # check if one of the subfolders has at least one module + mods = self.get_module_list(project_path) + if mods and '--addons-path' not in cmdargs: + cmdargs.append('--addons-path=%s' % project_path) + + if not args.db_name: + args.db_name = db_name or project_path.split(os.path.sep)[-1] + cmdargs.extend(('-d', args.db_name)) + + # TODO: forbid some database names ? eg template1, ... + try: + _create_empty_database(args.db_name) + except DatabaseExists: + pass + except Exception, e: + die("Could not create database `%s`. (%s)" % (args.db_name, e)) + + if '--db-filter' not in cmdargs: + cmdargs.append('--db-filter=^%s$' % args.db_name) + + main(cmdargs) + +def die(message, code=1): + print >>sys.stderr, message + sys.exit(code) + +# usage: genmetaindex.py > index.xml +import sys, os +from xml.etree.ElementTree import ElementTree, Element + +root = Element("index") + +for file in sys.argv[1:]: + p = ElementTree() + p.parse(file) + + package = Element("package") + package.set("details", os.path.basename(file)) + + # we need all prerequisites + package.append(p.find("prerequisites")) + + info = None + # we need some of the info, but not all + for i in p.findall("info"): + if not info: + info = i + assert info + + for i in info[:]: + if i.tag not in ["name", "packagename", "packagetype", "shortdescription"]: + info.remove(i) + + for i in info[:]: + package.set(i.tag, i.text) + + root.append(package) + +def indent(elem, level=0): + i = "\n" + level*"\t" + if len(elem): + if not elem.text or not elem.text.strip(): + elem.text = i + "\t" + if not elem.tail or not elem.tail.strip(): + elem.tail = i + for elem in elem: + indent(elem, level+1) + if not elem.tail or not elem.tail.strip(): + elem.tail = i + else: + if level and (not elem.tail or not elem.tail.strip()): + elem.tail = i + +indent(root) + +ElementTree(root).write(sys.stdout) + +#! 
/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1) +# +# (1) Kamaelia Contributors are listed in the AUTHORS file and at +# http://www.kamaelia.org/AUTHORS - please extend this file, +# not this notice. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from distutils.core import setup +setup(name="Kamaelia Jam Library", + version = "0.1a1", + platforms = ["any"], + packages = [ + "Axon", + "Kamaelia", + "Kamaelia.Apps", + "Kamaelia.Apps.Jam", + "Kamaelia.Apps.Jam.Audio", #STARTPACKAGES + "Kamaelia.Apps.Jam.Internet", + "Kamaelia.Apps.Jam.Protocol", + "Kamaelia.Apps.Jam.Support", + "Kamaelia.Apps.Jam.Support.Data", + "Kamaelia.Apps.Jam.UI", + "Kamaelia.Apps.Jam.Util", #LASTPACKAGES + ], + ) + +# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com> +# +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see <http://www.gnu.org/licenses/>. + +# Make coding more python3-ish +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +from ansible.compat.six import iteritems +from jinja2.utils import missing +from ansible.utils.unicode import to_unicode + +__all__ = ['AnsibleJ2Vars'] + + +class AnsibleJ2Vars: + ''' + Helper class to template all variable content before jinja2 sees it. This is + done by hijacking the variable storage that jinja2 uses, and overriding __contains__ + and __getitem__ to look like a dict. An added bonus is avoiding duplication of the + large hashes that inject tends to produce. + + To facilitate using builtin jinja2 things like range, globals are also handled here. + ''' + + def __init__(self, templar, globals, locals=None, *extras): + ''' + Initializes this object with a valid Templar() object, as + well as several dictionaries of variables representing + different scopes (in jinja2 terminology). 
+ ''' + + self._templar = templar + self._globals = globals + self._extras = extras + self._locals = dict() + if isinstance(locals, dict): + for key, val in iteritems(locals): + if key[:2] == 'l_' and val is not missing: + self._locals[key[2:]] = val + + def __contains__(self, k): + if k in self._templar._available_variables: + return True + if k in self._locals: + return True + for i in self._extras: + if k in i: + return True + if k in self._globals: + return True + return False + + def __getitem__(self, varname): + if varname not in self._templar._available_variables: + if varname in self._locals: + return self._locals[varname] + for i in self._extras: + if varname in i: + return i[varname] + if varname in self._globals: + return self._globals[varname] + else: + raise KeyError("undefined variable: %s" % varname) + + variable = self._templar._available_variables[varname] + + # HostVars is special, return it as-is, as is the special variable + # 'vars', which contains the vars structure + from ansible.vars.hostvars import HostVars + if isinstance(variable, dict) and varname == "vars" or isinstance(variable, HostVars): + return variable + else: + value = None + try: + value = self._templar.template(variable) + except Exception as e: + raise type(e)(to_unicode(variable) + ': ' + e.message) + return value + + def add_locals(self, locals): + ''' + If locals are provided, create a copy of self containing those + locals in addition to what is already in this variable proxy. + ''' + if locals is None: + return self + return AnsibleJ2Vars(self._templar, self._globals, locals=locals, *self._extras) + + +# -*- coding: utf-8 - +# +# This file is part of gunicorn released under the MIT license. +# See the NOTICE for more information. + + +class ParseException(Exception): + pass + + +class NoMoreData(IOError): + def __init__(self, buf=None): + self.buf = buf + + def __str__(self): + return "No more data after: %r" % self.buf + + +class InvalidRequestLine(ParseException): + def __init__(self, req): + self.req = req + self.code = 400 + + def __str__(self): + return "Invalid HTTP request line: %r" % self.req + + +class InvalidRequestMethod(ParseException): + def __init__(self, method): + self.method = method + + def __str__(self): + return "Invalid HTTP method: %r" % self.method + + +class InvalidHTTPVersion(ParseException): + def __init__(self, version): + self.version = version + + def __str__(self): + return "Invalid HTTP Version: %r" % self.version + + +class InvalidHeader(ParseException): + def __init__(self, hdr, req=None): + self.hdr = hdr + self.req = req + + def __str__(self): + return "Invalid HTTP Header: %r" % self.hdr + + +class InvalidHeaderName(ParseException): + def __init__(self, hdr): + self.hdr = hdr + + def __str__(self): + return "Invalid HTTP header name: %r" % self.hdr + + +class InvalidChunkSize(IOError): + def __init__(self, data): + self.data = data + + def __str__(self): + return "Invalid chunk size: %r" % self.data + + +class ChunkMissingTerminator(IOError): + def __init__(self, term): + self.term = term + + def __str__(self): + return "Invalid chunk terminator is not '\\r\\n': %r" % self.term + + +class LimitRequestLine(ParseException): + def __init__(self, size, max_size): + self.size = size + self.max_size = max_size + + def __str__(self): + return "Request Line is too large (%s > %s)" % (self.size, self.max_size) + + +class LimitRequestHeaders(ParseException): + def __init__(self, msg): + self.msg = msg + + def __str__(self): + return self.msg + + +class 
InvalidProxyLine(ParseException): + def __init__(self, line): + self.line = line + self.code = 400 + + def __str__(self): + return "Invalid PROXY line: %r" % self.line + + +class ForbiddenProxyRequest(ParseException): + def __init__(self, host): + self.host = host + self.code = 403 + + def __str__(self): + return "Proxy request from %r not allowed" % self.host + +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +from __future__ import unicode_literals + +__license__ = 'Public Domain' + +import codecs +import io +import os +import random +import sys + + +from .options import ( + parseOpts, +) +from .compat import ( + compat_expanduser, + compat_getpass, + compat_print, + compat_shlex_split, + workaround_optparse_bug9161, +) +from .utils import ( + DateRange, + decodeOption, + DEFAULT_OUTTMPL, + DownloadError, + match_filter_func, + MaxDownloadsReached, + preferredencoding, + read_batch_urls, + SameFileError, + setproctitle, + std_headers, + write_string, +) +from .update import update_self +from .downloader import ( + FileDownloader, +) +from .extractor import gen_extractors, list_extractors +from .YoutubeDL import YoutubeDL + + +def _real_main(argv=None): + # Compatibility fixes for Windows + if sys.platform == 'win32': + # https://github.com/rg3/youtube-dl/issues/820 + codecs.register(lambda name: codecs.lookup('utf-8') if name == 'cp65001' else None) + + workaround_optparse_bug9161() + + setproctitle('youtube-dl') + + parser, opts, args = parseOpts(argv) + + # Set user agent + if opts.user_agent is not None: + std_headers['User-Agent'] = opts.user_agent + + # Set referer + if opts.referer is not None: + std_headers['Referer'] = opts.referer + + # Custom HTTP headers + if opts.headers is not None: + for h in opts.headers: + if h.find(':', 1) < 0: + parser.error('wrong header formatting, it should be key:value, not "%s"' % h) + # maxsplit=1 so that header values may themselves contain ':' + key, value = h.split(':', 1) + if opts.verbose: + write_string('[debug] Adding header from command line option %s:%s\n' % (key, value)) + std_headers[key] = value + + # Dump user agent + if opts.dump_user_agent: + compat_print(std_headers['User-Agent']) + sys.exit(0) + + # Batch file verification + batch_urls = [] + if opts.batchfile is not None: + try: + if opts.batchfile == '-': + batchfd = sys.stdin + else: + batchfd = io.open(opts.batchfile, 'r', encoding='utf-8', errors='ignore') + batch_urls = read_batch_urls(batchfd) + if opts.verbose: + write_string('[debug] Batch file urls: ' + repr(batch_urls) + '\n') + except IOError: + sys.exit('ERROR: batch file could not be read') + all_urls = batch_urls + args + all_urls = [url.strip() for url in all_urls] + _enc = preferredencoding() + all_urls = [url.decode(_enc, 'ignore') if isinstance(url, bytes) else url for url in all_urls] + + if opts.list_extractors: + for ie in list_extractors(opts.age_limit): + compat_print(ie.IE_NAME + (' (CURRENTLY BROKEN)' if not ie._WORKING else '')) + matchedUrls = [url for url in all_urls if ie.suitable(url)] + for mu in matchedUrls: + compat_print(' ' + mu) + sys.exit(0) + if opts.list_extractor_descriptions: + for ie in list_extractors(opts.age_limit): + if not ie._WORKING: + continue + desc = getattr(ie, 'IE_DESC', ie.IE_NAME) + if desc is False: + continue + if hasattr(ie, 'SEARCH_KEY'): + _SEARCHES = ('cute kittens', 'slithering pythons', 'falling cat', 'angry poodle', 'purple fish', 'running tortoise', 'sleeping bunny', 'burping cow') + _COUNTS = ('', '5', '10', 'all') + desc += ' (Example: "%s%s:%s" )' % (ie.SEARCH_KEY, random.choice(_COUNTS), random.choice(_SEARCHES)) + 
compat_print(desc) + sys.exit(0) + + # Conflicting, missing and erroneous options + if opts.usenetrc and (opts.username is not None or opts.password is not None): + parser.error('using .netrc conflicts with giving username/password') + if opts.password is not None and opts.username is None: + parser.error('account username missing\n') + if opts.outtmpl is not None and (opts.usetitle or opts.autonumber or opts.useid): + parser.error('using output template conflicts with using title, video ID or auto number') + if opts.usetitle and opts.useid: + parser.error('using title conflicts with using video ID') + if opts.username is not None and opts.password is None: + opts.password = compat_getpass('Type account password and press [Return]: ') + if opts.ratelimit is not None: + numeric_limit = FileDownloader.parse_bytes(opts.ratelimit) + if numeric_limit is None: + parser.error('invalid rate limit specified') + opts.ratelimit = numeric_limit + if opts.min_filesize is not None: + numeric_limit = FileDownloader.parse_bytes(opts.min_filesize) + if numeric_limit is None: + parser.error('invalid min_filesize specified') + opts.min_filesize = numeric_limit + if opts.max_filesize is not None: + numeric_limit = FileDownloader.parse_bytes(opts.max_filesize) + if numeric_limit is None: + parser.error('invalid max_filesize specified') + opts.max_filesize = numeric_limit + if opts.retries is not None: + if opts.retries in ('inf', 'infinite'): + opts_retries = float('inf') + else: + try: + opts_retries = int(opts.retries) + except (TypeError, ValueError): + parser.error('invalid retry count specified') + if opts.buffersize is not None: + numeric_buffersize = FileDownloader.parse_bytes(opts.buffersize) + if numeric_buffersize is None: + parser.error('invalid buffer size specified') + opts.buffersize = numeric_buffersize + if opts.playliststart <= 0: + raise ValueError('Playlist start must be positive') + if opts.playlistend not in (-1, None) and opts.playlistend < opts.playliststart: + raise ValueError('Playlist end must be greater than playlist start') + if opts.extractaudio: + if opts.audioformat not in ['best', 'aac', 'mp3', 'm4a', 'opus', 'vorbis', 'wav']: + parser.error('invalid audio format specified') + if opts.audioquality: + opts.audioquality = opts.audioquality.strip('k').strip('K') + if not opts.audioquality.isdigit(): + parser.error('invalid audio quality specified') + if opts.recodevideo is not None: + if opts.recodevideo not in ['mp4', 'flv', 'webm', 'ogg', 'mkv', 'avi']: + parser.error('invalid video recode format specified') + if opts.convertsubtitles is not None: + if opts.convertsubtitles not in ['srt', 'vtt', 'ass']: + parser.error('invalid subtitle format specified') + + if opts.date is not None: + date = DateRange.day(opts.date) + else: + date = DateRange(opts.dateafter, opts.datebefore) + + # Do not download videos when there are audio-only formats + if opts.extractaudio and not opts.keepvideo and opts.format is None: + opts.format = 'bestaudio/best' + + # --all-sub automatically sets --write-sub if --write-auto-sub is not given + # this was the old behaviour if only --all-sub was given. 
+ if opts.allsubtitles and not opts.writeautomaticsub: + opts.writesubtitles = True + + outtmpl = ((opts.outtmpl is not None and opts.outtmpl) or + (opts.format == '-1' and opts.usetitle and '%(title)s-%(id)s-%(format)s.%(ext)s') or + (opts.format == '-1' and '%(id)s-%(format)s.%(ext)s') or + (opts.usetitle and opts.autonumber and '%(autonumber)s-%(title)s-%(id)s.%(ext)s') or + (opts.usetitle and '%(title)s-%(id)s.%(ext)s') or + (opts.useid and '%(id)s.%(ext)s') or + (opts.autonumber and '%(autonumber)s-%(id)s.%(ext)s') or + DEFAULT_OUTTMPL) + if not os.path.splitext(outtmpl)[1] and opts.extractaudio: + parser.error('Cannot download a video and extract audio into the same' + ' file! Use "{0}.%(ext)s" instead of "{0}" as the output' + ' template'.format(outtmpl)) + + any_getting = opts.geturl or opts.gettitle or opts.getid or opts.getthumbnail or opts.getdescription or opts.getfilename or opts.getformat or opts.getduration or opts.dumpjson or opts.dump_single_json + any_printing = opts.print_json + download_archive_fn = compat_expanduser(opts.download_archive) if opts.download_archive is not None else opts.download_archive + + # PostProcessors + postprocessors = [] + # Add the metadata pp first, the other pps will copy it + if opts.metafromtitle: + postprocessors.append({ + 'key': 'MetadataFromTitle', + 'titleformat': opts.metafromtitle + }) + if opts.addmetadata: + postprocessors.append({'key': 'FFmpegMetadata'}) + if opts.extractaudio: + postprocessors.append({ + 'key': 'FFmpegExtractAudio', + 'preferredcodec': opts.audioformat, + 'preferredquality': opts.audioquality, + 'nopostoverwrites': opts.nopostoverwrites, + }) + if opts.recodevideo: + postprocessors.append({ + 'key': 'FFmpegVideoConvertor', + 'preferedformat': opts.recodevideo, + }) + if opts.convertsubtitles: + postprocessors.append({ + 'key': 'FFmpegSubtitlesConvertor', + 'format': opts.convertsubtitles, + }) + if opts.embedsubtitles: + postprocessors.append({ + 'key': 'FFmpegEmbedSubtitle', + }) + if opts.xattrs: + postprocessors.append({'key': 'XAttrMetadata'}) + if opts.embedthumbnail: + already_have_thumbnail = opts.writethumbnail or opts.write_all_thumbnails + postprocessors.append({ + 'key': 'EmbedThumbnail', + 'already_have_thumbnail': already_have_thumbnail + }) + if not already_have_thumbnail: + opts.writethumbnail = True + # Please keep ExecAfterDownload towards the bottom as it allows the user to modify the final file in any way. + # So if the user is able to remove the file before your postprocessor runs it might cause a few problems. 
+ if opts.exec_cmd: + postprocessors.append({ + 'key': 'ExecAfterDownload', + 'exec_cmd': opts.exec_cmd, + }) + if opts.xattr_set_filesize: + try: + import xattr + xattr # Confuse flake8 + except ImportError: + parser.error('setting filesize xattr requested but python-xattr is not available') + external_downloader_args = None + if opts.external_downloader_args: + external_downloader_args = compat_shlex_split(opts.external_downloader_args) + postprocessor_args = None + if opts.postprocessor_args: + postprocessor_args = compat_shlex_split(opts.postprocessor_args) + match_filter = ( + None if opts.match_filter is None + else match_filter_func(opts.match_filter)) + + ydl_opts = { + 'usenetrc': opts.usenetrc, + 'username': opts.username, + 'password': opts.password, + 'twofactor': opts.twofactor, + 'videopassword': opts.videopassword, + 'quiet': (opts.quiet or any_getting or any_printing), + 'no_warnings': opts.no_warnings, + 'forceurl': opts.geturl, + 'forcetitle': opts.gettitle, + 'forceid': opts.getid, + 'forcethumbnail': opts.getthumbnail, + 'forcedescription': opts.getdescription, + 'forceduration': opts.getduration, + 'forcefilename': opts.getfilename, + 'forceformat': opts.getformat, + 'forcejson': opts.dumpjson or opts.print_json, + 'dump_single_json': opts.dump_single_json, + 'simulate': opts.simulate or any_getting, + 'skip_download': opts.skip_download, + 'format': opts.format, + 'listformats': opts.listformats, + 'outtmpl': outtmpl, + 'autonumber_size': opts.autonumber_size, + 'restrictfilenames': opts.restrictfilenames, + 'ignoreerrors': opts.ignoreerrors, + 'force_generic_extractor': opts.force_generic_extractor, + 'ratelimit': opts.ratelimit, + 'nooverwrites': opts.nooverwrites, + 'retries': opts_retries, + 'buffersize': opts.buffersize, + 'noresizebuffer': opts.noresizebuffer, + 'continuedl': opts.continue_dl, + 'noprogress': opts.noprogress, + 'progress_with_newline': opts.progress_with_newline, + 'playliststart': opts.playliststart, + 'playlistend': opts.playlistend, + 'playlistreverse': opts.playlist_reverse, + 'noplaylist': opts.noplaylist, + 'logtostderr': opts.outtmpl == '-', + 'consoletitle': opts.consoletitle, + 'nopart': opts.nopart, + 'updatetime': opts.updatetime, + 'writedescription': opts.writedescription, + 'writeannotations': opts.writeannotations, + 'writeinfojson': opts.writeinfojson, + 'writethumbnail': opts.writethumbnail, + 'write_all_thumbnails': opts.write_all_thumbnails, + 'writesubtitles': opts.writesubtitles, + 'writeautomaticsub': opts.writeautomaticsub, + 'allsubtitles': opts.allsubtitles, + 'listsubtitles': opts.listsubtitles, + 'subtitlesformat': opts.subtitlesformat, + 'subtitleslangs': opts.subtitleslangs, + 'matchtitle': decodeOption(opts.matchtitle), + 'rejecttitle': decodeOption(opts.rejecttitle), + 'max_downloads': opts.max_downloads, + 'prefer_free_formats': opts.prefer_free_formats, + 'verbose': opts.verbose, + 'dump_intermediate_pages': opts.dump_intermediate_pages, + 'write_pages': opts.write_pages, + 'test': opts.test, + 'keepvideo': opts.keepvideo, + 'min_filesize': opts.min_filesize, + 'max_filesize': opts.max_filesize, + 'min_views': opts.min_views, + 'max_views': opts.max_views, + 'daterange': date, + 'cachedir': opts.cachedir, + 'youtube_print_sig_code': opts.youtube_print_sig_code, + 'age_limit': opts.age_limit, + 'download_archive': download_archive_fn, + 'cookiefile': opts.cookiefile, + 'nocheckcertificate': opts.no_check_certificate, + 'prefer_insecure': opts.prefer_insecure, + 'proxy': opts.proxy, + 'socket_timeout': 
opts.socket_timeout, + 'bidi_workaround': opts.bidi_workaround, + 'debug_printtraffic': opts.debug_printtraffic, + 'prefer_ffmpeg': opts.prefer_ffmpeg, + 'include_ads': opts.include_ads, + 'default_search': opts.default_search, + 'youtube_include_dash_manifest': opts.youtube_include_dash_manifest, + 'encoding': opts.encoding, + 'extract_flat': opts.extract_flat, + 'merge_output_format': opts.merge_output_format, + 'postprocessors': postprocessors, + 'fixup': opts.fixup, + 'source_address': opts.source_address, + 'call_home': opts.call_home, + 'sleep_interval': opts.sleep_interval, + 'external_downloader': opts.external_downloader, + 'list_thumbnails': opts.list_thumbnails, + 'playlist_items': opts.playlist_items, + 'xattr_set_filesize': opts.xattr_set_filesize, + 'match_filter': match_filter, + 'no_color': opts.no_color, + 'ffmpeg_location': opts.ffmpeg_location, + 'hls_prefer_native': opts.hls_prefer_native, + 'external_downloader_args': external_downloader_args, + 'postprocessor_args': postprocessor_args, + 'cn_verification_proxy': opts.cn_verification_proxy, + } + + with YoutubeDL(ydl_opts) as ydl: + # Update version + if opts.update_self: + update_self(ydl.to_screen, opts.verbose) + + # Remove cache dir + if opts.rm_cachedir: + ydl.cache.remove() + + # Maybe do nothing + if (len(all_urls) < 1) and (opts.load_info_filename is None): + if opts.update_self or opts.rm_cachedir: + sys.exit() + + ydl.warn_if_short_id(sys.argv[1:] if argv is None else argv) + parser.error( + 'You must provide at least one URL.\n' + 'Type youtube-dl --help to see a list of all options.') + + try: + if opts.load_info_filename is not None: + retcode = ydl.download_with_info_file(opts.load_info_filename) + else: + retcode = ydl.download(all_urls) + except MaxDownloadsReached: + ydl.to_screen('--max-download limit reached, aborting.') + retcode = 101 + + sys.exit(retcode) + + +def main(argv=None): + try: + _real_main(argv) + except DownloadError: + sys.exit(1) + except SameFileError: + sys.exit('ERROR: fixed output name but more than one file to download') + except KeyboardInterrupt: + sys.exit('\nERROR: Interrupted by user') + +__all__ = ['main', 'YoutubeDL', 'gen_extractors', 'list_extractors'] + +""" A sparse matrix in COOrdinate or 'triplet' format""" +from __future__ import division, print_function, absolute_import + +__docformat__ = "restructuredtext en" + +__all__ = ['coo_matrix', 'isspmatrix_coo'] + +from warnings import warn + +import numpy as np + +from scipy._lib.six import xrange, zip as izip + +from ._sparsetools import coo_tocsr, coo_todense, coo_matvec +from .base import isspmatrix +from .data import _data_matrix, _minmax_mixin +from .sputils import (upcast, upcast_char, to_native, isshape, getdtype, + isintlike, get_index_dtype, downcast_intp_index) + + +class coo_matrix(_data_matrix, _minmax_mixin): + """ + A sparse matrix in COOrdinate format. + + Also known as the 'ijv' or 'triplet' format. + + This can be instantiated in several ways: + coo_matrix(D) + with a dense matrix D + + coo_matrix(S) + with another sparse matrix S (equivalent to S.tocoo()) + + coo_matrix((M, N), [dtype]) + to construct an empty matrix with shape (M, N) + dtype is optional, defaulting to dtype='d'. + + coo_matrix((data, (i, j)), [shape=(M, N)]) + to construct from three arrays: + 1. data[:] the entries of the matrix, in any order + 2. i[:] the row indices of the matrix entries + 3. j[:] the column indices of the matrix entries + + Where ``A[i[k], j[k]] = data[k]``. 
When shape is not + specified, it is inferred from the index arrays + + Attributes + ---------- + dtype : dtype + Data type of the matrix + shape : 2-tuple + Shape of the matrix + ndim : int + Number of dimensions (this is always 2) + nnz + Number of nonzero elements + data + COO format data array of the matrix + row + COO format row index array of the matrix + col + COO format column index array of the matrix + + Notes + ----- + + Sparse matrices can be used in arithmetic operations: they support + addition, subtraction, multiplication, division, and matrix power. + + Advantages of the COO format + - facilitates fast conversion among sparse formats + - permits duplicate entries (see example) + - very fast conversion to and from CSR/CSC formats + + Disadvantages of the COO format + - does not directly support: + + arithmetic operations + + slicing + + Intended Usage + - COO is a fast format for constructing sparse matrices + - Once a matrix has been constructed, convert to CSR or + CSC format for fast arithmetic and matrix vector operations + - By default when converting to CSR or CSC format, duplicate (i,j) + entries will be summed together. This facilitates efficient + construction of finite element matrices and the like. (see example) + + Examples + -------- + >>> from scipy.sparse import coo_matrix + >>> coo_matrix((3, 4), dtype=np.int8).toarray() + array([[0, 0, 0, 0], + [0, 0, 0, 0], + [0, 0, 0, 0]], dtype=int8) + + >>> row = np.array([0, 3, 1, 0]) + >>> col = np.array([0, 3, 1, 2]) + >>> data = np.array([4, 5, 7, 9]) + >>> coo_matrix((data, (row, col)), shape=(4, 4)).toarray() + array([[4, 0, 9, 0], + [0, 7, 0, 0], + [0, 0, 0, 0], + [0, 0, 0, 5]]) + + >>> # example with duplicates + >>> row = np.array([0, 0, 1, 3, 1, 0, 0]) + >>> col = np.array([0, 2, 1, 3, 1, 0, 0]) + >>> data = np.array([1, 1, 1, 1, 1, 1, 1]) + >>> coo_matrix((data, (row, col)), shape=(4, 4)).toarray() + array([[3, 0, 1, 0], + [0, 2, 0, 0], + [0, 0, 0, 0], + [0, 0, 0, 1]]) + + """ + def __init__(self, arg1, shape=None, dtype=None, copy=False): + _data_matrix.__init__(self) + + if isinstance(arg1, tuple): + if isshape(arg1): + M, N = arg1 + self.shape = (M,N) + idx_dtype = get_index_dtype(maxval=max(M, N)) + self.row = np.array([], dtype=idx_dtype) + self.col = np.array([], dtype=idx_dtype) + self.data = np.array([], getdtype(dtype, default=float)) + self.has_canonical_format = True + else: + try: + obj, ij = arg1 + except: + raise TypeError('invalid input format') + + try: + if len(ij) != 2: + raise TypeError + except TypeError: + raise TypeError('invalid input format') + + row, col = ij + if shape is None: + if len(row) == 0 or len(col) == 0: + raise ValueError('cannot infer dimensions from zero ' + 'sized index arrays') + M = np.max(row) + 1 + N = np.max(col) + 1 + self.shape = (M, N) + else: + # Use 2 steps to ensure shape has length 2. + M, N = shape + self.shape = (M, N) + + idx_dtype = get_index_dtype(maxval=max(self.shape)) + self.row = np.array(row, copy=copy, dtype=idx_dtype) + self.col = np.array(col, copy=copy, dtype=idx_dtype) + self.data = np.array(obj, copy=copy) + self.has_canonical_format = False + + elif arg1 is None: + # Initialize an empty matrix. 
+ if not isinstance(shape, tuple) or not isintlike(shape[0]): + raise TypeError('dimensions not understood') + warn('coo_matrix(None, shape=(M,N)) is deprecated, ' + 'use coo_matrix( (M,N) ) instead', DeprecationWarning) + idx_dtype = get_index_dtype(maxval=max(M, N)) + self.shape = shape + self.data = np.array([], getdtype(dtype, default=float)) + self.row = np.array([], dtype=idx_dtype) + self.col = np.array([], dtype=idx_dtype) + self.has_canonical_format = True + else: + if isspmatrix(arg1): + if isspmatrix_coo(arg1) and copy: + self.row = arg1.row.copy() + self.col = arg1.col.copy() + self.data = arg1.data.copy() + self.shape = arg1.shape + else: + coo = arg1.tocoo() + self.row = coo.row + self.col = coo.col + self.data = coo.data + self.shape = coo.shape + self.has_canonical_format = False + else: + #dense argument + try: + M = np.atleast_2d(np.asarray(arg1)) + except: + raise TypeError('invalid input format') + + if M.ndim != 2: + raise TypeError('expected dimension <= 2 array or matrix') + else: + self.shape = M.shape + + self.row, self.col = M.nonzero() + self.data = M[self.row, self.col] + self.has_canonical_format = True + + if dtype is not None: + self.data = self.data.astype(dtype) + + self._check() + + def getnnz(self, axis=None): + """Get the count of explicitly-stored values (nonzeros) + + Parameters + ---------- + axis : None, 0, or 1 + Select between the number of values across the whole matrix, in + each column, or in each row. + """ + if axis is None: + nnz = len(self.data) + if nnz != len(self.row) or nnz != len(self.col): + raise ValueError('row, column, and data array must all be the ' + 'same length') + + if self.data.ndim != 1 or self.row.ndim != 1 or \ + self.col.ndim != 1: + raise ValueError('row, column, and data arrays must be 1-D') + + return int(nnz) + + if axis < 0: + axis += 2 + if axis == 0: + return np.bincount(downcast_intp_index(self.col), + minlength=self.shape[1]) + elif axis == 1: + return np.bincount(downcast_intp_index(self.row), + minlength=self.shape[0]) + else: + raise ValueError('axis out of bounds') + nnz = property(fget=getnnz) + + def _check(self): + """ Checks data structure for consistency """ + nnz = self.nnz + + # index arrays should have integer data types + if self.row.dtype.kind != 'i': + warn("row index array has non-integer dtype (%s) " + % self.row.dtype.name) + if self.col.dtype.kind != 'i': + warn("col index array has non-integer dtype (%s) " + % self.col.dtype.name) + + idx_dtype = get_index_dtype(maxval=max(self.shape)) + self.row = np.asarray(self.row, dtype=idx_dtype) + self.col = np.asarray(self.col, dtype=idx_dtype) + self.data = to_native(self.data) + + if nnz > 0: + if self.row.max() >= self.shape[0]: + raise ValueError('row index exceeds matrix dimensions') + if self.col.max() >= self.shape[1]: + raise ValueError('column index exceeds matrix dimensions') + if self.row.min() < 0: + raise ValueError('negative row index found') + if self.col.min() < 0: + raise ValueError('negative column index found') + + def transpose(self, copy=False): + M,N = self.shape + return coo_matrix((self.data, (self.col, self.row)), shape=(N,M), copy=copy) + + def toarray(self, order=None, out=None): + """See the docstring for `spmatrix.toarray`.""" + B = self._process_toarray_args(order, out) + fortran = int(B.flags.f_contiguous) + if not fortran and not B.flags.c_contiguous: + raise ValueError("Output array must be C or F contiguous") + M,N = self.shape + coo_todense(M, N, self.nnz, self.row, self.col, self.data, + B.ravel('A'), fortran) + 
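+        # coo_todense accumulates data[k] into B[row[k], col[k]], so any
+        # duplicate (i, j) entries are summed in the dense output as well.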
return B + + def tocsc(self): + """Return a copy of this matrix in Compressed Sparse Column format + + Duplicate entries will be summed together. + + Examples + -------- + >>> from numpy import array + >>> from scipy.sparse import coo_matrix + >>> row = array([0, 0, 1, 3, 1, 0, 0]) + >>> col = array([0, 2, 1, 3, 1, 0, 0]) + >>> data = array([1, 1, 1, 1, 1, 1, 1]) + >>> A = coo_matrix((data, (row, col)), shape=(4, 4)).tocsc() + >>> A.toarray() + array([[3, 0, 1, 0], + [0, 2, 0, 0], + [0, 0, 0, 0], + [0, 0, 0, 1]]) + + """ + from .csc import csc_matrix + if self.nnz == 0: + return csc_matrix(self.shape, dtype=self.dtype) + else: + M,N = self.shape + idx_dtype = get_index_dtype((self.col, self.row), + maxval=max(self.nnz, M)) + indptr = np.empty(N + 1, dtype=idx_dtype) + indices = np.empty(self.nnz, dtype=idx_dtype) + data = np.empty(self.nnz, dtype=upcast(self.dtype)) + + coo_tocsr(N, M, self.nnz, + self.col.astype(idx_dtype), + self.row.astype(idx_dtype), + self.data, + indptr, indices, data) + + A = csc_matrix((data, indices, indptr), shape=self.shape) + A.sum_duplicates() + + return A + + def tocsr(self): + """Return a copy of this matrix in Compressed Sparse Row format + + Duplicate entries will be summed together. + + Examples + -------- + >>> from numpy import array + >>> from scipy.sparse import coo_matrix + >>> row = array([0, 0, 1, 3, 1, 0, 0]) + >>> col = array([0, 2, 1, 3, 1, 0, 0]) + >>> data = array([1, 1, 1, 1, 1, 1, 1]) + >>> A = coo_matrix((data, (row, col)), shape=(4, 4)).tocsr() + >>> A.toarray() + array([[3, 0, 1, 0], + [0, 2, 0, 0], + [0, 0, 0, 0], + [0, 0, 0, 1]]) + + """ + from .csr import csr_matrix + if self.nnz == 0: + return csr_matrix(self.shape, dtype=self.dtype) + else: + M,N = self.shape + idx_dtype = get_index_dtype((self.row, self.col), + maxval=max(self.nnz, N)) + indptr = np.empty(M + 1, dtype=idx_dtype) + indices = np.empty(self.nnz, dtype=idx_dtype) + data = np.empty(self.nnz, dtype=upcast(self.dtype)) + + coo_tocsr(M, N, self.nnz, + self.row.astype(idx_dtype), + self.col.astype(idx_dtype), + self.data, + indptr, + indices, + data) + + A = csr_matrix((data, indices, indptr), shape=self.shape) + A.sum_duplicates() + + return A + + def tocoo(self, copy=False): + if copy: + return self.copy() + else: + return self + + def todia(self): + from .dia import dia_matrix + + ks = self.col - self.row # the diagonal for each nonzero + diags = np.unique(ks) + + if len(diags) > 100: + #probably undesired, should we do something? + #should todia() have a maxdiags parameter? + pass + + #initialize and fill in data array + if self.data.size == 0: + data = np.zeros((0, 0), dtype=self.dtype) + else: + data = np.zeros((len(diags), self.col.max()+1), dtype=self.dtype) + data[np.searchsorted(diags,ks), self.col] = self.data + + return dia_matrix((data,diags), shape=self.shape) + + def todok(self): + from .dok import dok_matrix + + self.sum_duplicates() + dok = dok_matrix((self.shape), dtype=self.dtype) + dok.update(izip(izip(self.row,self.col),self.data)) + + return dok + + def diagonal(self): + # Could be rewritten without the python loop. + # Data entries at the same (row, col) are summed. 
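+        # A vectorized sketch of the same computation (an alternative, not
+        # what is used below; np.bincount needs real-valued weights):
+        #   mask = self.row == self.col
+        #   d = np.bincount(self.row[mask], weights=self.data[mask],
+        #                   minlength=min(self.shape))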
+ n = min(self.shape) + ndata = self.data.shape[0] + d = np.zeros(n, dtype=self.dtype) + for i in xrange(ndata): + r = self.row[i] + if r == self.col[i]: + d[r] += self.data[i] + return d + diagonal.__doc__ = _data_matrix.diagonal.__doc__ + + def _setdiag(self, values, k): + M, N = self.shape + if values.ndim and not len(values): + return + idx_dtype = self.row.dtype + + # Determine which triples to keep and where to put the new ones. + full_keep = self.col - self.row != k + if k < 0: + max_index = min(M+k, N) + if values.ndim: + max_index = min(max_index, len(values)) + keep = np.logical_or(full_keep, self.col >= max_index) + new_row = np.arange(-k, -k + max_index, dtype=idx_dtype) + new_col = np.arange(max_index, dtype=idx_dtype) + else: + max_index = min(M, N-k) + if values.ndim: + max_index = min(max_index, len(values)) + keep = np.logical_or(full_keep, self.row >= max_index) + new_row = np.arange(max_index, dtype=idx_dtype) + new_col = np.arange(k, k + max_index, dtype=idx_dtype) + + # Define the array of data consisting of the entries to be added. + if values.ndim: + new_data = values[:max_index] + else: + new_data = np.empty(max_index, dtype=self.dtype) + new_data[:] = values + + # Update the internal structure. + self.row = np.concatenate((self.row[keep], new_row)) + self.col = np.concatenate((self.col[keep], new_col)) + self.data = np.concatenate((self.data[keep], new_data)) + self.has_canonical_format = False + + # needed by _data_matrix + def _with_data(self,data,copy=True): + """Returns a matrix with the same sparsity structure as self, + but with different data. By default the index arrays + (i.e. .row and .col) are copied. + """ + if copy: + return coo_matrix((data, (self.row.copy(), self.col.copy())), + shape=self.shape, dtype=data.dtype) + else: + return coo_matrix((data, (self.row, self.col)), + shape=self.shape, dtype=data.dtype) + + def sum_duplicates(self): + """Eliminate duplicate matrix entries by adding them together + + This is an *in place* operation + """ + if self.has_canonical_format or len(self.data) == 0: + return + order = np.lexsort((self.row,self.col)) + self.row = self.row[order] + self.col = self.col[order] + self.data = self.data[order] + unique_mask = ((self.row[1:] != self.row[:-1]) | + (self.col[1:] != self.col[:-1])) + unique_mask = np.append(True, unique_mask) + self.row = self.row[unique_mask] + self.col = self.col[unique_mask] + unique_inds, = np.nonzero(unique_mask) + self.data = np.add.reduceat(self.data, unique_inds, dtype=self.dtype) + self.has_canonical_format = True + + ########################### + # Multiplication handlers # + ########################### + + def _mul_vector(self, other): + #output array + result = np.zeros(self.shape[0], dtype=upcast_char(self.dtype.char, + other.dtype.char)) + coo_matvec(self.nnz, self.row, self.col, self.data, other, result) + return result + + def _mul_multivector(self, other): + return np.hstack([self._mul_vector(col).reshape(-1,1) for col in other.T]) + + +def isspmatrix_coo(x): + return isinstance(x, coo_matrix) + +"""A generic class to build line-oriented command interpreters. + +Interpreters constructed with this class obey the following conventions: + +1. End of file on input is processed as the command 'EOF'. +2. A command is parsed out of each line by collecting the prefix composed + of characters in the identchars member. +3. A command `foo' is dispatched to a method 'do_foo()'; the do_ method + is passed a single argument consisting of the remainder of the line. +4. 
Typing an empty line repeats the last command. (Actually, it calls the + method `emptyline', which may be overridden in a subclass.) +5. There is a predefined `help' method. Given an argument `topic', it + calls the command `help_topic'. With no arguments, it lists all topics + with defined help_ functions, broken into up to three topics; documented + commands, miscellaneous help topics, and undocumented commands. +6. The command '?' is a synonym for `help'. The command '!' is a synonym + for `shell', if a do_shell method exists. +7. If completion is enabled, completing commands will be done automatically, + and completing of commands args is done by calling complete_foo() with + arguments text, line, begidx, endidx. text is string we are matching + against, all returned matches must begin with it. line is the current + input line (lstripped), begidx and endidx are the beginning and end + indexes of the text being matched, which could be used to provide + different completion depending upon which position the argument is in. + +The `default' method may be overridden to intercept commands for which there +is no do_ method. + +The `completedefault' method may be overridden to intercept completions for +commands that have no complete_ method. + +The data member `self.ruler' sets the character used to draw separator lines +in the help messages. If empty, no ruler line is drawn. It defaults to "=". + +If the value of `self.intro' is nonempty when the cmdloop method is called, +it is printed out on interpreter startup. This value may be overridden +via an optional argument to the cmdloop() method. + +The data members `self.doc_header', `self.misc_header', and +`self.undoc_header' set the headers used for the help function's +listings of documented functions, miscellaneous topics, and undocumented +functions respectively. + +These interpreters use raw_input; thus, if the readline module is loaded, +they automatically support Emacs-like command history and editing features. +""" + +import string + +__all__ = ["Cmd"] + +PROMPT = '(Cmd) ' +IDENTCHARS = string.ascii_letters + string.digits + '_' + +class Cmd: + """A simple framework for writing line-oriented command interpreters. + + These are often useful for test harnesses, administrative tools, and + prototypes that will later be wrapped in a more sophisticated interface. + + A Cmd instance or subclass instance is a line-oriented interpreter + framework. There is no good reason to instantiate Cmd itself; rather, + it's useful as a superclass of an interpreter class you define yourself + in order to inherit Cmd's methods and encapsulate action methods. + + """ + prompt = PROMPT + identchars = IDENTCHARS + ruler = '=' + lastcmd = '' + intro = None + doc_leader = "" + doc_header = "Documented commands (type help ):" + misc_header = "Miscellaneous help topics:" + undoc_header = "Undocumented commands:" + nohelp = "*** No help on %s" + use_rawinput = 1 + + def __init__(self, completekey='tab', stdin=None, stdout=None): + """Instantiate a line-oriented interpreter framework. + + The optional argument 'completekey' is the readline name of a + completion key; it defaults to the Tab key. If completekey is + not None and the readline module is available, command completion + is done automatically. The optional arguments stdin and stdout + specify alternate input and output file objects; if not specified, + sys.stdin and sys.stdout are used. 
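+        Note that cmdloop() reads from self.stdin only when use_rawinput
+        is set to a false value; otherwise input comes from raw_input().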
+ + """ + import sys + if stdin is not None: + self.stdin = stdin + else: + self.stdin = sys.stdin + if stdout is not None: + self.stdout = stdout + else: + self.stdout = sys.stdout + self.cmdqueue = [] + self.completekey = completekey + + def cmdloop(self, intro=None): + """Repeatedly issue a prompt, accept input, parse an initial prefix + off the received input, and dispatch to action methods, passing them + the remainder of the line as argument. + + """ + + self.preloop() + if self.use_rawinput and self.completekey: + try: + import readline + self.old_completer = readline.get_completer() + readline.set_completer(self.complete) + readline.parse_and_bind(self.completekey+": complete") + except ImportError: + pass + try: + if intro is not None: + self.intro = intro + if self.intro: + self.stdout.write(str(self.intro)+"\n") + stop = None + while not stop: + if self.cmdqueue: + line = self.cmdqueue.pop(0) + else: + if self.use_rawinput: + try: + line = raw_input(self.prompt) + except EOFError: + line = 'EOF' + else: + self.stdout.write(self.prompt) + self.stdout.flush() + line = self.stdin.readline() + if not len(line): + line = 'EOF' + else: + line = line.rstrip('\r\n') + line = self.precmd(line) + stop = self.onecmd(line) + stop = self.postcmd(stop, line) + self.postloop() + finally: + if self.use_rawinput and self.completekey: + try: + import readline + readline.set_completer(self.old_completer) + except ImportError: + pass + + + def precmd(self, line): + """Hook method executed just before the command line is + interpreted, but after the input prompt is generated and issued. + + """ + return line + + def postcmd(self, stop, line): + """Hook method executed just after a command dispatch is finished.""" + return stop + + def preloop(self): + """Hook method executed once when the cmdloop() method is called.""" + pass + + def postloop(self): + """Hook method executed once when the cmdloop() method is about to + return. + + """ + pass + + def parseline(self, line): + """Parse the line into a command name and a string containing + the arguments. Returns a tuple containing (command, args, line). + 'command' and 'args' may be None if the line couldn't be parsed. + """ + line = line.strip() + if not line: + return None, None, line + elif line[0] == '?': + line = 'help ' + line[1:] + elif line[0] == '!': + if hasattr(self, 'do_shell'): + line = 'shell ' + line[1:] + else: + return None, None, line + i, n = 0, len(line) + while i < n and line[i] in self.identchars: i = i+1 + cmd, arg = line[:i], line[i:].strip() + return cmd, arg, line + + def onecmd(self, line): + """Interpret the argument as though it had been typed in response + to the prompt. + + This may be overridden, but should not normally need to be; + see the precmd() and postcmd() methods for useful execution hooks. + The return value is a flag indicating whether interpretation of + commands by the interpreter should stop. + + """ + cmd, arg, line = self.parseline(line) + if not line: + return self.emptyline() + if cmd is None: + return self.default(line) + self.lastcmd = line + if line == 'EOF' : + self.lastcmd = '' + if cmd == '': + return self.default(line) + else: + try: + func = getattr(self, 'do_' + cmd) + except AttributeError: + return self.default(line) + return func(arg) + + def emptyline(self): + """Called when an empty line is entered in response to the prompt. + + If this method is not overridden, it repeats the last nonempty + command entered. 
+ + """ + if self.lastcmd: + return self.onecmd(self.lastcmd) + + def default(self, line): + """Called on an input line when the command prefix is not recognized. + + If this method is not overridden, it prints an error message and + returns. + + """ + self.stdout.write('*** Unknown syntax: %s\n'%line) + + def completedefault(self, *ignored): + """Method called to complete an input line when no command-specific + complete_*() method is available. + + By default, it returns an empty list. + + """ + return [] + + def completenames(self, text, *ignored): + dotext = 'do_'+text + return [a[3:] for a in self.get_names() if a.startswith(dotext)] + + def complete(self, text, state): + """Return the next possible completion for 'text'. + + If a command has not been entered, then complete against command list. + Otherwise try to call complete_ to get list of completions. + """ + if state == 0: + import readline + origline = readline.get_line_buffer() + line = origline.lstrip() + stripped = len(origline) - len(line) + begidx = readline.get_begidx() - stripped + endidx = readline.get_endidx() - stripped + if begidx>0: + cmd, args, foo = self.parseline(line) + if cmd == '': + compfunc = self.completedefault + else: + try: + compfunc = getattr(self, 'complete_' + cmd) + except AttributeError: + compfunc = self.completedefault + else: + compfunc = self.completenames + self.completion_matches = compfunc(text, line, begidx, endidx) + try: + return self.completion_matches[state] + except IndexError: + return None + + def get_names(self): + # This method used to pull in base class attributes + # at a time dir() didn't do it yet. + return dir(self.__class__) + + def complete_help(self, *args): + commands = set(self.completenames(*args)) + topics = set(a[5:] for a in self.get_names() + if a.startswith('help_' + args[0])) + return list(commands | topics) + + def do_help(self, arg): + 'List available commands with "help" or detailed help with "help cmd".' + if arg: + # XXX check arg syntax + try: + func = getattr(self, 'help_' + arg) + except AttributeError: + try: + doc=getattr(self, 'do_' + arg).__doc__ + if doc: + self.stdout.write("%s\n"%str(doc)) + return + except AttributeError: + pass + self.stdout.write("%s\n"%str(self.nohelp % (arg,))) + return + func() + else: + names = self.get_names() + cmds_doc = [] + cmds_undoc = [] + help = {} + for name in names: + if name[:5] == 'help_': + help[name[5:]]=1 + names.sort() + # There can be duplicates if routines overridden + prevname = '' + for name in names: + if name[:3] == 'do_': + if name == prevname: + continue + prevname = name + cmd=name[3:] + if cmd in help: + cmds_doc.append(cmd) + del help[cmd] + elif getattr(self, name).__doc__: + cmds_doc.append(cmd) + else: + cmds_undoc.append(cmd) + self.stdout.write("%s\n"%str(self.doc_leader)) + self.print_topics(self.doc_header, cmds_doc, 15,80) + self.print_topics(self.misc_header, help.keys(),15,80) + self.print_topics(self.undoc_header, cmds_undoc, 15,80) + + def print_topics(self, header, cmds, cmdlen, maxcol): + if cmds: + self.stdout.write("%s\n"%str(header)) + if self.ruler: + self.stdout.write("%s\n"%str(self.ruler * len(header))) + self.columnize(cmds, maxcol-1) + self.stdout.write("\n") + + def columnize(self, list, displaywidth=80): + """Display a list of strings as a compact set of columns. + + Each column is only as wide as necessary. + Columns are separated by two spaces (one was not legible enough). 
+ """ + if not list: + self.stdout.write("\n") + return + nonstrings = [i for i in range(len(list)) + if not isinstance(list[i], str)] + if nonstrings: + raise TypeError, ("list[i] not a string for i in %s" % + ", ".join(map(str, nonstrings))) + size = len(list) + if size == 1: + self.stdout.write('%s\n'%str(list[0])) + return + # Try every row count from 1 upwards + for nrows in range(1, len(list)): + ncols = (size+nrows-1) // nrows + colwidths = [] + totwidth = -2 + for col in range(ncols): + colwidth = 0 + for row in range(nrows): + i = row + nrows*col + if i >= size: + break + x = list[i] + colwidth = max(colwidth, len(x)) + colwidths.append(colwidth) + totwidth += colwidth + 2 + if totwidth > displaywidth: + break + if totwidth <= displaywidth: + break + else: + nrows = len(list) + ncols = 1 + colwidths = [0] + for row in range(nrows): + texts = [] + for col in range(ncols): + i = row + nrows*col + if i >= size: + x = "" + else: + x = list[i] + texts.append(x) + while texts and not texts[-1]: + del texts[-1] + for col in range(len(texts)): + texts[col] = texts[col].ljust(colwidths[col]) + self.stdout.write("%s\n"%str(" ".join(texts))) + +""" +The temp module provides a NamedTemporaryFile that can be re-opened on any +platform. Most platforms use the standard Python tempfile.TemporaryFile class, +but MS Windows users are given a custom class. + +This is needed because in Windows NT, the default implementation of +NamedTemporaryFile uses the O_TEMPORARY flag, and thus cannot be reopened [1]. + +1: http://mail.python.org/pipermail/python-list/2005-December/359474.html +""" + +import os +import tempfile +from django.core.files.utils import FileProxyMixin + +__all__ = ('NamedTemporaryFile', 'gettempdir',) + +if os.name == 'nt': + class TemporaryFile(FileProxyMixin): + """ + Temporary file object constructor that works in Windows and supports + reopening of the temporary file in windows. 
+ """ + def __init__(self, mode='w+b', bufsize=-1, suffix='', prefix='', + dir=None): + fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix, + dir=dir) + self.name = name + self.file = os.fdopen(fd, mode, bufsize) + self.close_called = False + + # Because close can be called during shutdown + # we need to cache os.unlink and access it + # as self.unlink only + unlink = os.unlink + + def close(self): + if not self.close_called: + self.close_called = True + try: + self.file.close() + except (OSError, IOError): + pass + try: + self.unlink(self.name) + except (OSError): + pass + + def __del__(self): + self.close() + + NamedTemporaryFile = TemporaryFile +else: + NamedTemporaryFile = tempfile.NamedTemporaryFile + +gettempdir = tempfile.gettempdir + +#!/usr/bin/env python +import os +import sys + +import django + +from django.conf import settings + + +DEFAULT_SETTINGS = dict( + INSTALLED_APPS=[ + "django.contrib.auth", + "django.contrib.contenttypes", + "django.contrib.sites", + "pinax.ratings", + "pinax.ratings.tests" + ], + MIDDLEWARE_CLASSES=[], + DATABASES={ + "default": { + "ENGINE": "django.db.backends.sqlite3", + "NAME": ":memory:", + } + }, + SITE_ID=1, + ROOT_URLCONF="pinax.ratings.tests.urls", + SECRET_KEY="notasecret", +) + + +def runtests(*test_args): + if not settings.configured: + settings.configure(**DEFAULT_SETTINGS) + + django.setup() + + parent = os.path.dirname(os.path.abspath(__file__)) + sys.path.insert(0, parent) + + try: + from django.test.runner import DiscoverRunner + runner_class = DiscoverRunner + test_args = ["pinax.ratings.tests"] + except ImportError: + from django.test.simple import DjangoTestSuiteRunner + runner_class = DjangoTestSuiteRunner + test_args = ["tests"] + + failures = runner_class(verbosity=1, interactive=True, failfast=False).run_tests(test_args) + sys.exit(failures) + + +if __name__ == "__main__": + runtests(*sys.argv[1:]) + +# Copyright (C) 2014 Nippon Telegraph and Telephone Corporation. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" + Defines data types and models required specifically for IPv4 support. +""" + +import logging + +from ryu.lib.packet.bgp import IPAddrPrefix +from ryu.lib.packet.bgp import RF_IPv4_UC + +from ryu.services.protocols.bgp.info_base.base import Path +from ryu.services.protocols.bgp.info_base.base import Table +from ryu.services.protocols.bgp.info_base.base import Destination +from ryu.services.protocols.bgp.info_base.base import NonVrfPathProcessingMixin +from ryu.services.protocols.bgp.info_base.base import PrefixFilter + +LOG = logging.getLogger('bgpspeaker.info_base.ipv4') + + +class IPv4Dest(Destination, NonVrfPathProcessingMixin): + """VPNv4 Destination + + Store IPv4 Paths. 
+ """ + ROUTE_FAMILY = RF_IPv4_UC + + def _best_path_lost(self): + old_best_path = self._best_path + NonVrfPathProcessingMixin._best_path_lost(self) + self._core_service._signal_bus.best_path_changed(old_best_path, True) + + def _new_best_path(self, best_path): + NonVrfPathProcessingMixin._new_best_path(self, best_path) + self._core_service._signal_bus.best_path_changed(best_path, False) + + +class Ipv4Table(Table): + """Global table to store IPv4 routing information. + + Uses `IPv4Dest` to store destination information for each known vpnv4 + paths. + """ + ROUTE_FAMILY = RF_IPv4_UC + VPN_DEST_CLASS = IPv4Dest + + def __init__(self, core_service, signal_bus): + super(Ipv4Table, self).__init__(None, core_service, signal_bus) + + def _table_key(self, nlri): + """Return a key that will uniquely identify this NLRI inside + this table. + """ + return nlri.prefix + + def _create_dest(self, nlri): + return self.VPN_DEST_CLASS(self, nlri) + + def __str__(self): + return '%s(scope_id: %s, rf: %s)' % ( + self.__class__.__name__, self.scope_id, self.route_family + ) + + +class Ipv4Path(Path): + """Represents a way of reaching an VPNv4 destination.""" + ROUTE_FAMILY = RF_IPv4_UC + VRF_PATH_CLASS = None # defined in init - anti cyclic import hack + NLRI_CLASS = IPAddrPrefix + + def __init__(self, *args, **kwargs): + super(Ipv4Path, self).__init__(*args, **kwargs) + from ryu.services.protocols.bgp.info_base.vrf4 import Vrf4Path + self.VRF_PATH_CLASS = Vrf4Path + + +class Ipv4PrefixFilter(PrefixFilter): + """IPv4 Prefix Filter class""" + ROUTE_FAMILY = RF_IPv4_UC + +""" +This is Victor Stinner's pure-Python implementation of PEP 383: the "surrogateescape" error +handler of Python 3. + +Source: misc/python/surrogateescape.py in https://bitbucket.org/haypo/misc +""" + +# This code is released under the Python license and the BSD 2-clause license + +import codecs +import sys + +from future import utils + + +FS_ERRORS = 'surrogateescape' + +# # -- Python 2/3 compatibility ------------------------------------- +# FS_ERRORS = 'my_surrogateescape' + +def u(text): + if utils.PY3: + return text + else: + return text.decode('unicode_escape') + +def b(data): + if utils.PY3: + return data.encode('latin1') + else: + return data + +if utils.PY3: + _unichr = chr + bytes_chr = lambda code: bytes((code,)) +else: + _unichr = unichr + bytes_chr = chr + +def surrogateescape_handler(exc): + """ + Pure Python implementation of the PEP 383: the "surrogateescape" error + handler of Python 3. Undecodable bytes will be replaced by a Unicode + character U+DCxx on decoding, and these are translated into the + original bytes on encoding. + """ + mystring = exc.object[exc.start:exc.end] + + try: + if isinstance(exc, UnicodeDecodeError): + # mystring is a byte-string in this case + decoded = replace_surrogate_decode(mystring) + elif isinstance(exc, UnicodeEncodeError): + # In the case of u'\udcc3'.encode('ascii', + # 'this_surrogateescape_handler'), both Python 2.x and 3.x raise an + # exception anyway after this function is called, even though I think + # it's doing what it should. It seems that the strict encoder is called + # to encode the unicode string that this function returns ... 
+ decoded = replace_surrogate_encode(mystring) + else: + raise exc + except NotASurrogateError: + raise exc + return (decoded, exc.end) + + +class NotASurrogateError(Exception): + pass + + +def replace_surrogate_encode(mystring): + """ + Returns a (unicode) string, not the more logical bytes, because the codecs + register_error functionality expects this. + """ + decoded = [] + for ch in mystring: + # if utils.PY3: + # code = ch + # else: + code = ord(ch) + + # The following magic comes from Py3.3's Python/codecs.c file: + if not 0xD800 <= code <= 0xDCFF: + # Not a surrogate. Fail with the original exception. + raise exc + # mybytes = [0xe0 | (code >> 12), + # 0x80 | ((code >> 6) & 0x3f), + # 0x80 | (code & 0x3f)] + # Is this a good idea? + if 0xDC00 <= code <= 0xDC7F: + decoded.append(_unichr(code - 0xDC00)) + elif code <= 0xDCFF: + decoded.append(_unichr(code - 0xDC00)) + else: + raise NotASurrogateError + return str().join(decoded) + + +def replace_surrogate_decode(mybytes): + """ + Returns a (unicode) string + """ + decoded = [] + for ch in mybytes: + # We may be parsing newbytes (in which case ch is an int) or a native + # str on Py2 + if isinstance(ch, int): + code = ch + else: + code = ord(ch) + if 0x80 <= code <= 0xFF: + decoded.append(_unichr(0xDC00 + code)) + elif code <= 0x7F: + decoded.append(_unichr(code)) + else: + # # It may be a bad byte + # # Try swallowing it. + # continue + # print("RAISE!") + raise NotASurrogateError + return str().join(decoded) + + +def encodefilename(fn): + if FS_ENCODING == 'ascii': + # ASCII encoder of Python 2 expects that the error handler returns a + # Unicode string encodable to ASCII, whereas our surrogateescape error + # handler has to return bytes in 0x80-0xFF range. + encoded = [] + for index, ch in enumerate(fn): + code = ord(ch) + if code < 128: + ch = bytes_chr(code) + elif 0xDC80 <= code <= 0xDCFF: + ch = bytes_chr(code - 0xDC00) + else: + raise UnicodeEncodeError(FS_ENCODING, + fn, index, index+1, + 'ordinal not in range(128)') + encoded.append(ch) + return bytes().join(encoded) + elif FS_ENCODING == 'utf-8': + # UTF-8 encoder of Python 2 encodes surrogates, so U+DC80-U+DCFF + # doesn't go through our error handler + encoded = [] + for index, ch in enumerate(fn): + code = ord(ch) + if 0xD800 <= code <= 0xDFFF: + if 0xDC80 <= code <= 0xDCFF: + ch = bytes_chr(code - 0xDC00) + encoded.append(ch) + else: + raise UnicodeEncodeError( + FS_ENCODING, + fn, index, index+1, 'surrogates not allowed') + else: + ch_utf8 = ch.encode('utf-8') + encoded.append(ch_utf8) + return bytes().join(encoded) + else: + return fn.encode(FS_ENCODING, FS_ERRORS) + +def decodefilename(fn): + return fn.decode(FS_ENCODING, FS_ERRORS) + +FS_ENCODING = 'ascii'; fn = b('[abc\xff]'); encoded = u('[abc\udcff]') +# FS_ENCODING = 'cp932'; fn = b('[abc\x81\x00]'); encoded = u('[abc\udc81\x00]') +# FS_ENCODING = 'UTF-8'; fn = b('[abc\xff]'); encoded = u('[abc\udcff]') + + +# normalize the filesystem encoding name. +# For example, we expect "utf-8", not "UTF8". 
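+# codecs.lookup('UTF8').name == 'utf-8', for instance, so the comparisons
+# against 'ascii' and 'utf-8' above are reliable after this normalization.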
+FS_ENCODING = codecs.lookup(FS_ENCODING).name + + +def register_surrogateescape(): + """ + Registers the surrogateescape error handler on Python 2 (only) + """ + if utils.PY3: + return + try: + codecs.lookup_error(FS_ERRORS) + except LookupError: + codecs.register_error(FS_ERRORS, surrogateescape_handler) + + +if __name__ == '__main__': + pass + # # Tests: + # register_surrogateescape() + + # b = decodefilename(fn) + # assert b == encoded, "%r != %r" % (b, encoded) + # c = encodefilename(b) + # assert c == fn, '%r != %r' % (c, fn) + # # print("ok") + + + +from _testcapi import _test_structmembersType, \ + CHAR_MAX, CHAR_MIN, UCHAR_MAX, \ + SHRT_MAX, SHRT_MIN, USHRT_MAX, \ + INT_MAX, INT_MIN, UINT_MAX, \ + LONG_MAX, LONG_MIN, ULONG_MAX, \ + LLONG_MAX, LLONG_MIN, ULLONG_MAX, \ + PY_SSIZE_T_MAX, PY_SSIZE_T_MIN + +import unittest +from test import support + +ts=_test_structmembersType(False, # T_BOOL + 1, # T_BYTE + 2, # T_UBYTE + 3, # T_SHORT + 4, # T_USHORT + 5, # T_INT + 6, # T_UINT + 7, # T_LONG + 8, # T_ULONG + 23, # T_PYSSIZET + 9.99999,# T_FLOAT + 10.1010101010, # T_DOUBLE + "hi" # T_STRING_INPLACE + ) + +class ReadWriteTests(unittest.TestCase): + + def test_bool(self): + ts.T_BOOL = True + self.assertEqual(ts.T_BOOL, True) + ts.T_BOOL = False + self.assertEqual(ts.T_BOOL, False) + self.assertRaises(TypeError, setattr, ts, 'T_BOOL', 1) + + def test_byte(self): + ts.T_BYTE = CHAR_MAX + self.assertEqual(ts.T_BYTE, CHAR_MAX) + ts.T_BYTE = CHAR_MIN + self.assertEqual(ts.T_BYTE, CHAR_MIN) + ts.T_UBYTE = UCHAR_MAX + self.assertEqual(ts.T_UBYTE, UCHAR_MAX) + + def test_short(self): + ts.T_SHORT = SHRT_MAX + self.assertEqual(ts.T_SHORT, SHRT_MAX) + ts.T_SHORT = SHRT_MIN + self.assertEqual(ts.T_SHORT, SHRT_MIN) + ts.T_USHORT = USHRT_MAX + self.assertEqual(ts.T_USHORT, USHRT_MAX) + + def test_int(self): + ts.T_INT = INT_MAX + self.assertEqual(ts.T_INT, INT_MAX) + ts.T_INT = INT_MIN + self.assertEqual(ts.T_INT, INT_MIN) + ts.T_UINT = UINT_MAX + self.assertEqual(ts.T_UINT, UINT_MAX) + + def test_long(self): + ts.T_LONG = LONG_MAX + self.assertEqual(ts.T_LONG, LONG_MAX) + ts.T_LONG = LONG_MIN + self.assertEqual(ts.T_LONG, LONG_MIN) + ts.T_ULONG = ULONG_MAX + self.assertEqual(ts.T_ULONG, ULONG_MAX) + + def test_py_ssize_t(self): + ts.T_PYSSIZET = PY_SSIZE_T_MAX + self.assertEqual(ts.T_PYSSIZET, PY_SSIZE_T_MAX) + ts.T_PYSSIZET = PY_SSIZE_T_MIN + self.assertEqual(ts.T_PYSSIZET, PY_SSIZE_T_MIN) + + @unittest.skipUnless(hasattr(ts, "T_LONGLONG"), "long long not present") + def test_longlong(self): + ts.T_LONGLONG = LLONG_MAX + self.assertEqual(ts.T_LONGLONG, LLONG_MAX) + ts.T_LONGLONG = LLONG_MIN + self.assertEqual(ts.T_LONGLONG, LLONG_MIN) + + ts.T_ULONGLONG = ULLONG_MAX + self.assertEqual(ts.T_ULONGLONG, ULLONG_MAX) + + ## make sure these will accept a plain int as well as a long + ts.T_LONGLONG = 3 + self.assertEqual(ts.T_LONGLONG, 3) + ts.T_ULONGLONG = 4 + self.assertEqual(ts.T_ULONGLONG, 4) + + def test_bad_assignments(self): + integer_attributes = [ + 'T_BOOL', + 'T_BYTE', 'T_UBYTE', + 'T_SHORT', 'T_USHORT', + 'T_INT', 'T_UINT', + 'T_LONG', 'T_ULONG', + 'T_PYSSIZET' + ] + if hasattr(ts, 'T_LONGLONG'): + integer_attributes.extend(['T_LONGLONG', 'T_ULONGLONG']) + + # issue8014: this produced 'bad argument to internal function' + # internal error + for nonint in None, 3.2j, "full of eels", {}, []: + for attr in integer_attributes: + self.assertRaises(TypeError, setattr, ts, attr, nonint) + + def test_inplace_string(self): + self.assertEqual(ts.T_STRING_INPLACE, "hi") + self.assertRaises(TypeError, 
setattr, ts, "T_STRING_INPLACE", "s") + self.assertRaises(TypeError, delattr, ts, "T_STRING_INPLACE") + + +class TestWarnings(unittest.TestCase): + + def test_byte_max(self): + with support.check_warnings(('', RuntimeWarning)): + ts.T_BYTE = CHAR_MAX+1 + + def test_byte_min(self): + with support.check_warnings(('', RuntimeWarning)): + ts.T_BYTE = CHAR_MIN-1 + + def test_ubyte_max(self): + with support.check_warnings(('', RuntimeWarning)): + ts.T_UBYTE = UCHAR_MAX+1 + + def test_short_max(self): + with support.check_warnings(('', RuntimeWarning)): + ts.T_SHORT = SHRT_MAX+1 + + def test_short_min(self): + with support.check_warnings(('', RuntimeWarning)): + ts.T_SHORT = SHRT_MIN-1 + + def test_ushort_max(self): + with support.check_warnings(('', RuntimeWarning)): + ts.T_USHORT = USHRT_MAX+1 + + +def test_main(verbose=None): + support.run_unittest(__name__) + +if __name__ == "__main__": + test_main(verbose=True) + + +""" +opcode module - potentially shared between dis and other modules which +operate on bytecodes (e.g. peephole optimizers). +""" + +__all__ = ["cmp_op", "hasconst", "hasname", "hasjrel", "hasjabs", + "haslocal", "hascompare", "hasfree", "opname", "opmap", + "HAVE_ARGUMENT", "EXTENDED_ARG", "hasnargs"] + +# It's a chicken-and-egg I'm afraid: +# We're imported before _opcode's made. +# With exception unheeded +# (stack_effect is not needed) +# Both our chickens and eggs are allayed. +# --Larry Hastings, 2013/11/23 + +try: + from _opcode import stack_effect + __all__.append('stack_effect') +except ImportError: + pass + +cmp_op = ('<', '<=', '==', '!=', '>', '>=', 'in', 'not in', 'is', + 'is not', 'exception match', 'BAD') + +hasconst = [] +hasname = [] +hasjrel = [] +hasjabs = [] +haslocal = [] +hascompare = [] +hasfree = [] +hasnargs = [] + +opmap = {} +opname = [''] * 256 +for op in range(256): opname[op] = '<%r>' % (op,) +del op + +def def_op(name, op): + opname[op] = name + opmap[name] = op + +def name_op(name, op): + def_op(name, op) + hasname.append(op) + +def jrel_op(name, op): + def_op(name, op) + hasjrel.append(op) + +def jabs_op(name, op): + def_op(name, op) + hasjabs.append(op) + +# Instruction opcodes for compiled code +# Blank lines correspond to available opcodes + +def_op('POP_TOP', 1) +def_op('ROT_TWO', 2) +def_op('ROT_THREE', 3) +def_op('DUP_TOP', 4) +def_op('DUP_TOP_TWO', 5) + +def_op('NOP', 9) +def_op('UNARY_POSITIVE', 10) +def_op('UNARY_NEGATIVE', 11) +def_op('UNARY_NOT', 12) + +def_op('UNARY_INVERT', 15) + +def_op('BINARY_POWER', 19) +def_op('BINARY_MULTIPLY', 20) + +def_op('BINARY_MODULO', 22) +def_op('BINARY_ADD', 23) +def_op('BINARY_SUBTRACT', 24) +def_op('BINARY_SUBSCR', 25) +def_op('BINARY_FLOOR_DIVIDE', 26) +def_op('BINARY_TRUE_DIVIDE', 27) +def_op('INPLACE_FLOOR_DIVIDE', 28) +def_op('INPLACE_TRUE_DIVIDE', 29) + +def_op('STORE_MAP', 54) +def_op('INPLACE_ADD', 55) +def_op('INPLACE_SUBTRACT', 56) +def_op('INPLACE_MULTIPLY', 57) + +def_op('INPLACE_MODULO', 59) +def_op('STORE_SUBSCR', 60) +def_op('DELETE_SUBSCR', 61) +def_op('BINARY_LSHIFT', 62) +def_op('BINARY_RSHIFT', 63) +def_op('BINARY_AND', 64) +def_op('BINARY_XOR', 65) +def_op('BINARY_OR', 66) +def_op('INPLACE_POWER', 67) +def_op('GET_ITER', 68) + +def_op('PRINT_EXPR', 70) +def_op('LOAD_BUILD_CLASS', 71) +def_op('YIELD_FROM', 72) + +def_op('INPLACE_LSHIFT', 75) +def_op('INPLACE_RSHIFT', 76) +def_op('INPLACE_AND', 77) +def_op('INPLACE_XOR', 78) +def_op('INPLACE_OR', 79) +def_op('BREAK_LOOP', 80) +def_op('WITH_CLEANUP', 81) + +def_op('RETURN_VALUE', 83) +def_op('IMPORT_STAR', 84) + +def_op('YIELD_VALUE', 
86) +def_op('POP_BLOCK', 87) +def_op('END_FINALLY', 88) +def_op('POP_EXCEPT', 89) + +HAVE_ARGUMENT = 90 # Opcodes from here have an argument: + +name_op('STORE_NAME', 90) # Index in name list +name_op('DELETE_NAME', 91) # "" +def_op('UNPACK_SEQUENCE', 92) # Number of tuple items +jrel_op('FOR_ITER', 93) +def_op('UNPACK_EX', 94) +name_op('STORE_ATTR', 95) # Index in name list +name_op('DELETE_ATTR', 96) # "" +name_op('STORE_GLOBAL', 97) # "" +name_op('DELETE_GLOBAL', 98) # "" +def_op('LOAD_CONST', 100) # Index in const list +hasconst.append(100) +name_op('LOAD_NAME', 101) # Index in name list +def_op('BUILD_TUPLE', 102) # Number of tuple items +def_op('BUILD_LIST', 103) # Number of list items +def_op('BUILD_SET', 104) # Number of set items +def_op('BUILD_MAP', 105) # Number of dict entries (upto 255) +name_op('LOAD_ATTR', 106) # Index in name list +def_op('COMPARE_OP', 107) # Comparison operator +hascompare.append(107) +name_op('IMPORT_NAME', 108) # Index in name list +name_op('IMPORT_FROM', 109) # Index in name list + +jrel_op('JUMP_FORWARD', 110) # Number of bytes to skip +jabs_op('JUMP_IF_FALSE_OR_POP', 111) # Target byte offset from beginning of code +jabs_op('JUMP_IF_TRUE_OR_POP', 112) # "" +jabs_op('JUMP_ABSOLUTE', 113) # "" +jabs_op('POP_JUMP_IF_FALSE', 114) # "" +jabs_op('POP_JUMP_IF_TRUE', 115) # "" + +name_op('LOAD_GLOBAL', 116) # Index in name list + +jabs_op('CONTINUE_LOOP', 119) # Target address +jrel_op('SETUP_LOOP', 120) # Distance to target address +jrel_op('SETUP_EXCEPT', 121) # "" +jrel_op('SETUP_FINALLY', 122) # "" + +def_op('LOAD_FAST', 124) # Local variable number +haslocal.append(124) +def_op('STORE_FAST', 125) # Local variable number +haslocal.append(125) +def_op('DELETE_FAST', 126) # Local variable number +haslocal.append(126) + +def_op('RAISE_VARARGS', 130) # Number of raise arguments (1, 2, or 3) +def_op('CALL_FUNCTION', 131) # #args + (#kwargs << 8) +hasnargs.append(131) +def_op('MAKE_FUNCTION', 132) # Number of args with default values +def_op('BUILD_SLICE', 133) # Number of items +def_op('MAKE_CLOSURE', 134) +def_op('LOAD_CLOSURE', 135) +hasfree.append(135) +def_op('LOAD_DEREF', 136) +hasfree.append(136) +def_op('STORE_DEREF', 137) +hasfree.append(137) +def_op('DELETE_DEREF', 138) +hasfree.append(138) + +def_op('CALL_FUNCTION_VAR', 140) # #args + (#kwargs << 8) +hasnargs.append(140) +def_op('CALL_FUNCTION_KW', 141) # #args + (#kwargs << 8) +hasnargs.append(141) +def_op('CALL_FUNCTION_VAR_KW', 142) # #args + (#kwargs << 8) +hasnargs.append(142) + +jrel_op('SETUP_WITH', 143) + +def_op('LIST_APPEND', 145) +def_op('SET_ADD', 146) +def_op('MAP_ADD', 147) + +def_op('LOAD_CLASSDEREF', 148) +hasfree.append(148) + +def_op('EXTENDED_ARG', 144) +EXTENDED_ARG = 144 + +del def_op, name_op, jrel_op, jabs_op + +from __future__ import absolute_import +from typing import Any, Dict, Optional, Text + +# This file is adapted from samples/shellinabox/ssh-krb-wrapper in +# https://github.com/davidben/webathena, which has the following +# license: +# +# Copyright (c) 2013 David Benjamin and Alan Huang +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation files +# (the "Software"), to deal in the Software without restriction, +# including without limitation the rights to use, copy, modify, merge, +# publish, distribute, sublicense, and/or sell copies of the Software, +# and to permit persons to whom the Software is furnished to do so, +# subject to the following conditions: +# +# The above copyright notice 
and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +import base64 +import struct +import six + +# Some DER encoding stuff. Bleh. This is because the ccache contains a +# DER-encoded krb5 Ticket structure, whereas Webathena deserializes +# into the various fields. Re-encoding in the client would be easy as +# there is already an ASN.1 implementation, but in the interest of +# limiting MIT Kerberos's exposure to malformed ccaches, encode it +# ourselves. To that end, here's the laziest DER encoder ever. +def der_encode_length(length): + # type: (int) -> str + if length <= 127: + return chr(length) + out = "" + while length > 0: + out = chr(length & 0xff) + out + length >>= 8 + out = chr(len(out) | 0x80) + out + return out + +def der_encode_tlv(tag, value): + # type: (int, str) -> str + return chr(tag) + der_encode_length(len(value)) + value + +def der_encode_integer_value(val): + # type: (int) -> str + if not isinstance(val, six.integer_types): + raise TypeError("int") + # base 256, MSB first, two's complement, minimum number of octets + # necessary. This has a number of annoying edge cases: + # * 0 and -1 are 0x00 and 0xFF, not the empty string. + # * 255 is 0x00 0xFF, not 0xFF + # * -256 is 0xFF 0x00, not 0x00 + + # Special-case to avoid an empty encoding. + if val == 0: + return "\x00" + sign = 0 # What you would get if you sign-extended the current high bit. + out = "" + # We can stop once sign-extension matches the remaining value. + while val != sign: + byte = val & 0xff + out = chr(byte) + out + sign = -1 if byte & 0x80 == 0x80 else 0 + val >>= 8 + return out + +def der_encode_integer(val): + # type: (int) -> str + return der_encode_tlv(0x02, der_encode_integer_value(val)) +def der_encode_int32(val): + # type: (int) -> str + if val < -2147483648 or val > 2147483647: + raise ValueError("Bad value") + return der_encode_integer(val) +def der_encode_uint32(val): + # type: (int) -> str + if val < 0 or val > 4294967295: + raise ValueError("Bad value") + return der_encode_integer(val) + +def der_encode_string(val): + # type: (Text) -> str + if not isinstance(val, Text): + raise TypeError("unicode") + return der_encode_tlv(0x1b, val.encode("utf-8")) + +def der_encode_octet_string(val): + # type: (str) -> str + if not isinstance(val, str): + raise TypeError("str") + return der_encode_tlv(0x04, val) + +def der_encode_sequence(tlvs, tagged=True): + # type: (List[str], Optional[bool]) -> str + body = [] + for i, tlv in enumerate(tlvs): + # Missing optional elements represented as None. + if not tlv: + continue + if tagged: + # Assume kerberos-style explicit tagging of components. 
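+            # A context-specific constructed tag [i] is 0xa0 | i: element 0
+            # is tagged 0xa0, element 1 is 0xa1, and so on.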
+ tlv = der_encode_tlv(0xa0 | i, tlv) + body.append(tlv) + return der_encode_tlv(0x30, "".join(body)) + +def der_encode_ticket(tkt): + # type: (Dict[str, Any]) -> str + return der_encode_tlv( + 0x61, # Ticket + der_encode_sequence( + [der_encode_integer(5), # tktVno + der_encode_string(tkt["realm"]), + der_encode_sequence( # PrincipalName + [der_encode_int32(tkt["sname"]["nameType"]), + der_encode_sequence([der_encode_string(c) + for c in tkt["sname"]["nameString"]], + tagged=False)]), + der_encode_sequence( # EncryptedData + [der_encode_int32(tkt["encPart"]["etype"]), + (der_encode_uint32(tkt["encPart"]["kvno"]) + if "kvno" in tkt["encPart"] + else None), + der_encode_octet_string( + base64.b64decode(tkt["encPart"]["cipher"]))])])) + +# Kerberos ccache writing code. Using format documentation from here: +# http://www.gnu.org/software/shishi/manual/html_node/The-Credential-Cache-Binary-File-Format.html + +def ccache_counted_octet_string(data): + # type: (str) -> bytes + if not isinstance(data, str): + raise TypeError("str") + return struct.pack("!I", len(data)) + data + +def ccache_principal(name, realm): + # type: (Dict[str, str], str) -> str + header = struct.pack("!II", name["nameType"], len(name["nameString"])) + return (header + ccache_counted_octet_string(realm.encode("utf-8")) + + "".join(ccache_counted_octet_string(c.encode("utf-8")) + for c in name["nameString"])) + +def ccache_key(key): + # type: (Dict[str, str]) -> bytes + return (struct.pack("!H", key["keytype"]) + + ccache_counted_octet_string(base64.b64decode(key["keyvalue"]))) + +def flags_to_uint32(flags): + # type: (List[str]) -> int + ret = 0 + for i, v in enumerate(flags): + if v: + ret |= 1 << (31 - i) + return ret + +def ccache_credential(cred): + # type: (Dict[str, Any]) -> str + out = ccache_principal(cred["cname"], cred["crealm"]) + out += ccache_principal(cred["sname"], cred["srealm"]) + out += ccache_key(cred["key"]) + out += struct.pack("!IIII", + cred["authtime"] // 1000, + cred.get("starttime", cred["authtime"]) // 1000, + cred["endtime"] // 1000, + cred.get("renewTill", 0) // 1000) + out += struct.pack("!B", 0) + out += struct.pack("!I", flags_to_uint32(cred["flags"])) + # TODO: Care about addrs or authdata? Former is "caddr" key. + out += struct.pack("!II", 0, 0) + out += ccache_counted_octet_string(der_encode_ticket(cred["ticket"])) + # No second_ticket. + out += ccache_counted_octet_string("") + return out + +def make_ccache(cred): + # type: (Dict[str, Any]) -> str + # Do we need a DeltaTime header? The ccache I get just puts zero + # in there, so do the same. + out = struct.pack("!HHHHII", + 0x0504, # file_format_version + 12, # headerlen + 1, # tag (DeltaTime) + 8, # taglen (two uint32_ts) + 0, 0, # time_offset / usec_offset + ) + out += ccache_principal(cred["cname"], cred["crealm"]) + out += ccache_credential(cred) + return out + +import numpy as np + +from datasets import Joints +from pairwise_relations import from_dataset + + +def generate_fake_locations(num, means, stddev=5): + """Generate a matrix with four rows (one for each "point") and three + columns (x-coord, y-coord and visibility). 
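+    The returned array has shape (num, len(means), 3).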
Means is a 3x2 matrix giving + mean locations for each point.""" + per_joint = [] + for joint_mean in means: + locations = np.random.multivariate_normal( + joint_mean, stddev * np.eye(2), num + ) + with_visibility = np.append(locations, np.ones((num, 1)), axis=1) + per_joint.append(with_visibility) + warped_array = np.array(per_joint) + # Now we need to swap the first and second dimensions + return warped_array.transpose((1, 0, 2)) + + +def test_clustering(): + """Test learning of clusters for joint types.""" + first_means = np.asarray([ + (10, 70), + (58, 94), + (66, 58), + (95, 62) + ]) + second_means = np.asarray([ + (88, 12), + (56, 15), + (25, 21), + (24, 89) + ]) + fake_locations = np.concatenate([ + generate_fake_locations(100, first_means), + generate_fake_locations(100, second_means), + ], axis=0) + np.random.shuffle(fake_locations) + fake_pairs = [ + (0, 1), + (1, 2), + (2, 3) + ] + fake_joints = Joints(fake_locations, fake_pairs) + # Make two clusters for each relationship type. Yes, passing in zeros as + # your scale is stupid, and poor testing practice. + centers = from_dataset(fake_joints, 2, np.zeros(len(fake_locations)), 1) + + assert centers.ndim == 3 + # Three joints, two clusters per joint, two coordinates (i.e. x, y) per + # cluster + assert centers.shape == (3, 2, 2) + + for idx, pair in enumerate(fake_pairs): + first_idx, second_idx = pair + first_mean = first_means[second_idx] - first_means[first_idx] + second_mean = second_means[second_idx] - second_means[first_idx] + found_means = centers[idx] + first_dists = np.linalg.norm(found_means - first_mean, axis=1) + second_dists = np.linalg.norm(found_means - second_mean, axis=1) + + # Make sure that each of our specified means are within Euclidean + # distance 1 of at least one found cluster + first_within = first_dists < 1 + assert first_within.any() + second_within = second_dists < 1 + assert second_within.any() + +# Copyright (c) 2012-2013 Benjamin Bruheim +# This file is covered by the LGPLv3 or later, read COPYING for details. +import datetime +import os +import uuid + +import pytz +from django.conf import settings +from django.contrib.auth.models import User +from django.core.exceptions import ObjectDoesNotExist +from django.core.urlresolvers import reverse +from django.db import models +from django.db.models.signals import post_save +from django.utils import timezone +from django.utils.timezone import utc +from django.utils.translation import ugettext as _ +from model_utils import Choices +from model_utils.models import TimeStampedModel + +""" +Models for the Frikanalen database. + +A lot of the models are business-specific for Frikanalen. There's also a +quite a few fields that are related to our legacy systems, but these are +likely to be removed when we're confident that data is properly +transferred. + +An empty database should populate at least FileFormat and Categories with +some content before it can be properly used. + +Fields that are commented out are suggestions for future fields. If they +turn out to be silly they should obviously be removed. 
+""" + + +class Organization(models.Model): + id = models.AutoField(primary_key=True) + name = models.CharField(max_length=255) + description = models.TextField(blank=True, max_length=255) + + members = models.ManyToManyField(User) # User ownership of an organization + fkmember = models.BooleanField(default=False) + orgnr = models.CharField(blank=True, max_length=255) + homepage = models.CharField('Link back to the organisation home page.', + blank=True, null=True, max_length=255) + + # No such concept yet. Every member can add members. + # owner = models.ForeignKey(User) + # Videos to feature on their frontpage, incl other members + # featured_videos = models.ManyToManyField("Video") + # twitter_username = models.CharField(null=True,max_length=255) + # twitter_tags = models.CharField(null=True,max_length=255) + # To be copied into every video they create + # homepage = models.CharField(blank=True, max_length=255) + # categories = models.ManyToManyField(Category) + + class Meta: + db_table = 'Organization' + ordering = ('name', '-id') + + def __str__(self): + return self.name + + def get_absolute_url(self): + return reverse('vod-org-video-list', kwargs={'orgid': self.id}) + + +class FileFormat(models.Model): + id = models.AutoField(primary_key=True) + description = models.TextField( + unique=True, max_length=255, null=True, blank=True) + fsname = models.CharField(max_length=20) + vod_publish = models.BooleanField('Present video format to video on demand?', + default=False) + mime_type = models.CharField(max_length=256, + null=True, blank=True) + + # httpprefix = models.CharField(max_length=200) + # metadata framerate, resolution, etc? + + class Meta: + db_table = 'ItemType' + verbose_name = 'video file format' + verbose_name_plural = 'video file formats' + ordering = ('fsname', '-id') + + def __str__(self): + return self.fsname + + +class VideoFile(models.Model): + id = models.AutoField(primary_key=True) + # uploader = models.ForeignKey(User) # Not migrated + video = models.ForeignKey("Video") + format = models.ForeignKey("FileFormat") + filename = models.CharField(max_length=256) + old_filename = models.CharField(max_length=256, default='', blank=True) + # source = video = models.ForeignKey("VideoFile") + integrated_lufs = models.FloatField( + 'Integrated LUFS of file defined in ITU R.128', + blank=True, null=True) + truepeak_lufs = models.FloatField( + 'True peak LUFS of file defined in ITU R.128', + blank=True, null=True) + created_time = models.DateTimeField( + auto_now_add=True, null=True, + help_text='Time the video file was created') + # metadata frames, width, height, framerate? mlt profile name? + # edl for in/out? 
+ + class Meta: + verbose_name = 'video file' + verbose_name_plural = 'video files' + ordering = ('-video_id', '-id',) + + def __str__(self): + return "%s version of %s" % (self.format.fsname, self.video.name) + + def location(self, relative=False): + filename = os.path.basename(self.filename) + + path = '/'.join((str(self.video.id), self.format.fsname, filename)) + + if relative: + return path + else: + return '/'.join((settings.FK_MEDIA_ROOT, path)) + + +class Category(models.Model): + id = models.IntegerField(primary_key=True) + name = models.CharField(max_length=255) + desc = models.CharField(max_length=255, blank=True) + + class Meta: + db_table = 'Category' + verbose_name = 'video category' + verbose_name_plural = 'video categories' + ordering = ('name', '-id') + + def __str__(self): + return self.name + + +class VideoManager(models.Manager): + def public(self): + return (super(VideoManager, self) + .get_queryset() + .filter(publish_on_web=True, proper_import=True)) + + +class Video(models.Model): + id = models.AutoField(primary_key=True) + # Retire, use description instead + header = models.TextField(blank=True, null=True, max_length=2048) + name = models.CharField(max_length=255) + description = models.CharField(blank=True, null=True, max_length=2048) + # Code for editors' internal use + # production_code = models.CharField(null=True,max_length=255) + categories = models.ManyToManyField(Category) + editor = models.ForeignKey(User) + has_tono_records = models.BooleanField(default=False) + is_filler = models.BooleanField('Play automatically?', + help_text = 'You still have the editorial responsibility. Only affect videos from members.', + default=False) # Find a better name? + publish_on_web = models.BooleanField(default=True) + + # disabled = models.BooleanField() # Not migrated + # uploader = models.ForeignKey(User) + # Planned duration in milliseconds, probably not going to be used + # planned_duration = models.IntegerField() + # Time when it is to be published on web + # published_time = models.DateTimeField() + + proper_import = models.BooleanField(default=False) + played_count_web = models.IntegerField( + default=0, help_text='Number of times it has been played') + created_time = models.DateTimeField( + auto_now_add=True, null=True, + help_text='Time the program record was created') + updated_time = models.DateTimeField( + auto_now=True, null=True, + help_text='Time the program record has been updated') + uploaded_time = models.DateTimeField( + blank=True, null=True, + help_text='Time the original video for the program was uploaded') + framerate = models.IntegerField( + default=25000, + help_text='Framerate of master video in thousands / second') + organization = models.ForeignKey( + Organization, null=True, help_text='Organization for video') + ref_url = models.CharField( + blank=True, max_length=1024, help_text='URL for reference') + duration = models.DurationField(blank=True, default=datetime.timedelta(0)) + upload_token = models.CharField( + blank=True, default='', max_length=32, + help_text='Code for upload') + + objects = VideoManager() + + class Meta: + db_table = 'Video' + get_latest_by = 'uploaded_time' + ordering = ('-id',) + + def __str__(self): + return self.name + + def save(self, *args, **kwargs): + if not self.id: + self.upload_token = uuid.uuid4().hex + return super(Video, self).save(*args, **kwargs) + + def is_public(self): + return self.publish_on_web and self.proper_import + + def tags(self): + tags = [] + if self.has_tono_records: + tags.append("tono") + if 
+        if self.is_filler:
+            tags.append("filler")
+        return ', '.join(tags)
+
+    def videofiles(self):
+        videofiles = VideoFile.objects.filter(video=self)
+        return videofiles
+
+    def category_list(self):
+        categories = self.categories.filter(video=self)
+        return categories
+
+    def schedule(self):
+        events = Scheduleitem.objects.filter(video=self)
+        return events
+
+    def first_broadcast(self):
+        events = Scheduleitem.objects.filter(video=self)
+        if events:
+            return events[0]
+        return None
+
+    def last_broadcast(self):
+        events = Scheduleitem.objects.filter(video=self)
+        if events:
+            return events[max(0, len(events)-1)]
+        return None
+
+    def videofile_url(self, fsname):
+        videofile = self.videofile_set.get(format__fsname=fsname)
+        return videofile.location(relative=True)
+
+    def small_thumbnail_url(self):
+        format = FileFormat.objects.get(fsname="small_thumb")
+        try:
+            videofile = VideoFile.objects.get(video=self, format=format)
+        except ObjectDoesNotExist:
+            return "/static/default_small_thumbnail.png"
+        return settings.FK_MEDIA_URLPREFIX+videofile.location(relative=True)
+
+    def medium_thumbnail_url(self):
+        format = FileFormat.objects.get(fsname="medium_thumb")
+        try:
+            videofile = VideoFile.objects.get(video=self, format=format)
+        except ObjectDoesNotExist:
+            return "/static/default_medium_thumbnail.png"
+        return settings.FK_MEDIA_URLPREFIX+videofile.location(relative=True)
+
+    def large_thumbnail_url(self):
+        format = FileFormat.objects.get(fsname="large_thumb")
+        try:
+            videofile = VideoFile.objects.get(video=self, format=format)
+        except ObjectDoesNotExist:
+            return "/static/default_large_thumbnail.png"
+        return settings.FK_MEDIA_URLPREFIX+videofile.location(relative=True)
+
+    def ogv_url(self):
+        try:
+            return settings.FK_MEDIA_URLPREFIX + self.videofile_url("theora")
+        except ObjectDoesNotExist:
+            return
+
+    def vod_files(self):
+        """Return a list of video files fit for the video on demand
+        presentation, with associated MIME type.
+
+        [
+            {
+                'url': 'https://../.../file.ogv',
+                'mime_type': 'video/ogg',
+            },
+        ]
+
+        """
+
+        vodfiles = []
+        for videofile in self.videofiles().filter(format__vod_publish=True):
+            url = settings.FK_MEDIA_URLPREFIX + videofile.location(relative=True)
+            vodfiles.append({'url': url, 'mime_type': videofile.format.mime_type})
+        return vodfiles
+
+    def get_absolute_url(self):
+        return reverse('vod-video-detail', kwargs={'video_id': self.id})
+
+
+class ScheduleitemManager(models.Manager):
+    def by_day(self, date=None, days=1, surrounding=False):
+        if not date:
+            date = timezone.now().date()
+        elif hasattr(date, 'date'):
+            date = date.date()
+        # Take current date, but make an object at 00:00.
+        # Then make that an aware datetime so our comparisons
+        # are correct.
+        day_start = datetime.datetime.combine(date, datetime.time(0))
+        startdt = timezone.make_aware(day_start, timezone.get_current_timezone())
+        enddt = startdt + datetime.timedelta(days=days)
+        if surrounding:
+            startdt, enddt = self.expand_to_surrounding(startdt, enddt)
+        return self.get_queryset().filter(starttime__gte=startdt,
+                                          starttime__lte=enddt)
+
+    def expand_to_surrounding(self, startdt, enddt):
+        # Try to find the event before the given date
+        try:
+            startdt = (Scheduleitem.objects
+                       .filter(starttime__lte=startdt)
+                       .order_by("-starttime")[0].starttime)
+        except IndexError:
+            pass
+        # Try to find the event after the end date
+        try:
+            enddt = (Scheduleitem.objects
+                     .filter(starttime__gte=enddt)
+                     .order_by("starttime")[0].starttime)
+        except IndexError:
+            pass
+        return startdt, enddt
+
+
+class Scheduleitem(models.Model):
+    SCHEDULE_REASONS = (
+        (1, 'Legacy'),
+        (2, 'Administrative'),
+        (3, 'User'),
+        (4, 'Automatic'),
+    )
+
+    id = models.AutoField(primary_key=True)
+    default_name = models.CharField(max_length=255, blank=True)
+    video = models.ForeignKey(Video, null=True, blank=True)
+    schedulereason = models.IntegerField(blank=True, choices=SCHEDULE_REASONS)
+    starttime = models.DateTimeField()
+    duration = models.DurationField()
+
+    objects = ScheduleitemManager()
+
+    """
+    def save(self, *args, **kwargs):
+        self.endtime = self.starttime + timeutils.duration
+        super(Scheduleitem, self).save(*args, **kwargs)
+    """
+
+    class Meta:
+        db_table = 'ScheduleItem'
+        verbose_name = 'TX schedule entry'
+        verbose_name_plural = 'TX schedule entries'
+        ordering = ('-id',)
+
+    def __str__(self):
+        t = self.starttime
+        s = t.strftime("%Y-%m-%d %H:%M:%S")
+        # format microsecond to hundredths
+        s += ".%02i" % (t.microsecond // 10000)
+        if self.video:
+            return str(s) + ": " + str(self.video)
+        else:
+            return str(s) + ": " + self.default_name
+
+    def endtime(self):
+        if not self.duration:
+            return self.starttime
+        return self.starttime + self.duration
+
+
+class UserProfile(models.Model):
+    user = models.OneToOneField(User)
+    phone = models.CharField(
+        blank=True, max_length=255, default='', null=True)
+    mailing_address = models.CharField(
+        blank=True, max_length=512, default='', null=True)
+    post_code = models.CharField(
+        blank=True, max_length=255, default='', null=True)
+    city = models.CharField(
+        blank=True, max_length=255, default='', null=True)
+    country = models.CharField(
+        blank=True, max_length=255, default='', null=True)
+    legacy_username = models.CharField(
+        blank=True, max_length=255, default='')
+
+    def __str__(self):
+        return "%s (profile)" % self.user
+
+
+def create_user_profile(sender, instance, created, **kwargs):
+    if created:
+        profile, created = UserProfile.objects.get_or_create(user=instance)
+
+# Create a hook so the profile model is created when a User is.
+post_save.connect(create_user_profile, sender=User)
+
+
+class SchedulePurpose(models.Model):
+    """
+    A block of video files having a similar purpose.
+
+    Either an organization and its videos (takes preference) or manually
+    connected videos.
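+
+    The `strategy` field decides how single_video() picks from this pool:
+    'latest' takes the most recently uploaded video, 'random' picks one at
+    random, and 'least_scheduled' prefers the video with the fewest
+    schedule entries.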
+    """
+    STRATEGY = Choices('latest', 'random', 'least_scheduled')
+    TYPE = Choices('videos', 'organization')
+
+    name = models.CharField(max_length=100)
+    type = models.CharField(max_length=32, choices=TYPE)
+    strategy = models.CharField(max_length=32, choices=STRATEGY)
+
+    # You probably need one of these depending on type and strategy
+    organization = models.ForeignKey(Organization, blank=True, null=True)
+    direct_videos = models.ManyToManyField(Video, blank=True)
+
+    class Meta:
+        ordering = ('-id',)
+
+    def videos_str(self):
+        return ", ".join([str(x) for x in self.videos_queryset()])
+    videos_str.short_description = "videos"
+    videos_str.admin_order_field = "videos"
+
+    def videos_queryset(self, max_duration=None):
+        """
+        Get the queryset for the available videos
+        """
+        if self.type == self.TYPE.organization:
+            qs = self.organization.video_set.all()
+        elif self.type == self.TYPE.videos:
+            qs = self.direct_videos.all()
+        else:
+            raise Exception("Unhandled type %s" % self.type)
+        if max_duration:
+            qs = qs.filter(duration__lte=max_duration)
+        # Workaround playout not handling broken files correctly
+        qs = qs.filter(proper_import=True)
+        return qs
+
+    def single_video(self, max_duration=None):
+        """
+        Get a single video based on the settings of this purpose
+        """
+        qs = self.videos_queryset(max_duration)
+        if self.strategy == self.STRATEGY.latest:
+            try:
+                return qs.latest()
+            except Video.DoesNotExist:
+                return None
+        elif self.strategy == self.STRATEGY.random:
+            # This might be slow, but hopefully few records
+            return qs.order_by('?').first()
+        elif self.strategy == self.STRATEGY.least_scheduled:
+            # Get the video which has been scheduled the least
+            return (qs.annotate(num_sched=models.Count('scheduleitem'))
+                    .order_by('num_sched').first())
+        else:
+            raise Exception("Unhandled strategy %s" % self.strategy)
+
+    def __str__(self):
+        return self.name
+
+
+class WeeklySlot(models.Model):
+    DAY_OF_THE_WEEK = (
+        (0, _('Monday')),
+        (1, _('Tuesday')),
+        (2, _('Wednesday')),
+        (3, _('Thursday')),
+        (4, _('Friday')),
+        (5, _('Saturday')),
+        (6, _('Sunday')),
+    )
+
+    purpose = models.ForeignKey(SchedulePurpose, null=True, blank=True)
+    day = models.IntegerField(
+        choices=DAY_OF_THE_WEEK,
+    )
+    start_time = models.TimeField()
+    duration = models.DurationField()
+
+    class Meta:
+        ordering = ('day', 'start_time', 'pk')
+
+    @property
+    def end_time(self):
+        if not self.duration:
+            return self.start_time
+        # start_time is a datetime.time, which does not support addition;
+        # anchor it to a date before adding the duration.
+        end = datetime.datetime.combine(datetime.date.min, self.start_time)
+        return (end + self.duration).time()
+
+    def next_date(self, from_date=None):
+        if not from_date:
+            from_date = datetime.date.today()
+        days_ahead = self.day - from_date.weekday()
+        if days_ahead <= 0:
+            # target date already happened this week
+            days_ahead += 7
+        return from_date + datetime.timedelta(days_ahead)
+
+    def next_datetime(self, from_date=None):
+        next_date = self.next_date(from_date)
+        naive_dt = datetime.datetime.combine(next_date, self.start_time)
+        tz = pytz.timezone(settings.TIME_ZONE)
+        return tz.localize(naive_dt)
+
+    def __str__(self):
+        return ("{day} {s.start_time} ({s.purpose})"
+                "".format(day=self.get_day_display(), s=self))
+
+
+class AsRun(TimeStampedModel):
+    """
+    AsRun model is a historic log over what was sent through playout.
+
+    `video` - Points to the Video which was played if there is one.
+              Can be empty if something other than a video was played.
+              The field is mutually exclusive with `program_name`.
+
+    `program_name` - A free-form text input saying what was played.
+                     If `video` is set, this field should not be set.
+ Examples of where you'd use this field is e.g. + when broadcasting live. + Defaults to the empty string. + + `playout` - The playout this entry corresponds with. This will + almost always be 'main' which it defaults to. + + `played_at` - Time when the playout started. Defaults to now. + + `in_ms` - The inpoint where the video/stream was started at. + In milliseconds. Normally 0 which it defaults to. + + `out_ms` - The outpoint where the video/stream stopped. + This would often be the duration of the video, or + how long we live streamed a particular URL. + Can be null (None) if this is 'currently happening'. + """ + video = models.ForeignKey(Video, blank=True, null=True) + program_name = models.CharField(max_length=160, blank=True, default='') + playout = models.CharField(max_length=255, blank=True, default='main') + played_at = models.DateTimeField(blank=True, default=timezone.now) + + in_ms = models.IntegerField(blank=True, default=0) + out_ms = models.IntegerField(blank=True, null=True) + + def __str__(self): + if self.video: + return '{s.playout} video: {s.video}'.format(s=self) + return '{s.playout}: {s.program_name}'.format(s=self) + + class Meta: + ordering = ('-played_at', '-id',) + +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: POGOProtos/Data/PokedexEntry.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from POGOProtos.Enums import PokemonId_pb2 as POGOProtos_dot_Enums_dot_PokemonId__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='POGOProtos/Data/PokedexEntry.proto', + package='POGOProtos.Data', + syntax='proto3', + serialized_pb=_b('\n\"POGOProtos/Data/PokedexEntry.proto\x12\x0fPOGOProtos.Data\x1a POGOProtos/Enums/PokemonId.proto\"\xac\x01\n\x0cPokedexEntry\x12/\n\npokemon_id\x18\x01 \x01(\x0e\x32\x1b.POGOProtos.Enums.PokemonId\x12\x19\n\x11times_encountered\x18\x02 \x01(\x05\x12\x16\n\x0etimes_captured\x18\x03 \x01(\x05\x12\x1e\n\x16\x65volution_stone_pieces\x18\x04 \x01(\x05\x12\x18\n\x10\x65volution_stones\x18\x05 \x01(\x05\x62\x06proto3') + , + dependencies=[POGOProtos_dot_Enums_dot_PokemonId__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + + +_POKEDEXENTRY = _descriptor.Descriptor( + name='PokedexEntry', + full_name='POGOProtos.Data.PokedexEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='pokemon_id', full_name='POGOProtos.Data.PokedexEntry.pokemon_id', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='times_encountered', full_name='POGOProtos.Data.PokedexEntry.times_encountered', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='times_captured', full_name='POGOProtos.Data.PokedexEntry.times_captured', index=2, + number=3, type=5, cpp_type=1, label=1, + 
has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='evolution_stone_pieces', full_name='POGOProtos.Data.PokedexEntry.evolution_stone_pieces', index=3, + number=4, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='evolution_stones', full_name='POGOProtos.Data.PokedexEntry.evolution_stones', index=4, + number=5, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=90, + serialized_end=262, +) + +_POKEDEXENTRY.fields_by_name['pokemon_id'].enum_type = POGOProtos_dot_Enums_dot_PokemonId__pb2._POKEMONID +DESCRIPTOR.message_types_by_name['PokedexEntry'] = _POKEDEXENTRY + +PokedexEntry = _reflection.GeneratedProtocolMessageType('PokedexEntry', (_message.Message,), dict( + DESCRIPTOR = _POKEDEXENTRY, + __module__ = 'POGOProtos.Data.PokedexEntry_pb2' + # @@protoc_insertion_point(class_scope:POGOProtos.Data.PokedexEntry) + )) +_sym_db.RegisterMessage(PokedexEntry) + + +# @@protoc_insertion_point(module_scope) + +# -*- coding: utf-8 -*- + +from .structures import LookupDict + +_codes = { + + # Informational. + 100: ('continue',), + 101: ('switching_protocols',), + 102: ('processing',), + 103: ('checkpoint',), + 122: ('uri_too_long', 'request_uri_too_long'), + 200: ('ok', 'okay', 'all_ok', 'all_okay', 'all_good', '\\o/', '✓'), + 201: ('created',), + 202: ('accepted',), + 203: ('non_authoritative_info', 'non_authoritative_information'), + 204: ('no_content',), + 205: ('reset_content', 'reset'), + 206: ('partial_content', 'partial'), + 207: ('multi_status', 'multiple_status', 'multi_stati', 'multiple_stati'), + 208: ('already_reported',), + 226: ('im_used',), + + # Redirection. + 300: ('multiple_choices',), + 301: ('moved_permanently', 'moved', '\\o-'), + 302: ('found',), + 303: ('see_other', 'other'), + 304: ('not_modified',), + 305: ('use_proxy',), + 306: ('switch_proxy',), + 307: ('temporary_redirect', 'temporary_moved', 'temporary'), + 308: ('permanent_redirect', + 'resume_incomplete', 'resume',), # These 2 to be removed in 3.0 + + # Client Error. 
+ 400: ('bad_request', 'bad'), + 401: ('unauthorized',), + 402: ('payment_required', 'payment'), + 403: ('forbidden',), + 404: ('not_found', '-o-'), + 405: ('method_not_allowed', 'not_allowed'), + 406: ('not_acceptable',), + 407: ('proxy_authentication_required', 'proxy_auth', 'proxy_authentication'), + 408: ('request_timeout', 'timeout'), + 409: ('conflict',), + 410: ('gone',), + 411: ('length_required',), + 412: ('precondition_failed', 'precondition'), + 413: ('request_entity_too_large',), + 414: ('request_uri_too_large',), + 415: ('unsupported_media_type', 'unsupported_media', 'media_type'), + 416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'), + 417: ('expectation_failed',), + 418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'), + 421: ('misdirected_request',), + 422: ('unprocessable_entity', 'unprocessable'), + 423: ('locked',), + 424: ('failed_dependency', 'dependency'), + 425: ('unordered_collection', 'unordered'), + 426: ('upgrade_required', 'upgrade'), + 428: ('precondition_required', 'precondition'), + 429: ('too_many_requests', 'too_many'), + 431: ('header_fields_too_large', 'fields_too_large'), + 444: ('no_response', 'none'), + 449: ('retry_with', 'retry'), + 450: ('blocked_by_windows_parental_controls', 'parental_controls'), + 451: ('unavailable_for_legal_reasons', 'legal_reasons'), + 499: ('client_closed_request',), + + # Server Error. + 500: ('internal_server_error', 'server_error', '/o\\', '✗'), + 501: ('not_implemented',), + 502: ('bad_gateway',), + 503: ('service_unavailable', 'unavailable'), + 504: ('gateway_timeout',), + 505: ('http_version_not_supported', 'http_version'), + 506: ('variant_also_negotiates',), + 507: ('insufficient_storage',), + 509: ('bandwidth_limit_exceeded', 'bandwidth'), + 510: ('not_extended',), + 511: ('network_authentication_required', 'network_auth', 'network_authentication'), +} + +codes = LookupDict(name='status_codes') + +for code, titles in _codes.items(): + for title in titles: + setattr(codes, title, code) + if not title.startswith('\\'): + setattr(codes, title.upper(), code) + +############################################################################## +# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/spack/spack +# Please also see the NOTICE and LICENSE files for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class RGgpubr(RPackage): + """ggpubr: 'ggplot2' Based Publication Ready Plots""" + + homepage = "http://www.sthda.com/english/rpkgs/ggpubr" + url = "https://cran.r-project.org/src/contrib/ggpubr_0.1.2.tar.gz" + list_url = "https://cran.r-project.org/src/contrib/Archive/ggpubr" + + version('0.1.2', '42a5749ae44121597ef511a7424429d1') + + depends_on('r@3.1.0:') + depends_on('r-ggplot2', type=('build', 'run')) + depends_on('r-ggrepel', type=('build', 'run')) + depends_on('r-ggsci', type=('build', 'run')) + depends_on('r-plyr', type=('build', 'run')) + +# -*- encoding: utf-8 -*- +############################################################################## +# +# OpenERP, Open Source Management Solution +# Copyright (C) 2004-2010 Tiny SPRL (). All Rights Reserved +# $Id$ +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +# +############################################################################## + +import project_issue +import report +import res_config + +# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: + +from django.forms import URLField, ValidationError +from django.test import SimpleTestCase + +from . 
import FormFieldAssertionsMixin + + +class URLFieldTest(FormFieldAssertionsMixin, SimpleTestCase): + + def test_urlfield_1(self): + f = URLField() + self.assertWidgetRendersTo(f, '') + with self.assertRaisesMessage(ValidationError, "'This field is required.'"): + f.clean('') + with self.assertRaisesMessage(ValidationError, "'This field is required.'"): + f.clean(None) + self.assertEqual('http://localhost', f.clean('http://localhost')) + self.assertEqual('http://example.com', f.clean('http://example.com')) + self.assertEqual('http://example.com.', f.clean('http://example.com.')) + self.assertEqual('http://www.example.com', f.clean('http://www.example.com')) + self.assertEqual('http://www.example.com:8000/test', f.clean('http://www.example.com:8000/test')) + self.assertEqual('http://valid-with-hyphens.com', f.clean('valid-with-hyphens.com')) + self.assertEqual('http://subdomain.domain.com', f.clean('subdomain.domain.com')) + self.assertEqual('http://200.8.9.10', f.clean('http://200.8.9.10')) + self.assertEqual('http://200.8.9.10:8000/test', f.clean('http://200.8.9.10:8000/test')) + with self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'"): + f.clean('foo') + with self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'"): + f.clean('http://') + with self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'"): + f.clean('http://example') + with self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'"): + f.clean('http://example.') + with self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'"): + f.clean('com.') + with self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'"): + f.clean('.') + with self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'"): + f.clean('http://.com') + with self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'"): + f.clean('http://invalid-.com') + with self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'"): + f.clean('http://-invalid.com') + with self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'"): + f.clean('http://inv-.alid-.com') + with self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'"): + f.clean('http://inv-.-alid.com') + self.assertEqual('http://valid-----hyphens.com', f.clean('http://valid-----hyphens.com')) + self.assertEqual( + 'http://some.idn.xyz\xe4\xf6\xfc\xdfabc.domain.com:123/blah', + f.clean('http://some.idn.xyzäöüßabc.domain.com:123/blah') + ) + self.assertEqual( + 'http://www.example.com/s/http://code.djangoproject.com/ticket/13804', + f.clean('www.example.com/s/http://code.djangoproject.com/ticket/13804') + ) + with self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'"): + f.clean('[a') + with self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'"): + f.clean('http://[a') + + def test_url_regex_ticket11198(self): + f = URLField() + # hangs "forever" if catastrophic backtracking in ticket:#11198 not fixed + with self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'"): + f.clean('http://%s' % ("X" * 200,)) + + # a second test, to make sure the problem is really addressed, even on + # domains that don't fail the domain label length check in the regex + with self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'"): + f.clean('http://%s' % ("X" * 60,)) + + def test_urlfield_2(self): + f = URLField(required=False) + self.assertEqual('', f.clean('')) + self.assertEqual('', f.clean(None)) + self.assertEqual('http://example.com', f.clean('http://example.com')) + self.assertEqual('http://www.example.com', 
f.clean('http://www.example.com')) + with self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'"): + f.clean('foo') + with self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'"): + f.clean('http://') + with self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'"): + f.clean('http://example') + with self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'"): + f.clean('http://example.') + with self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'"): + f.clean('http://.com') + + def test_urlfield_5(self): + f = URLField(min_length=15, max_length=20) + self.assertWidgetRendersTo(f, '') + with self.assertRaisesMessage(ValidationError, "'Ensure this value has at least 15 characters (it has 12).'"): + f.clean('http://f.com') + self.assertEqual('http://example.com', f.clean('http://example.com')) + with self.assertRaisesMessage(ValidationError, "'Ensure this value has at most 20 characters (it has 37).'"): + f.clean('http://abcdefghijklmnopqrstuvwxyz.com') + + def test_urlfield_6(self): + f = URLField(required=False) + self.assertEqual('http://example.com', f.clean('example.com')) + self.assertEqual('', f.clean('')) + self.assertEqual('https://example.com', f.clean('https://example.com')) + + def test_urlfield_7(self): + f = URLField() + self.assertEqual('http://example.com', f.clean('http://example.com')) + self.assertEqual('http://example.com/test', f.clean('http://example.com/test')) + self.assertEqual( + 'http://example.com?some_param=some_value', + f.clean('http://example.com?some_param=some_value') + ) + + def test_urlfield_9(self): + f = URLField() + urls = ( + 'http://עברית.idn.icann.org/', + 'http://sãopaulo.com/', + 'http://sãopaulo.com.br/', + 'http://пример.испытание/', + 'http://مثال.إختبار/', + 'http://例子.测试/', + 'http://例子.測試/', + 'http://उदाहरण.परीक्षा/', + 'http://例え.テスト/', + 'http://مثال.آزمایشی/', + 'http://실례.테스트/', + 'http://العربية.idn.icann.org/', + ) + for url in urls: + with self.subTest(url=url): + # Valid IDN + self.assertEqual(url, f.clean(url)) + + def test_urlfield_10(self): + """URLField correctly validates IPv6 (#18779).""" + f = URLField() + urls = ( + 'http://[12:34::3a53]/', + 'http://[a34:9238::]:8080/', + ) + for url in urls: + with self.subTest(url=url): + self.assertEqual(url, f.clean(url)) + + def test_urlfield_not_string(self): + f = URLField(required=False) + with self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'"): + f.clean(23) + + def test_urlfield_normalization(self): + f = URLField() + self.assertEqual(f.clean('http://example.com/ '), 'http://example.com/') + + def test_urlfield_strip_on_none_value(self): + f = URLField(required=False, empty_value=None) + self.assertIsNone(f.clean(None)) + + def test_urlfield_unable_to_set_strip_kwarg(self): + msg = "__init__() got multiple values for keyword argument 'strip'" + with self.assertRaisesMessage(TypeError, msg): + URLField(strip=False) + +# -*- coding: utf-8 -*- + +from south.db import db +from south.v2 import SchemaMigration + +from django.conf import settings + + +class Migration(SchemaMigration): + + def forwards(self, orm): + db.rename_column('m_t_intervention', 'commentaire', 'descriptif') + + def backwards(self, orm): + db.rename_column('m_t_intervention', 'descriptif', 'commentaire') + + models = { + u'authent.structure': { + 'Meta': {'ordering': "['name']", 'object_name': 'Structure'}, + u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '256'}) + 
}, + u'common.organism': { + 'Meta': {'ordering': "['organism']", 'object_name': 'Organism', 'db_table': "'m_b_organisme'"}, + u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'organism': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_column': "'organisme'"}), + 'structure': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['authent.Structure']", 'db_column': "'structure'"}) + }, + u'core.comfort': { + 'Meta': {'ordering': "['comfort']", 'object_name': 'Comfort', 'db_table': "'l_b_confort'"}, + 'comfort': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_column': "'confort'"}), + u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'structure': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['authent.Structure']", 'db_column': "'structure'"}) + }, + u'core.datasource': { + 'Meta': {'ordering': "['source']", 'object_name': 'Datasource', 'db_table': "'l_b_source'"}, + u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'source': ('django.db.models.fields.CharField', [], {'max_length': '50'}), + 'structure': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['authent.Structure']", 'db_column': "'structure'"}) + }, + u'core.network': { + 'Meta': {'ordering': "['network']", 'object_name': 'Network', 'db_table': "'l_b_reseau'"}, + u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'network': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_column': "'reseau'"}), + 'structure': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['authent.Structure']", 'db_column': "'structure'"}) + }, + u'core.path': { + 'Meta': {'object_name': 'Path', 'db_table': "'l_t_troncon'"}, + 'arrival': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'null': 'True', 'db_column': "'arrivee'", 'blank': 'True'}), + 'ascent': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_column': "'denivelee_positive'", 'blank': 'True'}), + 'comfort': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'paths'", 'null': 'True', 'db_column': "'confort'", 'to': u"orm['core.Comfort']"}), + 'comments': ('django.db.models.fields.TextField', [], {'null': 'True', 'db_column': "'remarques'", 'blank': 'True'}), + 'datasource': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'paths'", 'null': 'True', 'db_column': "'source'", 'to': u"orm['core.Datasource']"}), + 'date_insert': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_column': "'date_insert'", 'blank': 'True'}), + 'date_update': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_column': "'date_update'", 'blank': 'True'}), + 'departure': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '250', 'null': 'True', 'db_column': "'depart'", 'blank': 'True'}), + 'descent': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_column': "'denivelee_negative'", 'blank': 'True'}), + 'geom': ('django.contrib.gis.db.models.fields.LineStringField', [], {'srid': '%s' % settings.SRID, 'spatial_index': 'False'}), + 'geom_3d': ('django.contrib.gis.db.models.fields.GeometryField', [], {'default': 'None', 'dim': '3', 'spatial_index': 'False', 'null': 'True', 'srid': '%s' % settings.SRID}), + 'geom_cadastre': ('django.contrib.gis.db.models.fields.LineStringField', [], 
{'srid': '%s' % settings.SRID, 'null': 'True', 'spatial_index': 'False'}), + u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'length': ('django.db.models.fields.FloatField', [], {'default': '0.0', 'null': 'True', 'db_column': "'longueur'", 'blank': 'True'}), + 'max_elevation': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_column': "'altitude_maximum'", 'blank': 'True'}), + 'min_elevation': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_column': "'altitude_minimum'", 'blank': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'db_column': "'nom'", 'blank': 'True'}), + 'networks': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'paths'", 'to': u"orm['core.Network']", 'db_table': "'l_r_troncon_reseau'", 'blank': 'True', 'symmetrical': 'False', 'null': 'True'}), + 'slope': ('django.db.models.fields.FloatField', [], {'default': '0.0', 'null': 'True', 'db_column': "'pente'", 'blank': 'True'}), + 'stake': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'paths'", 'null': 'True', 'db_column': "'enjeu'", 'to': u"orm['core.Stake']"}), + 'structure': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['authent.Structure']", 'db_column': "'structure'"}), + 'usages': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'paths'", 'to': u"orm['core.Usage']", 'db_table': "'l_r_troncon_usage'", 'blank': 'True', 'symmetrical': 'False', 'null': 'True'}), + 'valid': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_column': "'valide'"}) + }, + u'core.pathaggregation': { + 'Meta': {'ordering': "['id']", 'object_name': 'PathAggregation', 'db_table': "'e_r_evenement_troncon'"}, + 'end_position': ('django.db.models.fields.FloatField', [], {'db_column': "'pk_fin'", 'db_index': 'True'}), + u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'order': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_column': "'ordre'", 'blank': 'True'}), + 'path': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'aggregations'", 'on_delete': 'models.DO_NOTHING', 'db_column': "'troncon'", 'to': u"orm['core.Path']"}), + 'start_position': ('django.db.models.fields.FloatField', [], {'db_column': "'pk_debut'", 'db_index': 'True'}), + 'topo_object': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'aggregations'", 'db_column': "'evenement'", 'to': u"orm['core.Topology']"}) + }, + u'core.stake': { + 'Meta': {'ordering': "['id']", 'object_name': 'Stake', 'db_table': "'l_b_enjeu'"}, + u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'stake': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_column': "'enjeu'"}), + 'structure': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['authent.Structure']", 'db_column': "'structure'"}) + }, + u'core.topology': { + 'Meta': {'object_name': 'Topology', 'db_table': "'e_t_evenement'"}, + 'ascent': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_column': "'denivelee_positive'", 'blank': 'True'}), + 'date_insert': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_column': "'date_insert'", 'blank': 'True'}), + 'date_update': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_column': "'date_update'", 'blank': 'True'}), 
+ 'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_column': "'supprime'"}), + 'descent': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_column': "'denivelee_negative'", 'blank': 'True'}), + 'geom': ('django.contrib.gis.db.models.fields.GeometryField', [], {'default': 'None', 'srid': '%s' % settings.SRID, 'null': 'True', 'spatial_index': 'False'}), + 'geom_3d': ('django.contrib.gis.db.models.fields.GeometryField', [], {'default': 'None', 'dim': '3', 'spatial_index': 'False', 'null': 'True', 'srid': '%s' % settings.SRID}), + u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'kind': ('django.db.models.fields.CharField', [], {'max_length': '32'}), + 'length': ('django.db.models.fields.FloatField', [], {'default': '0.0', 'null': 'True', 'db_column': "'longueur'", 'blank': 'True'}), + 'max_elevation': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_column': "'altitude_maximum'", 'blank': 'True'}), + 'min_elevation': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_column': "'altitude_minimum'", 'blank': 'True'}), + 'offset': ('django.db.models.fields.FloatField', [], {'default': '0.0', 'db_column': "'decallage'"}), + 'paths': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['core.Path']", 'through': u"orm['core.PathAggregation']", 'db_column': "'troncons'", 'symmetrical': 'False'}), + 'slope': ('django.db.models.fields.FloatField', [], {'default': '0.0', 'null': 'True', 'db_column': "'pente'", 'blank': 'True'}) + }, + u'core.usage': { + 'Meta': {'ordering': "['usage']", 'object_name': 'Usage', 'db_table': "'l_b_usage'"}, + u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'structure': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['authent.Structure']", 'db_column': "'structure'"}), + 'usage': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_column': "'usage'"}) + }, + u'maintenance.contractor': { + 'Meta': {'ordering': "['contractor']", 'object_name': 'Contractor', 'db_table': "'m_b_prestataire'"}, + 'contractor': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_column': "'prestataire'"}), + u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'structure': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['authent.Structure']", 'db_column': "'structure'"}) + }, + u'maintenance.funding': { + 'Meta': {'object_name': 'Funding', 'db_table': "'m_r_chantier_financement'"}, + 'amount': ('django.db.models.fields.FloatField', [], {'default': '0.0', 'db_column': "'montant'"}), + u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'organism': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['common.Organism']", 'db_column': "'organisme'"}), + 'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['maintenance.Project']", 'db_column': "'chantier'"}) + }, + u'maintenance.intervention': { + 'Meta': {'object_name': 'Intervention', 'db_table': "'m_t_intervention'"}, + 'area': ('django.db.models.fields.FloatField', [], {'default': '0', 'db_column': "'surface'"}), + 'ascent': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_column': "'denivelee_positive'", 'blank': 'True'}), + 'date': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime.now', 'db_column': "'date'"}), + 'date_insert': 
('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_column': "'date_insert'", 'blank': 'True'}), + 'date_update': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_column': "'date_update'", 'blank': 'True'}), + 'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_column': "'supprime'"}), + 'descent': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_column': "'denivelee_negative'", 'blank': 'True'}), + 'description': ('django.db.models.fields.TextField', [], {'db_column': "'descriptif'", 'blank': 'True'}), + 'disorders': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'interventions'", 'blank': 'True', 'db_table': "'m_r_intervention_desordre'", 'to': u"orm['maintenance.InterventionDisorder']"}), + 'geom_3d': ('django.contrib.gis.db.models.fields.GeometryField', [], {'default': 'None', 'dim': '3', 'spatial_index': 'False', 'null': 'True', 'srid': '%s' % settings.SRID}), + 'height': ('django.db.models.fields.FloatField', [], {'default': '0.0', 'db_column': "'hauteur'"}), + 'heliport_cost': ('django.db.models.fields.FloatField', [], {'default': '0.0', 'db_column': "'cout_heliport'"}), + u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'jobs': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['maintenance.InterventionJob']", 'through': u"orm['maintenance.ManDay']", 'symmetrical': 'False'}), + 'length': ('django.db.models.fields.FloatField', [], {'default': '0.0', 'null': 'True', 'db_column': "'longueur'", 'blank': 'True'}), + 'material_cost': ('django.db.models.fields.FloatField', [], {'default': '0.0', 'db_column': "'cout_materiel'"}), + 'max_elevation': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_column': "'altitude_maximum'", 'blank': 'True'}), + 'min_elevation': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_column': "'altitude_minimum'", 'blank': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_column': "'nom'"}), + 'project': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'interventions'", 'null': 'True', 'db_column': "'chantier'", 'to': u"orm['maintenance.Project']"}), + 'slope': ('django.db.models.fields.FloatField', [], {'default': '0.0', 'null': 'True', 'db_column': "'pente'", 'blank': 'True'}), + 'stake': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'interventions'", 'null': 'True', 'db_column': "'enjeu'", 'to': u"orm['core.Stake']"}), + 'status': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['maintenance.InterventionStatus']", 'db_column': "'status'"}), + 'structure': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['authent.Structure']", 'db_column': "'structure'"}), + 'subcontract_cost': ('django.db.models.fields.FloatField', [], {'default': '0.0', 'db_column': "'cout_soustraitant'"}), + 'subcontracting': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_column': "'sous_traitance'"}), + 'topology': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'interventions_set'", 'null': 'True', 'to': u"orm['core.Topology']"}), + 'type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['maintenance.InterventionType']", 'null': 'True', 'db_column': "'type'", 'blank': 'True'}), + 'width': ('django.db.models.fields.FloatField', [], 
{'default': '0.0', 'db_column': "'largeur'"}) + }, + u'maintenance.interventiondisorder': { + 'Meta': {'ordering': "['disorder']", 'object_name': 'InterventionDisorder', 'db_table': "'m_b_desordre'"}, + 'disorder': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_column': "'desordre'"}), + u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'structure': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['authent.Structure']", 'db_column': "'structure'"}) + }, + u'maintenance.interventionjob': { + 'Meta': {'ordering': "['job']", 'object_name': 'InterventionJob', 'db_table': "'m_b_fonction'"}, + 'cost': ('django.db.models.fields.DecimalField', [], {'default': '1.0', 'db_column': "'cout_jour'", 'decimal_places': '2', 'max_digits': '8'}), + u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'job': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_column': "'fonction'"}), + 'structure': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['authent.Structure']", 'db_column': "'structure'"}) + }, + u'maintenance.interventionstatus': { + 'Meta': {'ordering': "['id']", 'object_name': 'InterventionStatus', 'db_table': "'m_b_suivi'"}, + u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'status': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_column': "'status'"}), + 'structure': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['authent.Structure']", 'db_column': "'structure'"}) + }, + u'maintenance.interventiontype': { + 'Meta': {'ordering': "['type']", 'object_name': 'InterventionType', 'db_table': "'m_b_intervention'"}, + u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'structure': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['authent.Structure']", 'db_column': "'structure'"}), + 'type': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_column': "'type'"}) + }, + u'maintenance.manday': { + 'Meta': {'object_name': 'ManDay', 'db_table': "'m_r_intervention_fonction'"}, + u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'intervention': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['maintenance.Intervention']", 'db_column': "'intervention'"}), + 'job': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['maintenance.InterventionJob']", 'db_column': "'fonction'"}), + 'nb_days': ('django.db.models.fields.DecimalField', [], {'db_column': "'nb_jours'", 'decimal_places': '2', 'max_digits': '6'}) + }, + u'maintenance.project': { + 'Meta': {'ordering': "['-begin_year', 'name']", 'object_name': 'Project', 'db_table': "'m_t_chantier'"}, + 'begin_year': ('django.db.models.fields.IntegerField', [], {'db_column': "'annee_debut'"}), + 'comments': ('django.db.models.fields.TextField', [], {'db_column': "'commentaires'", 'blank': 'True'}), + 'constraint': ('django.db.models.fields.TextField', [], {'db_column': "'contraintes'", 'blank': 'True'}), + 'contractors': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'projects'", 'symmetrical': 'False', 'db_table': "'m_r_chantier_prestataire'", 'to': u"orm['maintenance.Contractor']"}), + 'date_insert': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_column': "'date_insert'", 'blank': 'True'}), + 'date_update': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_column': "'date_update'", 'blank': 'True'}), + 
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_column': "'supprime'"}),
+            'domain': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['maintenance.ProjectDomain']", 'null': 'True', 'db_column': "'domaine'", 'blank': 'True'}),
+            'end_year': ('django.db.models.fields.IntegerField', [], {'db_column': "'annee_fin'"}),
+            'founders': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['common.Organism']", 'through': u"orm['maintenance.Funding']", 'symmetrical': 'False'}),
+            'global_cost': ('django.db.models.fields.FloatField', [], {'default': '0', 'db_column': "'cout_global'"}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_column': "'nom'"}),
+            'project_manager': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'manage'", 'db_column': "'maitre_ouvrage'", 'to': u"orm['common.Organism']"}),
+            'project_owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'own'", 'db_column': "'maitre_oeuvre'", 'to': u"orm['common.Organism']"}),
+            'structure': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['authent.Structure']", 'db_column': "'structure'"}),
+            'type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['maintenance.ProjectType']", 'null': 'True', 'db_column': "'type'", 'blank': 'True'})
+        },
+        u'maintenance.projectdomain': {
+            'Meta': {'ordering': "['domain']", 'object_name': 'ProjectDomain', 'db_table': "'m_b_domaine'"},
+            'domain': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_column': "'domaine'"}),
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'structure': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['authent.Structure']", 'db_column': "'structure'"})
+        },
+        u'maintenance.projecttype': {
+            'Meta': {'ordering': "['type']", 'object_name': 'ProjectType', 'db_table': "'m_b_chantier'"},
+            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+            'structure': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['authent.Structure']", 'db_column': "'structure'"}),
+            'type': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_column': "'type'"})
+        }
+    }
+
+    complete_apps = ['maintenance']
+
+#!/usr/bin/python
+# Copyright: Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+
+ANSIBLE_METADATA = {'metadata_version': '1.1',
+                    'status': ['preview'],
+                    'supported_by': 'network'}
+
+
+DOCUMENTATION = """
+---
+module: nxos_command
+extends_documentation_fragment: nxos
+version_added: "2.1"
+author: "Peter Sprygada (@privateip)"
+short_description: Run arbitrary command on Cisco NXOS devices
+description:
+  - Sends an arbitrary command to an NXOS node and returns the results
+    read from the device. This module includes an
+    argument that will cause the module to wait for a specific condition
+    before returning or timing out if the condition is not met.
+options:
+  commands:
+    description:
+      - The commands to send to the remote NXOS device. The resulting
+        output from the command is returned. If the I(wait_for)
+        argument is provided, the module does not return until the
+        condition is satisfied or the number of retries has expired.
+      - The I(commands) argument also accepts an alternative form
+        that allows for complex values that specify the command
+        to run and the output format to return. This can be done
+        on a command-by-command basis. The complex argument supports
+        the keywords C(command) and C(output) where C(command) is the
+        command to run and C(output) is one of 'text' or 'json'.
+    required: true
+  wait_for:
+    description:
+      - Specifies what to evaluate from the output of the command
+        and what conditionals to apply. This argument will cause
+        the task to wait for a particular conditional to be true
+        before moving forward. If the conditional is not true
+        by the configured retries, the task fails. See examples.
+    aliases: ['waitfor']
+    version_added: "2.2"
+  match:
+    description:
+      - The I(match) argument is used in conjunction with the
+        I(wait_for) argument to specify the match policy. Valid
+        values are C(all) or C(any). If the value is set to C(all)
+        then all conditionals in the I(wait_for) must be satisfied. If
+        the value is set to C(any) then only one of the values must be
+        satisfied.
+    default: all
+    version_added: "2.2"
+  retries:
+    description:
+      - Specifies the number of retries a command should be tried
+        before it is considered failed. The command is run on the
+        target device every retry and evaluated against the I(wait_for)
+        conditionals.
+    default: 10
+  interval:
+    description:
+      - Configures the interval in seconds to wait between retries
+        of the command. If the command does not pass the specified
+        conditional, the interval indicates how long to wait before
+        trying the command again.
+    default: 1
+"""
+
+EXAMPLES = """
+---
+- name: run show version on remote devices
+  nxos_command:
+    commands: show version
+
+- name: run show version and check to see if output contains Cisco
+  nxos_command:
+    commands: show version
+    wait_for: result[0] contains Cisco
+
+- name: run multiple commands on remote nodes
+  nxos_command:
+    commands:
+      - show version
+      - show interfaces
+
+- name: run multiple commands and evaluate the output
+  nxos_command:
+    commands:
+      - show version
+      - show interfaces
+    wait_for:
+      - result[0] contains Cisco
+      - result[1] contains loopback0
+
+- name: run commands and specify the output format
+  nxos_command:
+    commands:
+      - command: show version
+        output: json
+"""
+
+RETURN = """
+stdout:
+  description: The set of responses from the commands
+  returned: always apart from low level errors (such as action plugin)
+  type: list
+  sample: ['...', '...']
+stdout_lines:
+  description: The value of stdout split into a list
+  returned: always apart from low level errors (such as action plugin)
+  type: list
+  sample: [['...', '...'], ['...'], ['...']]
+failed_conditions:
+  description: The list of conditionals that have failed
+  returned: failed
+  type: list
+  sample: ['...', '...']
+"""
+import time
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible.module_utils.network.common.parsing import Conditional, FailedConditionalError
+from ansible.module_utils.network.common.utils import ComplexList
+from ansible.module_utils.network.nxos.nxos import check_args, nxos_argument_spec, run_commands
+from ansible.module_utils.six import string_types
+from ansible.module_utils._text import to_native
+
+
+def to_lines(stdout):
+    lines = list()
+    for item in stdout:
+        if isinstance(item, string_types):
+            item = str(item).split('\n')
+        lines.append(item)
+    return lines
+
+
+def parse_commands(module, warnings):
+    transform = ComplexList(dict(
+        command=dict(key=True),
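+        # Each command may optionally carry extra keys: 'output' selects the
+        # response format ('text' or 'json'), while 'prompt' and 'answer'
+        # support commands that expect an interactive confirmation.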
+ output=dict(), + prompt=dict(), + answer=dict() + ), module) + + commands = transform(module.params['commands']) + + if module.check_mode: + for item in list(commands): + if not item['command'].startswith('show'): + warnings.append( + 'Only show commands are supported when using check_mode, not ' + 'executing %s' % item['command'] + ) + commands.remove(item) + + return commands + + +def to_cli(obj): + cmd = obj['command'] + if obj.get('output') == 'json': + cmd += ' | json' + return cmd + + +def main(): + """entry point for module execution + """ + argument_spec = dict( + # { command: , output: , prompt: , response: } + commands=dict(type='list', required=True), + + wait_for=dict(type='list', aliases=['waitfor']), + match=dict(default='all', choices=['any', 'all']), + + retries=dict(default=10, type='int'), + interval=dict(default=1, type='int') + ) + + argument_spec.update(nxos_argument_spec) + + module = AnsibleModule(argument_spec=argument_spec, + supports_check_mode=True) + + result = {'changed': False} + + warnings = list() + check_args(module, warnings) + commands = parse_commands(module, warnings) + result['warnings'] = warnings + + wait_for = module.params['wait_for'] or list() + + try: + conditionals = [Conditional(c) for c in wait_for] + except AttributeError as exc: + module.fail_json(msg=to_native(exc)) + + retries = module.params['retries'] + interval = module.params['interval'] + match = module.params['match'] + + while retries > 0: + responses = run_commands(module, commands) + + for item in list(conditionals): + try: + if item(responses): + if match == 'any': + conditionals = list() + break + conditionals.remove(item) + except FailedConditionalError as exc: + module.fail_json(msg=to_native(exc)) + + if not conditionals: + break + + time.sleep(interval) + retries -= 1 + + if conditionals: + failed_conditions = [item.raw for item in conditionals] + msg = 'One or more conditional statements have not been satisfied' + module.fail_json(msg=msg, failed_conditions=failed_conditions) + + result.update({ + 'stdout': responses, + 'stdout_lines': to_lines(responses) + }) + + module.exit_json(**result) + + +if __name__ == '__main__': + main() + +# Copyright (C) 2008-2020 Free Software Foundation, Inc. + +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +# This file is part of the GDB testsuite. It tests GDB's handling of +# bad python pretty printers. + +# Test a printer with a bad children iterator. 
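+#
+# For reference, a conforming children() iterator yields (name, value)
+# tuples instead.  A minimal sketch, assuming the same 'elements' and
+# 'len' fields used by the printers below:
+#
+#   def children(self):
+#       for i in range(int(self.val['len'])):
+#           yield ('[%d]' % i, (self.val['elements'] + i).dereference())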
+
+import re
+import gdb.printing
+
+
+class BadChildrenContainerPrinter1(object):
+    """Children iterator doesn't return a tuple of two elements."""
+
+    def __init__(self, val):
+        self.val = val
+
+    def to_string(self):
+        return 'container %s with %d elements' % (self.val['name'], self.val['len'])
+
+    @staticmethod
+    def _bad_iterator(pointer, len):
+        start = pointer
+        end = pointer + len
+        while pointer != end:
+            yield 'intentional violation of children iterator protocol'
+            pointer += 1
+
+    def children(self):
+        return self._bad_iterator(self.val['elements'], self.val['len'])
+
+
+class BadChildrenContainerPrinter2(object):
+    """Children iterator returns a tuple of two elements with bad values."""
+
+    def __init__(self, val):
+        self.val = val
+
+    def to_string(self):
+        return 'container %s with %d elements' % (self.val['name'], self.val['len'])
+
+    @staticmethod
+    def _bad_iterator(pointer, len):
+        start = pointer
+        end = pointer + len
+        while pointer != end:
+            # The first argument is supposed to be a string.
+            yield (42, 'intentional violation of children iterator protocol')
+            pointer += 1
+
+    def children(self):
+        return self._bad_iterator(self.val['elements'], self.val['len'])
+
+
+def build_pretty_printer():
+    pp = gdb.printing.RegexpCollectionPrettyPrinter("bad-printers")
+
+    pp.add_printer('container1', '^container$',
+                   BadChildrenContainerPrinter1)
+    pp.add_printer('container2', '^container$',
+                   BadChildrenContainerPrinter2)
+
+    return pp
+
+
+my_pretty_printer = build_pretty_printer()
+gdb.printing.register_pretty_printer(gdb, my_pretty_printer)
+
+#!/usr/bin/env python
+
+from __future__ import print_function
+
+import sys
+from os import path, getenv
+from time import sleep
+
+# if PAPARAZZI_SRC not set, then assume the tree containing this
+# file is a reasonable substitute
+PPRZ_SRC = getenv("PAPARAZZI_SRC", path.normpath(path.join(path.dirname(path.abspath(__file__)), '../../../')))
+sys.path.append(PPRZ_SRC + "/sw/ext/pprzlink/lib/v1.0/python")
+
+from ivy_msg_interface import IvyMessagesInterface
+from pprzlink.message import PprzMessage
+
+
+class WaypointMover(object):
+    def __init__(self, verbose=False):
+        self.verbose = verbose
+        self._interface = IvyMessagesInterface(self.message_recv)
+
+    def message_recv(self, ac_id, msg):
+        if self.verbose:
+            print("Got msg %s" % msg.name)
+
+    def shutdown(self):
+        print("Shutting down ivy interface...")
+        self._interface.shutdown()
+
+    def __del__(self):
+        self.shutdown()
+
+    def move_waypoint(self, ac_id, wp_id, lat, lon, alt):
+        msg = PprzMessage("ground", "MOVE_WAYPOINT")
+        msg['ac_id'] = ac_id
+        msg['wp_id'] = wp_id
+        msg['lat'] = lat
+        msg['long'] = lon
+        msg['alt'] = alt
+        print("Sending message: %s" % msg)
+        self._interface.send(msg)
+
+
+if __name__ == '__main__':
+    try:
+        wm = WaypointMover()
+        # sleep briefly in order to make sure Ivy is up and the message is
+        # sent before shutting down again
+        sleep(0.1)
+        wm.move_waypoint(ac_id=202, wp_id=3, lat=43.563, lon=1.481, alt=172.0)
+        sleep(0.1)
+    except KeyboardInterrupt:
+        print("Stopping on request")
+        wm.shutdown()
+
+# -*- coding: utf-8 -*-
+
+"""
+@file
+@author Christian Urich
+@version 1.0
+@section LICENSE
+
+This file is part of DynaMind
+Copyright (C) 2015 Christian Urich
+
+This program is free software; you can redistribute it and/or
+modify it under the terms of the GNU General Public License
+as published by the Free Software Foundation; either version 2
+of the License, or (at your option) any later version.
+
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with this program; if not, write to the Free Software
+Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+"""
+
+import sys
+
+from pydynamind import *
+from osgeo import ogr  # used below to build point and line geometries
+
+
+class DM_ImportSWMM(Module):
+
+
+    display_name = "Import SWMM File"
+    group_name = "Performance Assessment"
+
+    def getHelpUrl(self):
+        return "/DynaMind-GDALModules/dm_importswmm.html"
+
+    def __init__(self):
+        Module.__init__(self)
+        self.setIsGDALModule(True)
+
+        self.createParameter("filename", FILENAME, "Name of SWMM File")
+        self.filename = ""
+
+        self.createParameter("name_outlet", STRING, "Identifier Outlet")
+        self.name_outlet = ""
+
+
+        # self.conduits.addLinkAttribute("XSECTION", "XSECTION", WRITE)
+
+        # self.junctions = View("JUNCTION", NODE, WRITE)
+        # self.junctions.addAttribute("Z", DOUBLE, WRITE)
+        # self.junctions.addAttribute("D", DOUBLE, WRITE)
+        # self.junctions.addAttribute("invert_elevation", DOUBLE, WRITE)
+        # self.junctions.addAttribute("built_year", DOUBLE, WRITE)
+        #
+        # self.outfalls = View("OUTFALL", NODE, WRITE)
+        # self.outfalls.addAttribute("Z", DOUBLE, WRITE)
+        # Not imported
+        # self.inlets = View("INLET", NODE, WRITE)
+
+
+        # self.wwtps = View("WWTP", NODE, WRITE)
+        #
+        # self.storages = View("STORAGE", NODE, WRITE)
+        # self.storages.addAttribute("Z", DOUBLE, WRITE)
+        # self.storages.addAttribute("max_depth", DOUBLE, WRITE)
+        # self.storages.addAttribute("type", STRING, WRITE)
+        # self.storages.addAttribute("storage_x", DOUBLE, WRITE)
+        # self.storages.addAttribute("storage_y", DOUBLE, WRITE)
+        #
+        # self.weirs = View("WEIR", EDGE, WRITE)
+        # self.weirs.addAttribute("type", STRING, WRITE)
+        # self.weirs.addAttribute("crest_height", DOUBLE, WRITE)
+        # self.weirs.addAttribute("discharge_coefficient", DOUBLE, WRITE)
+        # self.weirs.addAttribute("end_coefficient", DOUBLE, WRITE)
+        #
+        # self.pumps = View("PUMPS", EDGE, WRITE)
+        # self.pumps.addAttribute("type", STRING, WRITE)
+        # self.pumps.addAttribute("pump_x", DOUBLE, WRITE)
+        # self.pumps.addAttribute("pump_y", DOUBLE, WRITE)
+
+        # views.append(self.conduits)
+        # views.append(self.nodes)
+        # views.append(self.outfalls)
+        # views.append(self.junctions)
+        # views.append(self.inlets)
+        # views.append(self.wwtps)
+        # views.append(self.storages)
+        # views.append(self.weirs)
+        # views.append(self.xsections)
+        # views.append(self.pumps)
+
+        # self.registerViewContainers(views)
+
+
+        # self.createParameter("NameWWTP", STRING, "Identifier WWTP")
+        # self.NameWWTP = "MD020"
+
+        # self.createParameter("defaultBuiltYear", INT, "Default_Built_Year")
+        # self.defaultBuiltYear = 1900
+        #
+        # self.curves = {}
+        # self.curves_types = {}
+
+    def readCurves(self):
+        # NOTE: self.curves and self.curves_types are only created in the
+        # commented-out block above, so they must exist before this is called.
+        try:
+            f = open(self.filename)
+            startReading = False
+
+            for line in f:
+                line = line.strip()
+                if line == '':
+                    continue
+                if line[0] == ';':
+                    continue
+                if startReading and line[0] == '[':
+                    startReading = False
+                    break
+                if startReading:
+                    # print line
+                    content = line.split()
+                    if content[0] not in self.curves:
+                        self.curves[content[0]] = []
+                    values = self.curves[content[0]]
+                    if (len(content) == 4):
+                        values.append((float(content[2]), float(content[3])))
+                    if (len(content) == 3):
+                        values.append((float(content[1]), float(content[2])))
+
+                    self.curves[content[0]] = values
+
+                    if (len(content) == 4):
+                        if content[1] != "":
+                            self.curves_types[content[0]] = str(content[1])
+
+                if line == "[CURVES]":
+                    startReading = True
+            f.close()
+
+        except Exception, e:
+            print e
+            print sys.exc_info()
+
+    def init(self):
+        self.conduits = ViewContainer("conduit", EDGE, WRITE)
+        self.conduits.addAttribute("start_id", Attribute.INT, WRITE)
+        self.conduits.addAttribute("end_id", Attribute.INT, WRITE)
+
+        self.pumps = ViewContainer("pump", EDGE, WRITE)
+        self.pumps.addAttribute("start_id", Attribute.INT, WRITE)
+        self.pumps.addAttribute("end_id", Attribute.INT, WRITE)
+
+        self.weirs = ViewContainer("weir", EDGE, WRITE)
+        self.weirs.addAttribute("start_id", Attribute.INT, WRITE)
+        self.weirs.addAttribute("end_id", Attribute.INT, WRITE)
+        # self.conduits.addAttribute("inlet_offset", Attribute.DOUBLE, WRITE)
+        # self.conduits.addAttribute("outlet_offset", Attribute.DOUBLE, WRITE)
+        # self.conduits.addAttribute("diameter", Attribute.DOUBLE, WRITE)
+
+        # self.dummy = ViewContainer("dummy", SUBSYSTEM, MODIFY)
+
+        # self.xsections = ViewContainer("xsection", COMPONENT, WRITE)
+        # self.xsections.addAttribute("type", STRING, WRITE)
+        # self.xsections.addAttribute("shape", STRING, WRITE)
+        self.nodes_container = ViewContainer("node", NODE, WRITE)
+        views = [self.nodes_container, self.conduits, self.pumps, self.weirs]
+        if self.name_outlet != "":
+            self.outlet = ViewContainer("outlet", NODE, WRITE)
+            self.outlet.addAttribute("node_id", Attribute.INT, WRITE)
+            views.append(self.outlet)
+        self.registerViewContainers(views)
+
+    def run(self):
+        # try:
+        # sewer = self.getData("Sewer")
+        results = {}
+        f = open(self.filename)
+        currentContainer = ""
+        for line in f:
+            # print line
+            line = line.strip()
+            if line == '':
+                continue
+            if line[0] == ';':
+                continue
+            if line[0] == '[':
+                results[line] = {}
+                currentContainer = line
+            # First Section is always the Name
+            content = line.split()
+            container = []
+            counter = 0
+            if len(content) < 2:
+                continue
+            for c in content:
+                counter = counter + 1
+                if counter == 1:
+                    continue
+                container.append(c)
+            ress = results[currentContainer]
+            ress[content[0]] = container
+            results[currentContainer] = ress
+        f.close()
+        # print "done reading"
+        # self.readCurves()
+
+        # "Create Nodes"
+        nodes = {}
+        node_ids = set()
+        # Add Coordinates
+        node_id = 0  # We assume that the node id increases incrementally
+        ress = results["[COORDINATES]"]
+        for c in ress:
+            node_id += 1
+            coords = ress[c]
+            node = self.nodes_container.create_feature()
+
+            # Create geometry
+            n_pt = ogr.Geometry(ogr.wkbPoint)
+            x1 = float(coords[0])
+            y1 = float(coords[1])
+
+            n_pt.SetPoint_2D(0, x1, y1)
+
+            # Set geometry in feature
+            node.SetGeometry(n_pt)
+
+            nodes[c] = (node_id, x1, y1)
+            node_ids.add(c)
+
+            if self.name_outlet == c:
+                outfall = self.outlet.create_feature()
+                outfall.SetGeometry(n_pt)
+                outfall.SetField("node_id", node_id)
+        self.nodes_container.finalise()
+        if self.name_outlet != "":
+            self.outlet.finalise()
+
+
+
+        # #Add Nodes
+        # junctions = results["[JUNCTIONS]"]
+        # for c in junctions:
+        # attributes = junctions[c]
+        # juntion = nodes[c]
+        # sewer.addComponentToView(juntion, self.junctions)
+        #
+        # juntion.addAttribute("SWMM_ID", str(c))
+        # juntion.addAttribute("invert_elevation", (float(attributes[0])))
+        # juntion.addAttribute("D", (float(attributes[1])))
+        # juntion.addAttribute("Z", (float(attributes[0])) + (float(attributes[1])))
+        # juntion.addAttribute("built_year", self.defaultBuiltYear)
+        # if 
(c == self.NameWWTP): + # print "wwtp found" + # sewer.addComponentToView(juntion, self.wwtps) + # + # #Write Outfalls + # outfalls = results["[OUTFALLS]"] + # for o in outfalls: + # vals = outfalls[o] + # attributes = outfalls[o] + # outfall = nodes[o] + # sewer.addComponentToView(outfall, self.outfalls) + # outfall.addAttribute("Z", float(vals[0])) + # if (o == self.NameWWTP): + # print "wwtp found" + # sewer.addComponentToView(outfall, self.wwtps) + # outfall.addAttribute("WWTP", 1.0) + # #Write Storage Units + # if "[STORAGE]" in results: + # storages = results["[STORAGE]"] + # for s in storages: + # vals = storages[s] + # storage = nodes[s] + # sewer.addComponentToView(storage, self.storages) + # storage.addAttribute("Z", float(vals[0])) + # storage.addAttribute("max_depth", float(vals[1])) + # storage.addAttribute("type", vals[3]) + # if vals[3] == "TABULAR": + # curve = self.curves[vals[4]] + # storage_x = doublevector() + # storage_y = doublevector() + # for c in curve: + # storage_x.append(c[0]) + # storage_y.append(c[1]) + # storage.getAttribute("storage_x").setDoubleVector(storage_x) + # storage.getAttribute("storage_y").setDoubleVector(storage_y) + # + # + # + # if "[XSECTIONS]" in results: + # xsections = results["[XSECTIONS]"] + # + ress = results["[CONDUITS]"] + counter = 0 + for c in ress: + counter += 1 + vals = ress[c] + end_id = nodes[vals[0]] + start_id = nodes[vals[1]] + + # if end_id not in node_ids: + # continue + # if start_id not in node_ids: + # continue + + conduit = self.conduits.create_feature() + line = ogr.Geometry(ogr.wkbLineString) + # print start_id + # print nodes[start_id][1], nodes[start_id][2] + line.SetPoint_2D(0, nodes[vals[0]][1], nodes[vals[0]][2]) + line.SetPoint_2D(1, nodes[vals[1]][1], nodes[vals[1]][2]) + + conduit.SetGeometry(line) + + # Create XSection + conduit.SetField("start_id", nodes[vals[0]][0]) + conduit.SetField("end_id", nodes[vals[1]][0]) + # conduit.SetField("inlet_offset", float(vals[4])) + # conduit.SetField("outlet_offset", float(vals[5])) + # e.addAttribute("built_year", self.defaultBuiltYear) + # if c in xsections: + # e.addAttribute("Diameter", float(xsections[c][1])) + # xsection = self.createXSection(sewer, xsections[c]) + # e.getAttribute("XSECTION").addLink(xsection, "XSECTION") + self.conduits.finalise() + if "[WEIRS]" in results: + c_weirs = results["[WEIRS]"] + for c in c_weirs: + vals = c_weirs[c] + end_id = nodes[vals[0]] + start_id = nodes[vals[1]] + + # if end_id not in node_ids: + # continue + # if start_id not in node_ids: + # continue + + weir = self.weirs.create_feature() + line = ogr.Geometry(ogr.wkbLineString) + # print start_id + # print nodes[start_id][1], nodes[start_id][2] + line.SetPoint_2D(0, nodes[vals[0]][1], nodes[vals[0]][2]) + line.SetPoint_2D(1, nodes[vals[1]][1], nodes[vals[1]][2]) + + weir.SetGeometry(line) + + # Create XSection + weir.SetField("start_id", nodes[vals[0]][0]) + weir.SetField("end_id", nodes[vals[1]][0]) + self.weirs.finalise() + # vals = c_weirs[c] + # start = nodes[vals[0]] + # end = nodes[vals[1]] + # e = sewer.addEdge(start, end, self.weirs) + # + # e.addAttribute("type",vals[2] ) + # e.addAttribute("crest_height",float(vals[3])) + # e.addAttribute("discharge_coefficient",float(vals[4])) + # e.addAttribute("end_coefficient",float(vals[7])) + # #Create XSection + # e.addAttribute("Diameter", float(xsections[c][1])) + # + # xsection = self.createXSection(sewer, xsections[c]) + # e.getAttribute("XSECTION").addLink(xsection, "XSECTION") + + if "[PUMPS]" in results: + c_pumps = 
results["[PUMPS]"] + for c in c_pumps: + vals = c_pumps[c] + end_id = nodes[vals[0]] + start_id = nodes[vals[1]] + + # if end_id not in node_ids: + # continue + # if start_id not in node_ids: + # continue + + pump = self.pumps.create_feature() + line = ogr.Geometry(ogr.wkbLineString) + # print start_id + # print nodes[start_id][1], nodes[start_id][2] + line.SetPoint_2D(0, nodes[vals[0]][1], nodes[vals[0]][2]) + line.SetPoint_2D(1, nodes[vals[1]][1], nodes[vals[1]][2]) + + pump.SetGeometry(line) + + # Create XSection + pump.SetField("start_id", nodes[vals[0]][0]) + pump.SetField("end_id", nodes[vals[1]][0]) + self.pumps.finalise() + # vals = c_pumps[c] + # start = nodes[vals[0]] + # end = nodes[vals[1]] + # e = sewer.addEdge(start, end, self.pumps) + # + # e.addAttribute("type", self.curves_types[vals[2]] ) + # + # curve = self.curves[vals[2]] + # pump_x = doublevector() + # pump_y = doublevector() + # for c in curve: + # pump_x.append(c[0]) + # pump_y.append(c[1]) + # + # e.getAttribute("pump_x").setDoubleVector(pump_x) + # e.getAttribute("pump_y").setDoubleVector(pump_y) + # + # except Exception, e: + # print e + # print sys.exc_info() + + # self.nodes_container.finalise() + + + # def createXSection(self, sewer, attributes): + # c_xsection = Component() + # xsection = sewer.addComponent(c_xsection, self.xsections) + # xsection.addAttribute("type", str(attributes[0])) + # diameters = doublevector() + # diameters.push_back(float(attributes[1])) + # #print self.curves + # if str(attributes[0]) != "CUSTOM": + # diameters.push_back(float(attributes[2])) + # diameters.push_back(float(attributes[3])) + # diameters.push_back(float(attributes[4])) + # else: + # shape_x = doublevector() + # shape_y = doublevector() + # #print attributes + # cv = self.curves[attributes[2]] + # + # #xsection.getAttribute("shape_type").setString(vd) + # for c in cv: + # shape_x.append(c[0]) + # shape_y.append(c[1]) + # xsection.getAttribute("shape_x").setDoubleVector(shape_x) + # xsection.getAttribute("shape_y").setDoubleVector(shape_y) + # xsection.getAttribute("shape_type").setString(self.curves_types[attributes[2]]) + # + # xsection.getAttribute("diameters").setDoubleVector(diameters) + # + # return xsection + +import unittest +import transaction + +from pyramid import testing + +from .models import DBSession + + +class TestMyViewSuccessCondition(unittest.TestCase): + def setUp(self): + self.config = testing.setUp() + from sqlalchemy import create_engine + engine = create_engine('sqlite://') + from .models import ( + Base, + MyModel, + ) + DBSession.configure(bind=engine) + Base.metadata.create_all(engine) + with transaction.manager: + model = MyModel(name='one', value=55) + DBSession.add(model) + + def tearDown(self): + DBSession.remove() + testing.tearDown() + + def test_passing_view(self): + from .views import my_view + request = testing.DummyRequest() + info = my_view(request) + self.assertEqual(info['one'].name, 'one') + self.assertEqual(info['project'], 'example') + + +class TestMyViewFailureCondition(unittest.TestCase): + def setUp(self): + self.config = testing.setUp() + from sqlalchemy import create_engine + engine = create_engine('sqlite://') + from .models import ( + Base, + MyModel, + ) + DBSession.configure(bind=engine) + + def tearDown(self): + DBSession.remove() + testing.tearDown() + + def test_failing_view(self): + from .views import my_view + request = testing.DummyRequest() + info = my_view(request) + self.assertEqual(info.status_int, 500) + +# Copyright (c) 2001-2004 Twisted Matrix Laboratories. 
+
+# See LICENSE for details.
+
+
+"""Creation of Windows shortcuts.
+
+Requires win32all.
+"""
+
+from win32com.shell import shell
+import pythoncom
+import os
+
+
+def open(filename):
+    """Open an existing shortcut for reading.
+
+    @return: The shortcut object
+    @rtype: Shortcut
+    """
+    sc = Shortcut()
+    sc.load(filename)
+    return sc
+
+
+class Shortcut:
+    """A shortcut on Win32.
+    >>> sc = Shortcut(path, arguments, description, workingdir, iconpath, iconidx)
+    @param path: Location of the target
+    @param arguments: If path points to an executable, optional arguments to
+        pass
+    @param description: Human-readable description of target
+    @param workingdir: Directory from which target is launched
+    @param iconpath: Filename that contains an icon for the shortcut
+    @param iconidx: If iconpath is set, optional index of the icon desired
+    """
+
+    def __init__(self,
+                 path=None,
+                 arguments=None,
+                 description=None,
+                 workingdir=None,
+                 iconpath=None,
+                 iconidx=0):
+        self._base = pythoncom.CoCreateInstance(
+            shell.CLSID_ShellLink, None,
+            pythoncom.CLSCTX_INPROC_SERVER, shell.IID_IShellLink
+        )
+        data = map(None,
+                   ['"%s"' % os.path.abspath(path), arguments, description,
+                    os.path.abspath(workingdir), os.path.abspath(iconpath)],
+                   ("SetPath", "SetArguments", "SetDescription",
+                    "SetWorkingDirectory"))
+        for value, function in data:
+            if value and function:
+                # call function on each non-null value
+                getattr(self, function)(value)
+        if iconpath:
+            self.SetIconLocation(iconpath, iconidx)
+
+    def load(self, filename):
+        """Read a shortcut file from disk."""
+        self._base.QueryInterface(pythoncom.IID_IPersistFile).Load(filename)
+
+    def save(self, filename):
+        """Write the shortcut to disk.
+
+        The file should be named something.lnk.
+        """
+        self._base.QueryInterface(pythoncom.IID_IPersistFile).Save(filename, 0)
+
+    def __getattr__(self, name):
+        if name != "_base":
+            return getattr(self._base, name)
+        raise AttributeError, "%s instance has no attribute %s" % \
+            (self.__class__.__name__, name)
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""This module is deprecated. Please use `airflow.providers.postgres.hooks.postgres`."""
+
+import warnings
+
+# pylint: disable=unused-import
+from airflow.providers.postgres.hooks.postgres import PostgresHook  # noqa
+
+warnings.warn(
+    "This module is deprecated. 
Please use `airflow.providers.postgres.hooks.postgres`.",
+    DeprecationWarning, stacklevel=2
+)
+
+import sys
+import re
+import os
+import string
+import subprocess
+
+#BASE_DIR = '/home/aritter/twitter_nlp'
+#BASE_DIR = os.environ['HOME'] + '/twitter_nlp'
+#BASE_DIR = '/homes/gws/aritter/twitter_nlp'
+BASE_DIR = 'twitter_nlp.jar'
+
+if os.environ.has_key('TWITTER_NLP'):
+    BASE_DIR = os.environ['TWITTER_NLP']
+
+#sys.path.append('%s/python/' % (BASE_DIR))
+#sys.path.append('%s/python/cap/' % (BASE_DIR))
+#sys.path.append('../cap/')
+#import cap_classifier
+
+def Brown2Bits(bits):
+    bitstring = ""
+    for i in range(20):
+        if bits & (1 << i):
+            bitstring += '1'
+        else:
+            bitstring += '0'
+    return bitstring
+
+def GetOrthographicFeatures(word, goodCap):
+    features = []
+
+    #Don't include these features for usernames
+    features.append("word=%s" % word)
+    features.append("word_lower=%s" % word.lower())
+    if(len(word) >= 4):
+        features.append("prefix=%s" % word[0:1].lower())
+        features.append("prefix=%s" % word[0:2].lower())
+        features.append("prefix=%s" % word[0:3].lower())
+        features.append("suffix=%s" % word[len(word)-1:len(word)].lower())
+        features.append("suffix=%s" % word[len(word)-2:len(word)].lower())
+        features.append("suffix=%s" % word[len(word)-3:len(word)].lower())
+
+    #Substring features (don't seem to help)
+    #for i in range(1,len(word)-2):
+    #    for j in range(i+1,len(word)-1):
+    #        features.append("substr=%s" % word[i:j])
+
+    if re.search(r'^[A-Z]', word):
+        features.append('INITCAP')
+    if re.search(r'^[A-Z]', word) and goodCap:
+        features.append('INITCAP_AND_GOODCAP')
+    if re.match(r'^[A-Z]+$', word):
+        features.append('ALLCAP')
+    if re.match(r'^[A-Z]+$', word) and goodCap:
+        features.append('ALLCAP_AND_GOODCAP')
+    if re.match(r'.*[0-9].*', word):
+        features.append('HASDIGIT')
+    if re.match(r'[0-9]', word):
+        features.append('SINGLEDIGIT')
+    if re.match(r'[0-9][0-9]', word):
+        features.append('DOUBLEDIGIT')
+    if re.match(r'.*-.*', word):
+        features.append('HASDASH')
+    if re.match(r'[.,;:?!-+\'"]', word):
+        features.append('PUNCTUATION')
+    return features
+
+class DictionaryFeatures:
+    def __init__(self, dictDir):
+        self.brownClusters = None
+        self.word2dictionaries = {}
+        self.dictionaries = []
+        i = 0
+        for d in os.listdir(dictDir):
+            # Skip subversion metadata before appending, so the indices
+            # stored below stay aligned with self.dictionaries.
+            if d == '.svn':
+                continue
+            self.dictionaries.append(d)
+            for line in open(dictDir + "/" + d):
+                word = line.rstrip('\n')
+                word = word.strip(' ').lower()
+                if not self.word2dictionaries.has_key(word):  #Tab-separated string is more memory efficient than a list? 
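+                    # Illustration (hypothetical data): after "acme corp" has
+                    # been seen in dictionaries 0 and 3, the entry
+                    # word2dictionaries["acme corp"] holds the tab-joined
+                    # string "0\t3", which GetDictFeatures later splits on
+                    # '\t' to recover the dictionary indices.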
+ self.word2dictionaries[word] = str(i) + else: + self.word2dictionaries[word] += "\t%s" % i + i += 1 + + def AddBrownClusters(self, brownFile): + self.brownClusters = {} + for line in open(brownFile): + line = line.rstrip('\n') + (word, bits) = line.split(' ') + bits = int(bits) + self.brownClusters[word] = bits + + MAX_WINDOW_SIZE=6 + def GetDictFeatures(self, words, i): + features = [] + for window in range(self.MAX_WINDOW_SIZE): + for start in range(max(i-window+1, 0), i+1): + end = start + window + phrase = ' '.join(words[start:end]).lower().strip(string.punctuation) + if self.word2dictionaries.has_key(phrase): + for j in self.word2dictionaries[phrase].split('\t'): + features.append('DICT=%s' % self.dictionaries[int(j)]) + if window > 1: + features.append('DICTWIN=%s' % window) + if self.brownClusters and self.brownClusters.has_key(words[i].lower()): + for j in [4, 8, 12]: + bitstring = Brown2Bits(self.brownClusters[words[i].lower()]) + features.append('BROWN=%s' % bitstring[0:j+1]) + return list(set(features)) + +class DictionaryFeatures2(DictionaryFeatures): + def __init__(self, dictFile): + self.word2dictionaries = {} + for line in open(dictFile): + (word, dictionary) = line.rstrip('\n').split('\t') + if re.search(r'^/(common|user|type|freebase|base)/', dictionary): + continue + if not self.word2dictionaries.has_key(word): + self.word2dictionaries[word] = [] + self.word2dictionaries[word].append(dictionary) + +def GetQuotes(words): + string = ' '.join(words) + quoted = [] + string = re.sub(r"' ([^']+) '", r"' |||[ \1 ]||| '", string) + string = re.sub(r'" ([^"]+) "', r'" |||[ \1 ]||| "', string) + + isquoted = False + words = string.split(' ') + for i in range(len(words)): + if words[i] == "|||[": + isquoted = True + elif words[i] == "]|||": + isquoted = False + else: + quoted.append(isquoted) + + return quoted + +class FeatureExtractor: + def __init__(self, dictDir="data/dictionaries", brownFile="%s/data/brown_clusters/60K_clusters.txt" % (BASE_DIR)): + self.df = DictionaryFeatures(dictDir) + if brownFile: + self.df.AddBrownClusters(brownFile) + + LEFT_RIGHT_WINDOW=3 + def Extract(self, words, pos, chunk, i, goodCap=True): + features = GetOrthographicFeatures(words[i], goodCap) + self.df.GetDictFeatures(words, i) + ["goodCap=%s" % goodCap] + + for j in range(i-self.LEFT_RIGHT_WINDOW,i+self.LEFT_RIGHT_WINDOW): + if j > 0 and j < i: + features.append('LEFT_WINDOW=%s' % words[j]) + elif j < len(words) and j > i: + features.append('RIGHT_WINDOW=%s' % words[j]) + + if pos: + features.append('POS=%s' % pos[i]) + features.append('POS=%s' % pos[i][0:1]) + features.append('POS=%s' % pos[i][0:2]) + + if chunk: + features.append('CHUNK=%s' % chunk[i]) + + if i == 0: + features.append('BEGIN') + + if pos: + features.append('POS=%s_X_%s' % ('_'.join(pos[i-1:i]),'_'.join(pos[i+1:i+2]))) + if chunk: + features.append('CHUNK=%s_X_%s' % ('_'.join(chunk[i-1:i]),'_'.join(chunk[i+1:i+2]))) + + if i > 0: + features += ["p1=%s" % x for x in GetOrthographicFeatures(words[i-1], goodCap) + self.df.GetDictFeatures(words, i-1)] + if pos: + features.append('PREV_POS=%s' % pos[i-1]) + features.append('PREV_POS=%s' % pos[i-1][0:1]) + features.append('PREV_POS=%s' % pos[i-1][0:2]) + if i > 1: + if pos: + features.append('PREV_POS=%s_%s' % (pos[i-1], pos[i-2])) + features.append('PREV_POS=%s_%s' % (pos[i-1][0:1], pos[i-2][0:1])) + features.append('PREV_POS=%s_%s' % (pos[i-1][0:2], pos[i-2][0:2])) + if i < len(words)-1: + features += ["n1=%s" % x for x in GetOrthographicFeatures(words[i+1], goodCap) + 
self.df.GetDictFeatures(words, i+1)]
+            if pos:
+                features.append('NEXT_POS=%s' % pos[i+1])
+                features.append('NEXT_POS=%s' % pos[i+1][0:1])
+                features.append('NEXT_POS=%s' % pos[i+1][0:2])
+        if i < len(words)-2:
+            if pos:
+                features.append('NEXT_POS=%s_%s' % (pos[i+1], pos[i+2]))
+                features.append('NEXT_POS=%s_%s' % (pos[i+1][0:1], pos[i+2][0:1]))
+                features.append('NEXT_POS=%s_%s' % (pos[i+1][0:2], pos[i+2][0:2]))
+        return features
+
+#!/usr/bin/env python
+
+'''Pygame object for storing rectangular coordinates.
+'''
+
+__docformat__ = 'restructuredtext'
+__version__ = '$Id$'
+
+import copy
+
+#import SDL.video
+import SDL
+
+class _RectProxy:
+    '''Proxy for SDL_Rect that can handle negative size.'''
+
+    __slots__ = ['x', 'y', 'w', 'h']
+
+    def __init__(self, r):
+        if isinstance(r, SDL.SDL_Rect) or isinstance(r, Rect):
+            self.x = r.x
+            self.y = r.y
+            self.w = r.w
+            self.h = r.h
+        else:
+            self.x = r[0]
+            self.y = r[1]
+            self.w = r[2]
+            self.h = r[3]
+
+    def _get_as_parameter_(self):
+        return SDL.SDL_Rect(self.x, self.y, self.w, self.h)
+
+    _as_parameter_ = property(_get_as_parameter_)
+
+class Rect:
+    __slots__ = ['_r']
+
+    def __init__(self, *args):
+        if len(args) == 1:
+            arg = args[0]
+            if isinstance(arg, Rect):
+                object.__setattr__(self, '_r', copy.copy(arg._r))
+                return
+            elif isinstance(arg, SDL.SDL_Rect):
+                object.__setattr__(self, '_r', copy.copy(arg))
+                return
+            elif hasattr(arg, 'rect'):
+                arg = arg.rect
+                if callable(arg):
+                    arg = arg()
+                self.__init__(arg)
+                return
+            elif hasattr(arg, '__len__'):
+                args = arg
+            else:
+                raise TypeError('Argument must be rect style object')
+        if len(args) == 4:
+            if args[2] < 0 or args[3] < 0:
+                object.__setattr__(self, '_r', _RectProxy((int(args[0]),
+                                                           int(args[1]),
+                                                           int(args[2]),
+                                                           int(args[3]))))
+            else:
+                object.__setattr__(self, '_r', SDL.SDL_Rect(int(args[0]),
+                                                            int(args[1]),
+                                                            int(args[2]),
+                                                            int(args[3])))
+        elif len(args) == 2:
+            if args[1][0] < 0 or args[1][1] < 0:
+                object.__setattr__(self, '_r',
+                                   _RectProxy((int(args[0][0]),
+                                               int(args[0][1]),
+                                               int(args[1][0]),
+                                               int(args[1][1]))))
+            else:
+                object.__setattr__(self, '_r',
+                                   SDL.SDL_Rect(int(args[0][0]),
+                                                int(args[0][1]),
+                                                int(args[1][0]),
+                                                int(args[1][1])))
+        else:
+            raise TypeError('Argument must be rect style object')
+
+    def __copy__(self):
+        return Rect(self)
+
+    def __repr__(self):
+        return '<rect(%d, %d, %d, %d)>' % \
+            (self._r.x, self._r.y, self._r.w, self._r.h)
+
+    def __cmp__(self, *other):
+        other = _rect_from_object(other)
+
+        if self._r.x != other._r.x:
+            return cmp(self._r.x, other._r.x)
+        if self._r.y != other._r.y:
+            return cmp(self._r.y, other._r.y)
+        if self._r.w != other._r.w:
+            return cmp(self._r.w, other._r.w)
+        if self._r.h != other._r.h:
+            return cmp(self._r.h, other._r.h)
+        return 0
+
+    def __nonzero__(self):
+        return self._r.w != 0 and self._r.h != 0
+
+    def __getattr__(self, name):
+        if name == 'top':
+            return self._r.y
+        elif name == 'left':
+            return self._r.x
+        elif name == 'bottom':
+            return self._r.y + self._r.h
+        elif name == 'right':
+            return self._r.x + self._r.w
+        elif name == 'topleft':
+            return self._r.x, self._r.y
+        elif name == 'bottomleft':
+            return self._r.x, self._r.y + self._r.h
+        elif name == 'topright':
+            return self._r.x + self._r.w, self._r.y
+        elif name == 'bottomright':
+            return self._r.x + self._r.w, self._r.y + self._r.h
+        elif name == 'midtop':
+            return self._r.x + self._r.w / 2, self._r.y
+        elif name == 'midleft':
+            return self._r.x, self._r.y + self._r.h / 2
+        elif name == 'midbottom':
+            return self._r.x + self._r.w / 2, self._r.y + self._r.h
+        elif 
name == 'midright': + return self._r.x + self._r.w, self._r.y + self._r.h / 2 + elif name == 'center': + return self._r.x + self._r.w / 2, self._r.y + self._r.h / 2 + elif name == 'centerx': + return self._r.x + self._r.w / 2 + elif name == 'centery': + return self._r.y + self._r.h / 2 + elif name == 'size': + return self._r.w, self._r.h + elif name == 'width': + return self._r.w + elif name == 'height': + return self._r.h + else: + raise AttributeError(name) + + def __setattr__(self, name, value): + if name == 'top' or name == 'y': + self._r.y = value + elif name == 'left' or name == 'x': + self._r.x = int(value) + elif name == 'bottom': + self._r.y = int(value) - self._r.h + elif name == 'right': + self._r.x = int(value) - self._r.w + elif name == 'topleft': + self._r.x = int(value[0]) + self._r.y = int(value[1]) + elif name == 'bottomleft': + self._r.x = int(value[0]) + self._r.y = int(value[1]) - self._r.h + elif name == 'topright': + self._r.x = int(value[0]) - self._r.w + self._r.y = int(value[1]) + elif name == 'bottomright': + self._r.x = int(value[0]) - self._r.w + self._r.y = int(value[1]) - self._r.h + elif name == 'midtop': + self._r.x = int(value[0]) - self._r.w / 2 + self._r.y = int(value[1]) + elif name == 'midleft': + self._r.x = int(value[0]) + self._r.y = int(value[1]) - self._r.h / 2 + elif name == 'midbottom': + self._r.x = int(value[0]) - self._r.w / 2 + self._r.y = int(value[1]) - self._r.h + elif name == 'midright': + self._r.x = int(value[0]) - self._r.w + self._r.y = int(value[1]) - self._r.h / 2 + elif name == 'center': + self._r.x = int(value[0]) - self._r.w / 2 + self._r.y = int(value[1]) - self._r.h / 2 + elif name == 'centerx': + self._r.x = int(value) - self._r.w / 2 + elif name == 'centery': + self._r.y = int(value) - self._r.h / 2 + elif name == 'size': + if int(value[0]) < 0 or int(value[1]) < 0: + self._ensure_proxy() + self._r.w, self._r.h = int(value) + elif name == 'width': + if int(value) < 0: + self._ensure_proxy() + self._r.w = int(value) + elif name == 'height': + if int(value) < 0: + self._ensure_proxy() + self._r.h = int(value) + else: + raise AttributeError(name) + + def _ensure_proxy(self): + if not isinstance(self._r, _RectProxy): + object.__setattr__(self, '_r', _RectProxy(self._r)) + + def __len__(self): + return 4 + + def __getitem__(self, key): + return (self._r.x, self._r.y, self._r.w, self._r.h)[key] + + def __setitem__(self, key, value): + r = [self._r.x, self._r.y, self._r.w, self._r.h] + r[key] = value + self._r.x, self._r.y, self._r.w, self._r.h = r + + def __coerce__(self, *other): + try: + return self, Rect(*other) + except TypeError: + return None + + def move(self, *pos): + x, y = _two_ints_from_args(pos) + return Rect(self._r.x + x, self._r.y + y, self._r.w, self._r.h) + + def move_ip(self, *pos): + x, y = _two_ints_from_args(pos) + self._r.x += x + self._r.y += y + + def inflate(self, x, y): + return Rect(self._r.x - x / 2, self._r.y - y / 2, + self._r.w + x, self._r.h + y) + + def inflate_ip(self, x, y): + self._r.x -= x / 2 + self._r.y -= y / 2 + self._r.w += x + self._r.h += y + + def clamp(self, *other): + r = Rect(self) + r.clamp_ip(*other) + return r + + def clamp_ip(self, *other): + other = _rect_from_object(other)._r + if self._r.w >= other.w: + x = other.x + other.w / 2 - self._r.w / 2 + elif self._r.x < other.x: + x = other.x + elif self._r.x + self._r.w > other.x + other.w: + x = other.x + other.w - self._r.w + else: + x = self._r.x + + if self._r.h >= other.h: + y = other.y + other.h / 2 - self._r.h / 2 + elif 
self._r.y < other.y: + y = other.y + elif self._r.y + self._r.h > other.y + other.h: + y = other.y + other.h - self._r.h + else: + y = self._r.y + + self._r.x, self._r.y = x, y + + def clip(self, *other): + r = Rect(self) + r.clip_ip(*other) + return r + + def clip_ip(self, *other): + other = _rect_from_object(other)._r + x = max(self._r.x, other.x) + w = min(self._r.x + self._r.w, other.x + other.w) - x + y = max(self._r.y, other.y) + h = min(self._r.y + self._r.h, other.y + other.h) - y + + if w <= 0 or h <= 0: + self._r.w, self._r.h = 0, 0 + else: + self._r.x, self._r.y, self._r.w, self._r.h = x, y, w, h + + def union(self, *other): + r = Rect(self) + r.union_ip(*other) + return r + + def union_ip(self, *other): + other = _rect_from_object(other)._r + x = min(self._r.x, other.x) + y = min(self._r.y, other.y) + w = max(self._r.x + self._r.w, other.x + other.w) - x + h = max(self._r.y + self._r.h, other.y + other.h) - y + self._r.x, self._r.y, self._r.w, self._r.h = x, y, w, h + + def unionall(self, others): + r = Rect(self) + r.unionall_ip(others) + return r + + def unionall_ip(self, others): + l = self._r.x + r = self._r.x + self._r.w + t = self._r.y + b = self._r.y + self._r.h + for other in others: + other = _rect_from_object(other)._r + l = min(l, other.x) + r = max(r, other.x + other.w) + t = min(t, other.y) + b = max(b, other.y + other.h) + self._r.x, self._r.y, self._r.w, self._r.h = l, t, r - l, b - t + + def fit(self, *other): + r = Rect(self) + r.fit_ip(*other) + return r + + def fit_ip(self, *other): + other = _rect_from_object(other)._r + + xratio = self._r.w / float(other.w) + yratio = self._r.h / float(other.h) + maxratio = max(xratio, yratio) + self._r.w = int(self._r.w / maxratio) + self._r.h = int(self._r.h / maxratio) + self._r.x = other.x + (other.w - self._r.w) / 2 + self._r.y = other.y + (other.h - self._r.h) / 2 + + def normalize(self): + if self._r.w < 0: + self._r.x += self._r.w + self._r.w = -self._r.w + if self._r.h < 0: + self._r.y += self._r.h + self._r.h = -self._r.h + if isinstance(self._r, _RectProxy): + object.__setattr__(self, '_r', SDL.SDL_Rect(self._r.x, + self._r.y, + self._r.w, + self._r.h)) + + def contains(self, *other): + other = _rect_from_object(other)._r + return self._r.x <= other.x and \ + self._r.y <= other.y and \ + self._r.x + self._r.w >= other.x + other.w and \ + self._r.y + self._r.h >= other.y + other.h and \ + self._r.x + self._r.w > other.x and \ + self._r.y + self._r.h > other.y + + def collidepoint(self, x, y): + return x >= self._r.x and \ + y >= self._r.y and \ + x < self._r.x + self._r.w and \ + y < self._r.y + self._r.h + + def colliderect(self, *other): + return _rect_collide(self._r, _rect_from_object(other)._r) + + def collidelist(self, others): + for i in range(len(others)): + if _rect_collide(self._r, _rect_from_object(others[i])._r): + return i + return -1 + + def collidelistall(self, others): + matches = [] + for i in range(len(others)): + if _rect_collide(self._r, _rect_from_object(others[i])._r): + matches.append(i) + return matches + + def collidedict(self, d): + for key, other in d.items(): + if _rect_collide(self._r, _rect_from_object(other)._r): + return key, other + return None + + def collidedictall(self, d): + matches = [] + for key, other in d.items(): + if _rect_collide(self._r, _rect_from_object(other)._r): + matches.append((key, other)) + return matches + +def _rect_from_object(obj): + if isinstance(obj, Rect): + return obj + if type(obj) in (tuple, list): + return Rect(*obj) + else: + return Rect(obj) + +def 
_rect_collide(a, b): + return a.x + a.w > b.x and b.x + b.w > a.x and \ + a.y + a.h > b.y and b.y + b.h > a.y + +def _two_ints_from_args(arg): + if len(arg) == 1: + return _two_ints_from_args(arg[0]) + else: + return arg[:2] + +# Copyright 2014, Doug Wiegley, A10 Networks. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import unittest2 as unittest + +import acos_client.errors as acos_errors + +import v21_mocks as mocks + + +class TestVirtualPort(unittest.TestCase): + + def test_virtual_port_delete(self): + with mocks.VirtualPortDelete().client() as c: + c.slb.virtual_server.vport.delete('vip1', 'vip1_VPORT', + c.slb.virtual_server.vport.HTTP, + '80') + + def test_virtual_port_delete_not_found(self): + with mocks.VirtualPortDeleteNotFound().client() as c: + c.slb.virtual_server.vport.delete('vip1', 'vip1_VPORT', + c.slb.virtual_server.vport.HTTP, + '80') + + def test_virtual_port_create(self): + with mocks.VirtualPortCreate().client() as c: + c.slb.virtual_server.vport.create( + 'vip1', 'vip1_VPORT', + protocol=c.slb.virtual_server.vport.HTTP, + port='80', + service_group_name='pool1') + + def test_virtual_port_create_exists(self): + with mocks.VirtualPortCreateExists().client() as c: + with self.assertRaises(acos_errors.Exists): + c.slb.virtual_server.vport.create( + 'vip1', 'vip1_VPORT', + protocol=c.slb.virtual_server.vport.HTTP, + port='80', + service_group_name='pool1') + +""" +Formtools Preview application. +""" + +import cPickle as pickle + +from django.conf import settings +from django.http import Http404 +from django.shortcuts import render_to_response +from django.template.context import RequestContext +from django.utils.hashcompat import md5_constructor +from django.utils.crypto import constant_time_compare +from django.contrib.formtools.utils import security_hash + +AUTO_ID = 'formtools_%s' # Each form here uses this as its auto_id parameter. + +class FormPreview(object): + preview_template = 'formtools/preview.html' + form_template = 'formtools/form.html' + + # METHODS SUBCLASSES SHOULDN'T OVERRIDE ################################### + + def __init__(self, form): + # form should be a Form class, not an instance. + self.form, self.state = form, {} + + def __call__(self, request, *args, **kwargs): + stage = {'1': 'preview', '2': 'post'}.get(request.POST.get(self.unused_name('stage')), 'preview') + self.parse_params(*args, **kwargs) + try: + method = getattr(self, stage + '_' + request.method.lower()) + except AttributeError: + raise Http404 + return method(request) + + def unused_name(self, name): + """ + Given a first-choice name, adds an underscore to the name until it + reaches a name that isn't claimed by any field in the form. + + This is calculated rather than being hard-coded so that no field names + are off-limits for use in the form. + """ + while 1: + try: + f = self.form.base_fields[name] + except KeyError: + break # This field name isn't being used by the form. 
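+                # Example of the loop below: if the form already defines
+                # fields named "stage" and "stage_", unused_name('stage')
+                # keeps appending underscores and returns "stage__".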
+ name += '_' + return name + + def preview_get(self, request): + "Displays the form" + f = self.form(auto_id=self.get_auto_id(), initial=self.get_initial(request)) + return render_to_response(self.form_template, + self.get_context(request, f), + context_instance=RequestContext(request)) + + def preview_post(self, request): + "Validates the POST data. If valid, displays the preview page. Else, redisplays form." + f = self.form(request.POST, auto_id=self.get_auto_id()) + context = self.get_context(request, f) + if f.is_valid(): + self.process_preview(request, f, context) + context['hash_field'] = self.unused_name('hash') + context['hash_value'] = self.security_hash(request, f) + return render_to_response(self.preview_template, context, context_instance=RequestContext(request)) + else: + return render_to_response(self.form_template, context, context_instance=RequestContext(request)) + + def _check_security_hash(self, token, request, form): + expected = self.security_hash(request, form) + if constant_time_compare(token, expected): + return True + else: + # Fall back to Django 1.2 method, for compatibility with forms that + # are in the middle of being used when the upgrade occurs. However, + # we don't want to do this fallback if a subclass has provided their + # own security_hash method - because they might have implemented a + # more secure method, and this would punch a hole in that. + + # PendingDeprecationWarning <- left here to remind us that this + # compatibility fallback should be removed in Django 1.5 + FormPreview_expected = FormPreview.security_hash(self, request, form) + if expected == FormPreview_expected: + # They didn't override security_hash, do the fallback: + old_expected = security_hash(request, form) + return constant_time_compare(token, old_expected) + else: + return False + + def post_post(self, request): + "Validates the POST data. If valid, calls done(). Else, redisplays form." + f = self.form(request.POST, auto_id=self.get_auto_id()) + if f.is_valid(): + if not self._check_security_hash(request.POST.get(self.unused_name('hash'), ''), + request, f): + return self.failed_hash(request) # Security hash failed. + return self.done(request, f.cleaned_data) + else: + return render_to_response(self.form_template, + self.get_context(request, f), + context_instance=RequestContext(request)) + + # METHODS SUBCLASSES MIGHT OVERRIDE IF APPROPRIATE ######################## + + def get_auto_id(self): + """ + Hook to override the ``auto_id`` kwarg for the form. Needed when + rendering two form previews in the same template. + """ + return AUTO_ID + + def get_initial(self, request): + """ + Takes a request argument and returns a dictionary to pass to the form's + ``initial`` kwarg when the form is being created from an HTTP get. + """ + return {} + + def get_context(self, request, form): + "Context for template rendering." + return {'form': form, 'stage_field': self.unused_name('stage'), 'state': self.state} + + + def parse_params(self, *args, **kwargs): + """ + Given captured args and kwargs from the URLconf, saves something in + self.state and/or raises Http404 if necessary. + + For example, this URLconf captures a user_id variable: + + (r'^contact/(?P\d{1,6})/$', MyFormPreview(MyForm)), + + In this case, the kwargs variable in parse_params would be + {'user_id': 32} for a request to '/contact/32/'. You can use that + user_id to make sure it's a valid user and/or save it for later, for + use in done(). 
+ """ + pass + + def process_preview(self, request, form, context): + """ + Given a validated form, performs any extra processing before displaying + the preview page, and saves any extra data in context. + """ + pass + + def security_hash(self, request, form): + """ + Calculates the security hash for the given HttpRequest and Form instances. + + Subclasses may want to take into account request-specific information, + such as the IP address. + """ + return security_hash(request, form) + + def failed_hash(self, request): + "Returns an HttpResponse in the case of an invalid security hash." + return self.preview_post(request) + + # METHODS SUBCLASSES MUST OVERRIDE ######################################## + + def done(self, request, cleaned_data): + """ + Does something with the cleaned_data and returns an + HttpResponseRedirect. + """ + raise NotImplementedError('You must define a done() method on your %s subclass.' % self.__class__.__name__) + +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import os +import subprocess + +C_GREEN = '\033[92m' +C_BLUE = '\033[94m' +C_END = '\033[00m' + + +def grouping(fileList): + root = {} + for path in fileList: + current = root + for p in path.rstrip('\n').split('/'): + current.setdefault(p, {}) + current = current[p] + return root + + +def displayItems(items, path, prefix, color): + for index, item in enumerate(sorted(items.keys())): + if index == len(items)-1: + print prefix + '└── ' + appendColor(path, item, color) + nextPrefix = prefix + ' ' + else: + print prefix + '├── ' + appendColor(path, item, color) + nextPrefix = prefix + '│   ' + if len(items[item]) > 0: + nextpath = os.path.join(path, item) + displayItems(items[item], nextpath, nextPrefix, color) + + +def appendColor(path, item, color=False): + filepath = os.path.join(path, item) + colorCode = '' + endCode = C_END if color else '' + indicator = '' + if color: + if os.path.isdir(filepath): + colorCode = C_BLUE + elif os.access(filepath, os.X_OK): + colorCode = C_GREEN + else: + colorCode = C_END + + if os.path.isdir(filepath): + indicator = '/' + + return colorCode + item + endCode + indicator + + +def main(): + cmd = 'git ls-files' + p = subprocess.Popen( + cmd, + shell=True, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + p.wait() + stdout_data = p.stdout.readlines() + stderr_data = p.stderr.read() + if len(stderr_data) > 0: + print stderr_data, + else: + color = True + currentDir = os.path.split(os.getcwd()) + print appendColor(currentDir[0], currentDir[1], color) + group = grouping(stdout_data) + + displayItems(group, '.', '', color) + + +if __name__ == '__main__': + main() + +import datetime +import re + +from flask import abort +from flask import current_app as app +from flask import redirect, request, session, url_for + +from CTFd.cache import cache +from CTFd.constants.teams import TeamAttrs +from CTFd.constants.users import UserAttrs +from CTFd.models import Fails, Teams, Tracking, Users, db +from CTFd.utils import get_config +from CTFd.utils.security.auth import logout_user +from CTFd.utils.security.signing import hmac + + +def get_current_user(): + if authed(): + user = Users.query.filter_by(id=session["id"]).first() + + # Check if the session is still valid + session_hash = session.get("hash") + if session_hash: + if session_hash != hmac(user.password): + logout_user() + if request.content_type == "application/json": + error = 401 + else: + error = redirect(url_for("auth.login", next=request.full_path)) + abort(error) + + return user + else: + return None + + +def 
get_current_user_attrs(): + if authed(): + return get_user_attrs(user_id=session["id"]) + else: + return None + + +@cache.memoize(timeout=300) +def get_user_attrs(user_id): + user = Users.query.filter_by(id=user_id).first() + if user: + d = {} + for field in UserAttrs._fields: + d[field] = getattr(user, field) + return UserAttrs(**d) + return None + + +@cache.memoize(timeout=300) +def get_user_place(user_id): + user = Users.query.filter_by(id=user_id).first() + if user: + return user.account.place + return None + + +@cache.memoize(timeout=300) +def get_user_score(user_id): + user = Users.query.filter_by(id=user_id).first() + if user: + return user.account.score + return None + + +@cache.memoize(timeout=300) +def get_team_place(team_id): + team = Teams.query.filter_by(id=team_id).first() + if team: + return team.place + return None + + +@cache.memoize(timeout=300) +def get_team_score(team_id): + team = Teams.query.filter_by(id=team_id).first() + if team: + return team.score + return None + + +def get_current_team(): + if authed(): + user = get_current_user() + return user.team + else: + return None + + +def get_current_team_attrs(): + if authed(): + user = get_user_attrs(user_id=session["id"]) + if user.team_id: + return get_team_attrs(team_id=user.team_id) + return None + + +@cache.memoize(timeout=300) +def get_team_attrs(team_id): + team = Teams.query.filter_by(id=team_id).first() + if team: + d = {} + for field in TeamAttrs._fields: + d[field] = getattr(team, field) + return TeamAttrs(**d) + return None + + +def get_current_user_type(fallback=None): + if authed(): + user = get_current_user_attrs() + return user.type + else: + return fallback + + +def authed(): + return bool(session.get("id", False)) + + +def is_admin(): + if authed(): + user = get_current_user_attrs() + return user.type == "admin" + else: + return False + + +def is_verified(): + if get_config("verify_emails"): + user = get_current_user_attrs() + if user: + return user.verified + else: + return False + else: + return True + + +def get_ip(req=None): + """ Returns the IP address of the currently in scope request. The approach is to define a list of trusted proxies + (in this case the local network), and only trust the most recently defined untrusted IP address. + Taken from http://stackoverflow.com/a/22936947/4285524 but the generator there makes no sense. + The trusted_proxies regexes is taken from Ruby on Rails. + + This has issues if the clients are also on the local network so you can remove proxies from config.py. + + CTFd does not use IP address for anything besides cursory tracking of teams and it is ill-advised to do much + more than that if you do not know what you're doing. 
+    """
+    if req is None:
+        req = request
+    trusted_proxies = app.config["TRUSTED_PROXIES"]
+    combined = "(" + ")|(".join(trusted_proxies) + ")"
+    route = req.access_route + [req.remote_addr]
+    for addr in reversed(route):
+        if not re.match(combined, addr):  # IP is not trusted but we trust the proxies
+            remote_addr = addr
+            break
+    else:
+        remote_addr = req.remote_addr
+    return remote_addr
+
+
+def get_current_user_recent_ips():
+    if authed():
+        return get_user_recent_ips(user_id=session["id"])
+    else:
+        return None
+
+
+@cache.memoize(timeout=300)
+def get_user_recent_ips(user_id):
+    hour_ago = datetime.datetime.now() - datetime.timedelta(hours=1)
+    addrs = (
+        Tracking.query.with_entities(Tracking.ip.distinct())
+        .filter(Tracking.user_id == user_id, Tracking.date >= hour_ago)
+        .all()
+    )
+    return {ip for (ip,) in addrs}
+
+
+def get_wrong_submissions_per_minute(account_id):
+    """
+    Get incorrect submissions per minute.
+
+    :param account_id:
+    :return:
+    """
+    one_min_ago = datetime.datetime.utcnow() + datetime.timedelta(minutes=-1)
+    fails = (
+        db.session.query(Fails)
+        .filter(Fails.account_id == account_id, Fails.date >= one_min_ago)
+        .all()
+    )
+    return len(fails)
+
+# Author: Sebastien Erard
+# URL: http://code.google.com/p/sickbeard/
+#
+# This file is part of SickRage.
+#
+# SickRage is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# SickRage is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
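+
+# The notifier below shells out to Synology's /usr/syno/bin/synoindex
+# utility: '-N' re-indexes a moved object (new path first, then old path),
+# '-A'/'-D' add or delete a folder, and '-a'/'-d' add or delete a single
+# file; see moveObject() and makeObject() below.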
+ +import os +import subprocess + +import sickbeard + +from sickbeard import logger +from sickrage.helper.encoding import ek +from sickrage.helper.exceptions import ex + + +class synoIndexNotifier: + def notify_snatch(self, ep_name): + pass + + def notify_download(self, ep_name): + pass + + def notify_subtitle_download(self, ep_name, lang): + pass + + def notify_git_update(self, new_version): + pass + + def moveFolder(self, old_path, new_path): + self.moveObject(old_path, new_path) + + def moveFile(self, old_file, new_file): + self.moveObject(old_file, new_file) + + def moveObject(self, old_path, new_path): + if sickbeard.USE_SYNOINDEX: + synoindex_cmd = ['/usr/syno/bin/synoindex', '-N', ek(os.path.abspath, new_path), + ek(os.path.abspath, old_path)] + logger.log(u"Executing command " + str(synoindex_cmd), logger.DEBUG) + logger.log(u"Absolute path to command: " + ek(os.path.abspath, synoindex_cmd[0]), logger.DEBUG) + try: + p = subprocess.Popen(synoindex_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, + cwd=sickbeard.PROG_DIR) + out, err = p.communicate() #@UnusedVariable + logger.log(u"Script result: " + str(out), logger.DEBUG) + except OSError, e: + logger.log(u"Unable to run synoindex: " + ex(e), logger.ERROR) + + def deleteFolder(self, cur_path): + self.makeObject('-D', cur_path) + + def addFolder(self, cur_path): + self.makeObject('-A', cur_path) + + def deleteFile(self, cur_file): + self.makeObject('-d', cur_file) + + def addFile(self, cur_file): + self.makeObject('-a', cur_file) + + def makeObject(self, cmd_arg, cur_path): + if sickbeard.USE_SYNOINDEX: + synoindex_cmd = ['/usr/syno/bin/synoindex', cmd_arg, ek(os.path.abspath, cur_path)] + logger.log(u"Executing command " + str(synoindex_cmd), logger.DEBUG) + logger.log(u"Absolute path to command: " + ek(os.path.abspath, synoindex_cmd[0]), logger.DEBUG) + try: + p = subprocess.Popen(synoindex_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, + cwd=sickbeard.PROG_DIR) + out, err = p.communicate() #@UnusedVariable + logger.log(u"Script result: " + str(out), logger.DEBUG) + except OSError, e: + logger.log(u"Unable to run synoindex: " + ex(e), logger.ERROR) + + +notifier = synoIndexNotifier + +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# Copyright: (c) 2012, Matt Wright +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + + +DOCUMENTATION = ''' +--- +module: pip +short_description: Manages Python library dependencies +description: + - "Manage Python library dependencies. To use this module, one of the following keys is required: C(name) + or C(requirements)." +version_added: "0.7" +options: + name: + description: + - The name of a Python library to install or the url(bzr+,hg+,git+,svn+) of the remote package. + - This can be a list (since 2.2) and contain version specifiers (since 2.7). + type: list + version: + description: + - The version number to install of the Python library specified in the I(name) parameter. + type: str + requirements: + description: + - The path to a pip requirements file, which should be local to the remote system. + File can be specified as a relative path if using the chdir option. + type: str + virtualenv: + description: + - An optional path to a I(virtualenv) directory to install into. + It cannot be specified together with the 'executable' parameter + (added in 2.1). + If the virtualenv does not exist, it will be created before installing + packages. 
The optional virtualenv_site_packages, virtualenv_command, + and virtualenv_python options affect the creation of the virtualenv. + type: path + virtualenv_site_packages: + description: + - Whether the virtual environment will inherit packages from the + global site-packages directory. Note that if this setting is + changed on an already existing virtual environment it will not + have any effect, the environment must be deleted and newly + created. + type: bool + default: "no" + version_added: "1.0" + virtualenv_command: + description: + - The command or a pathname to the command to create the virtual + environment with. For example C(pyvenv), C(virtualenv), + C(virtualenv2), C(~/bin/virtualenv), C(/usr/local/bin/virtualenv). + type: path + default: virtualenv + version_added: "1.1" + virtualenv_python: + description: + - The Python executable used for creating the virtual environment. + For example C(python3.5), C(python2.7). When not specified, the + Python version used to run the ansible module is used. This parameter + should not be used when C(virtualenv_command) is using C(pyvenv) or + the C(-m venv) module. + type: str + version_added: "2.0" + state: + description: + - The state of module + - The 'forcereinstall' option is only available in Ansible 2.1 and above. + type: str + choices: [ absent, forcereinstall, latest, present ] + default: present + extra_args: + description: + - Extra arguments passed to pip. + type: str + version_added: "1.0" + editable: + description: + - Pass the editable flag. + type: bool + default: 'no' + version_added: "2.0" + chdir: + description: + - cd into this directory before running the command + type: path + version_added: "1.3" + executable: + description: + - The explicit executable or pathname for the pip executable, + if different from the Ansible Python interpreter. For + example C(pip3.3), if there are both Python 2.7 and 3.3 installations + in the system and you want to run pip for the Python 3.3 installation. + - Mutually exclusive with I(virtualenv) (added in 2.1). + - Does not affect the Ansible Python interpreter. + - The setuptools package must be installed for both the Ansible Python interpreter + and for the version of Python specified by this option. + type: path + version_added: "1.3" + umask: + description: + - The system umask to apply before installing the pip package. This is + useful, for example, when installing on systems that have a very + restrictive umask by default (e.g., "0077") and you want to pip install + packages which are to be used by all users. Note that this requires you + to specify desired umask mode as an octal string, (e.g., "0022"). + type: str + version_added: "2.1" +notes: + - The virtualenv (U(http://www.virtualenv.org/)) must be + installed on the remote host if the virtualenv parameter is specified and + the virtualenv needs to be created. + - Although it executes using the Ansible Python interpreter, the pip module shells out to + run the actual pip command, so it can use any pip version you specify with I(executable). + By default, it uses the pip version for the Ansible Python interpreter. For example, pip3 on python 3, and pip2 or pip on python 2. + - The interpreter used by Ansible + (see :ref:`ansible_python_interpreter`) + requires the setuptools package, regardless of the version of pip set with + the I(executable) option. 
+requirements:
+- pip
+- virtualenv
+- setuptools
+author:
+- Matt Wright (@mattupstate)
+'''
+
+EXAMPLES = '''
+- name: Install bottle python package
+  pip:
+    name: bottle
+
+- name: Install bottle python package on version 0.11
+  pip:
+    name: bottle==0.11
+
+- name: Install bottle python package with version specifiers
+  pip:
+    name: bottle>0.10,<0.20,!=0.11
+
+- name: Install multiple python packages with version specifiers
+  pip:
+    name:
+      - django>1.11.0,<1.12.0
+      - bottle>0.10,<0.20,!=0.11
+
+- name: Install python package using a proxy
+  # Pip doesn't use the standard environment variables; use the CAPITALIZED ones below
+  pip:
+    name: six
+  environment:
+    HTTP_PROXY: '127.0.0.1:8080'
+    HTTPS_PROXY: '127.0.0.1:8080'
+
+# You do not have to supply the '-e' option in extra_args
+- name: Install MyApp using one of the remote protocols (bzr+,hg+,git+,svn+)
+  pip:
+    name: svn+http://myrepo/svn/MyApp#egg=MyApp
+
+- name: Install MyApp using one of the remote protocols (bzr+,hg+,git+)
+  pip:
+    name: git+http://myrepo/app/MyApp
+
+- name: Install MyApp from local tarball
+  pip:
+    name: file:///path/to/MyApp.tar.gz
+
+- name: Install bottle into the specified (virtualenv), inheriting none of the globally installed modules
+  pip:
+    name: bottle
+    virtualenv: /my_app/venv
+
+- name: Install bottle into the specified (virtualenv), inheriting globally installed modules
+  pip:
+    name: bottle
+    virtualenv: /my_app/venv
+    virtualenv_site_packages: yes
+
+- name: Install bottle into the specified (virtualenv), using Python 2.7
+  pip:
+    name: bottle
+    virtualenv: /my_app/venv
+    virtualenv_command: virtualenv-2.7
+
+- name: Install bottle within a user home directory
+  pip:
+    name: bottle
+    extra_args: --user
+
+- name: Install specified python requirements
+  pip:
+    requirements: /my_app/requirements.txt
+
+- name: Install specified python requirements in indicated (virtualenv)
+  pip:
+    requirements: /my_app/requirements.txt
+    virtualenv: /my_app/venv
+
+- name: Install specified python requirements and custom Index URL
+  pip:
+    requirements: /my_app/requirements.txt
+    extra_args: -i https://example.com/pypi/simple
+
+- name: Install specified python requirements offline from a local directory with downloaded packages
+  pip:
+    requirements: /my_app/requirements.txt
+    extra_args: "--no-index --find-links=file:///my_downloaded_packages_dir"
+
+- name: Install bottle for Python 3.3 specifically, using the 'pip3.3' executable
+  pip:
+    name: bottle
+    executable: pip3.3
+
+- name: Install bottle, forcing reinstallation if it's already installed
+  pip:
+    name: bottle
+    state: forcereinstall
+
+- name: Install bottle while ensuring the umask is 0022 (to ensure other users can use it)
+  pip:
+    name: bottle
+    umask: "0022"
+  become: True
+'''
+
+RETURN = '''
+cmd:
+  description: pip command used by the module
+  returned: success
+  type: str
+  sample: pip2 install ansible six
+name:
+  description: list of python modules targeted by pip
+  returned: success
+  type: list
+  sample: ['ansible', 'six']
+requirements:
+  description: Path to the requirements file
+  returned: success, if a requirements file was provided
+  type: str
+  sample: "/srv/git/project/requirements.txt"
+version:
+  description: Version of the package specified in 'name'
+  returned: success, if a name and version were provided
+  type: str
+  sample: "2.5.1"
+virtualenv:
+  description: Path to the virtualenv
+  returned: success, if a virtualenv path was provided
+  type: str
+  sample: "/tmp/virtualenv"
+'''
+
+import os
+import re
+import sys
+import tempfile
+import operator
+import shlex
+import traceback
+from distutils.version import LooseVersion
+
+SETUPTOOLS_IMP_ERR = None
+try:
+    from pkg_resources import Requirement
+
+    HAS_SETUPTOOLS = True
+except ImportError:
+    HAS_SETUPTOOLS = False
+    SETUPTOOLS_IMP_ERR = traceback.format_exc()
+
+from ansible.module_utils.basic import AnsibleModule, is_executable, missing_required_lib
+from ansible.module_utils._text import to_native
+from ansible.module_utils.six import PY3
+
+
+#: Python one-liners to be run at the command line that will determine the
+# installed version for these special libraries. These are libraries that
+# don't end up in the output of pip freeze.
+_SPECIAL_PACKAGE_CHECKERS = {'setuptools': 'import setuptools; print(setuptools.__version__)',
+                             'pip': 'import pkg_resources; print(pkg_resources.get_distribution("pip").version)'}
+
+_VCS_RE = re.compile(r'(svn|git|hg|bzr)\+')
+
+op_dict = {">=": operator.ge, "<=": operator.le, ">": operator.gt,
+           "<": operator.lt, "==": operator.eq, "!=": operator.ne, "~=": operator.ge}
+
+
+def _is_vcs_url(name):
+    """Test whether a name is a vcs url or not."""
+    return re.match(_VCS_RE, name)
+
+
+def _is_package_name(name):
+    """Test whether the name is a package name or a version specifier."""
+    return not name.lstrip().startswith(tuple(op_dict.keys()))
+
+
+def _recover_package_name(names):
+    """Recover package names as a list from the user's raw input.
+
+    :input: a mixed and invalid list of names or version specifiers
+    :return: a list of valid package names
+
+    e.g.
+    input: ['django>1.11.1', '<1.11.3', 'ipaddress', 'simpleproject>1.1.0', '<2.0.0']
+    return: ['django>1.11.1,<1.11.3', 'ipaddress', 'simpleproject>1.1.0,<2.0.0']
+
+    input: ['django>1.11.1,<1.11.3,ipaddress', 'simpleproject>1.1.0,<2.0.0']
+    return: ['django>1.11.1,<1.11.3', 'ipaddress', 'simpleproject>1.1.0,<2.0.0']
+    """
+    # rebuild input name to a flat list so we can tolerate any combination of input
+    tmp = []
+    for one_line in names:
+        tmp.extend(one_line.split(","))
+    names = tmp
+
+    # reconstruct the names
+    name_parts = []
+    package_names = []
+    in_brackets = False
+    for name in names:
+        if _is_package_name(name) and not in_brackets:
+            if name_parts:
+                package_names.append(",".join(name_parts))
+            name_parts = []
+        if "[" in name:
+            in_brackets = True
+        if in_brackets and "]" in name:
+            in_brackets = False
+        name_parts.append(name)
+    package_names.append(",".join(name_parts))
+    return package_names
+
+
+def _get_cmd_options(module, cmd):
+    thiscmd = cmd + " --help"
+    rc, stdout, stderr = module.run_command(thiscmd)
+    if rc != 0:
+        module.fail_json(msg="Could not get output from %s: %s" % (thiscmd, stdout + stderr))
+
+    words = stdout.strip().split()
+    cmd_options = [x for x in words if x.startswith('--')]
+    return cmd_options
+
+
+def _get_packages(module, pip, chdir):
+    '''Return results of pip command to get packages.'''
+    # Try 'pip list' command first.
+    command = '%s list --format=freeze' % pip
+    lang_env = {'LANG': 'C', 'LC_ALL': 'C', 'LC_MESSAGES': 'C'}
+    rc, out, err = module.run_command(command, cwd=chdir, environ_update=lang_env)
+
+    # If there was an error (pip version too old) then use 'pip freeze'.
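+    # Both commands print one requirement per line, normally 'name==version'
+    # (the values below are illustrative, not real output):
+    #   six==1.13.0
+    #   PyYAML==5.1
+    # Older pip releases (pre-1.3) do not have 'pip list' at all, which is
+    # why the 'pip freeze' fallback below exists.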
+ if rc != 0: + command = '%s freeze' % pip + rc, out, err = module.run_command(command, cwd=chdir) + if rc != 0: + _fail(module, command, out, err) + + return command, out, err + + +def _is_present(module, req, installed_pkgs, pkg_command): + '''Return whether or not package is installed.''' + for pkg in installed_pkgs: + if '==' in pkg: + pkg_name, pkg_version = pkg.split('==') + pkg_name = Package.canonicalize_name(pkg_name) + else: + continue + + if pkg_name == req.package_name and req.is_satisfied_by(pkg_version): + return True + + return False + + +def _get_pip(module, env=None, executable=None): + # Older pip only installed under the "/usr/bin/pip" name. Many Linux + # distros install it there. + # By default, we try to use pip required for the current python + # interpreter, so people can use pip to install modules dependencies + candidate_pip_basenames = ('pip2', 'pip') + if PY3: + # pip under python3 installs the "/usr/bin/pip3" name + candidate_pip_basenames = ('pip3',) + + pip = None + if executable is not None: + if os.path.isabs(executable): + pip = executable + else: + # If you define your own executable that executable should be the only candidate. + # As noted in the docs, executable doesn't work with virtualenvs. + candidate_pip_basenames = (executable,) + + if pip is None: + if env is None: + opt_dirs = [] + for basename in candidate_pip_basenames: + pip = module.get_bin_path(basename, False, opt_dirs) + if pip is not None: + break + else: + # For-else: Means that we did not break out of the loop + # (therefore, that pip was not found) + module.fail_json(msg='Unable to find any of %s to use. pip' + ' needs to be installed.' % ', '.join(candidate_pip_basenames)) + else: + # If we're using a virtualenv we must use the pip from the + # virtualenv + venv_dir = os.path.join(env, 'bin') + candidate_pip_basenames = (candidate_pip_basenames[0], 'pip') + for basename in candidate_pip_basenames: + candidate = os.path.join(venv_dir, basename) + if os.path.exists(candidate) and is_executable(candidate): + pip = candidate + break + else: + # For-else: Means that we did not break out of the loop + # (therefore, that pip was not found) + module.fail_json(msg='Unable to find pip in the virtualenv, %s, ' % env + + 'under any of these names: %s. ' % (', '.join(candidate_pip_basenames)) + + 'Make sure pip is present in the virtualenv.') + + return pip + + +def _fail(module, cmd, out, err): + msg = '' + if out: + msg += "stdout: %s" % (out, ) + if err: + msg += "\n:stderr: %s" % (err, ) + module.fail_json(cmd=cmd, msg=msg) + + +def _get_package_info(module, package, env=None): + """This is only needed for special packages which do not show up in pip freeze + + pip and setuptools fall into this category. + + :returns: a string containing the version number if the package is + installed. None if the package is not installed. + """ + if env: + opt_dirs = ['%s/bin' % env] + else: + opt_dirs = [] + python_bin = module.get_bin_path('python', False, opt_dirs) + + if python_bin is None: + formatted_dep = None + else: + rc, out, err = module.run_command([python_bin, '-c', _SPECIAL_PACKAGE_CHECKERS[package]]) + if rc: + formatted_dep = None + else: + formatted_dep = '%s==%s' % (package, out.strip()) + return formatted_dep + + +def setup_virtualenv(module, env, chdir, out, err): + if module.check_mode: + module.exit_json(changed=True) + + cmd = shlex.split(module.params['virtualenv_command']) + + # Find the binary for the command in the PATH + # and switch the command for the explicit path. 
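+    # e.g. a bare command such as 'virtualenv' is resolved through PATH
+    # (to something like '/usr/bin/virtualenv' -- an illustrative location),
+    # while anything with a directory component, such as
+    # '/opt/py27/bin/virtualenv', is kept exactly as given.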
+ if os.path.basename(cmd[0]) == cmd[0]: + cmd[0] = module.get_bin_path(cmd[0], True) + + # Add the system-site-packages option if that + # is enabled, otherwise explicitly set the option + # to not use system-site-packages if that is an + # option provided by the command's help function. + if module.params['virtualenv_site_packages']: + cmd.append('--system-site-packages') + else: + cmd_opts = _get_cmd_options(module, cmd[0]) + if '--no-site-packages' in cmd_opts: + cmd.append('--no-site-packages') + + virtualenv_python = module.params['virtualenv_python'] + # -p is a virtualenv option, not compatible with pyenv or venv + # this conditional validates if the command being used is not any of them + if not any(ex in module.params['virtualenv_command'] for ex in ('pyvenv', '-m venv')): + if virtualenv_python: + cmd.append('-p%s' % virtualenv_python) + elif PY3: + # Ubuntu currently has a patch making virtualenv always + # try to use python2. Since Ubuntu16 works without + # python2 installed, this is a problem. This code mimics + # the upstream behaviour of using the python which invoked + # virtualenv to determine which python is used inside of + # the virtualenv (when none are specified). + cmd.append('-p%s' % sys.executable) + + # if venv or pyvenv are used and virtualenv_python is defined, then + # virtualenv_python is ignored, this has to be acknowledged + elif module.params['virtualenv_python']: + module.fail_json( + msg='virtualenv_python should not be used when' + ' using the venv module or pyvenv as virtualenv_command' + ) + + cmd.append(env) + rc, out_venv, err_venv = module.run_command(cmd, cwd=chdir) + out += out_venv + err += err_venv + if rc != 0: + _fail(module, cmd, out, err) + return out, err + + +class Package: + """Python distribution package metadata wrapper. + + A wrapper class for Requirement, which provides + API to parse package name, version specifier, + test whether a package is already satisfied. + """ + + _CANONICALIZE_RE = re.compile(r'[-_.]+') + + def __init__(self, name_string, version_string=None): + self._plain_package = False + self.package_name = name_string + self._requirement = None + + if version_string: + version_string = version_string.lstrip() + separator = '==' if version_string[0].isdigit() else ' ' + name_string = separator.join((name_string, version_string)) + try: + self._requirement = Requirement.parse(name_string) + # old pkg_resource will replace 'setuptools' with 'distribute' when it's already installed + if self._requirement.project_name == "distribute" and "setuptools" in name_string: + self.package_name = "setuptools" + self._requirement.project_name = "setuptools" + else: + self.package_name = Package.canonicalize_name(self._requirement.project_name) + self._plain_package = True + except ValueError as e: + pass + + @property + def has_version_specifier(self): + if self._plain_package: + return bool(self._requirement.specs) + return False + + def is_satisfied_by(self, version_to_test): + if not self._plain_package: + return False + try: + return self._requirement.specifier.contains(version_to_test, prereleases=True) + except AttributeError: + # old setuptools has no specifier, do fallback + version_to_test = LooseVersion(version_to_test) + return all( + op_dict[op](version_to_test, LooseVersion(ver)) + for op, ver in self._requirement.specs + ) + + @staticmethod + def canonicalize_name(name): + # This is taken from PEP 503. 
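+        # e.g. 'Django' -> 'django', and 'zope.interface' / 'zope_interface'
+        # both -> 'zope-interface': comparisons ignore case and the choice of
+        # '-', '_' or '.' as the separator.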
+ return Package._CANONICALIZE_RE.sub("-", name).lower() + + def __str__(self): + if self._plain_package: + return to_native(self._requirement) + return self.package_name + + +def main(): + state_map = dict( + present=['install'], + absent=['uninstall', '-y'], + latest=['install', '-U'], + forcereinstall=['install', '-U', '--force-reinstall'], + ) + + module = AnsibleModule( + argument_spec=dict( + state=dict(type='str', default='present', choices=state_map.keys()), + name=dict(type='list', elements='str'), + version=dict(type='str'), + requirements=dict(type='str'), + virtualenv=dict(type='path'), + virtualenv_site_packages=dict(type='bool', default=False), + virtualenv_command=dict(type='path', default='virtualenv'), + virtualenv_python=dict(type='str'), + extra_args=dict(type='str'), + editable=dict(type='bool', default=False), + chdir=dict(type='path'), + executable=dict(type='path'), + umask=dict(type='str'), + ), + required_one_of=[['name', 'requirements']], + mutually_exclusive=[['name', 'requirements'], ['executable', 'virtualenv']], + supports_check_mode=True, + ) + + if not HAS_SETUPTOOLS: + module.fail_json(msg=missing_required_lib("setuptools"), + exception=SETUPTOOLS_IMP_ERR) + + state = module.params['state'] + name = module.params['name'] + version = module.params['version'] + requirements = module.params['requirements'] + extra_args = module.params['extra_args'] + chdir = module.params['chdir'] + umask = module.params['umask'] + env = module.params['virtualenv'] + + venv_created = False + if env and chdir: + env = os.path.join(chdir, env) + + if umask and not isinstance(umask, int): + try: + umask = int(umask, 8) + except Exception: + module.fail_json(msg="umask must be an octal integer", + details=to_native(sys.exc_info()[1])) + + old_umask = None + if umask is not None: + old_umask = os.umask(umask) + try: + if state == 'latest' and version is not None: + module.fail_json(msg='version is incompatible with state=latest') + + if chdir is None: + # this is done to avoid permissions issues with privilege escalation and virtualenvs + chdir = tempfile.gettempdir() + + err = '' + out = '' + + if env: + if not os.path.exists(os.path.join(env, 'bin', 'activate')): + venv_created = True + out, err = setup_virtualenv(module, env, chdir, out, err) + + pip = _get_pip(module, env, module.params['executable']) + + cmd = [pip] + state_map[state] + + # If there's a virtualenv we want things we install to be able to use other + # installations that exist as binaries within this virtualenv. Example: we + # install cython and then gevent -- gevent needs to use the cython binary, + # not just a python package that will be found by calling the right python. + # So if there's a virtualenv, we add that bin/ to the beginning of the PATH + # in run_command by setting path_prefix here. + path_prefix = None + if env: + path_prefix = "/".join(pip.split('/')[:-1]) + + # Automatically apply -e option to extra_args when source is a VCS url. VCS + # includes those beginning with svn+, git+, hg+ or bzr+ + has_vcs = False + if name: + for pkg in name: + if pkg and _is_vcs_url(pkg): + has_vcs = True + break + + # convert raw input package names to Package instances + packages = [Package(pkg) for pkg in _recover_package_name(name)] + # check invalid combination of arguments + if version is not None: + if len(packages) > 1: + module.fail_json( + msg="'version' argument is ambiguous when installing multiple package distributions. 
" + "Please specify version restrictions next to each package in 'name' argument." + ) + if packages[0].has_version_specifier: + module.fail_json( + msg="The 'version' argument conflicts with any version specifier provided along with a package name. " + "Please keep the version specifier, but remove the 'version' argument." + ) + # if the version specifier is provided by version, append that into the package + packages[0] = Package(to_native(packages[0]), version) + + if module.params['editable']: + args_list = [] # used if extra_args is not used at all + if extra_args: + args_list = extra_args.split(' ') + if '-e' not in args_list: + args_list.append('-e') + # Ok, we will reconstruct the option string + extra_args = ' '.join(args_list) + + if extra_args: + cmd.extend(shlex.split(extra_args)) + + if name: + cmd.extend(to_native(p) for p in packages) + elif requirements: + cmd.extend(['-r', requirements]) + else: + module.exit_json( + changed=False, + warnings=["No valid name or requirements file found."], + ) + + if module.check_mode: + if extra_args or requirements or state == 'latest' or not name: + module.exit_json(changed=True) + + pkg_cmd, out_pip, err_pip = _get_packages(module, pip, chdir) + + out += out_pip + err += err_pip + + changed = False + if name: + pkg_list = [p for p in out.split('\n') if not p.startswith('You are using') and not p.startswith('You should consider') and p] + + if pkg_cmd.endswith(' freeze') and ('pip' in name or 'setuptools' in name): + # Older versions of pip (pre-1.3) do not have pip list. + # pip freeze does not list setuptools or pip in its output + # So we need to get those via a specialcase + for pkg in ('setuptools', 'pip'): + if pkg in name: + formatted_dep = _get_package_info(module, pkg, env) + if formatted_dep is not None: + pkg_list.append(formatted_dep) + out += '%s\n' % formatted_dep + + for package in packages: + is_present = _is_present(module, package, pkg_list, pkg_cmd) + if (state == 'present' and not is_present) or (state == 'absent' and is_present): + changed = True + break + module.exit_json(changed=changed, cmd=pkg_cmd, stdout=out, stderr=err) + + out_freeze_before = None + if requirements or has_vcs: + _, out_freeze_before, _ = _get_packages(module, pip, chdir) + + rc, out_pip, err_pip = module.run_command(cmd, path_prefix=path_prefix, cwd=chdir) + out += out_pip + err += err_pip + if rc == 1 and state == 'absent' and \ + ('not installed' in out_pip or 'not installed' in err_pip): + pass # rc is 1 when attempting to uninstall non-installed package + elif rc != 0: + _fail(module, cmd, out, err) + + if state == 'absent': + changed = 'Successfully uninstalled' in out_pip + else: + if out_freeze_before is None: + changed = 'Successfully installed' in out_pip + else: + _, out_freeze_after, _ = _get_packages(module, pip, chdir) + changed = out_freeze_before != out_freeze_after + + changed = changed or venv_created + + module.exit_json(changed=changed, cmd=cmd, name=name, version=version, + state=state, requirements=requirements, virtualenv=env, + stdout=out, stderr=err) + finally: + if old_umask is not None: + os.umask(old_umask) + + +if __name__ == '__main__': + main() + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from copy import deepcopy + +import numpy as np +import pandas as pd + +from xarray import Dataset, DataArray, auto_combine, concat, Variable +from xarray.core.pycompat import iteritems, OrderedDict + +from . 
import TestCase, InaccessibleArray, requires_dask +from .test_dataset import create_test_data + + +class TestConcatDataset(TestCase): + def test_concat(self): + # TODO: simplify and split this test case + + # drop the third dimension to keep things relatively understandable + data = create_test_data() + for k in list(data): + if 'dim3' in data[k].dims: + del data[k] + + split_data = [data.isel(dim1=slice(3)), + data.isel(dim1=slice(3, None))] + self.assertDatasetIdentical(data, concat(split_data, 'dim1')) + + def rectify_dim_order(dataset): + # return a new dataset with all variable dimensions transposed into + # the order in which they are found in `data` + return Dataset(dict((k, v.transpose(*data[k].dims)) + for k, v in iteritems(dataset.data_vars)), + dataset.coords, attrs=dataset.attrs) + + for dim in ['dim1', 'dim2']: + datasets = [g for _, g in data.groupby(dim, squeeze=False)] + self.assertDatasetIdentical(data, concat(datasets, dim)) + + dim = 'dim2' + self.assertDatasetIdentical( + data, concat(datasets, data[dim])) + self.assertDatasetIdentical( + data, concat(datasets, data[dim], coords='minimal')) + + datasets = [g for _, g in data.groupby(dim, squeeze=True)] + concat_over = [k for k, v in iteritems(data.coords) + if dim in v.dims and k != dim] + actual = concat(datasets, data[dim], coords=concat_over) + self.assertDatasetIdentical(data, rectify_dim_order(actual)) + + actual = concat(datasets, data[dim], coords='different') + self.assertDatasetIdentical(data, rectify_dim_order(actual)) + + # make sure the coords argument behaves as expected + data.coords['extra'] = ('dim4', np.arange(3)) + for dim in ['dim1', 'dim2']: + datasets = [g for _, g in data.groupby(dim, squeeze=True)] + actual = concat(datasets, data[dim], coords='all') + expected = np.array([data['extra'].values + for _ in range(data.dims[dim])]) + self.assertArrayEqual(actual['extra'].values, expected) + + actual = concat(datasets, data[dim], coords='different') + self.assertDataArrayEqual(data['extra'], actual['extra']) + actual = concat(datasets, data[dim], coords='minimal') + self.assertDataArrayEqual(data['extra'], actual['extra']) + + # verify that the dim argument takes precedence over + # concatenating dataset variables of the same name + dim = (2 * data['dim1']).rename('dim1') + datasets = [g for _, g in data.groupby('dim1', squeeze=False)] + expected = data.copy() + expected['dim1'] = dim + self.assertDatasetIdentical(expected, concat(datasets, dim)) + + def test_concat_data_vars(self): + data = Dataset({'foo': ('x', np.random.randn(10))}) + objs = [data.isel(x=slice(5)), data.isel(x=slice(5, None))] + for data_vars in ['minimal', 'different', 'all', [], ['foo']]: + actual = concat(objs, dim='x', data_vars=data_vars) + self.assertDatasetIdentical(data, actual) + + def test_concat_coords(self): + data = Dataset({'foo': ('x', np.random.randn(10))}) + expected = data.assign_coords(c=('x', [0] * 5 + [1] * 5)) + objs = [data.isel(x=slice(5)).assign_coords(c=0), + data.isel(x=slice(5, None)).assign_coords(c=1)] + for coords in ['different', 'all', ['c']]: + actual = concat(objs, dim='x', coords=coords) + self.assertDatasetIdentical(expected, actual) + for coords in ['minimal', []]: + with self.assertRaisesRegexp(ValueError, 'not equal across'): + concat(objs, dim='x', coords=coords) + + def test_concat_constant_index(self): + # GH425 + ds1 = Dataset({'foo': 1.5}, {'y': 1}) + ds2 = Dataset({'foo': 2.5}, {'y': 1}) + expected = Dataset({'foo': ('y', [1.5, 2.5]), 'y': [1, 1]}) + for mode in ['different', 'all', 
['foo']]: + actual = concat([ds1, ds2], 'y', data_vars=mode) + self.assertDatasetIdentical(expected, actual) + with self.assertRaisesRegexp(ValueError, 'not equal across datasets'): + concat([ds1, ds2], 'y', data_vars='minimal') + + def test_concat_size0(self): + data = create_test_data() + split_data = [data.isel(dim1=slice(0, 0)), data] + actual = concat(split_data, 'dim1') + self.assertDatasetIdentical(data, actual) + + actual = concat(split_data[::-1], 'dim1') + self.assertDatasetIdentical(data, actual) + + def test_concat_autoalign(self): + ds1 = Dataset({'foo': DataArray([1, 2], coords=[('x', [1, 2])])}) + ds2 = Dataset({'foo': DataArray([1, 2], coords=[('x', [1, 3])])}) + actual = concat([ds1, ds2], 'y') + expected = Dataset({'foo': DataArray([[1, 2, np.nan], [1, np.nan, 2]], + dims=['y', 'x'], + coords={'x': [1, 2, 3]})}) + self.assertDatasetIdentical(expected, actual) + + def test_concat_errors(self): + data = create_test_data() + split_data = [data.isel(dim1=slice(3)), + data.isel(dim1=slice(3, None))] + + with self.assertRaisesRegexp(ValueError, 'must supply at least one'): + concat([], 'dim1') + + with self.assertRaisesRegexp(ValueError, 'are not coordinates'): + concat([data, data], 'new_dim', coords=['not_found']) + + with self.assertRaisesRegexp(ValueError, 'global attributes not'): + data0, data1 = deepcopy(split_data) + data1.attrs['foo'] = 'bar' + concat([data0, data1], 'dim1', compat='identical') + self.assertDatasetIdentical( + data, concat([data0, data1], 'dim1', compat='equals')) + + with self.assertRaisesRegexp(ValueError, 'encountered unexpected'): + data0, data1 = deepcopy(split_data) + data1['foo'] = ('bar', np.random.randn(10)) + concat([data0, data1], 'dim1') + + with self.assertRaisesRegexp(ValueError, 'compat.* invalid'): + concat(split_data, 'dim1', compat='foobar') + + with self.assertRaisesRegexp(ValueError, 'unexpected value for'): + concat([data, data], 'new_dim', coords='foobar') + + with self.assertRaisesRegexp( + ValueError, 'coordinate in some datasets but not others'): + concat([Dataset({'x': 0}), Dataset({'x': [1]})], dim='z') + + with self.assertRaisesRegexp( + ValueError, 'coordinate in some datasets but not others'): + concat([Dataset({'x': 0}), Dataset({}, {'x': 1})], dim='z') + + with self.assertRaisesRegexp(ValueError, 'no longer a valid'): + concat([data, data], 'new_dim', mode='different') + with self.assertRaisesRegexp(ValueError, 'no longer a valid'): + concat([data, data], 'new_dim', concat_over='different') + + def test_concat_promote_shape(self): + # mixed dims within variables + objs = [Dataset({}, {'x': 0}), Dataset({'x': [1]})] + actual = concat(objs, 'x') + expected = Dataset({'x': [0, 1]}) + self.assertDatasetIdentical(actual, expected) + + objs = [Dataset({'x': [0]}), Dataset({}, {'x': 1})] + actual = concat(objs, 'x') + self.assertDatasetIdentical(actual, expected) + + # mixed dims between variables + objs = [Dataset({'x': [2], 'y': 3}), Dataset({'x': [4], 'y': 5})] + actual = concat(objs, 'x') + expected = Dataset({'x': [2, 4], 'y': ('x', [3, 5])}) + self.assertDatasetIdentical(actual, expected) + + # mixed dims in coord variable + objs = [Dataset({'x': [0]}, {'y': -1}), + Dataset({'x': [1]}, {'y': ('x', [-2])})] + actual = concat(objs, 'x') + expected = Dataset({'x': [0, 1]}, {'y': ('x', [-1, -2])}) + self.assertDatasetIdentical(actual, expected) + + # scalars with mixed lengths along concat dim -- values should repeat + objs = [Dataset({'x': [0]}, {'y': -1}), + Dataset({'x': [1, 2]}, {'y': -2})] + actual = concat(objs, 'x') + 
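+        # each input's scalar y is broadcast along its own slice of x, so -1
+        # appears once (for x=[0]) and -2 twice (for x=[1, 2])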
expected = Dataset({'x': [0, 1, 2]}, {'y': ('x', [-1, -2, -2])}) + self.assertDatasetIdentical(actual, expected) + + # broadcast 1d x 1d -> 2d + objs = [Dataset({'z': ('x', [-1])}, {'x': [0], 'y': [0]}), + Dataset({'z': ('y', [1])}, {'x': [1], 'y': [0]})] + actual = concat(objs, 'x') + expected = Dataset({'z': (('x', 'y'), [[-1], [1]])}, + {'x': [0, 1], 'y': [0]}) + self.assertDatasetIdentical(actual, expected) + + def test_concat_do_not_promote(self): + # GH438 + objs = [Dataset({'y': ('t', [1])}, {'x': 1, 't': [0]}), + Dataset({'y': ('t', [2])}, {'x': 1, 't': [0]})] + expected = Dataset({'y': ('t', [1, 2])}, {'x': 1, 't': [0, 0]}) + actual = concat(objs, 't') + self.assertDatasetIdentical(expected, actual) + + objs = [Dataset({'y': ('t', [1])}, {'x': 1, 't': [0]}), + Dataset({'y': ('t', [2])}, {'x': 2, 't': [0]})] + with self.assertRaises(ValueError): + concat(objs, 't', coords='minimal') + + def test_concat_dim_is_variable(self): + objs = [Dataset({'x': 0}), Dataset({'x': 1})] + coord = Variable('y', [3, 4]) + expected = Dataset({'x': ('y', [0, 1]), 'y': [3, 4]}) + actual = concat(objs, coord) + self.assertDatasetIdentical(actual, expected) + + def test_concat_multiindex(self): + x = pd.MultiIndex.from_product([[1, 2, 3], ['a', 'b']]) + expected = Dataset({'x': x}) + actual = concat([expected.isel(x=slice(2)), + expected.isel(x=slice(2, None))], 'x') + assert expected.equals(actual) + assert isinstance(actual.x.to_index(), pd.MultiIndex) + + +class TestConcatDataArray(TestCase): + def test_concat(self): + ds = Dataset({'foo': (['x', 'y'], np.random.random((2, 3))), + 'bar': (['x', 'y'], np.random.random((2, 3)))}, + {'x': [0, 1]}) + foo = ds['foo'] + bar = ds['bar'] + + # from dataset array: + expected = DataArray(np.array([foo.values, bar.values]), + dims=['w', 'x', 'y'], coords={'x': [0, 1]}) + actual = concat([foo, bar], 'w') + self.assertDataArrayEqual(expected, actual) + # from iteration: + grouped = [g for _, g in foo.groupby('x')] + stacked = concat(grouped, ds['x']) + self.assertDataArrayIdentical(foo, stacked) + # with an index as the 'dim' argument + stacked = concat(grouped, ds.indexes['x']) + self.assertDataArrayIdentical(foo, stacked) + + actual = concat([foo[0], foo[1]], pd.Index([0, 1])).reset_coords(drop=True) + expected = foo[:2].rename({'x': 'concat_dim'}) + self.assertDataArrayIdentical(expected, actual) + + actual = concat([foo[0], foo[1]], [0, 1]).reset_coords(drop=True) + expected = foo[:2].rename({'x': 'concat_dim'}) + self.assertDataArrayIdentical(expected, actual) + + with self.assertRaisesRegexp(ValueError, 'not identical'): + concat([foo, bar], dim='w', compat='identical') + + with self.assertRaisesRegexp(ValueError, 'not a valid argument'): + concat([foo, bar], dim='w', data_vars='minimal') + + @requires_dask + def test_concat_lazy(self): + import dask.array as da + + arrays = [DataArray( + da.from_array(InaccessibleArray(np.zeros((3, 3))), 3), + dims=['x', 'y']) for _ in range(2)] + # should not raise + combined = concat(arrays, dim='z') + self.assertEqual(combined.shape, (2, 3, 3)) + self.assertEqual(combined.dims, ('z', 'x', 'y')) + + +class TestAutoCombine(TestCase): + + @requires_dask # only for toolz + def test_auto_combine(self): + objs = [Dataset({'x': [0]}), Dataset({'x': [1]})] + actual = auto_combine(objs) + expected = Dataset({'x': [0, 1]}) + self.assertDatasetIdentical(expected, actual) + + actual = auto_combine([actual]) + self.assertDatasetIdentical(expected, actual) + + objs = [Dataset({'x': [0, 1]}), Dataset({'x': [2]})] + actual = 
auto_combine(objs) + expected = Dataset({'x': [0, 1, 2]}) + self.assertDatasetIdentical(expected, actual) + + # ensure auto_combine handles non-sorted variables + objs = [Dataset(OrderedDict([('x', ('a', [0])), ('y', ('a', [0]))])), + Dataset(OrderedDict([('y', ('a', [1])), ('x', ('a', [1]))]))] + actual = auto_combine(objs) + expected = Dataset({'x': ('a', [0, 1]), 'y': ('a', [0, 1])}) + self.assertDatasetIdentical(expected, actual) + + objs = [Dataset({'x': [0], 'y': [0]}), Dataset({'y': [1], 'x': [1]})] + with self.assertRaisesRegexp(ValueError, 'too many .* dimensions'): + auto_combine(objs) + + objs = [Dataset({'x': 0}), Dataset({'x': 1})] + with self.assertRaisesRegexp(ValueError, 'cannot infer dimension'): + auto_combine(objs) + + objs = [Dataset({'x': [0], 'y': [0]}), Dataset({'x': [0]})] + with self.assertRaises(KeyError): + auto_combine(objs) + + @requires_dask # only for toolz + def test_auto_combine_previously_failed(self): + # In the above scenario, one file is missing, containing the data for + # one year's data for one variable. + datasets = [Dataset({'a': ('x', [0]), 'x': [0]}), + Dataset({'b': ('x', [0]), 'x': [0]}), + Dataset({'a': ('x', [1]), 'x': [1]})] + expected = Dataset({'a': ('x', [0, 1]), 'b': ('x', [0, np.nan])}, + {'x': [0, 1]}) + actual = auto_combine(datasets) + self.assertDatasetIdentical(expected, actual) + + # Your data includes "time" and "station" dimensions, and each year's + # data has a different set of stations. + datasets = [Dataset({'a': ('x', [2, 3]), 'x': [1, 2]}), + Dataset({'a': ('x', [1, 2]), 'x': [0, 1]})] + expected = Dataset({'a': (('t', 'x'), + [[np.nan, 2, 3], [1, 2, np.nan]])}, + {'x': [0, 1, 2]}) + actual = auto_combine(datasets, concat_dim='t') + self.assertDatasetIdentical(expected, actual) + + @requires_dask # only for toolz + def test_auto_combine_still_fails(self): + # concat can't handle new variables (yet): + # https://github.com/pydata/xarray/issues/508 + datasets = [Dataset({'x': 0}, {'y': 0}), + Dataset({'x': 1}, {'y': 1, 'z': 1})] + with self.assertRaises(ValueError): + auto_combine(datasets, 'y') + + @requires_dask # only for toolz + def test_auto_combine_no_concat(self): + objs = [Dataset({'x': 0}), Dataset({'y': 1})] + actual = auto_combine(objs) + expected = Dataset({'x': 0, 'y': 1}) + self.assertDatasetIdentical(expected, actual) + + objs = [Dataset({'x': 0, 'y': 1}), Dataset({'y': np.nan, 'z': 2})] + actual = auto_combine(objs) + expected = Dataset({'x': 0, 'y': 1, 'z': 2}) + self.assertDatasetIdentical(expected, actual) + + data = Dataset({'x': 0}) + actual = auto_combine([data, data, data], concat_dim=None) + self.assertDatasetIdentical(data, actual) + +""" + This module is for the miscellaneous GEOS routines, particularly the + ones that return the area, distance, and length. +""" +from ctypes import POINTER, c_double, c_int + +from django.contrib.gis.geos.libgeos import GEOM_PTR, GEOSFuncFactory +from django.contrib.gis.geos.prototypes.errcheck import check_dbl, check_string +from django.contrib.gis.geos.prototypes.geom import geos_char_p +from django.utils.six.moves import range + +__all__ = ['geos_area', 'geos_distance', 'geos_length', 'geos_isvalidreason'] + + +class DblFromGeom(GEOSFuncFactory): + """ + Argument is a Geometry, return type is double that is passed + in by reference as the last argument. 
+ """ + restype = c_int # Status code returned + errcheck = staticmethod(check_dbl) + + def get_func(self, num_geom=1): + argtypes = [GEOM_PTR for i in range(num_geom)] + argtypes += [POINTER(c_double)] + self.argtypes = argtypes + return super(DblFromGeom, self).get_func() + + +# ### ctypes prototypes ### + +# Area, distance, and length prototypes. +geos_area = DblFromGeom('GEOSArea') +geos_distance = DblFromGeom('GEOSDistance', num_geom=2) +geos_length = DblFromGeom('GEOSLength') +geos_isvalidreason = GEOSFuncFactory( + 'GEOSisValidReason', restype=geos_char_p, errcheck=check_string, argtypes=[GEOM_PTR] +) + +from sympy.core import S +from sympy.integrals.quadrature import (gauss_legendre, gauss_laguerre, + gauss_hermite, gauss_gen_laguerre, + gauss_chebyshev_t, gauss_chebyshev_u, + gauss_jacobi) + +def test_legendre(): + x, w = gauss_legendre(1, 17) + assert [str(r) for r in x] == ['0'] + assert [str(r) for r in w] == ['2.0000000000000000'] + + x, w = gauss_legendre(2, 17) + assert [str(r) for r in x] == ['-0.57735026918962576', + '0.57735026918962576'] + assert [str(r) for r in w] == ['1.0000000000000000', '1.0000000000000000'] + + x, w = gauss_legendre(3, 17) + assert [str(r) for r in x] == ['-0.77459666924148338', '0', + '0.77459666924148338'] + assert [str(r) for r in w] == ['0.55555555555555556', + '0.88888888888888889', '0.55555555555555556'] + + x, w = gauss_legendre(4, 17) + assert [str(r) for r in x] == ['-0.86113631159405258', + '-0.33998104358485626', '0.33998104358485626', + '0.86113631159405258'] + assert [str(r) for r in w] == ['0.34785484513745386', + '0.65214515486254614', '0.65214515486254614', + '0.34785484513745386'] + +def test_legendre_precise(): + x, w = gauss_legendre(3, 40) + assert [str(r) for r in x] == \ + ['-0.7745966692414833770358530799564799221666', '0', + '0.7745966692414833770358530799564799221666'] + assert [str(r) for r in w] == \ + ['0.5555555555555555555555555555555555555556', + '0.8888888888888888888888888888888888888889', + '0.5555555555555555555555555555555555555556'] + +def test_laguerre(): + x, w = gauss_laguerre(1, 17) + assert [str(r) for r in x] == ['1.0000000000000000'] + assert [str(r) for r in w] == ['1.0000000000000000'] + + x, w = gauss_laguerre(2, 17) + assert [str(r) for r in x] == ['0.58578643762690495', + '3.4142135623730950'] + assert [str(r) for r in w] == ['0.85355339059327376', + '0.14644660940672624'] + + x, w = gauss_laguerre(3, 17) + assert [str(r) for r in x] == [ + '0.41577455678347908', + '2.2942803602790417', + '6.2899450829374792', + ] + assert [str(r) for r in w] == [ + '0.71109300992917302', + '0.27851773356924085', + '0.010389256501586136', + ] + + x, w = gauss_laguerre(4, 17) + assert [str(r) for r in x] == ['0.32254768961939231', '1.7457611011583466', + '4.5366202969211280', '9.3950709123011331'] + assert [str(r) for r in w] == ['0.60315410434163360', + '0.35741869243779969', '0.038887908515005384', + '0.00053929470556132745'] + + x, w = gauss_laguerre(5, 17) + assert [str(r) for r in x] == ['0.26356031971814091', '1.4134030591065168', + '3.5964257710407221', '7.0858100058588376', '12.640800844275783'] + assert [str(r) for r in w] == ['0.52175561058280865', + '0.39866681108317593', '0.075942449681707595', + '0.0036117586799220485', '2.3369972385776228e-5'] + +def test_laguerre_precise(): + x, w = gauss_laguerre(3, 40) + assert [str(r) for r in x] == \ + ['0.4157745567834790833115338731282744735466', + '2.294280360279041719822050361359593868960', + '6.289945082937479196866415765512131657493'] + assert [str(r) for r in 
w] == \ + ['0.7110930099291730154495901911425944313094', + '0.2785177335692408488014448884567264810349', + '0.01038925650158613574896492040067908765572'] + +def test_hermite(): + x, w = gauss_hermite(1, 17) + assert [str(r) for r in x] == ['0'] + assert [str(r) for r in w] == ['1.7724538509055160'] + + x, w = gauss_hermite(2, 17) + assert [str(r) for r in x] == ['-0.70710678118654752', + '0.70710678118654752'] + + assert [str(r) for r in w] == ['0.88622692545275801', + '0.88622692545275801'] + + x, w = gauss_hermite(3, 17) + assert [str(r) for r in x] == [ + '-1.2247448713915890', + '0', + '1.2247448713915890'] + assert [str(r) for r in w] == [ + '0.29540897515091934', + '1.1816359006036774', + '0.29540897515091934'] + + x, w = gauss_hermite(4, 17) + assert [str(r) for r in x] == [ + '-1.6506801238857846', + '-0.52464762327529032', + '0.52464762327529032', + '1.6506801238857846' + ] + assert [str(r) for r in w] == [ + '0.081312835447245177', + '0.80491409000551284', + '0.80491409000551284', + '0.081312835447245177' + ] + + x, w = gauss_hermite(5, 17) + assert [str(r) for r in x] == [ + '-2.0201828704560856', + '-0.95857246461381851', + '0', + '0.95857246461381851', + '2.0201828704560856' + ] + assert [str(r) for r in w] == [ + '0.019953242059045913', + '0.39361932315224116', + '0.94530872048294188', + '0.39361932315224116', + '0.019953242059045913' + ] + +def test_hermite_precise(): + x, w = gauss_hermite(3, 40) + assert [str(r) for r in x] == [ + '-1.224744871391589049098642037352945695983', + '0', + '1.224744871391589049098642037352945695983' + ] + assert [str(r) for r in w] == [ + '0.2954089751509193378830279138901908637996', + '1.181635900603677351532111655560763455198', + '0.2954089751509193378830279138901908637996' + ] + +def test_gen_laguerre(): + x, w = gauss_gen_laguerre(1, -S.Half, 17) + assert [str(r) for r in x] == ['0.50000000000000000'] + assert [str(r) for r in w] == ['1.7724538509055160'] + + x, w = gauss_gen_laguerre(2, -S.Half, 17) + assert [str(r) for r in x] == ['0.27525512860841095', + '2.7247448713915890'] + assert [str(r) for r in w] == ['1.6098281800110257', + '0.16262567089449035'] + + x, w = gauss_gen_laguerre(3, -S.Half, 17) + assert [str(r) for r in x] == ['0.19016350919348813', + '1.7844927485432516', + '5.5253437422632603'] + assert [str(r) for r in w] == ['1.4492591904487850', + '0.31413464064571329', + '0.0090600198110176913'] + + x, w = gauss_gen_laguerre(4, -S.Half, 17) + assert [str(r) for r in x] == ['0.14530352150331709', + '1.3390972881263614', + '3.9269635013582872', + '8.5886356890120343'] + assert [str(r) for r in w] ==['1.3222940251164826', + '0.41560465162978376', + '0.034155966014826951', + '0.00039920814442273524'] + + x, w = gauss_gen_laguerre(5, -S.Half, 17) + assert [str(r) for r in x] ==['0.11758132021177814', + '1.0745620124369040', + '3.0859374437175500', + '6.4147297336620305', + '11.807189489971737'] + assert [str(r) for r in w] ==['1.2217252674706516', + '0.48027722216462937', + '0.067748788910962126', + '0.0026872914935624654', + '1.5280865710465241e-5'] + + x, w = gauss_gen_laguerre(1, 2, 17) + assert [str(r) for r in x] ==['3.0000000000000000'] + assert [str(r) for r in w] == ['2.0000000000000000'] + + x, w = gauss_gen_laguerre(2, 2, 17) + assert [str(r) for r in x] == ['2.0000000000000000', + '6.0000000000000000'] + assert [str(r) for r in w] ==['1.5000000000000000', + '0.50000000000000000'] + + x, w = gauss_gen_laguerre(3, 2, 17) + assert [str(r) for r in x] ==['1.5173870806774125', + '4.3115831337195203', + '9.1710297856030672'] + 
assert [str(r) for r in w] ==['1.0374949614904253', + '0.90575000470306537', + '0.056755033806509347'] + + x, w = gauss_gen_laguerre(4, 2, 17) + assert [str(r) for r in x] ==['1.2267632635003021', + '3.4125073586969460', + '6.9026926058516134', + '12.458036771951139'] + assert [str(r) for r in w] ==['0.72552499769865438', + '1.0634242919791946', + '0.20669613102835355', + '0.0043545792937974889'] + + x, w = gauss_gen_laguerre(5, 2, 17) + assert [str(r) for r in x] ==['1.0311091440933816', + '2.8372128239538217', + '5.6202942725987079', + '9.6829098376640271', + '15.828473921690062'] + assert [str(r) for r in w] == ['0.52091739683509184', + '1.0667059331592211', + '0.38354972366693113', + '0.028564233532974658', + '0.00026271280578124935'] + +def test_gen_laguerre_precise(): + x, w = gauss_gen_laguerre(3, -S.Half, 40) + assert [str(r) for r in x] ==['0.1901635091934881328718554276203028970878', + '1.784492748543251591186722461957367638500', + '5.525343742263260275941422110422329464413'] + assert [str(r) for r in w] == ['1.449259190448785048183829411195134343108', + '0.3141346406457132878326231270167565378246', + '0.009060019811017691281714945129254301865020'] + + x, w = gauss_gen_laguerre(3, 2, 40) + assert [str(r) for r in x] == ['1.517387080677412495020323111016672547482', + '4.311583133719520302881184669723530562299', + '9.171029785603067202098492219259796890218'] + assert [str(r) for r in w] ==['1.037494961490425285817554606541269153041', + '0.9057500047030653669269785048806009945254', + '0.05675503380650934725546688857812985243312'] + +def test_chebyshev_t(): + x, w = gauss_chebyshev_t(1, 17) + assert [str(r) for r in x] == ['0'] + assert [str(r) for r in w] == ['3.1415926535897932'] + + x, w = gauss_chebyshev_t(2, 17) + assert [str(r) for r in x] == ['0.70710678118654752', + '-0.70710678118654752'] + assert [str(r) for r in w] == ['1.5707963267948966', + '1.5707963267948966'] + + x, w = gauss_chebyshev_t(3, 17) + assert [str(r) for r in x] == ['0.86602540378443865', + '0', + '-0.86602540378443865'] + assert [str(r) for r in w] == ['1.0471975511965977', + '1.0471975511965977', + '1.0471975511965977'] + + x, w = gauss_chebyshev_t(4, 17) + assert [str(r) for r in x] == ['0.92387953251128676', + '0.38268343236508977', + '-0.38268343236508977', + '-0.92387953251128676'] + + assert [str(r) for r in w] == ['0.78539816339744831', + '0.78539816339744831', + '0.78539816339744831', + '0.78539816339744831'] + + x, w = gauss_chebyshev_t(5, 17) + assert [str(r) for r in x] == ['0.95105651629515357', + '0.58778525229247313', + '0', + '-0.58778525229247313', + '-0.95105651629515357'] + + assert [str(r) for r in w] == ['0.62831853071795865', + '0.62831853071795865', + '0.62831853071795865', + '0.62831853071795865', + '0.62831853071795865'] + +def test_chebyshev_t_precise(): + x, w = gauss_chebyshev_t(3, 40) + assert [str(r) for r in x] == [ + '0.8660254037844386467637231707529361834714', + '0', + '-0.8660254037844386467637231707529361834714'] + assert [str(r) for r in w] == [ + '1.047197551196597746154214461093167628066', + '1.047197551196597746154214461093167628066', + '1.047197551196597746154214461093167628066'] + +def test_chebyshev_u(): + x, w = gauss_chebyshev_u(1, 17) + assert [str(r) for r in x] == ['0'] + assert [str(r) for r in w] == ['1.5707963267948966'] + + x, w = gauss_chebyshev_u(2, 17) + assert [str(r) for r in x] == ['0.50000000000000000', + '-0.50000000000000000'] + assert [str(r) for r in w] == ['0.78539816339744831', + '0.78539816339744831'] + + x, w = gauss_chebyshev_u(3, 17) + 
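+    # Gauss-Chebyshev (second kind) nodes and weights have closed forms:
+    # x_k = cos(k*pi/(n+1)) and w_k = pi/(n+1) * sin(k*pi/(n+1))**2, so for
+    # n=3 the nodes are cos(pi/4), cos(pi/2) = 0 and cos(3*pi/4), matching
+    # the strings asserted below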
assert [str(r) for r in x] == ['0.70710678118654752', + '0', + '-0.70710678118654752'] + assert [str(r) for r in w] == ['0.39269908169872415', + '0.78539816339744831', + '0.39269908169872415'] + + x, w = gauss_chebyshev_u(4, 17) + assert [str(r) for r in x] == ['0.80901699437494742', + '0.30901699437494742', + '-0.30901699437494742', + '-0.80901699437494742'] + assert [str(r) for r in w] == ['0.21707871342270599', + '0.56831944997474231', + '0.56831944997474231', + '0.21707871342270599'] + + x, w = gauss_chebyshev_u(5, 17) + assert [str(r) for r in x] == ['0.86602540378443865', + '0.50000000000000000', + '0', + '-0.50000000000000000', + '-0.86602540378443865'] + assert [str(r) for r in w] == ['0.13089969389957472', + '0.39269908169872415', + '0.52359877559829887', + '0.39269908169872415', + '0.13089969389957472'] + +def test_chebyshev_u_precise(): + x, w = gauss_chebyshev_u(3, 40) + assert [str(r) for r in x] == [ + '0.7071067811865475244008443621048490392848', + '0', + '-0.7071067811865475244008443621048490392848'] + assert [str(r) for r in w] == [ + '0.3926990816987241548078304229099378605246', + '0.7853981633974483096156608458198757210493', + '0.3926990816987241548078304229099378605246'] + +def test_jacobi(): + x, w = gauss_jacobi(1, -S.Half, S.Half, 17) + assert [str(r) for r in x] == ['0.50000000000000000'] + assert [str(r) for r in w] == ['3.1415926535897932'] + + x, w = gauss_jacobi(2, -S.Half, S.Half, 17) + assert [str(r) for r in x] == ['-0.30901699437494742', + '0.80901699437494742'] + assert [str(r) for r in w] == ['0.86831485369082398', + '2.2732777998989693'] + + x, w = gauss_jacobi(3, -S.Half, S.Half, 17) + assert [str(r) for r in x] == ['-0.62348980185873353', + '0.22252093395631440', + '0.90096886790241913'] + assert [str(r) for r in w] == ['0.33795476356635433', + '1.0973322242791115', + '1.7063056657443274'] + + x, w = gauss_jacobi(4, -S.Half, S.Half, 17) + assert [str(r) for r in x] == ['-0.76604444311897804', + '-0.17364817766693035', + '0.50000000000000000', + '0.93969262078590838'] + assert [str(r) for r in w] == ['0.16333179083642836', + '0.57690240318269103', + '1.0471975511965977', + '1.3541609083740761'] + + x, w = gauss_jacobi(5, -S.Half, S.Half, 17) + assert [str(r) for r in x] == ['-0.84125353283118117', + '-0.41541501300188643', + '0.14231483827328514', + '0.65486073394528506', + '0.95949297361449739'] + assert [str(r) for r in w] == ['0.090675770007435371', + '0.33391416373675607', + '0.65248870981926643', + '0.94525424081394926', + '1.1192597692123861'] + + x, w = gauss_jacobi(1, 2, 3, 17) + assert [str(r) for r in x] == ['0.14285714285714286'] + assert [str(r) for r in w] == ['1.0666666666666667'] + + x, w = gauss_jacobi(2, 2, 3, 17) + assert [str(r) for r in x] == ['-0.24025307335204215', + '0.46247529557426437'] + assert [str(r) for r in w] == ['0.48514624517838660', + '0.58152042148828007'] + + x, w = gauss_jacobi(3, 2, 3, 17) + assert [str(r) for r in x] == ['-0.46115870378089762', + '0.10438533038323902', + '0.62950064612493132'] + assert [str(r) for r in w] == ['0.17937613502213266', + '0.61595640991147154', + '0.27133412173306246'] + + x, w = gauss_jacobi(4, 2, 3, 17) + assert [str(r) for r in x] == ['-0.59903470850824782', + '-0.14761105199952565', + '0.32554377081188859', + '0.72879429738819258'] + assert [str(r) for r in w] == ['0.067809641836772187', + '0.38956404952032481', + '0.47995970868024150', + '0.12933326662932816'] + + x, w = gauss_jacobi(5, 2, 3, 17) + assert [str(r) for r in x] == ['-0.69045775012676106', + '-0.32651993134900065', + 
'0.082337849552034905', + '0.47517887061283164', + '0.79279429464422850'] + assert [str(r) for r in w] ==['0.027410178066337099', + '0.21291786060364828', + '0.43908437944395081', + '0.32220656547221822', + '0.065047683080512268'] + +def test_jacobi_precise(): + x, w = gauss_jacobi(3, -S.Half, S.Half, 40) + assert [str(r) for r in x] == [ + '-0.6234898018587335305250048840042398106323', + '0.2225209339563144042889025644967947594664', + '0.9009688679024191262361023195074450511659'] + assert [str(r) for r in w] == [ + '0.3379547635663543330553835737094171534907', + '1.097332224279111467485302294320899710461', + '1.706305665744327437921957515249186020246'] + + x, w = gauss_jacobi(3, 2, 3, 40) + assert [str(r) for r in x] == [ + '-0.4611587037808976179121958105554375981274', + '0.1043853303832390210914918407615869143233', + '0.6295006461249313240934312425211234110769'] + assert [str(r) for r in w] == [ + '0.1793761350221326596137764371503859752628', + '0.6159564099114715430909548532229749439714', + '0.2713341217330624639619353762933057474325'] + +# Copyright 2008-2015 Nokia Solutions and Networks +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from robot.utils import Utf8Reader + + +NBSP = u'\xA0' + + +class TsvReader(object): + + def read(self, tsvfile, populator): + process = False + for row in Utf8Reader(tsvfile).readlines(): + row = self._process_row(row) + cells = [self._process_cell(cell) for cell in self.split_row(row)] + if cells and cells[0].strip().startswith('*') and \ + populator.start_table([c.replace('*', '') for c in cells]): + process = True + elif process: + populator.add(cells) + populator.eof() + + def _process_row(self, row): + if NBSP in row: + row = row.replace(NBSP, ' ') + return row.rstrip() + + @classmethod + def split_row(cls, row): + return row.split('\t') + + def _process_cell(self, cell): + if len(cell) > 1 and cell[0] == cell[-1] == '"': + cell = cell[1:-1].replace('""', '"') + return cell + +from __future__ import absolute_import, division, unicode_literals # noqa + +import io +import os + +import numpy as np +import oslotest.base +import scipy.sparse + +import lda.utils as utils + + +class TestUtils(oslotest.base.BaseTestCase): + + np.random.seed(99) + + D = 100 + W = 50 + N_WORDS_PER_DOC = 500 + N = W * N_WORDS_PER_DOC + dtm = np.zeros((D, W), dtype=int) + for d in range(D): + dtm[d] = np.random.multinomial(N_WORDS_PER_DOC, np.ones(W) / W) + dtm_sparse = scipy.sparse.csr_matrix(dtm) + N_BY_W = np.sum(dtm, axis=0) + N_BY_D = np.sum(dtm, axis=1) + + def test_setup(self): + dtm, D, N_WORDS_PER_DOC = self.dtm, self.D, self.N_WORDS_PER_DOC + self.assertEqual(np.sum(dtm), D * N_WORDS_PER_DOC) + + def test_matrix_to_lists(self): + dtm, D, N_WORDS_PER_DOC = self.dtm, self.D, self.N_WORDS_PER_DOC + N_BY_D, N_BY_W = self.N_BY_D, self.N_BY_W + WS, DS = utils.matrix_to_lists(dtm) + self.assertEqual(len(WS), D * N_WORDS_PER_DOC) + self.assertEqual(len(WS), len(DS)) + self.assertEqual(dtm.shape, (max(DS) + 1, max(WS) + 1)) + self.assertTrue(all(DS == sorted(DS))) + 
self.assertTrue(np.all(np.bincount(DS) == N_BY_D))
+        self.assertTrue(np.all(np.bincount(WS) == N_BY_W))
+
+    def test_matrix_row_to_lists(self):
+        dtm = self.dtm
+        N = sum(dtm[0])
+
+        WS, DS = utils.matrix_to_lists(dtm)
+        WS_row, DS_row = utils.matrix_to_lists(np.atleast_2d(dtm[0]))
+
+        np.testing.assert_array_equal(WS_row, WS[:N])
+        np.testing.assert_array_equal(DS_row, DS[:N])
+
+    def test_matrix_rows_to_lists(self):
+        dtm = self.dtm
+        rows = dtm[0:2]
+        N = rows.ravel().sum()
+
+        WS, DS = utils.matrix_to_lists(dtm)
+        WS_rows, DS_rows = utils.matrix_to_lists(rows)
+
+        np.testing.assert_array_equal(WS_rows, WS[:N])
+        np.testing.assert_array_equal(DS_rows, DS[:N])
+
+    def test_matrix_row_to_lists_sparse(self):
+        dtm = self.dtm_sparse
+        N = dtm[0].sum()
+
+        WS, DS = utils.matrix_to_lists(dtm)
+        WS_row, DS_row = utils.matrix_to_lists(dtm[0])
+
+        np.testing.assert_array_equal(WS_row, WS[:N])
+        np.testing.assert_array_equal(DS_row, DS[:N])
+
+    def test_matrix_rows_to_lists_sparse(self):
+        dtm = self.dtm_sparse
+        rows = dtm[0:2]
+        N = rows.sum()
+
+        WS, DS = utils.matrix_to_lists(dtm)
+        WS_rows, DS_rows = utils.matrix_to_lists(rows)
+
+        np.testing.assert_array_equal(WS_rows, WS[:N])
+        np.testing.assert_array_equal(DS_rows, DS[:N])
+
+    def test_lists_to_matrix(self):
+        dtm = self.dtm
+        WS, DS = utils.matrix_to_lists(dtm)
+        dtm_new = utils.lists_to_matrix(WS, DS)
+        self.assertTrue(np.all(dtm == dtm_new))
+
+    def test_ldac2dtm_offset(self):
+        test_dir = os.path.dirname(__file__)
+        reuters_ldac_fn = os.path.join(test_dir, 'reuters.ldac')
+        self.assertRaises(ValueError, utils.ldac2dtm, open(reuters_ldac_fn), offset=1)
+
+    def test_ldac2dtm(self):
+        test_dir = os.path.dirname(__file__)
+        reuters_ldac_fn = os.path.join(test_dir, 'reuters.ldac')
+        dtm = utils.ldac2dtm(open(reuters_ldac_fn))
+        self.assertEqual(dtm.shape, (395, 4258))
+        self.assertEqual(dtm.sum(), 84010)
+
+    def test_ldac_conversion(self):
+        dtm = self.dtm
+        N, V = dtm.shape
+        doclines = list(utils.dtm2ldac(self.dtm))
+        nd_unique = np.sum(dtm > 0, axis=1)
+        for n, docline in zip(nd_unique, doclines):
+            self.assertEqual(n, int(docline.split(' ')[0]))
+        self.assertEqual(len(doclines), N)
+        f = io.StringIO('\n'.join(doclines))
+        dtm_new = utils.ldac2dtm(f)
+        self.assertTrue(np.all(dtm == dtm_new))
+
+    def test_lists_to_matrix_sparse(self):
+        dtm = self.dtm_sparse
+        WS, DS = utils.matrix_to_lists(dtm)
+        dtm_new = utils.lists_to_matrix(WS, DS)
+        self.assertTrue(np.all(dtm == dtm_new))
+
+    def test_ldac_conversion_sparse(self):
+        dtm = self.dtm
+        dtm_sparse = self.dtm_sparse
+        N, V = dtm.shape
+        doclines = list(utils.dtm2ldac(dtm_sparse))
+        nd_unique = np.sum(dtm > 0, axis=1)
+        for n, docline in zip(nd_unique, doclines):
+            self.assertEqual(n, int(docline.split(' ')[0]))
+        self.assertEqual(len(doclines), N)
+        f = io.StringIO('\n'.join(doclines))
+        dtm_new = utils.ldac2dtm(f)
+        self.assertTrue(np.all(dtm == dtm_new))
+
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+
+"""
+=========================================================
+Logit function
+=========================================================
+
+Shown in the plot is how the logistic regression would, in this
+synthetic dataset, classify values as either 0 or 1,
+i.e. class one or two, using the logit-curve.
+ +""" +print(__doc__) + + +# Code source: Gael Varoquaux +# License: BSD 3 clause + +import numpy as np +import matplotlib.pyplot as plt + +from sklearn import linear_model + +# this is our test set, it's just a straight line with some +# Gaussian noise +xmin, xmax = -5, 5 +n_samples = 100 +np.random.seed(0) +X = np.random.normal(size=n_samples) +y = (X > 0).astype(np.float) +X[X > 0] *= 4 +X += .3 * np.random.normal(size=n_samples) + +X = X[:, np.newaxis] +# run the classifier +clf = linear_model.LogisticRegression(C=1e5) +clf.fit(X, y) + +# and plot the result +plt.figure(1, figsize=(4, 3)) +plt.clf() +plt.scatter(X.ravel(), y, color='black', zorder=20) +X_test = np.linspace(-5, 10, 300) + + +def model(x): + return 1 / (1 + np.exp(-x)) +loss = model(X_test * clf.coef_ + clf.intercept_).ravel() +plt.plot(X_test, loss, color='blue', linewidth=3) + +ols = linear_model.LinearRegression() +ols.fit(X, y) +plt.plot(X_test, ols.coef_ * X_test + ols.intercept_, linewidth=1) +plt.axhline(.5, color='.5') + +plt.ylabel('y') +plt.xlabel('X') +plt.xticks(()) +plt.yticks(()) +plt.ylim(-.25, 1.25) +plt.xlim(-4, 10) + +plt.show() + +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""libcloud driver for the Linode(R) API + +This driver implements all libcloud functionality for the Linode API. +Since the API is a bit more fine-grained, create_node abstracts a significant +amount of work (and may take a while to run). + +Linode home page http://www.linode.com/ +Linode API documentation http://www.linode.com/api/ +Alternate bindings for reference http://github.com/tjfontaine/linode-python + +Linode(R) is a registered trademark of Linode, LLC. 
+ +""" + +import os +import re + +try: + import simplejson as json +except ImportError: + import json + +import itertools +import binascii +from datetime import datetime + +from copy import copy + +from libcloud.utils.py3 import PY3, httplib +from libcloud.utils.networking import is_private_subnet + +from libcloud.common.linode import (API_ROOT, LinodeException, + LinodeConnection, LinodeConnectionV4, + LinodeDisk, LinodeIPAddress, + LinodeExceptionV4, + LINODE_PLAN_IDS, LINODE_DISK_FILESYSTEMS, + LINODE_DISK_FILESYSTEMS_V4, + DEFAULT_API_VERSION) +from libcloud.compute.types import Provider, NodeState, StorageVolumeState +from libcloud.compute.base import NodeDriver, NodeSize, Node, NodeLocation +from libcloud.compute.base import NodeAuthPassword, NodeAuthSSHKey +from libcloud.compute.base import NodeImage, StorageVolume + + +class LinodeNodeDriver(NodeDriver): + name = 'Linode' + website = 'http://www.linode.com/' + type = Provider.LINODE + + def __new__(cls, key, secret=None, secure=True, host=None, port=None, + api_version=DEFAULT_API_VERSION, region=None, **kwargs): + if cls is LinodeNodeDriver: + if api_version == '3.0': + cls = LinodeNodeDriverV3 + elif api_version == '4.0': + cls = LinodeNodeDriverV4 + else: + raise NotImplementedError( + 'No Linode driver found for API version: %s' % + (api_version)) + return super(LinodeNodeDriver, cls).__new__(cls) + + +class LinodeNodeDriverV3(LinodeNodeDriver): + """libcloud driver for the Linode API + + Rough mapping of which is which: + + - list_nodes linode.list + - reboot_node linode.reboot + - destroy_node linode.delete + - create_node linode.create, linode.update, + linode.disk.createfromdistribution, + linode.disk.create, linode.config.create, + linode.ip.addprivate, linode.boot + - list_sizes avail.linodeplans + - list_images avail.distributions + - list_locations avail.datacenters + - list_volumes linode.disk.list + - destroy_volume linode.disk.delete + + For more information on the Linode API, be sure to read the reference: + + http://www.linode.com/api/ + """ + connectionCls = LinodeConnection + _linode_plan_ids = LINODE_PLAN_IDS + _linode_disk_filesystems = LINODE_DISK_FILESYSTEMS + features = {'create_node': ['ssh_key', 'password']} + + def __init__(self, key, secret=None, secure=True, host=None, port=None, + api_version=None, region=None, **kwargs): + """Instantiate the driver with the given API key + + :param key: the API key to use (required) + :type key: ``str`` + + :rtype: ``None`` + """ + self.datacenter = None + NodeDriver.__init__(self, key) + + # Converts Linode's state from DB to a NodeState constant. + LINODE_STATES = { + (-2): NodeState.UNKNOWN, # Boot Failed + (-1): NodeState.PENDING, # Being Created + 0: NodeState.PENDING, # Brand New + 1: NodeState.RUNNING, # Running + 2: NodeState.STOPPED, # Powered Off + 3: NodeState.REBOOTING, # Shutting Down + 4: NodeState.UNKNOWN # Reserved + } + + def list_nodes(self): + """ + List all Linodes that the API key can access + + This call will return all Linodes that the API key in use has access + to. + If a node is in this list, rebooting will work; however, creation and + destruction are a separate grant. 
+ + :return: List of node objects that the API key can access + :rtype: ``list`` of :class:`Node` + """ + params = {"api_action": "linode.list"} + data = self.connection.request(API_ROOT, params=params).objects[0] + return self._to_nodes(data) + + def start_node(self, node): + """ + Boot the given Linode + + """ + params = {"api_action": "linode.boot", "LinodeID": node.id} + self.connection.request(API_ROOT, params=params) + return True + + def stop_node(self, node): + """ + Shutdown the given Linode + + """ + params = {"api_action": "linode.shutdown", "LinodeID": node.id} + self.connection.request(API_ROOT, params=params) + return True + + def reboot_node(self, node): + """ + Reboot the given Linode + + Will issue a shutdown job followed by a boot job, using the last booted + configuration. In most cases, this will be the only configuration. + + :param node: the Linode to reboot + :type node: :class:`Node` + + :rtype: ``bool`` + """ + params = {"api_action": "linode.reboot", "LinodeID": node.id} + self.connection.request(API_ROOT, params=params) + return True + + def destroy_node(self, node): + """Destroy the given Linode + + Will remove the Linode from the account and issue a prorated credit. A + grant for removing Linodes from the account is required, otherwise this + method will fail. + + In most cases, all disk images must be removed from a Linode before the + Linode can be removed; however, this call explicitly skips those + safeguards. There is no going back from this method. + + :param node: the Linode to destroy + :type node: :class:`Node` + + :rtype: ``bool`` + """ + params = {"api_action": "linode.delete", "LinodeID": node.id, + "skipChecks": True} + self.connection.request(API_ROOT, params=params) + return True + + def create_node(self, name, image, size, auth, location=None, ex_swap=None, + ex_rsize=None, ex_kernel=None, ex_payment=None, + ex_comment=None, ex_private=False, lconfig=None, + lroot=None, lswap=None): + """Create a new Linode, deploy a Linux distribution, and boot + + This call abstracts much of the functionality of provisioning a Linode + and getting it booted. A global grant to add Linodes to the account is + required, as this call will result in a billing charge. + + Note that there is a safety valve of 5 Linodes per hour, in order to + prevent a runaway script from ruining your day. + + :keyword name: the name to assign the Linode (mandatory) + :type name: ``str`` + + :keyword image: which distribution to deploy on the Linode (mandatory) + :type image: :class:`NodeImage` + + :keyword size: the plan size to create (mandatory) + :type size: :class:`NodeSize` + + :keyword auth: an SSH key or root password (mandatory) + :type auth: :class:`NodeAuthSSHKey` or :class:`NodeAuthPassword` + + :keyword location: which datacenter to create the Linode in + :type location: :class:`NodeLocation` + + :keyword ex_swap: size of the swap partition in MB (128) + :type ex_swap: ``int`` + + :keyword ex_rsize: size of the root partition in MB (plan size - swap). + :type ex_rsize: ``int`` + + :keyword ex_kernel: a kernel ID from avail.kernels (Latest 2.6 Stable). 
+        :type ex_kernel: ``str``
+
+        :keyword ex_payment: one of 1, 12, or 24; subscription length (1)
+        :type ex_payment: ``int``
+
+        :keyword ex_comment: a small comment for the configuration (libcloud)
+        :type ex_comment: ``str``
+
+        :keyword ex_private: whether or not to request a private IP (False)
+        :type ex_private: ``bool``
+
+        :keyword lconfig: what to call the configuration (generated)
+        :type lconfig: ``str``
+
+        :keyword lroot: what to call the root image (generated)
+        :type lroot: ``str``
+
+        :keyword lswap: what to call the swap space (generated)
+        :type lswap: ``str``
+
+        :return: Node representing the newly-created Linode
+        :rtype: :class:`Node`
+        """
+        auth = self._get_and_check_auth(auth)
+
+        # Pick a location (resolves LIBCLOUD-41 in JIRA)
+        if location:
+            chosen = location.id
+        elif self.datacenter:
+            chosen = self.datacenter
+        else:
+            raise LinodeException(0xFB, "Need to select a datacenter first")
+
+        # Step 0: Parameter validation before we purchase
+        # Validate everything up front so that we fail here, before the
+        # purchase, rather than halfway through the process when the API
+        # rejects a parameter.
+
+        # Plan ID
+        plans = self.list_sizes()
+        if size.id not in [p.id for p in plans]:
+            raise LinodeException(0xFB, "Invalid plan ID -- avail.plans")
+
+        # Payment schedule
+        payment = "1" if not ex_payment else str(ex_payment)
+        if payment not in ["1", "12", "24"]:
+            raise LinodeException(0xFB, "Invalid subscription (1, 12, 24)")
+
+        ssh = None
+        root = None
+        # SSH key and/or root password
+        if isinstance(auth, NodeAuthSSHKey):
+            ssh = auth.pubkey  # pylint: disable=no-member
+        elif isinstance(auth, NodeAuthPassword):
+            root = auth.password
+
+        if not ssh and not root:
+            raise LinodeException(0xFB, "Need SSH key or root password")
+        if root is not None and len(root) < 6:
+            raise LinodeException(0xFB, "Root password is too short")
+
+        # Swap size
+        try:
+            swap = 128 if not ex_swap else int(ex_swap)
+        except (TypeError, ValueError):
+            raise LinodeException(0xFB, "Need an integer swap size")
+
+        # Root partition size defaults to the plan size minus swap
+        imagesize = int(ex_rsize) if ex_rsize else (size.disk - swap)
+        if (imagesize + swap) > size.disk:
+            raise LinodeException(0xFB, "Total disk images are too big")
+
+        # Distribution ID
+        distros = self.list_images()
+        if image.id not in [d.id for d in distros]:
+            raise LinodeException(0xFB,
+                                  "Invalid distro -- avail.distributions")
+
+        # Kernel
+        if ex_kernel:
+            kernel = ex_kernel
+        else:
+            if image.extra['64bit']:
+                # For a list of available kernel ids, see
+                # https://www.linode.com/kernels/
+                kernel = 138
+            else:
+                kernel = 137
+        params = {"api_action": "avail.kernels"}
+        kernels = self.connection.request(API_ROOT, params=params).objects[0]
+        if kernel not in [z["KERNELID"] for z in kernels]:
+            raise LinodeException(0xFB, "Invalid kernel -- avail.kernels")
+
+        # Comments
+        comments = ex_comment if ex_comment else "Created by Apache libcloud "
+
+        # Step 1: linode.create
+        params = {
+            "api_action": "linode.create",
+            "DatacenterID": chosen,
+            "PlanID": size.id,
+            "PaymentTerm": payment
+        }
+        data = self.connection.request(API_ROOT, params=params).objects[0]
+        linode = {"id": data["LinodeID"]}
+
+        # Step 1b. linode.update to rename the Linode
+        params = {
+            "api_action": "linode.update",
+            "LinodeID": linode["id"],
+            "Label": name
+        }
+        self.connection.request(API_ROOT, params=params)
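+
+        # NOTE: linode.create itself does not accept a label, which is why
+        # the Linode is renamed in the separate linode.update call above.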
+        # Step 1c. linode.ip.addprivate if it was requested
+        if ex_private:
+            params = {
+                "api_action": "linode.ip.addprivate",
+                "LinodeID": linode["id"]
+            }
+            self.connection.request(API_ROOT, params=params)
+
+        # Step 1d. Labels
+        # Use the Linode id in the labels: the name can be up to 63 chars,
+        # while labels are limited to 48 chars.
+        label = {
+            "lconfig": "[%s] Configuration Profile" % linode["id"],
+            "lroot": "[%s] %s Disk Image" % (linode["id"], image.name),
+            "lswap": "[%s] Swap Space" % linode["id"]
+        }
+
+        if lconfig:
+            label['lconfig'] = lconfig
+
+        if lroot:
+            label['lroot'] = lroot
+
+        if lswap:
+            label['lswap'] = lswap
+
+        # Step 2: linode.disk.createfromdistribution
+        # Generate a random root password when only an SSH key was supplied
+        if not root:
+            root = binascii.b2a_base64(os.urandom(8)).decode('ascii').strip()
+
+        params = {
+            "api_action": "linode.disk.createfromdistribution",
+            "LinodeID": linode["id"],
+            "DistributionID": image.id,
+            "Label": label["lroot"],
+            "Size": imagesize,
+            "rootPass": root,
+        }
+        if ssh:
+            params["rootSSHKey"] = ssh
+        data = self.connection.request(API_ROOT, params=params).objects[0]
+        linode["rootimage"] = data["DiskID"]
+
+        # Step 3: linode.disk.create for swap
+        params = {
+            "api_action": "linode.disk.create",
+            "LinodeID": linode["id"],
+            "Label": label["lswap"],
+            "Type": "swap",
+            "Size": swap
+        }
+        data = self.connection.request(API_ROOT, params=params).objects[0]
+        linode["swapimage"] = data["DiskID"]
+
+        # Step 4: linode.config.create for main profile
+        # DiskList has nine comma-separated device slots; only the first two
+        # (root and swap) are used here.
+        disks = "%s,%s,,,,,,," % (linode["rootimage"], linode["swapimage"])
+        params = {
+            "api_action": "linode.config.create",
+            "LinodeID": linode["id"],
+            "KernelID": kernel,
+            "Label": label["lconfig"],
+            "Comments": comments,
+            "DiskList": disks
+        }
+        if ex_private:
+            params['helper_network'] = True
+            params['helper_distro'] = True
+
+        data = self.connection.request(API_ROOT, params=params).objects[0]
+        linode["config"] = data["ConfigID"]
+
+        # Step 5: linode.boot
+        params = {
+            "api_action": "linode.boot",
+            "LinodeID": linode["id"],
+            "ConfigID": linode["config"]
+        }
+        self.connection.request(API_ROOT, params=params)
+
+        # Make a node out of it and hand it back
+        params = {"api_action": "linode.list", "LinodeID": linode["id"]}
+        data = self.connection.request(API_ROOT, params=params).objects[0]
+        nodes = self._to_nodes(data)
+
+        if len(nodes) == 1:
+            node = nodes[0]
+            if getattr(auth, "generated", False):
+                node.extra['password'] = auth.password
+            return node
+
+        return None
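+
+    # A minimal create_node sketch (all values below are illustrative,
+    # not part of this driver):
+    #
+    #     driver = LinodeNodeDriverV3('my-api-key')  # placeholder key
+    #     size = driver.list_sizes()[0]
+    #     image = driver.list_images()[0]
+    #     location = driver.list_locations()[0]
+    #     node = driver.create_node(name='example', image=image, size=size,
+    #                               location=location,
+    #                               auth=NodeAuthPassword('s3cret-pw'))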
+    def ex_resize_node(self, node, size):
+        """Resizes a Linode from one plan to another
+
+        Immediately shuts the Linode down, charges/credits the account,
+        and issues a migration to another host server.
+
+        Requires a size (numeric), which is the desired PlanID available
+        from avail.LinodePlans(). After the resize is complete, the node
+        needs to be booted again.
+        """
+
+        params = {"api_action": "linode.resize", "LinodeID": node.id,
+                  "PlanID": size}
+        self.connection.request(API_ROOT, params=params)
+        return True
+
+    def ex_start_node(self, node):
+        # NOTE: This method is here for backward compatibility reasons after
+        # this method was promoted to be part of the standard compute API in
+        # Libcloud v2.7.0
+        return self.start_node(node=node)
+
+    def ex_stop_node(self, node):
+        # NOTE: This method is here for backward compatibility reasons after
+        # this method was promoted to be part of the standard compute API in
+        # Libcloud v2.7.0
+        return self.stop_node(node=node)
+
+    def ex_rename_node(self, node, name):
+        """Renames a node"""
+
+        params = {
+            "api_action": "linode.update",
+            "LinodeID": node.id,
+            "Label": name
+        }
+        self.connection.request(API_ROOT, params=params)
+        return True
+
+    def list_sizes(self, location=None):
+        """
+        List available Linode plans
+
+        Gets the sizes that can be used for creating a Linode. Although
+        available Linode plans can vary per location, the location argument
+        is currently accepted but not used to filter the results.
+
+        :keyword location: the facility to retrieve plans in
+        :type location: :class:`NodeLocation`
+
+        :rtype: ``list`` of :class:`NodeSize`
+        """
+        params = {"api_action": "avail.linodeplans"}
+        data = self.connection.request(API_ROOT, params=params).objects[0]
+        sizes = []
+        for obj in data:
+            n = NodeSize(id=obj["PLANID"], name=obj["LABEL"], ram=obj["RAM"],
+                         disk=(obj["DISK"] * 1024), bandwidth=obj["XFER"],
+                         price=obj["PRICE"], driver=self.connection.driver)
+            sizes.append(n)
+        return sizes
+
+    def list_images(self):
+        """
+        List available Linux distributions
+
+        Retrieve all Linux distributions that can be deployed to a Linode.
+
+        :rtype: ``list`` of :class:`NodeImage`
+        """
+        params = {"api_action": "avail.distributions"}
+        data = self.connection.request(API_ROOT, params=params).objects[0]
+        distros = []
+        for obj in data:
+            i = NodeImage(id=obj["DISTRIBUTIONID"],
+                          name=obj["LABEL"],
+                          driver=self.connection.driver,
+                          extra={'pvops': obj['REQUIRESPVOPSKERNEL'],
+                                 '64bit': obj['IS64BIT']})
+            distros.append(i)
+        return distros
+
+    def list_locations(self):
+        """
+        List available facilities for deployment
+
+        Retrieve all facilities that a Linode can be deployed in.
+
+        :rtype: ``list`` of :class:`NodeLocation`
+        """
+        params = {"api_action": "avail.datacenters"}
+        data = self.connection.request(API_ROOT, params=params).objects[0]
+        nl = []
+        for dc in data:
+            country = None
+            # Infer a country code from the location string
+            if "USA" in dc["LOCATION"]:
+                country = "US"
+            elif "UK" in dc["LOCATION"]:
+                country = "GB"
+            elif "JP" in dc["LOCATION"]:
+                country = "JP"
+            else: