+ '''
+
+ # Source any specified config/ini file
+ # Turn off help, so we print all options in response to -h
+ conf_parser = argparse.ArgumentParser(add_help=False)
+
+ conf_parser.add_argument("-c", "--conf_file",
+ help="Specify config file", metavar="FILE")
+ args, remaining_argv = conf_parser.parse_known_args(args_str.split())
+
+ defaults = {
+ 'api_server_ip': '127.0.0.1',
+ 'api_server_port': '8082',
+ 'oper': 'add',
+ }
+ ksopts = {
+ 'admin_user': 'user1',
+ 'admin_password': 'password1',
+ 'admin_tenant_name': 'default-domain'
+ }
+
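+ # A file passed via -c/--conf_file is plain INI; a hypothetical example
+ # (section names must match those read below):
+ #
+ #     [DEFAULTS]
+ #     api_server_ip = 10.1.1.1
+ #     api_server_port = 8082
+ #
+ #     [KEYSTONE]
+ #     admin_user = admin
+ #     admin_password = secret
+ #     admin_tenant_name = default-domain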
+ if args.conf_file:
+ config = ConfigParser.SafeConfigParser()
+ config.read([args.conf_file])
+ defaults.update(dict(config.items("DEFAULTS")))
+ if 'KEYSTONE' in config.sections():
+ ksopts.update(dict(config.items("KEYSTONE")))
+
+ # Override with CLI options
+ # Don't suppress add_help here so it will handle -h
+ parser = argparse.ArgumentParser(
+ # Inherit options from config_parser
+ parents=[conf_parser],
+ # print script description with -h/--help
+ description=__doc__,
+ # Don't mess with format of description
+ formatter_class=argparse.RawDescriptionHelpFormatter,
+ )
+ defaults.update(ksopts)
+ parser.set_defaults(**defaults)
+
+ parser.add_argument(
+ "--host_name", help="hostname name of database node", required=True)
+ parser.add_argument("--host_ip", help="IP address of database node", required=True)
+ parser.add_argument(
+ "--api_server_ip", help="IP address of api server", required=True)
+ parser.add_argument("--api_server_port", help="Port of api server")
+ parser.add_argument(
+ "--oper", default='add',
+ help="Provision operation to be done(add or del)")
+ parser.add_argument(
+ "--admin_user", help="Name of keystone admin user")
+ parser.add_argument(
+ "--admin_password", help="Password of keystone admin user")
+ parser.add_argument(
+ "--admin_tenant_name", help="Tenamt name for keystone admin user")
+ parser.add_argument(
+ "--openstack_ip", help="IP address of openstack node")
+
+ self._args = parser.parse_args(remaining_argv)
+
+ # end _parse_args
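+
+ # Illustrative invocation (script name, paths and addresses are hypothetical)
+ # showing how config-file defaults are overridden by the CLI flags parsed above:
+ #
+ #     python provision_database_node.py -c /etc/contrail/contrail.conf \
+ #         --host_name db1 --host_ip 10.0.0.5 --api_server_ip 10.0.0.1 --oper add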
+
+ def add_database_node(self):
+ gsc_obj = self._global_system_config_obj
+
+ database_node_obj = DatabaseNode(
+ self._args.host_name, gsc_obj,
+ database_node_ip_address=self._args.host_ip)
+ database_node_exists = True
+ try:
+ database_node_obj = self._vnc_lib.database_node_read(
+ fq_name=database_node_obj.get_fq_name())
+ except NoIdError:
+ database_node_exists = False
+
+ if database_node_exists:
+ self._vnc_lib.database_node_update(database_node_obj)
+ else:
+ self._vnc_lib.database_node_create(database_node_obj)
+
+ # end add_database_node
+
+ def del_database_node(self):
+ gsc_obj = self._global_system_config_obj
+ database_node_obj = DatabaseNode(self._args.host_name, gsc_obj)
+ self._vnc_lib.database_node_delete(
+ fq_name=database_node_obj.get_fq_name())
+ # end del_database_node
+
+# end class DatabaseNodeProvisioner
+
+
+def main(args_str=None):
+ DatabaseNodeProvisioner(args_str)
+# end main
+
+if __name__ == "__main__":
+ main()
+
+# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
+# License: GNU General Public License v3. See license.txt
+
+from __future__ import unicode_literals
+import webnotes
+import json
+
+def execute():
+ doctypes_child_tables_map = {}
+
+ # Get all saved report columns
+ columns = webnotes.conn.sql("""select defvalue, defkey from `tabDefaultValue` where
+ defkey like '_list_settings:%'""")
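+
+ # Each defvalue is expected to hold a JSON-encoded list of
+ # [fieldname, doctype] pairs, e.g. (hypothetical values):
+ # '[["item_code", "Sales Order Item"], ["status", "Sales Order"]]'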
+
+ # Make map of doctype and child tables
+ for value, key in columns:
+ doctype = key.split(':')[-1]
+ child_tables = webnotes.conn.sql_list("""select options from `tabDocField`
+ where parent=%s and fieldtype='Table'""", doctype)
+ doctypes_child_tables_map.setdefault(doctype, child_tables + [doctype])
+
+ # Keep a column only if its doctype is the parent doctype itself or one of its child tables
+ for value, key in columns:
+ new_columns = []
+ column_doctype = key.split(':')[-1]
+ for field, field_doctype in json.loads(value):
+ if field_doctype in doctypes_child_tables_map.get(column_doctype):
+ new_columns.append([field, field_doctype])
+
+ if new_columns:
+ webnotes.conn.sql("""update `tabDefaultValue` set defvalue=%s
+ where defkey=%s""" % ('%s', '%s'), (json.dumps(new_columns), key))
+ else:
+ webnotes.conn.sql("""delete from `tabDefaultValue` where defkey=%s""", key)
+# [The "BSD license"]
+# Copyright (c) 2013 Terence Parr
+# Copyright (c) 2013 Sam Harwell
+# Copyright (c) 2014 Eric Vergnaud
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# 1. Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# 2. Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+# 3. The name of the author may not be used to endorse or promote products
+# derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
+# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#/
+
+
+# A rule context is a record of a single rule invocation. It knows
+# which context invoked it, if any. If there is no parent context, then
+# naturally the invoking state is not valid. The parent link
+# provides a chain upwards from the current rule invocation to the root
+# of the invocation tree, forming a stack. We actually carry no
+# information about the rule associated with this context (except
+# when parsing). We keep only the state number of the invoking state from
+# the ATN submachine that invoked this. Contrast this with the s
+# pointer inside ParserRuleContext that tracks the current state
+# being "executed" for the current rule.
+#
+# The parent contexts are useful for computing lookahead sets and
+# getting error information.
+#
+# These objects are used during parsing and prediction.
+# For the special case of parsers, we use the subclass
+# ParserRuleContext.
+#
+# @see ParserRuleContext
+#/
+from io import StringIO
+from antlr4.tree.Tree import RuleNode, INVALID_INTERVAL
+from antlr4.tree.Trees import Trees
+
+class RuleContext(RuleNode):
+
+ EMPTY = None
+
+ def __init__(self, parent=None, invokingState=-1):
+ super(RuleContext, self).__init__()
+ # What context invoked this rule?
+ self.parentCtx = parent
+ # What state invoked the rule associated with this context?
+ # The "return address" is the followState of invokingState
+ # If parent is null, this should be -1.
+ self.invokingState = invokingState
+
+
+ def depth(self):
+ n = 0
+ p = self
+ while p is not None:
+ p = p.parentCtx
+ n += 1
+ return n
+
+ # A context is empty if there is no invoking state, meaning nobody called
+ # the current context.
+ def isEmpty(self):
+ return self.invokingState == -1
+
+ # satisfy the ParseTree / SyntaxTree interface
+
+ def getSourceInterval(self):
+ return INVALID_INTERVAL
+
+ def getRuleContext(self):
+ return self
+
+ def getPayload(self):
+ return self
+
+ # Return the combined text of all child nodes. This method only considers
+ # tokens which have been added to the parse tree.
+ #
+ # Since tokens on hidden channels (e.g. whitespace or comments) are not
+ # added to the parse trees, they will not appear in the output of this
+ # method.
+ #/
+ def getText(self):
+ if self.getChildCount() == 0:
+ return u""
+ with StringIO() as builder:
+ for child in self.getChildren():
+ builder.write(child.getText())
+ return builder.getvalue()
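+
+ # For example (illustrative): a context that matched the input "1 + 2"
+ # returns u"1+2" here, because the whitespace tokens sit on a hidden
+ # channel and were never added to the tree.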
+
+ def getRuleIndex(self):
+ return -1
+
+ def getChild(self, i):
+ return None
+
+ def getChildCount(self):
+ return 0
+
+ def getChildren(self):
+ for c in []:
+ yield c
+
+ def accept(self, visitor):
+ return visitor.visitChildren(self)
+
+ # # Call this method to view a parse tree in a dialog box visually.#/
+ # public Future<JDialog> inspect(@Nullable Parser parser) {
+ # List<String> ruleNames = parser != null ? Arrays.asList(parser.getRuleNames()) : null;
+ # return inspect(ruleNames);
+ # }
+ #
+ # public Future<JDialog> inspect(@Nullable List<String> ruleNames) {
+ # TreeViewer viewer = new TreeViewer(ruleNames, this);
+ # return viewer.open();
+ # }
+ #
+ # # Save this tree in a postscript file#/
+ # public void save(@Nullable Parser parser, String fileName)
+ # throws IOException, PrintException
+ # {
+ # List<String> ruleNames = parser != null ? Arrays.asList(parser.getRuleNames()) : null;
+ # save(ruleNames, fileName);
+ # }
+ #
+ # # Save this tree in a postscript file using a particular font name and size#/
+ # public void save(@Nullable Parser parser, String fileName,
+ # String fontName, int fontSize)
+ # throws IOException
+ # {
+ # List<String> ruleNames = parser != null ? Arrays.asList(parser.getRuleNames()) : null;
+ # save(ruleNames, fileName, fontName, fontSize);
+ # }
+ #
+ # # Save this tree in a postscript file#/
+ # public void save(@Nullable List<String> ruleNames, String fileName)
+ # throws IOException, PrintException
+ # {
+ # Trees.writePS(this, ruleNames, fileName);
+ # }
+ #
+ # # Save this tree in a postscript file using a particular font name and size#/
+ # public void save(@Nullable List<String> ruleNames, String fileName,
+ # String fontName, int fontSize)
+ # throws IOException
+ # {
+ # Trees.writePS(this, ruleNames, fileName, fontName, fontSize);
+ # }
+ #
+ # # Print out a whole tree, not just a node, in LISP format
+ # # (root child1 .. childN). Print just a node if this is a leaf.
+ # # We have to know the recognizer so we can get rule names.
+ # #/
+ # @Override
+ # public String toStringTree(@Nullable Parser recog) {
+ # return Trees.toStringTree(this, recog);
+ # }
+ #
+ # Print out a whole tree, not just a node, in LISP format
+ # (root child1 .. childN). Print just a node if this is a leaf.
+ #
+ def toStringTree(self, ruleNames=None, recog=None):
+ return Trees.toStringTree(self, ruleNames=ruleNames, recog=recog)
+ # }
+ #
+ # @Override
+ # public String toStringTree() {
+ # return toStringTree((List<String>)null);
+ # }
+ #
+ def __unicode__(self):
+ return self.toString(None, None)
+
+ # @Override
+ # public String toString() {
+ # return toString((List<String>)null, (RuleContext)null);
+ # }
+ #
+ # public final String toString(@Nullable Recognizer<?,?> recog) {
+ # return toString(recog, ParserRuleContext.EMPTY);
+ # }
+ #
+ # public final String toString(@Nullable List<String> ruleNames) {
+ # return toString(ruleNames, null);
+ # }
+ #
+ # // recog null unless ParserRuleContext, in which case we use subclass toString(...)
+ # public String toString(@Nullable Recognizer<?,?> recog, @Nullable RuleContext stop) {
+ # String[] ruleNames = recog != null ? recog.getRuleNames() : null;
+ # List<String> ruleNamesList = ruleNames != null ? Arrays.asList(ruleNames) : null;
+ # return toString(ruleNamesList, stop);
+ # }
+
+ def toString(self, ruleNames, stop):
+ with StringIO() as buf:
+ p = self
+ buf.write(u"[")
+ while p is not None and p is not stop:
+ if ruleNames is None:
+ if not p.isEmpty():
+ buf.write(unicode(p.invokingState))
+ else:
+ ri = p.getRuleIndex()
+ ruleName = ruleNames[ri] if ri >= 0 and ri < len(ruleNames) else unicode(ri)
+ buf.write(ruleName)
+
+ if p.parentCtx is not None and (ruleNames is not None or not p.parentCtx.isEmpty()):
+ buf.write(u" ")
+
+ p = p.parentCtx
+
+ buf.write(u"]")
+ return buf.getvalue()
+
+
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import, division, print_function
+from future.builtins import int, object
+import re
+import six
+import unicodedata
+import random
+from abc import ABCMeta
+from bitstring import BitArray
+
+
+class Component(object):
+ """Base class for all ISCC identifier components"""
+ __metaclass__ = ABCMeta
+
+ #: Base35 custom symbol table for conversion between `ident` and `code`
+ SYMBOLS = u"H9ITDKR83F4SV12PAXWBYG57JQ6OCNMLUEZ"
+ #: Size of the Base35 alphabet
+ BASE = len(SYMBOLS)
+ #: Regex for `code` validation
+ STR_PATT = re.compile("^([A-Z1-9]*)$", re.UNICODE)
+ #: Min value of internal `ident`
+ INT_MIN = 0
+ #: Min length of `code` representation
+ STR_MIN = 1
+
+ @property
+ def INT_MAX(self):
+ """Max value of internal `ident` (2**64-1)"""
+ return 2 ** self.BIT_LENGTH - 1
+
+ @property
+ def STR_MAX(self):
+ return len(self.encode(self.INT_MAX))
+
+ def __init__(self, ident=None, code=None, normalize=True, validate=True, bits=64):
+ """
+ :param int ident: Identifier integer value.
+ :param str or unicode code: Identifier string representation for display
+ :param bool normalize: Normalize `ident` and `code` before processing
+ :param bool validate: Validate the identifier at instantiation.
+ :param int bits: Number of bits of identifier component.
+ """
+
+ self.BIT_LENGTH = bits
+
+ # Case: normalization
+ if normalize and code is not None:
+ code = self.normalize_code(code)
+ if normalize and ident is not None:
+ ident = self.normalize_ident(ident)
+
+ # Case: create random identifier
+ if ident is None and code is None:
+ ident = self.random_ident(bits=self.BIT_LENGTH)  # respect the component's bit width
+
+ # Case: only `ident` passed in
+ if ident is not None and code is None:
+ code = self.encode(ident)
+
+ # Case: only `code` passed in
+ if code is not None and ident is None:
+ ident = self.decode(code)
+
+ self._int = ident
+ self._str = code
+
+ # Case: validation
+ if validate:
+ self.validate()
+
+ @property
+ def ident(self):
+ """Internal integer value of identifier"""
+ return self._int
+
+ @property
+ def code(self):
+ """External string representation of identifier"""
+ return self._str
+
+ @property
+ def bitstring(self):
+ """String representation of bit-seqence"""
+ return BitArray(uint=self.ident, length=self.BIT_LENGTH).bin
+
+ @classmethod
+ def normalize_code(cls, code):
+ return unicodedata.normalize('NFKC', code).strip().upper()
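+ # e.g. normalize_code(u' h9it ') == u'H9IT' (illustrative): NFKC-fold,
+ # strip surrounding whitespace, then uppercase.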
+
+ @staticmethod
+ def normalize_ident(ident):
+ return int(ident)
+
+ @classmethod
+ def random_ident(cls, bits=64):
+ """Create a random identifier.
+ :return int: Random identifier
+ """
+ rand_crypt = random.SystemRandom()
+ rand_id = rand_crypt.randint(cls.INT_MIN, 2 ** bits - 1)
+ return rand_id
+
+ def __int__(self):
+ return self._int
+
+ def __str__(self):
+ return self._str
+
+ def __repr__(self):
+ return '{}({})'.format(self.__class__.__name__, self._int)
+
+ def __eq__(self, other):
+ """Identifiers are identical if their `ident`s are equal"""
+ return self.ident == other.ident
+
+ def __hash__(self):
+ """Override for set uniqueness."""
+ return self.ident
+
+ @classmethod
+ def encode(cls, ident):
+ """
+ :param int ident: Integer value of identifier
+ :return str: String representation of identifier
+ """
+ code = ''
+ while ident > 0 or not code:
+ ident, i = divmod(ident, cls.BASE)
+ code += cls.SYMBOLS[i]
+ return code
+
+ @classmethod
+ def decode(cls, code):
+ """
+ :param str code: String representation of identifier
+ :return int: Integer value of identifier
+ """
+ ident = 0
+ for i, digit in enumerate(code):
+ ident += cls.SYMBOLS.index(digit) * (cls.BASE ** i)
+ return ident
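+
+ # Round-trip sketch, derived from SYMBOLS above ('H' -> 0, '9' -> 1):
+ # encode(35) == 'H9' (least-significant symbol first), and
+ # decode('H9') == 0 * 35**0 + 1 * 35**1 == 35.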
+
+ def hamming_distance(self, other):
+ x = (self.ident ^ other.ident) & ((1 << self.BIT_LENGTH) - 1)
+ tot = 0
+ while x:
+ tot += 1
+ x &= x - 1
+ return tot
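+
+ # e.g. idents 0b1100 and 0b1010 differ in two bit positions, so the
+ # distance is 2; `x &= x - 1` clears the lowest set bit per iteration
+ # (Kernighan's popcount).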
+
+ def jaccard_similarity(self, other):
+ """Bitwise jaccard coefficient of integers a, b"""
+ same_bits = [(bit == other.bitstring[i]) for i, bit in enumerate(self.bitstring)].count(True)
+ return same_bits / (2 * len(self.bitstring) - same_bits)
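+
+ # With 64-bit idents this is same_bits / (128 - same_bits): identical
+ # values give 64/64 == 1.0; 32 matching bits give 32/96 (illustrative).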
+
+ def is_valid(self):
+ """
+ :return bool: True or False
+ """
+ return all((
+ self._int_valid(self._int),
+ self._str_valid(self._str),
+ self.encode(self._int) == self._str,
+ self.decode(self._str) == self._int,
+ ))
+
+ def validate(self):
+ """
+ :raises ValueError: Raises ValueError with help text if invalid
+ :return bool: Returns True if valid or raises ValueError
+ """
+ self._validate_int(self._int)
+ self._validate_str(self._str)
+ self._validate_match(self._str, self._int)
+ return True
+
+ def _validate_str(self, s):
+ if not isinstance(s, six.text_type):
+ raise ValueError(
+ u's must be {} not {}'.format(six.text_type, type(s))
+ )
+ if not self._str_valid_chars(s):
+ raise ValueError(
+ u'text value `{}` must only contain [1-9][A-Z]'.format(s)
+ )
+ if not self._str_valid_len(s):
+ raise ValueError(u'text value `{}` must be {} to {} chars'.format(
+ s, self.STR_MIN, self.STR_MAX
+ ))
+
+ def _validate_int(self, n):
+ if not self._int_valid(n):
+ raise ValueError(u'number value `{}` not between {} and {}'.format(
+ n, self.INT_MIN, self.INT_MAX
+ ))
+
+ def _validate_match(self, s, n):
+ if not self._is_match(s, n):
+ raise ValueError(
+ u'text/number representations do not match: {}!={}'.format(
+ self.encode(n), s
+ )
+ )
+
+ def _int_valid(self, n):
+ return self.INT_MIN <= n <= self.INT_MAX
+
+ def _str_valid(self, s):
+ return self._str_valid_chars(s) and self._str_valid_len(s)
+
+ def _str_valid_chars(self, s):
+ return bool(self.STR_PATT.match(s))
+
+ def _str_valid_len(self, s):
+ return self.STR_MIN <= len(s) <= self.STR_MAX
+
+ def _is_match(self, s, n):
+ return self.encode(n) == s
+
+
+from classtime.logging import logging
+logging = logging.getLogger(__name__) #pylint: disable=C0103
+
+from classtime.core import db
+from classtime.models import Term, Schedule, Course, Section
+
+class StandardLocalDatabase(object):
+ """A single institution's view of the local database
+
+ Uses a stack-based accessor idiom. Usage:
+ self.push_<datatype>()     # e.g. push_courses()
+ ... use self.cur_datatype_model() ...
+ self.pop_datatype()
+ """
+
+ def __init__(self, institution):
+ self._institution = institution
+ self._model_stack = list()
+
+ self.Term = Term
+ self.Schedule = Schedule
+ self.Course = Course
+ self.Section = Section
+
+ def create(self):
+ """Create the database, if it did not already exist
+ """
+ db.create_all()
+
+ def push_datatype(self, datatype):
+ datatype = datatype.lower()
+ if 'term' in datatype:
+ self.push_terms()
+ elif 'schedule' in datatype:
+ self.push_schedules()
+ elif 'course' in datatype:
+ self.push_courses()
+ elif 'section' in datatype:
+ self.push_sections()
+ else:
+ logging.error('Cannot find datatype <{}>'.format(datatype))
+ return self
+
+ def push_terms(self):
+ """Filter all requests to Term objects only. Returns self,
+ so this method should be chained with other methods.
+
+ :returns: self
+ :rtype: StandardLocalDatabase
+ """
+ self._model_stack.append(Term)
+ return self
+
+ def push_schedules(self):
+ """Filter all requests to Schedule objects only. Returns self,
+ so this method should be chained with other methods.
+
+ :returns: self
+ :rtype: StandardLocalDatabase
+ """
+ self._model_stack.append(Schedule)
+ return self
+
+ def push_courses(self):
+ """Filter all requests to Course objects only. Returns self,
+ so this method should be chained with other methods.
+
+ :returns: self
+ :rtype: StandardLocalDatabase
+ """
+ self._model_stack.append(Course)
+ return self
+
+ def push_sections(self):
+ """Filter all requests to Section objects only. Should be
+ the first call in every chained call to the StandardLocalDatabase.
+
+ :returns: self
+ :rtype: StandardLocalDatabase
+ """
+ self._model_stack.append(Section)
+ return self
+
+ def pop_datatype(self):
+ self._model_stack.pop()
+ return self
+
+ def cur_datatype_model(self):
+ return self._model_stack[-1]
+
+ def exists(self, datatype, identifiers=None, **kwargs):
+ """Checks whether an object exists with the given identifiers (primary key values).
+ If no identifiers are given, checks if *any* object exists.
+
+ Primary keys are specified in each models/*.py definition. Institution must
+ be omitted; it will be inferred from the institution of this local database instance.
+
+ :returns: whether the object(s) exist(s)
+ :rtype: boolean
+ """
+ if kwargs:
+ retval = self.query(datatype) \
+ .filter_by(**kwargs) \
+ .first() is not None
+ elif identifiers is None:
+ retval = self.query(datatype) \
+ .first() is not None
+ else:
+ retval = self.get(datatype, identifiers) is not None
+ return retval
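+
+ # A minimal usage sketch (institution and column names are hypothetical):
+ #
+ #     db = StandardLocalDatabase('ualberta')
+ #     if not db.exists('course', subject='CMPUT', number='275'):
+ #         db.add({'subject': 'CMPUT', 'number': '275'}, 'course')
+ #         db.commit()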
+
+ def get(self, datatype, identifiers):
+ self.push_datatype(datatype)
+
+ identifiers = (self._institution,) + identifiers
+ retval = self.cur_datatype_model().query.get(identifiers)
+
+ self.pop_datatype()
+ return retval
+
+ def query(self, datatype):
+ self.push_datatype(datatype)
+ retval = self.cur_datatype_model() \
+ .query \
+ .filter_by(institution=self._institution)
+ self.pop_datatype()
+ return retval
+
+ def add(self, model_dict, datatype):
+ """Adds an 'add command' to the running transaction which will
+ add a new model with attributes specified by dict 'model_dict'
+
+ :param dict model_dict: dictionary of attributes to store in the
+ object.
+ """
+ self.push_datatype(datatype)
+
+ model_dict['institution'] = self._institution
+ db.session.add(self.cur_datatype_model()(model_dict))
+
+ self.pop_datatype()
+
+ def update(self, model_dict, datatype, identifiers):
+ db_obj = self.get(datatype=datatype,
+ identifiers=identifiers)
+ for attr, value in model_dict.iteritems():
+ setattr(db_obj, attr, value)
+
+ def commit(self):
+ """Commits the running transaction to the database
+
+ If the commit fails, it will be rolled back to a safe state.
+ """
+ try:
+ db.session.commit()
+ except:
+ db.session.rollback()
+ raise
+
+# -*- coding: utf-8 -*-
+# Part of Odoo. See LICENSE file for full copyright and licensing details.
+from ast import literal_eval
+import cPickle
+
+from openerp import models, fields, api
+
+
+class pos_cache(models.Model):
+ _name = 'pos.cache'
+
+ cache = fields.Binary()
+ product_domain = fields.Text(required=True)
+ product_fields = fields.Text(required=True)
+
+ config_id = fields.Many2one('pos.config', ondelete='cascade', required=True)
+ compute_user_id = fields.Many2one('res.users', 'Cache compute user', required=True)
+
+ @api.model
+ def refresh_all_caches(self):
+ self.env['pos.cache'].search([]).refresh_cache()
+
+ @api.one
+ def refresh_cache(self):
+ products = self.env['product.product'].search(self.get_product_domain())
+ prod_ctx = products.with_context(pricelist=self.config_id.pricelist_id.id, display_default_code=False)
+ prod_ctx = prod_ctx.sudo(self.compute_user_id.id)
+ res = prod_ctx.read(self.get_product_fields())
+ datas = {
+ 'cache': cPickle.dumps(res, protocol=cPickle.HIGHEST_PROTOCOL),
+ }
+
+ self.write(datas)
+
+ @api.model
+ def get_product_domain(self):
+ return literal_eval(self.product_domain)
+
+ @api.model
+ def get_product_fields(self):
+ return literal_eval(self.product_fields)
+
+ @api.model
+ def get_cache(self, domain, fields):
+ if not self.cache or domain != self.get_product_domain() or fields != self.get_product_fields():
+ self.product_domain = str(domain)
+ self.product_fields = str(fields)
+ self.refresh_cache()
+
+ return cPickle.loads(self.cache)
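+
+ # Illustrative call (domain and fields are hypothetical): requesting
+ # get_cache([('sale_ok', '=', True)], ['name', 'list_price']) with a
+ # domain or field list that differs from the stored ones rebuilds the
+ # cache before unpickling it.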
+
+
+class pos_config(models.Model):
+ _inherit = 'pos.config'
+
+ @api.one
+ @api.depends('cache_ids')
+ def _get_oldest_cache_time(self):
+ pos_cache = self.env['pos.cache']
+ oldest_cache = pos_cache.search([('config_id', '=', self.id)], order='write_date', limit=1)
+ if oldest_cache:
+ self.oldest_cache_time = oldest_cache.write_date
+
+ # Use a related model to avoid loading the cache when the POS loads its config
+ cache_ids = fields.One2many('pos.cache', 'config_id')
+ oldest_cache_time = fields.Datetime(compute='_get_oldest_cache_time', string='Oldest cache time', readonly=True)
+
+ def _get_cache_for_user(self):
+ pos_cache = self.env['pos.cache']
+ cache_for_user = pos_cache.search([('id', 'in', self.cache_ids.ids), ('compute_user_id', '=', self.env.uid)])
+
+ if cache_for_user:
+ return cache_for_user[0]
+ else:
+ return None
+
+ @api.multi
+ def get_products_from_cache(self, fields, domain):
+ cache_for_user = self._get_cache_for_user()
+
+ if cache_for_user:
+ return cache_for_user.get_cache(domain, fields)
+ else:
+ pos_cache = self.env['pos.cache']
+ pos_cache.create({
+ 'config_id': self.id,
+ 'product_domain': str(domain),
+ 'product_fields': str(fields),
+ 'compute_user_id': self.env.uid
+ })
+ new_cache = self._get_cache_for_user()
+ return new_cache.get_cache(domain, fields)
+
+ @api.one
+ def delete_cache(self):
+ # throw away the old caches
+ self.cache_ids.unlink()
+
+"""distutils.command.build_ext
+
+Implements the Distutils 'build_ext' command, for building extension
+modules (currently limited to C extensions, should accommodate C++
+extensions ASAP)."""
+
+# This module should be kept compatible with Python 2.1.
+
+__revision__ = "$Id$"
+
+import sys, os, string, re
+from types import *
+from site import USER_BASE, USER_SITE
+from distutils.core import Command
+from distutils.errors import *
+from distutils.sysconfig import customize_compiler, get_python_version
+from distutils.dep_util import newer_group
+from distutils.extension import Extension
+from distutils.util import get_platform
+from distutils import log
+
+if os.name == 'nt':
+ from distutils.msvccompiler import get_build_version
+ MSVC_VERSION = int(get_build_version())
+
+# An extension name is just a dot-separated list of Python NAMEs (ie.
+# the same as a fully-qualified module name).
+extension_name_re = re.compile \
+ (r'^[a-zA-Z_][a-zA-Z_0-9]*(\.[a-zA-Z_][a-zA-Z_0-9]*)*$')
+
+
+def show_compilers ():
+ from distutils.ccompiler import show_compilers
+ show_compilers()
+
+
+class build_ext (Command):
+
+ description = "build C/C++ extensions (compile/link to build directory)"
+
+ # XXX thoughts on how to deal with complex command-line options like
+ # these, i.e. how to make it so fancy_getopt can suck them off the
+ # command line and make it look like setup.py defined the appropriate
+ # lists of tuples of what-have-you.
+ # - each command needs a callback to process its command-line options
+ # - Command.__init__() needs access to its share of the whole
+ # command line (must ultimately come from
+ # Distribution.parse_command_line())
+ # - it then calls the current command class' option-parsing
+ # callback to deal with weird options like -D, which have to
+ # parse the option text and churn out some custom data
+ # structure
+ # - that data structure (in this case, a list of 2-tuples)
+ # will then be present in the command object by the time
+ # we get to finalize_options() (i.e. the constructor
+ # takes care of both command-line and client options
+ # in between initialize_options() and finalize_options())
+
+ sep_by = " (separated by '%s')" % os.pathsep
+ user_options = [
+ ('build-lib=', 'b',
+ "directory for compiled extension modules"),
+ ('build-temp=', 't',
+ "directory for temporary files (build by-products)"),
+ ('plat-name=', 'p',
+ "platform name to cross-compile for, if supported "
+ "(default: %s)" % get_platform()),
+ ('inplace', 'i',
+ "ignore build-lib and put compiled extensions into the source " +
+ "directory alongside your pure Python modules"),
+ ('include-dirs=', 'I',
+ "list of directories to search for header files" + sep_by),
+ ('define=', 'D',
+ "C preprocessor macros to define"),
+ ('undef=', 'U',
+ "C preprocessor macros to undefine"),
+ ('libraries=', 'l',
+ "external C libraries to link with"),
+ ('library-dirs=', 'L',
+ "directories to search for external C libraries" + sep_by),
+ ('rpath=', 'R',
+ "directories to search for shared C libraries at runtime"),
+ ('link-objects=', 'O',
+ "extra explicit link objects to include in the link"),
+ ('debug', 'g',
+ "compile/link with debugging information"),
+ ('force', 'f',
+ "forcibly build everything (ignore file timestamps)"),
+ ('compiler=', 'c',
+ "specify the compiler type"),
+ ('swig-cpp', None,
+ "make SWIG create C++ files (default is C)"),
+ ('swig-opts=', None,
+ "list of SWIG command line options"),
+ ('swig=', None,
+ "path to the SWIG executable"),
+ ('user', None,
+ "add user include, library and rpath"),
+ ]
+
+ boolean_options = ['inplace', 'debug', 'force', 'swig-cpp', 'user']
+
+ help_options = [
+ ('help-compiler', None,
+ "list available compilers", show_compilers),
+ ]
+
+ def initialize_options (self):
+ self.extensions = None
+ self.build_lib = None
+ self.plat_name = None
+ self.build_temp = None
+ self.inplace = 0
+ self.package = None
+
+ self.include_dirs = None
+ self.define = None
+ self.undef = None
+ self.libraries = None
+ self.library_dirs = None
+ self.rpath = None
+ self.link_objects = None
+ self.debug = None
+ self.force = None
+ self.compiler = None
+ self.swig = None
+ self.swig_cpp = None
+ self.swig_opts = None
+ self.user = None
+
+ def finalize_options(self):
+ from distutils import sysconfig
+
+ self.set_undefined_options('build',
+ ('build_lib', 'build_lib'),
+ ('build_temp', 'build_temp'),
+ ('compiler', 'compiler'),
+ ('debug', 'debug'),
+ ('force', 'force'),
+ ('plat_name', 'plat_name'),
+ )
+
+ if self.package is None:
+ self.package = self.distribution.ext_package
+
+ self.extensions = self.distribution.ext_modules
+
+ # Make sure Python's include directories (for Python.h, pyconfig.h,
+ # etc.) are in the include search path.
+ py_include = sysconfig.get_python_inc()
+ plat_py_include = sysconfig.get_python_inc(plat_specific=1)
+ if self.include_dirs is None:
+ self.include_dirs = self.distribution.include_dirs or []
+ if isinstance(self.include_dirs, str):
+ self.include_dirs = self.include_dirs.split(os.pathsep)
+
+ # Put the Python "system" include dir at the end, so that
+ # any local include dirs take precedence.
+ self.include_dirs.append(py_include)
+ if plat_py_include != py_include:
+ self.include_dirs.append(plat_py_include)
+
+ if isinstance(self.libraries, str):
+ self.libraries = [self.libraries]
+
+ # Life is easier if we're not forever checking for None, so
+ # simplify these options to empty lists if unset
+ if self.libraries is None:
+ self.libraries = []
+ if self.library_dirs is None:
+ self.library_dirs = []
+ elif type(self.library_dirs) is StringType:
+ self.library_dirs = string.split(self.library_dirs, os.pathsep)
+
+ if self.rpath is None:
+ self.rpath = []
+ elif type(self.rpath) is StringType:
+ self.rpath = string.split(self.rpath, os.pathsep)
+
+ # for extensions under windows use different directories
+ # for Release and Debug builds.
+ # also Python's library directory must be appended to library_dirs
+ if os.name == 'nt':
+ # the 'libs' directory is for binary installs - we assume that
+ # must be the *native* platform. But we don't really support
+ # cross-compiling via a binary install anyway, so we let it go.
+ self.library_dirs.append(os.path.join(sys.exec_prefix, 'libs'))
+ if self.debug:
+ self.build_temp = os.path.join(self.build_temp, "Debug")
+ else:
+ self.build_temp = os.path.join(self.build_temp, "Release")
+
+ # Append the source distribution include and library directories,
+ # this allows distutils on windows to work in the source tree
+ self.include_dirs.append(os.path.join(sys.exec_prefix, 'PC'))
+ if MSVC_VERSION == 9:
+ # Use the .lib files for the correct architecture
+ if self.plat_name == 'win32':
+ suffix = ''
+ else:
+ # win-amd64 or win-ia64
+ suffix = self.plat_name[4:]
+ new_lib = os.path.join(sys.exec_prefix, 'PCbuild')
+ if suffix:
+ new_lib = os.path.join(new_lib, suffix)
+ self.library_dirs.append(new_lib)
+
+ elif MSVC_VERSION == 8:
+ self.library_dirs.append(os.path.join(sys.exec_prefix,
+ 'PC', 'VS8.0'))
+ elif MSVC_VERSION == 7:
+ self.library_dirs.append(os.path.join(sys.exec_prefix,
+ 'PC', 'VS7.1'))
+ else:
+ self.library_dirs.append(os.path.join(sys.exec_prefix,
+ 'PC', 'VC6'))
+
+ # OS/2 (EMX) doesn't support Debug vs Release builds, but has the
+ # import libraries in its "Config" subdirectory
+ if os.name == 'os2':
+ self.library_dirs.append(os.path.join(sys.exec_prefix, 'Config'))
+
+ # for extensions under Cygwin and AtheOS Python's library directory must be
+ # appended to library_dirs
+ if sys.platform[:6] == 'cygwin' or sys.platform[:6] == 'atheos':
+ if sys.executable.startswith(os.path.join(sys.exec_prefix, "bin")):
+ # building third party extensions
+ self.library_dirs.append(os.path.join(sys.prefix, "lib",
+ "python" + get_python_version(),
+ "config"))
+ else:
+ # building python standard extensions
+ self.library_dirs.append('.')
+
+ # for extensions under Linux or Solaris with a shared Python library,
+ # Python's library directory must be appended to library_dirs
+ sysconfig.get_config_var('Py_ENABLE_SHARED')
+ if ((sys.platform.startswith('linux') or sys.platform.startswith('gnu')
+ or sys.platform.startswith('sunos'))
+ and sysconfig.get_config_var('Py_ENABLE_SHARED')):
+ if sys.executable.startswith(os.path.join(sys.exec_prefix, "bin")):
+ # building third party extensions
+ self.library_dirs.append(sysconfig.get_config_var('LIBDIR'))
+ else:
+ # building python standard extensions
+ self.library_dirs.append('.')
+
+ # The argument parsing will result in self.define being a string, but
+ # it has to be a list of 2-tuples. All the preprocessor symbols
+ # specified by the 'define' option will be set to '1'. Multiple
+ # symbols can be separated with commas.
+
+ if self.define:
+ defines = self.define.split(',')
+ self.define = map(lambda symbol: (symbol, '1'), defines)
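+
+ # e.g. --define=DEBUG,TRACE arrives here as the string "DEBUG,TRACE"
+ # and becomes [('DEBUG', '1'), ('TRACE', '1')].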
+
+ # The option for macros to undefine is also a string from the
+ # option parsing, but has to be a list. Multiple symbols can also
+ # be separated with commas here.
+ if self.undef:
+ self.undef = self.undef.split(',')
+
+ if self.swig_opts is None:
+ self.swig_opts = []
+ else:
+ self.swig_opts = self.swig_opts.split(' ')
+
+ # Finally add the user include and library directories if requested
+ if self.user:
+ user_include = os.path.join(USER_BASE, "include")
+ user_lib = os.path.join(USER_BASE, "lib")
+ if os.path.isdir(user_include):
+ self.include_dirs.append(user_include)
+ if os.path.isdir(user_lib):
+ self.library_dirs.append(user_lib)
+ self.rpath.append(user_lib)
+
+ def run(self):
+ from distutils.ccompiler import new_compiler
+
+ # 'self.extensions', as supplied by setup.py, is a list of
+ # Extension instances. See the documentation for Extension (in
+ # distutils.extension) for details.
+ #
+ # For backwards compatibility with Distutils 0.8.2 and earlier, we
+ # also allow the 'extensions' list to be a list of tuples:
+ # (ext_name, build_info)
+ # where build_info is a dictionary containing everything that
+ # Extension instances do except the name, with a few things being
+ # differently named. We convert these 2-tuples to Extension
+ # instances as needed.
+
+ if not self.extensions:
+ return
+
+ # If we were asked to build any C/C++ libraries, make sure that the
+ # directory where we put them is in the library search path for
+ # linking extensions.
+ if self.distribution.has_c_libraries():
+ build_clib = self.get_finalized_command('build_clib')
+ self.libraries.extend(build_clib.get_library_names() or [])
+ self.library_dirs.append(build_clib.build_clib)
+
+ # Setup the CCompiler object that we'll use to do all the
+ # compiling and linking
+ self.compiler = new_compiler(compiler=self.compiler,
+ verbose=self.verbose,
+ dry_run=self.dry_run,
+ force=self.force)
+ customize_compiler(self.compiler)
+ # If we are cross-compiling, init the compiler now (if we are not
+ # cross-compiling, init would not hurt, but people may rely on
+ # late initialization of compiler even if they shouldn't...)
+ if os.name == 'nt' and self.plat_name != get_platform():
+ self.compiler.initialize(self.plat_name)
+
+ # And make sure that any compile/link-related options (which might
+ # come from the command-line or from the setup script) are set in
+ # that CCompiler object -- that way, they automatically apply to
+ # all compiling and linking done here.
+ if self.include_dirs is not None:
+ self.compiler.set_include_dirs(self.include_dirs)
+ if self.define is not None:
+ # 'define' option is a list of (name,value) tuples
+ for (name, value) in self.define:
+ self.compiler.define_macro(name, value)
+ if self.undef is not None:
+ for macro in self.undef:
+ self.compiler.undefine_macro(macro)
+ if self.libraries is not None:
+ self.compiler.set_libraries(self.libraries)
+ if self.library_dirs is not None:
+ self.compiler.set_library_dirs(self.library_dirs)
+ if self.rpath is not None:
+ self.compiler.set_runtime_library_dirs(self.rpath)
+ if self.link_objects is not None:
+ self.compiler.set_link_objects(self.link_objects)
+
+ # Now actually compile and link everything.
+ self.build_extensions()
+
+ def check_extensions_list(self, extensions):
+ """Ensure that the list of extensions (presumably provided as a
+ command option 'extensions') is valid, i.e. it is a list of
+ Extension objects. We also support the old-style list of 2-tuples,
+ where the tuples are (ext_name, build_info), which are converted to
+ Extension instances here.
+
+ Raise DistutilsSetupError if the structure is invalid anywhere;
+ just returns otherwise.
+ """
+ if not isinstance(extensions, list):
+ raise DistutilsSetupError, \
+ "'ext_modules' option must be a list of Extension instances"
+
+ for i, ext in enumerate(extensions):
+ if isinstance(ext, Extension):
+ continue # OK! (assume type-checking done
+ # by Extension constructor)
+
+ if not isinstance(ext, tuple) or len(ext) != 2:
+ raise DistutilsSetupError, \
+ ("each element of 'ext_modules' option must be an "
+ "Extension instance or 2-tuple")
+
+ ext_name, build_info = ext
+
+ log.warn(("old-style (ext_name, build_info) tuple found in "
+ "ext_modules for extension '%s'"
+ "-- please convert to Extension instance" % ext_name))
+
+ if not (isinstance(ext_name, str) and
+ extension_name_re.match(ext_name)):
+ raise DistutilsSetupError, \
+ ("first element of each tuple in 'ext_modules' "
+ "must be the extension name (a string)")
+
+ if not isinstance(build_info, dict):
+ raise DistutilsSetupError, \
+ ("second element of each tuple in 'ext_modules' "
+ "must be a dictionary (build info)")
+
+ # OK, the (ext_name, build_info) dict is type-safe: convert it
+ # to an Extension instance.
+ ext = Extension(ext_name, build_info['sources'])
+
+ # Easy stuff: one-to-one mapping from dict elements to
+ # instance attributes.
+ for key in ('include_dirs', 'library_dirs', 'libraries',
+ 'extra_objects', 'extra_compile_args',
+ 'extra_link_args'):
+ val = build_info.get(key)
+ if val is not None:
+ setattr(ext, key, val)
+
+ # Medium-easy stuff: same syntax/semantics, different names.
+ ext.runtime_library_dirs = build_info.get('rpath')
+ if 'def_file' in build_info:
+ log.warn("'def_file' element of build info dict "
+ "no longer supported")
+
+ # Non-trivial stuff: 'macros' split into 'define_macros'
+ # and 'undef_macros'.
+ macros = build_info.get('macros')
+ if macros:
+ ext.define_macros = []
+ ext.undef_macros = []
+ for macro in macros:
+ if not (isinstance(macro, tuple) and len(macro) in (1, 2)):
+ raise DistutilsSetupError, \
+ ("'macros' element of build info dict "
+ "must be 1- or 2-tuple")
+ if len(macro) == 1:
+ ext.undef_macros.append(macro[0])
+ elif len(macro) == 2:
+ ext.define_macros.append(macro)
+
+ extensions[i] = ext
+
+ def get_source_files(self):
+ self.check_extensions_list(self.extensions)
+ filenames = []
+
+ # Wouldn't it be neat if we knew the names of header files too...
+ for ext in self.extensions:
+ filenames.extend(ext.sources)
+
+ return filenames
+
+ def get_outputs(self):
+ # Sanity check the 'extensions' list -- can't assume this is being
+ # done in the same run as a 'build_extensions()' call (in fact, we
+ # can probably assume that it *isn't*!).
+ self.check_extensions_list(self.extensions)
+
+ # And build the list of output (built) filenames. Note that this
+ # ignores the 'inplace' flag, and assumes everything goes in the
+ # "build" tree.
+ outputs = []
+ for ext in self.extensions:
+ outputs.append(self.get_ext_fullpath(ext.name))
+ return outputs
+
+ def build_extensions(self):
+ # First, sanity-check the 'extensions' list
+ self.check_extensions_list(self.extensions)
+
+ for ext in self.extensions:
+ self.build_extension(ext)
+
+ def build_extension(self, ext):
+ sources = ext.sources
+ if sources is None or type(sources) not in (ListType, TupleType):
+ raise DistutilsSetupError, \
+ ("in 'ext_modules' option (extension '%s'), " +
+ "'sources' must be present and must be " +
+ "a list of source filenames") % ext.name
+ sources = list(sources)
+
+ ext_path = self.get_ext_fullpath(ext.name)
+ depends = sources + ext.depends
+ if not (self.force or newer_group(depends, ext_path, 'newer')):
+ log.debug("skipping '%s' extension (up-to-date)", ext.name)
+ return
+ else:
+ log.info("building '%s' extension", ext.name)
+
+ # First, scan the sources for SWIG definition files (.i), run
+ # SWIG on 'em to create .c files, and modify the sources list
+ # accordingly.
+ sources = self.swig_sources(sources, ext)
+
+ # Next, compile the source code to object files.
+
+ # XXX not honouring 'define_macros' or 'undef_macros' -- the
+ # CCompiler API needs to change to accommodate this, and I
+ # want to do one thing at a time!
+
+ # Two possible sources for extra compiler arguments:
+ # - 'extra_compile_args' in Extension object
+ # - CFLAGS environment variable (not particularly
+ # elegant, but people seem to expect it and I
+ # guess it's useful)
+ # The environment variable should take precedence, and
+ # any sensible compiler will give precedence to later
+ # command line args. Hence we combine them in order:
+ extra_args = ext.extra_compile_args or []
+
+ macros = ext.define_macros[:]
+ for undef in ext.undef_macros:
+ macros.append((undef,))
+
+ objects = self.compiler.compile(sources,
+ output_dir=self.build_temp,
+ macros=macros,
+ include_dirs=ext.include_dirs,
+ debug=self.debug,
+ extra_postargs=extra_args,
+ depends=ext.depends)
+
+ # XXX -- this is a Vile HACK!
+ #
+ # The setup.py script for Python on Unix needs to be able to
+ # get this list so it can perform all the clean up needed to
+ # avoid keeping object files around when cleaning out a failed
+ # build of an extension module. Since Distutils does not
+ # track dependencies, we have to get rid of intermediates to
+ # ensure all the intermediates will be properly re-built.
+ #
+ self._built_objects = objects[:]
+
+ # Now link the object files together into a "shared object" --
+ # of course, first we have to figure out all the other things
+ # that go into the mix.
+ if ext.extra_objects:
+ objects.extend(ext.extra_objects)
+ extra_args = ext.extra_link_args or []
+
+ # Detect target language, if not provided
+ language = ext.language or self.compiler.detect_language(sources)
+
+ self.compiler.link_shared_object(
+ objects, ext_path,
+ libraries=self.get_libraries(ext),
+ library_dirs=ext.library_dirs,
+ runtime_library_dirs=ext.runtime_library_dirs,
+ extra_postargs=extra_args,
+ export_symbols=self.get_export_symbols(ext),
+ debug=self.debug,
+ build_temp=self.build_temp,
+ target_lang=language)
+
+
+ def swig_sources (self, sources, extension):
+
+ """Walk the list of source files in 'sources', looking for SWIG
+ interface (.i) files. Run SWIG on all that are found, and
+ return a modified 'sources' list with SWIG source files replaced
+ by the generated C (or C++) files.
+ """
+
+ new_sources = []
+ swig_sources = []
+ swig_targets = {}
+
+ # XXX this drops generated C/C++ files into the source tree, which
+ # is fine for developers who want to distribute the generated
+ # source -- but there should be an option to put SWIG output in
+ # the temp dir.
+
+ if self.swig_cpp:
+ log.warn("--swig-cpp is deprecated - use --swig-opts=-c++")
+
+ if self.swig_cpp or ('-c++' in self.swig_opts) or \
+ ('-c++' in extension.swig_opts):
+ target_ext = '.cpp'
+ else:
+ target_ext = '.c'
+
+ for source in sources:
+ (base, ext) = os.path.splitext(source)
+ if ext == ".i": # SWIG interface file
+ new_sources.append(base + '_wrap' + target_ext)
+ swig_sources.append(source)
+ swig_targets[source] = new_sources[-1]
+ else:
+ new_sources.append(source)
+
+ if not swig_sources:
+ return new_sources
+
+ swig = self.swig or self.find_swig()
+ swig_cmd = [swig, "-python"]
+ swig_cmd.extend(self.swig_opts)
+ if self.swig_cpp:
+ swig_cmd.append("-c++")
+
+ # Do not override commandline arguments
+ if not self.swig_opts:
+ for o in extension.swig_opts:
+ swig_cmd.append(o)
+
+ for source in swig_sources:
+ target = swig_targets[source]
+ log.info("swigging %s to %s", source, target)
+ self.spawn(swig_cmd + ["-o", target, source])
+
+ return new_sources
+
+ # swig_sources ()
+
+ def find_swig (self):
+ """Return the name of the SWIG executable. On Unix, this is
+ just "swig" -- it should be in the PATH. Tries a bit harder on
+ Windows.
+ """
+
+ if os.name == "posix":
+ return "swig"
+ elif os.name == "nt":
+
+ # Look for SWIG in its standard installation directory on
+ # Windows (or so I presume!). If we find it there, great;
+ # if not, act like Unix and assume it's in the PATH.
+ for vers in ("1.3", "1.2", "1.1"):
+ fn = os.path.join("c:\\swig%s" % vers, "swig.exe")
+ if os.path.isfile(fn):
+ return fn
+ else:
+ return "swig.exe"
+
+ elif os.name == "os2":
+ # assume swig available in the PATH.
+ return "swig.exe"
+
+ else:
+ raise DistutilsPlatformError, \
+ ("I don't know how to find (much less run) SWIG "
+ "on platform '%s'") % os.name
+
+ # find_swig ()
+
+ # -- Name generators -----------------------------------------------
+ # (extension names, filenames, whatever)
+ def get_ext_fullpath(self, ext_name):
+ """Returns the path of the filename for a given extension.
+
+ The file is located in `build_lib` or directly in the package
+ (inplace option).
+ """
+ # makes sure the extension name is only using dots
+ all_dots = string.maketrans('/'+os.sep, '..')
+ ext_name = ext_name.translate(all_dots)
+
+ fullname = self.get_ext_fullname(ext_name)
+ modpath = fullname.split('.')
+ filename = self.get_ext_filename(ext_name)
+ filename = os.path.split(filename)[-1]
+
+ if not self.inplace:
+ # no further work needed
+ # returning :
+ # build_dir/package/path/filename
+ filename = os.path.join(*modpath[:-1]+[filename])
+ return os.path.join(self.build_lib, filename)
+
+ # the inplace option requires to find the package directory
+ # using the build_py command for that
+ package = '.'.join(modpath[0:-1])
+ build_py = self.get_finalized_command('build_py')
+ package_dir = os.path.abspath(build_py.get_package_dir(package))
+
+ # returning
+ # package_dir/filename
+ return os.path.join(package_dir, filename)
+
+ def get_ext_fullname(self, ext_name):
+ """Returns the fullname of a given extension name.
+
+ Adds the `package.` prefix"""
+ if self.package is None:
+ return ext_name
+ else:
+ return self.package + '.' + ext_name
+
+ def get_ext_filename(self, ext_name):
+ r"""Convert the name of an extension (eg. "foo.bar") into the name
+ of the file from which it will be loaded (eg. "foo/bar.so", or
+ "foo\bar.pyd").
+ """
+ from distutils.sysconfig import get_config_var
+ ext_path = string.split(ext_name, '.')
+ # OS/2 has an 8 character module (extension) limit :-(
+ if os.name == "os2":
+ ext_path[len(ext_path) - 1] = ext_path[len(ext_path) - 1][:8]
+ # extensions in debug_mode are named 'module_d.pyd' under windows
+ so_ext = get_config_var('SO')
+ if os.name == 'nt' and self.debug:
+ return os.path.join(*ext_path) + '_d' + so_ext
+ return os.path.join(*ext_path) + so_ext
+
+ def get_export_symbols (self, ext):
+ """Return the list of symbols that a shared extension has to
+ export. This either uses 'ext.export_symbols' or, if it's not
+ provided, "init" + module_name. Only relevant on Windows, where
+ the .pyd file (DLL) must export the module "init" function.
+ """
+ initfunc_name = "init" + ext.name.split('.')[-1]
+ if initfunc_name not in ext.export_symbols:
+ ext.export_symbols.append(initfunc_name)
+ return ext.export_symbols
+
+ def get_libraries (self, ext):
+ """Return the list of libraries to link against when building a
+ shared extension. On most platforms, this is just 'ext.libraries';
+ on Windows and OS/2, we add the Python library (eg. python20.dll).
+ """
+ # The python library is always needed on Windows. For MSVC, this
+ # is redundant, since the library is mentioned in a pragma in
+ # pyconfig.h that MSVC groks. The other Windows compilers all seem
+ # to need it mentioned explicitly, though, so that's what we do.
+ # Append '_d' to the python import library on debug builds.
+ if sys.platform == "win32":
+ from distutils.msvccompiler import MSVCCompiler
+ if not isinstance(self.compiler, MSVCCompiler):
+ template = "python%d%d"
+ if self.debug:
+ template = template + '_d'
+ pythonlib = (template %
+ (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
+ # don't extend ext.libraries, it may be shared with other
+ # extensions, it is a reference to the original list
+ return ext.libraries + [pythonlib]
+ else:
+ return ext.libraries
+ elif sys.platform == "os2emx":
+ # EMX/GCC requires the python library explicitly, and I
+ # believe VACPP does as well (though not confirmed) - AIM Apr01
+ template = "python%d%d"
+ # debug versions of the main DLL aren't supported, at least
+ # not at this time - AIM Apr01
+ #if self.debug:
+ # template = template + '_d'
+ pythonlib = (template %
+ (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
+ # don't extend ext.libraries, it may be shared with other
+ # extensions, it is a reference to the original list
+ return ext.libraries + [pythonlib]
+ elif sys.platform[:6] == "cygwin":
+ template = "python%d.%d"
+ pythonlib = (template %
+ (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
+ # don't extend ext.libraries, it may be shared with other
+ # extensions, it is a reference to the original list
+ return ext.libraries + [pythonlib]
+ elif sys.platform[:6] == "atheos":
+ from distutils import sysconfig
+
+ template = "python%d.%d"
+ pythonlib = (template %
+ (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
+ # Get SHLIBS from Makefile
+ extra = []
+ for lib in sysconfig.get_config_var('SHLIBS').split():
+ if lib.startswith('-l'):
+ extra.append(lib[2:])
+ else:
+ extra.append(lib)
+ # don't extend ext.libraries, it may be shared with other
+ # extensions, it is a reference to the original list
+ return ext.libraries + [pythonlib, "m"] + extra
+
+ elif sys.platform == 'darwin':
+ # Don't use the default code below
+ return ext.libraries
+ elif sys.platform[:3] == 'aix':
+ # Don't use the default code below
+ return ext.libraries
+ else:
+ from distutils import sysconfig
+ if sysconfig.get_config_var('Py_ENABLE_SHARED'):
+ template = "python%d.%d"
+ pythonlib = (template %
+ (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
+ return ext.libraries + [pythonlib]
+ else:
+ return ext.libraries
+
+# class build_ext
+
+from umlfri2.application.events.solution import CloseSolutionEvent
+from .events.model import DiagramDeletedEvent
+from .events.tabs import OpenTabEvent, ChangedCurrentTabEvent, ClosedTabEvent
+from .tab import Tab
+
+
+class TabList:
+ def __init__(self, application):
+ self.__tabs = []
+ self.__application = application
+ self.__current_tab = None
+ self.__locked_tabs = set()
+
+ application.event_dispatcher.subscribe(DiagramDeletedEvent, self.__diagram_deleted)
+ application.event_dispatcher.subscribe(CloseSolutionEvent, self.__solution_closed)
+
+ def __diagram_deleted(self, event):
+ tab = self.get_tab_for(event.diagram)
+ if tab is not None:
+ tab.close()
+
+ def __solution_closed(self, event):
+ events = []
+
+ for tab in self.__tabs:
+ events.append(ClosedTabEvent(tab))
+
+ self.__tabs = []
+ self.__application.event_dispatcher.dispatch_all(events)
+ self.__current_tab = None
+ self.__application.event_dispatcher.dispatch(ChangedCurrentTabEvent(None))
+
+ def reset_lock_status(self):
+ self.__locked_tabs = {tab.drawing_area.diagram.save_id for tab in self.__tabs if tab.locked}
+
+ @property
+ def lock_status_changed(self):
+ new_locked_tabs = {tab.drawing_area.diagram.save_id for tab in self.__tabs if tab.locked}
+
+ return self.__locked_tabs != new_locked_tabs
+
+ def get_tab_for(self, diagram):
+ for tab in self.__tabs:
+ if tab.drawing_area.diagram is diagram:
+ return tab
+
+ return None
+
+ def open_new_project_tabs(self, tabs):
+ last_tab = None
+
+ for tab_info in tabs:
+ tab = Tab(self.__application, self, tab_info.diagram, locked=tab_info.locked)
+ self.__tabs.append(tab)
+ self.__application.event_dispatcher.dispatch(OpenTabEvent(tab))
+ last_tab = tab
+
+ if last_tab is not None:
+ self.__current_tab = last_tab
+ self.__application.event_dispatcher.dispatch(ChangedCurrentTabEvent(last_tab))
+
+ def select_tab(self, diagram):
+ if self.__current_tab is not None:
+ self.__current_tab.drawing_area.reset_action()
+
+ if diagram is None:
+ self.__current_tab = None
+ self.__application.event_dispatcher.dispatch(ChangedCurrentTabEvent(None))
+ return
+
+ for tab in self.__tabs:
+ if tab.drawing_area.diagram is diagram:
+ if self.__current_tab is not tab:
+ self.__current_tab = tab
+ self.__application.event_dispatcher.dispatch(ChangedCurrentTabEvent(tab))
+ return tab
+ else:
+ tab = Tab(self.__application, self, diagram)
+ self.__tabs.append(tab)
+ self.__current_tab = tab
+ self.__application.event_dispatcher.dispatch(OpenTabEvent(tab))
+ self.__application.event_dispatcher.dispatch(ChangedCurrentTabEvent(tab))
+ return tab
+
+ def _close_tab(self, tab):
+ if tab.locked:
+ tab.unlock()
+ tab_id = self.__tabs.index(tab)
+ del self.__tabs[tab_id]
+
+ if tab_id < len(self.__tabs):
+ self.__current_tab = self.__tabs[tab_id]
+ elif self.__tabs:
+ self.__current_tab = self.__tabs[-1]
+ else:
+ self.__current_tab = None
+
+ self.__application.event_dispatcher.dispatch(ClosedTabEvent(tab))
+ self.__application.event_dispatcher.dispatch(ChangedCurrentTabEvent(self.__current_tab))
+
+ def close_all(self):
+ events = []
+ new_tabs = []
+
+ for tab in self.__tabs:
+ if tab.locked:
+ new_tabs.append(tab)
+ else:
+ events.append(ClosedTabEvent(tab))
+
+ self.__tabs = new_tabs
+ self.__application.event_dispatcher.dispatch_all(events)
+ if new_tabs:
+ self.__current_tab = new_tabs[-1]
+ self.__application.event_dispatcher.dispatch(ChangedCurrentTabEvent(new_tabs[-1]))
+ else:
+ self.__current_tab = None
+ self.__application.event_dispatcher.dispatch(ChangedCurrentTabEvent(None))
+
+ @property
+ def current_tab(self):
+ return self.__current_tab
+
+ def __iter__(self):
+ yield from self.__tabs
+
+# coding: utf-8
+from __future__ import unicode_literals
+
+import hashlib
+import re
+
+from .common import InfoExtractor
+from ..compat import (
+ compat_parse_qs,
+ compat_urllib_request,
+ compat_urlparse,
+)
+from ..utils import (
+ ExtractorError,
+ sanitized_Request,
+ urlencode_postdata,
+)
+
+
+class FC2IE(InfoExtractor):
+ _VALID_URL = r'^(?:https?://video\.fc2\.com/(?:[^/]+/)*content/|fc2:)(?P<id>[^/]+)'
+ IE_NAME = 'fc2'
+ _NETRC_MACHINE = 'fc2'
+ _TESTS = [{
+ 'url': 'http://video.fc2.com/en/content/20121103kUan1KHs',
+ 'md5': 'a6ebe8ebe0396518689d963774a54eb7',
+ 'info_dict': {
+ 'id': '20121103kUan1KHs',
+ 'ext': 'flv',
+ 'title': 'Boxing again with Puff',
+ },
+ }, {
+ 'url': 'http://video.fc2.com/en/content/20150125cEva0hDn/',
+ 'info_dict': {
+ 'id': '20150125cEva0hDn',
+ 'ext': 'mp4',
+ },
+ 'params': {
+ 'username': 'ytdl@yt-dl.org',
+ 'password': '(snip)',
+ },
+ 'skip': 'requires actual password',
+ }, {
+ 'url': 'http://video.fc2.com/en/a/content/20130926eZpARwsF',
+ 'only_matching': True,
+ }]
+
+ def _login(self):
+ username, password = self._get_login_info()
+ if username is None or password is None:
+ return False
+
+ # Log in
+ login_form_strs = {
+ 'email': username,
+ 'password': password,
+ 'done': 'video',
+ 'Submit': ' Login ',
+ }
+
+ login_data = urlencode_postdata(login_form_strs)
+ request = sanitized_Request(
+ 'https://secure.id.fc2.com/index.php?mode=login&switch_language=en', login_data)
+
+ login_results = self._download_webpage(request, None, note='Logging in', errnote='Unable to log in')
+ if 'mode=redirect&login=done' not in login_results:
+ self.report_warning('unable to log in: bad username or password')
+ return False
+
+ # this is also needed