# NOTE(review): this chunk appears to be several unrelated source files
# concatenated together with the original newlines/indentation collapsed onto
# a few very long physical lines.  It is NOT valid Python as written: class
# bodies, decorators and even single statements run together on one line
# (e.g. "class Related(models.Model): custom = ... class CustomPkTag(...)").
# Content, in order:
#   1. Django test-suite regression models (queries app, Python-2 era:
#      @python_2_unicode_compatible, ForeignKey without on_delete) ending with
#      Ticket23605C at the start of the last line below;
#   2. an OpenERP GPL/AGPL package header plus "import schedulers_all";
#   3. the beginning of an Invenio GPL license header (continued in the next
#      chunk of the file).
# The leading fragment "ordering = ['name', 'extra']" belongs to a class Meta
# defined before this chunk begins — do not treat it as a module-level name.
# The original line structure must be restored from the upstream files before
# any functional edit here can be made safely.
ordering = ['name', 'extra'] class Related(models.Model): custom = models.ForeignKey(CustomPk) class CustomPkTag(models.Model): id = models.CharField(max_length=20, primary_key=True) custom_pk = models.ManyToManyField(CustomPk) tag = models.CharField(max_length=20) # An inter-related setup with a model subclass that has a nullable # path to another model, and a return path from that model. @python_2_unicode_compatible class Celebrity(models.Model): name = models.CharField("Name", max_length=20) greatest_fan = models.ForeignKey("Fan", null=True, unique=True) def __str__(self): return self.name class TvChef(Celebrity): pass class Fan(models.Model): fan_of = models.ForeignKey(Celebrity) # Multiple foreign keys @python_2_unicode_compatible class LeafA(models.Model): data = models.CharField(max_length=10) def __str__(self): return self.data class LeafB(models.Model): data = models.CharField(max_length=10) class Join(models.Model): a = models.ForeignKey(LeafA) b = models.ForeignKey(LeafB) @python_2_unicode_compatible class ReservedName(models.Model): name = models.CharField(max_length=20) order = models.IntegerField() def __str__(self): return self.name # A simpler shared-foreign-key setup that can expose some problems. 
# NOTE(review): the models below continue the collapsed Django queries test
# models (join promotion, ordering, to_field and ticket-number regressions).
@python_2_unicode_compatible class SharedConnection(models.Model): data = models.CharField(max_length=10) def __str__(self): return self.data class PointerA(models.Model): connection = models.ForeignKey(SharedConnection) class PointerB(models.Model): connection = models.ForeignKey(SharedConnection) # Multi-layer ordering @python_2_unicode_compatible class SingleObject(models.Model): name = models.CharField(max_length=10) class Meta: ordering = ['name'] def __str__(self): return self.name class RelatedObject(models.Model): single = models.ForeignKey(SingleObject, null=True) f = models.IntegerField(null=True) class Meta: ordering = ['single'] @python_2_unicode_compatible class Plaything(models.Model): name = models.CharField(max_length=10) others = models.ForeignKey(RelatedObject, null=True) class Meta: ordering = ['others'] def __str__(self): return self.name @python_2_unicode_compatible class Article(models.Model): name = models.CharField(max_length=20) created = models.DateTimeField() def __str__(self): return self.name @python_2_unicode_compatible class Food(models.Model): name = models.CharField(max_length=20, unique=True) def __str__(self): return self.name @python_2_unicode_compatible class Eaten(models.Model): food = models.ForeignKey(Food, to_field="name", null=True) meal = models.CharField(max_length=20) def __str__(self): return "%s at %s" % (self.food, self.meal) @python_2_unicode_compatible class Node(models.Model): num = models.IntegerField(unique=True) parent = models.ForeignKey("self", to_field="num", null=True) def __str__(self): return "%s" % self.num # Bug #12252 @python_2_unicode_compatible class ObjectA(models.Model): name = models.CharField(max_length=50) def __str__(self): return self.name def __iter__(self): # Ticket #23721 assert False, 'type checking should happen without calling model __iter__' class ProxyObjectA(ObjectA): class Meta: proxy = True class ChildObjectA(ObjectA): pass @python_2_unicode_compatible class ObjectB(models.Model): 
name = models.CharField(max_length=50) objecta = models.ForeignKey(ObjectA) num = models.PositiveSmallIntegerField() def __str__(self): return self.name class ProxyObjectB(ObjectB): class Meta: proxy = True @python_2_unicode_compatible class ObjectC(models.Model): name = models.CharField(max_length=50) objecta = models.ForeignKey(ObjectA, null=True) objectb = models.ForeignKey(ObjectB, null=True) childobjecta = models.ForeignKey(ChildObjectA, null=True, related_name='ca_pk') def __str__(self): return self.name @python_2_unicode_compatible class SimpleCategory(models.Model): name = models.CharField(max_length=15) def __str__(self): return self.name @python_2_unicode_compatible class SpecialCategory(SimpleCategory): special_name = models.CharField(max_length=15) def __str__(self): return self.name + " " + self.special_name @python_2_unicode_compatible class CategoryItem(models.Model): category = models.ForeignKey(SimpleCategory) def __str__(self): return "category item: " + str(self.category) @python_2_unicode_compatible class OneToOneCategory(models.Model): new_name = models.CharField(max_length=15) category = models.OneToOneField(SimpleCategory) def __str__(self): return "one2one " + self.new_name class CategoryRelationship(models.Model): first = models.ForeignKey(SimpleCategory, related_name='first_rel') second = models.ForeignKey(SimpleCategory, related_name='second_rel') class NullableName(models.Model): name = models.CharField(max_length=20, null=True) class Meta: ordering = ['id'] class ModelD(models.Model): name = models.TextField() class ModelC(models.Model): name = models.TextField() class ModelB(models.Model): name = models.TextField() c = models.ForeignKey(ModelC) class ModelA(models.Model): name = models.TextField() b = models.ForeignKey(ModelB, null=True) d = models.ForeignKey(ModelD) @python_2_unicode_compatible class Job(models.Model): name = models.CharField(max_length=20, unique=True) def __str__(self): return self.name class 
JobResponsibilities(models.Model): job = models.ForeignKey(Job, to_field='name') responsibility = models.ForeignKey('Responsibility', to_field='description') @python_2_unicode_compatible class Responsibility(models.Model): description = models.CharField(max_length=20, unique=True) jobs = models.ManyToManyField(Job, through=JobResponsibilities, related_name='responsibilities') def __str__(self): return self.description # Models for disjunction join promotion low level testing. class FK1(models.Model): f1 = models.TextField() f2 = models.TextField() class FK2(models.Model): f1 = models.TextField() f2 = models.TextField() class FK3(models.Model): f1 = models.TextField() f2 = models.TextField() class BaseA(models.Model): a = models.ForeignKey(FK1, null=True) b = models.ForeignKey(FK2, null=True) c = models.ForeignKey(FK3, null=True) @python_2_unicode_compatible class Identifier(models.Model): name = models.CharField(max_length=100) def __str__(self): return self.name class Program(models.Model): identifier = models.OneToOneField(Identifier) class Channel(models.Model): programs = models.ManyToManyField(Program) identifier = models.OneToOneField(Identifier) class Book(models.Model): title = models.TextField() chapter = models.ForeignKey('Chapter') class Chapter(models.Model): title = models.TextField() paragraph = models.ForeignKey('Paragraph') class Paragraph(models.Model): text = models.TextField() page = models.ManyToManyField('Page') class Page(models.Model): text = models.TextField() class MyObject(models.Model): parent = models.ForeignKey('self', null=True, blank=True, related_name='children') data = models.CharField(max_length=100) created_at = models.DateTimeField(auto_now_add=True) # Models for #17600 regressions @python_2_unicode_compatible class Order(models.Model): id = models.IntegerField(primary_key=True) class Meta: ordering = ('pk', ) def __str__(self): return '%s' % self.pk @python_2_unicode_compatible class OrderItem(models.Model): order = 
models.ForeignKey(Order, related_name='items') status = models.IntegerField() class Meta: ordering = ('pk', ) def __str__(self): return '%s' % self.pk class BaseUser(models.Model): pass @python_2_unicode_compatible class Task(models.Model): title = models.CharField(max_length=10) owner = models.ForeignKey(BaseUser, related_name='owner') creator = models.ForeignKey(BaseUser, related_name='creator') def __str__(self): return self.title @python_2_unicode_compatible class Staff(models.Model): name = models.CharField(max_length=10) def __str__(self): return self.name @python_2_unicode_compatible class StaffUser(BaseUser): staff = models.OneToOneField(Staff, related_name='user') def __str__(self): return self.staff class Ticket21203Parent(models.Model): parentid = models.AutoField(primary_key=True) parent_bool = models.BooleanField(default=True) created = models.DateTimeField(auto_now=True) class Ticket21203Child(models.Model): childid = models.AutoField(primary_key=True) parent = models.ForeignKey(Ticket21203Parent) class Person(models.Model): name = models.CharField(max_length=128) @python_2_unicode_compatible class Company(models.Model): name = models.CharField(max_length=128) employees = models.ManyToManyField(Person, related_name='employers', through='Employment') def __str__(self): return self.name class Employment(models.Model): employer = models.ForeignKey(Company) employee = models.ForeignKey(Person) title = models.CharField(max_length=128) # Bug #22429 class School(models.Model): pass class Student(models.Model): school = models.ForeignKey(School) class Classroom(models.Model): school = models.ForeignKey(School) students = models.ManyToManyField(Student, related_name='classroom') class Ticket23605A(models.Model): pass class Ticket23605B(models.Model): modela_fk = models.ForeignKey(Ticket23605A) modelc_fk = models.ForeignKey("Ticket23605C") field_b0 = models.IntegerField(null=True) field_b1 = models.BooleanField(default=False) class Ticket23605C(models.Model): 
field_c0 = models.FloatField() # -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . # ############################################################################## import schedulers_all # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: # This file is part of Invenio. # Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011, 2013, 2015 CERN. # # Invenio is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License as # published by the Free Software Foundation; either version 2 of the # License, or (at your option) any later version. # # Invenio is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Invenio; if not, write to the Free Software Foundation, Inc., # 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA. 
"""WebBasket Web Interface.""" __revision__ = "$Id$" __lastupdated__ = """$Date$""" from invenio.utils import apache import os import cgi import urllib from invenio.config import CFG_SITE_SECURE_URL, \ CFG_ACCESS_CONTROL_LEVEL_SITE, \ CFG_SITE_SECURE_URL, CFG_PREFIX, CFG_SITE_LANG from invenio.base.globals import cfg from invenio.base.i18n import gettext_set_language from invenio.legacy.webpage import page from invenio.legacy.webuser import getUid, page_not_authorized, isGuestUser from invenio.legacy.webbasket.api import \ check_user_can_comment, \ check_sufficient_rights, \ perform_request_display, \ perform_request_search, \ create_guest_warning_box, \ create_basket_navtrail, \ perform_request_write_note, \ perform_request_save_note, \ perform_request_delete_note, \ perform_request_add_group, \ perform_request_edit, \ perform_request_edit_topic, \ perform_request_list_public_baskets, \ perform_request_unsubscribe, \ perform_request_subscribe, \ perform_request_display_public, \ perform_request_write_public_note, \ perform_request_save_public_note, \ delete_record, \ move_record, \ perform_request_add, \ perform_request_create_basket, \ perform_request_delete, \ wash_topic, \ wash_group, \ perform_request_export_xml, \ page_start, \ page_end from invenio.legacy.webbasket.db_layer import get_basket_name, \ get_max_user_rights_on_basket from invenio.utils.url import get_referer, redirect_to_url, make_canonical_urlargd from invenio.ext.legacy.handler import wash_urlargd, WebInterfaceDirectory from invenio.legacy.webstat.api import register_customevent from invenio.ext.logging import register_exception from invenio.legacy.webuser import collect_user_info from invenio.modules.comments.api import check_user_can_attach_file_to_comments from invenio.modules.access.engine import acc_authorize_action from invenio.utils.html import is_html_text_editor_installed from invenio.legacy.ckeditor.connector import process_CKEditor_upload, send_response from 
invenio.legacy.bibdocfile.api import stream_file class WebInterfaceBasketCommentsFiles(WebInterfaceDirectory): """Handle upload and access to files for comments in WebBasket. The upload is currently only available through the CKEditor. """ def _lookup(self, component, path): """ This handler is invoked for the dynamic URLs (for getting and putting attachments) Eg: /yourbaskets/attachments/get/31/652/5/file/myfile.pdf /yourbaskets/attachments/get/31/552/5/image/myfigure.png bskid/recid/uid/ /yourbaskets/attachments/put/31/550/ bskid/recid """ if component == 'get' and len(path) > 4: bskid = path[0] # Basket id recid = path[1] # Record id uid = path[2] # uid of the submitter file_type = path[3] # file, image, flash or media (as # defined by CKEditor) if file_type in ['file', 'image', 'flash', 'media']: file_name = '/'.join(path[4:]) # the filename def answer_get(req, form): """Accessing files attached to comments.""" form['file'] = file_name form['type'] = file_type form['uid'] = uid form['recid'] = recid form['bskid'] = bskid return self._get(req, form) return answer_get, [] elif component == 'put' and len(path) > 1: bskid = path[0] # Basket id recid = path[1] # Record id def answer_put(req, form): """Attaching file to a comment.""" form['recid'] = recid form['bskid'] = bskid return self._put(req, form) return answer_put, [] # All other cases: file not found return None, [] def _get(self, req, form): """ Returns a file attached to a comment. A file is attached to a comment of a record of a basket, by a user (who is the author of the comment), and is of a certain type (file, image, etc). Therefore these 5 values are part of the URL. Eg: CFG_SITE_SECURE_URL/yourbaskets/attachments/get/31/91/5/file/myfile.pdf bskid/recid/uid """ argd = wash_urlargd(form, {'file': (str, None), 'type': (str, None), 'uid': (int, 0), 'bskid': (int, 0), 'recid': (int, 0)}) _ = gettext_set_language(argd['ln']) # Can user view this basket & record & comment, i.e. 
can user # access its attachments? #uid = getUid(req) user_info = collect_user_info(req) rights = get_max_user_rights_on_basket(argd['uid'], argd['bskid']) if not user_info['precached_usebaskets']: return page_not_authorized(req, "../", \ text = _("You are not authorized to use baskets.")) if user_info['email'] == 'guest': # Ask to login target = CFG_SITE_SECURE_URL + '/youraccount/login' + \ make_canonical_urlargd({'ln' : argd['ln'], 'referer' : \ CFG_SITE_SECURE_URL + user_info['uri']}, {}) return redirect_to_url(req, target) elif not(check_sufficient_rights(rights, cfg['CFG_WEBBASKET_SHARE_LEVELS']['READITM'])): return page_not_authorized(req, "../", \ text = _("You are not authorized to view this attachment")) if not argd['file'] is None: # Prepare path to file on disk. Normalize the path so that # ../ and other dangerous components are removed. path = os.path.abspath(CFG_PREFIX + '/var/data/baskets/comments/' + \ str(argd['bskid']) + '/' + str(argd['recid']) + '/' + \ str(argd['uid']) + '/' + argd['type'] + '/' + \ argd['file']) # Check that we are really accessing attachements # directory, for the declared basket and record. if path.startswith(CFG_PREFIX + '/var/data/baskets/comments/' + \ str(argd['bskid']) + '/' + str(argd['recid'])) and \ os.path.exists(path): return stream_file(req, path) # Send error 404 in all other cases return apache.HTTP_NOT_FOUND def _put(self, req, form): """ Process requests received from CKEditor to upload files, etc. 
URL eg: CFG_SITE_SECURE_URL/yourbaskets/attachments/put/31/91/ bskid/recid/ """ if not is_html_text_editor_installed(): return argd = wash_urlargd(form, {'bskid': (int, 0), 'recid': (int, 0)}) uid = getUid(req) # URL where the file can be fetched after upload user_files_path = '%(CFG_SITE_SECURE_URL)s/yourbaskets/attachments/get/%(bskid)s/%(recid)i/%(uid)s' % \ {'uid': uid, 'recid': argd['recid'], 'bskid': argd['bskid'], 'CFG_SITE_SECURE_URL': CFG_SITE_SECURE_URL} # Path to directory where uploaded files are saved user_files_absolute_path = '%(CFG_PREFIX)s/var/data/baskets/comments/%(bskid)s/%(recid)s/%(uid)s' % \ {'uid': uid, 'recid': argd['recid'], 'bskid': argd['bskid'], 'CFG_PREFIX': CFG_PREFIX} # Check that user can # 1. is logged in # 2. comment records of this basket (to simplify, we use # WebComment function to check this, even if it is not # entirely adequate) # 3. attach files user_info = collect_user_info(req) (auth_code, dummy) = check_user_can_attach_file_to_comments(user_info, argd['recid']) fileurl = '' callback_function = '' if user_info['email'] == 'guest': # 1. User is guest: must login prior to upload data ='Please login before uploading file.' if not user_info['precached_usebaskets']: msg = 'Sorry, you are not allowed to use WebBasket' elif not check_user_can_comment(uid, argd['bskid']): # 2. User cannot edit comment of this basket msg = 'Sorry, you are not allowed to submit files' elif auth_code: # 3. User cannot submit msg = 'Sorry, you are not allowed to submit files.' 
else: # Process the upload and get the response (msg, uploaded_file_path, filename, fileurl, callback_function) = \ process_CKEditor_upload(form, uid, user_files_path, user_files_absolute_path, recid=argd['recid']) send_response(req, msg, fileurl, callback_function) class WebInterfaceYourBasketsPages(WebInterfaceDirectory): """Defines the set of /yourbaskets pages.""" _exports = ['', 'display_item', 'display', 'search', 'write_note', 'save_note', 'delete_note', 'add', 'delete', 'modify', 'edit', 'edit_topic', 'create_basket', 'display_public', 'list_public_baskets', 'subscribe', 'unsubscribe', 'write_public_note', 'save_public_note', 'attachments'] attachments = WebInterfaceBasketCommentsFiles() def index(self, req, dummy): """Index page.""" redirect_to_url(req, '%s/yourbaskets/display?%s' % (CFG_SITE_SECURE_URL, req.args)) def display_item(self, req, dummy): """Legacy URL redirection.""" redirect_to_url(req, '%s/yourbaskets/display?%s' % (CFG_SITE_SECURE_URL, req.args)) def display(self, req, form): """Display basket interface.""" #import rpdb2; rpdb2.start_embedded_debugger('password', fAllowRemote=True) argd = wash_urlargd(form, {'category': (str, cfg['CFG_WEBBASKET_CATEGORIES']['PRIVATE']), 'topic': (str, ""), 'group': (int, 0), 'bskid': (int, 0), 'recid': (int, 0), 'bsk_to_sort': (int, 0), 'sort_by_title': (str, ""), 'sort_by_date': (str, ""), 'of': (str, "hb"), 'ln': (str, CFG_SITE_LANG)}) _ = gettext_set_language(argd['ln']) uid = getUid(req) if uid == -1 or CFG_ACCESS_CONTROL_LEVEL_SITE >= 1: return page_not_authorized(req, "../yourbaskets/display", navmenuid = 'yourbaskets') if isGuestUser(uid): return redirect_to_url(req, "%s/youraccount/login%s" % ( CFG_SITE_SECURE_URL, make_canonical_urlargd({ 'referer' : "%s/yourbaskets/display%s" % ( CFG_SITE_SECURE_URL, make_canonical_urlargd(argd, {})), "ln" : argd['ln']}, {}))) user_info = collect_user_info(req) if not user_info['precached_usebaskets']: return page_not_authorized(req, "../", \ text = _("You are not 
authorized to use baskets.")) (body, dummy, navtrail) = perform_request_display(uid=uid, selected_category=argd['category'], selected_topic=argd['topic'], selected_group_id=argd['group'], selected_bskid=argd['bskid'], selected_recid=argd['recid'], of=argd['of'], ln=argd['ln']) if isGuestUser(uid): body = create_guest_warning_box(argd['ln']) + body # register event in webstat if user_info['email']: user_str = "%s (%d)" % (user_info['email'], user_info['uid']) else: user_str = "" try: register_customevent("baskets", ["display", "", user_str]) except: register_exception(suffix="Do the webstat tables exists? Try with 'webstatadmin --load-config'") rssurl = CFG_SITE_SECURE_URL + "/rss" if argd['of'] != 'hb': page_start(req, of=argd['of']) if argd['of'].startswith('x'): req.write(body) page_end(req, of=argd['of']) return elif argd['bskid']: rssurl = "%s/yourbaskets/display?category=%s&topic=%s&group=%i&bskid=%i&of=xr" % \ (CFG_SITE_SECURE_URL, argd['category'], urllib.quote(argd['topic']), argd['group'], argd['bskid']) return page(title = _("Display baskets"), body = body, navtrail = navtrail, uid = uid, lastupdated = __lastupdated__, language = argd['ln'], req = req, navmenuid = 'yourbaskets', of = argd['of'], navtrail_append_title_p = 0, secure_page_p=1, rssurl=rssurl) def search(self, req, form): """Search baskets interface.""" argd = wash_urlargd(form, {'category': (str, ""), 'topic': (str, ""), 'group': (int, 0), 'p': (str, ""), 'b': (str, ""), 'n': (int, 0), 'of': (str, "hb"), 'verbose': (int, 0), 'ln': (str, CFG_SITE_LANG)}) _ = gettext_set_language(argd['ln']) uid = getUid(req) if uid == -1 or CFG_ACCESS_CONTROL_LEVEL_SITE >= 1: return page_not_authorized(req, "../yourbaskets/search", navmenuid = 'yourbaskets') if isGuestUser(uid): return redirect_to_url(req, "%s/youraccount/login%s" % ( CFG_SITE_SECURE_URL, make_canonical_urlargd({ 'referer' : "%s/yourbaskets/search%s" % ( CFG_SITE_SECURE_URL, make_canonical_urlargd(argd, {})), "ln" : argd['ln']}, {}))) 
user_info = collect_user_info(req) if not user_info['precached_usebaskets']: return page_not_authorized(req, "../", \ text = _("You are not authorized to use baskets.")) (body, navtrail) = perform_request_search(uid=uid, selected_category=argd['category'], selected_topic=argd['topic'], selected_group_id=argd['group'], p=argd['p'], b=argd['b'], n=argd['n'], # format=argd['of'], ln=argd['ln']) # register event in webstat if user_info['email']: user_str = "%s (%d)" % (user_info['email'], user_info['uid']) else: user_str = "" try: register_customevent("baskets", ["search", "", user_str]) except: register_exception(suffix="Do the webstat tables exists? Try with 'webstatadmin --load-config'") return page(title = _("Search baskets"), body = body, navtrail = navtrail, uid = uid, lastupdated = __lastupdated__, language = argd['ln'], req = req, navmenuid = 'yourbaskets', of = argd['of'], navtrail_append_title_p = 0, secure_page_p=1) def write_note(self, req, form): """Write a comment (just interface for writing)""" argd = wash_urlargd(form, {'category': (str, cfg['CFG_WEBBASKET_CATEGORIES']['PRIVATE']), 'topic': (str, ""), 'group': (int, 0), 'bskid': (int, 0), 'recid': (int, 0), 'cmtid': (int, 0), 'of' : (str, ''), 'ln': (str, CFG_SITE_LANG)}) _ = gettext_set_language(argd['ln']) uid = getUid(req) if uid == -1 or CFG_ACCESS_CONTROL_LEVEL_SITE >= 1: return page_not_authorized(req, "../yourbaskets/write_note", navmenuid = 'yourbaskets') if isGuestUser(uid): return redirect_to_url(req, "%s/youraccount/login%s" % ( CFG_SITE_SECURE_URL, make_canonical_urlargd({ 'referer' : "%s/yourbaskets/write_note%s" % ( CFG_SITE_SECURE_URL, make_canonical_urlargd(argd, {})), "ln" : argd['ln']}, {}))) user_info = collect_user_info(req) if not user_info['precached_usebaskets']: return page_not_authorized(req, "../", \ text = _("You are not authorized to use baskets.")) (body, navtrail) = perform_request_write_note(uid=uid, category=argd['category'], topic=argd['topic'], group_id=argd['group'], 
bskid=argd['bskid'], recid=argd['recid'], cmtid=argd['cmtid'], ln=argd['ln']) # register event in webstat basket_str = "%s (%d)" % (get_basket_name(argd['bskid']), argd['bskid']) if user_info['email']: user_str = "%s (%d)" % (user_info['email'], user_info['uid']) else: user_str = "" try: register_customevent("baskets", ["write_note", basket_str, user_str]) except: register_exception(suffix="Do the webstat tables exists? Try with 'webstatadmin --load-config'") return page(title = _("Add a note"), body = body, navtrail = navtrail, uid = uid, lastupdated = __lastupdated__, language = argd['ln'], req = req, navmenuid = 'yourbaskets', of = argd['of'], secure_page_p=1) def save_note(self, req, form): """Save comment on record in basket""" argd = wash_urlargd(form, {'category': (str, cfg['CFG_WEBBASKET_CATEGORIES']['PRIVATE']), 'topic': (str, ""), 'group': (int, 0), 'bskid': (int, 0), 'recid': (int, 0), 'note_title': (str, ""), 'note_body': (str, ""), 'date_creation': (str, ""), 'editor_type': (str, ""), 'of': (str, ''), 'ln': (str, CFG_SITE_LANG), 'reply_to': (int, 0)}) _ = gettext_set_language(argd['ln']) uid = getUid(req) if uid == -1 or CFG_ACCESS_CONTROL_LEVEL_SITE >= 1: return page_not_authorized(req, "../yourbaskets/save_note", navmenuid = 'yourbaskets') if isGuestUser(uid): return redirect_to_url(req, "%s/youraccount/login%s" % ( CFG_SITE_SECURE_URL, make_canonical_urlargd({ 'referer' : "%s/yourbaskets/save_note%s" % ( CFG_SITE_SECURE_URL, make_canonical_urlargd(argd, {})), "ln" : argd['ln']}, {}))) user_info = collect_user_info(req) if not user_info['precached_usebaskets']: return page_not_authorized(req, "../", \ text = _("You are not authorized to use baskets.")) (body, navtrail) = perform_request_save_note(uid=uid, category=argd['category'], topic=argd['topic'], group_id=argd['group'], bskid=argd['bskid'], recid=argd['recid'], note_title=argd['note_title'], note_body=argd['note_body'], date_creation=argd['date_creation'], editor_type=argd['editor_type'], 
ln=argd['ln'], reply_to=argd['reply_to']) # TODO: do not stat event if save was not succussful # register event in webstat basket_str = "%s (%d)" % (get_basket_name(argd['bskid']), argd['bskid']) if user_info['email']: user_str = "%s (%d)" % (user_info['email'], user_info['uid']) else: user_str = "" try: register_customevent("baskets", ["save_note", basket_str, user_str]) except: register_exception(suffix="Do the webstat tables exists? Try with 'webstatadmin --load-config'") return page(title = _("Display item and notes"), body = body, navtrail = navtrail, uid = uid, lastupdated = __lastupdated__, language = argd['ln'], req = req, navmenuid = 'yourbaskets', of = argd['of'], navtrail_append_title_p = 0, secure_page_p=1) def delete_note(self, req, form): """Delete a comment @param bskid: id of basket (int) @param recid: id of record (int) @param cmtid: id of comment (int) @param category: category (see webbasket_config) (str) @param topic: nb of topic currently displayed (int) @param group: id of group baskets currently displayed (int) @param ln: language""" argd = wash_urlargd(form, {'category': (str, cfg['CFG_WEBBASKET_CATEGORIES']['PRIVATE']), 'topic': (str, ""), 'group': (int, 0), 'bskid': (int, 0), 'recid': (int, 0), 'cmtid': (int, 0), 'of' : (str, ''), 'ln': (str, CFG_SITE_LANG)}) _ = gettext_set_language(argd['ln']) uid = getUid(req) if uid == -1 or CFG_ACCESS_CONTROL_LEVEL_SITE >= 1: return page_not_authorized(req, "../yourbaskets/delete_note", navmenuid = 'yourbaskets') if isGuestUser(uid): return redirect_to_url(req, "%s/youraccount/delete_note%s" % ( CFG_SITE_SECURE_URL, make_canonical_urlargd({ 'referer' : "%s/yourbaskets/display%s" % ( CFG_SITE_SECURE_URL, make_canonical_urlargd(argd, {})), "ln" : argd['ln']}, {}))) user_info = collect_user_info(req) if not user_info['precached_usebaskets']: return page_not_authorized(req, "../", \ text = _("You are not authorized to use baskets.")) (body, navtrail) = perform_request_delete_note(uid=uid, 
category=argd['category'], topic=argd['topic'], group_id=argd['group'], bskid=argd['bskid'], recid=argd['recid'], cmtid=argd['cmtid'], ln=argd['ln']) # TODO: do not stat event if delete was not succussful # register event in webstat basket_str = "%s (%d)" % (get_basket_name(argd['bskid']), argd['bskid']) user_info = collect_user_info(req) if user_info['email']: user_str = "%s (%d)" % (user_info['email'], user_info['uid']) else: user_str = "" try: register_customevent("baskets", ["delete_note", basket_str, user_str]) except: register_exception(suffix="Do the webstat tables exists? Try with 'webstatadmin --load-config'") return page(title = _("Display item and notes"), body = body, navtrail = navtrail, uid = uid, lastupdated = __lastupdated__, language = argd['ln'], req = req, navmenuid = 'yourbaskets', of = argd['of'], navtrail_append_title_p = 0, secure_page_p=1) def add(self, req, form): """Add records to baskets. @param recid: list of records to add @param colid: in case of external collections, the id of the collection the records belong to @param bskids: list of baskets to add records to. if not provided, will return a page where user can select baskets @param referer: URL of the referring page @param new_basket_name: add record to new basket @param new_topic_name: new basket goes into new topic @param create_in_topic: # of topic to put basket into @param ln: language""" # TODO: apply a maximum limit of items (100) that can be added to a basket # at once. Also see the build_search_url function of websearch_..._searcher.py # for the "rg" GET variable. 
argd = wash_urlargd(form, {'recid': (list, []), 'category': (str, ""), 'bskid': (int, 0), 'colid': (int, 0), 'es_title': (str, ""), 'es_desc': (str, ""), 'es_url': (str, ""), 'note_body': (str, ""), 'date_creation': (str, ""), 'editor_type': (str, ""), 'b': (str, ""), 'copy': (int, 0), 'move_from_basket': (int, 0), 'wait': (int, 0), 'referer': (str, ""), 'of': (str, ''), 'ln': (str, CFG_SITE_LANG)}) _ = gettext_set_language(argd['ln']) uid = getUid(req) if uid == -1 or CFG_ACCESS_CONTROL_LEVEL_SITE >= 1: return page_not_authorized(req, "../yourbaskets/add", navmenuid = 'yourbaskets') if isGuestUser(uid): return redirect_to_url(req, "%s/youraccount/login%s" % ( CFG_SITE_SECURE_URL, make_canonical_urlargd({ 'referer' : "%s/yourbaskets/add%s" % ( CFG_SITE_SECURE_URL, make_canonical_urlargd(argd, {})), "ln" : argd['ln']}, {}))) user_info = collect_user_info(req) if not user_info['precached_usebaskets']: return page_not_authorized(req, "../", \ text = _("You are not authorized to use baskets.")) if not argd['referer']: argd['referer'] = get_referer(req) (body, navtrail) = perform_request_add(uid=uid, recids=argd['recid'], colid=argd['colid'], bskid=argd['bskid'], es_title=argd['es_title'], es_desc=argd['es_desc'], es_url=argd['es_url'], note_body=argd['note_body'], date_creation=argd['date_creation'], editor_type=argd['editor_type'], category=argd['category'], b=argd['b'], copy=argd['copy'], move_from_basket=argd['move_from_basket'], wait=argd['wait'], referer=argd['referer'], ln=argd['ln']) if isGuestUser(uid): body = create_guest_warning_box(argd['ln']) + body # register event in webstat bskid = argd['bskid'] basket_str = "%s (%s)" % (get_basket_name(bskid), bskid) if user_info['email']: user_str = "%s (%d)" % (user_info['email'], user_info['uid']) else: user_str = "" try: register_customevent("baskets", ["add", basket_str, user_str]) except: register_exception(suffix="Do the webstat tables exists? 
Try with 'webstatadmin --load-config'") return page(title = _('Add to basket'), body = body, navtrail = navtrail, uid = uid, lastupdated = __lastupdated__, language = argd['ln'], req = req, navmenuid = 'yourbaskets', of = argd['of'], navtrail_append_title_p = 0, secure_page_p=1) def delete(self, req, form): """Delete basket interface""" argd = wash_urlargd(form, {'bskid' : (int, -1), 'confirmed' : (int, 0), 'category' : (str, cfg['CFG_WEBBASKET_CATEGORIES']['PRIVATE']), 'topic' : (str, ""), 'group' : (int, 0), 'of' : (str, ''), 'ln' : (str, CFG_SITE_LANG)}) _ = gettext_set_language(argd['ln']) uid = getUid(req) if uid == -1 or CFG_ACCESS_CONTROL_LEVEL_SITE >= 1: return page_not_authorized(req, "../yourbaskets/delete", navmenuid = 'yourbaskets') if isGuestUser(uid): return redirect_to_url(req, "%s/youraccount/login%s" % ( CFG_SITE_SECURE_URL, make_canonical_urlargd({ 'referer' : "%s/yourbaskets/delete%s" % ( CFG_SITE_SECURE_URL, make_canonical_urlargd(argd, {})), "ln" : argd['ln']}, {}))) user_info = collect_user_info(req) if not user_info['precached_usebaskets']: return page_not_authorized(req, "../", \ text = _("You are not authorized to use baskets.")) body=perform_request_delete(uid=uid, bskid=argd['bskid'], confirmed=argd['confirmed'], category=argd['category'], selected_topic=argd['topic'], selected_group_id=argd['group'], ln=argd['ln']) if argd['confirmed']: if argd['category'] == cfg['CFG_WEBBASKET_CATEGORIES']['PRIVATE']: argd['topic'] = wash_topic(uid, argd['topic'])[0] elif argd['category'] == cfg['CFG_WEBBASKET_CATEGORIES']['GROUP']: argd['group'] = wash_group(uid, argd['group'])[0] url = """%s/yourbaskets/display?category=%s&topic=%s&group=%i&ln=%s""" % \ (CFG_SITE_SECURE_URL, argd['category'], urllib.quote(argd['topic']), argd['group'], argd['ln']) redirect_to_url(req, url) else: navtrail = ''\ '%s' navtrail %= (CFG_SITE_SECURE_URL, argd['ln'], _("Your Account")) navtrail_end = create_basket_navtrail(uid=uid, category=argd['category'], 
                                                  topic=argd['topic'],
                                                  group=argd['group'],
                                                  bskid=argd['bskid'],
                                                  ln=argd['ln'])
        if isGuestUser(uid):
            body = create_guest_warning_box(argd['ln']) + body
        # register event in webstat
        basket_str = "%s (%d)" % (get_basket_name(argd['bskid']), argd['bskid'])
        if user_info['email']:
            user_str = "%s (%d)" % (user_info['email'], user_info['uid'])
        else:
            user_str = ""
        try:
            register_customevent("baskets", ["delete", basket_str, user_str])
        except:
            register_exception(suffix="Do the webstat tables exists? Try with 'webstatadmin --load-config'")
        return page(title = _("Delete a basket"),
                    body = body,
                    navtrail = navtrail + navtrail_end,
                    uid = uid,
                    lastupdated = __lastupdated__,
                    language = argd['ln'],
                    req = req,
                    navmenuid = 'yourbaskets',
                    of = argd['of'],
                    secure_page_p=1)

    def modify(self, req, form):
        """Modify basket content interface (reorder, suppress record, etc.)"""
        argd = wash_urlargd(form, {'action': (str, ""),
                                   'bskid': (int, -1),
                                   'recid': (int, 0),
                                   'category': (str, cfg['CFG_WEBBASKET_CATEGORIES']['PRIVATE']),
                                   'topic': (str, ""),
                                   'group': (int, 0),
                                   'of' : (str, ''),
                                   'ln': (str, CFG_SITE_LANG)})
        _ = gettext_set_language(argd['ln'])
        uid = getUid(req)
        if uid == -1 or CFG_ACCESS_CONTROL_LEVEL_SITE >= 1:
            return page_not_authorized(req, "../yourbaskets/modify",
                                       navmenuid = 'yourbaskets')
        if isGuestUser(uid):
            return redirect_to_url(req, "%s/youraccount/login%s" % (
                CFG_SITE_SECURE_URL,
                make_canonical_urlargd({
                    'referer' : "%s/yourbaskets/modify%s" % (
                        CFG_SITE_SECURE_URL,
                        make_canonical_urlargd(argd, {})),
                    "ln" : argd['ln']}, {})))
        user_info = collect_user_info(req)
        if not user_info['precached_usebaskets']:
            return page_not_authorized(req, "../", \
                text = _("You are not authorized to use baskets."))
        # URL to return to after the record-level action has been applied.
        url = CFG_SITE_SECURE_URL
        url += '/yourbaskets/display?category=%s&topic=%s&group=%i&bskid=%i&ln=%s' % \
               (argd['category'], urllib.quote(argd['topic']),
                argd['group'], argd['bskid'], argd['ln'])
        if argd['action'] == cfg['CFG_WEBBASKET_ACTIONS']['DELETE']:
            delete_record(uid, argd['bskid'], argd['recid'])
            redirect_to_url(req, url)
        elif argd['action'] == cfg['CFG_WEBBASKET_ACTIONS']['UP']:
            move_record(uid, argd['bskid'], argd['recid'], argd['action'])
            redirect_to_url(req, url)
        elif argd['action'] == cfg['CFG_WEBBASKET_ACTIONS']['DOWN']:
            move_record(uid, argd['bskid'], argd['recid'], argd['action'])
            redirect_to_url(req, url)
        elif argd['action'] == cfg['CFG_WEBBASKET_ACTIONS']['COPY'] or \
             argd['action'] == cfg['CFG_WEBBASKET_ACTIONS']['MOVE']:
            # COPY and MOVE share the add-to-basket flow; MOVE additionally
            # records the source basket so the record can be removed from it.
            if(argd['action'] == cfg['CFG_WEBBASKET_ACTIONS']['MOVE']):
                title = _("Move record to basket")
                from_bsk = argd['bskid']
            else:
                title = _("Copy record to basket")
                from_bsk = 0
            referer = get_referer(req)
            (body, navtrail) = perform_request_add(uid=uid,
                                                   recids=argd['recid'],
                                                   copy=True,
                                                   move_from_basket=from_bsk,
                                                   referer=referer,
                                                   ln=argd['ln'])
            if isGuestUser(uid):
                body = create_guest_warning_box(argd['ln']) + body
        else:
            # Unknown action: render an empty page rather than failing.
            title = ''
            body = ''
            # warnings = [('WRN_WEBBASKET_UNDEFINED_ACTION',)]
        # NOTE(review): markup appears stripped from this navtrail literal;
        # confirm against the upstream source.
        navtrail = ''\
                   '%s'
        navtrail %= (CFG_SITE_SECURE_URL, argd['ln'], _("Your Account"))
        navtrail_end = create_basket_navtrail(uid=uid,
                                              category=argd['category'],
                                              topic=argd['topic'],
                                              group=argd['group'],
                                              bskid=argd['bskid'],
                                              ln=argd['ln'])
        # register event in webstat
        basket_str = "%s (%d)" % (get_basket_name(argd['bskid']), argd['bskid'])
        if user_info['email']:
            user_str = "%s (%d)" % (user_info['email'], user_info['uid'])
        else:
            user_str = ""
        try:
            register_customevent("baskets", ["modify", basket_str, user_str])
        except:
            register_exception(suffix="Do the webstat tables exists?
Try with 'webstatadmin --load-config'")
        return page(title = title,
                    body = body,
                    navtrail = navtrail + navtrail_end,
                    uid = uid,
                    lastupdated = __lastupdated__,
                    language = argd['ln'],
                    req = req,
                    navmenuid = 'yourbaskets',
                    of = argd['of'],
                    secure_page_p=1)

    def edit(self, req, form):
        """Edit basket interface"""
        argd = wash_urlargd(form, {'bskid': (int, 0),
                                   'groups': (list, []),
                                   'topic': (str, ""),
                                   'add_group': (str, ""),
                                   'group_cancel': (str, ""),
                                   'submit': (str, ""),
                                   'cancel': (str, ""),
                                   'delete': (str, ""),
                                   'new_name': (str, ""),
                                   'new_topic': (str, ""),
                                   'new_topic_name': (str, ""),
                                   'new_group': (str, ""),
                                   'external': (str, ""),
                                   'of' : (str, ''),
                                   'ln': (str, CFG_SITE_LANG)})
        uid = getUid(req)
        if uid == -1 or CFG_ACCESS_CONTROL_LEVEL_SITE >= 1:
            return page_not_authorized(req, "../yourbaskets/edit",
                                       navmenuid = 'yourbaskets')
        if isGuestUser(uid):
            return redirect_to_url(req, "%s/youraccount/login%s" % (
                CFG_SITE_SECURE_URL,
                make_canonical_urlargd({
                    'referer' : "%s/yourbaskets/edit%s" % (
                        CFG_SITE_SECURE_URL,
                        make_canonical_urlargd(argd, {})),
                    "ln" : argd['ln']}, {})))
        _ = gettext_set_language(argd['ln'])
        user_info = collect_user_info(req)
        if not user_info['precached_usebaskets']:
            return page_not_authorized(req, "../", \
                text = _("You are not authorized to use baskets."))
        # Dispatch on which form button was pressed.
        if argd['cancel']:
            url = CFG_SITE_SECURE_URL + '/yourbaskets/display?category=%s&topic=%s&ln=%s'
            url %= (cfg['CFG_WEBBASKET_CATEGORIES']['PRIVATE'],
                    urllib.quote(argd['topic']), argd['ln'])
            redirect_to_url(req, url)
        elif argd['delete']:
            url = CFG_SITE_SECURE_URL
            url += '/yourbaskets/delete?bskid=%i&category=%s&topic=%s&ln=%s' % \
                   (argd['bskid'],
                    cfg['CFG_WEBBASKET_CATEGORIES']['PRIVATE'],
                    urllib.quote(argd['topic']), argd['ln'])
            redirect_to_url(req, url)
        elif argd['add_group'] and not(argd['new_group']):
            # First step of adding a group: show the group-selection form.
            body = perform_request_add_group(uid=uid,
                                             bskid=argd['bskid'],
                                             topic=argd['topic'],
                                             ln=argd['ln'])
            # warnings = []
        elif (argd['add_group'] and argd['new_group']) or argd['group_cancel']:
            # Second step (or cancellation) of adding a group.
            if argd['add_group']:
                perform_request_add_group(uid=uid,
                                          bskid=argd['bskid'],
                                          topic=argd['topic'],
                                          group_id=argd['new_group'],
                                          ln=argd['ln'])
            body = perform_request_edit(uid=uid,
                                        bskid=argd['bskid'],
                                        topic=argd['topic'],
                                        ln=argd['ln'])
        elif argd['submit']:
            body = perform_request_edit(uid=uid,
                                        bskid=argd['bskid'],
                                        topic=argd['topic'],
                                        new_name=argd['new_name'],
                                        new_topic=argd['new_topic'],
                                        new_topic_name=argd['new_topic_name'],
                                        groups=argd['groups'],
                                        external=argd['external'],
                                        ln=argd['ln'])
            # "-1" means the topic was not changed in the form.
            if argd['new_topic'] != "-1":
                argd['topic'] = argd['new_topic']
            url = CFG_SITE_SECURE_URL + '/yourbaskets/display?category=%s&topic=%s&ln=%s' % \
                  (cfg['CFG_WEBBASKET_CATEGORIES']['PRIVATE'],
                   urllib.quote(argd['topic']), argd['ln'])
            redirect_to_url(req, url)
        else:
            body = perform_request_edit(uid=uid,
                                        bskid=argd['bskid'],
                                        topic=argd['topic'],
                                        ln=argd['ln'])
        # NOTE(review): markup appears stripped from this navtrail literal;
        # confirm against the upstream source.
        navtrail = ''\
                   '%s'
        navtrail %= (CFG_SITE_SECURE_URL, argd['ln'], _("Your Account"))
        navtrail_end = create_basket_navtrail(
            uid=uid,
            category=cfg['CFG_WEBBASKET_CATEGORIES']['PRIVATE'],
            topic=argd['topic'],
            group=0,
            bskid=argd['bskid'],
            ln=argd['ln'])
        if isGuestUser(uid):
            body = create_guest_warning_box(argd['ln']) + body
        # register event in webstat
        basket_str = "%s (%d)" % (get_basket_name(argd['bskid']), argd['bskid'])
        if user_info['email']:
            user_str = "%s (%d)" % (user_info['email'], user_info['uid'])
        else:
            user_str = ""
        try:
            register_customevent("baskets", ["edit", basket_str, user_str])
        except:
            register_exception(suffix="Do the webstat tables exists?
Try with 'webstatadmin --load-config'") return page(title = _("Edit basket"), body = body, navtrail = navtrail + navtrail_end, uid = uid, lastupdated = __lastupdated__, language = argd['ln'], req = req, navmenuid = 'yourbaskets', of = argd['of'], secure_page_p=1) def edit_topic(self, req, form): """Edit topic interface""" argd = wash_urlargd(form, {'topic': (str, ""), 'submit': (str, ""), 'cancel': (str, ""), 'delete': (str, ""), 'new_name': (str, ""), 'of' : (str, ''), 'ln': (str, CFG_SITE_LANG)}) uid = getUid(req) if uid == -1 or CFG_ACCESS_CONTROL_LEVEL_SITE >= 1: return page_not_authorized(req, "../yourbaskets/edit", navmenuid = 'yourbaskets') if isGuestUser(uid): return redirect_to_url(req, "%s/youraccount/login%s" % ( CFG_SITE_SECURE_URL, make_canonical_urlargd({ 'referer' : "%s/yourbaskets/edit_topic%s" % ( CFG_SITE_SECURE_URL, make_canonical_urlargd(argd, {})), "ln" : argd['ln']}, {}))) _ = gettext_set_language(argd['ln']) user_info = collect_user_info(req) if not user_info['precached_usebaskets']: return page_not_authorized(req, "../", \ text = _("You are not authorized to use baskets.")) if argd['cancel']: url = CFG_SITE_SECURE_URL + '/yourbaskets/display?category=%s&ln=%s' url %= (cfg['CFG_WEBBASKET_CATEGORIES']['PRIVATE'], argd['ln']) redirect_to_url(req, url) elif argd['delete']: url = CFG_SITE_SECURE_URL url += '/yourbaskets/delete?bskid=%i&category=%s&topic=%s&ln=%s' % \ (argd['bskid'], cfg['CFG_WEBBASKET_CATEGORIES']['PRIVATE'], urllib.quote(argd['topic']), argd['ln']) redirect_to_url(req, url) elif argd['submit']: body = perform_request_edit_topic(uid=uid, topic=argd['topic'], new_name=argd['new_name'], ln=argd['ln']) url = CFG_SITE_SECURE_URL + '/yourbaskets/display?category=%s&ln=%s' % \ (cfg['CFG_WEBBASKET_CATEGORIES']['PRIVATE'], argd['ln']) redirect_to_url(req, url) else: body = perform_request_edit_topic(uid=uid, topic=argd['topic'], ln=argd['ln']) navtrail = ''\ '%s' navtrail %= (CFG_SITE_SECURE_URL, argd['ln'], _("Your Account")) 
navtrail_end = "" #navtrail_end = create_basket_navtrail( # uid=uid, # category=cfg['CFG_WEBBASKET_CATEGORIES']['PRIVATE'], # topic=argd['topic'], # group=0, # ln=argd['ln']) if isGuestUser(uid): body = create_guest_warning_box(argd['ln']) + body # register event in webstat #basket_str = "%s (%d)" % (get_basket_name(argd['bskid']), argd['bskid']) #if user_info['email']: # user_str = "%s (%d)" % (user_info['email'], user_info['uid']) #else: # user_str = "" #try: # register_customevent("baskets", ["edit", basket_str, user_str]) #except: # register_exception(suffix="Do the webstat tables exists? Try with 'webstatadmin --load-config'") return page(title = _("Edit topic"), body = body, navtrail = navtrail + navtrail_end, uid = uid, lastupdated = __lastupdated__, language = argd['ln'], req = req, navmenuid = 'yourbaskets', of = argd['of'], secure_page_p=1) def create_basket(self, req, form): """Create basket interface""" argd = wash_urlargd(form, {'new_basket_name': (str, ""), 'new_topic_name' : (str, ""), 'create_in_topic': (str, "-1"), 'topic' : (str, ""), 'recid' : (list, []), 'colid' : (int, -1), 'es_title' : (str, ''), 'es_desc' : (str, ''), 'es_url' : (str, ''), 'copy' : (int, 0), 'move_from_basket':(int, 0), 'referer' : (str, ''), 'of' : (str, ''), 'ln' : (str, CFG_SITE_LANG)}) uid = getUid(req) if uid == -1 or CFG_ACCESS_CONTROL_LEVEL_SITE >= 1: return page_not_authorized(req, "../yourbaskets/create_basket", navmenuid = 'yourbaskets') if isGuestUser(uid): return redirect_to_url(req, "%s/youraccount/login%s" % ( CFG_SITE_SECURE_URL, make_canonical_urlargd({ 'referer' : "%s/yourbaskets/create_basket%s" % ( CFG_SITE_SECURE_URL, make_canonical_urlargd(argd, {})), "ln" : argd['ln']}, {}))) user_info = collect_user_info(req) _ = gettext_set_language(argd['ln']) if not user_info['precached_usebaskets']: return page_not_authorized(req, "../", \ text = _("You are not authorized to use baskets.")) if argd['new_basket_name'] and \ (argd['new_topic_name'] or 
argd['create_in_topic'] != "-1"): (bskid, topic) = perform_request_create_basket( req, uid=uid, new_basket_name=argd['new_basket_name'], new_topic_name=argd['new_topic_name'], create_in_topic=argd['create_in_topic'], recids=argd['recid'], colid=argd['colid'], es_title=argd['es_title'], es_desc=argd['es_desc'], es_url=argd['es_url'], copy=argd['copy'], move_from_basket=argd['move_from_basket'], referer=argd['referer'], ln=argd['ln']) # register event in webstat basket_str = "%s ()" % argd['new_basket_name'] if user_info['email']: user_str = "%s (%d)" % (user_info['email'], user_info['uid']) else: user_str = "" try: register_customevent("baskets", ["create_basket", basket_str, user_str]) except: register_exception(suffix="Do the webstat tables exists? Try with 'webstatadmin --load-config'") if ( argd['recid'] and argd['colid'] >= 0 ): url = CFG_SITE_SECURE_URL + '/yourbaskets/add?category=%s©=%i&referer=%s&bskid=%i&colid=%i&move_from_basket=%i&recid=%s&wait=1&ln=%s' url %= (cfg['CFG_WEBBASKET_CATEGORIES']['PRIVATE'], argd['copy'], urllib.quote(argd['referer']), bskid, argd['colid'], argd['move_from_basket'], '&recid='.join(str(recid) for recid in argd['recid']), argd['ln']) elif ( argd['es_title'] and argd['es_desc'] and argd['es_url'] and argd['colid'] == -1 ): # Adding NEW external record - this does not need 'move_from_basket' data url = CFG_SITE_SECURE_URL + '/yourbaskets/add?category=%s&bskid=%i&colid=%i&es_title=%s&es_desc=%s&es_url=%s&wait=1&ln=%s' url %= (cfg['CFG_WEBBASKET_CATEGORIES']['PRIVATE'], bskid, argd['colid'], urllib.quote(argd['es_title']), urllib.quote(argd['es_desc']), urllib.quote(argd['es_url']), argd['ln']) else: url = CFG_SITE_SECURE_URL + '/yourbaskets/display?category=%s&topic=%s&ln=%s' url %= (cfg['CFG_WEBBASKET_CATEGORIES']['PRIVATE'], urllib.quote(topic), argd['ln']) redirect_to_url(req, url) else: body = perform_request_create_basket(req, uid=uid, new_basket_name=argd['new_basket_name'], new_topic_name=argd['new_topic_name'], 
create_in_topic=argd['create_in_topic'], topic=argd['topic'], recids=argd['recid'], colid=argd['colid'], es_title=argd['es_title'], es_desc=argd['es_desc'], es_url=argd['es_url'], copy=argd['copy'], move_from_basket=argd['move_from_basket'], referer=argd['referer'], ln=argd['ln']) navtrail = '%s' navtrail %= (CFG_SITE_SECURE_URL, argd['ln'], _("Your Account")) if isGuestUser(uid): body = create_guest_warning_box(argd['ln']) + body return page(title = _("Create basket"), body = body, navtrail = navtrail, uid = uid, lastupdated = __lastupdated__, language = argd['ln'], req = req, navmenuid = 'yourbaskets', of = argd['of'], secure_page_p=1) def display_public(self, req, form): """Display a public basket""" argd = wash_urlargd(form, {'bskid': (int, 0), 'recid': (int, 0), 'of': (str, "hb"), 'ln': (str, CFG_SITE_LANG)}) _ = gettext_set_language(argd['ln']) uid = getUid(req) if uid == -1 or CFG_ACCESS_CONTROL_LEVEL_SITE >= 1: return page_not_authorized(req, "../yourbaskets/display", navmenuid = 'yourbaskets') user_info = collect_user_info(req) if not argd['bskid']: (body, navtrail) = perform_request_list_public_baskets(uid) title = _('List of public baskets') # register event in webstat if user_info['email']: user_str = "%s (%d)" % (user_info['email'], user_info['uid']) else: user_str = "" try: register_customevent("baskets", ["list_public_baskets", "", user_str]) except: register_exception(suffix="Do the webstat tables exists? 
Try with 'webstatadmin --load-config'")
        else:
            (body, dummy, navtrail) = perform_request_display_public(uid=uid,
                                                                     selected_bskid=argd['bskid'],
                                                                     selected_recid=argd['recid'],
                                                                     of=argd['of'],
                                                                     ln=argd['ln'])
            title = _('Public basket')
            # register event in webstat
            basket_str = "%s (%d)" % (get_basket_name(argd['bskid']), argd['bskid'])
            if user_info['email']:
                user_str = "%s (%d)" % (user_info['email'], user_info['uid'])
            else:
                user_str = ""
            try:
                register_customevent("baskets", ["display_public", basket_str, user_str])
            except:
                register_exception(suffix="Do the webstat tables exists? Try with 'webstatadmin --load-config'")
        rssurl = CFG_SITE_SECURE_URL + "/rss"
        if argd['of'] != 'hb':
            # Non-HTML output format (e.g. XML): stream the body directly
            # instead of going through the page template.
            page_start(req, of=argd['of'])
            if argd['of'].startswith('x'):
                req.write(body)
            page_end(req, of=argd['of'])
            return
        elif argd['bskid']:
            rssurl = "%s/yourbaskets/display_public?&bskid=%i&of=xr" % \
                     (CFG_SITE_SECURE_URL, argd['bskid'])
        return page(title = title,
                    body = body,
                    navtrail = navtrail,
                    uid = uid,
                    lastupdated = __lastupdated__,
                    language = argd['ln'],
                    req = req,
                    navmenuid = 'yourbaskets',
                    of = argd['of'],
                    navtrail_append_title_p = 0,
                    secure_page_p=1,
                    rssurl=rssurl)

    def list_public_baskets(self, req, form):
        """List of public baskets interface."""
        argd = wash_urlargd(form, {'limit': (int, 1),
                                   'sort': (str, 'name'),
                                   'asc': (int, 1),
                                   'of': (str, ''),
                                   'ln': (str, CFG_SITE_LANG)})
        _ = gettext_set_language(argd['ln'])
        uid = getUid(req)
        if uid == -1 or CFG_ACCESS_CONTROL_LEVEL_SITE == 2:
            return page_not_authorized(req, "../yourbaskets/list_public_baskets",
                                       navmenuid = 'yourbaskets')
        user_info = collect_user_info(req)
        # Only webstat admins get to see the number-of-views column.
        nb_views_show = acc_authorize_action(user_info, 'runwebstatadmin')
        nb_views_show_p = not(nb_views_show[0])
        (body, navtrail) = perform_request_list_public_baskets(uid,
                                                               argd['limit'],
                                                               argd['sort'],
                                                               argd['asc'],
                                                               nb_views_show_p,
                                                               argd['ln'])
        return page(title = _("List of public baskets"),
                    body = body,
                    navtrail = navtrail,
                    uid = uid,
                    lastupdated = __lastupdated__,
                    language = argd['ln'],
                    req = req,
                    navmenuid = 'yourbaskets',
                    of = argd['of'],
                    navtrail_append_title_p = 0,
                    secure_page_p=1)

    def subscribe(self, req, form):
        """Subscribe to a basket pseudo-interface."""
        argd = wash_urlargd(form, {'bskid': (int, 0),
                                   'of': (str, 'hb'),
                                   'ln': (str, CFG_SITE_LANG)})
        _ = gettext_set_language(argd['ln'])
        uid = getUid(req)
        if uid == -1 or CFG_ACCESS_CONTROL_LEVEL_SITE == 2:
            return page_not_authorized(req, "../yourbaskets/subscribe",
                                       navmenuid = 'yourbaskets')
        if isGuestUser(uid):
            return redirect_to_url(req, "%s/youraccount/login%s" % (
                CFG_SITE_SECURE_URL,
                make_canonical_urlargd({
                    'referer' : "%s/yourbaskets/subscribe%s" % (
                        CFG_SITE_SECURE_URL,
                        make_canonical_urlargd(argd, {})),
                    "ln" : argd['ln']}, {})))
        user_info = collect_user_info(req)
        if not user_info['precached_usebaskets']:
            return page_not_authorized(req, "../", \
                text = _("You are not authorized to use baskets."))
        if not argd['bskid']:
            (body, navtrail) = perform_request_list_public_baskets(uid)
            title = _('List of public baskets')
        else:
            # TODO: Take care of XML output as shown below
            #req.content_type = "text/xml"
            #req.send_http_header()
            #return perform_request_display_public(bskid=argd['bskid'], of=argd['of'], ln=argd['ln'])
            subscribe_warnings_html = perform_request_subscribe(uid, argd['bskid'], argd['ln'])
            (body, dummy, navtrail) = perform_request_display_public(uid=uid,
                                                                     selected_bskid=argd['bskid'],
                                                                     selected_recid=0,
                                                                     of=argd['of'],
                                                                     ln=argd['ln'])
            #warnings.extend(subscribe_warnings)
            body = subscribe_warnings_html + body
            title = _('Public basket')
        return page(title = title,
                    body = body,
                    navtrail = navtrail,
                    uid = uid,
                    lastupdated = __lastupdated__,
                    language = argd['ln'],
                    req = req,
                    navmenuid = 'yourbaskets',
                    of = argd['of'],
                    navtrail_append_title_p = 0,
                    secure_page_p=1)

    def unsubscribe(self, req, form):
        """Unsubscribe from basket pseudo-interface."""
        argd = wash_urlargd(form, {'bskid': (int, 0),
                                   'of': (str, 'hb'),
                                   'ln': (str, CFG_SITE_LANG)})
        _ = gettext_set_language(argd['ln'])
        uid = getUid(req)
        if uid == -1
or CFG_ACCESS_CONTROL_LEVEL_SITE == 2:
            return page_not_authorized(req, "../yourbaskets/unsubscribe",
                                       navmenuid = 'yourbaskets')
        if isGuestUser(uid):
            return redirect_to_url(req, "%s/youraccount/login%s" % (
                CFG_SITE_SECURE_URL,
                make_canonical_urlargd({
                    'referer' : "%s/yourbaskets/unsubscribe%s" % (
                        CFG_SITE_SECURE_URL,
                        make_canonical_urlargd(argd, {})),
                    "ln" : argd['ln']}, {})))
        user_info = collect_user_info(req)
        if not user_info['precached_usebaskets']:
            return page_not_authorized(req, "../", \
                text = _("You are not authorized to use baskets."))
        if not argd['bskid']:
            (body, navtrail) = perform_request_list_public_baskets(uid)
            title = _('List of public baskets')
        else:
            # TODO: Take care of XML output as shown below
            #req.content_type = "text/xml"
            #req.send_http_header()
            #return perform_request_display_public(bskid=argd['bskid'], of=argd['of'], ln=argd['ln'])
            unsubscribe_warnings_html = perform_request_unsubscribe(uid, argd['bskid'], argd['ln'])
            (body, dummy, navtrail) = perform_request_display_public(uid=uid,
                                                                     selected_bskid=argd['bskid'],
                                                                     selected_recid=0,
                                                                     of=argd['of'],
                                                                     ln=argd['ln'])
            # warnings.extend(unsubscribe_warnings)
            body = unsubscribe_warnings_html + body
            title = _('Public basket')
        return page(title = title,
                    body = body,
                    navtrail = navtrail,
                    uid = uid,
                    lastupdated = __lastupdated__,
                    language = argd['ln'],
                    req = req,
                    navmenuid = 'yourbaskets',
                    of = argd['of'],
                    navtrail_append_title_p = 0,
                    secure_page_p=1)

    def write_public_note(self, req, form):
        """Write a comment (just interface for writing)"""
        argd = wash_urlargd(form, {'bskid': (int, 0),
                                   'recid': (int, 0),
                                   'cmtid': (int, 0),
                                   'of' : (str, ''),
                                   'ln' : (str, CFG_SITE_LANG)})
        _ = gettext_set_language(argd['ln'])
        uid = getUid(req)
        if uid == -1 or CFG_ACCESS_CONTROL_LEVEL_SITE >= 1:
            return page_not_authorized(req, "../yourbaskets/write_public_note",
                                       navmenuid = 'yourbaskets')
        if isGuestUser(uid):
            return redirect_to_url(req, "%s/youraccount/login%s" % (
                CFG_SITE_SECURE_URL,
                make_canonical_urlargd({
                    'referer' : "%s/yourbaskets/write_public_note%s" % (
                        CFG_SITE_SECURE_URL,
                        make_canonical_urlargd(argd, {})),
                    "ln" : argd['ln']}, {})))
        user_info = collect_user_info(req)
        if not user_info['precached_usebaskets']:
            return page_not_authorized(req, "../", \
                text = _("You are not authorized to use baskets."))
        (body, navtrail) = perform_request_write_public_note(uid=uid,
                                                             bskid=argd['bskid'],
                                                             recid=argd['recid'],
                                                             cmtid=argd['cmtid'],
                                                             ln=argd['ln'])
        # register event in webstat
        basket_str = "%s (%d)" % (get_basket_name(argd['bskid']), argd['bskid'])
        if user_info['email']:
            user_str = "%s (%d)" % (user_info['email'], user_info['uid'])
        else:
            user_str = ""
        try:
            register_customevent("baskets", ["write_public_note", basket_str, user_str])
        except:
            register_exception(suffix="Do the webstat tables exists? Try with 'webstatadmin --load-config'")
        return page(title = _("Add a note"),
                    body = body,
                    navtrail = navtrail,
                    uid = uid,
                    lastupdated = __lastupdated__,
                    language = argd['ln'],
                    req = req,
                    navmenuid = 'yourbaskets',
                    of = argd['of'],
                    secure_page_p=1)

    def save_public_note(self, req, form):
        """Save comment on record in basket"""
        # NOTE(review): 'reply_to' is washed as str but defaults to int 0 --
        # the type/default pair looks inconsistent; confirm the expected type
        # against perform_request_save_public_note before changing.
        argd = wash_urlargd(form, {'bskid': (int, 0),
                                   'recid': (int, 0),
                                   'note_title': (str, ""),
                                   'note_body': (str, ""),
                                   'editor_type': (str, ""),
                                   'of': (str, ''),
                                   'ln': (str, CFG_SITE_LANG),
                                   'reply_to': (str, 0)})
        _ = gettext_set_language(argd['ln'])
        uid = getUid(req)
        if uid == -1 or CFG_ACCESS_CONTROL_LEVEL_SITE >= 1:
            return page_not_authorized(req, "../yourbaskets/save_public_note",
                                       navmenuid = 'yourbaskets')
        if isGuestUser(uid):
            return redirect_to_url(req, "%s/youraccount/login%s" % (
                CFG_SITE_SECURE_URL,
                make_canonical_urlargd({
                    'referer' : "%s/yourbaskets/save_public_note%s" % (
                        CFG_SITE_SECURE_URL,
                        make_canonical_urlargd(argd, {})),
                    "ln" : argd['ln']}, {})))
        user_info = collect_user_info(req)
        if not user_info['precached_usebaskets']:
            return page_not_authorized(req, "../", \
                text = _("You are not authorized to use baskets."))
        (body, navtrail) = perform_request_save_public_note(uid=uid,
                                                            bskid=argd['bskid'],
                                                            recid=argd['recid'],
                                                            note_title=argd['note_title'],
                                                            note_body=argd['note_body'],
                                                            editor_type=argd['editor_type'],
                                                            ln=argd['ln'],
                                                            reply_to=argd['reply_to'])
        # TODO: do not stat event if save was not succussful
        # register event in webstat
        basket_str = "%s (%d)" % (get_basket_name(argd['bskid']), argd['bskid'])
        if user_info['email']:
            user_str = "%s (%d)" % (user_info['email'], user_info['uid'])
        else:
            user_str = ""
        try:
            register_customevent("baskets", ["save_public_note", basket_str, user_str])
        except:
            register_exception(suffix="Do the webstat tables exists? Try with 'webstatadmin --load-config'")
        return page(title = _("Display item and notes"),
                    body = body,
                    navtrail = navtrail,
                    uid = uid,
                    lastupdated = __lastupdated__,
                    language = argd['ln'],
                    req = req,
                    navmenuid = 'yourbaskets',
                    of = argd['of'],
                    navtrail_append_title_p = 0,
                    secure_page_p=1)

# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Module that declares the functions in tf.contrib.receptive_field's API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import
from tensorflow.contrib.receptive_field.python.util.graph_compute_order import get_compute_order
from tensorflow.contrib.receptive_field.python.util.receptive_field import compute_receptive_field_from_graph_def
# pylint: enable=unused-import
# Remove the __future__ names so they do not leak into the public namespace.
del absolute_import
del division
del print_function
# NOTE(review): Python 2 only — `types.NoneType` does not exist in Python 3.
from types import NoneType
import copy
import re
import sys
import pymongo
from pulp.server import exceptions as pulp_exceptions
from pulp.server.db.model.base import Model


class Criteria(Model):
    """Search criteria for a mongo query: filters, sort, limit, skip, fields."""

    def __init__(self, filters=None, sort=None, limit=None, skip=None, fields=None):
        """
        @param filters: mongo spec document limiting matched documents
        @type  filters: dict or None
        @param sort: ordered list of (field, direction) pairs
        @type  sort: list, tuple or None
        @param limit: maximum number of results to return
        @type  limit: int or None
        @param skip: number of results to skip
        @type  skip: int or None
        @param fields: projection; only these fields are returned
        @type  fields: list, tuple or None
        """
        super(Criteria, self).__init__()
        # NOTE(review): asserts are stripped under `python -O`; these checks
        # only guard against programmer error, not client input.
        assert isinstance(filters, (dict, NoneType))
        assert isinstance(sort, (list, tuple, NoneType))
        assert isinstance(limit, (int, NoneType))
        assert isinstance(skip, (int, NoneType))
        assert isinstance(fields, (list, tuple, NoneType))

        self.filters = filters
        self.sort = sort
        self.limit = limit
        self.skip = skip
        self.fields = fields

    def as_dict(self):
        """
        @return: the Criteria as a dict, suitable for serialization by
                 something like JSON, and compatible as input to the
                 from_dict method.
        @rtype:  dict
        """
        return {
            'filters': self.filters,
            'sort': self.sort,
            'limit': self.limit,
            'skip': self.skip,
            'fields': self.fields
        }

    @classmethod
    def from_client_input(cls, doc):
        """
        Accept input provided by a client (such as through a GET or POST
        request), validate that the provided data is part of a Criteria
        definition, and ensure that no additional data is present.

        @param doc: a dict including only data that corresponds to attributes
                    of a Criteria object
        @type  doc: dict

        @return: new Criteria instance based on provided data
        @rtype:  pulp.server.db.model.criteria.Criteria

        @raise pulp_exceptions.InvalidValue: if doc is not a dict, a value
               fails validation, or unrecognized keys remain
        """
        if not isinstance(doc, dict):
            # Python 2 three-expression raise; the traceback argument comes
            # from sys.exc_info() — presumably None here since no exception
            # is being handled. TODO(review): confirm this is intentional.
            raise pulp_exceptions.InvalidValue(['criteria']), None, sys.exc_info()[2]

        # Copy so pop() below does not mutate the caller's document.
        doc = copy.copy(doc)

        filters = _validate_filters(doc.pop('filters', None))
        sort = _validate_sort(doc.pop('sort', None))
        limit = _validate_limit(doc.pop('limit', None))
        skip = _validate_skip(doc.pop('skip', None))
        fields = _validate_fields(doc.pop('fields', None))
        # Anything left over in the document is an unrecognized key.
        if doc:
            raise pulp_exceptions.InvalidValue(doc.keys())
        return cls(filters, sort, limit, skip, fields)

    @classmethod
    def from_dict(cls, input_dictionary):
        """
        Convert a dictionary representation of the Criteria into a new
        Criteria object. The output of as_dict() is suitable as input to
        this method.

        :param input_dictionary: The dictionary representation of a Criteria
                                 object that will be used to construct one.
        :type  input_dictionary: dict
        :return: A new Criteria object
        :rtype:  Criteria
        """
        return cls(input_dictionary['filters'], input_dictionary['sort'],
                   input_dictionary['limit'], input_dictionary['skip'],
                   input_dictionary['fields'])

    @property
    def spec(self):
        # The mongo spec for this criteria's filters, with $not values
        # compiled to regexes; None when no filters were given.
        if self.filters is None:
            return None
        spec = copy.copy(self.filters)
        _compile_regexs_for_not(spec)
        return spec


class UnitAssociationCriteria(Model):
    """Criteria for searching unit associations within repositories."""

    # Shadowed here for convenience
    SORT_ASCENDING = pymongo.ASCENDING
    SORT_DESCENDING = pymongo.DESCENDING

    def __init__(self, type_ids=None, association_filters=None, unit_filters=None,
                 association_sort=None, unit_sort=None, limit=None, skip=None,
                 association_fields=None, unit_fields=None, remove_duplicates=False):
        """
        There are a number of entry points into creating one of these
        instances: multiple REST interfaces, the plugins, etc. As such, this
        constructor does quite a bit of validation on the parameter values.

        @param type_ids: list of types to search
        @type  type_ids: [str]
        @param association_filters: mongo spec describing search parameters on
               association metadata
        @type  association_filters: dict
        @param unit_filters: mongo spec describing search parameters on unit
               metadata; only used when a single type ID is specified
        @type  unit_filters: dict
        @param association_sort: ordered list of fields and directions; may
               only contain association metadata
        @type  association_sort: [(str, )]
        @param unit_sort: ordered list of fields and directions; only used
               when a single type ID is specified
        @type  unit_sort: [(str, )]
        @param limit: maximum number of results to return
        @type  limit: int
        @param skip: number of results to skip
        @type  skip: int
        @param association_fields: if specified, only the given fields from
               the association's metadata will be included in returned units
        @type  association_fields: list of str
        @param unit_fields: if specified, only the given fields from the
               unit's metadata are returned; only applies when a single type
               ID is specified
        @type  unit_fields: list of str
        @param remove_duplicates: if True, units with multiple associations
               will only return a single association; defaults to False
        @type  remove_duplicates: bool
        """
        super(UnitAssociationCriteria, self).__init__()

        # A default instance will be used in the case where no criteria is
        # passed in, so use sane defaults here.
        if type_ids is not None and not isinstance(type_ids, (list, tuple)):
            # Normalize a single scalar type ID into a one-element list.
            type_ids = [type_ids]
        self.type_ids = type_ids

        self.association_filters = association_filters or {}
        self.unit_filters = unit_filters or {}

        self.association_sort = association_sort
        self.unit_sort = unit_sort

        self.limit = limit
        self.skip = skip

        # The unit_id and unit_type_id are required as association returned
        # data; frankly it doesn't make sense without them but it's also a
        # technical requirement for the algorithm to run. Make sure they are
        # there.  NOTE(review): this appends to the caller's list in place.
        if association_fields is not None:
            if 'unit_id' not in association_fields:
                association_fields.append('unit_id')
            if 'unit_type_id' not in association_fields:
                association_fields.append('unit_type_id')

        self.association_fields = association_fields
        self.unit_fields = unit_fields

        self.remove_duplicates = remove_duplicates

    @classmethod
    def from_client_input(cls, query):
        """
        Parses a unit association query document and assembles a corresponding
        internal criteria object.

        Example:
        {
          "type_ids" : ["rpm"],
          "filters" : {
            "unit" : <mongo spec>,
            "association" : <mongo spec>
          },
          "sort" : {
            "unit" : [ ["name", "ascending"], ["version", "descending"] ],
            "association" : [ ["created", "descending"] ]
          },
          "limit" : 100,
          "skip" : 200,
          "fields" : {
            "unit" : ["name", "version", "arch"],
            "association" : ["created"]
          },
          "remove_duplicates" : True
        }

        @param query: user-provided query details
        @type  query: dict

        @return: criteria object for the unit association query
        @rtype:  L{UnitAssociationCriteria}

        @raises ValueError: on an invalid value in the query
        """
        # Copy so the pop() calls below do not mutate the caller's document.
        query = copy.copy(query)

        type_ids = query.pop('type_ids', None)

        filters = query.pop('filters', None)
        if filters is None:
            association_filters = None
            unit_filters = None
        else:
            association_filters = _validate_filters(filters.pop('association', None))
            unit_filters = _validate_filters(filters.pop('unit', None))

        sort = query.pop('sort', None)
        if sort is None:
            association_sort = None
            unit_sort = None
        else:
            association_sort = _validate_sort(sort.pop('association', None))
            unit_sort = _validate_sort(sort.pop('unit', None))

        limit = _validate_limit(query.pop('limit', None))
        skip = _validate_skip(query.pop('skip', None))

        fields = query.pop('fields', None)
        if fields is None:
            association_fields = None
            unit_fields = None
        else:
            association_fields = _validate_fields(fields.pop('association', None))
            unit_fields = _validate_fields(fields.pop('unit', None))

        remove_duplicates = bool(query.pop('remove_duplicates', False))

        # report any superfluous doc key, value pairs as errors
        for d in (query, filters, sort, fields):
            if d:
                raise pulp_exceptions.InvalidValue(d.keys())

        # These are here for backward compatibility, in the future, these
        # should be removed and the corresponding association_spec and
        # unit_spec properties should be used
        if association_filters:
            _compile_regexs_for_not(association_filters)
        if unit_filters:
            _compile_regexs_for_not(unit_filters)

        return cls(type_ids=type_ids, association_filters=association_filters,
                   unit_filters=unit_filters, association_sort=association_sort,
                   unit_sort=unit_sort, limit=limit, skip=skip,
                   association_fields=association_fields, unit_fields=unit_fields,
                   remove_duplicates=remove_duplicates)

    @property
    def association_spec(self):
        # Association filters with $not values compiled; None when unset.
        if self.association_filters is None:
            return None
        association_spec = copy.copy(self.association_filters)
        _compile_regexs_for_not(association_spec)
        return association_spec

    @property
    def unit_spec(self):
        # Unit filters with $not values compiled; None when unset.
        if self.unit_filters is None:
            return None
        unit_spec = copy.copy(self.unit_filters)
        _compile_regexs_for_not(unit_spec)
        return unit_spec

    def __str__(self):
        # Human-readable summary of every populated criteria component.
        # NOTE(review): some components are tested for truthiness and others
        # with `is not None`, so e.g. an empty filters dict is skipped while
        # an empty sort list is shown — presumably intentional, verify.
        s = ''
        if self.type_ids:
            s += 'Type IDs [%s] ' % self.type_ids
        if self.association_filters:
            s += 'Assoc Filters [%s] ' % self.association_filters
        if self.unit_filters is not None:
            s += 'Unit Filters [%s] ' % self.unit_filters
        if self.association_sort is not None:
            s += 'Assoc Sort [%s] ' % self.association_sort
        if self.unit_sort is not None:
            s += 'Unit Sort [%s] ' % self.unit_sort
        if self.limit:
            s += 'Limit [%s] ' % self.limit
        if self.skip:
            s += 'Skip [%s] ' % self.skip
        if self.association_fields:
            s += 'Assoc Fields [%s] ' % self.association_fields
        if self.unit_fields:
            s += 'Unit Fields [%s] ' % self.unit_fields
        s += 'Remove Duplicates [%s]' % self.remove_duplicates
        return s


def _validate_filters(filters):
    # Filters must be a dict (a mongo spec document) or absent.
    if filters is None:
        return None
    if not isinstance(filters, dict):
        raise pulp_exceptions.InvalidValue(['filters'])
    return filters


def _validate_sort(sort):
    """
    Validate a client-supplied sort specification and normalize the direction
    flags ('ascending'/'1', 'descending'/'-1') to pymongo constants.

    @type  sort: list, tuple
    @rtype: list of (field, direction) pairs, or None when sort was None
    """
    if sort is None:
        return None
    if not isinstance(sort, (list, tuple)):
        # Python 2 three-expression raise with the current (likely None)
        # traceback; see NOTE in Criteria.from_client_input.
        raise pulp_exceptions.InvalidValue(['sort']), None, sys.exc_info()[2]
    try:
        valid_sort = []
        for entry in sort:
            if not isinstance(entry[0], basestring):
                raise TypeError('Invalid field name [%s]' % str(entry[0]))
            flag = str(entry[1]).lower()
            direction = None
            if flag in ('ascending', '1'):
                direction = pymongo.ASCENDING
            if flag in ('descending', '-1'):
                direction = pymongo.DESCENDING
            if direction is None:
                raise ValueError('Invalid sort direction [%s]' % flag)
            valid_sort.append((entry[0], direction))
    except (TypeError, ValueError):
        # Any malformed entry (bad tuple shape, bad name, bad direction)
        # is reported uniformly as an invalid 'sort' value.
        raise pulp_exceptions.InvalidValue(['sort']), None, sys.exc_info()[2]
    else:
        return valid_sort


def _validate_limit(limit):
    # bool is a subclass of int, so reject it explicitly before int().
    if isinstance(limit, bool):
        raise pulp_exceptions.InvalidValue(['limit']), None, sys.exc_info()[2]
    if limit is None:
        return None
    try:
        limit = int(limit)
        if limit < 1:
            raise TypeError()
    except (TypeError, ValueError):
        raise pulp_exceptions.InvalidValue(['limit']), None, sys.exc_info()[2]
    else:
        return limit


def _validate_skip(skip):
    # bool is a subclass of int, so reject it explicitly before int().
    if isinstance(skip, bool):
        raise pulp_exceptions.InvalidValue(['skip']), None, sys.exc_info()[2]
    if skip is None:
        return None
    try:
        skip = int(skip)
        if skip < 0:
            raise TypeError()
    except (TypeError, ValueError):
        raise pulp_exceptions.InvalidValue(['skip']), None, sys.exc_info()[2]
    else:
        return skip


def _validate_fields(fields):
    # Fields must be an iterable of strings; a bare string or a dict is
    # iterable too, so those are rejected explicitly first.
    if fields is None:
        return None
    try:
        if isinstance(fields, (basestring, dict)):
            raise TypeError
        fields = list(fields)
        for f in fields:
            if not isinstance(f, basestring):
                raise TypeError()
    except TypeError:
        raise pulp_exceptions.InvalidValue(['fields']), None, sys.exc_info()[2]
    return fields


def _compile_regexs_for_not(spec):
    # Recursively walk a mongo spec and compile any string value of a '$not'
    # key into a regex object (mongo requires $not values to be regexes).
    # Mutates spec in place.
    if not isinstance(spec, (dict, list, tuple)):
        return
    if isinstance(spec, (list, tuple)):
        # Python 2 eager map used purely for its side effect on each element.
        map(_compile_regexs_for_not, spec)
        return
    for key, value in spec.items():
        if key == '$not' and isinstance(value, basestring):
            spec[key] = re.compile(value)
        _compile_regexs_for_not(value)

# Copyright (c) 2012 Google Inc.
All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. r"""Code to validate and convert settings of the Microsoft build tools. This file contains code to validate and convert settings of the Microsoft build tools. The function ConvertToMSBuildSettings(), ValidateMSVSSettings(), and ValidateMSBuildSettings() are the entry points. This file was created by comparing the projects created by Visual Studio 2008 and Visual Studio 2010 for all available settings through the user interface. The MSBuild schemas were also considered. They are typically found in the MSBuild install directory, e.g. c:\Program Files (x86)\MSBuild """ import sys import re # Dictionaries of settings validators. The key is the tool name, the value is # a dictionary mapping setting names to validation functions. _msvs_validators = {} _msbuild_validators = {} # A dictionary of settings converters. The key is the tool name, the value is # a dictionary mapping setting names to conversion functions. _msvs_to_msbuild_converters = {} # Tool name mapping from MSVS to MSBuild. _msbuild_name_of_tool = {} class _Tool(object): """Represents a tool used by MSVS or MSBuild. Attributes: msvs_name: The name of the tool in MSVS. msbuild_name: The name of the tool in MSBuild. """ def __init__(self, msvs_name, msbuild_name): self.msvs_name = msvs_name self.msbuild_name = msbuild_name def _AddTool(tool): """Adds a tool to the four dictionaries used to process settings. This only defines the tool. Each setting also needs to be added. Args: tool: The _Tool object to be added. """ _msvs_validators[tool.msvs_name] = {} _msbuild_validators[tool.msbuild_name] = {} _msvs_to_msbuild_converters[tool.msvs_name] = {} _msbuild_name_of_tool[tool.msvs_name] = tool.msbuild_name def _GetMSBuildToolSettings(msbuild_settings, tool): """Returns an MSBuild tool dictionary. 
Creates it if needed.""" return msbuild_settings.setdefault(tool.msbuild_name, {}) class _Type(object): """Type of settings (Base class).""" def ValidateMSVS(self, value): """Verifies that the value is legal for MSVS. Args: value: the value to check for this type. Raises: ValueError if value is not valid for MSVS. """ def ValidateMSBuild(self, value): """Verifies that the value is legal for MSBuild. Args: value: the value to check for this type. Raises: ValueError if value is not valid for MSBuild. """ def ConvertToMSBuild(self, value): """Returns the MSBuild equivalent of the MSVS value given. Args: value: the MSVS value to convert. Returns: the MSBuild equivalent. Raises: ValueError if value is not valid. """ return value class _String(_Type): """A setting that's just a string.""" def ValidateMSVS(self, value): if not isinstance(value, basestring): raise ValueError('expected string; got %r' % value) def ValidateMSBuild(self, value): if not isinstance(value, basestring): raise ValueError('expected string; got %r' % value) def ConvertToMSBuild(self, value): # Convert the macros return ConvertVCMacrosToMSBuild(value) class _StringList(_Type): """A settings that's a list of strings.""" def ValidateMSVS(self, value): if not isinstance(value, basestring) and not isinstance(value, list): raise ValueError('expected string list; got %r' % value) def ValidateMSBuild(self, value): if not isinstance(value, basestring) and not isinstance(value, list): raise ValueError('expected string list; got %r' % value) def ConvertToMSBuild(self, value): # Convert the macros if isinstance(value, list): return [ConvertVCMacrosToMSBuild(i) for i in value] else: return ConvertVCMacrosToMSBuild(value) class _Boolean(_Type): """Boolean settings, can have the values 'false' or 'true'.""" def _Validate(self, value): if value != 'true' and value != 'false': raise ValueError('expected bool; got %r' % value) def ValidateMSVS(self, value): self._Validate(value) def ValidateMSBuild(self, value): 
self._Validate(value) def ConvertToMSBuild(self, value): self._Validate(value) return value class _Integer(_Type): """Integer settings.""" def __init__(self, msbuild_base=10): _Type.__init__(self) self._msbuild_base = msbuild_base def ValidateMSVS(self, value): # Try to convert, this will raise ValueError if invalid. self.ConvertToMSBuild(value) def ValidateMSBuild(self, value): # Try to convert, this will raise ValueError if invalid. int(value, self._msbuild_base) def ConvertToMSBuild(self, value): msbuild_format = (self._msbuild_base == 10) and '%d' or '0x%04x' return msbuild_format % int(value) class _Enumeration(_Type): """Type of settings that is an enumeration. In MSVS, the values are indexes like '0', '1', and '2'. MSBuild uses text labels that are more representative, like 'Win32'. Constructor args: label_list: an array of MSBuild labels that correspond to the MSVS index. In the rare cases where MSVS has skipped an index value, None is used in the array to indicate the unused spot. new: an array of labels that are new to MSBuild. """ def __init__(self, label_list, new=None): _Type.__init__(self) self._label_list = label_list self._msbuild_values = set(value for value in label_list if value is not None) if new is not None: self._msbuild_values.update(new) def ValidateMSVS(self, value): # Try to convert. It will raise an exception if not valid. self.ConvertToMSBuild(value) def ValidateMSBuild(self, value): if value not in self._msbuild_values: raise ValueError('unrecognized enumerated value %s' % value) def ConvertToMSBuild(self, value): index = int(value) if index < 0 or index >= len(self._label_list): raise ValueError('index value (%d) not in expected range [0, %d)' % (index, len(self._label_list))) label = self._label_list[index] if label is None: raise ValueError('converted value for %s not specified.' % value) return label # Instantiate the various generic types. 
_boolean = _Boolean()
_integer = _Integer()
# For now, we don't do any special validation on these types:
_string = _String()
_file_name = _String()
_folder_name = _String()
_file_list = _StringList()
_folder_list = _StringList()
_string_list = _StringList()
# Some boolean settings went from numerical values to boolean.  The
# mapping is 0: default, 1: false, 2: true.
_newly_boolean = _Enumeration(['', 'false', 'true'])


def _Same(tool, name, setting_type):
  """Defines a setting that has the same name in MSVS and MSBuild.

  Args:
    tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
    name: the name of the setting.
    setting_type: the type of this setting.
  """
  _Renamed(tool, name, name, setting_type)


def _Renamed(tool, msvs_name, msbuild_name, setting_type):
  """Defines a setting for which the name has changed.

  Args:
    tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
    msvs_name: the name of the MSVS setting.
    msbuild_name: the name of the MSBuild setting.
    setting_type: the type of this setting.
  """
  def _Translate(value, msbuild_settings):
    # Store the converted value under the MSBuild name for this tool.
    msbuild_tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool)
    msbuild_tool_settings[msbuild_name] = setting_type.ConvertToMSBuild(value)

  _msvs_validators[tool.msvs_name][msvs_name] = setting_type.ValidateMSVS
  _msbuild_validators[tool.msbuild_name][msbuild_name] = (
      setting_type.ValidateMSBuild)
  _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate


def _Moved(tool, settings_name, msbuild_tool_name, setting_type):
  # Same-named setting that moved to a different MSBuild tool section.
  _MovedAndRenamed(tool, settings_name, msbuild_tool_name, settings_name,
                   setting_type)


def _MovedAndRenamed(tool, msvs_settings_name, msbuild_tool_name,
                     msbuild_settings_name, setting_type):
  """Defines a setting that may have moved to a new section.

  Args:
    tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
    msvs_settings_name: the MSVS name of the setting.
    msbuild_tool_name: the name of the MSBuild tool to place the setting under.
    msbuild_settings_name: the MSBuild name of the setting.
    setting_type: the type of this setting.
  """
  def _Translate(value, msbuild_settings):
    tool_settings = msbuild_settings.setdefault(msbuild_tool_name, {})
    tool_settings[msbuild_settings_name] = setting_type.ConvertToMSBuild(value)

  _msvs_validators[tool.msvs_name][msvs_settings_name] = (
      setting_type.ValidateMSVS)
  validator = setting_type.ValidateMSBuild
  # NOTE: registered under the destination MSBuild tool, not tool.msbuild_name.
  _msbuild_validators[msbuild_tool_name][msbuild_settings_name] = validator
  _msvs_to_msbuild_converters[tool.msvs_name][msvs_settings_name] = _Translate


def _MSVSOnly(tool, name, setting_type):
  """Defines a setting that is only found in MSVS.

  Args:
    tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
    name: the name of the setting.
    setting_type: the type of this setting.
  """
  def _Translate(unused_value, unused_msbuild_settings):
    # Since this is for MSVS only settings, no translation will happen.
    pass

  _msvs_validators[tool.msvs_name][name] = setting_type.ValidateMSVS
  _msvs_to_msbuild_converters[tool.msvs_name][name] = _Translate


def _MSBuildOnly(tool, name, setting_type):
  """Defines a setting that is only found in MSBuild.

  Args:
    tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
    name: the name of the setting.
    setting_type: the type of this setting.
  """
  def _Translate(value, msbuild_settings):
    # Let msbuild-only properties get translated as-is from msvs_settings.
    tool_settings = msbuild_settings.setdefault(tool.msbuild_name, {})
    tool_settings[name] = value

  _msbuild_validators[tool.msbuild_name][name] = setting_type.ValidateMSBuild
  _msvs_to_msbuild_converters[tool.msvs_name][name] = _Translate


def _ConvertedToAdditionalOption(tool, msvs_name, flag):
  """Defines a setting that's handled via a command line option in MSBuild.

  Args:
    tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
    msvs_name: the name of the MSVS setting that if 'true' becomes a flag
    flag: the flag to insert at the end of the AdditionalOptions
  """
  def _Translate(value, msbuild_settings):
    if value == 'true':
      tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool)
      if 'AdditionalOptions' in tool_settings:
        new_flags = '%s %s' % (tool_settings['AdditionalOptions'], flag)
      else:
        new_flags = flag
      tool_settings['AdditionalOptions'] = new_flags

  _msvs_validators[tool.msvs_name][msvs_name] = _boolean.ValidateMSVS
  _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate


def _CustomGeneratePreprocessedFile(tool, msvs_name):
  # GeneratePreprocessedFile expands into two MSBuild booleans, so it needs
  # bespoke translation and validation wiring.
  def _Translate(value, msbuild_settings):
    tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool)
    if value == '0':
      tool_settings['PreprocessToFile'] = 'false'
      tool_settings['PreprocessSuppressLineNumbers'] = 'false'
    elif value == '1':  # /P
      tool_settings['PreprocessToFile'] = 'true'
      tool_settings['PreprocessSuppressLineNumbers'] = 'false'
    elif value == '2':  # /EP /P
      tool_settings['PreprocessToFile'] = 'true'
      tool_settings['PreprocessSuppressLineNumbers'] = 'true'
    else:
      raise ValueError('value must be one of [0, 1, 2]; got %s' % value)

  # Create a bogus validator that looks for '0', '1', or '2'
  msvs_validator = _Enumeration(['a', 'b', 'c']).ValidateMSVS
  _msvs_validators[tool.msvs_name][msvs_name] = msvs_validator
  msbuild_validator = _boolean.ValidateMSBuild
  msbuild_tool_validators = _msbuild_validators[tool.msbuild_name]
  msbuild_tool_validators['PreprocessToFile'] = msbuild_validator
  msbuild_tool_validators['PreprocessSuppressLineNumbers'] = msbuild_validator
  _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate


# Matches $(IntDir) or $(OutDir) followed by one or more redundant slashes.
fix_vc_macro_slashes_regex_list = ('IntDir', 'OutDir')
fix_vc_macro_slashes_regex = re.compile(
    r'(\$\((?:%s)\))(?:[\\/]+)' % "|".join(fix_vc_macro_slashes_regex_list)
)

# Regular expression to detect keys that were generated by exclusion lists
_EXCLUDED_SUFFIX_RE = re.compile('^(.*)_excluded$')


def _ValidateExclusionSetting(setting, settings, error_msg, stderr=sys.stderr):
  """Verify that 'setting' is valid if it is generated from an exclusion list.

  If the setting appears to be generated from an exclusion list, the root name
  is checked.

  Args:
      setting:   A string that is the setting name to validate
      settings:  A dictionary where the keys are valid settings
      error_msg: The message to emit in the event of error
      stderr:    The stream receiving the error messages.
  """
  # This may be unrecognized because it's an exclusion list. If the
  # setting name has the _excluded suffix, then check the root name.
  unrecognized = True
  m = re.match(_EXCLUDED_SUFFIX_RE, setting)
  if m:
    root_setting = m.group(1)
    unrecognized = root_setting not in settings

  if unrecognized:
    # We don't know this setting. Give a warning.
    print >> stderr, error_msg


def FixVCMacroSlashes(s):
  """Replace macros which have excessive following slashes.

  These macros are known to have a built-in trailing slash. Furthermore, many
  scripts hiccup on processing paths with extra slashes in the middle.

  This list is probably not exhaustive.  Add as needed.
  """
  if '$' in s:
    s = fix_vc_macro_slashes_regex.sub(r'\1', s)
  return s


def ConvertVCMacrosToMSBuild(s):
  """Convert the the MSVS macros found in the string to the MSBuild equivalent.

  This list is probably not exhaustive.  Add as needed.
  """
  if '$' in s:
    replace_map = {
        '$(ConfigurationName)': '$(Configuration)',
        '$(InputDir)': '%(RelativeDir)',
        '$(InputExt)': '%(Extension)',
        '$(InputFileName)': '%(Filename)%(Extension)',
        '$(InputName)': '%(Filename)',
        '$(InputPath)': '%(Identity)',
        '$(ParentName)': '$(ProjectFileName)',
        '$(PlatformName)': '$(Platform)',
        '$(SafeInputName)': '%(Filename)',
    }
    for old, new in replace_map.iteritems():
      s = s.replace(old, new)
    s = FixVCMacroSlashes(s)
  return s


def ConvertToMSBuildSettings(msvs_settings, stderr=sys.stderr):
  """Converts MSVS settings (VS2008 and earlier) to MSBuild settings (VS2010+).

  Args:
      msvs_settings: A dictionary.  The key is the tool name.  The values are
          themselves dictionaries of settings and their values.
      stderr: The stream receiving the error messages.

  Returns:
      A dictionary of MSBuild settings.  The key is either the MSBuild tool
      name or the empty string (for the global settings).  The values are
      themselves dictionaries of settings and their values.
  """
  msbuild_settings = {}
  for msvs_tool_name, msvs_tool_settings in msvs_settings.iteritems():
    if msvs_tool_name in _msvs_to_msbuild_converters:
      msvs_tool = _msvs_to_msbuild_converters[msvs_tool_name]
      for msvs_setting, msvs_value in msvs_tool_settings.iteritems():
        if msvs_setting in msvs_tool:
          # Invoke the translation function.
          try:
            msvs_tool[msvs_setting](msvs_value, msbuild_settings)
          except ValueError, e:
            print >> stderr, ('Warning: while converting %s/%s to MSBuild, '
                              '%s' % (msvs_tool_name, msvs_setting, e))
        else:
          _ValidateExclusionSetting(msvs_setting,
                                    msvs_tool,
                                    ('Warning: unrecognized setting %s/%s '
                                     'while converting to MSBuild.' %
                                     (msvs_tool_name, msvs_setting)),
                                    stderr)
    else:
      print >> stderr, ('Warning: unrecognized tool %s while converting to '
                        'MSBuild.' % msvs_tool_name)
  return msbuild_settings


def ValidateMSVSSettings(settings, stderr=sys.stderr):
  """Validates that the names of the settings are valid for MSVS.

  Args:
      settings: A dictionary.  The key is the tool name.  The values are
          themselves dictionaries of settings and their values.
      stderr: The stream receiving the error messages.
  """
  _ValidateSettings(_msvs_validators, settings, stderr)


def ValidateMSBuildSettings(settings, stderr=sys.stderr):
  """Validates that the names of the settings are valid for MSBuild.

  Args:
      settings: A dictionary.  The key is the tool name.  The values are
          themselves dictionaries of settings and their values.
      stderr: The stream receiving the error messages.
  """
  _ValidateSettings(_msbuild_validators, settings, stderr)


def _ValidateSettings(validators, settings, stderr):
  """Validates that the settings are valid for MSBuild or MSVS.

  We currently only validate the names of the settings, not their values.

  Args:
      validators: A dictionary of tools and their validators.
      settings: A dictionary.  The key is the tool name.  The values are
          themselves dictionaries of settings and their values.
      stderr: The stream receiving the error messages.
  """
  for tool_name in settings:
    if tool_name in validators:
      tool_validators = validators[tool_name]
      for setting, value in settings[tool_name].iteritems():
        if setting in tool_validators:
          try:
            tool_validators[setting](value)
          except ValueError, e:
            print >> stderr, ('Warning: for %s/%s, %s' %
                              (tool_name, setting, e))
        else:
          _ValidateExclusionSetting(setting,
                                    tool_validators,
                                    ('Warning: unrecognized setting %s/%s' %
                                     (tool_name, setting)),
                                    stderr)
    else:
      print >> stderr, ('Warning: unrecognized tool %s' % tool_name)


# MSVS and MBuild names of the tools.
_compile = _Tool('VCCLCompilerTool', 'ClCompile')
_link = _Tool('VCLinkerTool', 'Link')
_midl = _Tool('VCMIDLTool', 'Midl')
_rc = _Tool('VCResourceCompilerTool', 'ResourceCompile')
_lib = _Tool('VCLibrarianTool', 'Lib')
_manifest = _Tool('VCManifestTool', 'Manifest')
_masm = _Tool('MASM', 'MASM')


_AddTool(_compile)
_AddTool(_link)
_AddTool(_midl)
_AddTool(_rc)
_AddTool(_lib)
_AddTool(_manifest)
_AddTool(_masm)
# Add sections only found in the MSBuild settings.
_msbuild_validators[''] = {}
_msbuild_validators['ProjectReference'] = {}
_msbuild_validators['ManifestResourceCompile'] = {}

# Descriptions of the compiler options, i.e. VCCLCompilerTool in MSVS and
# ClCompile in MSBuild.
# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\cl.xml" for
# the schema of the MSBuild ClCompile settings.
# Options that have the same name in MSVS and MSBuild
# (declarative table; each call registers validators and a converter for one
# VCCLCompilerTool/ClCompile setting — the trailing comment is the cl flag).
_Same(_compile, 'AdditionalIncludeDirectories', _folder_list)  # /I
_Same(_compile, 'AdditionalOptions', _string_list)
_Same(_compile, 'AdditionalUsingDirectories', _folder_list)  # /AI
_Same(_compile, 'AssemblerListingLocation', _file_name)  # /Fa
_Same(_compile, 'BrowseInformationFile', _file_name)
_Same(_compile, 'BufferSecurityCheck', _boolean)  # /GS
_Same(_compile, 'DisableLanguageExtensions', _boolean)  # /Za
_Same(_compile, 'DisableSpecificWarnings', _string_list)  # /wd
_Same(_compile, 'EnableFiberSafeOptimizations', _boolean)  # /GT
_Same(_compile, 'EnablePREfast', _boolean)  # /analyze Visible='false'
_Same(_compile, 'ExpandAttributedSource', _boolean)  # /Fx
_Same(_compile, 'FloatingPointExceptions', _boolean)  # /fp:except
_Same(_compile, 'ForceConformanceInForLoopScope', _boolean)  # /Zc:forScope
_Same(_compile, 'ForcedIncludeFiles', _file_list)  # /FI
_Same(_compile, 'ForcedUsingFiles', _file_list)  # /FU
_Same(_compile, 'GenerateXMLDocumentationFiles', _boolean)  # /doc
_Same(_compile, 'IgnoreStandardIncludePath', _boolean)  # /X
_Same(_compile, 'MinimalRebuild', _boolean)  # /Gm
_Same(_compile, 'OmitDefaultLibName', _boolean)  # /Zl
_Same(_compile, 'OmitFramePointers', _boolean)  # /Oy
_Same(_compile, 'PreprocessorDefinitions', _string_list)  # /D
_Same(_compile, 'ProgramDataBaseFileName', _file_name)  # /Fd
_Same(_compile, 'RuntimeTypeInfo', _boolean)  # /GR
_Same(_compile, 'ShowIncludes', _boolean)  # /showIncludes
_Same(_compile, 'SmallerTypeCheck', _boolean)  # /RTCc
_Same(_compile, 'StringPooling', _boolean)  # /GF
_Same(_compile, 'SuppressStartupBanner', _boolean)  # /nologo
_Same(_compile, 'TreatWChar_tAsBuiltInType', _boolean)  # /Zc:wchar_t
_Same(_compile, 'UndefineAllPreprocessorDefinitions', _boolean)  # /u
_Same(_compile, 'UndefinePreprocessorDefinitions', _string_list)  # /U
_Same(_compile, 'UseFullPaths', _boolean)  # /FC
_Same(_compile, 'WholeProgramOptimization', _boolean)  # /GL
_Same(_compile, 'XMLDocumentationFileName', _file_name)

# Enumerated compiler settings: the list index is the MSVS value, the label
# is the MSBuild value.
_Same(_compile, 'AssemblerOutput',
      _Enumeration(['NoListing',
                    'AssemblyCode',  # /FA
                    'All',  # /FAcs
                    'AssemblyAndMachineCode',  # /FAc
                    'AssemblyAndSourceCode']))  # /FAs
_Same(_compile, 'BasicRuntimeChecks',
      _Enumeration(['Default',
                    'StackFrameRuntimeCheck',  # /RTCs
                    'UninitializedLocalUsageCheck',  # /RTCu
                    'EnableFastChecks']))  # /RTC1
_Same(_compile, 'BrowseInformation',
      _Enumeration(['false',
                    'true',  # /FR
                    'true']))  # /Fr
_Same(_compile, 'CallingConvention',
      _Enumeration(['Cdecl',  # /Gd
                    'FastCall',  # /Gr
                    'StdCall',  # /Gz
                    'VectorCall']))  # /Gv
_Same(_compile, 'CompileAs',
      _Enumeration(['Default',
                    'CompileAsC',  # /TC
                    'CompileAsCpp']))  # /TP
_Same(_compile, 'DebugInformationFormat',
      _Enumeration(['',  # Disabled
                    'OldStyle',  # /Z7
                    None,
                    'ProgramDatabase',  # /Zi
                    'EditAndContinue']))  # /ZI
_Same(_compile, 'EnableEnhancedInstructionSet',
      _Enumeration(['NotSet',
                    'StreamingSIMDExtensions',  # /arch:SSE
                    'StreamingSIMDExtensions2',  # /arch:SSE2
                    'AdvancedVectorExtensions',  # /arch:AVX (vs2012+)
                    'NoExtensions',  # /arch:IA32 (vs2012+)
                    # This one only exists in the new msbuild format.
                    'AdvancedVectorExtensions2',  # /arch:AVX2 (vs2013r2+)
                    ]))
_Same(_compile, 'ErrorReporting',
      _Enumeration(['None',  # /errorReport:none
                    'Prompt',  # /errorReport:prompt
                    'Queue'],  # /errorReport:queue
                   new=['Send']))  # /errorReport:send"
_Same(_compile, 'ExceptionHandling',
      _Enumeration(['false',
                    'Sync',  # /EHsc
                    'Async'],  # /EHa
                   new=['SyncCThrow']))  # /EHs
_Same(_compile, 'FavorSizeOrSpeed',
      _Enumeration(['Neither',
                    'Speed',  # /Ot
                    'Size']))  # /Os
_Same(_compile, 'FloatingPointModel',
      _Enumeration(['Precise',  # /fp:precise
                    'Strict',  # /fp:strict
                    'Fast']))  # /fp:fast
_Same(_compile, 'InlineFunctionExpansion',
      _Enumeration(['Default',
                    'OnlyExplicitInline',  # /Ob1
                    'AnySuitable'],  # /Ob2
                   new=['Disabled']))  # /Ob0
_Same(_compile, 'Optimization',
      _Enumeration(['Disabled',  # /Od
                    'MinSpace',  # /O1
                    'MaxSpeed',  # /O2
                    'Full']))  # /Ox
_Same(_compile, 'RuntimeLibrary',
      _Enumeration(['MultiThreaded',  # /MT
                    'MultiThreadedDebug',  # /MTd
                    'MultiThreadedDLL',  # /MD
                    'MultiThreadedDebugDLL']))  # /MDd
_Same(_compile, 'StructMemberAlignment',
      _Enumeration(['Default',
                    '1Byte',  # /Zp1
                    '2Bytes',  # /Zp2
                    '4Bytes',  # /Zp4
                    '8Bytes',  # /Zp8
                    '16Bytes']))  # /Zp16
_Same(_compile, 'WarningLevel',
      _Enumeration(['TurnOffAllWarnings',  # /W0
                    'Level1',  # /W1
                    'Level2',  # /W2
                    'Level3',  # /W3
                    'Level4'],  # /W4
                   new=['EnableAllWarnings']))  # /Wall

# Options found in MSVS that have been renamed in MSBuild.
_Renamed(_compile, 'EnableFunctionLevelLinking', 'FunctionLevelLinking', _boolean) # /Gy _Renamed(_compile, 'EnableIntrinsicFunctions', 'IntrinsicFunctions', _boolean) # /Oi _Renamed(_compile, 'KeepComments', 'PreprocessKeepComments', _boolean) # /C _Renamed(_compile, 'ObjectFile', 'ObjectFileName', _file_name) # /Fo _Renamed(_compile, 'OpenMP', 'OpenMPSupport', _boolean) # /openmp _Renamed(_compile, 'PrecompiledHeaderThrough', 'PrecompiledHeaderFile', _file_name) # Used with /Yc and /Yu _Renamed(_compile, 'PrecompiledHeaderFile', 'PrecompiledHeaderOutputFile', _file_name) # /Fp _Renamed(_compile, 'UsePrecompiledHeader', 'PrecompiledHeader', _Enumeration(['NotUsing', # VS recognized '' for this value too. 'Create', # /Yc 'Use'])) # /Yu _Renamed(_compile, 'WarnAsError', 'TreatWarningAsError', _boolean) # /WX _ConvertedToAdditionalOption(_compile, 'DefaultCharIsUnsigned', '/J') # MSVS options not found in MSBuild. _MSVSOnly(_compile, 'Detect64BitPortabilityProblems', _boolean) _MSVSOnly(_compile, 'UseUnicodeResponseFiles', _boolean) # MSBuild options not found in MSVS. _MSBuildOnly(_compile, 'BuildingInIDE', _boolean) _MSBuildOnly(_compile, 'CompileAsManaged', _Enumeration([], new=['false', 'true'])) # /clr _MSBuildOnly(_compile, 'CreateHotpatchableImage', _boolean) # /hotpatch _MSBuildOnly(_compile, 'MultiProcessorCompilation', _boolean) # /MP _MSBuildOnly(_compile, 'PreprocessOutputPath', _string) # /Fi _MSBuildOnly(_compile, 'ProcessorNumber', _integer) # the number of processors _MSBuildOnly(_compile, 'TrackerLogDirectory', _folder_name) _MSBuildOnly(_compile, 'TreatSpecificWarningsAsErrors', _string_list) # /we _MSBuildOnly(_compile, 'UseUnicodeForAssemblerListing', _boolean) # /FAu # Defines a setting that needs very customized processing _CustomGeneratePreprocessedFile(_compile, 'GeneratePreprocessedFile') # Directives for converting MSVS VCLinkerTool to MSBuild Link. 
# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\link.xml" for
# the schema of the MSBuild Link settings.

# Options that have the same name in MSVS and MSBuild
_Same(_link, 'AdditionalDependencies', _file_list)
_Same(_link, 'AdditionalLibraryDirectories', _folder_list)  # /LIBPATH
#  /MANIFESTDEPENDENCY:
_Same(_link, 'AdditionalManifestDependencies', _file_list)
_Same(_link, 'AdditionalOptions', _string_list)
_Same(_link, 'AddModuleNamesToAssembly', _file_list)  # /ASSEMBLYMODULE
_Same(_link, 'AllowIsolation', _boolean)  # /ALLOWISOLATION
_Same(_link, 'AssemblyLinkResource', _file_list)  # /ASSEMBLYLINKRESOURCE
_Same(_link, 'BaseAddress', _string)  # /BASE
_Same(_link, 'CLRUnmanagedCodeCheck', _boolean)  # /CLRUNMANAGEDCODECHECK
_Same(_link, 'DelayLoadDLLs', _file_list)  # /DELAYLOAD
_Same(_link, 'DelaySign', _boolean)  # /DELAYSIGN
_Same(_link, 'EmbedManagedResourceFile', _file_list)  # /ASSEMBLYRESOURCE
_Same(_link, 'EnableUAC', _boolean)  # /MANIFESTUAC
_Same(_link, 'EntryPointSymbol', _string)  # /ENTRY
_Same(_link, 'ForceSymbolReferences', _file_list)  # /INCLUDE
_Same(_link, 'FunctionOrder', _file_name)  # /ORDER
_Same(_link, 'GenerateDebugInformation', _boolean)  # /DEBUG
_Same(_link, 'GenerateMapFile', _boolean)  # /MAP
_Same(_link, 'HeapCommitSize', _string)
_Same(_link, 'HeapReserveSize', _string)  # /HEAP
_Same(_link, 'IgnoreAllDefaultLibraries', _boolean)  # /NODEFAULTLIB
_Same(_link, 'IgnoreEmbeddedIDL', _boolean)  # /IGNOREIDL
_Same(_link, 'ImportLibrary', _file_name)  # /IMPLIB
_Same(_link, 'KeyContainer', _file_name)  # /KEYCONTAINER
_Same(_link, 'KeyFile', _file_name)  # /KEYFILE
_Same(_link, 'ManifestFile', _file_name)  # /ManifestFile
_Same(_link, 'MapExports', _boolean)  # /MAPINFO:EXPORTS
_Same(_link, 'MapFileName', _file_name)
_Same(_link, 'MergedIDLBaseFileName', _file_name)  # /IDLOUT
_Same(_link, 'MergeSections', _string)  # /MERGE
_Same(_link, 'MidlCommandFile', _file_name)  # /MIDL
_Same(_link, 'ModuleDefinitionFile', _file_name)  # /DEF
_Same(_link, 'OutputFile', _file_name)  # /OUT
_Same(_link, 'PerUserRedirection', _boolean)
_Same(_link, 'Profile', _boolean)  # /PROFILE
_Same(_link, 'ProfileGuidedDatabase', _file_name)  # /PGD
_Same(_link, 'ProgramDatabaseFile', _file_name)  # /PDB
_Same(_link, 'RegisterOutput', _boolean)
_Same(_link, 'SetChecksum', _boolean)  # /RELEASE
_Same(_link, 'StackCommitSize', _string)
_Same(_link, 'StackReserveSize', _string)  # /STACK
_Same(_link, 'StripPrivateSymbols', _file_name)  # /PDBSTRIPPED
_Same(_link, 'SupportUnloadOfDelayLoadedDLL', _boolean)  # /DELAY:UNLOAD
_Same(_link, 'SuppressStartupBanner', _boolean)  # /NOLOGO
_Same(_link, 'SwapRunFromCD', _boolean)  # /SWAPRUN:CD
_Same(_link, 'TurnOffAssemblyGeneration', _boolean)  # /NOASSEMBLY
_Same(_link, 'TypeLibraryFile', _file_name)  # /TLBOUT
_Same(_link, 'TypeLibraryResourceID', _integer)  # /TLBID
_Same(_link, 'UACUIAccess', _boolean)  # /uiAccess='true'
_Same(_link, 'Version', _string)  # /VERSION

# Same-named boolean options whose MSVS encoding differs from MSBuild's
# (hence the _newly_boolean converter).
_Same(_link, 'EnableCOMDATFolding', _newly_boolean)  # /OPT:ICF
_Same(_link, 'FixedBaseAddress', _newly_boolean)  # /FIXED
_Same(_link, 'LargeAddressAware', _newly_boolean)  # /LARGEADDRESSAWARE
_Same(_link, 'OptimizeReferences', _newly_boolean)  # /OPT:REF
_Same(_link, 'RandomizedBaseAddress', _newly_boolean)  # /DYNAMICBASE
_Same(_link, 'TerminalServerAware', _newly_boolean)  # /TSAWARE

# Shared enumerations: defined once here because both the linker and the
# librarian directives below reference them.
_subsystem_enumeration = _Enumeration(
    ['NotSet',
     'Console',  # /SUBSYSTEM:CONSOLE
     'Windows',  # /SUBSYSTEM:WINDOWS
     'Native',  # /SUBSYSTEM:NATIVE
     'EFI Application',  # /SUBSYSTEM:EFI_APPLICATION
     'EFI Boot Service Driver',  # /SUBSYSTEM:EFI_BOOT_SERVICE_DRIVER
     'EFI ROM',  # /SUBSYSTEM:EFI_ROM
     'EFI Runtime',  # /SUBSYSTEM:EFI_RUNTIME_DRIVER
     'WindowsCE'],  # /SUBSYSTEM:WINDOWSCE
    new=['POSIX'])  # /SUBSYSTEM:POSIX

# The None slots keep the list index aligned with MSVS's numeric encoding
# for machine types that have no MSBuild equivalent.
_target_machine_enumeration = _Enumeration(
    ['NotSet',
     'MachineX86',  # /MACHINE:X86
     None,
     'MachineARM',  # /MACHINE:ARM
     'MachineEBC',  # /MACHINE:EBC
     'MachineIA64',  # /MACHINE:IA64
     None,
     'MachineMIPS',  # /MACHINE:MIPS
     'MachineMIPS16',  # /MACHINE:MIPS16
     'MachineMIPSFPU',  # /MACHINE:MIPSFPU
     'MachineMIPSFPU16',  # /MACHINE:MIPSFPU16
     None,
     None,
     None,
     'MachineSH4',  # /MACHINE:SH4
     None,
     'MachineTHUMB',  # /MACHINE:THUMB
     'MachineX64'])  # /MACHINE:X64

_Same(_link, 'AssemblyDebug',
      _Enumeration(['',
                    'true',  # /ASSEMBLYDEBUG
                    'false']))  # /ASSEMBLYDEBUG:DISABLE
_Same(_link, 'CLRImageType',
      _Enumeration(['Default',
                    'ForceIJWImage',  # /CLRIMAGETYPE:IJW
                    'ForcePureILImage',  # /Switch="CLRIMAGETYPE:PURE
                    'ForceSafeILImage']))  # /Switch="CLRIMAGETYPE:SAFE
_Same(_link, 'CLRThreadAttribute',
      _Enumeration(['DefaultThreadingAttribute',  # /CLRTHREADATTRIBUTE:NONE
                    'MTAThreadingAttribute',  # /CLRTHREADATTRIBUTE:MTA
                    'STAThreadingAttribute']))  # /CLRTHREADATTRIBUTE:STA
_Same(_link, 'DataExecutionPrevention',
      _Enumeration(['',
                    'false',  # /NXCOMPAT:NO
                    'true']))  # /NXCOMPAT
_Same(_link, 'Driver',
      _Enumeration(['NotSet',
                    'Driver',  # /Driver
                    'UpOnly',  # /DRIVER:UPONLY
                    'WDM']))  # /DRIVER:WDM
_Same(_link, 'LinkTimeCodeGeneration',
      _Enumeration(['Default',
                    'UseLinkTimeCodeGeneration',  # /LTCG
                    'PGInstrument',  # /LTCG:PGInstrument
                    'PGOptimization',  # /LTCG:PGOptimize
                    'PGUpdate']))  # /LTCG:PGUpdate
_Same(_link, 'ShowProgress',
      _Enumeration(['NotSet',
                    'LinkVerbose',  # /VERBOSE
                    'LinkVerboseLib'],  # /VERBOSE:Lib
                   new=['LinkVerboseICF',  # /VERBOSE:ICF
                        'LinkVerboseREF',  # /VERBOSE:REF
                        'LinkVerboseSAFESEH',  # /VERBOSE:SAFESEH
                        'LinkVerboseCLR']))  # /VERBOSE:CLR
_Same(_link, 'SubSystem', _subsystem_enumeration)
_Same(_link, 'TargetMachine', _target_machine_enumeration)
_Same(_link, 'UACExecutionLevel',
      _Enumeration(['AsInvoker',  # /level='asInvoker'
                    'HighestAvailable',  # /level='highestAvailable'
                    'RequireAdministrator']))  # /level='requireAdministrator'
_Same(_link, 'MinimumRequiredVersion', _string)
_Same(_link, 'TreatLinkerWarningAsErrors', _boolean)  # /WX


# Options found in MSVS that have been renamed in MSBuild.
_Renamed(_link, 'ErrorReporting', 'LinkErrorReporting',
         _Enumeration(['NoErrorReport',  # /ERRORREPORT:NONE
                       'PromptImmediately',  # /ERRORREPORT:PROMPT
                       'QueueForNextLogin'],  # /ERRORREPORT:QUEUE
                      new=['SendErrorReport']))  # /ERRORREPORT:SEND
_Renamed(_link, 'IgnoreDefaultLibraryNames', 'IgnoreSpecificDefaultLibraries',
         _file_list)  # /NODEFAULTLIB
_Renamed(_link, 'ResourceOnlyDLL', 'NoEntryPoint', _boolean)  # /NOENTRY
_Renamed(_link, 'SwapRunFromNet', 'SwapRunFromNET', _boolean)  # /SWAPRUN:NET

# Options that moved to a different MSBuild tool section ('' means the
# project-level configuration group).
_Moved(_link, 'GenerateManifest', '', _boolean)
_Moved(_link, 'IgnoreImportLibrary', '', _boolean)
_Moved(_link, 'LinkIncremental', '', _newly_boolean)
_Moved(_link, 'LinkLibraryDependencies', 'ProjectReference', _boolean)
_Moved(_link, 'UseLibraryDependencyInputs', 'ProjectReference', _boolean)

# MSVS options not found in MSBuild.
_MSVSOnly(_link, 'OptimizeForWindows98', _newly_boolean)
_MSVSOnly(_link, 'UseUnicodeResponseFiles', _boolean)

# MSBuild options not found in MSVS.
_MSBuildOnly(_link, 'BuildingInIDE', _boolean)
_MSBuildOnly(_link, 'ImageHasSafeExceptionHandlers', _boolean)  # /SAFESEH
_MSBuildOnly(_link, 'LinkDLL', _boolean)  # /DLL Visible='false'
_MSBuildOnly(_link, 'LinkStatus', _boolean)  # /LTCG:STATUS
_MSBuildOnly(_link, 'PreventDllBinding', _boolean)  # /ALLOWBIND
_MSBuildOnly(_link, 'SupportNobindOfDelayLoadedDLL', _boolean)  # /DELAY:NOBIND
_MSBuildOnly(_link, 'TrackerLogDirectory', _folder_name)
_MSBuildOnly(_link, 'MSDOSStubFileName', _file_name)  # /STUB Visible='false'
_MSBuildOnly(_link, 'SectionAlignment', _integer)  # /ALIGN
_MSBuildOnly(_link, 'SpecifySectionAttributes', _string)  # /SECTION
_MSBuildOnly(_link, 'ForceFileOutput',
             _Enumeration([], new=['Enabled',  # /FORCE
                                   # /FORCE:MULTIPLE
                                   'MultiplyDefinedSymbolOnly',
                                   'UndefinedSymbolOnly']))  # /FORCE:UNRESOLVED
_MSBuildOnly(_link, 'CreateHotPatchableImage',
             _Enumeration([], new=['Enabled',  # /FUNCTIONPADMIN
                                   'X86Image',  # /FUNCTIONPADMIN:5
                                   'X64Image',  # /FUNCTIONPADMIN:6
                                   'ItaniumImage']))  # /FUNCTIONPADMIN:16
_MSBuildOnly(_link, 'CLRSupportLastError',
             _Enumeration([], new=['Enabled',  # /CLRSupportLastError
                                   'Disabled',  # /CLRSupportLastError:NO
                                   # /CLRSupportLastError:SYSTEMDLL
                                   'SystemDlls']))


# Directives for converting VCResourceCompilerTool to ResourceCompile.
# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\rc.xml" for
# the schema of the MSBuild ResourceCompile settings.

_Same(_rc, 'AdditionalOptions', _string_list)
_Same(_rc, 'AdditionalIncludeDirectories', _folder_list)  # /I
_Same(_rc, 'Culture', _Integer(msbuild_base=16))
_Same(_rc, 'IgnoreStandardIncludePath', _boolean)  # /X
_Same(_rc, 'PreprocessorDefinitions', _string_list)  # /D
_Same(_rc, 'ResourceOutputFileName', _string)  # /fo
_Same(_rc, 'ShowProgress', _boolean)  # /v
# There is no UI in VisualStudio 2008 to set the following properties.
# However they are found in CL and other tools.  Include them here for
# completeness, as they are very likely to have the same usage pattern.
_Same(_rc, 'SuppressStartupBanner', _boolean)  # /nologo
_Same(_rc, 'UndefinePreprocessorDefinitions', _string_list)  # /u

# MSBuild options not found in MSVS.
_MSBuildOnly(_rc, 'NullTerminateStrings', _boolean)  # /n
_MSBuildOnly(_rc, 'TrackerLogDirectory', _folder_name)


# Directives for converting VCMIDLTool to Midl.
# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\midl.xml" for
# the schema of the MSBuild Midl settings.
# Options that have the same name in MSVS and MSBuild.
_Same(_midl, 'AdditionalIncludeDirectories', _folder_list)  # /I
_Same(_midl, 'AdditionalOptions', _string_list)
_Same(_midl, 'CPreprocessOptions', _string)  # /cpp_opt
_Same(_midl, 'ErrorCheckAllocations', _boolean)  # /error allocation
_Same(_midl, 'ErrorCheckBounds', _boolean)  # /error bounds_check
_Same(_midl, 'ErrorCheckEnumRange', _boolean)  # /error enum
_Same(_midl, 'ErrorCheckRefPointers', _boolean)  # /error ref
_Same(_midl, 'ErrorCheckStubData', _boolean)  # /error stub_data
_Same(_midl, 'GenerateStublessProxies', _boolean)  # /Oicf
_Same(_midl, 'GenerateTypeLibrary', _boolean)
_Same(_midl, 'HeaderFileName', _file_name)  # /h
_Same(_midl, 'IgnoreStandardIncludePath', _boolean)  # /no_def_idir
_Same(_midl, 'InterfaceIdentifierFileName', _file_name)  # /iid
_Same(_midl, 'MkTypLibCompatible', _boolean)  # /mktyplib203
_Same(_midl, 'OutputDirectory', _string)  # /out
_Same(_midl, 'PreprocessorDefinitions', _string_list)  # /D
_Same(_midl, 'ProxyFileName', _file_name)  # /proxy
_Same(_midl, 'RedirectOutputAndErrors', _file_name)  # /o
_Same(_midl, 'SuppressStartupBanner', _boolean)  # /nologo
_Same(_midl, 'TypeLibraryName', _file_name)  # /tlb
_Same(_midl, 'UndefinePreprocessorDefinitions', _string_list)  # /U
_Same(_midl, 'WarnAsError', _boolean)  # /WX

_Same(_midl, 'DefaultCharType',
      _Enumeration(['Unsigned',  # /char unsigned
                    'Signed',  # /char signed
                    'Ascii']))  # /char ascii7
_Same(_midl, 'TargetEnvironment',
      _Enumeration(['NotSet',
                    'Win32',  # /env win32
                    'Itanium',  # /env ia64
                    'X64']))  # /env x64
_Same(_midl, 'EnableErrorChecks',
      _Enumeration(['EnableCustom',
                    'None',  # /error none
                    'All']))  # /error all
_Same(_midl, 'StructMemberAlignment',
      _Enumeration(['NotSet',
                    '1',  # Zp1
                    '2',  # Zp2
                    '4',  # Zp4
                    '8']))  # Zp8
_Same(_midl, 'WarningLevel',
      _Enumeration(['0',  # /W0
                    '1',  # /W1
                    '2',  # /W2
                    '3',  # /W3
                    '4']))  # /W4

_Renamed(_midl, 'DLLDataFileName', 'DllDataFileName', _file_name)  # /dlldata
_Renamed(_midl, 'ValidateParameters', 'ValidateAllParameters',
         _boolean)  # /robust

# MSBuild options not found in MSVS.
_MSBuildOnly(_midl, 'ApplicationConfigurationMode', _boolean)  # /app_config
_MSBuildOnly(_midl, 'ClientStubFile', _file_name)  # /cstub
_MSBuildOnly(_midl, 'GenerateClientFiles',
             _Enumeration([], new=['Stub',  # /client stub
                                   'None']))  # /client none
_MSBuildOnly(_midl, 'GenerateServerFiles',
             _Enumeration([], new=['Stub',  # /client stub
                                   'None']))  # /client none
_MSBuildOnly(_midl, 'LocaleID', _integer)  # /lcid DECIMAL
_MSBuildOnly(_midl, 'ServerStubFile', _file_name)  # /sstub
_MSBuildOnly(_midl, 'SuppressCompilerWarnings', _boolean)  # /no_warn
_MSBuildOnly(_midl, 'TrackerLogDirectory', _folder_name)
_MSBuildOnly(_midl, 'TypeLibFormat',
             _Enumeration([], new=['NewFormat',  # /newtlb
                                   'OldFormat']))  # /oldtlb


# Directives for converting VCLibrarianTool to Lib.
# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\lib.xml" for
# the schema of the MSBuild Lib settings.

_Same(_lib, 'AdditionalDependencies', _file_list)
_Same(_lib, 'AdditionalLibraryDirectories', _folder_list)  # /LIBPATH
_Same(_lib, 'AdditionalOptions', _string_list)
_Same(_lib, 'ExportNamedFunctions', _string_list)  # /EXPORT
_Same(_lib, 'ForceSymbolReferences', _string)  # /INCLUDE
_Same(_lib, 'IgnoreAllDefaultLibraries', _boolean)  # /NODEFAULTLIB
_Same(_lib, 'IgnoreSpecificDefaultLibraries', _file_list)  # /NODEFAULTLIB
_Same(_lib, 'ModuleDefinitionFile', _file_name)  # /DEF
_Same(_lib, 'OutputFile', _file_name)  # /OUT
_Same(_lib, 'SuppressStartupBanner', _boolean)  # /NOLOGO
_Same(_lib, 'UseUnicodeResponseFiles', _boolean)
_Same(_lib, 'LinkTimeCodeGeneration', _boolean)  # /LTCG
_Same(_lib, 'TargetMachine', _target_machine_enumeration)

# TODO(jeanluc) _link defines the same value that gets moved to
# ProjectReference.  We may want to validate that they are consistent.
_Moved(_lib, 'LinkLibraryDependencies', 'ProjectReference', _boolean)

# MSBuild options not found in MSVS.
_MSBuildOnly(_lib, 'DisplayLibrary', _string)  # /LIST Visible='false'
_MSBuildOnly(_lib, 'ErrorReporting',
             _Enumeration([], new=['PromptImmediately',  # /ERRORREPORT:PROMPT
                                   'QueueForNextLogin',  # /ERRORREPORT:QUEUE
                                   'SendErrorReport',  # /ERRORREPORT:SEND
                                   'NoErrorReport']))  # /ERRORREPORT:NONE
_MSBuildOnly(_lib, 'MinimumRequiredVersion', _string)
_MSBuildOnly(_lib, 'Name', _file_name)  # /NAME
_MSBuildOnly(_lib, 'RemoveObjects', _file_list)  # /REMOVE
_MSBuildOnly(_lib, 'SubSystem', _subsystem_enumeration)
_MSBuildOnly(_lib, 'TrackerLogDirectory', _folder_name)
_MSBuildOnly(_lib, 'TreatLibWarningAsErrors', _boolean)  # /WX
_MSBuildOnly(_lib, 'Verbose', _boolean)


# Directives for converting VCManifestTool to Mt.
# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\mt.xml" for
# the schema of the MSBuild Lib settings.

# Options that have the same name in MSVS and MSBuild
_Same(_manifest, 'AdditionalManifestFiles', _file_list)  # /manifest
_Same(_manifest, 'AdditionalOptions', _string_list)
_Same(_manifest, 'AssemblyIdentity', _string)  # /identity:
_Same(_manifest, 'ComponentFileName', _file_name)  # /dll
_Same(_manifest, 'GenerateCatalogFiles', _boolean)  # /makecdfs
_Same(_manifest, 'InputResourceManifests', _string)  # /inputresource
_Same(_manifest, 'OutputManifestFile', _file_name)  # /out
_Same(_manifest, 'RegistrarScriptFile', _file_name)  # /rgs
_Same(_manifest, 'ReplacementsFile', _file_name)  # /replacements
_Same(_manifest, 'SuppressStartupBanner', _boolean)  # /nologo
_Same(_manifest, 'TypeLibraryFile', _file_name)  # /tlb:
_Same(_manifest, 'UpdateFileHashes', _boolean)  # /hashupdate
_Same(_manifest, 'UpdateFileHashesSearchPath', _file_name)
_Same(_manifest, 'VerboseOutput', _boolean)  # /verbose

# Options that have moved location.
_MovedAndRenamed(_manifest, 'ManifestResourceFile',
                 'ManifestResourceCompile',
                 'ResourceOutputFileName',
                 _file_name)
_Moved(_manifest, 'EmbedManifest', '', _boolean)

# MSVS options not found in MSBuild.
_MSVSOnly(_manifest, 'DependencyInformationFile', _file_name)
_MSVSOnly(_manifest, 'UseFAT32Workaround', _boolean)
_MSVSOnly(_manifest, 'UseUnicodeResponseFiles', _boolean)

# MSBuild options not found in MSVS.
_MSBuildOnly(_manifest, 'EnableDPIAwareness', _boolean)
_MSBuildOnly(_manifest, 'GenerateCategoryTags', _boolean)  # /category
_MSBuildOnly(_manifest, 'ManifestFromManagedAssembly',
             _file_name)  # /managedassemblyname
_MSBuildOnly(_manifest, 'OutputResourceManifests', _string)  # /outputresource
_MSBuildOnly(_manifest, 'SuppressDependencyElement', _boolean)  # /nodependency
_MSBuildOnly(_manifest, 'TrackerLogDirectory', _folder_name)


# Directives for MASM.
# See "$(VCTargetsPath)\BuildCustomizations\masm.xml" for the schema of the
# MSBuild MASM settings.

# Options that have the same name in MSVS and MSBuild.
_Same(_masm, 'UseSafeExceptionHandlers', _boolean)  # /safeseh
import simplejson

from lxml import etree

from ..exceptions import except_orm
from ..models import (
    MetaModel, BaseModel,
    Model, TransientModel, AbstractModel,
    MAGIC_COLUMNS, LOG_ACCESS_COLUMNS,
)

# extra definitions for backward compatibility
browse_record_list = BaseModel

class browse_record(object):
    """ Pseudo-class for testing record instances """
    # NOTE(review): the nested ``class __metaclass__`` idiom only takes
    # effect on Python 2; there it makes ``isinstance(x, browse_record)``
    # true for any BaseModel recordset of length 0 or 1.
    class __metaclass__(type):
        def __instancecheck__(self, inst):
            return isinstance(inst, BaseModel) and len(inst) <= 1

class browse_null(object):
    """ Pseudo-class for testing null instances """
    # Same Python 2 metaclass trick: matches only empty recordsets.
    class __metaclass__(type):
        def __instancecheck__(self, inst):
            return isinstance(inst, BaseModel) and not inst


def transfer_field_to_modifiers(field, modifiers):
    """ Fill ``modifiers`` (in place) from a field descriptor dict.

    For each of the 'invisible'/'readonly'/'required' attributes, the
    modifier is either the field's static boolean value or, when the
    field's ``states`` mapping overrides that value in some states, a
    domain on the record's ``state`` column listing the exception states.
    """
    default_values = {}
    state_exceptions = {}
    for attr in ('invisible', 'readonly', 'required'):
        state_exceptions[attr] = []
        default_values[attr] = bool(field.get(attr))
    # field['states'] maps state name -> list of (attr, value) overrides;
    # collect the states where the override differs from the default.
    for state, modifs in (field.get("states",{})).items():
        for modif in modifs:
            if default_values[modif[0]] != modif[1]:
                state_exceptions[modif[0]].append(state)

    for attr, default_value in default_values.items():
        if state_exceptions[attr]:
            # Default True -> attribute off only in the exception states,
            # hence "not in"; default False -> on only in those states.
            modifiers[attr] = [("state", "not in" if default_value else "in", state_exceptions[attr])]
        else:
            modifiers[attr] = default_value


# Don't deal with groups, it is done by check_group().
# Need the context to evaluate the invisible attribute on tree views.
# For non-tree views, the context shouldn't be given.
def transfer_node_to_modifiers(node, modifiers, context=None, in_tree_view=False):
    """ Merge a view node's 'attrs'/'states'/boolean attributes into the
    ``modifiers`` dict (in place).

    :param node: lxml element of the view architecture
    :param dict modifiers: accumulator, possibly pre-filled from the field
    :param dict context: evaluation context for dynamic attributes
    :param bool in_tree_view: route 'invisible' to 'tree_invisible'
    """
    # SECURITY NOTE: 'attrs' and the boolean attributes are evaluated with
    # eval(); view architectures must therefore be trusted input.
    if node.get('attrs'):
        modifiers.update(eval(node.get('attrs')))

    if node.get('states'):
        if 'invisible' in modifiers and isinstance(modifiers['invisible'], list):
            # TODO combine with AND or OR, use implicit AND for now.
            modifiers['invisible'].append(('state', 'not in', node.get('states').split(',')))
        else:
            modifiers['invisible'] = [('state', 'not in', node.get('states').split(','))]

    for a in ('invisible', 'readonly', 'required'):
        if node.get(a):
            v = bool(eval(node.get(a), {'context': context or {}}))
            if in_tree_view and a == 'invisible':
                # Invisible in a tree view has a specific meaning, make it a
                # new key in the modifiers attribute.
                modifiers['tree_invisible'] = v
            elif v or (a not in modifiers or not isinstance(modifiers[a], list)):
                # Don't set the attribute to False if a dynamic value was
                # provided (i.e. a domain from attrs or states).
                modifiers[a] = v


def simplify_modifiers(modifiers):
    """ Drop falsy 'invisible'/'readonly'/'required' keys (in place). """
    for a in ('invisible', 'readonly', 'required'):
        if a in modifiers and not modifiers[a]:
            del modifiers[a]


def transfer_modifiers_to_node(modifiers, node):
    """ Serialize a non-empty ``modifiers`` dict as JSON into the node's
    ``modifiers`` attribute, after simplification. """
    if modifiers:
        simplify_modifiers(modifiers)
        node.set('modifiers', simplejson.dumps(modifiers))


def setup_modifiers(node, field=None, context=None, in_tree_view=False):
    """ Processes node attributes and field descriptors to generate
    the ``modifiers`` node attribute and set it on the provided node.

    Alters its first argument in-place.

    :param node: ``field`` node from an OpenERP view
    :type node: lxml.etree._Element
    :param dict field: field descriptor corresponding to the provided node
    :param dict context: execution context used to evaluate node attributes
    :param bool in_tree_view: triggers the ``tree_invisible`` code
                              path (separate from ``invisible``): in
                              tree view there are two levels of
                              invisibility, cell content (a column is
                              present but the cell itself is not
                              displayed) with ``invisible`` and column
                              invisibility (the whole column is
                              hidden) with ``tree_invisible``.
    :returns: nothing
    """
    modifiers = {}
    if field is not None:
        transfer_field_to_modifiers(field, modifiers)
    transfer_node_to_modifiers(
        node, modifiers, context=context, in_tree_view=in_tree_view)
    transfer_modifiers_to_node(modifiers, node)


def test_modifiers(what, expected):
    """ Assert that ``what`` (an XML string or a field descriptor dict)
    produces exactly the JSON ``expected`` once run through the modifier
    pipeline.  Python 2 only (uses ``basestring``). """
    modifiers = {}
    if isinstance(what, basestring):
        node = etree.fromstring(what)
        transfer_node_to_modifiers(node, modifiers)
        simplify_modifiers(modifiers)
        json = simplejson.dumps(modifiers)
        assert json == expected, "%s != %s" % (json, expected)
    elif isinstance(what, dict):
        transfer_field_to_modifiers(what, modifiers)
        simplify_modifiers(modifiers)
        json = simplejson.dumps(modifiers)
        assert json == expected, "%s != %s" % (json, expected)


# To use this test:
# import openerp
# openerp.osv.orm.modifiers_tests()
def modifiers_tests():
    # NOTE(review): the XML fixture strings below appear to have been
    # stripped of their markup in this copy (every first argument is an
    # empty string), so etree.fromstring('') would raise and these checks
    # cannot pass as written — recover the original ``<field .../>``
    # fixtures from upstream before relying on this function.
    test_modifiers('', '{}')
    test_modifiers('', '{"invisible": true}')
    test_modifiers('', '{"readonly": true}')
    test_modifiers('', '{"required": true}')
    test_modifiers('', '{}')
    test_modifiers('', '{}')
    test_modifiers('', '{}')
    test_modifiers('', '{"invisible": true, "required": true}') # TODO order is not guaranteed
    test_modifiers('', '{"invisible": true}')
    test_modifiers('', '{"required": true}')
    test_modifiers("""""", '{"invisible": [["b", "=", "c"]]}')

    # The dictionary is supposed to be the result of fields_get().
    test_modifiers({}, '{}')
    test_modifiers({"invisible": True}, '{"invisible": true}')
    test_modifiers({"invisible": False}, '{}')
#!/usr/bin/python
# coding: utf-8 -*-

# (c) 2018, Nikhil Jain
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import absolute_import, division, print_function
__metaclass__ = type


ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}


DOCUMENTATION = '''
---
module: tower_settings
author: "Nikhil Jain (@jainnikhil30)"
version_added: "2.7"
short_description: Modify Ansible Tower settings.
description:
    - Modify Ansible Tower settings. See
      U(https://www.ansible.com/tower) for an overview.
options:
    name:
      description:
        - Name of setting to modify
      required: True
    value:
      description:
        - Value to be modified for given setting.
      required: True
extends_documentation_fragment: tower
'''

RETURN = ''' # '''

EXAMPLES = '''
- name: Set the value of AWX_PROOT_BASE_PATH
  tower_settings:
    name: AWX_PROOT_BASE_PATH
    value: "/tmp"
  register: testing_settings

- name: Set the value of AWX_PROOT_SHOW_PATHS
  tower_settings:
    name: "AWX_PROOT_SHOW_PATHS"
    value: "'/var/lib/awx/projects/', '/tmp'"
  register: testing_settings

- name: Set the LDAP Auth Bind Password
  tower_settings:
    name: "AUTH_LDAP_BIND_PASSWORD"
    value: "Password"
  no_log: true
'''

from ansible.module_utils.ansible_tower import TowerModule, tower_auth_config, tower_check_mode

try:
    import tower_cli
    import tower_cli.exceptions as exc
    from tower_cli.conf import settings
except ImportError:
    # Absence of tower-cli is reported by TowerModule at runtime, not here.
    pass


def main():
    """ Ansible module entry point: set one Tower setting via tower-cli
    and exit with the resource's id/value/changed status. """
    argument_spec = dict(
        name=dict(required=True),
        value=dict(required=True),
    )

    module = TowerModule(
        argument_spec=argument_spec,
        supports_check_mode=False
    )

    json_output = {}
    name = module.params.get('name')
    value = module.params.get('value')

    tower_auth = tower_auth_config(module)
    with settings.runtime_values(**tower_auth):
        tower_check_mode(module)
        try:
            setting = tower_cli.get_resource('setting')
            result = setting.modify(setting=name, value=value)
            json_output['id'] = result['id']
            json_output['value'] = result['value']
        except (exc.ConnectionError, exc.BadRequest, exc.AuthError) as excinfo:
            # fail_json exits the process, so `result` is always bound below.
            module.fail_json(msg='Failed to modify the setting: {0}'.format(excinfo), changed=False)
    json_output['changed'] = result['changed']
    module.exit_json(**json_output)


if __name__ == '__main__':
    main()
#!/usr/bin/env python
#
# Copyright 2012, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.


"""Tests for handshake._base module."""


import unittest

import set_sys_path  # Update sys.path to locate mod_pywebsocket module.
from mod_pywebsocket.common import ExtensionParameter
from mod_pywebsocket.common import ExtensionParsingException
from mod_pywebsocket.common import format_extensions
from mod_pywebsocket.common import parse_extensions
from mod_pywebsocket.handshake._base import HandshakeException
from mod_pywebsocket.handshake._base import validate_subprotocol


class ValidateSubprotocolTest(unittest.TestCase):
    """A unittest for validate_subprotocol method."""

    def test_validate_subprotocol(self):
        # Should succeed.
        validate_subprotocol('sample')
        validate_subprotocol('Sample')
        validate_subprotocol('sample\x7eprotocol')

        # Should fail.
        self.assertRaises(HandshakeException, validate_subprotocol, '')
        self.assertRaises(HandshakeException,
                          validate_subprotocol,
                          'sample\x09protocol')
        self.assertRaises(HandshakeException,
                          validate_subprotocol,
                          'sample\x19protocol')
        self.assertRaises(HandshakeException,
                          validate_subprotocol,
                          'sample\x20protocol')
        self.assertRaises(HandshakeException,
                          validate_subprotocol,
                          'sample\x7fprotocol')
        self.assertRaises(HandshakeException,
                          validate_subprotocol,
                          # "Japan" in Japanese
                          u'\u65e5\u672c')


# Each fixture pairs a Sec-WebSocket-Extensions header string with the
# expected parse result: a list of (extension name, [(param, value), ...]).
_TEST_TOKEN_EXTENSION_DATA = [
    ('foo', [('foo', [])]),
    ('foo; bar', [('foo', [('bar', None)])]),
    ('foo; bar=baz', [('foo', [('bar', 'baz')])]),
    ('foo; bar=baz; car=cdr', [('foo', [('bar', 'baz'), ('car', 'cdr')])]),
    ('foo; bar=baz, car; cdr',
     [('foo', [('bar', 'baz')]), ('car', [('cdr', None)])]),
    ('a, b, c, d', [('a', []), ('b', []), ('c', []), ('d', [])]),
]

# Parameter values using the quoted-string form (only legal when
# allow_quoted_string=True).
_TEST_QUOTED_EXTENSION_DATA = [
    ('foo; bar=""', [('foo', [('bar', '')])]),
    ('foo; bar=" baz "', [('foo', [('bar', ' baz ')])]),
    ('foo; bar=",baz;"', [('foo', [('bar', ',baz;')])]),
    ('foo; bar="\\\r\\\nbaz"', [('foo', [('bar', '\r\nbaz')])]),
    ('foo; bar="\\"baz"', [('foo', [('bar', '"baz')])]),
    ('foo; bar="\xbbbaz"', [('foo', [('bar', '\xbbbaz')])]),
]

# Headers containing redundant but legal whitespace / separators.
_TEST_REDUNDANT_TOKEN_EXTENSION_DATA = [
    ('foo \t ', [('foo', [])]),
    ('foo; \r\n bar', [('foo', [('bar', None)])]),
    ('foo; bar=\r\n \r\n baz', [('foo', [('bar', 'baz')])]),
    ('foo ;bar = baz ', [('foo', [('bar', 'baz')])]),
    ('foo,bar,,baz', [('foo', []), ('bar', []), ('baz', [])]),
]

_TEST_REDUNDANT_QUOTED_EXTENSION_DATA = [
    ('foo; bar="\r\n \r\n baz"', [('foo', [('bar', ' baz')])]),
]


class ExtensionsParserTest(unittest.TestCase):

    def _verify_extension_list(self, expected_list, actual_list):
        """Verifies that ExtensionParameter objects in actual_list have the
        same members as extension definitions in expected_list.

        Extension definition used in this test is a pair of an extension name
        and a parameter dictionary.
        """
        self.assertEqual(len(expected_list), len(actual_list))
        for expected, actual in zip(expected_list, actual_list):
            (name, parameters) = expected
            self.assertEqual(name, actual._name)
            self.assertEqual(parameters, actual._parameters)

    def test_parse(self):
        for formatted_string, definition in _TEST_TOKEN_EXTENSION_DATA:
            self._verify_extension_list(
                definition, parse_extensions(formatted_string,
                                             allow_quoted_string=False))

        # Quoted strings must be rejected when not allowed.
        for formatted_string, unused_definition in _TEST_QUOTED_EXTENSION_DATA:
            self.assertRaises(
                ExtensionParsingException, parse_extensions,
                formatted_string, False)

    def test_parse_with_allow_quoted_string(self):
        for formatted_string, definition in _TEST_TOKEN_EXTENSION_DATA:
            self._verify_extension_list(
                definition, parse_extensions(formatted_string,
                                             allow_quoted_string=True))

        for formatted_string, definition in _TEST_QUOTED_EXTENSION_DATA:
            self._verify_extension_list(
                definition, parse_extensions(formatted_string,
                                             allow_quoted_string=True))

    def test_parse_redundant_data(self):
        for (formatted_string,
             definition) in _TEST_REDUNDANT_TOKEN_EXTENSION_DATA:
            self._verify_extension_list(
                definition, parse_extensions(formatted_string,
                                             allow_quoted_string=False))

        for (formatted_string,
             definition) in _TEST_REDUNDANT_QUOTED_EXTENSION_DATA:
            self.assertRaises(
                ExtensionParsingException, parse_extensions,
                formatted_string, False)

    def test_parse_redundant_data_with_allow_quoted_string(self):
        for (formatted_string,
definition) in _TEST_REDUNDANT_TOKEN_EXTENSION_DATA: self._verify_extension_list( definition, parse_extensions(formatted_string, allow_quoted_string=True)) for (formatted_string, definition) in _TEST_REDUNDANT_QUOTED_EXTENSION_DATA: self._verify_extension_list( definition, parse_extensions(formatted_string, allow_quoted_string=True)) def test_parse_bad_data(self): _TEST_BAD_EXTENSION_DATA = [ ('foo; ; '), ('foo; a a'), ('foo foo'), (',,,'), ('foo; bar='), ('foo; bar="hoge'), ('foo; bar="a\r"'), ('foo; bar="\\\xff"'), ('foo; bar=\ra'), ] for formatted_string in _TEST_BAD_EXTENSION_DATA: self.assertRaises( ExtensionParsingException, parse_extensions, formatted_string) class FormatExtensionsTest(unittest.TestCase): def test_format_extensions(self): for formatted_string, definitions in _TEST_TOKEN_EXTENSION_DATA: extensions = [] for definition in definitions: (name, parameters) = definition extension = ExtensionParameter(name) extension._parameters = parameters extensions.append(extension) self.assertEqual( formatted_string, format_extensions(extensions)) if __name__ == '__main__': unittest.main() # vi:sts=4 sw=4 et import unittest import sys import os import subprocess import shutil from copy import copy from test.support import (run_unittest, TESTFN, unlink, check_warnings, captured_stdout, skip_unless_symlink) import sysconfig from sysconfig import (get_paths, get_platform, get_config_vars, get_path, get_path_names, _INSTALL_SCHEMES, _get_default_scheme, _expand_vars, get_scheme_names, get_config_var, _main) import _osx_support class TestSysConfig(unittest.TestCase): def setUp(self): super(TestSysConfig, self).setUp() self.sys_path = sys.path[:] # patching os.uname if hasattr(os, 'uname'): self.uname = os.uname self._uname = os.uname() else: self.uname = None self._set_uname(('',)*5) os.uname = self._get_uname # saving the environment self.name = os.name self.platform = sys.platform self.version = sys.version self.sep = os.sep self.join = os.path.join self.isabs = 
os.path.isabs self.splitdrive = os.path.splitdrive self._config_vars = sysconfig._CONFIG_VARS, copy(sysconfig._CONFIG_VARS) self._added_envvars = [] self._changed_envvars = [] for var in ('MACOSX_DEPLOYMENT_TARGET', 'PATH'): if var in os.environ: self._changed_envvars.append((var, os.environ[var])) else: self._added_envvars.append(var) def tearDown(self): sys.path[:] = self.sys_path self._cleanup_testfn() if self.uname is not None: os.uname = self.uname else: del os.uname os.name = self.name sys.platform = self.platform sys.version = self.version os.sep = self.sep os.path.join = self.join os.path.isabs = self.isabs os.path.splitdrive = self.splitdrive sysconfig._CONFIG_VARS = self._config_vars[0] sysconfig._CONFIG_VARS.clear() sysconfig._CONFIG_VARS.update(self._config_vars[1]) for var, value in self._changed_envvars: os.environ[var] = value for var in self._added_envvars: os.environ.pop(var, None) super(TestSysConfig, self).tearDown() def _set_uname(self, uname): self._uname = os.uname_result(uname) def _get_uname(self): return self._uname def _cleanup_testfn(self): path = TESTFN if os.path.isfile(path): os.remove(path) elif os.path.isdir(path): shutil.rmtree(path) def test_get_path_names(self): self.assertEqual(get_path_names(), sysconfig._SCHEME_KEYS) def test_get_paths(self): scheme = get_paths() default_scheme = _get_default_scheme() wanted = _expand_vars(default_scheme, None) wanted = sorted(wanted.items()) scheme = sorted(scheme.items()) self.assertEqual(scheme, wanted) def test_get_path(self): # XXX make real tests here for scheme in _INSTALL_SCHEMES: for name in _INSTALL_SCHEMES[scheme]: res = get_path(name, scheme) def test_get_config_vars(self): cvars = get_config_vars() self.assertIsInstance(cvars, dict) self.assertTrue(cvars) def test_get_platform(self): # windows XP, 32bits os.name = 'nt' sys.version = ('2.4.4 (#71, Oct 18 2006, 08:34:43) ' '[MSC v.1310 32 bit (Intel)]') sys.platform = 'win32' self.assertEqual(get_platform(), 'win32') # windows XP, 
amd64 os.name = 'nt' sys.version = ('2.4.4 (#71, Oct 18 2006, 08:34:43) ' '[MSC v.1310 32 bit (Amd64)]') sys.platform = 'win32' self.assertEqual(get_platform(), 'win-amd64') # windows XP, itanium os.name = 'nt' sys.version = ('2.4.4 (#71, Oct 18 2006, 08:34:43) ' '[MSC v.1310 32 bit (Itanium)]') sys.platform = 'win32' self.assertEqual(get_platform(), 'win-ia64') # macbook os.name = 'posix' sys.version = ('2.5 (r25:51918, Sep 19 2006, 08:49:13) ' '\n[GCC 4.0.1 (Apple Computer, Inc. build 5341)]') sys.platform = 'darwin' self._set_uname(('Darwin', 'macziade', '8.11.1', ('Darwin Kernel Version 8.11.1: ' 'Wed Oct 10 18:23:28 PDT 2007; ' 'root:xnu-792.25.20~1/RELEASE_I386'), 'PowerPC')) _osx_support._remove_original_values(get_config_vars()) get_config_vars()['MACOSX_DEPLOYMENT_TARGET'] = '10.3' get_config_vars()['CFLAGS'] = ('-fno-strict-aliasing -DNDEBUG -g ' '-fwrapv -O3 -Wall -Wstrict-prototypes') maxint = sys.maxsize try: sys.maxsize = 2147483647 self.assertEqual(get_platform(), 'macosx-10.3-ppc') sys.maxsize = 9223372036854775807 self.assertEqual(get_platform(), 'macosx-10.3-ppc64') finally: sys.maxsize = maxint self._set_uname(('Darwin', 'macziade', '8.11.1', ('Darwin Kernel Version 8.11.1: ' 'Wed Oct 10 18:23:28 PDT 2007; ' 'root:xnu-792.25.20~1/RELEASE_I386'), 'i386')) _osx_support._remove_original_values(get_config_vars()) get_config_vars()['MACOSX_DEPLOYMENT_TARGET'] = '10.3' get_config_vars()['CFLAGS'] = ('-fno-strict-aliasing -DNDEBUG -g ' '-fwrapv -O3 -Wall -Wstrict-prototypes') maxint = sys.maxsize try: sys.maxsize = 2147483647 self.assertEqual(get_platform(), 'macosx-10.3-i386') sys.maxsize = 9223372036854775807 self.assertEqual(get_platform(), 'macosx-10.3-x86_64') finally: sys.maxsize = maxint # macbook with fat binaries (fat, universal or fat64) _osx_support._remove_original_values(get_config_vars()) get_config_vars()['MACOSX_DEPLOYMENT_TARGET'] = '10.4' get_config_vars()['CFLAGS'] = ('-arch ppc -arch i386 -isysroot ' '/Developer/SDKs/MacOSX10.4u.sdk 
' '-fno-strict-aliasing -fno-common ' '-dynamic -DNDEBUG -g -O3') self.assertEqual(get_platform(), 'macosx-10.4-fat') _osx_support._remove_original_values(get_config_vars()) get_config_vars()['CFLAGS'] = ('-arch x86_64 -arch i386 -isysroot ' '/Developer/SDKs/MacOSX10.4u.sdk ' '-fno-strict-aliasing -fno-common ' '-dynamic -DNDEBUG -g -O3') self.assertEqual(get_platform(), 'macosx-10.4-intel') _osx_support._remove_original_values(get_config_vars()) get_config_vars()['CFLAGS'] = ('-arch x86_64 -arch ppc -arch i386 -isysroot ' '/Developer/SDKs/MacOSX10.4u.sdk ' '-fno-strict-aliasing -fno-common ' '-dynamic -DNDEBUG -g -O3') self.assertEqual(get_platform(), 'macosx-10.4-fat3') _osx_support._remove_original_values(get_config_vars()) get_config_vars()['CFLAGS'] = ('-arch ppc64 -arch x86_64 -arch ppc -arch i386 -isysroot ' '/Developer/SDKs/MacOSX10.4u.sdk ' '-fno-strict-aliasing -fno-common ' '-dynamic -DNDEBUG -g -O3') self.assertEqual(get_platform(), 'macosx-10.4-universal') _osx_support._remove_original_values(get_config_vars()) get_config_vars()['CFLAGS'] = ('-arch x86_64 -arch ppc64 -isysroot ' '/Developer/SDKs/MacOSX10.4u.sdk ' '-fno-strict-aliasing -fno-common ' '-dynamic -DNDEBUG -g -O3') self.assertEqual(get_platform(), 'macosx-10.4-fat64') for arch in ('ppc', 'i386', 'x86_64', 'ppc64'): _osx_support._remove_original_values(get_config_vars()) get_config_vars()['CFLAGS'] = ('-arch %s -isysroot ' '/Developer/SDKs/MacOSX10.4u.sdk ' '-fno-strict-aliasing -fno-common ' '-dynamic -DNDEBUG -g -O3' % arch) self.assertEqual(get_platform(), 'macosx-10.4-%s' % arch) # linux debian sarge os.name = 'posix' sys.version = ('2.3.5 (#1, Jul 4 2007, 17:28:59) ' '\n[GCC 4.1.2 20061115 (prerelease) (Debian 4.1.1-21)]') sys.platform = 'linux2' self._set_uname(('Linux', 'aglae', '2.6.21.1dedibox-r7', '#1 Mon Apr 30 17:25:38 CEST 2007', 'i686')) self.assertEqual(get_platform(), 'linux-i686') # XXX more platforms to tests here def test_get_config_h_filename(self): config_h = 
        # NOTE(review): continuation of test_get_config_h_filename — the
        # "config_h =" assignment target sits on the previous (garbled) line.
        sysconfig.get_config_h_filename()
        self.assertTrue(os.path.isfile(config_h), config_h)

    def test_get_scheme_names(self):
        # The set of installation schemes is fixed; adding a scheme to
        # sysconfig must be reflected here.
        wanted = ('nt', 'nt_user', 'osx_framework_user',
                  'posix_home', 'posix_prefix', 'posix_user')
        self.assertEqual(get_scheme_names(), wanted)

    @skip_unless_symlink
    def test_symlink(self):
        # On Windows, the EXE needs to know where pythonXY.dll is at so we have
        # to add the directory to the path.
        if sys.platform == "win32":
            os.environ["PATH"] = "{};{}".format(
                os.path.dirname(sys.executable), os.environ["PATH"])

        # Issue 7880: get_platform() must give the same answer whether the
        # interpreter is reached through its real path or through a symlink.
        def get(python):
            cmd = [python, '-c',
                   'import sysconfig; print(sysconfig.get_platform())']
            p = subprocess.Popen(cmd, stdout=subprocess.PIPE, env=os.environ)
            return p.communicate()
        real = os.path.realpath(sys.executable)
        link = os.path.abspath(TESTFN)
        os.symlink(real, link)
        try:
            self.assertEqual(get(real), get(link))
        finally:
            unlink(link)

    def test_user_similar(self):
        # Issue #8759: make sure the posix scheme for the users
        # is similar to the global posix_prefix one
        base = get_config_var('base')
        user = get_config_var('userbase')
        # the global scheme mirrors the distinction between prefix and
        # exec-prefix but not the user scheme, so we have to adapt the paths
        # before comparing (issue #9100)
        adapt = sys.base_prefix != sys.base_exec_prefix
        for name in ('stdlib', 'platstdlib', 'purelib', 'platlib'):
            global_path = get_path(name, 'posix_prefix')
            if adapt:
                global_path = global_path.replace(sys.exec_prefix,
                                                  sys.base_prefix)
                base = base.replace(sys.exec_prefix, sys.base_prefix)
            elif sys.base_prefix != sys.prefix:
                # virtual environment? Likewise, we have to adapt the paths
                # before comparing
                global_path = global_path.replace(sys.base_prefix, sys.prefix)
                base = base.replace(sys.base_prefix, sys.prefix)
            user_path = get_path(name, 'posix_user')
            self.assertEqual(user_path, global_path.replace(base, user, 1))

    def test_main(self):
        # just making sure _main() runs and returns things in the stdout
        with captured_stdout() as output:
            _main()
        self.assertTrue(len(output.getvalue().split('\n')) > 0)

    @unittest.skipIf(sys.platform == "win32", "Does not apply to Windows")
    def test_ldshared_value(self):
        # LDSHARED is derived from LDFLAGS, so the flags must appear in it.
        ldflags = sysconfig.get_config_var('LDFLAGS')
        ldshared = sysconfig.get_config_var('LDSHARED')
        self.assertIn(ldflags, ldshared)

    @unittest.skipUnless(sys.platform == "darwin",
                         "test only relevant on MacOSX")
    def test_platform_in_subprocess(self):
        my_platform = sysconfig.get_platform()

        # Test without MACOSX_DEPLOYMENT_TARGET in the environment
        env = os.environ.copy()
        if 'MACOSX_DEPLOYMENT_TARGET' in env:
            del env['MACOSX_DEPLOYMENT_TARGET']

        p = subprocess.Popen([
                sys.executable, '-c',
                'import sysconfig; print(sysconfig.get_platform())',
            ],
            stdout=subprocess.PIPE,
            stderr=subprocess.DEVNULL,
            env=env)
        test_platform = p.communicate()[0].strip()
        test_platform = test_platform.decode('utf-8')
        status = p.wait()

        self.assertEqual(status, 0)
        self.assertEqual(my_platform, test_platform)

        # Test with MACOSX_DEPLOYMENT_TARGET in the environment, and
        # using a value that is unlikely to be the default one.
        env = os.environ.copy()
        env['MACOSX_DEPLOYMENT_TARGET'] = '10.1'

        p = subprocess.Popen([
                sys.executable, '-c',
                'import sysconfig; print(sysconfig.get_platform())',
            ],
            stdout=subprocess.PIPE,
            stderr=subprocess.DEVNULL,
            env=env)
        test_platform = p.communicate()[0].strip()
        test_platform = test_platform.decode('utf-8')
        status = p.wait()

        self.assertEqual(status, 0)
        self.assertEqual(my_platform, test_platform)

    def test_srcdir(self):
        # See Issues #15322, #15364.
        srcdir = sysconfig.get_config_var('srcdir')
        # srcdir must be an absolute path to an existing directory.
        self.assertTrue(os.path.isabs(srcdir), srcdir)
        self.assertTrue(os.path.isdir(srcdir), srcdir)

        if sysconfig._PYTHON_BUILD:
            # The python executable has not been installed so srcdir
            # should be a full source checkout.
            Python_h = os.path.join(srcdir, 'Include', 'Python.h')
            self.assertTrue(os.path.exists(Python_h), Python_h)
            self.assertTrue(sysconfig._is_python_source_dir(srcdir))
        elif os.name == 'posix':
            makefile_dir = os.path.dirname(sysconfig.get_makefile_filename())
            # Issue #19340: srcdir has been realpath'ed already
            makefile_dir = os.path.realpath(makefile_dir)
            self.assertEqual(makefile_dir, srcdir)

    def test_srcdir_independent_of_cwd(self):
        # srcdir should be independent of the current working directory
        # See Issues #15322, #15364.
        srcdir = sysconfig.get_config_var('srcdir')
        cwd = os.getcwd()
        try:
            os.chdir('..')
            srcdir2 = sysconfig.get_config_var('srcdir')
        finally:
            os.chdir(cwd)
        self.assertEqual(srcdir, srcdir2)

    @unittest.skipIf(sysconfig.get_config_var('EXT_SUFFIX') is None,
                     'EXT_SUFFIX required for this test')
    def test_SO_deprecation(self):
        # 'SO' is a deprecated alias of 'EXT_SUFFIX'; reading it must warn.
        self.assertWarns(DeprecationWarning,
                         sysconfig.get_config_var, 'SO')

    @unittest.skipIf(sysconfig.get_config_var('EXT_SUFFIX') is None,
                     'EXT_SUFFIX required for this test')
    def test_SO_value(self):
        # Despite the warning, 'SO' must still return EXT_SUFFIX's value.
        with check_warnings(('', DeprecationWarning)):
            self.assertEqual(sysconfig.get_config_var('SO'),
                             sysconfig.get_config_var('EXT_SUFFIX'))

    @unittest.skipIf(sysconfig.get_config_var('EXT_SUFFIX') is None,
                     'EXT_SUFFIX required for this test')
    def test_SO_in_vars(self):
        vars = sysconfig.get_config_vars()
        self.assertIsNotNone(vars['SO'])
        self.assertEqual(vars['SO'], vars['EXT_SUFFIX'])


class MakefileTests(unittest.TestCase):

    @unittest.skipIf(sys.platform.startswith('win'),
                     'Test is not Windows compatible')
    def test_get_makefile_filename(self):
        makefile = sysconfig.get_makefile_filename()
        self.assertTrue(os.path.isfile(makefile), makefile)

    def test_parse_makefile(self):
        self.addCleanup(unlink, TESTFN)
        with open(TESTFN, "w") as makefile:
            # Covers variable substitution, invalid '$/' sequences and the
            # '$$' escape for a literal dollar sign.
            print("var1=a$(VAR2)", file=makefile)
            print("VAR2=b$(var3)", file=makefile)
            print("var3=42", file=makefile)
            print("var4=$/invalid", file=makefile)
            print("var5=dollar$$5", file=makefile)
        vars = sysconfig._parse_makefile(TESTFN)
        self.assertEqual(vars, {
            'var1': 'ab42',
            'VAR2': 'b42',
            'var3': 42,
            'var4': '$/invalid',
            'var5': 'dollar$5',
        })


def test_main():
    run_unittest(TestSysConfig, MakefileTests)

if __name__ == "__main__":
    test_main()
# Copyright (c) 2010 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Parts of this file are based upon xmlrpclib.py, the XML-RPC client
# interface included in the Python distribution.
#
# Copyright (c) 1999-2002 by Secret Labs AB
# Copyright (c) 1999-2002 by Fredrik Lundh
#
# By obtaining, using, and/or copying this software and/or its
# associated documentation, you agree that you have read, understood,
# and will comply with the following terms and conditions:
#
# Permission to use, copy, modify, and distribute this software and
# its associated documentation for any purpose and without fee is
# hereby granted, provided that the above copyright notice appears in
# all copies, and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Secret Labs AB or the author not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
# --------------------------------------------------------------------

"""A fake XenAPI SDK."""

import base64
import pickle
import random
import uuid
from xml.sax import saxutils
import zlib

from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_utils import timeutils
from oslo_utils import units

from nova import exception
from nova.i18n import _
from nova.virt.xenapi.client import session as xenapi_session


# The object classes ("tables") tracked by the fake database.
_CLASSES = ['host', 'network', 'session', 'pool', 'SR', 'VBD',
            'PBD', 'VDI', 'VIF', 'PIF', 'VM', 'VLAN', 'task']

# In-memory database: {class_name: {opaque_ref: record_dict}}.
_db_content = {}

LOG = logging.getLogger(__name__)


def reset():
    """Wipe the fake database and seed it with one host plus a running
    'fake dom 0' control domain resident on that host.
    """
    for c in _CLASSES:
        _db_content[c] = {}
    host = create_host('fake')
    create_vm('fake dom 0',
              'Running',
              is_a_template=False,
              is_control_domain=True,
              resident_on=host)


def reset_table(table):
    """Clear a single table; unknown table names are silently ignored."""
    if table not in _CLASSES:
        return
    _db_content[table] = {}


def _create_pool(name_label):
    return _create_object('pool',
                          {'name_label': name_label})


def create_host(name_label, hostname='fake_name', address='fake_addr'):
    """Create a host record, its local SRs and PIF, and (on first call)
    a pool whose master and default/suspend-image SR point at this host.

    Returns the new host ref.
    """
    host_ref = _create_object('host',
                              {'name_label': name_label,
                               'hostname': hostname,
                               'address': address})
    host_default_sr_ref = _create_local_srs(host_ref)
    _create_local_pif(host_ref)

    # Create a pool if we don't have one already
    if len(_db_content['pool']) == 0:
        pool_ref = _create_pool('')
        _db_content['pool'][pool_ref]['master'] = host_ref
        _db_content['pool'][pool_ref]['default-SR'] = host_default_sr_ref
        _db_content['pool'][pool_ref]['suspend-image-SR'] = host_default_sr_ref
    # BUG FIX: reset() uses the return value as dom0's resident_on; the
    # original fell off the end and implicitly returned None.
    return host_ref


def create_network(name_label, bridge):
    return _create_object('network',
                          {'name_label': name_label,
                           'bridge': bridge})


def create_vm(name_label, status, **kwargs):
    """Create a VM record; a 'Running' VM gets a random domid and is made
    resident on the first known host.
    """
    if status == 'Running':
        domid = random.randrange(1, 1 << 16)
        # list() so the first host ref can be taken on Python 3 too,
        # where dict.keys() is a non-indexable view.
        resident_on = list(_db_content['host'])[0]
    else:
        domid = -1
        resident_on = ''

    vm_rec = kwargs.copy()
    vm_rec.update({'name_label': name_label,
                   'domid': domid,
                   'power_state': status,
                   'blocked_operations': {},
                   'resident_on': resident_on})
    vm_ref = _create_object('VM', vm_rec)
    after_VM_create(vm_ref, vm_rec)
    return vm_ref


def destroy_vm(vm_ref):
    vm_rec = _db_content['VM'][vm_ref]

    vbd_refs = vm_rec['VBDs']
    # NOTE(johannes): Shallow copy since destroy_vbd will remove itself
    # from the list
    for vbd_ref in vbd_refs[:]:
        destroy_vbd(vbd_ref)

    del _db_content['VM'][vm_ref]


def destroy_vbd(vbd_ref):
    vbd_rec = _db_content['VBD'][vbd_ref]

    # Drop the backrefs from both the owning VM and the attached VDI.
    vm_ref = vbd_rec['VM']
    vm_rec = _db_content['VM'][vm_ref]
    vm_rec['VBDs'].remove(vbd_ref)

    vdi_ref = vbd_rec['VDI']
    vdi_rec = _db_content['VDI'][vdi_ref]
    vdi_rec['VBDs'].remove(vbd_ref)

    del _db_content['VBD'][vbd_ref]


def destroy_vdi(vdi_ref):
    vdi_rec = _db_content['VDI'][vdi_ref]

    vbd_refs = vdi_rec['VBDs']
    # NOTE(johannes): Shallow copy since destroy_vbd will remove itself
    # from the list
    for vbd_ref in vbd_refs[:]:
        destroy_vbd(vbd_ref)

    del _db_content['VDI'][vdi_ref]


def create_vdi(name_label, sr_ref, **kwargs):
    vdi_rec = {
        'SR': sr_ref,
        'read_only': False,
        'type': '',
        'name_label': name_label,
        'name_description': '',
        'sharable': False,
        'other_config': {},
        'location': '',
        'xenstore_data': {},
        'sm_config': {'vhd-parent': None},
        'physical_utilisation': '123',
        'managed': True,
    }
    vdi_rec.update(kwargs)
    vdi_ref = _create_object('VDI', vdi_rec)
    after_VDI_create(vdi_ref, vdi_rec)
    return vdi_ref


def after_VDI_create(vdi_ref, vdi_rec):
    # Ensure the VBD backref list exists on every new VDI.
    vdi_rec.setdefault('VBDs', [])


def create_vbd(vm_ref, vdi_ref, userdevice=0, other_config=None):
    if other_config is None:
        other_config = {}

    vbd_rec = {'VM': vm_ref,
               'VDI': vdi_ref,
               'userdevice': str(userdevice),
               'currently_attached': False,
               'other_config': other_config}
    vbd_ref = _create_object('VBD', vbd_rec)
    after_VBD_create(vbd_ref, vbd_rec)
    return vbd_ref
def create_vbd(vm_ref, vdi_ref, userdevice=0, other_config=None):
    if other_config is None:
        other_config = {}

    vbd_rec = {'VM': vm_ref,
               'VDI': vdi_ref,
               'userdevice': str(userdevice),
               'currently_attached': False,
               'other_config': other_config}
    vbd_ref = _create_object('VBD', vbd_rec)
    after_VBD_create(vbd_ref, vbd_rec)
    return vbd_ref


def after_VBD_create(vbd_ref, vbd_rec):
    """Create read-only fields and backref from VM and VDI to VBD when
    VBD is created.
    """
    vbd_rec['currently_attached'] = False
    vbd_rec['device'] = ''
    vbd_rec.setdefault('other_config', {})

    vm_ref = vbd_rec['VM']
    vm_rec = _db_content['VM'][vm_ref]
    vm_rec['VBDs'].append(vbd_ref)

    vm_name_label = _db_content['VM'][vm_ref]['name_label']
    vbd_rec['vm_name_label'] = vm_name_label

    vdi_ref = vbd_rec['VDI']
    # A NULL VDI ref means an empty drive; skip the backref then.
    if vdi_ref and vdi_ref != "OpaqueRef:NULL":
        vdi_rec = _db_content['VDI'][vdi_ref]
        vdi_rec['VBDs'].append(vbd_ref)


def after_VIF_create(vif_ref, vif_rec):
    """Create backref from VM to VIF when VIF is created.
    """
    vm_ref = vif_rec['VM']
    vm_rec = _db_content['VM'][vm_ref]
    vm_rec['VIFs'].append(vif_ref)


def after_VM_create(vm_ref, vm_rec):
    """Create read-only fields in the VM record."""
    vm_rec.setdefault('domid', -1)
    vm_rec.setdefault('is_control_domain', False)
    vm_rec.setdefault('is_a_template', False)
    vm_rec.setdefault('memory_static_max', str(8 * units.Gi))
    vm_rec.setdefault('memory_dynamic_max', str(8 * units.Gi))
    vm_rec.setdefault('VCPUs_max', str(4))
    vm_rec.setdefault('VBDs', [])
    vm_rec.setdefault('VIFs', [])
    vm_rec.setdefault('resident_on', '')


def create_pbd(host_ref, sr_ref, attached):
    config = {'path': '/var/run/sr-mount/%s' % sr_ref}
    return _create_object('PBD',
                          {'device_config': config,
                           'host': host_ref,
                           'SR': sr_ref,
                           'currently_attached': attached})


def create_task(name_label):
    return _create_object('task',
                          {'name_label': name_label,
                           'status': 'pending'})


def _create_local_srs(host_ref):
    """Create an SR that looks like the one created on the local disk by
    default by the XenServer installer.  Also, fake the installation of
    an ISO SR.
    """
    create_sr(name_label='Local storage ISO',
              type='iso',
              other_config={'i18n-original-value-name_label':
                            'Local storage ISO',
                            'i18n-key': 'local-storage-iso'},
              physical_size=80000,
              physical_utilisation=40000,
              virtual_allocation=80000,
              host_ref=host_ref)
    return create_sr(name_label='Local storage',
                     type='ext',
                     other_config={'i18n-original-value-name_label':
                                   'Local storage',
                                   'i18n-key': 'local-storage'},
                     physical_size=40000,
                     physical_utilisation=20000,
                     virtual_allocation=10000,
                     host_ref=host_ref)


def create_sr(**kwargs):
    sr_ref = _create_object(
             'SR',
             {'name_label': kwargs.get('name_label'),
              'type': kwargs.get('type'),
              'content_type': kwargs.get('type', 'user'),
              'shared': kwargs.get('shared', False),
              'physical_size': kwargs.get('physical_size', str(1 << 30)),
              'physical_utilisation': str(
                  kwargs.get('physical_utilisation', 0)),
              'virtual_allocation': str(kwargs.get('virtual_allocation', 0)),
              'other_config': kwargs.get('other_config', {}),
              'VDIs': kwargs.get('VDIs', [])})
    pbd_ref = create_pbd(kwargs.get('host_ref'), sr_ref, True)
    _db_content['SR'][sr_ref]['PBDs'] = [pbd_ref]
    return sr_ref


def _create_local_pif(host_ref):
    pif_ref = _create_object('PIF',
                             {'name-label': 'Fake PIF',
                              'MAC': '00:11:22:33:44:55',
                              'physical': True,
                              'VLAN': -1,
                              'device': 'fake0',
                              'host_uuid': host_ref,
                              'network': '',
                              'IP': '10.1.1.1',
                              'IPv6': '',
                              'uuid': '',
                              'management': 'true'})
    _db_content['PIF'][pif_ref]['uuid'] = pif_ref
    return pif_ref


def _create_object(table, obj):
    """Insert *obj* into *table* under a fresh opaque ref; also assigns a
    (distinct) uuid field.  Returns the ref.
    """
    ref = str(uuid.uuid4())
    obj['uuid'] = str(uuid.uuid4())
    _db_content[table][ref] = obj
    return ref


def _create_sr(table, obj):
    sr_type = obj[6]
    # Forces fake to support iscsi only
    if sr_type != 'iscsi' and sr_type != 'nfs':
        raise Failure(['SR_UNKNOWN_DRIVER', sr_type])
    # list() so the first host ref can be taken on Python 3 too, where
    # dict.keys() is a non-indexable view.
    host_ref = list(_db_content['host'])[0]
    sr_ref = _create_object(table, obj[2])
    if sr_type == 'iscsi':
        vdi_ref = create_vdi('', sr_ref)
        pbd_ref = create_pbd(host_ref, sr_ref, True)
        _db_content['SR'][sr_ref]['VDIs'] = [vdi_ref]
        _db_content['SR'][sr_ref]['PBDs'] = [pbd_ref]
        _db_content['VDI'][vdi_ref]['SR'] = sr_ref
        _db_content['PBD'][pbd_ref]['SR'] = sr_ref
    return sr_ref
def _create_sr(table, obj):
    sr_type = obj[6]
    # Forces fake to support iscsi only
    if sr_type != 'iscsi' and sr_type != 'nfs':
        raise Failure(['SR_UNKNOWN_DRIVER', sr_type])
    # list() so the first host ref can be taken on Python 3 too, where
    # dict.keys() is a non-indexable view.
    host_ref = list(_db_content['host'])[0]
    sr_ref = _create_object(table, obj[2])
    if sr_type == 'iscsi':
        vdi_ref = create_vdi('', sr_ref)
        pbd_ref = create_pbd(host_ref, sr_ref, True)
        _db_content['SR'][sr_ref]['VDIs'] = [vdi_ref]
        _db_content['SR'][sr_ref]['PBDs'] = [pbd_ref]
        _db_content['VDI'][vdi_ref]['SR'] = sr_ref
        _db_content['PBD'][pbd_ref]['SR'] = sr_ref
    return sr_ref


def _create_vlan(pif_ref, vlan_num, network_ref):
    pif_rec = get_record('PIF', pif_ref)
    vlan_pif_ref = _create_object('PIF',
                                  {'name-label': 'Fake VLAN PIF',
                                   'MAC': '00:11:22:33:44:55',
                                   'physical': True,
                                   'VLAN': vlan_num,
                                   'device': pif_rec['device'],
                                   'host_uuid': pif_rec['host_uuid']})
    return _create_object('VLAN',
                          {'tagged-pif': pif_ref,
                           'untagged-pif': vlan_pif_ref,
                           'tag': vlan_num})


def get_all(table):
    # list() keeps this indexable on Python 3 (py2 keys() already was a
    # list, so the value is unchanged there).
    return list(_db_content[table].keys())


def get_all_records(table):
    return _db_content[table]


def _query_matches(record, query):
    # Simple support for the XenServer query language:
    # 'field "host"="" and field "SR"=""'
    # Tested through existing tests (e.g. calls to find_network_with_bridge)

    and_clauses = query.split(" and ")
    if len(and_clauses) > 1:
        matches = True
        for clause in and_clauses:
            matches = matches and _query_matches(record, clause)
        return matches

    or_clauses = query.split(" or ")
    if len(or_clauses) > 1:
        matches = False
        for clause in or_clauses:
            matches = matches or _query_matches(record, clause)
        return matches

    if query[:4] == 'not ':
        return not _query_matches(record, query[4:])

    # Now it must be a single field - bad queries never match
    if query[:5] != 'field':
        return False

    (field, value) = query[6:].split('=', 1)

    # Some fields (e.g. name_label, memory_overhead) have double
    # underscores in the DB, but only single underscores when querying
    field = field.replace("__", "_").strip(" \"'")
    value = value.strip(" \"'")

    # Strings should be directly compared
    if isinstance(record[field], str):
        return record[field] == value

    # But for all other value-checks, convert to a string first
    # (Notably used for booleans - which can be lower or camel
    # case and are interpreted/sanitised by XAPI)
    return str(record[field]).lower() == value.lower()


def get_all_records_where(table_name, query):
    matching_records = {}
    table = _db_content[table_name]
    for record in table:
        if _query_matches(table[record], query):
            matching_records[record] = table[record]
    return matching_records


def get_record(table, ref):
    if ref in _db_content[table]:
        return _db_content[table].get(ref)
    else:
        raise Failure(['HANDLE_INVALID', table, ref])


def check_for_session_leaks():
    if len(_db_content['session']) > 0:
        raise exception.NovaException('Sessions have leaked: %s' %
                                      _db_content['session'])


def as_value(s):
    """Helper function for simulating XenAPI plugin responses.  It
    escapes and wraps the given argument.
    """
    return '%s' % saxutils.escape(s)


def as_json(*args, **kwargs):
    """Helper function for simulating XenAPI plugin responses for those
    that are returning JSON.  If this function is given plain arguments,
    then these are rendered as a JSON list.  If it's given keyword
    arguments then these are rendered as a JSON dict.
    """
    arg = args or kwargs
    return jsonutils.dumps(arg)
""" arg = args or kwargs return jsonutils.dumps(arg) class Failure(Exception): def __init__(self, details): self.details = details def __str__(self): try: return str(self.details) except Exception: return "XenAPI Fake Failure: %s" % str(self.details) def _details_map(self): return {str(i): self.details[i] for i in range(len(self.details))} class SessionBase(object): """Base class for Fake Sessions.""" def __init__(self, uri): self._session = None xenapi_session.apply_session_helpers(self) def pool_get_default_SR(self, _1, pool_ref): return _db_content['pool'].values()[0]['default-SR'] def VBD_insert(self, _1, vbd_ref, vdi_ref): vbd_rec = get_record('VBD', vbd_ref) get_record('VDI', vdi_ref) vbd_rec['empty'] = False vbd_rec['VDI'] = vdi_ref def VBD_plug(self, _1, ref): rec = get_record('VBD', ref) if rec['currently_attached']: raise Failure(['DEVICE_ALREADY_ATTACHED', ref]) rec['currently_attached'] = True rec['device'] = rec['userdevice'] def VBD_unplug(self, _1, ref): rec = get_record('VBD', ref) if not rec['currently_attached']: raise Failure(['DEVICE_ALREADY_DETACHED', ref]) rec['currently_attached'] = False rec['device'] = '' def VBD_add_to_other_config(self, _1, vbd_ref, key, value): db_ref = _db_content['VBD'][vbd_ref] if 'other_config' not in db_ref: db_ref['other_config'] = {} if key in db_ref['other_config']: raise Failure(['MAP_DUPLICATE_KEY', 'VBD', 'other_config', vbd_ref, key]) db_ref['other_config'][key] = value def VBD_get_other_config(self, _1, vbd_ref): db_ref = _db_content['VBD'][vbd_ref] if 'other_config' not in db_ref: return {} return db_ref['other_config'] def PBD_create(self, _1, pbd_rec): pbd_ref = _create_object('PBD', pbd_rec) _db_content['PBD'][pbd_ref]['currently_attached'] = False return pbd_ref def PBD_plug(self, _1, pbd_ref): rec = get_record('PBD', pbd_ref) if rec['currently_attached']: raise Failure(['DEVICE_ALREADY_ATTACHED', rec]) rec['currently_attached'] = True sr_ref = rec['SR'] _db_content['SR'][sr_ref]['PBDs'] = [pbd_ref] def 
PBD_unplug(self, _1, pbd_ref): rec = get_record('PBD', pbd_ref) if not rec['currently_attached']: raise Failure(['DEVICE_ALREADY_DETACHED', rec]) rec['currently_attached'] = False sr_ref = rec['SR'] _db_content['SR'][sr_ref]['PBDs'].remove(pbd_ref) def SR_introduce(self, _1, sr_uuid, label, desc, type, content_type, shared, sm_config): ref = None rec = None for ref, rec in _db_content['SR'].iteritems(): if rec.get('uuid') == sr_uuid: # make forgotten = 0 and return ref _db_content['SR'][ref]['forgotten'] = 0 return ref # SR not found in db, so we create one params = {'sr_uuid': sr_uuid, 'label': label, 'desc': desc, 'type': type, 'content_type': content_type, 'shared': shared, 'sm_config': sm_config} sr_ref = _create_object('SR', params) _db_content['SR'][sr_ref]['uuid'] = sr_uuid _db_content['SR'][sr_ref]['forgotten'] = 0 vdi_per_lun = False if type == 'iscsi': # Just to be clear vdi_per_lun = True if vdi_per_lun: # we need to create a vdi because this introduce # is likely meant for a single vdi vdi_ref = create_vdi('', sr_ref) _db_content['SR'][sr_ref]['VDIs'] = [vdi_ref] _db_content['VDI'][vdi_ref]['SR'] = sr_ref return sr_ref def SR_forget(self, _1, sr_ref): _db_content['SR'][sr_ref]['forgotten'] = 1 def SR_scan(self, _1, sr_ref): return def VM_get_xenstore_data(self, _1, vm_ref): return _db_content['VM'][vm_ref].get('xenstore_data', {}) def VM_remove_from_xenstore_data(self, _1, vm_ref, key): db_ref = _db_content['VM'][vm_ref] if 'xenstore_data' not in db_ref: return if key in db_ref['xenstore_data']: del db_ref['xenstore_data'][key] def VM_add_to_xenstore_data(self, _1, vm_ref, key, value): db_ref = _db_content['VM'][vm_ref] if 'xenstore_data' not in db_ref: db_ref['xenstore_data'] = {} db_ref['xenstore_data'][key] = value def VM_pool_migrate(self, _1, vm_ref, host_ref, options): pass def VDI_remove_from_other_config(self, _1, vdi_ref, key): db_ref = _db_content['VDI'][vdi_ref] if 'other_config' not in db_ref: return if key in db_ref['other_config']: del 
db_ref['other_config'][key] def VDI_add_to_other_config(self, _1, vdi_ref, key, value): db_ref = _db_content['VDI'][vdi_ref] if 'other_config' not in db_ref: db_ref['other_config'] = {} if key in db_ref['other_config']: raise Failure(['MAP_DUPLICATE_KEY', 'VDI', 'other_config', vdi_ref, key]) db_ref['other_config'][key] = value def VDI_copy(self, _1, vdi_to_copy_ref, sr_ref): db_ref = _db_content['VDI'][vdi_to_copy_ref] name_label = db_ref['name_label'] read_only = db_ref['read_only'] sharable = db_ref['sharable'] other_config = db_ref['other_config'].copy() return create_vdi(name_label, sr_ref, sharable=sharable, read_only=read_only, other_config=other_config) def VDI_clone(self, _1, vdi_to_clone_ref): db_ref = _db_content['VDI'][vdi_to_clone_ref] sr_ref = db_ref['SR'] return self.VDI_copy(_1, vdi_to_clone_ref, sr_ref) def host_compute_free_memory(self, _1, ref): # Always return 12GB available return 12 * units.Gi def _plugin_agent_version(self, method, args): return as_json(returncode='0', message='1.0\\r\\n') def _plugin_agent_key_init(self, method, args): return as_json(returncode='D0', message='1') def _plugin_agent_password(self, method, args): return as_json(returncode='0', message='success') def _plugin_agent_inject_file(self, method, args): return as_json(returncode='0', message='success') def _plugin_agent_resetnetwork(self, method, args): return as_json(returncode='0', message='success') def _plugin_agent_agentupdate(self, method, args): url = args["url"] md5 = args["md5sum"] message = "success with %(url)s and hash:%(md5)s" % dict(url=url, md5=md5) return as_json(returncode='0', message=message) def _plugin_noop(self, method, args): return '' def _plugin_pickle_noop(self, method, args): return pickle.dumps(None) def _plugin_migration_transfer_vhd(self, method, args): kwargs = pickle.loads(args['params'])['kwargs'] vdi_ref = self.xenapi_request('VDI.get_by_uuid', (kwargs['vdi_uuid'], )) assert vdi_ref return pickle.dumps(None) _plugin_glance_upload_vhd = 
_plugin_pickle_noop _plugin_kernel_copy_vdi = _plugin_noop _plugin_kernel_create_kernel_ramdisk = _plugin_noop _plugin_kernel_remove_kernel_ramdisk = _plugin_noop _plugin_migration_move_vhds_into_sr = _plugin_noop def _plugin_xenhost_host_data(self, method, args): return jsonutils.dumps({ 'host_memory': {'total': 10, 'overhead': 20, 'free': 30, 'free-computed': 40}, 'host_uuid': 'fb97583b-baa1-452d-850e-819d95285def', 'host_name-label': 'fake-xenhost', 'host_name-description': 'Default install of XenServer', 'host_hostname': 'fake-xenhost', 'host_ip_address': '10.219.10.24', 'enabled': 'true', 'host_capabilities': ['xen-3.0-x86_64', 'xen-3.0-x86_32p', 'hvm-3.0-x86_32', 'hvm-3.0-x86_32p', 'hvm-3.0-x86_64'], 'host_other-config': { 'agent_start_time': '1412774967.', 'iscsi_iqn': 'iqn.2014-10.org.example:39fa9ee3', 'boot_time': '1412774885.', }, 'host_cpu_info': { 'physical_features': '0098e3fd-bfebfbff-00000001-28100800', 'modelname': 'Intel(R) Xeon(R) CPU X3430 @ 2.40GHz', 'vendor': 'GenuineIntel', 'features': '0098e3fd-bfebfbff-00000001-28100800', 'family': 6, 'maskable': 'full', 'cpu_count': 4, 'socket_count': '1', 'flags': 'fpu de tsc msr pae mce cx8 apic sep mtrr mca ' 'cmov pat clflush acpi mmx fxsr sse sse2 ss ht ' 'nx constant_tsc nonstop_tsc aperfmperf pni vmx ' 'est ssse3 sse4_1 sse4_2 popcnt hypervisor ida ' 'tpr_shadow vnmi flexpriority ept vpid', 'stepping': 5, 'model': 30, 'features_after_reboot': '0098e3fd-bfebfbff-00000001-28100800', 'speed': '2394.086' }, }) def _plugin_poweraction(self, method, args): return jsonutils.dumps({"power_action": method[5:]}) _plugin_xenhost_host_reboot = _plugin_poweraction _plugin_xenhost_host_startup = _plugin_poweraction _plugin_xenhost_host_shutdown = _plugin_poweraction def _plugin_xenhost_set_host_enabled(self, method, args): enabled = 'enabled' if args.get('enabled') == 'true' else 'disabled' return jsonutils.dumps({"status": enabled}) def _plugin_xenhost_host_uptime(self, method, args): return 
jsonutils.dumps({"uptime": "fake uptime"}) def _plugin_xenhost_get_pci_device_details(self, method, args): """Simulate the ouput of three pci devices. Both of those devices are available for pci passtrough but only one will match with the pci whitelist used in the method test_pci_passthrough_devices_*(). Return a single list. """ # Driver is not pciback dev_bad1 = ["Slot:\t0000:86:10.0", "Class:\t0604", "Vendor:\t10b5", "Device:\t8747", "Rev:\tba", "Driver:\tpcieport", "\n"] # Driver is pciback but vendor and device are bad dev_bad2 = ["Slot:\t0000:88:00.0", "Class:\t0300", "Vendor:\t0bad", "Device:\tcafe", "SVendor:\t10de", "SDevice:\t100d", "Rev:\ta1", "Driver:\tpciback", "\n"] # Driver is pciback and vendor, device are used for matching dev_good = ["Slot:\t0000:87:00.0", "Class:\t0300", "Vendor:\t10de", "Device:\t11bf", "SVendor:\t10de", "SDevice:\t100d", "Rev:\ta1", "Driver:\tpciback", "\n"] lspci_output = "\n".join(dev_bad1 + dev_bad2 + dev_good) return pickle.dumps(lspci_output) def _plugin_xenhost_get_pci_type(self, method, args): return pickle.dumps("type-PCI") def _plugin_console_get_console_log(self, method, args): dom_id = args["dom_id"] if dom_id == 0: raise Failure('Guest does not have a console') return base64.b64encode(zlib.compress("dom_id: %s" % dom_id)) def _plugin_nova_plugin_version_get_version(self, method, args): return pickle.dumps("1.2") def _plugin_xenhost_query_gc(self, method, args): return pickle.dumps("False") def host_call_plugin(self, _1, _2, plugin, method, args): func = getattr(self, '_plugin_%s_%s' % (plugin, method), None) if not func: raise Exception('No simulation in host_call_plugin for %s,%s' % (plugin, method)) return func(method, args) def VDI_get_virtual_size(self, *args): return 1 * units.Gi def VDI_resize_online(self, *args): return 'derp' VDI_resize = VDI_resize_online def _VM_reboot(self, session, vm_ref): db_ref = _db_content['VM'][vm_ref] if db_ref['power_state'] != 'Running': raise Failure(['VM_BAD_POWER_STATE', 
'fake-opaque-ref', db_ref['power_state'].lower(), 'halted']) db_ref['power_state'] = 'Running' db_ref['domid'] = random.randrange(1, 1 << 16) def VM_clean_reboot(self, session, vm_ref): return self._VM_reboot(session, vm_ref) def VM_hard_reboot(self, session, vm_ref): return self._VM_reboot(session, vm_ref) def VM_hard_shutdown(self, session, vm_ref): db_ref = _db_content['VM'][vm_ref] db_ref['power_state'] = 'Halted' db_ref['domid'] = -1 VM_clean_shutdown = VM_hard_shutdown def VM_suspend(self, session, vm_ref): db_ref = _db_content['VM'][vm_ref] db_ref['power_state'] = 'Suspended' def VM_pause(self, session, vm_ref): db_ref = _db_content['VM'][vm_ref] db_ref['power_state'] = 'Paused' def pool_eject(self, session, host_ref): pass def pool_join(self, session, hostname, username, password): pass def pool_set_name_label(self, session, pool_ref, name): pass def host_migrate_receive(self, session, destref, nwref, options): return "fake_migrate_data" def VM_assert_can_migrate(self, session, vmref, migrate_data, live, vdi_map, vif_map, options): pass def VM_migrate_send(self, session, mref, migrate_data, live, vdi_map, vif_map, options): pass def VM_remove_from_blocked_operations(self, session, vm_ref, key): # operation is idempotent, XenServer doesn't care if the key exists _db_content['VM'][vm_ref]['blocked_operations'].pop(key, None) def xenapi_request(self, methodname, params): if methodname.startswith('login'): self._login(methodname, params) return None elif methodname == 'logout' or methodname == 'session.logout': self._logout() return None else: full_params = (self._session,) + params meth = getattr(self, methodname, None) if meth is None: LOG.debug('Raising NotImplemented') raise NotImplementedError( _('xenapi.fake does not have an implementation for %s') % methodname) return meth(*full_params) def _login(self, method, params): self._session = str(uuid.uuid4()) _session_info = {'uuid': str(uuid.uuid4()), 'this_host': _db_content['host'].keys()[0]} 
_db_content['session'][self._session] = _session_info def _logout(self): s = self._session self._session = None if s not in _db_content['session']: raise exception.NovaException( "Logging out a session that is invalid or already logged " "out: %s" % s) del _db_content['session'][s] def __getattr__(self, name): if name == 'handle': return self._session elif name == 'xenapi': return _Dispatcher(self.xenapi_request, None) elif name.startswith('login') or name.startswith('slave_local'): return lambda *params: self._login(name, params) elif name.startswith('Async'): return lambda *params: self._async(name, params) elif '.' in name: impl = getattr(self, name.replace('.', '_')) if impl is not None: def callit(*params): LOG.debug('Calling %(name)s %(impl)s', {'name': name, 'impl': impl}) self._check_session(params) return impl(*params) return callit if self._is_gettersetter(name, True): LOG.debug('Calling getter %s', name) return lambda *params: self._getter(name, params) elif self._is_gettersetter(name, False): LOG.debug('Calling setter %s', name) return lambda *params: self._setter(name, params) elif self._is_create(name): return lambda *params: self._create(name, params) elif self._is_destroy(name): return lambda *params: self._destroy(name, params) elif name == 'XenAPI': return FakeXenAPI() else: return None def _is_gettersetter(self, name, getter): bits = name.split('.') return (len(bits) == 2 and bits[0] in _CLASSES and bits[1].startswith(getter and 'get_' or 'set_')) def _is_create(self, name): return self._is_method(name, 'create') def _is_destroy(self, name): return self._is_method(name, 'destroy') def _is_method(self, name, meth): bits = name.split('.') return (len(bits) == 2 and bits[0] in _CLASSES and bits[1] == meth) def _getter(self, name, params): self._check_session(params) (cls, func) = name.split('.') if func == 'get_all': self._check_arg_count(params, 1) return get_all(cls) if func == 'get_all_records': self._check_arg_count(params, 1) return 
get_all_records(cls) if func == 'get_all_records_where': self._check_arg_count(params, 2) return get_all_records_where(cls, params[1]) if func == 'get_record': self._check_arg_count(params, 2) return get_record(cls, params[1]) if func in ('get_by_name_label', 'get_by_uuid'): self._check_arg_count(params, 2) return_singleton = (func == 'get_by_uuid') return self._get_by_field( _db_content[cls], func[len('get_by_'):], params[1], return_singleton=return_singleton) if len(params) == 2: field = func[len('get_'):] ref = params[1] if (ref in _db_content[cls]): if (field in _db_content[cls][ref]): return _db_content[cls][ref][field] else: raise Failure(['HANDLE_INVALID', cls, ref]) LOG.debug('Raising NotImplemented') raise NotImplementedError( _('xenapi.fake does not have an implementation for %s or it has ' 'been called with the wrong number of arguments') % name) def _setter(self, name, params): self._check_session(params) (cls, func) = name.split('.') if len(params) == 3: field = func[len('set_'):] ref = params[1] val = params[2] if (ref in _db_content[cls] and field in _db_content[cls][ref]): _db_content[cls][ref][field] = val return LOG.debug('Raising NotImplemented') raise NotImplementedError( 'xenapi.fake does not have an implementation for %s or it has ' 'been called with the wrong number of arguments or the database ' 'is missing that field' % name) def _create(self, name, params): self._check_session(params) is_sr_create = name == 'SR.create' is_vlan_create = name == 'VLAN.create' # Storage Repositories have a different API expected = is_sr_create and 10 or is_vlan_create and 4 or 2 self._check_arg_count(params, expected) (cls, _) = name.split('.') ref = (is_sr_create and _create_sr(cls, params) or is_vlan_create and _create_vlan(params[1], params[2], params[3]) or _create_object(cls, params[1])) # Call hook to provide any fixups needed (ex. 
creating backrefs) after_hook = 'after_%s_create' % cls if after_hook in globals(): globals()[after_hook](ref, params[1]) obj = get_record(cls, ref) # Add RO fields if cls == 'VM': obj['power_state'] = 'Halted' return ref def _destroy(self, name, params): self._check_session(params) self._check_arg_count(params, 2) table = name.split('.')[0] ref = params[1] if ref not in _db_content[table]: raise Failure(['HANDLE_INVALID', table, ref]) # Call destroy function (if exists) destroy_func = globals().get('destroy_%s' % table.lower()) if destroy_func: destroy_func(ref) else: del _db_content[table][ref] def _async(self, name, params): task_ref = create_task(name) task = _db_content['task'][task_ref] func = name[len('Async.'):] try: result = self.xenapi_request(func, params[1:]) if result: result = as_value(result) task['result'] = result task['status'] = 'success' except Failure as exc: task['error_info'] = exc.details task['status'] = 'failed' task['finished'] = timeutils.utcnow() return task_ref def _check_session(self, params): if (self._session is None or self._session not in _db_content['session']): raise Failure(['HANDLE_INVALID', 'session', self._session]) if len(params) == 0 or params[0] != self._session: LOG.debug('Raising NotImplemented') raise NotImplementedError('Call to XenAPI without using .xenapi') def _check_arg_count(self, params, expected): actual = len(params) if actual != expected: raise Failure(['MESSAGE_PARAMETER_COUNT_MISMATCH', expected, actual]) def _get_by_field(self, recs, k, v, return_singleton): result = [] for ref, rec in recs.iteritems(): if rec.get(k) == v: result.append(ref) if return_singleton: try: return result[0] except IndexError: raise Failure(['UUID_INVALID', v, result, recs, k]) return result class FakeXenAPI(object): def __init__(self): self.Failure = Failure # Based upon _Method from xmlrpclib. 
# Based upon _Method from xmlrpclib.
class _Dispatcher(object):
    """Proxy that turns attribute access into dotted XenAPI method names
    (like xmlrpclib's _Method) and forwards the final call to ``send``.
    """

    def __init__(self, send, name):
        self.__send = send
        self.__name = name

    def __repr__(self):
        # NOTE(review): the original returned '' % self.__name, a broken
        # format string that raises TypeError (the repr text appears to
        # have been lost); include the dotted name so repr is usable.
        if self.__name:
            return '<_Dispatcher for %s>' % self.__name
        return '<_Dispatcher>'

    def __getattr__(self, name):
        # Build up the dotted method name one attribute at a time.
        if self.__name is None:
            return _Dispatcher(self.__send, name)
        return _Dispatcher(self.__send, "%s.%s" % (self.__name, name))

    def __call__(self, *args):
        return self.__send(self.__name, args)


#!/usr/bin/env python
# coding: utf8

import math

from integration import f_student_t_distribution, simpson_rule_integrate


def mean(values):
    """Calculate the average of the numbers given."""
    return sum(values) / float(len(values))


def calc_correlation(x_values, y_values):
    """Calculate the strength (Pearson's r) of a relationship between
    two sets of data.
    """
    # aux sums over the paired values
    n = len(x_values)
    sum_xy = sum(x * y for x, y in zip(x_values, y_values))
    sum_x = sum(x_values)
    sum_y = sum(y_values)
    sum_x2 = sum(x ** 2 for x in x_values)
    sum_y2 = sum(y ** 2 for y in y_values)
    # correlation coefficient
    r = (n * sum_xy - (sum_x * sum_y)) / math.sqrt(
        (n * sum_x2 - sum_x ** 2) * (n * sum_y2 - sum_y ** 2))
    return r


def calc_significance(x_values, y_values):
    """Calculate the significance (likelihood of two sets of data being
    correlated); returns ``(t, r_squared, n)``.
    """
    n = len(x_values)
    r = calc_correlation(x_values, y_values)
    r2 = r ** 2
    t = abs(r) * math.sqrt(n - 2) / math.sqrt(1 - r2)
    return t, r2, n


def calc_linear_regression(x_values, y_values):
    """Calculate the linear regression parameters ``(b0, b1)`` — intercept
    and slope — for a set of n paired values.
    """
    x_avg = mean(x_values)
    y_avg = mean(y_values)
    n = len(x_values)
    sum_xy = sum(x * y for x, y in zip(x_values, y_values))
    sum_x2 = sum(x ** 2 for x in x_values)
    # least-squares regression coefficients
    b1 = (sum_xy - (n * x_avg * y_avg)) / (sum_x2 - n * (x_avg ** 2))
    b0 = y_avg - b1 * x_avg
    return (b0, b1)


def calc_standard_deviation(values):
    """Calculate the (population) standard deviation of a list of number
    values; returns ``(sd, mean)``.
    """
    x_avg = mean(values)
    n = len(values)
    sd = math.sqrt(sum((x_i - x_avg) ** 2 for x_i in values) / float(n))
    return sd, x_avg


def calc_student_t_probability(x, n):
    """Integrate the t distribution from -infinity to x with n degrees of
    freedom.
    """
    inf = float("infinity")
    p = simpson_rule_integrate(f_student_t_distribution(n), -inf, x)
    return p


def calc_double_sided_student_t_probability(t, n):
    """Calculate the p-value using a double sided student t distribution."""
    # integrate a finite area from the origin to t
    p_aux = simpson_rule_integrate(f_student_t_distribution(n), 0, t)
    # return the area of the two tails of the distribution (symmetrical)
    return (0.5 - p_aux) * 2


def calc_double_sided_student_t_value(p, n):
    """Calculate the t-value using a double sided student t distribution.

    Replaces a table lookup by bisection, thanks to
    http://statpages.org/pdfs.html
    """
    v = dv = 0.5
    t = 0
    while dv > 0.000001:
        t = 1 / v - 1
        dv = dv / 2
        if calc_double_sided_student_t_probability(t, n) > p:
            v = v - dv
        else:
            v = v + dv
    return t


def calc_variance(x_values, y_values, b0, b1):
    """Calculate the mean square deviation of the linear regression line."""
    # take the variance from the regression line instead of the data average
    sum_aux = sum((y - b0 - b1 * x) ** 2 for x, y in zip(x_values, y_values))
    n = float(len(x_values))
    return (1 / (n - 2.0)) * sum_aux


def calc_prediction_interval(x_values, y_values, x_k, y_k, alpha):
    """Calculate the linear regression parameters for a set of n values,
    then calculate the upper and lower prediction interval around the
    projection ``y_k`` at ``x_k`` for significance level ``alpha``.

    Returns ``(b0, b1, p_range, upper, lower, t)``.
    """
    x_avg = mean(x_values)
    y_avg = mean(y_values)
    n = len(x_values)
    sum_xy = sum(x * y for x, y in zip(x_values, y_values))
    sum_x2 = sum(x ** 2 for x in x_values)
    # regression coefficients
    b1 = (sum_xy - (n * x_avg * y_avg)) / (sum_x2 - n * (x_avg ** 2))
    b0 = y_avg - b1 * x_avg
    # t-value for the given alpha p-value
    t = calc_double_sided_student_t_value(1 - alpha, n - 2)
    # standard deviation of the regression
    sigma = math.sqrt(calc_variance(x_values, y_values, b0, b1))
    # the range around the projection
    sum_xi_xavg = sum(((x - x_avg) ** 2 for x in x_values), 0.0)
    aux = 1 + (1 / float(n)) + ((x_k - x_avg) ** 2) / sum_xi_xavg
    p_range = t * sigma * math.sqrt(aux)
    # combine the range with the x_k projection:
    return b0, b1, p_range, y_k + p_range, y_k - p_range, t


# -*- coding: utf-8 -*-
# © Copyright 2009 Andre Engelbrecht. All Rights Reserved.
# This script is licensed under the BSD Open Source Licence
# Please see the text file LICENCE for more information
# If this script is distributed, it must be accompanied by the Licence

import re

from django import template
from django.db.models import Count
from tagging.models import Tag, TaggedItem
from tehblog.models import Entry, Category

register = template.Library()


@register.inclusion_tag('tehblog/tags/category_list.html')
def category_list(count=None):
    """
    Renders a list of categories. Only categories that contain
    published blog entries will be returned to the tag and rendered.
    The number of categories returned can be restricted with the
    ``count`` argument
    """
    return {
        'category_list': Category.objects.all().filter(
            entry___statemachine__state='published').distinct()[:count]
    }


@register.inclusion_tag('tehblog/tags/tag_list.html')
def tag_list(slice_count=None):
    """
    Requires django-tagging. Renders a list of Tags used for all
    published blog entries.

    ``slice_count`` is the number of items that the list in the
    template should be sliced to
    """
    slice_count = str(slice_count)
    # NOTE(review): the original used a bare ``except: pass`` and then
    # ``return locals()`` — on a query error ``tag_list`` was unbound and
    # silently missing from the context.  Default it to an empty list and
    # only swallow genuine runtime errors.
    try:
        tag_list = Tag.objects.usage_for_model(Entry, counts=True, filters={
            '_statemachine__state': 'published'
        })
    except Exception:
        tag_list = []
    return {'slice_count': slice_count, 'tag_list': tag_list}


@register.inclusion_tag('tehblog/tags/date_hierarchy.html')
def date_hierarchy():
    """
    This tag will show a dynamic date hierarchy, which can be used to
    search for entries in specific years, months or days.

    Note that this tag is dependant on the generic views specified in
    urls. If you decide to customize the urls and views in any way, then
    this template tag may not function as intended.

    usage:
        {% load tehblog_tags %}
        {% date_hierarchy %}
    """
    return {
        'hierarchy': Entry.objects.public().order_by(
            'publish_date').values('publish_date')
    }


@register.inclusion_tag('tehblog/tags/date_list.html')
def date_list(count=None):
    """
    This is a simpler version of the date_hierarchy tag, and will show
    recent dates as a list showing the month and year. Output would
    typically be: "November 2009"

    You can also pass the ``count`` attribute to limit the results. To
    return a full list of dates, use ``None``

    Usage:
        {% load tehblog_tags %}
        {% date_list %}
    or:
        {% date_list 30 %}
    """
    date_list = Entry.objects.public().dates(
        'publish_date', 'month', order="DESC")[:count]
    return {'count': count, 'date_list': date_list}


@register.inclusion_tag('tehblog/tags/related_entries.html')
def related_entries(entry, count=5):
    """
    Renders a list of related blog entries based on the Entry Tags.
    This tag will only work if django-tagging is installed.

    usage:
        {% related_entries entry %}
    """
    try:
        related_blog_entries = TaggedItem.objects.get_related(
            entry, Entry, num=count)
    except Exception:
        # best-effort: no related entries if tagging lookup fails
        return {}
    return {
        'related_entries': related_blog_entries,
    }


## Filters

@register.filter(name='entries_for_month')
def entries_for_month(date_value):
    """
    Returns the number of entries that was published on a specific
    date.
    """
    count = Entry.objects.public().filter(
        publish_date__year=date_value.year,
        publish_date__month=date_value.month,
    ).count()
    return count


# Copyright (c) 1999-2008 Mark D. Hill and David A. Wood
# Copyright (c) 2009 The Hewlett-Packard Development Company
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from slicc.ast.DeclAST import DeclAST from slicc.symbols import StateMachine, Type class MachineAST(DeclAST): def __init__(self, slicc, mtype, pairs_ast, config_parameters, decls): super(MachineAST, self).__init__(slicc, pairs_ast) self.ident = mtype.value self.pairs_ast = pairs_ast self.config_parameters = config_parameters self.decls = decls def __repr__(self): return "[Machine: %r]" % self.ident def files(self, parent=None): s = set(('%s_Controller.cc' % self.ident, '%s_Controller.hh' % self.ident, '%s_Controller.py' % self.ident, '%s_Transitions.cc' % self.ident, '%s_Wakeup.cc' % self.ident)) s |= self.decls.files(self.ident) return s def generate(self): # Make a new frame self.symtab.pushFrame() # Create a new machine machine = StateMachine(self.symtab, self.ident, self.location, self.pairs, self.config_parameters) self.symtab.newCurrentMachine(machine) # Generate code for all the internal decls self.decls.generate() # Build the transition table machine.buildTable() # Pop the frame self.symtab.popFrame() def findMachines(self): mtype = self.ident machine_type = self.symtab.find("MachineType", Type) if not machine_type.checkEnum(mtype): self.error("Duplicate machine name: %s:%s" % (machine_type, mtype)) # -*- coding: utf-8 -*- from django.forms import CharField, Form, Media, MultiWidget, TextInput from django.template import Context, Template from django.test import SimpleTestCase, override_settings from django.utils.encoding import force_text @override_settings( STATIC_URL='http://media.example.com/static/', ) class FormsMediaTestCase(SimpleTestCase): """Tests for the media handling on widgets and forms""" def test_construction(self): # Check construction of media objects m = Media( css={'all': ('path/to/css1', '/path/to/css2')}, js=('/path/to/js1', 'http://media.other.com/path/to/js2', 'https://secure.other.com/path/to/js3'), ) self.assertEqual( str(m), """ """ ) class Foo: css = { 'all': ('path/to/css1', '/path/to/css2') } js = ('/path/to/js1', 
'http://media.other.com/path/to/js2', 'https://secure.other.com/path/to/js3') m3 = Media(Foo) self.assertEqual( str(m3), """ """ ) # A widget can exist without a media definition class MyWidget(TextInput): pass w = MyWidget() self.assertEqual(str(w.media), '') def test_media_dsl(self): ############################################################### # DSL Class-based media definitions ############################################################### # A widget can define media if it needs to. # Any absolute path will be preserved; relative paths are combined # with the value of settings.MEDIA_URL class MyWidget1(TextInput): class Media: css = { 'all': ('path/to/css1', '/path/to/css2') } js = ('/path/to/js1', 'http://media.other.com/path/to/js2', 'https://secure.other.com/path/to/js3') w1 = MyWidget1() self.assertEqual( str(w1.media), """ """ ) # Media objects can be interrogated by media type self.assertEqual( str(w1.media['css']), """ """ ) self.assertEqual( str(w1.media['js']), """ """ ) def test_combine_media(self): # Media objects can be combined. Any given media resource will appear only # once. Duplicated media definitions are ignored. class MyWidget1(TextInput): class Media: css = { 'all': ('path/to/css1', '/path/to/css2') } js = ('/path/to/js1', 'http://media.other.com/path/to/js2', 'https://secure.other.com/path/to/js3') class MyWidget2(TextInput): class Media: css = { 'all': ('/path/to/css2', '/path/to/css3') } js = ('/path/to/js1', '/path/to/js4') class MyWidget3(TextInput): class Media: css = { 'all': ('/path/to/css3', 'path/to/css1') } js = ('/path/to/js1', '/path/to/js4') w1 = MyWidget1() w2 = MyWidget2() w3 = MyWidget3() self.assertEqual( str(w1.media + w2.media + w3.media), """ """ ) # Check that media addition hasn't affected the original objects self.assertEqual( str(w1.media), """ """ ) # Regression check for #12879: specifying the same CSS or JS file # multiple times in a single Media instance should result in that file # only being included once. 
class MyWidget4(TextInput): class Media: css = {'all': ('/path/to/css1', '/path/to/css1')} js = ('/path/to/js1', '/path/to/js1') w4 = MyWidget4() self.assertEqual(str(w4.media), """ """) def test_media_property(self): ############################################################### # Property-based media definitions ############################################################### # Widget media can be defined as a property class MyWidget4(TextInput): def _media(self): return Media(css={'all': ('/some/path',)}, js=('/some/js',)) media = property(_media) w4 = MyWidget4() self.assertEqual(str(w4.media), """ """) # Media properties can reference the media of their parents class MyWidget5(MyWidget4): def _media(self): return super(MyWidget5, self).media + Media(css={'all': ('/other/path',)}, js=('/other/js',)) media = property(_media) w5 = MyWidget5() self.assertEqual(str(w5.media), """ """) def test_media_property_parent_references(self): # Media properties can reference the media of their parents, # even if the parent media was defined using a class class MyWidget1(TextInput): class Media: css = { 'all': ('path/to/css1', '/path/to/css2') } js = ('/path/to/js1', 'http://media.other.com/path/to/js2', 'https://secure.other.com/path/to/js3') class MyWidget6(MyWidget1): def _media(self): return super(MyWidget6, self).media + Media(css={'all': ('/other/path',)}, js=('/other/js',)) media = property(_media) w6 = MyWidget6() self.assertEqual( str(w6.media), """ """ ) def test_media_inheritance(self): ############################################################### # Inheritance of media ############################################################### # If a widget extends another but provides no media definition, it inherits the parent widget's media class MyWidget1(TextInput): class Media: css = { 'all': ('path/to/css1', '/path/to/css2') } js = ('/path/to/js1', 'http://media.other.com/path/to/js2', 'https://secure.other.com/path/to/js3') class MyWidget7(MyWidget1): pass w7 = 
MyWidget7() self.assertEqual( str(w7.media), """ """ ) # If a widget extends another but defines media, it extends the parent widget's media by default class MyWidget8(MyWidget1): class Media: css = { 'all': ('/path/to/css3', 'path/to/css1') } js = ('/path/to/js1', '/path/to/js4') w8 = MyWidget8() self.assertEqual( str(w8.media), """ """ ) def test_media_inheritance_from_property(self): # If a widget extends another but defines media, it extends the parents widget's media, # even if the parent defined media using a property. class MyWidget1(TextInput): class Media: css = { 'all': ('path/to/css1', '/path/to/css2') } js = ('/path/to/js1', 'http://media.other.com/path/to/js2', 'https://secure.other.com/path/to/js3') class MyWidget4(TextInput): def _media(self): return Media(css={'all': ('/some/path',)}, js=('/some/js',)) media = property(_media) class MyWidget9(MyWidget4): class Media: css = { 'all': ('/other/path',) } js = ('/other/js',) w9 = MyWidget9() self.assertEqual( str(w9.media), """ """ ) # A widget can disable media inheritance by specifying 'extend=False' class MyWidget10(MyWidget1): class Media: extend = False css = { 'all': ('/path/to/css3', 'path/to/css1') } js = ('/path/to/js1', '/path/to/js4') w10 = MyWidget10() self.assertEqual(str(w10.media), """ """) def test_media_inheritance_extends(self): # A widget can explicitly enable full media inheritance by specifying 'extend=True' class MyWidget1(TextInput): class Media: css = { 'all': ('path/to/css1', '/path/to/css2') } js = ('/path/to/js1', 'http://media.other.com/path/to/js2', 'https://secure.other.com/path/to/js3') class MyWidget11(MyWidget1): class Media: extend = True css = { 'all': ('/path/to/css3', 'path/to/css1') } js = ('/path/to/js1', '/path/to/js4') w11 = MyWidget11() self.assertEqual( str(w11.media), """ """ ) def test_media_inheritance_single_type(self): # A widget can enable inheritance of one media type by specifying extend as a tuple class MyWidget1(TextInput): class Media: css = { 'all': 
('path/to/css1', '/path/to/css2') } js = ('/path/to/js1', 'http://media.other.com/path/to/js2', 'https://secure.other.com/path/to/js3') class MyWidget12(MyWidget1): class Media: extend = ('css',) css = { 'all': ('/path/to/css3', 'path/to/css1') } js = ('/path/to/js1', '/path/to/js4') w12 = MyWidget12() self.assertEqual( str(w12.media), """ """ ) def test_multi_media(self): ############################################################### # Multi-media handling for CSS ############################################################### # A widget can define CSS media for multiple output media types class MultimediaWidget(TextInput): class Media: css = { 'screen, print': ('/file1', '/file2'), 'screen': ('/file3',), 'print': ('/file4',) } js = ('/path/to/js1', '/path/to/js4') multimedia = MultimediaWidget() self.assertEqual( str(multimedia.media), """ """ ) def test_multi_widget(self): ############################################################### # Multiwidget media handling ############################################################### class MyWidget1(TextInput): class Media: css = { 'all': ('path/to/css1', '/path/to/css2') } js = ('/path/to/js1', 'http://media.other.com/path/to/js2', 'https://secure.other.com/path/to/js3') class MyWidget2(TextInput): class Media: css = { 'all': ('/path/to/css2', '/path/to/css3') } js = ('/path/to/js1', '/path/to/js4') class MyWidget3(TextInput): class Media: css = { 'all': ('/path/to/css3', 'path/to/css1') } js = ('/path/to/js1', '/path/to/js4') # MultiWidgets have a default media definition that gets all the # media from the component widgets class MyMultiWidget(MultiWidget): def __init__(self, attrs=None): widgets = [MyWidget1, MyWidget2, MyWidget3] super(MyMultiWidget, self).__init__(widgets, attrs) mymulti = MyMultiWidget() self.assertEqual( str(mymulti.media), """ """ ) def test_form_media(self): ############################################################### # Media processing for forms 
############################################################### class MyWidget1(TextInput): class Media: css = { 'all': ('path/to/css1', '/path/to/css2') } js = ('/path/to/js1', 'http://media.other.com/path/to/js2', 'https://secure.other.com/path/to/js3') class MyWidget2(TextInput): class Media: css = { 'all': ('/path/to/css2', '/path/to/css3') } js = ('/path/to/js1', '/path/to/js4') class MyWidget3(TextInput): class Media: css = { 'all': ('/path/to/css3', 'path/to/css1') } js = ('/path/to/js1', '/path/to/js4') # You can ask a form for the media required by its widgets. class MyForm(Form): field1 = CharField(max_length=20, widget=MyWidget1()) field2 = CharField(max_length=20, widget=MyWidget2()) f1 = MyForm() self.assertEqual( str(f1.media), """ """ ) # Form media can be combined to produce a single media definition. class AnotherForm(Form): field3 = CharField(max_length=20, widget=MyWidget3()) f2 = AnotherForm() self.assertEqual( str(f1.media + f2.media), """ """ ) # Forms can also define media, following the same rules as widgets. 
class FormWithMedia(Form): field1 = CharField(max_length=20, widget=MyWidget1()) field2 = CharField(max_length=20, widget=MyWidget2()) class Media: js = ('/some/form/javascript',) css = { 'all': ('/some/form/css',) } f3 = FormWithMedia() self.assertEqual( str(f3.media), """ """ ) # Media works in templates self.assertEqual( Template("{{ form.media.js }}{{ form.media.css }}").render(Context({'form': f3})), """ """ """ """ ) def test_html_safe(self): media = Media(css={'all': ['/path/to/css']}, js=['/path/to/js']) self.assertTrue(hasattr(Media, '__html__')) self.assertEqual(force_text(media), media.__html__()) from django.contrib import admin from django.contrib.auth.models import User from reversion.admin import VersionAdmin from pages.models import ( Template, Page, ) from pages.forms import ( TemplateAdminForm, PageAdminForm ) @admin.register(Template) class TemplateAdmin(VersionAdmin): list_display = ('handle', 'site', 'template_path') list_filter = ('site__name',) readonly_fields = ('template_path', 'fs_full_path') form = TemplateAdminForm @admin.register(Page) class PageAdmin(VersionAdmin): list_display = ('title', 'site', 'handle', 'role', 'description') list_filter = ('site__name',) ordering = ('site', 'role', 'title') form = PageAdminForm def formfield_for_foreignkey(self, db_field, request=None, **kwargs): if db_field.name == 'page_author': kwargs["initial"] = request.user if not request.user.is_superuser: kwargs["queryset"] = User.objects.filter(pk=request.user.pk) return super(PageAdmin, self).formfield_for_foreignkey(db_field, request, **kwargs) from .keyboard import Keyboard from json import loads, dumps class Message: # Message클래스를 생성할 때 기본적인 틀만 구현하고 # 값들은 던져주면 알아서 메시지를 리턴한다 baseKeyboard = { "type": "buttons", "buttons": Keyboard.buttons, } baseMessage = { "message": { "text": "", }, "keyboard": baseKeyboard } # Uesage : baseMessage["message"].update(baseWeekend) baseWeekend = { "message_button": { "label": "이번주 메뉴 보기", "url": 
"http://apps.hongik.ac.kr/food/food.php" } } def __init__(self): self.returnedMessage = None def getMessage(self): return self.returnedMessage class BaseMessage(Message): def __init__(self): super().__init__() self.returnedMessage = loads(dumps(Message.baseMessage)) def updateMessage(self, message): self.returnedMessage["message"]["text"] = message def updateKeyboard(self, argKeyboard): keyboard = Message.baseKeyboard keyboard["buttons"] = argKeyboard self.returnedMessage["keyboard"] = keyboard def add_photo(self, url, width, height): photo_message = { "photo": { "url": "http://www.hongik.ac.kr/front/images/local/header_logo.png", "width": 198, "height": 45, }, } photo_message["photo"]["url"] = url photo_message["photo"]["width"] = width photo_message["photo"]["height"] = height self.returnedMessage["message"].update(photo_message) class EvaluateMessage(BaseMessage): def __init__(self, message, step): ''' step 1 : 식단 평가하기 -> 장소 step 2 : 장소 -> 시간대 step 3 : 시간대 -> 점수 step 4 : 점수 -> 끝 ''' super().__init__() self.updateMessage(message) if step == 1: self.updateKeyboard(Keyboard.placeButtons) elif step == 2: self.updateKeyboard(Keyboard.timeButtons) elif step == 3: self.updateKeyboard(Keyboard.scoreButtons) elif step == 4: self.updateKeyboard(Keyboard.homeButtons) else: raise class SummaryMenuMessage(BaseMessage): def __init__(self, message, isToday): super().__init__() self.updateMessage(message) if isToday: self.updateKeyboard(Keyboard.todayButtons) else: self.updateKeyboard(Keyboard.tomorrowButtons) class HomeMessage(Message): def __init__(self): self.returnedMessage = Message.baseKeyboard homeKeyboard = HomeMessage.returnHomeKeyboard() self.returnedMessage["buttons"] = homeKeyboard @staticmethod def returnHomeKeyboard():\ return Keyboard.homeButtons class FailMessage(BaseMessage): def __init__(self): super().__init__() self.updateMessage("오류가 발생하였습니다.") self.updateKeyboard(Keyboard.homeButtons) class SuccessMessage(Message): def __init__(self): self.returnedMessage 
= "SUCCESS"  # NOTE(review): stray fragment -- tail of a previous concatenated file; verify against the original source

# -*- coding: utf-8 -*-
'''
Sitemap
-------

The sitemap plugin generates plain-text or XML sitemaps.
'''

from __future__ import unicode_literals

import re
import collections
import os.path

from datetime import datetime
from logging import warning, info
from codecs import open
from pytz import timezone

from pelican import signals, contents
from pelican.utils import get_date

# Header for the plain-text sitemap: the four standard top-level pages.
# {0} is replaced with SITEURL.
TXT_HEADER = """{0}/index.html
{0}/archives.html
{0}/tags.html
{0}/categories.html
"""

# NOTE(review): the XML templates below appear to have had their markup
# stripped by whatever extracted this file -- the upstream plugin emits
# <?xml ...>/<urlset> and <url> elements here.  Verify against upstream.
XML_HEADER = """ """

XML_URL = """ {0}/{1} {2} {3} {4} """

XML_FOOTER = """ """


def format_date(date):
    # Render a datetime in W3C/ISO-8601 form with a numeric UTC offset;
    # naive datetimes get the unknown-offset marker "-00:00".
    if date.tzinfo:
        tz = date.strftime('%z')
        tz = tz[:-2] + ':' + tz[-2:]
    else:
        tz = "-00:00"
    return date.strftime("%Y-%m-%dT%H:%M:%S") + tz


class SitemapGenerator(object):
    """Pelican generator that writes sitemap.xml or sitemap.txt into the
    output directory, configured through the SITEMAP setting dict."""

    def __init__(self, context, settings, path, theme, output_path, *null):

        self.output_path = output_path
        self.context = context
        self.now = datetime.now()
        self.siteurl = settings.get('SITEURL')

        self.default_timezone = settings.get('TIMEZONE', 'UTC')
        self.timezone = getattr(self, 'timezone', self.default_timezone)
        self.timezone = timezone(self.timezone)

        self.format = 'xml'

        self.changefreqs = {
            'articles': 'monthly',
            'indexes': 'daily',
            'pages': 'monthly'
        }

        self.priorities = {
            'articles': 0.5,
            'indexes': 0.5,
            'pages': 0.5
        }

        self.sitemapExclude = []

        config = settings.get('SITEMAP', {})

        if not isinstance(config, dict):
            warning("sitemap plugin: the SITEMAP setting must be a dict")
        else:
            fmt = config.get('format')
            pris = config.get('priorities')
            chfreqs = config.get('changefreqs')
            self.sitemapExclude = config.get('exclude', [])

            if fmt not in ('xml', 'txt'):
                warning("sitemap plugin: SITEMAP['format'] must be `txt' or `xml'")
                warning("sitemap plugin: Setting SITEMAP['format'] on `xml'")
            elif fmt == 'txt':
                self.format = fmt
                # txt sitemaps carry no priority/changefreq data, so the
                # remaining validation below is deliberately skipped.
                return

            valid_keys = ('articles', 'indexes', 'pages')
            valid_chfreqs = ('always', 'hourly', 'daily', 'weekly', 'monthly',
                             'yearly', 'never')

            if isinstance(pris, dict):
                # We use items for Py3k compat. .iteritems() otherwise
                for k, v in pris.items():
                    if k in valid_keys and not isinstance(v, (int, float)):
                        default = self.priorities[k]
                        warning("sitemap plugin: priorities must be numbers")
                        warning("sitemap plugin: setting SITEMAP['priorities']"
                                "['{0}'] on {1}".format(k, default))
                        pris[k] = default
                self.priorities.update(pris)
            elif pris is not None:
                warning("sitemap plugin: SITEMAP['priorities'] must be a dict")
                warning("sitemap plugin: using the default values")

            if isinstance(chfreqs, dict):
                # .items() for py3k compat.
                for k, v in chfreqs.items():
                    if k in valid_keys and v not in valid_chfreqs:
                        default = self.changefreqs[k]
                        warning("sitemap plugin: invalid changefreq `{0}'".format(v))
                        warning("sitemap plugin: setting SITEMAP['changefreqs']"
                                "['{0}'] on '{1}'".format(k, default))
                        chfreqs[k] = default
                self.changefreqs.update(chfreqs)
            elif chfreqs is not None:
                warning("sitemap plugin: SITEMAP['changefreqs'] must be a dict")
                warning("sitemap plugin: using the default values")

    def write_url(self, page, fd):
        # Emit one sitemap entry for *page*; silently skipped for
        # unpublished pages, pages whose save_as is disabled (False/''),
        # and pages whose output file does not exist yet.
        if getattr(page, 'status', 'published') != 'published':
            return

        # We can disable categories/authors/etc by using False instead of ''
        if not page.save_as:
            return

        page_path = os.path.join(self.output_path, page.save_as)
        if not os.path.exists(page_path):
            return

        lastdate = getattr(page, 'date', self.now)
        try:
            lastdate = self.get_date_modified(page, lastdate)
        except ValueError:
            warning("sitemap plugin: " + page.save_as + " has invalid modification date,")
            warning("sitemap plugin: using date value as lastmod.")
        lastmod = format_date(lastdate)

        if isinstance(page, contents.Article):
            pri = self.priorities['articles']
            chfreq = self.changefreqs['articles']
        elif isinstance(page, contents.Page):
            pri = self.priorities['pages']
            chfreq = self.changefreqs['pages']
        else:
            pri = self.priorities['indexes']
            chfreq = self.changefreqs['indexes']

        pageurl = '' if page.url == 'index.html' else page.url

        #Exclude URLs from the sitemap:
        # NOTE(review): the exclude regexes are only applied for the xml
        # format here -- confirm whether txt sitemaps should honour them too.
        if self.format == 'xml':
            flag = False
            for regstr in self.sitemapExclude:
                if re.match(regstr, pageurl):
                    flag = True
                    break
            if not flag:
                fd.write(XML_URL.format(self.siteurl, pageurl, lastmod, chfreq, pri))
        else:
            fd.write(self.siteurl + '/' + pageurl + '\n')

    def get_date_modified(self, page, default):
        # Prefer an explicit `modified` attribute (datetime, or a string
        # parsed by get_date -- which may raise ValueError); otherwise
        # fall back to *default*.
        if hasattr(page, 'modified'):
            if isinstance(page.modified, datetime):
                return page.modified
            return get_date(page.modified)
        else:
            return default

    def set_url_wrappers_modification_date(self, wrappers):
        # Stamp each (wrapper, articles) pair with the newest article
        # date/modified so index pages get a meaningful lastmod.
        for (wrapper, articles) in wrappers:
            lastmod = datetime.min.replace(tzinfo=self.timezone)
            for article in articles:
                lastmod = max(lastmod, article.date.replace(tzinfo=self.timezone))
                try:
                    modified = self.get_date_modified(article, datetime.min).replace(tzinfo=self.timezone)
                    lastmod = max(lastmod, modified)
                except ValueError:
                    # Supressed: user will be notified.
                    pass
            setattr(wrapper, 'modified', str(lastmod))

    def generate_output(self, writer):
        # Write the sitemap file covering pages, articles (plus their
        # translations) and category/tag/author index pages, preceded by
        # four synthetic entries for the standard top-level pages.
        path = os.path.join(self.output_path, 'sitemap.{0}'.format(self.format))

        pages = self.context['pages'] + self.context['articles'] \
            + [ c for (c, a) in self.context['categories']] \
            + [ t for (t, a) in self.context['tags']] \
            + [ a for (a, b) in self.context['authors']]

        self.set_url_wrappers_modification_date(self.context['categories'])
        self.set_url_wrappers_modification_date(self.context['tags'])
        self.set_url_wrappers_modification_date(self.context['authors'])

        for article in self.context['articles']:
            pages += article.translations

        info('writing {0}'.format(path))

        with open(path, 'w', encoding='utf-8') as fd:

            if self.format == 'xml':
                fd.write(XML_HEADER)
            else:
                fd.write(TXT_HEADER.format(self.siteurl))

            FakePage = collections.namedtuple('FakePage',
                                              ['status', 'date', 'url', 'save_as'])

            for standard_page_url in ['index.html', 'archives.html',
                                      'tags.html', 'categories.html']:
                fake = FakePage(status='published',
                                date=self.now,
                                url=standard_page_url,
                                save_as=standard_page_url)
                self.write_url(fake, fd)

            for page in pages:
                self.write_url(page, fd)

            if self.format == 'xml':
                fd.write(XML_FOOTER)


def get_generators(generators):
    # Pelican `get_generators` signal handler: hand back the generator class.
    return SitemapGenerator


def register():
    signals.get_generators.connect(get_generators)


# -*- coding: utf-8 -*-
# enzyme - Video metadata parser
# Copyright 2011-2012 Antoine Bertin
# Copyright 2003-2006 Thomas Schueppel
# Copyright 2003-2006 Dirk Meyer
#
# This file is part of enzyme.
#
# enzyme is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# enzyme is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with enzyme. If not, see .
__all__ = ['Parser']

import struct
import logging
from exceptions import ParseError
import core

# http://www.pcisys.net/~melanson/codecs/rmff.htm
# http://www.pcisys.net/~melanson/codecs/

# get logging object
log = logging.getLogger(__name__)


class RealVideo(core.AVContainer):
    # Parser for RealMedia (.RMF) containers; fills in AVContainer fields.
    def __init__(self, file):
        core.AVContainer.__init__(self)
        self.mime = 'video/real'
        self.type = 'Real Video'
        h = file.read(10)
        try:
            (object_id, object_size, object_version) = struct.unpack('>4sIH', h)
        except struct.error:
            # EOF.
            raise ParseError()

        if not object_id == '.RMF':
            raise ParseError()

        file_version, num_headers = struct.unpack('>II', file.read(8))
        log.debug(u'size: %d, ver: %d, headers: %d' % \
                  (object_size, file_version, num_headers))
        for _ in range(0, num_headers):
            try:
                oi = struct.unpack('>4sIH', file.read(10))
            except (struct.error, IOError):
                # Header data we expected wasn't there.  File may be
                # only partially complete.
                break
            if object_id == 'DATA' and oi[0] != 'INDX':
                log.debug(u'INDX chunk expected after DATA but not found -- file corrupt')
                break

            (object_id, object_size, object_version) = oi
            if object_id == 'DATA':
                # Seek over the data chunk rather than reading it in.
                file.seek(object_size - 10, 1)
            else:
                self._read_header(object_id, file.read(object_size - 10))
            log.debug(u'%r [%d]' % (object_id, object_size - 10))

    # Read all the following headers
    def _read_header(self, object_id, s):
        # Decode one header-chunk body *s* according to its four-byte
        # *object_id* tag (PROP / MDPR / CONT); offsets below follow the
        # RMFF spec referenced at the top of this module.
        if object_id == 'PROP':
            prop = struct.unpack('>9IHH', s)
            log.debug(u'PROP: %r' % prop)

        if object_id == 'MDPR':
            # Media properties: mdpr[7] is the stream duration in ms.
            mdpr = struct.unpack('>H7I', s[:30])
            log.debug(u'MDPR: %r' % mdpr)
            self.length = mdpr[7] / 1000.0
            (stream_name_size,) = struct.unpack('>B', s[30:31])
            stream_name = s[31:31 + stream_name_size]
            pos = 31 + stream_name_size
            (mime_type_size,) = struct.unpack('>B', s[pos:pos + 1])
            mime = s[pos + 1:pos + 1 + mime_type_size]
            pos += mime_type_size + 1
            (type_specific_len,) = struct.unpack('>I', s[pos:pos + 4])
            type_specific = s[pos + 4:pos + 4 + type_specific_len]
            pos += 4 + type_specific_len
            # mdpr[0] is the stream id, mdpr[2] the average bitrate.
            if mime[:5] == 'audio':
                ai = core.AudioStream()
                ai.id = mdpr[0]
                ai.bitrate = mdpr[2]
                self.audio.append(ai)
            elif mime[:5] == 'video':
                vi = core.VideoStream()
                vi.id = mdpr[0]
                vi.bitrate = mdpr[2]
                self.video.append(vi)
            else:
                log.debug(u'Unknown: %r' % mime)

        if object_id == 'CONT':
            # Content description: length-prefixed title/author/copyright/
            # comment strings, back to back.
            pos = 0
            (title_len,) = struct.unpack('>H', s[pos:pos + 2])
            self.title = s[2:title_len + 2]
            pos += title_len + 2
            (author_len,) = struct.unpack('>H', s[pos:pos + 2])
            self.artist = s[pos + 2:pos + author_len + 2]
            pos += author_len + 2
            (copyright_len,) = struct.unpack('>H', s[pos:pos + 2])
            self.copyright = s[pos + 2:pos + copyright_len + 2]
            pos += copyright_len + 2
            (comment_len,) = struct.unpack('>H', s[pos:pos + 2])
            self.comment = s[pos + 2:pos + comment_len + 2]


# Public entry point expected by enzyme's parser registry.
Parser = RealVideo

from __future__ import absolute_import, division, unicode_literals
from six import text_type

import gettext
_ = gettext.gettext

try:
    from functools import reduce
except ImportError: pass from ..constants import voidElements, booleanAttributes, spaceCharacters from ..constants import rcdataElements, entities, xmlEntities from .. import utils from xml.sax.saxutils import escape spaceCharacters = "".join(spaceCharacters) try: from codecs import register_error, xmlcharrefreplace_errors except ImportError: unicode_encode_errors = "strict" else: unicode_encode_errors = "htmlentityreplace" encode_entity_map = {} is_ucs4 = len("\U0010FFFF") == 1 for k, v in list(entities.items()): # skip multi-character entities if ((is_ucs4 and len(v) > 1) or (not is_ucs4 and len(v) > 2)): continue if v != "&": if len(v) == 2: v = utils.surrogatePairToCodepoint(v) else: v = ord(v) if not v in encode_entity_map or k.islower(): # prefer < over < and similarly for &, >, etc. encode_entity_map[v] = k def htmlentityreplace_errors(exc): if isinstance(exc, (UnicodeEncodeError, UnicodeTranslateError)): res = [] codepoints = [] skip = False for i, c in enumerate(exc.object[exc.start:exc.end]): if skip: skip = False continue index = i + exc.start if utils.isSurrogatePair(exc.object[index:min([exc.end, index + 2])]): codepoint = utils.surrogatePairToCodepoint(exc.object[index:index + 2]) skip = True else: codepoint = ord(c) codepoints.append(codepoint) for cp in codepoints: e = encode_entity_map.get(cp) if e: res.append("&") res.append(e) if not e.endswith(";"): res.append(";") else: res.append("&#x%s;" % (hex(cp)[2:])) return ("".join(res), exc.end) else: return xmlcharrefreplace_errors(exc) register_error(unicode_encode_errors, htmlentityreplace_errors) del register_error class HTMLSerializer(object): # attribute quoting options quote_attr_values = False quote_char = '"' use_best_quote_char = True # tag syntax options omit_optional_tags = True minimize_boolean_attributes = True use_trailing_solidus = False space_before_trailing_solidus = True # escaping options escape_lt_in_attrs = False escape_rcdata = False resolve_entities = True # miscellaneous options 
alphabetical_attributes = False inject_meta_charset = True strip_whitespace = False sanitize = False options = ("quote_attr_values", "quote_char", "use_best_quote_char", "omit_optional_tags", "minimize_boolean_attributes", "use_trailing_solidus", "space_before_trailing_solidus", "escape_lt_in_attrs", "escape_rcdata", "resolve_entities", "alphabetical_attributes", "inject_meta_charset", "strip_whitespace", "sanitize") def __init__(self, **kwargs): """Initialize HTMLSerializer. Keyword options (default given first unless specified) include: inject_meta_charset=True|False Whether it insert a meta element to define the character set of the document. quote_attr_values=True|False Whether to quote attribute values that don't require quoting per HTML5 parsing rules. quote_char=u'"'|u"'" Use given quote character for attribute quoting. Default is to use double quote unless attribute value contains a double quote, in which case single quotes are used instead. escape_lt_in_attrs=False|True Whether to escape < in attribute values. escape_rcdata=False|True Whether to escape characters that need to be escaped within normal elements within rcdata elements such as style. resolve_entities=True|False Whether to resolve named character entities that appear in the source tree. The XML predefined entities < > & " ' are unaffected by this setting. strip_whitespace=False|True Whether to remove semantically meaningless whitespace. (This compresses all whitespace to a single space except within pre.) minimize_boolean_attributes=True|False Shortens boolean attributes to give just the attribute value, for example becomes . use_trailing_solidus=False|True Includes a close-tag slash at the end of the start tag of void elements (empty elements whose end tag is forbidden). E.g.
. space_before_trailing_solidus=True|False Places a space immediately before the closing slash in a tag using a trailing solidus. E.g.
. Requires use_trailing_solidus. sanitize=False|True Strip all unsafe or unknown constructs from output. See `html5lib user documentation`_ omit_optional_tags=True|False Omit start/end tags that are optional. alphabetical_attributes=False|True Reorder attributes to be in alphabetical order. .. _html5lib user documentation: http://code.google.com/p/html5lib/wiki/UserDocumentation """ if 'quote_char' in kwargs: self.use_best_quote_char = False for attr in self.options: setattr(self, attr, kwargs.get(attr, getattr(self, attr))) self.errors = [] self.strict = False def encode(self, string): assert(isinstance(string, text_type)) if self.encoding: return string.encode(self.encoding, unicode_encode_errors) else: return string def encodeStrict(self, string): assert(isinstance(string, text_type)) if self.encoding: return string.encode(self.encoding, "strict") else: return string def serialize(self, treewalker, encoding=None): self.encoding = encoding in_cdata = False self.errors = [] if encoding and self.inject_meta_charset: from ..filters.inject_meta_charset import Filter treewalker = Filter(treewalker, encoding) # WhitespaceFilter should be used before OptionalTagFilter # for maximum efficiently of this latter filter if self.strip_whitespace: from ..filters.whitespace import Filter treewalker = Filter(treewalker) if self.sanitize: from ..filters.sanitizer import Filter treewalker = Filter(treewalker) if self.omit_optional_tags: from ..filters.optionaltags import Filter treewalker = Filter(treewalker) # Alphabetical attributes must be last, as other filters # could add attributes and alter the order if self.alphabetical_attributes: from ..filters.alphabeticalattributes import Filter treewalker = Filter(treewalker) for token in treewalker: type = token["type"] if type == "Doctype": doctype = "= 0: if token["systemId"].find("'") >= 0: self.serializeError(_("System identifer contains both single and double quote characters")) quote_char = "'" else: quote_char = '"' doctype += 
" %s%s%s" % (quote_char, token["systemId"], quote_char) doctype += ">" yield self.encodeStrict(doctype) elif type in ("Characters", "SpaceCharacters"): if type == "SpaceCharacters" or in_cdata: if in_cdata and token["data"].find("= 0: self.serializeError(_("Unexpected \"'=", False) v = v.replace("&", "&") if self.escape_lt_in_attrs: v = v.replace("<", "<") if quote_attr: quote_char = self.quote_char if self.use_best_quote_char: if "'" in v and '"' not in v: quote_char = '"' elif '"' in v and "'" not in v: quote_char = "'" if quote_char == "'": v = v.replace("'", "'") else: v = v.replace('"', """) yield self.encodeStrict(quote_char) yield self.encode(v) yield self.encodeStrict(quote_char) else: yield self.encode(v) if name in voidElements and self.use_trailing_solidus: if self.space_before_trailing_solidus: yield self.encodeStrict(" /") else: yield self.encodeStrict("/") yield self.encode(">") elif type == "EndTag": name = token["name"] if name in rcdataElements: in_cdata = False elif in_cdata: self.serializeError(_("Unexpected child element of a CDATA element")) yield self.encodeStrict("" % name) elif type == "Comment": data = token["data"] if data.find("--") >= 0: self.serializeError(_("Comment contains --")) yield self.encodeStrict("" % token["data"]) elif type == "Entity": name = token["name"] key = name + ";" if not key in entities: self.serializeError(_("Entity %s not recognized" % name)) if self.resolve_entities and key not in xmlEntities: data = entities[key] else: data = "&%s;" % name yield self.encodeStrict(data) else: self.serializeError(token["data"]) def render(self, treewalker, encoding=None): if encoding: return b"".join(list(self.serialize(treewalker, encoding))) else: return "".join(list(self.serialize(treewalker))) def serializeError(self, data="XXX ERROR MESSAGE NEEDED"): # XXX The idea is to make data mandatory. 
self.errors.append(data) if self.strict: raise SerializeError def SerializeError(Exception): """Error in serialized tree""" pass # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors # License: GNU General Public License v3. See license.txt from __future__ import unicode_literals import frappe from frappe.utils import add_days, getdate, cint from frappe import throw, _ from erpnext.utilities.transaction_base import TransactionBase, delete_events from erpnext.stock.utils import get_valid_serial_nos class MaintenanceSchedule(TransactionBase): def get_item_details(self, item_code): item = frappe.db.sql("""select item_name, description from `tabItem` where name=%s""", (item_code), as_dict=1) ret = { 'item_name': item and item[0]['item_name'] or '', 'description' : item and item[0]['description'] or '' } return ret def generate_schedule(self): self.set('maintenance_schedule_detail', []) frappe.db.sql("""delete from `tabMaintenance Schedule Detail` where parent=%s""", (self.name)) count = 1 for d in self.get('item_maintenance_detail'): self.validate_maintenance_detail() s_list = [] s_list = self.create_schedule_list(d.start_date, d.end_date, d.no_of_visits, d.sales_person) for i in range(d.no_of_visits): child = self.append('maintenance_schedule_detail') child.item_code = d.item_code child.item_name = d.item_name child.scheduled_date = s_list[i].strftime('%Y-%m-%d') if d.serial_no: child.serial_no = d.serial_no child.idx = count count = count + 1 child.sales_person = d.sales_person self.save() def on_submit(self): if not self.get('maintenance_schedule_detail'): throw(_("Please click on 'Generate Schedule' to get schedule")) self.check_serial_no_added() self.validate_schedule() email_map = {} for d in self.get('item_maintenance_detail'): if d.serial_no: serial_nos = get_valid_serial_nos(d.serial_no) self.validate_serial_no(serial_nos, d.start_date) self.update_amc_date(serial_nos, d.end_date) if d.sales_person not in email_map: sp = frappe.get_doc("Sales 
Person", d.sales_person) email_map[d.sales_person] = sp.get_email_id() scheduled_date = frappe.db.sql("""select scheduled_date from `tabMaintenance Schedule Detail` where sales_person=%s and item_code=%s and parent=%s""", (d.sales_person, d.item_code, self.name), as_dict=1) for key in scheduled_date: if email_map[d.sales_person]: description = "Reference: %s, Item Code: %s and Customer: %s" % \ (self.name, d.item_code, self.customer) frappe.get_doc({ "doctype": "Event", "owner": email_map[d.sales_person] or self.owner, "subject": description, "description": description, "starts_on": key["scheduled_date"] + " 10:00:00", "event_type": "Private", "ref_type": self.doctype, "ref_name": self.name }).insert(ignore_permissions=1) frappe.db.set(self, 'status', 'Submitted') def create_schedule_list(self, start_date, end_date, no_of_visit, sales_person): schedule_list = [] start_date_copy = start_date date_diff = (getdate(end_date) - getdate(start_date)).days add_by = date_diff / no_of_visit for visit in range(cint(no_of_visit)): if (getdate(start_date_copy) < getdate(end_date)): start_date_copy = add_days(start_date_copy, add_by) if len(schedule_list) < no_of_visit: schedule_date = self.validate_schedule_date_for_holiday_list(getdate(start_date_copy), sales_person) if schedule_date > getdate(end_date): schedule_date = getdate(end_date) schedule_list.append(schedule_date) return schedule_list def validate_schedule_date_for_holiday_list(self, schedule_date, sales_person): from erpnext.accounts.utils import get_fiscal_year validated = False fy_details = "" try: fy_details = get_fiscal_year(date=schedule_date, verbose=0) except Exception: pass if fy_details and fy_details[0]: # check holiday list in employee master holiday_list = frappe.db.sql_list("""select h.holiday_date from `tabEmployee` emp, `tabSales Person` sp, `tabHoliday` h, `tabHoliday List` hl where sp.name=%s and emp.name=sp.employee and hl.name=emp.holiday_list and h.parent=hl.name and hl.fiscal_year=%s""", 
(sales_person, fy_details[0])) if not holiday_list: # check global holiday list holiday_list = frappe.db.sql("""select h.holiday_date from `tabHoliday` h, `tabHoliday List` hl where h.parent=hl.name and ifnull(hl.is_default, 0) = 1 and hl.fiscal_year=%s""", fy_details[0]) if not validated and holiday_list: if schedule_date in holiday_list: schedule_date = add_days(schedule_date, -1) else: validated = True return schedule_date def validate_dates_with_periodicity(self): for d in self.get("item_maintenance_detail"): if d.start_date and d.end_date and d.periodicity and d.periodicity!="Random": date_diff = (getdate(d.end_date) - getdate(d.start_date)).days + 1 days_in_period = { "Weekly": 7, "Monthly": 30, "Quarterly": 90, "Half Yearly": 180, "Yearly": 365 } if date_diff < days_in_period[d.periodicity]: throw(_("Row {0}: To set {1} periodicity, difference between from and to date \ must be greater than or equal to {2}") .format(d.idx, d.periodicity, days_in_period[d.periodicity])) def validate_maintenance_detail(self): if not self.get('item_maintenance_detail'): throw(_("Please enter Maintaince Details first")) for d in self.get('item_maintenance_detail'): if not d.item_code: throw(_("Please select item code")) elif not d.start_date or not d.end_date: throw(_("Please select Start Date and End Date for Item {0}".format(d.item_code))) elif not d.no_of_visits: throw(_("Please mention no of visits required")) elif not d.sales_person: throw(_("Please select Incharge Person's name")) if getdate(d.start_date) >= getdate(d.end_date): throw(_("Start date should be less than end date for Item {0}").format(d.item_code)) def validate_sales_order(self): for d in self.get('item_maintenance_detail'): if d.prevdoc_docname: chk = frappe.db.sql("""select ms.name from `tabMaintenance Schedule` ms, `tabMaintenance Schedule Item` msi where msi.parent=ms.name and msi.prevdoc_docname=%s and ms.docstatus=1""", d.prevdoc_docname) if chk: throw(_("Maintenance Schedule {0} exists against 
{0}").format(chk[0][0], d.prevdoc_docname)) def validate(self): self.validate_maintenance_detail() self.validate_dates_with_periodicity() self.validate_sales_order() def on_update(self): frappe.db.set(self, 'status', 'Draft') def update_amc_date(self, serial_nos, amc_expiry_date=None): for serial_no in serial_nos: serial_no_doc = frappe.get_doc("Serial No", serial_no) serial_no_doc.amc_expiry_date = amc_expiry_date serial_no_doc.save() def validate_serial_no(self, serial_nos, amc_start_date): for serial_no in serial_nos: sr_details = frappe.db.get_value("Serial No", serial_no, ["warranty_expiry_date", "amc_expiry_date", "status", "delivery_date"], as_dict=1) if not sr_details: frappe.throw(_("Serial No {0} not found").format(serial_no)) if sr_details.warranty_expiry_date and sr_details.warranty_expiry_date>=amc_start_date: throw(_("Serial No {0} is under warranty upto {1}").format(serial_no, sr_details.warranty_expiry_date)) if sr_details.amc_expiry_date and sr_details.amc_expiry_date >= amc_start_date: throw(_("Serial No {0} is under maintenance contract upto {1}").format(serial_no, sr_details.amc_start_date)) if sr_details.status=="Delivered" and sr_details.delivery_date and \ sr_details.delivery_date >= amc_start_date: throw(_("Maintenance start date can not be before delivery date for Serial No {0}").format(serial_no)) def validate_schedule(self): item_lst1 =[] item_lst2 =[] for d in self.get('item_maintenance_detail'): if d.item_code not in item_lst1: item_lst1.append(d.item_code) for m in self.get('maintenance_schedule_detail'): if m.item_code not in item_lst2: item_lst2.append(m.item_code) if len(item_lst1) != len(item_lst2): throw(_("Maintenance Schedule is not generated for all the items. 
Please click on 'Generate Schedule'")) else: for x in item_lst1: if x not in item_lst2: throw(_("Please click on 'Generate Schedule'")) def check_serial_no_added(self): serial_present =[] for d in self.get('item_maintenance_detail'): if d.serial_no: serial_present.append(d.item_code) for m in self.get('maintenance_schedule_detail'): if serial_present: if m.item_code in serial_present and not m.serial_no: throw(_("Please click on 'Generate Schedule' to fetch Serial No added for Item {0}").format(m.item_code)) def on_cancel(self): for d in self.get('item_maintenance_detail'): if d.serial_no: serial_nos = get_valid_serial_nos(d.serial_no) self.update_amc_date(serial_nos) frappe.db.set(self, 'status', 'Cancelled') delete_events(self.doctype, self.name) def on_trash(self): delete_events(self.doctype, self.name) @frappe.whitelist() def make_maintenance_visit(source_name, target_doc=None): from frappe.model.mapper import get_mapped_doc def update_status(source, target, parent): target.maintenance_type = "Scheduled" doclist = get_mapped_doc("Maintenance Schedule", source_name, { "Maintenance Schedule": { "doctype": "Maintenance Visit", "field_map": { "name": "maintenance_schedule" }, "validation": { "docstatus": ["=", 1] }, "postprocess": update_status }, "Maintenance Schedule Item": { "doctype": "Maintenance Visit Purpose", "field_map": { "parent": "prevdoc_docname", "parenttype": "prevdoc_doctype", "sales_person": "service_person" } } }, target_doc) return doclist #!/usr/bin/env python #coding=utf-8 import sys, os from bottle import Bottle from bottle import request from bottle import response from bottle import redirect from bottle import MakoTemplate from bottle import static_file from bottle import abort from beaker.cache import cache_managers from toughradius.console.libs.paginator import Paginator from toughradius.console.libs import utils from toughradius.console.websock import websock from toughradius.console import models from toughradius.console.base import * 
from toughradius.console.admin import node_forms from hashlib import md5 from twisted.python import log import bottle import datetime import json import functools __prefix__ = "/node" app = Bottle() app.config['__prefix__'] = __prefix__ ############################################################################### # node manage ############################################################################### @app.get('/', apply=auth_opr) def node(db, render): return render("sys_node_list", page_data=get_page_data(db.query(models.SlcNode))) permit.add_route("/node", u"区域信息管理", u"系统管理", is_menu=True, order=1) @app.get('/add', apply=auth_opr) def node_add(db, render): return render("base_form", form=node_forms.node_add_form()) @app.post('/add', apply=auth_opr) def node_add_post(db, render): form = node_forms.node_add_form() if not form.validates(source=request.forms): return render("base_form", form=form) node = models.SlcNode() node.node_name = form.d.node_name node.node_desc = form.d.node_desc db.add(node) ops_log = models.SlcRadOperateLog() ops_log.operator_name = get_cookie("username") ops_log.operate_ip = get_cookie("login_ip") ops_log.operate_time = utils.get_currtime() ops_log.operate_desc = u'操作员(%s)新增区域信息:%s' % (get_cookie("username"), node.node_name) db.add(ops_log) db.commit() redirect("/node") permit.add_route("/node/add", u"新增区域", u"系统管理", order=1.01, is_open=False) @app.get('/update', apply=auth_opr) def node_update(db, render): node_id = request.params.get("node_id") form = node_forms.node_update_form() form.fill(db.query(models.SlcNode).get(node_id)) return render("base_form", form=form) @app.post('/update', apply=auth_opr) def node_add_update(db, render): form = node_forms.node_update_form() if not form.validates(source=request.forms): return render("base_form", form=form) node = db.query(models.SlcNode).get(form.d.id) node.node_name = form.d.node_name node.node_desc = form.d.node_desc ops_log = models.SlcRadOperateLog() ops_log.operator_name = 
get_cookie("username") ops_log.operate_ip = get_cookie("login_ip") ops_log.operate_time = utils.get_currtime() ops_log.operate_desc = u'操作员(%s)修改区域信息:%s' % (get_cookie("username"), node.node_name) db.add(ops_log) db.commit() redirect("/node") permit.add_route("/node/update", u"修改区域", u"系统管理", order=1.02, is_open=False) @app.get('/delete', apply=auth_opr) def node_delete(db, render): node_id = request.params.get("node_id") if db.query(models.SlcMember.member_id).filter_by(node_id=node_id).count() > 0: return render("error", msg=u"该节点下有用户,不允许删除") db.query(models.SlcNode).filter_by(id=node_id).delete() ops_log = models.SlcRadOperateLog() ops_log.operator_name = get_cookie("username") ops_log.operate_ip = get_cookie("login_ip") ops_log.operate_time = utils.get_currtime() ops_log.operate_desc = u'操作员(%s)删除区域信息:%s' % (get_cookie("username"), node_id) db.add(ops_log) db.commit() redirect("/node") permit.add_route("/node/delete", u"删除区域", u"系统管理", order=1.03, is_open=False) import unittest from minerva.storage.valuedescriptor import ValueDescriptor from minerva.storage.outputdescriptor import OutputDescriptor from minerva.storage import datatype class TestOutputDescriptor(unittest.TestCase): def test_constructor(self): value_descriptor = ValueDescriptor( 'x', datatype.registry['smallint'] ) output_descriptor = OutputDescriptor( value_descriptor ) assert output_descriptor is not None def test_serialize_smallint(self): output_descriptor = OutputDescriptor( ValueDescriptor('x', datatype.registry['smallint']) ) assert output_descriptor.serialize(43) == '43' def test_load_from_config(self): config = { 'name': 'x', 'data_type': 'smallint', 'serializer_config': { } } output_descriptor = OutputDescriptor.load(config) self.assertIs( output_descriptor.value_descriptor.data_type, datatype.registry['smallint'] ) self.assertEqual(output_descriptor.value_descriptor.name, 'x') self.assertEqual(config, output_descriptor.to_dict()) # -*- coding: utf-8 -*- """ markupsafe._constants 
~~~~~~~~~~~~~~~~~~~~~ Highlevel implementation of the Markup string. :copyright: (c) 2010 by Armin Ronacher. :license: BSD, see LICENSE for more details. """ HTML_ENTITIES = { 'AElig': 198, 'Aacute': 193, 'Acirc': 194, 'Agrave': 192, 'Alpha': 913, 'Aring': 197, 'Atilde': 195, 'Auml': 196, 'Beta': 914, 'Ccedil': 199, 'Chi': 935, 'Dagger': 8225, 'Delta': 916, 'ETH': 208, 'Eacute': 201, 'Ecirc': 202, 'Egrave': 200, 'Epsilon': 917, 'Eta': 919, 'Euml': 203, 'Gamma': 915, 'Iacute': 205, 'Icirc': 206, 'Igrave': 204, 'Iota': 921, 'Iuml': 207, 'Kappa': 922, 'Lambda': 923, 'Mu': 924, 'Ntilde': 209, 'Nu': 925, 'OElig': 338, 'Oacute': 211, 'Ocirc': 212, 'Ograve': 210, 'Omega': 937, 'Omicron': 927, 'Oslash': 216, 'Otilde': 213, 'Ouml': 214, 'Phi': 934, 'Pi': 928, 'Prime': 8243, 'Psi': 936, 'Rho': 929, 'Scaron': 352, 'Sigma': 931, 'THORN': 222, 'Tau': 932, 'Theta': 920, 'Uacute': 218, 'Ucirc': 219, 'Ugrave': 217, 'Upsilon': 933, 'Uuml': 220, 'Xi': 926, 'Yacute': 221, 'Yuml': 376, 'Zeta': 918, 'aacute': 225, 'acirc': 226, 'acute': 180, 'aelig': 230, 'agrave': 224, 'alefsym': 8501, 'alpha': 945, 'amp': 38, 'and': 8743, 'ang': 8736, 'apos': 39, 'aring': 229, 'asymp': 8776, 'atilde': 227, 'auml': 228, 'bdquo': 8222, 'beta': 946, 'brvbar': 166, 'bull': 8226, 'cap': 8745, 'ccedil': 231, 'cedil': 184, 'cent': 162, 'chi': 967, 'circ': 710, 'clubs': 9827, 'cong': 8773, 'copy': 169, 'crarr': 8629, 'cup': 8746, 'curren': 164, 'dArr': 8659, 'dagger': 8224, 'darr': 8595, 'deg': 176, 'delta': 948, 'diams': 9830, 'divide': 247, 'eacute': 233, 'ecirc': 234, 'egrave': 232, 'empty': 8709, 'emsp': 8195, 'ensp': 8194, 'epsilon': 949, 'equiv': 8801, 'eta': 951, 'eth': 240, 'euml': 235, 'euro': 8364, 'exist': 8707, 'fnof': 402, 'forall': 8704, 'frac12': 189, 'frac14': 188, 'frac34': 190, 'frasl': 8260, 'gamma': 947, 'ge': 8805, 'gt': 62, 'hArr': 8660, 'harr': 8596, 'hearts': 9829, 'hellip': 8230, 'iacute': 237, 'icirc': 238, 'iexcl': 161, 'igrave': 236, 'image': 8465, 'infin': 8734, 'int': 8747, 
'iota': 953, 'iquest': 191, 'isin': 8712, 'iuml': 239, 'kappa': 954, 'lArr': 8656, 'lambda': 955, 'lang': 9001, 'laquo': 171, 'larr': 8592, 'lceil': 8968, 'ldquo': 8220, 'le': 8804, 'lfloor': 8970, 'lowast': 8727, 'loz': 9674, 'lrm': 8206, 'lsaquo': 8249, 'lsquo': 8216, 'lt': 60, 'macr': 175, 'mdash': 8212, 'micro': 181, 'middot': 183, 'minus': 8722, 'mu': 956, 'nabla': 8711, 'nbsp': 160, 'ndash': 8211, 'ne': 8800, 'ni': 8715, 'not': 172, 'notin': 8713, 'nsub': 8836, 'ntilde': 241, 'nu': 957, 'oacute': 243, 'ocirc': 244, 'oelig': 339, 'ograve': 242, 'oline': 8254, 'omega': 969, 'omicron': 959, 'oplus': 8853, 'or': 8744, 'ordf': 170, 'ordm': 186, 'oslash': 248, 'otilde': 245, 'otimes': 8855, 'ouml': 246, 'para': 182, 'part': 8706, 'permil': 8240, 'perp': 8869, 'phi': 966, 'pi': 960, 'piv': 982, 'plusmn': 177, 'pound': 163, 'prime': 8242, 'prod': 8719, 'prop': 8733, 'psi': 968, 'quot': 34, 'rArr': 8658, 'radic': 8730, 'rang': 9002, 'raquo': 187, 'rarr': 8594, 'rceil': 8969, 'rdquo': 8221, 'real': 8476, 'reg': 174, 'rfloor': 8971, 'rho': 961, 'rlm': 8207, 'rsaquo': 8250, 'rsquo': 8217, 'sbquo': 8218, 'scaron': 353, 'sdot': 8901, 'sect': 167, 'shy': 173, 'sigma': 963, 'sigmaf': 962, 'sim': 8764, 'spades': 9824, 'sub': 8834, 'sube': 8838, 'sum': 8721, 'sup': 8835, 'sup1': 185, 'sup2': 178, 'sup3': 179, 'supe': 8839, 'szlig': 223, 'tau': 964, 'there4': 8756, 'theta': 952, 'thetasym': 977, 'thinsp': 8201, 'thorn': 254, 'tilde': 732, 'times': 215, 'trade': 8482, 'uArr': 8657, 'uacute': 250, 'uarr': 8593, 'ucirc': 251, 'ugrave': 249, 'uml': 168, 'upsih': 978, 'upsilon': 965, 'uuml': 252, 'weierp': 8472, 'xi': 958, 'yacute': 253, 'yen': 165, 'yuml': 255, 'zeta': 950, 'zwj': 8205, 'zwnj': 8204 } # -*- encoding: utf-8 -*- ############################################################################## # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published # by the Free Software 
Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see http://www.gnu.org/licenses/. # ############################################################################## from openerp import models, fields, api, _ import sys class MrpWorkcenter(models.Model): _inherit = 'mrp.workcenter' capacity_per_cycle = fields.Float( string='Capacity per Cycle Max.', help='Capacity per cycle maximum.') capacity_per_cycle_min = fields.Float( string='Capacity per Cycle Min.', help='Capacity per cycle minimum.') class MrpRoutingWorkcenter(models.Model): _inherit = 'mrp.routing.workcenter' limited_production_capacity = fields.Boolean() class MrpProduction(models.Model): _inherit = 'mrp.production' @api.multi def product_qty_change_production_capacity(self, product_qty=0, routing_id=False): result = {} routing_obj = self.env['mrp.routing'] if product_qty and routing_id: routing = routing_obj.browse(routing_id) for line in routing.workcenter_lines: if line.limited_production_capacity: capacity_min = ( line.workcenter_id.capacity_per_cycle_min or sys.float_info.min) capacity_max = (line.workcenter_id.capacity_per_cycle or sys.float_info.max) if capacity_min and capacity_max: if (product_qty < capacity_min or product_qty > capacity_max): warning = { 'title': _('Warning!'), 'message': _('Product QTY < Capacity per cycle' ' minimun, or > Capacity per' ' cycle maximun') } result['warning'] = warning return result @api.one @api.onchange('routing_id') def onchange_routing(self): if self.routing_id: for line in self.routing_id.workcenter_lines: if (line.limited_production_capacity and 
line.workcenter_id.capacity_per_cycle): self.product_qty = line.workcenter_id.capacity_per_cycle class MrpProductionWorkcenterLine(models.Model): _inherit = 'mrp.production.workcenter.line' @api.multi def workcenter_change_production_capacity(self, product_qty=0, workcenter_id=False): result = {} result['value'] = {} workcenter_obj = self.env['mrp.workcenter'] if product_qty and workcenter_id: workcenter = workcenter_obj.browse(workcenter_id) capacity_min = (workcenter.capacity_per_cycle_min or sys.float_info.min) capacity_max = (workcenter.capacity_per_cycle or sys.float_info.max) if capacity_min and capacity_max: if (product_qty < capacity_min or product_qty > capacity_max): warning = { 'title': _('Warning!'), 'message': _('Product QTY < Capacity per cycle' ' minimun, or > Capacity per' ' cycle maximun') } result['warning'] = warning return result """ End-to-end test for cohorted courseware. This uses both Studio and LMS. """ import json from nose.plugins.attrib import attr from studio.base_studio_test import ContainerBase from ..pages.studio.settings_group_configurations import GroupConfigurationsPage from ..pages.studio.auto_auth import AutoAuthPage as StudioAutoAuthPage from ..fixtures.course import XBlockFixtureDesc from ..fixtures import LMS_BASE_URL from ..pages.studio.component_editor import ComponentVisibilityEditorView from ..pages.lms.instructor_dashboard import InstructorDashboardPage from ..pages.lms.courseware import CoursewarePage from ..pages.lms.auto_auth import AutoAuthPage as LmsAutoAuthPage from ..tests.lms.test_lms_user_preview import verify_expected_problem_visibility from bok_choy.promise import EmptyPromise @attr('shard_5') class EndToEndCohortedCoursewareTest(ContainerBase): def setUp(self, is_staff=True): super(EndToEndCohortedCoursewareTest, self).setUp(is_staff=is_staff) self.staff_user = self.user self.content_group_a = "Content Group A" self.content_group_b = "Content Group B" # Create a student who will be in "Cohort A" 
self.cohort_a_student_username = "cohort_a_student" self.cohort_a_student_email = "cohort_a_student@example.com" StudioAutoAuthPage( self.browser, username=self.cohort_a_student_username, email=self.cohort_a_student_email, no_login=True ).visit() # Create a student who will be in "Cohort B" self.cohort_b_student_username = "cohort_b_student" self.cohort_b_student_email = "cohort_b_student@example.com" StudioAutoAuthPage( self.browser, username=self.cohort_b_student_username, email=self.cohort_b_student_email, no_login=True ).visit() # Create a student who will end up in the default cohort group self.cohort_default_student_username = "cohort_default_student" self.cohort_default_student_email = "cohort_default_student@example.com" StudioAutoAuthPage( self.browser, username=self.cohort_default_student_username, email=self.cohort_default_student_email, no_login=True ).visit() # Start logged in as the staff user. StudioAutoAuthPage( self.browser, username=self.staff_user["username"], email=self.staff_user["email"] ).visit() def populate_course_fixture(self, course_fixture): """ Populate the children of the test course fixture. """ self.group_a_problem = 'GROUP A CONTENT' self.group_b_problem = 'GROUP B CONTENT' self.group_a_and_b_problem = 'GROUP A AND B CONTENT' self.visible_to_all_problem = 'VISIBLE TO ALL CONTENT' course_fixture.add_children( XBlockFixtureDesc('chapter', 'Test Section').add_children( XBlockFixtureDesc('sequential', 'Test Subsection').add_children( XBlockFixtureDesc('vertical', 'Test Unit').add_children( XBlockFixtureDesc('problem', self.group_a_problem, data=''), XBlockFixtureDesc('problem', self.group_b_problem, data=''), XBlockFixtureDesc('problem', self.group_a_and_b_problem, data=''), XBlockFixtureDesc('problem', self.visible_to_all_problem, data='') ) ) ) ) def enable_cohorting(self, course_fixture): """ Enables cohorting for the current course. 
""" url = LMS_BASE_URL + "/courses/" + course_fixture._course_key + '/cohorts/settings' # pylint: disable=protected-access data = json.dumps({'is_cohorted': True}) response = course_fixture.session.patch(url, data=data, headers=course_fixture.headers) self.assertTrue(response.ok, "Failed to enable cohorts") def create_content_groups(self): """ Creates two content groups in Studio Group Configurations Settings. """ group_configurations_page = GroupConfigurationsPage( self.browser, self.course_info['org'], self.course_info['number'], self.course_info['run'] ) group_configurations_page.visit() group_configurations_page.create_first_content_group() config = group_configurations_page.content_groups[0] config.name = self.content_group_a config.save() group_configurations_page.add_content_group() config = group_configurations_page.content_groups[1] config.name = self.content_group_b config.save() def link_problems_to_content_groups_and_publish(self): """ Updates 3 of the 4 existing problems to limit their visibility by content group. Publishes the modified units. """ container_page = self.go_to_unit_page() def set_visibility(problem_index, content_group, second_content_group=None): problem = container_page.xblocks[problem_index] problem.edit_visibility() if second_content_group: ComponentVisibilityEditorView(self.browser, problem.locator).select_option( second_content_group, save=False ) ComponentVisibilityEditorView(self.browser, problem.locator).select_option(content_group) set_visibility(1, self.content_group_a) set_visibility(2, self.content_group_b) set_visibility(3, self.content_group_a, self.content_group_b) container_page.publish_action.click() def create_cohorts_and_assign_students(self): """ Adds 2 manual cohorts, linked to content groups, to the course. Each cohort is assigned one student. 
""" instructor_dashboard_page = InstructorDashboardPage(self.browser, self.course_id) instructor_dashboard_page.visit() cohort_management_page = instructor_dashboard_page.select_cohort_management() def add_cohort_with_student(cohort_name, content_group, student): cohort_management_page.add_cohort(cohort_name, content_group=content_group) # After adding the cohort, it should automatically be selected EmptyPromise( lambda: cohort_name == cohort_management_page.get_selected_cohort(), "Waiting for new cohort" ).fulfill() cohort_management_page.add_students_to_selected_cohort([student]) add_cohort_with_student("Cohort A", self.content_group_a, self.cohort_a_student_username) add_cohort_with_student("Cohort B", self.content_group_b, self.cohort_b_student_username) def view_cohorted_content_as_different_users(self): """ View content as staff, student in Cohort A, student in Cohort B, and student in Default Cohort. """ courseware_page = CoursewarePage(self.browser, self.course_id) def login_and_verify_visible_problems(username, email, expected_problems): LmsAutoAuthPage( self.browser, username=username, email=email, course_id=self.course_id ).visit() courseware_page.visit() verify_expected_problem_visibility(self, courseware_page, expected_problems) login_and_verify_visible_problems( self.staff_user["username"], self.staff_user["email"], [self.group_a_problem, self.group_b_problem, self.group_a_and_b_problem, self.visible_to_all_problem] ) login_and_verify_visible_problems( self.cohort_a_student_username, self.cohort_a_student_email, [self.group_a_problem, self.group_a_and_b_problem, self.visible_to_all_problem] ) login_and_verify_visible_problems( self.cohort_b_student_username, self.cohort_b_student_email, [self.group_b_problem, self.group_a_and_b_problem, self.visible_to_all_problem] ) login_and_verify_visible_problems( self.cohort_default_student_username, self.cohort_default_student_email, [self.visible_to_all_problem] ) def test_cohorted_courseware(self): """ 
Scenario: Can create content that is only visible to students in particular cohorts Given that I have course with 4 problems, 1 staff member, and 3 students When I enable cohorts in the course And I create two content groups, Content Group A, and Content Group B, in the course And I link one problem to Content Group A And I link one problem to Content Group B And I link one problem to both Content Group A and Content Group B And one problem remains unlinked to any Content Group And I create two manual cohorts, Cohort A and Cohort B, linked to Content Group A and Content Group B, respectively And I assign one student to each manual cohort And one student remains in the default cohort Then the staff member can see all 4 problems And the student in Cohort A can see all the problems except the one linked to Content Group B And the student in Cohort B can see all the problems except the one linked to Content Group A And the student in the default cohort can ony see the problem that is unlinked to any Content Group """ self.enable_cohorting(self.course_fixture) self.create_content_groups() self.link_problems_to_content_groups_and_publish() self.create_cohorts_and_assign_students() self.view_cohorted_content_as_different_users() import os import bz2 import sys import logging import traceback import cStringIO import tempfile from django.conf import settings from django.http import HttpResponse, HttpResponseBadRequest from django.core.servers.basehttp import FileWrapper from django.core import serializers from buildmanager.models import Project, ProjectBuild from transformers.zip import TarCompressor, build_tarfile from hq.models import Domain from django_rest_interface import util def get_builds(request): """Takes a POST containing a tar of all MD5's and returns a tar of all missing submissions Heh, this is explicitly against good REST methodology We leave this inside the django-rest 'Resource' so we can use their authentication tools """ try: return _get_builds(request) 
except Exception, e: type, value, tb = sys.exc_info() logging.error( "EXCEPTION raised: %s" % (str(e)) ) logging.error( "TRACEBACK:\n%s" % ('\n'.join(traceback.format_tb(tb))) ) raise return HttpResponseBadRequest( "Exception raised %s." % e ) def get_builds_for_domain(request, domain_id): """Takes a POST containing a tar of all MD5's and returns a tar of all missing submissions Heh, this is explicitly against good REST methodology We leave this inside the django-rest 'Resource' so we can use their authentication tools """ try: return _get_submissions(request, domain_id) except Exception, e: type, value, tb = sys.exc_info() logging.error( "EXCEPTION raised: %s" % (str(e)) ) logging.error( "TRACEBACK:\n%s" % ('\n'.join(traceback.format_tb(tb))) ) return HttpResponseBadRequest( "Exception raised %s." % e ) def _get_builds(request, domain_id=None): projects = Project.objects.all() if domain_id: # filter on domain, if it's set try: domain = Domain.objects.get(id=domain_id) except Domain.DoesNotExist: logging.error("Domain with id %s could not found." % domain_id) return HttpResponseBadRequest("Domain with id %s could not found." % domain_id) projects = projects.filter(domain=domain) if 'export_path' not in settings.RAPIDSMS_APPS['buildmanager']: logging.error("Please set 'export_path' in your hq buildmanager settings.") return HttpResponseBadRequest("Please set 'export_path' in your hq buildmanager settings.") export_dir = settings.RAPIDSMS_APPS['buildmanager']['export_path'] # For now this is RESTful, and de-facto returns all projects and builds. # At some point we may require this to take in a list of guids or # checksums much like the receiver does. 
if projects.count() == 0: logging.info("No projects could be found.") return HttpResponse("No projects could be found.") builds = ProjectBuild.objects.filter(project__in=projects) if builds.count() == 0: logging.info("No builds could be found.") return HttpResponse("No builds could be found.") compressor = TarCompressor() export_path = os.path.join( export_dir, "commcarehq-builds.tar") compressor.open(name=export_path) # add the root project summaries to the compressor _add_to_compressor(compressor, _get_project_summary(projects), "projects.json") tars = [] for build in builds: try: summary_tar = _get_build_summary(build) tars.append(summary_tar) compressor.add_file(summary_tar.name) except Exception, e: logging.error("Unable to export build: %s. Error is %s." % (build, e)) raise compressor.close() response = HttpResponse() response['Content-Length'] = os.path.getsize(export_path) fin = open(export_path, 'rb') wrapper = FileWrapper(fin) response = HttpResponse(wrapper, content_type='application/tar') response['Content-Disposition'] = 'attachment; filename=commcarehq-builds.tar' return response def _get_project_summary(projects): """Returns a single json string with the summary of the projects""" return serializers.serialize('json', projects) def _get_build_summary(build): """Package a build's metadata with its jad and jar and return it as a tarball""" temp_tar_path = tempfile.TemporaryFile().name temp_json_path = os.path.join(tempfile.tempdir, "build%s.json" % build.id) json_file = open(temp_json_path, "wb") json_file.write(serializers.serialize('json', [build])) json_file.close() tarball = build_tarfile([json_file.name, build.jar_file, build.jad_file], temp_tar_path) tarball.close() return tarball def _get_build_filename(build): """A unique but semi-readable filename to reference the build""" return "%s-%s-%s.build" % (build.project.domain.name, build.project.name, build.id) def _add_to_compressor(compressor, data, filename): """Add some data to the (assumed to be 
open) tar archive""" compressor.add_stream(cStringIO.StringIO( data ), len(data), name=filename) def _add_stream_to_compressor(compressor, data, length, filename): """Add some data to the (assumed to be open) tar archive""" compressor.add_stream(data, length, name=filename) # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from ....testing import assert_equal from ..preprocess import ApplyDeformations def test_ApplyDeformations_inputs(): input_map = dict(deformation_field=dict(field='comp{1}.def', mandatory=True, ), ignore_exception=dict(nohash=True, usedefault=True, ), in_files=dict(field='fnames', mandatory=True, ), interp=dict(field='interp', ), matlab_cmd=dict(), mfile=dict(usedefault=True, ), paths=dict(), reference_volume=dict(field='comp{2}.id.space', mandatory=True, ), use_mcr=dict(), use_v8struct=dict(min_ver='8', usedefault=True, ), ) inputs = ApplyDeformations.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): yield assert_equal, getattr(inputs.traits()[key], metakey), value def test_ApplyDeformations_outputs(): output_map = dict(out_files=dict(), ) outputs = ApplyDeformations.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): yield assert_equal, getattr(outputs.traits()[key], metakey), value """Text widgets""" from __future__ import division, absolute_import, unicode_literals import math from qtpy import QtCore from qtpy import QtGui from qtpy import QtWidgets from qtpy.QtCore import Qt from qtpy.QtCore import Signal from ..qtutils import get from .. import hotkeys from .. import qtutils from ..i18n import N_ from ..models import prefs from . 
import defs def get_stripped(widget): return widget.get().strip() class LineEdit(QtWidgets.QLineEdit): cursor_changed = Signal(int, int) def __init__(self, parent=None, row=1, get_value=None, clear_button=False): QtWidgets.QLineEdit.__init__(self, parent) self._row = row if get_value is None: get_value = get_stripped self._get_value = get_value self.cursor_position = LineEditCursorPosition(self, row) if clear_button and hasattr(self, 'setClearButtonEnabled'): self.setClearButtonEnabled(True) def get(self): """Return the raw unicode value from Qt""" return self.text() def value(self): """Return the processed value, e.g. stripped""" return self._get_value(self) def set_value(self, value, block=False): if block: blocksig = self.blockSignals(True) pos = self.cursorPosition() self.setText(value) self.setCursorPosition(pos) if block: self.blockSignals(blocksig) class LineEditCursorPosition(object): """Translate cursorPositionChanged(int,int) into cursorPosition(int,int) """ def __init__(self, widget, row): self._widget = widget self._row = row # Translate cursorPositionChanged into cursor_changed(int, int) widget.cursorPositionChanged.connect(lambda old, new: self.emit()) def emit(self): widget = self._widget row = self._row col = widget.cursorPosition() widget.cursor_changed.emit(row, col) def reset(self): self._widget.setCursorPosition(0) class BaseTextEditExtension(QtCore.QObject): def __init__(self, widget, get_value, readonly): QtCore.QObject.__init__(self, widget) self.widget = widget self.cursor_position = TextEditCursorPosition(widget, self) if get_value is None: get_value = get_stripped self._get_value = get_value self._tabwidth = 8 self._readonly = readonly self._init_flags() self.init() def _init_flags(self): widget = self.widget widget.setMinimumSize(QtCore.QSize(1, 1)) widget.setWordWrapMode(QtGui.QTextOption.WordWrap) widget.setLineWrapMode(widget.NoWrap) widget.setCursorWidth(defs.cursor_width) if self._readonly: widget.setReadOnly(True) 
widget.setAcceptDrops(False) widget.setTabChangesFocus(True) widget.setUndoRedoEnabled(False) widget.setTextInteractionFlags(Qt.TextSelectableByKeyboard | Qt.TextSelectableByMouse) def get(self): """Return the raw unicode value from Qt""" return self.widget.toPlainText() def value(self): """Return a safe value, e.g. a stripped value""" return self._get_value(self.widget) def set_value(self, value, block=False): if block: blocksig = self.widget.blockSignals(True) # Save cursor position offset, selection_text = self.offset_and_selection() old_value = get(self.widget) # Update text self.widget.setPlainText(value) # Restore cursor if selection_text and selection_text in value: # If the old selection exists in the new text then re-select it. idx = value.index(selection_text) cursor = self.widget.textCursor() cursor.setPosition(idx) cursor.setPosition(idx + len(selection_text), QtGui.QTextCursor.KeepAnchor) self.widget.setTextCursor(cursor) elif value == old_value: # Otherwise, if the text is identical and there is no selection # then restore the cursor position. cursor = self.widget.textCursor() cursor.setPosition(offset) self.widget.setTextCursor(cursor) else: # If none of the above applied then restore the cursor position. 
position = max(0, min(offset, len(value) - 1)) cursor = self.widget.textCursor() cursor.setPosition(position) self.widget.setTextCursor(cursor) cursor = self.widget.textCursor() cursor.movePosition(QtGui.QTextCursor.StartOfLine) self.widget.setTextCursor(cursor) if block: self.widget.blockSignals(blocksig) def set_cursor_position(self, new_position): cursor = self.widget.textCursor() cursor.setPosition(new_position) self.widget.setTextCursor(cursor) def tabwidth(self): return self._tabwidth def set_tabwidth(self, width): self._tabwidth = width font = self.widget.font() fm = QtGui.QFontMetrics(font) pixels = fm.width('M' * width) self.widget.setTabStopWidth(pixels) def selected_line(self): contents = self.value() cursor = self.widget.textCursor() offset = min(cursor.position(), len(contents)-1) while (offset >= 1 and contents[offset-1] and contents[offset-1] != '\n'): offset -= 1 data = contents[offset:] if '\n' in data: line, rest = data.split('\n', 1) else: line = data return line def cursor(self): return self.widget.textCursor() def has_selection(self): return self.cursor().hasSelection() def offset_and_selection(self): cursor = self.cursor() offset = cursor.selectionStart() selection_text = cursor.selection().toPlainText() return offset, selection_text def mouse_press_event(self, event): # Move the text cursor so that the right-click events operate # on the current position, not the last left-clicked position. 
widget = self.widget if event.button() == Qt.RightButton: if not widget.textCursor().hasSelection(): cursor = widget.cursorForPosition(event.pos()) widget.setTextCursor(widget.cursorForPosition(event.pos())) # For extension by sub-classes def init(self): """Called during init for class-specific settings""" pass def set_textwidth(self, width): """Set the text width""" pass def set_linebreak(self, brk): """Enable word wrapping""" pass class PlainTextEditExtension(BaseTextEditExtension): def set_linebreak(self, brk): if brk: wrapmode = QtWidgets.QPlainTextEdit.WidgetWidth else: wrapmode = QtWidgets.QPlainTextEdit.NoWrap self.widget.setLineWrapMode(wrapmode) class PlainTextEdit(QtWidgets.QPlainTextEdit): cursor_changed = Signal(int, int) leave = Signal() def __init__(self, parent=None, get_value=None, readonly=False): QtWidgets.QPlainTextEdit.__init__(self, parent) self.ext = PlainTextEditExtension(self, get_value, readonly) self.cursor_position = self.ext.cursor_position def get(self): """Return the raw unicode value from Qt""" return self.ext.get() # For compatibility with QTextEdit def setText(self, value): self.set_value(value) def value(self): """Return a safe value, e.g. 
a stripped value""" return self.ext.value() def set_value(self, value, block=False): self.ext.set_value(value, block=block) def has_selection(self): return self.ext.has_selection() def selected_line(self): return self.ext.selected_line() def set_tabwidth(self, width): self.ext.set_tabwidth(width) def set_textwidth(self, width): self.ext.set_textwidth(width) def set_linebreak(self, brk): self.ext.set_linebreak(brk) def mousePressEvent(self, event): self.ext.mouse_press_event(event) super(PlainTextEdit, self).mousePressEvent(event) def wheelEvent(self, event): """Disable control+wheelscroll text resizing""" if event.modifiers() & Qt.ControlModifier: event.ignore() return return super(PlainTextEdit, self).wheelEvent(event) class TextEditExtension(BaseTextEditExtension): def init(self): widget = self.widget widget.setAcceptRichText(False) def set_linebreak(self, brk): if brk: wrapmode = QtWidgets.QTextEdit.FixedColumnWidth else: wrapmode = QtWidgets.QTextEdit.NoWrap self.widget.setLineWrapMode(wrapmode) def set_textwidth(self, width): self.widget.setLineWrapColumnOrWidth(width) class TextEdit(QtWidgets.QTextEdit): cursor_changed = Signal(int, int) leave = Signal() def __init__(self, parent=None, get_value=None, readonly=False): QtWidgets.QTextEdit.__init__(self, parent) self.ext = TextEditExtension(self, get_value, readonly) self.cursor_position = self.ext.cursor_position def get(self): """Return the raw unicode value from Qt""" return self.ext.get() def value(self): """Return a safe value, e.g. 
a stripped value""" return self.ext.value() def set_value(self, value, block=False): self.ext.set_value(value, block=block) def selected_line(self): return self.ext.selected_line() def set_tabwidth(self, width): self.ext.set_tabwidth(width) def set_textwidth(self, width): self.ext.set_textwidth(width) def set_linebreak(self, brk): self.ext.set_linebreak(brk) def mousePressEvent(self, event): self.ext.mouse_press_event(event) super(TextEdit, self).mousePressEvent(event) def wheelEvent(self, event): """Disable control+wheelscroll text resizing""" if event.modifiers() & Qt.ControlModifier: event.ignore() return return super(TextEdit, self).wheelEvent(event) def should_expandtab(self, event): return event.key() == Qt.Key_Tab and prefs.expandtab() def expandtab(self): tabwidth = max(self.ext.tabwidth(), 1) cursor = self.textCursor() cursor.insertText(' ' * tabwidth) def keyPressEvent(self, event): expandtab = self.should_expandtab(event) if expandtab: self.expandtab() event.accept() else: QtWidgets.QTextEdit.keyPressEvent(self, event) def keyReleaseEvent(self, event): expandtab = self.should_expandtab(event) if expandtab: event.ignore() else: QtWidgets.QTextEdit.keyReleaseEvent(self, event) class TextEditCursorPosition(object): def __init__(self, widget, ext): self._widget = widget self._ext = ext widget.cursorPositionChanged.connect(self.emit) def emit(self): widget = self._widget ext = self._ext cursor = widget.textCursor() position = cursor.position() txt = widget.get() before = txt[:position] row = before.count('\n') line = before.split('\n')[row] col = cursor.columnNumber() col += line[:col].count('\t') * (ext.tabwidth() - 1) widget.cursor_changed.emit(row+1, col) def reset(self): widget = self._widget cursor = widget.textCursor() cursor.setPosition(0) widget.setTextCursor(cursor) def setup_mono_font(widget): widget.setFont(qtutils.diff_font()) widget.set_tabwidth(prefs.tabwidth()) class MonoTextEdit(PlainTextEdit): def __init__(self, parent=None, readonly=False): 
PlainTextEdit.__init__(self, parent=parent, readonly=readonly) setup_mono_font(self) def get_value_hinted(widget): text = get_stripped(widget) hint = get(widget.hint) if text == hint: return '' else: return text class HintWidget(QtCore.QObject): """Extend a widget to provide hint messages This primarily exists because setPlaceholderText() is only available in Qt5, so this class provides consistent behavior across versions. """ def __init__(self, widget, hint): QtCore.QObject.__init__(self, widget) self._widget = widget self._hint = hint self._is_error = False self.modern = modern = hasattr(widget, 'setPlaceholderText') if modern: widget.setPlaceholderText(hint) # Palette for normal text QPalette = QtGui.QPalette palette = widget.palette() hint_color = palette.color(QPalette.Disabled, QPalette.Text) error_bg_color = QtGui.QColor(Qt.red).darker() error_fg_color = QtGui.QColor(Qt.white) hint_rgb = qtutils.rgb_css(hint_color) error_bg_rgb = qtutils.rgb_css(error_bg_color) error_fg_rgb = qtutils.rgb_css(error_fg_color) env = dict(name=widget.__class__.__name__, error_fg_rgb=error_fg_rgb, error_bg_rgb=error_bg_rgb, hint_rgb=hint_rgb) self._default_style = '' self._hint_style = """ %(name)s { color: %(hint_rgb)s; } """ % env self._error_style = """ %(name)s { color: %(error_fg_rgb)s; background-color: %(error_bg_rgb)s; } """ % env def init(self): """Defer initialization to avoid circular dependencies during construction""" if self.modern: self.widget().setPlaceholderText(self.value()) else: self.widget().installEventFilter(self) self.enable(True) def widget(self): """Return the parent text widget""" return self._widget def active(self): """Return True when hint-mode is active""" return self.value() == get_stripped(self._widget) def value(self): """Return the current hint text""" return self._hint def set_error(self, is_error): """Enable/disable error mode""" self._is_error = is_error self.refresh() def set_value(self, hint): """Change the hint text""" if self.modern: 
self._hint = hint self._widget.setPlaceholderText(hint) else: # If hint-mode is currently active, re-activate it active = self.active() self._hint = hint if active or self.active(): self.enable(True) def enable(self, enable): """Enable/disable hint-mode""" if not self.modern: if enable and self._hint: self._widget.set_value(self._hint, block=True) self._widget.cursor_position.reset() else: self._widget.clear() self._update_palette(enable) def refresh(self): """Update the palette to match the current mode""" self._update_palette(self.active()) def _update_palette(self, hint): """Update to palette for normal/error/hint mode""" if self._is_error: style = self._error_style elif not self.modern and hint: style = self._hint_style else: style = self._default_style self._widget.setStyleSheet(style) def eventFilter(self, obj, event): """Enable/disable hint-mode when focus changes""" etype = event.type() if etype == QtCore.QEvent.FocusIn: self.focus_in() elif etype == QtCore.QEvent.FocusOut: self.focus_out() return False def focus_in(self): """Disable hint-mode when focused""" widget = self.widget() if self.active(): self.enable(False) widget.cursor_position.emit() def focus_out(self): """Re-enable hint-mode when losing focus""" widget = self.widget() if not get(widget): self.enable(True) class HintedPlainTextEdit(PlainTextEdit): """A hinted plain text edit""" def __init__(self, hint, parent=None, readonly=False): PlainTextEdit.__init__(self, parent=parent, get_value=get_value_hinted, readonly=readonly) self.hint = HintWidget(self, hint) self.hint.init() setup_mono_font(self) # Refresh palettes when text changes self.textChanged.connect(self.hint.refresh) def set_value(self, value, block=False): """Set the widget text or enable hint mode when empty""" if value or self.hint.modern: PlainTextEdit.set_value(self, value, block=block) else: self.hint.enable(True) class HintedTextEdit(TextEdit): """A hinted text edit""" def __init__(self, hint, parent=None, readonly=False): 
TextEdit.__init__(self, parent=parent, get_value=get_value_hinted, readonly=readonly) self.hint = HintWidget(self, hint) self.hint.init() setup_mono_font(self) # Refresh palettes when text changes self.textChanged.connect(self.hint.refresh) def set_value(self, value, block=False): """Set the widget text or enable hint mode when empty""" if value or self.hint.modern: TextEdit.set_value(self, value, block=block) else: self.hint.enable(True) # The vim-like read-only text view class VimMixin(object): def __init__(self, widget): self.widget = widget self.Base = widget.Base # Common vim/unix-ish keyboard actions self.add_navigation('Up', hotkeys.MOVE_UP, shift=hotkeys.MOVE_UP_SHIFT) self.add_navigation('Down', hotkeys.MOVE_DOWN, shift=hotkeys.MOVE_DOWN_SHIFT) self.add_navigation('Left', hotkeys.MOVE_LEFT, shift=hotkeys.MOVE_LEFT_SHIFT) self.add_navigation('Right', hotkeys.MOVE_RIGHT, shift=hotkeys.MOVE_RIGHT_SHIFT) self.add_navigation('WordLeft', hotkeys.WORD_LEFT) self.add_navigation('WordRight', hotkeys.WORD_RIGHT) self.add_navigation('StartOfLine', hotkeys.START_OF_LINE) self.add_navigation('EndOfLine', hotkeys.END_OF_LINE) qtutils.add_action(widget, 'PageUp', lambda: widget.page(-widget.height()//2), hotkeys.SECONDARY_ACTION) qtutils.add_action(widget, 'PageDown', lambda: widget.page(widget.height()//2), hotkeys.PRIMARY_ACTION) def add_navigation(self, name, hotkey, shift=None): """Add a hotkey along with a shift-variant""" widget = self.widget direction = getattr(QtGui.QTextCursor, name) qtutils.add_action(widget, name, lambda: self.move(direction), hotkey) if shift: qtutils.add_action(widget, 'Shift' + name, lambda: self.move(direction, True), shift) def move(self, direction, select=False, n=1): widget = self.widget cursor = widget.textCursor() if select: mode = QtGui.QTextCursor.KeepAnchor else: mode = QtGui.QTextCursor.MoveAnchor if cursor.movePosition(direction, mode, n): self.set_text_cursor(cursor) def page(self, offset): widget = self.widget rect = 
widget.cursorRect() x = rect.x() y = rect.y() + offset new_cursor = widget.cursorForPosition(QtCore.QPoint(x, y)) if new_cursor is not None: self.set_text_cursor(new_cursor) def set_text_cursor(self, cursor): widget = self.widget widget.setTextCursor(cursor) widget.ensureCursorVisible() widget.viewport().update() def keyPressEvent(self, event): """Custom keyboard behaviors The leave() signal is emitted when `Up` is pressed and we're already at the beginning of the text. This allows the parent widget to orchestrate some higher-level interaction, such as giving focus to another widget. When in the middle of the first line and `Up` is pressed, the cursor is moved to the beginning of the line. """ widget = self.widget if event.key() == Qt.Key_Up: cursor = widget.textCursor() position = cursor.position() if position == 0: # The cursor is at the beginning of the line. # Emit a signal so that the parent can e.g. change focus. widget.leave.emit() elif get(widget)[:position].count('\n') == 0: # The cursor is in the middle of the first line of text. # We can't go up ~ jump to the beginning of the line. # Select the text if shift is pressed. if event.modifiers() & Qt.ShiftModifier: mode = QtGui.QTextCursor.KeepAnchor else: mode = QtGui.QTextCursor.MoveAnchor cursor.movePosition(QtGui.QTextCursor.StartOfLine, mode) widget.setTextCursor(cursor) return self.Base.keyPressEvent(widget, event) class VimHintedPlainTextEdit(HintedPlainTextEdit): """HintedPlainTextEdit with vim hotkeys This can only be used in read-only mode. 
""" Base = HintedPlainTextEdit Mixin = VimMixin def __init__(self, hint, parent=None): HintedPlainTextEdit.__init__(self, hint, parent=parent, readonly=True) self._mixin = self.Mixin(self) def move(self, direction, select=False, n=1): return self._mixin.page(direction, select=select, n=n) def page(self, offset): return self._mixin.page(offset) def keyPressEvent(self, event): return self._mixin.keyPressEvent(event) class VimTextEdit(MonoTextEdit): """Text viewer with vim-like hotkeys This can only be used in read-only mode. """ Base = MonoTextEdit Mixin = VimMixin def __init__(self, parent=None): MonoTextEdit.__init__(self, parent=None, readonly=True) self._mixin = self.Mixin(self) def move(self, direction, select=False, n=1): return self._mixin.page(direction, select=select, n=n) def page(self, offset): return self._mixin.page(offset) def keyPressEvent(self, event): return self._mixin.keyPressEvent(event) class HintedLineEdit(LineEdit): def __init__(self, hint, parent=None): LineEdit.__init__(self, parent=parent, get_value=get_value_hinted) self.hint = HintWidget(self, hint) self.hint.init() self.setFont(qtutils.diff_font()) self.textChanged.connect(lambda text: self.hint.refresh()) def text_dialog(text, title): """Show a wall of text in a dialog""" parent = qtutils.active_window() label = QtWidgets.QLabel(parent) label.setFont(qtutils.diff_font()) label.setText(text) label.setMargin(defs.large_margin) text_flags = Qt.TextSelectableByKeyboard | Qt.TextSelectableByMouse label.setTextInteractionFlags(text_flags) widget = QtWidgets.QDialog(parent) widget.setWindowModality(Qt.WindowModal) widget.setWindowTitle(title) scroll = QtWidgets.QScrollArea() scroll.setWidget(label) layout = qtutils.hbox(defs.margin, defs.spacing, scroll) widget.setLayout(layout) qtutils.add_action(widget, N_('Close'), widget.accept, Qt.Key_Question, Qt.Key_Enter, Qt.Key_Return) widget.show() return widget class VimTextBrowser(VimTextEdit): """Text viewer with line number annotations""" def 
__init__(self, parent=None, readonly=False):
        # NOTE(review): chunk starts mid-definition — the ``def`` keyword for
        # this VimTextBrowser initializer sits at the end of the previous
        # chunk line.  ``readonly`` is accepted but not forwarded anywhere;
        # verify against callers before removing it.
        VimTextEdit.__init__(self, parent=parent)
        self.numbers = LineNumbers(self)

    def resizeEvent(self, event):
        """Keep the line-number gutter sized to the editor's viewport."""
        super(VimTextBrowser, self).resizeEvent(event)
        self.numbers.refresh_size()


class TextDecorator(QtWidgets.QWidget):
    """Common functionality for providing line numbers in text widgets"""

    def __init__(self, parent):
        QtWidgets.QWidget.__init__(self, parent)
        self.editor = parent
        # Repaint/relayout the gutter whenever the document or cursor changes.
        parent.blockCountChanged.connect(lambda x: self._refresh_viewport())
        parent.cursorPositionChanged.connect(self.refresh)
        parent.updateRequest.connect(self._refresh_rect)

    def refresh(self):
        """Refresh the numbers display"""
        rect = self.editor.viewport().rect()
        self._refresh_rect(rect, 0)

    def _refresh_rect(self, rect, dy):
        # A vertical delta means the editor scrolled: scroll the gutter along
        # with it; otherwise repaint only the dirty band.
        if dy:
            self.scroll(0, dy)
        else:
            self.update(0, rect.y(), self.width(), rect.height())

        if rect.contains(self.editor.viewport().rect()):
            self._refresh_viewport()

    def _refresh_viewport(self):
        # Reserve horizontal space inside the editor for the gutter.
        self.editor.setViewportMargins(self.width_hint(), 0, 0, 0)

    def refresh_size(self):
        rect = self.editor.contentsRect()
        geom = QtCore.QRect(rect.left(), rect.top(),
                            self.width_hint(), rect.height())
        self.setGeometry(geom)

    def sizeHint(self):
        return QtCore.QSize(self.width_hint(), 0)


class LineNumbers(TextDecorator):
    """Provide line numbers for QPlainTextEdit widgets"""

    def __init__(self, parent):
        TextDecorator.__init__(self, parent)
        # -1 means "no line is explicitly highlighted".
        self.highlight_line = -1

    def width_hint(self):
        document = self.editor.document()
        # Number of digits in the largest line number (via log10), padded so
        # the column never feels cramped.
        digits = int(math.log(max(1, document.blockCount()), 10))
        return defs.margin + self.fontMetrics().width('0') * (digits + 2)

    def set_highlighted(self, line_number):
        """Set the line to highlight"""
        self.highlight_line = line_number

    def paintEvent(self, event):
        """Paint the line number"""
        QPalette = QtGui.QPalette
        painter = QtGui.QPainter(self)
        palette = self.palette()
        painter.fillRect(event.rect(), palette.color(QPalette.Base))

        editor = self.editor
        content_offset = editor.contentOffset()
        block = editor.firstVisibleBlock()
        # NOTE(review): statement cut at the chunk boundary — its right-hand
        # side continues on the next chunk line.
        current_block_number =
max(0, self.editor.textCursor().blockNumber())
        # NOTE(review): the expression above is the continuation of the
        # ``current_block_number = `` assignment cut at the previous chunk
        # line.
        width = self.width()
        event_rect_bottom = event.rect().bottom()

        highlight = palette.color(QPalette.Highlight)
        window = palette.color(QPalette.Window)
        disabled = palette.color(QPalette.Disabled, QPalette.Text)
        painter.setPen(disabled)

        # Walk visible blocks top-to-bottom, painting each line's number and
        # any background highlight.
        while block.isValid():
            block_geom = editor.blockBoundingGeometry(block)
            block_top = block_geom.translated(content_offset).top()
            # Stop once we pass the bottom of the region being repainted.
            if not block.isVisible() or block_top >= event_rect_bottom:
                break
            rect = block_geom.translated(content_offset).toRect()
            block_number = block.blockNumber();
            if block_number == self.highlight_line:
                # Explicitly-requested highlight wins over the cursor line.
                painter.fillRect(rect.x(), rect.y(),
                                 width, rect.height(), highlight)
            elif block_number == current_block_number:
                painter.fillRect(rect.x(), rect.y(),
                                 width, rect.height(), window)
            number = '%s' % (block_number + 1)
            painter.drawText(rect.x(), rect.y(),
                             self.width() - (defs.margin * 2),
                             rect.height(),
                             Qt.AlignRight | Qt.AlignVCenter,
                             number)
            block = block.next()  # pylint: disable=next-method-called


# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

"""TensorFlow Ops to work with embeddings.

Note: categorical variables are handled via embeddings in many cases.
For example, in case of words.
""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensorflow.contrib.framework import deprecated from tensorflow.python.framework import ops from tensorflow.python.ops import array_ops as array_ops_ from tensorflow.python.ops import math_ops from tensorflow.python.ops import nn from tensorflow.python.ops import variable_scope as vs @deprecated('2016-12-01', 'Use `tf.embedding_lookup` instead.') def embedding_lookup(params, ids, name='embedding_lookup'): """Provides a N dimensional version of tf.embedding_lookup. Ids are flattened to a 1d tensor before being passed to embedding_lookup then, they are unflattend to match the original ids shape plus an extra leading dimension of the size of the embeddings. Args: params: List of tensors of size D0 x D1 x ... x Dn-2 x Dn-1. ids: N-dimensional tensor of B0 x B1 x .. x Bn-2 x Bn-1. Must contain indexes into params. name: Optional name for the op. Returns: A tensor of size B0 x B1 x .. x Bn-2 x Bn-1 x D1 x ... x Dn-2 x Dn-1 containing the values from the params tensor(s) for indecies in ids. Raises: ValueError: if some parameters are invalid. """ with ops.name_scope(name, 'embedding_lookup', [params, ids]): params = ops.convert_to_tensor(params) ids = ops.convert_to_tensor(ids) shape = array_ops_.shape(ids) ids_flat = array_ops_.reshape( ids, math_ops.reduce_prod(shape, keep_dims=True)) embeds_flat = nn.embedding_lookup(params, ids_flat, name) embed_shape = array_ops_.concat([shape, [-1]], 0) embeds = array_ops_.reshape(embeds_flat, embed_shape) embeds.set_shape(ids.get_shape().concatenate(params.get_shape()[1:])) return embeds @deprecated('2016-12-01', 'Use `tf.contrib.layers.embed_sequence` instead.') def categorical_variable(tensor_in, n_classes, embedding_size, name): """Creates an embedding for categorical variable with given number of classes. Args: tensor_in: Input tensor with class identifier (can be batch or N-dimensional). 
n_classes: Number of classes.
    embedding_size: Size of embedding vector to represent each class.
    name: Name of this categorical variable.

  Returns:
    Tensor of input shape, with additional dimension for embedding.

  Example:
    Calling categorical_variable([1, 2], 5, 10, "my_cat"), will return 2 x 10
    tensor, where each row is representation of the class.
  """
  with vs.variable_scope(name):
    # One learned embedding row per class; looked up by class id below.
    embeddings = vs.get_variable(name + '_embeddings',
                                 [n_classes, embedding_size])
    return embedding_lookup(embeddings, tensor_in)

# -*- test-case-name: twisted.test.test_task,twisted.test.test_cooperator -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.

"""
Scheduling utility methods and classes.
"""

from __future__ import division, absolute_import

__metaclass__ = type

import sys
import time

from zope.interface import implementer

from twisted.python import log
from twisted.python import reflect
from twisted.python.failure import Failure
from twisted.internet import base, defer
from twisted.internet.interfaces import IReactorTime
from twisted.internet.error import ReactorNotRunning


class LoopingCall:
    """Call a function repeatedly.

    If C{f} returns a deferred, rescheduling will not take place until the
    deferred has fired. The result value is ignored.

    @ivar f: The function to call.
    @ivar a: A tuple of arguments to pass the function.
    @ivar kw: A dictionary of keyword arguments to pass to the function.
    @ivar clock: A provider of
        L{twisted.internet.interfaces.IReactorTime}.  The default is
        L{twisted.internet.reactor}. Feel free to set this to something
        else, but it probably ought to be set *before* calling L{start}.

    @type running: C{bool}
    @ivar running: A flag which is C{True} while C{f} is scheduled to be
        called (or is currently being called). It is set to C{True} when
        L{start} is called and set to C{False} when L{stop} is called or
        if C{f} raises an exception. In either case, it will be C{False}
        by the time the C{Deferred} returned by L{start} fires its
        callback or errback.
@type _expectNextCallAt: C{float}
    @ivar _expectNextCallAt: The time at which this instance most recently
        scheduled itself to run.

    @type _realLastTime: C{float}
    @ivar _realLastTime: When counting skips, the time at which the skip
        counter was last invoked.

    @type _runAtStart: C{bool}
    @ivar _runAtStart: A flag indicating whether the 'now' argument was
        passed to L{LoopingCall.start}.
    """

    call = None
    running = False
    deferred = None
    interval = None
    _expectNextCallAt = 0.0
    _runAtStart = False
    starttime = None

    def __init__(self, f, *a, **kw):
        self.f = f
        self.a = a
        self.kw = kw
        # Imported here (not at module level) to avoid requiring a reactor
        # at import time.
        from twisted.internet import reactor
        self.clock = reactor

    def withCount(cls, countCallable):
        """
        An alternate constructor for L{LoopingCall} that makes available the
        number of calls which should have occurred since it was last
        invoked.  Note that this number is an C{int} value; It represents
        the discrete number of calls that should have been made.  For
        example, if you are using a looping call to display an animation
        with discrete frames, this number would be the number of frames to
        advance.

        The count is normally 1, but can be higher. For example, if the
        reactor is blocked and takes too long to invoke the
        L{LoopingCall}, a Deferred returned from a previous call is not
        fired before an interval has elapsed, or if the callable itself
        blocks for longer than an interval, preventing I{itself} from
        being called.

        @param countCallable: A callable that will be invoked each time the
            resulting LoopingCall is run, with an integer specifying the
            number of calls that should have been invoked.

        @type countCallable: 1-argument callable which takes an C{int}

        @return: An instance of L{LoopingCall} with call counting enabled,
            which provides the count as the first positional argument.
@rtype: L{LoopingCall}

        @since: 9.0
        """
        def counter():
            now = self.clock.seconds()
            lastTime = self._realLastTime
            if lastTime is None:
                # First invocation: measure from the loop's start time; if
                # the loop ran immediately at start, back-date by one
                # interval so that initial call counts as one.
                lastTime = self.starttime
                if self._runAtStart:
                    lastTime -= self.interval
            self._realLastTime = now
            lastInterval = self._intervalOf(lastTime)
            thisInterval = self._intervalOf(now)
            count = thisInterval - lastInterval
            return countCallable(count)

        self = cls(counter)
        self._realLastTime = None
        return self

    withCount = classmethod(withCount)

    def _intervalOf(self, t):
        """
        Determine the number of intervals passed as of the given point in
        time.

        @param t: The specified time (from the start of the L{LoopingCall})
            to be measured in intervals

        @return: The C{int} number of intervals which have passed as of the
            given point in time.
        """
        elapsedTime = t - self.starttime
        intervalNum = int(elapsedTime / self.interval)
        return intervalNum

    def start(self, interval, now=True):
        """
        Start running function every interval seconds.

        @param interval: The number of seconds between calls.  May be less
            than one.  Precision will depend on the underlying platform, the
            available hardware, and the load on the system.

        @param now: If True, run this call right now.  Otherwise, wait until
            the interval has elapsed before beginning.

        @return: A Deferred whose callback will be invoked with C{self} when
            C{self.stop} is called, or whose errback will be invoked when the
            function raises an exception or returned a deferred that has its
            errback invoked.
        """
        assert not self.running, ("Tried to start an already running "
                                  "LoopingCall.")
        if interval < 0:
            raise ValueError("interval must be >= 0")
        self.running = True
        d = self.deferred = defer.Deferred()
        self.starttime = self.clock.seconds()
        self._expectNextCallAt = self.starttime
        self.interval = interval
        self._runAtStart = now
        if now:
            self()
        else:
            self._reschedule()
        return d

    def stop(self):
        """Stop running function.
""" assert self.running, ("Tried to stop a LoopingCall that was " "not running.") self.running = False if self.call is not None: self.call.cancel() self.call = None d, self.deferred = self.deferred, None d.callback(self) def reset(self): """ Skip the next iteration and reset the timer. @since: 11.1 """ assert self.running, ("Tried to reset a LoopingCall that was " "not running.") if self.call is not None: self.call.cancel() self.call = None self._expectNextCallAt = self.clock.seconds() self._reschedule() def __call__(self): def cb(result): if self.running: self._reschedule() else: d, self.deferred = self.deferred, None d.callback(self) def eb(failure): self.running = False d, self.deferred = self.deferred, None d.errback(failure) self.call = None d = defer.maybeDeferred(self.f, *self.a, **self.kw) d.addCallback(cb) d.addErrback(eb) def _reschedule(self): """ Schedule the next iteration of this looping call. """ if self.interval == 0: self.call = self.clock.callLater(0, self) return currentTime = self.clock.seconds() # Find how long is left until the interval comes around again. untilNextTime = (self._expectNextCallAt - currentTime) % self.interval # Make sure it is in the future, in case more than one interval worth # of time passed since the previous call was made. nextTime = max( self._expectNextCallAt + self.interval, currentTime + untilNextTime) # If the interval falls on the current time exactly, skip it and # schedule the call for the next interval. if nextTime == currentTime: nextTime += self.interval self._expectNextCallAt = nextTime self.call = self.clock.callLater(nextTime - currentTime, self) def __repr__(self): if hasattr(self.f, '__qualname__'): func = self.f.__qualname__ elif hasattr(self.f, '__name__'): func = self.f.__name__ if hasattr(self.f, 'im_class'): func = self.f.im_class.__name__ + '.' 
+ func
        else:
            func = reflect.safe_repr(self.f)

        return 'LoopingCall<%r>(%s, *%s, **%s)' % (
            self.interval, func, reflect.safe_repr(self.a),
            reflect.safe_repr(self.kw))


class SchedulerError(Exception):
    """
    The operation could not be completed because the scheduler or one of its
    tasks was in an invalid state.  This exception should not be raised
    directly, but is a superclass of various scheduler-state-related
    exceptions.
    """


class SchedulerStopped(SchedulerError):
    """
    The operation could not complete because the scheduler was stopped in
    progress or was already stopped.
    """


class TaskFinished(SchedulerError):
    """
    The operation could not complete because the task was already completed,
    stopped, encountered an error or otherwise permanently stopped running.
    """


class TaskDone(TaskFinished):
    """
    The operation could not complete because the task was already completed.
    """


class TaskStopped(TaskFinished):
    """
    The operation could not complete because the task was stopped.
    """


class TaskFailed(TaskFinished):
    """
    The operation could not complete because the task died with an unhandled
    error.
    """


class NotPaused(SchedulerError):
    """
    This exception is raised when a task is resumed which was not previously
    paused.
    """


class _Timer(object):
    # Wall-clock budget (seconds) for one cooperative work slice.
    MAX_SLICE = 0.01

    def __init__(self):
        self.end = time.time() + self.MAX_SLICE

    def __call__(self):
        # True once the slice's time budget is exhausted.
        return time.time() >= self.end


_EPSILON = 0.00000001


def _defaultScheduler(x):
    """Schedule C{x} to run almost immediately on the global reactor."""
    # Imported here to avoid requiring a reactor at module import time.
    from twisted.internet import reactor
    return reactor.callLater(_EPSILON, x)


class CooperativeTask(object):
    """
    A L{CooperativeTask} is a task object inside a L{Cooperator}, which can be
    paused, resumed, and stopped.  It can also have its completion (or
    termination) monitored.

    @see: L{Cooperator.cooperate}

    @ivar _iterator: the iterator to iterate when this L{CooperativeTask} is
        asked to do work.

    @ivar _cooperator: the L{Cooperator} that this L{CooperativeTask}
        participates in, which is used to re-insert it upon resume.
@ivar _deferreds: the list of L{defer.Deferred}s to fire when this task
        completes, fails, or finishes.

    @type _deferreds: C{list}

    @type _cooperator: L{Cooperator}

    @ivar _pauseCount: the number of times that this L{CooperativeTask} has
        been paused; if 0, it is running.

    @type _pauseCount: C{int}

    @ivar _completionState: The completion-state of this L{CooperativeTask}.
        C{None} if the task is not yet completed, an instance of
        L{TaskStopped} if C{stop} was called to stop this task early, of
        L{TaskFailed} if the application code in the iterator raised an
        exception which caused it to terminate, and of L{TaskDone} if it
        terminated normally via raising C{StopIteration}.

    @type _completionState: L{TaskFinished}
    """

    def __init__(self, iterator, cooperator):
        """
        A private constructor: to create a new L{CooperativeTask}, see
        L{Cooperator.cooperate}.
        """
        self._iterator = iterator
        self._cooperator = cooperator
        self._deferreds = []
        self._pauseCount = 0
        self._completionState = None
        self._completionResult = None
        # Register with the cooperator so work units start being scheduled.
        cooperator._addTask(self)

    def whenDone(self):
        """
        Get a L{defer.Deferred} notification of when this task is complete.

        @return: a L{defer.Deferred} that fires with the C{iterator} that
            this L{CooperativeTask} was created with when the iterator has
            been exhausted (i.e. its C{next} method has raised
            C{StopIteration}), or fails with the exception raised by C{next}
            if it raises some other exception.

        @rtype: L{defer.Deferred}
        """
        d = defer.Deferred()
        if self._completionState is None:
            # Still running: remember the Deferred to fire on completion.
            self._deferreds.append(d)
        else:
            # Already finished: fire immediately with the stored result.
            d.callback(self._completionResult)
        return d

    def pause(self):
        """
        Pause this L{CooperativeTask}.  Stop doing work until
        L{CooperativeTask.resume} is called.  If C{pause} is called more than
        once, C{resume} must be called an equal number of times to resume
        this task.

        @raise TaskFinished: if this task has already finished or completed.
""" self._checkFinish() self._pauseCount += 1 if self._pauseCount == 1: self._cooperator._removeTask(self) def resume(self): """ Resume processing of a paused L{CooperativeTask}. @raise NotPaused: if this L{CooperativeTask} is not paused. """ if self._pauseCount == 0: raise NotPaused() self._pauseCount -= 1 if self._pauseCount == 0 and self._completionState is None: self._cooperator._addTask(self) def _completeWith(self, completionState, deferredResult): """ @param completionState: a L{TaskFinished} exception or a subclass thereof, indicating what exception should be raised when subsequent operations are performed. @param deferredResult: the result to fire all the deferreds with. """ self._completionState = completionState self._completionResult = deferredResult if not self._pauseCount: self._cooperator._removeTask(self) # The Deferreds need to be invoked after all this is completed, because # a Deferred may want to manipulate other tasks in a Cooperator. For # example, if you call "stop()" on a cooperator in a callback on a # Deferred returned from whenDone(), this CooperativeTask must be gone # from the Cooperator by that point so that _completeWith is not # invoked reentrantly; that would cause these Deferreds to blow up with # an AlreadyCalledError, or the _removeTask to fail with a ValueError. for d in self._deferreds: d.callback(deferredResult) def stop(self): """ Stop further processing of this task. @raise TaskFinished: if this L{CooperativeTask} has previously completed, via C{stop}, completion, or failure. """ self._checkFinish() self._completeWith(TaskStopped(), Failure(TaskStopped())) def _checkFinish(self): """ If this task has been stopped, raise the appropriate subclass of L{TaskFinished}. 
""" if self._completionState is not None: raise self._completionState def _oneWorkUnit(self): """ Perform one unit of work for this task, retrieving one item from its iterator, stopping if there are no further items in the iterator, and pausing if the result was a L{defer.Deferred}. """ try: result = next(self._iterator) except StopIteration: self._completeWith(TaskDone(), self._iterator) except: self._completeWith(TaskFailed(), Failure()) else: if isinstance(result, defer.Deferred): self.pause() def failLater(f): self._completeWith(TaskFailed(), f) result.addCallbacks(lambda result: self.resume(), failLater)