// ==========================================================================
// Project: SproutCore - JavaScript Application Framework
// Copyright: ©2006-2011 Strobe Inc. and contributors.
// Portions ©2008-2011 Apple Inc. All rights reserved.
// License: Licensed under MIT license (see license.js)
// ==========================================================================
/*globals SC */
/**
* @class
*
* The MediaSlider element takes the original SC.SliderView and adds an
* indicator of which ranges of the media have been loaded.
*
* @extends SC.SliderView
*/
SC.MediaSlider = SC.SliderView.extend(
/** @scope SC.MediaSlider.prototype */
{
/**
* The media view which this slider should attach itself to.
*/
// TODO: Deprecate, bind to loadedTimeRanges instead.
mediaView: null,
/**
* The name of our render delegate
*/
renderDelegateName: 'mediaSliderRenderDelegate',
/**
* @private
*
* Appends a loaded ranges span to the div element.
*
* @param context
* @param firstTime
*/
render: function(context, firstTime) {
sc_super();
// Render the loaded time ranges.
this.renderLoadedTimeRanges();
},
renderLoadedTimeRanges: function() {
var ranges = this.getPath('mediaView.loadedTimeRanges');
var rangesElement = this.$('.sc-loaded-ranges');
var max = this.get('maximum');
// TODO: Remove support for mediaView, simply bind to loadedTimeRanges.
// Read the ranges element, kick out if it doesn't exist yet.
if(!rangesElement || rangesElement.length === 0) {
return;
}
// Scrub any previously rendered range spans.
rangesElement.find(".sc-loaded-range").remove();
// If there are no ranges, exit.
if(SC.empty(ranges)) {
return;
}
var width = rangesElement.width();
for( var i = 0; i < ranges.length; i += 2) {
try {
// Ranges are reported as a flat array of numbers: even indices are start
// times, odd indices are the end times matching the preceding start.
var startRange = ranges[i];
var endRange = ranges[i + 1];
var pixelLeft = width * (startRange / max);
var pixelWidth = width * ((endRange - startRange) / max);
var tag = $('<span class="sc-loaded-range" />');
tag.css('left', pixelLeft);
tag.css('width', pixelWidth);
rangesElement.append(tag);
} catch(e) {
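// Malformed or incomplete range pairs are ignored; a failed range simply isn't rendered.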
}
}
}.observes('*mediaView.loadedTimeRanges')
});
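// A minimal usage sketch (not part of the original file): wiring a
// MediaSlider to a media view. The 'mediaView' property is declared above;
// the pane layout and the SC.VideoView instance are illustrative assumptions.
//
// var pane = SC.MainPane.create({
//   childViews: 'video slider'.w(),
//   video: SC.VideoView.design({ value: 'movie.mp4' }),
//   slider: SC.MediaSlider.design({
//     minimum: 0,
//     maximum: 100,
//     mediaViewBinding: '.parentView.video'
//   })
// });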
// @ts-check
const HotelCharging = {
data() {
return {
activeSection: 'form',
bookingNumber: '',
acceptedConditions: false,
validationMessage: '',
validationMessageShowHelpAddon: false,
folioId: null,
chargeId: null,
chargingSession: null,
}
},
methods: {
submitStartForm(event) {
this.validationMessage = '';
this.validationMessageShowHelpAddon = false;
this.activeSection = 'loading';
fetch(event.target.action, {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
body: JSON.stringify({
bookingNumber: this.bookingNumber,
acceptedConditions: this.acceptedConditions,
}),
})
.then(response => response.json())
.then(data => {
console.log(data);
if (data.error) {
this.activeSection = 'form';
this.validationMessage = data.error;
this.validationMessageShowHelpAddon = true;
return;
}
this.activeSection = 'charging';
this.folioId = data.folioId;
this.chargeId = data.chargeId;
this.chargingSession = data.chargingSession;
})
.catch(error => {
console.error(error);
// Leave the loading view so the user can retry after a network failure.
this.activeSection = 'form';
});
},
submitStopForm(event) {
this.activeSection = 'loading';
fetch(event.target.action, {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
body: JSON.stringify({
bookingNumber: this.bookingNumber,
folioId: this.folioId,
chargeId: this.chargeId,
chargingSessionId: this.chargingSession.id,
}),
})
.then(response => response.json())
.then(data => {
console.log(data);
if (data.error) {
this.activeSection = 'charging';
this.validationMessage = data.error;
this.validationMessageShowHelpAddon = true;
return;
}
this.activeSection = 'receipt';
this.chargingSession = data.chargingSession;
})
.catch(error => {
console.error(error);
// Leave the loading view so the user can retry after a network failure.
this.activeSection = 'charging';
});
},
submitCloseForm(event) {
this.bookingNumber = '';
this.acceptedConditions = false;
this.activeSection = 'form';
this.folioId = null;
this.chargeId = null;
this.chargingSession = null;
}
}
};
// @ts-ignore
Vue
.createApp(HotelCharging)
.mount('#app');
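/*
 * A minimal markup sketch (an assumption, not from the original source): the
 * app expects a root element with id "app" and one form per section. The
 * handler names and v-model bindings match the component above; the form
 * action URLs are placeholders.
 *
 * <div id="app">
 *   <form v-if="activeSection === 'form'" action="/api/charging/start"
 *         @submit.prevent="submitStartForm">
 *     <input v-model="bookingNumber">
 *     <input type="checkbox" v-model="acceptedConditions">
 *     <p v-if="validationMessage">{{ validationMessage }}</p>
 *     <button>Start charging</button>
 *   </form>
 *   <form v-if="activeSection === 'charging'" action="/api/charging/stop"
 *         @submit.prevent="submitStopForm">
 *     <button>Stop charging</button>
 *   </form>
 * </div>
 */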
/*
* EnvConfig.h
*
* Author: Amir Rasouli
* email: [email protected]
*/
#ifndef ENVCONFIG_H_
#define ENVCONFIG_H_
#include <opencv2/opencv.hpp> // pulls in imgproc and highgui as well
namespace SearchConfig
{
enum searchMethod {greedy, lookMove};
}
class PanTiltConfig
{
friend class Environment;
public:
PanTiltConfig();
~PanTiltConfig(){};
float pan;
float tilt;
int maxTilt, minTilt;
int maxPan;
int tiltDelta, panDelta;
};
class CameraConfig{
public:
CameraConfig();
~CameraConfig(){};
int cameraHeight; //camera height in mm
float cameraVerticalViewAngle; //degrees
float cameraHorizontalViewAngle; //degrees
float cameraToRobotFrontDistance;
float cameraToTiltPointDistance;
float baseLine;//meter
float focalLength;//pixel
float cameraEffectiveRange;//mm
};
class RobotConfig
{
public:
RobotConfig();
~RobotConfig(){};
double robotSpeed;//max speed 700 mm/sec
int robotRadius;//mm
int robotLength;//mm
int robotWidth ;//mm
};
class EnvConfig {
friend class Environment;
public:
EnvConfig ();
~EnvConfig();
cv::Point3i envSize;
float voxelSize;
cv::Point2d robotPos;
cv::Point2d robotDir; //point in environment where robot is facing
float recognitionMaxRadius, recognitionMinRadius;
PanTiltConfig PTConf;
CameraConfig CamConf;
RobotConfig RobotConf;
SearchConfig::searchMethod searchMethod;
double searchThreshold;
private:
void initWithDefaults();
};
#endif /* ENVCONFIG_H_ */
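/*
 * Minimal usage sketch (illustrative, not part of the header). EnvConfig() is
 * assumed to populate the fields via initWithDefaults(); the values below are
 * placeholders, not the real defaults.
 *
 *   EnvConfig conf;
 *   conf.searchMethod = SearchConfig::greedy;
 *   conf.envSize = cv::Point3i(100, 100, 50); // environment size in voxels (assumed unit)
 *   conf.voxelSize = 50.0f;                   // mm per voxel (assumed unit)
 */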
import abc
import copy
from functools import reduce
import hearthbreaker.constants
from hearthbreaker.tags.base import Aura, AuraUntil, Effect, Buff, BuffUntil, Deathrattle
from hearthbreaker.tags.event import TurnEnded
from hearthbreaker.tags.selector import CurrentPlayer
from hearthbreaker.tags.status import Stealth, ChangeAttack, ChangeHealth, SetAttack, Charge, Taunt, DivineShield, \
Windfury, NoSpellTarget, SpellDamage, MinimumHealth, CanAttack
import hearthbreaker.targeting
# from hearthbreaker.game_objects import Hero
class GameException(Exception):
"""
An :class:`Exception` relating to the operation of the game
"""
def __init__(self, message):
super().__init__(message)
class Bindable:
"""
A class which inherits from Bindable has an event structure added to it.
This event structure follows the observer pattern. It consists of two parts: binding and triggering.
A function handler is bound to an event using the :meth:`bind` or :meth:`bind_once` methods. When the event is
triggered using the :meth:`trigger` method, then any function handlers which have been bound to that event are
called.
Arguments can be passed to a bound function when triggering. They are forwarded to the handler in the
order they were passed to :meth:`trigger`.
Functions can be bound such that they are called each time an event is triggered, or so that they are only called
the next time a function is triggered. The former case is handled by :meth:`bind` and the latter by
:meth:`bind_once`
**Examples**:
Simple Binding::
class EventTarget(Bindable):
def __init__(self):
super().__init__()
def handler(fangs, scales):
print("fangs: {:d}, scales: {:d}".format(fangs, scales))
target = EventTarget()
target.bind("attack", handler)
target.trigger("attack", 2, 1001) # outputs "fangs: 2, scales: 1001"
target.trigger("attack", 6, 1001) # outputs "fangs: 6, scales: 1001"
Binding Once::
class EventTarget(Bindable):
def __init__(self):
super().__init__()
def handler(joke):
print("{:s}! HAHAHA".format(joke))
target = EventTarget()
target.bind_once("joke_told", handler)
# outputs "Well, I'd better replace it then! HAHAHA"
target.trigger("joke_told", "Well, I'd better replace it then")
# outputs nothing
target.trigger("joke_told", "What a senseless waste of human life")
Any class which subclasses this class must be sure to call :meth:`__init__`
"""
def __init__(self):
"""
Set up a new :class:`Bindable`. Must be called by any subclasses.
"""
self.events = {}
def bind(self, event, function):
"""
Bind a function to an event. Each time the event is triggered, the function will be called.
:param string event: The event to bind a function to
:param function function: The function to bind. The parameters are not checked until it is called, so
ensure its signature matches the parameters called from :meth:`trigger`
:see: :class:`Bindable`
"""
if event not in self.events:
self.events[event] = []
self.events[event].append((function, False))
def bind_once(self, event, function):
"""
Bind a function to an event. This function will only be called the next time the event is triggered, and
then ignored.
:param string event: The event to bind a function to
:param function function: The function to bind. The parameters are not checked until it is called, so
ensure its signature matches the parameters called from :meth:`trigger`
:see: :class:`Bindable`
"""
if event not in self.events:
self.events[event] = []
self.events[event].append((function, True))
def trigger(self, event, *args):
"""
Trigger an event. Any functions which have been bound to that event will be called.
The parameters passed to this function as `args` will be passed along to the bound functions.
:param string event: The name of the event to trigger
:param list args: The arguments to pass to the bound function
:see: :class:`Bindable`
"""
if event in self.events:
for handler in copy.copy(self.events[event]):
if handler[1]:
self.events[event].remove(handler)
# tidy up the events dict so we don't have entries for events with no handlers
if len(self.events[event]) == 0:
del self.events[event]
handler[0](*args)
def unbind(self, event, function):
"""
Unbind a function from an event. When this event is triggered, the function is no longer called.
`function` must be the same function reference as was passed in to :meth:`bind` or :meth:`bind_once`
:param string event: The event to unbind the function from
:param function function: The function to unbind.
"""
if event in self.events:
self.events[event] = [handler for handler in self.events[event] if not handler[0] == function]
if len(self.events[event]) == 0:
del self.events[event]
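# Usage sketch for unbind (illustrative, mirroring the docstring examples
# above); the same function reference must be passed back:
#
#   target = EventTarget()
#   target.bind("attack", handler)
#   target.unbind("attack", handler)
#   target.trigger("attack", 2, 1001)  # outputs nothing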
class GameObject:
"""
Provides typing for the various game objects in the engine. Allows for checking the type of an object without
needing to know about and import the various objects in the game engine
"""
def __init__(self, effects=None, auras=None, buffs=None):
#: A list of the effects attached to this object
if effects:
self.effects = effects
else:
self.effects = []
#: A list of auras that originate with this character
if auras:
self.auras = auras
else:
self.auras = []
#: A list of buffs applied to this character
if buffs:
self.buffs = buffs
else:
self.buffs = []
#: The player associated with this Game Object
self.player = None
self._attached = False
def attach(self, obj, player):
if not self._attached:
self.player = player
for effect in self.effects:
effect.set_owner(obj)
effect.apply()
for buff in self.buffs:
buff.set_owner(obj)
buff.apply()
for aura in self.auras:
aura.set_owner(obj)
player.add_aura(aura)
self._attached = True
def calculate_stat(self, stat_class, starting_value=0):
"""
Calculates the amount of a particular stat this :class:`GameObject` has at current time.
"""
# Add together all the attack amounts from buffs
stat = reduce(lambda a, b: b.update(self, a), [buff.status for buff in self.buffs
if isinstance(buff.status, stat_class) and
(not buff.condition or buff.condition.evaluate(self, self))],
starting_value)
stat = reduce(lambda a, b: b.update(self, a), [aura.status
for player in self.player.game.players
for aura in player.object_auras
if aura.match(self) and isinstance(aura.status, stat_class)],
stat)
return max(0, stat)
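# Worked example (illustrative): with buffs ChangeAttack(+2) and
# ChangeAttack(+1) and starting_value=3, the first reduce folds 3 -> 5 -> 6;
# matching auras are folded the same way, and the result is clamped at zero.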
def __to_json__(self):
jsn = {}
if self.effects:
jsn['effects'] = self.effects
if self.auras:
jsn['auras'] = self.auras
if self.buffs:
jsn['buffs'] = self.buffs
return jsn
@staticmethod
def __from_json__(minion, effects=None, auras=None, buffs=None, **kwargs):
if effects:
minion.effects = [Effect.from_json(**effect) for effect in effects]
else:
minion.effects = []
if auras:
minion.auras = [AuraUntil.from_json(**aura) if 'until' in aura else Aura.from_json(**aura)
for aura in auras]
else:
minion.auras = []
if buffs:
minion.buffs = [BuffUntil.from_json(**buff) if 'until' in buff else Buff.from_json(**buff)
for buff in buffs]
else:
minion.buffs = []
@staticmethod
def is_spell():
"""
Checks if this object is a spell card
:rtype: bool
:returns: True if this is a spell card, false otherwise
"""
return False
@staticmethod
def is_secret():
"""
Checks if this object is a secret
:rtype: bool
:returns: True if this is a secret, false otherwise
"""
return False
@staticmethod
def is_minion():
"""
Checks if this object is a minion (card or actual minion)
:rtype: bool
:returns: True if this is a minion, false otherwise
"""
return False
@staticmethod
def is_weapon():
"""
Checks if this object is a weapon (card or actual weapon)
:rtype: bool
:returns: True if this is a weapon, false otherwise
"""
return False
@staticmethod
def is_card():
"""
Checks if this object is a card of any kind
:rtype: bool
:returns: True if this is a card, false otherwise
"""
return False
@staticmethod
def is_hero():
"""
Checks if this object is a hero
:rtype: bool
:returns: True if this is a hero, false otherwise
"""
return False
def is_valid(self):
"""
Checks if this object is a valid target for actions and statuses
"""
return True
def is_character(self):
"""
Checks if this object is a character (minion or hero)
"""
return False
def is_player(self):
"""
Checks if this is a player object
"""
return False
def add_effect(self, effect):
"""
Applies the given effect to the :class:`GameObject`. The effect will be unapplied in the case of silence,
and will be applied to any copies that are made.
:param MinionEffect effect: The effect to apply to this :class:`GameObject`
"""
effect.set_owner(self)
effect.apply()
self.effects.append(effect)
def add_aura(self, aura):
if not isinstance(aura, Aura):
raise TypeError("Expected an aura to be added")
self.auras.append(aura)
aura.set_owner(self)
self.player.add_aura(aura)
def remove_aura(self, aura):
for an_aura in self.auras:
if an_aura.eq(aura):
self.auras.remove(an_aura)
break
self.player.remove_aura(aura)
def add_buff(self, buff):
if not isinstance(buff, Buff):
raise TypeError("Expected a buff to be added")
self.buffs.append(buff)
buff.set_owner(self)
buff.apply()
def remove_buff(self, buff):
for a_buff in self.buffs:
if a_buff.eq(buff):
self.buffs.remove(a_buff)
break
buff.unapply()
def unattach(self):
if self._attached:
for effect in reversed(self.effects):
effect.unapply()
self.effects = []
for aura in reversed(self.auras):
self.player.remove_aura(aura)
self.auras = []
for buff in reversed(self.buffs):
buff.unapply()
self.buffs = []
self._attached = False
class Character(Bindable, GameObject, metaclass=abc.ABCMeta):
"""
A Character in Hearthstone is something that can attack, i.e. a :class:`Hero` or :class:`Minion`.
This common superclass handles all of the status tags and calculations involved in attacking or being attacked.
"""
def __init__(self, attack_power, health, enrage=None, effects=None, auras=None, buffs=None):
"""
Create a new Character with the given attack power and health
:param int attack_power: the amount of attack this character has at creation
:param int health: the maximum health of this character
:param list enrage: (optional) A list of :class:`hearthbreaker.tags.base.ReversibleActions` that describe
what will happen when this character is enraged
"""
Bindable.__init__(self)
GameObject.__init__(self, effects, auras, buffs)
# : The current health of this character
self.health = health
# : The maximum health of this character
self.base_health = health
#: The amount of attack this character has
self.base_attack = attack_power
#: How many attacks this character has performed this turn
self.attacks_performed = 0
#: Whether or not this character has died
self.dead = False
#: If this character has windfury
self.used_windfury = False
#: If this character is currently frozen
self.frozen = 0
#: The :class:`Player` that owns this character
self.player = None
#: Whether or not this character is immune to damage (but not other tags)
self.immune = 0
#: The list of delayed events
self.delayed = []
#: Non zero if this character has stealth
self.stealth = 0
#: Non zero if this character has divine shield
self.divine_shield = 0
#: If this character is enraged
self.enraged = False
#: If this character has been removed from the board
self.removed = False
#: An integer describing when this character was created. The lower, the earlier it was created
self.born = -1
#: An integer describing how much the health of this character has been adjusted
self.health_delta = 0
#: A list of actions that describe what will happen when this character is enraged
self.enrage = enrage if enrage else []
#: The character that this minion is attacking, while it is carrying out its attack
self.current_target = None
def _remove_stealth(self):
if self.stealth:
for buff in self.buffs:
if isinstance(buff.status, Stealth):
buff.unapply()
self.buffs = [buff for buff in self.buffs if not isinstance(buff.status, Stealth)]
# Add function that gets all targets to attack
def get_targets(self):
found_taunt = False
targets = []
for enemy in self.player.game.other_player.minions:
if enemy.taunt and enemy.can_be_attacked():
found_taunt = True
if enemy.can_be_attacked():
targets.append(enemy)
if isinstance(enemy, Hero):
print("ERROR: hero found in the enemy minion list")
if found_taunt:
targets = [target for target in targets if target.taunt]
else:
targets.append(self.player.game.other_player.hero)
return targets
# -----------------------------------------
def attack_target(self, target):
self.current_target = target
self.player.trigger("character_attack", self, self.current_target)
self.trigger("attack", self.current_target)
if self.removed or self.dead: # removed won't be set yet if the Character died during this attack
return
target = self.current_target
my_attack = self.calculate_attack() # In case the damage causes my attack to grow
target_attack = target.calculate_attack()
if target_attack > 0:
self.damage(target_attack, target)
target.damage(my_attack, self)
self.player.game.check_delayed()
self.trigger("attack_completed")
self.attacks_performed += 1
self.stealth = False
self.current_target = None
def attack(self, target=None):
"""
Causes this :class:`Character` to attack.
The Character will assemble a list of possible targets and then ask the agent associated with this Character to
select which target from the list it would like to attack.
This method will not succeed if the Character can't attack, either because it is not active, or it is frozen,
or some other factor. All of the damage and death triggers will be processed at the end of this method, so that
the order or evaluation doesn't affect the calculations. For example, if Amani Berserker is damaged in the
attack, its attack power shouldn't go up before the damage to its attacker is calculated.
The attack will not take place if the Character dies or is otherwise removed as a result of attacking
(e.g. various secrets)
"""
if not self.can_attack():
raise GameException("That minion cannot attack")
targets = self.get_targets()
if target is None:
target = self.choose_target(targets)
self._remove_stealth()
self.attack_target(target)
def choose_target(self, targets):
"""
Consults the associated player to select a target from a list of targets
:param list[Character] targets: the targets to choose a target from
"""
return self.player.choose_target(targets)
def calculate_stat(self, stat_class, starting_value=0):
"""
Calculates the value of the given stat class for this :class:`Character`, folding any buffs and auras
into the supplied starting value
"""
stat = starting_value
return super().calculate_stat(stat_class, stat)
def calculate_attack(self):
"""
Calculates the amount of attack this :class:`Character` has, including the base attack, any temporary attack
bonuses for this turn
"""
return self.calculate_stat(ChangeAttack, self.base_attack)
def calculate_max_health(self):
"""
Calculates the maximum amount of health this :class:`Character` has, including the base health, and any aura
tags
"""
return self.base_health + self.health_delta
def attacks_allowed(self):
"""
Returns the number of attacks this character can make each turn (more than one if it has windfury).
"""
return self.calculate_stat(Windfury, 1)
def windfury(self):
"""
Checks if this character has windfury attached
"""
return self.calculate_stat(Windfury, 1) > 1
def delayed_trigger(self, event, *args):
"""
Set up a delayed trigger for an event. Any events triggered with this method will not be called until
:meth:`activate_delayed` is called.
The purpose of this method is to allow for simultaneous events. For example, if a minion is attacked
then any damage events should be triggered after the attack, and at the same time as each other.
:param string event: The event to set up a delayed trigger for
:param list args: The arguments to pass to the handler when it is called.
:see: :class:`Bindable`
"""
self.delayed.append({'event': event, 'args': args})
self.player.game.delayed_minions.add(self)
def activate_delayed(self):
"""
Activate any events that were delayed.
:see: :meth:`delayed_trigger`
"""
for delayed in self.delayed:
self.trigger(delayed['event'], *delayed['args'])
self.delayed = []
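# Illustrative flow for the two methods above: delayed_trigger() only queues
# the event, and the bound handlers run when the game later calls
# activate_delayed():
#
#   minion.delayed_trigger("died", attacker)  # queued, handlers not yet called
#   ...                                       # simultaneous damage resolves
#   minion.activate_delayed()                 # "died" handlers fire together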
def damage(self, amount, attacker):
"""
Deal damage to this :class:`Character`. This method uses the ``attacker`` parameter to determine the nature
of the damage taken. If the attacker is a :class:`Character`, then it is assumed to be a physical attack.
If attacker is a :class:`SpellCard`, then it assumes a spell attack. If ``None``, then something else
(hero ability or battlecry). This method will also trigger the various events associated with taking damage
or dying.
If the character has a divine shield, it will be removed, and the character will take no damage. If the
character's health is below the max_health, then the character is considered enraged.
:param int amount: The amount of damage done (should be positive)
:param Object attacker: The :class:`Character` or :class:`SpellCard` that did the damage or ``None``.
"""
if self.dead:
return
self.player.trigger("pre_damage", self, attacker, amount)
if not self.immune:
# This is constructed to avoid infinite recursion when mistress of pain and auchenai soulpriest
# are in use. This will prevent the did_damage event from going off if the character being damaged is
# already dead.
# We could simply do the check for death before this, but then the Mistress of Pain can't bring a dead
# hero back to life after damaging it via misdirection.
if attacker and attacker.is_character() and self.health >= 0:
self.health -= amount
attacker.trigger("did_damage", self, amount)
attacker._remove_stealth()
else:
self.health -= amount
min_health = self.calculate_stat(MinimumHealth, 0)
if self.health < min_health:
self.health = min_health
self.trigger("damaged", amount, attacker)
self.player.trigger("character_damaged", self, attacker, amount)
if self.health <= 0:
self.die(attacker)
self.trigger("health_changed")
if not self.enraged and self.health != self.calculate_max_health():
self.enraged = True
self.trigger("enraged")
self._do_enrage()
def change_attack(self, amount):
"""
Change the amount of attack this :class:`Character` has. The amount can be either positive or negative.
This method will automatically undo its effect when silenced, and re-apply its effect when copied
:param int amount: The amount to change the attack by
"""
self.add_buff(Buff(ChangeAttack(amount)))
def change_temp_attack(self, amount):
"""
Change the amount of attack this :class:`Character` has on this turn only. The amount can be either positive
or negative. This method will automatically undo its effect when silenced, and re-apply its effect when copied
:param int amount: The amount to change the temporary attack by
"""
self.add_buff(BuffUntil(ChangeAttack(amount), TurnEnded(player=CurrentPlayer())))
self.trigger("attack_changed", amount)
def increase_health(self, amount):
"""
Increase the amount of total health this :class:`Character` has. This is a permanent effect (unless the
Character is silenced). This effect will increase both the player's current health and maximum health
:param int amount: the amount to increase health by
"""
self.trigger("health_increased", amount)
self.add_buff(Buff(ChangeHealth(amount)))
self.trigger("health_changed")
def decrease_health(self, amount):
"""
Decrease the amount of total health this :class:`Character` has. This is a permanent effect (unless the
Character is silenced). This effect will decrease the player's maximum health, but will only decrease
the player's health if it is above the new value for maximum health
:param int amount: the amount to decrease health by
"""
if self.enraged and self.health == self.calculate_max_health():
self.enraged = False
self.trigger("unenraged")
self._do_unenrage()
self.add_buff(Buff(ChangeHealth(-amount)))
self.trigger("health_changed")
self.trigger("health_decreased", amount)
def set_attack_to(self, new_attack):
"""
Sets the amount of total attack this :class:`Character` has.
:param new_attack: An integer specifying what this character's new attack should be
"""
self.buffs.append(Buff(SetAttack(new_attack)))
def set_health_to(self, new_health):
"""
Sets the amount of total health this :class:`Character` has. This will adjust its actual health if necessary
:param new_health: An integer specifying what this character's new health should be
"""
was_enraged = self.health < self.calculate_max_health()
diff = new_health - (self.base_health + self.health_delta)
for player in self.game.players:
for aura in player.object_auras:
if aura.match(self) and isinstance(aura.status, ChangeHealth):
diff += aura.status.amount
if diff > 0:
self.increase_health(diff)
elif diff < 0:
self.decrease_health(-diff)
self.health = self.calculate_max_health()
if was_enraged:
self._do_unenrage()
self.trigger('unenraged')
def heal(self, amount, source):
"""
Heals the :class:`Character`. The health cannot exceed the character's max health. If the amount
being healed is less than 0, then the character is damaged instead.
If the character's health is brought back to its maximum, then it is no longer enraged.
:param int amount: The amount this character is being healed by. Can be negative
:param source: The source of this healing. Could be a :class:`Minion`, a :class:`spell card <Card>` or None
"""
if amount < 0:
self.damage(-amount, source)
if amount > 0:
self.trigger("healed", amount)
self.player.trigger("character_healed", self, amount)
self.health += amount
if self.health > self.calculate_max_health():
self.health = self.calculate_max_health()
if self.enraged and self.health == self.calculate_max_health():
self.enraged = False
self.trigger("unenraged")
self._do_unenrage()
self.trigger("health_changed")
def silence(self):
"""
Silence this :class:`Character`. This will trigger the silence event, and undo any status tags that have
affected this character (immune, attack & health increases, frozen, windfury)
"""
health_full = self.health == self.calculate_max_health()
for effect in reversed(self.effects):
effect.unapply()
for aura in reversed(self.auras):
self.player.remove_aura(aura)
for buff in reversed(self.buffs):
if isinstance(buff, BuffUntil):
buff.until.unbind(buff.owner, buff.__until__)
buff.unapply()
self.effects = []
self.auras = []
self.buffs = []
self.enrage = []
if self.calculate_max_health() < self.health or health_full:
self.health = self.calculate_max_health()
self.trigger("silenced")
def die(self, by):
"""
Kills this :class:`Character`. The death event will not be processed until :meth:`activate_delayed` is called.
:param by: The object that killed this character. Could be a :class:`Character`, a :class:`spell card <Card>`
or None
"""
self.delayed_trigger("died", by)
self.dead = True
def can_attack(self):
"""
Checks if this :class:`Character` can attack. Evaluates whether or not it has already attacked, if it is
frozen and if it has an attack value
:rtype boolean:
"""
can_attack = self.calculate_stat(CanAttack, True)
return can_attack and self.calculate_attack() > 0 and self.attacks_performed < self.attacks_allowed() and \
not self.frozen and not (self.dead or self.removed)
def spell_targetable(self):
"""
Checks if a :class:`Character` can be targeted by a spell. Minions with stealth or whose text say they can't be
targeted by spells cannot be targeted, but any other character can.
"""
return True
def is_valid(self):
return not self.dead and not self.removed
def is_character(self):
return True
def _do_enrage(self):
for aura in self.enrage:
self.add_aura(aura)
def _do_unenrage(self):
for aura in self.enrage:
self.remove_aura(aura)
class Weapon(Bindable, GameObject):
"""
Represents a Hearthstone weapon. All weapons have attack power and durability. The logic for handling the
attacks is handled by :class:`Hero`, but it can be modified through the use of events.
"""
def __init__(self, attack_power, durability, deathrattle=None,
effects=None, auras=None, buffs=None):
"""
Creates a new weapon with the given attack power and durability. A deathrattle can also optionally
be set.
:param int attack_power: The amount of attack this weapon gives the hero
:param int durability: The number of times this weapon can be used to attack before being discarded
:param function deathrattle: Called when the weapon is destroyed
"""
Bindable.__init__(self)
GameObject.__init__(self, effects, auras, buffs)
# : The amount of attack this weapon gives the hero
self.base_attack = attack_power
# : The number of times this weapon can be used to attack before being discarded
self.durability = durability
#: Called when the weapon is destroyed
self.deathrattle = deathrattle
#: The :class:`Player` associated with this weapon
self.player = None
#: The :class:`WeaponCard` that created this weapon
self.card = None
def copy(self, new_owner):
new_weapon = Weapon(self.base_attack, self.durability, copy.deepcopy(self.deathrattle),
copy.deepcopy(self.effects), copy.deepcopy(self.auras), copy.deepcopy(self.buffs))
new_weapon.player = new_owner
return new_weapon
def destroy(self):
self.trigger("destroyed")
# Deathrattle is triggered no matter how the weapon is destroyed, see
# http://www.hearthhead.com/card=1805/deaths-bite#comments:id=1983510
if self.deathrattle is not None:
self.deathrattle.do(self)
self.player.weapon = None
self.player.trigger("weapon_destroyed")
self.unattach()
def equip(self, player):
self.player = player
if self.player.weapon is not None:
self.player.weapon.destroy()
self.player.weapon = self
self.attach(self, player)
self.player.trigger("weapon_equipped")
def calculate_attack(self):
"""
Calculates the amount of attack this :class:`Weapon` has, including the base attack, any temporary attack
bonuses for this turn
"""
return self.calculate_stat(ChangeAttack, self.base_attack)
def __to_json__(self):
parent_json = super().__to_json__()
parent_json.update({
'name': self.card.name,
'attack': self.base_attack,
'durability': self.durability,
})
return parent_json
@staticmethod
def is_weapon():
return True
@staticmethod
def __from_json__(wd, player):
from hearthbreaker.engine import card_lookup
weapon_card = card_lookup(wd['name'])
weapon = weapon_card.create_weapon(player)
weapon.base_attack = wd['attack']
weapon.durability = wd['durability']
weapon.card = weapon_card
GameObject.__from_json__(weapon, **wd)
return weapon
class Minion(Character):
auto_key = 0
def __init__(self, attack, health,
deathrattle=None, taunt=False, charge=False, spell_damage=0, divine_shield=False, stealth=False,
windfury=False, spell_targetable=True, effects=None, auras=None, buffs=None,
enrage=None, key=None):
super().__init__(attack, health, enrage, effects, auras, buffs)
self.game = None
self.card = None
self.index = -1
if key is None:
self.key = Minion.auto_key
Minion.auto_key += 1
else:
self.key = key
self.taunt = 0
self.replaced_by = None
self.can_be_targeted_by_spells = True
if deathrattle:
if isinstance(deathrattle, Deathrattle):
self.deathrattle = [deathrattle]
else:
self.deathrattle = deathrattle
else:
self.deathrattle = []
self.exhausted = True
self.removed = False
if charge:
self.buffs.append(Buff(Charge()))
if taunt:
self.buffs.append(Buff(Taunt()))
if stealth:
self.buffs.append(Buff(Stealth()))
if divine_shield:
self.buffs.append(Buff(DivineShield()))
if windfury:
self.buffs.append(Buff(Windfury()))
if not spell_targetable:
self.buffs.append(Buff(NoSpellTarget()))
if spell_damage:
self.buffs.append(Buff(SpellDamage(spell_damage)))
def __repr__(self):
return 'Minion: '+ self.card.__str__() + " ("+str(self.health) + " health) ("+str(self.base_attack)+" attack)"
def __eq__(self, other):
if not isinstance(other, Minion):
return NotImplemented
return self.card.name == other.card.name and self.card.mana == other.card.mana and \
self.base_health == other.base_health and self.base_attack == other.base_attack
def __hash__(self):
return hash((self.card.name, self.card.mana, self.base_health, self.base_attack))
def add_to_board(self, index):
aura_affects = {}
for player in self.game.players:
for aura in player.object_auras:
aura_affects[aura] = set()
for minion in self.player.minions:
if aura.match(minion):
aura_affects[aura].add(minion)
self.game.minion_counter += 1
self.player.minions.insert(index, self)
self.born = self.game.minion_counter
for minion in self.player.minions[index + 1:]:
minion.index += 1
self.index = index
self.health += self.calculate_max_health() - self.base_health - self.health_delta
self.attach(self, self.player)
for player in self.game.players:
for aura in player.object_auras:
for minion in self.player.minions:
if aura in aura_affects:
is_in = minion in aura_affects[aura]
if not is_in and aura.match(minion):
aura.status.act(aura.owner, minion)
elif is_in and not aura.match(minion):
aura.status.unact(aura.owner, minion)
self.trigger("added_to_board", self, index)
def calculate_attack(self):
"""
Calculates the amount of attack this :class:`Minion` has, including the base attack, any temporary attack
bonuses for this turn and any aura tags
"""
return super().calculate_attack()
def calculate_max_health(self):
"""
Calculates the maximum amount of health this :class:`Character` has, including the base health, and any aura
tags
"""
return super().calculate_max_health()
def charge(self):
return self.calculate_stat(Charge, False)
def remove_from_board(self):
if not self.removed:
aura_affects = {}
for aura in self.player.object_auras:
aura_affects[aura] = set()
for minion in self.player.minions:
if aura.match(minion):
aura_affects[aura].add(minion)
for minion in self.player.minions:
if minion.index > self.index:
minion.index -= 1
self.player.minions.remove(self)
self.player.trigger("minion_removed", self)
self.removed = True
for aura in self.player.object_auras:
for minion in self.player.minions:
is_in = minion in aura_affects[aura]
if not is_in and aura.match(minion):
aura.status.act(aura.owner, minion)
elif is_in and not aura.match(minion):
aura.status.unact(aura.owner, minion)
def replace(self, new_minion):
"""
Replaces this minion with another one
:param hearthbreaker.game_objects.Minion new_minion: The minion to replace this minion with
"""
self.unattach()
new_minion.index = self.index
new_minion.player = self.player
new_minion.game = self.game
new_minion.exhausted = True
self.game.minion_counter += 1
new_minion.born = self.game.minion_counter
if self.index >= len(self.player.minions):
raise ValueError("Attempting to replace minion with invalid index")
self.player.minions[self.index] = new_minion
new_minion.attach(new_minion, self.player)
for aura in self.player.object_auras:
if aura.match(new_minion):
aura.status.act(self, new_minion)
new_minion.health += new_minion.calculate_max_health() - new_minion.base_health
self.removed = True
self.replaced_by = new_minion
def attack(self, target=None):
super().attack(target)
def damage(self, amount, attacker):
if self.divine_shield:
self.buffs = [buff for buff in self.buffs if not isinstance(buff.status, DivineShield)]
self.divine_shield = 0
else:
super().damage(amount, attacker)
def heal(self, amount, source):
super().heal(amount, source)
def die(self, by):
# Since deathrattle gets removed by silence, save it
if not self.dead and not self.removed:
deathrattle = self.deathrattle
def delayed_death(c):
self.remove_from_board()
self.unattach()
if deathrattle is not None:
for rattle in deathrattle:
rattle.do(self)
if self.player.double_deathrattle:
rattle.do(self)
self.player.trigger("minion_died", self, by)
# Used to activate any secrets applied during the death phase
self.player.trigger("after_death", self.player)
self.player.graveyard.append(self.card.name)
self.bind_once("died", delayed_death)
super().die(by)
self.player.dead_this_turn.append(self)
def silence(self):
super().silence()
self.battlecry = None
self.deathrattle = []
def can_attack(self):
return (self.charge() or not self.exhausted) and super().can_attack()
def can_be_attacked(self):
return not self.stealth
def spell_targetable(self):
return (not self.stealth or self.player is self.player.game.current_player) and self.can_be_targeted_by_spells
@staticmethod
def is_minion():
return True
def __str__(self): # pragma: no cover
return "Minion: {0} ({1} health) ({2} attack) ...at index {3}".format(self.card.__str__(), self.health, self.calculate_attack(), self.index)
def copy(self, new_owner, new_game=None):
new_minion = Minion(self.base_attack, self.base_health,
effects=copy.deepcopy(self.effects),
auras=copy.deepcopy(self.auras),
buffs=copy.deepcopy(self.buffs),
deathrattle=copy.deepcopy(self.deathrattle),
enrage=copy.deepcopy(self.enrage), key=self.key)
new_minion.health = self.base_health - (self.calculate_max_health() - self.health)
new_minion.enraged = self.enraged
new_minion.immune = self.immune
new_minion.index = self.index
new_minion.attacks_performed = self.attacks_performed
new_minion.exhausted = self.exhausted
new_minion.born = self.born
card_type = type(self.card)
new_minion.card = card_type()
new_minion.player = new_owner
if new_game:
new_minion.game = new_game
else:
new_minion.game = new_owner.game
return new_minion
@staticmethod
def __from_json__(md, player, game):
from hearthbreaker.engine import card_lookup
minion = Minion(md['attack'], md['max_health'])
GameObject.__from_json__(minion, **md)
minion.health = md['max_health'] - md['damage']
minion.exhausted = md['exhausted']
minion.attacks_performed = not md['attacks_performed']
minion.born = md['sequence_id']
if 'enrage' in md:
minion.enrage = [Aura.from_json(**enrage) for enrage in md['enrage']]
minion.deathrattle = []
for rattle in md['deathrattles']:
minion.deathrattle.append(Deathrattle.from_json(**rattle))
minion.card = card_lookup(md["name"])
minion.game = game
minion.player = player
return minion
def bounce(self):
if len(self.player.hand) < 10:
self.unattach()
self.remove_from_board()
self.card.attach(self.card, self.player)
self.player.hand.append(self.card)
else:
self.die(None)
self.game.check_delayed()
def __to_json__(self):
r_val = super().__to_json__()
r_val.update({
'name': self.card.name,
'sequence_id': self.born,
'position': self.index,
'damage': self.calculate_max_health() - self.health,
'max_health': self.base_health,
'attack': self.base_attack,
"exhausted": self.exhausted,
"attacks_performed": not self.attacks_performed,
'deathrattles': self.deathrattle,
})
if self.enrage:
r_val['enrage'] = self.enrage
return r_val
class Hero(Character):
def __init__(self, health, character_class, power, player):
super().__init__(0, health)
self.armor = 0
self.character_class = character_class
self.player = player
self.game = player.game
self.power = power
self.power.hero = self
self.card = None
self.power_targets_minions = False
def __repr__(self):
return 'Hero: '+ self.card.__str__() + " ("+str(self.health) + " health) ("+str(self.base_attack)+" attack)"
def calculate_attack(self):
if self.player == self.player.game.current_player and self.player.weapon:
base = self.base_attack + self.player.weapon.base_attack
else:
base = self.base_attack
return self.calculate_stat(ChangeAttack, base)
def calculate_stat(self, stat_class, starting_value=0):
if self.player == self.player.game.current_player and self.player.weapon:
starting_value = self.player.weapon.calculate_stat(stat_class, starting_value)
return super().calculate_stat(stat_class, starting_value)
def copy(self, new_owner):
new_hero = Hero(self.base_health, self.character_class, self.power, new_owner)
new_hero.health = self.health
new_hero.armor = self.armor
new_hero.used_windfury = False
new_hero.attacks_performed = self.attacks_performed
new_hero.effects = copy.deepcopy(self.effects)
new_hero.auras = copy.deepcopy(self.auras)
new_hero.buffs = copy.deepcopy(self.buffs)
new_hero.card = type(self.card)()
return new_hero
def attack(self):
super().attack()
if self.player.weapon is not None:
self.player.weapon.durability -= 1
if self.player.weapon.durability == 0:
self.player.weapon.destroy()
def damage(self, amount, attacker):
self.armor -= amount
if self.armor < 0:
new_amount = -self.armor
self.armor = 0
super().damage(new_amount, attacker)
elif issubclass(type(attacker), Character):
attacker.trigger("did_damage", self, 0)
def increase_armor(self, amount):
self.player.trigger("armor_increased", amount)
self.armor += amount
def die(self, by):
super().die(by)
self.player.game.game_over()
def find_power_target(self):
targets = hearthbreaker.targeting.find_spell_target(self.player.game, lambda t: t.spell_targetable())
target = self.choose_target(targets)
self.trigger("found_power_target", target)
return target
def replace(self, new_hero):
"""
Replaces this hero with another one
:param hearthbreaker.game_objects.Hero new_hero: The hero to replace this hero with
"""
self.unattach()
new_hero.player = self.player
new_hero.game = self.game
new_hero.exhausted = False
self.game.minion_counter += 1
new_hero.born = self.game.minion_counter
self.player.hero = new_hero
new_hero.power.hero = new_hero
new_hero.attach(new_hero, self.player)
for aura in self.player.object_auras:
if aura.match(new_hero):
aura.status.act(self, new_hero)
@staticmethod
def is_hero():
return True
def __to_json__(self):
r_val = super().__to_json__()
r_val.update({
'character': hearthbreaker.constants.CHARACTER_CLASS.to_str(self.character_class),
'health': self.health,
'armor': self.armor,
'name': self.card.short_name,
'attack': self.base_attack,
'immune': self.immune,
'used_windfury': self.used_windfury,
'attacks_performed': self.attacks_performed,
})
return r_val
@classmethod
def __from_json__(cls, hd, player):
hero = player.deck.hero.create_hero(player)
hero.card = player.deck.hero
GameObject.__from_json__(hero, **hd)
hero.health = hd["health"]
hero.base_attack = hd["attack"]
hero.armor = hd["armor"]
hero.immune = hd["immune"]
hero.used_windfury = hd["used_windfury"]
hero.attacks_performed = hd["attacks_performed"]
return hero
# coding=utf-8
# Author: Rion B Correia
# Date: Jul 24, 2019
#
# Description: Plots HS and MM genes that are differentially expressed, calculated by 1_calc_diff_gene_exp.R
#
# Instructions:
# For each species, two different comparisons are necessary:
# Cytes vs Gonia (interested in genes upregulated in Cytes)
# Cytes vs Tids (interested in genes downregulated in Cytes)
# Genes are considered upregulated if: log2FC > 1, p-value < 0.01 and FDR ≤ 0.05
# Genes are considered downregulated if: log2FC < -1, p-value < 0.01 and FDR ≤ 0.05
#
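# As a sketch, those criteria correspond to boolean masks like the following
# (the column names 'logFC', 'PValue' and 'FDR' are edgeR-style assumptions;
# plot_MA below only filters on 'logFC' and 'FDR'):
#
#   up = (df['logFC'] > 1) & (df['PValue'] < 0.01) & (df['FDR'] <= 0.05)
#   down = (df['logFC'] < -1) & (df['PValue'] < 0.01) & (df['FDR'] <= 0.05)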
import math
import pandas as pd
pd.set_option('display.max_rows', 100)
pd.set_option('display.max_columns', 500)
pd.set_option('display.width', 1000)
import matplotlib as mpl
#mpl.use('agg')
mpl.rcParams['mathtext.fontset'] = 'cm'
mpl.rcParams['mathtext.rm'] = 'serif'
import matplotlib.pyplot as plt
from matplotlib import transforms
from utils import ensurePathExists
def plot_MA(df, core=[], pool=[], file='image.pdf', title="plotMA",
c_up='#ff9896', c_not='black', c_down='#aec7e8'
):
s = 5
lw = 0
fig, ax = plt.subplots(nrows=1, ncols=1, figsize=(4, 3))
minLogFC = math.log2(2)
maxFDR = 0.05
# Divide data into DGE Blocks
dfU = df.loc[(df['FDR'] <= maxFDR) & (df['logFC'].abs() >= minLogFC) & (df['logFC'] >= 0), :]
dfD = df.loc[(df['FDR'] <= maxFDR) & (df['logFC'].abs() >= minLogFC) & (df['logFC'] <= 0), :]
dfN = df.loc[~df.index.isin(dfU.index.tolist() + dfD.index.tolist()), :]
# Counts
n_up, n_down, n_not = len(dfU), len(dfD), len(dfN)
print("Up  : {:d}".format(n_up))
print("Down: {:d}".format(n_down))
print("Not : {:d}".format(n_not))
# Plot
ax.scatter(dfU['logCPM'], dfU['logFC'], c=c_up, s=s, lw=lw, marker='o', zorder=3, rasterized=True)
ax.scatter(dfD['logCPM'], dfD['logFC'], c=c_down, s=s, lw=lw, marker='o', zorder=3, rasterized=True)
ax.scatter(dfN['logCPM'], dfN['logFC'], c=c_not, s=s / 3, lw=lw, marker='o', zorder=2, rasterized=True)
# Draw a line at y=(-1,0,1)
ax.axhline(y=-1, color='b', lw=1, linestyle='--', zorder=5)
ax.axhline(y=0, color='gray', lw=1, linestyle='--', zorder=5)
ax.axhline(y=+1, color='b', lw=1, linestyle='--', zorder=5)
ax.set_xlim(-1, 18)
ax.set_ylim(-15, 15)
# Labels
ax.set_title(title)
ax.set_ylabel('logFC')
ax.set_xlabel('Average logCPM')
# Layout
#plt.tight_layout()
plt.subplots_adjust(left=0.17, bottom=0.17, right=0.97, top=0.90)
# Save
ensurePathExists(file)
fig.savefig(file, dpi=300)
if __name__ == '__main__':
#
# [H]omo [S]apiens
#
# Cytes vs Gonia (interested in genes upregulated in Cytes)
#
df = pd.read_csv('../01-diff-gene-exp/results/HS/HS-DGE_Cyte_vs_Gonia.csv', index_col=0)
df.index = df.index.map(lambda x: x.split('.')[0])
plot_MA(df=df, file='images/simpler/HS-DGE_Cyte_vs_Gonia-smp.pdf', title="Human (Up)Cyte vs Gonia")
#
# Tid vs Cyte (interested in genes downregulated in Tid)
#
df = pd.read_csv('../01-diff-gene-exp/results/HS/HS-DGE_Tid_vs_Cyte.csv', index_col=0)
df.index = df.index.map(lambda x: x.split('.')[0])
plot_MA(df=df, file='images/simpler/HS-DGE_Tid_vs_Cyte-smp.pdf', title="Human (Down)Tid vs Cyte")
#
# MM
#
# Cytes vs Gonia (interested in genes upregulated in Cytes)
#
df = pd.read_csv('../01-diff-gene-exp/results/MM/MM-DGE_Cyte_vs_Gonia.csv', index_col=0)
plot_MA(df=df, file='images/simpler/MM-DGE_Cyte_vs_Gonia-smp.pdf', title="Mouse (Up)Cyte vs Gonia")
#
# Tid vs Cyte (interested in genes downregulated in Tid)
#
df = pd.read_csv('../01-diff-gene-exp/results/MM/MM-DGE_Tid_vs_Cyte.csv', index_col=0)
plot_MA(df=df, file='images/simpler/MM-DGE_Tid_vs_Cyte-smp.pdf', title="Mouse (Down)Tid vs Cyte")
#
# DM
#
# Middle vs Apical (interested in genes upregulated in Middle)
#
df = pd.read_csv('../01-diff-gene-exp/results/DM/DM-DGE_Middle_vs_Apical.csv', index_col=0)
plot_MA(df=df, file='images/simpler/DM-DGE_Middle_vs_Apical-smp.pdf', title="Insect (Up)Middle vs Apical")
#
# Basal vs Middle (interested in genes downregulated in Basal)
#
df = pd.read_csv('../01-diff-gene-exp/results/DM/DM-DGE_Basal_vs_Middle.csv', index_col=0)
plot_MA(df=df, file='images/simpler/DM-DGE_Basal_vs_Middle-smp.pdf', title="Insect (Down)Basal vs Middle")
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
import os
from pathlib import Path
import click
from clk.colors import Colorer
from clk.config import config, merge_settings
from clk.core import DynamicChoiceType, cache_disk
from clk.customcommands import CustomCommandResolver
from clk.decorators import argument, flag, group, option, table_fields, table_format, use_settings
from clk.externalcommands import ExternalCommandResolver
from clk.flow import get_flow_commands_to_run
from clk.lib import TablePrinter, copy, createfile, makedirs, move, quote, rm
from clk.log import get_logger
from clk.overloads import Argument, CommandType, Group, Option, get_command
from clk.types import DirectoryProfile as DirectoryProfileType
LOGGER = get_logger(__name__)
class CustomCommandConfig:
pass
@group(handle_dry_run=True)
@use_settings('customcommands', CustomCommandConfig, override=False)
def command():
"""Display all the available commands"""
@command.command()
def display():
ctx = click.get_current_context()
display_subcommands(ctx, config.main_command)
def display_subcommands(ctx, cmd, indent=''):
# type: (click.Context, Group, str) -> None
for sub_cmd_name in cmd.list_commands(ctx):
sub_cmd = cmd.get_command(ctx, sub_cmd_name)
if sub_cmd:
click.echo(cmd_format(sub_cmd_name, sub_cmd.short_help, indent))
for param in sub_cmd.params:
if not hasattr(param, 'help') or not param.help:
LOGGER.warning('no help message in parameter %s', param.name)
if isinstance(sub_cmd, click.Group):
if not hasattr(sub_cmd, 'original_command'):
display_subcommands(ctx, sub_cmd, indent + ' ')
else:
LOGGER.warning("Can't get %s", sub_cmd_name)
def cmd_format(name, cmd_help, indent):
cmd_help = cmd_help or ''
end = len(indent) + len(name)
spacer = ' ' * max(20 - end, 1)
return indent + name + spacer + cmd_help
@command.command()
@argument('path', type=CommandType(), help='The command to resolve')
def resolve(path):
"""Resolve a command to help understanding where a command comes from"""
cmd, resolver = get_command(path, True)
click.echo(f'The command {path} is resolved by the resolver {resolver.name}')
class CustomCommandPathType(DynamicChoiceType):
def __init__(self, type):
self.type = type
def choices(self):
_, settings = merge_settings(config.iter_settings(explicit_only=True))
return settings['customcommands'].get(self.type, [])
class CustomCommandNameType(DynamicChoiceType):
def __init__(self, settings=None):
self.resolvers = [
ExternalCommandResolver(settings),
CustomCommandResolver(settings),
]
def choices(self):
return sum([resolver._list_command_paths() for resolver in self.resolvers], [])
class CustomCommandType(CustomCommandNameType):
def converter(self, path):
for resolver in self.resolvers:
if path in resolver._list_command_paths():
return resolver._get_command(path)
raise Exception(f'Could not find a resolver matching {path}')
def format_paths(path):
return ' '.join(map(quote, path))
@command.group(default_command='show')
def path():
"""Manipulate paths where to find extra commands"""
@path.command()
@Colorer.color_options
@table_format(default='key_value')
@table_fields(choices=['name', 'paths'])
def show(fields, format, **kwargs):
"""Show all the custom commands paths"""
with Colorer(kwargs) as colorer, TablePrinter(fields, format) as tp:
values = {
profile.name: format_paths(config.customcommands.all_settings.get(profile.name, {}).get('pythonpaths', []))
for profile in config.all_enabled_profiles
}
args = colorer.colorize(values, config.customcommands.readprofile)
tp.echo('pythonpaths', ' '.join(args))
values = {
profile.name:
format_paths(config.customcommands.all_settings.get(profile.name, {}).get('executablepaths', []))
for profile in config.all_enabled_profiles
}
args = colorer.colorize(values, config.customcommands.readprofile)
tp.echo('executablepaths', ' '.join(args))
def custom_command_type():
return option('--type',
help='What kind of object should I find at these locations',
type=click.Choice(['executable', 'python']),
default='executable')
@path.command()
@argument('paths', nargs=-1, type=Path, help='The paths to add to load custom commands')
@custom_command_type()
def add(paths, type):
"""Add custom command paths"""
paths = [str(d) for d in paths]
config.customcommands.writable[f'{type}paths'] = config.customcommands.writable.get(f'{type}paths',
[]) + list(paths)
config.customcommands.write()
LOGGER.info(f'Added {format_paths(paths)} ({type}) to the profile {config.customcommands.writeprofile}')
@path.command()
@argument('paths', nargs=-1, type=CustomCommandPathType('pythonpaths'), help='The paths to remove from custom commands')
@custom_command_type()
def remove(paths, type):
"""Remove all the custom commands paths from the profile"""
to_remove = set(config.customcommands.writable.get(f'{type}paths', [])).intersection(paths)
if not to_remove:
raise click.UsageError('None of the given paths are present. This command would be a no-op.')
config.customcommands.writable[f'{type}paths'] = [
path for path in config.customcommands.writable.get(f'{type}paths', []) if path not in to_remove
]
config.customcommands.write()
LOGGER.info(f'Removed {format_paths(to_remove)} ({type}) from the profile {config.customcommands.writeprofile}')
@command.command()
@argument('customcommand', type=CustomCommandType(), help='The custom command to consider')
def which(customcommand):
"""Print the location of the given custom command"""
print(customcommand.customcommand_path)
@command.command()
@argument('customcommand', type=CustomCommandType(), help='The custom command to consider')
@flag('--force', help="Don't ask for confirmation")
def _remove(force, customcommand):
"""Remove the given custom command"""
path = Path(customcommand.customcommand_path)
if force or click.confirm(f'This will remove {path}, are you sure?'):
rm(path)
@command.command()
@argument('customcommand', type=CustomCommandType(), help='The custom command to consider')
def edit(customcommand):
"""Edit the given custom command"""
path = Path(customcommand.customcommand_path)
click.edit(filename=path)
class AliasesType(DynamicChoiceType):
def choices(self):
return list(config.settings['alias'].keys())
@command.group()
def create():
"""Create custom commands directly from the command line."""
@create.command()
@argument('file', help='Install this file as customcommand')
@option('--name', help='Name of the customcommand (default to the name of the file)')
@flag('--delete', help='Delete the source file when done')
@flag('--force', help='Overwrite a file if it already exists')
def from_file(file, name, delete, force):
"""Install the given file as a customcommand, inferring its type.
It works only for python scripts or bash scripts.
"""
import mimetypes
type = mimetypes.guess_type(file)[0]
name = name or Path(file).name
if type == 'text/x-python':
command = python
elif type == 'text/x-sh':
command = bash
else:
raise click.UsageError('I can only install a python script or a bash script.'
f' This is a script of type {type}.'
" I don't know what to do with it.")
ctx = click.get_current_context()
ctx.invoke(command, name=name, from_file=file, force=force)
if delete:
rm(file)
@create.command()
@argument('name', help='The name of the new command')
@flag('--open/--no-open', help='Also open the file after its creation', default=True)
@flag('--force', help='Overwrite a file if it already exists')
@option('--body', help='The initial body to put', default='')
@option('--from-alias', help='The alias to use as base', type=AliasesType())
@flag('--replace-alias', help='Use an alias of the same name and replace it')
@option('--flowdeps', help='Add a flow dependency', multiple=True, type=CommandType())
@option('--description', help='The initial description to put', default='Description')
@option('--source-bash-helpers/--no-source-bash-helpers', help='Source the bash helpers', default=True)
@option('--from-file', help='Copy this file instead of using the template')
def bash(name, open, force, description, body, from_alias, replace_alias, flowdeps, source_bash_helpers, from_file):
"""Create a bash custom command"""
if from_alias and replace_alias:
raise click.UsageError('You can only set --from-alias or --replace-alias, not both at the same time.')
if name.endswith('.sh'):
LOGGER.warning("Removing the extra .sh so that clk won't confuse it with a command name.")
name = name[:-len('.sh')]
script_path = Path(config.customcommands.profile.location) / 'bin' / name
makedirs(script_path.parent)
if script_path.exists() and not force:
raise click.UsageError(f"Won't overwrite {script_path} unless explicitly asked to with --force")
options = []
arguments = []
flags = []
remaining = ''
args = ''
from_alias = from_alias or (replace_alias and name)
if from_alias:
if body:
body = body + '\n'
body = body + '\n'.join(config.main_command.path + ' ' + ' '.join(map(quote, command))
for command in config.settings['alias'][from_alias]['commands'])
flowdeps = list(flowdeps) + get_flow_commands_to_run(from_alias)
alias_cmd = get_command(from_alias)
if description:
description = description + '\n'
description = description + f'Converted from the alias {from_alias}'
def guess_type(param):
if type(param.type) == click.Choice:
return json.dumps(list(param.type.choices))
elif param.type == int:
return 'int'
elif param.type == float:
return 'float'
else:
return 'str'
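        # Translate each click parameter of the alias into the bash
        # customcommand metadata syntax (O: option, F: flag, A: argument)
        # and forward the runtime values through environment variables.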
for param in alias_cmd.params:
if type(param) == Option:
if param.is_flag:
flags.append(f"F:{','.join(param.opts)}:{param.help}:{param.default}")
args += f"""
if [ "${{{config.main_command.path.upper()}___{param.name.upper()}}}" == "True" ]
then
args+=({param.opts[-1]})
fi"""
else:
options.append(f"O:{','.join(param.opts)}:{guess_type(param)}:{param.help}")
args += f"""
if [ -n "${{{config.main_command.path.upper()}___{param.name.upper()}}}" ]
then
args+=({param.opts[-1]} "${{{config.main_command.path.upper()}___{param.name.upper()}}}")
fi"""
elif type(param) == Argument:
if param.nargs == -1:
remaining = param.help
else:
arguments.append(f"A:{','.join(param.opts)}:{guess_type(param)}:{param.help}")
args += f"""
args+=("${{{config.main_command.path.upper()}___{param.name.upper()}}}")
"""
if args:
args = """# Build the arguments of the last command of the alias
args=()""" + args
body += ' "${args[@]}"'
if remaining:
body += ' "${@}"'
if flowdeps:
flowdeps_str = 'flowdepends: ' + ', '.join(flowdeps) + '\n'
else:
flowdeps_str = ''
if options:
options_str = '\n'.join(options) + '\n'
else:
options_str = ''
if arguments:
arguments_str = '\n'.join(arguments) + '\n'
else:
arguments_str = ''
if flags:
flags_str = '\n'.join(flags) + '\n'
else:
flags_str = ''
if remaining:
remaining_str = f'N:{remaining}\n'
else:
remaining_str = ''
script_content = f"""#!/bin/bash -eu
source "_clk.sh"
clk_usage () {{
cat<<EOF
$0
{description}
--
{flowdeps_str}{options_str}{flags_str}{arguments_str}{remaining_str}
EOF
}}
clk_help_handler "$@"
{args}
{body}
"""
if from_file:
script_content = Path(from_file).read_text()
createfile(script_path, script_content, mode=0o755)
if replace_alias:
from clk.core import run
run(['alias', 'unset', name])
if open:
click.edit(filename=str(script_path))
@create.command()
@argument('name', help='The name of the new command')
@flag('--open/--no-open', help='Also open the file after its creation', default=True)
@flag('--force', help='Overwrite a file if it already exists')
@flag('--group/--command', help='Bootstrap a command or a group of commands')
@flag('--with-data', help='Create a directory module instead of a single file.' ' So that you can ship data with it')
@option('--body', help='The initial body to put', default='')
@option('--description', help='The initial description to put', default='Description')
@option('--from-file', help='Copy this file instead of using the template')
def python(name, open, force, description, body, with_data, from_file, group):
"""Create a bash custom command"""
script_path = Path(config.customcommands.profile.location) / 'python'
if with_data:
if name.endswith('.py'):
name = name[:-len('.py')]
script_path /= name
command_name = name
name = '__init__.py'
else:
if not name.endswith('.py'):
name += '.py'
command_name = name[:-len('.py')]
script_path /= name
makedirs(script_path.parent)
if script_path.exists() and not force:
raise click.UsageError(f"Won't overwrite {script_path} unless" ' explicitly asked so with --force')
script_text = f"""#!/usr/bin/env python3
# -*- coding:utf-8 -*-
from pathlib import Path
import click
from clk.decorators import (
argument,
flag,
option,
{'group' if group else 'command'},
use_settings,
table_format,
table_fields,
)
from clk.lib import (
TablePrinter,
call,
)
from clk.config import config
from clk.log import get_logger
from clk.types import DynamicChoice
LOGGER = get_logger(__name__)
"""
if with_data:
script_text += """
def data_file(name):
return Path(__file__).parent / name
"""
script_text += f"""
@{'group' if group else 'command'}()
def {command_name}():
"{description}"
{body}
"""
if from_file:
script_text = Path(from_file).read_text()
createfile(script_path, script_text)
if open:
click.edit(filename=str(script_path))
@command.command()
@argument('customcommand', type=CustomCommandType(), help='The custom command to consider')
@argument('new-name', help='The new name to use for the custom command')
@flag('--force', help='Overwrite destination')
def rename(customcommand, new_name, force):
"""Rename a custom commands"""
ext = os.path.splitext(customcommand.customcommand_path)[1]
if ext in {'.sh', '.py'} and not new_name.endswith(ext):
new_name += ext
new_path = Path(customcommand.customcommand_path).parent / new_name
if new_path.exists() and not force:
raise click.UsageError(f"I won't overwrite {new_path}," ' unless called with --force')
Path(customcommand.customcommand_path).rename(new_path)
LOGGER.status(f'Renamed {customcommand.customcommand_path} into {new_path}')
@command.command()
@argument('customcommand', type=CustomCommandType(), help='The custom command to move')
@argument('profile', type=DirectoryProfileType(), help='The profile where to move the command')
@flag('--force', help='Overwrite destination')
def _move(customcommand, profile, force):
"""Move a custom commands"""
directory = ('python' if customcommand.customcommand_path.endswith('.py') else 'bin')
new_location = Path(profile.location) / directory / Path(customcommand.customcommand_path).name
if new_location.exists() and not force:
raise click.UsageError(f"I won't overwrite {new_location}," ' unless called with --force')
makedirs(new_location.parent)
move(customcommand.customcommand_path, new_location)
LOGGER.status(f'Moved {customcommand.customcommand_path} into {new_location}')
@command.command()
@argument('customcommand', type=CustomCommandType(), help='The custom command to copy')
@argument('profile', type=DirectoryProfileType(), help='The profile where to copy the command')
@argument('name', help='The new name')
@flag('--force', help='Overwrite destination')
def _copy(customcommand, profile, force, name):
"""copy a custom commands"""
directory = ('python' if customcommand.customcommand_path.endswith('.py') else 'bin')
new_location = Path(profile.location) / directory / name
if new_location.exists() and not force:
raise click.UsageError(f"I won't overwrite {new_location}," ' unless called with --force')
makedirs(new_location.parent)
copy(customcommand.customcommand_path, new_location)
    LOGGER.status(f'Copied {customcommand.customcommand_path} into {new_location}')
@command.command()
def _list():
"""List the path of all custom commands."""
settings = (None if config.customcommands.readprofile == 'context' else config.customcommands.profile.settings)
type = CustomCommandType(settings)
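    # cache each path resolution on disk (expire=600 seconds, i.e. 10 minutes)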
@cache_disk(expire=600)
def customcommand_path(command_path):
return type.converter(command_path).customcommand_path
for path in type.choices():
print(customcommand_path(path))
|
#include"../../../../INCLUDE/blaswrap.h"
|
angular.module('MainCtrl', []).controller('MainController', ['$scope', '$route', 'socket', 'localStorageService', function ($scope, $route, socket, localStorageService) {
if (window.location.pathname == '/') {
$('#navHome').addClass("active");
}
$scope.clearHistory = function() {
if (localStorageService.isSupported) {
localStorageService.remove('keywords');
localStorageService.remove('tweets');
}
$route.reload(); // reload view (easiest way to clear view)
}
$scope.keywords = '...';
if (localStorageService.isSupported) {
var storedKeywords = localStorageService.get('keywords');
if (storedKeywords !== null) { // already have keywords in this session, no need to wait for socket message
$scope.keywords = storedKeywords;
$scope.status = 'Listening...';
}
}
socket.on('keywords', function(words){
if ($scope.keywords == '...') { // if not already displayed keywords in view
var wordListStr = '';
for (var c = 0; c < words.length; c++) { // convert words object into a list string
wordListStr = wordListStr + words[c] + ', ';
}
wordListStr = wordListStr.slice(0,-2); // remove trailing comma and space
$scope.keywords = wordListStr;
if (localStorageService.isSupported) {
localStorageService.set('keywords', wordListStr); // save keywords to localStorage
}
$scope.status = 'Listening...'; // now that keywords are shown, update status
}
});
$scope.tweets = [];
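    // IDs of tweets already rendered, used to avoid displaying duplicates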
    var displayedTweets = [];
if (localStorageService.isSupported) {
var analyzedTweets = localStorageService.get('tweets');
if (analyzedTweets !== null) {
analyzedTweets = JSON.parse(analyzedTweets);
for (var t = 0; t < analyzedTweets.length; t++) { // display all previous analyzed tweets
if (displayedTweets.indexOf(analyzedTweets[t].id) < 0) {
displayedTweets.push(analyzedTweets[t].id);
$scope.tweets.push({sentimentClass: analyzedTweets[t].sentimentClass, content: analyzedTweets[t].content, id: analyzedTweets[t].id});
}
}
}
}
socket.on('stream', function(classification, text, tweetId){
if (displayedTweets.indexOf(tweetId) < 0) {
displayedTweets.push(tweetId);
var sentimentViewClass;
if (classification == 1) {
sentimentViewClass = 'alert-success';
} else if (classification == -1) {
sentimentViewClass = 'alert-danger';
} else {
sentimentViewClass = 'alert-warning';
}
if (localStorageService.isSupported) {
// add tweet to localStorage (so client can leave then return later and have this, and other tweets, redisplayed)
var storedTweets = localStorageService.get('tweets');
if (storedTweets === null) { // no stored tweets yet
storedTweets = []; // initialise as an array
} else {
storedTweets = JSON.parse(storedTweets);
}
storedTweets.push({sentimentClass: sentimentViewClass, content: text, id: tweetId}); // add to tweets stored so far
localStorageService.set('tweets', JSON.stringify(storedTweets));
}
// $scope.tweets.push({sentimentClass: sentimentViewClass, content: text, id: tweetId}); // add tweet to view
$route.reload(); // for some reason, above doesn't work; so must reload each time to display tweets from storage
}
});
}]); |
'use strict';
/*jshint unused: false*/
/*jshint bitwise: false*/
(function() {
var app = angular.module('upsConsole', [
'ngResource',
'ngNewRouter',
'ngAnimate',
'ngIdle',
'ui.bootstrap',
'patternfly.autofocus',
'patternfly.notification',
'patternfly.select',
'angular-c3',
'ngClipboard'
]);
app.run(function($rootScope) {
// allow to retrieve $rootScope in views (for clarification of access scope)
$rootScope.$rootScope = $rootScope;
});
var appConfig = {
logDebugEnabled: false,
idleDuration: 300,
idleWarningDuration : 30,
keepaliveInterval: 5
};
app.provider('appConfig', function () {
return {
set: function (settings) {
// allow to override configuration (e.g. in tests)
angular.extend(appConfig, settings);
},
$get: function () {
// default configuration
return appConfig;
}
};
});
app.config(function ($logProvider, appConfigProvider) {
var appConfig = appConfigProvider.$get();
$logProvider.debugEnabled( appConfig.logDebugEnabled );
});
app.factory('docsLinks', function( $http, staticResourcePrefix ) {
var result = {};
$http.get( staticResourcePrefix + 'docs-links.json' )
.then(function( response ) {
angular.extend( result, response.data );
});
return result;
});
app.config(function(ngClipProvider) {
ngClipProvider.setPath( 'img/ZeroClipboard.swf' );
});
app.value('apiPrefix', '');
app.value('staticResourcePrefix', '');
app.value('bootstrapedAppName', function uuid() {
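    // RFC 4122 v4-style id: every 'x' becomes a random hex digit and 'y' one of 8, 9, a or b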
return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function(c) {
var r = Math.random()*16|0, v = c === 'x' ? r : (r&0x3|0x8);
return v.toString(16);
});
});
app.constant('allVariantTypes', ['android', 'ios', 'windows_mpns', 'windows_wns', 'simplePush', 'adm']);
app.value('allowCreateVariant', function( app, variantType ) {
return true;
});
})();
|
const { Engine } = require('json-rules-engine')
const report = require('./report')
const getRulesEngine = () => {
const engine = new Engine()
engine.on('success', async (event, almanac, ruleResult) => {
report(event, almanac, ruleResult)
})
engine.on('failure', async (event, almanac, ruleResult) => {
report(event, almanac, ruleResult)
})
return engine
}
module.exports = getRulesEngine
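// Usage sketch (hypothetical; `rules` must follow json-rules-engine's rule
// format with `conditions` and `event`, and `facts` is a plain object):
//
//   const getRulesEngine = require('./get-rules-engine')
//   const engine = getRulesEngine()
//   rules.forEach(rule => engine.addRule(rule))
//   engine.run(facts) // success and failure events are both reported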
|
import React, { Component } from "react";
import { Link } from "react-router-dom";
import "./Home.css";
export default class Home extends Component {
render() {
return (
<div className="row">
<div className="card w-75 offset-1">
<div className="card-body">
<h1 className="card-title">Welcome to NBA Roster Manager</h1>
<h2 className="card-text">
Create and manage all your favorite NBA fantasy teams
</h2>
<h2 className="card-title">
View stats, and other info from all your favorite NBA players
</h2>
{this.props.isLoggedIn === true && (
<div className="quick-actions">
<Link to="/players">
<button className="btn btn-danger">
Start Adding Players
</button>
</Link>
<Link to="/my-players">
<button className="btn btn-danger">View Your Players</button>
</Link>
</div>
)}
</div>
</div>
</div>
);
}
}
|
from io import StringIO
from pathlib import Path
from ....source_shared.app_id import SteamAppId
from .. import Lump, lump_tag
from ..datatypes.overlay import Overlay, VOverlay
@lump_tag(45, 'LUMP_OVERLAYS')
class OverlayLump(Lump):
def __init__(self, bsp, lump_id):
super().__init__(bsp, lump_id)
self.overlays = []
def parse(self):
reader = self.reader
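        # parse overlay records until the lump reader is exhausted
        # (the reader is assumed falsy once all its bytes are consumed)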
while reader:
overlay = Overlay(self, self._bsp).parse(reader)
self.overlays.append(overlay)
return self
@lump_tag(45, 'LUMP_OVERLAYS', steam_id=SteamAppId.VINDICTUS)
class VOverlayLump(Lump):
def __init__(self, bsp, lump_id):
super().__init__(bsp, lump_id)
self.overlays = []
def parse(self):
reader = self.reader
while reader:
overlay = VOverlay(self, self._bsp).parse(reader)
self.overlays.append(overlay)
return self
|
module.exports = {
executeSimulation: require('./exec-simulation'),
runSignalOptimization: require('./exec-optimization')
}
|
module.exports = {
extends: ['standard', 'prettier'],
plugins: ['prettier'],
rules: {
'prettier/prettier': [
'error',
{
singleQuote: true,
semi: false,
},
]
},
env: { mocha: true }
}
|
/**
* @license Licensed under the Apache License, Version 2.0 (the "License"):
* http://www.apache.org/licenses/LICENSE-2.0
*
* @fileoverview General javaScript for Arduino app with material design.
*/
'use strict';
/** Create a namespace for the application. */
var Ardublockly = Ardublockly || {};
/** Initialize function for Ardublockly, to be called on page load. */
Ardublockly.init = function() {
// Lang init must run first for the rest of the page to pick the right msgs
Ardublockly.initLanguage();
// Inject Blockly into content_blocks and fetch additional blocks
Ardublockly.injectBlockly(document.getElementById('content_blocks'),
Ardublockly.TOOLBOX_XML, '../blockly/');
Ardublockly.importExtraBlocks();
Ardublockly.designJsInit();
Ardublockly.initialiseIdeButtons();
Ardublockly.bindDesignEventListeners();
Ardublockly.bindActionFunctions();
Ardublockly.bindBlocklyEventListeners();
// Check if not running locally
if (document.location.hostname != 'localhost') {
Ardublockly.openNotConnectedModal();
}
};
/** Binds functions to each of the buttons, nav links, and related. */
Ardublockly.bindActionFunctions = function() {
// Navigation buttons
Ardublockly.bindClick_('button_load', Ardublockly.loadUserXmlFile);
Ardublockly.bindClick_('button_save', Ardublockly.saveXmlFile);
Ardublockly.bindClick_('button_delete', Ardublockly.discardAllBlocks);
// Side menu buttons, they also close the side menu
Ardublockly.bindClick_('menu_load', function() {
Ardublockly.loadUserXmlFile();
$('.button-collapse').sideNav('hide');
});
Ardublockly.bindClick_('menu_save', function() {
Ardublockly.saveXmlFile();
$('.button-collapse').sideNav('hide');
});
Ardublockly.bindClick_('menu_delete', function() {
Ardublockly.discardAllBlocks();
$('.button-collapse').sideNav('hide');
});
Ardublockly.bindClick_('menu_settings', function() {
Ardublockly.openSettings();
$('.button-collapse').sideNav('hide');
});
Ardublockly.bindClick_('menu_example_1', function() {
Ardublockly.loadServerXmlFile('../examples/testemotor_v1.xml');
$('.button-collapse').sideNav('hide');
});
Ardublockly.bindClick_('menu_example_2', function() {
Ardublockly.loadServerXmlFile('../examples/nandy_v1.xml');
$('.button-collapse').sideNav('hide');
});
Ardublockly.bindClick_('menu_example_3', function() {
Ardublockly.loadServerXmlFile('../examples/arthur_v1.xml');
$('.button-collapse').sideNav('hide');
});
Ardublockly.bindClick_('menu_example_4', function() {
Ardublockly.loadServerXmlFile('../examples/servo_knob.xml');
$('.button-collapse').sideNav('hide');
});
Ardublockly.bindClick_('menu_example_5', function() {
Ardublockly.loadServerXmlFile('../examples/stepper_knob.xml');
$('.button-collapse').sideNav('hide');
});
// Floating buttons
Ardublockly.bindClick_('button_ide_large', function() {
Ardublockly.ideButtonLargeAction();
});
Ardublockly.bindClick_('button_ide_middle', function() {
Ardublockly.ideButtonMiddleAction();
});
Ardublockly.bindClick_('button_ide_left', function() {
Ardublockly.ideButtonLeftAction();
});
Ardublockly.bindClick_('button_load_xml', Ardublockly.XmlTextareaToBlocks);
Ardublockly.bindClick_('button_toggle_toolbox', Ardublockly.toogleToolbox);
// Settings modal input field listeners
Ardublockly.bindClick_('settings_compiler_location', function() {
ArdublocklyServer.requestNewCompilerLocation(
Ardublockly.setCompilerLocationHtml);
});
Ardublockly.bindClick_('settings_sketch_location', function() {
ArdublocklyServer.requestNewSketchLocation(
Ardublockly.setSketchLocationHtml);
});
};
/** Sets the Ardublockly server IDE setting to upload and sends the code. */
Ardublockly.ideSendUpload = function() {
  // Check if this is the currently selected option before editing the server setting
if (Ardublockly.ideButtonLargeAction !== Ardublockly.ideSendUpload) {
Ardublockly.showExtraIdeButtons(false);
Ardublockly.setIdeSettings(null, 'upload');
}
Ardublockly.shortMessage(Ardublockly.getLocalStr('uploadingSketch'));
Ardublockly.resetIdeOutputContent();
Ardublockly.sendCode();
};
/** Sets the Ardublockly server IDE setting to verify and sends the code. */
Ardublockly.ideSendVerify = function() {
  // Check if this is the currently selected option before editing the server setting
if (Ardublockly.ideButtonLargeAction !== Ardublockly.ideSendVerify) {
Ardublockly.showExtraIdeButtons(false);
Ardublockly.setIdeSettings(null, 'verify');
}
Ardublockly.shortMessage(Ardublockly.getLocalStr('verifyingSketch'));
Ardublockly.resetIdeOutputContent();
Ardublockly.sendCode();
};
/** Sets the Ardublockly server IDE setting to open and sends the code. */
Ardublockly.ideSendOpen = function() {
  // Check if this is the currently selected option before editing the server setting
if (Ardublockly.ideButtonLargeAction !== Ardublockly.ideSendOpen) {
Ardublockly.showExtraIdeButtons(false);
Ardublockly.setIdeSettings(null, 'open');
}
Ardublockly.shortMessage(Ardublockly.getLocalStr('openingSketch'));
Ardublockly.resetIdeOutputContent();
Ardublockly.sendCode();
};
/** Function bound to the large IDE button, to be changed based on settings. */
Ardublockly.ideButtonLargeAction = Ardublockly.ideSendUpload;
/** Function bound to the middle IDE button, to be changed based on settings. */
Ardublockly.ideButtonMiddleAction = Ardublockly.ideSendVerify;
/** Function bound to the left IDE button, to be changed based on settings. */
Ardublockly.ideButtonLeftAction = Ardublockly.ideSendOpen;
/** Initialises the IDE buttons with the default option from the server. */
Ardublockly.initialiseIdeButtons = function() {
document.getElementById('button_ide_left').title =
Ardublockly.getLocalStr('openSketch');
document.getElementById('button_ide_middle').title =
Ardublockly.getLocalStr('verifySketch');
document.getElementById('button_ide_large').title =
Ardublockly.getLocalStr('uploadSketch');
ArdublocklyServer.requestIdeOptions(function(jsonResponse) {
if (jsonResponse != null) {
var parsedJson = JSON.parse(jsonResponse);
// "response_type" : "settings_board",
// "element" : "dropdown",
// "options" : [ {"value" : "XXX", "text" : "XXX"}, ...]
// "selected": "selected key"}
Ardublockly.changeIdeButtons(parsedJson.selected);
} // else Null: Ardublockly server is not running, do nothing
});
};
/**
* Changes the IDE launch buttons based on the option indicated in the argument.
* @param {!string} value One of the 3 possible values from the drop down select
* in the settings modal: 'upload', 'verify', or 'open'.
*/
Ardublockly.changeIdeButtons = function(value) {
var largeButton = document.getElementById('button_ide_large');
var middleButton = document.getElementById('button_ide_middle');
var leftButton = document.getElementById('button_ide_left');
var openTitle = Ardublockly.getLocalStr('openSketch');
var verifyTitle = Ardublockly.getLocalStr('verifySketch');
var uploadTitle = Ardublockly.getLocalStr('uploadSketch');
if (value === 'upload') {
Ardublockly.changeIdeButtonsDesign(value);
Ardublockly.ideButtonLeftAction = Ardublockly.ideSendOpen;
Ardublockly.ideButtonMiddleAction = Ardublockly.ideSendVerify;
Ardublockly.ideButtonLargeAction = Ardublockly.ideSendUpload;
leftButton.title = openTitle;
middleButton.title = verifyTitle;
largeButton.title = uploadTitle;
} else if (value === 'verify') {
Ardublockly.changeIdeButtonsDesign(value);
Ardublockly.ideButtonLeftAction = Ardublockly.ideSendOpen;
Ardublockly.ideButtonMiddleAction = Ardublockly.ideSendUpload;
Ardublockly.ideButtonLargeAction = Ardublockly.ideSendVerify;
leftButton.title = openTitle;
middleButton.title = uploadTitle;
largeButton.title = verifyTitle;
} else if (value === 'open') {
Ardublockly.changeIdeButtonsDesign(value);
Ardublockly.ideButtonLeftAction = Ardublockly.ideSendVerify;
Ardublockly.ideButtonMiddleAction = Ardublockly.ideSendUpload;
Ardublockly.ideButtonLargeAction = Ardublockly.ideSendOpen;
leftButton.title = verifyTitle;
middleButton.title = uploadTitle;
largeButton.title = openTitle;
}
};
/**
* Loads an XML file from the server and replaces the current blocks into the
* Blockly workspace.
* @param {!string} xmlFile Server location of the XML file to load.
*/
Ardublockly.loadServerXmlFile = function(xmlFile) {
var loadXmlfileAccepted = function() {
// loadXmlBlockFile loads the file asynchronously and needs a callback
    var loadXmlCb = function(success) {
      if (success) {
Ardublockly.renderContent();
} else {
Ardublockly.alertMessage(
Ardublockly.getLocalStr('invalidXmlTitle'),
Ardublockly.getLocalStr('invalidXmlBody'),
false);
}
};
var connectionErrorCb = function() {
Ardublockly.openNotConnectedModal();
};
Ardublockly.loadXmlBlockFile(xmlFile, loadXmlCb, connectionErrorCb);
};
if (Ardublockly.isWorkspaceEmpty()) {
loadXmlfileAccepted();
} else {
Ardublockly.alertMessage(
Ardublockly.getLocalStr('loadNewBlocksTitle'),
Ardublockly.getLocalStr('loadNewBlocksBody'),
true, loadXmlfileAccepted);
}
};
/**
* Loads an XML file from the users file system and adds the blocks into the
* Blockly workspace.
*/
Ardublockly.loadUserXmlFile = function() {
// Create File Reader event listener function
var parseInputXMLfile = function(e) {
var xmlFile = e.target.files[0];
var filename = xmlFile.name;
var extensionPosition = filename.lastIndexOf('.');
if (extensionPosition !== -1) {
filename = filename.substr(0, extensionPosition);
}
var reader = new FileReader();
reader.onload = function() {
var success = Ardublockly.replaceBlocksfromXml(reader.result);
if (success) {
Ardublockly.renderContent();
Ardublockly.sketchNameSet(filename);
} else {
Ardublockly.alertMessage(
Ardublockly.getLocalStr('invalidXmlTitle'),
Ardublockly.getLocalStr('invalidXmlBody'),
false);
}
};
reader.readAsText(xmlFile);
};
  // Create the invisible browse button (only once) with its event listener, then click it
var selectFile = document.getElementById('select_file');
if (selectFile === null) {
var selectFileDom = document.createElement('INPUT');
selectFileDom.type = 'file';
selectFileDom.id = 'select_file';
var selectFileWrapperDom = document.createElement('DIV');
selectFileWrapperDom.id = 'select_file_wrapper';
selectFileWrapperDom.style.display = 'none';
selectFileWrapperDom.appendChild(selectFileDom);
document.body.appendChild(selectFileWrapperDom);
selectFile = document.getElementById('select_file');
selectFile.addEventListener('change', parseInputXMLfile, false);
}
selectFile.click();
};
/**
* Creates an XML file containing the blocks from the Blockly workspace and
* prompts the users to save it into their local file system.
*/
Ardublockly.saveXmlFile = function() {
Ardublockly.saveTextFileAs(
document.getElementById('sketch_name').value + '.xml',
Ardublockly.generateXml());
};
/**
* Creates an Arduino Sketch file containing the Arduino code generated from
* the Blockly workspace and prompts the users to save it into their local file
* system.
*/
Ardublockly.saveSketchFile = function() {
Ardublockly.saveTextFileAs(
document.getElementById('sketch_name').value + '.ino',
Ardublockly.generateArduino());
};
/**
 * Creates a text file with the given content and file name, and prompts the
 * user to save it into their local file system.
 * @param {!string} fileName Name for the file to be saved.
 * @param {!string} content Text data to be saved into the file.
*/
Ardublockly.saveTextFileAs = function(fileName, content) {
var blob = new Blob([content], {type: 'text/plain;charset=utf-8'});
saveAs(blob, fileName);
};
/**
 * Retrieves the Settings from ArdublocklyServer to populate the form data
* and opens the Settings modal dialog.
*/
Ardublockly.openSettings = function() {
ArdublocklyServer.requestCompilerLocation(
Ardublockly.setCompilerLocationHtml);
ArdublocklyServer.requestSketchLocation(Ardublockly.setSketchLocationHtml);
ArdublocklyServer.requestArduinoBoards(Ardublockly.setArduinoBoardsHtml);
ArdublocklyServer.requestSerialPorts(Ardublockly.setSerialPortsHtml);
ArdublocklyServer.requestIdeOptions(Ardublockly.setIdeHtml);
// Language menu only set on page load within Ardublockly.initLanguage()
Ardublockly.openSettingsModal();
};
/**
 * Sets the compiler location form data retrieved from the server.
* @param {element} jsonResponse JSON data coming back from the server.
* @return {undefined} Might exit early if response is null.
*/
Ardublockly.setCompilerLocationHtml = function(jsonResponse) {
if (jsonResponse === null) return Ardublockly.openNotConnectedModal();
var newEl = ArdublocklyServer.createElementFromJson(jsonResponse);
var compLocIp = document.getElementById('settings_compiler_location');
if (compLocIp != null) {
compLocIp.value = newEl.value;
}
};
/**
 * Sets the sketch location form data retrieved from the server.
* @param {element} jsonResponse JSON data coming back from the server.
* @return {undefined} Might exit early if response is null.
*/
Ardublockly.setSketchLocationHtml = function(jsonResponse) {
if (jsonResponse === null) return Ardublockly.openNotConnectedModal();
var newEl = ArdublocklyServer.createElementFromJson(jsonResponse);
var sketchLocIp = document.getElementById('settings_sketch_location');
if (sketchLocIp != null) {
sketchLocIp.value = newEl.value;
}
};
/**
 * Replaces the Arduino Boards form data with a new HTML element.
 * Ensures there is a change listener to call the 'setBoard' function.
* @param {element} jsonResponse JSON data coming back from the server.
* @return {undefined} Might exit early if response is null.
*/
Ardublockly.setArduinoBoardsHtml = function(jsonResponse) {
if (jsonResponse === null) return Ardublockly.openNotConnectedModal();
var newEl = ArdublocklyServer.createElementFromJson(jsonResponse);
var boardDropdown = document.getElementById('board');
if (boardDropdown !== null) {
// Restarting the select elements built by materialize
$('select').material_select('destroy');
newEl.name = 'settings_board';
newEl.id = 'board';
newEl.onchange = Ardublockly.setBoard;
boardDropdown.parentNode.replaceChild(newEl, boardDropdown);
// Refresh the materialize select menus
$('select').material_select();
}
};
/**
* Sets the Arduino Board type with the selected user input from the drop down.
*/
Ardublockly.setBoard = function() {
var el = document.getElementById('board');
var boardValue = el.options[el.selectedIndex].value;
//TODO: Check how ArdublocklyServer deals with invalid data and sanitise
ArdublocklyServer.setArduinoBoard(
boardValue, Ardublockly.setArduinoBoardsHtml);
Ardublockly.changeBlocklyArduinoBoard(
boardValue.toLowerCase().replace(/ /g, '_'));
};
/**
 * Replaces the Serial Port form data with a new HTML element.
 * Ensures there is a change listener to call the 'setSerial' function.
* @param {element} jsonResponse JSON data coming back from the server.
* @return {undefined} Might exit early if response is null.
*/
Ardublockly.setSerialPortsHtml = function(jsonResponse) {
if (jsonResponse === null) return Ardublockly.openNotConnectedModal();
var newEl = ArdublocklyServer.createElementFromJson(jsonResponse);
var serialDropdown = document.getElementById('serial_port');
if (serialDropdown !== null) {
// Restarting the select elements built by materialize
$('select').material_select('destroy');
newEl.name = 'settings_serial';
newEl.id = 'serial_port';
newEl.onchange = Ardublockly.setSerial;
serialDropdown.parentNode.replaceChild(newEl, serialDropdown);
// Refresh the materialize select menus
$('select').material_select();
}
};
/** Sets the Serial Port with the selected user input from the drop down. */
Ardublockly.setSerial = function() {
var el = document.getElementById('serial_port');
var serialValue = el.options[el.selectedIndex].value;
//TODO: check how ArdublocklyServer deals with invalid data and sanitise
ArdublocklyServer.setSerialPort(
serialValue, Ardublockly.setSerialPortsHtml);
};
/**
 * Replaces the IDE options form data with a new HTML element.
 * Ensures there is a change listener to call the 'setIdeSettings' function.
* @param {element} jsonResponse JSON data coming back from the server.
* @return {undefined} Might exit early if response is null.
*/
Ardublockly.setIdeHtml = function(jsonResponse) {
if (jsonResponse === null) return Ardublockly.openNotConnectedModal();
var newEl = ArdublocklyServer.createElementFromJson(jsonResponse);
var ideDropdown = document.getElementById('ide_settings');
if (ideDropdown !== null) {
// Restarting the select elements built by materialize
$('select').material_select('destroy');
newEl.name = 'settings_ide';
newEl.id = 'ide_settings';
newEl.onchange = Ardublockly.setIdeSettings;
ideDropdown.parentNode.replaceChild(newEl, ideDropdown);
// Refresh the materialize select menus
$('select').material_select();
}
};
/**
* Sets the IDE settings data with the selected user input from the drop down.
 * @param {Event} e Event that triggered this function call. Required to link
 *     it to the listeners, but not used.
* @param {string} preset A value to set the IDE settings bypassing the drop
* down selected value. Valid data: 'upload', 'verify', or 'open'.
*/
Ardublockly.setIdeSettings = function(e, preset) {
if (preset !== undefined) {
var ideValue = preset;
} else {
var el = document.getElementById('ide_settings');
var ideValue = el.options[el.selectedIndex].value;
}
Ardublockly.changeIdeButtons(ideValue);
//TODO: check how ArdublocklyServer deals with invalid data and sanitise here
ArdublocklyServer.setIdeOptions(ideValue, Ardublockly.setIdeHtml);
};
/**
* Send the Arduino Code to the ArdublocklyServer to process.
 * Shows a loader around the button, blocking it (unblocked upon receiving
 * the server's response).
*/
Ardublockly.sendCode = function() {
Ardublockly.largeIdeButtonSpinner(true);
/**
* Receives the IDE data back to be displayed and stops spinner.
* @param {element} jsonResponse JSON data coming back from the server.
* @return {undefined} Might exit early if response is null.
*/
var sendCodeReturn = function(jsonResponse) {
Ardublockly.largeIdeButtonSpinner(false);
if (jsonResponse === null) return Ardublockly.openNotConnectedModal();
var dataBack = ArdublocklyServer.createElementFromJson(jsonResponse);
Ardublockly.arduinoIdeOutput(dataBack);
};
ArdublocklyServer.sendSketchToServer(
Ardublockly.generateArduino(), sendCodeReturn);
};
/** Populate the workspace blocks with the XML written in the XML text area. */
Ardublockly.XmlTextareaToBlocks = function() {
var success = Ardublockly.replaceBlocksfromXml(
document.getElementById('content_xml').value);
if (success) {
Ardublockly.renderContent();
} else {
Ardublockly.alertMessage(
Ardublockly.getLocalStr('invalidXmlTitle'),
Ardublockly.getLocalStr('invalidXmlBody'),
false);
}
};
/**
* Private variable to save the previous version of the Arduino Code.
* @type {!String}
* @private
*/
Ardublockly.PREV_ARDUINO_CODE_ = 'void setup() {\n\n}\n\n\nvoid loop() {\n\n}';
/**
* Populate the Arduino Code and Blocks XML panels with content generated from
* the blocks.
*/
Ardublockly.renderContent = function() {
// Only regenerate the code if a block is not being dragged
if (Ardublockly.blocklyIsDragging()) return;
// Render Arduino Code with latest change highlight and syntax highlighting
var arduinoCode = Ardublockly.generateArduino();
if (arduinoCode !== Ardublockly.PREV_ARDUINO_CODE_) {
var diff = JsDiff.diffWords(Ardublockly.PREV_ARDUINO_CODE_, arduinoCode);
var resultStringArray = [];
for (var i = 0; i < diff.length; i++) {
if (!diff[i].removed) {
        var escapedCode = diff[i].value.replace(/</g, '&lt;')
                                       .replace(/>/g, '&gt;');
if (diff[i].added) {
resultStringArray.push(
'<span class="code_highlight_new">' + escapedCode + '</span>');
} else {
resultStringArray.push(escapedCode);
}
}
}
document.getElementById('content_arduino').innerHTML =
prettyPrintOne(resultStringArray.join(''), 'cpp', false);
Ardublockly.PREV_ARDUINO_CODE_ = arduinoCode;
}
// Generate plain XML into element
document.getElementById('content_xml').value = Ardublockly.generateXml();
};
/**
* Private variable to indicate if the toolbox is meant to be shown.
* @type {!boolean}
* @private
*/
Ardublockly.TOOLBAR_SHOWING_ = true;
/**
* Toggles the blockly toolbox and the Ardublockly toolbox button On and Off.
* Uses namespace member variable TOOLBAR_SHOWING_ to toggle state.
*/
Ardublockly.toogleToolbox = function() {
if (Ardublockly.TOOLBAR_SHOWING_) {
Ardublockly.blocklyCloseToolbox();
Ardublockly.displayToolbox(false);
} else {
Ardublockly.displayToolbox(true);
}
Ardublockly.TOOLBAR_SHOWING_ = !Ardublockly.TOOLBAR_SHOWING_;
};
/** @return {boolean} Indicates if the toolbox is currently visible. */
Ardublockly.isToolboxVisible = function() {
return Ardublockly.TOOLBAR_SHOWING_;
};
/**
* Lazy loads the additional block JS files from the ./block directory.
* Initialises any additional Ardublockly extensions.
* TODO: Loads the examples into the examples modal
*/
Ardublockly.importExtraBlocks = function() {
/**
* Parses the JSON data to find the block and languages js files.
* @param {jsonDataObj} jsonDataObj JSON in JavaScript object format, null
* indicates an error occurred.
* @return {undefined} Might exit early if response is null.
*/
var jsonDataCb = function(jsonDataObj) {
if (jsonDataObj === null) return Ardublockly.openNotConnectedModal();
if (jsonDataObj.categories !== undefined) {
var head = document.getElementsByTagName('head')[0];
for (var catDir in jsonDataObj.categories) {
var blocksJsLoad = document.createElement('script');
blocksJsLoad.src = '../blocks/' + catDir + '/blocks.js';
head.appendChild(blocksJsLoad);
var blocksLangJsLoad = document.createElement('script');
blocksLangJsLoad.src = '../blocks/' + catDir + '/msg/' + 'messages.js';
//'lang/' + Ardublockly.LANG + '.js';
head.appendChild(blocksLangJsLoad);
var blocksGeneratorJsLoad = document.createElement('script');
blocksGeneratorJsLoad.src = '../blocks/' + catDir +
'/generator_arduino.js';
head.appendChild(blocksGeneratorJsLoad);
// Check if the blocks add additional Ardublockly functionality
var extensions = jsonDataObj.categories[catDir].extensions;
if (extensions) {
for (var i = 0; i < extensions.length; i++) {
var blockExtensionJsLoad = document.createElement('script');
blockExtensionJsLoad.src = '../blocks/' + catDir + '/extensions.js';
head.appendChild(blockExtensionJsLoad);
// Add function to scheduler as lazy loading has to complete first
setTimeout(function(category, extension) {
var extensionNamespaces = extension.split('.');
var extensionCall = window;
var invalidFunc = false;
for (var j = 0; j < extensionNamespaces.length; j++) {
extensionCall = extensionCall[extensionNamespaces[j]];
if (extensionCall === undefined) {
invalidFunc = true;
break;
}
}
if (typeof extensionCall != 'function') {
invalidFunc = true;
}
if (invalidFunc) {
throw 'Blocks ' + category.categoryName + ' extension "' +
extension + '" is not a valid function.';
} else {
extensionCall();
}
}, 800, jsonDataObj.categories[catDir], extensions[i]);
}
}
}
}
};
// Reads the JSON data containing all block categories from ./blocks directory
// TODO: Now reading a local file, to be replaced by server generated JSON
Ardublockly.getJsonData('../blocks/blocks_data.json', jsonDataCb);
};
/** Opens a modal with a list of categories to add or remove to the toolbox */
Ardublockly.openExtraCategoriesSelect = function() {
/**
* Parses the JSON data from the server into a list of additional categories.
* @param {jsonDataObj} jsonDataObj JSON in JavaScript object format, null
* indicates an error occurred.
* @return {undefined} Might exit early if response is null.
*/
var jsonDataCb = function(jsonDataObj) {
if (jsonDataObj === null) return Ardublockly.openNotConnectedModal();
var htmlContent = document.createElement('div');
if (jsonDataObj.categories !== undefined) {
for (var catDir in jsonDataObj.categories) {
        // A closure is required to keep each loop iteration's variables in their own scope
(function(cat) {
var clickBind = function(tickValue) {
if (tickValue) {
var catDom = (new DOMParser()).parseFromString(
cat.toolbox.join(''), 'text/xml').firstChild;
Ardublockly.addToolboxCategory(cat.toolboxName, catDom);
} else {
Ardublockly.removeToolboxCategory(cat.toolboxName);
}
};
htmlContent.appendChild(Ardublockly.createExtraBlocksCatHtml(
cat.categoryName, cat.description, clickBind));
})(jsonDataObj.categories[catDir]);
}
}
Ardublockly.openAdditionalBlocksModal(htmlContent);
};
// Reads the JSON data containing all block categories from ./blocks directory
// TODO: Now reading a local file, to be replaced by server generated JSON
Ardublockly.getJsonData('../blocks/blocks_data.json', jsonDataCb);
};
/** Informs the user that the selected function is not yet implemented. */
Ardublockly.functionNotImplemented = function() {
Ardublockly.shortMessage('Function not yet implemented');
};
/**
* Interface to display messages with a possible action.
* @param {!string} title HTML to include in title.
* @param {!element} body HTML to include in body.
* @param {boolean=} confirm Indicates if the user is shown a single option (ok)
* or an option to cancel, with an action applied to the "ok".
* @param {string=|function=} callback If confirm option is selected this would
* be the function called when clicked 'OK'.
*/
Ardublockly.alertMessage = function(title, body, confirm, callback) {
Ardublockly.materialAlert(title, body, confirm, callback);
};
/**
 * Interface to display a short message, which disappears after a timeout.
* @param {!string} message Text to be temporarily displayed.
*/
Ardublockly.shortMessage = function(message) {
Ardublockly.MaterialToast(message);
};
/**
* Bind a function to a button's click event.
* On touch enabled browsers, ontouchend is treated as equivalent to onclick.
* @param {!Element|string} el Button element or ID thereof.
* @param {!function} func Event handler to bind.
* @private
*/
Ardublockly.bindClick_ = function(el, func) {
if (typeof el == 'string') {
el = document.getElementById(el);
}
  // Ensure that touch and click events don't both fire for the same interaction
var propagateOnce = function(e) {
e.stopPropagation();
e.preventDefault();
func();
};
  el.addEventListener('touchend', propagateOnce);
el.addEventListener('click', propagateOnce);
};
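// Typical page hook-up (a sketch; the shipped HTML may wire this differently):
//   window.addEventListener('load', Ardublockly.init);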
|
#############################################################################
##
## Copyright (C) 2016 The Qt Company Ltd.
## Contact: http://www.qt.io/licensing/
##
## This file is part of the Qt for Python examples of the Qt Toolkit.
##
## $QT_BEGIN_LICENSE:BSD$
## You may use this file under the terms of the BSD license as follows:
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are
## met:
## * Redistributions of source code must retain the above copyright
## notice, this list of conditions and the following disclaimer.
## * Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in
## the documentation and/or other materials provided with the
## distribution.
## * Neither the name of The Qt Company Ltd nor the names of its
## contributors may be used to endorse or promote products derived
## from this software without specific prior written permission.
##
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
## A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
## OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
## SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
## LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
## DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
## THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
## (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
## OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
## $QT_END_LICENSE$
##
#############################################################################
import sys
import argparse
import pandas as pd
from PySide2.QtCore import (QDateTime, QTimeZone)
from PySide2.QtWidgets import QApplication
from main_window import MainWindow
def transform_date(utc, timezone=None):
utc_fmt = "yyyy-MM-ddTHH:mm:ss.zzzZ"
new_date = QDateTime().fromString(utc, utc_fmt)
if timezone:
new_date.setTimeZone(timezone)
return new_date
def read_data(fname):
# Read the CSV content
df = pd.read_csv(fname)
# Remove wrong magnitudes
df = df.drop(df[df.mag < 0].index)
magnitudes = df["mag"]
# My local timezone
timezone = QTimeZone(b"Europe/Berlin")
# Get timestamp transformed to our timezone
times = df["time"].apply(lambda x: transform_date(x, timezone))
return times, magnitudes
if __name__ == "__main__":
options = argparse.ArgumentParser()
options.add_argument("-f", "--file", type=str, required=True)
args = options.parse_args()
data = read_data(args.file)
# Qt Application
app = QApplication(sys.argv)
window = MainWindow()
window.show()
sys.exit(app.exec_()) |
/**
* Copyright (c) UNA, Inc - https://una.io
* MIT License - https://opensource.org/licenses/MIT
*
* @defgroup UnaStudio UNA Studio
* @{
*/
function BxDolStudioBuilderPage(oOptions) {
this.sActionUrl = oOptions.sActionUrl;
this.sPageUrl = oOptions.sPageUrl;
this.sObjName = oOptions.sObjName == undefined ? 'oBxDolStudioBuilderPage' : oOptions.sObjName;
this.sType = oOptions.sType == undefined ? '' : oOptions.sType;
this.sPage = oOptions.sPage == undefined ? '' : oOptions.sPage;
this.sAnimationEffect = oOptions.sAnimationEffect == undefined ? 'fade' : oOptions.sAnimationEffect;
this.iAnimationSpeed = oOptions.iAnimationSpeed == undefined ? 'slow' : oOptions.iAnimationSpeed;
this.oHtmlIds = oOptions.oHtmlIds == undefined ? {} : oOptions.oHtmlIds;
this.oLanguages = oOptions.oLanguages == undefined ? {} : oOptions.oLanguages;
this.aSortingConf = {
parent: '.adm-bp-cell-cnt',
parent_prefix: 'adm-bpc-',
item: '.adm-bp-block',
placeholder: 'adm-bp-block adm-bp-block-empty'
};
var $this = this;
$(document).ready(function() {
$($this.aSortingConf.parent).sortable({
handle: $($this.aSortingConf.item + ' .adm-bpb-drag-handle'),
items: $this.aSortingConf.item,
placeholder: $this.aSortingConf.placeholder,
connectWith: $this.aSortingConf.parent,
start: function(oEvent, oUi) {
oUi.item.addClass('adm-bp-block-dragging').width(300);
},
stop: function(oEvent, oUi) {
oUi.item.removeClass('adm-bp-block-dragging');
$this.reorder(oUi.item);
}
});
});
}
/**
* Main page methods.
*/
BxDolStudioBuilderPage.prototype.onChangePage = function(oSelect) {
document.location.href = this.parsePageUrl({page: $(oSelect).val()});
};
BxDolStudioBuilderPage.prototype.getUri = function(oElement) {
var mixedParams = {};
var sUri = $('#' + this.oHtmlIds['uri_field_id']).val();
if(sUri.length > 0)
mixedParams['uri'] = sUri;
else {
var bGot = false;
$.each(this.oLanguages, function(sKey, sValue) {
mixedParams[sKey] = $('#bx-form-input-title-' + sKey).val();
if(mixedParams[sKey].length > 0)
bGot = true;
});
if(!bGot)
return;
}
bx_loading(this.oHtmlIds['add_popup_id'], true);
this.performAction('uri_get', mixedParams);
};
BxDolStudioBuilderPage.prototype.onGetUri = function(oData) {
bx_loading(this.oHtmlIds['add_popup_id'], false);
$('#' + this.oHtmlIds['uri_field_id']).val(oData.uri);
$('#' + this.oHtmlIds['url_field_id']).val(oData.url);
};
BxDolStudioBuilderPage.prototype.onCreatePage = function(sType, sPage) {
window.location.href = this.parsePageUrl({type: sType, page: sPage});
};
BxDolStudioBuilderPage.prototype.deletePage = function() {
var $this = this;
bx_confirm(aDolLang['_adm_bp_wrn_page_delete'], function() {
$this.performAction('page_delete');
});
};
BxDolStudioBuilderPage.prototype.reorder = function(oDraggable) {
var $this = this;
var oDate = new Date();
var aParams = new Array();
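    // serialize every column's block order so the server can persist the new layout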
$(this.aSortingConf.parent).each(function(iIndex, oElement){
var sId = $(oElement).attr('id');
aParams.push($('#' + sId).sortable('serialize', {key: 'bp_items_' + sId.replace($this.aSortingConf.parent_prefix, '') + '[]'}));
});
$.post(
this.sActionUrl + (this.sActionUrl.indexOf('?') == -1 ? '?' : '&') + aParams.join('&'),
{
bp_action: 'reorder',
bp_page: $this.sPage,
_t:oDate.getTime()
},
function(oData) {
if(oData.code != 0) {
bx_alert(oData.message);
return;
}
},
'json'
);
return true;
};
/**
* "Add Block" popup methods.
*/
BxDolStudioBuilderPage.prototype.onChangeModule = function(sName, oLink) {
var $this = this;
var oDate = new Date();
var sClass = 'bx-menu-tab-active';
$(oLink).parents('.bx-std-pmen-item:first').addClass(sClass).siblings('.bx-std-pmen-item').removeClass(sClass);
var sId = '#' + $this.oHtmlIds['block_list_id'] + sName;
if($(sId).length != 0) {
$('#' + this.oHtmlIds['block_lists_id'] + ' > div:visible').bx_anim('hide', this.sAnimationEffect, 0, function() {
$(sId).show();
});
return;
}
bx_loading(this.oHtmlIds['create_block_popup_id'], true);
$.post(
this.sActionUrl,
{
bp_action: 'block_list',
bp_module: sName,
_t:oDate.getTime()
},
function(oData) {
bx_loading($this.oHtmlIds['create_block_popup_id'], false);
$('#' + $this.oHtmlIds['block_lists_id'] + ' > div:visible').bx_anim('hide', $this.sAnimationEffect, 0, function() {
$(this).parent().append(oData.content);
});
},
'json'
);
};
BxDolStudioBuilderPage.prototype.onSelectBlock = function(oCheckbox) {
var iCounter = parseInt($('#adm-bp-cbf-counter').html());
iCounter += $(oCheckbox).prop('checked') ? 1 : -1;
$('#adm-bp-cbf-counter').html(iCounter);
};
BxDolStudioBuilderPage.prototype.onCreateBlock = function(oData) {
window.location.href = this.parsePageUrl({page: this.sPage});
};
BxDolStudioBuilderPage.prototype.deleteBlockImage = function(iId) {
bx_loading(this.oHtmlIds['edit_block_popup_id'], true);
this.performAction('image_delete', {id:iId});
};
BxDolStudioBuilderPage.prototype.onChangeVisibleFor = function(oSelect) {
$(oSelect).parents('form:first').find('#bx-form-element-visible_for_levels').bx_anim($(oSelect).val() == 'all' ? 'hide' : 'show', this.sAnimationEffect, this.iAnimationSpeed);
};
BxDolStudioBuilderPage.prototype.onEditBlockBeforeShow = function(oPopup) {
var oTextarea = $(oPopup).find("textarea.bx-form-input-textarea-codemirror[name = 'content']");
if(oTextarea.length > 0)
CodeMirror.fromTextArea(oTextarea.get(0), {
lineNumbers: true,
mode: "htmlmixed",
htmlMode: true,
matchBrackets: true
}).on('blur', function(oEditor) {
oEditor.save();
});
};
BxDolStudioBuilderPage.prototype.onEditBlock = function(oData) {
window.location.href = this.parsePageUrl({page: this.sPage});
};
BxDolStudioBuilderPage.prototype.onEditBlockCancel = function(oButton) {
if ('undefined' !== typeof(bx_editor_remove_all))
bx_editor_remove_all($(oButton).parents('form:first'));
$('.bx-popup-applied:visible').dolPopupHide();
};
BxDolStudioBuilderPage.prototype.deleteBlock = function(iId) {
var $this = this;
bx_confirm(aDolLang['_adm_bp_wrn_page_block_delete'], function() {
bx_loading($this.oHtmlIds['edit_block_popup_id'], true);
$this.performAction('block_delete', {id:iId});
});
};
BxDolStudioBuilderPage.prototype.onDeleteBlock = function(iId, oData) {
bx_loading(this.oHtmlIds['edit_block_popup_id'], false);
$('.bx-popup-applied:visible').dolPopupHide();
$('#' + this.oHtmlIds['block_id'] + iId).bx_anim('hide', this.sAnimationEffect, this.iAnimationSpeed, function() {
$(this).remove();
});
};
/**
* "Settings" popup methods.
*/
BxDolStudioBuilderPage.prototype.onChangeSettingGroup = function(sName, oLink) {
var $this = this;
var sClass = 'bx-menu-tab-active';
$(oLink).parents('.bx-std-pmen-item:first').addClass(sClass).siblings('.bx-std-pmen-item').removeClass(sClass);
$('#' + this.oHtmlIds['settings_groups_id'] + ' > div:visible').bx_anim('hide', this.sAnimationEffect, 0, function() {
$('#' + $this.oHtmlIds['settings_group_id'] + sName).show();
});
};
BxDolStudioBuilderPage.prototype.onChangeLayout = function(iId, oLink) {
$('#' + this.oHtmlIds['settings_group_id'] + 'layout > .adm-bp-layout-active').removeClass('adm-bp-layout-active');
$('#' + this.oHtmlIds['layout_id'] + iId).addClass('adm-bp-layout-active');
$("[name = 'layout_id']").val(iId);
};
BxDolStudioBuilderPage.prototype.onSaveSettingsLayout = function() {
window.location.href = this.parsePageUrl({page: this.sPage});
};
BxDolStudioBuilderPage.prototype.onCloseSettings = function() {
$('.bx-popup-applied:visible').dolPopupHide({
onHide: function(oPopup) {
$(oPopup).remove();
}
});
};
/**
* General methods.
*/
BxDolStudioBuilderPage.prototype.performAction = function(sAction, aParams) {
var $this = this;
var oDate = new Date();
if(aParams == undefined)
aParams = {};
aParams.bp_action = sAction;
aParams.bp_type = $this.sType;
aParams.bp_page = $this.sPage;
aParams._t = oDate.getTime();
if($('.bx-loading-ajax:visible').length == 0)
bx_loading('bx-std-page-columns', true);
$.post(
this.sActionUrl,
aParams,
function(oData) {
oBxDolStudioPage.processJson(oData);
},
'json'
);
};
BxDolStudioBuilderPage.prototype.parsePageUrl = function(aParams) {
var sType = aParams.type != undefined ? aParams.type : this.sType;
var sPage = aParams.page != undefined ? aParams.page : '';
return this.sPageUrl.replace('{0}', sType).replace('{1}', sPage);
};
BxDolStudioBuilderPage.prototype.changeDimension = function(oText) {
var oText = $(oText);
var sKey = oText.attr('name');
var iValue = parseInt(oText.val());
if(!iValue || iValue <= 0)
return;
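    // rewrite only the edited dimension parameter inside the ghost uploader's URL query string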
var oUrl = oText.parents('.bx-uploader-ghost:first').find('.bx-ug-url');
var aUrl = oUrl.val().split('?');
var aPairs = aUrl[1].split('&');
var aParams = {};
var bUpdated = false;
for(var i in aPairs) {
var aPair = aPairs[i].split('=');
aParams[aPair[0]] = aPair[1];
if(aPair[0] == sKey) {
aParams[aPair[0]] = iValue;
bUpdated = true;
}
}
if(!bUpdated)
aParams[sKey] = iValue;
oUrl.val(bx_append_url_params(aUrl[0], aParams));
};
BxDolStudioBuilderPage.prototype.deleteGhost = function(iFileId, sFileUrl, sFileIcon, aEditors, oUploaderInstance) {
bx_editor_remove_img(aEditors, ['img[src="' + sFileIcon + '"]', 'img[src="' + sFileUrl + '"]', '#bx-base-text-img-' + iFileId, '#bx-base-text-icon-' + iFileId]);
oUploaderInstance.deleteGhost(iFileId);
};
BxDolStudioBuilderPage.prototype.insertIntoPost = function(oButton, iFileId, sEditorId) {
var sFileUrl = $(oButton).parents('.bx-uploader-ghost:first').find('.bx-ug-url').val();
bx_editor_insert_img (sEditorId, 'bx-base-text-img-' + iFileId, sFileUrl, 'bx-base-text-img');
};
/** @} */
|
import textwrap
from collections import defaultdict
from custom.inddex import filters
from custom.inddex.const import (
FOOD_ITEM,
NON_STANDARD_FOOD_ITEM,
NON_STANDARD_RECIPE,
STANDARD_RECIPE,
ConvFactorGaps,
FctGaps,
)
from custom.inddex.food import FoodData
from .utils import MultiTabularReport, format_row
class GapsSummaryReport(MultiTabularReport):
name = 'Output 2a - Gaps Summary by Food Type'
slug = 'report_2a_gaps_summary_by_food_type'
description = textwrap.dedent("""
This output includes summaries of the existing conversion factor gaps
        and FCT gaps in the recall data. It provides researchers with an
overview of the number of data gaps that must be addressed before the
recall data can be analyzed. Information in this output is
disaggregated by food type.
""")
@property
def fields(self):
return [
filters.CaseOwnersFilter,
filters.DateRangeFilter,
filters.GapTypeFilter,
filters.RecallStatusFilter,
]
@property
def data_providers(self):
cf_gaps_data, fct_gaps_data = get_gaps_data(self.domain, self.request)
return [cf_gaps_data, fct_gaps_data]
def get_gaps_data(domain, request):
cf_gaps = defaultdict(int)
fct_gaps = defaultdict(int)
food_data = FoodData.from_request(domain, request)
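    # tally one count per food row for each (gap code, food type) pair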
for row in food_data.rows:
cf_gaps[(row.conv_factor_gap_code, row.food_type or '')] += 1
fct_gaps[(row.fct_gap_code, row.food_type or '')] += 1
return (
ConvFactorGapsData(cf_gaps),
FctGapsData(fct_gaps),
)
class GapsData:
def __init__(self, gaps):
self._gaps = gaps
@property
def rows(self):
for gap_code in self._gaps_descriptions:
for food_type in [FOOD_ITEM, NON_STANDARD_FOOD_ITEM, STANDARD_RECIPE, NON_STANDARD_RECIPE]:
description = self._gaps_descriptions[gap_code]
count = self._gaps.get((gap_code, food_type), 0)
yield format_row([gap_code, description, food_type, count])
class ConvFactorGapsData(GapsData):
title = 'Conv Factor Gaps Summary'
slug = 'conv_factor_gaps_summary'
_gaps_descriptions = ConvFactorGaps.DESCRIPTIONS
@property
def headers(self):
return ['conv_factor_gap_code', 'conv_factor_gap_desc', 'food_type', 'conv_gap_food_type_total']
class FctGapsData(GapsData):
title = 'FCT Gaps Summary'
slug = 'fct_gaps_summary'
_gaps_descriptions = FctGaps.DESCRIPTIONS
@property
def headers(self):
return ['fct_gap_code', 'fct_gap_desc', 'food_type', 'fct_gap_food_type_total']
|
import sys
python_major_version = sys.version_info[0]
if python_major_version == 3:
_string_types = (
str,
)
else:
_string_types = (
str,
unicode,
)
if python_major_version == 3:
def _text(value, encoding='utf-8', errors='strict'):
if isinstance(value, str):
return value
if isinstance(value, (bytearray, bytes)):
return value.decode(encoding=encoding, errors=errors)
return str(value)
else:
def _text(value, encoding='utf-8', errors='strict'): # flake8: noqa
if isinstance(value, unicode):
return value
if isinstance(value, basestring):
return value.decode(encoding=encoding, errors=errors)
return unicode(value)
if python_major_version == 3:
from queue import Queue, Empty as QueueEmpty
else:
from Queue import Queue, Empty as QueueEmpty
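# Usage sketch: _text() normalises any value to the native text type on both
# interpreters, e.g. _text(b'caf\xc3\xa9') == u'café' and _text(42) == u'42'.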
|
# Made by Mr. Have fun!
# Version 0.3 by H1GHL4ND3R
import sys
from com.l2jfrozen.gameserver.model.quest import State
from com.l2jfrozen.gameserver.model.quest import QuestState
from com.l2jfrozen.gameserver.model.quest.jython import QuestJython as JQuest
qn = "217_TestimonyOfTrust"
MARK_OF_TRUST_ID = 2734
LETTER_TO_ELF_ID = 1558
LETTER_TO_DARKELF_ID = 1556
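# The remaining 25 quest item IDs are consecutive (2737..2761), so unpack them from a single range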
LETTER_TO_DWARF_ID, LETTER_TO_ORC_ID, LETTER_TO_SERESIN_ID, SCROLL_OF_DARKELF_TRUST_ID, \
SCROLL_OF_ELF_TRUST_ID, SCROLL_OF_DWARF_TRUST_ID,SCROLL_OF_ORC_TRUST_ID,RECOMMENDATION_OF_HOLLIN_ID,\
ORDER_OF_OZZY_ID, BREATH_OF_WINDS_ID, SEED_OF_VERDURE_ID, LETTER_OF_THIFIELL_ID, \
BLOOD_OF_GUARDIAN_BASILISK_ID,GIANT_APHID_ID, STAKATOS_FLUIDS_ID, BASILISK_PLASMA_ID, \
HONEY_DEW_ID, STAKATO_ICHOR_ID, ORDER_OF_CLAYTON_ID, PARASITE_OF_LOTA_ID, \
LETTER_TO_MANAKIA_ID, LETTER_OF_MANAKIA_ID, LETTER_TO_NICHOLA_ID, ORDER_OF_NICHOLA_ID, \
HEART_OF_PORTA_ID = range(2737,2762)
DROPLIST = {
    # For condition 2
    27120: [ORDER_OF_OZZY_ID, BREATH_OF_WINDS_ID, 1],
    27121: [ORDER_OF_OZZY_ID, SEED_OF_VERDURE_ID, 1],
    # For condition 6
    20550: [ORDER_OF_CLAYTON_ID, BLOOD_OF_GUARDIAN_BASILISK_ID, 10],
    20082: [ORDER_OF_CLAYTON_ID, GIANT_APHID_ID, 10],
    20084: [ORDER_OF_CLAYTON_ID, GIANT_APHID_ID, 10],
    20086: [ORDER_OF_CLAYTON_ID, GIANT_APHID_ID, 10],
    20087: [ORDER_OF_CLAYTON_ID, GIANT_APHID_ID, 10],
    20088: [ORDER_OF_CLAYTON_ID, GIANT_APHID_ID, 10],
    20157: [ORDER_OF_CLAYTON_ID, STAKATOS_FLUIDS_ID, 10],
    20230: [ORDER_OF_CLAYTON_ID, STAKATOS_FLUIDS_ID, 10],
    20232: [ORDER_OF_CLAYTON_ID, STAKATOS_FLUIDS_ID, 10],
    20234: [ORDER_OF_CLAYTON_ID, STAKATOS_FLUIDS_ID, 10],
    # For condition 19
    20213: [ORDER_OF_NICHOLA_ID, HEART_OF_PORTA_ID, 10]
}
class Quest (JQuest) :
def __init__(self,id,name,descr): JQuest.__init__(self,id,name,descr)
def onEvent (self,event,st) :
htmltext = event
if event == "30191-04.htm" :
st.set("cond","1")
st.setState(STARTED)
st.playSound("ItemSound.quest_accept")
st.giveItems(LETTER_TO_ELF_ID,1)
st.giveItems(LETTER_TO_DARKELF_ID,1)
elif event == "30154-03.htm" :
st.takeItems(LETTER_TO_ELF_ID,1)
st.giveItems(ORDER_OF_OZZY_ID,1)
st.set("cond","2")
elif event == "30358-02.htm" :
st.takeItems(LETTER_TO_DARKELF_ID,1)
st.giveItems(LETTER_OF_THIFIELL_ID,1)
st.set("cond","5")
elif event == "30657-03.htm" :
if st.getPlayer().getLevel() >= 38 : # Condition 12 meet the Lord Kakai (Orc Master)
st.takeItems(LETTER_TO_SERESIN_ID,1)
st.giveItems(LETTER_TO_ORC_ID,1)
st.giveItems(LETTER_TO_DWARF_ID,1)
st.set("cond","12")
else: # Condition 11 A lack of Experience
htmltext = "30657-02.htm"
st.set("cond","11")
elif event == "30565-02.htm" :
st.takeItems(LETTER_TO_ORC_ID,1)
st.giveItems(LETTER_TO_MANAKIA_ID,1)
st.set("cond","13")
elif event == "30515-02.htm" :
st.takeItems(LETTER_TO_MANAKIA_ID,1)
st.set("cond","14")
elif event == "30531-02.htm" :
st.takeItems(LETTER_TO_DWARF_ID,1)
st.giveItems(LETTER_TO_NICHOLA_ID,1)
st.set("cond","18")
elif event == "30621-02.htm" :
st.takeItems(LETTER_TO_NICHOLA_ID,1)
st.giveItems(ORDER_OF_NICHOLA_ID,1)
st.set("cond","19")
return htmltext
def onTalk (self,npc,player):
htmltext = "<html><body>You are either not carrying out your quest or don't meet the criteria.</body></html>"
st = player.getQuestState(qn)
if not st : return htmltext
npcId = npc.getNpcId()
id = st.getState()
if npcId != 30191 and id != STARTED : return htmltext
if id == CREATED : # Check if is starting the quest
st.set("cond","0")
st.set("id","0")
if npcId == 30191 :
if player.getRace().ordinal() == 0 :
if player.getLevel() >= 37 :
htmltext = "30191-03.htm"
else:
htmltext = "30191-01.htm"
st.exitQuest(1)
else:
htmltext = "30191-02.htm"
st.exitQuest(1)
elif id == COMPLETED : # Check if the quest is already made
if npcId == 30191 :
htmltext = "<html><body>This quest has already been completed.</body></html>"
else : # The quest it self
try :
cond = st.getInt("cond")
except :
cond = None
if cond == 1 : # Condition 1 take the letter to Hierarch Asterios (Elven Master)
if npcId == 30191 :
htmltext = "30191-08.htm"
elif npcId == 30154 and st.getQuestItemsCount(LETTER_TO_ELF_ID) :
htmltext = "30154-01.htm"
elif cond == 2 : # Condition 2 kill the Luel of Zephy and Aktea of the Woods
if npcId == 30154 and st.getQuestItemsCount(ORDER_OF_OZZY_ID) :
htmltext = "30154-04.htm"
elif cond == 3 : # Condition 3 bring back the Breath of winds and Seed of Verdure to Asterios
if npcId == 30154 and st.getQuestItemsCount(BREATH_OF_WINDS_ID) and st.getQuestItemsCount(SEED_OF_VERDURE_ID) :
htmltext = "30154-05.htm"
st.takeItems(BREATH_OF_WINDS_ID,1)
st.takeItems(SEED_OF_VERDURE_ID,1)
st.takeItems(ORDER_OF_OZZY_ID,1)
st.giveItems(SCROLL_OF_ELF_TRUST_ID,1)
st.set("cond","4")
elif cond == 4 : # Condition 4 take the letter to Tetrarch Thifiell (Dark Elven Master)
if npcId == 30154 :
htmltext = "30154-06.htm"
elif npcId == 30358 and st.getQuestItemsCount(LETTER_TO_DARKELF_ID) :
htmltext = "30358-01.htm"
elif cond == 5 : # Condition 5 meet the Magister Clayton
if npcId == 30358 :
htmltext = "30358-05.htm"
elif npcId == 30464 and st.getQuestItemsCount(LETTER_OF_THIFIELL_ID) :
htmltext = "30464-01.htm"
st.takeItems(LETTER_OF_THIFIELL_ID,1)
st.giveItems(ORDER_OF_CLAYTON_ID,1)
st.set("cond","6")
elif cond == 6 : # Condition 6 get 10 of each, Stakato ichor, honey dew and basilisk plasma
if npcId == 30464 and st.getQuestItemsCount(ORDER_OF_CLAYTON_ID) :
htmltext = "30464-02.htm"
elif cond == 7 : # Condition 7 bring back the Stakato ichor, honey dew and basilisk plasma to Magister Clayton
if npcId == 30464 and st.getQuestItemsCount(ORDER_OF_CLAYTON_ID) and st.getQuestItemsCount(STAKATO_ICHOR_ID) and st.getQuestItemsCount(HONEY_DEW_ID) and st.getQuestItemsCount(BASILISK_PLASMA_ID) :
htmltext = "30464-03.htm"
st.set("cond","8")
elif cond == 8 : # Condition 8 take the Stakato ichor, honey dew and basilisk plasma to Thifiell
if npcId == 30358 and st.getQuestItemsCount(ORDER_OF_CLAYTON_ID) and st.getQuestItemsCount(STAKATO_ICHOR_ID) and st.getQuestItemsCount(HONEY_DEW_ID) and st.getQuestItemsCount(BASILISK_PLASMA_ID) :
htmltext = "30358-03.htm"
st.takeItems(ORDER_OF_CLAYTON_ID,1)
st.takeItems(BASILISK_PLASMA_ID,1)
st.takeItems(STAKATO_ICHOR_ID,1)
st.takeItems(HONEY_DEW_ID,1)
st.giveItems(SCROLL_OF_DARKELF_TRUST_ID,1)
st.set("cond","9")
elif cond == 9 : # Condition 9 take the Elven and Dark Elven scroll to Hollint
if npcId == 30191 and st.getQuestItemsCount(SCROLL_OF_ELF_TRUST_ID) and st.getQuestItemsCount(SCROLL_OF_DARKELF_TRUST_ID) :
htmltext = "30191-05.htm"
st.takeItems(SCROLL_OF_DARKELF_TRUST_ID,1)
st.takeItems(SCROLL_OF_ELF_TRUST_ID,1)
st.giveItems(LETTER_TO_SERESIN_ID,1)
st.set("cond","10")
elif npcId == 30358 :
htmltext = "30358-04.htm"
elif cond in [ 10, 11 ] : # Condition 10 meet the Seresin or Condition 11 A lack of Experience
if npcId == 30191 :
htmltext = "30191-09.htm"
elif npcId == 30657 and st.getQuestItemsCount(LETTER_TO_SERESIN_ID) :
htmltext = "30657-01.htm"
elif cond == 12 : # Condition 12 meet the Lord Kakai (Orc Master)
if npcId == 30657 :
htmltext = "30657-04.htm"
elif npcId == 30565 and st.getQuestItemsCount(LETTER_TO_ORC_ID) :
htmltext = "30565-01.htm"
elif cond == 13 : # Condition 13 meet the Seer Manakia
if npcId == 30565 :
htmltext = "30565-03.htm"
elif npcId == 30515 and st.getQuestItemsCount(LETTER_TO_MANAKIA_ID) :
htmltext = "30515-01.htm"
elif cond == 14 : # Condition 14 get 10 Parasite of lota
if npcId == 30515 :
htmltext = "30515-03.htm"
elif cond == 15 : # Condition 15 bring back the Parasite of lota to Seer Manakia
if npcId == 30515 and st.getQuestItemsCount(PARASITE_OF_LOTA_ID)==10 :
htmltext = "30515-04.htm"
st.takeItems(PARASITE_OF_LOTA_ID,10)
st.giveItems(LETTER_OF_MANAKIA_ID,1)
st.set("cond","16")
elif cond == 16 : # Condition 16 bring the letter of Manakia to the Lord Kakai
if npcId == 30565 and st.getQuestItemsCount(LETTER_OF_MANAKIA_ID) :
htmltext = "30565-04.htm"
st.takeItems(LETTER_OF_MANAKIA_ID,1)
st.giveItems(SCROLL_OF_ORC_TRUST_ID,1)
st.set("cond","17")
elif npcId == 30515 :
htmltext = "30515-05.htm"
elif cond == 17 : # Condition 17 meet the Lockirin (Dwarven Master)
if npcId == 30565 :
htmltext = "30565-05.htm"
elif npcId == 30531 and st.getQuestItemsCount(LETTER_TO_DWARF_ID) :
htmltext = "30531-01.htm"
elif cond == 18 : # Condition 18 take the letter to Nichola
if npcId == 30531 :
htmltext = "30531-03.htm"
elif npcId == 30621 and st.getQuestItemsCount(LETTER_TO_NICHOLA_ID) :
htmltext = "30621-01.htm"
elif cond == 19 : # Condition 19 get 10 Heart of Porta
if npcId == 30621 :
htmltext = "30621-03.htm"
elif cond == 20 : # Condition 20 bring the 10 Heart of Porta to Nichola
if npcId == 30621 and st.getQuestItemsCount(ORDER_OF_NICHOLA_ID) and st.getQuestItemsCount(HEART_OF_PORTA_ID)==10 :
htmltext = "30621-04.htm"
st.takeItems(HEART_OF_PORTA_ID,10)
st.takeItems(ORDER_OF_NICHOLA_ID,1)
st.set("cond","21")
elif cond == 21 : # Condition 21 take the letter to Lockirin
if npcId == 30621 :
htmltext = "30621-05.htm"
elif npcId == 30531 :
htmltext = "30531-04.htm"
st.giveItems(SCROLL_OF_DWARF_TRUST_ID,1)
st.set("cond","22")
elif cond == 22 : # Condition 22 take the Orc and Dwarven scroll to High Priest Hollint
if npcId == 30191 and st.getQuestItemsCount(SCROLL_OF_DWARF_TRUST_ID) and st.getQuestItemsCount(SCROLL_OF_ORC_TRUST_ID) :
htmltext = "30191-06.htm"
st.takeItems(SCROLL_OF_DWARF_TRUST_ID,1)
st.takeItems(SCROLL_OF_ORC_TRUST_ID,1)
st.giveItems(RECOMMENDATION_OF_HOLLIN_ID,1)
st.set("cond","23")
elif npcId == 30657 :
htmltext = "30657-05.htm"
elif npcId == 30531 :
htmltext = "30531-05.htm"
elif cond == 23 : # Condition 23 take the Recommendation of Hollin to the High Priest Biotin
if npcId == 30191 :
htmltext = "30191-07.htm"
elif npcId == 30031 and st.getQuestItemsCount(RECOMMENDATION_OF_HOLLIN_ID) :
st.addExpAndSp(39571,2500)
st.giveItems(7562,16)
htmltext = "30031-01.htm"
st.takeItems(RECOMMENDATION_OF_HOLLIN_ID,1)
st.giveItems(MARK_OF_TRUST_ID,1)
st.unset("cond")
st.unset("id")
st.setState(COMPLETED)
st.playSound("ItemSound.quest_finish")
return htmltext
def onKill(self,npc,player,isPet):
st = player.getQuestState(qn)
if not st : return
if st.getState() != STARTED : return
npcId = npc.getNpcId()
cond = st.getInt("cond")
if cond == 2 and npcId in [ 20013, 20019, 20036, 20044 ] : # Condition 2 kill the Luel of Zephy and Aktea of the Woods
if npcId in [ 20036,20044 ] and st.getQuestItemsCount(BREATH_OF_WINDS_ID) == 0 :
st.set("id",str(st.getInt("id")+1))
if st.getRandom(100)<(st.getInt("id")*33) :
st.playSound("Itemsound.quest_before_battle")
st.addSpawn(27120,9410,50301,-3713,600000) ### FIXME ### Temp fix for spawn
st.addRadar(9410,50301,-3713)
return "Luell Of Zephyr Winds has spawned at X=9410 Y=50301 Z=-3713"
# st.addSpawn(27120) # The original spawn code
elif npcId in [ 20013,20019 ] and st.getQuestItemsCount(SEED_OF_VERDURE_ID) == 0 :
st.set("id",str(st.getInt("id")+1))
if st.getRandom(100)<(st.getInt("id")*33) :
st.playSound("Itemsound.quest_before_battle")
st.addSpawn(27121,16895,47210,-3673,600000) ### FIXME ### Temp fix for spawn
st.addRadar(16895,47210,-3673)
return "Actea Of Verdant Wilds has spawned at X=16895 Y=47210 Z=-3673"
# st.addSpawn(27121) # The original spawn code
elif cond == 14 : # Condition 14 get 10 Parasite of lota
parasite = st.getQuestItemsCount(PARASITE_OF_LOTA_ID)
if npcId == 20553 and parasite < 10 :
if st.getRandom(2) == 1 :
st.giveItems(PARASITE_OF_LOTA_ID,1)
if parasite+1 == 10 :
st.set("cond","15")
st.playSound("Itemsound.quest_middle")
else:
st.playSound("Itemsound.quest_itemget")
elif cond in [ 2,6,19 ] and npcId in DROPLIST.keys() :
required,item,maxqty=DROPLIST[npcId]
count = st.getQuestItemsCount(item)
if st.getQuestItemsCount(required) and count < maxqty :
st.giveItems(item,1)
if count+1 == maxqty : # Check if got enough number of items
# Special Sound event
if npcId in [ 20550, 20082, 20084, 20086, 20087, 20088, 20157, 20230, 20232, 20234 ] :
# Condition 6 get 10 of each, Stakato ichor, honey dew and basilisk plasma, and transform it
if item == BLOOD_OF_GUARDIAN_BASILISK_ID :
st.takeItems(BLOOD_OF_GUARDIAN_BASILISK_ID, maxqty)
st.giveItems(BASILISK_PLASMA_ID, 1)
elif item == GIANT_APHID_ID :
st.takeItems(GIANT_APHID_ID, maxqty)
st.giveItems(HONEY_DEW_ID, 1)
elif item == STAKATOS_FLUIDS_ID :
st.takeItems(STAKATOS_FLUIDS_ID, maxqty)
st.giveItems(STAKATO_ICHOR_ID, 1)
# Check if player got all the items of condition 6 and set the condition to 7
if st.getQuestItemsCount(BASILISK_PLASMA_ID) and st.getQuestItemsCount(HONEY_DEW_ID) and st.getQuestItemsCount(STAKATO_ICHOR_ID) :
st.set("cond","7")
st.playSound("Itemsound.quest_middle")
else:
st.playSound("Itemsound.quest_itemget")
elif npcId in [ 27120,27121 ] : # Condition 2 kill the Luel of Zephy and Aktea of the Woods
# Check if player got all the items of condition 2 and set the condition to 3
if st.getQuestItemsCount(SEED_OF_VERDURE_ID) and st.getQuestItemsCount(BREATH_OF_WINDS_ID) :
st.set("cond","3")
st.playSound("Itemsound.quest_middle")
else :
st.playSound("Itemsound.quest_itemget")
elif npcId == 20213 : # Condition 19 Porta
st.set("cond","20")
st.playSound("ItemSound.quest_middle")
else:
st.playSound("Itemsound.quest_itemget")
return
QUEST = Quest(217,qn,"Testimony Of Trust")
CREATED = State('Start', QUEST)
STARTING = State('Starting', QUEST)
STARTED = State('Started', QUEST)
COMPLETED = State('Completed', QUEST)
QUEST.setInitialState(CREATED)
QUEST.addStartNpc(30191)
QUEST.addTalkId(30191)
QUEST.addTalkId(30031)
QUEST.addTalkId(30154)
QUEST.addTalkId(30358)
QUEST.addTalkId(30464)
QUEST.addTalkId(30515)
QUEST.addTalkId(30531)
QUEST.addTalkId(30565)
QUEST.addTalkId(30621)
QUEST.addTalkId(30657)
for i in DROPLIST.keys()+[20013,20019,20036,20044,20553] :
QUEST.addKillId(i)
STARTED.addQuestDrop(30358,SCROLL_OF_DARKELF_TRUST_ID,1)
STARTED.addQuestDrop(30154,SCROLL_OF_ELF_TRUST_ID,1)
STARTED.addQuestDrop(30531,SCROLL_OF_DWARF_TRUST_ID,1)
STARTED.addQuestDrop(30565,SCROLL_OF_ORC_TRUST_ID,1)
STARTED.addQuestDrop(27120,BREATH_OF_WINDS_ID,1)
STARTED.addQuestDrop(27121,SEED_OF_VERDURE_ID,1)
STARTED.addQuestDrop(30154,ORDER_OF_OZZY_ID,1)
STARTED.addQuestDrop(30191,LETTER_TO_ELF_ID,1)
STARTED.addQuestDrop(30464,ORDER_OF_CLAYTON_ID,1)
STARTED.addQuestDrop(20550,BASILISK_PLASMA_ID,1)
STARTED.addQuestDrop(20157,STAKATO_ICHOR_ID,1)
STARTED.addQuestDrop(20082,HONEY_DEW_ID,1)
STARTED.addQuestDrop(30191,LETTER_TO_DARKELF_ID,1)
STARTED.addQuestDrop(30358,LETTER_OF_THIFIELL_ID,1)
STARTED.addQuestDrop(30191,LETTER_TO_SERESIN_ID,1)
STARTED.addQuestDrop(30657,LETTER_TO_ORC_ID,1)
STARTED.addQuestDrop(30515,LETTER_OF_MANAKIA_ID,1)
STARTED.addQuestDrop(30565,LETTER_TO_MANAKIA_ID,1)
STARTED.addQuestDrop(20553,PARASITE_OF_LOTA_ID,1)
STARTED.addQuestDrop(30657,LETTER_TO_DWARF_ID,1)
STARTED.addQuestDrop(30531,LETTER_TO_NICHOLA_ID,1)
STARTED.addQuestDrop(20213,HEART_OF_PORTA_ID,1)
STARTED.addQuestDrop(30621,ORDER_OF_NICHOLA_ID,1)
STARTED.addQuestDrop(30191,RECOMMENDATION_OF_HOLLIN_ID,1)
STARTED.addQuestDrop(20550,BLOOD_OF_GUARDIAN_BASILISK_ID,1)
STARTED.addQuestDrop(20157,STAKATOS_FLUIDS_ID,1)
STARTED.addQuestDrop(20082,GIANT_APHID_ID,1) |
// Copyright 2015-2016 Espressif Systems (Shanghai) PTE LTD
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <string.h>
#include "esp_attr.h"
#include "esp_types.h"
#include "esp_log.h"
#include "esp32/rom/cache.h"
#include "esp32/rom/ets_sys.h"
#include "esp32/rom/secure_boot.h"
#include "soc/efuse_periph.h"
#include "soc/rtc_periph.h"
#include "sdkconfig.h"
#include "bootloader_flash.h"
#include "bootloader_random.h"
#include "esp_image_format.h"
#include "esp_secure_boot.h"
#include "esp_flash_encrypt.h"
#include "esp_efuse.h"
/* The following API implementations are used only when called
* from the bootloader code.
*/
static const char* TAG = "secure_boot";
/**
* @function : secure_boot_generate
* @description: generate boot digest (aka "abstract") & iv
*
* @inputs: image_len - length of image to calculate digest for
*/
static bool secure_boot_generate(uint32_t image_len){
esp_err_t err;
esp_secure_boot_iv_digest_t digest;
const uint32_t *image;
/* hardware secure boot engine only takes full blocks, so round up the
image length. The additional data should all be 0xFF (or the appended SHA, if it falls in the same block).
*/
if (image_len % sizeof(digest.iv) != 0) {
image_len = (image_len / sizeof(digest.iv) + 1) * sizeof(digest.iv);
}
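    /* Example (a sketch with hypothetical sizes): if sizeof(digest.iv) were
       128 bytes, an image_len of 1000 would round up to 1024, so the hash
       engine below only ever consumes whole blocks. */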
ets_secure_boot_start();
ets_secure_boot_rd_iv((uint32_t *)digest.iv);
ets_secure_boot_hash(NULL);
/* iv stored in sec 0 */
err = bootloader_flash_erase_sector(0);
if (err != ESP_OK)
{
ESP_LOGE(TAG, "SPI erase failed: 0x%x", err);
return false;
}
/* generate digest from image contents */
image = bootloader_mmap(ESP_BOOTLOADER_OFFSET, image_len);
if (!image) {
ESP_LOGE(TAG, "bootloader_mmap(0x1000, 0x%x) failed", image_len);
return false;
}
for (int i = 0; i < image_len; i+= sizeof(digest.iv)) {
ets_secure_boot_hash(&image[i/sizeof(uint32_t)]);
}
bootloader_munmap(image);
ets_secure_boot_obtain();
ets_secure_boot_rd_abstract((uint32_t *)digest.digest);
ets_secure_boot_finish();
ESP_LOGD(TAG, "write iv+digest to flash");
err = bootloader_flash_write(FLASH_OFFS_SECURE_BOOT_IV_DIGEST, &digest,
sizeof(digest), esp_flash_encryption_enabled());
if (err != ESP_OK) {
ESP_LOGE(TAG, "SPI write failed: 0x%x", err);
return false;
}
Cache_Read_Enable(0);
return true;
}
/* Burn values written to the efuse write registers */
static inline void burn_efuses()
{
esp_efuse_burn_new_values();
}
esp_err_t esp_secure_boot_generate_digest(void)
{
esp_err_t err;
if (esp_secure_boot_enabled()) {
ESP_LOGI(TAG, "bootloader secure boot is already enabled."
" No need to generate digest. continuing..");
return ESP_OK;
}
uint32_t coding_scheme = REG_GET_FIELD(EFUSE_BLK0_RDATA6_REG, EFUSE_CODING_SCHEME);
if (coding_scheme != EFUSE_CODING_SCHEME_VAL_NONE && coding_scheme != EFUSE_CODING_SCHEME_VAL_34) {
ESP_LOGE(TAG, "Unknown/unsupported CODING_SCHEME value 0x%x", coding_scheme);
return ESP_ERR_NOT_SUPPORTED;
}
/* Verify the bootloader */
esp_image_metadata_t bootloader_data = { 0 };
err = esp_image_verify_bootloader_data(&bootloader_data);
if (err != ESP_OK) {
ESP_LOGE(TAG, "bootloader image appears invalid! error %d", err);
return err;
}
/* Generate secure boot key and keep in EFUSE */
uint32_t dis_reg = REG_READ(EFUSE_BLK0_RDATA0_REG);
bool efuse_key_read_protected = dis_reg & EFUSE_RD_DIS_BLK2;
bool efuse_key_write_protected = dis_reg & EFUSE_WR_DIS_BLK2;
if (efuse_key_read_protected == false
&& efuse_key_write_protected == false
&& REG_READ(EFUSE_BLK2_RDATA0_REG) == 0
&& REG_READ(EFUSE_BLK2_RDATA1_REG) == 0
&& REG_READ(EFUSE_BLK2_RDATA2_REG) == 0
&& REG_READ(EFUSE_BLK2_RDATA3_REG) == 0
&& REG_READ(EFUSE_BLK2_RDATA4_REG) == 0
&& REG_READ(EFUSE_BLK2_RDATA5_REG) == 0
&& REG_READ(EFUSE_BLK2_RDATA6_REG) == 0
&& REG_READ(EFUSE_BLK2_RDATA7_REG) == 0) {
ESP_LOGI(TAG, "Generating new secure boot key...");
esp_efuse_write_random_key(EFUSE_BLK2_WDATA0_REG);
burn_efuses();
} else {
ESP_LOGW(TAG, "Using pre-loaded secure boot key in EFUSE block 2");
}
/* Generate secure boot digest using programmed key in EFUSE */
ESP_LOGI(TAG, "Generating secure boot digest...");
uint32_t image_len = bootloader_data.image_len;
if(bootloader_data.image.hash_appended) {
/* Secure boot digest doesn't cover the hash */
image_len -= ESP_IMAGE_HASH_LEN;
}
if (false == secure_boot_generate(image_len)){
ESP_LOGE(TAG, "secure boot generation failed");
return ESP_FAIL;
}
ESP_LOGI(TAG, "Digest generation complete.");
return ESP_OK;
}
esp_err_t esp_secure_boot_permanently_enable(void)
{
if (esp_secure_boot_enabled()) {
ESP_LOGI(TAG, "bootloader secure boot is already enabled, continuing..");
return ESP_OK;
}
uint32_t dis_reg = REG_READ(EFUSE_BLK0_RDATA0_REG);
bool efuse_key_read_protected = dis_reg & EFUSE_RD_DIS_BLK2;
bool efuse_key_write_protected = dis_reg & EFUSE_WR_DIS_BLK2;
if (efuse_key_read_protected == false
&& efuse_key_write_protected == false) {
ESP_LOGI(TAG, "Read & write protecting new key...");
REG_WRITE(EFUSE_BLK0_WDATA0_REG, EFUSE_WR_DIS_BLK2 | EFUSE_RD_DIS_BLK2);
burn_efuses();
efuse_key_read_protected = true;
efuse_key_write_protected = true;
}
if (!efuse_key_read_protected) {
ESP_LOGE(TAG, "Pre-loaded key is not read protected. Refusing to blow secure boot efuse.");
return ESP_ERR_INVALID_STATE;
}
if (!efuse_key_write_protected) {
ESP_LOGE(TAG, "Pre-loaded key is not write protected. Refusing to blow secure boot efuse.");
return ESP_ERR_INVALID_STATE;
}
ESP_LOGI(TAG, "blowing secure boot efuse...");
ESP_LOGD(TAG, "before updating, EFUSE_BLK0_RDATA6 %x", REG_READ(EFUSE_BLK0_RDATA6_REG));
uint32_t new_wdata6 = EFUSE_RD_ABS_DONE_0;
#ifndef CONFIG_SECURE_BOOT_ALLOW_JTAG
ESP_LOGI(TAG, "Disable JTAG...");
new_wdata6 |= EFUSE_RD_DISABLE_JTAG;
#else
ESP_LOGW(TAG, "Not disabling JTAG - SECURITY COMPROMISED");
#endif
#ifndef CONFIG_SECURE_BOOT_ALLOW_ROM_BASIC
ESP_LOGI(TAG, "Disable ROM BASIC interpreter fallback...");
new_wdata6 |= EFUSE_RD_CONSOLE_DEBUG_DISABLE;
#else
ESP_LOGW(TAG, "Not disabling ROM BASIC fallback - SECURITY COMPROMISED");
#endif
REG_WRITE(EFUSE_BLK0_WDATA6_REG, new_wdata6);
burn_efuses();
uint32_t after = REG_READ(EFUSE_BLK0_RDATA6_REG);
ESP_LOGD(TAG, "after updating, EFUSE_BLK0_RDATA6 %x", after);
if (after & EFUSE_RD_ABS_DONE_0) {
ESP_LOGI(TAG, "secure boot is now enabled for bootloader image");
return ESP_OK;
} else {
ESP_LOGE(TAG, "secure boot not enabled for bootloader image, EFUSE_RD_ABS_DONE_0 is probably write protected!");
return ESP_ERR_INVALID_STATE;
}
}
|
from __future__ import absolute_import
from django.contrib.auth import authenticate
from django.contrib.sites.models import Site
from django.contrib.sites.shortcuts import get_current_site
from django.core.exceptions import PermissionDenied
from django.db import models
from django.utils.crypto import get_random_string
import allauth.app_settings
from allauth.account.models import EmailAddress
from allauth.account.utils import get_next_redirect_url, setup_user_email
from allauth.compat import (
force_str,
python_2_unicode_compatible,
ugettext_lazy as _,
)
from allauth.utils import get_user_model
from ..utils import get_request_param
from . import app_settings, providers
from .adapter import get_adapter
from .fields import JSONField
class SocialAppManager(models.Manager):
def get_current(self, provider, request=None):
cache = {}
if request:
cache = getattr(request, '_socialapp_cache', {})
request._socialapp_cache = cache
app = cache.get(provider)
if not app:
site = get_current_site(request)
app = self.get(
sites__id=site.id,
provider=provider)
cache[provider] = app
return app
@python_2_unicode_compatible
class SocialApp(models.Model):
objects = SocialAppManager()
provider = models.CharField(verbose_name=_('provider'),
max_length=30,
choices=providers.registry.as_choices())
name = models.CharField(verbose_name=_('name'),
max_length=40)
client_id = models.CharField(verbose_name=_('client id'),
max_length=191,
help_text=_('App ID, or consumer key'))
secret = models.CharField(verbose_name=_('secret key'),
max_length=191,
help_text=_('API secret, client secret, or'
' consumer secret'))
key = models.CharField(verbose_name=_('key'),
max_length=191,
blank=True,
help_text=_('Key'))
# Most apps can be used across multiple domains, therefore we use
# a ManyToManyField. Note that Facebook requires an app per domain
# (unless the domains share a common base name).
# blank=True allows for disabling apps without removing them
sites = models.ManyToManyField(Site, blank=True)
class Meta:
verbose_name = _('social application')
verbose_name_plural = _('social applications')
def __str__(self):
return self.name
@python_2_unicode_compatible
class SocialAccount(models.Model):
user = models.ForeignKey(allauth.app_settings.USER_MODEL,
on_delete=models.CASCADE)
provider = models.CharField(verbose_name=_('provider'),
max_length=30,
choices=providers.registry.as_choices())
# Just in case you're wondering if an OpenID identity URL is going
# to fit in a 'uid':
#
# Ideally, URLField(max_length=1024, unique=True) would be used
# for identity. However, MySQL has a max_length limitation of 191
# for URLField (in case of utf8mb4). How about
# models.TextField(unique=True) then? Well, that won't work
# either for MySQL due to another bug[1]. So the only way out
# would be to drop the unique constraint, or switch to shorter
# identity URLs. Opted for the latter, as [2] suggests that
# identity URLs are supposed to be short anyway, at least for the
# old spec.
#
# [1] http://code.djangoproject.com/ticket/2495.
# [2] http://openid.net/specs/openid-authentication-1_1.html#limits
uid = models.CharField(verbose_name=_('uid'),
max_length=app_settings.UID_MAX_LENGTH)
last_login = models.DateTimeField(verbose_name=_('last login'),
auto_now=True)
date_joined = models.DateTimeField(verbose_name=_('date joined'),
auto_now_add=True)
extra_data = JSONField(verbose_name=_('extra data'), default=dict)
class Meta:
unique_together = ('provider', 'uid')
verbose_name = _('social account')
verbose_name_plural = _('social accounts')
def authenticate(self):
return authenticate(account=self)
def __str__(self):
return force_str(self.user)
def get_profile_url(self):
return self.get_provider_account().get_profile_url()
def get_avatar_url(self):
return self.get_provider_account().get_avatar_url()
def get_provider(self):
return providers.registry.by_id(self.provider)
def get_provider_account(self):
return self.get_provider().wrap_account(self)
@python_2_unicode_compatible
class SocialToken(models.Model):
app = models.ForeignKey(SocialApp, on_delete=models.CASCADE)
account = models.ForeignKey(SocialAccount, on_delete=models.CASCADE)
token = models.TextField(
verbose_name=_('token'),
help_text=_(
'"oauth_token" (OAuth1) or access token (OAuth2)'))
token_secret = models.TextField(
blank=True,
verbose_name=_('token secret'),
help_text=_(
'"oauth_token_secret" (OAuth1) or refresh token (OAuth2)'))
expires_at = models.DateTimeField(blank=True, null=True,
verbose_name=_('expires at'))
class Meta:
unique_together = ('app', 'account')
verbose_name = _('social application token')
verbose_name_plural = _('social application tokens')
def __str__(self):
return self.token
class SocialLogin(object):
"""
Represents a social user that is in the process of being logged
in. This consists of the following information:
`account` (`SocialAccount` instance): The social account being
logged in. Providers are not responsible for checking whether or
    not an account already exists. Therefore, a provider
typically creates a new (unsaved) `SocialAccount` instance. The
`User` instance pointed to by the account (`account.user`) may be
prefilled by the provider for use as a starting point later on
during the signup process.
    `token` (`SocialToken` instance): An optional access token
that results from performing a successful authentication
handshake.
`state` (`dict`): The state to be preserved during the
authentication handshake. Note that this state may end up in the
url -- do not put any secrets in here. It currently only contains
the url to redirect to after login.
`email_addresses` (list of `EmailAddress`): Optional list of
e-mail addresses retrieved from the provider.
"""
    def __init__(self, user=None, account=None, token=None,
                 email_addresses=None):
        if token:
            assert token.account is None or token.account == account
        self.token = token
        self.user = user
        self.account = account
        # Default to None instead of a mutable [] so instances never share
        # one list.
        self.email_addresses = email_addresses or []
        self.state = {}
def connect(self, request, user):
self.user = user
self.save(request, connect=True)
def serialize(self):
serialize_instance = get_adapter().serialize_instance
ret = dict(account=serialize_instance(self.account),
user=serialize_instance(self.user),
state=self.state,
email_addresses=[serialize_instance(ea)
for ea in self.email_addresses])
if self.token:
ret['token'] = serialize_instance(self.token)
return ret
@classmethod
def deserialize(cls, data):
deserialize_instance = get_adapter().deserialize_instance
account = deserialize_instance(SocialAccount, data['account'])
user = deserialize_instance(get_user_model(), data['user'])
if 'token' in data:
token = deserialize_instance(SocialToken, data['token'])
else:
token = None
email_addresses = []
for ea in data['email_addresses']:
email_address = deserialize_instance(EmailAddress, ea)
email_addresses.append(email_address)
ret = cls()
ret.token = token
ret.account = account
ret.user = user
ret.email_addresses = email_addresses
ret.state = data['state']
return ret
def save(self, request, connect=False):
"""
Saves a new account. Note that while the account is new,
the user may be an existing one (when connecting accounts)
"""
assert not self.is_existing
user = self.user
user.save()
self.account.user = user
self.account.save()
if app_settings.STORE_TOKENS and self.token:
self.token.account = self.account
self.token.save()
if connect:
# TODO: Add any new email addresses automatically?
pass
else:
setup_user_email(request, user, self.email_addresses)
@property
def is_existing(self):
"""
Account is temporary, not yet backed by a database record.
"""
return self.account.pk
def lookup(self):
"""
Lookup existing account, if any.
"""
assert not self.is_existing
try:
a = SocialAccount.objects.get(provider=self.account.provider,
uid=self.account.uid)
# Update account
a.extra_data = self.account.extra_data
self.account = a
self.user = self.account.user
a.save()
# Update token
if app_settings.STORE_TOKENS and self.token:
assert not self.token.pk
try:
t = SocialToken.objects.get(account=self.account,
app=self.token.app)
t.token = self.token.token
if self.token.token_secret:
# only update the refresh token if we got one
# many oauth2 providers do not resend the refresh token
t.token_secret = self.token.token_secret
t.expires_at = self.token.expires_at
t.save()
self.token = t
except SocialToken.DoesNotExist:
self.token.account = a
self.token.save()
except SocialAccount.DoesNotExist:
pass
def get_redirect_url(self, request):
url = self.state.get('next')
return url
@classmethod
def state_from_request(cls, request):
state = {}
next_url = get_next_redirect_url(request)
if next_url:
state['next'] = next_url
state['process'] = get_request_param(request, 'process', 'login')
state['scope'] = get_request_param(request, 'scope', '')
state['auth_params'] = get_request_param(request, 'auth_params', '')
return state
@classmethod
def stash_state(cls, request):
state = cls.state_from_request(request)
verifier = get_random_string()
request.session['socialaccount_state'] = (state, verifier)
return verifier
@classmethod
def unstash_state(cls, request):
if 'socialaccount_state' not in request.session:
raise PermissionDenied()
state, verifier = request.session.pop('socialaccount_state')
return state
@classmethod
def verify_and_unstash_state(cls, request, verifier):
if 'socialaccount_state' not in request.session:
raise PermissionDenied()
state, verifier2 = request.session.pop('socialaccount_state')
if verifier != verifier2:
raise PermissionDenied()
return state
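# Typical flow (a sketch): a provider's login view calls
# SocialLogin.stash_state(request) and round-trips the returned verifier
# (e.g. via the OAuth "state" parameter); the callback view then calls
# verify_and_unstash_state(request, verifier), which raises PermissionDenied
# on a missing or mismatching verifier.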
|
import chai from 'chai';
import MetaFormStateManager from '../src/components/MetaFormStateManager.js';
import metadataProvider from '../src/lib/metadataProvider.js';
import console from '../src/lib/helpers/consoleHelpers.js';
describe('MetaFormStateManagerSpec', function() {
it('Something', function() {
let schema = require('./assets/metadataProviderTestData/completeWithNestedEntity');
let model = {};
let stateWrapper = { state: {} };
let stateGetter = () => stateWrapper.state;
let stateSetter = (state) => stateWrapper.state = state;
let metaformStateManager = new MetaFormStateManager(
schema,
'contact',
'contact-edit',
model,
stateGetter,
stateSetter
);
stateSetter(metaformStateManager.getInitialState());
//console.logObject(stateWrapper.state.componentProps);
});
});
const assert = chai.assert;
|
import {
warningCardHeader,
successCardHeader,
dangerCardHeader,
infoCardHeader,
primaryCardHeader,
roseCardHeader,
whiteColor
} from "../../../../assets/jss/material-dashboard-react.js";
const cardHeaderStyle = {
cardHeader: {
padding: "0.75rem 1.25rem",
marginBottom: "0",
borderBottom: "none",
background: "transparent",
zIndex: "3 !important",
"&$cardHeaderPlain,&$cardHeaderIcon,&$cardHeaderStats,&$warningCardHeader,&$successCardHeader,&$dangerCardHeader,&$infoCardHeader,&$primaryCardHeader,&$roseCardHeader": {
margin: "0 15px",
padding: "0",
position: "relative",
color: whiteColor
},
"&:first-child": {
borderRadius: "calc(.25rem - 1px) calc(.25rem - 1px) 0 0"
},
"&$warningCardHeader,&$successCardHeader,&$dangerCardHeader,&$infoCardHeader,&$primaryCardHeader,&$roseCardHeader": {
"&:not($cardHeaderIcon)": {
borderRadius: "3px",
marginTop: "-20px",
padding: "15px"
}
},
"&$cardHeaderStats svg": {
fontSize: "36px",
lineHeight: "56px",
textAlign: "center",
width: "36px",
height: "36px",
margin: "10px 10px 4px"
},
"&$cardHeaderStats i,&$cardHeaderStats .material-icons": {
fontSize: "36px",
lineHeight: "56px",
width: "56px",
height: "56px",
textAlign: "center",
overflow: "unset",
marginBottom: "1px"
},
"&$cardHeaderStats$cardHeaderIcon": {
textAlign: "right"
}
},
cardHeaderPlain: {
marginLeft: "0px !important",
marginRight: "0px !important"
},
cardHeaderStats: {
"& $cardHeaderIcon": {
textAlign: "right"
},
"& h1,& h2,& h3,& h4,& h5,& h6": {
margin: "0 !important"
}
},
cardHeaderIcon: {
"&$warningCardHeader,&$successCardHeader,&$dangerCardHeader,&$infoCardHeader,&$primaryCardHeader,&$roseCardHeader": {
background: "transparent",
boxShadow: "none"
},
"& i,& .material-icons": {
width: "33px",
height: "33px",
textAlign: "center",
lineHeight: "33px"
},
"& svg": {
width: "24px",
height: "24px",
textAlign: "center",
lineHeight: "33px",
margin: "5px 4px 0px"
}
},
warningCardHeader: {
color: whiteColor,
"&:not($cardHeaderIcon)": {
...warningCardHeader
}
},
successCardHeader: {
color: whiteColor,
"&:not($cardHeaderIcon)": {
...successCardHeader
}
},
dangerCardHeader: {
color: whiteColor,
"&:not($cardHeaderIcon)": {
...dangerCardHeader
}
},
infoCardHeader: {
color: whiteColor,
"&:not($cardHeaderIcon)": {
...infoCardHeader
}
},
primaryCardHeader: {
color: whiteColor,
"&:not($cardHeaderIcon)": {
...primaryCardHeader
}
},
roseCardHeader: {
color: whiteColor,
"&:not($cardHeaderIcon)": {
...roseCardHeader
}
}
};
export default cardHeaderStyle;
|
/******************************************************************************************************************************************
**                                                                                                                                       **
**                                     Functions related to editing an IRL and/or its participants                                      **
**                                                                                                                                       **
******************************************************************************************************************************************/
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Adds a participant to an IRL
//
// chemin is the path to the site root
// id is the id of the IRL to which the participant is added
function irl_ajouter_participant(chemin, id)
{
	// Hide the add form
	toggle_row('irl_ajouter_participant');
	// Build the postdata (declared with var to avoid leaking a global)
	var postdata = "irl_add_pseudo=" + dynamique_prepare('irl_add_pseudo');
	postdata += "&irl_add_details_fr=" + dynamique_prepare('irl_add_details_fr');
	postdata += "&irl_add_details_en=" + dynamique_prepare('irl_add_details_en');
	postdata += "&irl_add_confirme=" + encodeURIComponent(document.getElementById('irl_add_confirme').checked);
	// Reset the add form
	document.getElementById('irl_add_pseudo').value = '';
	document.getElementById('irl_add_details_fr').value = '';
	document.getElementById('irl_add_details_en').value = '';
	document.getElementById('irl_add_confirme').checked = false;
	// Finally, add the participant via XHR
	dynamique(chemin, 'irl?id='+id, 'irl_participants_tbody', postdata, 1);
}
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Edits a participant of an IRL
//
// chemin is the path to the site root
// irl is the id of the IRL the participant belongs to
// participant is the id of the participant to edit within the IRL
function irl_modifier_participant(chemin, irl, participant)
{
	// Hide the edit form
	toggle_row('irl_edition_'+participant, 1);
	// Build the postdata (declared with var to avoid leaking a global)
	var postdata = "&irl_edit_id=" + encodeURIComponent(participant);
	postdata += "&irl_edit_pseudo=" + dynamique_prepare('irl_edit_pseudo_'+participant);
	postdata += "&irl_edit_details_fr=" + dynamique_prepare('irl_edit_details_fr_'+participant);
	postdata += "&irl_edit_details_en=" + dynamique_prepare('irl_edit_details_en_'+participant);
	postdata += "&irl_edit_confirme=" + encodeURIComponent(document.getElementById('irl_edit_confirme_'+participant).checked);
	// Send the edit via XHR
	dynamique(chemin, 'irl?id='+irl, 'irl_participants_tbody', postdata, 1);
}
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Removes a participant from an IRL
//
// chemin is the path to the site root
// irl is the id of the IRL the participant belongs to
// participant is the id of the participant to remove from the IRL
function irl_supprimer_participant(chemin, irl, participant)
{
	// Ask for confirmation
	if(!confirm('Confirm removing this participant from the IRL?'))
		return;
	// Then remove the participant via XHR
	dynamique(chemin, 'irl?id='+irl, 'irl_participants_tbody', 'irl_supprimer_participant='+encodeURIComponent(participant), 1);
}
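// Example call (a sketch; assumes the form fields with the ids used above
// exist in the page): irl_ajouter_participant('../../', 42);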
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Deletes an IRL
//
// chemin is the path to the site root
// id is the id of the IRL to delete
function irl_supprimer(chemin, id)
{
	// Ask for confirmation
	if(!confirm('Confirm the permanent deletion of this IRL?'))
		return;
	// And a second time, for good measure
	if(!confirm('Confirm the permanent deletion of this IRL one more time (better safe than sorry)?'))
		return;
	// Delete the IRL via XHR
	dynamique(chemin, 'irl?id='+id, 'irl_titre', 'irl_supprimer=1', 1);
} |
#ifndef ALIALGDETTRD_H
#define ALIALGDETTRD_H
#include "AliAlgDet.h"
/*--------------------------------------------------------
TRD detector wrapper
-------------------------------------------------------*/
// Author: [email protected]
class AliAlgDetTRD : public AliAlgDet
{
public:
//
enum {kCalibNRCCorrDzDtgl, // correction parameter for NonRC tracklets
kCalibDVT, // global correction to Vdrift*t
kNCalibParams}; // calibration parameters
//
AliAlgDetTRD(const char* title="");
virtual ~AliAlgDetTRD();
//
virtual void DefineVolumes();
virtual void Print(const Option_t *opt="") const;
//
Bool_t AcceptTrack(const AliESDtrack* trc,Int_t trtype) const;
//
virtual const char* GetCalibDOFName(int i) const;
//
virtual void WritePedeInfo(FILE* parOut,const Option_t *opt="") const;
//
void SetNonRCCorrDzDtgl(double v=0) {fNonRCCorrDzDtgl = v;}
Double_t GetNonRCCorrDzDtgl() const {return fNonRCCorrDzDtgl;}
Double_t GetNonRCCorrDzDtglWithCal() const {return GetNonRCCorrDzDtgl()+GetParVal(kCalibNRCCorrDzDtgl);}
//
  void SetCorrDVT(double v=0) {fCorrDVT = v;}
Double_t GetCorrDVT() const {return fCorrDVT;}
Double_t GetCorrDVTWithCal() const {return GetCorrDVT() + GetParVal(kCalibDVT);}
//
virtual Double_t GetCalibDOFVal(int id) const;
virtual Double_t GetCalibDOFValWithCal(int id) const;
//
const Double_t* GetExtraErrRC() const {return fExtraErrRC;}
void SetExtraErrRC(double y=0.2, double z=1.0) {fExtraErrRC[0]=y;fExtraErrRC[1]=z;}
//
protected:
//
// -------- dummies --------
AliAlgDetTRD(const AliAlgDetTRD&);
AliAlgDetTRD& operator=(const AliAlgDetTRD&);
//
protected:
//
Double_t fNonRCCorrDzDtgl; // correction in Z for non-crossing tracklets
Double_t fCorrDVT; // correction to Vdrift*t
Double_t fExtraErrRC[2]; // extra errors for RC tracklets
//
static const char* fgkCalibDOFName[kNCalibParams];
//
ClassDef(AliAlgDetTRD,1);
};
#endif
|
const test = require('ava');
const env = require('./setup/environment');
const OBSWebSocket = require('..');
let unauthServer;
const obs = new OBSWebSocket();
test.before(async t => {
unauthServer = env.makeServer(4445);
await t.notThrowsAsync(obs.connect({
address: 'localhost:4445'
}));
});
test.after.always('cleanup', () => {
unauthServer.close();
});
test('resolves when a valid request is sent', async t => {
await t.notThrowsAsync(obs.send('ValidMethodName'));
});
test('rejects when an invalid request is sent', async t => {
try {
await obs.send('InvalidMethodName');
t.fail('expected promise rejection');
} catch (e) {
t.is(e.status, 'error');
t.is(e.error, 'invalid request type');
}
});
test('permits null args', async t => {
await t.notThrowsAsync(obs.send('ValidMethodName', null));
});
test.cb('sendCallback -- success case', t => {
obs.sendCallback('ValidMethodName', {}, (err, data) => {
t.falsy(err);
t.is(data.status, 'ok');
t.end();
});
});
test.cb('sendCallback -- omitted args', t => {
obs.sendCallback('ValidMethodName', (err, data) => {
t.falsy(err);
t.is(data.status, 'ok');
t.end();
});
});
test.cb('sendCallback -- error case', t => {
obs.sendCallback('InvalidMethodName', {}, (err, data) => {
t.falsy(data);
t.is(err.status, 'error');
t.end();
});
});
test('rejects when no open connection exists', async t => {
const obs2 = new OBSWebSocket();
try {
await obs2.send('ValidMethodName');
t.fail('expected promise rejection');
} catch (e) {
t.is(e.status, 'error');
t.is(e.code, 'NOT_CONNECTED');
}
});
test('rejects when no request type is specified', async t => {
try {
await obs.send();
t.fail('expected promise rejection');
} catch (e) {
t.is(e.status, 'error');
t.is(e.code, 'REQUEST_TYPE_NOT_SPECIFIED');
}
});
|
/* Copyright (c) 2022 Jin Li, [email protected]
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
#pragma once
#include "Basic/Object.h"
#include "Cache/XmlItemCache.h"
#include "Common/Singleton.h"
NS_DOROTHY_BEGIN
class SpriteDef;
class AnimationDef;
class KeyAnimationDef;
class PlayTrackDef;
class ModelDef;
class Model;
class ModelCache : public XmlItemCache<ModelDef>
{
protected:
ModelCache() { }
virtual std::shared_ptr<XmlParser<ModelDef>> prepareParser(String filename) override;
private:
class Parser : public XmlParser<ModelDef>, public rapidxml::xml_sax2_handler
{
public:
Parser(ModelDef* def, String path);
virtual void xmlSAX2StartElement(const char* name, size_t len, const std::vector<AttrSlice>& attrs) override;
virtual void xmlSAX2EndElement(const char* name, size_t len) override;
virtual void xmlSAX2Text(const char* s, size_t len) override;
private:
std::string _path;
void getPosFromStr(String str, float& x, float& y);
KeyAnimationDef* getCurrentKeyAnimation();
std::stack<Own<SpriteDef>> _nodeStack;
Own<AnimationDef> _currentAnimationDef;
};
SINGLETON_REF(ModelCache, Director, AsyncThread);
};
#define SharedModelCache \
Dorothy::Singleton<Dorothy::ModelCache>::shared()
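// Usage (a sketch; the exact loading API comes from XmlItemCache, so the
// method name here is an assumption):
//   ModelDef* def = SharedModelCache.load("Model/hero.model");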
NS_DOROTHY_END
|
import React, { Component } from "react";
import {NavLink} from "react-router-dom";
import logo from "../images/name1.png";
import email from "../images/icon/email.png";
import github from "../images/icon/github.png";
import linkedIn from "../images/icon/linkedin.jpg";
import "../style.css";
class Navbar extends Component{
    constructor() {
        super();
        this.state = {
            scrolled: false,
        };
        // Bind once so the same reference can be removed on unmount.
        this.handleScroll = this.handleScroll.bind(this);
    }
    handleScroll() {
        const isTop = window.scrollY < 100;
        this.setState({ scrolled: !isTop });
    }
    componentDidMount() {
        window.addEventListener("scroll", this.handleScroll);
    }
    componentWillUnmount() {
        // removeEventListener only works with the same function reference
        // registered earlier; the previous inline arrow could never be removed.
        window.removeEventListener("scroll", this.handleScroll);
    }
render() {
return(
<div className={this.state.scrolled ? "nav scrolled" : "nav"}>
<div className="nav_text">
<NavLink to = "/Tiffolin"><img id="logo" alt="logo" src={logo}></img></NavLink>
<a href="https://github.com/Tiffolin?tab=repositories">
<span><img src={github} className="NavSnsImg img-fluid" alt="..."></img></span>
</a>
<a href="mailto:[email protected]">
<span><img src={email} className="NavSnsImg img-fluid" alt="..."></img></span>
</a>
<a href="https://www.linkedin.com/in/szuchinlin">
<span><img src={linkedIn} className="NavSnsImg img-fluid" alt="..."></img></span>
</a>
</div>
</div>
);
}
}
export default Navbar; |
#define RSTRING_NOT_MODIFIED 1
#include <ruby.h>
#include <rubyio.h>
#ifdef HAVE_OPENSSL_BIO_H
#include <openssl/bio.h>
#include <openssl/ssl.h>
#include <openssl/dh.h>
#include <openssl/err.h>
#include <openssl/x509.h>
typedef struct {
BIO* read;
BIO* write;
SSL* ssl;
SSL_CTX* ctx;
} ms_conn;
typedef struct {
unsigned char* buf;
int bytes;
} ms_cert_buf;
void engine_free(ms_conn* conn) {
ms_cert_buf* cert_buf = (ms_cert_buf*)SSL_get_app_data(conn->ssl);
if(cert_buf) {
OPENSSL_free(cert_buf->buf);
free(cert_buf);
}
SSL_free(conn->ssl);
SSL_CTX_free(conn->ctx);
free(conn);
}
ms_conn* engine_alloc(VALUE klass, VALUE* obj) {
ms_conn* conn;
*obj = Data_Make_Struct(klass, ms_conn, 0, engine_free, conn);
conn->read = BIO_new(BIO_s_mem());
BIO_set_nbio(conn->read, 1);
conn->write = BIO_new(BIO_s_mem());
BIO_set_nbio(conn->write, 1);
conn->ssl = 0;
conn->ctx = 0;
return conn;
}
DH *get_dh1024() {
/* `openssl dhparam 1024 -C`
* -----BEGIN DH PARAMETERS-----
* MIGHAoGBALPwcEv0OstmQCZdfHw0N5r+07lmXMxkpQacy1blwj0LUqC+Divp6pBk
* usTJ9W2/dOYr1X7zi6yXNLp4oLzc/31PUL3D9q8CpGS7vPz5gijKSw9BwCTT5z9+
* KF9v46qw8XqT5HHV87sWFlGQcVFq+pEkA2kPikkKZ/X/CCcpCAV7AgEC
* -----END DH PARAMETERS-----
*/
static unsigned char dh1024_p[] = {
0xB3,0xF0,0x70,0x4B,0xF4,0x3A,0xCB,0x66,0x40,0x26,0x5D,0x7C,
0x7C,0x34,0x37,0x9A,0xFE,0xD3,0xB9,0x66,0x5C,0xCC,0x64,0xA5,
0x06,0x9C,0xCB,0x56,0xE5,0xC2,0x3D,0x0B,0x52,0xA0,0xBE,0x0E,
0x2B,0xE9,0xEA,0x90,0x64,0xBA,0xC4,0xC9,0xF5,0x6D,0xBF,0x74,
0xE6,0x2B,0xD5,0x7E,0xF3,0x8B,0xAC,0x97,0x34,0xBA,0x78,0xA0,
0xBC,0xDC,0xFF,0x7D,0x4F,0x50,0xBD,0xC3,0xF6,0xAF,0x02,0xA4,
0x64,0xBB,0xBC,0xFC,0xF9,0x82,0x28,0xCA,0x4B,0x0F,0x41,0xC0,
0x24,0xD3,0xE7,0x3F,0x7E,0x28,0x5F,0x6F,0xE3,0xAA,0xB0,0xF1,
0x7A,0x93,0xE4,0x71,0xD5,0xF3,0xBB,0x16,0x16,0x51,0x90,0x71,
0x51,0x6A,0xFA,0x91,0x24,0x03,0x69,0x0F,0x8A,0x49,0x0A,0x67,
0xF5,0xFF,0x08,0x27,0x29,0x08,0x05,0x7B
};
static unsigned char dh1024_g[] = { 0x02 };
DH *dh;
dh = DH_new();
#if OPENSSL_VERSION_NUMBER < 0x10100005L || defined(LIBRESSL_VERSION_NUMBER)
dh->p = BN_bin2bn(dh1024_p, sizeof(dh1024_p), NULL);
dh->g = BN_bin2bn(dh1024_g, sizeof(dh1024_g), NULL);
if ((dh->p == NULL) || (dh->g == NULL)) {
DH_free(dh);
return NULL;
}
#else
BIGNUM *p, *g;
p = BN_bin2bn(dh1024_p, sizeof(dh1024_p), NULL);
g = BN_bin2bn(dh1024_g, sizeof(dh1024_g), NULL);
if (p == NULL || g == NULL || !DH_set0_pqg(dh, p, NULL, g)) {
DH_free(dh);
BN_free(p);
BN_free(g);
return NULL;
}
#endif
return dh;
}
static int engine_verify_callback(int preverify_ok, X509_STORE_CTX* ctx) {
X509* err_cert;
SSL* ssl;
int bytes;
unsigned char* buf = NULL;
if(!preverify_ok) {
err_cert = X509_STORE_CTX_get_current_cert(ctx);
if(err_cert) {
/*
* Save the failed certificate for inspection/logging.
*/
bytes = i2d_X509(err_cert, &buf);
if(bytes > 0) {
ms_cert_buf* cert_buf = (ms_cert_buf*)malloc(sizeof(ms_cert_buf));
cert_buf->buf = buf;
cert_buf->bytes = bytes;
ssl = X509_STORE_CTX_get_ex_data(ctx, SSL_get_ex_data_X509_STORE_CTX_idx());
SSL_set_app_data(ssl, cert_buf);
}
}
}
return preverify_ok;
}
VALUE engine_init_server(VALUE self, VALUE mini_ssl_ctx) {
VALUE obj;
SSL_CTX* ctx;
SSL* ssl;
ms_conn* conn = engine_alloc(self, &obj);
ID sym_key = rb_intern("key");
VALUE key = rb_funcall(mini_ssl_ctx, sym_key, 0);
ID sym_cert = rb_intern("cert");
VALUE cert = rb_funcall(mini_ssl_ctx, sym_cert, 0);
ID sym_ca = rb_intern("ca");
VALUE ca = rb_funcall(mini_ssl_ctx, sym_ca, 0);
ID sym_verify_mode = rb_intern("verify_mode");
VALUE verify_mode = rb_funcall(mini_ssl_ctx, sym_verify_mode, 0);
ctx = SSL_CTX_new(SSLv23_server_method());
conn->ctx = ctx;
SSL_CTX_use_certificate_file(ctx, RSTRING_PTR(cert), SSL_FILETYPE_PEM);
SSL_CTX_use_PrivateKey_file(ctx, RSTRING_PTR(key), SSL_FILETYPE_PEM);
if (!NIL_P(ca)) {
SSL_CTX_load_verify_locations(ctx, RSTRING_PTR(ca), NULL);
}
SSL_CTX_set_options(ctx, SSL_OP_CIPHER_SERVER_PREFERENCE | SSL_OP_NO_SSLv2 | SSL_OP_NO_SSLv3 | SSL_OP_SINGLE_DH_USE | SSL_OP_SINGLE_ECDH_USE);
SSL_CTX_set_session_cache_mode(ctx, SSL_SESS_CACHE_OFF);
SSL_CTX_set_cipher_list(ctx, "HIGH:!aNULL@STRENGTH");
DH *dh = get_dh1024();
SSL_CTX_set_tmp_dh(ctx, dh);
#ifndef OPENSSL_NO_ECDH
EC_KEY *ecdh = EC_KEY_new_by_curve_name(NID_secp521r1);
if (ecdh) {
SSL_CTX_set_tmp_ecdh(ctx, ecdh);
EC_KEY_free(ecdh);
}
#endif
ssl = SSL_new(ctx);
conn->ssl = ssl;
SSL_set_app_data(ssl, NULL);
if (NIL_P(verify_mode)) {
/* SSL_set_verify(ssl, SSL_VERIFY_NONE, NULL); */
} else {
SSL_set_verify(ssl, NUM2INT(verify_mode), engine_verify_callback);
}
SSL_set_bio(ssl, conn->read, conn->write);
SSL_set_accept_state(ssl);
return obj;
}
VALUE engine_init_client(VALUE klass) {
VALUE obj;
ms_conn* conn = engine_alloc(klass, &obj);
conn->ctx = SSL_CTX_new(DTLSv1_method());
conn->ssl = SSL_new(conn->ctx);
SSL_set_app_data(conn->ssl, NULL);
SSL_set_verify(conn->ssl, SSL_VERIFY_NONE, NULL);
SSL_set_bio(conn->ssl, conn->read, conn->write);
SSL_set_connect_state(conn->ssl);
return obj;
}
VALUE engine_inject(VALUE self, VALUE str) {
ms_conn* conn;
long used;
Data_Get_Struct(self, ms_conn, conn);
StringValue(str);
used = BIO_write(conn->read, RSTRING_PTR(str), (int)RSTRING_LEN(str));
if(used == 0 || used == -1) {
return Qfalse;
}
return INT2FIX(used);
}
static VALUE eError;
void raise_error(SSL* ssl, int result) {
char buf[512];
char msg[512];
const char* err_str;
int err = errno;
int ssl_err = SSL_get_error(ssl, result);
int verify_err = SSL_get_verify_result(ssl);
if(SSL_ERROR_SYSCALL == ssl_err) {
snprintf(msg, sizeof(msg), "System error: %s - %d", strerror(err), err);
} else if(SSL_ERROR_SSL == ssl_err) {
if(X509_V_OK != verify_err) {
err_str = X509_verify_cert_error_string(verify_err);
snprintf(msg, sizeof(msg),
"OpenSSL certificate verification error: %s - %d",
err_str, verify_err);
} else {
err = ERR_get_error();
ERR_error_string_n(err, buf, sizeof(buf));
snprintf(msg, sizeof(msg), "OpenSSL error: %s - %d", buf, err);
}
} else {
snprintf(msg, sizeof(msg), "Unknown OpenSSL error: %d", ssl_err);
}
ERR_clear_error();
rb_raise(eError, "%s", msg);
}
VALUE engine_read(VALUE self) {
ms_conn* conn;
char buf[512];
int bytes, n, error;
Data_Get_Struct(self, ms_conn, conn);
ERR_clear_error();
bytes = SSL_read(conn->ssl, (void*)buf, sizeof(buf));
if(bytes > 0) {
return rb_str_new(buf, bytes);
}
if(SSL_want_read(conn->ssl)) return Qnil;
error = SSL_get_error(conn->ssl, bytes);
if(error == SSL_ERROR_ZERO_RETURN) {
rb_eof_error();
} else {
raise_error(conn->ssl, bytes);
}
return Qnil;
}
VALUE engine_write(VALUE self, VALUE str) {
ms_conn* conn;
int bytes;
Data_Get_Struct(self, ms_conn, conn);
StringValue(str);
ERR_clear_error();
bytes = SSL_write(conn->ssl, (void*)RSTRING_PTR(str), (int)RSTRING_LEN(str));
if(bytes > 0) {
return INT2FIX(bytes);
}
if(SSL_want_write(conn->ssl)) return Qnil;
raise_error(conn->ssl, bytes);
return Qnil;
}
VALUE engine_extract(VALUE self) {
ms_conn* conn;
int bytes;
size_t pending;
char buf[512];
Data_Get_Struct(self, ms_conn, conn);
pending = BIO_pending(conn->write);
if(pending > 0) {
bytes = BIO_read(conn->write, buf, sizeof(buf));
if(bytes > 0) {
return rb_str_new(buf, bytes);
} else if(!BIO_should_retry(conn->write)) {
raise_error(conn->ssl, bytes);
}
}
return Qnil;
}
VALUE engine_peercert(VALUE self) {
ms_conn* conn;
X509* cert;
int bytes;
unsigned char* buf = NULL;
ms_cert_buf* cert_buf = NULL;
VALUE rb_cert_buf;
Data_Get_Struct(self, ms_conn, conn);
cert = SSL_get_peer_certificate(conn->ssl);
if(!cert) {
/*
* See if there was a failed certificate associated with this client.
*/
cert_buf = (ms_cert_buf*)SSL_get_app_data(conn->ssl);
if(!cert_buf) {
return Qnil;
}
buf = cert_buf->buf;
bytes = cert_buf->bytes;
} else {
bytes = i2d_X509(cert, &buf);
X509_free(cert);
if(bytes < 0) {
return Qnil;
}
}
  rb_cert_buf = rb_str_new((const char*)buf, bytes);
if(!cert_buf) {
OPENSSL_free(buf);
}
return rb_cert_buf;
}
VALUE noop(VALUE self) {
return Qnil;
}
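/* Typical Ruby-side flow (a sketch): inject() feeds ciphertext from the
 * socket into the read BIO, read() returns decrypted plaintext (nil while
 * the handshake still needs more data), write() encrypts plaintext into the
 * write BIO, and extract() drains that ciphertext for sending on the
 * socket. */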
void Init_mini_ssl(VALUE puma) {
VALUE mod, eng;
SSL_library_init();
OpenSSL_add_ssl_algorithms();
SSL_load_error_strings();
ERR_load_crypto_strings();
mod = rb_define_module_under(puma, "MiniSSL");
eng = rb_define_class_under(mod, "Engine", rb_cObject);
rb_define_singleton_method(mod, "check", noop, 0);
eError = rb_define_class_under(mod, "SSLError", rb_eStandardError);
rb_define_singleton_method(eng, "server", engine_init_server, 1);
rb_define_singleton_method(eng, "client", engine_init_client, 0);
rb_define_method(eng, "inject", engine_inject, 1);
rb_define_method(eng, "read", engine_read, 0);
rb_define_method(eng, "write", engine_write, 1);
rb_define_method(eng, "extract", engine_extract, 0);
rb_define_method(eng, "peercert", engine_peercert, 0);
}
#else
VALUE raise_error(VALUE self) {
rb_raise(rb_eStandardError, "SSL not available in this build");
return Qnil;
}
void Init_mini_ssl(VALUE puma) {
VALUE mod, eng;
mod = rb_define_module_under(puma, "MiniSSL");
rb_define_class_under(mod, "SSLError", rb_eStandardError);
rb_define_singleton_method(mod, "check", raise_error, 0);
}
#endif
|
// Use this test page to test the API and features of the ValueHelp container.
// The interaction with the Field is tested on the field test page.
/* global QUnit, sinon */
/*eslint max-nested-callbacks: [2, 5]*/
sap.ui.define([
"sap/ui/mdc/ValueHelpDelegate",
"sap/ui/mdc/valuehelp/base/Container",
"sap/ui/mdc/valuehelp/base/Content",
"sap/ui/mdc/condition/Condition",
"sap/ui/mdc/enum/SelectType",
"sap/ui/core/Icon",
"sap/ui/model/json/JSONModel",
"sap/m/library"
], function (
ValueHelpDelegate,
Container,
Content,
Condition,
SelectType,
Icon,
JSONModel,
mLibrary
) {
"use strict";
var oContainer;
var _fPressHandler = function(oEvent) {}; // just dummy handler to make Icon focusable
var oField;
var oValueHelp = { //to fake ValueHelp
getControl: function() {
return oField;
},
_handleClosed: function () {
},
_handleOpened: function () {
},
getTypeahead: function () {
return oContainer;
},
getControlDelegate: function () {
return ValueHelpDelegate;
},
getPayload: function () {
return {x: "X"};
},
awaitControlDelegate: function () {
return Promise.resolve();
},
bDelegateInitialized: true
};
var oValueHelpConfig;
var oModel; // to fake ManagedObjectModel of ValueHelp
/* use dummy control to simulate Field */
var _teardown = function() {
if (oField) {
oField.destroy();
oField = undefined;
}
oContainer.destroy();
oContainer = undefined;
oValueHelpConfig = undefined;
if (oModel) {
oModel.destroy();
oModel = undefined;
}
};
QUnit.module("basic features", {
beforeEach: function() {
oContainer = new Container("C1", {
});
},
afterEach: _teardown
});
QUnit.test("default values", function(assert) {
assert.equal(oContainer.getMaxConditions(), undefined, "getMaxConditions");
assert.notOk(oContainer.isMultiSelect(), "isMultiSelect");
assert.notOk(oContainer._isSingleSelect(), "_isSingleSelect");
assert.notOk(oContainer.getUseAsValueHelp(), "getUseAsValueHelp");
assert.notOk(oContainer.shouldOpenOnClick(), "shouldOpenOnClick");
assert.notOk(oContainer.shouldOpenOnNavigate(), "shouldOpenOnNavigate");
assert.ok(oContainer.isFocusInHelp(), "isFocusInHelp");
assert.notOk(oContainer.isValidationSupported(), "isValidationSupported");
});
QUnit.test("getAriaAttributes", function(assert) {
var oCheckAttributes = {
contentId: null,
ariaHasPopup: "listbox",
role: "combobox",
roleDescription: null
};
var oAttributes = oContainer.getAriaAttributes();
assert.ok(oAttributes, "Aria attributes returned");
assert.deepEqual(oAttributes, oCheckAttributes, "returned attributes");
});
QUnit.test("getScrollDelegate", function(assert) {
oField = new Icon("I1", {src:"sap-icon://sap-ui5", decorative: false, press: _fPressHandler});
oField.getScrollDelegate = function() {
return "X"; // just fake
};
oContainer.setAggregation("_container", oField, true);
assert.equal(oContainer.getScrollDelegate(), "X", "ScrollDelegate of Content returned");
});
QUnit.module("assigned to ValueHelp", {
beforeEach: function() {
oValueHelpConfig = {maxConditions: 1};
oModel = new JSONModel({
_config: oValueHelpConfig,
filterValue: "X",
conditions: [Condition.createItemCondition("X", "Text")]
});
oContainer = new Container("C1", {
}).setModel(oModel, "$valueHelp");
sinon.stub(oContainer, "getParent").returns(oValueHelp);
oField = new Icon("I1", {src:"sap-icon://sap-ui5", decorative: false, press: _fPressHandler});
oField.placeAt("content");
},
afterEach: _teardown
});
QUnit.test("default values", function(assert) {
assert.equal(oContainer.getMaxConditions(), 1, "getMaxConditions");
assert.notOk(oContainer.isMultiSelect(), "isMultiSelect"); // as needs to be defined by Popover or Dialog
assert.ok(oContainer._isSingleSelect(), "_isSingleSelect");
});
QUnit.test("open", function(assert) {
sinon.stub(oContainer, "_open").callsFake(function(oContainer) {
this._handleOpened();
});
sinon.stub(oContainer, "_getContainer").returns(oField);
var iOpened = 0;
oContainer.attachEvent("opened", function(oEvent) {
iOpened++;
});
var oPromise = oContainer.open(Promise.resolve());
assert.ok(oPromise instanceof Promise, "open returns promise");
if (oPromise) {
var oPromise2 = oContainer.open(Promise.resolve()); // to test double call
assert.ok(oPromise2 instanceof Promise, "open returns promise");
var fnDone = assert.async();
oPromise.then(function() {
assert.ok(oContainer._open.calledWith(oField), "_open called");
assert.equal(iOpened, 1, "Opened event fired once");
fnDone();
}).catch(function(oError) {
assert.notOk(true, "Promise Catch called");
fnDone();
});
}
});
QUnit.test("close", function(assert) {
sinon.stub(oContainer, "_close").callsFake(function(oContainer) {
this._handleClosed();
});
var iClosed = 0;
oContainer.attachEvent("closed", function(oEvent) {
iClosed++;
});
oContainer.close();
assert.notOk(oContainer._close.called, "_close not called if not open");
sinon.stub(oContainer, "_open").callsFake(function(oContainer) {
this._handleOpened();
});
sinon.stub(oContainer, "_getContainer").returns(oField);
var oPromise = oContainer.open(Promise.resolve());
if (oPromise) {
var fnDone = assert.async();
oPromise.then(function() {
oContainer.close();
			assert.ok(oContainer._close.called, "_close called if open");
assert.equal(iClosed, 1, "Closed event fired");
fnDone();
}).catch(function(oError) {
assert.notOk(true, "Promise Catch called");
fnDone();
});
}
});
QUnit.test("close while opening", function(assert) {
sinon.stub(oContainer, "_close").callsFake(function(oContainer) {
this._handleClosed();
});
// var iClosed = 0;
// oContainer.attachEvent("closed", function(oEvent) {
// iClosed++;
// });
// var iOpened = 0;
// oContainer.attachEvent("opened", function(oEvent) {
// iOpened++;
// });
sinon.stub(oContainer, "_open").callsFake(function(oContainer) {
this._handleOpened();
});
sinon.stub(oContainer, "_getContainer").returns(oField);
sinon.spy(oContainer, "_cancelPromise"); // TODO: better way to test
var oPromise = oContainer.open(Promise.resolve());
if (oPromise) {
oContainer.close();
assert.ok(oContainer._cancelPromise.called, "Open promise cancelled");
// var fnDone = assert.async();
// oPromise.then(function() {
// assert.ok(oContainer._close.called, "_close called if not open");
// assert.equal(iClosed, 1, "Closed event fired");
// fnDone();
// }).catch(function(oError) {
// assert.notOk(true, "Promise Catch called");
// fnDone();
// });
}
});
QUnit.test("isOpen / isOpening", function(assert) {
assert.notOk(oContainer.isOpen(), "Container not open");
assert.notOk(oContainer.isOpening(), "Container not opening");
sinon.stub(oContainer, "_open").callsFake(function(oContainer) {
this._handleOpened();
});
sinon.stub(oContainer, "_getContainer").returns(oField);
var oPromise = oContainer.open(Promise.resolve());
assert.notOk(oContainer.isOpen(), "Container not open while opening");
assert.ok(oContainer.isOpening(), "Container opening");
if (oPromise) {
var fnDone = assert.async();
oPromise.then(function() {
assert.ok(oContainer.isOpen(), "Container open");
assert.notOk(oContainer.isOpening(), "Container not opening");
fnDone();
}).catch(function(oError) {
assert.notOk(true, "Promise Catch called");
fnDone();
});
}
});
QUnit.test("content", function(assert) {
// add
var oContent = new Content("Content1");
oContainer.addContent(oContent);
assert.equal(oContent.getFilterValue(), "X", "filterValue from ValueHelp");
assert.deepEqual(oContent.getConfig(), oValueHelpConfig, "_config from ValueHelp");
assert.deepEqual(oContent.getConditions(), [Condition.createItemCondition("X", "Text")], "conditions from ValueHelp");
// remove
oContent.destroy();
assert.equal(oContent.getFilterValue(), "", "filterValue initialized");
assert.deepEqual(oContent.getConfig(), {}, "_config initialized");
assert.deepEqual(oContent.getConditions(), [], "conditions initialized");
});
QUnit.test("confirm event", function(assert) {
var iConfirm = 0;
oContainer.attachEvent("confirm", function(oEvent) {
iConfirm++;
});
// add
var oContent = new Content("Content1");
oContainer.addContent(oContent);
oContent.fireConfirm();
assert.equal(iConfirm, 1, "Confirm event fired");
});
QUnit.test("cancel event", function(assert) {
var iCancel = 0;
oContainer.attachEvent("cancel", function(oEvent) {
iCancel++;
});
// add
var oContent = new Content("Content1");
oContainer.addContent(oContent);
oContent.fireCancel();
assert.equal(iCancel, 1, "Cancel event fired");
});
QUnit.test("requestDelegateContent event", function(assert) {
var iRequestDelegateContent = 0;
var oEventContainer;
oContainer.attachEvent("requestDelegateContent", function(oEvent) {
iRequestDelegateContent++;
oEventContainer = oEvent.getParameter("container");
});
// add
var oContent = new Content("Content1");
oContainer.addContent(oContent);
oContent.fireRequestDelegateContent();
assert.equal(iRequestDelegateContent, 1, "RequestDelegateContent event fired");
assert.equal(oEventContainer, oContainer, "RequestDelegateContent event container");
});
QUnit.test("requestSwitchToDialog event", function(assert) {
var iRequestSwitchToDialog = 0;
var oEventContainer;
oContainer.attachEvent("requestSwitchToDialog", function(oEvent) {
iRequestSwitchToDialog++;
oEventContainer = oEvent.getParameter("container");
});
// add
var oContent = new Content("Content1");
oContainer.addContent(oContent);
oContent.fireRequestSwitchToDialog();
assert.equal(iRequestSwitchToDialog, 1, "RequestSwitchToDialog event fired");
assert.equal(oEventContainer, oContainer, "RequestSwitchToDialog event container");
});
QUnit.test("select event", function(assert) {
var iSelect = 0;
var aConditions;
var sType;
oContainer.attachEvent("select", function(oEvent) {
iSelect++;
aConditions = oEvent.getParameter("conditions");
sType = oEvent.getParameter("type");
});
// add
var oContent = new Content("Content1");
oContainer.addContent(oContent);
oContent.fireSelect({conditions: [Condition.createItemCondition("X", "Text")], type: SelectType.Set});
assert.equal(iSelect, 1, "select event fired");
assert.deepEqual(aConditions, [Condition.createItemCondition("X", "Text")], "select event conditions");
assert.equal(sType, SelectType.Set, "select event type");
});
QUnit.test("navigated event", function(assert) {
var iNavigated = 0;
var oCondition;
var bLeaveFocus;
var sItemId;
oContainer.attachEvent("navigated", function(oEvent) {
iNavigated++;
oCondition = oEvent.getParameter("condition");
bLeaveFocus = oEvent.getParameter("leaveFocus");
sItemId = oEvent.getParameter("itemId");
});
// add
var oContent = new Content("Content1");
oContainer.addContent(oContent);
oContent.fireNavigated({condition: Condition.createItemCondition("X", "Text"), leaveFocus: true, itemId:"X"});
assert.equal(iNavigated, 1, "navigated event fired");
assert.deepEqual(oCondition, Condition.createItemCondition("X", "Text"), "navigated event condition");
assert.equal(bLeaveFocus, true, "navigated event leaveFocus");
assert.equal(sItemId, "X", "navigated event itemId");
});
QUnit.test("navigate", function(assert) {
sinon.stub(oContainer, "_getContainer").returns(oField);
sinon.spy(oContainer, "_navigate");
var oPromise = oContainer.navigate(1);
assert.ok(oPromise instanceof Promise, "navigate returns promise");
if (oPromise) {
var fnDone = assert.async();
oPromise.then(function() {
assert.ok(oContainer._navigate.calledOnce, "_navigate called");
assert.ok(oContainer._navigate.calledWith(1), "_navigate called with 1");
fnDone();
}).catch(function(oError) {
assert.notOk(true, "Promise Catch called");
fnDone();
});
}
});
QUnit.test("_getControl", function(assert) {
var oControl = oContainer._getControl();
assert.equal(oControl, oField, "Control returned from ValueHelp");
});
QUnit.test("isTypeahead", function(assert) {
var bTypeahead = oContainer.isTypeahead();
assert.ok(bTypeahead, "Is used as typeahead");
assert.notOk(oContainer.isFocusInHelp(), "isFocusInHelp");
});
QUnit.test("providesScrolling", function(assert) {
var bScrolling = oContainer.providesScrolling();
assert.notOk(bScrolling, "provides no scrolling");
});
QUnit.test("getValueHelpDelegate", function(assert) {
var oDelegate = oContainer.getValueHelpDelegate();
assert.equal(oDelegate, ValueHelpDelegate, "Delegate returned");
});
QUnit.test("getValueHelpDelegatePayload", function(assert) {
var oPayload = oContainer.getValueHelpDelegatePayload();
assert.deepEqual(oPayload, {x: "X"}, "Payload returned");
});
QUnit.test("awaitValueHelpDelegate", function(assert) {
var oPromise = oContainer.awaitValueHelpDelegate();
assert.ok(oPromise instanceof Promise, "Promise returned");
});
QUnit.test("isValueHelpDelegateInitialized", function(assert) {
var bDelegateInitialized = oContainer.isValueHelpDelegateInitialized();
assert.ok(bDelegateInitialized, "Delegate initialized");
});
QUnit.test("_getContainerConfig", function(assert) {
var oParentConfig = {
showHeader: true
};
var oChildConfig = {
showHeader: false
};
var oContainerConfig = {
"sap.ui.mdc.qunit.valuehelp.ParentContainer": oParentConfig
};
var oContent = new Content("Content2");
sinon.stub(oContent, "getContainerConfig").returns(oContainerConfig);
var ParentContainer = Container.extend("sap.ui.mdc.qunit.valuehelp.ParentContainer");
var ChildContainer = ParentContainer.extend("sap.ui.mdc.qunit.valuehelp.ChildContainer");
var oParentContainer = new ParentContainer();
var oChildContainer = new ChildContainer();
assert.equal(oParentContainer._getContainerConfig(oContent), oParentConfig, "Configuration found");
assert.equal(oChildContainer._getContainerConfig(oContent), oParentConfig, "Configuration for inherited type found");
oContainerConfig["sap.ui.mdc.qunit.valuehelp.ChildContainer"] = oChildConfig;
	assert.equal(oChildContainer._getContainerConfig(oContent), oChildConfig, "Specific configuration found and preferred");
oContent.getContainerConfig.restore();
oContent.destroy();
oParentContainer.destroy();
oChildContainer.destroy();
});
// TODO: Test Operator determination on Content
// TODO: Test condition creation on Content
});
|
import configparser
import os
import pytest
import subprocess
ABS_PATH = os.path.abspath(os.path.dirname(__file__))
FIXTURES_PATH = os.path.join(ABS_PATH, 'fixtures')
class MissingConfigError(Exception):
def __init__(self, *args):
super().__init__(*args)
class Utils:
def __init__(self, cfg, tmpdir, sh):
self.cfg = cfg
self.tmpdir = tmpdir
self.sh = sh
@property
def package_info(self):
# Run package_info script in virtual env
return '{} {}'.format(self.python, os.path.join(FIXTURES_PATH, 'package_info.py'))
@property
def package_name(self):
return self.cfg['vars']['testpypiname']
@property
def committee_entrypoint(self):
# Run entrypoint (should be in same path as python in virtual env)
return os.path.join(self.venv, self.cfg['tests']['entrypoint'])
@property
def git(self):
return self.cfg['commands']['git']
@property
def create_venv(self):
return self.cfg['commands']['create_venv']
@property
def python(self):
return str(self.tmpdir.join(self.cfg['commands']['python']))
@property
def venv(self):
return os.path.split(self.python)[0]
def venv_activate(self):
os.environ['PATH'] = self.venv + ':' + os.environ['PATH']
@property
def pytest(self):
return '{} -m {}'.format(self.python, self.cfg['commands']['pytest'])
@property
def pip(self):
return '{} -m {}'.format(self.python, self.cfg['commands']['pip'])
@property
def pip_install_testpypi(self):
return '{} -m {}'.format(self.python, self.cfg['commands']['pip_install_testpypi'])
@property
def repo_ssh(self):
return self.cfg['vars']['repo_full']
@property
def repo_branch(self):
return self.cfg['vars']['branch']
def get_set(self, set_name):
return frozenset(self.cfg.get('sets', set_name, fallback='').split(' '))
def create_fresh_venv(self):
result = self.sh(self.create_venv)
assert result.was_successful, \
'Could not create virtualenv for Python: {}'.format(result.stderr)
self.sh(self.pip, 'install', '--upgrade', 'setuptools')
self.sh(self.pip, 'install', '--upgrade', 'pip')
def clone_repo(self, repo_dir):
result = self.sh(self.git, 'clone', '-b', self.repo_branch, self.repo_ssh, repo_dir)
assert result.was_successful, \
'Could not clone the repository {}: {}'.format(self.repo_ssh, result.stderr)
def clone_repo_with_fresh_venv(self, repo_dir):
self.create_fresh_venv()
self.clone_repo(repo_dir)
class ShellExecutionResult:
def __init__(self, stdout, stderr, return_code):
self.stdout = stdout
self.stderr = stderr
self.return_code = return_code
@property
def was_successful(self):
return self.return_code == 0
@property
def outerr(self):
return '{}\n{}\n{}'.format(self.stdout, '-'*80, self.stderr)
@pytest.fixture()
def config():
ext_vars = {
'COMMITTEE_BRANCH': 'master'
}
ext_vars.update(os.environ)
cfg = configparser.ConfigParser(ext_vars)
cfg.read(os.path.join(FIXTURES_PATH, 'test_config.cfg'))
if not cfg.has_option('vars', 'ctu_username'):
raise MissingConfigError('CTU_USERNAME env var is missing!')
if not cfg.has_option('vars', 'committee_repo'):
raise MissingConfigError('COMMITTEE_REPO env var is missing!')
if not cfg.has_option('vars', 'github_user'):
raise MissingConfigError('GITHUB_USER env var is missing!')
if not cfg.has_option('vars', 'github_token'):
raise MissingConfigError('GITHUB_TOKEN env var is missing!')
return cfg
@pytest.fixture()
def sh():
def shell_executor(command, *args):
p = subprocess.Popen(
' '.join([command, *args]),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True,
universal_newlines=True
)
stdout, stderr = p.communicate()
return ShellExecutionResult(stdout, stderr, p.returncode)
return shell_executor
@pytest.fixture()
def utils(config, tmpdir, sh):
return Utils(config, tmpdir, sh)
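

# Example (hypothetical) test built on the fixtures above: clone the repo
# into a temp dir alongside a fresh virtualenv, then install the package
# from Test PyPI. Every attribute used here is defined on Utils.
#
# def test_install_from_testpypi(utils, tmpdir):
#     repo_dir = str(tmpdir.join('repo'))
#     utils.clone_repo_with_fresh_venv(repo_dir)
#     result = utils.sh(utils.pip_install_testpypi, utils.package_name)
#     assert result.was_successful, result.outerr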
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Note: To use the 'upload' functionality of this file, you must:
# $ pipenv install twine --dev
import io
import os
import sys
from shutil import rmtree
from setuptools import Command, find_packages, setup
# Package meta-data.
NAME = "src"
DESCRIPTION = "My short description for my project."
URL = "https://github.com/jejjohnson/myproject"
EMAIL = "[email protected]"
AUTHOR = "J. Emmanuel Johnson"
REQUIRES_PYTHON = ">=3.6.0"
VERSION = "0.1.0"
# What packages are required for this module to be executed?
REQUIRED = [
# 'requests', 'maya', 'records',
]
# What packages are optional?
EXTRAS = {
# 'fancy feature': ['django'],
}
# The rest you shouldn't have to touch too much :)
# ------------------------------------------------
# Except, perhaps the License and Trove Classifiers!
# If you do change the License, remember to change the Trove Classifier for that!
here = os.path.abspath(os.path.dirname(__file__))
# Import the README and use it as the long-description.
# Note: this will only work if 'README.md' is present in your MANIFEST.in file!
try:
with io.open(os.path.join(here, "README.md"), encoding="utf-8") as f:
long_description = "\n" + f.read()
except FileNotFoundError:
long_description = DESCRIPTION
# Load the package's __version__.py module as a dictionary.
about = {}
if not VERSION:
project_slug = NAME.lower().replace("-", "_").replace(" ", "_")
with open(os.path.join(here, project_slug, "__version__.py")) as f:
exec(f.read(), about)
else:
about["__version__"] = VERSION
class UploadCommand(Command):
"""Support setup.py upload."""
description = "Build and publish the package."
user_options = []
@staticmethod
def status(s):
"""Prints things in bold."""
print("\033[1m{0}\033[0m".format(s))
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
try:
self.status("Removing previous builds…")
rmtree(os.path.join(here, "dist"))
except OSError:
pass
self.status("Building Source and Wheel (universal) distribution…")
os.system("{0} setup.py sdist bdist_wheel --universal".format(sys.executable))
self.status("Uploading the package to PyPI via Twine…")
os.system("twine upload dist/*")
self.status("Pushing git tags…")
os.system("git tag v{0}".format(about["__version__"]))
os.system("git push --tags")
sys.exit()
# Where the magic happens:
setup(
name=NAME,
version=about["__version__"],
description=DESCRIPTION,
long_description=long_description,
long_description_content_type="text/markdown",
author=AUTHOR,
author_email=EMAIL,
python_requires=REQUIRES_PYTHON,
url=URL,
packages=find_packages(exclude=["tests", "*.tests", "*.tests.*", "tests.*"]),
# If your package is a single module, use this instead of 'packages':
# py_modules=['mypackage'],
# entry_points={
# 'console_scripts': ['mycli=mymodule:cli'],
# },
install_requires=REQUIRED,
extras_require=EXTRAS,
include_package_data=True,
license="MIT",
classifiers=[
# Trove classifiers
# Full list: https://pypi.python.org/pypi?%3Aaction=list_classifiers
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
],
# $ setup.py publish support.
cmdclass={"upload": UploadCommand},
)
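# Usage sketch for the custom command registered above (requires twine,
# e.g. `pipenv install twine --dev`):
#
#   $ python setup.py upload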
|
load("bf4b12814bc95f34eeb130127d8438ab.js");
load("93fae755edd261212639eed30afa2ca4.js");
load("9943750f07ea537be5f5aa14a5f7b1b7.js");
// Copyright (C) 2015 the V8 project authors. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/*---
es6id: 23.3.1.1
description: >
WeakMap ( [ iterable ] )
17 ECMAScript Standard Built-in Objects
includes: [propertyHelper.js]
---*/
verifyNotEnumerable(this, 'WeakMap');
verifyWritable(this, 'WeakMap');
verifyConfigurable(this, 'WeakMap');
|
import smartPosition from "../methods/smart-position";
import slideShow from "../methods/slide-show";
import slideHide from "../methods/slide-hide";
export default function slide(el, config) {
$(config.target).addClass(config.slideEffectClass.slice(1)).css('display', 'none');
function clickFunc() {
if (!$(config.target).hasClass(config.hiddenClass.slice(1))) {
slideHide($(config.target), config, function() {
el.removeClass(config.invokerActiveClass.slice(1));
});
} else {
slideShow($(config.target), config, function() {
el.addClass(config.invokerActiveClass.slice(1));
});
if(!config.smartPositionOff) {
smartPosition($(config.target), el, config);
}
}
}
function mouseEnterFunc() {
slideShow($(config.target), config, function() {
el.addClass(config.invokerActiveClass.slice(1));
});
if(!config.smartPositionOff) {
smartPosition($(config.target), el, config);
}
}
function mouseLeaveFunc() {
slideHide($(config.target), config, function() {
el.removeClass(config.invokerActiveClass.slice(1));
});
}
    function initSlide() {
        // initSlide re-runs on every resize, so drop any previously attached
        // handlers first to avoid stacking duplicate listeners.
        el[0].removeEventListener('click', clickFunc);
        if (window.navigator.userAgent.indexOf('Mobile') !== -1) {
            el[0].addEventListener('click', clickFunc);
        } else {
            if (config.event === 'hover') {
                // Hover
                var wrapper = el.parent(config.wrapperSelector)[0];
                wrapper.removeEventListener('mouseenter', mouseEnterFunc);
                wrapper.removeEventListener('mouseleave', mouseLeaveFunc);
                wrapper.addEventListener('mouseenter', mouseEnterFunc);
                wrapper.addEventListener('mouseleave', mouseLeaveFunc);
            } else {
                // Click
                el[0].addEventListener('click', clickFunc);
            }
        }
    }
$(window).on('resize', function () {
initSlide()
})
initSlide()
}
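// Illustrative call (hypothetical selectors; every config key shown is one
// slide() actually reads). Class options are selectors, hence the .slice(1)
// calls above:
//
//   slide($('.dropdown-invoker'), {
//     target: '.dropdown-target',
//     event: 'hover',
//     wrapperSelector: '.dropdown-wrapper',
//     hiddenClass: '.is-hidden',
//     slideEffectClass: '.slide-effect',
//     invokerActiveClass: '.is-active',
//     smartPositionOff: false
//   });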
|
/**
* @fileoverview Forbid certain propTypes
*/
'use strict';
const variableUtil = require('../util/variable');
const propsUtil = require('../util/props');
const astUtil = require('../util/ast');
// ------------------------------------------------------------------------------
// Constants
// ------------------------------------------------------------------------------
const DEFAULTS = ['any', 'array', 'object'];
// ------------------------------------------------------------------------------
// Rule Definition
// ------------------------------------------------------------------------------
module.exports = {
meta: {
docs: {
description: 'Forbid certain propTypes',
category: 'Best Practices',
recommended: false
},
schema: [{
type: 'object',
properties: {
forbid: {
type: 'array',
items: {
type: 'string'
}
},
checkContextTypes: {
type: 'boolean'
},
checkChildContextTypes: {
type: 'boolean'
}
},
additionalProperties: true
}]
},
create: function(context) {
const propWrapperFunctions = new Set(context.settings.propWrapperFunctions || []);
const configuration = context.options[0] || {};
const checkContextTypes = configuration.checkContextTypes || false;
const checkChildContextTypes = configuration.checkChildContextTypes || false;
function isForbidden(type) {
const forbid = configuration.forbid || DEFAULTS;
return forbid.indexOf(type) >= 0;
}
function shouldCheckContextTypes(node) {
if (checkContextTypes && propsUtil.isContextTypesDeclaration(node)) {
return true;
}
return false;
}
function shouldCheckChildContextTypes(node) {
if (checkChildContextTypes && propsUtil.isChildContextTypesDeclaration(node)) {
return true;
}
return false;
}
/**
* Checks if propTypes declarations are forbidden
* @param {Array} declarations The array of AST nodes being checked.
* @returns {void}
*/
function checkProperties(declarations) {
declarations.forEach(declaration => {
if (declaration.type !== 'Property') {
return;
}
let target;
let value = declaration.value;
if (
value.type === 'MemberExpression' &&
value.property &&
value.property.name &&
value.property.name === 'isRequired'
) {
value = value.object;
}
if (
value.type === 'CallExpression' &&
value.callee.type === 'MemberExpression'
) {
value = value.callee;
}
if (value.property) {
target = value.property.name;
} else if (value.type === 'Identifier') {
target = value.name;
}
if (isForbidden(target)) {
context.report({
node: declaration,
message: `Prop type \`${target}\` is forbidden`
});
}
});
}
function checkNode(node) {
switch (node && node.type) {
case 'ObjectExpression':
checkProperties(node.properties);
break;
        case 'Identifier': {
          const propTypesObject = variableUtil.findVariableByName(context, node.name);
          if (propTypesObject && propTypesObject.properties) {
            checkProperties(propTypesObject.properties);
          }
          break;
        }
        case 'CallExpression': {
          const innerNode = node.arguments && node.arguments[0];
          if (propWrapperFunctions.has(node.callee.name) && innerNode) {
            checkNode(innerNode);
          }
          break;
        }
default:
break;
}
}
return {
ClassProperty: function(node) {
if (
!propsUtil.isPropTypesDeclaration(node) &&
!shouldCheckContextTypes(node) &&
!shouldCheckChildContextTypes(node)
) {
return;
}
checkNode(node.value);
},
MemberExpression: function(node) {
if (
!propsUtil.isPropTypesDeclaration(node) &&
!shouldCheckContextTypes(node) &&
!shouldCheckChildContextTypes(node)
) {
return;
}
checkNode(node.parent.right);
},
MethodDefinition: function(node) {
if (
!propsUtil.isPropTypesDeclaration(node) &&
!shouldCheckContextTypes(node) &&
!shouldCheckChildContextTypes(node)
) {
return;
}
const returnStatement = astUtil.findReturnStatement(node);
if (returnStatement && returnStatement.argument) {
checkNode(returnStatement.argument);
}
},
ObjectExpression: function(node) {
node.properties.forEach(property => {
if (!property.key) {
return;
}
if (
!propsUtil.isPropTypesDeclaration(property) &&
!shouldCheckContextTypes(property) &&
!shouldCheckChildContextTypes(property)
) {
return;
}
if (property.value.type === 'ObjectExpression') {
checkProperties(property.value.properties);
}
});
}
};
}
};
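// Example configuration (the rule is registered by the React plugin; option
// names follow the schema above):
//
//   "react/forbid-prop-types": [2, {
//     "forbid": ["any", "object"],
//     "checkContextTypes": true,
//     "checkChildContextTypes": true
//   }]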
|
var isUpdate = false;
// REAL TIME CHART
$(function () {
    // Data is fetched from the server and kept as a sliding window of
    // totalPoints readings. (getRandomData keeps its name from the original
    // flot realtime example this code is based on.)
var data = [],
totalPoints = 100, lastRead = 0, lastValue = 0;
function getRandomData() {
if (data.length > 0){
data = data.slice(1);
}
if (data.length == 0){
console.log("Primeira");
$.ajax({
url : "http://www.projetoefigenia.com.br/efigenia/controllers/painelcontroller.php",
type: 'POST',
async: false,
data: {
servico: "getHistorical",
qtde: totalPoints,
idpaciente: 1
},
success: function (e) {
var obj = JSON.parse(e);
for (i in obj){
data.push(obj[i].sao2);
}
lastRead = obj[0].id_leituras;
lastValue = obj[0].sao2;
data.reverse();
isUpdate = true;
}
});
        } else {
            // Incremental updates are fetched by getDados() below.
        }
var res = [];
for (var i = 0; i < data.length; ++i) {
res.push([i, data[i]]);
}
return res;
}
// Set up the control widget
var updateInterval = 1000;
$("#updateInterval").val(updateInterval).change(function () {
var v = $(this).val();
if (v && !isNaN(+v)) {
updateInterval = +v;
if (updateInterval < 1) {
updateInterval = 1;
} else if (updateInterval > 2000) {
updateInterval = 2000;
}
$(this).val("" + updateInterval);
}
});
var plot = $.plot("#placeholderRT", [getRandomData()], {
series: {
shadowSize: 0 // Drawing is faster without shadows
},
yaxis: {
min: 0,
max: 100
},
xaxis: {
show: false
}
});
function update() {
console.log(plot.getData());
var ret = getRandomData();
if (ret) {
plot.setData([getRandomData()]);
plot.draw();
}
// Since the axes don't change, we don't need to call plot.setupGrid()
setTimeout(update, updateInterval);
}
    function getDados() {
        console.log("Polling for new readings");
$.ajax({
url : "http://www.projetoefigenia.com.br/efigenia/controllers/painelcontroller.php",
type: 'POST',
data: {
servico: "getHistoricalSync",
lastread: lastRead,
idpaciente: 1
},
            success: function (e) {
                var obj = JSON.parse(e);
                if (obj != null && obj != "null") {
                    console.log("Found new records");
                    obj.reverse();
                    for (var i in obj) {
                        data.push(obj[i].sao2);
                    }
                    lastRead = obj[0].id_leituras;
                    lastValue = obj[0].sao2;
                    isUpdate = true;
                    // The request is asynchronous, so redraw inside the
                    // callback; checking isUpdate right after $.ajax would
                    // race against the response.
                    plot.setData([getRandomData()]);
                    plot.draw();
                } else {
                    isUpdate = false;
                }
            }
        });
    }
setInterval(getDados, updateInterval);
});
// END REAL TIME CHART
|
import numpy as np
import pytest
import qutip
import warnings
@pytest.mark.parametrize(['method', 'kwargs'], [
pytest.param('direct', {}, id="direct"),
pytest.param('direct', {'solver':'mkl'}, id="direct_mkl",
marks=pytest.mark.skipif(not qutip.settings.has_mkl,
reason='MKL extensions not found.')),
pytest.param('direct', {'return_info':True}, id="direct_info"),
pytest.param('direct', {'sparse':False}, id="direct_dense"),
pytest.param('direct', {'use_rcm':True}, id="direct_rcm"),
pytest.param('direct', {'use_wbm':True}, id="direct_wbm"),
pytest.param('eigen', {}, id="eigen"),
pytest.param('eigen', {'use_rcm':True}, id="eigen_rcm"),
pytest.param('svd', {}, id="svd"),
pytest.param('power', {'mtol':1e-5}, id="power"),
pytest.param('power', {'mtol':1e-5, 'solver':'mkl'}, id="power_mkl",
marks=pytest.mark.skipif(not qutip.settings.has_mkl,
reason='MKL extensions not found.')),
pytest.param('power-gmres', {'mtol':1e-1}, id="power-gmres"),
pytest.param('power-gmres', {'mtol':1e-1, 'use_rcm':True, 'use_wbm':True},
id="power-gmres_perm"),
pytest.param('power-bicgstab', {'use_precond':1}, id="power-bicgstab"),
pytest.param('iterative-gmres', {}, id="iterative-gmres"),
pytest.param('iterative-gmres', {'use_rcm':True, 'use_wbm':True},
id="iterative-gmres_perm"),
pytest.param('iterative-bicgstab', {'return_info':True},
id="iterative-bicgstab"),
])
def test_qubit(method, kwargs):
# thermal steadystate of a qubit: compare numerics with analytical formula
sz = qutip.sigmaz()
sm = qutip.destroy(2)
H = 0.5 * 2 * np.pi * sz
gamma1 = 0.05
wth_vec = np.linspace(0.1, 3, 20)
p_ss = np.zeros(np.shape(wth_vec))
with warnings.catch_warnings():
if 'use_wbm' in kwargs:
# The deprecation has been fixed in dev.major
warnings.simplefilter("ignore", category=DeprecationWarning)
for idx, wth in enumerate(wth_vec):
            n_th = 1.0 / (np.exp(1.0 / wth) - 1)  # thermal occupation at bath temperature wth
c_op_list = []
rate = gamma1 * (1 + n_th)
c_op_list.append(np.sqrt(rate) * sm)
rate = gamma1 * n_th
c_op_list.append(np.sqrt(rate) * sm.dag())
rho_ss = qutip.steadystate(H, c_op_list, method=method, **kwargs)
if 'return_info' in kwargs:
rho_ss, info = rho_ss
assert isinstance(info, dict)
p_ss[idx] = qutip.expect(sm.dag() * sm, rho_ss)
p_ss_analytic = np.exp(-1.0 / wth_vec) / (1 + np.exp(-1.0 / wth_vec))
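    # Detailed balance with rates gamma1*(1+n_th) (down) and gamma1*n_th (up)
    # gives p_e/p_g = n_th/(1+n_th) = exp(-1/wth), i.e. the expression above.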
np.testing.assert_allclose(p_ss_analytic, p_ss, atol=1e-5)
@pytest.mark.parametrize(['method', 'kwargs'], [
pytest.param('direct', {}, id="direct"),
pytest.param('direct', {'solver': 'mkl'}, id="direct_mkl",
marks=pytest.mark.skipif(not qutip.settings.has_mkl,
reason='MKL extensions not found.')),
pytest.param('direct', {'sparse': False}, id="direct_dense"),
pytest.param('direct', {'use_rcm': True}, id="direct_rcm"),
pytest.param('direct', {'use_wbm': True}, id="direct_wbm"),
pytest.param('eigen', {}, id="eigen"),
pytest.param('eigen', {'use_rcm': True}, id="eigen_rcm"),
pytest.param('svd', {}, id="svd"),
])
def test_exact_solution_for_simple_methods(method, kwargs):
# this tests that simple methods correctly determine the steadystate
# with high accuracy for a small Liouvillian requiring correct weighting.
H = qutip.identity(2)
c_ops = [qutip.sigmam(), 1e-8 * qutip.sigmap()]
rho_ss = qutip.steadystate(H, c_ops, method=method, **kwargs)
expected_rho_ss = np.array([
[1.e-16+0.j, 0.e+00-0.j],
[0.e+00-0.j, 1.e+00+0.j],
])
np.testing.assert_allclose(expected_rho_ss, rho_ss, atol=1e-16)
assert rho_ss.tr() == pytest.approx(1, abs=1e-14)
@pytest.mark.parametrize(['method', 'kwargs'], [
pytest.param('direct', {}, id="direct"),
pytest.param('direct', {'sparse':False}, id="direct_dense"),
pytest.param('eigen', {}, id="eigen"),
pytest.param('power', {'mtol':1e-5}, id="power"),
pytest.param('power-gmres', {'mtol':1e-1, 'use_precond':1}, id="power-gmres"),
pytest.param('power-bicgstab', {'use_precond':1}, id="power-bicgstab"),
pytest.param('iterative-lgmres', {'use_precond':1}, id="iterative-lgmres"),
pytest.param('iterative-gmres', {}, id="iterative-gmres"),
pytest.param('iterative-bicgstab', {}, id="iterative-bicgstab"),
])
def test_ho(method, kwargs):
# thermal steadystate of an oscillator: compare numerics with analytical
# formula
a = qutip.destroy(35)
H = 0.5 * 2 * np.pi * a.dag() * a
gamma1 = 0.05
wth_vec = np.linspace(0.1, 3, 20)
p_ss = np.zeros(np.shape(wth_vec))
for idx, wth in enumerate(wth_vec):
        n_th = 1.0 / (np.exp(1.0 / wth) - 1)  # thermal occupation at bath temperature wth
c_op_list = []
rate = gamma1 * (1 + n_th)
c_op_list.append(np.sqrt(rate) * a)
rate = gamma1 * n_th
c_op_list.append(np.sqrt(rate) * a.dag())
rho_ss = qutip.steadystate(H, c_op_list, method=method, **kwargs)
p_ss[idx] = np.real(qutip.expect(a.dag() * a, rho_ss))
p_ss_analytic = 1.0 / (np.exp(1.0 / wth_vec) - 1)
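    # The analytic curve is the Bose-Einstein occupation
    # n = 1/(exp(1/wth) - 1) of the oscillator at bath temperature wth.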
np.testing.assert_allclose(p_ss_analytic, p_ss, atol=1e-3)
@pytest.mark.parametrize(['method', 'kwargs'], [
pytest.param('direct', {}, id="direct"),
pytest.param('direct', {'sparse':False}, id="direct_dense"),
pytest.param('eigen', {}, id="eigen"),
pytest.param('svd', {}, id="svd"),
pytest.param('power', {'mtol':1e-5}, id="power"),
pytest.param('power-gmres', {'mtol':1e-1, 'use_precond':1, 'M':'iterative'},
id="power-gmres"),
pytest.param('power-bicgstab', {'use_precond':1, 'M':'power'},
id="power-bicgstab"),
pytest.param('iterative-gmres', {}, id="iterative-gmres"),
pytest.param('iterative-bicgstab', {}, id="iterative-bicgstab"),
])
def test_driven_cavity(method, kwargs):
N = 30
Omega = 0.01 * 2 * np.pi
Gamma = 0.05
a = qutip.destroy(N)
H = Omega * (a.dag() + a)
c_ops = [np.sqrt(Gamma) * a]
if 'use_precond' in kwargs:
kwargs['M'] = qutip.build_preconditioner(H, c_ops, method=kwargs['M'])
rho_ss = qutip.steadystate(H, c_ops, method=method, **kwargs)
rho_ss_analytic = qutip.coherent_dm(N, -1.0j * (Omega)/(Gamma/2))
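    # A driven, damped cavity relaxes to a coherent state with amplitude
    # alpha = -1j * Omega / (Gamma / 2), as constructed above.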
np.testing.assert_allclose(rho_ss, rho_ss_analytic, atol=1e-4)
assert rho_ss.tr() == pytest.approx(1, abs=1e-12)
@pytest.mark.parametrize(['method', 'kwargs'], [
pytest.param('splu', {'sparse':False}, id="dense_direct"),
pytest.param('numpy', {'sparse':False}, id="dense_numpy"),
pytest.param('scipy', {'sparse':False}, id="dense_scipy"),
pytest.param('splu', {}, id="splu"),
pytest.param('spilu', {}, id="spilu"),
])
def test_pseudo_inverse(method, kwargs):
N = 4
a = qutip.destroy(N)
H = (a.dag() + a)
L = qutip.liouvillian(H, [a])
rho = qutip.steadystate(L)
Lpinv = qutip.pseudo_inverse(L, rho, method=method, **kwargs)
np.testing.assert_allclose((L * Lpinv * L).full(), L.full())
np.testing.assert_allclose((Lpinv * L * Lpinv).full(), Lpinv.full())
assert rho.tr() == pytest.approx(1, abs=1e-15)
@pytest.mark.parametrize('sparse', [True, False])
def test_steadystate_floquet(sparse):
"""
Test the steadystate solution for a periodically
driven system.
"""
N_c = 20
a = qutip.destroy(N_c)
a_d = a.dag()
X_c = a + a_d
w_c = 1
A_l = 0.001
w_l = w_c
gam = 0.01
H = w_c * a_d * a
H_t = [H, [X_c, lambda t, args: args["A_l"] * np.cos(args["w_l"] * t)]]
psi0 = qutip.fock(N_c, 0)
args = {"A_l": A_l, "w_l": w_l}
c_ops = []
c_ops.append(np.sqrt(gam) * a)
t_l = np.linspace(0, 20 / gam, 2000)
expect_me = qutip.mesolve(H_t, psi0, t_l,
c_ops, [a_d * a], args=args).expect[0]
rho_ss = qutip.steadystate_floquet(H, c_ops,
A_l * X_c, w_l, n_it=3, sparse=sparse)
expect_ss = qutip.expect(a_d * a, rho_ss)
np.testing.assert_allclose(expect_me[-20:], expect_ss, atol=1e-3)
assert rho_ss.tr() == pytest.approx(1, abs=1e-15)
def test_bad_options_steadystate():
N = 4
a = qutip.destroy(N)
H = (a.dag() + a)
c_ops = [a]
with pytest.raises(ValueError):
qutip.steadystate(H, c_ops, method='not a method')
with pytest.raises(TypeError):
qutip.steadystate(H, c_ops, method='direct', bad_opt=True)
with pytest.raises(ValueError):
qutip.steadystate(H, c_ops, method='direct', solver='Error')
def test_bad_options_pseudo_inverse():
N = 4
a = qutip.destroy(N)
H = (a.dag() + a)
L = qutip.liouvillian(H, [a])
with pytest.raises(TypeError):
qutip.pseudo_inverse(L, method='splu', bad_opt=True)
with pytest.raises(ValueError):
qutip.pseudo_inverse(L, method='not a method', sparse=False)
with pytest.raises(ValueError):
qutip.pseudo_inverse(L, method='not a method')
def test_bad_options_build_preconditioner():
N = 4
a = qutip.destroy(N)
H = (a.dag() + a)
c_ops = [a]
with pytest.raises(TypeError):
qutip.build_preconditioner(H, c_ops, method='power', bad_opt=True)
with pytest.raises(ValueError):
qutip.build_preconditioner(H, c_ops, method='not a method')
def test_bad_system():
N = 4
a = qutip.destroy(N)
H = (a.dag() + a)
with pytest.raises(TypeError):
qutip.steadystate(H, [], method='direct')
with pytest.raises(TypeError):
qutip.steadystate(qutip.basis(N, N-1), [], method='direct')
|
import webapp2
from database import store_feed, store_backup_feed
class CleanAndMigrateHandler(webapp2.RequestHandler):
def get(self):
from google.appengine.ext import db
from database import FeedSource, Feed
# Any changes to this model are automatically propagated to DetailsBackup
# and might need to be (manually) propagated to the migration phase.
class Details(db.Model):
feed = db.TextProperty()
urls = db.TextProperty()
class DetailsBackup(Details):
deprecation_date = db.DateTimeProperty()
names = {}
for source in FeedSource.all():
            if str(source.name) in names:
                source.delete()
            else:
                names[str(source.name)] = source
self.response.write('Cleaned duplicate names.<br/>')
# Migrate Details and DetailsBackup to FeedSource
legacy_feed = Details.all().get()
legacy_feed_backup = DetailsBackup.all().get()
source = db.Query(FeedSource).filter('name =', 'codereviews').get()
        if source:
if legacy_feed_backup:
feed = Feed()
feed.xml = legacy_feed_backup.feed
feed.urls = legacy_feed_backup.urls
feed.source = source
store_feed(source, feed, source.last_fetched)
store_backup_feed(source, legacy_feed_backup.deprecation_date)
source.feed.get().delete()
self.response.write('Migrated a backup feed.<br/>')
if legacy_feed:
feed = Feed()
feed.xml = legacy_feed.feed
feed.urls = legacy_feed.urls
feed.source = source
store_feed(source, feed, source.last_fetched)
                self.response.write('Migrated a feed.<br/>')
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
Simulate Illumina sequencing reads.
"""
import os
import os.path as op
import random
import shutil
import sys
import logging
import math
from jcvi.formats.fasta import Fasta
from jcvi.apps.base import OptionParser, ActionDispatcher, sh
def main():
actions = (
('wgsim', 'sample paired end reads using dwgsim'),
('eagle', 'simulate Illumina reads using EAGLE'),
)
p = ActionDispatcher(actions)
p.dispatch(globals())
def add_sim_options(p):
"""
Add options shared by eagle or wgsim.
"""
p.add_option("--distance", default=500, type="int",
help="Outer distance between the two ends [default: %default]")
p.add_option("--readlen", default=150, type="int",
help="Length of the read")
p.set_depth(depth=10)
p.set_outfile(outfile=None)
def eagle(args):
"""
%prog eagle fastafile
"""
p = OptionParser(eagle.__doc__)
p.add_option("--share", default="/usr/local/share/EAGLE/",
help="Default EAGLE share path")
add_sim_options(p)
opts, args = p.parse_args(args)
if len(args) != 1:
sys.exit(not p.print_help())
fastafile, = args
share = opts.share
depth = opts.depth
readlen = opts.readlen
distance = opts.distance
pf = op.basename(fastafile).split(".")[0]
# Since EAGLE does not natively support read length other than 100bp and
# 250bp - for an arbitrary read length we need to generate a bunch of
# support files
# First file is the Runinfo
runinfo_readlen = "RunInfo_PairedReads2x{}Cycles1x1Tiles.xml".format(readlen)
if not op.exists(runinfo_readlen):
runinfo = op.join(share, "RunInfo/RunInfo_PairedReads2x251Cycles1x1Tiles.xml")
runinfo_xml = open(runinfo).read()
runinfo_xml = runinfo_xml.replace("251", str(readlen))\
.replace("252", str(readlen + 1))\
.replace("502", str(2 * readlen))
fw = open(runinfo_readlen, "w")
print >> fw, runinfo_xml.strip()
fw.close()
# Generate quality profiles
quality_file1 = "QualityTable.read1.length{}.qval".format(readlen)
quality_file2 = "QualityTable.read2.length{}.qval".format(readlen)
if not (op.exists(quality_file1) and op.exists(quality_file2)):
for i, qq in enumerate([quality_file1, quality_file2]):
cmd = "/usr/local/libexec/EAGLE/scaleQualityTable.pl"
cmd += " --input {}".format(op.join(share,
"QualityTables/DefaultQualityTable.read{}.length101.qval".format(i + 1)))
cmd += " --cycles {}".format(readlen)
cmd += " --output {}".format(qq)
sh(cmd, silent=True)
# Since distance is different from the default distribution which is
# centered around 319, we shift our peak to the new peak
template_lengths = op.join(share,
"TemplateLengthTables/DefaultTemplateLengthTable.tsv")
template_distance = "TemplateLengthTable{}.tsv".format(distance)
shift = distance - 319
if not op.exists(template_distance):
fp = open(template_lengths)
fw = open(template_distance, "w")
for row in fp:
size, counts = row.split()
size = int(size)
counts = int(counts)
size += shift
if size < readlen:
continue
print >> fw, "\t".join(str(x) for x in (size, counts))
fw.close()
# All done, let's simulate!
cmd = "configureEAGLE.pl"
cmd += " --reference-genome {}".format(fastafile)
cmd += " --coverage-depth {}".format(depth)
cmd += " --gc-coverage-fit-table {}".format(op.join(share,
"GcCoverageFitTables/Homo_sapiens.example1.tsv"))
cmd += " --run-info {}".format(runinfo_readlen)
cmd += " --quality-table {}".format(quality_file1)
cmd += " --quality-table {}".format(quality_file2)
cmd += " --template-length-table {}".format(template_distance)
cmd += " --random-seed {}".format(random.randint(1, 65535))
sh(cmd, silent=True)
# Retrieve results
outpf = opts.outfile or "{0}.{1}bp.{2}x".format(pf, distance, depth)
outpf += ".bwa"
cwd = os.getcwd()
eagle_dir = "EAGLE"
os.chdir(eagle_dir)
sh("make bam", silent=True)
# Convert BAM to FASTQ
from jcvi.formats.sam import fastq
a, b = fastq(["eagle.bam", outpf])
sh("mv {} {} ../".format(a, b))
os.chdir(cwd)
# Clean-up
shutil.rmtree(eagle_dir)
def wgsim(args):
"""
%prog wgsim fastafile
Run dwgsim on fastafile.
"""
p = OptionParser(wgsim.__doc__)
p.add_option("--erate", default=.01, type="float",
help="Base error rate of the read [default: %default]")
p.add_option("--noerrors", default=False, action="store_true",
help="Simulate reads with no errors [default: %default]")
p.add_option("--genomesize", type="int",
help="Genome size in Mb [default: estimate from data]")
add_sim_options(p)
opts, args = p.parse_args(args)
if len(args) != 1:
sys.exit(not p.print_help())
fastafile, = args
pf = op.basename(fastafile).split(".")[0]
genomesize = opts.genomesize
size = genomesize * 1000000 if genomesize else Fasta(fastafile).totalsize
depth = opts.depth
readlen = opts.readlen
readnum = int(math.ceil(size * depth / (2 * readlen)))
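    # e.g. a 1 Mb genome at 10x coverage with 2x150 bp reads needs
    # ceil(1e6 * 10 / 300) = 33,334 pairs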
distance = opts.distance
stdev = distance / 10
outpf = opts.outfile or "{0}.{1}bp.{2}x".format(pf, distance, depth)
logging.debug("Total genome size: {0} bp".format(size))
logging.debug("Target depth: {0}x".format(depth))
logging.debug("Number of read pairs (2x{0}): {1}".format(readlen, readnum))
if opts.noerrors:
opts.erate = 0
cmd = "dwgsim -e {0} -E {0}".format(opts.erate)
if opts.noerrors:
cmd += " -r 0 -R 0 -X 0 -y 0"
cmd += " -d {0} -s {1}".format(distance, stdev)
cmd += " -N {0} -1 {1} -2 {1}".format(readnum, readlen)
cmd += " {0} {1}".format(fastafile, outpf)
sh(cmd)
if __name__ == '__main__':
main()
|
import {splitEvery} from './splitEvery'
test('happy', () => {
expect(splitEvery(3, [1, 2, 3, 4, 5, 6, 7])).toEqual([
[1, 2, 3],
[4, 5, 6],
[7],
])
expect(splitEvery(3)('foobarbaz')).toEqual(['foo', 'bar', 'baz'])
})
test('with bad input', () => {
expect(() =>
expect(splitEvery(0)('foo')).toEqual(['f', 'o', 'o'])
).toThrowWithMessage(
Error,
'First argument to splitEvery must be a positive integer'
)
})
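// A minimal sketch consistent with the behaviour tested above (not the
// library source):
//
//   function splitEvery(n, list) {
//     if (list === undefined) return _list => splitEvery(n, _list)
//     if (n < 1) throw new Error('First argument to splitEvery must be a positive integer')
//     const result = []
//     for (let i = 0; i < list.length; i += n) result.push(list.slice(i, i + n))
//     return result
//   }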
|
#!/usr/bin/env python
##
# Copyright (c) 2014 Cyrus Daboo. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from __future__ import print_function
from pycalendar.datetime import DateTime
from pycalendar.icalendar.recurrence import Recurrence
from pycalendar.period import Period
import sys
def instances(start, rrule):
"""
Expand an RRULE.
"""
recur = Recurrence()
recur.parse(rrule)
start = DateTime.parseText(start)
end = start.duplicate()
end.offsetYear(100)
items = []
    period = Period(start, end)
    recur.expand(start, period, items)
print("DTSTART:{}".format(start))
print("RRULE:{}".format(rrule))
print("Instances: {}".format(", ".join(map(str, items))))
if __name__ == '__main__':
instances(sys.argv[1], sys.argv[2])
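# Example invocation (illustrative values):
#   python <this script> 20140101T120000Z "FREQ=MONTHLY;COUNT=3"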
|
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Internal dependencies
*/
import { renderWithTheme } from '../../../../../testUtils';
import formattedUsersObject from '../../../../../storybookUtils/formattedUsersObject';
import { VIEW_STYLE, STORY_STATUSES } from '../../../../../constants';
import LayoutProvider from '../../../../../components/layout/provider';
import Content from '../';
const fakeStories = [
{
id: 1,
status: 'publish',
title: 'Story A',
pages: [{ id: '10' }],
centerTargetAction: () => {},
bottomTargetAction: () => {},
},
{
id: 2,
status: 'draft',
title: 'Story B',
pages: [{ id: '20' }],
centerTargetAction: () => {},
bottomTargetAction: () => {},
},
{
id: 3,
status: 'publish',
title: 'Story C',
pages: [{ id: '30' }],
centerTargetAction: () => {},
bottomTargetAction: () => {},
},
];
jest.mock('../../../../../components/previewPage.js', () => () => null);
jest.mock('../../../../../app/font/fontProvider.js', () => ({ children }) =>
children
);
describe('My Stories <Content />', function () {
afterEach(() => {
jest.resetAllMocks();
});
it('should render the content grid with the correct story count.', function () {
const { getAllByTestId } = renderWithTheme(
<LayoutProvider>
<Content
filter={STORY_STATUSES[0]}
search={{ keyword: '' }}
stories={fakeStories}
users={formattedUsersObject}
page={{
requestNextPage: jest.fn,
}}
view={{
style: VIEW_STYLE.GRID,
pageSize: { width: 200, height: 300 },
}}
storyActions={{
createTemplateFromStory: jest.fn,
duplicateStory: jest.fn,
trashStory: jest.fn,
updateStory: jest.fn,
}}
/>
</LayoutProvider>
);
expect(getAllByTestId('grid-item')).toHaveLength(fakeStories.length);
});
it('should show "Create a story to get started!" if no stories are present.', function () {
const { getByText } = renderWithTheme(
<LayoutProvider>
<Content
filter={STORY_STATUSES[0]}
search={{ keyword: '' }}
stories={[]}
users={{}}
page={{
requestNextPage: jest.fn,
}}
view={{
style: VIEW_STYLE.GRID,
pageSize: { width: 200, height: 300 },
}}
storyActions={{
createTemplateFromStory: jest.fn,
duplicateStory: jest.fn,
trashStory: jest.fn,
updateStory: jest.fn,
}}
/>
</LayoutProvider>
);
expect(getByText('Create a story to get started!')).toBeInTheDocument();
});
it('should show "Sorry, we couldn\'t find any results matching "scooby dooby doo" if no stories are found for a search query are present.', function () {
const { getByText } = renderWithTheme(
<LayoutProvider>
<Content
filter={STORY_STATUSES[0]}
search={{ keyword: 'scooby dooby doo' }}
stories={[]}
users={{}}
page={{
requestNextPage: jest.fn,
}}
view={{
style: VIEW_STYLE.GRID,
pageSize: { width: 200, height: 300 },
}}
storyActions={{
createTemplateFromStory: jest.fn,
duplicateStory: jest.fn,
trashStory: jest.fn,
updateStory: jest.fn,
}}
/>
</LayoutProvider>
);
expect(
getByText(
'Sorry, we couldn\'t find any results matching "scooby dooby doo"'
)
).toBeInTheDocument();
});
});
|
//
// ChatExtent.h
// BGExamples
//
// Created by Sean Levin on 5/13/20.
// Copyright © 2020 Verizon Media. All rights reserved.
//
#import "BGGraph.h"
NS_ASSUME_NONNULL_BEGIN
@class ParticipantExtent;
// @tag::chat_extent[]
@class ChatExtent;
@interface ChatExtent : BGExtent<ChatExtent*>
// @end::chat_extent[]
@property (nonatomic, readonly) BGMoment<NSString *> *participantJoined;
@property (nonatomic, readonly) BGMoment<NSString *> *participantDisconnected;
// @tag::chat_participants_resources[]
@property (nonatomic, readonly) BGState<NSMutableDictionary *> *participants;
// @end::chat_participants_resources[]
@property (nonatomic, readonly) BGState<ParticipantExtent *> *pinnedParticipant;
@property (nonatomic, readonly) BGResource *participantsRelink;
@property (nonatomic, readonly) BGBehavior *pinnedBehavior;
@end
NS_ASSUME_NONNULL_END
|
// Copyright (c) m8mble 2020.
// SPDX-License-Identifier: BSL-1.0
#pragma once
#include "CaseStatus.h"
#include "Observation.h"
#include <chrono>
#include <cstdint>
#include <vector>
namespace clean_test::execute {
/// Collection of observation details about executing a single test-case.
class CaseResult {
public:
using Clock = std::chrono::system_clock;
using Duration = std::chrono::nanoseconds;
using TimePoint = std::chrono::time_point<Clock, Duration>;
using Observations = std::vector<Observation>;
    /// Detailed c'tor: initialize from @p name_path, @p execution_outcome, @p wall_time and @p observations.
///
/// Stores worst outcome of any @p observations and @p execution_outcome into @c m_outcome.
/// The @p execution_outcome is assumed to result from the test execution.
CaseResult(std::string name_path, CaseStatus execution_outcome, Duration wall_time, Observations observations);
std::string m_name_path; //!< Name of the test-case.
CaseStatus m_status; //!< Overall outcome of the test-case.
Duration m_wall_time; //!< Total execution (wall) time.
Observations m_observations; //!< Observation details (including passed ones).
};
}
|
//
// GMSProjection.h
// Google Maps SDK for iOS
//
// Copyright 2012 Google Inc.
//
// Usage of this SDK is subject to the Google Maps/Google Earth APIs Terms of
// Service: https://developers.google.com/maps/terms
//
#import <CoreLocation/CoreLocation.h>
/**
* GMSVisibleRegion is returned by visibleRegion on GMSProjection,
* it contains a set of four coordinates.
*/
typedef struct {
CLLocationCoordinate2D nearLeft;
CLLocationCoordinate2D nearRight;
CLLocationCoordinate2D farLeft;
CLLocationCoordinate2D farRight;
} GMSVisibleRegion;
/**
* Defines a mapping between Earth coordinates (CLLocationCoordinate2D) and
* coordinates in the map's view (CGPoint). A projection is constant and
* immutable, in that the mapping it embodies never changes. The mapping is not
* necessarily linear.
*
* Passing invalid Earth coordinates (i.e., per CLLocationCoordinate2DIsValid)
* to this object may result in undefined behavior.
*
* This class should not be instantiated directly, instead, obtained via
* projection on GMSMapView.
*/
@interface GMSProjection : NSObject
/** Maps an Earth coordinate to a point coordinate in the map's view. */
- (CGPoint)pointForCoordinate:(CLLocationCoordinate2D)coordinate;
/** Maps a point coordinate in the map's view to an Earth coordinate. */
- (CLLocationCoordinate2D)coordinateForPoint:(CGPoint)point;
/**
* Converts a distance in meters to content size. This is only accurate for
* small Earth distances, as we're using CGFloat for screen distances.
*/
- (CGFloat)pointsForMeters:(CGFloat)meters
atCoordinate:(CLLocationCoordinate2D)coordinate;
/**
* Returns whether a given coordinate (lat/lng) is contained within the
* projection.
*/
- (BOOL)containsCoordinate:(CLLocationCoordinate2D)coordinate;
/**
* Returns the region (four location coordinates) that is visible according to
* the projection. If padding was set on GMSMapView, this region takes the
* padding into account.
*
* The visible region can be non-rectangular. The result is undefined if the
* projection includes points that do not map to anywhere on the map (e.g.,
* camera sees outer space).
*/
- (GMSVisibleRegion)visibleRegion;
@end
|
import cv2
import numpy as np
import torch
import torch.nn as nn


def get_gaussian_kernel(k=3, mu=0, sigma=1, normalize=True):
# compute 1 dimension gaussian
gaussian_1D = np.linspace(-1, 1, k)
# compute a grid distance from center
x, y = np.meshgrid(gaussian_1D, gaussian_1D)
distance = (x ** 2 + y ** 2) ** 0.5
# compute the 2 dimension gaussian
gaussian_2D = np.exp(-(distance - mu) ** 2 / (2 * sigma ** 2))
gaussian_2D = gaussian_2D / (2 * np.pi *sigma **2)
# normalize part (mathematically)
if normalize:
gaussian_2D = gaussian_2D / np.sum(gaussian_2D)
return gaussian_2D
def get_sobel_kernel(k=3):
# get range
range = np.linspace(-(k // 2), k // 2, k)
# compute a grid the numerator and the axis-distances
x, y = np.meshgrid(range, range)
sobel_2D_numerator = x
sobel_2D_denominator = (x ** 2 + y ** 2)
sobel_2D_denominator[:, k // 2] = 1 # avoid division by zero
sobel_2D = sobel_2D_numerator / sobel_2D_denominator
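    # for k=3 this evaluates to the normalized Sobel-like x-kernel
    #   [[-0.5, 0, 0.5],
    #    [-1.0, 0, 1.0],
    #    [-0.5, 0, 0.5]]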
return sobel_2D
def get_thin_kernels(start=0, end=360, step=45):
k_thin = 3 # actual size of the directional kernel
# increase for a while to avoid interpolation when rotating
k_increased = k_thin + 2
# get 0° angle directional kernel
thin_kernel_0 = np.zeros((k_increased, k_increased))
thin_kernel_0[k_increased // 2, k_increased // 2] = 1
thin_kernel_0[k_increased // 2, k_increased // 2 + 1:] = -1
# rotate the 0° angle directional kernel to get the other ones
thin_kernels = []
for angle in range(start, end, step):
(h, w) = thin_kernel_0.shape
# get the center to not rotate around the (0, 0) coord point
center = (w // 2, h // 2)
# apply rotation
rotation_matrix = cv2.getRotationMatrix2D(center, angle, 1)
kernel_angle_increased = cv2.warpAffine(thin_kernel_0, rotation_matrix, (w, h), cv2.INTER_NEAREST)
        # get the k=3 kernel
kernel_angle = kernel_angle_increased[1:-1, 1:-1]
is_diag = (abs(kernel_angle) == 1) # because of the interpolation
kernel_angle = kernel_angle * is_diag # because of the interpolation
thin_kernels.append(kernel_angle)
return thin_kernels
class CannyFilter(nn.Module):
def __init__(self,
k_gaussian=3,
mu=0,
sigma=1,
k_sobel=3,
use_cuda=False):
super(CannyFilter, self).__init__()
# device
self.device = 'cuda' if use_cuda else 'cpu'
# gaussian
gaussian_2D = get_gaussian_kernel(k_gaussian, mu, sigma)
self.gaussian_filter = nn.Conv2d(in_channels=1,
out_channels=1,
kernel_size=k_gaussian,
padding=k_gaussian // 2,
bias=False)
        # copy the fixed kernels into the conv weights via .data so the
        # in-place write does not touch a leaf tensor tracked by autograd
        self.gaussian_filter.weight.data[:] = torch.from_numpy(gaussian_2D)
# sobel
sobel_2D = get_sobel_kernel(k_sobel)
self.sobel_filter_x = nn.Conv2d(in_channels=1,
out_channels=1,
kernel_size=k_sobel,
padding=k_sobel // 2,
bias=False)
        self.sobel_filter_x.weight.data[:] = torch.from_numpy(sobel_2D)
self.sobel_filter_y = nn.Conv2d(in_channels=1,
out_channels=1,
kernel_size=k_sobel,
padding=k_sobel // 2,
bias=False)
        self.sobel_filter_y.weight.data[:] = torch.from_numpy(sobel_2D.T)
# thin
thin_kernels = get_thin_kernels()
directional_kernels = np.stack(thin_kernels)
self.directional_filter = nn.Conv2d(in_channels=1,
out_channels=8,
kernel_size=thin_kernels[0].shape,
padding=thin_kernels[0].shape[-1] // 2,
bias=False)
        self.directional_filter.weight.data[:, 0] = torch.from_numpy(directional_kernels)
# hysteresis
hysteresis = np.ones((3, 3)) + 0.25
self.hysteresis = nn.Conv2d(in_channels=1,
out_channels=1,
kernel_size=3,
padding=1,
bias=False)
        self.hysteresis.weight.data[:] = torch.from_numpy(hysteresis)
def forward(self, img, low_threshold=None, high_threshold=None, hysteresis=False):
# set the setps tensors
B, C, H, W = img.shape
blurred = torch.zeros((B, C, H, W)).to(self.device)
grad_x = torch.zeros((B, 1, H, W)).to(self.device)
grad_y = torch.zeros((B, 1, H, W)).to(self.device)
grad_magnitude = torch.zeros((B, 1, H, W)).to(self.device)
grad_orientation = torch.zeros((B, 1, H, W)).to(self.device)
# gaussian
for c in range(C):
blurred[:, c:c+1] = self.gaussian_filter(img[:, c:c+1])
grad_x = grad_x + self.sobel_filter_x(blurred[:, c:c+1])
grad_y = grad_y + self.sobel_filter_y(blurred[:, c:c+1])
# thick edges
grad_x, grad_y = grad_x / C, grad_y / C
grad_magnitude = (grad_x ** 2 + grad_y ** 2) ** 0.5
grad_orientation = torch.atan(grad_y / grad_x)
grad_orientation = grad_orientation * (360 / np.pi) + 180 # convert to degree
grad_orientation = torch.round(grad_orientation / 45) * 45 # keep a split by 45
# thin edges
directional = self.directional_filter(grad_magnitude)
# get indices of positive and negative directions
positive_idx = (grad_orientation / 45) % 8
negative_idx = ((grad_orientation / 45) + 4) % 8
thin_edges = grad_magnitude.clone()
# non maximum suppression direction by direction
for pos_i in range(4):
neg_i = pos_i + 4
# get the oriented grad for the angle
is_oriented_i = (positive_idx == pos_i) * 1
is_oriented_i = is_oriented_i + (positive_idx == neg_i) * 1
pos_directional = directional[:, pos_i]
neg_directional = directional[:, neg_i]
selected_direction = torch.stack([pos_directional, neg_directional])
# get the local maximum pixels for the angle
is_max = selected_direction.min(dim=0)[0] > 0.0
is_max = torch.unsqueeze(is_max, dim=1)
# apply non maximum suppression
to_remove = (is_max == 0) * 1 * (is_oriented_i) > 0
thin_edges[to_remove] = 0.0
# thresholds
if low_threshold is not None:
low = thin_edges > low_threshold
if high_threshold is not None:
high = thin_edges > high_threshold
# get black/gray/white only
thin_edges = low * 0.5 + high * 0.5
if hysteresis:
# get weaks and check if they are high or not
weak = (thin_edges == 0.5) * 1
weak_is_high = (self.hysteresis(thin_edges) > 1) * weak
thin_edges = high * 1 + weak_is_high * 1
else:
thin_edges = low * 1
        return blurred, grad_x, grad_y, grad_magnitude, grad_orientation, thin_edges
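

# Minimal usage sketch (illustrative; assumes `img` is a float tensor of
# shape [B, C, H, W] scaled to [0, 1] and that the thresholds suit the data):
#
#   canny = CannyFilter(use_cuda=False)
#   blurred, gx, gy, mag, orient, edges = canny(
#       img, low_threshold=0.1, high_threshold=0.3, hysteresis=True)
|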
/* Copyright 2017 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_DEPTHWISECONV_FLOAT_H_
#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_DEPTHWISECONV_FLOAT_H_
#include "tensorflow/lite/kernels/internal/common.h"
#include "tensorflow/lite/kernels/internal/compatibility.h"
#include "tensorflow/lite/kernels/internal/types.h"
#include "custom_quantizer.h"
namespace tflite {
namespace reference_ops {
inline void DepthwiseConv(
const DepthwiseParams& params, const RuntimeShape& input_shape,
const float* input_data, const RuntimeShape& filter_shape,
const float* filter_data, const RuntimeShape& bias_shape,
const float* bias_data, const RuntimeShape& output_shape,
float* output_data) {
const int stride_width = params.stride_width;
const int stride_height = params.stride_height;
const int dilation_width_factor = params.dilation_width_factor;
const int dilation_height_factor = params.dilation_height_factor;
const int pad_width = params.padding_values.width;
const int pad_height = params.padding_values.height;
const int depth_multiplier = params.depth_multiplier;
const float output_activation_min = params.float_activation_min;
const float output_activation_max = params.float_activation_max;
TFLITE_DCHECK_EQ(input_shape.DimensionsCount(), 4);
TFLITE_DCHECK_EQ(filter_shape.DimensionsCount(), 4);
TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 4);
const int batches = MatchingDim(input_shape, 0, output_shape, 0);
const int output_depth = MatchingDim(filter_shape, 3, output_shape, 3);
const int input_height = input_shape.Dims(1);
const int input_width = input_shape.Dims(2);
const int input_depth = input_shape.Dims(3);
const int filter_height = filter_shape.Dims(1);
const int filter_width = filter_shape.Dims(2);
const int output_height = output_shape.Dims(1);
const int output_width = output_shape.Dims(2);
TFLITE_DCHECK_EQ(output_depth, input_depth * depth_multiplier);
TFLITE_DCHECK_EQ(bias_shape.FlatSize(), output_depth);
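  // Custom hook (not part of the stock TFLite reference kernel): replace the
  // filter weights with their quantized version before the float convolution.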
filter_data = custom_float::TensorQuantizer_quantize(filter_data, filter_shape.FlatSize());
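  // Depthwise conv maps each input channel ic to depth_multiplier output
  // channels: oc = m + ic * depth_multiplier. E.g. with input_depth = 3 and
  // depth_multiplier = 2, input channel 1 feeds output channels 2 and 3.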
for (int b = 0; b < batches; ++b) {
for (int out_y = 0; out_y < output_height; ++out_y) {
for (int out_x = 0; out_x < output_width; ++out_x) {
for (int ic = 0; ic < input_depth; ++ic) {
for (int m = 0; m < depth_multiplier; m++) {
const int oc = m + ic * depth_multiplier;
const int in_x_origin = (out_x * stride_width) - pad_width;
const int in_y_origin = (out_y * stride_height) - pad_height;
float total = 0.f;
for (int filter_y = 0; filter_y < filter_height; ++filter_y) {
for (int filter_x = 0; filter_x < filter_width; ++filter_x) {
const int in_x = in_x_origin + dilation_width_factor * filter_x;
const int in_y =
in_y_origin + dilation_height_factor * filter_y;
// If the location is outside the bounds of the input image,
// use zero as a default value.
if ((in_x >= 0) && (in_x < input_width) && (in_y >= 0) &&
(in_y < input_height)) {
float input_value =
input_data[Offset(input_shape, b, in_y, in_x, ic)];
float filter_value = filter_data[Offset(
filter_shape, 0, filter_y, filter_x, oc)];
total += (input_value * filter_value);
}
}
}
float bias_value = 0.0f;
if (bias_data) {
bias_value = bias_data[oc];
}
output_data[Offset(output_shape, b, out_y, out_x, oc)] =
ActivationFunctionWithMinMax(total + bias_value,
output_activation_min,
output_activation_max);
}
}
}
}
}
}
} // end namespace reference_ops
} // end namespace tflite
#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_DEPTHWISECONV_FLOAT_H_
|
# coding: utf-8
"""
Argo Server API
You can get examples of requests and responses by using the CLI with `--gloglevel=9`, e.g. `argo list --gloglevel=9` # noqa: E501
The version of the OpenAPI document: v2.11.8
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from argo.workflows.client.configuration import Configuration
class V1alpha1WorkflowSuspendRequest(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'name': 'str',
'namespace': 'str'
}
attribute_map = {
'name': 'name',
'namespace': 'namespace'
}
def __init__(self, name=None, namespace=None, local_vars_configuration=None): # noqa: E501
"""V1alpha1WorkflowSuspendRequest - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._name = None
self._namespace = None
self.discriminator = None
if name is not None:
self.name = name
if namespace is not None:
self.namespace = namespace
@property
def name(self):
"""Gets the name of this V1alpha1WorkflowSuspendRequest. # noqa: E501
:return: The name of this V1alpha1WorkflowSuspendRequest. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this V1alpha1WorkflowSuspendRequest.
:param name: The name of this V1alpha1WorkflowSuspendRequest. # noqa: E501
:type: str
"""
self._name = name
@property
def namespace(self):
"""Gets the namespace of this V1alpha1WorkflowSuspendRequest. # noqa: E501
:return: The namespace of this V1alpha1WorkflowSuspendRequest. # noqa: E501
:rtype: str
"""
return self._namespace
@namespace.setter
def namespace(self, namespace):
"""Sets the namespace of this V1alpha1WorkflowSuspendRequest.
:param namespace: The namespace of this V1alpha1WorkflowSuspendRequest. # noqa: E501
:type: str
"""
self._namespace = namespace
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1alpha1WorkflowSuspendRequest):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, V1alpha1WorkflowSuspendRequest):
return True
return self.to_dict() != other.to_dict()
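# Usage sketch (illustrative, not part of the generated file): build a suspend
# request and serialize it the way the generated API client does.
if __name__ == '__main__':
    req = V1alpha1WorkflowSuspendRequest(name='my-workflow', namespace='argo')
    print(req.to_dict())  # {'name': 'my-workflow', 'namespace': 'argo'}
    print(req == V1alpha1WorkflowSuspendRequest(name='my-workflow',
                                                namespace='argo'))  # True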
|
#!/usr/bin/env node
/*jslint node: true, unused:true */
'use strict';
var shutdown = require('shutdown');
var roomService = require('gitter-web-rooms');
var troupeService = require('gitter-web-rooms/lib/troupe-service');
var Promise = require('bluebird');
require('../../server/event-listeners').install();
var opts = require('yargs')
.option('uri', {
alias: 'u',
required: true,
description: 'Uri of the room to delete'
})
.help('help')
.alias('help', 'h').argv;
var readline = require('readline');
var rl = readline.createInterface({
input: process.stdin,
output: process.stdout
});
troupeService
.findByUri(opts.uri)
.then(function(room) {
return new Promise(function(resolve, reject) {
rl.question(
'Are you sure you want to delete ' +
room.uri +
' with ' +
room.userCount +
' users in it? (yes/no)',
function(answer) {
rl.close();
console.log(answer);
if (answer === 'yes') {
resolve(room);
} else {
reject(new Error('Answered no'));
}
}
);
});
})
.then(function(room) {
return roomService.deleteRoom(room);
})
.then(function() {
console.log('DONE. finishing up.');
})
// wait 5 seconds to allow for asynchronous `event-listeners` to finish
// https://github.com/troupe/gitter-webapp/issues/580#issuecomment-147445395
// https://gitlab.com/gitterHQ/webapp/merge_requests/1605#note_222861592
.then(() => {
console.log(`Waiting 5 seconds to allow for the asynchronous \`event-listeners\` to finish...`);
return new Promise(resolve => setTimeout(resolve, 5000));
})
.then(function() {
shutdown.shutdownGracefully();
})
.catch(function(err) {
console.error('Error: ' + err, err);
console.log(err.stack);
shutdown.shutdownGracefully(1);
})
.done();
|
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @emails react-core
*/
'use strict';
let React;
let ReactDOM;
let ReactDOMServer;
let Scheduler;
let act;
// These tests rely both on ReactDOMServer and ReactDOM.
// If a test only needs ReactDOMServer, put it in ReactServerRendering-test instead.
describe('ReactDOMServerHydration', () => {
beforeEach(() => {
jest.resetModules();
React = require('react');
ReactDOM = require('react-dom');
ReactDOMServer = require('react-dom/server');
Scheduler = require('scheduler');
act = require('react-dom/test-utils').act;
});
it('should have the correct mounting behavior (old hydrate API)', () => {
let mountCount = 0;
let numClicks = 0;
class TestComponent extends React.Component {
componentDidMount() {
mountCount++;
}
click = () => {
numClicks++;
};
render() {
return (
<span ref="span" onClick={this.click}>
Name: {this.props.name}
</span>
);
}
}
const element = document.createElement('div');
document.body.appendChild(element);
try {
ReactDOM.render(<TestComponent />, element);
let lastMarkup = element.innerHTML;
// Exercise the update path. Markup should not change,
// but some lifecycle methods should be run again.
ReactDOM.render(<TestComponent name="x" />, element);
expect(mountCount).toEqual(1);
// Unmount and remount. We should get another mount event and
// we should get different markup, as the IDs are unique each time.
ReactDOM.unmountComponentAtNode(element);
expect(element.innerHTML).toEqual('');
ReactDOM.render(<TestComponent name="x" />, element);
expect(mountCount).toEqual(2);
expect(element.innerHTML).not.toEqual(lastMarkup);
// Now kill the node and render it on top of server-rendered markup, as if
// we used server rendering. We should mount again, but the markup should
// be unchanged. We will append a sentinel at the end of innerHTML to be
// sure that innerHTML was not changed.
ReactDOM.unmountComponentAtNode(element);
expect(element.innerHTML).toEqual('');
lastMarkup = ReactDOMServer.renderToString(<TestComponent name="x" />);
element.innerHTML = lastMarkup;
let instance;
expect(() => {
instance = ReactDOM.render(<TestComponent name="x" />, element);
}).toLowPriorityWarnDev(
'render(): Calling ReactDOM.render() to hydrate server-rendered markup ' +
'will stop working in React v17. Replace the ReactDOM.render() call ' +
'with ReactDOM.hydrate() if you want React to attach to the server HTML.',
{withoutStack: true},
);
expect(mountCount).toEqual(3);
expect(element.innerHTML).toBe(lastMarkup);
// Ensure the events system works after mount into server markup
expect(numClicks).toEqual(0);
instance.refs.span.click();
expect(numClicks).toEqual(1);
ReactDOM.unmountComponentAtNode(element);
expect(element.innerHTML).toEqual('');
// Now simulate a situation where the app is not idempotent. React should
// warn but do the right thing.
element.innerHTML = lastMarkup;
expect(() => {
instance = ReactDOM.render(<TestComponent name="y" />, element);
}).toWarnDev('Text content did not match. Server: "x" Client: "y"', {
withoutStack: true,
});
expect(mountCount).toEqual(4);
expect(element.innerHTML.length > 0).toBe(true);
expect(element.innerHTML).not.toEqual(lastMarkup);
// Ensure the events system works after markup mismatch.
expect(numClicks).toEqual(1);
instance.refs.span.click();
expect(numClicks).toEqual(2);
} finally {
document.body.removeChild(element);
}
});
it('should have the correct mounting behavior (new hydrate API)', () => {
let mountCount = 0;
let numClicks = 0;
class TestComponent extends React.Component {
componentDidMount() {
mountCount++;
}
click = () => {
numClicks++;
};
render() {
return (
<span ref="span" onClick={this.click}>
Name: {this.props.name}
</span>
);
}
}
const element = document.createElement('div');
document.body.appendChild(element);
try {
ReactDOM.render(<TestComponent />, element);
let lastMarkup = element.innerHTML;
// Exercise the update path. Markup should not change,
// but some lifecycle methods should be run again.
ReactDOM.render(<TestComponent name="x" />, element);
expect(mountCount).toEqual(1);
// Unmount and remount. We should get another mount event and
// we should get different markup, as the IDs are unique each time.
ReactDOM.unmountComponentAtNode(element);
expect(element.innerHTML).toEqual('');
ReactDOM.render(<TestComponent name="x" />, element);
expect(mountCount).toEqual(2);
expect(element.innerHTML).not.toEqual(lastMarkup);
// Now kill the node and render it on top of server-rendered markup, as if
// we used server rendering. We should mount again, but the markup should
// be unchanged. We will append a sentinel at the end of innerHTML to be
// sure that innerHTML was not changed.
ReactDOM.unmountComponentAtNode(element);
expect(element.innerHTML).toEqual('');
lastMarkup = ReactDOMServer.renderToString(<TestComponent name="x" />);
element.innerHTML = lastMarkup;
let instance = ReactDOM.hydrate(<TestComponent name="x" />, element);
expect(mountCount).toEqual(3);
expect(element.innerHTML).toBe(lastMarkup);
// Ensure the events system works after mount into server markup
expect(numClicks).toEqual(0);
instance.refs.span.click();
expect(numClicks).toEqual(1);
ReactDOM.unmountComponentAtNode(element);
expect(element.innerHTML).toEqual('');
// Now simulate a situation where the app is not idempotent. React should
// warn but do the right thing.
element.innerHTML = lastMarkup;
expect(() => {
instance = ReactDOM.hydrate(<TestComponent name="y" />, element);
}).toWarnDev('Text content did not match. Server: "x" Client: "y"', {
withoutStack: true,
});
expect(mountCount).toEqual(4);
expect(element.innerHTML.length > 0).toBe(true);
expect(element.innerHTML).not.toEqual(lastMarkup);
// Ensure the events system works after markup mismatch.
expect(numClicks).toEqual(1);
instance.refs.span.click();
expect(numClicks).toEqual(2);
} finally {
document.body.removeChild(element);
}
});
// We have a polyfill for autoFocus on the client, but we intentionally don't
// want it to call focus() when hydrating because this can mess up existing
// focus before the JS has loaded.
it('should emit autofocus on the server but not focus() when hydrating', () => {
const element = document.createElement('div');
element.innerHTML = ReactDOMServer.renderToString(
<input autoFocus={true} />,
);
expect(element.firstChild.autofocus).toBe(true);
// It should not be called on mount.
element.firstChild.focus = jest.fn();
ReactDOM.hydrate(<input autoFocus={true} />, element);
expect(element.firstChild.focus).not.toHaveBeenCalled();
// Or during an update.
ReactDOM.render(<input autoFocus={true} />, element);
expect(element.firstChild.focus).not.toHaveBeenCalled();
});
it('should not focus on either server or client with autofocus={false}', () => {
const element = document.createElement('div');
element.innerHTML = ReactDOMServer.renderToString(
<input autoFocus={false} />,
);
expect(element.firstChild.autofocus).toBe(false);
element.firstChild.focus = jest.fn();
ReactDOM.hydrate(<input autoFocus={false} />, element);
expect(element.firstChild.focus).not.toHaveBeenCalled();
ReactDOM.render(<input autoFocus={false} />, element);
expect(element.firstChild.focus).not.toHaveBeenCalled();
});
// Regression test for https://github.com/facebook/react/issues/11726
it('should not focus on either server or client with autofocus={false} even if there is a markup mismatch', () => {
const element = document.createElement('div');
element.innerHTML = ReactDOMServer.renderToString(
<button autoFocus={false}>server</button>,
);
expect(element.firstChild.autofocus).toBe(false);
element.firstChild.focus = jest.fn();
expect(() =>
ReactDOM.hydrate(<button autoFocus={false}>client</button>, element),
).toWarnDev(
'Warning: Text content did not match. Server: "server" Client: "client"',
{withoutStack: true},
);
expect(element.firstChild.focus).not.toHaveBeenCalled();
});
it('should warn when the style property differs', () => {
const element = document.createElement('div');
element.innerHTML = ReactDOMServer.renderToString(
<div style={{textDecoration: 'none', color: 'black', height: '10px'}} />,
);
expect(element.firstChild.style.textDecoration).toBe('none');
expect(element.firstChild.style.color).toBe('black');
expect(() =>
ReactDOM.hydrate(
<div
style={{textDecoration: 'none', color: 'white', height: '10px'}}
/>,
element,
),
).toWarnDev(
'Warning: Prop `style` did not match. Server: ' +
'"text-decoration:none;color:black;height:10px" Client: ' +
'"text-decoration:none;color:white;height:10px"',
{withoutStack: true},
);
});
it('should not warn when the style property differs on whitespace or order in IE', () => {
document.documentMode = 11;
jest.resetModules();
React = require('react');
ReactDOM = require('react-dom');
ReactDOMServer = require('react-dom/server');
try {
const element = document.createElement('div');
// Simulate IE normalizing the style attribute. IE makes it equal to
// what's available under `node.style.cssText`.
element.innerHTML =
'<div style="height: 10px; color: black; text-decoration: none;" data-reactroot=""></div>';
// We don't expect to see false positive warnings.
// https://github.com/facebook/react/issues/11807
ReactDOM.hydrate(
<div
style={{textDecoration: 'none', color: 'black', height: '10px'}}
/>,
element,
);
} finally {
delete document.documentMode;
}
});
it('should warn when the style property differs on whitespace in non-IE browsers', () => {
const element = document.createElement('div');
element.innerHTML =
'<div style="text-decoration: none; color: black; height: 10px;" data-reactroot=""></div>';
expect(() =>
ReactDOM.hydrate(
<div
style={{textDecoration: 'none', color: 'black', height: '10px'}}
/>,
element,
),
).toWarnDev(
'Warning: Prop `style` did not match. Server: ' +
'"text-decoration: none; color: black; height: 10px;" Client: ' +
'"text-decoration:none;color:black;height:10px"',
{withoutStack: true},
);
});
it('should throw rendering portals on the server', () => {
const div = document.createElement('div');
expect(() => {
ReactDOMServer.renderToString(
<div>{ReactDOM.createPortal(<div />, div)}</div>,
);
}).toThrow(
'Portals are not currently supported by the server renderer. ' +
'Render them conditionally so that they only appear on the client render.',
);
});
it('should be able to render and hydrate Mode components', () => {
class ComponentWithWarning extends React.Component {
componentWillMount() {
// Expected warning
}
render() {
return 'Hi';
}
}
const markup = (
<React.StrictMode>
<ComponentWithWarning />
</React.StrictMode>
);
const element = document.createElement('div');
expect(() => {
element.innerHTML = ReactDOMServer.renderToString(markup);
}).toLowPriorityWarnDev(['componentWillMount has been renamed'], {
withoutStack: true,
});
expect(element.textContent).toBe('Hi');
expect(() => {
ReactDOM.hydrate(markup, element);
}).toLowPriorityWarnDev(['componentWillMount has been renamed'], {
withoutStack: true,
});
expect(element.textContent).toBe('Hi');
});
it('should be able to render and hydrate forwardRef components', () => {
const FunctionComponent = ({label, forwardedRef}) => (
<div ref={forwardedRef}>{label}</div>
);
const WrappedFunctionComponent = React.forwardRef((props, ref) => (
<FunctionComponent {...props} forwardedRef={ref} />
));
const ref = React.createRef();
const markup = <WrappedFunctionComponent ref={ref} label="Hi" />;
const element = document.createElement('div');
element.innerHTML = ReactDOMServer.renderToString(markup);
expect(element.textContent).toBe('Hi');
expect(ref.current).toBe(null);
ReactDOM.hydrate(markup, element);
expect(element.textContent).toBe('Hi');
expect(ref.current.tagName).toBe('DIV');
});
it('should be able to render and hydrate Profiler components', () => {
const callback = jest.fn();
const markup = (
<React.Profiler id="profiler" onRender={callback}>
<div>Hi</div>
</React.Profiler>
);
const element = document.createElement('div');
element.innerHTML = ReactDOMServer.renderToString(markup);
expect(element.textContent).toBe('Hi');
expect(callback).not.toHaveBeenCalled();
ReactDOM.hydrate(markup, element);
expect(element.textContent).toBe('Hi');
if (__DEV__) {
expect(callback).toHaveBeenCalledTimes(1);
const [id, phase] = callback.mock.calls[0];
expect(id).toBe('profiler');
expect(phase).toBe('mount');
} else {
expect(callback).toHaveBeenCalledTimes(0);
}
});
// Regression test for https://github.com/facebook/react/issues/11423
it('should ignore noscript content on the client and not warn about mismatches', () => {
const callback = jest.fn();
const TestComponent = ({onRender}) => {
onRender();
return <div>Enable JavaScript to run this app.</div>;
};
const markup = (
<noscript>
<TestComponent onRender={callback} />
</noscript>
);
const element = document.createElement('div');
element.innerHTML = ReactDOMServer.renderToString(markup);
expect(callback).toHaveBeenCalledTimes(1);
expect(element.textContent).toBe(
'<div>Enable JavaScript to run this app.</div>',
);
// On the client we want to keep the existing markup, but not render the
// actual elements for performance reasons and to avoid for example
// downloading images. This should also not warn for hydration mismatches.
ReactDOM.hydrate(markup, element);
expect(callback).toHaveBeenCalledTimes(1);
expect(element.textContent).toBe(
'<div>Enable JavaScript to run this app.</div>',
);
});
it('should be able to use lazy components after hydrating', async () => {
const Lazy = React.lazy(
() =>
new Promise(resolve => {
setTimeout(
() =>
resolve({
default: function World() {
return 'world';
},
}),
1000,
);
}),
);
class HelloWorld extends React.Component {
state = {isClient: false};
componentDidMount() {
this.setState({
isClient: true,
});
}
render() {
return (
<div>
Hello{' '}
{this.state.isClient && (
<React.Suspense fallback="loading">
<Lazy />
</React.Suspense>
)}
</div>
);
}
}
const element = document.createElement('div');
element.innerHTML = ReactDOMServer.renderToString(<HelloWorld />);
expect(element.textContent).toBe('Hello ');
ReactDOM.hydrate(<HelloWorld />, element);
expect(element.textContent).toBe('Hello loading');
jest.runAllTimers();
await Promise.resolve();
Scheduler.unstable_flushAll();
expect(element.textContent).toBe('Hello world');
});
it('does not re-enter hydration after committing the first one', () => {
let finalHTML = ReactDOMServer.renderToString(<div />);
let container = document.createElement('div');
container.innerHTML = finalHTML;
let root = ReactDOM.unstable_createRoot(container, {hydrate: true});
root.render(<div />);
Scheduler.unstable_flushAll();
root.render(null);
Scheduler.unstable_flushAll();
// This should not reenter hydration state and therefore not trigger hydration
// warnings.
root.render(<div />);
Scheduler.unstable_flushAll();
});
it('does not invoke an event on a concurrent hydrating node until it commits', () => {
function Sibling({text}) {
Scheduler.unstable_yieldValue('Sibling');
return <span>Sibling</span>;
}
function Sibling2({text}) {
Scheduler.unstable_yieldValue('Sibling2');
return null;
}
let clicks = 0;
function Button() {
Scheduler.unstable_yieldValue('Button');
let [clicked, setClicked] = React.useState(false);
if (clicked) {
return null;
}
return (
<a
onClick={() => {
setClicked(true);
clicks++;
}}>
Click me
</a>
);
}
function App() {
return (
<div>
<Button />
<Sibling />
<Sibling2 />
</div>
);
}
let finalHTML = ReactDOMServer.renderToString(<App />);
let container = document.createElement('div');
container.innerHTML = finalHTML;
expect(Scheduler).toHaveYielded(['Button', 'Sibling', 'Sibling2']);
// We need this to be in the document since we'll dispatch events on it.
document.body.appendChild(container);
let a = container.getElementsByTagName('a')[0];
// Hydrate asynchronously.
let root = ReactDOM.unstable_createRoot(container, {hydrate: true});
root.render(<App />);
// Flush part way through the render.
if (__DEV__) {
// In DEV effects gets double invoked.
expect(Scheduler).toFlushAndYieldThrough(['Button', 'Button', 'Sibling']);
} else {
expect(Scheduler).toFlushAndYieldThrough(['Button', 'Sibling']);
}
expect(container.textContent).toBe('Click meSibling');
// We're now partially hydrated.
a.click();
// Clicking should not invoke the event yet because we haven't committed
// the hydration yet.
expect(clicks).toBe(0);
// Finish the rest of the hydration.
expect(Scheduler).toFlushAndYield(['Sibling2']);
// TODO: With selective hydration the event should've been replayed
// but for now we'll have to issue it again.
act(() => {
a.click();
});
expect(clicks).toBe(1);
expect(container.textContent).toBe('Sibling');
document.body.removeChild(container);
});
it('does not invoke an event on a parent tree when a subtree is hydrating', () => {
let clicks = 0;
let childSlotRef = React.createRef();
function Parent() {
return <div onClick={() => clicks++} ref={childSlotRef} />;
}
function App() {
return (
<div>
<a>Click me</a>
</div>
);
}
let finalHTML = ReactDOMServer.renderToString(<App />);
let parentContainer = document.createElement('div');
let childContainer = document.createElement('div');
// We need this to be in the document since we'll dispatch events on it.
document.body.appendChild(parentContainer);
// We're going to use a different root as a parent.
// This lets us detect whether an event goes through React's event system.
let parentRoot = ReactDOM.unstable_createRoot(parentContainer);
parentRoot.render(<Parent />);
Scheduler.unstable_flushAll();
childSlotRef.current.appendChild(childContainer);
childContainer.innerHTML = finalHTML;
let a = childContainer.getElementsByTagName('a')[0];
// Hydrate asynchronously.
let root = ReactDOM.unstable_createRoot(childContainer, {hydrate: true});
root.render(<App />);
// Nothing has rendered so far.
a.click();
expect(clicks).toBe(0);
Scheduler.unstable_flushAll();
    // We're now fully hydrated.
// TODO: With selective hydration the event should've been replayed
// but for now we'll have to issue it again.
act(() => {
a.click();
});
expect(clicks).toBe(1);
document.body.removeChild(parentContainer);
});
});
|
const path = require("path")
const setupServer = require("../../../helpers/setup-server")
const { useApi } = require("../../../helpers/use-api")
const { initDb, useDb } = require("../../../helpers/use-db")
const adminSeeder = require("../../helpers/admin-seeder")
const {
simpleOrderFactory,
simpleShippingOptionFactory,
simpleProductFactory,
} = require("../../factories")
describe("Swaps", () => {
let medusaProcess
let dbConnection
const doAfterEach = async () => {
const db = useDb()
return await db.teardown()
}
beforeAll(async () => {
const cwd = path.resolve(path.join(__dirname, "..", ".."))
try {
dbConnection = await initDb({ cwd })
medusaProcess = await setupServer({ cwd })
} catch (error) {
console.log(error)
}
})
afterAll(async () => {
const db = useDb()
await db.shutdown()
medusaProcess.kill()
})
afterEach(async () => {
return await doAfterEach()
})
test("creates a swap", async () => {
await adminSeeder(dbConnection)
const order = await createReturnableOrder(dbConnection)
const api = useApi()
const response = await api.post(
`/admin/orders/${order.id}/swaps`,
{
additional_items: [
{
variant_id: "variant-2",
quantity: 1,
},
],
return_items: [
{
item_id: "test-item",
quantity: 1,
},
],
},
{
headers: {
authorization: "Bearer test_token",
},
}
)
expect(response.status).toEqual(200)
const cartId = response.data.order.swaps[0].cart_id
/*
* The return line item should use its tax_lines; the new line doesn't have
* a tax line and uses the default region tax of 12.5
*
* Return line: 1000 * 1.2 = -1200
* New line: 1000 * 1.125 = 1125
* -
* Difference should be -75
*/
const cartRes = await api.get(`/store/carts/${cartId}`)
expect(cartRes.status).toEqual(200)
expect(cartRes.data.cart.subtotal).toEqual(0)
expect(cartRes.data.cart.total).toEqual(-75)
expect(cartRes.data.cart.tax_total).toEqual(-75)
})
test("creates a swap w. shipping", async () => {
await adminSeeder(dbConnection)
const order = await createReturnableOrder(dbConnection)
const returnOption = await simpleShippingOptionFactory(dbConnection, {
name: "Return method",
region_id: "test-region",
is_return: true,
price: 100,
})
const api = useApi()
const response = await api.post(
`/admin/orders/${order.id}/swaps`,
{
additional_items: [
{
variant_id: "variant-2",
quantity: 1,
},
],
return_shipping: {
option_id: returnOption.id,
},
return_items: [
{
item_id: "test-item",
quantity: 1,
},
],
},
{
headers: {
authorization: "Bearer test_token",
},
}
)
expect(response.status).toEqual(200)
const cartId = response.data.order.swaps[0].cart_id
/*
* The return line item should use its tax_lines; the new line doesn't have
* a tax line and uses the default region tax of 12.5
*
* Return line: 1000 * 1.2 = -1200
* New line: 1000 * 1.125 = 1125
* Shipping line: 100 * 1.125 = 112.5 ~ 113
* -
* Difference should be 38
*/
const cartRes = await api.get(`/store/carts/${cartId}`)
expect(cartRes.status).toEqual(200)
expect(cartRes.data.cart.subtotal).toEqual(100)
expect(cartRes.data.cart.tax_total).toEqual(-62)
expect(cartRes.data.cart.total).toEqual(38)
})
test("retrieves a swap w. shipping", async () => {
await adminSeeder(dbConnection)
const order = await createReturnableOrder(dbConnection)
const returnOption = await simpleShippingOptionFactory(dbConnection, {
name: "Return method",
region_id: "test-region",
is_return: true,
price: 100,
})
const api = useApi()
const response = await api.post(
`/admin/orders/${order.id}/swaps`,
{
additional_items: [
{
variant_id: "variant-2",
quantity: 1,
},
],
return_shipping: {
option_id: returnOption.id,
},
return_items: [
{
item_id: "test-item",
quantity: 1,
},
],
},
{
headers: {
authorization: "Bearer test_token",
},
}
)
expect(response.status).toEqual(200)
const swapRes = await api.get(
`/admin/swaps/${response.data.order.swaps[0].id}`,
{
headers: {
authorization: "Bearer test_token",
},
}
)
expect(swapRes.status).toEqual(200)
expect(swapRes.data.swap.cart.subtotal).toEqual(100)
expect(swapRes.data.swap.cart.tax_total).toEqual(-62)
expect(swapRes.data.swap.cart.total).toEqual(38)
})
test("creates a swap from storefront", async () => {
const order = await createReturnableOrder(dbConnection)
const api = useApi()
const response = await api.post(`/store/swaps`, {
order_id: order.id,
additional_items: [
{
variant_id: "variant-2",
quantity: 1,
},
],
return_items: [
{
item_id: "test-item",
quantity: 1,
},
],
})
expect(response.status).toEqual(200)
const cartId = response.data.swap.cart_id
/*
* The return line item should use its tax_lines; the new line doesn't have
* a tax line and uses the default region tax of 12.5
*
* Return line: 1000 * 1.2 = -1200
* New line: 1000 * 1.125 = 1125
* -
* Difference should be -75
*/
const cartRes = await api.get(`/store/carts/${cartId}`)
expect(cartRes.status).toEqual(200)
expect(cartRes.data.cart.subtotal).toEqual(0)
expect(cartRes.data.cart.total).toEqual(-75)
expect(cartRes.data.cart.tax_total).toEqual(-75)
})
test("completes a swap", async () => {
await adminSeeder(dbConnection)
const order = await createReturnableOrder(dbConnection)
const shippingOut = await simpleShippingOptionFactory(dbConnection, {
region_id: "test-region",
price: 500,
})
const returnOption = await simpleShippingOptionFactory(dbConnection, {
name: "Return method",
region_id: "test-region",
is_return: true,
price: 100,
})
const api = useApi()
const response = await api.post(
`/admin/orders/${order.id}/swaps`,
{
additional_items: [
{
variant_id: "variant-2",
quantity: 1,
},
],
return_shipping: {
option_id: returnOption.id,
},
return_items: [
{
item_id: "test-item",
quantity: 1,
},
],
},
{
headers: {
authorization: "Bearer test_token",
},
}
)
expect(response.status).toEqual(200)
const cartId = response.data.order.swaps[0].cart_id
await api.post(`/store/carts/${cartId}`, {
shipping_address: {
address_1: "121 W Something St",
postal_code: "1234",
province: "something",
city: "ville la something",
phone: "12353245",
},
})
await api.post(`/store/carts/${cartId}/shipping-methods`, {
option_id: shippingOut.id,
})
await api.post(`/store/carts/${cartId}/payment-sessions`)
const completion = await api.post(`/store/carts/${cartId}/complete`)
expect(completion.status).toEqual(200)
expect(completion.data.type).toEqual("swap")
})
})
const createReturnableOrder = async (dbConnection, options = {}) => {
await simpleProductFactory(
dbConnection,
{
id: "test-product",
variants: [
{ id: "test-variant" },
{ id: "variant-2", prices: [{ currency: "usd", amount: 1000 }] },
],
},
100
)
let discounts = []
if (options.discount) {
discounts = [
{
code: "TESTCODE",
},
]
}
return await simpleOrderFactory(dbConnection, {
email: "[email protected]",
tax_rate: null,
fulfillment_status: "fulfilled",
payment_status: "captured",
region: {
id: "test-region",
name: "Test region",
tax_rate: 12.5, // Should be ignored due to item tax line
},
discounts,
line_items: [
{
id: "test-item",
variant_id: "test-variant",
quantity: 2,
fulfilled_quantity: 2,
shipped_quantity: 2,
unit_price: 1000,
tax_lines: [
{
name: "default",
code: "default",
rate: 20,
},
],
},
],
})
}
|
import {combineReducers} from 'redux';
import AppReducer from './app_reducer'
const rootReducer = combineReducers({astronomy: AppReducer});
export default rootReducer; |
import os.path as osp
import logging
import time
import argparse
from collections import OrderedDict
import os
import options.options as option
import utils.util as util
from data.util import bgr2ycbcr
from data import create_dataset, create_dataloader
from models import create_model
from preprocess.utils import imresize
import torch
if __name__ == '__main__':
    torch.multiprocessing.freeze_support()
    #### options
parser = argparse.ArgumentParser()
    parser.add_argument('-opt', type=str, required=True, help='Path to options YAML file.')
opt = option.parse(parser.parse_args().opt, is_train=False)
opt = option.dict_to_nonedict(opt)
logger = logging.getLogger('base')
logger.info(option.dict2str(opt))
dataset_opt = opt['datasets']['test']
#### Create test dataset and dataloader
test_set = create_dataset(dataset_opt)
test_loader = create_dataloader(test_set, dataset_opt)
logger.info('Number of test images in [{:s}]: {:d}'.format(dataset_opt['name'], len(test_set)))
model = create_model(opt)
test_set_name = test_loader.dataset.opt['name']
logger.info('\nTesting [{:s}]...'.format(test_set_name))
test_start_time = time.time()
dataset_dir = opt['path']['out_path']
util.mkdir(dataset_dir)
for data in test_loader:
img_path = data['LQ_path'][0]
img_name = osp.splitext(osp.basename(img_path))[0]
data['LQ'] = torch.stack([imresize(data['LQ'][0], 1.0 / 4, True)])
model.feed_data(data, is_val=True)
model.test()
visuals = model.get_current_visuals(need_GT=False)
reconstructed = util.tensor2img(visuals['SR']) # uint8
# Save SR images for reference
save_img_path = os.path.join(dataset_dir, img_name + '.png')
print(img_name)
util.save_img(reconstructed, save_img_path)
#save_lr_img_path = os.path.join("../datasets/va-x-compare/", img_name + '.png')
#util.save_img(util.tensor2img(data['LQ'].detach()), save_lr_img_path)
test_run_time = time.time()-test_start_time
print('Runtime {} (s) per image'.format(test_run_time / len(test_loader)))
|
# Generated by Django 3.0.2 on 2020-02-25 17:39
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='AudioModel',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('word', models.TextField(default='"Unknown"')),
('language', models.TextField(default='"english"')),
],
),
]
|
import inspect
import logging
from .splunk_event import SplunkEvent
from .utilities import get_class_from_frame, get_frame_from_log_record
class SplunkHandler(logging.Handler):
def emit(self, record):
try:
logger_name = record.name
if record.exc_info:
event = self.event_from_exception_record(record)
else:
event = self.event_from_log_record(record)
SplunkEvent().send_event(event, sourcetype=logger_name)
except Exception:
# Don't block application if something goes wrong with logging
pass
@staticmethod
def event_from_log_record(log_record):
event_data = {
"level": log_record.levelname,
}
if type(log_record.msg) is dict:
event_data['message'] = log_record.msg
else:
event_data['message'] = log_record.getMessage()
return event_data
@staticmethod
def event_from_exception_record(record):
frame = get_frame_from_log_record(record)
message = {
'path': frame.f_code.co_filename,
'line': frame.f_lineno,
'method': frame.f_code.co_name,
'class': get_class_from_frame(frame),
'module': inspect.getmodule(frame).__name__,
'message': record.getMessage(),
'traceback': record.exc_text
}
event_data = {
"level": record.levelname,
"message": message
}
return event_data
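# Usage sketch (illustrative, not part of the original file): attach the
# handler to a logger so records are forwarded to Splunk with the logger name
# as the sourcetype. Note that emit() swallows all exceptions, so a missing
# Splunk configuration fails silently.
if __name__ == '__main__':
    logger = logging.getLogger('my_app')
    logger.addHandler(SplunkHandler())
    logger.error('something went wrong')  # sent as a Splunk event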
|
/*
TODO
- Add flexible config for:
- data size, stop bits, parity, baud, etc.
- dma vs interrupt (or not).
- Error handling
- Transmit function improvements.
- Other UART Peripherals (currently only handles USART1 in UART mode).
- Overflow handling, etc. for Rx Queue.
*/
#pragma once
#ifndef DSY_UART_H
#define DSY_UART_H /**< macro */
#include "daisy_core.h"
namespace daisy
{
/** @addtogroup serial
@{
*/
/**
Uart Peripheral
@author shensley
@date March 2020
*/
class UartHandler
{
public:
struct Config
{
enum class Peripheral
{
USART_1,
USART_2,
USART_3,
UART_4,
UART_5,
USART_6,
UART_7,
UART_8,
LPUART_1,
};
enum class StopBits
{
BITS_0_5,
BITS_1,
BITS_1_5,
BITS_2,
};
enum class Parity
{
NONE,
EVEN,
ODD,
};
enum class Mode
{
RX,
TX,
TX_RX,
};
enum class WordLength
{
BITS_7,
BITS_8,
BITS_9,
};
struct
{
dsy_gpio_pin tx; /**< & */
dsy_gpio_pin rx; /**< & */
} pin_config; /**< & */
Peripheral periph;
StopBits stopbits;
Parity parity;
Mode mode;
WordLength wordlength;
uint32_t baudrate;
};
UartHandler() : pimpl_(nullptr) {}
UartHandler(const UartHandler& other) = default;
UartHandler& operator=(const UartHandler& other) = default;
/** Return values for Uart functions. */
enum class Result
{
OK, /**< & */
ERR /**< & */
};
/** Initializes the UART Peripheral */
Result Init(const Config& config);
/** Returns the current config. */
const Config& GetConfig() const;
    /** Reads the given number of bytes in blocking mode.
    \param *buff Buffer to read to
    \param size Buff size
    \param timeout How long to wait before timing out, in milliseconds
    \return Data received
    */
int PollReceive(uint8_t* buff, size_t size, uint32_t timeout);
/** Starts a DMA Receive callback to fill a buffer of specified size.
Data is populated into a FIFO queue, and can be queried with the
functions below.
Size of the buffer is internally fixed to 256.
Variable message lengths are transferred to the FIFO queue
anytime there is 1 byte-period without incoming data
\return OK or ERROR
*/
Result StartRx();
/** \return whether Rx DMA is listening or not. */
bool RxActive();
/** Flushes the Receive Queue
\return OK or ERROR
*/
Result FlushRx();
/** Sends an amount of data in blocking mode.
\param *buff Buffer of data to send
\param size Buffer size
\return OK or ERROR
*/
Result PollTx(uint8_t* buff, size_t size);
/** Pops the oldest byte from the FIFO.
\return Popped byte
*/
uint8_t PopRx();
    /** Checks if there are any unread bytes in the FIFO.
    \return Number of unread bytes in the FIFO (0 if empty)
    */
size_t Readable();
/** \return the result of HAL_UART_GetError() to the user. */
int CheckError();
class Impl; /**< & */
private:
Impl* pimpl_;
};
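/* Usage sketch (illustrative; `tx_pin` and `rx_pin` are hypothetical
   dsy_gpio_pin values that would come from the board definition):

       UartHandler uart;
       UartHandler::Config config;
       config.periph        = UartHandler::Config::Peripheral::USART_1;
       config.mode          = UartHandler::Config::Mode::TX_RX;
       config.stopbits      = UartHandler::Config::StopBits::BITS_1;
       config.parity        = UartHandler::Config::Parity::NONE;
       config.wordlength    = UartHandler::Config::WordLength::BITS_8;
       config.baudrate      = 115200;
       config.pin_config.tx = tx_pin;
       config.pin_config.rx = rx_pin;
       uart.Init(config);
       uart.StartRx();
       while(uart.Readable())
       {
           uint8_t b = uart.PopRx();
           uart.PollTx(&b, 1); // echo back
       }
*/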
/** @} */
} // namespace daisy
#endif
|
import os
import shutil
from src.text2system.config import SUFIX_ENV
def deleteOldManagedFiles():
root_path = os.path.dirname(os.path.dirname(__file__)) #parent directory
    print('Deleting the old files...')
for dir in os.listdir(root_path):
full_dir_path = os.path.join(root_path, dir)
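        # Note: `dir.find(SUFIX_ENV, len(dir) - len(SUFIX_ENV)) > 0` below is
        # a suffix test, equivalent to dir.endswith(SUFIX_ENV) for any name
        # longer than the suffix itself.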
if ((os.path.isdir(full_dir_path))
and ((dir.find(SUFIX_ENV, len(dir) - len(SUFIX_ENV)) > 0)
or (dir == '__pycache__'))):
shutil.rmtree(full_dir_path)
|
#!/usr/bin/env python
"""
This module provides various representations of transformed structures. A
TransformedStructure is a structure that has been modified by undergoing a
series of transformations.
"""
from __future__ import division
__author__ = "Shyue Ping Ong, Will Richards"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "1.0"
__maintainer__ = "Shyue Ping Ong"
__email__ = "[email protected]"
__date__ = "Mar 2, 2012"
import os
import re
import json
import datetime
from copy import deepcopy
from pymatgen.core.structure import Structure
from pymatgen.transformations.transformation_abc import AbstractTransformation
from pymatgen.io.cifio import CifParser
from pymatgen.io.vaspio.vasp_input import Poscar
from pymatgen.serializers.json_coders import MSONable
class TransformedStructure(MSONable):
"""
Container object for new structures that include history of
transformations.
Each transformed structure is made up of a sequence of structures with
associated transformation history.
"""
def __init__(self, structure, transformations, history=None,
other_parameters=None):
"""
Standard constructor for a TransformedStructure.
Args:
structure:
input structure
transformations:
Sequence of transformations to be applied to the input
structure.
history:
optional history for the input structure, which provides a way
to track structures having undergone multiple series of
transformations.
other_parameters:
optional parameters to store along with the
TransformedStructure. This can include tags (a list) or author
which will be parsed.
"""
history = [] if history is None else history
self._source = {}
self._structures = []
self._changes = []
self._change_parameters = []
self._redo_trans = []
self._other_parameters = {} if other_parameters is None \
else deepcopy(other_parameters)
if len(history) > 0:
self._source = history[0]
for i in xrange(1, len(history)):
struct = Structure.from_dict(history[i]["input_structure"])
trans = AbstractTransformation.from_dict(history[i])
param = history[i].get("output_parameters", {})
self._structures.append(struct)
self._changes.append(trans)
self._change_parameters.append(param)
self._structures.append(structure)
for t in transformations:
self.append_transformation(t)
def undo_last_change(self):
"""
Undo the last change in the TransformedStructure.
Raises:
IndexError if already at the oldest change.
"""
if len(self._changes) == 0:
raise IndexError("Can't undo. Already at oldest change.")
self._structures.pop()
self._change_parameters.pop()
self._redo_trans.append(self._changes.pop())
def redo_next_change(self):
"""
Redo the last undone change in the TransformedStructure.
Raises:
IndexError if already at the latest change.
"""
if len(self._redo_trans) == 0:
raise IndexError("Can't undo. Already at latest change.")
t = self._redo_trans.pop()
if hasattr(t, 'apply_transformation'):
self.append_transformation(t, clear_redo=False)
else:
self.append_filter(t)
def __getitem__(self, index):
return self._structures[index], self._changes[0:index]
def __getattr__(self, name):
s = object.__getattribute__(self, '_structures')[-1]
return getattr(s, name)
def __len__(self):
return len(self._structures)
def append_transformation(self, transformation, return_alternatives=False,
clear_redo=True):
"""
Appends a transformation to the TransformedStructure.
Args:
transformation:
Transformation to append
return_alternatives:
Whether to return alternative TransformedStructures for
one-to-many transformations. return_alternatives can be a
number, which stipulates the total number of structures to
return.
clear_redo:
Boolean indicating whether to clear the redo list. By default,
this is True, meaning any appends clears the history of
undoing. However, when using append_transformation to do a
redo, the redo list should not be cleared to allow multiple
redos.
"""
if clear_redo:
self._redo_trans = []
if return_alternatives and transformation.is_one_to_many:
starting_struct = self._structures[-1]
ranked_list = transformation.apply_transformation(
starting_struct, return_ranked_list=return_alternatives)
#generate the alternative structures
alts = []
for x in ranked_list[1:]:
struct = x.pop("structure")
other_paras = self._other_parameters.copy()
hist = self.history
actual_transformation = x.pop("transformation", transformation)
tdict = actual_transformation.to_dict
tdict["input_structure"] = starting_struct.to_dict
tdict["output_parameters"] = x
hist.append(tdict)
alts.append(TransformedStructure(struct, [], history=hist,
other_parameters=other_paras))
#use the first item in the ranked_list and apply it to this
#transformed_structure
x = ranked_list[0]
struct = x.pop("structure")
actual_transformation = x.pop("transformation", transformation)
self._structures.append(struct)
self._changes.append(actual_transformation)
self._change_parameters.append(x)
return alts
else:
new_s = transformation.apply_transformation(self._structures[-1])
self._structures.append(new_s)
self._change_parameters.append({})
self._changes.append(transformation)
def append_filter(self, structure_filter):
"""
        Appends a structure filter to the transformation history without
        modifying the current structure.
"""
self._structures.append(self._structures[-1])
self._change_parameters.append({})
self._changes.append(structure_filter)
def extend_transformations(self, transformations):
"""
Extends a sequence of transformations to the TransformedStructure.
Args:
transformations:
Sequence of Transformations
"""
for t in transformations:
self.append_transformation(t)
def get_vasp_input(self, vasp_input_set, generate_potcar=True):
"""
Returns VASP input as a dict of vaspio objects.
Args:
vasp_input_set:
pymatgen.io.vaspio_set.VaspInputSet like object that creates
vasp input files from structures
generate_potcar:
Set to False to generate a POTCAR.spec file instead of a
POTCAR, which contains the POTCAR labels but not the actual
POTCAR. Defaults to True.
"""
d = vasp_input_set.get_all_vasp_input(self._structures[-1],
generate_potcar)
d["transformations.json"] = json.dumps(self.to_dict)
return d
def write_vasp_input(self, vasp_input_set, output_dir,
create_directory=True):
"""
Writes VASP input to an output_dir.
Args:
vasp_input_set:
pymatgen.io.vaspio_set.VaspInputSet like object that creates
vasp input files from structures
output_dir:
Directory to output files
create_directory:
Create the directory if not present. Defaults to True.
"""
vasp_input_set.write_input(self._structures[-1], output_dir,
make_dir_if_not_present=create_directory)
with open(os.path.join(output_dir, "transformations.json"), "w") as fp:
json.dump(self.to_dict, fp)
def __str__(self):
output = ["Current structure", "------------",
str(self._structures[-1]), "\nSource", "------------",
str(self._source), "\nTransformation history", "------------"]
for i, t in enumerate(self._changes):
output.append("{} {}".format(t.to_dict,
self._change_parameters[i]))
output.append("\nOther parameters")
output.append("------------")
output.append(str(self._other_parameters))
return "\n".join(output)
def set_parameter(self, key, value):
self._other_parameters[key] = value
@property
def other_parameters(self):
return self._other_parameters
@property
def was_modified(self):
"""
        Boolean describing whether the last transformation on the structure
        made any alterations to it. One example of when this would return
        False is performing a substitution transformation when the specie
        to replace isn't in the structure.
"""
return not self._structures[-1] == self._structures[-2]
@property
def structures(self):
"""
Returns a copy of all structures in the TransformedStructure. A
structure is stored after every single transformation.
"""
return [s for s in self._structures]
@property
def transformations(self):
"""
Returns a copy of all transformations in the TransformedStructure.
"""
return [t for t in self._changes]
@property
def final_structure(self):
"""
Returns the final structure in the TransformedStructure.
"""
return self._structures[-1]
@staticmethod
def from_dict(d):
"""
Creates a TransformedStructure from a dict.
"""
s = Structure.from_dict(d)
return TransformedStructure(s, [], d["history"],
d.get("other_parameters", None))
@property
def history(self):
history = [self._source]
for i, t in enumerate(self._changes):
tdict = t.to_dict
tdict["input_structure"] = self._structures[i].to_dict
tdict["output_parameters"] = self._change_parameters[i]
history.append(tdict)
return history
@property
def to_dict(self):
"""
Returns a dict representation of the TransformedStructure.
"""
d = self._structures[-1].to_dict
d["@module"] = self.__class__.__module__
d["@class"] = self.__class__.__name__
d["history"] = self.history
d["version"] = __version__
d["last_modified"] = str(datetime.datetime.utcnow())
d["other_parameters"] = self._other_parameters
return d
@staticmethod
def from_cif_string(cif_string, transformations=None, primitive=True,
occupancy_tolerance=1.):
"""
Generates TransformedStructure from a cif string.
Args:
cif_string:
Input cif string. Should contain only one structure. For cifs
containing multiple structures, please use CifTransmuter.
transformations:
Sequence of transformations to be applied to the input
structure.
primitive:
Option to set if the primitive cell should be extracted.
Defaults to True. However, there are certain instances where
you might want to use a non-primitive cell, e.g., if you are
trying to generate all possible orderings of partial removals
or order a disordered structure.
occupancy_tolerance:
If total occupancy of a site is between 1 and
occupancy_tolerance, the occupancies will be scaled down to 1.
"""
parser = CifParser.from_string(cif_string, occupancy_tolerance)
raw_string = re.sub("'", "\"", cif_string)
cif_dict = parser.to_dict
cif_keys = cif_dict.keys()
s = parser.get_structures(primitive)[0]
partial_cif = cif_dict[cif_keys[0]]
if "_database_code_ICSD" in partial_cif:
source = partial_cif["_database_code_ICSD"] + "-ICSD"
else:
source = "uploaded cif"
source_info = {"source": source,
"datetime": str(datetime.datetime.now()),
"original_file": raw_string,
"cif_data": cif_dict[cif_keys[0]]}
return TransformedStructure(s, transformations, [source_info])
@staticmethod
def from_poscar_string(poscar_string, transformations=None):
"""
Generates TransformedStructure from a poscar string.
Args:
poscar_string:
Input POSCAR string.
transformations:
Sequence of transformations to be applied to the input
structure.
"""
p = Poscar.from_string(poscar_string)
if not p.true_names:
raise ValueError("Transformation can be craeted only from POSCAR "
"strings with proper VASP5 element symbols.")
raw_string = re.sub("'", "\"", poscar_string)
s = p.structure
source_info = {"source": "uploaded POSCAR",
"datetime": str(datetime.datetime.now()),
"original_file": raw_string}
return TransformedStructure(s, transformations, [source_info])
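# Usage sketch (illustrative, not part of the original file; assumes the
# standard_transformations module from the same pymatgen release).
if __name__ == "__main__":
    from pymatgen.core.lattice import Lattice
    from pymatgen.transformations.standard_transformations import \
        SupercellTransformation
    lattice = Lattice.cubic(4.2)
    s = Structure(lattice, ["Cs", "Cl"], [[0, 0, 0], [0.5, 0.5, 0.5]])
    ts = TransformedStructure(
        s, [SupercellTransformation(((2, 0, 0), (0, 2, 0), (0, 0, 2)))])
    print(len(ts.final_structure))  # 16 sites in the 2x2x2 supercell
    ts.undo_last_change()
    print(len(ts.final_structure))  # back to the 2-site input structure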
|
export default [
{
id: 0,
title: 'VIRTUe test building deadline',
allDay: true,
start: new Date(2018, 5, 2),
end: new Date(2018, 5, 3),
},
{
id: 1,
title: 'All day Demo Day',
allDay: true,
start: new Date(2018, 5, 8),
end: new Date(2018, 5, 9),
},
{
id: 2,
title: 'VIRTUe help at building site',
start: new Date(2018, 5, 0),
end: new Date(2018, 5, 1),
},
{
id: 3,
title: 'Demo Day setup',
allDay: true,
start: new Date(2018, 5, 6),
end: new Date(2018, 5, 6),
// start: new Date(new Date().setHours(new Date().getHours() - 3)),
// end: new Date(new Date().setHours(new Date().getHours() + 3)),
},
{
id: 4,
title: 'Report deadline',
allDay: true,
start: new Date(2018, 5, 14, 17, 0, 0, 0),
end: new Date(2018, 5, 14, 18, 0, 0, 0),
},
{
id: 5,
title: 'Portfolio deadline',
allDay: true,
start: new Date(2018, 5, 21, 17, 0, 0, 0),
end: new Date(2018, 5, 21, 18, 0, 0, 0),
},
{
id: 6,
title: 'Exam',
start: new Date(2018, 5, 29, 9, 0, 0, 0),
end: new Date(2018, 5, 29, 12, 0, 0, 0),
},
] |
module.exports = () => ({
presets: [
'@babel/preset-env',
],
});
|
# Copyright 2021 AI Singapore
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Test for draw poses node
"""
import pytest
import numpy as np
from peekingduck.pipeline.nodes.draw.poses import Node
@pytest.fixture
def draw_poses():
node = Node({'input': ["keypoints", "keypoint_scores", "keypoint_conns", "img"],
'output': ["none"],
'keypoint_dot_color': [0, 255, 0],
'keypoint_dot_radius': 5,
'keypoint_connect_color': [0, 255, 255],
'keypoint_text_color': [255, 0, 255],
})
return node
class TestBtmMidpoint:
def test_no_poses(self, draw_poses, create_image):
poses = np.empty((0, 2))
scores = []
keypoint_conns = []
original_img = create_image((28, 28, 3))
output_img = original_img.copy()
input1 = {
"keypoints": poses,
"keypoint_scores": scores,
"keypoint_conns": keypoint_conns,
"img": output_img
}
draw_poses.run(input1)
np.testing.assert_equal(original_img, output_img)
|
import { StyleSheet } from 'react-native';
const styles = StyleSheet.create({
container: { flex: 1, flexDirection: 'row' },
text: {
flex: 1,
marginHorizontal: 10,
textAlignVertical: 'center',
fontSize: 18
}
});
export default styles; |
! function(e) {
var t = {};
function n(r) {
if (t[r]) return t[r].exports;
var o = t[r] = {
i: r,
l: !1,
exports: {}
};
return e[r].call(o.exports, o, o.exports, n), o.l = !0, o.exports
}
n.m = e, n.c = t, n.d = function(e, t, r) {
n.o(e, t) || Object.defineProperty(e, t, {
enumerable: !0,
get: r
})
}, n.r = function(e) {
"undefined" != typeof Symbol && Symbol.toStringTag && Object.defineProperty(e, Symbol.toStringTag, {
value: "Module"
}), Object.defineProperty(e, "__esModule", {
value: !0
})
}, n.t = function(e, t) {
if (1 & t && (e = n(e)), 8 & t) return e;
if (4 & t && "object" == typeof e && e && e.__esModule) return e;
var r = Object.create(null);
if (n.r(r), Object.defineProperty(r, "default", {
enumerable: !0,
value: e
}), 2 & t && "string" != typeof e)
for (var o in e) n.d(r, o, function(t) {
return e[t]
}.bind(null, o));
return r
}, n.n = function(e) {
var t = e && e.__esModule ? function() {
return e.default
} : function() {
return e
};
return n.d(t, "a", t), t
}, n.o = function(e, t) {
return Object.prototype.hasOwnProperty.call(e, t)
}, n.p = "", n(n.s = 1)
}([, function(e, t, n) {
"use strict";
n.r(t);
const r = {
toolbar: {
undo: "Undo",
redo: "Redo",
paintformat: "Paint format",
clearformat: "Clear format",
format: "Format",
font: "Font",
fontSize: "Font size",
fontBold: "Έντονη γραφή",
fontItalic: "Πλάγια γραφή",
underline: "Underline",
strike: "Strike",
textColor: "Text color",
fillColor: "Fill color",
border: "Borders",
merge: "Merge cells",
align: "Horizontal align",
valign: "Vertical align",
textwrap: "Text wrapping",
freeze: "Freeze cell",
formula: "Functions",
more: "More"
},
contextmenu: {
copy: "Αντιγραφή",
cut: "Αποκοπή",
paste: "Επικόλληση",
pasteValue: "Paste values only",
pasteFormat: "Paste format only",
insertRow: "Εισαγωγή Γραμμής",
insertColumn: "Εισαγωγή Στήλης",
deleteRow: "Διαγραφή Γραμμής",
deleteColumn: "Διαγραφή Στήλης",
deleteCell: "Διαγραφή κελιού",
deleteCellText: "Διαγραφή κειμένου κελιού",
validation: "Data validations"
},
format: {
normal: "Normal",
text: "Plain Text",
number: "Number",
percent: "Percent",
rmb: "RMB",
usd: "USD",
date: "Date",
time: "Time",
datetime: "Date time",
duration: "Duration"
},
formula: {
sum: "Sum",
average: "Average",
max: "Max",
min: "Min",
concat: "Concat"
},
validation: {
required: "it must be required",
notMatch: "it not match its validation rule",
between: "it is between {} and {}",
notBetween: "it is not between {} and {}",
notIn: "it is not in list",
equal: "it equal to {}",
notEqual: "it not equal to {}",
lessThan: "it less than {}",
lessThanEqual: "it less than or equal to {}",
greaterThan: "it greater than {}",
greaterThanEqual: "it greater than or equal to {}"
},
error: {
pasteForMergedCell: "Unable to do this for merged cells"
},
calendar: {
            weeks: ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"],
months: ["January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December"]
},
button: {
cancel: "Cancel",
remove: "Remove",
save: "Save"
},
dataValidation: {
mode: "Mode",
range: "Cell Range",
criteria: "Criteria",
modeType: {
cell: "Cell",
column: "Colun",
row: "Row"
},
type: {
list: "List",
number: "Number",
date: "Date",
phone: "Phone",
email: "Email"
},
operator: {
be: "between",
nbe: "not betwwen",
lt: "less than",
lte: "less than or equal to",
gt: "greater than",
gte: "greater than or equal to",
eq: "equal to",
neq: "not equal to"
}
}
};
window && window.x && window.x.spreadsheet && (window.x.spreadsheet.$messages = window.x.spreadsheet.$messages || {}, window.x.spreadsheet.$messages.en = r), t.default = r
}]); |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Kholloscope
~~~~~~~~~~~
A small framework for a big kholloscope. Easily publish, in Python,
the kholloscope (oral-exam rota) of a French "classe préparatoire".
Configuration file.
"""
__zone = 'c'
__debug = False
__domain = 'localhost'
__port = 8080
__route = '/<classe:re:[a-zA-Z0-9_-]+>'
__ordre = '-'
__decal = 0
__server = 'wsgiref'
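# Settings consumed by the application (an interpretation, not verified
# against the app code: __route is a Bottle-style URL pattern matching the
# class name, __server selects the WSGI backend handed to the Bottle runner,
# and __zone is presumably the French school-holiday zone).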
|
from setuptools import setup
import sys
import os
import imp
from setuptools import Extension
import platform
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
dirname = os.path.dirname(__file__)
path_version = os.path.join(dirname, "vaex/core/_version.py")
version = imp.load_source('version', path_version)
name = 'vaex'
author = "Maarten A. Breddels"
author_email = "[email protected]"
license = 'MIT'
version = version.__version__
url = 'https://www.github.com/maartenbreddels/vaex'
# TODO: can we do without requests and progressbar2?
# TODO: after python2 support is dropped, future and futures can also be dropped
# TODO: would be nice to have astropy only as dep in vaex-astro
install_requires_core = ["numpy>=1.16", "aplus", "tabulate>=0.8.3",
"future>=0.15.2", "pyyaml", "progressbar2",
"requests", "six", "cloudpickle", "pandas", "dask",
"nest-asyncio>=1.3.3", "pyarrow>=3.0", "frozendict",
"blake3", "filelock",
]
if sys.version_info[0] == 2:
install_requires_core.append("futures>=2.2.0")
install_requires_viz = ["matplotlib>=1.3.1", ]
install_requires_astro = ["kapteyn"]
if "MACOSX_DEPLOYMENT_TARGET" not in os.environ:
os.environ["MACOSX_DEPLOYMENT_TARGET"] = "10.9"
extra_dev_options = []
# MB: I like these options during development, the second if for ccache
# extra_dev_options = ['-fmax-errors=4', '-fdiagnostics-color', '-pedantic-errors']
class get_numpy_include(object):
"""Helper class to determine the numpy include path
The purpose of this class is to postpone importing numpy
until it is actually installed, so that the ``get_include()``
method can be invoked. """
def __init__(self):
pass
def __str__(self):
import numpy as np
return np.get_include()
class get_pybind_include(object):
"""Helper class to determine the pybind11 include path
The purpose of this class is to postpone importing pybind11
until it is actually installed, so that the ``get_include()``
method can be invoked. """
def __init__(self, user=False):
self.user = user
def __str__(self):
# this trick does not work anymore it seems, we now just vendor it
# import pybind11
# return pybind11.get_include(self.user)
return 'vendor/pybind11/include'
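# A minimal sketch of how these helper classes defer the import (assumption:
# setuptools only stringifies include_dirs entries at build time, after
# setup_requires has made the packages importable):
#
#   inc = get_numpy_include()   # constructing it does not import numpy
#   str(inc)                    # imports numpy and returns its include dir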
USE_ABSL = False
USE_TSL = True
define_macros = []
if USE_ABSL:
define_macros += [('VAEX_USE_ABSL', None)]
if USE_TSL:
define_macros += [('VAEX_USE_TSL', None)]
dll_files = []
if platform.system().lower() == 'windows':
extra_compile_args = ["/EHsc"]
dll_files = ['pcre.dll', 'pcrecpp.dll', 'vcruntime140_1.dll']
else:
# TODO: maybe enable these flags for non-wheel/conda builds? ["-mtune=native", "-march=native"]
extra_compile_args = ["-std=c++11", "-O3", "-funroll-loops", "-Werror=return-type", "-Wno-unused-parameter"]
extra_compile_args.append("-g")
extra_compile_args += extra_dev_options
if sys.platform == 'darwin':
extra_compile_args.append("-mmacosx-version-min=10.9")
# on windows (Conda-forge builds), the dirname is an absolute path
extension_vaexfast = Extension("vaex.vaexfast", [os.path.relpath(os.path.join(dirname, "src/vaexfast.cpp"))],
include_dirs=[get_numpy_include()],
extra_compile_args=extra_compile_args)
extension_strings = Extension("vaex.superstrings", [
os.path.relpath(os.path.join(dirname, "src/strings.cpp")),
os.path.relpath(os.path.join(dirname, "src/string_utils.cpp")),
],
include_dirs=[
get_numpy_include(),
get_pybind_include(),
get_pybind_include(user=True),
'vendor/string-view-lite/include',
'vendor/boost',
os.path.join(sys.prefix, 'include'),
os.path.join(sys.prefix, 'Library', 'include'), # windows
os.path.join(dirname, 'vendor', 'pcre', 'Library', 'include') # windows pcre from conda-forge
],
library_dirs=[
os.path.join(sys.prefix, 'lib'),
os.path.join(sys.prefix, 'Library', 'lib'), # windows
os.path.join(dirname, 'vendor', 'pcre', 'Library', 'lib'), # windows pcre from conda-forge
],
extra_compile_args=extra_compile_args,
libraries=['pcre', 'pcrecpp']
)
extension_superutils = Extension("vaex.superutils", [
os.path.relpath(os.path.join(dirname, "src/hash_string.cpp")),
os.path.relpath(os.path.join(dirname, "src/hash_primitives_pot.cpp")),
os.path.relpath(os.path.join(dirname, "src/hash_object.cpp")),
os.path.relpath(os.path.join(dirname, "src/hash_primitives_prime.cpp")),
os.path.relpath(os.path.join(dirname, "src/superutils.cpp")),
os.path.relpath(os.path.join(dirname, "src/string_utils.cpp")),
] + ([os.path.relpath(os.path.join(dirname, "vendor/abseil-cpp/absl/container/internal/raw_hash_set.cc"))] if USE_ABSL else []),
include_dirs=[
get_numpy_include(), get_pybind_include(),
get_pybind_include(user=True),
'vendor/abseil-cpp',
'vendor/flat_hash_map',
'vendor/sparse-map/include',
'vendor/hopscotch-map/include',
'vendor/string-view-lite/include',
],
extra_compile_args=extra_compile_args,
define_macros=define_macros,
)
extension_superagg = Extension("vaex.superagg", [
os.path.relpath(os.path.join(dirname, "src/superagg_binners.cpp")),
os.path.relpath(os.path.join(dirname, "src/superagg.cpp")),
os.path.relpath(os.path.join(dirname, "src/agg_hash_string.cpp")),
os.path.relpath(os.path.join(dirname, "src/string_utils.cpp")),
os.path.relpath(os.path.join(dirname, "src/agg_hash_primitive.cpp")),
],
include_dirs=[
get_numpy_include(), get_pybind_include(),
get_pybind_include(user=True),
'vendor/flat_hash_map',
'vendor/sparse-map/include',
'vendor/hopscotch-map/include',
'vendor/string-view-lite/include'
],
extra_compile_args=extra_compile_args,
define_macros=define_macros,
)
setup(name=name + '-core',
version=version,
description='Core of vaex',
url=url,
author=author,
author_email=author_email,
setup_requires=['numpy'],
install_requires=install_requires_core,
license=license,
package_data={'vaex': dll_files + ['test/files/*.fits', 'test/files/*.vot', 'test/files/*.hdf5']},
packages=['vaex', 'vaex.arrow', 'vaex.core', 'vaex.file', 'vaex.test', 'vaex.ext', 'vaex.misc'],
ext_modules=[extension_vaexfast] if on_rtd else [extension_vaexfast, extension_strings, extension_superutils, extension_superagg],
zip_safe=False,
extras_require={
'all': ["gcsfs>=0.6.2", "s3fs"]
},
entry_points={
'console_scripts': ['vaex = vaex.__main__:main'],
'gui_scripts': ['vaexgui = vaex.__main__:main'], # sometimes in osx, you need to run with this
'vaex.dataframe.accessor': ['geo = vaex.geo:DataFrameAccessorGeo'],
'vaex.dataset.opener': [
'arrow = vaex.arrow.opener:ArrowOpener',
'parquet = vaex.arrow.opener:ParquetOpener',
'feather = vaex.arrow.opener:FeatherOpener',
],
'vaex.memory.tracker': [
'default = vaex.memory:MemoryTracker'
],
'vaex.file.scheme': [
's3 = vaex.file.s3',
'fsspec+s3 = vaex.file.s3fs',
'arrow+s3 = vaex.file.s3arrow',
'gs = vaex.file.gcs',
'fsspec+gs = vaex.file.gcs',
]
}
)
|
angular.module('application', ['ui.scroll', 'ui.scroll.grid'])
.controller('gridController', [
'$scope', '$log', '$timeout', function ($scope, console, $timeout) {
var datasource = {};
datasource.get = function (index, count, success) {
$timeout(function () {
var result = [];
for (var i = index; i <= index + count - 1; i++) {
result.push({
col1: i,
col2: 'item #' + i,
col3: (Math.random() < 0.5)
});
}
success(result);
}, 100);
};
$scope.datasource = datasource;
var clearLayout = [
{index: 0, mapTo: 0, css: {backgroundColor: ''}},
{index: 1, mapTo: 1, css: {backgroundColor: ''}},
{index: 2, mapTo: 2, css: {backgroundColor: ''}}
];
var someLayout = [
{index: 0, mapTo: 2, css: {backgroundColor: '#ccc'}},
{index: 1, mapTo: 1, css: {backgroundColor: '#ddd'}},
{index: 2, mapTo: 0, css: {backgroundColor: '#eee'}}
];
$scope.applyLayout = function () {
$scope.adapter.gridAdapter.applyLayout(someLayout);
};
$scope.clearLayout = function () {
$scope.adapter.gridAdapter.applyLayout(clearLayout);
};
}
]);
|
"""Wrapper for signalfd(2) system call.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import errno
import logging
import os
import ctypes
from ctypes import (
c_int,
c_void_p,
c_uint32,
c_uint64,
c_uint8,
c_int32,
)
from ctypes.util import find_library
import enum
from ._sigsetops import (
SigSet,
sigaddset,
sigfillset,
)
_LOGGER = logging.getLogger(__name__)
###############################################################################
# Map the C interface
_LIBC_PATH = find_library('c')
_LIBC = ctypes.CDLL(_LIBC_PATH, use_errno=True)
if getattr(_LIBC, 'signalfd', None) is None:
raise ImportError('Unsupported libc version found: %s' % _LIBC_PATH)
###############################################################################
# int signalfd(int fd, const sigset_t *mask, int flags);
_SIGNALFD_DECL = ctypes.CFUNCTYPE(c_int, c_int, c_void_p, c_int,
use_errno=True)
_SIGNALFD = _SIGNALFD_DECL(('signalfd', _LIBC))
def signalfd(sigset, flags=0, prev_fd=-1):
"""create/update a signal file descriptor.
"""
if isinstance(sigset, SigSet):
new_set = sigset
elif sigset == 'all':
new_set = SigSet()
sigfillset(new_set)
else:
new_set = SigSet()
for signum in sigset:
sigaddset(new_set, signum)
new_set_p = ctypes.pointer(new_set)
fileno = _SIGNALFD(prev_fd, new_set_p, flags)
if fileno < 0:
err = ctypes.get_errno()
raise OSError(err, os.strerror(err),
'signalfd(%r, %r, %r)' % (prev_fd, new_set, flags))
return fileno
###############################################################################
# Constants copied from sys/signalfd.h
#
# See man signalfd(2) for more details.
#
class SFDFlags(enum.IntEnum):
"""Flags supported by SignalFD.
"""
#: Set the O_NONBLOCK file status flag on the new open file description.
#: Using this flag saves extra calls to fcntl(2) to achieve the same
#: result.
NONBLOCK = 0o4000
#: Set the close-on-exec (FD_CLOEXEC) flag on the new file descriptor. See
#: the description of the O_CLOEXEC flag in open(2) for reasons why this
#: may be useful.
CLOEXEC = 0o2000000
#: Set the O_NONBLOCK file status flag on the new open file description. Using
#: this flag saves extra calls to fcntl(2) to achieve the same result.
#: (since Linux 2.6.27)
SFD_NONBLOCK = SFDFlags.NONBLOCK
#: Set the close-on-exec (FD_CLOEXEC) flag on the new file descriptor. See the
#: description of the O_CLOEXEC flag in open(2) for reasons why this may be
#: useful.
#: (since Linux 2.6.27)
SFD_CLOEXEC = SFDFlags.CLOEXEC
###############################################################################
# The signalfd_siginfo structure
#
class SFDSigInfo(ctypes.Structure):
"""The signalfd_siginfo structure.
The format of the signalfd_siginfo structure(s) returned by read(2)s from a
signalfd file descriptor is as follows:
struct signalfd_siginfo {
uint32_t ssi_signo; /* Signal number */
int32_t ssi_errno; /* Error number (unused) */
int32_t ssi_code; /* Signal code */
uint32_t ssi_pid; /* PID of sender */
uint32_t ssi_uid; /* Real UID of sender */
int32_t ssi_fd; /* File descriptor (SIGIO) */
        uint32_t ssi_tid;     /* Kernel timer ID (POSIX timers) */
uint32_t ssi_band; /* Band event (SIGIO) */
uint32_t ssi_overrun; /* POSIX timer overrun count */
uint32_t ssi_trapno; /* Trap number that caused signal */
int32_t ssi_status; /* Exit status or signal (SIGCHLD) */
int32_t ssi_int; /* Integer sent by sigqueue(2) */
uint64_t ssi_ptr; /* Pointer sent by sigqueue(2) */
uint64_t ssi_utime; /* User CPU time consumed (SIGCHLD) */
uint64_t ssi_stime; /* System CPU time consumed (SIGCHLD) */
uint64_t ssi_addr; /* Address that generated signal
(for hardware-generated signals) */
uint8_t pad[X]; /* Pad size to 128 bytes (allow for
additional fields in the future) */
};
"""
# pylint: disable=bad-whitespace
_FIELDS = [
('ssi_signo', c_uint32), #: Signal number
('ssi_errno', c_int32), #: Error number (unused)
('ssi_code', c_int32), #: Signal code
('ssi_pid', c_uint32), #: PID of sender
('ssi_uid', c_uint32), #: Real UID of sender
('ssi_fd', c_int32), #: File descriptor (SIGIO)
('ssi_tid', c_uint32), #: Kernel timer ID (POSIX timers)
('ssi_band', c_uint32), #: Band event (SIGIO)
('ssi_overrun', c_uint32), #: POSIX timer overrun count
('ssi_trapno', c_uint32), #: Trap number that caused signal
('ssi_status', c_int32), #: Exit status or signal (SIGCHLD)
('ssi_int', c_int32), #: Integer sent by sigqueue(2)
('ssi_ptr', c_uint64), #: Pointer sent by sigqueue(2)
('ssi_utime', c_uint64), #: User CPU time consumed (SIGCHLD)
('ssi_stime', c_uint64), #: System CPU time consumed (SIGCHLD)
('ssi_addr', c_uint64), #: Address that generated signal
]
__PADWORDS = 128 - sum([ctypes.sizeof(field[1]) for
field in _FIELDS])
_fields_ = _FIELDS + [
('_pad', c_uint8 * __PADWORDS), # Pad size to 128 bytes (allow for
# additional fields in the future)
]
def signalfd_read(sfd):
"""Read signalfd_siginfo data from a signalfd filedescriptor.
"""
try:
data = os.read(sfd, ctypes.sizeof(SFDSigInfo))
except OSError as err:
# Ignore signal interruptions
if err.errno != errno.EINTR:
raise
return None
return SFDSigInfo.from_buffer_copy(data)
###############################################################################
__all__ = [
'SFD_NONBLOCK',
'SFD_CLOEXEC',
'signalfd',
'signalfd_read',
]
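# A minimal usage sketch (Linux only; the signals must also be blocked with
# signal.pthread_sigmask for signalfd delivery to take effect):
#
#   import signal
#   signal.pthread_sigmask(signal.SIG_BLOCK, {signal.SIGCHLD})
#   sfd = signalfd([signal.SIGCHLD], flags=SFD_CLOEXEC | SFD_NONBLOCK)
#   info = signalfd_read(sfd)   # None if the read was interrupted (EINTR)
#   if info is not None:
#       print('signal', info.ssi_signo, 'from pid', info.ssi_pid)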
|
"use strict";
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
var _interopRequireWildcard = require("@babel/runtime/helpers/interopRequireWildcard");
exports.__esModule = true;
exports["default"] = void 0;
var React = _interopRequireWildcard(require("react"));
var _reactOnclickoutside = _interopRequireDefault(require("react-onclickoutside"));
var TriggerContent = function TriggerContent(_ref) {
var children = _ref.children,
ref = _ref.ref;
return React.createElement("div", {
ref: ref
}, children);
};
var _default = (0, _reactOnclickoutside["default"])(TriggerContent);
exports["default"] = _default; |
/*
* This file is part of the Symfony Webpack Encore package.
*
* (c) Fabien Potencier <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
'use strict';
const chalk = require('chalk');
const messagesKeys = [
'debug',
'recommendation',
'warning',
'deprecation',
];
const defaultConfig = {
isVerbose: false,
quiet: false
};
let messages = {};
let config = {};
const reset = function() {
messages = {};
for (let messageKey of messagesKeys) {
messages[messageKey] = [];
}
config = Object.assign({}, defaultConfig);
};
reset();
function log(message) {
if (config.quiet) {
return;
}
console.log(message);
}
module.exports = {
debug(message) {
messages.debug.push(message);
if (config.isVerbose) {
log(`${chalk.bgBlack.white(' DEBUG ')} ${message}`);
}
},
recommendation(message) {
messages.recommendation.push(message);
log(`${chalk.bgBlue.white(' RECOMMEND ')} ${message}`);
},
warning(message) {
messages.warning.push(message);
log(`${chalk.bgYellow.black(' WARNING ')} ${chalk.yellow(message)}`);
},
deprecation(message) {
messages.deprecation.push(message);
log(`${chalk.bgYellow.black(' DEPRECATION ')} ${chalk.yellow(message)}`);
},
getMessages() {
return messages;
},
quiet(setQuiet = true) {
config.quiet = setQuiet;
},
verbose(setVerbose = true) {
config.isVerbose = setVerbose;
},
reset() {
reset();
}
};
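// A minimal usage sketch (the './logger' require path is an assumption):
//
//   const logger = require('./logger');
//   logger.verbose();                       // also echo debug() messages
//   logger.warning('something looks off');  // stored and printed unless quiet
//   logger.getMessages().warning;           // => ['something looks off']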
|
# exported from PySB model 'model'
from pysb import Model, Monomer, Parameter, Expression, Compartment, Rule, Observable, Initial, MatchOnce, Annotation, ANY, WILD
Model()
Monomer('Ligand', ['Receptor'])
Monomer('ParpU', ['C3A'])
Monomer('C8A', ['BidU'])
Monomer('SmacM', ['BaxA'])
Monomer('BaxM', ['BidM', 'BaxA'])
Monomer('Apop', ['C3pro', 'Xiap'])
Monomer('Fadd', ['Receptor', 'C8pro'])
Monomer('SmacC', ['Xiap'])
Monomer('ParpC')
Monomer('Xiap', ['SmacC', 'Apop', 'C3A'])
Monomer('C9')
Monomer('C3ub')
Monomer('C8pro', ['Fadd'])
Monomer('Bcl2', ['BidM', 'BaxA'])
Monomer('C3pro', ['Apop'])
Monomer('CytoCM', ['BaxA'])
Monomer('CytoCC')
Monomer('BaxA', ['BaxM', 'Bcl2', 'BaxA_1', 'BaxA_2', 'SmacM', 'CytoCM'])
Monomer('ApafI')
Monomer('BidU', ['C8A'])
Monomer('BidT')
Monomer('C3A', ['Xiap', 'ParpU'])
Monomer('ApafA')
Monomer('BidM', ['BaxM', 'Bcl2'])
Monomer('Receptor', ['Ligand', 'Fadd'])
Parameter('bind_0_Ligand_binder_Receptor_binder_target_2kf', 1.0)
Parameter('bind_0_Ligand_binder_Receptor_binder_target_1kr', 1.0)
Parameter('bind_0_Receptor_binder_Fadd_binder_target_2kf', 1.0)
Parameter('bind_0_Receptor_binder_Fadd_binder_target_1kr', 1.0)
Parameter('substrate_binding_0_Fadd_catalyzer_C8pro_substrate_2kf', 1.0)
Parameter('substrate_binding_0_Fadd_catalyzer_C8pro_substrate_1kr', 1.0)
Parameter('catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product_1kc', 1.0)
Parameter('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_2kf', 1.0)
Parameter('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_1kr', 1.0)
Parameter('catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product_1kc', 1.0)
Parameter('conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_2kf', 1.0)
Parameter('conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_1kr', 1.0)
Parameter('inhibition_0_SmacC_inhibitor_Xiap_inh_target_2kf', 1.0)
Parameter('inhibition_0_SmacC_inhibitor_Xiap_inh_target_1kr', 1.0)
Parameter('conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_2kf', 1.0)
Parameter('conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_1kr', 1.0)
Parameter('catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_2kf', 1.0)
Parameter('catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_1kr', 1.0)
Parameter('catalysis_1_Apop_catalyzer_C3pro_substrate_C3A_product_1kc', 1.0)
Parameter('inhibition_0_Xiap_inhibitor_Apop_inh_target_2kf', 1.0)
Parameter('inhibition_0_Xiap_inhibitor_Apop_inh_target_1kr', 1.0)
Parameter('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_2kf', 1.0)
Parameter('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_1kr', 1.0)
Parameter('catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product_1kc', 1.0)
Parameter('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_2kf', 1.0)
Parameter('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_1kr', 1.0)
Parameter('catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product_1kc', 1.0)
Parameter('equilibration_0_BidT_equil_a_BidM_equil_b_1kf', 1.0)
Parameter('equilibration_0_BidT_equil_a_BidM_equil_b_1kr', 1.0)
Parameter('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_2kf', 1.0)
Parameter('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_1kr', 1.0)
Parameter('catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product_1kc', 1.0)
Parameter('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_2kf', 1.0)
Parameter('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_1kr', 1.0)
Parameter('self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate_1kc', 1.0)
Parameter('inhibition_0_Bcl2_inhibitor_BidM_inh_target_2kf', 1.0)
Parameter('inhibition_0_Bcl2_inhibitor_BidM_inh_target_1kr', 1.0)
Parameter('inhibition_0_Bcl2_inhibitor_BaxA_inh_target_2kf', 1.0)
Parameter('inhibition_0_Bcl2_inhibitor_BaxA_inh_target_1kr', 1.0)
Parameter('pore_formation_0_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_0_BaxA_pore_1kr', 1.0)
Parameter('pore_formation_1_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_1_BaxA_pore_1kr', 1.0)
Parameter('pore_formation_2_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_2_BaxA_pore_1kr', 1.0)
Parameter('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_2kf', 1.0)
Parameter('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kr', 1.0)
Parameter('transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kc', 1.0)
Parameter('transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_2kf', 1.0)
Parameter('transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kr', 1.0)
Parameter('transport_1_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kc', 1.0)
Parameter('Ligand_0', 1000.0)
Parameter('ParpU_0', 1000000.0)
Parameter('C8A_0', 0.0)
Parameter('SmacM_0', 100000.0)
Parameter('BaxM_0', 40000.0)
Parameter('Apop_0', 0.0)
Parameter('Fadd_0', 130000.0)
Parameter('SmacC_0', 0.0)
Parameter('ParpC_0', 0.0)
Parameter('Xiap_0', 103750.0)
Parameter('C9_0', 100000.0)
Parameter('C3ub_0', 0.0)
Parameter('C8pro_0', 130000.0)
Parameter('Bcl2_0', 328000.0)
Parameter('C3pro_0', 21000.0)
Parameter('CytoCM_0', 500000.0)
Parameter('CytoCC_0', 0.0)
Parameter('BaxA_0', 0.0)
Parameter('ApafI_0', 100000.0)
Parameter('BidU_0', 171000.0)
Parameter('BidT_0', 0.0)
Parameter('C3A_0', 0.0)
Parameter('ApafA_0', 0.0)
Parameter('BidM_0', 0.0)
Parameter('Receptor_0', 100.0)
Observable('Ligand_obs', Ligand())
Observable('ParpU_obs', ParpU())
Observable('C8A_obs', C8A())
Observable('SmacM_obs', SmacM())
Observable('BaxM_obs', BaxM())
Observable('Apop_obs', Apop())
Observable('Fadd_obs', Fadd())
Observable('SmacC_obs', SmacC())
Observable('ParpC_obs', ParpC())
Observable('Xiap_obs', Xiap())
Observable('C9_obs', C9())
Observable('C3ub_obs', C3ub())
Observable('C8pro_obs', C8pro())
Observable('Bcl2_obs', Bcl2())
Observable('C3pro_obs', C3pro())
Observable('CytoCM_obs', CytoCM())
Observable('CytoCC_obs', CytoCC())
Observable('BaxA_obs', BaxA())
Observable('ApafI_obs', ApafI())
Observable('BidU_obs', BidU())
Observable('BidT_obs', BidT())
Observable('C3A_obs', C3A())
Observable('ApafA_obs', ApafA())
Observable('BidM_obs', BidM())
Observable('Receptor_obs', Receptor())
Rule('bind_0_Ligand_binder_Receptor_binder_target', Ligand(Receptor=None) + Receptor(Ligand=None, Fadd=None) | Ligand(Receptor=1) % Receptor(Ligand=1, Fadd=None), bind_0_Ligand_binder_Receptor_binder_target_2kf, bind_0_Ligand_binder_Receptor_binder_target_1kr)
Rule('bind_0_Receptor_binder_Fadd_binder_target', Receptor(Ligand=ANY, Fadd=None) + Fadd(Receptor=None, C8pro=None) | Receptor(Ligand=ANY, Fadd=1) % Fadd(Receptor=1, C8pro=None), bind_0_Receptor_binder_Fadd_binder_target_2kf, bind_0_Receptor_binder_Fadd_binder_target_1kr)
Rule('substrate_binding_0_Fadd_catalyzer_C8pro_substrate', Fadd(Receptor=ANY, C8pro=None) + C8pro(Fadd=None) | Fadd(Receptor=ANY, C8pro=1) % C8pro(Fadd=1), substrate_binding_0_Fadd_catalyzer_C8pro_substrate_2kf, substrate_binding_0_Fadd_catalyzer_C8pro_substrate_1kr)
Rule('catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product', Fadd(Receptor=ANY, C8pro=1) % C8pro(Fadd=1) >> Fadd(Receptor=ANY, C8pro=None) + C8A(BidU=None), catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product_1kc)
Rule('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product', C8A(BidU=None) + BidU(C8A=None) | C8A(BidU=1) % BidU(C8A=1), catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_2kf, catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_1kr)
Rule('catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product', C8A(BidU=1) % BidU(C8A=1) >> C8A(BidU=None) + BidT(), catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product_1kc)
Rule('conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex', ApafI() + CytoCC() | ApafA(), conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_2kf, conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_1kr)
Rule('inhibition_0_SmacC_inhibitor_Xiap_inh_target', SmacC(Xiap=None) + Xiap(SmacC=None, Apop=None, C3A=None) | SmacC(Xiap=1) % Xiap(SmacC=1, Apop=None, C3A=None), inhibition_0_SmacC_inhibitor_Xiap_inh_target_2kf, inhibition_0_SmacC_inhibitor_Xiap_inh_target_1kr)
Rule('conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex', ApafA() + C9() | Apop(C3pro=None, Xiap=None), conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_2kf, conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_1kr)
Rule('catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product', Apop(C3pro=None, Xiap=None) + C3pro(Apop=None) | Apop(C3pro=1, Xiap=None) % C3pro(Apop=1), catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_2kf, catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_1kr)
Rule('catalysis_1_Apop_catalyzer_C3pro_substrate_C3A_product', Apop(C3pro=1, Xiap=None) % C3pro(Apop=1) >> Apop(C3pro=None, Xiap=None) + C3A(Xiap=None, ParpU=None), catalysis_1_Apop_catalyzer_C3pro_substrate_C3A_product_1kc)
Rule('inhibition_0_Xiap_inhibitor_Apop_inh_target', Xiap(SmacC=None, Apop=None, C3A=None) + Apop(C3pro=None, Xiap=None) | Xiap(SmacC=None, Apop=1, C3A=None) % Apop(C3pro=None, Xiap=1), inhibition_0_Xiap_inhibitor_Apop_inh_target_2kf, inhibition_0_Xiap_inhibitor_Apop_inh_target_1kr)
Rule('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product', Xiap(SmacC=None, Apop=None, C3A=None) + C3A(Xiap=None, ParpU=None) | Xiap(SmacC=None, Apop=None, C3A=1) % C3A(Xiap=1, ParpU=None), catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_2kf, catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_1kr)
Rule('catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product', Xiap(SmacC=None, Apop=None, C3A=1) % C3A(Xiap=1, ParpU=None) >> Xiap(SmacC=None, Apop=None, C3A=None) + C3ub(), catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product_1kc)
Rule('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product', C3A(Xiap=None, ParpU=None) + ParpU(C3A=None) | C3A(Xiap=None, ParpU=1) % ParpU(C3A=1), catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_2kf, catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_1kr)
Rule('catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product', C3A(Xiap=None, ParpU=1) % ParpU(C3A=1) >> C3A(Xiap=None, ParpU=None) + ParpC(), catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product_1kc)
Rule('equilibration_0_BidT_equil_a_BidM_equil_b', BidT() | BidM(BaxM=None, Bcl2=None), equilibration_0_BidT_equil_a_BidM_equil_b_1kf, equilibration_0_BidT_equil_a_BidM_equil_b_1kr)
Rule('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product', BidM(BaxM=None, Bcl2=None) + BaxM(BidM=None, BaxA=None) | BidM(BaxM=1, Bcl2=None) % BaxM(BidM=1, BaxA=None), catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_2kf, catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_1kr)
Rule('catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product', BidM(BaxM=1, Bcl2=None) % BaxM(BidM=1, BaxA=None) >> BidM(BaxM=None, Bcl2=None) + BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None), catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product_1kc)
Rule('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate', BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxM(BidM=None, BaxA=None) | BaxA(BaxM=1, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) % BaxM(BidM=None, BaxA=1), self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_2kf, self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_1kr)
Rule('self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate', BaxA(BaxM=1, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) % BaxM(BidM=None, BaxA=1) >> BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None), self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate_1kc)
Rule('inhibition_0_Bcl2_inhibitor_BidM_inh_target', Bcl2(BidM=None, BaxA=None) + BidM(BaxM=None, Bcl2=None) | Bcl2(BidM=1, BaxA=None) % BidM(BaxM=None, Bcl2=1), inhibition_0_Bcl2_inhibitor_BidM_inh_target_2kf, inhibition_0_Bcl2_inhibitor_BidM_inh_target_1kr)
Rule('inhibition_0_Bcl2_inhibitor_BaxA_inh_target', Bcl2(BidM=None, BaxA=None) + BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) | Bcl2(BidM=None, BaxA=1) % BaxA(BaxM=None, Bcl2=1, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None), inhibition_0_Bcl2_inhibitor_BaxA_inh_target_2kf, inhibition_0_Bcl2_inhibitor_BaxA_inh_target_1kr)
Rule('pore_formation_0_BaxA_pore', BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) | BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=None, SmacM=None, CytoCM=None), pore_formation_0_BaxA_pore_2kf, pore_formation_0_BaxA_pore_1kr)
Rule('pore_formation_1_BaxA_pore', BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=None, SmacM=None, CytoCM=None) | BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None), pore_formation_1_BaxA_pore_2kf, pore_formation_1_BaxA_pore_1kr)
Rule('pore_formation_2_BaxA_pore', BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) | BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None), pore_formation_2_BaxA_pore_2kf, pore_formation_2_BaxA_pore_1kr)
Rule('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C', BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + SmacM(BaxA=None) | BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=5, CytoCM=None) % SmacM(BaxA=5), transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_2kf, transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kr)
Rule('transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C', BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=5, CytoCM=None) % SmacM(BaxA=5) >> BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + SmacC(Xiap=None), transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kc)
Rule('transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C', BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + CytoCM(BaxA=None) | BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=5) % CytoCM(BaxA=5), transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_2kf, transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kr)
Rule('transport_1_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C', BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=5) % CytoCM(BaxA=5) >> BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + CytoCC(), transport_1_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kc)
Initial(Ligand(Receptor=None), Ligand_0)
Initial(ParpU(C3A=None), ParpU_0)
Initial(C8A(BidU=None), C8A_0)
Initial(SmacM(BaxA=None), SmacM_0)
Initial(BaxM(BidM=None, BaxA=None), BaxM_0)
Initial(Apop(C3pro=None, Xiap=None), Apop_0)
Initial(Fadd(Receptor=None, C8pro=None), Fadd_0)
Initial(SmacC(Xiap=None), SmacC_0)
Initial(ParpC(), ParpC_0)
Initial(Xiap(SmacC=None, Apop=None, C3A=None), Xiap_0)
Initial(C9(), C9_0)
Initial(C3ub(), C3ub_0)
Initial(C8pro(Fadd=None), C8pro_0)
Initial(Bcl2(BidM=None, BaxA=None), Bcl2_0)
Initial(C3pro(Apop=None), C3pro_0)
Initial(CytoCM(BaxA=None), CytoCM_0)
Initial(CytoCC(), CytoCC_0)
Initial(BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None), BaxA_0)
Initial(ApafI(), ApafI_0)
Initial(BidU(C8A=None), BidU_0)
Initial(BidT(), BidT_0)
Initial(C3A(Xiap=None, ParpU=None), C3A_0)
Initial(ApafA(), ApafA_0)
Initial(BidM(BaxM=None, Bcl2=None), BidM_0)
Initial(Receptor(Ligand=None, Fadd=None), Receptor_0)
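# A minimal simulation sketch (assumption: pysb.simulator and scipy are
# installed; the observable names match the Observable declarations above):
#
#   from pysb.simulator import ScipyOdeSimulator
#   import numpy as np
#   tspan = np.linspace(0, 20000, 201)
#   result = ScipyOdeSimulator(model, tspan=tspan).run()
#   print(result.observables['ParpC_obs'][-1])   # cleaved PARP at t_end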
|
from random import randint
import pytest
from starlette.applications import Starlette
from starlette.endpoints import HTTPEndpoint
from starlette.responses import JSONResponse
from starlette.routing import Mount, Route
from starlette.testclient import TestClient
from spectree import SpecTree, Response
from .common import Query, Resp, JSON, Headers, Cookies
def before_handler(req, resp, err, instance):
if err:
resp.headers['X-Error'] = 'Validation Error'
def after_handler(req, resp, err, instance):
resp.headers['X-Validation'] = 'Pass'
def method_handler(req, resp, err, instance):
resp.headers['X-Name'] = instance.name
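# SpecTree hooks share the signature (req, resp, err, instance): `err` carries
# the validation error, if any, and `instance` is the endpoint object for
# class-based views, which is how method_handler can read `instance.name`.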
api = SpecTree('starlette', before=before_handler, after=after_handler)
class Ping(HTTPEndpoint):
name = 'Ping'
@api.validate(headers=Headers, tags=['test', 'health'], after=method_handler)
def get(self, request):
"""summary
description"""
return JSONResponse({'msg': 'pong'})
@api.validate(
query=Query,
json=JSON,
cookies=Cookies,
resp=Response(HTTP_200=Resp, HTTP_401=None),
tags=['api', 'test'])
async def user_score(request):
score = [randint(0, request.context.json.limit) for _ in range(5)]
score.sort(reverse=request.context.query.order)
assert request.context.cookies.pub == 'abcdefg'
assert request.cookies['pub'] == 'abcdefg'
return JSONResponse({
'name': request.context.json.name,
'score': score
})
app = Starlette(routes=[
Route('/ping', Ping),
Mount('/api', routes=[
Mount('/user', routes=[
Route('/{name}', user_score, methods=['POST']),
])
])
])
api.register(app)
@pytest.fixture
def client():
with TestClient(app) as client:
yield client
def test_starlette_validate(client):
resp = client.get('/ping')
assert resp.status_code == 422
assert resp.headers.get('X-Error') == 'Validation Error', resp.headers
resp = client.get('/ping', headers={'lang': 'en-US'})
assert resp.json() == {'msg': 'pong'}
assert resp.headers.get('X-Error') is None
assert resp.headers.get('X-Name') == 'Ping'
assert resp.headers.get('X-Validation') is None
resp = client.post('/api/user/starlette')
assert resp.status_code == 422
assert resp.headers.get('X-Error') == 'Validation Error'
resp = client.post(
'/api/user/starlette?order=1',
json=dict(name='starlette', limit=10),
cookies=dict(pub='abcdefg'),
)
resp_body = resp.json()
assert resp_body['name'] == 'starlette'
assert resp_body['score'] == sorted(resp_body['score'], reverse=True)
assert resp.headers.get('X-Validation') == 'Pass'
resp = client.post(
'/api/user/starlette?order=0',
json=dict(name='starlette', limit=10),
cookies=dict(pub='abcdefg'),
)
resp_body = resp.json()
assert resp_body['name'] == 'starlette'
assert resp_body['score'] == sorted(resp_body['score'], reverse=False)
assert resp.headers.get('X-Validation') == 'Pass'
def test_starlette_doc(client):
resp = client.get('/apidoc/openapi.json')
assert resp.json() == api.spec
resp = client.get('/apidoc/redoc')
assert resp.status_code == 200
resp = client.get('/apidoc/swagger')
assert resp.status_code == 200
|
#define _GNU_SOURCE
#include "opend.h"
#include <syslog.h>
int debug, oflag, client_size, log_to_stderr;
char errmsg[MAXLINE];
char *pathname;
Client *client = NULL;
int main(int argc, char *argv[]){
int c;
log_open("open.serv",LOG_PID,LOG_USER);
opterr = 0;
    while((c = getopt(argc, argv, "d")) != -1){
switch(c){
case 'd':
debug = log_to_stderr = 1;
break;
case '?':
err_quit("unrecognized option: -%c", optopt);
        }
}
if(debug == 0){
daemonize("opend");
    }
    loop();
    return 0;
}
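/*
 * Usage sketch: "opend -d" keeps the server in the foreground with logging
 * to stderr; without -d it daemonizes before serving requests via loop()
 * (daemonize(), loop() and err_quit() are assumed to come from opend.h).
 */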
|
/*global L, Ti, Titanium, joli, uploader, logger, models, sus, cust*/
/*jslint nomen: true, sloppy : true, plusplus: true, vars: true, newcap: true*/
var theme = require('/ui/common/theme');
var _ = require('/lib/underscore-min');
var model = require('model/model');
function cf_view() {
var win = Ti.UI.createWindow(_.extend({
title : L('postcards')
}, theme.window));
var images = [];
images.push('/images/bart_born.jpg');
images.push('/images/200px-Lisa_Simpson.png');
images.push('/images/Bart_graffiti.jpg');
images.push('/images/homer_wedding.jpg');
images.push('/images/simpson_family.jpg');
var osname = Titanium.Platform.osname;
if (osname !== 'android') {
// create coverflow view with images
var view = Titanium.UI.iOS.createCoverFlowView({
images : images,
backgroundColor : '#000'
});
// click listener - when image is clicked
view.addEventListener('click', function(e) {
Titanium.API.info("image clicked: " + e.index + ', selected is ' + view.selected);
});
// change listener when active image changes
view.addEventListener('change', function(e) {
Titanium.API.info("image changed: " + e.index + ', selected is ' + view.selected);
});
win.add(view);
} else {
var view1 = Ti.UI.createView({
backgroundColor : theme.backgroundColor
});
var view2 = Ti.UI.createView({
backgroundColor : theme.backgroundColor
});
var view3 = Ti.UI.createView({
backgroundColor : theme.backgroundColor
});
var view4 = Ti.UI.createView({
backgroundColor : theme.backgroundColor
});
var view5 = Ti.UI.createView({
backgroundColor : theme.backgroundColor
});
var image1 = Ti.UI.createImageView({
image : '/images/bart_born.jpg'
});
var image2 = Ti.UI.createImageView({
image : '/images/200px-Lisa_Simpson.png'
});
var image3 = Ti.UI.createImageView({
image : '/images/Bart_graffiti.jpg'
});
var image4 = Ti.UI.createImageView({
image : '/images/homer_wedding.jpg'
});
var image5 = Ti.UI.createImageView({
image : '/images/simpson_family.jpg'
});
view1.add(image1);
view2.add(image2);
view3.add(image3);
view4.add(image4);
view5.add(image5);
var scrollableView = Ti.UI.createScrollableView({
views : [view1, view2, view3, view4, view5],
showPagingControl : true
});
win.add(scrollableView);
}
return win;
}
module.exports = cf_view;
|
import baseFlatten from './_baseFlatten';
import baseIteratee from './_baseIteratee';
import baseUniq from './_baseUniq';
import isArrayLikeObject from './isArrayLikeObject';
import last from './last';
import rest from './rest';
/**
* This method is like `_.union` except that it accepts `iteratee` which is
* invoked for each element of each `arrays` to generate the criterion by
* which uniqueness is computed. The iteratee is invoked with one argument:
* (value).
*
* @static
* @memberOf _
* @since 4.0.0
* @category Array
* @param {...Array} [arrays] The arrays to inspect.
* @param {Array|Function|Object|string} [iteratee=_.identity]
* The iteratee invoked per element.
* @returns {Array} Returns the new array of combined values.
* @example
*
* _.unionBy([2.1, 1.2], [4.3, 2.4], Math.floor);
* // => [2.1, 1.2, 4.3]
*
* // The `_.property` iteratee shorthand.
* _.unionBy([{ 'x': 1 }], [{ 'x': 2 }, { 'x': 1 }], 'x');
* // => [{ 'x': 1 }, { 'x': 2 }]
*/
var unionBy = rest(function(arrays) {
var iteratee = last(arrays);
if (isArrayLikeObject(iteratee)) {
iteratee = undefined;
}
return baseUniq(baseFlatten(arrays, 1, isArrayLikeObject, true), baseIteratee(iteratee));
});
export default unionBy;
|
/*
* This header is generated by classdump-dyld 1.0
* on Tuesday, November 5, 2019 at 2:51:02 AM Mountain Standard Time
* Operating System: Version 13.0 (Build 17J586)
* Image Source: /System/Library/Frameworks/Vision.framework/Vision
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by Elias Limneos.
*/
#import <Vision/VNTargetedImageRequest.h>
@class VNImageBuffer, VNImageRegistrationSignature;
@interface VNImageRegistrationRequest : VNTargetedImageRequest {
VNImageBuffer* _cachedFloatingImageBuffer;
VNImageRegistrationSignature* _cachedFloatingImageSignature;
}
+(BOOL)warmUpRequestPerformer:(id)arg1 error:(id*)arg2 ;
-(BOOL)warmUpRequestPerformer:(id)arg1 error:(id*)arg2 ;
-(BOOL)allowsCachingOfResults;
-(BOOL)internalPerformInContext:(id)arg1 error:(id*)arg2 ;
-(BOOL)wantsSequencedRequestObservationsRecording;
-(id)cachedFloatingImageBufferReturningError:(id*)arg1 ;
-(id)cachedFloatingImageRegistrationSignatureReturningError:(id*)arg1 ;
-(BOOL)getReferenceImageBuffer:(id*)arg1 registrationSignature:(id*)arg2 forRequestPerformingContext:(id)arg3 options:(id)arg4 error:(id*)arg5 ;
@end
|
import pytest
import os
import numpy as np
import shutil
@pytest.fixture
def dummy_metadata():
import zarr
from ..metadata import MetaData
fn = os.path.join('scarf', 'tests', 'datasets', 'dummy_metadata.zarr')
if os.path.isdir(fn):
shutil.rmtree(fn)
g = zarr.open(fn)
data = np.array([1, 1, 1, 1, 0, 0, 1, 1, 1]).astype(bool)
g.create_dataset('I', data=data, chunks=(100000,),
shape=len(data), dtype=data.dtype)
return MetaData(g)
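# The fixture's boolean 'I' column marks 7 of its 9 entries True; the
# fetch/fetch_all and active_index assertions below rely on that split.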
def test_metadata_attrs(dummy_metadata):
assert dummy_metadata.N == 9
assert np.all(dummy_metadata.index == np.array(range(9)))
def test_metadata_fetch(dummy_metadata):
assert len(dummy_metadata.fetch('I')) == 7
assert len(dummy_metadata.fetch_all('I')) == 9
def test_metadata_verify_bool(dummy_metadata):
assert dummy_metadata._verify_bool('I') is True
def test_metadata_active_index(dummy_metadata):
a = np.array([0, 1, 2, 3, 6, 7, 8])
assert np.all(dummy_metadata.active_index(key='I') == a)
|
#!/usr/bin/env python3
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.script import *
from test_framework.mininode import *
from test_framework.blocktools import *
from test_framework.xray import *
"""
This test specifically tests that inputs to transactions in the mempool are not used in staking.
"""
class XrayPOSConflictingStakingMempoolTxTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
self.setup_clean_chain = True
self.extra_args = [['-txindex=1', '-aggressive-staking'], ['-staking=1', '-txindex=1']]
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def sync_disconnected_nodes(self, receive_from, sync_to):
for block_number in range(sync_to.getblockcount(), receive_from.getblockcount()+1):
block_hash = receive_from.getblockhash(block_number)
block_raw_hex = receive_from.getblock(block_hash, False)
sync_to.submitblock(block_raw_hex)
def run_test(self):
privkey = byte_to_base58(hash256(struct.pack('<I', 0)), 239)
self.nodes[0].importprivkey(privkey)
disconnect_nodes(self.nodes[0], 1)
for n in self.nodes: n.setmocktime(int(time.time())-10000)
# First generate some blocks so we have 20 valid staking txs for the node we run the test on (node#0)
# We also mature three coinbases for the node that will orphan node#0s blocks
        # We continuously sync the blocks between the disconnected nodes, using getblock and submitblock.
staking_nodes_prevouts = []
self.nodes[1].generate(3)
self.sync_disconnected_nodes(self.nodes[1], self.nodes[0])
last_block_hashes = self.nodes[0].generatetoaddress(20, "qSrM9K6FMhZ29Vkp8Rdk8Jp66bbfpjFETq")
self.sync_disconnected_nodes(self.nodes[0], self.nodes[1])
self.nodes[1].generate(COINBASE_MATURITY)
self.sync_disconnected_nodes(self.nodes[1], self.nodes[0])
        # Build txs that spend node#0's available staking outputs; they are broadcast later so the staker has time to start first
txs = []
for last_block_hash in last_block_hashes:
last_coinbase = self.nodes[0].getblock(last_block_hash)['tx'][0]
staking_prevout = COutPoint(int(last_coinbase, 16), 0)
tx = CTransaction()
tx.vin = [CTxIn(staking_prevout)]
tx.vout = [CTxOut(int((20000-0.01)*COIN), CScript([OP_DUP, OP_HASH160, hex_str_to_bytes(p2pkh_to_hex_hash(self.nodes[0].getnewaddress())), OP_EQUALVERIFY, OP_CHECKSIG]))]
txs.append(rpc_sign_transaction(self.nodes[0], tx))
print("blkcnt", self.nodes[0].getblockcount())
for n in self.nodes: n.setmocktime(int(time.time()))
staking_prevouts = collect_prevouts(self.nodes[0])
print(len(staking_prevouts))
nTime = int(time.time()) & (~TIMESTAMP_MASK)
block, key = create_unsigned_pos_block(self.nodes[0], staking_prevouts, nTime=nTime)
block.sign_block(key)
block.rehash()
for tx in txs:
self.nodes[0].sendrawtransaction(bytes_to_hex_str(tx.serialize()))
print("blkcnt", self.nodes[0].getblockcount())
assert_equal(self.nodes[0].submitblock(bytes_to_hex_str(block.serialize())), None)
assert_equal(self.nodes[0].getblockcount(), COINBASE_MATURITY+24)
print("blkcnt", self.nodes[0].getblockcount())
# Allow node#1 to stake two blocks, which will orphan any (potentially) staked block in node#0
wait_until(lambda: self.nodes[1].getblockcount() >= COINBASE_MATURITY+25)
self.nodes[0].setmocktime(self.nodes[1].getblock(self.nodes[1].getbestblockhash())['time'])
# Connect the nodes
print(self.nodes[0].getpeerinfo())
connect_nodes_bi(self.nodes, 0, 1)
# Sync the nodes
timeout = time.time() + 10
wait_until(lambda: self.nodes[0].getbestblockhash() == self.nodes[1].getbestblockhash())
print('node#0 %d; blockcount=%d' % (0, self.nodes[0].getblockcount()))
print('node#1 %d; blockcount=%d' % (0, self.nodes[1].getblockcount()))
best_chain_height = self.nodes[1].getblockcount()
assert_equal(self.nodes[0].getblockcount(), best_chain_height)
# Allow one more block to be staked, which will include the txs in the mempool
        wait_until(lambda: self.nodes[1].getblockcount() >= best_chain_height+1)
print('node#0 %d; blockcount=%d' % (0, self.nodes[0].getblockcount()))
print('node#1 %d; blockcount=%d' % (0, self.nodes[1].getblockcount()))
        # Now we should have a balance equal to 19 spent stakes (each block reward minus the 0.01 fee) plus one full block reward
assert_equal(int(self.nodes[0].getbalance()*COIN), int((19*(INITIAL_BLOCK_REWARD-0.01)+INITIAL_BLOCK_REWARD)*COIN))
assert_equal(self.nodes[0].getbestblockhash(), self.nodes[1].getbestblockhash())
if __name__ == '__main__':
XrayPOSConflictingStakingMempoolTxTest().main() |
#!/usr/bin/python
#
# Copyright (c) 2019 Zim Kalinowski, (@zikalino)
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: frontdoor
version_added: '2.9'
short_description: Manage Azure FrontDoor instance.
description:
- 'Create, update and delete instance of Azure FrontDoor.'
options:
resource_group:
description:
- Name of the Resource group within the Azure subscription.
required: true
type: str
name:
description:
- Resource name.
type: str
location:
description:
- Resource location.
type: str
friendly_name:
description:
- A friendly name for the frontDoor
type: str
routing_rules:
description:
- Routing rules associated with this Front Door.
type: list
suboptions:
id:
description:
- Resource ID.
type: str
frontend_endpoints:
description:
- Frontend endpoints associated with this rule
type: list
suboptions:
id:
description:
- Resource ID.
type: str
accepted_protocols:
description:
- Protocol schemes to match for this rule
type: list
patterns_to_match:
description:
- The route patterns of the rule.
type: list
enabled_state:
description:
- >-
Whether to enable use of this rule. Permitted values are 'Enabled'
or 'Disabled'
type: str
route_configuration:
description:
- A reference to the routing configuration.
type: dict
resource_state:
description:
- Resource status.
type: str
name:
description:
- Resource name.
type: str
type:
description:
- Resource type.
type: str
load_balancing_settings:
description:
- Load balancing settings associated with this Front Door instance.
type: list
suboptions:
id:
description:
- Resource ID.
type: str
sample_size:
description:
- The number of samples to consider for load balancing decisions
type: number
successful_samples_required:
description:
- The number of samples within the sample period that must succeed
type: number
additional_latency_milliseconds:
description:
- >-
The additional latency in milliseconds for probes to fall into the
lowest latency bucket
type: number
resource_state:
description:
- Resource status.
type: str
name:
description:
- Resource name.
type: str
type:
description:
- Resource type.
type: str
health_probe_settings:
description:
- Health probe settings associated with this Front Door instance.
type: list
suboptions:
id:
description:
- Resource ID.
type: str
path:
description:
- The path to use for the health probe. Default is /
type: str
protocol:
description:
- Protocol scheme to use for this probe
type: str
interval_in_seconds:
description:
- The number of seconds between health probes.
type: number
resource_state:
description:
- Resource status.
type: str
name:
description:
- Resource name.
type: str
type:
description:
- Resource type.
type: str
backend_pools:
description:
- Backend pools available to routing rules.
type: list
suboptions:
id:
description:
- Resource ID.
type: str
backends:
description:
- The set of backends for this pool
type: list
suboptions:
address:
description:
- Location of the backend (IP address or FQDN)
type: str
http_port:
description:
- The HTTP TCP port number. Must be between 1 and 65535.
type: number
https_port:
description:
- The HTTPS TCP port number. Must be between 1 and 65535.
type: number
enabled_state:
description:
- >-
Whether to enable use of this backend. Permitted values are
'Enabled' or 'Disabled'
type: str
priority:
description:
- >-
Priority to use for load balancing. Higher priorities will not
be used for load balancing if any lower priority backend is
healthy.
type: number
weight:
description:
- Weight of this endpoint for load balancing purposes.
type: number
backend_host_header:
description:
- >-
The value to use as the host header sent to the backend. If
blank or unspecified, this defaults to the incoming host.
type: str
load_balancing_settings:
description:
- Load balancing settings for a backend pool
type: dict
suboptions:
id:
description:
- Resource ID.
type: str
health_probe_settings:
description:
- L7 health probe settings for a backend pool
type: dict
suboptions:
id:
description:
- Resource ID.
type: str
resource_state:
description:
- Resource status.
type: str
name:
description:
- Resource name.
type: str
type:
description:
- Resource type.
type: str
frontend_endpoints:
description:
- Frontend endpoints available to routing rules.
type: list
suboptions:
id:
description:
- Resource ID.
type: str
host_name:
description:
- The host name of the frontendEndpoint. Must be a domain name.
type: str
session_affinity_enabled_state:
description:
- >-
Whether to allow session affinity on this host. Valid options are
'Enabled' or 'Disabled'
type: str
session_affinity_ttl_seconds:
description:
- >-
UNUSED. This field will be ignored. The TTL to use in seconds for
session affinity, if applicable.
type: number
web_application_firewall_policy_link:
description:
- >-
Defines the Web Application Firewall policy for each host (if
applicable)
type: dict
suboptions:
id:
description:
- Resource ID.
type: str
resource_state:
description:
- Resource status.
type: str
custom_https_provisioning_state:
description:
- Provisioning status of Custom Https of the frontendEndpoint.
type: str
custom_https_provisioning_substate:
description:
- >-
Provisioning substate shows the progress of custom HTTPS
enabling/disabling process step by step.
type: str
custom_https_configuration:
description:
- The configuration specifying how to enable HTTPS
type: dict
suboptions:
certificate_source:
description:
- Defines the source of the SSL certificate
type: str
protocol_type:
description:
- >-
Defines the TLS extension protocol that is used for secure
delivery
type: str
key_vault_certificate_source_parameters:
description:
- >-
KeyVault certificate source parameters (if
certificateSource=AzureKeyVault)
type: dict
front_door_certificate_source_parameters:
description:
- >-
Parameters required for enabling SSL with Front Door-managed
certificates (if certificateSource=FrontDoor)
type: dict
name:
description:
- Resource name.
type: str
type:
description:
- Resource type.
type: str
backend_pools_settings:
description:
- Settings for all backendPools
type: dict
suboptions:
enforce_certificate_name_check:
description:
- >-
Whether to enforce certificate name check on HTTPS requests to all
backend pools. No effect on non-HTTPS requests.
type: str
enabled_state:
description:
- >-
Operational status of the Front Door load balancer. Permitted values are
'Enabled' or 'Disabled'
type: str
resource_state:
description:
- Resource status of the Front Door.
type: str
provisioning_state:
description:
- Provisioning state of the Front Door.
type: str
cname:
description:
- The host that each frontendEndpoint must CNAME to.
type: str
id:
description:
- Resource ID.
type: str
type:
description:
- Resource type.
type: str
state:
description:
- Assert the state of the FrontDoor.
- >-
      Use C(present) to create or update a FrontDoor and C(absent) to delete
it.
default: present
choices:
- absent
- present
extends_documentation_fragment:
- azure
- azure_tags
author:
- Zim Kalinowski (@zikalino)
'''
EXAMPLES = '''
- name: Create or update specific Front Door
azure.rm.frontdoor:
resource_group: myResourceGroup
name: myFrontDoor
front_door_parameters:
id: >-
/subscriptions/{{ subscription_id }}/resourceGroups/{{ resource_group
}}/providers/Microsoft.Network/frontDoors/{{ front_door_name }}
location: westus
tags:
tag1: value1
tag2: value2
properties:
routingRules:
- name: routingRule1
properties:
frontendEndpoints:
- id: >-
/subscriptions/{{ subscription_id }}/resourceGroups/{{
resource_group }}/providers/Microsoft.Network/frontDoors/{{
front_door_name }}/frontendEndpoints/{{
frontend_endpoint_name }}
- id: >-
/subscriptions/{{ subscription_id }}/resourceGroups/{{
resource_group }}/providers/Microsoft.Network/frontDoors/{{
front_door_name }}/frontendEndpoints/{{
frontend_endpoint_name }}
acceptedProtocols:
- Http
patternsToMatch:
- /*
routeConfiguration:
'@odata.type': >-
#Microsoft.Azure.FrontDoor.Models.FrontdoorForwardingConfiguration
backendPool:
id: >-
/subscriptions/{{ subscription_id }}/resourceGroups/{{
resource_group }}/providers/Microsoft.Network/frontDoors/{{
front_door_name }}/backendPools/{{ backend_pool_name }}
enabledState: Enabled
healthProbeSettings:
- name: healthProbeSettings1
properties:
path: /
protocol: Http
intervalInSeconds: '120'
loadBalancingSettings:
- name: loadBalancingSettings1
properties:
sampleSize: '4'
successfulSamplesRequired: '2'
backendPools:
- name: backendPool1
properties:
backends:
- address: w3.contoso.com
httpPort: '80'
httpsPort: '443'
weight: '1'
priority: '2'
- address: contoso.com.website-us-west-2.othercloud.net
httpPort: '80'
httpsPort: '443'
weight: '2'
priority: '1'
- address: contoso1.azurewebsites.net
httpPort: '80'
httpsPort: '443'
weight: '1'
priority: '1'
loadBalancingSettings:
id: >-
/subscriptions/{{ subscription_id }}/resourceGroups/{{
resource_group }}/providers/Microsoft.Network/frontDoors/{{
front_door_name }}/loadBalancingSettings/{{
load_balancing_setting_name }}
healthProbeSettings:
id: >-
/subscriptions/{{ subscription_id }}/resourceGroups/{{
resource_group }}/providers/Microsoft.Network/frontDoors/{{
front_door_name }}/healthProbeSettings/{{
health_probe_setting_name }}
frontendEndpoints:
- name: frontendEndpoint1
properties:
hostName: www.contoso.com
sessionAffinityEnabledState: Enabled
sessionAffinityTtlSeconds: '60'
webApplicationFirewallPolicyLink:
id: >-
/subscriptions/{{ subscription_id }}/resourceGroups/{{
resource_group
}}/providers/Microsoft.Network/frontDoorWebApplicationFirewallPolicies/{{
front_door_web_application_firewall_policy_name }}
- name: default
properties:
hostName: frontDoor1.azurefd.net
backendPoolsSettings:
enforceCertificateNameCheck: Enabled
enabledState: Enabled
- name: Delete Front Door
azure.rm.frontdoor:
resource_group: myResourceGroup
name: myFrontDoor
state: absent
'''
RETURN = '''
id:
description:
- Resource ID.
returned: always
type: str
sample: null
name:
description:
- Resource name.
returned: always
type: str
sample: null
type:
description:
- Resource type.
returned: always
type: str
sample: null
location:
description:
- Resource location.
returned: always
type: str
sample: null
tags:
description:
- Resource tags.
returned: always
type: dict
sample: null
properties:
description:
- Properties of the Front Door Load Balancer
returned: always
type: dict
sample: null
contains:
friendly_name:
description:
- A friendly name for the frontDoor
returned: always
type: str
sample: null
routing_rules:
description:
- Routing rules associated with this Front Door.
returned: always
type: dict
sample: null
contains:
id:
description:
- Resource ID.
returned: always
type: str
sample: null
properties:
description:
- Properties of the Front Door Routing Rule
returned: always
type: dict
sample: null
contains:
frontend_endpoints:
description:
- Frontend endpoints associated with this rule
returned: always
type: dict
sample: null
contains:
id:
description:
- Resource ID.
returned: always
type: str
sample: null
accepted_protocols:
description:
- Protocol schemes to match for this rule
returned: always
type: str
sample: null
patterns_to_match:
description:
- The route patterns of the rule.
returned: always
type: str
sample: null
enabled_state:
description:
- >-
Whether to enable use of this rule. Permitted values are
'Enabled' or 'Disabled'
returned: always
type: str
sample: null
route_configuration:
description:
- A reference to the routing configuration.
returned: always
type: dict
sample: null
resource_state:
description:
- Resource status.
returned: always
type: str
sample: null
name:
description:
- Resource name.
returned: always
type: str
sample: null
type:
description:
- Resource type.
returned: always
type: str
sample: null
load_balancing_settings:
description:
- Load balancing settings associated with this Front Door instance.
returned: always
type: dict
sample: null
contains:
id:
description:
- Resource ID.
returned: always
type: str
sample: null
properties:
description:
- Properties of the load balancing settings
returned: always
type: dict
sample: null
contains:
sample_size:
description:
- The number of samples to consider for load balancing decisions
returned: always
type: number
sample: null
successful_samples_required:
description:
- >-
The number of samples within the sample period that must
succeed
returned: always
type: number
sample: null
additional_latency_milliseconds:
description:
- >-
The additional latency in milliseconds for probes to fall into
the lowest latency bucket
returned: always
type: number
sample: null
resource_state:
description:
- Resource status.
returned: always
type: str
sample: null
name:
description:
- Resource name.
returned: always
type: str
sample: null
type:
description:
- Resource type.
returned: always
type: str
sample: null
health_probe_settings:
description:
- Health probe settings associated with this Front Door instance.
returned: always
type: dict
sample: null
contains:
id:
description:
- Resource ID.
returned: always
type: str
sample: null
properties:
description:
- Properties of the health probe settings
returned: always
type: dict
sample: null
contains:
path:
description:
- The path to use for the health probe. Default is /
returned: always
type: str
sample: null
protocol:
description:
- Protocol scheme to use for this probe
returned: always
type: str
sample: null
interval_in_seconds:
description:
- The number of seconds between health probes.
returned: always
type: number
sample: null
resource_state:
description:
- Resource status.
returned: always
type: str
sample: null
name:
description:
- Resource name.
returned: always
type: str
sample: null
type:
description:
- Resource type.
returned: always
type: str
sample: null
backend_pools:
description:
- Backend pools available to routing rules.
returned: always
type: dict
sample: null
contains:
id:
description:
- Resource ID.
returned: always
type: str
sample: null
properties:
description:
- Properties of the Front Door Backend Pool
returned: always
type: dict
sample: null
contains:
backends:
description:
- The set of backends for this pool
returned: always
type: dict
sample: null
contains:
address:
description:
- Location of the backend (IP address or FQDN)
returned: always
type: str
sample: null
http_port:
description:
- The HTTP TCP port number. Must be between 1 and 65535.
returned: always
type: number
sample: null
https_port:
description:
- The HTTPS TCP port number. Must be between 1 and 65535.
returned: always
type: number
sample: null
enabled_state:
description:
- >-
Whether to enable use of this backend. Permitted values
are 'Enabled' or 'Disabled'
returned: always
type: str
sample: null
priority:
description:
- >-
Priority to use for load balancing. Higher priorities will
not be used for load balancing if any lower priority
backend is healthy.
returned: always
type: number
sample: null
weight:
description:
- Weight of this endpoint for load balancing purposes.
returned: always
type: number
sample: null
backend_host_header:
description:
- >-
The value to use as the host header sent to the backend.
If blank or unspecified, this defaults to the incoming
host.
returned: always
type: str
sample: null
load_balancing_settings:
description:
- Load balancing settings for a backend pool
returned: always
type: dict
sample: null
contains:
id:
description:
- Resource ID.
returned: always
type: str
sample: null
health_probe_settings:
description:
- L7 health probe settings for a backend pool
returned: always
type: dict
sample: null
contains:
id:
description:
- Resource ID.
returned: always
type: str
sample: null
resource_state:
description:
- Resource status.
returned: always
type: str
sample: null
name:
description:
- Resource name.
returned: always
type: str
sample: null
type:
description:
- Resource type.
returned: always
type: str
sample: null
frontend_endpoints:
description:
- Frontend endpoints available to routing rules.
returned: always
type: dict
sample: null
contains:
id:
description:
- Resource ID.
returned: always
type: str
sample: null
properties:
description:
- Properties of the Frontend endpoint
returned: always
type: dict
sample: null
contains:
host_name:
description:
- The host name of the frontendEndpoint. Must be a domain name.
returned: always
type: str
sample: null
session_affinity_enabled_state:
description:
- >-
Whether to allow session affinity on this host. Valid options
are 'Enabled' or 'Disabled'
returned: always
type: str
sample: null
session_affinity_ttl_seconds:
description:
- >-
UNUSED. This field will be ignored. The TTL to use in seconds
for session affinity, if applicable.
returned: always
type: number
sample: null
web_application_firewall_policy_link:
description:
- >-
Defines the Web Application Firewall policy for each host (if
applicable)
returned: always
type: dict
sample: null
contains:
id:
description:
- Resource ID.
returned: always
type: str
sample: null
resource_state:
description:
- Resource status.
returned: always
type: str
sample: null
custom_https_provisioning_state:
description:
- Provisioning status of Custom Https of the frontendEndpoint.
returned: always
type: str
sample: null
custom_https_provisioning_substate:
description:
- >-
Provisioning substate shows the progress of custom HTTPS
enabling/disabling process step by step.
returned: always
type: str
sample: null
custom_https_configuration:
description:
- The configuration specifying how to enable HTTPS
returned: always
type: dict
sample: null
contains:
certificate_source:
description:
- Defines the source of the SSL certificate
returned: always
type: str
sample: null
protocol_type:
description:
- >-
Defines the TLS extension protocol that is used for secure
delivery
returned: always
type: str
sample: null
key_vault_certificate_source_parameters:
description:
- >-
KeyVault certificate source parameters (if
certificateSource=AzureKeyVault)
returned: always
type: dict
sample: null
front_door_certificate_source_parameters:
description:
- >-
Parameters required for enabling SSL with Front
Door-managed certificates (if certificateSource=FrontDoor)
returned: always
type: dict
sample: null
name:
description:
- Resource name.
returned: always
type: str
sample: null
type:
description:
- Resource type.
returned: always
type: str
sample: null
backend_pools_settings:
description:
- Settings for all backendPools
returned: always
type: dict
sample: null
contains:
enforce_certificate_name_check:
description:
- >-
Whether to enforce certificate name check on HTTPS requests to all
backend pools. No effect on non-HTTPS requests.
returned: always
type: str
sample: null
enabled_state:
description:
- >-
Operational status of the Front Door load balancer. Permitted values
are 'Enabled' or 'Disabled'
returned: always
type: str
sample: null
resource_state:
description:
- Resource status of the Front Door.
returned: always
type: str
sample: null
provisioning_state:
description:
- Provisioning state of the Front Door.
returned: always
type: str
sample: null
cname:
description:
- The host that each frontendEndpoint must CNAME to.
returned: always
type: str
sample: null
'''
import time
import json
import re
from ansible.module_utils.azure_rm_common_ext import AzureRMModuleBaseExt
from ansible.module_utils.azure_rm_common_rest import GenericRestClient
from copy import deepcopy
try:
from msrestazure.azure_exceptions import CloudError
except ImportError:
# this is handled in azure_rm_common
pass
class Actions:
NoAction, Create, Update, Delete = range(4)
class AzureRMFrontDoors(AzureRMModuleBaseExt):
def __init__(self):
self.module_arg_spec = dict(
resource_group=dict(
type='str',
updatable=False,
disposition='resourceGroupName',
required=True
),
name=dict(
type='str',
updatable=False,
disposition='frontDoorName',
required=True
),
location=dict(
type='str',
updatable=False,
disposition='/'
),
friendly_name=dict(
type='str',
disposition='/properties/friendlyName'
),
routing_rules=dict(
type='list',
disposition='/properties/routingRules',
options=dict(
id=dict(
type='str'
),
frontend_endpoints=dict(
type='list',
disposition='properties/frontendEndpoints',
options=dict(
id=dict(
type='str'
)
)
),
accepted_protocols=dict(
type='list',
disposition='properties/acceptedProtocols',
choices=['Http',
'Https']
),
patterns_to_match=dict(
type='list',
disposition='properties/patternsToMatch'
),
enabled_state=dict(
type='str',
disposition='properties/enabledState',
choices=['Enabled',
'Disabled']
),
route_configuration=dict(
type='dict',
disposition='properties/routeConfiguration'
),
resource_state=dict(
type='str',
disposition='properties/resourceState',
choices=['Creating',
'Enabling',
'Enabled',
'Disabling',
'Disabled',
'Deleting']
),
name=dict(
type='str'
)
)
),
load_balancing_settings=dict(
type='list',
disposition='/properties/loadBalancingSettings',
options=dict(
id=dict(
type='str'
),
sample_size=dict(
type='number',
disposition='properties/sampleSize'
),
successful_samples_required=dict(
type='number',
disposition='properties/successfulSamplesRequired'
),
additional_latency_milliseconds=dict(
type='number',
disposition='properties/additionalLatencyMilliseconds'
),
resource_state=dict(
type='str',
disposition='properties/resourceState',
choices=['Creating',
'Enabling',
'Enabled',
'Disabling',
'Disabled',
'Deleting']
),
name=dict(
type='str'
)
)
),
health_probe_settings=dict(
type='list',
disposition='/properties/healthProbeSettings',
options=dict(
id=dict(
type='str'
),
path=dict(
type='str',
disposition='properties/*'
),
protocol=dict(
type='str',
disposition='properties/*',
choices=['Http',
'Https']
),
interval_in_seconds=dict(
type='number',
disposition='properties/intervalInSeconds'
),
resource_state=dict(
type='str',
disposition='properties/resourceState',
choices=['Creating',
'Enabling',
'Enabled',
'Disabling',
'Disabled',
'Deleting']
),
name=dict(
type='str'
)
)
),
backend_pools=dict(
type='list',
disposition='/properties/backendPools',
options=dict(
id=dict(
type='str'
),
backends=dict(
type='list',
disposition='properties/*',
options=dict(
address=dict(
type='str'
),
http_port=dict(
type='number',
disposition='httpPort'
),
https_port=dict(
type='number',
disposition='httpsPort'
),
enabled_state=dict(
type='str',
disposition='enabledState',
choices=['Enabled',
'Disabled']
),
priority=dict(
type='number'
),
weight=dict(
type='number'
),
backend_host_header=dict(
type='str',
disposition='backendHostHeader'
)
)
),
load_balancing_settings=dict(
type='dict',
disposition='properties/loadBalancingSettings',
options=dict(
id=dict(
type='str'
)
)
),
health_probe_settings=dict(
type='dict',
disposition='properties/healthProbeSettings',
options=dict(
id=dict(
type='str'
)
)
),
resource_state=dict(
type='str',
disposition='properties/resourceState',
choices=['Creating',
'Enabling',
'Enabled',
'Disabling',
'Disabled',
'Deleting']
),
name=dict(
type='str'
)
)
),
frontend_endpoints=dict(
type='list',
disposition='/properties/frontendEndpoints',
options=dict(
id=dict(
type='str'
),
host_name=dict(
type='str',
disposition='properties/hostName'
),
session_affinity_enabled_state=dict(
type='str',
disposition='properties/sessionAffinityEnabledState',
choices=['Enabled',
'Disabled']
),
session_affinity_ttl_seconds=dict(
type='number',
disposition='properties/sessionAffinityTtlSeconds'
),
web_application_firewall_policy_link=dict(
type='dict',
disposition='properties/webApplicationFirewallPolicyLink',
options=dict(
id=dict(
type='str'
)
)
),
resource_state=dict(
type='str',
disposition='properties/resourceState',
choices=['Creating',
'Enabling',
'Enabled',
'Disabling',
'Disabled',
'Deleting']
),
name=dict(
type='str'
)
)
),
backend_pools_settings=dict(
type='dict',
disposition='/properties/backendPoolsSettings',
options=dict(
enforce_certificate_name_check=dict(
type='str',
disposition='enforceCertificateNameCheck',
choices=['Enabled',
'Disabled']
)
)
),
enabled_state=dict(
type='str',
disposition='/properties/enabledState',
choices=['Enabled',
'Disabled']
),
resource_state=dict(
type='str',
disposition='/properties/resourceState',
choices=['Creating',
'Enabling',
'Enabled',
'Disabling',
'Disabled',
'Deleting']
),
state=dict(
type='str',
default='present',
choices=['present', 'absent']
)
)
self.resource_group = None
self.name = None
self.id = None
self.type = None
self.results = dict(changed=False)
self.mgmt_client = None
self.state = None
self.url = None
self.status_code = [200, 201, 202]
self.to_do = Actions.NoAction
self.body = {}
self.query_parameters = {}
self.query_parameters['api-version'] = '2019-04-01'
self.header_parameters = {}
self.header_parameters['Content-Type'] = 'application/json; charset=utf-8'
super(AzureRMFrontDoors, self).__init__(derived_arg_spec=self.module_arg_spec,
supports_check_mode=True,
supports_tags=True)
def exec_module(self, **kwargs):
for key in list(self.module_arg_spec.keys()):
if hasattr(self, key):
setattr(self, key, kwargs[key])
elif kwargs[key] is not None:
self.body[key] = kwargs[key]
self.inflate_parameters(self.module_arg_spec, self.body, 0)
old_response = None
response = None
self.mgmt_client = self.get_mgmt_svc_client(GenericRestClient,
base_url=self._cloud_environment.endpoints.resource_manager)
resource_group = self.get_resource_group(self.resource_group)
if 'location' not in self.body:
self.body['location'] = resource_group.location
self.url = ('/subscriptions' +
'/{{ subscription_id }}' +
'/resourceGroups' +
'/{{ resource_group }}' +
'/providers' +
'/Microsoft.Network' +
'/frontDoors' +
'/{{ front_door_name }}')
self.url = self.url.replace('{{ subscription_id }}', self.subscription_id)
self.url = self.url.replace('{{ resource_group }}', self.resource_group)
self.url = self.url.replace('{{ front_door_name }}', self.name)
old_response = self.get_resource()
if not old_response:
self.log("FrontDoor instance doesn't exist")
if self.state == 'absent':
self.log("Old instance didn't exist")
else:
self.to_do = Actions.Create
else:
self.log('FrontDoor instance already exists')
if self.state == 'absent':
self.to_do = Actions.Delete
else:
modifiers = {}
self.create_compare_modifiers(self.module_arg_spec, '', modifiers)
self.results['modifiers'] = modifiers
self.results['compare'] = []
if not self.default_compare(modifiers, self.body, old_response, '', self.results):
self.to_do = Actions.Update
if (self.to_do == Actions.Create) or (self.to_do == Actions.Update):
self.log('Need to Create / Update the FrontDoor instance')
if self.check_mode:
self.results['changed'] = True
return self.results
response = self.create_update_resource()
# if not old_response:
self.results['changed'] = True
# else:
# self.results['changed'] = old_response.__ne__(response)
self.log('Creation / Update done')
elif self.to_do == Actions.Delete:
self.log('FrontDoor instance deleted')
self.results['changed'] = True
if self.check_mode:
return self.results
self.delete_resource()
# make sure instance is actually deleted, for some Azure resources, instance is hanging around
# for some time after deletion -- this should be really fixed in Azure
while self.get_resource():
time.sleep(20)
else:
self.log('FrontDoor instance unchanged')
self.results['changed'] = False
response = old_response
if response:
self.results["id"] = response["id"]
self.results["name"] = response["name"]
self.results["type"] = response["type"]
self.results["location"] = response["location"]
self.results["tags"] = response["tags"]
self.results["properties"] = response["properties"]
return self.results
def create_update_resource(self):
# self.log('Creating / Updating the FrontDoor instance {0}'.format(self.))
try:
response = self.mgmt_client.query(self.url,
'PUT',
self.query_parameters,
self.header_parameters,
self.body,
self.status_code,
600,
30)
except CloudError as exc:
self.log('Error attempting to create the FrontDoor instance.')
self.fail('Error creating the FrontDoor instance: {0}'.format(str(exc)))
try:
response = json.loads(response.text)
except Exception:
response = {'text': response.text}
return response
def delete_resource(self):
# self.log('Deleting the FrontDoor instance {0}'.format(self.))
try:
response = self.mgmt_client.query(self.url,
'DELETE',
self.query_parameters,
self.header_parameters,
None,
self.status_code,
600,
30)
except CloudError as e:
self.log('Error attempting to delete the FrontDoor instance.')
self.fail('Error deleting the FrontDoor instance: {0}'.format(str(e)))
return True
def get_resource(self):
# self.log('Checking if the FrontDoor instance {0} is present'.format(self.))
found = False
try:
response = self.mgmt_client.query(self.url,
'GET',
self.query_parameters,
self.header_parameters,
None,
self.status_code,
600,
30)
found = True
self.log("Response : {0}".format(response))
# self.log("FrontDoor instance : {0} found".format(response.name))
except CloudError as e:
self.log('Did not find the FrontDoor instance.')
if found is True:
return response
return False
def main():
AzureRMFrontDoors()
if __name__ == '__main__':
main()
|
# -*- coding: utf-8 -*-
#
# PROJ documentation build configuration file, created by
# sphinx-quickstart on Wed Feb 24 10:47:15 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import datetime
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('.'))
sys.path.insert(0, os.path.abspath('_extensions'))
import bibstyle
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.mathjax',
'sphinxcontrib.bibtex',
'breathe',
'redirects',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
# Keep legacy project name
project = 'PROJ'
# See CITATION file
title = 'PROJ coordinate transformation software library'
author = 'PROJ contributors'
now = datetime.datetime.now()
copyright = u'1983-{0}'.format(now.year)
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
version = '7.2.0'
data_version = '1.2'
# use same |release| as |version|
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['operations/options/*.rst']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# Specify default language for syntax highlighting.
highlight_language = 'none'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# Replacement macros for use in code-blocks etc. With inspiration from
# https://github.com/sphinx-doc/sphinx/issues/4054#issuecomment-329097229
def replace_words(app, docname, source):
result = source[0]
for key in app.config.replacements:
result = result.replace(key, app.config.replacements[key])
source[0] = result
replacements = {
"{PROJVERSION}" : "{version_number}".format(version_number=version),
"{PROJDATAVERSION}" : "{data_version_number}".format(data_version_number=data_version),
}
def setup(app):
app.add_config_value('replacements', {}, True)
app.connect('source-read', replace_words)
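# A hedged illustration (not part of this config): with the macros registered
# above, an .rst source line such as
#
#   Download proj-data-{PROJDATAVERSION}.tar.gz for use with PROJ {PROJVERSION}.
#
# is rewritten by the source-read hook into
#
#   Download proj-data-1.2.tar.gz for use with PROJ 7.2.0.
#
# before Sphinx parses the document.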
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
import sphinx_rtd_theme
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
html_theme = "sphinx_rtd_theme"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
'canonical_url': 'https://proj.org',
'logo_only': True,
'display_version': True,
'prev_next_buttons_location': 'both',
'style_external_links': False,
'style_nav_header_background': '#353130',
# Toc options
'collapse_navigation': True,
'sticky_navigation': True,
#'navigation_depth': 4,
'includehidden': True,
'titles_only': False
}
# Add any paths that contain custom themes here, relative to this directory.
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = '../images/logo.png'
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = '../images/favicon.png'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
html_context = {
'display_github': True,
'theme_vcs_pageview_mode': 'edit',
'github_user': 'OSGeo',
'github_repo': 'PROJ',
# TODO: edit when switching active branch
'github_version': '/7.1/docs/source/',
'css_files': [
'_static/theme_overrides.css', # override wide tables in RTD theme
],
}
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%d %b %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
html_show_sourcelink = False
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'projdoc'
# -- Options for LaTeX output ---------------------------------------------
preamble = r"""
\ifdefined\DeclareUnicodeCharacter
\DeclareUnicodeCharacter{2032}{$'$}% prime
\fi
"""
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
'preamble': preamble,
'inputenc': '\\usepackage[utf8]{inputenc}',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'proj.tex', title, author, 'manual'),
]
latex_toplevel_sectioning = 'chapter'
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(
'apps/proj',
'proj',
u'Cartographic projection filter',
['Gerald I. Evenden'],
1
),
(
'apps/cs2cs',
'cs2cs',
u'Cartographic coordinate system filter',
['Frank Warmerdam'],
1
),
(
'apps/cct',
'cct',
u'Coordinate Conversion and Transformation',
['Thomas Knudsen'],
1
),
(
'apps/geod',
'geod',
u'Geodesic computations',
['Charles Karney'],
1
),
(
'apps/gie',
'gie',
u'The Geospatial Integrity Investigation Environment',
['Thomas Knudsen'],
1
),
(
'apps/projinfo',
'projinfo',
u'Geodetic object and coordinate operation queries',
['Even Rouault'],
1
),
(
'apps/projsync',
'projsync',
u'Downloading tool of resource files',
['Even Rouault'],
1
),
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'proj', title, author, 'proj',
'Cartographic projections software library.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
breathe_projects = {
"doxygen_api":"../build/xml/",
}
import redirects
redirect_files = redirects.gather_redirects()
|
/**
* @license
*
* Copyright (c) 2016, Syuuhei Kuno
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* 3. Neither the name of xplain_for_js nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
(function (xpl) {
"use strict";
const VX = 0, VY = 1, VZ = 2;
const CR = 0, CI = 1, CJ = 2, CK = 3;
/**
* Utility class for quaternions.
*
* @constructor
*/
xpl.Quaternion = function () {
throw new Error("Unsupported operation!");
};
/**
* Loads the given scalar components into a quaternion.
*
* d = (a, b, c, d)
*
* @param {number[]} d - The destination quaternion
* @param {number} d_off - Array index of the destination quaternion
* @param {number} rp - Real part of the input quaternion
* @param {number} ip - Imaginary I part of the input quaternion
* @param {number} jp - Imaginary J part of the input quaternion
* @param {number} kp - Imaginary K part of the input quaternion
*/
xpl.Quaternion.load = function (d, d_off, rp, ip, jp, kp) {
d[d_off + CR] = rp;
d[d_off + CI] = ip;
d[d_off + CJ] = jp;
d[d_off + CK] = kp;
};
/**
* Loads the values of another quaternion into a quaternion.
*
* d = q
*
* @param {number[]} d - The destination quaternion
* @param {number} d_off - Array index of the destination quaternion
* @param {number[]} q - The input quaternion
* @param {number} q_off - Array index of the input quaternion
*/
xpl.Quaternion.loadv = function (d, d_off, q, q_off) {
xpl.Quaternion.load(d, d_off, q[q_off + CR], q[q_off + CI], q[q_off + CJ], q[q_off + CK]);
};
/**
* Loads zeros into all components of a quaternion.
*
* d = (0, 0, 0, 0)
*
* @param {number[]} d - The destination quaternion
* @param {number} d_off - Array index of the destination quaternion
*/
xpl.Quaternion.loadZero = function (d, d_off) {
xpl.Quaternion.load(d, d_off, 0, 0, 0, 0);
};
/**
* Loads the identity value into a quaternion.
*
* d = (1, 0, 0, 0)
*
* @param {number[]} d - The destination quaternion
* @param {number} d_off - Array index of the destination quaternion
*/
xpl.Quaternion.loadIdentity = function (d, d_off) {
xpl.Quaternion.load(d, d_off, 1, 0, 0, 0);
};
/**
* Calculates the squared absolute value of a quaternion.
*
* d = |q|^2
*
* @param {number[]} q - The target quaternion
* @param {number} q_off - Array index of the target quaternion
* @returns {number} The squared absolute value of the quaternion
*/
xpl.Quaternion.absSq = function (q, q_off) {
let rp = q[q_off + CR];
let ip = q[q_off + CI];
let jp = q[q_off + CJ];
let kp = q[q_off + CK];
return rp * rp + ip * ip + jp * jp + kp * kp;
};
/**
* Calculates the absolute value of a quaternion.
*
* d = |q|
*
* @param {number[]} q - The target quaternion
* @param {number} q_off - Array index of the target quaternion
* @returns {number} The absolute value of the quaternion
*/
xpl.Quaternion.abs = function (q, q_off) {
return Math.sqrt(xpl.Quaternion.absSq(q, q_off));
};
/**
* Normalizes a quaternion.
*
* d = q / |q|
*
* @param {number[]} d - The destination quaternion
* @param {number} d_off - Array index of the destination quaternion
* @param {number[]} q - The target quaternion
* @param {number} q_off - Array index of the target quaternion
*/
xpl.Quaternion.normalizev = function (d, d_off, q, q_off) {
let rp = q[q_off + CR];
let ip = q[q_off + CI];
let jp = q[q_off + CJ];
let kp = q[q_off + CK];
let len = rp * rp + ip * ip + jp * jp + kp * kp;
if (0 < len) {
len = Math.sqrt(len);
xpl.Quaternion.load(d, d_off, rp / len, ip / len, jp / len, kp / len);
} else {
xpl.Quaternion.loadZero(d, d_off);
}
};
/**
* Calculates the exponential with Napier's constant e as the base.
*
* d = e^q
*
* @param {number[]} d - The destination quaternion
* @param {number} d_off - Array index of the destination quaternion
* @param {number} rp - Real part of the exponent quaternion
* @param {number} ip - Imaginary I part of the exponent quaternion
* @param {number} jp - Imaginary J part of the exponent quaternion
* @param {number} kp - Imaginary K part of the exponent quaternion
*/
xpl.Quaternion.exp = function (d, d_off, rp, ip, jp, kp) {
// e^(a + bi + cj + dk) = e^(a + v) = e^a * (cos|v| + (v / |v|) * sin|v|)
let aexp = Math.exp(rp); // e^a
let vnorm = Math.sqrt(ip * ip + jp * jp + kp * kp); // |v| = √(b^2 + c^2 + d^2)
let vscale = vnorm != 0 ? aexp * Math.sin(vnorm) / vnorm : 0; // e^a * sin|v| / |v|, 0 when |v| = 0
xpl.Quaternion.load(
d, d_off,
aexp * Math.cos(vnorm), // e^a * cos|v|
ip * vscale, jp * vscale, kp * vscale);
};
/**
* Calculates the exponential with Napier's constant e as the base.
*
* d = e^q
*
* @param {number[]} d - The destination quaternion
* @param {number} d_off - Array index of the destination quaternion
* @param {number[]} q - The exponent quaternion
* @param {number} q_off - Array index of the exponent quaternion
*/
xpl.Quaternion.expv = function (d, d_off, q, q_off) {
xpl.Quaternion.exp(d, d_off, q[q_off + CR], q[q_off + CI], q[q_off + CJ], q[q_off + CK]);
};
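// Illustrative sanity check (a sketch, not part of the library): Euler's
// formula carries over to quaternions, so e^(i*π/2) = cos(π/2) + i*sin(π/2).
//
//   var d = new Float32Array(4);
//   xpl.Quaternion.exp(d, 0, 0, Math.PI / 2, 0, 0);
//   // d ≈ (0, 1, 0, 0)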
/**
* Calculates the exponential of a pure imaginary quaternion with Napier's constant e as the base.
*
* d = e^iv
*
* @param {number[]} d - The destination quaternion
* @param {number} d_off - Array index of the destination quaternion
* @param {number} ip - Imaginary I part of the exponent
* @param {number} jp - Imaginary J part of the exponent
* @param {number} kp - Imaginary K part of the exponent
*/
xpl.Quaternion.cis = function (d, d_off, ip, jp, kp) {
// e^(bi + cj + dk) = e^v = cos|v| + (v / |v|) * sin|v|
let vnorm = Math.sqrt(ip * ip + jp * jp + kp * kp); // |v| = √(b^2 + c^2 + d^2)
let vscale = vnorm != 0 ? Math.sin(vnorm) / vnorm : 0; // sin|v| / |v|, 0 when |v| = 0
xpl.Quaternion.load(
d, d_off,
Math.cos(vnorm), // cos|v|
ip * vscale, jp * vscale, kp * vscale);
};
/**
* Calculates the exponential of a pure imaginary quaternion with Napier's constant e as the base.
*
* d = e^iv
*
* @param {number[]} d - The destination quaternion
* @param {number} d_off - Array index of the destination quaternion
* @param {number[]} v - The imaginary vector of the exponent
* @param {number} v_off - Array index of the imaginary vector of the exponent
*/
xpl.Quaternion.cisv = function (d, d_off, v, v_off) {
xpl.Quaternion.cis(d, d_off, v[v_off + VX], v[v_off + VY], v[v_off + VZ]);
};
/**
* Calculates the logarithm.
*
* d = log(q)
*
* @param {number[]} d - The destination quaternion
* @param {number} d_off - Array index of the destination quaternion
* @param {number} rp - Real part of the target quaternion
* @param {number} ip - Imaginary I part of the target quaternion
* @param {number} jp - Imaginary J part of the target quaternion
* @param {number} kp - Imaginary K part of the target quaternion
*/
xpl.Quaternion.log = function (d, d_off, rp, ip, jp, kp) {
// ln(a + bi + cj + dk) = ln(a + v) = ln(q) = ln|q| + v / |v| * cos^-1 (a / |q|)
let qnorm = Math.sqrt(rp * rp + ip * ip + jp * jp + kp * kp); // |q| = √(a^2 + b^2 + c^2 + d^2)
let qln = Math.log(qnorm); // ln|q|
let vnorm = Math.sqrt(ip * ip + jp * jp + kp * kp); // |v| = √(b^2 + c^2 + d^2)
let vscale = vnorm != 0 ? Math.acos(rp / qnorm) / vnorm : 0; // cos^-1(a / |q|) / |v|, 0 when |v| = 0
xpl.Quaternion.load(d, d_off, qln, ip * vscale, jp * vscale, kp * vscale);
};
/**
* Calculates the logarithm.
*
* d = log(q)
*
* @param {number[]} d - The destination quaternion
* @param {number} d_off - Array index of the destination quaternion
* @param {number[]} q - The target quaternion
* @param {number} q_off - Array index of the target quaternion
*/
xpl.Quaternion.logv = function (d, d_off, q, q_off) {
xpl.Quaternion.log(d, d_off, q[q_off + CR], q[q_off + CI], q[q_off + CJ], q[q_off + CK]);
};
/**
* Linearly interpolates between two quaternions.
*
* d = lerp(q1, q2; t)
*
* @param {number[]} d - The destination quaternion
* @param {number} d_off - Array index of the destination quaternion
* @param {number} rp1 - Real part of the start quaternion
* @param {number} ip1 - Imaginary I part of the start quaternion
* @param {number} jp1 - Imaginary J part of the start quaternion
* @param {number} kp1 - Imaginary K part of the start quaternion
* @param {number} rp2 - Real part of the end quaternion
* @param {number} ip2 - Imaginary I part of the end quaternion
* @param {number} jp2 - Imaginary J part of the end quaternion
* @param {number} kp2 - Imaginary K part of the end quaternion
* @param {number} t - The interpolation coefficient
*/
xpl.Quaternion.lerp = function (d, d_off, rp1, ip1, jp1, kp1, rp2, ip2, jp2, kp2, t) {
// lerp(p1, p2; t) = (1.0 - t) * p1 + t * p2
let t1 = 1.0 - t;
xpl.Quaternion.load(
d, d_off,
rp1 * t1 + rp2 * t,
ip1 * t1 + ip2 * t,
jp1 * t1 + jp2 * t,
kp1 * t1 + kp2 * t);
};
/**
* Linearly interpolates between two quaternions.
*
* d = lerp(q1, q2; t)
*
* @param {number[]} d - The destination quaternion
* @param {number} d_off - Array index of the destination quaternion
* @param {number[]} q1 - The start quaternion
* @param {number} q1_off - Array index of the start quaternion
* @param {number[]} q2 - The end quaternion
* @param {number} q2_off - Array index of the end quaternion
* @param {number} t - The interpolation coefficient
*/
xpl.Quaternion.lerpv = function (d, d_off, q1, q1_off, q2, q2_off, t) {
xpl.Quaternion.lerp(
d, d_off,
q1[q1_off + CR], q1[q1_off + CI], q1[q1_off + CJ], q1[q1_off + CK],
q2[q2_off + CR], q2[q2_off + CI], q2[q2_off + CJ], q2[q2_off + CK],
t);
};
/**
* Performs spherical linear interpolation between two quaternions.
*
* d = slerp(q1, q2; t)
*
* @param {number[]} d - The destination quaternion
* @param {number} d_off - Array index of the destination quaternion
* @param {number} rp1 - Real part of the start quaternion
* @param {number} ip1 - Imaginary I part of the start quaternion
* @param {number} jp1 - Imaginary J part of the start quaternion
* @param {number} kp1 - Imaginary K part of the start quaternion
* @param {number} rp2 - Real part of the end quaternion
* @param {number} ip2 - Imaginary I part of the end quaternion
* @param {number} jp2 - Imaginary J part of the end quaternion
* @param {number} kp2 - Imaginary K part of the end quaternion
* @param {number} t - The interpolation coefficient
*/
xpl.Quaternion.slerp = function (d, d_off, rp1, ip1, jp1, kp1, rp2, ip2, jp2, kp2, t) {
// normalize the start quaternion
let abs1 = rp1 * rp1 + ip1 * ip1 + jp1 * jp1 + kp1 * kp1;
if (0 < abs1) {
abs1 = Math.sqrt(abs1);
rp1 /= abs1;
ip1 /= abs1;
jp1 /= abs1;
kp1 /= abs1;
}
// normalize the end quaternion
let abs2 = rp2 * rp2 + ip2 * ip2 + jp2 * jp2 + kp2 * kp2;
if (0 < abs2) {
abs2 = Math.sqrt(abs2);
rp2 /= abs2;
ip2 /= abs2;
jp2 /= abs2;
kp2 /= abs2;
}
// calculate the cosine between the two quaternions
let cs = rp1 * rp2 + ip1 * ip2 + jp1 * jp2 + kp1 * kp2;
if (1.0 <= cs) {
// the two quaternions point in the same direction
// lerp(p0, p1; t) = (1.0 - t) * p0 + t * p1
let abs = abs1 * (1.0 - t) + abs2 * t;
xpl.Quaternion.load(d, d_off, rp1 * abs, ip1 * abs, jp1 * abs, kp1 * abs);
} else if (cs <= -1.0) {
// the two quaternions point in exactly opposite directions
// lerp(p0, p1; t) = (1.0 - t) * p0 + t * p1
let abs = abs1 * (1.0 - t) - abs2 * t;
xpl.Quaternion.load(d, d_off, rp1 * abs, ip1 * abs, jp1 * abs, kp1 * abs);
} else {
// general case
// linearly interpolate the absolute values of the quaternions
// lerp(p0, p1; t) = (1.0 - t) * p0 + t * p1
let abs = abs1 * (1.0 - t) + abs2 * t;
// spherically interpolate the direction of the quaternions
// slerp(p0, p1; t) = (sin((1.0 - t) * Ω) / sin(Ω)) * p0 + (sin(t * Ω) / sin(Ω)) * p1
let rad1 = Math.acos(cs);
let rad2 = rad1 * (1.0 - t);
let rad3 = rad1 * t;
let sn = Math.sin(rad1);
let sn1 = Math.sin(rad2) / sn;
let sn2 = Math.sin(rad3) / sn;
// write out the result
xpl.Quaternion.load(
d, d_off,
(rp1 * sn1 + rp2 * sn2) * abs,
(ip1 * sn1 + ip2 * sn2) * abs,
(jp1 * sn1 + jp2 * sn2) * abs,
(kp1 * sn1 + kp2 * sn2) * abs);
}
};
/**
* Performs spherical linear interpolation between two quaternions.
*
* d = slerp(q1, q2; t)
*
* @param {number[]} d - The destination quaternion
* @param {number} d_off - Array index of the destination quaternion
* @param {number[]} q1 - The start quaternion
* @param {number} q1_off - Array index of the start quaternion
* @param {number[]} q2 - The end quaternion
* @param {number} q2_off - Array index of the end quaternion
* @param {number} t - The interpolation coefficient
*/
xpl.Quaternion.slerpv = function (d, d_off, q1, q1_off, q2, q2_off, t) {
xpl.Quaternion.slerp(
d, d_off,
q1[q1_off + CR], q1[q1_off + CI], q1[q1_off + CJ], q1[q1_off + CK],
q2[q2_off + CR], q2[q2_off + CI], q2[q2_off + CJ], q2[q2_off + CK],
t);
};
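// Illustrative usage (a sketch assuming unit-length inputs): halfway between
// the identity and a 180° rotation about the I axis is a 90° rotation about
// the I axis, i.e. (cos(π/4), sin(π/4), 0, 0).
//
//   var d = new Float32Array(4);
//   xpl.Quaternion.slerpv(d, 0, [1, 0, 0, 0], 0, [0, 1, 0, 0], 0, 0.5);
//   // d ≈ (0.7071, 0.7071, 0, 0)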
/**
* Calculates the dot product.
*
* d = q1 · q2
*
* @param {number} rp1 - Real part of the left-hand quaternion
* @param {number} ip1 - Imaginary I part of the left-hand quaternion
* @param {number} jp1 - Imaginary J part of the left-hand quaternion
* @param {number} kp1 - Imaginary K part of the left-hand quaternion
* @param {number} rp2 - Real part of the right-hand quaternion
* @param {number} ip2 - Imaginary I part of the right-hand quaternion
* @param {number} jp2 - Imaginary J part of the right-hand quaternion
* @param {number} kp2 - Imaginary K part of the right-hand quaternion
* @returns {number} The dot product value
*/
xpl.Quaternion.dot = function (rp1, ip1, jp1, kp1, rp2, ip2, jp2, kp2) {
return rp1 * rp2 + ip1 * ip2 + jp1 * jp2 + kp1 * kp2;
};
/**
* Calculates the dot product.
*
* d = q1 · q2
*
* @param {number[]} q1 - The left-hand quaternion
* @param {number} q1_off - Array index of the left-hand quaternion
* @param {number[]} q2 - The right-hand quaternion
* @param {number} q2_off - Array index of the right-hand quaternion
* @returns {number} The dot product value
*/
xpl.Quaternion.dotv = function (q1, q1_off, q2, q2_off) {
return q1[q1_off + CR] * q2[q2_off + CR] +
q1[q1_off + CI] * q2[q2_off + CI] +
q1[q1_off + CJ] * q2[q2_off + CJ] +
q1[q1_off + CK] * q2[q2_off + CK];
};
/**
* Calculates the conjugate.
*
* d = conj(q)
*
* @param {number[]} d - The destination quaternion
* @param {number} d_off - Array index of the destination quaternion
* @param {number[]} q - The target quaternion
* @param {number} q_off - Array index of the target quaternion
*/
xpl.Quaternion.conjugatev = function (d, d_off, q, q_off) {
xpl.Quaternion.load(d, d_off, q[q_off + CR], -q[q_off + CI], -q[q_off + CJ], -q[q_off + CK]);
};
/**
* Negates the sign of each component.
*
* d = -q
*
* @param {number[]} d - The destination quaternion
* @param {number} d_off - Array index of the destination quaternion
* @param {number[]} q - The target quaternion
* @param {number} q_off - Array index of the target quaternion
*/
xpl.Quaternion.reversev = function (d, d_off, q, q_off) {
xpl.Quaternion.load(d, d_off, -q[q_off + CR], -q[q_off + CI], -q[q_off + CJ], -q[q_off + CK]);
};
/**
* Calculates the multiplicative inverse.
*
* d = q^-1
*
* @param {number[]} d - The destination quaternion
* @param {number} d_off - Array index of the destination quaternion
* @param {number[]} q - The target quaternion
* @param {number} q_off - Array index of the target quaternion
*/
xpl.Quaternion.inversev = function (d, d_off, q, q_off) {
let rp = q[q_off + CR];
let ip = q[q_off + CI];
let jp = q[q_off + CJ];
let kp = q[q_off + CK];
let det = rp * rp + ip * ip + jp * jp + kp * kp; // |q|^2
if (0 < det) {
// q^-1 = conj(q) / |q|^2
rp /= det;
ip /= -det;
jp /= -det;
kp /= -det;
}
xpl.Quaternion.load(d, d_off, rp, ip, jp, kp);
};
/**
* Adds two quaternions.
*
* d = q1 + q2
*
* @param {number[]} d - The destination quaternion
* @param {number} d_off - Array index of the destination quaternion
* @param {number} rp1 - Real part of the left-hand quaternion
* @param {number} ip1 - Imaginary I part of the left-hand quaternion
* @param {number} jp1 - Imaginary J part of the left-hand quaternion
* @param {number} kp1 - Imaginary K part of the left-hand quaternion
* @param {number} rp2 - Real part of the right-hand quaternion
* @param {number} ip2 - Imaginary I part of the right-hand quaternion
* @param {number} jp2 - Imaginary J part of the right-hand quaternion
* @param {number} kp2 - Imaginary K part of the right-hand quaternion
*/
xpl.Quaternion.add = function (d, d_off, rp1, ip1, jp1, kp1, rp2, ip2, jp2, kp2) {
xpl.Quaternion.load(d, d_off, rp1 + rp2, ip1 + ip2, jp1 + jp2, kp1 + kp2);
};
/**
* Adds two quaternions.
*
* d = q1 + q2
*
* @param {number[]} d - The destination quaternion
* @param {number} d_off - Array index of the destination quaternion
* @param {number[]} q1 - The left-hand quaternion
* @param {number} q1_off - Array index of the left-hand quaternion
* @param {number[]} q2 - The right-hand quaternion
* @param {number} q2_off - Array index of the right-hand quaternion
*/
xpl.Quaternion.addv = function (d, d_off, q1, q1_off, q2, q2_off) {
xpl.Quaternion.add(
d, d_off,
q1[q1_off + CR], q1[q1_off + CI], q1[q1_off + CJ], q1[q1_off + CK],
q2[q2_off + CR], q2[q2_off + CI], q2[q2_off + CJ], q2[q2_off + CK]);
};
/**
* Subtracts one quaternion from another.
*
* d = q1 - q2
*
* @param {number[]} d - The destination quaternion
* @param {number} d_off - Array index of the destination quaternion
* @param {number} rp1 - Real part of the left-hand quaternion
* @param {number} ip1 - Imaginary I part of the left-hand quaternion
* @param {number} jp1 - Imaginary J part of the left-hand quaternion
* @param {number} kp1 - Imaginary K part of the left-hand quaternion
* @param {number} rp2 - Real part of the right-hand quaternion
* @param {number} ip2 - Imaginary I part of the right-hand quaternion
* @param {number} jp2 - Imaginary J part of the right-hand quaternion
* @param {number} kp2 - Imaginary K part of the right-hand quaternion
*/
xpl.Quaternion.sub = function (d, d_off, rp1, ip1, jp1, kp1, rp2, ip2, jp2, kp2) {
xpl.Quaternion.load(d, d_off, rp1 - rp2, ip1 - ip2, jp1 - jp2, kp1 - kp2);
};
/**
* Subtracts one quaternion from another.
*
* d = q1 - q2
*
* @param {number[]} d - The destination quaternion
* @param {number} d_off - Array index of the destination quaternion
* @param {number[]} q1 - The left-hand quaternion
* @param {number} q1_off - Array index of the left-hand quaternion
* @param {number[]} q2 - The right-hand quaternion
* @param {number} q2_off - Array index of the right-hand quaternion
*/
xpl.Quaternion.subv = function (d, d_off, q1, q1_off, q2, q2_off) {
xpl.Quaternion.sub(
d, d_off,
q1[q1_off + CR], q1[q1_off + CI], q1[q1_off + CJ], q1[q1_off + CK],
q2[q2_off + CR], q2[q2_off + CI], q2[q2_off + CJ], q2[q2_off + CK]);
};
/**
* Multiplies two quaternions.
*
* d = q1 * q2
*
* @param {number[]} d - The destination quaternion
* @param {number} d_off - Array index of the destination quaternion
* @param {number} rp1 - Real part of the left-hand quaternion
* @param {number} ip1 - Imaginary I part of the left-hand quaternion
* @param {number} jp1 - Imaginary J part of the left-hand quaternion
* @param {number} kp1 - Imaginary K part of the left-hand quaternion
* @param {number} rp2 - Real part of the right-hand quaternion
* @param {number} ip2 - Imaginary I part of the right-hand quaternion
* @param {number} jp2 - Imaginary J part of the right-hand quaternion
* @param {number} kp2 - Imaginary K part of the right-hand quaternion
*/
xpl.Quaternion.mul = function (d, d_off, rp1, ip1, jp1, kp1, rp2, ip2, jp2, kp2) {
// i^2 = j^2 = k^2 = ijk = -1, ij = -ji = k, jk = -kj = i, ki = -ik = j
// rp = r1r2 - v1 · v2, vp = r1v2 + r2v1 + v1 × v2
xpl.Quaternion.load(
d, d_off,
rp1 * rp2 - (ip1 * ip2 + jp1 * jp2 + kp1 * kp2),
rp1 * ip2 + ip1 * rp2 + (jp1 * kp2 - kp1 * jp2),
rp1 * jp2 + jp1 * rp2 + (kp1 * ip2 - ip1 * kp2),
rp1 * kp2 + kp1 * rp2 + (ip1 * jp2 - jp1 * ip2));
};
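// Illustrative check of the Hamilton product rules noted above (i * j = k):
//
//   var d = new Float32Array(4);
//   xpl.Quaternion.mul(d, 0, 0, 1, 0, 0, 0, 0, 1, 0);
//   // d = (0, 0, 0, 1), the pure K unit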
/**
* Multiplies two quaternions.
*
* d = q1 * q2
*
* @param {number[]} d - The destination quaternion
* @param {number} d_off - Array index of the destination quaternion
* @param {number[]} q1 - The left-hand quaternion
* @param {number} q1_off - Array index of the left-hand quaternion
* @param {number[]} q2 - The right-hand quaternion
* @param {number} q2_off - Array index of the right-hand quaternion
*/
xpl.Quaternion.mulv = function (d, d_off, q1, q1_off, q2, q2_off) {
xpl.Quaternion.mul(
d, d_off,
q1[q1_off + CR], q1[q1_off + CI], q1[q1_off + CJ], q1[q1_off + CK],
q2[q2_off + CR], q2[q2_off + CI], q2[q2_off + CJ], q2[q2_off + CK]);
};
/**
* Multiplies a quaternion by a scalar.
*
* d = q * s
*
* @param {number[]} d - The destination quaternion
* @param {number} d_off - Array index of the destination quaternion
* @param {number} rp - Real part of the target quaternion
* @param {number} ip - Imaginary I part of the target quaternion
* @param {number} jp - Imaginary J part of the target quaternion
* @param {number} kp - Imaginary K part of the target quaternion
* @param {number} s - The scalar value
*/
xpl.Quaternion.mulScalar = function (d, d_off, rp, ip, jp, kp, s) {
xpl.Quaternion.load(d, d_off, rp * s, ip * s, jp * s, kp * s);
};
/**
* Multiplies a quaternion by a scalar.
*
* d = q * s
*
* @param {number[]} d - The destination quaternion
* @param {number} d_off - Array index of the destination quaternion
* @param {number[]} q - The target quaternion
* @param {number} q_off - Array index of the target quaternion
* @param {number} s - The scalar value
*/
xpl.Quaternion.mulScalarv = function (d, d_off, q, q_off, s) {
xpl.Quaternion.mulScalar(d, d_off, q[q_off + CR], q[q_off + CI], q[q_off + CJ], q[q_off + CK], s);
};
/**
* Divides one quaternion by another.
*
* d = q1 / q2
*
* @param {number[]} d - The destination quaternion
* @param {number} d_off - Array index of the destination quaternion
* @param {number} rp1 - Real part of the left-hand quaternion
* @param {number} ip1 - Imaginary I part of the left-hand quaternion
* @param {number} jp1 - Imaginary J part of the left-hand quaternion
* @param {number} kp1 - Imaginary K part of the left-hand quaternion
* @param {number} rp2 - Real part of the right-hand quaternion
* @param {number} ip2 - Imaginary I part of the right-hand quaternion
* @param {number} jp2 - Imaginary J part of the right-hand quaternion
* @param {number} kp2 - Imaginary K part of the right-hand quaternion
*/
xpl.Quaternion.div = function (d, d_off, rp1, ip1, jp1, kp1, rp2, ip2, jp2, kp2) {
// i^2 = j^2 = k^2 = ijk = -1, ij = -ji = k, jk = -kj = i, ki = -ik = j
// q1 / q2 = q1 * conj(q2) / |q2|^2: rp = (r1r2 + v1 · v2) / |q2|^2, vp = (r2v1 - r1v2 - v1 × v2) / |q2|^2
let det = rp2 * rp2 + ip2 * ip2 + jp2 * jp2 + kp2 * kp2;
xpl.Quaternion.load(
d, d_off,
( rp1 * rp2 + (ip1 * ip2 + jp1 * jp2 + kp1 * kp2)) / det,
(-rp1 * ip2 + ip1 * rp2 - (jp1 * kp2 - kp1 * jp2)) / det,
(-rp1 * jp2 + jp1 * rp2 - (kp1 * ip2 - ip1 * kp2)) / det,
(-rp1 * kp2 + kp1 * rp2 - (ip1 * jp2 - jp1 * ip2)) / det);
};
/**
* Divides one quaternion by another.
*
* d = q1 / q2
*
* @param {number[]} d - The destination quaternion
* @param {number} d_off - Array index of the destination quaternion
* @param {number[]} q1 - The left-hand quaternion
* @param {number} q1_off - Array index of the left-hand quaternion
* @param {number[]} q2 - The right-hand quaternion
* @param {number} q2_off - Array index of the right-hand quaternion
*/
xpl.Quaternion.divv = function (d, d_off, q1, q1_off, q2, q2_off) {
xpl.Quaternion.div(
d, d_off,
q1[q1_off + CR], q1[q1_off + CI], q1[q1_off + CJ], q1[q1_off + CK],
q2[q2_off + CR], q2[q2_off + CI], q2[q2_off + CJ], q2[q2_off + CK]);
};
/**
* Divides a quaternion by a scalar.
*
* d = q / s
*
* @param {number[]} d - The destination quaternion
* @param {number} d_off - Array index of the destination quaternion
* @param {number} rp - Real part of the target quaternion
* @param {number} ip - Imaginary I part of the target quaternion
* @param {number} jp - Imaginary J part of the target quaternion
* @param {number} kp - Imaginary K part of the target quaternion
* @param {number} s - The scalar value
*/
xpl.Quaternion.divScalar = function (d, d_off, rp, ip, jp, kp, s) {
xpl.Quaternion.load(d, d_off, rp / s, ip / s, jp / s, kp / s);
};
/**
* Divides a quaternion by a scalar.
*
* d = q / s
*
* @param {number[]} d - The destination quaternion
* @param {number} d_off - Array index of the destination quaternion
* @param {number[]} q - The target quaternion
* @param {number} q_off - Array index of the target quaternion
* @param {number} s - The scalar value
*/
xpl.Quaternion.divScalarv = function (d, d_off, q, q_off, s) {
xpl.Quaternion.divScalar(d, d_off, q[q_off + CR], q[q_off + CI], q[q_off + CJ], q[q_off + CK], s);
};
/**
* Returns a string representation of a quaternion.
*
* @param {number[]} q - The target quaternion
* @param {number} off - Array index of the target quaternion
* @returns {string} The string converted from the quaternion
*/
xpl.Quaternion.convertToString = function (q, off) {
return "Quaternion(" + q[off + CR] + ", " + q[off + CI] + ", " + q[off + CJ] + ", " + q[off + CK] + ")";
};
})(xpl);
|
$(document).ready(function() {
$('[data-toggle="log-tooltip"]').tooltip();
$("#monitor-start_time").flatpickr({
dateFormat: "Y-m-d H:i:S",
enableTime: true,
enableSeconds: true,
time_24hr: true,
allowInput: false
});
$("#monitor-end_time").flatpickr({
dateFormat: "Y-m-d H:i:S",
enableTime: true,
enableSeconds: true,
time_24hr: true,
allowInput: false
});
//init select panel
monitor.loadDevice();
monitor.initChart();
});
monitor = (function() {
"use strict";
function Monitor() {
this.chart = null;
this.monitorTimer = null;
this.ReadingList = [{
'profile': 'Light',
'reading': [
'Light_OnOff',
'Light_Dimming',
'Light_LightSensor'
]
},
{
'profile': 'Sensor',
'reading': [
'Sensor_MeasureLight'
]
}
];
this.DeviceCache = [];
}
Monitor.prototype = {
constructor: Monitor,
loadDevice: null,
eraseScreenBtn: null,
renderReading: null,
searchBtn: null,
startMonitor: null,
stopMonitor: null,
// chart
initChart: null,
updateChart: null,
}
var client = new Monitor();
Monitor.prototype.loadDevice = function() {
$.ajax({
url: '/core-metadata/api/v1/device',
type: 'GET',
dataType: 'json',
success: function(data) {
client.DeviceCache = [];
$.each(data, function(i, d) {
$.each(client.ReadingList, function(i, r) {
if (r.profile == d.profile.name) {
var info = {
name: d.name,
profile: d.profile.name
}
client.DeviceCache.push(info);
return false; // break out of the $.each loop once a matching profile is found
}
});
});
// render Device List:
$("#monitor-selectReading").empty();
$("#monitor-selectReading").append($("<option></option>").attr("value", "").text(""));
var $el = $("#monitor-selectDevice");
$el.empty(); // remove old options
$el.append($("<option></option>").attr("value", "").text("-- select device --"));
$.each(client.DeviceCache, function(i, s) {
$el.append($("<option></option>").attr("value", s.name).text(s.name));
});
// add event change option:
$('#monitor-tab-main select[name="monitor-selectDevice"]').on('change', function() {
var name = $(this).val();
$.each(client.DeviceCache, function(i, dev) {
if (dev.name == name) {
$.each(client.ReadingList, function(i, r) {
if (r.profile == dev.profile) {
var $el = $("#monitor-selectReading");
$el.empty(); // remove old options
$.each(r.reading, function(i, rd) {
$el.append($("<option></option>").attr("value", rd).text(rd));
});
}
});
return false; // break out of the $.each loop once the device is found
}
});
});
}
});
}
Monitor.prototype.eraseScreenBtn = function() {
// $("#log-content div.log_content").empty();
client.updateChart([]);
}
Monitor.prototype.searchBtn = function() {
var device = $('#monitor-selectDevice').val();
var name = $('#monitor-selectReading').val();
var start = $("#monitor-start_time").val();
var end = $("#monitor-end_time").val();
start = new Date(start).valueOf();
end = new Date(end).valueOf();
var limit = $('#monitor-limit').val();
// var url = '/core-data/api/v1/reading/name/' + name + '/device/' + device + '/' + limit;
var url = '/core-data/api/v1/reading/' + start + '/' + end + '/' + limit;
console.log('GET request: url:' + url);
$.ajax({
url: url,
type: 'GET',
dataType: 'json',
success: function(data) {
$("#log-content div.log_content").empty();
client.renderReading(data, start, end, device, name);
},
error: function(xhr, status, error) {
alert(error + '\n' + xhr.responseText);
}
});
}
Monitor.prototype.renderReading = function(data, start, end, device, name) {
if (!data || data.length == 0) {
$("#log-content div.log_content").append('<span style="color:white;">No data.</span>');
return;
}
var readings = [];
for (var i = 0; i < data.length; i++) {
if (data[i].created >= start && data[i].created <= end && data[i].device == device && data[i].name == name) {
readings.push(data[i]);
}
}
if (!readings || readings.length == 0) {
$("#log-content div.log_content").append('<span style="color:white;">No data.</span>');
return;
}
client.updateChart(readings);
}
Monitor.prototype.startMonitor = function() {
//debugger
$("#monitor-search").hide();
$("#monitor-start-monitor").hide();
$("#monitor-stop-monitor").show();
client.monitorTimer = window.setInterval(client.searchBtn, 5000); // pass a function reference, not an eval'd string
}
Monitor.prototype.stopMonitor = function() {
window.clearInterval(client.monitorTimer);
$("#monitor-search").show();
$("#monitor-start-monitor").show();
$("#monitor-stop-monitor").hide();
}
Monitor.prototype.initChart = function() {
client.chart = echarts.init(document.getElementById('chart-reading'));
// specify chart configuration item and data
var option = {
title: { text: 'Line Chart' },
xAxis: {
name: 'Count'
},
yAxis: { name: 'Reading Value' },
series: [{
areaStyle: {
color: 'pink'
},
type: 'line',
data: []
}]
};
// use configuration item and data specified to show chart
client.chart.setOption(option);
}
Monitor.prototype.updateChart = function(readings) {
var device = $('#monitor-selectDevice').val();
var name = $('#monitor-selectReading').val();
if (!readings || readings.length == 0) {
var option = {
title: { text: 'Chart of Device: ' + device },
tooltip: {},
xAxis: {
name: 'Time',
type: 'time'
},
yAxis: {
name: name,
type: 'value'
},
series: [{
type: 'line',
// smooth: true,
data: []
}]
};
client.chart.setOption(option);
return;
}
var Sn, Tn, ti_1, vi_1, Vtb;
Sn = 0;
Vtb = 0;
vi_1 = 0;
ti_1 = readings[0].created;
Tn = readings[readings.length - 1].created - readings[0].created;
var dim2 = [];
for (var i = 0; i < readings.length; i++) {
// parse the value: booleans coerce to 1/0, everything else is numeric
var vi = (readings[i].valueType == 'Bool') ? (readings[i].value == 'true') : parseInt(readings[i].value);
var ti = new Date(readings[i].created);
// Sn = (t1-t0)*v0 + (t2-t1)*v1 + ... + (t_{n-1} - t_{n-2})*v_{n-2}
Sn = Sn + (ti - ti_1) * vi_1;
vi_1 = vi;
ti_1 = ti;
dim2.push([ti, vi]);
}
if (Tn == 0) {
Tn = 1;
Sn = vi_1;
}
Vtb = (Sn / Tn).toFixed(2);
var option = {
title: {
text: 'Device: ' + device + ' Number of values: ' + readings.length + ' Mean: ' + Vtb,
left: 'center'
},
tooltip: {},
xAxis: {
boundaryGap: false,
name: 'Time',
type: 'time'
},
yAxis: {
name: name,
type: 'value'
},
series: [{
// label: {
// normal: {
// show: true,
// position: 'top'
// }
// },
areaStyle: {
color: 'pink'
},
type: 'line',
step: 'end',
data: dim2
}]
};
client.chart.setOption(option);
}
return client;
})(); |
from typing import List, Union
import ray
from ray._private.client_mode_hook import client_mode_hook
@client_mode_hook
def _internal_kv_initialized():
worker = ray.worker.global_worker
return hasattr(worker, "mode") and worker.mode is not None
@client_mode_hook
def _internal_kv_get(key: Union[str, bytes]) -> bytes:
"""Fetch the value of a binary key."""
return ray.worker.global_worker.redis_client.hget(key, "value")
@client_mode_hook
def _internal_kv_put(key: Union[str, bytes],
value: Union[str, bytes],
overwrite: bool = True) -> bool:
"""Globally associates a value with a given binary key.
This only has an effect if the key does not already have a value.
Returns:
already_exists (bool): whether the value already exists.
"""
worker = ray.worker.global_worker
if overwrite:
updated = worker.redis_client.hset(key, "value", value)
else:
updated = worker.redis_client.hsetnx(key, "value", value)
return updated == 0 # already exists
@client_mode_hook
def _internal_kv_del(key: Union[str, bytes]):
return ray.worker.global_worker.redis_client.delete(key)
@client_mode_hook
def _internal_kv_list(prefix: Union[str, bytes]) -> List[bytes]:
"""List all keys in the internal KV store that start with the prefix."""
if isinstance(prefix, bytes):
pattern = prefix + b"*"
else:
pattern = prefix + "*"
return ray.worker.global_worker.redis_client.keys(pattern=pattern)
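# Example (a minimal sketch, assuming a connected worker whose redis_client
# is initialized) of a round-trip through these helpers:
#
#   _internal_kv_put(b"key", b"value")   # -> False: key did not exist before
#   _internal_kv_get(b"key")             # -> b"value"
#   _internal_kv_list(b"ke")             # -> [b"key"]
#   _internal_kv_del(b"key")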
|
/*
* Copyright 2016 Google Inc.
*
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
#ifndef Sk4fGradientPriv_DEFINED
#define Sk4fGradientPriv_DEFINED
#include "SkColor.h"
#include "SkHalf.h"
#include "SkImageInfo.h"
#include "SkNx.h"
#include "SkPM4f.h"
#include "SkPM4fPriv.h"
#include "SkUtils.h"
// Templates shared by various 4f gradient flavors.
namespace {
enum class ApplyPremul { True, False };
enum class DstType {
L32, // Linear 32bit. Used for both shader/blitter paths.
S32, // SRGB 32bit. Used for the blitter path only.
F16, // Linear half-float. Used for blitters only.
F32, // Linear float. Used for shaders only.
};
template <ApplyPremul premul>
inline SkPMColor trunc_from_4f_255(const Sk4f& c) {
SkPMColor pmc;
SkNx_cast<uint8_t>(c).store(&pmc);
if (premul == ApplyPremul::True) {
pmc = SkPreMultiplyARGB(SkGetPackedA32(pmc), SkGetPackedR32(pmc),
SkGetPackedG32(pmc), SkGetPackedB32(pmc));
}
return pmc;
}
template <ApplyPremul>
struct PremulTraits;
template <>
struct PremulTraits<ApplyPremul::False> {
static Sk4f apply(const Sk4f& c) { return c; }
};
template <>
struct PremulTraits<ApplyPremul::True> {
static Sk4f apply(const Sk4f& c) {
const float alpha = c[SkPM4f::A];
// FIXME: portable swizzle?
return c * Sk4f(alpha, alpha, alpha, 1);
}
};
// Struct encapsulating various dest-dependent ops:
//
// - load() Load a SkPM4f value into Sk4f. Normally called once per interval
// advance. Also applies a scale and swizzle suitable for DstType.
//
// - store() Store one Sk4f to dest. Optionally handles premul, color space
// conversion, etc.
//
// - store(count) Store the Sk4f value repeatedly to dest, count times.
//
// - store4x() Store 4 Sk4f values to dest (opportunistic optimization).
//
template <DstType, ApplyPremul premul = ApplyPremul::False>
struct DstTraits;
template <ApplyPremul premul>
struct DstTraits<DstType::L32, premul> {
using Type = SkPMColor;
// For L32, we prescale the values by 255 to save a per-pixel multiplication.
static Sk4f load(const SkPM4f& c) {
return c.to4f_pmorder() * Sk4f(255);
}
static void store(const Sk4f& c, Type* dst) {
*dst = trunc_from_4f_255<premul>(c);
}
static void store(const Sk4f& c, Type* dst, int n) {
sk_memset32(dst, trunc_from_4f_255<premul>(c), n);
}
static void store4x(const Sk4f& c0, const Sk4f& c1,
const Sk4f& c2, const Sk4f& c3,
Type* dst) {
if (premul == ApplyPremul::False) {
Sk4f_ToBytes((uint8_t*)dst, c0, c1, c2, c3);
} else {
store(c0, dst + 0);
store(c1, dst + 1);
store(c2, dst + 2);
store(c3, dst + 3);
}
}
};
template <ApplyPremul premul>
struct DstTraits<DstType::S32, premul> {
using PM = PremulTraits<premul>;
using Type = SkPMColor;
static Sk4f load(const SkPM4f& c) {
return c.to4f_pmorder();
}
static void store(const Sk4f& c, Type* dst) {
// FIXME: this assumes opaque colors. Handle unpremultiplication.
*dst = Sk4f_toS32(PM::apply(c));
}
static void store(const Sk4f& c, Type* dst, int n) {
sk_memset32(dst, Sk4f_toS32(PM::apply(c)), n);
}
static void store4x(const Sk4f& c0, const Sk4f& c1,
const Sk4f& c2, const Sk4f& c3,
Type* dst) {
store(c0, dst + 0);
store(c1, dst + 1);
store(c2, dst + 2);
store(c3, dst + 3);
}
};
template <ApplyPremul premul>
struct DstTraits<DstType::F16, premul> {
using PM = PremulTraits<premul>;
using Type = uint64_t;
static Sk4f load(const SkPM4f& c) {
return c.to4f();
}
static void store(const Sk4f& c, Type* dst) {
SkFloatToHalf_finite_ftz(PM::apply(c)).store(dst);
}
static void store(const Sk4f& c, Type* dst, int n) {
uint64_t color;
SkFloatToHalf_finite_ftz(PM::apply(c)).store(&color);
sk_memset64(dst, color, n);
}
static void store4x(const Sk4f& c0, const Sk4f& c1,
const Sk4f& c2, const Sk4f& c3,
Type* dst) {
store(c0, dst + 0);
store(c1, dst + 1);
store(c2, dst + 2);
store(c3, dst + 3);
}
};
template <ApplyPremul premul>
struct DstTraits<DstType::F32, premul> {
using PM = PremulTraits<premul>;
using Type = SkPM4f;
static Sk4f load(const SkPM4f& c) {
return c.to4f();
}
static void store(const Sk4f& c, Type* dst) {
PM::apply(c).store(dst->fVec);
}
static void store(const Sk4f& c, Type* dst, int n) {
const Sk4f pmc = PM::apply(c);
for (int i = 0; i < n; ++i) {
pmc.store(dst[i].fVec);
}
}
static void store4x(const Sk4f& c0, const Sk4f& c1,
const Sk4f& c2, const Sk4f& c3,
Type* dst) {
store(c0, dst + 0);
store(c1, dst + 1);
store(c2, dst + 2);
store(c3, dst + 3);
}
};
} // anonymous namespace
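// Example (a minimal sketch, assuming an SkPM4f color c and an SkPMColor
// buffer dst with at least n entries) of filling a span through the traits
// for a linear 32-bit destination:
//
//   using Traits = DstTraits<DstType::L32, ApplyPremul::False>;
//   Sk4f v = Traits::load(c);   // premul-order components, prescaled by 255
//   Traits::store(v, dst, n);   // memset-style repeated store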
#endif // Sk4fGradientPriv_DEFINED
|
from django.contrib import admin
from django.http import HttpResponseRedirect
from django.urls import reverse_lazy
from .forms import ExcursionForm
from .models import Excursion, Participant
@admin.register(Excursion)
class ExcursionAdmin(admin.ModelAdmin):
form = ExcursionForm
list_display = ('__str__', 'seats', 'date')
@admin.register(Participant)
class ParticipantAdmin(admin.ModelAdmin):
list_display = ('__str__', 'is_car_owner', 'is_seat_owner', 'created_on')
list_filter = ('excursion__title', 'is_car_owner')
ordering = ('created_on',)
actions = ['contact_selected_participants']
@admin.action(
permissions=['add', 'change'],
description="Send selected participants an email",
)
def contact_selected_participants(self, request, queryset):
selected = queryset.order_by('pk').values_list('pk', flat=True)
return HttpResponseRedirect(reverse_lazy('excursions:contact') + '?ids=%s' % (
','.join(str(pk) for pk in selected),
))
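# A hypothetical receiving view (the 'excursions:contact' view is not shown
# here) could recover the selection from the "ids" query parameter, e.g.:
#
#   ids = [int(pk) for pk in request.GET.get('ids', '').split(',') if pk]
#   participants = Participant.objects.filter(pk__in=ids)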
|
/*
belle-sip - SIP (RFC3261) library.
Copyright (C) 2010-2013 Belledonne Communications SARL
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef BELLE_SIP_DEFS_H
#define BELLE_SIP_DEFS_H
#ifdef __cplusplus
#define BELLE_SIP_BEGIN_DECLS extern "C"{
#define BELLE_SIP_END_DECLS }
#else
#define BELLE_SIP_BEGIN_DECLS
#define BELLE_SIP_END_DECLS
#endif
#ifdef _MSC_VER
#define BELLESIP_INLINE __inline
typedef signed char int8_t;
typedef unsigned char uint8_t;
typedef __int16 int16_t;
typedef unsigned __int16 uint16_t;
typedef __int32 int32_t;
typedef unsigned __int32 uint32_t;
typedef __int64 int64_t;
typedef unsigned __int64 uint64_t;
#else
#include <inttypes.h>
#define BELLESIP_INLINE inline
#endif
#ifdef _MSC_VER
#ifdef BELLESIP_STATIC
#define BELLESIP_EXPORT
#else
#ifdef BELLESIP_EXPORTS
#define BELLESIP_EXPORT __declspec(dllexport)
#else
#define BELLESIP_EXPORT __declspec(dllimport)
#endif
#endif
#else
#define BELLESIP_EXPORT
#endif
#define BELLESIP_UNUSED(a) (void)a;
#undef TRUE
#define TRUE 1
#undef FALSE
#define FALSE 0
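/* Example (a minimal sketch; belle_sip_example_init is a hypothetical symbol)
 * of a header built on these macros: declarations get C linkage under C++,
 * and the symbol is exported or imported as appropriate on MSVC:
 *
 *   BELLE_SIP_BEGIN_DECLS
 *   BELLESIP_EXPORT int belle_sip_example_init(void);
 *   BELLE_SIP_END_DECLS
 */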
#endif
|
# -*- coding: utf-8 -*-
"""
oauthlib.oauth2.rfc6749.grant_types
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""
from __future__ import absolute_import, unicode_literals
import json
import logging
from .. import errors, utils
from ..request_validator import RequestValidator
from .base import GrantTypeBase
log = logging.getLogger(__name__)
class RefreshTokenGrant(GrantTypeBase):
"""`Refresh token grant`_
.. _`Refresh token grant`: https://tools.ietf.org/html/rfc6749#section-6
"""
def __init__(self, request_validator=None,
issue_new_refresh_tokens=True,
**kwargs):
super(RefreshTokenGrant, self).__init__(
request_validator,
issue_new_refresh_tokens=issue_new_refresh_tokens,
**kwargs)
def create_token_response(self, request, token_handler):
"""Create a new access token from a refresh_token.
:param request: OAuthlib request.
:type request: oauthlib.common.Request
:param token_handler: A token handler instance, for example of type
oauthlib.oauth2.BearerToken.
If valid and authorized, the authorization server issues an access
token as described in `Section 5.1`_. If the request failed
verification or is invalid, the authorization server returns an error
response as described in `Section 5.2`_.
The authorization server MAY issue a new refresh token, in which case
the client MUST discard the old refresh token and replace it with the
new refresh token. The authorization server MAY revoke the old
refresh token after issuing a new refresh token to the client. If a
new refresh token is issued, the refresh token scope MUST be
identical to that of the refresh token included by the client in the
request.
.. _`Section 5.1`: https://tools.ietf.org/html/rfc6749#section-5.1
.. _`Section 5.2`: https://tools.ietf.org/html/rfc6749#section-5.2
"""
headers = self._get_default_headers()
try:
log.debug('Validating refresh token request, %r.', request)
self.validate_token_request(request)
except errors.OAuth2Error as e:
log.debug('Client error in token request, %s.', e)
headers.update(e.headers)
return headers, e.json, e.status_code
token = token_handler.create_token(request,
refresh_token=self.issue_new_refresh_tokens, save_token=False)
for modifier in self._token_modifiers:
token = modifier(token)
self.request_validator.save_token(token, request)
log.debug('Issuing new token to client id %r (%r), %r.',
request.client_id, request.client, token)
return headers, json.dumps(token), 200
def validate_token_request(self, request):
"""
:param request: OAuthlib request.
:type request: oauthlib.common.Request
"""
# REQUIRED. Value MUST be set to "refresh_token".
if request.grant_type != 'refresh_token':
raise errors.UnsupportedGrantTypeError(request=request)
for validator in self.custom_validators.pre_token:
validator(request)
if request.refresh_token is None:
raise errors.InvalidRequestError(
description='Missing refresh token parameter.',
request=request)
# Because refresh tokens are typically long-lasting credentials used to
# request additional access tokens, the refresh token is bound to the
# client to which it was issued. If the client type is confidential or
# the client was issued client credentials (or assigned other
# authentication requirements), the client MUST authenticate with the
# authorization server as described in Section 3.2.1.
# https://tools.ietf.org/html/rfc6749#section-3.2.1
if self.request_validator.client_authentication_required(request):
log.debug('Authenticating client, %r.', request)
if not self.request_validator.authenticate_client(request):
log.debug('Invalid client (%r), denying access.', request)
raise errors.InvalidClientError(request=request)
elif not self.request_validator.authenticate_client_id(request.client_id, request):
log.debug('Client authentication failed, %r.', request)
raise errors.InvalidClientError(request=request)
# Ensure the client is authorized to use this grant type
self.validate_grant_type(request)
# REQUIRED. The refresh token issued to the client.
log.debug('Validating refresh token %s for client %r.',
request.refresh_token, request.client)
if not self.request_validator.validate_refresh_token(
request.refresh_token, request.client, request):
log.debug('Invalid refresh token, %s, for client %r.',
request.refresh_token, request.client)
raise errors.InvalidGrantError(request=request)
original_scopes = utils.scope_to_list(
self.request_validator.get_original_scopes(
request.refresh_token, request))
if request.scope:
request.scopes = utils.scope_to_list(request.scope)
if (not all((s in original_scopes for s in request.scopes))
and not self.request_validator.is_within_original_scope(
request.scopes, request.refresh_token, request)):
log.debug('Refresh token %s lacks requested scopes, %r.',
request.refresh_token, request.scopes)
raise errors.InvalidScopeError(request=request)
else:
request.scopes = original_scopes
for validator in self.custom_validators.post_token:
validator(request)
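# Example (a minimal sketch, assuming a concrete RequestValidator subclass
# named MyValidator; BearerToken is from oauthlib.oauth2) of wiring this
# grant into a token endpoint:
#
#   validator = MyValidator()
#   grant = RefreshTokenGrant(validator)
#   headers, body, status = grant.create_token_response(
#       request, BearerToken(validator))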
|
'use strict';
var https = require('https');
/**
* Checks SSL Expiry date
* @param {string} host
* @param {string} method
* @param {number} port
* @return {Promise<object>}
*/
module.exports = (host, method, port) => {
if (!host) throw new Error("Invalid host");
const options = {
host: host,
method: method || 'HEAD',
port: port || 443,
rejectUnauthorized: false,
agent: false,
};
let numericPort = (!isNaN(parseFloat(options.port)) && isFinite(options.port));
if (numericPort === false) throw new Error("Invalid port");
let daysBetween = (from, to) => Math.round(Math.abs((+from) - (+to))/8.64e7);
if (options.host === null || options.port === null) throw new Error("Invalid host or port");
return new Promise(function(resolve, reject) {
try {
const req = https.request(options, res => {
let { valid_from, valid_to } = res.connection.getPeerCertificate();
let days_remaining = daysBetween(new Date(), new Date(valid_to))
// Check if a certificate has already expired
let now = new Date();
if (new Date(valid_to).getTime() < now.getTime()){
days_remaining = -days_remaining;
}
resolve({
valid_from: valid_from,
valid_to: valid_to,
days_remaining: days_remaining
});
});
req.on('error', (e) => { reject(e) });
req.end();
} catch (e) {
reject(e);
}
})
};
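// Example (a minimal sketch, assuming this module is saved as ssl-checker.js
// and the target host is reachable):
//
//   const sslChecker = require('./ssl-checker');
//   sslChecker('example.com').then(({ valid_to, days_remaining }) => {
//     console.log(`expires ${valid_to} (${days_remaining} days remaining)`);
//   });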
|
// Copyright 2021 DeepMind Technologies Limited
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//---------------------------------//
#include "engine/engine_ray.h"
#include <stddef.h>
#include <mujoco/mjdata.h>
#include <mujoco/mjmodel.h>
#include <mujoco/mjvisualize.h>
#include "engine/engine_macro.h"
#include "engine/engine_util_blas.h"
#include "engine/engine_util_errmem.h"
#include "engine/engine_util_misc.h"
#include "engine/engine_util_spatial.h"
//---------------------------- utility functions ---------------------------------------------------
// map ray to local geom frame
static void ray_map(const mjtNum* pos, const mjtNum* mat, const mjtNum* pnt, const mjtNum* vec,
mjtNum* lpnt, mjtNum* lvec) {
const mjtNum dif[3] = {pnt[0]-pos[0], pnt[1]-pos[1], pnt[2]-pos[2]};
// lpnt = mat' * dif
lpnt[0] = mat[0]*dif[0] + mat[3]*dif[1] + mat[6]*dif[2];
lpnt[1] = mat[1]*dif[0] + mat[4]*dif[1] + mat[7]*dif[2];
lpnt[2] = mat[2]*dif[0] + mat[5]*dif[1] + mat[8]*dif[2];
// lvec = mat' * vec
lvec[0] = mat[0]*vec[0] + mat[3]*vec[1] + mat[6]*vec[2];
lvec[1] = mat[1]*vec[0] + mat[4]*vec[1] + mat[7]*vec[2];
lvec[2] = mat[2]*vec[0] + mat[5]*vec[1] + mat[8]*vec[2];
}
// eliminate geom
static int ray_eliminate(const mjModel* m, const mjData* d, int geomid,
const mjtByte* geomgroup, mjtByte flg_static, int bodyexclude) {
// body exclusion
if (m->geom_bodyid[geomid]==bodyexclude) {
return 1;
}
// invisible geom exclusion
if (m->geom_matid[geomid]<0 && m->geom_rgba[4*geomid+3]==0) {
return 1;
}
// invisible material exclusion
if (m->geom_matid[geomid]>=0 && m->mat_rgba[4*m->geom_matid[geomid]+3]==0) {
return 1;
}
// static exclusion
if (!flg_static && m->geom_bodyid[geomid]==0) {
return 1;
}
// plane and hfield inclusion
if (m->geom_type[geomid]==mjGEOM_PLANE || m->geom_type[geomid]==mjGEOM_HFIELD) {
return 0;
}
// no geomgroup inclusion
if (!geomgroup) {
return 0;
}
// group inclusion/exclusion
int groupid = mjMIN(mjNGROUP-1, mjMAX(0, m->geom_group[geomid]));
return (geomgroup[groupid]==0);
}
// compute solution from quadratic: a*x^2 + 2*b*x + c = 0
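// writing the linear coefficient as 2*b allows the reduced discriminant
// b*b - a*c and roots (-b +/- sqrt(b*b - a*c))/a, saving a few multiplies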
static mjtNum ray_quad(mjtNum a, mjtNum b, mjtNum c, mjtNum* x) {
// compute determinant and check
mjtNum det = b*b - a*c;
if (det<mjMINVAL) {
x[0] = -1;
x[1] = -1;
return -1;
}
det = mju_sqrt(det);
// compute the two solutions
x[0] = (-b-det)/a;
x[1] = (-b+det)/a;
// finalize result
if (x[0]>=0) {
return x[0];
} else if (x[1]>=0) {
return x[1];
} else {
return -1;
}
}
// intersect ray with triangle
static mjtNum ray_triangle(mjtNum v[][3], const mjtNum* lpnt, const mjtNum* lvec,
const mjtNum* b0, const mjtNum* b1) {
// dif = v[i] - lpnt
mjtNum dif[3][3];
for (int i=0; i<3; i++) {
for (int j=0; j<3; j++) {
dif[i][j] = v[i][j] - lpnt[j];
}
}
// project difference vectors in normal plane
mjtNum planar[3][2];
for (int i=0; i<3; i++) {
planar[i][0] = mju_dot3(b0, dif[i]);
planar[i][1] = mju_dot3(b1, dif[i]);
}
// reject if on the same side of any coordinate axis
if ((planar[0][0]>0 && planar[1][0]>0 && planar[2][0]>0) ||
(planar[0][0]<0 && planar[1][0]<0 && planar[2][0]<0) ||
(planar[0][1]>0 && planar[1][1]>0 && planar[2][1]>0) ||
(planar[0][1]<0 && planar[1][1]<0 && planar[2][1]<0)) {
return -1;
}
// determine if origin is inside planar projection of triangle
// A = (p0-p2, p1-p2), b = -p2, solve A*t = b
mjtNum A[4] = {planar[0][0]-planar[2][0], planar[1][0]-planar[2][0],
planar[0][1]-planar[2][1], planar[1][1]-planar[2][1]};
mjtNum b[2] = {-planar[2][0], -planar[2][1]};
mjtNum det = A[0]*A[3] - A[1]*A[2];
if (mju_abs(det)<mjMINVAL) {
return -1;
}
mjtNum t0 = (A[3]*b[0] - A[1]*b[1]) / det;
mjtNum t1 = (-A[2]*b[0] + A[0]*b[1]) / det;
// check if outside
if (t0<0 || t1<0|| t0+t1>1) {
return -1;
}
// intersect ray with plane of triangle
mju_sub3(dif[0], v[0], v[2]); // v0-v2
mju_sub3(dif[1], v[1], v[2]); // v1-v2
mju_sub3(dif[2], lpnt, v[2]); // lp-v2
mjtNum nrm[3];
mju_cross(nrm, dif[0], dif[1]); // normal to triangle plane
mjtNum denom = mju_dot3(lvec, nrm);
if (mju_abs(denom)<mjMINVAL) {
return -1;
}
return (-mju_dot3(dif[2], nrm) / denom);
}
//---------------------------- geom-specific intersection functions --------------------------------
// plane
static mjtNum ray_plane(const mjtNum* pos, const mjtNum* mat, const mjtNum* size,
const mjtNum* pnt, const mjtNum* vec) {
// map to local frame
mjtNum lpnt[3], lvec[3];
ray_map(pos, mat, pnt, vec, lpnt, lvec);
// z-vec not pointing towards front face: reject
if (lvec[2]>-mjMINVAL) {
return -1;
}
// intersection with plane
const mjtNum x = -lpnt[2]/lvec[2];
if (x<0) {
return -1;
}
mjtNum p0 = lpnt[0] + x*lvec[0];
mjtNum p1 = lpnt[1] + x*lvec[1];
// accept only within rendered rectangle
if ((size[0]<=0 || mju_abs(p0)<=size[0]) &&
(size[1]<=0 || mju_abs(p1)<=size[1])) {
return x;
} else {
return -1;
}
}
// sphere
static mjtNum ray_sphere(const mjtNum* pos, const mjtNum* mat, const mjtNum* size,
const mjtNum* pnt, const mjtNum* vec) {
// (x*vec+pnt-pos)'*(x*vec+pnt-pos) = size[0]*size[0]
mjtNum dif[3] = {pnt[0]-pos[0], pnt[1]-pos[1], pnt[2]-pos[2]};
mjtNum a = vec[0]*vec[0] + vec[1]*vec[1] + vec[2]*vec[2];
mjtNum b = vec[0]*dif[0] + vec[1]*dif[1] + vec[2]*dif[2];
mjtNum c = dif[0]*dif[0] + dif[1]*dif[1] + dif[2]*dif[2] - size[0]*size[0];
// solve a*x^2 + 2*b*x + c = 0
mjtNum xx[2];
return ray_quad(a, b, c, xx);
}
// capsule
static mjtNum ray_capsule(const mjtNum* pos, const mjtNum* mat, const mjtNum* size,
const mjtNum* pnt, const mjtNum* vec) {
// bounding sphere test
mjtNum ssz = size[0] + size[1];
if (ray_sphere(pos, NULL, &ssz, pnt, vec)<0) {
return -1;
}
// map to local frame
mjtNum lpnt[3], lvec[3];
ray_map(pos, mat, pnt, vec, lpnt, lvec);
// init solution
mjtNum x = -1, sol, xx[2];
// cylinder round side: (x*lvec+lpnt)'*(x*lvec+lpnt) = size[0]*size[0]
mjtNum a = lvec[0]*lvec[0] + lvec[1]*lvec[1];
mjtNum b = lvec[0]*lpnt[0] + lvec[1]*lpnt[1];
mjtNum c = lpnt[0]*lpnt[0] + lpnt[1]*lpnt[1] - size[0]*size[0];
// solve a*x^2 + 2*b*x + c = 0
sol = ray_quad(a, b, c, xx);
// make sure round solution is between flat sides
if (sol>=0 && mju_abs(lpnt[2]+sol*lvec[2])<=size[1]) {
if (x<0 || sol<x) {
x = sol;
}
}
// top cap
mjtNum ldif[3] = {lpnt[0], lpnt[1], lpnt[2]-size[1]};
a = lvec[0]*lvec[0] + lvec[1]*lvec[1] + lvec[2]*lvec[2];
b = lvec[0]*ldif[0] + lvec[1]*ldif[1] + lvec[2]*ldif[2];
c = ldif[0]*ldif[0] + ldif[1]*ldif[1] + ldif[2]*ldif[2] - size[0]*size[0];
ray_quad(a, b, c, xx);
// accept only top half of sphere
for (int i=0; i<2; i++) {
if (xx[i]>=0 && lpnt[2]+xx[i]*lvec[2]>=size[1]) {
if (x<0 || xx[i]<x) {
x = xx[i];
}
}
}
// bottom cap
ldif[2] = lpnt[2]+size[1];
b = lvec[0]*ldif[0] + lvec[1]*ldif[1] + lvec[2]*ldif[2];
c = ldif[0]*ldif[0] + ldif[1]*ldif[1] + ldif[2]*ldif[2] - size[0]*size[0];
ray_quad(a, b, c, xx);
// accept only bottom half of sphere
for (int i=0; i<2; i++) {
if (xx[i]>=0 && lpnt[2]+xx[i]*lvec[2]<=-size[1]) {
if (x<0 || xx[i]<x) {
x = xx[i];
}
}
}
return x;
}
// ellipsoid
static mjtNum ray_ellipsoid(const mjtNum* pos, const mjtNum* mat, const mjtNum* size,
const mjtNum* pnt, const mjtNum* vec) {
// map to local frame
mjtNum lpnt[3], lvec[3];
ray_map(pos, mat, pnt, vec, lpnt, lvec);
// invert size^2
mjtNum s[3] = {1/(size[0]*size[0]), 1/(size[1]*size[1]), 1/(size[2]*size[2])};
// (x*lvec+lpnt)' * diag(1./size^2) * (x*lvec+lpnt) = 1
mjtNum a = s[0]*lvec[0]*lvec[0] + s[1]*lvec[1]*lvec[1] + s[2]*lvec[2]*lvec[2];
mjtNum b = s[0]*lvec[0]*lpnt[0] + s[1]*lvec[1]*lpnt[1] + s[2]*lvec[2]*lpnt[2];
mjtNum c = s[0]*lpnt[0]*lpnt[0] + s[1]*lpnt[1]*lpnt[1] + s[2]*lpnt[2]*lpnt[2] - 1;
// solve a*x^2 + 2*b*x + c = 0
mjtNum xx[2];
return ray_quad(a, b, c, xx);
}
// cylinder
static mjtNum ray_cylinder(const mjtNum* pos, const mjtNum* mat, const mjtNum* size,
const mjtNum* pnt, const mjtNum* vec) {
// bounding sphere test
mjtNum ssz = mju_sqrt(size[0]*size[0] + size[1]*size[1]);
if (ray_sphere(pos, NULL, &ssz, pnt, vec)<0) {
return -1;
}
// map to local frame
mjtNum lpnt[3], lvec[3];
ray_map(pos, mat, pnt, vec, lpnt, lvec);
// init solution
mjtNum x = -1, sol;
// flat sides
int side;
if (mju_abs(lvec[2])>mjMINVAL) {
for (side=-1; side<=1; side+=2) {
// solution of: lpnt[2] + x*lvec[2] = side*height_size
sol = (side*size[1]-lpnt[2])/lvec[2];
// process if non-negative
if (sol>=0) {
// intersection with horizontal face
mjtNum p0 = lpnt[0] + sol*lvec[0];
mjtNum p1 = lpnt[1] + sol*lvec[1];
// accept within radius
if (p0*p0 + p1*p1 <= size[0]*size[0]) {
if (x<0 || sol<x) {
x = sol;
}
}
}
}
}
// (x*lvec+lpnt)'*(x*lvec+lpnt) = size[0]*size[0]
mjtNum a = lvec[0]*lvec[0] + lvec[1]*lvec[1];
mjtNum b = lvec[0]*lpnt[0] + lvec[1]*lpnt[1];
mjtNum c = lpnt[0]*lpnt[0] + lpnt[1]*lpnt[1] - size[0]*size[0];
// solve a*x^2 + 2*b*x + c = 0
mjtNum xx[2];
sol = ray_quad(a, b, c, xx);
// make sure round solution is between flat sides
if (sol>=0 && mju_abs(lpnt[2]+sol*lvec[2])<=size[1]) {
if (x<0 || sol<x) {
x = sol;
}
}
return x;
}
// box
static mjtNum ray_box(const mjtNum* pos, const mjtNum* mat, const mjtNum* size,
const mjtNum* pnt, const mjtNum* vec, mjtNum* all) {
// clear all
if (all) {
for (int i=0; i<6; i++) {
all[i] = -1;
}
}
// bounding sphere test
mjtNum ssz = mju_sqrt(size[0]*size[0] + size[1]*size[1] + size[2]*size[2]);
if (ray_sphere(pos, NULL, &ssz, pnt, vec)<0) {
return -1;
}
// faces
const int iface[3][2] = {
{1, 2},
{0, 2},
{0, 1}
};
// map to local frame
mjtNum lpnt[3], lvec[3];
ray_map(pos, mat, pnt, vec, lpnt, lvec);
// init solution
mjtNum x = -1, sol;
// loop over axes with non-zero vec
for (int i=0; i<3; i++) {
if (mju_abs(lvec[i])>mjMINVAL) {
for (int side=-1; side<=1; side+=2) {
// solution of: lpnt[i] + x*lvec[i] = side*size[i]
sol = (side*size[i]-lpnt[i])/lvec[i];
// process if non-negative
if (sol>=0) {
// intersection with face
mjtNum p0 = lpnt[iface[i][0]] + sol*lvec[iface[i][0]];
mjtNum p1 = lpnt[iface[i][1]] + sol*lvec[iface[i][1]];
// accept within rectangle
if (mju_abs(p0)<=size[iface[i][0]] &&
mju_abs(p1)<=size[iface[i][1]]) {
// update
if (x<0 || sol<x) {
x = sol;
}
// save in all
if (all) {
all[2*i+(side+1)/2] = sol;
}
}
}
}
}
}
return x;
}
// intersect ray with hfield
mjtNum mj_rayHfield(const mjModel* m, const mjData* d, int id,
const mjtNum* pnt, const mjtNum* vec) {
// check geom type
if (m->geom_type[id]!=mjGEOM_HFIELD) {
mju_error("mj_rayHfield: geom with hfield type expected");
}
// hfield id and dimensions
int hid = m->geom_dataid[id];
int nrow = m->hfield_nrow[hid];
int ncol = m->hfield_ncol[hid];
const mjtNum* size = m->hfield_size + 4*hid;
const float* data = m->hfield_data + m->hfield_adr[hid];
// compute size and pos of base box
mjtNum base_size[3] = {size[0], size[1], size[3]*0.5};
mjtNum base_pos[3] = {
d->geom_xpos[3*id] - d->geom_xmat[9*id+2]*size[3]*0.5,
d->geom_xpos[3*id+1] - d->geom_xmat[9*id+5]*size[3]*0.5,
d->geom_xpos[3*id+2] - d->geom_xmat[9*id+8]*size[3]*0.5
};
// compute size and pos of top box
mjtNum top_size[3] = {size[0], size[1], size[2]*0.5};
mjtNum top_pos[3] = {
d->geom_xpos[3*id] + d->geom_xmat[9*id+2]*size[2]*0.5,
d->geom_xpos[3*id+1] + d->geom_xmat[9*id+5]*size[2]*0.5,
d->geom_xpos[3*id+2] + d->geom_xmat[9*id+8]*size[2]*0.5
};
// init: intersection with base box
mjtNum x = ray_box(base_pos, d->geom_xmat+9*id, base_size, pnt, vec, NULL);
// check top box: done if no intersection
mjtNum all[6];
mjtNum top_intersect = ray_box(top_pos, d->geom_xmat+9*id, top_size, pnt, vec, all);
if (top_intersect<0) {
return x;
}
// map to local frame
mjtNum lpnt[3], lvec[3];
ray_map(d->geom_xpos+3*id, d->geom_xmat+9*id, pnt, vec, lpnt, lvec);
// construct basis vectors of normal plane
mjtNum b0[3] = {1, 1, 1}, b1[3];
if (mju_abs(lvec[0])>=mju_abs(lvec[1]) && mju_abs(lvec[0])>=mju_abs(lvec[2])) {
b0[0] = 0;
} else if (mju_abs(lvec[1])>=mju_abs(lvec[2])) {
b0[1] = 0;
} else {
b0[2] = 0;
}
mju_addScl3(b1, b0, lvec, -mju_dot3(lvec, b0)/mju_dot3(lvec, lvec));
mju_normalize3(b1);
mju_cross(b0, b1, lvec);
mju_normalize3(b0);
// find ray segment intersecting top box
mjtNum seg[2] = {0, top_intersect};
for (int i=0; i<6; i++) {
if (all[i]>seg[1]) {
seg[0] = top_intersect;
seg[1] = all[i];
}
}
// project segment endpoints in horizontal plane, discretize
mjtNum dx = (2.0*size[0]) / (ncol-1);
mjtNum dy = (2.0*size[1]) / (nrow-1);
mjtNum SX[2], SY[2];
for (int i=0; i<2; i++) {
SX[i] = (lpnt[0] + seg[i]*lvec[0] + size[0]) / dx;
SY[i] = (lpnt[1] + seg[i]*lvec[1] + size[1]) / dy;
}
// compute ranges, with +1 padding
int cmin = mjMAX(0, (int)mju_floor(mjMIN(SX[0], SX[1]))-1);
int cmax = mjMIN(ncol-1, (int)mju_ceil(mjMAX(SX[0], SX[1]))+1);
int rmin = mjMAX(0, (int)mju_floor(mjMIN(SY[0], SY[1]))-1);
int rmax = mjMIN(nrow-1, (int)mju_ceil(mjMAX(SY[0], SY[1]))+1);
// check triangles within bounds
for (int r=rmin; r<rmax; r++) {
for (int c=cmin; c<cmax; c++) {
// first triangle
mjtNum va[3][3] = {
{dx*c-size[0], dy*r-size[1], data[r*ncol+c]*size[2]},
{dx*(c+1)-size[0], dy*(r+1)-size[1], data[(r+1)*ncol+(c+1)]*size[2]},
{dx*(c+1)-size[0], dy*r-size[1], data[r*ncol+(c+1)]*size[2]}
};
mjtNum sol = ray_triangle(va, lpnt, lvec, b0, b1);
if (sol>=0 && (x<0 || sol<x)) {
x = sol;
}
// second triangle
mjtNum vb[3][3] = {
{dx*c-size[0], dy*r-size[1], data[r*ncol+c]*size[2]},
{dx*(c+1)-size[0], dy*(r+1)-size[1], data[(r+1)*ncol+(c+1)]*size[2]},
{dx*c-size[0], dy*(r+1)-size[1], data[(r+1)*ncol+c]*size[2]}
};
sol = ray_triangle(vb, lpnt, lvec, b0, b1);
if (sol>=0 && (x<0 || sol<x)) {
x = sol;
}
}
}
// check viable sides of top box
for (int i=0; i<4; i++) {
if (all[i]>=0 && (all[i]<x || x<0)) {
// normalized height of intersection point
mjtNum z = (lpnt[2] + all[i]*lvec[2]) / size[2];
// rectangle points
mjtNum y, y0, z0, z1;
// side normal to x-axis
if (i<2) {
y = (lpnt[1] + all[i]*lvec[1] + size[1]) / dy;
y0 = mjMAX(0, mjMIN(nrow-2, mju_floor(y)));
// row stride in data is ncol, as in the triangle loop above
z0 = (mjtNum)data[mju_round(y0)*ncol + (i==1 ? ncol-1 : 0)];
z1 = (mjtNum)data[mju_round(y0+1)*ncol + (i==1 ? ncol-1 : 0)];
}
// side normal to y-axis
else {
y = (lpnt[0] + all[i]*lvec[0] + size[0]) / dx;
y0 = mjMAX(0, mjMIN(ncol-2, mju_floor(y)));
z0 = (mjtNum)data[mju_round(y0) + (i==3 ? (nrow-1)*ncol : 0)];
z1 = (mjtNum)data[mju_round(y0+1) + (i==3 ? (nrow-1)*ncol : 0)];
}
// check if point is below line segment
if (z < z0*(y0+1-y) + z1*(y-y0)) {
x = all[i];
}
}
}
return x;
}
// intersect ray with mesh
mjtNum mj_rayMesh(const mjModel* m, const mjData* d, int id,
const mjtNum* pnt, const mjtNum* vec) {
// check geom type
if (m->geom_type[id]!=mjGEOM_MESH) {
mju_error("mj_rayMesh: geom with mesh type expected");
}
// bounding box test
if (ray_box(d->geom_xpos+3*id, d->geom_xmat+9*id, m->geom_size+3*id, pnt, vec, NULL)<0) {
return -1;
}
// map to local frame
mjtNum lpnt[3], lvec[3];
ray_map(d->geom_xpos+3*id, d->geom_xmat+9*id, pnt, vec, lpnt, lvec);
// construct basis vectors of normal plane
mjtNum b0[3] = {1, 1, 1}, b1[3];
if (mju_abs(lvec[0])>=mju_abs(lvec[1]) && mju_abs(lvec[0])>=mju_abs(lvec[2])) {
b0[0] = 0;
} else if (mju_abs(lvec[1])>=mju_abs(lvec[2])) {
b0[1] = 0;
} else {
b0[2] = 0;
}
mju_addScl3(b1, b0, lvec, -mju_dot3(lvec, b0)/mju_dot3(lvec, lvec));
mju_normalize3(b1);
mju_cross(b0, b1, lvec);
mju_normalize3(b0);
// init solution
mjtNum x = -1, sol;
// process all triangles
int face, meshid = m->geom_dataid[id];
for (face = m->mesh_faceadr[meshid];
face < m->mesh_faceadr[meshid] + m->mesh_facenum[meshid];
face++) {
// get float vertices
float* vf[3];
vf[0] = m->mesh_vert + 3*(m->mesh_face[3*face] + m->mesh_vertadr[meshid]);
vf[1] = m->mesh_vert + 3*(m->mesh_face[3*face+1] + m->mesh_vertadr[meshid]);
vf[2] = m->mesh_vert + 3*(m->mesh_face[3*face+2] + m->mesh_vertadr[meshid]);
// convert to mjtNum
mjtNum v[3][3];
for (int i=0; i<3; i++) {
for (int j=0; j<3; j++) {
v[i][j] = (mjtNum)vf[i][j];
}
}
// solve
sol = ray_triangle(v, lpnt, lvec, b0, b1);
// update
if (sol>=0 && (x<0 || sol<x)) {
x = sol;
}
}
return x;
}
// intersect ray with pure geom, no meshes or hfields
mjtNum mju_rayGeom(const mjtNum* pos, const mjtNum* mat, const mjtNum* size,
const mjtNum* pnt, const mjtNum* vec, int geomtype) {
switch (geomtype) {
case mjGEOM_PLANE:
return ray_plane(pos, mat, size, pnt, vec);
case mjGEOM_SPHERE:
return ray_sphere(pos, mat, size, pnt, vec);
case mjGEOM_CAPSULE:
return ray_capsule(pos, mat, size, pnt, vec);
case mjGEOM_ELLIPSOID:
return ray_ellipsoid(pos, mat, size, pnt, vec);
case mjGEOM_CYLINDER:
return ray_cylinder(pos, mat, size, pnt, vec);
case mjGEOM_BOX:
return ray_box(pos, mat, size, pnt, vec, NULL);
default:
mju_error_i("mju_rayGeom: unexpected geom type %d", geomtype);
return -1;
}
}
// intersect ray with skin, return nearest vertex id
mjtNum mju_raySkin(int nface, int nvert, const int* face, const float* vert,
const mjtNum* pnt, const mjtNum* vec, int vertid[1]) {
// compute bounding box
mjtNum box[3][2] = {{0, 0}, {0, 0}, {0, 0}};
for (int i=0; i<nvert; i++) {
for (int j=0; j<3; j++) {
// update minimum along side j
if (box[j][0]>vert[3*i+j] || i==0) {
box[j][0] = vert[3*i+j];
}
// update maximum along side j
if (box[j][1]<vert[3*i+j] || i==0) {
box[j][1] = vert[3*i+j];
}
}
}
// construct box geom
mjtNum pos[3], size[3], mat[9] = {1, 0, 0, 0, 1, 0, 0, 0, 1};
for (int j=0; j<3; j++) {
pos[j] = 0.5*(box[j][0]+box[j][1]);
size[j] = 0.5*(box[j][1]-box[j][0]);
}
// apply bounding-box filter
if (ray_box(pos, mat, size, pnt, vec, NULL)<0) {
return -1;
}
// construct basis vectors of normal plane
mjtNum b0[3] = {1, 1, 1}, b1[3];
if (mju_abs(vec[0])>=mju_abs(vec[1]) && mju_abs(vec[0])>=mju_abs(vec[2])) {
b0[0] = 0;
} else if (mju_abs(vec[1])>=mju_abs(vec[2])) {
b0[1] = 0;
} else {
b0[2] = 0;
}
mju_addScl3(b1, b0, vec, -mju_dot3(vec, b0)/mju_dot3(vec, vec));
mju_normalize3(b1);
mju_cross(b0, b1, vec);
mju_normalize3(b0);
// init solution
mjtNum x = -1, sol;
// process all faces
for (int i=0; i<nface; i++) {
// get float vertices
const float* vf[3];
vf[0] = vert + 3*(face[3*i]);
vf[1] = vert + 3*(face[3*i+1]);
vf[2] = vert + 3*(face[3*i+2]);
// convert to mjtNum
mjtNum v[3][3];
for (int j=0; j<3; j++) {
for (int k=0; k<3; k++) {
v[j][k] = (mjtNum)vf[j][k];
}
}
// solve
sol = ray_triangle(v, pnt, vec, b0, b1);
// update
if (sol>=0 && (x<0 || sol<x)) {
x = sol;
// construct intersection point
mjtNum intersect[3];
mju_addScl3(intersect, pnt, vec, sol);
// find nearest vertex
mjtNum dist = mju_dist3(intersect, v[0]);
*vertid = face[3*i];
for (int j=1; j<3; j++) {
mjtNum newdist = mju_dist3(intersect, v[j]);
if (newdist<dist) {
dist = newdist;
*vertid = face[3*i+j];
}
}
}
}
return x;
}
//---------------------------- main entry point ---------------------------------------------------
// intersect ray (pnt+x*vec, x>=0) with visible geoms, except geoms on bodyexclude
// return geomid and distance (x) to nearest surface, or -1 if no intersection
// geomgroup, flg_static are as in mjvOption; geomgroup==NULL skips group exclusion
mjtNum mj_ray(const mjModel* m, const mjData* d, const mjtNum* pnt, const mjtNum* vec,
const mjtByte* geomgroup, mjtByte flg_static, int bodyexclude,
int geomid[1]) {
mjtNum dist, newdist;
// check vector length
if (mju_norm3(vec)<mjMINVAL) {
mju_error("mj_ray: vector length is too small");
}
// clear result
dist = -1;
*geomid = -1;
// loop over geoms not eliminated by mask and bodyexclude
for (int i=0; i<m->ngeom; i++) {
if (!ray_eliminate(m, d, i, geomgroup, flg_static, bodyexclude)) {
// handle mesh and hfield separately
if (m->geom_type[i]==mjGEOM_MESH) {
newdist = mj_rayMesh(m, d, i, pnt, vec);
} else if (m->geom_type[i]==mjGEOM_HFIELD) {
newdist = mj_rayHfield(m, d, i, pnt, vec);
}
// otherwise general dispatch
else {
newdist = mju_rayGeom(d->geom_xpos+3*i, d->geom_xmat+9*i,
m->geom_size+3*i, pnt, vec, m->geom_type[i]);
}
// update if closer intersection found
if (newdist>=0 && (newdist<dist || dist<0)) {
dist = newdist;
*geomid = i;
}
}
}
return dist;
}
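// Example (a minimal sketch, assuming an initialized mjModel* m and mjData* d
// after a forward pass) of casting a ray straight down from 1m above origin:
//
//   mjtNum pnt[3] = {0, 0, 1}, vec[3] = {0, 0, -1};
//   int geomid;
//   mjtNum x = mj_ray(m, d, pnt, vec, NULL, 1, -1, &geomid);
//   // geomid >= 0 means a visible geom was hit at point pnt + x*vec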
|
/* -------------------------- */
/* forcecrash */
/* -------------------------- */
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports)
: typeof define === 'function' && define.amd ? define(['exports'], factory)
: (factory((global.forcecrash = global.forcecrash || {})))
}(this, function (exports) {
'use strict'
/* -------------------------- */
/* static */
/* -------------------------- */
let constant = function constant (x) {
return function () {
return x
}
}
/* -------------------------- */
/* forcecrash */
/* -------------------------- */
var forcecrash = function forcecrash (__mapper = {}) {
let props = __mapper('props')()
/* -------------------------- */
/* muonApi */
/* -------------------------- */
var muonApi = function muonApi () {}
// -------------------------------------//
// force //
// -------------------------------------//
muonApi.force = function (params) {
var nodes = params.nodes
let x0 = (params.x0 !== undefined) ? params.x0 : 0
let y0 = (params.y0 !== undefined) ? params.y0 : 0
let r = (params.r !== undefined) ? params.r : 1
function force () {
for (let i = 0; i < nodes.length; ++i) {
let node = nodes[i]
let x = node.x
let y = node.y
let d2 = props.lib.distance2p([x0, y0], [x, y])
let dd = Math.sqrt(d2)
if (dd < r) {
__mapper('muonStore').apply({'type': 'DELANIMA', 'caller': 'force limit', 'anima': node})
}
}
}
function initialize () {
if (!nodes) return
}
force.initialize = function (_) {
nodes = _
initialize()
}
force.x0 = function (_) {
return arguments.length ? (x0 = typeof _ === 'function' ? _ : constant(+_), initialize(), force) : x0
}
force.y0 = function (_) {
return arguments.length ? (y0 = typeof _ === 'function' ? _ : constant(+_), initialize(), force) : y0
}
force.r = function (_) {
return arguments.length ? (r = typeof _ === 'function' ? _ : constant(+_), initialize(), force) : r
}
return force
}
// -------------------------------------//
// muonApi //
// -------------------------------------//
return muonApi
}
exports.forcecrash = forcecrash
}))
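// Example (a minimal sketch, assuming a d3-style simulation and a __mapper
// that provides props.lib.distance2p and a muonStore): nodes drifting within
// radius r of (x0, y0) are removed via a DELANIMA action on each tick.
//
//   let crash = forcecrash(__mapper).force({ nodes: nodes, x0: 0, y0: 0, r: 30 })
//   simulation.force('crash', crash)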
|
import { Implementation } from '../../Implementation';
import { MongooseFieldAdapter } from '@keystonejs/adapter-mongoose';
import { KnexFieldAdapter } from '@keystonejs/adapter-knex';
export class UuidImplementation extends Implementation {
constructor(path, { caseTo = 'lower' }) {
super(...arguments);
this.normaliseValue = a => a;
if (caseTo && caseTo.toString().toLowerCase() === 'upper') {
this.normaliseValue = a => a.toString().toUpperCase();
} else if (caseTo && caseTo.toString().toLowerCase() === 'lower') {
this.normaliseValue = a => a.toString().toLowerCase();
}
this.isOrderable = true;
}
get _supportsUnique() {
return true;
}
gqlOutputFields() {
return [`${this.path}: ID`];
}
gqlOutputFieldResolvers() {
return { [`${this.path}`]: item => item[this.path] };
}
gqlQueryInputFields() {
return [...this.equalityInputFields('ID'), ...this.inInputFields('ID')];
}
gqlUpdateInputFields() {
return [`${this.path}: ID`];
}
gqlCreateInputFields() {
return [`${this.path}: ID`];
}
}
const validator = a =>
typeof a === 'string' &&
/^[0-9a-fA-F]{8}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{12}$/.test(a);
// TODO: UUIDs _should_ be stored in Mongo using binary subtype 0x04 but strings are easier; see README.md
export class MongoUuidInterface extends MongooseFieldAdapter {
addToMongooseSchema(schema, mongoose) {
const schemaOptions = {
type: mongoose.Schema.Types.String,
validate: {
validator: this.buildValidator(validator),
message: '{VALUE} is not a valid UUID. Must be 8-4-4-4-12 hex format',
},
};
schema.add({ [this.path]: this.mergeSchemaOptions(schemaOptions, this.config) });
}
setupHooks({ addPreSaveHook, addPostReadHook }) {
// TODO: Remove the need to dereference the list and field to get the normalise function
addPreSaveHook(item => {
// Only run the hook if the item actually contains the field
// NOTE: Can't use hasOwnProperty here, as the mongoose data object
// returned isn't a POJO
if (!(this.path in item)) {
return item;
}
if (item[this.path]) {
if (typeof item[this.path] === 'string') {
item[this.path] = this.field.normaliseValue(item[this.path]);
} else {
// Should have been caught by the validator??
throw `Invalid UUID value given for '${this.path}'`;
}
} else {
item[this.path] = null;
}
return item;
});
addPostReadHook(item => {
if (item[this.path]) {
item[this.path] = this.field.normaliseValue(item[this.path]);
}
return item;
});
}
getQueryConditions(dbPath) {
return {
...this.equalityConditions(dbPath, this.field.normaliseValue),
...this.inConditions(dbPath, this.field.normaliseValue),
};
}
}
export class KnexUuidInterface extends KnexFieldAdapter {
constructor() {
super(...arguments);
// TODO: Warning on invalid config for primary keys?
if (!this.field.isPrimaryKey) {
this.isUnique = !!this.config.isUnique;
this.isIndexed = !!this.config.isIndexed && !this.config.isUnique;
}
}
addToTableSchema(table) {
const column = table.uuid(this.path);
// Fair to say primary keys are always non-nullable and uniqueness is implied by primary()
if (this.field.isPrimaryKey) {
column.primary().notNullable();
} else {
if (this.isUnique) column.unique();
else if (this.isIndexed) column.index();
if (this.isNotNullable) column.notNullable();
}
if (this.defaultTo) column.defaultTo(this.defaultTo);
}
addToForeignTableSchema(table, { path, isUnique, isIndexed, isNotNullable }) {
if (!this.field.isPrimaryKey) {
throw `Can't create foreign key '${path}' on table "${table._tableName}"; ` +
`'${this.path}' on list '${this.field.listKey}' is not the primary key.`;
}
const column = table.uuid(path);
if (isUnique) column.unique();
else if (isIndexed) column.index();
if (isNotNullable) column.notNullable();
}
getQueryConditions(dbPath) {
return {
...this.equalityConditions(dbPath, this.field.normaliseValue),
...this.inConditions(dbPath, this.field.normaliseValue),
};
}
}
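// Example (a minimal sketch, assuming a Keystone `Uuid` field type wired to
// these adapter classes) of declaring a unique, lower-cased UUID field:
//
//   keystone.createList('Order', {
//     fields: { externalId: { type: Uuid, caseTo: 'lower', isUnique: true } },
//   });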
|
# -*- coding: utf-8 -*-
"""
flask.app
~~~~~~~~~
This module implements the central WSGI application object.
:copyright: (c) 2014 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import os
import sys
from threading import Lock
from datetime import timedelta
from itertools import chain
from functools import update_wrapper
from werkzeug.datastructures import ImmutableDict
from werkzeug.routing import Map, Rule, RequestRedirect, BuildError
from werkzeug.exceptions import HTTPException, InternalServerError, \
MethodNotAllowed, BadRequest
from .helpers import _PackageBoundObject, url_for, get_flashed_messages, \
locked_cached_property, _endpoint_from_view_func, find_package
from . import json
from .wrappers import Request, Response
from .config import ConfigAttribute, Config
from .ctx import RequestContext, AppContext, _AppCtxGlobals
from .globals import _request_ctx_stack, request, session, g
from .sessions import SecureCookieSessionInterface
from .module import blueprint_is_module
from .templating import DispatchingJinjaLoader, Environment, \
_default_template_ctx_processor
from .signals import request_started, request_finished, got_request_exception, \
request_tearing_down, appcontext_tearing_down
from ._compat import reraise, string_types, text_type, integer_types
# a lock used for logger initialization
_logger_lock = Lock()
def _make_timedelta(value):
if not isinstance(value, timedelta):
return timedelta(seconds=value)
return value
def setupmethod(f):
"""Wraps a method so that it performs a check in debug mode if the
first request was already handled.
"""
def wrapper_func(self, *args, **kwargs):
if self.debug and self._got_first_request:
raise AssertionError('A setup function was called after the '
'first request was handled. This usually indicates a bug '
'in the application where a module was not imported '
'and decorators or other functionality was called too late.\n'
'To fix this make sure to import all your view modules, '
'database models and everything related at a central place '
'before the application starts serving requests.')
return f(self, *args, **kwargs)
return update_wrapper(wrapper_func, f)
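# Example (a minimal sketch; add_widget is a hypothetical setup method) of
# what this guard protects:
#
#   @setupmethod
#   def add_widget(self, name, widget):
#       self._widgets[name] = widget  # raises in debug mode after 1st request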
class Flask(_PackageBoundObject):
"""The flask object implements a WSGI application and acts as the central
object. It is passed the name of the module or package of the
application. Once it is created it will act as a central registry for
the view functions, the URL rules, template configuration and much more.
The name of the package is used to resolve resources from inside the
package or the folder the module is contained in depending on if the
package parameter resolves to an actual python package (a folder with
an `__init__.py` file inside) or a standard module (just a `.py` file).
For more information about resource loading, see :func:`open_resource`.
Usually you create a :class:`Flask` instance in your main module or
in the `__init__.py` file of your package like this::
from flask import Flask
app = Flask(__name__)
.. admonition:: About the First Parameter
The idea of the first parameter is to give Flask an idea what
belongs to your application. This name is used to find resources
on the file system, can be used by extensions to improve debugging
information and a lot more.
So it's important what you provide there. If you are using a single
module, `__name__` is always the correct value. If you however are
using a package, it's usually recommended to hardcode the name of
your package there.
For example if your application is defined in `yourapplication/app.py`
you should create it with one of the two versions below::
app = Flask('yourapplication')
app = Flask(__name__.split('.')[0])
Why is that? The application will work even with `__name__`, thanks
to how resources are looked up. However it will make debugging more
painful. Certain extensions can make assumptions based on the
import name of your application. For example the Flask-SQLAlchemy
extension will look for the code in your application that triggered
an SQL query in debug mode. If the import name is not properly set
up, that debugging information is lost. (For example it would only
pick up SQL queries in `yourapplication.app` and not
`yourapplication.views.frontend`)
.. versionadded:: 0.7
The `static_url_path`, `static_folder`, and `template_folder`
parameters were added.
.. versionadded:: 0.8
The `instance_path` and `instance_relative_config` parameters were
added.
.. versionadded:: 1.0
The `root_path` parameter was added.
:param import_name: the name of the application package
:param static_url_path: can be used to specify a different path for the
static files on the web. Defaults to the name
of the `static_folder` folder.
:param static_folder: the folder with static files that should be served
at `static_url_path`. Defaults to the ``'static'``
folder in the root path of the application.
:param template_folder: the folder that contains the templates that should
be used by the application. Defaults to
``'templates'`` folder in the root path of the
application.
:param instance_path: An alternative instance path for the application.
By default the folder ``'instance'`` next to the
package or module is assumed to be the instance
path.
:param instance_relative_config: if set to `True` relative filenames
for loading the config are assumed to
be relative to the instance path instead
of the application root.
:param root_path: Flask by default will automatically calculate the path
to the root of the application. In certain situations
this cannot be achieved (for instance if the package
is a Python 3 namespace package) and needs to be
manually defined.
"""
#: The class that is used for request objects. See :class:`~flask.Request`
#: for more information.
request_class = Request
#: The class that is used for response objects. See
#: :class:`~flask.Response` for more information.
response_class = Response
#: The class that is used for the :data:`~flask.g` instance.
#:
#: Example use cases for a custom class:
#:
#: 1. Store arbitrary attributes on flask.g.
#: 2. Add a property for lazy per-request database connectors.
#: 3. Return None instead of AttributeError on expected attributes.
#: 4. Raise exception if an unexpected attr is set, a "controlled" flask.g.
#:
#: In Flask 0.9 this property was called `request_globals_class` but it
#: was changed in 0.10 to :attr:`app_ctx_globals_class` because the
#: flask.g object is not application context scoped.
#:
#: .. versionadded:: 0.10
app_ctx_globals_class = _AppCtxGlobals
# Backwards compatibility support
def _get_request_globals_class(self):
return self.app_ctx_globals_class
def _set_request_globals_class(self, value):
from warnings import warn
warn(DeprecationWarning('request_globals_class attribute is now '
'called app_ctx_globals_class'))
self.app_ctx_globals_class = value
request_globals_class = property(_get_request_globals_class,
_set_request_globals_class)
del _get_request_globals_class, _set_request_globals_class
#: The class that is used for the ``config`` attribute of this app.
#: Defaults to :class:`~flask.Config`.
#:
#: Example use cases for a custom class:
#:
#: 1. Default values for certain config options.
#: 2. Access to config values through attributes in addition to keys.
#:
#: .. versionadded:: 1.0
config_class = Config
#: The debug flag. Set this to `True` to enable debugging of the
#: application. In debug mode the debugger will kick in when an unhandled
#: exception occurs and the integrated server will automatically reload
#: the application if changes in the code are detected.
#:
#: This attribute can also be configured from the config with the `DEBUG`
#: configuration key. Defaults to `False`.
debug = ConfigAttribute('DEBUG')
#: The testing flag. Set this to `True` to enable the test mode of
#: Flask extensions (and in the future probably also Flask itself).
#: For example this might activate unittest helpers that have an
#: additional runtime cost which should not be enabled by default.
#:
#: If this is enabled and PROPAGATE_EXCEPTIONS is not changed from the
#: default it's implicitly enabled.
#:
#: This attribute can also be configured from the config with the
#: `TESTING` configuration key. Defaults to `False`.
testing = ConfigAttribute('TESTING')
#: If a secret key is set, cryptographic components can use this to
#: sign cookies and other things. Set this to a complex random value
#: when you want to use the secure cookie for instance.
#:
#: This attribute can also be configured from the config with the
#: `SECRET_KEY` configuration key. Defaults to `None`.
secret_key = ConfigAttribute('SECRET_KEY')
#: The secure cookie uses this for the name of the session cookie.
#:
#: This attribute can also be configured from the config with the
#: `SESSION_COOKIE_NAME` configuration key. Defaults to ``'session'``
session_cookie_name = ConfigAttribute('SESSION_COOKIE_NAME')
#: A :class:`~datetime.timedelta` which is used to set the expiration
#: date of a permanent session. The default is 31 days which makes a
#: permanent session survive for roughly one month.
#:
#: This attribute can also be configured from the config with the
#: `PERMANENT_SESSION_LIFETIME` configuration key. Defaults to
#: ``timedelta(days=31)``
permanent_session_lifetime = ConfigAttribute('PERMANENT_SESSION_LIFETIME',
get_converter=_make_timedelta)
#: Enable this if you want to use the X-Sendfile feature. Keep in
#: mind that the server has to support this. This only affects files
#: sent with the :func:`send_file` method.
#:
#: .. versionadded:: 0.2
#:
#: This attribute can also be configured from the config with the
#: `USE_X_SENDFILE` configuration key. Defaults to `False`.
use_x_sendfile = ConfigAttribute('USE_X_SENDFILE')
#: The name of the logger to use. By default the logger name is the
#: package name passed to the constructor.
#:
#: .. versionadded:: 0.4
logger_name = ConfigAttribute('LOGGER_NAME')
#: Enable the deprecated module support? This is active by default
#: in 0.7 but will be changed to False in 0.8. With Flask 1.0 modules
#: will be removed in favor of Blueprints
enable_modules = True
#: The logging format used for the debug logger. This is only used when
#: the application is in debug mode, otherwise the attached logging
#: handler does the formatting.
#:
#: .. versionadded:: 0.3
debug_log_format = (
'-' * 80 + '\n' +
'%(levelname)s in %(module)s [%(pathname)s:%(lineno)d]:\n' +
'%(message)s\n' +
'-' * 80
)
#: The JSON encoder class to use. Defaults to :class:`~flask.json.JSONEncoder`.
#:
#: .. versionadded:: 0.10
json_encoder = json.JSONEncoder
#: The JSON decoder class to use. Defaults to :class:`~flask.json.JSONDecoder`.
#:
#: .. versionadded:: 0.10
json_decoder = json.JSONDecoder
#: Options that are passed directly to the Jinja2 environment.
jinja_options = ImmutableDict(
extensions=['jinja2.ext.autoescape', 'jinja2.ext.with_']
)
#: Default configuration parameters.
default_config = ImmutableDict({
'DEBUG': False,
'TESTING': False,
'PROPAGATE_EXCEPTIONS': None,
'PRESERVE_CONTEXT_ON_EXCEPTION': None,
'SECRET_KEY': None,
'PERMANENT_SESSION_LIFETIME': timedelta(days=31),
'USE_X_SENDFILE': False,
'LOGGER_NAME': None,
'SERVER_NAME': None,
'APPLICATION_ROOT': None,
'SESSION_COOKIE_NAME': 'session',
'SESSION_COOKIE_DOMAIN': None,
'SESSION_COOKIE_PATH': None,
'SESSION_COOKIE_HTTPONLY': True,
'SESSION_COOKIE_SECURE': False,
'SESSION_REFRESH_EACH_REQUEST': True,
'MAX_CONTENT_LENGTH': None,
'SEND_FILE_MAX_AGE_DEFAULT': 12 * 60 * 60, # 12 hours
'TRAP_BAD_REQUEST_ERRORS': False,
'TRAP_HTTP_EXCEPTIONS': False,
'PREFERRED_URL_SCHEME': 'http',
'JSON_AS_ASCII': True,
'JSON_SORT_KEYS': True,
'JSONIFY_PRETTYPRINT_REGULAR': True,
'TEMPLATES_AUTO_RELOAD': True,
})
#: The rule object to use for URL rules created. This is used by
#: :meth:`add_url_rule`. Defaults to :class:`werkzeug.routing.Rule`.
#:
#: .. versionadded:: 0.7
url_rule_class = Rule
#: the test client that is used when `test_client` is used.
#:
#: .. versionadded:: 0.7
test_client_class = None
#: the session interface to use. By default an instance of
#: :class:`~flask.sessions.SecureCookieSessionInterface` is used here.
#:
#: .. versionadded:: 0.8
session_interface = SecureCookieSessionInterface()
def __init__(self, import_name, static_path=None, static_url_path=None,
static_folder='static', template_folder='templates',
instance_path=None, instance_relative_config=False,
root_path=None):
_PackageBoundObject.__init__(self, import_name,
template_folder=template_folder,
root_path=root_path)
if static_path is not None:
from warnings import warn
warn(DeprecationWarning('static_path is now called '
'static_url_path'), stacklevel=2)
static_url_path = static_path
if static_url_path is not None:
self.static_url_path = static_url_path
if static_folder is not None:
self.static_folder = static_folder
if instance_path is None:
instance_path = self.auto_find_instance_path()
elif not os.path.isabs(instance_path):
raise ValueError('If an instance path is provided it must be '
'absolute. A relative path was given instead.')
#: Holds the path to the instance folder.
#:
#: .. versionadded:: 0.8
self.instance_path = instance_path
#: The configuration dictionary as :class:`Config`. This behaves
#: exactly like a regular dictionary but supports additional methods
#: to load a config from files.
self.config = self.make_config(instance_relative_config)
# Prepare the deferred setup of the logger.
self._logger = None
self.logger_name = self.import_name
#: A dictionary of all view functions registered. The keys will
#: be function names which are also used to generate URLs and
#: the values are the function objects themselves.
#: To register a view function, use the :meth:`route` decorator.
self.view_functions = {}
# support for the now deprecated `error_handlers` attribute. The
# :attr:`error_handler_spec` shall be used now.
self._error_handlers = {}
#: A dictionary of all registered error handlers. The key is `None`
#: for error handlers active on the application, otherwise the key is
#: the name of the blueprint. Each key points to another dictionary
#: where the key is the status code of the http exception. The
#: special key `None` points to a list of tuples where the first item
#: is the class for the instance check and the second the error handler
#: function.
#:
#: To register an error handler, use the :meth:`errorhandler`
#: decorator.
self.error_handler_spec = {None: self._error_handlers}
#: A list of functions that are called when :meth:`url_for` raises a
#: :exc:`~werkzeug.routing.BuildError`. Each function registered here
#: is called with `error`, `endpoint` and `values`. If a function
#: returns `None` or raises a `BuildError` the next function is
#: tried.
#:
#: .. versionadded:: 0.9
self.url_build_error_handlers = []
#: A dictionary with lists of functions that should be called at the
#: beginning of the request. The key of the dictionary is the name of
#: the blueprint this function is active for, `None` for all requests.
#: This can for example be used to open database connections or to get
#: hold of the currently logged in user. To register a
#: function here, use the :meth:`before_request` decorator.
self.before_request_funcs = {}
#: A list of functions that should be called at the beginning of the
#: first request to this instance. To register a function here, use
#: the :meth:`before_first_request` decorator.
#:
#: .. versionadded:: 0.8
self.before_first_request_funcs = []
#: A dictionary with lists of functions that should be called after
#: each request. The key of the dictionary is the name of the blueprint
#: this function is active for, `None` for all requests. This can for
#: example be used to close database connections opened earlier in
#: the request. To register a function here, use the
#: :meth:`after_request` decorator.
self.after_request_funcs = {}
#: A dictionary with lists of functions that are called after
#: each request, even if an exception has occurred. The key of the
#: dictionary is the name of the blueprint this function is active for,
#: `None` for all requests. These functions are not allowed to modify
#: the request, and their return values are ignored. If an exception
#: occurred while processing the request, it gets passed to each
#: teardown_request function. To register a function here, use the
#: :meth:`teardown_request` decorator.
#:
#: .. versionadded:: 0.7
self.teardown_request_funcs = {}
#: A list of functions that are called when the application context
#: is destroyed. Since the application context is also torn down
#: when the request ends, this is the place for code that disconnects
#: from databases.
#:
#: .. versionadded:: 0.9
self.teardown_appcontext_funcs = []
#: A dictionary with lists of functions that can be used as URL value
#: preprocessors. The key `None` here is used for application wide
#: callbacks, otherwise the key is the name of the blueprint.
#: Each of these functions has the chance to modify the dictionary
#: of URL values before they are used as the keyword arguments of the
#: view function. For each function registered this one should also
#: provide a :meth:`url_defaults` function that adds the parameters
#: automatically again that were removed that way.
#:
#: .. versionadded:: 0.7
self.url_value_preprocessors = {}
#: A dictionary with lists of functions that can be used as URL
#: default functions. Whenever a URL is built these functions are
#: called to modify the dictionary of values in place. The key
#: `None` here is used for application wide callbacks, otherwise
#: the key is the name of the blueprint.
#:
#: .. versionadded:: 0.7
self.url_default_functions = {}
#: A dictionary with lists of functions that are called without arguments
#: to populate the template context. The key of the dictionary is the
#: name of the blueprint this function is active for, `None` for all
#: requests. Each returns a dictionary that the template context is
#: updated with. To register a function here, use the
#: :meth:`context_processor` decorator.
self.template_context_processors = {
None: [_default_template_ctx_processor]
}
#: all the attached blueprints in a dictionary by name. Blueprints
#: can be attached multiple times so this dictionary does not tell
#: you how many times they were attached.
#:
#: .. versionadded:: 0.7
self.blueprints = {}
#: a place where extensions can store application specific state. For
#: example this is where an extension could store database engines and
#: similar things. For backwards compatibility extensions should register
#: themselves like this::
#:
#: if not hasattr(app, 'extensions'):
#: app.extensions = {}
#: app.extensions['extensionname'] = SomeObject()
#:
#: The key must match the name of the extension module. For example in
#: case of a "Flask-Foo" extension in `flask_foo`, the key would be
#: ``'foo'``.
#:
#: .. versionadded:: 0.7
self.extensions = {}
#: The :class:`~werkzeug.routing.Map` for this instance. You can use
#: this to change the routing converters after the class was created
#: but before any routes are connected. Example::
#:
#: from werkzeug.routing import BaseConverter
#:
#: class ListConverter(BaseConverter):
#: def to_python(self, value):
#: return value.split(',')
#: def to_url(self, values):
#: return ','.join(super(ListConverter, self).to_url(value)
#: for value in values)
#:
#: app = Flask(__name__)
#: app.url_map.converters['list'] = ListConverter
self.url_map = Map()
# tracks internally if the application already handled at least one
# request.
self._got_first_request = False
self._before_request_lock = Lock()
# register the static folder for the application. Do that even
# if the folder does not exist: it might be created while the
# server is running (as usually happens during development), and
# Google App Engine stores static files somewhere else when they
# are mapped with the .yml file.
if self.has_static_folder:
self.add_url_rule(self.static_url_path + '/<path:filename>',
endpoint='static',
view_func=self.send_static_file)
def _get_error_handlers(self):
from warnings import warn
warn(DeprecationWarning('error_handlers is deprecated, use the '
'new error_handler_spec attribute instead.'), stacklevel=1)
return self._error_handlers
def _set_error_handlers(self, value):
self._error_handlers = value
self.error_handler_spec[None] = value
error_handlers = property(_get_error_handlers, _set_error_handlers)
del _get_error_handlers, _set_error_handlers
@locked_cached_property
def name(self):
"""The name of the application. This is usually the import name
with the difference that it's guessed from the run file if the
import name is ``__main__``. This name is used as a display name when
Flask needs the name of the application. It can be set and overridden
to change the value.
.. versionadded:: 0.8
"""
if self.import_name == '__main__':
fn = getattr(sys.modules['__main__'], '__file__', None)
if fn is None:
return '__main__'
return os.path.splitext(os.path.basename(fn))[0]
return self.import_name
@property
def propagate_exceptions(self):
"""Returns the value of the `PROPAGATE_EXCEPTIONS` configuration
value in case it's set, otherwise a sensible default is returned.
.. versionadded:: 0.7
"""
rv = self.config['PROPAGATE_EXCEPTIONS']
if rv is not None:
return rv
return self.testing or self.debug
@property
def preserve_context_on_exception(self):
"""Returns the value of the `PRESERVE_CONTEXT_ON_EXCEPTION`
configuration value in case it's set, otherwise a sensible default
is returned.
.. versionadded:: 0.7
"""
rv = self.config['PRESERVE_CONTEXT_ON_EXCEPTION']
if rv is not None:
return rv
return self.debug
@property
def logger(self):
"""A :class:`logging.Logger` object for this application. The
default configuration is to log to stderr if the application is
in debug mode. This logger can be used to (surprise) log messages.
Here are some examples::
app.logger.debug('A value for debugging')
app.logger.warning('A warning occurred (%d apples)', 42)
app.logger.error('An error occurred')
.. versionadded:: 0.3
"""
if self._logger and self._logger.name == self.logger_name:
return self._logger
with _logger_lock:
if self._logger and self._logger.name == self.logger_name:
return self._logger
from flask.logging import create_logger
self._logger = rv = create_logger(self)
return rv
@locked_cached_property
def jinja_env(self):
"""The Jinja2 environment used to load templates."""
return self.create_jinja_environment()
@property
def got_first_request(self):
"""This attribute is set to `True` if the application started
handling the first request.
.. versionadded:: 0.8
"""
return self._got_first_request
def make_config(self, instance_relative=False):
"""Used to create the config attribute by the Flask constructor.
The `instance_relative` parameter is passed in from the constructor
of Flask (there named `instance_relative_config`) and indicates if
the config should be relative to the instance path or the root path
of the application.
.. versionadded:: 0.8
"""
root_path = self.root_path
if instance_relative:
root_path = self.instance_path
return self.config_class(root_path, self.default_config)
def auto_find_instance_path(self):
"""Tries to locate the instance path if it was not provided to the
constructor of the application class. It will basically calculate
the path to a folder named ``instance`` next to your main file or
the package.
.. versionadded:: 0.8
"""
prefix, package_path = find_package(self.import_name)
if prefix is None:
return os.path.join(package_path, 'instance')
return os.path.join(prefix, 'var', self.name + '-instance')
def open_instance_resource(self, resource, mode='rb'):
"""Opens a resource from the application's instance folder
(:attr:`instance_path`). Otherwise works like
:meth:`open_resource`. Instance resources can also be opened for
writing.
:param resource: the name of the resource. To access resources within
subfolders use forward slashes as separator.
:param mode: resource file opening mode, default is 'rb'.
"""
return open(os.path.join(self.instance_path, resource), mode)
def create_jinja_environment(self):
"""Creates the Jinja2 environment based on :attr:`jinja_options`
and :meth:`select_jinja_autoescape`. Since 0.7 this also adds
the Jinja2 globals and filters after initialization. Override
this function to customize the behavior.
.. versionadded:: 0.5
.. versionchanged:: 1.0
``Environment.auto_reload`` set in accordance with
``TEMPLATES_AUTO_RELOAD`` configuration option.
"""
options = dict(self.jinja_options)
if 'autoescape' not in options:
options['autoescape'] = self.select_jinja_autoescape
if 'auto_reload' not in options:
options['auto_reload'] = self.debug \
or self.config['TEMPLATES_AUTO_RELOAD']
rv = Environment(self, **options)
rv.globals.update(
url_for=url_for,
get_flashed_messages=get_flashed_messages,
config=self.config,
# request, session and g are normally added with the
# context processor for efficiency reasons but for imported
# templates we also want the proxies in there.
request=request,
session=session,
g=g
)
rv.filters['tojson'] = json.tojson_filter
return rv
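# [Editor's note] The docstring above invites subclasses to override
# create_jinja_environment. A minimal, hypothetical sketch (the `shout`
# filter is an assumption for illustration, not part of Flask):
#
#     class MyFlask(Flask):
#         def create_jinja_environment(self):
#             env = super(MyFlask, self).create_jinja_environment()
#             env.filters['shout'] = lambda s: s.upper() + '!'
#             return env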
def create_global_jinja_loader(self):
"""Creates the loader for the Jinja2 environment. Can be used to
override just the loader and keeping the rest unchanged. It's
discouraged to override this function. Instead one should override
the :meth:`jinja_loader` function instead.
The global loader dispatches between the loaders of the application
and the individual blueprints.
.. versionadded:: 0.7
"""
return DispatchingJinjaLoader(self)
def init_jinja_globals(self):
"""Deprecated. Used to initialize the Jinja2 globals.
.. versionadded:: 0.5
.. versionchanged:: 0.7
This method is deprecated with 0.7. Override
:meth:`create_jinja_environment` instead.
"""
def select_jinja_autoescape(self, filename):
"""Returns `True` if autoescaping should be active for the given
template name.
.. versionadded:: 0.5
"""
if filename is None:
return False
return filename.endswith(('.html', '.htm', '.xml', '.xhtml'))
def update_template_context(self, context):
"""Update the template context with some commonly used variables.
This injects request, session, config and g into the template
context as well as everything template context processors want
to inject. Note that as of Flask 0.6, the original values
in the context will not be overridden if a context processor
decides to return a value with the same key.
:param context: the context as a dictionary that is updated in place
to add extra variables.
"""
funcs = self.template_context_processors[None]
reqctx = _request_ctx_stack.top
if reqctx is not None:
bp = reqctx.request.blueprint
if bp is not None and bp in self.template_context_processors:
funcs = chain(funcs, self.template_context_processors[bp])
orig_ctx = context.copy()
for func in funcs:
context.update(func())
# make sure the original values win. This makes it easier to
# add new variables in context processors without breaking
# existing views.
context.update(orig_ctx)
def run(self, host=None, port=None, debug=None, **options):
"""Runs the application on a local development server. If the
:attr:`debug` flag is set the server will automatically reload
for code changes and show a debugger in case an exception happened.
If you want to run the application in debug mode, but disable the
code execution on the interactive debugger, you can pass
``use_evalex=False`` as parameter. This will keep the debugger's
traceback screen active, but disable code execution.
.. admonition:: Keep in Mind
Flask will suppress any server error with a generic error page
unless it is in debug mode. As such to enable just the
interactive debugger without the code reloading, you have to
invoke :meth:`run` with ``debug=True`` and ``use_reloader=False``.
Setting ``use_debugger`` to `True` without being in debug mode
won't catch any exceptions because there won't be any to
catch.
.. versionchanged:: 0.10
The default port is now picked from the ``SERVER_NAME`` variable.
:param host: the hostname to listen on. Set this to ``'0.0.0.0'`` to
have the server available externally as well. Defaults to
``'127.0.0.1'``.
:param port: the port of the webserver. Defaults to ``5000`` or the
port defined in the ``SERVER_NAME`` config variable if
present.
:param debug: if given, enable or disable debug mode.
See :attr:`debug`.
:param options: the options to be forwarded to the underlying
Werkzeug server. See
:func:`werkzeug.serving.run_simple` for more
information.
"""
from werkzeug.serving import run_simple
if host is None:
host = '127.0.0.1'
if port is None:
server_name = self.config['SERVER_NAME']
if server_name and ':' in server_name:
port = int(server_name.rsplit(':', 1)[1])
else:
port = 5000
if debug is not None:
self.debug = bool(debug)
options.setdefault('use_reloader', self.debug)
options.setdefault('use_debugger', self.debug)
try:
run_simple(host, port, self, **options)
finally:
# reset the first request information if the development server
# shut down normally. This makes it possible to restart the server
# without the reloader and similar tools from an interactive shell.
self._got_first_request = False
def test_client(self, use_cookies=True):
"""Creates a test client for this application. For information
about unit testing head over to :ref:`testing`.
Note that if you are testing for assertions or exceptions in your
application code, you must set ``app.testing = True`` in order for the
exceptions to propagate to the test client. Otherwise, the exception
will be handled by the application (not visible to the test client) and
the only indication of an AssertionError or other exception will be a
500 status code response to the test client. See the :attr:`testing`
attribute. For example::
app.testing = True
client = app.test_client()
The test client can be used in a `with` block to defer the closing down
of the context until the end of the `with` block. This is useful if
you want to access the context locals for testing::
with app.test_client() as c:
rv = c.get('/?vodka=42')
assert request.args['vodka'] == '42'
See :class:`~flask.testing.FlaskClient` for more information.
.. versionchanged:: 0.4
added support for `with` block usage for the client.
.. versionadded:: 0.7
The `use_cookies` parameter was added as well as the ability
to override the client to be used by setting the
:attr:`test_client_class` attribute.
"""
cls = self.test_client_class
if cls is None:
from flask.testing import FlaskClient as cls
return cls(self, self.response_class, use_cookies=use_cookies)
def open_session(self, request):
"""Creates or opens a new session. Default implementation stores all
session data in a signed cookie. This requires that the
:attr:`secret_key` is set. Instead of overriding this method
we recommend replacing the :class:`session_interface`.
:param request: an instance of :attr:`request_class`.
"""
return self.session_interface.open_session(self, request)
def save_session(self, session, response):
"""Saves the session if it needs updates. For the default
implementation, check :meth:`open_session`. Instead of overriding this
method we recommend replacing the :class:`session_interface`.
:param session: the session to be saved (a
:class:`~werkzeug.contrib.securecookie.SecureCookie`
object)
:param response: an instance of :attr:`response_class`
"""
return self.session_interface.save_session(self, session, response)
def make_null_session(self):
"""Creates a new instance of a missing session. Instead of overriding
this method we recommend replacing the :class:`session_interface`.
.. versionadded:: 0.7
"""
return self.session_interface.make_null_session(self)
def register_module(self, module, **options):
"""Registers a module with this application. The keyword arguments
of this function are the same as the ones for the constructor of the
:class:`Module` class and will override the values of the module if
provided.
.. versionchanged:: 0.7
The module system was deprecated in favor for the blueprint
system.
"""
assert blueprint_is_module(module), 'register_module requires ' \
'actual module objects. Please upgrade to blueprints.'
if not self.enable_modules:
raise RuntimeError('Module support was disabled but code '
'attempted to register a module named %r' % module)
else:
from warnings import warn
warn(DeprecationWarning('Modules are deprecated. Upgrade to '
'using blueprints. Have a look into the documentation for '
'more information. If this module was registered by a '
'Flask-Extension upgrade the extension or contact the author '
'of that extension instead. (Registered %r)' % module),
stacklevel=2)
self.register_blueprint(module, **options)
@setupmethod
def register_blueprint(self, blueprint, **options):
"""Registers a blueprint on the application.
.. versionadded:: 0.7
"""
first_registration = False
if blueprint.name in self.blueprints:
assert self.blueprints[blueprint.name] is blueprint, \
'A blueprint\'s name collision occurred between %r and ' \
'%r. Both share the same name "%s". Blueprints that ' \
'are created on the fly need unique names.' % \
(blueprint, self.blueprints[blueprint.name], blueprint.name)
else:
self.blueprints[blueprint.name] = blueprint
first_registration = True
blueprint.register(self, options, first_registration)
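# [Editor's note] A minimal usage sketch for register_blueprint; the
# blueprint name, route and prefix below are illustrative assumptions:
#
#     from flask import Flask, Blueprint
#
#     admin = Blueprint('admin', __name__)
#
#     @admin.route('/dashboard')
#     def dashboard():
#         return 'admin dashboard'
#
#     app = Flask(__name__)
#     app.register_blueprint(admin, url_prefix='/admin')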
@setupmethod
def add_url_rule(self, rule, endpoint=None, view_func=None, **options):
"""Connects a URL rule. Works exactly like the :meth:`route`
decorator. If a view_func is provided it will be registered with the
endpoint.
Basically this example::
@app.route('/')
def index():
pass
Is equivalent to the following::
def index():
pass
app.add_url_rule('/', 'index', index)
If the view_func is not provided you will need to connect the endpoint
to a view function like so::
app.view_functions['index'] = index
Internally :meth:`route` invokes :meth:`add_url_rule` so if you want
to customize the behavior via subclassing you only need to change
this method.
For more information refer to :ref:`url-route-registrations`.
.. versionchanged:: 0.2
`view_func` parameter added.
.. versionchanged:: 0.6
`OPTIONS` is added automatically as method.
:param rule: the URL rule as string
:param endpoint: the endpoint for the registered URL rule. Flask
itself assumes the name of the view function as
endpoint
:param view_func: the function to call when serving a request to the
provided endpoint
:param options: the options to be forwarded to the underlying
:class:`~werkzeug.routing.Rule` object. A notable change
from Werkzeug is the handling of method options: `methods`
is a list of methods this rule should be limited
to (`GET`, `POST` etc.). By default a rule
just listens for `GET` (and implicitly `HEAD`).
Starting with Flask 0.6, `OPTIONS` is implicitly
added and handled by the standard request handling.
"""
if endpoint is None:
endpoint = _endpoint_from_view_func(view_func)
options['endpoint'] = endpoint
methods = options.pop('methods', None)
# if the methods are not given and the view_func object knows its
# methods we can use that instead. If neither exists, we go with
# a tuple of only `GET` as default.
if methods is None:
methods = getattr(view_func, 'methods', None) or ('GET',)
if isinstance(methods, string_types):
raise TypeError('Allowed methods have to be iterables of strings, '
'for example: @app.route(..., methods=["POST"])')
methods = set(methods)
# Methods that should always be added
required_methods = set(getattr(view_func, 'required_methods', ()))
# starting with Flask 0.8 the view_func object can disable and
# force-enable the automatic options handling.
provide_automatic_options = getattr(view_func,
'provide_automatic_options', None)
if provide_automatic_options is None:
if 'OPTIONS' not in methods:
provide_automatic_options = True
required_methods.add('OPTIONS')
else:
provide_automatic_options = False
# Add the required methods now.
methods |= required_methods
rule = self.url_rule_class(rule, methods=methods, **options)
rule.provide_automatic_options = provide_automatic_options
self.url_map.add(rule)
if view_func is not None:
old_func = self.view_functions.get(endpoint)
if old_func is not None and old_func != view_func:
raise AssertionError('View function mapping is overwriting an '
'existing endpoint function: %s' % endpoint)
self.view_functions[endpoint] = view_func
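# [Editor's note] When no `methods` option is passed, add_url_rule reads a
# `methods` attribute off the view function (see the getattr above). An
# illustrative sketch of relying on that behavior:
#
#     def submit():
#         return 'ok'
#     submit.methods = ['POST']   # picked up as the allowed methods
#     app.add_url_rule('/submit', 'submit', submit)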
def route(self, rule, **options):
"""A decorator that is used to register a view function for a
given URL rule. This does the same thing as :meth:`add_url_rule`
but is intended for decorator usage::
@app.route('/')
def index():
return 'Hello World'
For more information refer to :ref:`url-route-registrations`.
:param rule: the URL rule as string
:param endpoint: the endpoint for the registered URL rule. Flask
itself assumes the name of the view function as
endpoint
:param options: the options to be forwarded to the underlying
:class:`~werkzeug.routing.Rule` object. A notable change
from Werkzeug is the handling of method options: `methods`
is a list of methods this rule should be limited
to (`GET`, `POST` etc.). By default a rule
just listens for `GET` (and implicitly `HEAD`).
Starting with Flask 0.6, `OPTIONS` is implicitly
added and handled by the standard request handling.
"""
def decorator(f):
endpoint = options.pop('endpoint', None)
self.add_url_rule(rule, endpoint, f, **options)
return f
return decorator
@setupmethod
def endpoint(self, endpoint):
"""A decorator to register a function as an endpoint.
Example::
@app.endpoint('example.endpoint')
def example():
return "example"
:param endpoint: the name of the endpoint
"""
def decorator(f):
self.view_functions[endpoint] = f
return f
return decorator
@setupmethod
def errorhandler(self, code_or_exception):
"""A decorator that is used to register a function give a given
error code. Example::
@app.errorhandler(404)
def page_not_found(error):
return 'This page does not exist', 404
You can also register handlers for arbitrary exceptions::
@app.errorhandler(DatabaseError)
def special_exception_handler(error):
return 'Database connection failed', 500
You can also register a function as error handler without using
the :meth:`errorhandler` decorator. The following example is
equivalent to the one above::
def page_not_found(error):
return 'This page does not exist', 404
app.error_handler_spec[None][404] = page_not_found
Setting error handlers via assignments to :attr:`error_handler_spec`
however is discouraged as it requires fiddling with nested dictionaries
and the special case for arbitrary exception types.
The first `None` refers to the active blueprint. If the error
handler should be application wide `None` shall be used.
.. versionadded:: 0.7
Use :meth:`register_error_handler` instead of modifying
:attr:`error_handler_spec` directly, for application wide error
handlers.
.. versionadded:: 0.7
One can now also register custom exception types
that do not necessarily have to be a subclass of the
:class:`~werkzeug.exceptions.HTTPException` class.
:param code_or_exception: the code as an integer for the handler, or an arbitrary exception
"""
def decorator(f):
self._register_error_handler(None, code_or_exception, f)
return f
return decorator
def register_error_handler(self, code_or_exception, f):
"""Alternative error attach function to the :meth:`errorhandler`
decorator that is more straightforward to use for non decorator
usage.
.. versionadded:: 0.7
"""
self._register_error_handler(None, code_or_exception, f)
@setupmethod
def _register_error_handler(self, key, code_or_exception, f):
if isinstance(code_or_exception, HTTPException):
code_or_exception = code_or_exception.code
if isinstance(code_or_exception, integer_types):
assert code_or_exception != 500 or key is None, \
'It is currently not possible to register a 500 internal ' \
'server error on a per-blueprint level.'
self.error_handler_spec.setdefault(key, {})[code_or_exception] = f
else:
self.error_handler_spec.setdefault(key, {}).setdefault(None, []) \
.append((code_or_exception, f))
@setupmethod
def template_filter(self, name=None):
"""A decorator that is used to register custom template filter.
You can specify a name for the filter, otherwise the function
name will be used. Example::
@app.template_filter()
def reverse(s):
return s[::-1]
:param name: the optional name of the filter, otherwise the
function name will be used.
"""
def decorator(f):
self.add_template_filter(f, name=name)
return f
return decorator
@setupmethod
def add_template_filter(self, f, name=None):
"""Register a custom template filter. Works exactly like the
:meth:`template_filter` decorator.
:param name: the optional name of the filter, otherwise the
function name will be used.
"""
self.jinja_env.filters[name or f.__name__] = f
@setupmethod
def template_test(self, name=None):
"""A decorator that is used to register custom template test.
You can specify a name for the test, otherwise the function
name will be used. Example::
@app.template_test()
def is_prime(n):
if n == 2:
return True
for i in range(2, int(math.ceil(math.sqrt(n))) + 1):
if n % i == 0:
return False
return True
.. versionadded:: 0.10
:param name: the optional name of the test, otherwise the
function name will be used.
"""
def decorator(f):
self.add_template_test(f, name=name)
return f
return decorator
@setupmethod
def add_template_test(self, f, name=None):
"""Register a custom template test. Works exactly like the
:meth:`template_test` decorator.
.. versionadded:: 0.10
:param name: the optional name of the test, otherwise the
function name will be used.
"""
self.jinja_env.tests[name or f.__name__] = f
@setupmethod
def template_global(self, name=None):
"""A decorator that is used to register a custom template global function.
You can specify a name for the global function, otherwise the function
name will be used. Example::
@app.template_global()
def double(n):
return 2 * n
.. versionadded:: 0.10
:param name: the optional name of the global function, otherwise the
function name will be used.
"""
def decorator(f):
self.add_template_global(f, name=name)
return f
return decorator
@setupmethod
def add_template_global(self, f, name=None):
"""Register a custom template global function. Works exactly like the
:meth:`template_global` decorator.
.. versionadded:: 0.10
:param name: the optional name of the global function, otherwise the
function name will be used.
"""
self.jinja_env.globals[name or f.__name__] = f
@setupmethod
def before_request(self, f):
"""Registers a function to run before each request."""
self.before_request_funcs.setdefault(None, []).append(f)
return f
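# [Editor's note] Because preprocess_request stops dispatching when a
# before_request function returns a value, such a function can
# short-circuit a request. A hypothetical sketch (`maintenance_mode`
# is an assumed flag):
#
#     @app.before_request
#     def check_maintenance():
#         if maintenance_mode:
#             return 'Down for maintenance', 503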
@setupmethod
def before_first_request(self, f):
"""Registers a function to be run before the first request to this
instance of the application.
.. versionadded:: 0.8
"""
self.before_first_request_funcs.append(f)
return f
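# [Editor's note] before_first_request suits one-time, deferred setup. A
# hypothetical sketch (`create_tables` is an assumed helper):
#
#     @app.before_first_request
#     def init_db():
#         create_tables()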
@setupmethod
def after_request(self, f):
"""Register a function to be run after each request.
Your function must take one parameter, an instance of
:attr:`response_class` and return a new response object or the
same (see :meth:`process_response`).
As of Flask 0.7 this function might not be executed at the end of the
request in case an unhandled exception occurred.
"""
self.after_request_funcs.setdefault(None, []).append(f)
return f
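# [Editor's note] An after_request function receives the response and must
# return one; adding a header is a typical use. A minimal sketch:
#
#     @app.after_request
#     def add_security_header(response):
#         response.headers['X-Frame-Options'] = 'DENY'
#         return response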
@setupmethod
def teardown_request(self, f):
"""Register a function to be run at the end of each request,
regardless of whether there was an exception or not. These functions
are executed when the request context is popped, even if an actual
request was not performed.
Example::
ctx = app.test_request_context()
ctx.push()
...
ctx.pop()
When ``ctx.pop()`` is executed in the above example, the teardown
functions are called just before the request context is popped from
the stack of active contexts. This becomes relevant if you are using
such constructs in tests.
Generally teardown functions must take every necessary step to avoid
failing. If they do execute code that might fail they have to
surround the execution of that code with try/except statements and
log any errors that occur.
When a teardown function is called because of an exception it will
be passed an error object.
.. admonition:: Debug Note
In debug mode Flask will not tear down a request on an exception
immediately. Instead it will keep it alive so that the interactive
debugger can still access it. This behavior can be controlled
by the ``PRESERVE_CONTEXT_ON_EXCEPTION`` configuration variable.
"""
self.teardown_request_funcs.setdefault(None, []).append(f)
return f
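# [Editor's note] A teardown function written defensively, as the docstring
# above advises (`db` is a hypothetical connection object):
#
#     @app.teardown_request
#     def close_db(exc):
#         try:
#             db.close()
#         except Exception:
#             app.logger.exception('failed to close the database')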
@setupmethod
def teardown_appcontext(self, f):
"""Registers a function to be called when the application context
ends. These functions are typically also called when the request
context is popped.
Example::
ctx = app.app_context()
ctx.push()
...
ctx.pop()
When ``ctx.pop()`` is executed in the above example, the teardown
functions are called just before the app context is popped from the
stack of active contexts. This becomes relevant if you are using
such constructs in tests.
Since a request context typically also manages an application
context, these functions are also called when you pop a request context.
When a teardown function was called because of an exception it will
be passed an error object.
.. versionadded:: 0.9
"""
self.teardown_appcontext_funcs.append(f)
return f
@setupmethod
def context_processor(self, f):
"""Registers a template context processor function."""
self.template_context_processors[None].append(f)
return f
@setupmethod
def url_value_preprocessor(self, f):
"""Registers a function as URL value preprocessor for all view
functions of the application. It's called before the view functions
are called and can modify the url values provided.
"""
self.url_value_preprocessors.setdefault(None, []).append(f)
return f
@setupmethod
def url_defaults(self, f):
"""Callback function for URL defaults for all view functions of the
application. It's called with the endpoint and values and should
update the values passed in place.
"""
self.url_default_functions.setdefault(None, []).append(f)
return f
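# [Editor's note] url_value_preprocessor and url_defaults are commonly used
# as a pair: the preprocessor pulls a value out of every matched URL and
# the defaults function puts it back when URLs are built. The `lang_code`
# parameter below is an assumption for illustration:
#
#     from flask import g
#
#     @app.url_value_preprocessor
#     def pull_lang(endpoint, values):
#         g.lang_code = values.pop('lang_code', None)
#
#     @app.url_defaults
#     def add_lang(endpoint, values):
#         if 'lang_code' not in values and getattr(g, 'lang_code', None):
#             values['lang_code'] = g.lang_code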
def handle_http_exception(self, e):
"""Handles an HTTP exception. By default this will invoke the
registered error handlers and fall back to returning the
exception as response.
.. versionadded:: 0.3
"""
handlers = self.error_handler_spec.get(request.blueprint)
# Proxy exceptions don't have error codes. We want to always return
# those unchanged as errors
if e.code is None:
return e
if handlers and e.code in handlers:
handler = handlers[e.code]
else:
handler = self.error_handler_spec[None].get(e.code)
if handler is None:
return e
return handler(e)
def trap_http_exception(self, e):
"""Checks if an HTTP exception should be trapped or not. By default
this will return `False` for all exceptions except for a bad request
key error if ``TRAP_BAD_REQUEST_ERRORS`` is set to `True`. It
also returns `True` if ``TRAP_HTTP_EXCEPTIONS`` is set to `True`.
This is called for all HTTP exceptions raised by a view function.
If it returns `True` for any exception the error handler for this
exception is not called and it shows up as regular exception in the
traceback. This is helpful for debugging implicitly raised HTTP
exceptions.
.. versionadded:: 0.8
"""
if self.config['TRAP_HTTP_EXCEPTIONS']:
return True
if self.config['TRAP_BAD_REQUEST_ERRORS']:
return isinstance(e, BadRequest)
return False
def handle_user_exception(self, e):
"""This method is called whenever an exception occurs that should be
handled. A special case are
:class:`~werkzeug.exceptions.HTTPException`\s, which are forwarded by
this function to the :meth:`handle_http_exception` method. This
function will either return a response value or reraise the
exception with the same traceback.
.. versionadded:: 0.7
"""
exc_type, exc_value, tb = sys.exc_info()
assert exc_value is e
# ensure not to trash sys.exc_info() at that point in case someone
# wants the traceback preserved in handle_http_exception. Of course
# we cannot prevent users from trashing it themselves in a custom
# trap_http_exception method so that's their fault then.
if isinstance(e, HTTPException) and not self.trap_http_exception(e):
return self.handle_http_exception(e)
blueprint_handlers = ()
handlers = self.error_handler_spec.get(request.blueprint)
if handlers is not None:
blueprint_handlers = handlers.get(None, ())
app_handlers = self.error_handler_spec[None].get(None, ())
for typecheck, handler in chain(blueprint_handlers, app_handlers):
if isinstance(e, typecheck):
return handler(e)
reraise(exc_type, exc_value, tb)
def handle_exception(self, e):
"""Default exception handling that kicks in when an exception
occurs that is not caught. In debug mode the exception will
be re-raised immediately, otherwise it is logged and the handler
for a 500 internal server error is used. If no such handler
exists, a default 500 internal server error message is displayed.
.. versionadded:: 0.3
"""
exc_type, exc_value, tb = sys.exc_info()
got_request_exception.send(self, exception=e)
handler = self.error_handler_spec[None].get(500)
if self.propagate_exceptions:
# if we want to repropagate the exception, we can attempt to
# raise it with the whole traceback in case we can do that
# (the function was actually called from the except part)
# otherwise, we just raise the error again
if exc_value is e:
reraise(exc_type, exc_value, tb)
else:
raise e
self.log_exception((exc_type, exc_value, tb))
if handler is None:
return InternalServerError()
return handler(e)
def log_exception(self, exc_info):
"""Logs an exception. This is called by :meth:`handle_exception`
if debugging is disabled and right before the handler is called.
The default implementation logs the exception as error on the
:attr:`logger`.
.. versionadded:: 0.8
"""
self.logger.error('Exception on %s [%s]' % (
request.path,
request.method
), exc_info=exc_info)
def raise_routing_exception(self, request):
"""Exceptions that are recording during routing are reraised with
this method. During debug we are not reraising redirect requests
for non ``GET``, ``HEAD``, or ``OPTIONS`` requests and we're raising
a different error instead to help debug situations.
:internal:
"""
if not self.debug \
or not isinstance(request.routing_exception, RequestRedirect) \
or request.method in ('GET', 'HEAD', 'OPTIONS'):
raise request.routing_exception
from .debughelpers import FormDataRoutingRedirect
raise FormDataRoutingRedirect(request)
def dispatch_request(self):
"""Does the request dispatching. Matches the URL and returns the
return value of the view or error handler. This does not have to
be a response object. In order to convert the return value to a
proper response object, call :func:`make_response`.
.. versionchanged:: 0.7
This no longer does the exception handling, this code was
moved to the new :meth:`full_dispatch_request`.
"""
req = _request_ctx_stack.top.request
if req.routing_exception is not None:
self.raise_routing_exception(req)
rule = req.url_rule
# if we provide automatic options for this URL and the
# request came with the OPTIONS method, reply automatically
if getattr(rule, 'provide_automatic_options', False) \
and req.method == 'OPTIONS':
return self.make_default_options_response()
# otherwise dispatch to the handler for that endpoint
return self.view_functions[rule.endpoint](**req.view_args)
def full_dispatch_request(self):
"""Dispatches the request and on top of that performs request
pre and postprocessing as well as HTTP exception catching and
error handling.
.. versionadded:: 0.7
"""
self.try_trigger_before_first_request_functions()
try:
request_started.send(self)
rv = self.preprocess_request()
if rv is None:
rv = self.dispatch_request()
except Exception as e:
rv = self.handle_user_exception(e)
response = self.make_response(rv)
response = self.process_response(response)
request_finished.send(self, response=response)
return response
def try_trigger_before_first_request_functions(self):
"""Called before each request and will ensure that it triggers
the :attr:`before_first_request_funcs` and only exactly once per
application instance (which means process usually).
:internal:
"""
if self._got_first_request:
return
with self._before_request_lock:
if self._got_first_request:
return
for func in self.before_first_request_funcs:
func()
self._got_first_request = True
def make_default_options_response(self):
"""This method is called to create the default `OPTIONS` response.
This can be changed through subclassing to change the default
behavior of `OPTIONS` responses.
.. versionadded:: 0.7
"""
adapter = _request_ctx_stack.top.url_adapter
if hasattr(adapter, 'allowed_methods'):
methods = adapter.allowed_methods()
else:
# fallback for Werkzeug < 0.7
methods = []
try:
adapter.match(method='--')
except MethodNotAllowed as e:
methods = e.valid_methods
except HTTPException as e:
pass
rv = self.response_class()
rv.allow.update(methods)
return rv
def should_ignore_error(self, error):
"""This is called to figure out if an error should be ignored
or not as far as the teardown system is concerned. If this
function returns `True` then the teardown handlers will not be
passed the error.
.. versionadded:: 0.10
"""
return False
def make_response(self, rv):
"""Converts the return value from a view function to a real
response object that is an instance of :attr:`response_class`.
The following types are allowed for `rv`:
.. tabularcolumns:: |p{3.5cm}|p{9.5cm}|
======================= ===========================================
:attr:`response_class` the object is returned unchanged
:class:`str` a response object is created with the
string as body
:class:`unicode` a response object is created with the
string encoded to utf-8 as body
a WSGI function the function is called as WSGI application
and buffered as response object
:class:`tuple` A tuple in the form ``(response, status,
headers)`` or ``(response, headers)``
where `response` is any of the
types defined here, `status` is a string
or an integer and `headers` is a list or
a dictionary with header values.
======================= ===========================================
:param rv: the return value from the view function
.. versionchanged:: 0.9
Previously a tuple was interpreted as the arguments for the
response object.
"""
status_or_headers = headers = None
if isinstance(rv, tuple):
rv, status_or_headers, headers = rv + (None,) * (3 - len(rv))
if rv is None:
raise ValueError('View function did not return a response')
if isinstance(status_or_headers, (dict, list)):
headers, status_or_headers = status_or_headers, None
if not isinstance(rv, self.response_class):
# When we create a response object directly, we let the constructor
# set the headers and status. We do this because there can be
# some extra logic involved when creating these objects with
# specific values (like default content type selection).
if isinstance(rv, (text_type, bytes, bytearray)):
rv = self.response_class(rv, headers=headers, status=status_or_headers)
headers = status_or_headers = None
else:
rv = self.response_class.force_type(rv, request.environ)
if status_or_headers is not None:
if isinstance(status_or_headers, string_types):
rv.status = status_or_headers
else:
rv.status_code = status_or_headers
if headers:
rv.headers.extend(headers)
return rv
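# [Editor's note] The view return values accepted by make_response, written
# out; each line is a legal `return` under the tuple handling above:
#
#     return 'body'                         # bare body
#     return 'body', 201                    # (response, status)
#     return 'body', {'X-Id': '1'}          # (response, headers)
#     return 'body', 201, {'X-Id': '1'}     # (response, status, headers)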
def create_url_adapter(self, request):
"""Creates a URL adapter for the given request. The URL adapter
is created at a point where the request context is not yet set up
so the request is passed explicitly.
.. versionadded:: 0.6
.. versionchanged:: 0.9
This can now also be called without a request object when the
URL adapter is created for the application context.
"""
if request is not None:
return self.url_map.bind_to_environ(request.environ,
server_name=self.config['SERVER_NAME'])
# We need at the very least the server name to be set for this
# to work.
if self.config['SERVER_NAME'] is not None:
return self.url_map.bind(
self.config['SERVER_NAME'],
script_name=self.config['APPLICATION_ROOT'] or '/',
url_scheme=self.config['PREFERRED_URL_SCHEME'])
def inject_url_defaults(self, endpoint, values):
"""Injects the URL defaults for the given endpoint directly into
the values dictionary passed. This is used internally and
automatically called on URL building.
.. versionadded:: 0.7
"""
funcs = self.url_default_functions.get(None, ())
if '.' in endpoint:
bp = endpoint.rsplit('.', 1)[0]
funcs = chain(funcs, self.url_default_functions.get(bp, ()))
for func in funcs:
func(endpoint, values)
def handle_url_build_error(self, error, endpoint, values):
"""Handle :class:`~werkzeug.routing.BuildError` on :meth:`url_for`.
"""
exc_type, exc_value, tb = sys.exc_info()
for handler in self.url_build_error_handlers:
try:
rv = handler(error, endpoint, values)
if rv is not None:
return rv
except BuildError as error:
pass
# At this point we want to reraise the exception. If the error is
# still the same one we can reraise it with the original traceback,
# otherwise we raise it from here.
if error is exc_value:
reraise(exc_type, exc_value, tb)
raise error
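# [Editor's note] A sketch of a build-error handler on the list consumed
# above; the endpoint name and URL are illustrative assumptions:
#
#     def external_url_handler(error, endpoint, values):
#         if endpoint == 'docs':
#             return 'https://example.com/docs'
#         return None   # fall through to the next handler / original error
#
#     app.url_build_error_handlers.append(external_url_handler)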
def preprocess_request(self):
"""Called before the actual request dispatching and will
call every as :meth:`before_request` decorated function.
If any of these function returns a value it's handled as
if it was the return value from the view and further
request handling is stopped.
This also triggers the :meth:`url_value_processor` functions before
the actual :meth:`before_request` functions are called.
"""
bp = _request_ctx_stack.top.request.blueprint
funcs = self.url_value_preprocessors.get(None, ())
if bp is not None and bp in self.url_value_preprocessors:
funcs = chain(funcs, self.url_value_preprocessors[bp])
for func in funcs:
func(request.endpoint, request.view_args)
funcs = self.before_request_funcs.get(None, ())
if bp is not None and bp in self.before_request_funcs:
funcs = chain(funcs, self.before_request_funcs[bp])
for func in funcs:
rv = func()
if rv is not None:
return rv
def process_response(self, response):
"""Can be overridden in order to modify the response object
before it's sent to the WSGI server. By default this will
call all the :meth:`after_request` decorated functions.
.. versionchanged:: 0.5
As of Flask 0.5 the functions registered for after request
execution are called in reverse order of registration.
:param response: a :attr:`response_class` object.
:return: a new response object or the same, has to be an
instance of :attr:`response_class`.
"""
ctx = _request_ctx_stack.top
bp = ctx.request.blueprint
funcs = ctx._after_request_functions
if bp is not None and bp in self.after_request_funcs:
funcs = chain(funcs, reversed(self.after_request_funcs[bp]))
if None in self.after_request_funcs:
funcs = chain(funcs, reversed(self.after_request_funcs[None]))
for handler in funcs:
response = handler(response)
if not self.session_interface.is_null_session(ctx.session):
self.save_session(ctx.session, response)
return response
def do_teardown_request(self, exc=None):
"""Called after the actual request dispatching and will
call every as :meth:`teardown_request` decorated function. This is
not actually called by the :class:`Flask` object itself but is always
triggered when the request context is popped. That way we have a
tighter control over certain resources under testing environments.
.. versionchanged:: 0.9
Added the `exc` argument. Previously this was always using the
current exception information.
"""
if exc is None:
exc = sys.exc_info()[1]
funcs = reversed(self.teardown_request_funcs.get(None, ()))
bp = _request_ctx_stack.top.request.blueprint
if bp is not None and bp in self.teardown_request_funcs:
funcs = chain(funcs, reversed(self.teardown_request_funcs[bp]))
for func in funcs:
func(exc)
request_tearing_down.send(self, exc=exc)
def do_teardown_appcontext(self, exc=None):
"""Called when an application context is popped. This works pretty
much the same as :meth:`do_teardown_request` but for the application
context.
.. versionadded:: 0.9
"""
if exc is None:
exc = sys.exc_info()[1]
for func in reversed(self.teardown_appcontext_funcs):
func(exc)
appcontext_tearing_down.send(self, exc=exc)
def app_context(self):
"""Binds the application only. For as long as the application is bound
to the current context the :data:`flask.current_app` points to that
application. An application context is automatically created when a
request context is pushed if necessary.
Example usage::
with app.app_context():
...
.. versionadded:: 0.9
"""
return AppContext(self)
def request_context(self, environ):
"""Creates a :class:`~flask.ctx.RequestContext` from the given
environment and binds it to the current context. This must be used in
combination with the `with` statement because the request is only bound
to the current context for the duration of the `with` block.
Example usage::
with app.request_context(environ):
do_something_with(request)
The object returned can also be used without the `with` statement
which is useful for working in the shell. The example above is
doing exactly the same as this code::
ctx = app.request_context(environ)
ctx.push()
try:
do_something_with(request)
finally:
ctx.pop()
.. versionchanged:: 0.3
Added support for usage without the `with` statement; the `with`
statement is now passed the ctx object.
:param environ: a WSGI environment
"""
return RequestContext(self, environ)
def test_request_context(self, *args, **kwargs):
"""Creates a WSGI environment from the given values (see
:class:`werkzeug.test.EnvironBuilder` for more information, this
function accepts the same arguments).
"""
from flask.testing import make_test_environ_builder
builder = make_test_environ_builder(self, *args, **kwargs)
try:
return self.request_context(builder.get_environ())
finally:
builder.close()
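# [Editor's note] test_request_context makes `request` usable outside a
# real HTTP request; a minimal sketch with an assumed query string:
#
#     from flask import request
#
#     with app.test_request_context('/hello?name=world'):
#         assert request.args['name'] == 'world'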
def wsgi_app(self, environ, start_response):
"""The actual WSGI application. This is not implemented in
`__call__` so that middlewares can be applied without losing a
reference to the class. So instead of doing this::
app = MyMiddleware(app)
It's a better idea to do this instead::
app.wsgi_app = MyMiddleware(app.wsgi_app)
Then you still have the original application object around and
can continue to call methods on it.
.. versionchanged:: 0.7
The behavior of the before and after request callbacks was changed
under error conditions and a new callback was added that will
always execute at the end of the request, independent of whether an
error occurred or not. See :ref:`callbacks-and-errors`.
:param environ: a WSGI environment
:param start_response: a callable accepting a status code,
a list of headers and an optional
exception context to start the response
"""
ctx = self.request_context(environ)
ctx.push()
error = None
try:
try:
response = self.full_dispatch_request()
except Exception as e:
error = e
response = self.make_response(self.handle_exception(e))
return response(environ, start_response)
finally:
if self.should_ignore_error(error):
error = None
ctx.auto_pop(error)
@property
def modules(self):
from warnings import warn
warn(DeprecationWarning('Flask.modules is deprecated, use '
'Flask.blueprints instead'), stacklevel=2)
return self.blueprints
def __call__(self, environ, start_response):
"""Shortcut for :attr:`wsgi_app`."""
return self.wsgi_app(environ, start_response)
def __repr__(self):
return '<%s %r>' % (
self.__class__.__name__,
self.name,
)
|
"""
Ideals of Finite Algebras
"""
from __future__ import absolute_import
#*****************************************************************************
# Copyright (C) 2011 Johan Bosman <[email protected]>
# Copyright (C) 2011, 2013 Peter Bruin <[email protected]>
#
# Distributed under the terms of the GNU General Public License (GPL)
# as published by the Free Software Foundation; either version 2 of
# the License, or (at your option) any later version.
# http://www.gnu.org/licenses/
#*****************************************************************************
from .finite_dimensional_algebra_element import FiniteDimensionalAlgebraElement
from sage.matrix.constructor import Matrix
from sage.matrix.matrix import is_Matrix
from sage.rings.ideal import Ideal_generic
from sage.structure.element import parent
from sage.structure.sage_object import SageObject
from sage.misc.cachefunc import cached_method
from functools import reduce
from sage.structure.richcmp import (op_LT, op_LE, op_EQ, op_NE,
op_GT, op_GE)
class FiniteDimensionalAlgebraIdeal(Ideal_generic):
"""
An ideal of a :class:`FiniteDimensionalAlgebra`.
INPUT:
- ``A`` -- a finite-dimensional algebra
- ``gens`` -- the generators of this ideal
- ``given_by_matrix`` -- (default: ``False``) whether the basis matrix is
given by ``gens``
EXAMPLES::
sage: A = FiniteDimensionalAlgebra(GF(3), [Matrix([[1, 0], [0, 1]]), Matrix([[0, 1], [0, 0]])])
sage: A.ideal(A([0,1]))
Ideal (e1) of Finite-dimensional algebra of degree 2 over Finite Field of size 3
"""
def __init__(self, A, gens=None, given_by_matrix=False):
"""
EXAMPLES::
sage: A = FiniteDimensionalAlgebra(GF(3), [Matrix([[1, 0], [0, 1]]), Matrix([[0, 1], [0, 0]])])
sage: I = A.ideal(A([0,1]))
sage: TestSuite(I).run(skip="_test_category") # Currently ideals are not using the category framework
"""
k = A.base_ring()
n = A.degree()
if given_by_matrix:
self._basis_matrix = gens
gens = gens.rows()
elif gens is None:
self._basis_matrix = Matrix(k, 0, n)
elif isinstance(gens, (list, tuple)):
B = [FiniteDimensionalAlgebraIdeal(A, x).basis_matrix() for x in gens]
B = reduce(lambda x, y: x.stack(y), B, Matrix(k, 0, n))
self._basis_matrix = B.echelon_form().image().basis_matrix()
elif is_Matrix(gens):
gens = FiniteDimensionalAlgebraElement(A, gens)
elif isinstance(gens, FiniteDimensionalAlgebraElement):
gens = gens.vector()
B = Matrix([(gens * b).list() for b in A.table()])
self._basis_matrix = B.echelon_form().image().basis_matrix()
Ideal_generic.__init__(self, A, gens)
def _richcmp_(self, other, op):
r"""
Comparisons
TESTS::
sage: A = FiniteDimensionalAlgebra(GF(3), [Matrix([[1, 0], [0, 1]]), Matrix([[0, 1], [0, 0]])])
sage: I = A.ideal(A([1,1]))
sage: J = A.ideal(A([0,1]))
sage: I == J
False
sage: I == I
True
sage: I == I + J
True
sage: A2 = FiniteDimensionalAlgebra(GF(3), [Matrix([[1, 0], [0, 1]]), Matrix([[0, 1], [0, 0]])])
sage: A is A2
True
sage: A == A2
True
sage: I2 = A.ideal(A([1,1]))
sage: I == I2
True
sage: I != J, I != I, I != I+J
(True, False, False)
sage: I <= J, I <= I, I <= I+J
(False, True, True)
sage: I < J, I < I, I < I+J
(False, False, False)
sage: I >= J, I >= I, I >= I+J
(True, True, True)
sage: I > J, I > I, I > I+J
(True, False, False)
sage: I = A.ideal(A([1,1]))
sage: J = A.ideal(A([0,1]))
sage: I != J
True
sage: I != I
False
sage: I != I + J
False
"""
if self.basis_matrix() == other.basis_matrix():
return op == op_EQ or op == op_LE or op == op_GE
elif op == op_EQ:
return False
elif op == op_NE:
return True
if op == op_LE or op == op_LT:
return self.vector_space().is_subspace(other.vector_space())
elif op == op_GE or op == op_GT:
return other.vector_space().is_subspace(self.vector_space())
def __contains__(self, elt):
"""
EXAMPLES::
sage: A = FiniteDimensionalAlgebra(GF(3), [Matrix([[1, 0], [0, 1]]), Matrix([[0, 1], [0, 0]])])
sage: J = A.ideal(A([0,1]))
sage: A([0,1]) in J
True
sage: A([1,0]) in J
False
"""
if self.ring() is not parent(elt):
return False
return elt.vector() in self.vector_space()
def basis_matrix(self):
"""
Return the echelonized matrix whose rows form a basis of ``self``.
EXAMPLES::
sage: A = FiniteDimensionalAlgebra(GF(3), [Matrix([[1, 0], [0, 1]]), Matrix([[0, 1], [0, 0]])])
sage: I = A.ideal(A([1,1]))
sage: I.basis_matrix()
[1 0]
[0 1]
"""
return self._basis_matrix
@cached_method
def vector_space(self):
"""
Return ``self`` as a vector space.
EXAMPLES::
sage: A = FiniteDimensionalAlgebra(GF(3), [Matrix([[1, 0], [0, 1]]), Matrix([[0, 1], [0, 0]])])
sage: I = A.ideal(A([1,1]))
sage: I.vector_space()
Vector space of degree 2 and dimension 2 over Finite Field of size 3
Basis matrix:
[1 0]
[0 1]
"""
return self.basis_matrix().image()
|
# As a test suite for the os module, this is woefully inadequate, but this
# does add tests for a few functions which have been determined to be more
# portable than they had been thought to be.
import asynchat
import asyncore
import codecs
import contextlib
import decimal
import errno
import fractions
import getpass
import itertools
import locale
import mmap
import os
import pickle
import platform
import re
import shutil
import signal
import socket
import stat
import subprocess
import sys
import sysconfig
import time
import unittest
import uuid
import warnings
from test import support
try:
import threading
except ImportError:
threading = None
try:
import resource
except ImportError:
resource = None
try:
import fcntl
except ImportError:
fcntl = None
try:
import _winapi
except ImportError:
_winapi = None
try:
import grp
groups = [g.gr_gid for g in grp.getgrall() if getpass.getuser() in g.gr_mem]
if hasattr(os, 'getgid'):
process_gid = os.getgid()
if process_gid not in groups:
groups.append(process_gid)
except ImportError:
groups = []
try:
import pwd
all_users = [u.pw_uid for u in pwd.getpwall()]
except ImportError:
all_users = []
try:
from _testcapi import INT_MAX, PY_SSIZE_T_MAX
except ImportError:
INT_MAX = PY_SSIZE_T_MAX = sys.maxsize
from test.support.script_helper import assert_python_ok
root_in_posix = False
if hasattr(os, 'geteuid'):
root_in_posix = (os.geteuid() == 0)
# Detect whether we're on a Linux system that uses the (now outdated
# and unmaintained) linuxthreads threading library. There's an issue
# when combining linuxthreads with a failed execv call: see
# http://bugs.python.org/issue4970.
if hasattr(sys, 'thread_info') and sys.thread_info.version:
USING_LINUXTHREADS = sys.thread_info.version.startswith("linuxthreads")
else:
USING_LINUXTHREADS = False
# Issue #14110: Some tests fail on FreeBSD if the user is in the wheel group.
HAVE_WHEEL_GROUP = sys.platform.startswith('freebsd') and os.getgid() == 0
def create_file(filename, content=b'content'):
with open(filename, "xb", 0) as fp:
fp.write(content)
# Tests creating TESTFN
class FileTests(unittest.TestCase):
def setUp(self):
if os.path.lexists(support.TESTFN):
os.unlink(support.TESTFN)
tearDown = setUp
def test_access(self):
f = os.open(support.TESTFN, os.O_CREAT|os.O_RDWR)
os.close(f)
self.assertTrue(os.access(support.TESTFN, os.W_OK))
def test_closerange(self):
first = os.open(support.TESTFN, os.O_CREAT|os.O_RDWR)
# We must allocate two consecutive file descriptors, otherwise
# it will mess up other file descriptors (perhaps even the three
# standard ones).
second = os.dup(first)
try:
retries = 0
while second != first + 1:
os.close(first)
retries += 1
if retries > 10:
# XXX test skipped
self.skipTest("couldn't allocate two consecutive fds")
first, second = second, os.dup(second)
finally:
os.close(second)
# close a fd that is open, and one that isn't
os.closerange(first, first + 2)
self.assertRaises(OSError, os.write, first, b"a")
@support.cpython_only
def test_rename(self):
path = support.TESTFN
old = sys.getrefcount(path)
self.assertRaises(TypeError, os.rename, path, 0)
new = sys.getrefcount(path)
self.assertEqual(old, new)
def test_read(self):
with open(support.TESTFN, "w+b") as fobj:
fobj.write(b"spam")
fobj.flush()
fd = fobj.fileno()
os.lseek(fd, 0, 0)
s = os.read(fd, 4)
self.assertEqual(type(s), bytes)
self.assertEqual(s, b"spam")
@support.cpython_only
# Skip the test on 32-bit platforms: the number of bytes must fit in a
# Py_ssize_t type
@unittest.skipUnless(INT_MAX < PY_SSIZE_T_MAX,
"needs INT_MAX < PY_SSIZE_T_MAX")
@support.bigmemtest(size=INT_MAX + 10, memuse=1, dry_run=False)
def test_large_read(self, size):
with open(support.TESTFN, "wb") as fp:
fp.write(b'test')
self.addCleanup(support.unlink, support.TESTFN)
# Issue #21932: Make sure that os.read() does not raise an
# OverflowError for size larger than INT_MAX
with open(support.TESTFN, "rb") as fp:
data = os.read(fp.fileno(), size)
# The test does not try to read more than 2 GB at once because the
# operating system is free to return fewer bytes than requested.
self.assertEqual(data, b'test')
def test_write(self):
# os.write() accepts bytes- and buffer-like objects but not strings
fd = os.open(support.TESTFN, os.O_CREAT | os.O_WRONLY)
self.assertRaises(TypeError, os.write, fd, "beans")
os.write(fd, b"bacon\n")
os.write(fd, bytearray(b"eggs\n"))
os.write(fd, memoryview(b"spam\n"))
os.close(fd)
with open(support.TESTFN, "rb") as fobj:
self.assertEqual(fobj.read().splitlines(),
[b"bacon", b"eggs", b"spam"])
def write_windows_console(self, *args):
retcode = subprocess.call(args,
# use a new console to not flood the test output
creationflags=subprocess.CREATE_NEW_CONSOLE,
# use a shell to hide the console window (SW_HIDE)
shell=True)
self.assertEqual(retcode, 0)
@unittest.skipUnless(sys.platform == 'win32',
'test specific to the Windows console')
def test_write_windows_console(self):
# Issue #11395: the Windows console returns an error (12: not enough
# space error) on writing into stdout if stdout mode is binary and the
# length is greater than 66,000 bytes (or less, depending on heap
# usage).
code = "print('x' * 100000)"
self.write_windows_console(sys.executable, "-c", code)
self.write_windows_console(sys.executable, "-u", "-c", code)
def fdopen_helper(self, *args):
fd = os.open(support.TESTFN, os.O_RDONLY)
f = os.fdopen(fd, *args)
f.close()
def test_fdopen(self):
fd = os.open(support.TESTFN, os.O_CREAT|os.O_RDWR)
os.close(fd)
self.fdopen_helper()
self.fdopen_helper('r')
self.fdopen_helper('r', 100)
def test_replace(self):
TESTFN2 = support.TESTFN + ".2"
with open(support.TESTFN, 'w') as f:
f.write("1")
with open(TESTFN2, 'w') as f:
f.write("2")
self.addCleanup(os.unlink, TESTFN2)
os.replace(support.TESTFN, TESTFN2)
self.assertRaises(FileNotFoundError, os.stat, support.TESTFN)
with open(TESTFN2, 'r') as f:
self.assertEqual(f.read(), "1")
def test_open_keywords(self):
f = os.open(path=__file__, flags=os.O_RDONLY, mode=0o777,
dir_fd=None)
os.close(f)
def test_symlink_keywords(self):
symlink = support.get_attribute(os, "symlink")
try:
symlink(src='target', dst=support.TESTFN,
target_is_directory=False, dir_fd=None)
except (NotImplementedError, OSError):
pass # No OS support or unprivileged user
# Test attributes on return values from os.*stat* family.
class StatAttributeTests(unittest.TestCase):
def setUp(self):
self.fname = support.TESTFN
self.addCleanup(support.unlink, self.fname)
create_file(self.fname, b"ABC")
@unittest.skipUnless(hasattr(os, 'stat'), 'test needs os.stat()')
def check_stat_attributes(self, fname):
result = os.stat(fname)
# Make sure direct access works
self.assertEqual(result[stat.ST_SIZE], 3)
self.assertEqual(result.st_size, 3)
# Make sure all the attributes are there
members = dir(result)
for name in dir(stat):
if name[:3] == 'ST_':
attr = name.lower()
if name.endswith("TIME"):
def trunc(x): return int(x)
else:
def trunc(x): return x
self.assertEqual(trunc(getattr(result, attr)),
result[getattr(stat, name)])
self.assertIn(attr, members)
# Make sure that the st_?time and st_?time_ns fields roughly agree
# (they should always agree up to around tens-of-microseconds)
for name in 'st_atime st_mtime st_ctime'.split():
floaty = int(getattr(result, name) * 100000)
nanosecondy = getattr(result, name + "_ns") // 10000
self.assertAlmostEqual(floaty, nanosecondy, delta=2)
try:
result[200]
self.fail("No exception raised")
except IndexError:
pass
# Make sure that assignment fails
try:
result.st_mode = 1
self.fail("No exception raised")
except AttributeError:
pass
try:
result.st_rdev = 1
self.fail("No exception raised")
except (AttributeError, TypeError):
pass
try:
result.parrot = 1
self.fail("No exception raised")
except AttributeError:
pass
# Use the stat_result constructor with a too-short tuple.
try:
result2 = os.stat_result((10,))
self.fail("No exception raised")
except TypeError:
pass
# Use the constructor with a too-long tuple.
try:
result2 = os.stat_result((0,1,2,3,4,5,6,7,8,9,10,11,12,13,14))
except TypeError:
pass
def test_stat_attributes(self):
self.check_stat_attributes(self.fname)
def test_stat_attributes_bytes(self):
try:
fname = self.fname.encode(sys.getfilesystemencoding())
except UnicodeEncodeError:
self.skipTest("cannot encode %a for the filesystem" % self.fname)
with warnings.catch_warnings():
warnings.simplefilter("ignore", DeprecationWarning)
self.check_stat_attributes(fname)
def test_stat_result_pickle(self):
result = os.stat(self.fname)
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
p = pickle.dumps(result, proto)
self.assertIn(b'stat_result', p)
if proto < 4:
self.assertIn(b'cos\nstat_result\n', p)
unpickled = pickle.loads(p)
self.assertEqual(result, unpickled)
@unittest.skipUnless(hasattr(os, 'statvfs'), 'test needs os.statvfs()')
def test_statvfs_attributes(self):
try:
result = os.statvfs(self.fname)
except OSError as e:
# On AtheOS, glibc always returns ENOSYS
if e.errno == errno.ENOSYS:
self.skipTest('os.statvfs() failed with ENOSYS')
# Make sure direct access works
self.assertEqual(result.f_bfree, result[3])
# Make sure all the attributes are there.
members = ('bsize', 'frsize', 'blocks', 'bfree', 'bavail', 'files',
'ffree', 'favail', 'flag', 'namemax')
for value, member in enumerate(members):
self.assertEqual(getattr(result, 'f_' + member), result[value])
# Make sure that assignment really fails
try:
result.f_bfree = 1
self.fail("No exception raised")
except AttributeError:
pass
try:
result.parrot = 1
self.fail("No exception raised")
except AttributeError:
pass
# Use the constructor with a too-short tuple.
try:
result2 = os.statvfs_result((10,))
self.fail("No exception raised")
except TypeError:
pass
# Use the constructor with a too-long tuple.
try:
result2 = os.statvfs_result((0,1,2,3,4,5,6,7,8,9,10,11,12,13,14))
except TypeError:
pass
@unittest.skipUnless(hasattr(os, 'statvfs'),
"need os.statvfs()")
def test_statvfs_result_pickle(self):
try:
result = os.statvfs(self.fname)
except OSError as e:
# On AtheOS, glibc always returns ENOSYS
if e.errno == errno.ENOSYS:
self.skipTest('os.statvfs() failed with ENOSYS')
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
p = pickle.dumps(result, proto)
self.assertIn(b'statvfs_result', p)
if proto < 4:
self.assertIn(b'cos\nstatvfs_result\n', p)
unpickled = pickle.loads(p)
self.assertEqual(result, unpickled)
@unittest.skipUnless(sys.platform == "win32", "Win32 specific tests")
def test_1686475(self):
# Verify that an open file can be stat'ed
try:
os.stat(r"c:\pagefile.sys")
except FileNotFoundError:
self.skipTest(r'c:\pagefile.sys does not exist')
except OSError:
self.fail("Could not stat pagefile.sys")
@unittest.skipUnless(sys.platform == "win32", "Win32 specific tests")
@unittest.skipUnless(hasattr(os, "pipe"), "requires os.pipe()")
def test_15261(self):
# Verify that stat'ing a closed fd does not cause crash
r, w = os.pipe()
try:
os.stat(r) # should not raise error
finally:
os.close(r)
os.close(w)
with self.assertRaises(OSError) as ctx:
os.stat(r)
self.assertEqual(ctx.exception.errno, errno.EBADF)
def check_file_attributes(self, result):
self.assertTrue(hasattr(result, 'st_file_attributes'))
self.assertTrue(isinstance(result.st_file_attributes, int))
self.assertTrue(0 <= result.st_file_attributes <= 0xFFFFFFFF)
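# Note: st_file_attributes mirrors the dwFileAttributes value reported by
# the Win32 file attribute APIs, which is an unsigned 32-bit quantity;
# that is why the range check above uses 0..0xFFFFFFFF.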
@unittest.skipUnless(sys.platform == "win32",
"st_file_attributes is Win32 specific")
def test_file_attributes(self):
# test file st_file_attributes (FILE_ATTRIBUTE_DIRECTORY not set)
result = os.stat(self.fname)
self.check_file_attributes(result)
self.assertEqual(
result.st_file_attributes & stat.FILE_ATTRIBUTE_DIRECTORY,
0)
# test directory st_file_attributes (FILE_ATTRIBUTE_DIRECTORY set)
dirname = support.TESTFN + "dir"
os.mkdir(dirname)
self.addCleanup(os.rmdir, dirname)
result = os.stat(dirname)
self.check_file_attributes(result)
self.assertEqual(
result.st_file_attributes & stat.FILE_ATTRIBUTE_DIRECTORY,
stat.FILE_ATTRIBUTE_DIRECTORY)
@unittest.skipUnless(sys.platform == "win32", "Win32 specific tests")
def test_access_denied(self):
# Default to FindFirstFile WIN32_FIND_DATA when access is
# denied. See issue 28075.
# os.environ['TEMP'] should be located on a volume that
# supports file ACLs.
fname = os.path.join(os.environ['TEMP'], self.fname)
self.addCleanup(support.unlink, fname)
create_file(fname, b'ABC')
# Deny the right to [S]YNCHRONIZE on the file to
# force CreateFile to fail with ERROR_ACCESS_DENIED.
DETACHED_PROCESS = 8
subprocess.check_call(
# bpo-30584: Use security identifier *S-1-5-32-545 instead
# of localized "Users" to not depend on the locale.
['icacls.exe', fname, '/deny', '*S-1-5-32-545:(S)'],
creationflags=DETACHED_PROCESS
)
result = os.stat(fname)
self.assertNotEqual(result.st_size, 0)
class UtimeTests(unittest.TestCase):
def setUp(self):
self.dirname = support.TESTFN
self.fname = os.path.join(self.dirname, "f1")
self.addCleanup(support.rmtree, self.dirname)
os.mkdir(self.dirname)
with open(self.fname, 'wb') as fp:
fp.write(b"ABC")
def restore_float_times(state):
with warnings.catch_warnings():
warnings.simplefilter("ignore", DeprecationWarning)
os.stat_float_times(state)
# ensure that st_atime and st_mtime are float
with warnings.catch_warnings():
warnings.simplefilter("ignore", DeprecationWarning)
old_float_times = os.stat_float_times(-1)
self.addCleanup(restore_float_times, old_float_times)
os.stat_float_times(True)
def support_subsecond(self, filename):
# Heuristic to check if the filesystem supports timestamp with
# subsecond resolution: check if float and int timestamps are different
st = os.stat(filename)
return ((st.st_atime != st[7])
or (st.st_mtime != st[8])
or (st.st_ctime != st[9]))
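# Worked example (values hypothetical): on a filesystem with subsecond
# timestamps os.stat() may report st_mtime == 1234.5678 while the integer
# slot st[8] is 1234, so the comparison above returns True; with
# whole-second timestamps the float and int values match and it returns False.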
def _test_utime(self, set_time, filename=None):
if not filename:
filename = self.fname
support_subsecond = self.support_subsecond(filename)
if support_subsecond:
# Timestamp with a resolution of 1 microsecond (10^-6).
#
# The resolution of the C internal function used by os.utime()
# depends on the platform: 1 sec, 1 us, 1 ns. Writing a portable
# test with a resolution of 1 ns requires more work:
# see the issue #15745.
atime_ns = 1002003000 # 1.002003 seconds
mtime_ns = 4005006000 # 4.005006 seconds
else:
# use a resolution of 1 second
atime_ns = 5 * 10**9
mtime_ns = 8 * 10**9
set_time(filename, (atime_ns, mtime_ns))
st = os.stat(filename)
if support_subsecond:
self.assertAlmostEqual(st.st_atime, atime_ns * 1e-9, delta=1e-6)
self.assertAlmostEqual(st.st_mtime, mtime_ns * 1e-9, delta=1e-6)
else:
self.assertEqual(st.st_atime, atime_ns * 1e-9)
self.assertEqual(st.st_mtime, mtime_ns * 1e-9)
self.assertEqual(st.st_atime_ns, atime_ns)
self.assertEqual(st.st_mtime_ns, mtime_ns)
def test_utime(self):
def set_time(filename, ns):
# test the ns keyword parameter
os.utime(filename, ns=ns)
self._test_utime(set_time)
@staticmethod
def ns_to_sec(ns):
# Convert a number of nanoseconds (int) to a number of seconds (float).
# Round towards +infinity by adding 0.5 nanosecond to avoid rounding
# issues: os.utime() itself rounds towards -infinity.
return (ns * 1e-9) + 0.5e-9
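# A worked example of the rounding above: ns_to_sec(1002003000) returns
# 1.0020030005 seconds; when os.utime() rounds that back towards
# -infinity, the stored timestamp is exactly 1.002003 s, i.e.
# 1002003000 ns again.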
def test_utime_by_indexed(self):
# pass times as floating point seconds as the second indexed parameter
def set_time(filename, ns):
atime_ns, mtime_ns = ns
atime = self.ns_to_sec(atime_ns)
mtime = self.ns_to_sec(mtime_ns)
# test utimensat(timespec), utimes(timeval), utime(utimbuf)
# or utime(time_t)
os.utime(filename, (atime, mtime))
self._test_utime(set_time)
def test_utime_by_times(self):
def set_time(filename, ns):
atime_ns, mtime_ns = ns
atime = self.ns_to_sec(atime_ns)
mtime = self.ns_to_sec(mtime_ns)
# test the times keyword parameter
os.utime(filename, times=(atime, mtime))
self._test_utime(set_time)
@unittest.skipUnless(os.utime in os.supports_follow_symlinks,
"follow_symlinks support for utime required "
"for this test.")
def test_utime_nofollow_symlinks(self):
def set_time(filename, ns):
# use follow_symlinks=False to test utimensat(timespec)
# or lutimes(timeval)
os.utime(filename, ns=ns, follow_symlinks=False)
self._test_utime(set_time)
@unittest.skipUnless(os.utime in os.supports_fd,
"fd support for utime required for this test.")
def test_utime_fd(self):
def set_time(filename, ns):
with open(filename, 'wb') as fp:
# use a file descriptor to test futimens(timespec)
# or futimes(timeval)
os.utime(fp.fileno(), ns=ns)
self._test_utime(set_time)
@unittest.skipUnless(os.utime in os.supports_dir_fd,
"dir_fd support for utime required for this test.")
def test_utime_dir_fd(self):
def set_time(filename, ns):
dirname, name = os.path.split(filename)
dirfd = os.open(dirname, os.O_RDONLY)
try:
# pass dir_fd to test utimensat(timespec) or futimesat(timeval)
os.utime(name, dir_fd=dirfd, ns=ns)
finally:
os.close(dirfd)
self._test_utime(set_time)
def test_utime_directory(self):
def set_time(filename, ns):
# test calling os.utime() on a directory
os.utime(filename, ns=ns)
self._test_utime(set_time, filename=self.dirname)
def _test_utime_current(self, set_time):
# Get the system clock
current = time.time()
# Call os.utime() to set the timestamp to the current system clock
set_time(self.fname)
if not self.support_subsecond(self.fname):
delta = 1.0
else:
# On Windows, the usual resolution of time.time() is 15.6 ms
delta = 0.020
st = os.stat(self.fname)
msg = ("st_time=%r, current=%r, dt=%r"
% (st.st_mtime, current, st.st_mtime - current))
self.assertAlmostEqual(st.st_mtime, current,
delta=delta, msg=msg)
def test_utime_current(self):
def set_time(filename):
# Set to the current time in the new way
os.utime(self.fname)
self._test_utime_current(set_time)
def test_utime_current_old(self):
def set_time(filename):
# Set to the current time in the old explicit way.
os.utime(self.fname, None)
self._test_utime_current(set_time)
def get_file_system(self, path):
if sys.platform == 'win32':
root = os.path.splitdrive(os.path.abspath(path))[0] + '\\'
import ctypes
kernel32 = ctypes.windll.kernel32
buf = ctypes.create_unicode_buffer("", 100)
ok = kernel32.GetVolumeInformationW(root, None, 0,
None, None, None,
buf, len(buf))
if ok:
return buf.value
# return None if the filesystem is unknown
def test_large_time(self):
# Many filesystems are limited to the year 2038. At least, the test
# passes with the NTFS filesystem.
if self.get_file_system(self.dirname) != "NTFS":
self.skipTest("requires NTFS")
large = 5000000000 # some day in 2128
os.utime(self.fname, (large, large))
self.assertEqual(os.stat(self.fname).st_mtime, large)
def test_utime_invalid_arguments(self):
# seconds and nanoseconds parameters are mutually exclusive
with self.assertRaises(ValueError):
os.utime(self.fname, (5, 5), ns=(5, 5))
from test import mapping_tests
class EnvironTests(mapping_tests.BasicTestMappingProtocol):
"""check that os.environ object conform to mapping protocol"""
type2test = None
def setUp(self):
self.__save = dict(os.environ)
if os.supports_bytes_environ:
self.__saveb = dict(os.environb)
for key, value in self._reference().items():
os.environ[key] = value
def tearDown(self):
os.environ.clear()
os.environ.update(self.__save)
if os.supports_bytes_environ:
os.environb.clear()
os.environb.update(self.__saveb)
def _reference(self):
return {"KEY1":"VALUE1", "KEY2":"VALUE2", "KEY3":"VALUE3"}
def _empty_mapping(self):
os.environ.clear()
return os.environ
# Bug 1110478
@unittest.skipUnless(os.path.exists('/bin/sh'), 'requires /bin/sh')
def test_update2(self):
os.environ.clear()
os.environ.update(HELLO="World")
with os.popen("/bin/sh -c 'echo $HELLO'") as popen:
value = popen.read().strip()
self.assertEqual(value, "World")
@unittest.skipUnless(os.path.exists('/bin/sh'), 'requires /bin/sh')
def test_os_popen_iter(self):
with os.popen(
"/bin/sh -c 'echo \"line1\nline2\nline3\"'") as popen:
it = iter(popen)
self.assertEqual(next(it), "line1\n")
self.assertEqual(next(it), "line2\n")
self.assertEqual(next(it), "line3\n")
self.assertRaises(StopIteration, next, it)
# Verify environ keys and values from the OS are of the
# correct str type.
def test_keyvalue_types(self):
for key, val in os.environ.items():
self.assertEqual(type(key), str)
self.assertEqual(type(val), str)
def test_items(self):
for key, value in self._reference().items():
self.assertEqual(os.environ.get(key), value)
# Issue 7310
def test___repr__(self):
"""Check that the repr() of os.environ looks like environ({...})."""
env = os.environ
self.assertEqual(repr(env), 'environ({{{}}})'.format(', '.join(
'{!r}: {!r}'.format(key, value)
for key, value in env.items())))
def test_get_exec_path(self):
defpath_list = os.defpath.split(os.pathsep)
test_path = ['/monty', '/python', '', '/flying/circus']
test_env = {'PATH': os.pathsep.join(test_path)}
saved_environ = os.environ
try:
os.environ = dict(test_env)
# Test that defaulting to os.environ works.
self.assertSequenceEqual(test_path, os.get_exec_path())
self.assertSequenceEqual(test_path, os.get_exec_path(env=None))
finally:
os.environ = saved_environ
# No PATH environment variable
self.assertSequenceEqual(defpath_list, os.get_exec_path({}))
# Empty PATH environment variable
self.assertSequenceEqual(('',), os.get_exec_path({'PATH':''}))
# Supplied PATH environment variable
self.assertSequenceEqual(test_path, os.get_exec_path(test_env))
if os.supports_bytes_environ:
# env cannot contain 'PATH' and b'PATH' keys
try:
# ignore BytesWarning warning
with warnings.catch_warnings(record=True):
mixed_env = {'PATH': '1', b'PATH': b'2'}
except BytesWarning:
# mixed_env cannot be created with python -bb
pass
else:
self.assertRaises(ValueError, os.get_exec_path, mixed_env)
# bytes key and/or value
self.assertSequenceEqual(os.get_exec_path({b'PATH': b'abc'}),
['abc'])
self.assertSequenceEqual(os.get_exec_path({b'PATH': 'abc'}),
['abc'])
self.assertSequenceEqual(os.get_exec_path({'PATH': b'abc'}),
['abc'])
@unittest.skipUnless(os.supports_bytes_environ,
"os.environb required for this test.")
def test_environb(self):
# os.environ -> os.environb
value = 'euro\u20ac'
try:
value_bytes = value.encode(sys.getfilesystemencoding(),
'surrogateescape')
except UnicodeEncodeError:
msg = "U+20AC character is not encodable to %s" % (
sys.getfilesystemencoding(),)
self.skipTest(msg)
os.environ['unicode'] = value
self.assertEqual(os.environ['unicode'], value)
self.assertEqual(os.environb[b'unicode'], value_bytes)
# os.environb -> os.environ
value = b'\xff'
os.environb[b'bytes'] = value
self.assertEqual(os.environb[b'bytes'], value)
value_str = value.decode(sys.getfilesystemencoding(), 'surrogateescape')
self.assertEqual(os.environ['bytes'], value_str)
# On FreeBSD < 7 and OS X < 10.6, unsetenv() doesn't return a value (issue
# #13415).
@support.requires_freebsd_version(7)
@support.requires_mac_ver(10, 6)
def test_unset_error(self):
if sys.platform == "win32":
# an environment variable is limited to 32,767 characters
key = 'x' * 50000
self.assertRaises(ValueError, os.environ.__delitem__, key)
else:
# "=" is not allowed in a variable name
key = 'key='
self.assertRaises(OSError, os.environ.__delitem__, key)
def test_key_type(self):
missing = 'missingkey'
self.assertNotIn(missing, os.environ)
with self.assertRaises(KeyError) as cm:
os.environ[missing]
self.assertIs(cm.exception.args[0], missing)
self.assertTrue(cm.exception.__suppress_context__)
with self.assertRaises(KeyError) as cm:
del os.environ[missing]
self.assertIs(cm.exception.args[0], missing)
self.assertTrue(cm.exception.__suppress_context__)
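# Note: os.environ's mapping methods catch the internal lookup failure and
# re-raise KeyError(key) from None, which sets __suppress_context__; the
# assertions above pin down both the reported key and that suppression.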
def _test_environ_iteration(self, collection):
iterator = iter(collection)
new_key = "__new_key__"
next(iterator) # start iteration over os.environ.items
# add a new key in os.environ mapping
os.environ[new_key] = "test_environ_iteration"
try:
next(iterator) # force iteration over modified mapping
self.assertEqual(os.environ[new_key], "test_environ_iteration")
finally:
del os.environ[new_key]
def test_iter_error_when_changing_os_environ(self):
self._test_environ_iteration(os.environ)
def test_iter_error_when_changing_os_environ_items(self):
self._test_environ_iteration(os.environ.items())
def test_iter_error_when_changing_os_environ_values(self):
self._test_environ_iteration(os.environ.values())
class WalkTests(unittest.TestCase):
"""Tests for os.walk()."""
# Wrapper to hide minor differences between os.walk and os.fwalk,
# so both functions can be tested with the same code base.
def walk(self, top, **kwargs):
if 'follow_symlinks' in kwargs:
kwargs['followlinks'] = kwargs.pop('follow_symlinks')
return os.walk(top, **kwargs)
def setUp(self):
join = os.path.join
# Build:
# TESTFN/
# TEST1/ a file kid and two directory kids
# tmp1
# SUB1/ a file kid and a directory kid
# tmp2
# SUB11/ no kids
# SUB2/ a file kid and a dirsymlink kid
# tmp3
# SUB21/ not readable
# tmp5
# link/ a symlink to TESTFN.2
# broken_link
# broken_link2
# broken_link3
# TEST2/
# tmp4 a lone file
self.walk_path = join(support.TESTFN, "TEST1")
self.sub1_path = join(self.walk_path, "SUB1")
self.sub11_path = join(self.sub1_path, "SUB11")
sub2_path = join(self.walk_path, "SUB2")
self.sub21_path = join(sub2_path, "SUB21")
tmp1_path = join(self.walk_path, "tmp1")
tmp2_path = join(self.sub1_path, "tmp2")
tmp3_path = join(sub2_path, "tmp3")
tmp5_path = join(self.sub21_path, "tmp5")
self.link_path = join(sub2_path, "link")
t2_path = join(support.TESTFN, "TEST2")
tmp4_path = join(support.TESTFN, "TEST2", "tmp4")
broken_link_path = join(sub2_path, "broken_link")
broken_link2_path = join(sub2_path, "broken_link2")
broken_link3_path = join(sub2_path, "broken_link3")
# Create stuff.
os.makedirs(self.sub11_path)
os.makedirs(sub2_path)
os.makedirs(self.sub21_path)
os.makedirs(t2_path)
for path in tmp1_path, tmp2_path, tmp3_path, tmp4_path, tmp5_path:
with open(path, "x") as f:
f.write("I'm " + path + " and proud of it. Blame test_os.\n")
if support.can_symlink():
os.symlink(os.path.abspath(t2_path), self.link_path)
os.symlink('broken', broken_link_path, True)
os.symlink(join('tmp3', 'broken'), broken_link2_path, True)
os.symlink(join('SUB21', 'tmp5'), broken_link3_path, True)
self.sub2_tree = (sub2_path, ["SUB21", "link"],
["broken_link", "broken_link2", "broken_link3",
"tmp3"])
else:
self.sub2_tree = (sub2_path, [], ["tmp3"])
os.chmod(self.sub21_path, 0)
try:
os.listdir(self.sub21_path)
except PermissionError:
pass
else:
os.chmod(self.sub21_path, stat.S_IRWXU)
os.unlink(tmp5_path)
os.rmdir(self.sub21_path)
self.sub21_path = None
del self.sub2_tree[1][:1]
def test_walk_topdown(self):
# Walk top-down.
all = list(self.walk(self.walk_path))
self.assertEqual(len(all), 4)
# We can't know which order SUB1 and SUB2 will appear in.
# Not flipped: TESTFN, SUB1, SUB11, SUB2
# flipped: TESTFN, SUB2, SUB1, SUB11
flipped = all[0][1][0] != "SUB1"
all[0][1].sort()
all[3 - 2 * flipped][-1].sort()
all[3 - 2 * flipped][1].sort()
self.assertEqual(all[0], (self.walk_path, ["SUB1", "SUB2"], ["tmp1"]))
self.assertEqual(all[1 + flipped], (self.sub1_path, ["SUB11"], ["tmp2"]))
self.assertEqual(all[2 + flipped], (self.sub11_path, [], []))
self.assertEqual(all[3 - 2 * flipped], self.sub2_tree)
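# Index arithmetic above, spelled out: with flipped == 0 the order is
# TESTFN, SUB1, SUB11, SUB2, so SUB2's entry is all[3 - 0] == all[3];
# with flipped == 1 it is TESTFN, SUB2, SUB1, SUB11, so SUB2's entry is
# all[3 - 2] == all[1].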
def test_walk_prune(self):
# Prune the search.
all = []
for root, dirs, files in self.walk(self.walk_path):
all.append((root, dirs, files))
# Don't descend into SUB1.
if 'SUB1' in dirs:
# Note that this also mutates the dirs we appended to all!
dirs.remove('SUB1')
self.assertEqual(len(all), 2)
self.assertEqual(all[0],
(self.walk_path, ["SUB2"], ["tmp1"]))
all[1][-1].sort()
all[1][1].sort()
self.assertEqual(all[1], self.sub2_tree)
def test_walk_bottom_up(self):
# Walk bottom-up.
all = list(self.walk(self.walk_path, topdown=False))
self.assertEqual(len(all), 4)
# We can't know which order SUB1 and SUB2 will appear in.
# Not flipped: SUB11, SUB1, SUB2, TESTFN
# flipped: SUB2, SUB11, SUB1, TESTFN
flipped = all[3][1][0] != "SUB1"
all[3][1].sort()
all[2 - 2 * flipped][-1].sort()
all[2 - 2 * flipped][1].sort()
self.assertEqual(all[3],
(self.walk_path, ["SUB1", "SUB2"], ["tmp1"]))
self.assertEqual(all[flipped],
(self.sub11_path, [], []))
self.assertEqual(all[flipped + 1],
(self.sub1_path, ["SUB11"], ["tmp2"]))
self.assertEqual(all[2 - 2 * flipped],
self.sub2_tree)
def test_walk_symlink(self):
if not support.can_symlink():
self.skipTest("need symlink support")
# Walk, following symlinks.
walk_it = self.walk(self.walk_path, follow_symlinks=True)
for root, dirs, files in walk_it:
if root == self.link_path:
self.assertEqual(dirs, [])
self.assertEqual(files, ["tmp4"])
break
else:
self.fail("Didn't follow symlink with followlinks=True")
def tearDown(self):
# Tear everything down. This is a decent use for bottom-up on
# Windows, which doesn't have a recursive delete command. The
# (not so) subtlety is that rmdir will fail unless the dir's
# kids are removed first, so bottom up is essential.
if self.sub21_path:
os.chmod(self.sub21_path, stat.S_IRWXU)
for root, dirs, files in os.walk(support.TESTFN, topdown=False):
for name in files:
os.remove(os.path.join(root, name))
for name in dirs:
dirname = os.path.join(root, name)
if not os.path.islink(dirname):
os.rmdir(dirname)
else:
os.remove(dirname)
os.rmdir(support.TESTFN)
def test_walk_bad_dir(self):
# Walk top-down.
errors = []
walk_it = self.walk(self.walk_path, onerror=errors.append)
root, dirs, files = next(walk_it)
self.assertEqual(errors, [])
dir1 = 'SUB1'
path1 = os.path.join(root, dir1)
path1new = os.path.join(root, dir1 + '.new')
os.rename(path1, path1new)
try:
roots = [r for r, d, f in walk_it]
self.assertTrue(errors)
self.assertNotIn(path1, roots)
self.assertNotIn(path1new, roots)
for dir2 in dirs:
if dir2 != dir1:
self.assertIn(os.path.join(root, dir2), roots)
finally:
os.rename(path1new, path1)
@unittest.skipUnless(hasattr(os, 'fwalk'), "Test needs os.fwalk()")
class FwalkTests(WalkTests):
"""Tests for os.fwalk()."""
def walk(self, top, **kwargs):
for root, dirs, files, root_fd in os.fwalk(top, **kwargs):
yield (root, dirs, files)
def _compare_to_walk(self, walk_kwargs, fwalk_kwargs):
"""
compare with walk() results.
"""
walk_kwargs = walk_kwargs.copy()
fwalk_kwargs = fwalk_kwargs.copy()
for topdown, follow_symlinks in itertools.product((True, False), repeat=2):
walk_kwargs.update(topdown=topdown, followlinks=follow_symlinks)
fwalk_kwargs.update(topdown=topdown, follow_symlinks=follow_symlinks)
expected = {}
for root, dirs, files in os.walk(**walk_kwargs):
expected[root] = (set(dirs), set(files))
for root, dirs, files, rootfd in os.fwalk(**fwalk_kwargs):
self.assertIn(root, expected)
self.assertEqual(expected[root], (set(dirs), set(files)))
def test_compare_to_walk(self):
kwargs = {'top': support.TESTFN}
self._compare_to_walk(kwargs, kwargs)
def test_dir_fd(self):
fd = os.open(".", os.O_RDONLY)
try:
walk_kwargs = {'top': support.TESTFN}
fwalk_kwargs = walk_kwargs.copy()
fwalk_kwargs['dir_fd'] = fd
self._compare_to_walk(walk_kwargs, fwalk_kwargs)
finally:
os.close(fd)
def test_yields_correct_dir_fd(self):
# check returned file descriptors
for topdown, follow_symlinks in itertools.product((True, False), repeat=2):
args = support.TESTFN, topdown, None
for root, dirs, files, rootfd in os.fwalk(*args, follow_symlinks=follow_symlinks):
# check that the FD is valid
os.fstat(rootfd)
# redundant check
os.stat(rootfd)
# check that listdir() returns consistent information
self.assertEqual(set(os.listdir(rootfd)), set(dirs) | set(files))
def test_fd_leak(self):
# Since we're opening a lot of FDs, we must be careful to avoid leaks:
# we both check that calling fwalk() a large number of times doesn't
# fail with EMFILE, and that the minimum allocated FD hasn't changed.
minfd = os.dup(1)
os.close(minfd)
for i in range(256):
for x in os.fwalk(support.TESTFN):
pass
newfd = os.dup(1)
self.addCleanup(os.close, newfd)
self.assertEqual(newfd, minfd)
def tearDown(self):
# cleanup
if self.sub21_path:
os.chmod(self.sub21_path, stat.S_IRWXU)
for root, dirs, files, rootfd in os.fwalk(support.TESTFN, topdown=False):
for name in files:
os.unlink(name, dir_fd=rootfd)
for name in dirs:
st = os.stat(name, dir_fd=rootfd, follow_symlinks=False)
if stat.S_ISDIR(st.st_mode):
os.rmdir(name, dir_fd=rootfd)
else:
os.unlink(name, dir_fd=rootfd)
os.rmdir(support.TESTFN)
class BytesWalkTests(WalkTests):
"""Tests for os.walk() with bytes."""
def setUp(self):
super().setUp()
self.stack = contextlib.ExitStack()
if os.name == 'nt':
self.stack.enter_context(warnings.catch_warnings())
warnings.simplefilter("ignore", DeprecationWarning)
def tearDown(self):
self.stack.close()
super().tearDown()
def walk(self, top, **kwargs):
if 'follow_symlinks' in kwargs:
kwargs['followlinks'] = kwargs.pop('follow_symlinks')
for broot, bdirs, bfiles in os.walk(os.fsencode(top), **kwargs):
root = os.fsdecode(broot)
dirs = list(map(os.fsdecode, bdirs))
files = list(map(os.fsdecode, bfiles))
yield (root, dirs, files)
bdirs[:] = list(map(os.fsencode, dirs))
bfiles[:] = list(map(os.fsencode, files))
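# Note: writing the decoded lists back into bdirs/bfiles above is what
# makes in-place pruning work through this wrapper: when a test removes
# 'SUB1' from the yielded dirs list, the assignment mirrors that change
# into the bytes list that os.walk() actually recurses on.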
class MakedirTests(unittest.TestCase):
def setUp(self):
os.mkdir(support.TESTFN)
def test_makedir(self):
base = support.TESTFN
path = os.path.join(base, 'dir1', 'dir2', 'dir3')
os.makedirs(path) # Should work
path = os.path.join(base, 'dir1', 'dir2', 'dir3', 'dir4')
os.makedirs(path)
# Try paths with a '.' in them
self.assertRaises(OSError, os.makedirs, os.curdir)
path = os.path.join(base, 'dir1', 'dir2', 'dir3', 'dir4', 'dir5', os.curdir)
os.makedirs(path)
path = os.path.join(base, 'dir1', os.curdir, 'dir2', 'dir3', 'dir4',
'dir5', 'dir6')
os.makedirs(path)
def test_exist_ok_existing_directory(self):
path = os.path.join(support.TESTFN, 'dir1')
mode = 0o777
old_mask = os.umask(0o022)
os.makedirs(path, mode)
self.assertRaises(OSError, os.makedirs, path, mode)
self.assertRaises(OSError, os.makedirs, path, mode, exist_ok=False)
os.makedirs(path, 0o776, exist_ok=True)
os.makedirs(path, mode=mode, exist_ok=True)
os.umask(old_mask)
# Issue #25583: A drive root could raise PermissionError on Windows
os.makedirs(os.path.abspath('/'), exist_ok=True)
def test_exist_ok_s_isgid_directory(self):
path = os.path.join(support.TESTFN, 'dir1')
S_ISGID = stat.S_ISGID
mode = 0o777
old_mask = os.umask(0o022)
try:
existing_testfn_mode = stat.S_IMODE(
os.lstat(support.TESTFN).st_mode)
try:
os.chmod(support.TESTFN, existing_testfn_mode | S_ISGID)
except PermissionError:
raise unittest.SkipTest('Cannot set S_ISGID for dir.')
if (os.lstat(support.TESTFN).st_mode & S_ISGID != S_ISGID):
raise unittest.SkipTest('No support for S_ISGID dir mode.')
# The os should apply S_ISGID from the parent dir for us, but
# this test need not depend on that behavior. Be explicit.
os.makedirs(path, mode | S_ISGID)
# http://bugs.python.org/issue14992
# Should not fail when the bit is already set.
os.makedirs(path, mode, exist_ok=True)
# remove the bit.
os.chmod(path, stat.S_IMODE(os.lstat(path).st_mode) & ~S_ISGID)
# May work even when the bit is not already set when demanded.
os.makedirs(path, mode | S_ISGID, exist_ok=True)
finally:
os.umask(old_mask)
def test_exist_ok_existing_regular_file(self):
base = support.TESTFN
path = os.path.join(support.TESTFN, 'dir1')
f = open(path, 'w')
f.write('abc')
f.close()
self.assertRaises(OSError, os.makedirs, path)
self.assertRaises(OSError, os.makedirs, path, exist_ok=False)
self.assertRaises(OSError, os.makedirs, path, exist_ok=True)
os.remove(path)
def tearDown(self):
path = os.path.join(support.TESTFN, 'dir1', 'dir2', 'dir3',
'dir4', 'dir5', 'dir6')
# If the tests failed, the bottom-most directory ('../dir6')
# may not have been created, so walk upwards until we find the
# deepest directory that does exist.
while not os.path.exists(path) and path != support.TESTFN:
path = os.path.dirname(path)
os.removedirs(path)
@unittest.skipUnless(hasattr(os, 'chown'), "Test needs chown")
class ChownFileTests(unittest.TestCase):
@classmethod
def setUpClass(cls):
os.mkdir(support.TESTFN)
def test_chown_uid_gid_arguments_must_be_index(self):
stat = os.stat(support.TESTFN)
uid = stat.st_uid
gid = stat.st_gid
for value in (-1.0, -1j, decimal.Decimal(-1), fractions.Fraction(-2, 2)):
self.assertRaises(TypeError, os.chown, support.TESTFN, value, gid)
self.assertRaises(TypeError, os.chown, support.TESTFN, uid, value)
self.assertIsNone(os.chown(support.TESTFN, uid, gid))
self.assertIsNone(os.chown(support.TESTFN, -1, -1))
@unittest.skipUnless(len(groups) > 1, "test needs more than one group")
def test_chown(self):
gid_1, gid_2 = groups[:2]
uid = os.stat(support.TESTFN).st_uid
os.chown(support.TESTFN, uid, gid_1)
gid = os.stat(support.TESTFN).st_gid
self.assertEqual(gid, gid_1)
os.chown(support.TESTFN, uid, gid_2)
gid = os.stat(support.TESTFN).st_gid
self.assertEqual(gid, gid_2)
@unittest.skipUnless(root_in_posix and len(all_users) > 1,
"test needs root privilege and more than one user")
def test_chown_with_root(self):
uid_1, uid_2 = all_users[:2]
gid = os.stat(support.TESTFN).st_gid
os.chown(support.TESTFN, uid_1, gid)
uid = os.stat(support.TESTFN).st_uid
self.assertEqual(uid, uid_1)
os.chown(support.TESTFN, uid_2, gid)
uid = os.stat(support.TESTFN).st_uid
self.assertEqual(uid, uid_2)
@unittest.skipUnless(not root_in_posix and len(all_users) > 1,
"test needs non-root account and more than one user")
def test_chown_without_permission(self):
uid_1, uid_2 = all_users[:2]
gid = os.stat(support.TESTFN).st_gid
with self.assertRaises(PermissionError):
os.chown(support.TESTFN, uid_1, gid)
os.chown(support.TESTFN, uid_2, gid)
@classmethod
def tearDownClass(cls):
os.rmdir(support.TESTFN)
class RemoveDirsTests(unittest.TestCase):
def setUp(self):
os.makedirs(support.TESTFN)
def tearDown(self):
support.rmtree(support.TESTFN)
def test_remove_all(self):
dira = os.path.join(support.TESTFN, 'dira')
os.mkdir(dira)
dirb = os.path.join(dira, 'dirb')
os.mkdir(dirb)
os.removedirs(dirb)
self.assertFalse(os.path.exists(dirb))
self.assertFalse(os.path.exists(dira))
self.assertFalse(os.path.exists(support.TESTFN))
def test_remove_partial(self):
dira = os.path.join(support.TESTFN, 'dira')
os.mkdir(dira)
dirb = os.path.join(dira, 'dirb')
os.mkdir(dirb)
with open(os.path.join(dira, 'file.txt'), 'w') as f:
f.write('text')
os.removedirs(dirb)
self.assertFalse(os.path.exists(dirb))
self.assertTrue(os.path.exists(dira))
self.assertTrue(os.path.exists(support.TESTFN))
def test_remove_nothing(self):
dira = os.path.join(support.TESTFN, 'dira')
os.mkdir(dira)
dirb = os.path.join(dira, 'dirb')
os.mkdir(dirb)
with open(os.path.join(dirb, 'file.txt'), 'w') as f:
f.write('text')
with self.assertRaises(OSError):
os.removedirs(dirb)
self.assertTrue(os.path.exists(dirb))
self.assertTrue(os.path.exists(dira))
self.assertTrue(os.path.exists(support.TESTFN))
class DevNullTests(unittest.TestCase):
def test_devnull(self):
with open(os.devnull, 'wb') as f:
f.write(b'hello')
with open(os.devnull, 'rb') as f:
self.assertEqual(f.read(), b'')
class URandomTests(unittest.TestCase):
def test_urandom_length(self):
self.assertEqual(len(os.urandom(0)), 0)
self.assertEqual(len(os.urandom(1)), 1)
self.assertEqual(len(os.urandom(10)), 10)
self.assertEqual(len(os.urandom(100)), 100)
self.assertEqual(len(os.urandom(1000)), 1000)
def test_urandom_value(self):
data1 = os.urandom(16)
data2 = os.urandom(16)
self.assertNotEqual(data1, data2)
def get_urandom_subprocess(self, count):
code = '\n'.join((
'import os, sys',
'data = os.urandom(%s)' % count,
'sys.stdout.buffer.write(data)',
'sys.stdout.buffer.flush()'))
out = assert_python_ok('-c', code)
stdout = out[1]
self.assertEqual(len(stdout), count)
return stdout
def test_urandom_subprocess(self):
data1 = self.get_urandom_subprocess(16)
data2 = self.get_urandom_subprocess(16)
self.assertNotEqual(data1, data2)
# os.urandom() doesn't use a file descriptor when it is implemented with the
# getentropy() function, the getrandom() function or the getrandom() syscall
OS_URANDOM_DONT_USE_FD = (
sysconfig.get_config_var('HAVE_GETENTROPY') == 1
or sysconfig.get_config_var('HAVE_GETRANDOM') == 1
or sysconfig.get_config_var('HAVE_GETRANDOM_SYSCALL') == 1)
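# A quick interactive check of the build flags used above (a sketch, not
# part of the test suite):
#
#     import sysconfig
#     for var in ('HAVE_GETENTROPY', 'HAVE_GETRANDOM',
#                 'HAVE_GETRANDOM_SYSCALL'):
#         print(var, sysconfig.get_config_var(var))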
@unittest.skipIf(OS_URANDOM_DONT_USE_FD,
"os.urandom() does not use a file descriptor")
class URandomFDTests(unittest.TestCase):
@unittest.skipUnless(resource, "test requires the resource module")
def test_urandom_failure(self):
# Check urandom() failing when it is not able to open /dev/random.
# We spawn a new process to make the test more robust (if getrlimit()
# failed to restore the file descriptor limit after this, the whole
# test suite would crash; this actually happened on the OS X Tiger
# buildbot).
code = """if 1:
import errno
import os
import resource
soft_limit, hard_limit = resource.getrlimit(resource.RLIMIT_NOFILE)
resource.setrlimit(resource.RLIMIT_NOFILE, (1, hard_limit))
try:
os.urandom(16)
except OSError as e:
assert e.errno == errno.EMFILE, e.errno
else:
raise AssertionError("OSError not raised")
"""
assert_python_ok('-c', code)
def test_urandom_fd_closed(self):
# Issue #21207: urandom() should reopen its fd to /dev/urandom if
# closed.
code = """if 1:
import os
import sys
import test.support
os.urandom(4)
with test.support.SuppressCrashReport():
os.closerange(3, 256)
sys.stdout.buffer.write(os.urandom(4))
"""
rc, out, err = assert_python_ok('-Sc', code)
def test_urandom_fd_reopened(self):
# Issue #21207: urandom() should detect its fd to /dev/urandom
# changed to something else, and reopen it.
with open(support.TESTFN, 'wb') as f:
f.write(b"x" * 256)
self.addCleanup(os.unlink, support.TESTFN)
code = """if 1:
import os
import sys
import test.support
os.urandom(4)
with test.support.SuppressCrashReport():
for fd in range(3, 256):
try:
os.close(fd)
except OSError:
pass
else:
# Found the urandom fd (XXX hopefully)
break
os.closerange(3, 256)
with open({TESTFN!r}, 'rb') as f:
os.dup2(f.fileno(), fd)
sys.stdout.buffer.write(os.urandom(4))
sys.stdout.buffer.write(os.urandom(4))
""".format(TESTFN=support.TESTFN)
rc, out, err = assert_python_ok('-Sc', code)
self.assertEqual(len(out), 8)
self.assertNotEqual(out[0:4], out[4:8])
rc, out2, err2 = assert_python_ok('-Sc', code)
self.assertEqual(len(out2), 8)
self.assertNotEqual(out2, out)
@contextlib.contextmanager
def _execvpe_mockup(defpath=None):
"""
Stubs out execv and execve functions when used as context manager.
Records exec calls. The mock execv and execve functions always raise an
exception as they would normally never return.
"""
# A list of tuples containing (function name, first arg, args)
# of calls to execv or execve that have been made.
calls = []
def mock_execv(name, *args):
calls.append(('execv', name, args))
raise RuntimeError("execv called")
def mock_execve(name, *args):
calls.append(('execve', name, args))
raise OSError(errno.ENOTDIR, "execve called")
try:
orig_execv = os.execv
orig_execve = os.execve
orig_defpath = os.defpath
os.execv = mock_execv
os.execve = mock_execve
if defpath is not None:
os.defpath = defpath
yield calls
finally:
os.execv = orig_execv
os.execve = orig_execve
os.defpath = orig_defpath
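# Illustrative use of the mockup (a sketch; the assertions in ExecTests
# below exercise it for real). The program name 'prog' is hypothetical:
#
#     with _execvpe_mockup(defpath='/usr/bin') as calls:
#         try:
#             os._execvpe('prog', ['prog'])
#         except (RuntimeError, OSError):
#             pass  # the mocks always raise
#     # calls now holds ('execv'|'execve', path, args) tuples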
class ExecTests(unittest.TestCase):
@unittest.skipIf(USING_LINUXTHREADS,
"avoid triggering a linuxthreads bug: see issue #4970")
def test_execvpe_with_bad_program(self):
self.assertRaises(OSError, os.execvpe, 'no such app-',
['no such app-'], None)
def test_execvpe_with_bad_arglist(self):
self.assertRaises(ValueError, os.execvpe, 'notepad', [], None)
@unittest.skipUnless(hasattr(os, '_execvpe'),
"No internal os._execvpe function to test.")
def _test_internal_execvpe(self, test_type):
program_path = os.sep + 'absolutepath'
if test_type is bytes:
program = b'executable'
fullpath = os.path.join(os.fsencode(program_path), program)
native_fullpath = fullpath
arguments = [b'progname', 'arg1', 'arg2']
else:
program = 'executable'
arguments = ['progname', 'arg1', 'arg2']
fullpath = os.path.join(program_path, program)
if os.name != "nt":
native_fullpath = os.fsencode(fullpath)
else:
native_fullpath = fullpath
env = {'spam': 'beans'}
# test os._execvpe() with an absolute path
with _execvpe_mockup() as calls:
self.assertRaises(RuntimeError,
os._execvpe, fullpath, arguments)
self.assertEqual(len(calls), 1)
self.assertEqual(calls[0], ('execv', fullpath, (arguments,)))
# test os._execvpe() with a relative path:
# os.get_exec_path() returns defpath
with _execvpe_mockup(defpath=program_path) as calls:
self.assertRaises(OSError,
os._execvpe, program, arguments, env=env)
self.assertEqual(len(calls), 1)
self.assertSequenceEqual(calls[0],
('execve', native_fullpath, (arguments, env)))
# test os._execvpe() with a relative path:
# os.get_exec_path() reads the 'PATH' variable
with _execvpe_mockup() as calls:
env_path = env.copy()
if test_type is bytes:
env_path[b'PATH'] = program_path
else:
env_path['PATH'] = program_path
self.assertRaises(OSError,
os._execvpe, program, arguments, env=env_path)
self.assertEqual(len(calls), 1)
self.assertSequenceEqual(calls[0],
('execve', native_fullpath, (arguments, env_path)))
def test_internal_execvpe_str(self):
self._test_internal_execvpe(str)
if os.name != "nt":
self._test_internal_execvpe(bytes)
def test_execve_invalid_env(self):
args = [sys.executable, '-c', 'pass']
# null character in the environment variable name
newenv = os.environ.copy()
newenv["FRUIT\0VEGETABLE"] = "cabbage"
with self.assertRaises(ValueError):
os.execve(args[0], args, newenv)
# null character in the environment variable value
newenv = os.environ.copy()
newenv["FRUIT"] = "orange\0VEGETABLE=cabbage"
with self.assertRaises(ValueError):
os.execve(args[0], args, newenv)
# '=' character in the environment variable name
newenv = os.environ.copy()
newenv["FRUIT=ORANGE"] = "lemon"
with self.assertRaises(ValueError):
os.execve(args[0], args, newenv)
@unittest.skipUnless(sys.platform == "win32", "Win32 specific tests")
class Win32ErrorTests(unittest.TestCase):
def test_rename(self):
self.assertRaises(OSError, os.rename, support.TESTFN, support.TESTFN+".bak")
def test_remove(self):
self.assertRaises(OSError, os.remove, support.TESTFN)
def test_chdir(self):
self.assertRaises(OSError, os.chdir, support.TESTFN)
def test_mkdir(self):
f = open(support.TESTFN, "w")
try:
self.assertRaises(OSError, os.mkdir, support.TESTFN)
finally:
f.close()
os.unlink(support.TESTFN)
def test_utime(self):
self.assertRaises(OSError, os.utime, support.TESTFN, None)
def test_chmod(self):
self.assertRaises(OSError, os.chmod, support.TESTFN, 0)
class TestInvalidFD(unittest.TestCase):
singles = ["fchdir", "dup", "fdopen", "fdatasync", "fstat",
"fstatvfs", "fsync", "tcgetpgrp", "ttyname"]
# singles.append("close")
# We omit close because it doesn't raise an exception on some platforms.
def get_single(f):
def helper(self):
if hasattr(os, f):
self.check(getattr(os, f))
return helper
for f in singles:
locals()["test_"+f] = get_single(f)
def check(self, f, *args):
try:
f(support.make_bad_fd(), *args)
except OSError as e:
self.assertEqual(e.errno, errno.EBADF)
else:
self.fail("%r didn't raise an OSError with a bad file descriptor"
% f)
@unittest.skipUnless(hasattr(os, 'isatty'), 'test needs os.isatty()')
def test_isatty(self):
self.assertEqual(os.isatty(support.make_bad_fd()), False)
@unittest.skipUnless(hasattr(os, 'closerange'), 'test needs os.closerange()')
def test_closerange(self):
fd = support.make_bad_fd()
# Make sure none of the descriptors we are about to close are
# currently valid (issue 6542).
for i in range(10):
try: os.fstat(fd+i)
except OSError:
pass
else:
break
if i < 2:
raise unittest.SkipTest(
"Unable to acquire a range of invalid file descriptors")
self.assertEqual(os.closerange(fd, fd + i-1), None)
@unittest.skipUnless(hasattr(os, 'dup2'), 'test needs os.dup2()')
def test_dup2(self):
self.check(os.dup2, 20)
@unittest.skipUnless(hasattr(os, 'fchmod'), 'test needs os.fchmod()')
def test_fchmod(self):
self.check(os.fchmod, 0)
@unittest.skipUnless(hasattr(os, 'fchown'), 'test needs os.fchown()')
def test_fchown(self):
self.check(os.fchown, -1, -1)
@unittest.skipUnless(hasattr(os, 'fpathconf'), 'test needs os.fpathconf()')
def test_fpathconf(self):
self.check(os.pathconf, "PC_NAME_MAX")
self.check(os.fpathconf, "PC_NAME_MAX")
@unittest.skipUnless(hasattr(os, 'ftruncate'), 'test needs os.ftruncate()')
def test_ftruncate(self):
self.check(os.truncate, 0)
self.check(os.ftruncate, 0)
@unittest.skipUnless(hasattr(os, 'lseek'), 'test needs os.lseek()')
def test_lseek(self):
self.check(os.lseek, 0, 0)
@unittest.skipUnless(hasattr(os, 'read'), 'test needs os.read()')
def test_read(self):
self.check(os.read, 1)
@unittest.skipUnless(hasattr(os, 'readv'), 'test needs os.readv()')
def test_readv(self):
buf = bytearray(10)
self.check(os.readv, [buf])
@unittest.skipUnless(hasattr(os, 'tcsetpgrp'), 'test needs os.tcsetpgrp()')
def test_tcsetpgrp(self):
self.check(os.tcsetpgrp, 0)
@unittest.skipUnless(hasattr(os, 'write'), 'test needs os.write()')
def test_write(self):
self.check(os.write, b" ")
@unittest.skipUnless(hasattr(os, 'writev'), 'test needs os.writev()')
def test_writev(self):
self.check(os.writev, [b'abc'])
def test_inheritable(self):
self.check(os.get_inheritable)
self.check(os.set_inheritable, True)
@unittest.skipUnless(hasattr(os, 'get_blocking'),
'needs os.get_blocking() and os.set_blocking()')
def test_blocking(self):
self.check(os.get_blocking)
self.check(os.set_blocking, True)
class LinkTests(unittest.TestCase):
def setUp(self):
self.file1 = support.TESTFN
self.file2 = os.path.join(support.TESTFN + "2")
def tearDown(self):
for file in (self.file1, self.file2):
if os.path.exists(file):
os.unlink(file)
def _test_link(self, file1, file2):
with open(file1, "w") as f1:
f1.write("test")
with warnings.catch_warnings():
warnings.simplefilter("ignore", DeprecationWarning)
os.link(file1, file2)
with open(file1, "r") as f1, open(file2, "r") as f2:
self.assertTrue(os.path.sameopenfile(f1.fileno(), f2.fileno()))
def test_link(self):
self._test_link(self.file1, self.file2)
def test_link_bytes(self):
self._test_link(bytes(self.file1, sys.getfilesystemencoding()),
bytes(self.file2, sys.getfilesystemencoding()))
def test_unicode_name(self):
try:
os.fsencode("\xf1")
except UnicodeError:
raise unittest.SkipTest("Unable to encode for this platform.")
self.file1 += "\xf1"
self.file2 = self.file1 + "2"
self._test_link(self.file1, self.file2)
@unittest.skipIf(sys.platform == "win32", "Posix specific tests")
class PosixUidGidTests(unittest.TestCase):
@unittest.skipUnless(hasattr(os, 'setuid'), 'test needs os.setuid()')
def test_setuid(self):
if os.getuid() != 0:
self.assertRaises(OSError, os.setuid, 0)
self.assertRaises(OverflowError, os.setuid, 1<<32)
@unittest.skipUnless(hasattr(os, 'setgid'), 'test needs os.setgid()')
def test_setgid(self):
if os.getuid() != 0 and not HAVE_WHEEL_GROUP:
self.assertRaises(OSError, os.setgid, 0)
self.assertRaises(OverflowError, os.setgid, 1<<32)
@unittest.skipUnless(hasattr(os, 'seteuid'), 'test needs os.seteuid()')
def test_seteuid(self):
if os.getuid() != 0:
self.assertRaises(OSError, os.seteuid, 0)
self.assertRaises(OverflowError, os.seteuid, 1<<32)
@unittest.skipUnless(hasattr(os, 'setegid'), 'test needs os.setegid()')
def test_setegid(self):
if os.getuid() != 0 and not HAVE_WHEEL_GROUP:
self.assertRaises(OSError, os.setegid, 0)
self.assertRaises(OverflowError, os.setegid, 1<<32)
@unittest.skipUnless(hasattr(os, 'setreuid'), 'test needs os.setreuid()')
def test_setreuid(self):
if os.getuid() != 0:
self.assertRaises(OSError, os.setreuid, 0, 0)
self.assertRaises(OverflowError, os.setreuid, 1<<32, 0)
self.assertRaises(OverflowError, os.setreuid, 0, 1<<32)
@unittest.skipUnless(hasattr(os, 'setreuid'), 'test needs os.setreuid()')
def test_setreuid_neg1(self):
# Needs to accept -1. We run this in a subprocess to avoid
# altering the test runner's process state (issue8045).
subprocess.check_call([
sys.executable, '-c',
'import os,sys;os.setreuid(-1,-1);sys.exit(0)'])
@unittest.skipUnless(hasattr(os, 'setregid'), 'test needs os.setregid()')
def test_setregid(self):
if os.getuid() != 0 and not HAVE_WHEEL_GROUP:
self.assertRaises(OSError, os.setregid, 0, 0)
self.assertRaises(OverflowError, os.setregid, 1<<32, 0)
self.assertRaises(OverflowError, os.setregid, 0, 1<<32)
@unittest.skipUnless(hasattr(os, 'setregid'), 'test needs os.setregid()')
def test_setregid_neg1(self):
# Needs to accept -1. We run this in a subprocess to avoid
# altering the test runner's process state (issue8045).
subprocess.check_call([
sys.executable, '-c',
'import os,sys;os.setregid(-1,-1);sys.exit(0)'])
@unittest.skipIf(sys.platform == "win32", "Posix specific tests")
class Pep383Tests(unittest.TestCase):
def setUp(self):
if support.TESTFN_UNENCODABLE:
self.dir = support.TESTFN_UNENCODABLE
elif support.TESTFN_NONASCII:
self.dir = support.TESTFN_NONASCII
else:
self.dir = support.TESTFN
self.bdir = os.fsencode(self.dir)
bytesfn = []
def add_filename(fn):
try:
fn = os.fsencode(fn)
except UnicodeEncodeError:
return
bytesfn.append(fn)
add_filename(support.TESTFN_UNICODE)
if support.TESTFN_UNENCODABLE:
add_filename(support.TESTFN_UNENCODABLE)
if support.TESTFN_NONASCII:
add_filename(support.TESTFN_NONASCII)
if not bytesfn:
self.skipTest("couldn't create any non-ascii filename")
self.unicodefn = set()
os.mkdir(self.dir)
try:
for fn in bytesfn:
support.create_empty_file(os.path.join(self.bdir, fn))
fn = os.fsdecode(fn)
if fn in self.unicodefn:
raise ValueError("duplicate filename")
self.unicodefn.add(fn)
except:
shutil.rmtree(self.dir)
raise
def tearDown(self):
shutil.rmtree(self.dir)
def test_listdir(self):
expected = self.unicodefn
found = set(os.listdir(self.dir))
self.assertEqual(found, expected)
# test listdir without arguments
current_directory = os.getcwd()
try:
os.chdir(os.sep)
self.assertEqual(set(os.listdir()), set(os.listdir(os.sep)))
finally:
os.chdir(current_directory)
def test_open(self):
for fn in self.unicodefn:
f = open(os.path.join(self.dir, fn), 'rb')
f.close()
@unittest.skipUnless(hasattr(os, 'statvfs'),
"need os.statvfs()")
def test_statvfs(self):
# issue #9645
for fn in self.unicodefn:
# should not fail with file not found error
fullname = os.path.join(self.dir, fn)
os.statvfs(fullname)
def test_stat(self):
for fn in self.unicodefn:
os.stat(os.path.join(self.dir, fn))
@unittest.skipUnless(sys.platform == "win32", "Win32 specific tests")
class Win32KillTests(unittest.TestCase):
def _kill(self, sig):
# Start sys.executable as a subprocess and communicate from the
# subprocess to the parent that the interpreter is ready. When it
# becomes ready, send *sig* via os.kill to the subprocess and check
# that the return code is equal to *sig*.
import ctypes
from ctypes import wintypes
import msvcrt
# Since we can't access the contents of the process' stdout until the
# process has exited, use PeekNamedPipe to see what's inside stdout
# without waiting. This is done so we can tell that the interpreter
# is started and running at a point where it could handle a signal.
PeekNamedPipe = ctypes.windll.kernel32.PeekNamedPipe
PeekNamedPipe.restype = wintypes.BOOL
PeekNamedPipe.argtypes = (wintypes.HANDLE, # Pipe handle
ctypes.POINTER(ctypes.c_char), # stdout buf
wintypes.DWORD, # Buffer size
ctypes.POINTER(wintypes.DWORD), # bytes read
ctypes.POINTER(wintypes.DWORD), # bytes avail
ctypes.POINTER(wintypes.DWORD)) # bytes left
msg = "running"
proc = subprocess.Popen([sys.executable, "-c",
"import sys;"
"sys.stdout.write('{}');"
"sys.stdout.flush();"
"input()".format(msg)],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
stdin=subprocess.PIPE)
self.addCleanup(proc.stdout.close)
self.addCleanup(proc.stderr.close)
self.addCleanup(proc.stdin.close)
count, max = 0, 100
while count < max and proc.poll() is None:
# Create a string buffer to store the result of stdout from the pipe
buf = ctypes.create_string_buffer(len(msg))
# Obtain the text currently in proc.stdout
# Bytes read/avail/left are left as NULL and unused
rslt = PeekNamedPipe(msvcrt.get_osfhandle(proc.stdout.fileno()),
buf, ctypes.sizeof(buf), None, None, None)
self.assertNotEqual(rslt, 0, "PeekNamedPipe failed")
if buf.value:
self.assertEqual(msg, buf.value.decode())
break
time.sleep(0.1)
count += 1
else:
self.fail("Did not receive communication from the subprocess")
os.kill(proc.pid, sig)
self.assertEqual(proc.wait(), sig)
def test_kill_sigterm(self):
# SIGTERM doesn't mean anything special, but make sure it works
self._kill(signal.SIGTERM)
def test_kill_int(self):
# os.kill on Windows can take an int which gets set as the exit code
self._kill(100)
def _kill_with_event(self, event, name):
tagname = "test_os_%s" % uuid.uuid1()
m = mmap.mmap(-1, 1, tagname)
m[0] = 0
# Run a script which has console control handling enabled.
proc = subprocess.Popen([sys.executable,
os.path.join(os.path.dirname(__file__),
"win_console_handler.py"), tagname],
creationflags=subprocess.CREATE_NEW_PROCESS_GROUP)
# Let the interpreter startup before we send signals. See #3137.
count, max = 0, 100
while count < max and proc.poll() is None:
if m[0] == 1:
break
time.sleep(0.1)
count += 1
else:
# Forcefully kill the process if we weren't able to signal it.
os.kill(proc.pid, signal.SIGINT)
self.fail("Subprocess didn't finish initialization")
os.kill(proc.pid, event)
# proc.send_signal(event) could also be done here.
# Allow time for the signal to be passed and the process to exit.
time.sleep(0.5)
if proc.poll() is None:
# Forcefully kill the process if we weren't able to signal it.
os.kill(proc.pid, signal.SIGINT)
self.fail("subprocess did not stop on {}".format(name))
@unittest.skip("subprocesses aren't inheriting Ctrl+C property")
def test_CTRL_C_EVENT(self):
from ctypes import wintypes
import ctypes
# Make a NULL value by creating a pointer with no argument.
NULL = ctypes.POINTER(ctypes.c_int)()
SetConsoleCtrlHandler = ctypes.windll.kernel32.SetConsoleCtrlHandler
SetConsoleCtrlHandler.argtypes = (ctypes.POINTER(ctypes.c_int),
wintypes.BOOL)
SetConsoleCtrlHandler.restype = wintypes.BOOL
# Calling this with NULL and FALSE causes the calling process to
# handle Ctrl+C, rather than ignore it. This property is inherited
# by subprocesses.
SetConsoleCtrlHandler(NULL, 0)
self._kill_with_event(signal.CTRL_C_EVENT, "CTRL_C_EVENT")
def test_CTRL_BREAK_EVENT(self):
self._kill_with_event(signal.CTRL_BREAK_EVENT, "CTRL_BREAK_EVENT")
@unittest.skipUnless(sys.platform == "win32", "Win32 specific tests")
class Win32ListdirTests(unittest.TestCase):
"""Test listdir on Windows."""
def setUp(self):
self.created_paths = []
for i in range(2):
dir_name = 'SUB%d' % i
dir_path = os.path.join(support.TESTFN, dir_name)
file_name = 'FILE%d' % i
file_path = os.path.join(support.TESTFN, file_name)
os.makedirs(dir_path)
with open(file_path, 'w') as f:
f.write("I'm %s and proud of it. Blame test_os.\n" % file_path)
self.created_paths.extend([dir_name, file_name])
self.created_paths.sort()
def tearDown(self):
shutil.rmtree(support.TESTFN)
def test_listdir_no_extended_path(self):
"""Test when the path is not an "extended" path."""
# unicode
self.assertEqual(
sorted(os.listdir(support.TESTFN)),
self.created_paths)
# bytes
self.assertEqual(
sorted(os.listdir(os.fsencode(support.TESTFN))),
[os.fsencode(path) for path in self.created_paths])
def test_listdir_extended_path(self):
"""Test when the path starts with '\\\\?\\'."""
# See: http://msdn.microsoft.com/en-us/library/windows/desktop/aa365247(v=vs.85).aspx#maxpath
# unicode
path = '\\\\?\\' + os.path.abspath(support.TESTFN)
self.assertEqual(
sorted(os.listdir(path)),
self.created_paths)
# bytes
path = b'\\\\?\\' + os.fsencode(os.path.abspath(support.TESTFN))
self.assertEqual(
sorted(os.listdir(path)),
[os.fsencode(path) for path in self.created_paths])
@unittest.skipUnless(sys.platform == "win32", "Win32 specific tests")
@support.skip_unless_symlink
class Win32SymlinkTests(unittest.TestCase):
filelink = 'filelinktest'
filelink_target = os.path.abspath(__file__)
dirlink = 'dirlinktest'
dirlink_target = os.path.dirname(filelink_target)
missing_link = 'missing link'
def setUp(self):
assert os.path.exists(self.dirlink_target)
assert os.path.exists(self.filelink_target)
assert not os.path.exists(self.dirlink)
assert not os.path.exists(self.filelink)
assert not os.path.exists(self.missing_link)
def tearDown(self):
if os.path.exists(self.filelink):
os.remove(self.filelink)
if os.path.exists(self.dirlink):
os.rmdir(self.dirlink)
if os.path.lexists(self.missing_link):
os.remove(self.missing_link)
def test_directory_link(self):
os.symlink(self.dirlink_target, self.dirlink)
self.assertTrue(os.path.exists(self.dirlink))
self.assertTrue(os.path.isdir(self.dirlink))
self.assertTrue(os.path.islink(self.dirlink))
self.check_stat(self.dirlink, self.dirlink_target)
def test_file_link(self):
os.symlink(self.filelink_target, self.filelink)
self.assertTrue(os.path.exists(self.filelink))
self.assertTrue(os.path.isfile(self.filelink))
self.assertTrue(os.path.islink(self.filelink))
self.check_stat(self.filelink, self.filelink_target)
def _create_missing_dir_link(self):
'Create a "directory" link to a non-existent target'
linkname = self.missing_link
if os.path.lexists(linkname):
os.remove(linkname)
target = r'c:\\target does not exist.29r3c740'
assert not os.path.exists(target)
target_is_dir = True
os.symlink(target, linkname, target_is_dir)
def test_remove_directory_link_to_missing_target(self):
self._create_missing_dir_link()
# For compatibility with Unix, os.remove will check the
# directory status and call RemoveDirectory if the symlink
# was created with target_is_dir==True.
os.remove(self.missing_link)
@unittest.skip("currently fails; consider for improvement")
def test_isdir_on_directory_link_to_missing_target(self):
self._create_missing_dir_link()
# consider having isdir return true for directory links
self.assertTrue(os.path.isdir(self.missing_link))
@unittest.skip("currently fails; consider for improvement")
def test_rmdir_on_directory_link_to_missing_target(self):
self._create_missing_dir_link()
# consider allowing rmdir to remove directory links
os.rmdir(self.missing_link)
def check_stat(self, link, target):
self.assertEqual(os.stat(link), os.stat(target))
self.assertNotEqual(os.lstat(link), os.stat(link))
bytes_link = os.fsencode(link)
with warnings.catch_warnings():
warnings.simplefilter("ignore", DeprecationWarning)
self.assertEqual(os.stat(bytes_link), os.stat(target))
self.assertNotEqual(os.lstat(bytes_link), os.stat(bytes_link))
def test_12084(self):
level1 = os.path.abspath(support.TESTFN)
level2 = os.path.join(level1, "level2")
level3 = os.path.join(level2, "level3")
try:
os.mkdir(level1)
os.mkdir(level2)
os.mkdir(level3)
file1 = os.path.abspath(os.path.join(level1, "file1"))
with open(file1, "w") as f:
f.write("file1")
orig_dir = os.getcwd()
try:
os.chdir(level2)
link = os.path.join(level2, "link")
os.symlink(os.path.relpath(file1), "link")
self.assertIn("link", os.listdir(os.getcwd()))
# Check os.stat calls from the same dir as the link
self.assertEqual(os.stat(file1), os.stat("link"))
# Check os.stat calls from a dir below the link
os.chdir(level1)
self.assertEqual(os.stat(file1),
os.stat(os.path.relpath(link)))
# Check os.stat calls from a dir above the link
os.chdir(level3)
self.assertEqual(os.stat(file1),
os.stat(os.path.relpath(link)))
finally:
os.chdir(orig_dir)
except OSError as err:
self.fail(err)
finally:
os.remove(file1)
shutil.rmtree(level1)
@unittest.skipUnless(sys.platform == "win32", "Win32 specific tests")
class Win32JunctionTests(unittest.TestCase):
junction = 'junctiontest'
junction_target = os.path.dirname(os.path.abspath(__file__))
def setUp(self):
assert os.path.exists(self.junction_target)
assert not os.path.exists(self.junction)
def tearDown(self):
if os.path.exists(self.junction):
# os.rmdir delegates to Windows' RemoveDirectoryW,
# which removes junction points safely.
os.rmdir(self.junction)
def test_create_junction(self):
_winapi.CreateJunction(self.junction_target, self.junction)
self.assertTrue(os.path.exists(self.junction))
self.assertTrue(os.path.isdir(self.junction))
# Junctions are not recognized as links.
self.assertFalse(os.path.islink(self.junction))
def test_unlink_removes_junction(self):
_winapi.CreateJunction(self.junction_target, self.junction)
self.assertTrue(os.path.exists(self.junction))
os.unlink(self.junction)
self.assertFalse(os.path.exists(self.junction))
@support.skip_unless_symlink
class NonLocalSymlinkTests(unittest.TestCase):
def setUp(self):
"""
Create this structure:
base
\___ some_dir
"""
os.makedirs('base/some_dir')
def tearDown(self):
shutil.rmtree('base')
def test_directory_link_nonlocal(self):
"""
The symlink target should resolve relative to the link, not relative
to the current directory.
Then, link base/some_link -> base/some_dir and ensure that some_link
is resolved as a directory.
In issue13772, it was discovered that directory detection failed if
the symlink target was not specified relative to the current
directory, which was a defect in the implementation.
"""
src = os.path.join('base', 'some_link')
os.symlink('some_dir', src)
assert os.path.isdir(src)
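# A minimal sketch of the rule NonLocalSymlinkTests verifies: a relative
# symlink target resolves against the directory containing the link, not
# against the process's current working directory. 'resolve_link_target' is
# an illustrative helper name, assuming a platform with symlink support.
def resolve_link_target(link_path):
    target = os.readlink(link_path)
    if os.path.isabs(target):
        return target
    return os.path.normpath(os.path.join(os.path.dirname(link_path), target))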
class FSEncodingTests(unittest.TestCase):
def test_nop(self):
self.assertEqual(os.fsencode(b'abc\xff'), b'abc\xff')
self.assertEqual(os.fsdecode('abc\u0141'), 'abc\u0141')
def test_identity(self):
# assert fsdecode(fsencode(x)) == x
for fn in ('unicode\u0141', 'latin\xe9', 'ascii'):
try:
bytesfn = os.fsencode(fn)
except UnicodeEncodeError:
continue
self.assertEqual(os.fsdecode(bytesfn), fn)
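# A minimal sketch of the invariant FSEncodingTests relies on: os.fsencode()
# and os.fsdecode() use the filesystem encoding with the 'surrogateescape'
# error handler, so on POSIX arbitrary bytes survive a round trip through
# str. 'roundtrips_through_str' is an illustrative helper name.
def roundtrips_through_str(raw):
    return os.fsencode(os.fsdecode(raw)) == raw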
class DeviceEncodingTests(unittest.TestCase):
def test_bad_fd(self):
# Return None when an fd doesn't actually exist.
self.assertIsNone(os.device_encoding(123456))
@unittest.skipUnless(os.isatty(0) and (sys.platform.startswith('win') or
(hasattr(locale, 'nl_langinfo') and hasattr(locale, 'CODESET'))),
'test requires a tty and either Windows or nl_langinfo(CODESET)')
def test_device_encoding(self):
encoding = os.device_encoding(0)
self.assertIsNotNone(encoding)
self.assertTrue(codecs.lookup(encoding))
class PidTests(unittest.TestCase):
@unittest.skipUnless(hasattr(os, 'getppid'), "test needs os.getppid")
def test_getppid(self):
p = subprocess.Popen([sys.executable, '-c',
'import os; print(os.getppid())'],
stdout=subprocess.PIPE)
stdout, _ = p.communicate()
# We are the parent of our subprocess
self.assertEqual(int(stdout), os.getpid())
def test_waitpid(self):
args = [sys.executable, '-c', 'pass']
pid = os.spawnv(os.P_NOWAIT, args[0], args)
status = os.waitpid(pid, 0)
self.assertEqual(status, (pid, 0))
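# A minimal sketch, assuming POSIX: the 16-bit status that os.waitpid()
# returns alongside the pid is normally decoded with the os.WIF*/os.W*
# helpers rather than compared raw. 'describe_wait_status' is an
# illustrative helper name.
def describe_wait_status(status):
    if os.WIFEXITED(status):
        return "exited with code %d" % os.WEXITSTATUS(status)
    if os.WIFSIGNALED(status):
        return "killed by signal %d" % os.WTERMSIG(status)
    return "stopped or continued"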
class SpawnTests(unittest.TestCase):
def _test_invalid_env(self, spawn):
args = [sys.executable, '-c', 'pass']
        # null character in the environment variable name
newenv = os.environ.copy()
newenv["FRUIT\0VEGETABLE"] = "cabbage"
try:
exitcode = spawn(os.P_WAIT, args[0], args, newenv)
except ValueError:
pass
else:
self.assertEqual(exitcode, 127)
        # null character in the environment variable value
newenv = os.environ.copy()
newenv["FRUIT"] = "orange\0VEGETABLE=cabbage"
try:
exitcode = spawn(os.P_WAIT, args[0], args, newenv)
except ValueError:
pass
else:
self.assertEqual(exitcode, 127)
        # equal character in the environment variable name
newenv = os.environ.copy()
newenv["FRUIT=ORANGE"] = "lemon"
try:
exitcode = spawn(os.P_WAIT, args[0], args, newenv)
except ValueError:
pass
else:
self.assertEqual(exitcode, 127)
        # equal character in the environment variable value
filename = support.TESTFN
self.addCleanup(support.unlink, filename)
with open(filename, "w") as fp:
fp.write('import sys, os\n'
'if os.getenv("FRUIT") != "orange=lemon":\n'
' raise AssertionError')
args = [sys.executable, filename]
newenv = os.environ.copy()
newenv["FRUIT"] = "orange=lemon"
exitcode = spawn(os.P_WAIT, args[0], args, newenv)
self.assertEqual(exitcode, 0)
@unittest.skipUnless(hasattr(os, 'spawnve'), "test needs os.spawnve")
def test_spawnve_invalid_env(self):
self._test_invalid_env(os.spawnve)
@unittest.skipUnless(hasattr(os, 'spawnvpe'), "test needs os.spawnvpe")
def test_spawnvpe_invalid_env(self):
self._test_invalid_env(os.spawnvpe)
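# A minimal sketch of the constraints SpawnTests._test_invalid_env checks:
# environment entries may not embed NUL bytes anywhere, and '=' may appear
# in a value but not in a name. 'is_valid_env_item' is an illustrative
# helper name.
def is_valid_env_item(name, value):
    return '\0' not in name and '\0' not in value and '=' not in name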
# The introduction of this TestCase caused at least two different errors on
# *nix buildbots. Temporarily skip this to let the buildbots move along.
@unittest.skip("Skip due to platform/environment differences on *NIX buildbots")
@unittest.skipUnless(hasattr(os, 'getlogin'), "test needs os.getlogin")
class LoginTests(unittest.TestCase):
def test_getlogin(self):
user_name = os.getlogin()
self.assertNotEqual(len(user_name), 0)
@unittest.skipUnless(hasattr(os, 'getpriority') and hasattr(os, 'setpriority'),
"needs os.getpriority and os.setpriority")
class ProgramPriorityTests(unittest.TestCase):
"""Tests for os.getpriority() and os.setpriority()."""
def test_set_get_priority(self):
base = os.getpriority(os.PRIO_PROCESS, os.getpid())
os.setpriority(os.PRIO_PROCESS, os.getpid(), base + 1)
try:
new_prio = os.getpriority(os.PRIO_PROCESS, os.getpid())
if base >= 19 and new_prio <= 19:
raise unittest.SkipTest(
"unable to reliably test setpriority at current nice level of %s" % base)
else:
self.assertEqual(new_prio, base + 1)
finally:
try:
os.setpriority(os.PRIO_PROCESS, os.getpid(), base)
except OSError as err:
if err.errno != errno.EACCES:
raise
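# A minimal sketch, assuming POSIX os.getpriority()/os.setpriority(): a
# process can lower its own scheduling priority (raise its nice value), but
# raising it back normally needs elevated privileges, which is why the test
# above tolerates EACCES on restore. 'lower_priority' is an illustrative
# helper name.
def lower_priority(step=1):
    me = os.getpid()
    base = os.getpriority(os.PRIO_PROCESS, me)
    os.setpriority(os.PRIO_PROCESS, me, base + step)
    return base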
if threading is not None:
class SendfileTestServer(asyncore.dispatcher, threading.Thread):
class Handler(asynchat.async_chat):
def __init__(self, conn):
asynchat.async_chat.__init__(self, conn)
self.in_buffer = []
self.closed = False
self.push(b"220 ready\r\n")
def handle_read(self):
data = self.recv(4096)
self.in_buffer.append(data)
def get_data(self):
return b''.join(self.in_buffer)
def handle_close(self):
self.close()
self.closed = True
def handle_error(self):
raise
def __init__(self, address):
threading.Thread.__init__(self)
asyncore.dispatcher.__init__(self)
self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
self.bind(address)
self.listen(5)
self.host, self.port = self.socket.getsockname()[:2]
self.handler_instance = None
self._active = False
self._active_lock = threading.Lock()
# --- public API
@property
def running(self):
return self._active
def start(self):
assert not self.running
self.__flag = threading.Event()
threading.Thread.start(self)
self.__flag.wait()
def stop(self):
assert self.running
self._active = False
self.join()
def wait(self):
# wait for handler connection to be closed, then stop the server
while not getattr(self.handler_instance, "closed", False):
time.sleep(0.001)
self.stop()
# --- internals
def run(self):
self._active = True
self.__flag.set()
while self._active and asyncore.socket_map:
self._active_lock.acquire()
asyncore.loop(timeout=0.001, count=1)
self._active_lock.release()
asyncore.close_all()
def handle_accept(self):
conn, addr = self.accept()
self.handler_instance = self.Handler(conn)
def handle_connect(self):
self.close()
handle_read = handle_connect
def writable(self):
return 0
def handle_error(self):
raise
@unittest.skipUnless(threading is not None, "test needs threading module")
@unittest.skipUnless(hasattr(os, 'sendfile'), "test needs os.sendfile()")
class TestSendfile(unittest.TestCase):
DATA = b"12345abcde" * 16 * 1024 # 160 KB
SUPPORT_HEADERS_TRAILERS = not sys.platform.startswith("linux") and \
not sys.platform.startswith("solaris") and \
not sys.platform.startswith("sunos")
requires_headers_trailers = unittest.skipUnless(SUPPORT_HEADERS_TRAILERS,
'requires headers and trailers support')
@classmethod
def setUpClass(cls):
cls.key = support.threading_setup()
with open(support.TESTFN, "wb") as f:
f.write(cls.DATA)
@classmethod
def tearDownClass(cls):
support.threading_cleanup(*cls.key)
support.unlink(support.TESTFN)
def setUp(self):
self.server = SendfileTestServer((support.HOST, 0))
self.server.start()
self.client = socket.socket()
self.client.connect((self.server.host, self.server.port))
self.client.settimeout(1)
# synchronize by waiting for "220 ready" response
self.client.recv(1024)
self.sockno = self.client.fileno()
self.file = open(support.TESTFN, 'rb')
self.fileno = self.file.fileno()
def tearDown(self):
self.file.close()
self.client.close()
if self.server.running:
self.server.stop()
self.server = None
    def sendfile_wrapper(self, sock, file, offset, nbytes, headers=(), trailers=()):
"""A higher level wrapper representing how an application is
supposed to use sendfile().
"""
        while True:
try:
if self.SUPPORT_HEADERS_TRAILERS:
return os.sendfile(sock, file, offset, nbytes, headers,
trailers)
else:
return os.sendfile(sock, file, offset, nbytes)
except OSError as err:
if err.errno == errno.ECONNRESET:
# disconnected
raise
elif err.errno in (errno.EAGAIN, errno.EBUSY):
                    # retry sending the data
continue
else:
raise
def test_send_whole_file(self):
# normal send
total_sent = 0
offset = 0
nbytes = 4096
while total_sent < len(self.DATA):
sent = self.sendfile_wrapper(self.sockno, self.fileno, offset, nbytes)
if sent == 0:
break
offset += sent
total_sent += sent
self.assertTrue(sent <= nbytes)
self.assertEqual(offset, total_sent)
self.assertEqual(total_sent, len(self.DATA))
self.client.shutdown(socket.SHUT_RDWR)
self.client.close()
self.server.wait()
data = self.server.handler_instance.get_data()
self.assertEqual(len(data), len(self.DATA))
self.assertEqual(data, self.DATA)
def test_send_at_certain_offset(self):
# start sending a file at a certain offset
total_sent = 0
offset = len(self.DATA) // 2
must_send = len(self.DATA) - offset
nbytes = 4096
while total_sent < must_send:
sent = self.sendfile_wrapper(self.sockno, self.fileno, offset, nbytes)
if sent == 0:
break
offset += sent
total_sent += sent
self.assertTrue(sent <= nbytes)
self.client.shutdown(socket.SHUT_RDWR)
self.client.close()
self.server.wait()
data = self.server.handler_instance.get_data()
expected = self.DATA[len(self.DATA) // 2:]
self.assertEqual(total_sent, len(expected))
self.assertEqual(len(data), len(expected))
self.assertEqual(data, expected)
def test_offset_overflow(self):
# specify an offset > file size
offset = len(self.DATA) + 4096
try:
sent = os.sendfile(self.sockno, self.fileno, offset, 4096)
except OSError as e:
# Solaris can raise EINVAL if offset >= file length, ignore.
if e.errno != errno.EINVAL:
raise
else:
self.assertEqual(sent, 0)
self.client.shutdown(socket.SHUT_RDWR)
self.client.close()
self.server.wait()
data = self.server.handler_instance.get_data()
self.assertEqual(data, b'')
def test_invalid_offset(self):
with self.assertRaises(OSError) as cm:
os.sendfile(self.sockno, self.fileno, -1, 4096)
self.assertEqual(cm.exception.errno, errno.EINVAL)
def test_keywords(self):
# Keyword arguments should be supported
os.sendfile(out=self.sockno, offset=0, count=4096,
**{'in': self.fileno})
if self.SUPPORT_HEADERS_TRAILERS:
os.sendfile(self.sockno, self.fileno, offset=0, count=4096,
headers=(), trailers=(), flags=0)
# --- headers / trailers tests
@requires_headers_trailers
def test_headers(self):
total_sent = 0
sent = os.sendfile(self.sockno, self.fileno, 0, 4096,
headers=[b"x" * 512])
total_sent += sent
offset = 4096
nbytes = 4096
        while True:
sent = self.sendfile_wrapper(self.sockno, self.fileno,
offset, nbytes)
if sent == 0:
break
total_sent += sent
offset += sent
expected_data = b"x" * 512 + self.DATA
self.assertEqual(total_sent, len(expected_data))
self.client.close()
self.server.wait()
data = self.server.handler_instance.get_data()
self.assertEqual(hash(data), hash(expected_data))
@requires_headers_trailers
def test_trailers(self):
TESTFN2 = support.TESTFN + "2"
file_data = b"abcdef"
with open(TESTFN2, 'wb') as f:
f.write(file_data)
        with open(TESTFN2, 'rb') as f:
self.addCleanup(os.remove, TESTFN2)
os.sendfile(self.sockno, f.fileno(), 0, len(file_data),
trailers=[b"1234"])
self.client.close()
self.server.wait()
data = self.server.handler_instance.get_data()
self.assertEqual(data, b"abcdef1234")
@requires_headers_trailers
@unittest.skipUnless(hasattr(os, 'SF_NODISKIO'),
'test needs os.SF_NODISKIO')
def test_flags(self):
try:
os.sendfile(self.sockno, self.fileno, 0, 4096,
flags=os.SF_NODISKIO)
except OSError as err:
if err.errno not in (errno.EBUSY, errno.EAGAIN):
raise
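# A minimal sketch of typical application use of os.sendfile(), mirroring
# sendfile_wrapper above: keep calling with an advancing offset until the
# whole file has been pushed; each call reports how many bytes the kernel
# actually moved. 'sendfile_all' is an illustrative helper name.
def sendfile_all(sock_fd, file_fd, size, blocksize=65536):
    offset = 0
    while offset < size:
        sent = os.sendfile(sock_fd, file_fd, offset,
                           min(blocksize, size - offset))
        if sent == 0:
            break  # remote end closed or the file shrank
        offset += sent
    return offset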
def supports_extended_attributes():
if not hasattr(os, "setxattr"):
return False
try:
with open(support.TESTFN, "wb") as fp:
try:
os.setxattr(fp.fileno(), b"user.test", b"")
except OSError:
return False
finally:
support.unlink(support.TESTFN)
# Kernels < 2.6.39 don't respect setxattr flags.
kernel_version = platform.release()
m = re.match("2.6.(\d{1,2})", kernel_version)
return m is None or int(m.group(1)) >= 39
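# A minimal sketch, assuming a Linux filesystem mounted with xattr support:
# user-namespace extended attributes attach small byte strings to files.
# 'tag_file' is an illustrative helper name.
def tag_file(path, key, value):
    os.setxattr(path, "user." + key, value)   # value must be bytes
    return os.getxattr(path, "user." + key)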
@unittest.skipUnless(supports_extended_attributes(),
"no non-broken extended attribute support")
class ExtendedAttributeTests(unittest.TestCase):
def tearDown(self):
support.unlink(support.TESTFN)
def _check_xattrs_str(self, s, getxattr, setxattr, removexattr, listxattr, **kwargs):
fn = support.TESTFN
open(fn, "wb").close()
with self.assertRaises(OSError) as cm:
getxattr(fn, s("user.test"), **kwargs)
self.assertEqual(cm.exception.errno, errno.ENODATA)
init_xattr = listxattr(fn)
self.assertIsInstance(init_xattr, list)
setxattr(fn, s("user.test"), b"", **kwargs)
xattr = set(init_xattr)
xattr.add("user.test")
self.assertEqual(set(listxattr(fn)), xattr)
self.assertEqual(getxattr(fn, b"user.test", **kwargs), b"")
setxattr(fn, s("user.test"), b"hello", os.XATTR_REPLACE, **kwargs)
self.assertEqual(getxattr(fn, b"user.test", **kwargs), b"hello")
with self.assertRaises(OSError) as cm:
setxattr(fn, s("user.test"), b"bye", os.XATTR_CREATE, **kwargs)
self.assertEqual(cm.exception.errno, errno.EEXIST)
with self.assertRaises(OSError) as cm:
setxattr(fn, s("user.test2"), b"bye", os.XATTR_REPLACE, **kwargs)
self.assertEqual(cm.exception.errno, errno.ENODATA)
setxattr(fn, s("user.test2"), b"foo", os.XATTR_CREATE, **kwargs)
xattr.add("user.test2")
self.assertEqual(set(listxattr(fn)), xattr)
removexattr(fn, s("user.test"), **kwargs)
with self.assertRaises(OSError) as cm:
getxattr(fn, s("user.test"), **kwargs)
self.assertEqual(cm.exception.errno, errno.ENODATA)
xattr.remove("user.test")
self.assertEqual(set(listxattr(fn)), xattr)
self.assertEqual(getxattr(fn, s("user.test2"), **kwargs), b"foo")
setxattr(fn, s("user.test"), b"a"*1024, **kwargs)
self.assertEqual(getxattr(fn, s("user.test"), **kwargs), b"a"*1024)
removexattr(fn, s("user.test"), **kwargs)
many = sorted("user.test{}".format(i) for i in range(100))
for thing in many:
setxattr(fn, thing, b"x", **kwargs)
self.assertEqual(set(listxattr(fn)), set(init_xattr) | set(many))
def _check_xattrs(self, *args, **kwargs):
def make_bytes(s):
return bytes(s, "ascii")
self._check_xattrs_str(str, *args, **kwargs)
support.unlink(support.TESTFN)
self._check_xattrs_str(make_bytes, *args, **kwargs)
def test_simple(self):
self._check_xattrs(os.getxattr, os.setxattr, os.removexattr,
os.listxattr)
def test_lpath(self):
self._check_xattrs(os.getxattr, os.setxattr, os.removexattr,
os.listxattr, follow_symlinks=False)
def test_fds(self):
def getxattr(path, *args):
with open(path, "rb") as fp:
return os.getxattr(fp.fileno(), *args)
def setxattr(path, *args):
with open(path, "wb") as fp:
os.setxattr(fp.fileno(), *args)
def removexattr(path, *args):
with open(path, "wb") as fp:
os.removexattr(fp.fileno(), *args)
def listxattr(path, *args):
with open(path, "rb") as fp:
return os.listxattr(fp.fileno(), *args)
self._check_xattrs(getxattr, setxattr, removexattr, listxattr)
@unittest.skipUnless(sys.platform == "win32", "Win32 specific tests")
class Win32DeprecatedBytesAPI(unittest.TestCase):
def test_deprecated(self):
import nt
filename = os.fsencode(support.TESTFN)
with warnings.catch_warnings():
warnings.simplefilter("error", DeprecationWarning)
for func, *args in (
(nt._getfullpathname, filename),
(nt._isdir, filename),
(os.access, filename, os.R_OK),
(os.chdir, filename),
(os.chmod, filename, 0o777),
(os.getcwdb,),
(os.link, filename, filename),
(os.listdir, filename),
(os.lstat, filename),
(os.mkdir, filename),
(os.open, filename, os.O_RDONLY),
(os.rename, filename, filename),
(os.rmdir, filename),
(os.startfile, filename),
(os.stat, filename),
(os.unlink, filename),
(os.utime, filename),
):
self.assertRaises(DeprecationWarning, func, *args)
@support.skip_unless_symlink
def test_symlink(self):
filename = os.fsencode(support.TESTFN)
with warnings.catch_warnings():
warnings.simplefilter("error", DeprecationWarning)
self.assertRaises(DeprecationWarning,
os.symlink, filename, filename)
@unittest.skipUnless(hasattr(os, 'get_terminal_size'), "requires os.get_terminal_size")
class TermsizeTests(unittest.TestCase):
def test_does_not_crash(self):
"""Check if get_terminal_size() returns a meaningful value.
There's no easy portable way to actually check the size of the
terminal, so let's check if it returns something sensible instead.
"""
try:
size = os.get_terminal_size()
except OSError as e:
if sys.platform == "win32" or e.errno in (errno.EINVAL, errno.ENOTTY):
# Under win32 a generic OSError can be thrown if the
# handle cannot be retrieved
self.skipTest("failed to query terminal size")
raise
self.assertGreaterEqual(size.columns, 0)
self.assertGreaterEqual(size.lines, 0)
def test_stty_match(self):
"""Check if stty returns the same results
stty actually tests stdin, so get_terminal_size is invoked on
stdin explicitly. If stty succeeded, then get_terminal_size()
should work too.
"""
try:
size = subprocess.check_output(['stty', 'size']).decode().split()
except (FileNotFoundError, subprocess.CalledProcessError):
self.skipTest("stty invocation failed")
expected = (int(size[1]), int(size[0])) # reversed order
try:
actual = os.get_terminal_size(sys.__stdin__.fileno())
except OSError as e:
if sys.platform == "win32" or e.errno in (errno.EINVAL, errno.ENOTTY):
# Under win32 a generic OSError can be thrown if the
# handle cannot be retrieved
self.skipTest("failed to query terminal size")
raise
self.assertEqual(expected, actual)
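# A minimal sketch of the higher-level API applications normally use instead
# of calling os.get_terminal_size() directly: shutil.get_terminal_size()
# consults the COLUMNS/LINES environment variables and falls back to a
# default instead of raising when stdout is not a terminal.
def safe_terminal_size():
    return shutil.get_terminal_size(fallback=(80, 24))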
class OSErrorTests(unittest.TestCase):
def setUp(self):
class Str(str):
pass
self.bytes_filenames = []
self.unicode_filenames = []
if support.TESTFN_UNENCODABLE is not None:
decoded = support.TESTFN_UNENCODABLE
else:
decoded = support.TESTFN
self.unicode_filenames.append(decoded)
self.unicode_filenames.append(Str(decoded))
if support.TESTFN_UNDECODABLE is not None:
encoded = support.TESTFN_UNDECODABLE
else:
encoded = os.fsencode(support.TESTFN)
self.bytes_filenames.append(encoded)
self.bytes_filenames.append(memoryview(encoded))
self.filenames = self.bytes_filenames + self.unicode_filenames
def test_oserror_filename(self):
funcs = [
(self.filenames, os.chdir,),
(self.filenames, os.chmod, 0o777),
(self.filenames, os.lstat,),
(self.filenames, os.open, os.O_RDONLY),
(self.filenames, os.rmdir,),
(self.filenames, os.stat,),
(self.filenames, os.unlink,),
]
if sys.platform == "win32":
funcs.extend((
(self.bytes_filenames, os.rename, b"dst"),
(self.bytes_filenames, os.replace, b"dst"),
(self.unicode_filenames, os.rename, "dst"),
(self.unicode_filenames, os.replace, "dst"),
# Issue #16414: Don't test undecodable names with listdir()
# because of a Windows bug.
#
                # With the ANSI code page 932, os.listdir(b'\xe7') returns an
# empty list (instead of failing), whereas os.listdir(b'\xff')
# raises a FileNotFoundError. It looks like a Windows bug:
# b'\xe7' directory does not exist, FindFirstFileA(b'\xe7')
# fails with ERROR_FILE_NOT_FOUND (2), instead of
# ERROR_PATH_NOT_FOUND (3).
(self.unicode_filenames, os.listdir,),
))
else:
funcs.extend((
(self.filenames, os.listdir,),
(self.filenames, os.rename, "dst"),
(self.filenames, os.replace, "dst"),
))
if hasattr(os, "chown"):
funcs.append((self.filenames, os.chown, 0, 0))
if hasattr(os, "lchown"):
funcs.append((self.filenames, os.lchown, 0, 0))
if hasattr(os, "truncate"):
funcs.append((self.filenames, os.truncate, 0))
if hasattr(os, "chflags"):
funcs.append((self.filenames, os.chflags, 0))
if hasattr(os, "lchflags"):
funcs.append((self.filenames, os.lchflags, 0))
if hasattr(os, "chroot"):
funcs.append((self.filenames, os.chroot,))
if hasattr(os, "link"):
if sys.platform == "win32":
funcs.append((self.bytes_filenames, os.link, b"dst"))
funcs.append((self.unicode_filenames, os.link, "dst"))
else:
funcs.append((self.filenames, os.link, "dst"))
if hasattr(os, "listxattr"):
funcs.extend((
(self.filenames, os.listxattr,),
(self.filenames, os.getxattr, "user.test"),
(self.filenames, os.setxattr, "user.test", b'user'),
(self.filenames, os.removexattr, "user.test"),
))
if hasattr(os, "lchmod"):
funcs.append((self.filenames, os.lchmod, 0o777))
if hasattr(os, "readlink"):
if sys.platform == "win32":
funcs.append((self.unicode_filenames, os.readlink,))
else:
funcs.append((self.filenames, os.readlink,))
for filenames, func, *func_args in funcs:
for name in filenames:
try:
func(name, *func_args)
except OSError as err:
self.assertIs(err.filename, name)
else:
self.fail("No exception thrown by {}".format(func))
class CPUCountTests(unittest.TestCase):
def test_cpu_count(self):
cpus = os.cpu_count()
if cpus is not None:
self.assertIsInstance(cpus, int)
self.assertGreater(cpus, 0)
else:
self.skipTest("Could not determine the number of CPUs")
class FDInheritanceTests(unittest.TestCase):
def test_get_set_inheritable(self):
fd = os.open(__file__, os.O_RDONLY)
self.addCleanup(os.close, fd)
self.assertEqual(os.get_inheritable(fd), False)
os.set_inheritable(fd, True)
self.assertEqual(os.get_inheritable(fd), True)
@unittest.skipIf(fcntl is None, "need fcntl")
def test_get_inheritable_cloexec(self):
fd = os.open(__file__, os.O_RDONLY)
self.addCleanup(os.close, fd)
self.assertEqual(os.get_inheritable(fd), False)
# clear FD_CLOEXEC flag
flags = fcntl.fcntl(fd, fcntl.F_GETFD)
flags &= ~fcntl.FD_CLOEXEC
fcntl.fcntl(fd, fcntl.F_SETFD, flags)
self.assertEqual(os.get_inheritable(fd), True)
@unittest.skipIf(fcntl is None, "need fcntl")
def test_set_inheritable_cloexec(self):
fd = os.open(__file__, os.O_RDONLY)
self.addCleanup(os.close, fd)
self.assertEqual(fcntl.fcntl(fd, fcntl.F_GETFD) & fcntl.FD_CLOEXEC,
fcntl.FD_CLOEXEC)
os.set_inheritable(fd, True)
self.assertEqual(fcntl.fcntl(fd, fcntl.F_GETFD) & fcntl.FD_CLOEXEC,
0)
def test_open(self):
fd = os.open(__file__, os.O_RDONLY)
self.addCleanup(os.close, fd)
self.assertEqual(os.get_inheritable(fd), False)
@unittest.skipUnless(hasattr(os, 'pipe'), "need os.pipe()")
def test_pipe(self):
rfd, wfd = os.pipe()
self.addCleanup(os.close, rfd)
self.addCleanup(os.close, wfd)
self.assertEqual(os.get_inheritable(rfd), False)
self.assertEqual(os.get_inheritable(wfd), False)
def test_dup(self):
fd1 = os.open(__file__, os.O_RDONLY)
self.addCleanup(os.close, fd1)
fd2 = os.dup(fd1)
self.addCleanup(os.close, fd2)
self.assertEqual(os.get_inheritable(fd2), False)
@unittest.skipUnless(hasattr(os, 'dup2'), "need os.dup2()")
def test_dup2(self):
fd = os.open(__file__, os.O_RDONLY)
self.addCleanup(os.close, fd)
# inheritable by default
fd2 = os.open(__file__, os.O_RDONLY)
try:
os.dup2(fd, fd2)
self.assertEqual(os.get_inheritable(fd2), True)
finally:
os.close(fd2)
# force non-inheritable
fd3 = os.open(__file__, os.O_RDONLY)
try:
os.dup2(fd, fd3, inheritable=False)
self.assertEqual(os.get_inheritable(fd3), False)
finally:
os.close(fd3)
@unittest.skipUnless(hasattr(os, 'openpty'), "need os.openpty()")
def test_openpty(self):
master_fd, slave_fd = os.openpty()
self.addCleanup(os.close, master_fd)
self.addCleanup(os.close, slave_fd)
self.assertEqual(os.get_inheritable(master_fd), False)
self.assertEqual(os.get_inheritable(slave_fd), False)
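# A minimal sketch, assuming POSIX: since PEP 446, descriptors are
# non-inheritable by default, so handing one to a child process requires an
# explicit opt-in such as subprocess's pass_fds, which marks the descriptor
# inheritable for that child. 'spawn_with_fd' is an illustrative helper name.
def spawn_with_fd(fd, argv):
    return subprocess.Popen(argv, pass_fds=(fd,))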
@unittest.skipUnless(hasattr(os, 'get_blocking'),
'needs os.get_blocking() and os.set_blocking()')
class BlockingTests(unittest.TestCase):
def test_blocking(self):
fd = os.open(__file__, os.O_RDONLY)
self.addCleanup(os.close, fd)
self.assertEqual(os.get_blocking(fd), True)
os.set_blocking(fd, False)
self.assertEqual(os.get_blocking(fd), False)
os.set_blocking(fd, True)
self.assertEqual(os.get_blocking(fd), True)
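# A minimal sketch: once os.set_blocking(fd, False) has been called, a read
# on an empty pipe raises BlockingIOError instead of stalling the process.
# 'read_nonblocking' is an illustrative helper name.
def read_nonblocking(fd, n=4096):
    os.set_blocking(fd, False)
    try:
        return os.read(fd, n)
    except BlockingIOError:
        return b''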
class ExportsTests(unittest.TestCase):
def test_os_all(self):
self.assertIn('open', os.__all__)
self.assertIn('walk', os.__all__)
class TestScandir(unittest.TestCase):
def setUp(self):
self.path = os.path.realpath(support.TESTFN)
self.addCleanup(support.rmtree, self.path)
os.mkdir(self.path)
def create_file(self, name="file.txt"):
filename = os.path.join(self.path, name)
with open(filename, "wb") as fp:
fp.write(b'python')
return filename
def get_entries(self, names):
entries = dict((entry.name, entry)
for entry in os.scandir(self.path))
self.assertEqual(sorted(entries.keys()), names)
return entries
def assert_stat_equal(self, stat1, stat2, skip_fields):
if skip_fields:
for attr in dir(stat1):
if not attr.startswith("st_"):
continue
if attr in ("st_dev", "st_ino", "st_nlink"):
continue
self.assertEqual(getattr(stat1, attr),
getattr(stat2, attr),
(stat1, stat2, attr))
else:
self.assertEqual(stat1, stat2)
def check_entry(self, entry, name, is_dir, is_file, is_symlink):
self.assertEqual(entry.name, name)
self.assertEqual(entry.path, os.path.join(self.path, name))
self.assertEqual(entry.inode(),
os.stat(entry.path, follow_symlinks=False).st_ino)
entry_stat = os.stat(entry.path)
self.assertEqual(entry.is_dir(),
stat.S_ISDIR(entry_stat.st_mode))
self.assertEqual(entry.is_file(),
stat.S_ISREG(entry_stat.st_mode))
self.assertEqual(entry.is_symlink(),
os.path.islink(entry.path))
entry_lstat = os.stat(entry.path, follow_symlinks=False)
self.assertEqual(entry.is_dir(follow_symlinks=False),
stat.S_ISDIR(entry_lstat.st_mode))
self.assertEqual(entry.is_file(follow_symlinks=False),
stat.S_ISREG(entry_lstat.st_mode))
self.assert_stat_equal(entry.stat(),
entry_stat,
os.name == 'nt' and not is_symlink)
self.assert_stat_equal(entry.stat(follow_symlinks=False),
entry_lstat,
os.name == 'nt')
def test_attributes(self):
link = hasattr(os, 'link')
symlink = support.can_symlink()
dirname = os.path.join(self.path, "dir")
os.mkdir(dirname)
filename = self.create_file("file.txt")
if link:
os.link(filename, os.path.join(self.path, "link_file.txt"))
if symlink:
os.symlink(dirname, os.path.join(self.path, "symlink_dir"),
target_is_directory=True)
os.symlink(filename, os.path.join(self.path, "symlink_file.txt"))
names = ['dir', 'file.txt']
if link:
names.append('link_file.txt')
if symlink:
names.extend(('symlink_dir', 'symlink_file.txt'))
entries = self.get_entries(names)
entry = entries['dir']
self.check_entry(entry, 'dir', True, False, False)
entry = entries['file.txt']
self.check_entry(entry, 'file.txt', False, True, False)
if link:
entry = entries['link_file.txt']
self.check_entry(entry, 'link_file.txt', False, True, False)
if symlink:
entry = entries['symlink_dir']
self.check_entry(entry, 'symlink_dir', True, False, True)
entry = entries['symlink_file.txt']
self.check_entry(entry, 'symlink_file.txt', False, True, True)
def get_entry(self, name):
entries = list(os.scandir(self.path))
self.assertEqual(len(entries), 1)
entry = entries[0]
self.assertEqual(entry.name, name)
return entry
def create_file_entry(self):
filename = self.create_file()
return self.get_entry(os.path.basename(filename))
def test_current_directory(self):
filename = self.create_file()
old_dir = os.getcwd()
try:
os.chdir(self.path)
            # call scandir() without an argument: it must list the contents
            # of the current directory
entries = dict((entry.name, entry) for entry in os.scandir())
self.assertEqual(sorted(entries.keys()),
[os.path.basename(filename)])
finally:
os.chdir(old_dir)
def test_repr(self):
entry = self.create_file_entry()
self.assertEqual(repr(entry), "<DirEntry 'file.txt'>")
def test_removed_dir(self):
path = os.path.join(self.path, 'dir')
os.mkdir(path)
entry = self.get_entry('dir')
os.rmdir(path)
        # On POSIX, the is_dir() result depends on whether scandir() filled d_type or not
if os.name == 'nt':
self.assertTrue(entry.is_dir())
self.assertFalse(entry.is_file())
self.assertFalse(entry.is_symlink())
if os.name == 'nt':
self.assertRaises(FileNotFoundError, entry.inode)
# don't fail
entry.stat()
entry.stat(follow_symlinks=False)
else:
self.assertGreater(entry.inode(), 0)
self.assertRaises(FileNotFoundError, entry.stat)
self.assertRaises(FileNotFoundError, entry.stat, follow_symlinks=False)
def test_removed_file(self):
entry = self.create_file_entry()
os.unlink(entry.path)
self.assertFalse(entry.is_dir())
        # On POSIX, the is_dir() result depends on whether scandir() filled d_type or not
if os.name == 'nt':
self.assertTrue(entry.is_file())
self.assertFalse(entry.is_symlink())
if os.name == 'nt':
self.assertRaises(FileNotFoundError, entry.inode)
# don't fail
entry.stat()
entry.stat(follow_symlinks=False)
else:
self.assertGreater(entry.inode(), 0)
self.assertRaises(FileNotFoundError, entry.stat)
self.assertRaises(FileNotFoundError, entry.stat, follow_symlinks=False)
def test_broken_symlink(self):
if not support.can_symlink():
return self.skipTest('cannot create symbolic link')
filename = self.create_file("file.txt")
os.symlink(filename,
os.path.join(self.path, "symlink.txt"))
entries = self.get_entries(['file.txt', 'symlink.txt'])
entry = entries['symlink.txt']
os.unlink(filename)
self.assertGreater(entry.inode(), 0)
self.assertFalse(entry.is_dir())
self.assertFalse(entry.is_file()) # broken symlink returns False
self.assertFalse(entry.is_dir(follow_symlinks=False))
self.assertFalse(entry.is_file(follow_symlinks=False))
self.assertTrue(entry.is_symlink())
self.assertRaises(FileNotFoundError, entry.stat)
# don't fail
entry.stat(follow_symlinks=False)
def test_bytes(self):
if os.name == "nt":
# On Windows, os.scandir(bytes) must raise an exception
self.assertRaises(TypeError, os.scandir, b'.')
return
self.create_file("file.txt")
path_bytes = os.fsencode(self.path)
entries = list(os.scandir(path_bytes))
self.assertEqual(len(entries), 1, entries)
entry = entries[0]
self.assertEqual(entry.name, b'file.txt')
self.assertEqual(entry.path,
os.fsencode(os.path.join(self.path, 'file.txt')))
def test_bytes_like(self):
if os.name == "nt":
# On Windows, os.scandir(bytes) must raise an exception
for cls in bytearray, memoryview:
self.assertRaises(TypeError, os.scandir, cls(b'.'))
return
# Deprecated in 3.6.
self.create_file("file.txt")
for cls in bytearray, memoryview:
path_bytes = cls(os.fsencode(self.path))
entries = list(os.scandir(path_bytes))
self.assertEqual(len(entries), 1, entries)
entry = entries[0]
self.assertEqual(entry.name, b'file.txt')
self.assertEqual(entry.path,
os.fsencode(os.path.join(self.path, 'file.txt')))
self.assertIs(type(entry.name), bytes)
self.assertIs(type(entry.path), bytes)
def test_empty_path(self):
self.assertRaises(FileNotFoundError, os.scandir, '')
def test_consume_iterator_twice(self):
self.create_file("file.txt")
iterator = os.scandir(self.path)
entries = list(iterator)
self.assertEqual(len(entries), 1, entries)
        # check that consuming the iterator twice doesn't raise an exception
entries2 = list(iterator)
self.assertEqual(len(entries2), 0, entries2)
def test_bad_path_type(self):
for obj in [1234, 1.234, {}, []]:
self.assertRaises(TypeError, os.scandir, obj)
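# A minimal sketch of the intended use of os.scandir(): recursing via
# DirEntry.is_dir() reuses the d_type/stat information the directory scan
# already fetched, avoiding the per-name os.stat() call that an
# os.listdir()-based walk would need. 'count_files' is an illustrative
# helper name.
def count_files(root):
    total = 0
    for entry in os.scandir(root):
        if entry.is_dir(follow_symlinks=False):
            total += count_files(entry.path)
        elif entry.is_file(follow_symlinks=False):
            total += 1
    return total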
if __name__ == "__main__":
unittest.main()
|
var group__FLC__ACNTL =
[
[ "MXC_F_FLC_ACNTL_ACNTL_POS", "group__FLC__ACNTL.html#ga2d3e8c3548b27d9beac2dbc28a1d7f16", null ],
[ "MXC_F_FLC_ACNTL_ACNTL", "group__FLC__ACNTL.html#gabd916dfa6330118f6afe23d41a87c876", null ]
]; |
#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2022, Anaconda, Inc., and Bokeh Contributors.
# All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import annotations # isort:skip
import pytest ; pytest
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Bokeh imports
from bokeh._testing.util.api import verify_all
# Module under test
#import bokeh.sampledata.sample_geojson as bsg # isort:skip
#-----------------------------------------------------------------------------
# Setup
#-----------------------------------------------------------------------------
ALL = (
'geojson',
)
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
Test___all__ = pytest.mark.sampledata(verify_all("bokeh.sampledata.sample_geojson", ALL))
@pytest.mark.sampledata
def test_geojson() -> None:
import bokeh.sampledata.sample_geojson as bsg
assert isinstance(bsg.geojson, str)
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
|
define(["layout/masthead","utils/utils","libs/toastr","mvc/library/library-model","mvc/ui/ui-select"],function(a,b,c,d,e){var f=Backbone.View.extend({el:"#center",events:{"click #toolbtn_create_folder":"createFolderFromModal","click #toolbtn_bulk_import":"modalBulkImport","click #include_deleted_datasets_chk":"checkIncludeDeleted","click #toolbtn_bulk_delete":"deleteSelectedItems","click .toolbtn-show-locinfo":"showLocInfo","click .page_size_prompt":"showPageSizePrompt"},defaults:{can_add_library_item:!1,contains_file_or_folder:!1,chain_call_control:{total_number:0,failed_number:0},disabled_jstree_element:"folders"},modal:null,jstree:null,histories:null,select_genome:null,select_extension:null,list_extensions:[],auto:{id:"auto",text:"Auto-detect",description:"This system will try to detect the file type automatically. If your file is not detected properly as one of the known formats, it most likely means that it has some format problems (e.g., different number of columns on different rows). You can still coerce the system to set your data to the format you think it should be. You can also upload compressed files, which will automatically be decompressed."},list_genomes:[],initialize:function(a){this.options=_.defaults(a||{},this.defaults),this.fetchExtAndGenomes(),this.render()},render:function(a){this.options=_.extend(this.options,a);var b=this.templateToolBar(),c={id:this.options.id,is_admin:!1,is_anonym:!0,mutiple_add_dataset_options:!1};Galaxy.user&&(c.is_admin=Galaxy.user.isAdmin(),c.is_anonym=Galaxy.user.isAnonymous(),(null!==Galaxy.config.user_library_import_dir||Galaxy.config.allow_library_path_paste!==!1||null!==Galaxy.config.library_import_dir)&&(c.mutiple_add_dataset_options=!0)),this.$el.html(b(c))},renderPaginator:function(a){this.options=_.extend(this.options,a);var b=this.templatePaginator();$("body").find(".folder-paginator").html(b({id:this.options.id,show_page:parseInt(this.options.show_page),page_count:parseInt(this.options.page_count),total_items_count:this.options.total_items_count,items_shown:this.options.items_shown}))},configureElements:function(a){this.options=_.extend(this.options,a),this.options.can_add_library_item===!0?$(".add-library-items").show():$(".add-library-items").hide(),this.options.contains_file_or_folder===!0&&Galaxy.user?Galaxy.user.isAnonymous()?($(".dataset-manipulation").show(),$(".logged-dataset-manipulation").hide()):($(".logged-dataset-manipulation").show(),$(".dataset-manipulation").show()):($(".logged-dataset-manipulation").hide(),$(".dataset-manipulation").hide()),this.$el.find("[data-toggle]").tooltip()},createFolderFromModal:function(a){a.preventDefault(),a.stopPropagation();var b=this,c=this.templateNewFolderInModal();this.modal=Galaxy.modal,this.modal.show({closing_events:!0,title:"Create New Folder",body:c(),buttons:{Create:function(){b.create_new_folder_event()},Close:function(){Galaxy.modal.hide()}}})},create_new_folder_event:function(){var a=this.serialize_new_folder();if(this.validate_new_folder(a)){var b,e=new d.FolderAsModel,f=Backbone.history.fragment.split("/");b=f.indexOf("page")>-1?f[f.length-3]:f[f.length-1],e.url=e.urlRoot+b,e.save(a,{success:function(a){Galaxy.modal.hide(),c.success("Folder created."),a.set({type:"folder"}),Galaxy.libraries.folderListView.collection.add(a)},error:function(a,b){Galaxy.modal.hide(),c.error("undefined"!=typeof b.responseJSON?b.responseJSON.err_msg:"An error ocurred.")}})}else c.error("Folder's name is 
missing.");return!1},serialize_new_folder:function(){return{name:$("input[name='Name']").val(),description:$("input[name='Description']").val()}},validate_new_folder:function(a){return""!==a.name},modalBulkImport:function(){var a=$("#folder_table").find(":checked");if(0===a.length)c.info("You must select some datasets first.");else{var b=this;this.histories=new d.GalaxyHistories,this.histories.fetch().done(function(){var a=b.templateBulkImportInModal();b.modal=Galaxy.modal,b.modal.show({closing_events:!0,title:"Import into History",body:a({histories:b.histories.models}),buttons:{Import:function(){b.importAllIntoHistory()},Close:function(){Galaxy.modal.hide()}}})}).fail(function(a,b){c.error("undefined"!=typeof b.responseJSON?b.responseJSON.err_msg:"An error ocurred.")})}},importAllIntoHistory:function(){this.modal.disableButton("Import");var a=this.modal.$("input[name=history_name]").val(),b=this;if(""!==a)$.post(Galaxy.root+"api/histories",{name:a}).done(function(a){b.options.last_used_history_id=a.id,b.processImportToHistory(a.id,a.name)}).fail(function(){c.error("An error ocurred.")}).always(function(){b.modal.enableButton("Import")});else{var d=$("select[name=dataset_import_bulk] option:selected").val();this.options.last_used_history_id=d;var e=$("select[name=dataset_import_bulk] option:selected").text();this.processImportToHistory(d,e),this.modal.enableButton("Import")}},processImportToHistory:function(a,b){var c=[],e=[];$("#folder_table").find(":checked").each(function(){""!==$(this.parentElement.parentElement).data("id")&&this.parentElement.parentElement.classList.contains("dataset_row")?c.push($(this.parentElement.parentElement).data("id")):""!==$(this.parentElement.parentElement).data("id")&&this.parentElement.parentElement.classList.contains("folder_row")&&e.push($(this.parentElement.parentElement).data("id"))});for(var f=[],g=c.length-1;g>=0;g--){var h=c[g],i=new d.HistoryItem;i.url=i.urlRoot+a+"/contents",i.content=h,i.source="library",f.push(i)}for(var g=e.length-1;g>=0;g--){var j=e[g],i=new d.HistoryItem;i.url=i.urlRoot+a+"/contents",i.content=j,i.source="library_folder",f.push(i)}this.initChainCallControl({length:f.length,action:"to_history",history_name:b}),jQuery.getJSON(Galaxy.root+"history/set_as_current?id="+a),this.chainCallImportingIntoHistory(f,b)},updateProgress:function(){this.progress+=this.progressStep,$(".progress-bar-import").width(Math.round(this.progress)+"%");var a=Math.round(this.progress)+"% Complete";$(".completion_span").text(a)},download:function(a,b){var c=[],d=[];$("#folder_table").find(":checked").each(function(){""!==$(this.parentElement.parentElement).data("id")&&this.parentElement.parentElement.classList.contains("dataset_row")?c.push($(this.parentElement.parentElement).data("id")):""!==$(this.parentElement.parentElement).data("id")&&this.parentElement.parentElement.classList.contains("folder_row")&&d.push($(this.parentElement.parentElement).data("id"))});var e=Galaxy.root+"api/libraries/datasets/download/"+b,f={ld_ids:c,folder_ids:d};this.processDownload(e,f,"get")},processDownload:function(a,b,d){if(a&&b){b="string"==typeof b?b:$.param(b);var e="";$.each(b.split("&"),function(){var a=this.split("=");e+='<input type="hidden" name="'+a[0]+'" value="'+a[1]+'" />'}),$('<form action="'+a+'" method="'+(d||"post")+'">'+e+"</form>").appendTo("body").submit().remove(),c.info("Your download will begin soon.")}else c.error("An error occurred.")},addFilesFromHistoryModal:function(){this.histories=new d.GalaxyHistories;var 
a=this;this.histories.fetch().done(function(){a.modal=Galaxy.modal;var b=a.templateAddFilesFromHistory(),c=a.options.full_path[a.options.full_path.length-1][1];a.modal.show({closing_events:!0,title:"Adding datasets from your history to "+c,body:b({histories:a.histories.models}),buttons:{Add:function(){a.addAllDatasetsFromHistory()},Close:function(){Galaxy.modal.hide()}},closing_callback:function(){Galaxy.libraries.library_router.navigate("folders/"+a.id,{trigger:!0})}}),a.fetchAndDisplayHistoryContents(a.histories.models[0].id),$("#dataset_add_bulk").change(function(b){a.fetchAndDisplayHistoryContents(b.target.value)})}).fail(function(a,b){c.error("undefined"!=typeof b.responseJSON?b.responseJSON.err_msg:"An error ocurred.")})},importFilesFromPathModal:function(){var a=this;this.modal=Galaxy.modal;var b=this.templateImportPathModal();this.modal.show({closing_events:!0,title:"Please enter paths to import",body:b({}),buttons:{Import:function(){a.importFromPathsClicked(a)},Close:function(){Galaxy.modal.hide()}},closing_callback:function(){Galaxy.libraries.library_router.navigate("folders/"+a.id,{trigger:!0})}}),this.renderSelectBoxes()},fetchExtAndGenomes:function(){var a=this;b.get({url:Galaxy.root+"api/datatypes?extension_only=False",success:function(b){a.list_extensions=[];for(var c in b)a.list_extensions.push({id:b[c].extension,text:b[c].extension,description:b[c].description,description_url:b[c].description_url});a.list_extensions.sort(function(a,b){return a.id>b.id?1:a.id<b.id?-1:0}),a.list_extensions.unshift(a.auto)},cache:!0}),b.get({url:Galaxy.root+"api/genomes",success:function(b){a.list_genomes=[];for(var c in b)a.list_genomes.push({id:b[c][1],text:b[c][0]});a.list_genomes.sort(function(a,b){return a.id>b.id?1:a.id<b.id?-1:0})},cache:!0})},renderSelectBoxes:function(){var a=this;this.select_genome=new e.View({css:"library-genome-select",data:a.list_genomes,container:Galaxy.modal.$el.find("#library_genome_select"),value:"?"}),this.select_extension=new e.View({css:"library-extension-select",data:a.list_extensions,container:Galaxy.modal.$el.find("#library_extension_select"),value:"auto"})},importFilesFromGalaxyFolderModal:function(a){var b=this,c=this.templateBrowserModal();this.modal=Galaxy.modal,this.modal.show({closing_events:!0,title:"Please select folders or files",body:c({}),buttons:{Import:function(){b.importFromJstreePath(b,a)},Close:function(){Galaxy.modal.hide()}},closing_callback:function(){Galaxy.libraries.library_router.navigate("folders/"+b.id,{trigger:!0})}}),$(".libimport-select-all").bind("click",function(){$("#jstree_browser").jstree("check_all")}),$(".libimport-select-none").bind("click",function(){$("#jstree_browser").jstree("uncheck_all")}),this.renderSelectBoxes(),a.disabled_jstree_element="folders",this.renderJstree(a),$("input[type=radio]").change(function(c){"jstree-disable-folders"===c.target.value?(a.disabled_jstree_element="folders",b.renderJstree(a),$(".jstree-folders-message").hide(),$(".jstree-preserve-structure").hide(),$(".jstree-files-message").show()):"jstree-disable-files"===c.target.value&&($(".jstree-files-message").hide(),$(".jstree-folders-message").show(),$(".jstree-preserve-structure").show(),a.disabled_jstree_element="files",b.renderJstree(a))})},renderJstree:function(a){this.options=_.extend(this.options,a);var b=a.source||"userdir",e=this.options.disabled_jstree_element;this.jstree=new 
d.Jstree,this.jstree.url=this.jstree.urlRoot+"?target="+b+"&format=jstree&disable="+e,this.jstree.fetch({success:function(a){define("jquery",function(){return jQuery}),require(["libs/jquery/jstree"],function(){$("#jstree_browser").jstree("destroy"),$("#jstree_browser").jstree({core:{data:a},plugins:["types","checkbox"],types:{folder:{icon:"jstree-folder"},file:{icon:"jstree-file"}},checkbox:{three_state:!1}})})},error:function(a,b){"undefined"!=typeof b.responseJSON?404001===b.responseJSON.err_code?c.warning(b.responseJSON.err_msg):c.error(b.responseJSON.err_msg):c.error("An error ocurred.")}})},importFromPathsClicked:function(){var a=this.modal.$el.find(".preserve-checkbox").is(":checked"),b=this.modal.$el.find(".link-checkbox").is(":checked"),d=this.modal.$el.find(".spacetab-checkbox").is(":checked"),e=this.modal.$el.find(".posix-checkbox").is(":checked"),f=this.modal.$el.find(".tag-files").is(":checked"),g=this.select_extension.value(),h=this.select_genome.value(),i=$("textarea#import_paths").val(),j=[];if(i){this.modal.disableButton("Import"),i=i.split("\n");for(var k=i.length-1;k>=0;k--){var l=i[k].trim();0!==l.length&&j.push(l)}this.initChainCallControl({length:j.length,action:"adding_datasets"}),this.chainCallImportingFolders({paths:j,preserve_dirs:a,link_data:b,space_to_tab:d,to_posix_lines:e,source:"admin_path",file_type:g,tag_using_filenames:f,dbkey:h})}else c.info("Please enter a path relative to Galaxy root.")},initChainCallControl:function(a){var b;switch(a.action){case"adding_datasets":b=this.templateAddingDatasetsProgressBar(),this.modal.$el.find(".modal-body").html(b({folder_name:this.options.folder_name}));break;case"deleting_datasets":b=this.templateDeletingItemsProgressBar(),this.modal.$el.find(".modal-body").html(b());break;case"to_history":b=this.templateImportIntoHistoryProgressBar(),this.modal.$el.find(".modal-body").html(b({history_name:a.history_name}));break;default:Galaxy.emit.error("Wrong action specified.","datalibs")}this.progress=0,this.progressStep=100/a.length,this.options.chain_call_control.total_number=a.length,this.options.chain_call_control.failed_number=0},importFromJstreePath:function(a,b){var d=$("#jstree_browser").jstree().get_selected(!0),e=_.filter(d,function(a){return 0==a.state.disabled}),f=this.modal.$el.find(".preserve-checkbox").is(":checked"),g=this.modal.$el.find(".link-checkbox").is(":checked"),h=this.modal.$el.find(".spacetab-checkbox").is(":checked"),i=this.modal.$el.find(".posix-checkbox").is(":checked"),j=this.select_extension.value(),k=this.select_genome.value(),l=this.modal.$el.find(".tag-files").is(":checked"),m=e[0].type,n=[];if(e.length<1)c.info("Please select some items first.");else{this.modal.disableButton("Import");for(var o=e.length-1;o>=0;o--)void 0!==e[o].li_attr.full_path&&n.push(e[o].li_attr.full_path);if(this.initChainCallControl({length:n.length,action:"adding_datasets"}),"folder"===m){var p=b.source+"_folder";this.chainCallImportingFolders({paths:n,preserve_dirs:f,link_data:g,space_to_tab:h,to_posix_lines:i,source:p,file_type:j,dbkey:k,tag_using_filenames:l})}else if("file"===m){var p=b.source+"_file";this.chainCallImportingUserdirFiles({paths:n,file_type:j,dbkey:k,link_data:g,space_to_tab:h,to_posix_lines:i,source:p,tag_using_filenames:l})}}},fetchAndDisplayHistoryContents:function(a){var b=new d.HistoryContents({id:a}),e=this;b.fetch({success:function(b){var 
c=e.templateHistoryContents();e.histories.get(a).set({contents:b}),e.modal.$el.find("#selected_history_content").html(c({history_contents:b.models.reverse()})),e.modal.$el.find(".history-import-select-all").bind("click",function(){$("#selected_history_content [type=checkbox]").prop("checked",!0)}),e.modal.$el.find(".history-import-unselect-all").bind("click",function(){$("#selected_history_content [type=checkbox]").prop("checked",!1)})},error:function(a,b){c.error("undefined"!=typeof b.responseJSON?b.responseJSON.err_msg:"An error ocurred.")}})},addAllDatasetsFromHistory:function(){var a=this.modal.$el.find("#selected_history_content").find(":checked"),b=[],e=[],f=[];if(a.length<1)c.info("You must select some datasets first.");else{this.modal.disableButton("Add"),a.each(function(){var a=$(this.parentElement.parentElement.parentElement).data("id");if(a){var c=$(this.parentElement.parentElement.parentElement).data("name");b.push(a),e.push(c)}});for(var g=b.length-1;g>=0;g--){var h=b[g],i=new d.Item;i.url=Galaxy.root+"api/folders/"+this.options.id+"/contents",i.set("collection"===e[g]?{from_hdca_id:h}:{from_hda_id:h}),f.push(i)}this.initChainCallControl({length:f.length,action:"adding_datasets"}),this.chainCallAddingHdas(f)}},chainCallImportingIntoHistory:function(a,b){var d=this,e=a.pop();if("undefined"==typeof e)return 0===this.options.chain_call_control.failed_number?c.success("Selected datasets imported into history. Click this to start analyzing it.","",{onclick:function(){window.location=Galaxy.root}}):this.options.chain_call_control.failed_number===this.options.chain_call_control.total_number?c.error("There was an error and no datasets were imported into history."):this.options.chain_call_control.failed_number<this.options.chain_call_control.total_number&&c.warning("Some of the datasets could not be imported into history. 
Click this to see what was imported.","",{onclick:function(){window.location=Galaxy.root}}),Galaxy.modal.hide(),!0;var f=$.when(e.save({content:e.content,source:e.source}));f.done(function(){d.updateProgress(),d.chainCallImportingIntoHistory(a,b)}).fail(function(){d.options.chain_call_control.failed_number+=1,d.updateProgress(),d.chainCallImportingIntoHistory(a,b)})},
chainCallImportingUserdirFiles:function(a){var b=this,d=a.paths.pop();if("undefined"==typeof d)return 0===this.options.chain_call_control.failed_number?(c.success("Selected files imported into the current folder"),Galaxy.modal.hide()):c.error("An error occurred."),!0;var e=$.when($.post(Galaxy.root+"api/libraries/datasets?encoded_folder_id="+b.id+"&source="+a.source+"&path="+d+"&file_type="+a.file_type+"&link_data="+a.link_data+"&space_to_tab="+a.space_to_tab+"&to_posix_lines="+a.to_posix_lines+"&dbkey="+a.dbkey+"&tag_using_filenames="+a.tag_using_filenames));e.done(function(){b.updateProgress(),b.chainCallImportingUserdirFiles(a)}).fail(function(){b.options.chain_call_control.failed_number+=1,b.updateProgress(),b.chainCallImportingUserdirFiles(a)})},
chainCallImportingFolders:function(a){var b=this,d=a.paths.pop();if("undefined"==typeof d)return 0===this.options.chain_call_control.failed_number?(c.success("Selected folders and their contents imported into the current folder."),Galaxy.modal.hide()):c.error("An error occurred."),!0;var e=$.when($.post(Galaxy.root+"api/libraries/datasets?encoded_folder_id="+b.id+"&source="+a.source+"&path="+d+"&preserve_dirs="+a.preserve_dirs+"&link_data="+a.link_data+"&to_posix_lines="+a.to_posix_lines+"&space_to_tab="+a.space_to_tab+"&file_type="+a.file_type+"&dbkey="+a.dbkey+"&tag_using_filenames="+a.tag_using_filenames));e.done(function(){b.updateProgress(),b.chainCallImportingFolders(a)}).fail(function(){b.options.chain_call_control.failed_number+=1,b.updateProgress(),b.chainCallImportingFolders(a)})},
chainCallAddingHdas:function(a){var b=this;this.added_hdas=new d.Folder;var e=a.pop();if("undefined"==typeof e)return 0===this.options.chain_call_control.failed_number?c.success("Selected datasets from history added to the folder"):this.options.chain_call_control.failed_number===this.options.chain_call_control.total_number?c.error("There was an error and no datasets were added to the folder."):this.options.chain_call_control.failed_number<this.options.chain_call_control.total_number&&c.warning("Some of the datasets could not be added to the folder"),Galaxy.modal.hide(),this.added_hdas;var f=$.when(e.save({from_hda_id:e.get("from_hda_id")}));f.done(function(c){Galaxy.libraries.folderListView.collection.add(c),b.updateProgress(),b.chainCallAddingHdas(a)}).fail(function(){b.options.chain_call_control.failed_number+=1,b.updateProgress(),b.chainCallAddingHdas(a)})},
chainCallDeletingItems:function(a){var b=this;this.deleted_items=new d.Folder;var e=a.pop();if("undefined"==typeof e)return 0===this.options.chain_call_control.failed_number?c.success("Selected items were deleted."):this.options.chain_call_control.failed_number===this.options.chain_call_control.total_number?c.error("There was an error and no items were deleted. Please make sure you have sufficient permissions."):this.options.chain_call_control.failed_number<this.options.chain_call_control.total_number&&c.warning("Some of the items could not be deleted. Please make sure you have sufficient permissions."),Galaxy.modal.hide(),this.deleted_items;var f=$.when(e.destroy());f.done(function(c){if(Galaxy.libraries.folderListView.collection.remove(e.id),b.updateProgress(),Galaxy.libraries.folderListView.options.include_deleted){var f=null;"folder"===c.type||"LibraryFolder"===c.model_class?f=new d.FolderAsModel(c):"file"===c.type||"LibraryDataset"===c.model_class?f=new d.Item(c):(Galaxy.emit.error("Unknown library item type found.","datalibs"),Galaxy.emit.error(c.type||c.model_class,"datalibs")),Galaxy.libraries.folderListView.collection.add(f)}b.chainCallDeletingItems(a)}).fail(function(){b.options.chain_call_control.failed_number+=1,b.updateProgress(),b.chainCallDeletingItems(a)})},
checkIncludeDeleted:function(a){Galaxy.libraries.folderListView.fetchFolder(a.target.checked?{include_deleted:!0}:{include_deleted:!1})},
deleteSelectedItems:function(){var a=$("#folder_table").find(":checked");if(0===a.length)c.info("You must select at least one item for deletion.");else{var b=this.templateDeletingItemsProgressBar();this.modal=Galaxy.modal,this.modal.show({closing_events:!0,title:"Deleting selected items",body:b({}),buttons:{Close:function(){Galaxy.modal.hide()}}}),this.options.chain_call_control.total_number=0,this.options.chain_call_control.failed_number=0;var e=[],f=[];a.each(function(){void 0!==$(this.parentElement.parentElement).data("id")&&("F"==$(this.parentElement.parentElement).data("id").substring(0,1)?f.push($(this.parentElement.parentElement).data("id")):e.push($(this.parentElement.parentElement).data("id")))});var g=e.length+f.length;this.progressStep=100/g,this.progress=0;for(var h=[],i=e.length-1;i>=0;i--){var j=new d.Item({id:e[i]});h.push(j)}for(var i=f.length-1;i>=0;i--){var k=new d.FolderAsModel({id:f[i]});h.push(k)}this.options.chain_call_control.total_number=g,this.chainCallDeletingItems(h)}},
showLocInfo:function(){var a=null,b=this;null!==Galaxy.libraries.libraryListView?(a=Galaxy.libraries.libraryListView.collection.get(this.options.parent_library_id),this.showLocInfoModal(a)):(a=new d.Library({id:this.options.parent_library_id}),a.fetch({success:function(){b.showLocInfoModal(a)},error:function(a,b){c.error("undefined"!=typeof b.responseJSON?b.responseJSON.err_msg:"An error occurred.")}}))},
showLocInfoModal:function(a){var b=this,c=this.templateLocInfoInModal();this.modal=Galaxy.modal,this.modal.show({closing_events:!0,title:"Location Details",body:c({library:a,options:b.options}),buttons:{Close:function(){Galaxy.modal.hide()}}})},
showImportModal:function(a){switch(a.source){case"history":this.addFilesFromHistoryModal();break;case"importdir":this.importFilesFromGalaxyFolderModal({source:"importdir"});break;case"path":this.importFilesFromPathModal();break;case"userdir":this.importFilesFromGalaxyFolderModal({source:"userdir"});break;default:Galaxy.libraries.library_router.back(),c.error("Invalid import source.")}},
showPageSizePrompt:function(){var a=prompt("How many items per page do you want to see?",Galaxy.libraries.preferences.get("folder_page_size"));null!=a&&a==parseInt(a)&&(Galaxy.libraries.preferences.set({folder_page_size:parseInt(a)}),Galaxy.libraries.folderListView.render({id:this.options.id,show_page:1}))},
templateToolBar:function(){return _.template(['<div class="library_style_container">','<div id="library_toolbar">','<form class="form-inline" role="form">',"<span><strong>DATA LIBRARIES</strong></span>",'<span class="library-paginator folder-paginator"></span>','<div class="checkbox toolbar-item logged-dataset-manipulation" style="height: 20px; display:none;">',"<label>",'<input id="include_deleted_datasets_chk" type="checkbox">include deleted</input>',"</label>","</div>",'<button style="display:none;" data-toggle="tooltip" data-placement="top" title="Create New Folder" id="toolbtn_create_folder" class="btn btn-default primary-button add-library-items toolbar-item" type="button">','<span class="fa fa-plus"></span><span class="fa fa-folder"></span>',"</button>","<% if(mutiple_add_dataset_options) { %>",'<div class="btn-group add-library-items" style="display:none;">','<button title="Add Datasets to Current Folder" id="" type="button" class="primary-button dropdown-toggle" data-toggle="dropdown">','<span class="fa fa-plus"></span><span class="fa fa-file"></span><span class="caret"></span>',"</button>",'<ul class="dropdown-menu" role="menu">','<li><a href="#folders/<%= id %>/import/history"> from History</a></li>',"<% if(Galaxy.config.user_library_import_dir !== null) { %>",'<li><a href="#folders/<%= id %>/import/userdir"> from User Directory</a></li>',"<% } %>","<% if(Galaxy.config.allow_library_path_paste) { %>",'<li class="divider"></li>','<li class="dropdown-header">Admins only</li>',"<% if(Galaxy.config.library_import_dir !== null) { %>",'<li><a href="#folders/<%= id %>/import/importdir">from Import Directory</a></li>',"<% } %>","<% if(Galaxy.config.allow_library_path_paste) { %>",'<li><a href="#folders/<%= id %>/import/path">from Path</a></li>',"<% } %>","<% } %>","</ul>","</div>","<% } else { %>",'<a data-placement="top" title="Add Datasets to Current Folder" style="display:none;" class="btn btn-default add-library-items" href="#folders/<%= id %>/import/history" role="button">','<span class="fa fa-plus"></span><span class="fa fa-file"></span>',"</a>","<% } %>",'<button data-toggle="tooltip" data-placement="top" title="Import selected datasets into history" id="toolbtn_bulk_import" class="primary-button dataset-manipulation" style="margin-left: 0.5em; display:none;" type="button">','<span class="fa fa-book"></span>'," to History","</button>",'<div class="btn-group dataset-manipulation" style="margin-left: 0.5em; display:none; ">','<button title="Download selected items as archive" type="button" class="primary-button dropdown-toggle" data-toggle="dropdown">','<span class="fa fa-download"></span> Download <span class="caret"></span>',"</button>",'<ul class="dropdown-menu" role="menu">','<li><a href="#/folders/<%= id %>/download/tgz">.tar.gz</a></li>','<li><a href="#/folders/<%= id %>/download/tbz">.tar.bz</a></li>','<li><a href="#/folders/<%= id %>/download/zip">.zip</a></li>',"</ul>","</div>",'<button data-toggle="tooltip" data-placement="top" title="Mark selected items deleted" id="toolbtn_bulk_delete" class="primary-button logged-dataset-manipulation" style="margin-left: 0.5em; display:none; " type="button">','<span class="fa fa-times"></span> Delete</button>','<button data-id="<%- id %>" data-toggle="tooltip" data-placement="top" title="Show location details" class="primary-button toolbtn-show-locinfo" style="margin-left: 0.5em;" type="button">','<span class="fa fa-info-circle"></span>'," Details","</button>",'<span class="help-button" data-toggle="tooltip" data-placement="top" title="See this screen annotated">','<a href="https://galaxyproject.org/data-libraries/screen/folder-contents/" target="_blank">','<button class="primary-button" type="button">','<span class="fa fa-question-circle"></span>'," Help","</button>","</a>","</span>","</div>","</form>",'<div id="folder_items_element">',"</div>",'<div class="folder-paginator paginator-bottom"></div>',"</div>"].join(""))},
templateLocInfoInModal:function(){return _.template(["<div>",'<table class="grid table table-condensed">',"<thead>",'<th style="width: 25%;">library</th>',"<th></th>","</thead>","<tbody>","<tr>","<td>name</td>",'<td><%- library.get("name") %></td>',"</tr>",'<% if(library.get("description") !== "") { %>',"<tr>","<td>description</td>",'<td><%- library.get("description") %></td>',"</tr>","<% } %>",'<% if(library.get("synopsis") !== "") { %>',"<tr>","<td>synopsis</td>",'<td><%- library.get("synopsis") %></td>',"</tr>","<% } %>",'<% if(library.get("create_time_pretty") !== "") { %>',"<tr>","<td>created</td>",'<td><span title="<%- library.get("create_time") %>"><%- library.get("create_time_pretty") %></span></td>',"</tr>","<% } %>","<tr>","<td>id</td>",'<td><%- library.get("id") %></td>',"</tr>","</tbody>","</table>",'<table class="grid table table-condensed">',"<thead>",'<th style="width: 25%;">folder</th>',"<th></th>","</thead>","<tbody>","<tr>","<td>name</td>","<td><%- options.folder_name %></td>","</tr>",'<% if(options.folder_description !== "") { %>',"<tr>","<td>description</td>","<td><%- options.folder_description %></td>","</tr>","<% } %>","<tr>","<td>id</td>","<td><%- options.id %></td>","</tr>","</tbody>","</table>","</div>"].join(""))},
templateNewFolderInModal:function(){return _.template(['<div id="new_folder_modal">',"<form>",'<input type="text" name="Name" value="" placeholder="Name" autofocus>','<input type="text" name="Description" value="" placeholder="Description">',"</form>","</div>"].join(""))},
templateBulkImportInModal:function(){return _.template(["<div>",'<div class="library-modal-item">',"Select history: ",'<select id="dataset_import_bulk" name="dataset_import_bulk" style="width:50%; margin-bottom: 1em; " autofocus>',"<% _.each(histories, function(history) { %>",'<option value="<%= _.escape(history.get("id")) %>"><%= _.escape(history.get("name")) %></option>',"<% }); %>","</select>","</div>",'<div class="library-modal-item">',"or create new: ",'<input type="text" name="history_name" value="" placeholder="name of the new history" style="width:50%;">',"</input>","</div>","</div>"].join(""))},
templateImportIntoHistoryProgressBar:function(){return _.template(['<div class="import_text">',"Importing selected items to history <b><%= _.escape(history_name) %></b>","</div>",'<div class="progress">','<div class="progress-bar progress-bar-import" role="progressbar" aria-valuenow="0" aria-valuemin="0" aria-valuemax="100" style="width: 00%;">','<span class="completion_span">0% Complete</span>',"</div>","</div>"].join(""))},
templateAddingDatasetsProgressBar:function(){return _.template(['<div class="import_text">',"Adding selected datasets to library folder <b><%= _.escape(folder_name) %></b>","</div>",'<div class="progress">','<div class="progress-bar progress-bar-import" role="progressbar" aria-valuenow="0" aria-valuemin="0" aria-valuemax="100" style="width: 00%;">','<span class="completion_span">0% Complete</span>',"</div>","</div>"].join(""))},
templateDeletingItemsProgressBar:function(){return _.template(['<div class="import_text">',"</div>",'<div class="progress">','<div class="progress-bar progress-bar-import" role="progressbar" aria-valuenow="0" aria-valuemin="0" aria-valuemax="100" style="width: 00%;">','<span class="completion_span">0% Complete</span>',"</div>","</div>"].join(""))},
templateBrowserModal:function(){return _.template(['<div id="file_browser_modal">','<div class="alert alert-info jstree-files-message">All files you select will be imported into the current folder ignoring their folder structure.</div>','<div class="alert alert-info jstree-folders-message" style="display:none;">All files within the selected folders and their subfolders will be imported into the current folder.</div>','<div style="margin-bottom:1em;">','<label title="Switch to selecting files" class="radio-inline import-type-switch">','<input type="radio" name="jstree-radio" value="jstree-disable-folders" checked="checked"> Choose Files',"</label>",'<label title="Switch to selecting folders" class="radio-inline import-type-switch">','<input type="radio" name="jstree-radio" value="jstree-disable-files"> Choose Folders',"</label>","</div>",'<div style="margin-bottom:1em;">','<label class="checkbox-inline jstree-preserve-structure" style="display:none;">','<input class="preserve-checkbox" type="checkbox" value="preserve_directory_structure">',"Preserve directory structure","</label>",'<label class="checkbox-inline">','<input class="link-checkbox" type="checkbox" value="link_files">',"Link files instead of copying","</label>",'<label class="checkbox-inline">','<input class="posix-checkbox" type="checkbox" value="to_posix_lines" checked="checked">',"Convert line endings to POSIX","</label>",'<label class="checkbox-inline">','<input class="spacetab-checkbox" type="checkbox" value="space_to_tab">',"Convert spaces to tabs","</label>","</div>",'<button title="Select all files" type="button" class="button primary-button libimport-select-all">',"Select all","</button>",'<button title="Select no files" type="button" class="button primary-button libimport-select-none">',"Unselect all","</button>","<hr />",'<div id="jstree_browser">',"</div>","<hr />","<p>You can set extension type and genome for all imported datasets at once:</p>","<div>",'Type: <span id="library_extension_select" class="library-extension-select" />','Genome: <span id="library_genome_select" class="library-genome-select" />',"</div>","<br>","<div>",'<label class="checkbox-inline tag-files">',"Tag datasets based on file names.",'<input class="tag-files" type="checkbox" value="tag_using_filenames" checked="checked">',"</label>","</div>","</div>"].join(""))},
templateImportPathModal:function(){return _.template(['<div id="file_browser_modal">','<div class="alert alert-info jstree-folders-message">All files within the given folders and their subfolders will be imported into the current folder.</div>','<div style="margin-bottom: 0.5em;">','<label class="checkbox-inline">','<input class="preserve-checkbox" type="checkbox" value="preserve_directory_structure">',"Preserve directory structure","</label>",'<label class="checkbox-inline">','<input class="link-checkbox" type="checkbox" value="link_files">',"Link files instead of copying","</label>","<br>",'<label class="checkbox-inline">','<input class="posix-checkbox" type="checkbox" value="to_posix_lines" checked="checked">',"Convert line endings to POSIX","</label>",'<label class="checkbox-inline">','<input class="spacetab-checkbox" type="checkbox" value="space_to_tab">',"Convert spaces to tabs","</label>","</div>",'<textarea id="import_paths" class="form-control" rows="5" placeholder="Absolute paths (or paths relative to Galaxy root) separated by newline" autofocus></textarea>',"<hr />","<p>You can set extension type and genome for all imported datasets at once:</p>","<div>",'Type: <span id="library_extension_select" class="library-extension-select" />','Genome: <span id="library_genome_select" class="library-genome-select" />',"</div>","<div>",'<label class="checkbox-inline tag-files">',"Tag datasets based on file names.",'<input class="tag-files" type="checkbox" value="tag_using_filenames" checked="checked">',"</label>","</div>","</div>"].join(""));
},templateAddFilesFromHistory:function(){return _.template(['<div id="add_files_modal">',"<div>","Select history: ",'<select id="dataset_add_bulk" name="dataset_add_bulk" style="width:66%; "> ',"<% _.each(histories, function(history) { %>",'<option value="<%= _.escape(history.get("id")) %>"><%= _.escape(history.get("name")) %></option>',"<% }); %>","</select>","</div>","<br/>",'<div id="selected_history_content">',"</div>","</div>"].join(""))},
templateHistoryContents:function(){return _.template(["<p>Choose the datasets to import:</p>","<div>",'<button title="Select all datasets" type="button" class="button primary-button history-import-select-all">',"Select all","</button>",'<button title="Unselect all datasets" type="button" class="button primary-button history-import-unselect-all">',"Unselect all","</button>","</div>","<br>","<ul>","<% _.each(history_contents, function(history_item) { %>",'<% if (history_item.get("deleted") != true ) { %>','<% var item_name = history_item.get("name") %>','<% if (history_item.get("type") === "collection") { %>','<% var collection_type = history_item.get("collection_type") %>','<% if (collection_type === "list") { %>','<li data-id="<%= _.escape(history_item.get("id")) %>" data-name="<%= _.escape(history_item.get("type")) %>">',"<label>",'<label title="<%= _.escape(item_name) %>">','<input style="margin: 0;" type="checkbox"> <%= _.escape(history_item.get("hid")) %>: ','<%= item_name.length > 75 ? _.escape("...".concat(item_name.substr(-75))) : _.escape(item_name) %> (Dataset Collection)',"</label>","</li>","<% } else { %>",'<li><input style="margin: 0;" type="checkbox" onclick="return false;" disabled="disabled">','<span title="You can convert this collection into a collection of type list using the Collection Tools">','<%= _.escape(history_item.get("hid")) %>: ','<%= item_name.length > 75 ? _.escape("...".concat(item_name.substr(-75))) : _.escape(item_name) %> (Dataset Collection of type <%= _.escape(collection_type) %> not supported.)',"</span>","</li>","<% } %>",'<% } else if (history_item.get("visible") === true && history_item.get("state") === "ok") { %>','<li data-id="<%= _.escape(history_item.get("id")) %>" data-name="<%= _.escape(history_item.get("type")) %>">','<label title="<%= _.escape(item_name) %>">','<input style="margin: 0;" type="checkbox"> <%= _.escape(history_item.get("hid")) %>: ','<%= item_name.length > 75 ? _.escape("...".concat(item_name.substr(-75))) : _.escape(item_name) %>',"</label>","</li>","<% } %>","<% } %>","<% }); %>","</ul>"].join(""))},
templatePaginator:function(){return _.template(['<ul class="pagination pagination-sm">',"<% if ( ( show_page - 1 ) > 0 ) { %>","<% if ( ( show_page - 1 ) > page_count ) { %>",'<li><a href="#folders/<%= id %>/page/1"><span class="fa fa-angle-double-left"></span></a></li>','<li class="disabled"><a href="#folders/<%= id %>/page/<% print( show_page ) %>"><% print( show_page - 1 ) %></a></li>',"<% } else { %>",'<li><a href="#folders/<%= id %>/page/1"><span class="fa fa-angle-double-left"></span></a></li>','<li><a href="#folders/<%= id %>/page/<% print( show_page - 1 ) %>"><% print( show_page - 1 ) %></a></li>',"<% } %>","<% } else { %>",'<li class="disabled"><a href="#folders/<%= id %>/page/1"><span class="fa fa-angle-double-left"></span></a></li>','<li class="disabled"><a href="#folders/<%= id %>/page/<% print( show_page ) %>"><% print( show_page - 1 ) %></a></li>',"<% } %>",'<li class="active">','<a href="#folders/<%= id %>/page/<% print( show_page ) %>"><% print( show_page ) %></a>',"</li>","<% if ( ( show_page ) < page_count ) { %>",'<li><a href="#folders/<%= id %>/page/<% print( show_page + 1 ) %>"><% print( show_page + 1 ) %></a></li>','<li><a href="#folders/<%= id %>/page/<% print( page_count ) %>"><span class="fa fa-angle-double-right"></span></a></li>',"<% } else { %>",'<li class="disabled"><a href="#folders/<%= id %>/page/<% print( show_page ) %>"><% print( show_page + 1 ) %></a></li>','<li class="disabled"><a href="#folders/<%= id %>/page/<% print( page_count ) %>"><span class="fa fa-angle-double-right"></span></a></li>',"<% } %>","</ul>","<span>"," showing ",'<a data-toggle="tooltip" data-placement="top" title="Click to change the number of items on page" class="page_size_prompt">',"<%- items_shown %>","</a>"," of <%- total_items_count %> items","</span>"].join(""))}});return{FolderToolbarView:f}});
//# sourceMappingURL=../../../maps/mvc/library/library-foldertoolbar-view.js.map |
import os
import sys
import ast
import numpy as np
import torch
from fairseq import checkpoint_utils, options, tasks, utils
from fairseq.dataclass.utils import convert_namespace_to_omegaconf
from fairseq_cli.generate import get_symbols_to_strip_from_output
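# fairseq checkout root: three directory levels above this file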
fairseq_dir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
class NaiveTranslator:
def __init__(self):
# set config
parser = options.get_interactive_generation_parser()
config_dir = os.path.join(fairseq_dir, "wmt_zhen")
input_args = [config_dir]
args = options.parse_args_and_arch(parser, input_args)
args.source_lang = "zh"
args.target_lang = "en"
args.beam = 4
args.path = os.path.join(config_dir, "model.pt")
args.tokenizer = "moses"
args.bpe = "subword_nmt"
args.bpe_codes = os.path.join(config_dir, "bpecodes")
self.cfg = convert_namespace_to_omegaconf(args)
# set batch size
self.cfg.dataset.batch_size = 1
# fix seed for stochastic decoding
np.random.seed(self.cfg.common.seed)
utils.set_torch_seed(self.cfg.common.seed)
# setup task, e.g. translation
self.task = tasks.setup_task(self.cfg.task)
# load model
overrides = ast.literal_eval(self.cfg.common_eval.model_overrides)
self.models, _model_args = checkpoint_utils.load_model_ensemble(
utils.split_paths(self.cfg.common_eval.path),
arg_overrides=overrides,
task=self.task,
suffix=self.cfg.checkpoint.checkpoint_suffix,
strict=(self.cfg.checkpoint.checkpoint_shard_count == 1),
num_shards=self.cfg.checkpoint.checkpoint_shard_count,
)
# set dictionaries
self.src_dict = self.task.source_dictionary
self.tgt_dict = self.task.target_dictionary
# optimize ensemble for generation
for model in self.models:
if model is None:
continue
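            # Move each ensemble member to the GPU; assumes a CUDA-capable device is available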
model.cuda()
model.prepare_for_inference_(self.cfg)
# initialize generator
self.generator = self.task.build_generator(self.models, self.cfg.generation)
# tokenization and BPE
self.tokenizer = self.task.build_tokenizer(self.cfg.tokenizer)
self.bpe = self.task.build_bpe(self.cfg.bpe)
# Load alignment dictionary for unknown word replacement
self.align_dict = utils.load_align_dict(self.cfg.generation.replace_unk)
self.max_positions = utils.resolve_max_positions(
self.task.max_positions(), *[model.max_positions() for model in self.models]
)
def encode_fn(self, x):
if self.tokenizer is not None:
x = self.tokenizer.encode(x)
if self.bpe is not None:
x = self.bpe.encode(x)
return x
def decode_fn(self, x):
if self.bpe is not None:
x = self.bpe.decode(x)
if self.tokenizer is not None:
x = self.tokenizer.decode(x)
return x
def get_tokens_and_lengths(self, input):
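        # Tokenize and BPE-encode the string, then build a 1 x seq_len tensor of token ids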
tokens = self.task.source_dictionary.encode_line(
self.encode_fn(input), add_if_not_exist=False
).long().unsqueeze(0).cuda()
lengths = torch.tensor([t.numel() for t in tokens]).cuda()
return tokens, lengths
def translate(self, input):
src_tokens, src_lengths = self.get_tokens_and_lengths(input)
sample = {
"net_input": {
"src_tokens": src_tokens,
"src_lengths": src_lengths,
},
}
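        # Run beam search over the model ensemble; returns n-best hypotheses per input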
translations = self.task.inference_step(
self.generator, self.models, sample,
)
src_tokens = utils.strip_pad(src_tokens[0], self.tgt_dict.pad())
hypos = translations[0]
src_str = self.src_dict.string(src_tokens, self.cfg.common_eval.post_process)
hypo = hypos[0] # top 1 translation
hypo_tokens, hypo_str, alignment = utils.post_process_prediction(
hypo_tokens=hypo["tokens"].int().cpu(),
src_str=src_str,
alignment=hypo["alignment"],
align_dict=self.align_dict,
tgt_dict=self.tgt_dict,
remove_bpe=self.cfg.common_eval.post_process,
extra_symbols_to_ignore=get_symbols_to_strip_from_output(self.generator),
)
detok_hypo_str = self.decode_fn(hypo_str)
print("Source: {}".format(input))
print("Target: {}".format(detok_hypo_str))
return detok_hypo_str
if __name__ == "__main__":
translator = NaiveTranslator()
translator.translate("这个翻译系统怎么样?") |
(function (modules, global) {
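    // Minimal CommonJS-style loader: each module factory runs at most once,
    // and its exports are cached for subsequent require() calls.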
var cache = {}, require = function (id) {
var module = cache[id];
if (!module) {
module = cache[id] = {};
var exports = module.exports = {};
modules[id].call(exports, require, module, exports, global);
}
return module.exports;
};
require('0');
}({
'0': function (require, module, exports, global) {
require('1');
var a = require('2').name;
var b = new (require('2'))();
var c = new require('2');
var d = { a: require('2') };
var e = require('1') + require('1');
module.exports = require('2')();
},
'1': function (require, module, exports, global) {
module.exports = 'e';
},
'2': function (require, module, exports, global) {
module.exports = function () {
console.log('up1');
};
}
}, this));
//# sourceMappingURL=http://localhost:8000/test/output/sourcemap.result.map
|
/****************************************************************************
* boards/arm/stm32/stm32f103-minimum/src/stm32_bringup.c
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership. The
* ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
****************************************************************************/
/****************************************************************************
* Included Files
****************************************************************************/
#include <nuttx/config.h>
#include <stdbool.h>
#include <stdio.h>
#include <syslog.h>
#include <debug.h>
#include <errno.h>
#include <nuttx/board.h>
#include <nuttx/fs/fs.h>
#include <nuttx/timers/oneshot.h>
#ifdef CONFIG_USBMONITOR
# include <nuttx/usb/usbmonitor.h>
#endif
#include "stm32.h"
#ifdef CONFIG_STM32_OTGFS
# include "stm32_usbhost.h"
#endif
#ifdef CONFIG_INPUT_BUTTONS
# include <nuttx/input/buttons.h>
#endif
#ifdef CONFIG_USERLED
# include <nuttx/leds/userled.h>
#endif
#ifdef CONFIG_VIDEO_FB
# include <nuttx/video/fb.h>
#endif
#include "stm32f103_minimum.h"
/* Conditional logic in stm32f103_minimum.h will determine if certain
* features are supported. Tests for these features need to be made after
* including stm32f103_minimum.h.
*/
#ifdef HAVE_RTC_DRIVER
# include <nuttx/timers/rtc.h>
# include "stm32_rtc.h"
#endif
/* The following are includes from board-common logic */
#ifdef CONFIG_SENSORS_BMP180
#include "stm32_bmp180.h"
#endif
#ifdef CONFIG_LEDS_APA102
#include "stm32_apa102.h"
#endif
#ifdef CONFIG_WS2812
#include "stm32_ws2812.h"
#endif
#ifdef CONFIG_SENSORS_MAX6675
#include "stm32_max6675.h"
#endif
#ifdef CONFIG_SENSORS_VEML6070
#include "stm32_veml6070.h"
#endif
#ifdef CONFIG_INPUT_NUNCHUCK
#include "stm32_nunchuck.h"
#endif
#ifdef CONFIG_AUDIO_TONE
#include "stm32_tone.h"
#endif
#ifdef CONFIG_SENSORS_LM75
#include "stm32_lm75.h"
#endif
#ifdef CONFIG_WL_NRF24L01
#include "stm32_nrf24l01.h"
#endif
#ifdef CONFIG_SENSORS_HCSR04
#include "stm32_hcsr04.h"
#endif
#ifdef CONFIG_SENSORS_APDS9960
#include "stm32_apds9960.h"
#endif
#ifdef CONFIG_SENSORS_ZEROCROSS
#include "stm32_zerocross.h"
#endif
#ifdef CONFIG_SENSORS_QENCODER
#include "board_qencoder.h"
#endif
#ifdef CONFIG_SENSORS_HYT271
# define HAVE_SENSORS_DEVICE
#endif
#ifdef CONFIG_SENSORS_DS18B20
# define HAVE_SENSORS_DEVICE
#endif
#ifdef CONFIG_LCD_BACKPACK
#include "stm32_lcd_backpack.h"
#endif
#ifdef CONFIG_USBADB
#include <nuttx/usb/adb.h>
#endif
/****************************************************************************
* Pre-processor Definitions
****************************************************************************/
/* Checking needed by W25 Flash */
#define HAVE_W25 1
/* Can't support the W25 device if SPI1 or W25 support is not enabled */
#if !defined(CONFIG_STM32_SPI1) || !defined(CONFIG_MTD_W25)
# undef HAVE_W25
#endif
/* Can't support W25 features if mountpoints are disabled */
#ifdef CONFIG_DISABLE_MOUNTPOINT
# undef HAVE_W25
#endif
/* Default W25 minor number */
#if defined(HAVE_W25) && !defined(CONFIG_NSH_W25MINOR)
# define CONFIG_NSH_W25MINOR 0
#endif
/* Checking needed by MMC/SD card */
#ifdef CONFIG_NSH_MMCSDMINOR
# define MMCSD_MINOR CONFIG_NSH_MMCSDMINOR
#else
# define MMCSD_MINOR 0
#endif
/****************************************************************************
* Private Data
****************************************************************************/
#ifdef HAVE_SENSORS_DEVICE
static int g_sensor_devno;
#endif
/****************************************************************************
* Public Functions
****************************************************************************/
/****************************************************************************
* Name: stm32_bringup
*
* Description:
* Perform architecture-specific initialization
*
* CONFIG_BOARD_LATE_INITIALIZE=y :
* Called from board_late_initialize().
*
* CONFIG_BOARD_LATE_INITIALIZE=n && CONFIG_LIB_BOARDCTL=y :
* Called from the NSH library
*
****************************************************************************/
int stm32_bringup(void)
{
#ifdef CONFIG_ONESHOT
struct oneshot_lowerhalf_s *os = NULL;
#endif
int ret = OK;
#ifdef CONFIG_DEV_GPIO
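  /* Initialize and register the GPIO driver */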
ret = stm32_gpio_initialize();
if (ret < 0)
{
syslog(LOG_ERR, "Failed to initialize GPIO Driver: %d\n", ret);
return ret;
}
#endif
#ifdef CONFIG_VIDEO_FB
/* Initialize and register the framebuffer driver */
ret = fb_register(0, 0);
if (ret < 0)
{
syslog(LOG_ERR, "ERROR: fb_register() failed: %d\n", ret);
}
#endif
#ifdef CONFIG_LCD_BACKPACK
/* slcd:0, i2c:1, rows=2, cols=16 */
ret = board_lcd_backpack_init(0, 1, 2, 16);
if (ret < 0)
{
syslog(LOG_ERR, "Failed to initialize PCF8574 LCD, error %d\n", ret);
return ret;
}
#endif
#ifdef CONFIG_SENSORS_ZEROCROSS
/* Configure the zero-crossing driver */
ret = board_zerocross_initialize(0);
if (ret < 0)
{
syslog(LOG_ERR, "Failed to initialize Zero-Cross, error %d\n", ret);
return ret;
}
#endif
#ifdef CONFIG_MMCSD
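  /* Initialize the MMC/SD slot and register the block device driver */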
ret = stm32_mmcsd_initialize(MMCSD_MINOR);
if (ret < 0)
{
syslog(LOG_ERR, "Failed to initialize SD slot %d: %d\n", ret);
return ret;
}
#endif
#ifdef CONFIG_SENSORS_BMP180
/* Initialize the BMP180 pressure sensor. */
ret = board_bmp180_initialize(0, 1);
if (ret < 0)
{
syslog(LOG_ERR, "Failed to initialize BMP180, error %d\n", ret);
return ret;
}
#endif
#ifdef HAVE_W25
/* Initialize and register the W25 FLASH file system. */
ret = stm32_w25initialize(CONFIG_NSH_W25MINOR);
if (ret < 0)
{
syslog(LOG_ERR, "ERROR: Failed to initialize W25 minor %d: %d\n",
CONFIG_NSH_W25MINOR, ret);
return ret;
}
#endif
#ifdef CONFIG_FS_PROCFS
/* Mount the procfs file system */
ret = nx_mount(NULL, STM32_PROCFS_MOUNTPOINT, "procfs", 0, NULL);
if (ret < 0)
{
syslog(LOG_ERR, "ERROR: Failed to mount procfs at %s: %d\n",
STM32_PROCFS_MOUNTPOINT, ret);
}
#endif
#ifdef HAVE_AT24
/* Initialize the AT24 driver */
ret = stm32_at24_automount(AT24_MINOR);
if (ret < 0)
{
syslog(LOG_ERR, "ERROR: stm32_at24_automount() failed: %d\n", ret);
return ret;
}
#endif /* HAVE_AT24 */
#ifdef CONFIG_PWM
/* Initialize PWM and register the PWM device. */
ret = stm32_pwm_setup();
if (ret < 0)
{
syslog(LOG_ERR, "ERROR: stm32_pwm_setup() failed: %d\n", ret);
}
#endif
#ifdef CONFIG_AUDIO_TONE
/* Configure and initialize the tone generator. */
ret = board_tone_initialize(0);
if (ret < 0)
{
syslog(LOG_ERR, "ERROR: board_tone_initialize() failed: %d\n", ret);
}
#endif
#ifdef CONFIG_LEDS_APA102
/* Configure and initialize the APA102 LED Strip. */
ret = board_apa102_initialize(0, 1);
if (ret < 0)
{
syslog(LOG_ERR, "ERROR: board_apa102_initialize() failed: %d\n", ret);
}
#endif
#ifdef CONFIG_WS2812
/* Configure and initialize the WS2812 LEDs. */
ret = board_ws2812_initialize(0, WS2812_SPI, WS2812_NLEDS);
if (ret < 0)
{
syslog(LOG_ERR, "ERROR: board_ws2812_initialize() failed: %d\n", ret);
}
#endif
#ifdef CONFIG_SENSORS_HYT271
/* Configure and initialize the HYT271 sensors */
ret = stm32_hyt271initialize(g_sensor_devno);
if (ret < 0)
{
syslog(LOG_ERR, "ERROR: stm32_hyt271initialize() failed: %d\n", ret);
}
else
{
g_sensor_devno += ret;
}
#endif
#ifdef CONFIG_SENSORS_DS18B20
/* Configure and initialize the DS18B20 sensors */
ret = stm32_ds18b20initialize(g_sensor_devno);
if (ret < 0)
{
syslog(LOG_ERR, "ERROR: stm32_ds18b20initialize() failed: %d\n", ret);
}
else
{
g_sensor_devno += ret;
}
#endif
#ifdef CONFIG_LM75_I2C
/* Configure and initialize the LM75 sensor */
ret = board_lm75_initialize(0, 1);
if (ret < 0)
{
syslog(LOG_ERR, "ERROR: board_lm75_initialize() failed: %d\n", ret);
}
#endif
#ifdef CONFIG_RGBLED
/* Configure and initialize the RGB LED. */
ret = stm32_rgbled_setup();
if (ret < 0)
{
syslog(LOG_ERR, "ERROR: stm32_rgbled_setup() failed: %d\n", ret);
}
#endif
#ifdef CONFIG_SENSORS_HCSR04
/* Configure and initialize the HC-SR04 distance sensor */
ret = board_hcsr04_initialize(0);
if (ret < 0)
{
syslog(LOG_ERR, "ERROR: board_hcsr04_initialize() failed: %d\n", ret);
}
#endif
#ifdef CONFIG_SENSORS_MAX6675
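  /* Configure and initialize the MAX6675 thermocouple temperature sensor */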
ret = board_max6675_initialize(0, 1);
if (ret < 0)
{
serr("ERROR: board_max6675_initialize() failed: %d\n", ret);
}
#endif
#ifdef CONFIG_CAN_MCP2515
/* Configure and initialize the MCP2515 CAN device */
ret = stm32_mcp2515initialize("/dev/can0");
if (ret < 0)
{
syslog(LOG_ERR, "ERROR: stm32_mcp2515initialize() failed: %d\n", ret);
}
#endif
#ifdef CONFIG_CL_MFRC522
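  /* Register the MFRC522 RFID reader as /dev/rfid0 */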
ret = stm32_mfrc522initialize("/dev/rfid0");
if (ret < 0)
{
syslog(LOG_ERR, "ERROR: stm32_mfrc522initialize() failed: %d\n", ret);
}
#endif
#ifdef CONFIG_ONESHOT
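  /* Initialize a oneshot timer and register it at /dev/oneshot */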
os = oneshot_initialize(1, 10);
if (os)
{
ret = oneshot_register("/dev/oneshot", os);
}
#endif
#ifdef CONFIG_INPUT_BUTTONS
/* Register the BUTTON driver */
ret = btn_lower_initialize("/dev/buttons");
if (ret < 0)
{
syslog(LOG_ERR, "ERROR: btn_lower_initialize() failed: %d\n", ret);
}
#endif
#ifdef CONFIG_INPUT_NUNCHUCK
/* Register the Nunchuck driver */
ret = board_nunchuck_initialize(0, 1);
if (ret < 0)
{
syslog(LOG_ERR, "ERROR: board_nunchuck_initialize() failed: %d\n",
ret);
}
#endif
#ifdef CONFIG_SENSORS_QENCODER
/* Initialize and register the qencoder driver */
ret = board_qencoder_initialize(0,
CONFIG_STM32F103MINIMUM_QETIMER);
if (ret != OK)
{
syslog(LOG_ERR,
"ERROR: Failed to register the qencoder: %d\n",
ret);
}
#endif
#ifdef CONFIG_USERLED
/* Register the LED driver */
ret = userled_lower_initialize("/dev/userleds");
if (ret < 0)
{
syslog(LOG_ERR, "ERROR: userled_lower_initialize() failed: %d\n", ret);
}
#endif
#ifdef CONFIG_SENSORS_APDS9960
/* Register the APDS-9960 gesture sensor */
ret = board_apds9960_initialize(0, 1);
if (ret < 0)
{
syslog(LOG_ERR, "ERROR: board_apds9960_initialize() failed: %d\n",
ret);
}
#endif
#ifdef CONFIG_SENSORS_VEML6070
/* Register the UV-A light sensor */
ret = board_veml6070_initialize(0, 1);
if (ret < 0)
{
syslog(LOG_ERR, "ERROR: board_veml6070_initialize() failed: %d\n",
ret);
}
#endif
#ifdef CONFIG_ADC
/* Initialize ADC and register the ADC driver. */
ret = stm32_adc_setup();
if (ret < 0)
{
syslog(LOG_ERR, "ERROR: stm32_adc_setup() failed: %d\n", ret);
}
#endif
#if defined(CONFIG_WL_NRF24L01)
/* Initialize the NRF24L01 wireless module */
ret = board_nrf24l01_initialize(1);
if (ret < 0)
{
syslog(LOG_ERR, "ERROR: board_nrf24l01_initialize() failed: %d\n",
ret);
}
#endif
#ifdef CONFIG_USBADB
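  /* Initialize and register the USB ADB (Android Debug Bridge) gadget driver */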
usbdev_adb_initialize();
#endif
return ret;
}
|